Major cleanup of the Emotion GStreamer 1.x video sink

Next step: state handling in the GStreamer backend

Reviewers: cedric

CC: cedric

Differential Revision: https://phab.enlightenment.org/D431

Signed-off-by: Cedric Bail <cedric.bail@free.fr>
This commit is contained in:
Sebastian Dröge 2014-01-04 11:41:37 +09:00 committed by Cedric Bail
parent 319766f837
commit 9637c2b835
4 changed files with 443 additions and 600 deletions

View File

@ -5,23 +5,26 @@
#include "emotion_gstreamer.h"
Emotion_Gstreamer_Buffer *
emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
emotion_gstreamer_buffer_alloc(EmotionVideoSink *sink,
GstBuffer *buffer,
Eina_Bool preroll)
GstVideoInfo *info,
Evas_Colorspace eformat,
int eheight,
Evas_Video_Convert_Cb func)
{
Emotion_Gstreamer_Buffer *send;
if (!sink->ev) return NULL;
if (!sink->priv->emotion_object) return NULL;
send = malloc(sizeof (Emotion_Gstreamer_Buffer));
if (!send) return NULL;
send->sink = sink;
send->sink = gst_object_ref(sink);
send->frame = gst_buffer_ref(buffer);
send->preroll = preroll;
send->force = EINA_FALSE;
sink->ev->out++;
send->ev = sink->ev;
send->info = *info;
send->eformat = eformat;
send->eheight = eheight;
send->func = func;
return send;
}
@ -29,14 +32,8 @@ emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
void
emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send)
{
send->ev->in++;
if (send->ev->in == send->ev->out
&& send->ev->threads == NULL
&& send->ev->delete_me)
send->ev->api->del(send->ev);
gst_buffer_unref(send->frame);
gst_object_unref(send->sink);
gst_buffer_replace(&send->frame, NULL);
free(send);
}
@ -52,6 +49,7 @@ emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
if (!send) return NULL;
ev->out++;
send->ev = ev;
send->msg = gst_message_ref(msg);

View File

@ -13,10 +13,14 @@ static int _emotion_init_count = 0;
static void _for_each_tag (GstTagList const* list, gchar const* tag, void *data);
static void _free_metadata (Emotion_Gstreamer_Metadata *m);
static GstElement * _create_pipeline (Emotion_Gstreamer_Video *ev, Evas_Object *o, const char *uri);
static GstBusSyncReply _eos_sync_fct(GstBus *bus,
GstMessage *message,
gpointer data);
static Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev, Eina_Bool force);
/* Module interface */
static const char *
@ -70,12 +74,6 @@ emotion_visualization_element_name_get(Emotion_Vis visualisation)
static void
em_cleanup(Emotion_Gstreamer_Video *ev)
{
if (ev->send)
{
emotion_gstreamer_buffer_free(ev->send);
ev->send = NULL;
}
if (ev->eos_bus)
{
gst_object_unref(GST_OBJECT(ev->eos_bus));
@ -88,25 +86,10 @@ em_cleanup(Emotion_Gstreamer_Video *ev)
ev->metadata = NULL;
}
if (ev->last_buffer)
{
gst_buffer_unref(ev->last_buffer);
ev->last_buffer = NULL;
}
if (!ev->stream)
{
evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
ev->stream = EINA_TRUE;
}
if (ev->pipeline)
{
gstreamer_video_sink_new(ev, ev->obj, NULL);
g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
gst_element_set_state(ev->pipeline, GST_STATE_NULL);
g_object_set(G_OBJECT(ev->esink), "emotion-object", NULL, NULL);
gst_object_unref(ev->pipeline);
ev->pipeline = NULL;
@ -178,7 +161,7 @@ em_file_open(void *video,
uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
DBG("setting file to '%s'", uri);
ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, uri);
ev->pipeline = _create_pipeline (ev, ev->obj, uri);
if (sbuf) eina_strbuf_free(sbuf);
if (!ev->pipeline)
@ -233,7 +216,6 @@ em_play(void *video,
if (ev->pipeline_parsed)
gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
ev->play = 1;
ev->play_started = 1;
}
static void
@ -1392,12 +1374,6 @@ _eos_main_fct(void *data)
ev = send->ev;
msg = send->msg;
if (ev->play_started && !ev->delete_me)
{
_emotion_playback_started(ev->obj);
ev->play_started = 0;
}
switch (GST_MESSAGE_TYPE(msg))
{
case GST_MESSAGE_EOS:
@ -1425,6 +1401,28 @@ _eos_main_fct(void *data)
case GST_MESSAGE_ASYNC_DONE:
if (!ev->delete_me) _emotion_seek_done(ev->obj);
break;
case GST_MESSAGE_STATE_CHANGED:
{
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
INF("Element %s changed state from %s to %s.",
GST_OBJECT_NAME(msg->src),
gst_element_state_get_name(old_state),
gst_element_state_get_name(new_state));
if (GST_MESSAGE_SRC(msg) == GST_OBJECT(ev->pipeline) && new_state >= GST_STATE_PAUSED && !ev->play_started && !ev->delete_me)
{
_emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
/* FIXME: This is reentrant because of _emotion_open_done() */
if (!ev->play_started)
{
_emotion_playback_started(ev->obj);
ev->play_started = 1;
}
}
break;
}
case GST_MESSAGE_STREAM_STATUS:
break;
case GST_MESSAGE_ERROR:
@ -1454,6 +1452,7 @@ _eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
case GST_MESSAGE_TAG:
case GST_MESSAGE_ASYNC_DONE:
case GST_MESSAGE_STREAM_STATUS:
case GST_MESSAGE_STATE_CHANGED:
INF("bus say: %s [%i - %s]",
GST_MESSAGE_SRC_NAME(msg),
GST_MESSAGE_TYPE(msg),
@ -1467,18 +1466,6 @@ _eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
}
break;
case GST_MESSAGE_STATE_CHANGED:
{
GstState old_state, new_state;
gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
INF("Element %s changed state from %s to %s.",
GST_OBJECT_NAME(msg->src),
gst_element_state_get_name(old_state),
gst_element_state_get_name(new_state));
break;
}
case GST_MESSAGE_ERROR:
{
GError *error;
@ -1524,7 +1511,7 @@ _eos_sync_fct(GstBus *bus EINA_UNUSED, GstMessage *msg, gpointer data)
return GST_BUS_DROP;
}
Eina_Bool
static Eina_Bool
_emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
Eina_Bool force)
{
@ -1610,13 +1597,171 @@ _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
em_audio_channel_volume_set(ev, ev->volume);
em_audio_channel_mute_set(ev, ev->audio_mute);
if (ev->play_started)
{
_emotion_playback_started(ev->obj);
ev->play_started = 0;
}
_emotion_open_done(ev->obj);
return EINA_TRUE;
}
/* Ecore_Thread worker: block until the pipeline has prerolled in
 * PAUSED, so the main loop can safely query/parse it afterwards.
 *
 * @data   the Emotion_Gstreamer_Video owning the pipeline
 * @thread the worker thread; only used to detect cancellation
 *
 * Fix: gst_element_get_state() returns a GstStateChangeReturn enum,
 * not a gboolean — declare 'res' with the correct type (the old
 * gboolean only worked because it aliases gint). */
static void
_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;
   GstStateChangeReturn res;

   if (ecore_thread_check(thread) || !ev->pipeline) return;

   gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
   res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
   if (res == GST_STATE_CHANGE_NO_PREROLL)
     {
        /* Live sources can never preroll in PAUSED; they must be set
         * to PLAYING to produce data, so do that instead. */
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
        gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
     }
}
/* Ecore_Thread cancel callback: unregister the finished worker from
 * the bookkeeping list, optionally dump the pipeline graph for
 * debugging, and finish a pending delete once no buffers (in == out)
 * are still in flight. */
static void
_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;
   const char *dot_dir = getenv("EMOTION_GSTREAMER_DOT");

   ev->threads = eina_list_remove(ev->threads, thread);

   if (dot_dir)
     GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
                                       GST_DEBUG_GRAPH_SHOW_ALL, dot_dir);

   if (ev->delete_me && ev->in == ev->out)
     ev->api->del(ev);
}
/* Ecore_Thread end callback: the preroll worker completed normally.
 * Resume PLAYING when playback was requested while prerolling, then
 * either finalize a pending delete (no outstanding buffers) or parse
 * the now-prerolled pipeline for stream information. */
static void
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;
   const char *dot_dir = getenv("EMOTION_GSTREAMER_DOT");

   ev->threads = eina_list_remove(ev->threads, thread);

   if (ev->play)
     gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);

   if (dot_dir)
     GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
                                       GST_DEBUG_GRAPH_SHOW_ALL, dot_dir);

   if (ev->delete_me && ev->in == ev->out)
     ev->api->del(ev);
   else
     _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
}
/* Build the playback pipeline for @uri and hook up the emotion sink.
 *
 * @ev  backend state; on success ev->pipeline/ev->sink/ev->esink are
 *      filled in, ev->uri is updated and a preroll worker is spawned
 * @o   the emotion Evas object passed to the sink
 * @uri media URI (or a webcam id resolved via emotion_webcam_custom_get)
 *
 * Returns the top-level element (playbin, or the custom launch bin for
 * webcams) on success, NULL on failure.
 *
 * Fix: the error path used to call gst_object_unref() unconditionally
 * on 'esink' and 'bin', which are NULL when bin/esink creation fails —
 * g_object_unref(NULL) raises a GLib critical. Guard each unref. */
static GstElement *
_create_pipeline (Emotion_Gstreamer_Video *ev,
                  Evas_Object *o,
                  const char *uri)
{
   GstElement *playbin;
   GstElement *bin = NULL;
   GstElement *esink = NULL;
   GstElement *queue = NULL;
   GstPad *pad;
   int flags;
   const char *launch;

   if (!uri)
     return NULL;

   launch = emotion_webcam_custom_get(uri);
   if (launch)
     {
        GError *error = NULL;

        /* FIXME: This code path is broken in many places and won't
         * work as is */
        playbin = gst_parse_bin_from_description(launch, 1, &error);
        if (!playbin)
          {
             ERR("Unable to setup command : '%s' got error '%s'.", launch, error->message);
             g_error_free(error);
             return NULL;
          }
        if (error)
          {
             WRN("got recoverable error '%s' for command : '%s'.", error->message, launch);
             g_error_free(error);
          }
     }
   else
     {
        playbin = gst_element_factory_make("playbin", "playbin");
        if (!playbin)
          {
             ERR("Unable to create 'playbin' GstElement.");
             return NULL;
          }
     }

   bin = gst_bin_new(NULL);
   if (!bin)
     {
       ERR("Unable to create GstBin !");
       goto unref_pipeline;
     }

   esink = gst_element_factory_make("emotion-sink", "sink");
   if (!esink)
     {
        ERR("Unable to create 'emotion-sink' GstElement.");
        goto unref_pipeline;
     }

   g_object_set(G_OBJECT(esink), "emotion-object", o, NULL);

   /* We need queue to force each video sink to be in its own thread */
   queue = gst_element_factory_make("queue", "equeue");
   if (!queue)
     {
        ERR("Unable to create 'queue' GstElement.");
        goto unref_pipeline;
     }

   gst_bin_add_many(GST_BIN(bin), queue, esink, NULL);
   gst_element_link_many(queue, esink, NULL);

   /* Expose the queue's sink pad as the bin's ghost pad so the bin can
    * be plugged in as a regular video sink. */
   pad = gst_element_get_static_pad(queue, "sink");
   gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));
   gst_object_unref(pad);

   if (launch)
     {
        g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
     }
   else
     {
        g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
        g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
        g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
        g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
     }

   eina_stringshare_replace(&ev->uri, uri);
   ev->pipeline = playbin;
   ev->sink = bin;
   ev->esink = esink;
   ev->threads = eina_list_append(ev->threads,
                                  ecore_thread_run(_emotion_gstreamer_pause,
                                                   _emotion_gstreamer_end,
                                                   _emotion_gstreamer_cancel,
                                                   ev));

   /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
   /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   return playbin;

 unref_pipeline:
   /* Elements already added to the bin are owned by it; only drop the
    * references we still hold, and never pass NULL to unref. */
   if (esink) gst_object_unref(esink);
   if (bin) gst_object_unref(bin);
   gst_object_unref(playbin);
   return NULL;
}

View File

@ -25,9 +25,9 @@ typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data,
unsigned int h,
unsigned int output_height);
typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate;
typedef struct _EvasVideoSink EvasVideoSink;
typedef struct _EvasVideoSinkClass EvasVideoSinkClass;
typedef struct _EmotionVideoSinkPrivate EmotionVideoSinkPrivate;
typedef struct _EmotionVideoSink EmotionVideoSink;
typedef struct _EmotionVideoSinkClass EmotionVideoSinkClass;
typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;
typedef struct _Emotion_Gstreamer_Metadata Emotion_Gstreamer_Metadata;
typedef struct _Emotion_Gstreamer_Buffer Emotion_Gstreamer_Buffer;
@ -60,9 +60,6 @@ struct _Emotion_Gstreamer_Video
/* eos */
GstBus *eos_bus;
/* We need to keep a copy of the last inserted buffer as evas doesn't copy YUV data around */
GstBuffer *last_buffer;
/* Evas object */
Evas_Object *obj;
@ -70,68 +67,56 @@ struct _Emotion_Gstreamer_Video
double position;
double volume;
volatile int seek_to;
volatile int get_poslen;
Emotion_Gstreamer_Metadata *metadata;
const char *uri;
Emotion_Gstreamer_Buffer *send;
EvasVideoSinkPrivate *sink_data;
int in;
int out;
Emotion_Vis vis;
int in;
int out;
Eina_Bool play : 1;
Eina_Bool video_mute : 1;
Eina_Bool audio_mute : 1;
Eina_Bool play_started : 1;
Eina_Bool pipeline_parsed : 1;
Eina_Bool delete_me : 1;
};
struct _EmotionVideoSink {
/*< private >*/
GstVideoSink parent;
EmotionVideoSinkPrivate *priv;
};
struct _EmotionVideoSinkClass {
/*< private >*/
GstVideoSinkClass parent_class;
};
struct _EmotionVideoSinkPrivate {
Evas_Object *emotion_object;
Evas_Object *evas_object;
GstVideoInfo info;
unsigned int eheight;
Evas_Colorspace eformat;
Evas_Video_Convert_Cb func;
Eina_Lock m;
Eina_Condition c;
Emotion_Gstreamer_Buffer *send;
/* We need to keep a copy of the last inserted buffer as evas doesn't copy YUV data around */
GstBuffer *last_buffer;
int frames;
int flapse;
double rtime;
double rlapse;
struct
{
double width;
double height;
} fill;
Eina_Bool play : 1;
Eina_Bool play_started : 1;
Eina_Bool video_mute : 1;
Eina_Bool audio_mute : 1;
Eina_Bool pipeline_parsed : 1;
Eina_Bool delete_me : 1;
Eina_Bool kill_buffer : 1;
Eina_Bool stream : 1;
};
struct _EvasVideoSink {
/*< private >*/
GstVideoSink parent;
EvasVideoSinkPrivate *priv;
};
struct _EvasVideoSinkClass {
/*< private >*/
GstVideoSinkClass parent_class;
};
struct _EvasVideoSinkPrivate {
Evas_Object *o;
Emotion_Gstreamer_Video *ev;
Evas_Video_Convert_Cb func;
GstVideoInfo info;
unsigned int eheight;
Evas_Colorspace eformat;
Eina_Lock m;
Eina_Condition c;
// If this is TRUE all processing should finish ASAP
// This is necessary because there could be a race between
// unlock() and render(), where unlock() wins, signals the
@ -145,13 +130,14 @@ struct _EvasVideoSinkPrivate {
struct _Emotion_Gstreamer_Buffer
{
Emotion_Gstreamer_Video *ev;
EvasVideoSinkPrivate *sink;
EmotionVideoSink *sink;
GstBuffer *frame;
Eina_Bool preroll : 1;
Eina_Bool force : 1;
GstVideoInfo info;
Evas_Colorspace eformat;
int eheight;
Evas_Video_Convert_Cb func;
};
struct _Emotion_Gstreamer_Message
@ -189,44 +175,41 @@ extern Eina_Bool debug_fps;
#endif
#define CRI(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type()
#define EMOTION_TYPE_VIDEO_SINK emotion_video_sink_get_type()
#define EVAS_VIDEO_SINK(obj) \
#define EMOTION_VIDEO_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), \
EVAS_TYPE_VIDEO_SINK, EvasVideoSink))
EMOTION_TYPE_VIDEO_SINK, EmotionVideoSink))
#define EVAS_VIDEO_SINK_CLASS(klass) \
#define EMOTION_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), \
EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
EMOTION_TYPE_VIDEO_SINK, EmotionVideoSinkClass))
#define EVAS_IS_VIDEO_SINK(obj) \
#define EMOTION_IS_VIDEO_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), \
EVAS_TYPE_VIDEO_SINK))
EMOTION_TYPE_VIDEO_SINK))
#define EVAS_IS_VIDEO_SINK_CLASS(klass) \
#define EMOTION_IS_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), \
EVAS_TYPE_VIDEO_SINK))
EMOTION_TYPE_VIDEO_SINK))
#define EVAS_VIDEO_SINK_GET_CLASS(obj) \
#define EMOTION_VIDEO_SINK_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), \
EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
Evas_Object *obj,
const char *uri);
EMOTION_TYPE_VIDEO_SINK, EmotionVideoSinkClass))
gboolean gstreamer_plugin_init(GstPlugin *plugin);
Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
GstBuffer *buffer,
Eina_Bool preroll);
Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EmotionVideoSink *sink,
GstBuffer *buffer,
GstVideoInfo *info,
Evas_Colorspace eformat,
int eheight,
Evas_Video_Convert_Cb func);
void emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send);
Emotion_Gstreamer_Message *emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
GstMessage *msg);
void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send);
Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
Eina_Bool force);
typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion;

View File

@ -8,8 +8,8 @@ static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
GST_PAD_SINK, GST_PAD_ALWAYS,
GST_STATIC_CAPS(GST_VIDEO_CAPS_MAKE("{ I420, YV12, YUY2, NV12, BGRx, BGR, BGRA }")));
GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug
GST_DEBUG_CATEGORY_STATIC(emotion_video_sink_debug);
#define GST_CAT_DEFAULT emotion_video_sink_debug
enum {
LAST_SIGNAL
@ -17,39 +17,34 @@ enum {
enum {
PROP_0,
PROP_EVAS_OBJECT,
PROP_WIDTH,
PROP_HEIGHT,
PROP_EV,
PROP_EMOTION_OBJECT,
PROP_LAST
};
#define _do_init \
GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \
GST_DEBUG_CATEGORY_INIT(emotion_video_sink_debug, \
"emotion-sink", \
0, \
"emotion video sink")
#define parent_class evas_video_sink_parent_class
G_DEFINE_TYPE_WITH_CODE(EvasVideoSink,
evas_video_sink,
#define parent_class emotion_video_sink_parent_class
G_DEFINE_TYPE_WITH_CODE(EmotionVideoSink,
emotion_video_sink,
GST_TYPE_VIDEO_SINK,
_do_init);
static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
static void evas_video_sink_main_render(void *data);
static void unlock_buffer_mutex(EmotionVideoSinkPrivate* priv);
static void emotion_video_sink_main_render(void *data);
static void
evas_video_sink_init(EvasVideoSink* sink)
emotion_video_sink_init(EmotionVideoSink* sink)
{
EvasVideoSinkPrivate* priv;
EmotionVideoSinkPrivate* priv;
INF("sink init");
sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
priv->o = NULL;
priv->info.width = 0;
priv->info.height = 0;
sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EMOTION_TYPE_VIDEO_SINK, EmotionVideoSinkPrivate);
gst_video_info_init (&priv->info);
priv->eheight = 0;
priv->func = NULL;
priv->eformat = EVAS_COLORSPACE_ARGB8888;
@ -62,41 +57,42 @@ evas_video_sink_init(EvasVideoSink* sink)
static void
_cleanup_priv(void *data, Evas *e EINA_UNUSED, Evas_Object *obj, void *event_info EINA_UNUSED)
{
EvasVideoSinkPrivate* priv;
EmotionVideoSinkPrivate* priv;
priv = data;
eina_lock_take(&priv->m);
if (priv->o == obj)
priv->o = NULL;
if (priv->evas_object == obj)
priv->evas_object = NULL;
eina_lock_release(&priv->m);
}
static void
evas_video_sink_set_property(GObject * object, guint prop_id,
emotion_video_sink_set_property(GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
EvasVideoSink* sink;
EvasVideoSinkPrivate* priv;
EmotionVideoSink* sink;
EmotionVideoSinkPrivate* priv;
sink = EVAS_VIDEO_SINK (object);
sink = EMOTION_VIDEO_SINK (object);
priv = sink->priv;
switch (prop_id) {
case PROP_EVAS_OBJECT:
case PROP_EMOTION_OBJECT:
eina_lock_take(&priv->m);
if (priv->o)
evas_object_event_callback_del(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv);
priv->o = g_value_get_pointer (value);
INF("sink set Evas_Object %p.", priv->o);
if (priv->o)
evas_object_event_callback_add(priv->o, EVAS_CALLBACK_DEL, _cleanup_priv, priv);
eina_lock_release(&priv->m);
break;
case PROP_EV:
INF("sink set ev.");
eina_lock_take(&priv->m);
priv->ev = g_value_get_pointer (value);
if (priv->evas_object)
evas_object_event_callback_del(priv->evas_object, EVAS_CALLBACK_DEL, _cleanup_priv);
priv->emotion_object = g_value_get_pointer (value);
INF("sink set Emotion object %p", priv->emotion_object);
if (priv->emotion_object)
{
priv->evas_object = emotion_object_image_get(priv->emotion_object);
if (priv->evas_object)
{
evas_object_event_callback_add(priv->evas_object, EVAS_CALLBACK_DEL, _cleanup_priv, priv);
evas_object_image_pixels_get_callback_set(priv->evas_object, NULL, NULL);
}
}
eina_lock_release(&priv->m);
break;
default:
@ -107,38 +103,20 @@ evas_video_sink_set_property(GObject * object, guint prop_id,
}
static void
evas_video_sink_get_property(GObject * object, guint prop_id,
emotion_video_sink_get_property(GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
EvasVideoSink* sink;
EvasVideoSinkPrivate* priv;
EmotionVideoSink* sink;
EmotionVideoSinkPrivate* priv;
sink = EVAS_VIDEO_SINK (object);
sink = EMOTION_VIDEO_SINK (object);
priv = sink->priv;
switch (prop_id) {
case PROP_EVAS_OBJECT:
case PROP_EMOTION_OBJECT:
INF("sink get property.");
eina_lock_take(&priv->m);
g_value_set_pointer(value, priv->o);
eina_lock_release(&priv->m);
break;
case PROP_WIDTH:
INF("sink get width.");
eina_lock_take(&priv->m);
g_value_set_int(value, priv->info.width);
eina_lock_release(&priv->m);
break;
case PROP_HEIGHT:
INF("sink get height.");
eina_lock_take(&priv->m);
g_value_set_int (value, priv->eheight);
eina_lock_release(&priv->m);
break;
case PROP_EV:
INF("sink get ev.");
eina_lock_take(&priv->m);
g_value_set_pointer (value, priv->ev);
g_value_set_pointer(value, priv->emotion_object);
eina_lock_release(&priv->m);
break;
default:
@ -149,14 +127,14 @@ evas_video_sink_get_property(GObject * object, guint prop_id,
}
static void
evas_video_sink_dispose(GObject* object)
emotion_video_sink_dispose(GObject* object)
{
EvasVideoSink* sink;
EvasVideoSinkPrivate* priv;
EmotionVideoSink* sink;
EmotionVideoSinkPrivate* priv;
INF("dispose.");
sink = EVAS_VIDEO_SINK(object);
sink = EMOTION_VIDEO_SINK(object);
priv = sink->priv;
eina_lock_free(&priv->m);
@ -168,14 +146,14 @@ evas_video_sink_dispose(GObject* object)
/**** BaseSink methods ****/
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
gboolean emotion_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
EvasVideoSink* sink;
EvasVideoSinkPrivate* priv;
EmotionVideoSink* sink;
EmotionVideoSinkPrivate* priv;
GstVideoInfo info;
unsigned int i;
sink = EVAS_VIDEO_SINK(bsink);
sink = EMOTION_VIDEO_SINK(bsink);
priv = sink->priv;
if (!gst_video_info_from_caps(&info, caps))
@ -197,8 +175,6 @@ gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
priv->eheight = (priv->eheight >> 1) << 1;
}
if (priv->ev)
priv->ev->kill_buffer = EINA_TRUE;
return TRUE;
}
@ -207,56 +183,72 @@ gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
}
static gboolean
evas_video_sink_start(GstBaseSink* base_sink)
emotion_video_sink_start(GstBaseSink* base_sink)
{
EvasVideoSinkPrivate* priv;
EmotionVideoSinkPrivate* priv;
gboolean res = TRUE;
INF("sink start");
priv = EVAS_VIDEO_SINK(base_sink)->priv;
priv = EMOTION_VIDEO_SINK(base_sink)->priv;
eina_lock_take(&priv->m);
if (!priv->o)
if (!priv->emotion_object)
res = FALSE;
else
priv->unlocked = EINA_FALSE;
eina_lock_release(&priv->m);
priv->frames = priv->rlapse = priv->flapse = 0;
return res;
}
static gboolean
evas_video_sink_stop(GstBaseSink* base_sink)
emotion_video_sink_stop(GstBaseSink* base_sink)
{
EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
EmotionVideoSinkPrivate* priv = EMOTION_VIDEO_SINK(base_sink)->priv;
INF("sink stop");
gst_buffer_replace(&priv->last_buffer, NULL);
eina_lock_take(&priv->m);
/* If there still is a pending frame, neutralize it */
if (priv->send)
{
gst_buffer_replace(&priv->send->frame, NULL);
priv->send = NULL;
}
unlock_buffer_mutex(priv);
eina_lock_release(&priv->m);
return TRUE;
}
static gboolean
evas_video_sink_unlock(GstBaseSink* object)
emotion_video_sink_unlock(GstBaseSink* object)
{
EvasVideoSink* sink;
EmotionVideoSink* sink;
INF("sink unlock");
sink = EVAS_VIDEO_SINK(object);
sink = EMOTION_VIDEO_SINK(object);
eina_lock_take(&sink->priv->m);
unlock_buffer_mutex(sink->priv);
eina_lock_release(&sink->priv->m);
return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
(object), TRUE);
}
static gboolean
evas_video_sink_unlock_stop(GstBaseSink* object)
emotion_video_sink_unlock_stop(GstBaseSink* object)
{
EvasVideoSink* sink;
EvasVideoSinkPrivate* priv;
EmotionVideoSink* sink;
EmotionVideoSinkPrivate* priv;
sink = EVAS_VIDEO_SINK(object);
sink = EMOTION_VIDEO_SINK(object);
priv = sink->priv;
INF("sink unlock stop");
@ -270,62 +262,40 @@ evas_video_sink_unlock_stop(GstBaseSink* object)
}
static GstFlowReturn
evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
emotion_video_sink_show_frame(GstVideoSink* vsink, GstBuffer* buffer)
{
Emotion_Gstreamer_Buffer *send;
EvasVideoSinkPrivate *priv;
EvasVideoSink *sink;
INF("sink preroll %p [%" G_GSIZE_FORMAT "]", buffer, gst_buffer_get_size(buffer));
sink = EVAS_VIDEO_SINK(bsink);
priv = sink->priv;
if (gst_buffer_get_size(buffer) <= 0)
{
WRN("empty buffer");
return GST_FLOW_OK;
}
send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
if (send)
{
_emotion_pending_ecore_begin();
ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
}
return GST_FLOW_OK;
}
static GstFlowReturn
evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
{
Emotion_Gstreamer_Buffer *send;
EvasVideoSinkPrivate *priv;
EvasVideoSink *sink;
EmotionVideoSinkPrivate *priv;
EmotionVideoSink *sink;
INF("sink render %p", buffer);
sink = EVAS_VIDEO_SINK(bsink);
sink = EMOTION_VIDEO_SINK(vsink);
priv = sink->priv;
eina_lock_take(&priv->m);
if (priv->unlocked) {
ERR("LOCKED");
eina_lock_release(&priv->m);
return GST_FLOW_FLUSHING;
}
send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
send = emotion_gstreamer_buffer_alloc(sink, buffer, &priv->info, priv->eformat, priv->eheight, priv->func);
/* If there still is a pending frame, neutralize it */
if (priv->send)
{
gst_buffer_replace(&priv->send->frame, NULL);
}
priv->send = send;
if (!send) {
eina_lock_release(&priv->m);
return GST_FLOW_ERROR;
}
_emotion_pending_ecore_begin();
ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
ecore_main_loop_thread_safe_call_async(emotion_video_sink_main_render, send);
eina_condition_wait(&priv->c);
eina_lock_release(&priv->m);
@ -334,141 +304,121 @@ evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
}
static void
_update_emotion_fps(Emotion_Gstreamer_Video *ev)
_update_emotion_fps(EmotionVideoSinkPrivate *priv)
{
double tim;
if (!debug_fps) return;
tim = ecore_time_get();
ev->frames++;
priv->frames++;
if (ev->rlapse == 0.0)
if (priv->rlapse == 0.0)
{
ev->rlapse = tim;
ev->flapse = ev->frames;
priv->rlapse = tim;
priv->flapse = priv->frames;
}
else if ((tim - ev->rlapse) >= 0.5)
else if ((tim - priv->rlapse) >= 0.5)
{
printf("FRAME: %i, FPS: %3.1f\n",
ev->frames,
(ev->frames - ev->flapse) / (tim - ev->rlapse));
ev->rlapse = tim;
ev->flapse = ev->frames;
priv->frames,
(priv->frames - priv->flapse) / (tim - priv->rlapse));
priv->rlapse = tim;
priv->flapse = priv->frames;
}
}
static void
evas_video_sink_main_render(void *data)
emotion_video_sink_main_render(void *data)
{
Emotion_Gstreamer_Buffer *send;
Emotion_Gstreamer_Video *ev = NULL;
EvasVideoSinkPrivate *priv = NULL;
GstBuffer *buffer;
EmotionVideoSinkPrivate *priv;
GstBuffer *buffer = NULL;
GstMapInfo map;
unsigned char *evas_data;
Eina_Bool preroll = EINA_FALSE;
double ratio;
send = data;
if (!send) goto exit_point;
priv = send->sink->priv;
priv = send->sink;
buffer = send->frame;
preroll = send->preroll;
ev = send->ev;
/* frame after cleanup */
if (!preroll && !ev->last_buffer)
{
priv = NULL;
goto exit_point;
}
if (!priv || !priv->o || priv->unlocked)
eina_lock_take(&priv->m);
/* Sink was shut down already or this is a stale
* frame */
if (priv->send != send)
goto exit_point;
if (!send->frame)
goto exit_point;
if (ev->send && send != ev->send)
priv->send = NULL;
if (!priv->emotion_object || priv->unlocked)
goto exit_point;
/* Can happen if cleanup_priv was called */
if (!priv->evas_object)
{
emotion_gstreamer_buffer_free(ev->send);
ev->send = NULL;
priv->evas_object = emotion_object_image_get(priv->emotion_object);
if (priv->evas_object)
{
evas_object_event_callback_add(priv->evas_object, EVAS_CALLBACK_DEL, _cleanup_priv, priv);
evas_object_image_pixels_get_callback_set(priv->evas_object, NULL, NULL);
}
}
if (!ev->stream && !send->force)
{
ev->send = send;
_emotion_frame_new(ev->obj);
evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight);
goto exit_stream;
}
if (!priv->evas_object)
goto exit_point;
buffer = gst_buffer_ref(send->frame);
if (!gst_buffer_map(buffer, &map, GST_MAP_READ))
goto exit_stream;
goto exit_point;
INF("sink main render [%i, %i] (source height: %i)", priv->info.width, priv->eheight, priv->info.height);
INF("sink main render [%i, %i] (source height: %i)", send->info.width, send->eheight, send->info.height);
evas_object_image_alpha_set(priv->o, 0);
evas_object_image_colorspace_set(priv->o, priv->eformat);
evas_object_image_size_set(priv->o, priv->info.width, priv->eheight);
evas_object_image_alpha_set(priv->evas_object, 0);
evas_object_image_colorspace_set(priv->evas_object, send->eformat);
evas_object_image_size_set(priv->evas_object, send->info.width, send->eheight);
evas_data = evas_object_image_data_get(priv->o, 1);
evas_data = evas_object_image_data_get(priv->evas_object, 1);
if (priv->func)
priv->func(evas_data, map.data, priv->info.width, priv->info.height, priv->eheight);
if (send->func)
send->func(evas_data, map.data, send->info.width, send->info.height, send->eheight);
else
WRN("No way to decode %x colorspace !", priv->eformat);
WRN("No way to decode %x colorspace !", send->eformat);
gst_buffer_unmap(buffer, &map);
evas_object_image_data_set(priv->o, evas_data);
evas_object_image_data_update_add(priv->o, 0, 0, priv->info.width, priv->eheight);
evas_object_image_pixels_dirty_set(priv->o, 0);
evas_object_image_data_set(priv->evas_object, evas_data);
evas_object_image_data_update_add(priv->evas_object, 0, 0, send->info.width, send->eheight);
evas_object_image_pixels_dirty_set(priv->evas_object, 0);
_update_emotion_fps(ev);
_update_emotion_fps(priv);
ratio = (double) priv->info.width / (double) priv->eheight;
ratio *= (double) priv->info.par_n / (double) priv->info.par_d;
ratio = (double) send->info.width / (double) send->eheight;
ratio *= (double) send->info.par_n / (double) send->info.par_d;
_emotion_frame_resize(ev->obj, priv->info.width, priv->eheight, ratio);
_emotion_frame_resize(priv->emotion_object, send->info.width, send->eheight, ratio);
buffer = gst_buffer_ref(buffer);
if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
ev->last_buffer = buffer;
gst_buffer_replace(&priv->last_buffer, buffer);
_emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
if (!preroll && ev->play_started)
{
_emotion_playback_started(ev->obj);
ev->play_started = 0;
}
if (!send->force)
{
_emotion_frame_new(ev->obj);
}
_emotion_frame_new(priv->emotion_object);
exit_point:
if (!priv->unlocked)
eina_condition_signal(&priv->c);
eina_lock_release(&priv->m);
if (buffer) gst_buffer_unref(buffer);
if (send) emotion_gstreamer_buffer_free(send);
exit_stream:
if (priv)
{
if (preroll || !priv->o)
{
_emotion_pending_ecore_end();
return;
}
if (!priv->unlocked)
eina_condition_signal(&priv->c);
}
_emotion_pending_ecore_end();
}
/* Must be called with priv->m taken */
static void
unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
unlock_buffer_mutex(EmotionVideoSinkPrivate* priv)
{
priv->unlocked = EINA_TRUE;
@ -476,54 +426,41 @@ unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
}
static void
evas_video_sink_class_init(EvasVideoSinkClass* klass)
emotion_video_sink_class_init(EmotionVideoSinkClass* klass)
{
GObjectClass* gobject_class;
GstElementClass* gstelement_class;
GstBaseSinkClass* gstbase_sink_class;
GstVideoSinkClass* gstvideo_sink_class;
gobject_class = G_OBJECT_CLASS(klass);
gstelement_class = GST_ELEMENT_CLASS(klass);
gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
gstvideo_sink_class = GST_VIDEO_SINK_CLASS(klass);
g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
g_type_class_add_private(klass, sizeof(EmotionVideoSinkPrivate));
gobject_class->set_property = evas_video_sink_set_property;
gobject_class->get_property = evas_video_sink_get_property;
gobject_class->set_property = emotion_video_sink_set_property;
gobject_class->get_property = emotion_video_sink_get_property;
g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
g_param_spec_pointer ("evas-object", "Evas Object",
"The Evas object where the display of the video will be done",
g_object_class_install_property (gobject_class, PROP_EMOTION_OBJECT,
g_param_spec_pointer ("emotion-object", "Emotion Object",
"The Emotion object where the display of the video will be done",
G_PARAM_READWRITE));
g_object_class_install_property (gobject_class, PROP_WIDTH,
g_param_spec_int ("width", "Width",
"The width of the video",
0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_HEIGHT,
g_param_spec_int ("height", "Height",
"The height of the video",
0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_EV,
g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
"The internal data of the emotion object",
G_PARAM_READWRITE));
gobject_class->dispose = evas_video_sink_dispose;
gobject_class->dispose = emotion_video_sink_dispose;
gst_element_class_add_pad_template(gstelement_class, gst_static_pad_template_get(&sinktemplate));
gst_element_class_set_static_metadata(gstelement_class, "Evas video sink",
"Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
gst_element_class_set_static_metadata(gstelement_class, "Emotion video sink",
"Sink/Video", "Sends video data from a GStreamer pipeline to an Emotion object",
"Vincent Torri <vtorri@univ-evry.fr>");
gstbase_sink_class->set_caps = evas_video_sink_set_caps;
gstbase_sink_class->stop = evas_video_sink_stop;
gstbase_sink_class->start = evas_video_sink_start;
gstbase_sink_class->unlock = evas_video_sink_unlock;
gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
gstbase_sink_class->render = evas_video_sink_render;
gstbase_sink_class->preroll = evas_video_sink_preroll;
gstbase_sink_class->set_caps = emotion_video_sink_set_caps;
gstbase_sink_class->stop = emotion_video_sink_stop;
gstbase_sink_class->start = emotion_video_sink_start;
gstbase_sink_class->unlock = emotion_video_sink_unlock;
gstbase_sink_class->unlock_stop = emotion_video_sink_unlock_stop;
gstvideo_sink_class->show_frame = emotion_video_sink_show_frame;
}
gboolean
@ -532,226 +469,6 @@ gstreamer_plugin_init (GstPlugin * plugin)
return gst_element_register (plugin,
"emotion-sink",
GST_RANK_NONE,
EVAS_TYPE_VIDEO_SINK);
EMOTION_TYPE_VIDEO_SINK);
}
/*
 * Ecore_Thread worker: bring the pipeline to PAUSED and wait for the state
 * change to complete (preroll). Live sources cannot preroll in PAUSED and
 * report GST_STATE_CHANGE_NO_PREROLL; for those we go straight to PLAYING.
 *
 * Runs in a worker thread; must not touch Evas/Ecore main-loop state.
 */
static void
_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;
   GstStateChangeReturn res; /* was wrongly typed gboolean */

   /* Bail out if the thread was cancelled or the pipeline is already gone. */
   if (ecore_thread_check(thread) || !ev->pipeline) return;

   gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
   res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
   if (res == GST_STATE_CHANGE_NO_PREROLL)
     {
        /* Live source: PAUSED cannot preroll, switch to PLAYING instead. */
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
        gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
     }
}
/*
 * Ecore_Thread cancel callback for the preroll worker: unregister the
 * thread, optionally dump the pipeline graph for debugging, and finish a
 * pending deletion of the emotion object once all in-flight buffers
 * (ev->in == ev->out) have been returned.
 */
static void
_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;
   const char *dot_name;

   ev->threads = eina_list_remove(ev->threads, thread);

   /* NOTE(review): presumably requires GST_DEBUG_DUMP_DOT_DIR to be set
    * for the dump to actually be written — confirm against GStreamer docs. */
   dot_name = getenv("EMOTION_GSTREAMER_DOT");
   if (dot_name)
     GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
                                       GST_DEBUG_GRAPH_SHOW_ALL,
                                       dot_name);

   if (ev->in == ev->out && ev->delete_me)
     ev->api->del(ev);
}
/*
 * Ecore_Thread end callback for the preroll worker (runs in the main loop).
 * Unregisters the thread, starts playback if it was requested while the
 * worker ran, optionally dumps the pipeline graph, then either finishes a
 * pending deletion or parses the now-prerolled pipeline for stream info.
 */
static void
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;
   const char *dot_name;

   ev->threads = eina_list_remove(ev->threads, thread);

   if (ev->play)
     {
        /* Playback was requested during preroll; honor it now. */
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
        ev->play_started = 1;
     }

   dot_name = getenv("EMOTION_GSTREAMER_DOT");
   if (dot_name)
     GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
                                       GST_DEBUG_GRAPH_SHOW_ALL,
                                       dot_name);

   if (ev->in == ev->out && ev->delete_me)
     ev->api->del(ev);
   else
     _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
}
/*
 * Main-loop half of the "video-changed" notification: query the current
 * video pad's caps and tell emotion the new frame size and pixel-aspect
 * corrected display ratio. Always balances the _emotion_pending_ecore_begin()
 * done by the caller, even on error.
 */
static void
_main_frame_resize(void *data)
{
   Emotion_Gstreamer_Video *ev;
   gint cur;
   GstPad *pad;
   GstCaps *caps;
   GstVideoInfo info;
   double ratio;

   ev = (Emotion_Gstreamer_Video *)data;

   g_object_get(ev->pipeline, "current-video", &cur, NULL);
   g_signal_emit_by_name(ev->pipeline, "get-video-pad", cur, &pad);
   if (!pad)
     goto on_error;

   caps = gst_pad_get_current_caps(pad);
   gst_object_unref(pad);
   if (!caps)
     goto on_error;

   /* Fix: the return value was ignored; on failure 'info' was read
    * uninitialized, which is undefined behavior. */
   if (!gst_video_info_from_caps(&info, caps))
     {
        gst_caps_unref(caps);
        goto on_error;
     }
   gst_caps_unref(caps);

   /* Guard the divisions below against malformed caps. */
   if (info.height == 0 || info.par_d == 0)
     goto on_error;

   ratio = (double)info.width / (double)info.height;
   ratio *= (double)info.par_n / (double)info.par_d;

   _emotion_frame_resize(ev->obj, info.width, info.height, ratio);

 on_error:
   _emotion_pending_ecore_end();
}
/*
 * "video-changed" signal handler; may fire from a GStreamer streaming
 * thread, so defer the actual resize work to the Ecore main loop.
 */
static void
_video_changed(GstElement *playbin EINA_UNUSED, gpointer data)
{
   /* Keep the main loop alive until _main_frame_resize() runs. */
   _emotion_pending_ecore_begin();
   ecore_main_loop_thread_safe_call_async(_main_frame_resize,
                                          (Emotion_Gstreamer_Video *)data);
}
/*
 * Build the playback pipeline for @uri: either a custom webcam launch line
 * or a playbin whose video-sink is a bin of queue ! emotion-sink rendering
 * into @o's image object. Starts an async preroll in a worker thread.
 *
 * @param ev  Backend state; on success owns the returned pipeline.
 * @param o   The emotion Evas_Object to render into.
 * @param uri Media URI (or webcam alias resolved via emotion_webcam_custom_get).
 * @return The pipeline (playbin or parsed bin) on success, NULL on failure.
 */
GstElement *
gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                         Evas_Object *o,
                         const char *uri)
{
   GstElement *playbin;
   GstElement *bin = NULL;
   GstElement *esink = NULL;
   GstElement *queue = NULL;
   Evas_Object *obj;
   GstPad *pad;
   int flags;
   const char *launch;

   obj = emotion_object_image_get(o);
   if (!obj)
     {
//        ERR("Not Evas_Object specified");
        return NULL;
     }

   if (!uri)
     return NULL;

   launch = emotion_webcam_custom_get(uri);
   if (launch)
     {
        GError *error = NULL;

        /* Custom webcam pipeline: parse it and ghost unlinked pads. */
        playbin = gst_parse_bin_from_description(launch, 1, &error);
        if (!playbin)
          {
             ERR("Unable to setup command : '%s' got error '%s'.", launch, error->message);
             g_error_free(error);
             return NULL;
          }
        if (error)
          {
             WRN("got recoverable error '%s' for command : '%s'.", error->message, launch);
             g_error_free(error);
          }
     }
   else
     {
        playbin = gst_element_factory_make("playbin", "playbin");
        if (!playbin)
          {
             ERR("Unable to create 'playbin' GstElement.");
             return NULL;
          }
        /* Track stream switches so the frame size can be updated. */
        g_signal_connect(playbin, "video-changed", G_CALLBACK(_video_changed), ev);
     }

   bin = gst_bin_new(NULL);
   if (!bin)
     {
        ERR("Unable to create GstBin !");
        goto unref_pipeline;
     }

   esink = gst_element_factory_make("emotion-sink", "sink");
   if (!esink)
     {
        ERR("Unable to create 'emotion-sink' GstElement.");
        goto unref_pipeline;
     }

   g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
   g_object_set(G_OBJECT(esink), "ev", ev, NULL);

   evas_object_image_pixels_get_callback_set(obj, NULL, NULL);

   /* We need queue to force each video sink to be in its own thread */
   queue = gst_element_factory_make("queue", "equeue");
   if (!queue)
     {
        ERR("Unable to create 'queue' GstElement.");
        goto unref_pipeline;
     }

   gst_bin_add_many(GST_BIN(bin), queue, esink, NULL);
   gst_element_link_many(queue, esink, NULL);
   /* Ownership of queue/esink transferred to the bin from here on. */
   queue = NULL;
   esink = NULL;

   /* Expose the queue's sink pad as the bin's sink pad. */
   pad = gst_element_get_static_pad(GST_BIN(bin)->children ? GST_ELEMENT(GST_BIN(bin)->children->data) : NULL, "sink");
   pad = gst_element_get_static_pad(gst_bin_get_by_name(GST_BIN(bin), "equeue"), "sink");
   gst_element_add_pad(bin, gst_ghost_pad_new("sink", pad));
   gst_object_unref(pad);

   if (launch)
     {
        g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
     }
   else
     {
        g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
        g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
        g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
        g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
     }

   ev->stream = EINA_TRUE;

   eina_stringshare_replace(&ev->uri, uri);
   ev->pipeline = playbin;
   ev->sink = bin;
   ev->esink = gst_bin_get_by_name(GST_BIN(bin), "sink");
   ev->threads = eina_list_append(ev->threads,
                                  ecore_thread_run(_emotion_gstreamer_pause,
                                                   _emotion_gstreamer_end,
                                                   _emotion_gstreamer_cancel,
                                                   ev));

   /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
   /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   return playbin;

 unref_pipeline:
   /* Fix: the old path unconditionally unref'd esink/bin, which asserts
    * inside GLib when they are NULL, and leaked queue entirely. */
   if (esink) gst_object_unref(esink);
   if (queue) gst_object_unref(queue);
   if (bin) gst_object_unref(bin);
   gst_object_unref(playbin);
   return NULL;
}