emotion & evas: remove gstreamer 0.10 support

We have had GStreamer 1.x support for a long time already and only kept
the 0.10.x backend around as a fallback. By now Linux distributions have
started to no longer ship the GStreamer 0.10.x packages, and upstream has
not seen a release in 5 years. Time to remove it on our side as well.

Signed-off-by: Stefan Schmidt <s.schmidt@samsung.com>
Reviewed-by: Cedric BAIL <cedric.bail@free.fr>
Differential Revision: https://phab.enlightenment.org/D10779
Stefan Schmidt 2019-10-30 17:09:20 +01:00 committed by Cedric BAIL
parent f3d9b8ee70
commit b8dc80c144
14 changed files with 7 additions and 4560 deletions
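
For reference (not part of the diff): once this change is applied, the remaining emotion back-end selection can be inspected from an already-configured build tree. A minimal sketch, assuming a meson build directory named build:

# 'gstreamer' is no longer a valid value; only gstreamer1, libvlc and xine remain
meson configure build | grep emotion-loaders-disabler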


@@ -19,7 +19,7 @@ if [ "$DISTRO" != "" ] ; then
# - RPM fusion repo for xine and libvlc
ENABLED_LINUX_COPTS=" -Dfb=true -Dsdl=true -Dbuffer=true -Dbuild-id=travis-build \
-Ddebug-threads=true -Dglib=true -Dg-mainloop=true -Dxpresent=true -Dxgesture=false -Dxinput22=true \
-Devas-loaders-disabler=json -Decore-imf-loaders-disabler= -Demotion-loaders-disabler=gstreamer,libvlc,xine \
-Devas-loaders-disabler=json -Decore-imf-loaders-disabler= -Demotion-loaders-disabler=libvlc,xine \
-Demotion-generic-loaders-disabler=vlc -Dharfbuzz=true -Dpixman=true -Dhyphen=true \
-Dvnc-server=true -Dbindings=luajit -Delogind=false -Dinstall-eo-files=true -Dphysics=true"
@@ -29,7 +29,7 @@ if [ "$DISTRO" != "" ] ; then
-Dcrypto=gnutls -Dglib=false -Dgstreamer=false -Dsystemd=false -Dpulseaudio=false \
-Dnetwork-backend=connman -Dxinput2=false -Dtslib=false \
-Devas-loaders-disabler=gst,pdf,ps,raw,svg,xcf,bmp,dds,eet,generic,gif,ico,jp2k,json,pmaps,psd,tga,tgv,tiff,wbmp,webp,xpm \
-Decore-imf-loaders-disabler=xim,ibus,scim -Demotion-loaders-disabler=gstreamer,gstreamer1,libvlc,xine \
-Decore-imf-loaders-disabler=xim,ibus,scim -Demotion-loaders-disabler=gstreamer1,libvlc,xine \
-Demotion-generic-loaders-disabler=vlc -Dfribidi=false -Dfontconfig=false \
-Dedje-sound-and-video=false -Dembedded-lz4=false -Dlibmount=false -Dv4l2=false \
-Delua=true -Dnls=false -Dbindings= -Dlua-interpreter=luajit -Dnative-arch-optimization=false"
@@ -109,6 +109,6 @@ else
export PKG_CONFIG_PATH="/usr/local/opt/openssl/lib/pkgconfig:/usr/local/Cellar/libffi/$LIBFFI_VER/lib/pkgconfig"
export CC="ccache gcc"
travis_fold meson meson
mkdir build && meson build -Dopengl=full -Decore-imf-loaders-disabler=scim,ibus -Dx11=false -Davahi=false -Dbindings=luajit -Deeze=false -Dsystemd=false -Dnls=false -Dcocoa=true -Demotion-loaders-disabler=gstreamer,gstreamer1,libvlc,xine
mkdir build && meson build -Dopengl=full -Decore-imf-loaders-disabler=scim,ibus -Dx11=false -Davahi=false -Dbindings=luajit -Deeze=false -Dsystemd=false -Dnls=false -Dcocoa=true -Demotion-loaders-disabler=gstreamer1,libvlc,xine
travis_endfold meson
fi

README

@@ -426,7 +426,7 @@ Required by default:
* bullet
* libpng
* libjpeg
* gstreamer (1.x, 0.10 support optional. Ensure all codecs you want are installed.)
* gstreamer (Ensure all codecs you want are installed.)
* zlib
* luajit (lua 5.1 or 5.2 support optional)
* libtiff
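
As an aside to the codec note above (not part of the README diff): whether GStreamer 1.x development files and plugins are present can be checked up front. A rough sketch; package names and plugin sets vary by distribution:

pkg-config --modversion gstreamer-1.0   # 1.x development files found?
gst-inspect-1.0 playbin                 # playback element provided by the installed plugin packages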


@@ -209,8 +209,8 @@ option('ecore-imf-loaders-disabler',
option('emotion-loaders-disabler',
type : 'array',
description : 'List of video back-ends to disable in efl',
choices : ['gstreamer', 'gstreamer1', 'libvlc', 'xine'],
value : ['gstreamer', 'libvlc', 'xine']
choices : ['gstreamer1', 'libvlc', 'xine'],
value : ['libvlc', 'xine']
)
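
As an illustration (not part of the diff), a configure line against the reduced choice list that keeps only the GStreamer 1.x back-end enabled might look like the travis invocations above:

mkdir build && meson build -Demotion-loaders-disabler=libvlc,xine
# passing the removed 'gstreamer' value would now be rejected as an invalid choice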
option('emotion-generic-loaders-disabler',


@@ -1,281 +0,0 @@
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <fcntl.h>
#include <unistd.h>
#include <gst/gst.h>
#include <Eina.h>
#include "shmfile.h"
#include "timeout.h"
#define DATA32 unsigned int
//#define GST_DBG
#ifdef GST_DBG
#define D(fmt, args...) fprintf(stderr, fmt, ## args)
#else
#define D(fmt, args...)
#endif
#define CAPS "video/x-raw-rgb,bpp=(int)32,depth=(int)32,endianness=(int)4321,red_mask=(int)0x0000ff00, green_mask=(int)0x00ff0000, blue_mask=(int)0xff000000"
static GstElement *pipeline = NULL;
static GstElement *sink = NULL;
static gint64 duration = -1;
int width = 0;
int height = 0;
void *data = NULL;
static Eina_Bool
_gst_init(const char *filename)
{
GstPad *pad;
GstCaps *caps;
GstStructure *structure;
gchar *descr;
gchar *uri;
GError *error = NULL;
GstFormat format;
GstStateChangeReturn ret;
// int vidstr = 0;
if (!filename || !*filename)
return EINA_FALSE;
if (!gst_init_check(NULL, NULL, &error))
return EINA_FALSE;
if ((*filename == '/') || (*filename == '~'))
{
uri = g_filename_to_uri(filename, NULL, NULL);
if (!uri)
{
D("could not create new uri from %s", filename);
goto unref_pipeline;
}
}
else
uri = strdup(filename);
D("Setting file %s\n", uri);
descr = g_strdup_printf("uridecodebin uri=%s ! typefind ! ffmpegcolorspace ! "
" appsink name=sink caps=\"" CAPS "\"", uri);
pipeline = gst_parse_launch(descr, &error);
free(uri);
if (error != NULL)
{
D("could not construct pipeline: %s\n", error->message);
g_error_free (error);
goto gst_shutdown;
}
/* needs gst 1.0+
* also only works on playbin objects!!! this is a uridecodebin!
g_object_get(G_OBJECT(pipeline),
"n-video", &vidstr,
NULL);
if (vidstr <= 0)
{
D("no video stream\n");
goto gst_shutdown;
}
*/
sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
ret = gst_element_set_state (pipeline, GST_STATE_PAUSED);
switch (ret)
{
case GST_STATE_CHANGE_FAILURE:
D("failed to play the file\n");
goto unref_pipeline;
case GST_STATE_CHANGE_NO_PREROLL:
D("live sources not supported yet\n");
goto unref_pipeline;
default:
break;
}
ret = gst_element_get_state((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
if (ret == GST_STATE_CHANGE_FAILURE)
{
D("could not complete pause\n");
goto unref_pipeline;
}
format = GST_FORMAT_TIME;
gst_element_query_duration (pipeline, &format, &duration);
if (duration == -1)
{
D("could not retrieve the duration, set it to 1s\n");
duration = 1 * GST_SECOND;
}
pad = gst_element_get_static_pad(sink, "sink");
if (!pad)
{
D("could not retrieve the sink pad\n");
goto unref_pipeline;
}
caps = gst_pad_get_negotiated_caps(pad);
if (!caps)
goto unref_pad;
structure = gst_caps_get_structure(caps, 0);
if (!gst_structure_get_int(structure, "width", &width))
goto unref_caps;
if (!gst_structure_get_int(structure, "height", &height))
goto unref_caps;
gst_caps_unref(caps);
gst_object_unref(pad);
return EINA_TRUE;
unref_caps:
gst_caps_unref(caps);
unref_pad:
gst_object_unref(pad);
unref_pipeline:
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
gst_shutdown:
gst_deinit();
return EINA_FALSE;
}
static void
_gst_shutdown()
{
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref(pipeline);
gst_deinit();
}
static void
_gst_load_image(int size_w EINA_UNUSED, int size_h EINA_UNUSED, double pos)
{
GstBuffer *buffer;
D("load image\n");
if (pos >= 0.0)
gst_element_seek_simple(pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
pos * 1000000000.0);
else
gst_element_seek_simple(pipeline, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
duration / 2);
g_signal_emit_by_name(sink, "pull-preroll", &buffer, NULL);
D("load image : %p %d\n", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
shm_alloc(width * height * sizeof(DATA32));
if (!shm_addr) return;
data = shm_addr;
memcpy(data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
}
int
main(int argc, char **argv)
{
char *file, *p;
int i, numonly;
int size_w = 0, size_h = 0;
int head_only = 0;
long long pos = -1.0;
if (argc < 2) return -1;
// file is ALWAYS first arg, other options come after
file = argv[1];
for (i = 2; i < argc; i++)
{
if (!strcmp(argv[i], "-head"))
// asked to only load header, not body/data
head_only = 1;
else if (!strcmp(argv[i], "-key"))
{
i++;
numonly = 1;
for (p = argv[i]; *p; p++)
{
if ((!*p < '0') || (*p > 9))
{
numonly = 0;
break;
}
}
if (numonly) pos = (double)(atoll(argv[i])) / 1000.0;
i++;
}
else if (!strcmp(argv[i], "-opt-scale-down-by"))
{ // not used by ps loader
i++;
// int scale_down = atoi(argv[i]);
}
else if (!strcmp(argv[i], "-opt-dpi"))
{
i++;
}
else if (!strcmp(argv[i], "-opt-size"))
{ // not used by ps loader
i++;
size_w = atoi(argv[i]);
i++;
size_h = atoi(argv[i]);
}
}
timeout_init(10);
D("_gst_init_file\n");
if (!_gst_init(file))
return -1;
D("_gst_init done\n");
if (!head_only)
{
_gst_load_image(size_w, size_h, pos);
}
D("size...: %ix%i\n", width, height);
D("alpha..: 0\n");
printf("size %i %i\n", width, height);
printf("alpha 0\n");
if (!head_only)
{
if (shm_fd >= 0)
{
printf("shmfile %s\n", shmfile);
}
else
{
// could also to "tmpfile %s\n" like shmfile but just
// a mmaped tmp file on the system
printf("data\n");
if (fwrite(data, width * height * sizeof(DATA32), 1, stdout) != 1)
{
shm_free();
return -1;
}
}
shm_free();
}
else
printf("done\n");
_gst_shutdown();
fflush(stdout);
return 0;
}


@@ -1,11 +1,3 @@
# gstreamer 0.1 support
#generic_deps = [dependency('gstreamer')]
#generic_src = files([
# 'main_0_10.c'
#])
generic_src = files([
'main.c'
])
@@ -14,4 +6,4 @@ generic_deps = []
if get_option('gstreamer') == true
generic_deps += dependency('gstreamer-1.0')
endif
generic_support = ['264','3g2','3gp','3gp2','3gpp','3gpp2','3p2','asf','avi','bdm','bdmv','clpi','cpi','dv','fla','flv','m1v','m2t','m2v','m4v','mkv','mov','mp2','mp2ts','mp4','mpe','mpeg','mpg','mpl','mpls','mts','mxf','nut','nuv','ogg','ogm','ogv','qt','rm','rmj','rmm','rms','rmx','rmvb','rv','swf','ts','webm','weba','wmv']
generic_support = ['264','3g2','3gp','3gp2','3gpp','3gpp2','3p2','asf','avi','bdm','bdmv','clpi','cpi','dv','fla','flv','m1v','m2t','m2v','m4v','mkv','mov','mp2','mp2ts','mp4','mpe','mpeg','mpg','mpl','mpls','mts','mxf','nut','nuv','ogg','ogm','ogv','qt','rm','rmj','rmm','rms','rmx','rmvb','rv','swf','ts','webm','weba','wmv']


@@ -81,9 +81,6 @@ _emotion_modules_load(void)
if (stat(buf, &st) == 0)
{
const char *built_modules[] = {
#ifdef EMOTION_BUILD_GSTREAMER
"gstreamer",
#endif
#ifdef EMOTION_BUILD_GSTREAMER1
"gstreamer1",
#endif


@@ -1,90 +0,0 @@
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <Eina.h>
#include <Evas.h>
#include <Ecore.h>
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideosink.h>
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
# ifdef HAVE_XOVERLAY_H
# include <gst/interfaces/xoverlay.h>
# endif
#endif
#include "Emotion.h"
#include "emotion_gstreamer.h"
Emotion_Gstreamer_Buffer *
emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
GstBuffer *buffer,
Eina_Bool preroll)
{
Emotion_Gstreamer_Buffer *send;
if (!sink->ev) return NULL;
send = malloc(sizeof (Emotion_Gstreamer_Buffer));
if (!send) return NULL;
send->sink = sink;
send->frame = gst_buffer_ref(buffer);
send->preroll = preroll;
send->force = EINA_FALSE;
sink->ev->out++;
send->ev = sink->ev;
return send;
}
void
emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send)
{
send->ev->in++;
if (send->ev->in == send->ev->out
&& send->ev->threads == NULL
&& send->ev->delete_me)
send->ev->api->del(send->ev);
gst_buffer_unref(send->frame);
free(send);
}
Emotion_Gstreamer_Message *
emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
GstMessage *msg)
{
Emotion_Gstreamer_Message *send;
if (!ev) return NULL;
send = malloc(sizeof (Emotion_Gstreamer_Message));
if (!send) return NULL;
ev->out++;
send->ev = ev;
send->msg = gst_message_ref(msg);
return send;
}
void
emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send)
{
send->ev->in++;
if (send->ev->in == send->ev->out
&& send->ev->threads == NULL
&& send->ev->delete_me)
send->ev->api->del(send->ev);
gst_message_unref(send->msg);
free(send);
}


@@ -1,251 +0,0 @@
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <Eina.h>
#include <Evas.h>
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideosink.h>
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
# ifdef HAVE_XOVERLAY_H
# include <gst/interfaces/xoverlay.h>
# endif
#endif
#include "Emotion.h"
#include "emotion_gstreamer.h"
static inline void
_evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data,
unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height, unsigned int step)
{
unsigned int x;
unsigned int y;
for (y = 0; y < output_height; ++y)
{
for (x = 0; x < w; x++)
{
evas_data[0] = gst_data[0];
evas_data[1] = gst_data[1];
evas_data[2] = gst_data[2];
evas_data[3] = 255;
gst_data += step;
evas_data += 4;
}
}
}
static void
_evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
_evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 3);
}
static void
_evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
_evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 4);
}
static void
_evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
{
unsigned int x;
unsigned int y;
for (y = 0; y < output_height; ++y)
{
unsigned char alpha;
for (x = 0; x < w; ++x)
{
alpha = gst_data[3];
evas_data[0] = (gst_data[0] * alpha) / 255;
evas_data[1] = (gst_data[1] * alpha) / 255;
evas_data[2] = (gst_data[2] * alpha) / 255;
evas_data[3] = alpha;
gst_data += 4;
evas_data += 4;
}
}
}
static void
_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
const unsigned char **rows;
unsigned int i, j;
unsigned int rh;
unsigned int stride_y, stride_uv;
rh = output_height;
rows = (const unsigned char **)evas_data;
stride_y = GST_ROUND_UP_4(w);
stride_uv = GST_ROUND_UP_8(w) / 2;
for (i = 0; i < rh; i++)
rows[i] = &gst_data[i * stride_y];
for (j = 0; j < (rh / 2); j++, i++)
rows[i] = &gst_data[h * stride_y + j * stride_uv];
for (j = 0; j < (rh / 2); j++, i++)
rows[i] = &gst_data[h * stride_y +
(rh / 2) * stride_uv +
j * stride_uv];
}
static void
_evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
{
const unsigned char **rows;
unsigned int i, j;
unsigned int rh;
unsigned int stride_y, stride_uv;
rh = output_height;
rows = (const unsigned char **)evas_data;
stride_y = GST_ROUND_UP_4(w);
stride_uv = GST_ROUND_UP_8(w) / 2;
for (i = 0; i < rh; i++)
rows[i] = &gst_data[i * stride_y];
for (j = 0; j < (rh / 2); j++, i++)
rows[i] = &gst_data[h * stride_y +
(rh / 2) * stride_uv +
j * stride_uv];
for (j = 0; j < (rh / 2); j++, i++)
rows[i] = &gst_data[h * stride_y + j * stride_uv];
}
static void
_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
{
const unsigned char **rows;
unsigned int i;
unsigned int stride;
rows = (const unsigned char **)evas_data;
stride = GST_ROUND_UP_4(w * 2);
for (i = 0; i < output_height; i++)
rows[i] = &gst_data[i * stride];
}
static void
_evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h EINA_UNUSED, unsigned int output_height)
{
const unsigned char **rows;
unsigned int i, j;
unsigned int rh;
rh = output_height;
rows = (const unsigned char **)evas_data;
for (i = 0; i < rh; i++)
rows[i] = &gst_data[i * w];
for (j = 0; j < (rh / 2); j++, i++)
rows[i] = &gst_data[rh * w + j * w];
}
static void
_evas_video_mt12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
{
const unsigned char **rows;
unsigned int i;
unsigned int j;
rows = (const unsigned char **)evas_data;
for (i = 0; i < (h / 32) / 2; i++)
rows[i] = &gst_data[i * w * 2 * 32];
if ((h / 32) % 2)
{
rows[i] = &gst_data[i * w * 2 * 32];
i++;
}
for (j = 0; j < ((h / 2) / 32) / 2; ++j, ++i)
rows[i] = &gst_data[h * w + j * (w / 2) * 2 * 16];
}
void
_evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED)
{
const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) gst_data;
const unsigned char **rows;
unsigned int i;
unsigned int j;
rows = (const unsigned char **)evas_data;
for (i = 0; i < (h / 32) / 2; i++)
rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
if ((h / 32) % 2)
{
rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
i++;
}
for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
{
rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
}
if (((h / 2) / 16) % 2)
rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
}
void
_evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED)
{
const SCMN_IMGB *imgb = (const SCMN_IMGB *) gst_data;
const unsigned char **rows;
unsigned int i, j;
rows = (const unsigned char **)evas_data;
for (i = 0; i < (h / 32) / 2; i++)
rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
if ((h / 32) % 2)
{
rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
i++;
}
for (j = 0; j < (unsigned int) imgb->elevation[1] / 32 / 2; j++, i++)
rows[i] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
if ((imgb->elevation[1] / 32) % 2)
rows[i++] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
}
const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[] = {
{ "I420", GST_MAKE_FOURCC('I', '4', '2', '0'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
{ "YV12", GST_MAKE_FOURCC('Y', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
{ "YUY2", GST_MAKE_FOURCC('Y', 'U', 'Y', '2'), EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
{ "NV12", GST_MAKE_FOURCC('N', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
{ "TM12", GST_MAKE_FOURCC('T', 'M', '1', '2'), EVAS_COLORSPACE_YCBCR420TM12601_PL, _evas_video_mt12, EINA_TRUE },
{ NULL, 0, 0, NULL, 0 }
};
const ColorSpace_Format_Convertion colorspace_format_convertion[] = {
{ "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr },
{ "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx },
{ "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra },
{ NULL, 0, 0, NULL }
};


@@ -1,70 +0,0 @@
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <Eina.h>
#include <Evas.h>
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideosink.h>
#ifdef HAVE_ECORE_X
# include <Ecore_X.h>
# ifdef HAVE_XOVERLAY_H
# include <gst/interfaces/xoverlay.h>
# endif
#endif
#include "Emotion.h"
#include "emotion_gstreamer.h"
typedef struct _FakeEOSBin
{
GstBin parent;
} FakeEOSBin;
typedef struct _FakeEOSBinClass
{
GstBinClass parent;
} FakeEOSBinClass;
GST_BOILERPLATE(FakeEOSBin, fakeeos_bin, GstBin,
GST_TYPE_BIN);
static void
fakeeos_bin_handle_message(GstBin * bin, GstMessage * message)
{
/* FakeEOSBin *fakeeos = (FakeEOSBin *)(bin); */
switch (GST_MESSAGE_TYPE(message)) {
case GST_MESSAGE_EOS:
/* what to do here ? just returning at the moment */
return;
default:
break;
}
GST_BIN_CLASS(parent_class)->handle_message(bin, message);
}
static void
fakeeos_bin_base_init(gpointer g_class EINA_UNUSED)
{
}
static void
fakeeos_bin_class_init(FakeEOSBinClass * klass)
{
GstBinClass *gstbin_class = GST_BIN_CLASS(klass);
gstbin_class->handle_message =
GST_DEBUG_FUNCPTR (fakeeos_bin_handle_message);
}
static void
fakeeos_bin_init(FakeEOSBin *src EINA_UNUSED,
FakeEOSBinClass *klass EINA_UNUSED)
{
}

File diff suppressed because it is too large.


@@ -1,352 +0,0 @@
#ifndef __EMOTION_GSTREAMER_H__
#define __EMOTION_GSTREAMER_H__
#include "emotion_modules.h"
typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data,
const unsigned char *gst_data,
unsigned int w,
unsigned int h,
unsigned int output_height);
typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate;
typedef struct _EvasVideoSink EvasVideoSink;
typedef struct _EvasVideoSinkClass EvasVideoSinkClass;
typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;
typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream;
typedef struct _Emotion_Gstreamer_Metadata Emotion_Gstreamer_Metadata;
typedef struct _Emotion_Gstreamer_Buffer Emotion_Gstreamer_Buffer;
typedef struct _Emotion_Gstreamer_Message Emotion_Gstreamer_Message;
typedef struct _Emotion_Video_Stream Emotion_Video_Stream;
struct _Emotion_Video_Stream
{
gdouble length_time;
gint width;
gint height;
gint fps_num;
gint fps_den;
guint32 fourcc;
int index;
};
struct _Emotion_Audio_Stream
{
gdouble length_time;
gint channels;
gint samplerate;
};
struct _Emotion_Gstreamer_Metadata
{
char *title;
char *album;
char *artist;
char *genre;
char *comment;
char *year;
char *count;
char *disc_id;
};
struct _Emotion_Gstreamer_Video
{
const Emotion_Engine *api;
/* Gstreamer elements */
GstElement *pipeline;
GstElement *sink;
GstElement *esink;
GstElement *xvsink;
GstElement *tee;
GstElement *convert;
GstPad *eteepad;
GstPad *xvteepad;
GstPad *xvpad;
Eina_List *threads;
/* eos */
GstBus *eos_bus;
/* Strams */
Eina_List *video_streams;
Eina_List *audio_streams;
int video_stream_nbr;
int audio_stream_nbr;
/* We need to keep a copy of the last inserted buffer as evas doesn't copy YUV data around */
GstBuffer *last_buffer;
/* Evas object */
Evas_Object *obj;
/* Characteristics of stream */
double position;
double ratio;
double volume;
volatile int seek_to;
volatile int get_poslen;
Emotion_Gstreamer_Metadata *metadata;
#ifdef HAVE_ECORE_X
Ecore_X_Window win;
#endif
const char *uri;
Emotion_Gstreamer_Buffer *send;
EvasVideoSinkPrivate *sink_data;
Emotion_Vis vis;
int in;
int out;
int frames;
int flapse;
double rtime;
double rlapse;
struct
{
double width;
double height;
} fill;
Eina_Bool play : 1;
Eina_Bool play_started : 1;
Eina_Bool video_mute : 1;
Eina_Bool audio_mute : 1;
Eina_Bool pipeline_parsed : 1;
Eina_Bool delete_me : 1;
Eina_Bool samsung : 1;
Eina_Bool kill_buffer : 1;
Eina_Bool stream : 1;
Eina_Bool priority : 1;
int src_width;
int src_height;
};
struct _EvasVideoSink {
/*< private >*/
GstVideoSink parent;
EvasVideoSinkPrivate *priv;
};
struct _EvasVideoSinkClass {
/*< private >*/
GstVideoSinkClass parent_class;
};
struct _EvasVideoSinkPrivate {
EINA_REFCOUNT;
Evas_Object *o;
Emotion_Gstreamer_Video *ev;
Evas_Video_Convert_Cb func;
unsigned int width;
unsigned int height;
unsigned int source_height;
Evas_Colorspace eformat;
Eina_Lock m;
Eina_Condition c;
// If this is TRUE all processing should finish ASAP
// This is necessary because there could be a race between
// unlock() and render(), where unlock() wins, signals the
// GCond, then render() tries to render a frame although
// everything else isn't running anymore. This will lead
// to deadlocks because render() holds the stream lock.
//
// Protected by the buffer mutex
Eina_Bool unlocked : 1;
Eina_Bool samsung : 1; /** ST12 will only define a Samsung specific GstBuffer */
};
struct _Emotion_Gstreamer_Buffer
{
Emotion_Gstreamer_Video *ev;
EvasVideoSinkPrivate *sink;
GstBuffer *frame;
Eina_Bool preroll : 1;
Eina_Bool force : 1;
};
struct _Emotion_Gstreamer_Message
{
Emotion_Gstreamer_Video *ev;
GstMessage *msg;
};
extern Eina_Bool window_manager_video;
extern Eina_Bool debug_fps;
extern int _emotion_gstreamer_log_domain;
extern Eina_Bool _ecore_x_available;
#ifdef DBG
#undef DBG
#endif
#define DBG(...) EINA_LOG_DOM_DBG(_emotion_gstreamer_log_domain, __VA_ARGS__)
#ifdef INF
#undef INF
#endif
#define INF(...) EINA_LOG_DOM_INFO(_emotion_gstreamer_log_domain, __VA_ARGS__)
#ifdef WRN
#undef WRN
#endif
#define WRN(...) EINA_LOG_DOM_WARN(_emotion_gstreamer_log_domain, __VA_ARGS__)
#ifdef ERR
#undef ERR
#endif
#define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__)
#ifdef CRI
#undef CRI
#endif
#define CRI(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type()
GType fakeeos_bin_get_type(void);
#define EVAS_VIDEO_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), \
EVAS_TYPE_VIDEO_SINK, EvasVideoSink))
#define EVAS_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), \
EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
#define EVAS_IS_VIDEO_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), \
EVAS_TYPE_VIDEO_SINK))
#define EVAS_IS_VIDEO_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), \
EVAS_TYPE_VIDEO_SINK))
#define EVAS_VIDEO_SINK_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), \
EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
#define GST_TYPE_FAKEEOS_BIN fakeeos_bin_get_type()
GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
Evas_Object *obj,
const char *uri);
gboolean gstreamer_plugin_init(GstPlugin *plugin);
Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
GstBuffer *buffer,
Eina_Bool preroll);
void emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send);
Emotion_Gstreamer_Message *emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
GstMessage *msg);
void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send);
Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
Eina_Bool force);
typedef struct _ColorSpace_FourCC_Convertion ColorSpace_FourCC_Convertion;
typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion;
struct _ColorSpace_FourCC_Convertion
{
const char *name;
guint32 fourcc;
Evas_Colorspace eformat;
Evas_Video_Convert_Cb func;
Eina_Bool force_height;
};
struct _ColorSpace_Format_Convertion
{
const char *name;
GstVideoFormat format;
Evas_Colorspace eformat;
Evas_Video_Convert_Cb func;
};
extern const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[];
extern const ColorSpace_Format_Convertion colorspace_format_convertion[];
/** Samsung specific infrastructure - do not touch, do not modify */
#define MPLANE_IMGB_MAX_COUNT 4
#define SCMN_IMGB_MAX_PLANE 4
typedef struct _GstMultiPlaneImageBuffer GstMultiPlaneImageBuffer;
typedef struct _SCMN_IMGB SCMN_IMGB;
struct _GstMultiPlaneImageBuffer
{
GstBuffer buffer;
/* width of each image plane */
gint width[MPLANE_IMGB_MAX_COUNT];
/* height of each image plane */
gint height[MPLANE_IMGB_MAX_COUNT];
/* stride of each image plane */
gint stride[MPLANE_IMGB_MAX_COUNT];
/* elevation of each image plane */
gint elevation[MPLANE_IMGB_MAX_COUNT];
/* user space address of each image plane */
guchar *uaddr[MPLANE_IMGB_MAX_COUNT];
/* Index of real address of each image plane, if needs */
guchar *index[MPLANE_IMGB_MAX_COUNT];
/* left postion, if needs */
gint x;
/* top position, if needs */
gint y;
/* to align memory */
gint __dummy2;
/* arbitrary data */
gint data[16];
};
struct _SCMN_IMGB
{
/* width of each image plane */
int width[SCMN_IMGB_MAX_PLANE];
/* height of each image plane */
int height[SCMN_IMGB_MAX_PLANE];
/* stride of each image plane */
int stride[SCMN_IMGB_MAX_PLANE];
/* elevation of each image plane */
int elevation[SCMN_IMGB_MAX_PLANE];
/* user space address of each image plane */
guchar *uaddr[SCMN_IMGB_MAX_PLANE];
/* physical address of each image plane, if needs */
guchar *p[SCMN_IMGB_MAX_PLANE];
/* color space type of image */
int cs;
/* left postion, if needs */
int x;
/* top position, if needs */
int y;
/* to align memory */
int __dummy2;
/* arbitrary data */
int data[16];
};
void _evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height EINA_UNUSED);
void _evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w EINA_UNUSED, unsigned int h, unsigned int output_height EINA_UNUSED);
#endif /* __EMOTION_GSTREAMER_H__ */

File diff suppressed because it is too large.


@@ -1,18 +0,0 @@
generic_src = files([
'emotion_gstreamer.h',
'emotion_gstreamer.c',
'emotion_alloc.c',
'emotion_convert.c',
'emotion_sink.c'
])
generic_deps = [dependency('gstreamer-0.10')]
shared_module(emotion_loader,
generic_src,
include_directories : config_dir,
dependencies: [eina, ecore_x, evas, emotion, generic_deps],
install: true,
install_dir : mod_install_dir,
c_args : package_c_args,
)


@@ -1,5 +1,4 @@
emotion_loaders = [
'gstreamer',
'gstreamer1',
'libvlc',
'xine'