#ifndef __EMOTION_GSTREAMER_H__
#define __EMOTION_GSTREAMER_H__

#include "emotion_modules.h"

typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data,
                                      const unsigned char *gst_data,
                                      unsigned int w,
                                      unsigned int h,
                                      unsigned int output_height);
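
/* Illustrative sketch only (not part of the module): the simplest possible
 * converter matching Evas_Video_Convert_Cb, assuming a packed 4-byte-per-pixel
 * format with no row padding. The real converters also handle planar YUV
 * layouts, row strides and the source/output height mismatch.
 *
 *   static void
 *   _example_convert_copy(unsigned char *evas_data,
 *                         const unsigned char *gst_data,
 *                         unsigned int w,
 *                         unsigned int h,
 *                         unsigned int output_height)
 *   {
 *      (void) output_height;
 *      memcpy(evas_data, gst_data, (size_t) w * h * 4);
 *   }
 */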

typedef struct _EvasVideoSinkPrivate EvasVideoSinkPrivate;
typedef struct _EvasVideoSink        EvasVideoSink;
typedef struct _EvasVideoSinkClass   EvasVideoSinkClass;
typedef struct _Emotion_Gstreamer_Video Emotion_Gstreamer_Video;
typedef struct _Emotion_Audio_Stream Emotion_Audio_Stream;
typedef struct _Emotion_Gstreamer_Metadata Emotion_Gstreamer_Metadata;
typedef struct _Emotion_Gstreamer_Buffer Emotion_Gstreamer_Buffer;
typedef struct _Emotion_Gstreamer_Message Emotion_Gstreamer_Message;
typedef struct _Emotion_Video_Stream Emotion_Video_Stream;

struct _Emotion_Video_Stream
{
   gdouble     length_time;
   gint        width;
   gint        height;
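   /* frame rate as a rational number: fps_num / fps_den
    * (e.g. 30000/1001 for NTSC 29.97 fps) */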
   gint        fps_num;
   gint        fps_den;
   guint32     fourcc;
   int         index;
};

struct _Emotion_Audio_Stream
{
   gdouble     length_time;
   gint        channels;
   gint        samplerate;
};

struct _Emotion_Gstreamer_Metadata
{
   char *title;
   char *album;
   char *artist;
   char *genre;
   char *comment;
   char *year;
   char *count;
   char *disc_id;
};

struct _Emotion_Gstreamer_Video
{
   const Emotion_Engine *api;

   /* Gstreamer elements */
   GstElement       *pipeline;
   GstElement       *sink;
   GstElement       *esink;
   GstElement       *xvsink;
   GstElement       *tee;
   GstElement       *convert;

   GstPad           *eteepad;
   GstPad           *xvteepad;
   GstPad           *xvpad;
   Eina_List        *threads;

   /* eos */
   GstBus           *eos_bus;

   /* Streams */
   Eina_List        *video_streams;
   Eina_List        *audio_streams;

   int               video_stream_nbr;
   int               audio_stream_nbr;

   /* We need to keep a copy of the last inserted buffer as Evas doesn't copy YUV data around */
   GstBuffer        *last_buffer;

   /* Evas object */
   Evas_Object      *obj;

   /* Characteristics of stream */
   double            position;
   double            ratio;
   double            volume;

   volatile int      seek_to;
   volatile int      get_poslen;

   Emotion_Gstreamer_Metadata *metadata;

#ifdef HAVE_ECORE_X
   Ecore_X_Window    win;
#endif

   const char       *uri;

   Emotion_Gstreamer_Buffer *send;

   EvasVideoSinkPrivate *sink_data;

   Emotion_Vis       vis;

   int               in;
   int               out;

   int frames;
   int flapse;
   double rtime;
   double rlapse;

   struct
   {
      double         width;
      double         height;
   } fill;

   Eina_Bool         play         : 1;
   Eina_Bool         play_started : 1;
   Eina_Bool         video_mute   : 1;
   Eina_Bool         audio_mute   : 1;
   Eina_Bool         pipeline_parsed : 1;
   Eina_Bool         delete_me    : 1;
   Eina_Bool         samsung      : 1;
   Eina_Bool         kill_buffer  : 1;
   Eina_Bool         stream       : 1;
   Eina_Bool         priority     : 1;

   int src_width;
   int src_height;
};

struct _EvasVideoSink {
    /*< private >*/
    GstVideoSink parent;
    EvasVideoSinkPrivate *priv;
};

struct _EvasVideoSinkClass {
    /*< private >*/
    GstVideoSinkClass parent_class;
};

struct _EvasVideoSinkPrivate {
   EINA_REFCOUNT;

   Evas_Object *o;

   Emotion_Gstreamer_Video *ev;

   Evas_Video_Convert_Cb func;

   unsigned int width;
   unsigned int height;
   unsigned int source_height;
   Evas_Colorspace eformat;

   Eina_Lock m;
   Eina_Condition c;

   // If this is TRUE all processing should finish ASAP
   // This is necessary because there could be a race between
   // unlock() and render(), where unlock() wins, signals the
   // GCond, then render() tries to render a frame although
   // everything else isn't running anymore. This will lead
   // to deadlocks because render() holds the stream lock.
   //
   // Protected by the buffer mutex
   Eina_Bool unlocked : 1;
   Eina_Bool samsung : 1; /**< ST12 is only defined for Samsung-specific GstBuffers */
};
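
/* Illustrative sketch of the shutdown pattern the comment above describes
 * (the surrounding function context is hypothetical): the unlock path flips
 * the flag under the buffer mutex and wakes any waiter, so render() can bail
 * out early instead of deadlocking on the stream lock.
 *
 *   eina_lock_take(&priv->m);
 *   priv->unlocked = EINA_TRUE;
 *   eina_condition_signal(&priv->c);
 *   eina_lock_release(&priv->m);
 */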

struct _Emotion_Gstreamer_Buffer
{
   Emotion_Gstreamer_Video *ev;
   EvasVideoSinkPrivate *sink;

   GstBuffer *frame;

   Eina_Bool preroll : 1;
   Eina_Bool force : 1;
};

struct _Emotion_Gstreamer_Message
{
   Emotion_Gstreamer_Video *ev;

   GstMessage *msg;
};

extern Eina_Bool window_manager_video;
extern Eina_Bool debug_fps;
extern int _emotion_gstreamer_log_domain;
extern Eina_Bool _ecore_x_available;

#ifdef DBG
#undef DBG
#endif
#define DBG(...) EINA_LOG_DOM_DBG(_emotion_gstreamer_log_domain, __VA_ARGS__)

#ifdef INF
#undef INF
#endif
#define INF(...) EINA_LOG_DOM_INFO(_emotion_gstreamer_log_domain, __VA_ARGS__)

#ifdef WRN
#undef WRN
#endif
#define WRN(...) EINA_LOG_DOM_WARN(_emotion_gstreamer_log_domain, __VA_ARGS__)

#ifdef ERR
#undef ERR
#endif
#define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__)

#ifdef CRI
#undef CRI
#endif
#define CRI(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
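
/* Illustrative usage of the logging macros above (the variables in the
 * format arguments are placeholders):
 *
 *   DBG("new frame %ux%u", priv->width, priv->height);
 *   ERR("could not create pipeline for '%s'", ev->uri);
 */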

#define EVAS_TYPE_VIDEO_SINK evas_video_sink_get_type()

GType fakeeos_bin_get_type(void);

#define EVAS_VIDEO_SINK(obj) \
    (G_TYPE_CHECK_INSTANCE_CAST((obj), \
    EVAS_TYPE_VIDEO_SINK, EvasVideoSink))

#define EVAS_VIDEO_SINK_CLASS(klass) \
    (G_TYPE_CHECK_CLASS_CAST((klass), \
    EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))

#define EVAS_IS_VIDEO_SINK(obj) \
    (G_TYPE_CHECK_INSTANCE_TYPE((obj), \
    EVAS_TYPE_VIDEO_SINK))

#define EVAS_IS_VIDEO_SINK_CLASS(klass) \
    (G_TYPE_CHECK_CLASS_TYPE((klass), \
    EVAS_TYPE_VIDEO_SINK))

#define EVAS_VIDEO_SINK_GET_CLASS(obj) \
    (G_TYPE_INSTANCE_GET_CLASS((obj), \
    EVAS_TYPE_VIDEO_SINK, EvasVideoSinkClass))
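
/* Illustrative use of the GObject boilerplate above (the `element` variable
 * is a placeholder):
 *
 *   if (EVAS_IS_VIDEO_SINK(element))
 *     {
 *        EvasVideoSink *sink = EVAS_VIDEO_SINK(element);
 *        ... sink->priv is now safe to access ...
 *     }
 */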

#define GST_TYPE_FAKEEOS_BIN fakeeos_bin_get_type()

GstElement *gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                                     Evas_Object *obj,
                                     const char *uri);

gboolean    gstreamer_plugin_init(GstPlugin *plugin);

Emotion_Gstreamer_Buffer *emotion_gstreamer_buffer_alloc(EvasVideoSinkPrivate *sink,
                                                         GstBuffer *buffer,
                                                         Eina_Bool preroll);
void emotion_gstreamer_buffer_free(Emotion_Gstreamer_Buffer *send);
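
/* Illustrative pairing of the helpers above, roughly what a sink render or
 * preroll path would do (the variable names are placeholders, not the
 * module's actual code):
 *
 *   Emotion_Gstreamer_Buffer *send;
 *
 *   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
 *   if (send)
 *     {
 *        ... push the frame to the Evas object ...
 *        emotion_gstreamer_buffer_free(send);
 *     }
 */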

Emotion_Gstreamer_Message *emotion_gstreamer_message_alloc(Emotion_Gstreamer_Video *ev,
                                                           GstMessage *msg);
void emotion_gstreamer_message_free(Emotion_Gstreamer_Message *send);
Eina_Bool _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
                                                  Eina_Bool force);

typedef struct _ColorSpace_FourCC_Convertion ColorSpace_FourCC_Convertion;
typedef struct _ColorSpace_Format_Convertion ColorSpace_Format_Convertion;

struct _ColorSpace_FourCC_Convertion
{
   const char *name;
   guint32 fourcc;
   Evas_Colorspace eformat;
   Evas_Video_Convert_Cb func;
   Eina_Bool force_height;
};

struct _ColorSpace_Format_Convertion
{
   const char *name;
   GstVideoFormat format;
   Evas_Colorspace eformat;
   Evas_Video_Convert_Cb func;
};

extern const ColorSpace_FourCC_Convertion colorspace_fourcc_convertion[];
extern const ColorSpace_Format_Convertion colorspace_format_convertion[];
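
/* Illustrative shape of a colorspace_fourcc_convertion[] entry (the FourCC,
 * Evas colorspace, callback and force_height value below are placeholders,
 * not necessarily rows of the real table):
 *
 *   { "I420", GST_MAKE_FOURCC('I', '4', '2', '0'),
 *     EVAS_COLORSPACE_YCBCR422P601_PL, _example_i420_convert, EINA_TRUE },
 */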

/** Samsung-specific infrastructure - do not touch, do not modify */
#define MPLANE_IMGB_MAX_COUNT 4
#define SCMN_IMGB_MAX_PLANE 4

typedef struct _GstMultiPlaneImageBuffer GstMultiPlaneImageBuffer;
typedef struct _SCMN_IMGB SCMN_IMGB;

struct _GstMultiPlaneImageBuffer
{
   GstBuffer buffer;

   /* width of each image plane */
   gint      width[MPLANE_IMGB_MAX_COUNT];
   /* height of each image plane */
   gint      height[MPLANE_IMGB_MAX_COUNT];
   /* stride of each image plane */
   gint      stride[MPLANE_IMGB_MAX_COUNT];
   /* elevation of each image plane */
   gint      elevation[MPLANE_IMGB_MAX_COUNT];
   /* user space address of each image plane */
   guchar   *uaddr[MPLANE_IMGB_MAX_COUNT];
   /* index of the real address of each image plane, if needed */
   guchar   *index[MPLANE_IMGB_MAX_COUNT];
   /* left position, if needed */
   gint      x;
   /* top position, if needed */
   gint      y;
   /* to align memory */
   gint      __dummy2;
   /* arbitrary data */
   gint      data[16];
};

struct _SCMN_IMGB
{
   /* width of each image plane */
   int      width[SCMN_IMGB_MAX_PLANE];
   /* height of each image plane */
   int      height[SCMN_IMGB_MAX_PLANE];
   /* stride of each image plane */
   int      stride[SCMN_IMGB_MAX_PLANE];
   /* elevation of each image plane */
   int      elevation[SCMN_IMGB_MAX_PLANE];
   /* user space address of each image plane */
   guchar  *uaddr[SCMN_IMGB_MAX_PLANE];
   /* physical address of each image plane, if needed */
   guchar  *p[SCMN_IMGB_MAX_PLANE];
   /* color space type of image */
   int      cs;
   /* left position, if needed */
   int      x;
   /* top position, if needed */
   int      y;
   /* to align memory */
   int      __dummy2;
   /* arbitrary data */
   int      data[16];
};

void _evas_video_st12_multiplane(unsigned char *evas_data,
                                 const unsigned char *gst_data,
                                 unsigned int w,
                                 unsigned int h,
                                 unsigned int output_height EINA_UNUSED);
void _evas_video_st12(unsigned char *evas_data,
                      const unsigned char *gst_data,
                      unsigned int w EINA_UNUSED,
                      unsigned int h,
                      unsigned int output_height EINA_UNUSED);

#endif /* __EMOTION_GSTREAMER_H__ */