path: root/src/lib/emotion/Emotion.h
author    Gustavo Sverzut Barbieri <barbieri@gmail.com>  2013-01-10 03:43:32 +0000
committer Gustavo Sverzut Barbieri <barbieri@gmail.com>  2013-01-10 03:43:32 +0000
commit    dfb84c1657bfb14a5236b881193b81f4c0b8a69b (patch)
tree      b51b210fc88a21eec8e5907b8bbfe12ebc669f90 /src/lib/emotion/Emotion.h
parent    532284dbbe4259a9f2291f44d3eff376849e8031 (diff)
efl: merge emotion.
this one was quite a huge work, but hopefully it's correct.

NOTES:

* removed vlc generic module, it should go into a separate package.
* gstreamer is enabled by default (see --disable-gstreamer)
* xine is disabled by default (see --enable-xine)
* generic is always built statically if supported
* gstreamer and xine can't be configured as static (just lacks command line
  options, build system supports it)
* v4l2 is enabled by default on linux if eeze is built (see --disable-v4l2)
* emotion_test moved to src/tests/emotion and depends on EFL_ENABLE_TESTS
  (--with-tests), but is still installed if enabled.

TODO (need your help!):

* fix warnings with gstreamer and xine engine
* call engine shutdown functions if building as static
* remove direct usage of PACKAGE_*_DIR and use eina_prefix
* add eina_prefix checkme file as evas and others
* add support for $EFL_RUN_IN_TREE
* create separate package for emotion_generic_modules
* check docs hierarchy (doxygen is segv'in here)

SVN revision: 82501
Diffstat (limited to 'src/lib/emotion/Emotion.h')
-rw-r--r--  src/lib/emotion/Emotion.h  1332
1 file changed, 1332 insertions, 0 deletions
diff --git a/src/lib/emotion/Emotion.h b/src/lib/emotion/Emotion.h
new file mode 100644
index 0000000000..454ee0f42e
--- /dev/null
+++ b/src/lib/emotion/Emotion.h
@@ -0,0 +1,1332 @@
1#ifndef EMOTION_H
2#define EMOTION_H
3
4/**
5 * @file
6 * @brief Emotion Media Library
7 *
8 * These routines are used for Emotion.
9 */
10
11/**
12 *
13 * @page emotion_main Emotion
14 *
15 * @date 2003 (created)
16 *
17 * @section toc Table of Contents
18 *
19 * @li @ref emotion_main_intro
20 * @li @ref emotion_main_work
21 * @li @ref emotion_main_compiling
22 * @li @ref emotion_main_next_steps
23 * @li @ref emotion_main_intro_example
24 *
25 * @section emotion_main_intro Introduction
26 *
27 * A media object library for Evas and Ecore.
28 *
29 * Emotion is a library that allows playing audio and video files, using one of
30 * its backends (gstreamer, xine or generic shm player).
31 *
32 * It is integrated into Ecore through its main loop, and how the decoding of
33 * audio and video is done is transparent to the user of the library. Once
34 * the objects are created, the user can set callbacks for the specific events
35 * and set options on the object, all in the main loop (no threads are needed).
36 *
37 * Emotion is also integrated with Evas. The emotion object returned by
38 * emotion_object_add() is an Evas smart object, so it can be manipulated with
39 * default Evas object functions. Callbacks can be added to the signals emitted
40 * by this object with evas_object_smart_callback_add().
41 *
42 * @section emotion_main_work How does Emotion work?
43 *
44 * The Emotion library uses Evas smart objects to allow you to manipulate the
45 * created object as any other Evas object, and to connect to its signals,
46 * handling them when needed. It's also possible to swallow Emotion objects
47 * inside Edje themes and expect them to behave like a normal image or
48 * rectangle with regard to their dimensions.
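 *
 * A minimal sketch of that workflow (illustrative only; it assumes an already
 * existing Evas canvas @c evas and an application-defined callback
 * @c _playback_started_cb):
 * @code
 * static void
 * _playback_started_cb(void *data, Evas_Object *obj, void *event_info)
 * {
 *    printf("playback started\n");
 * }
 *
 * Evas_Object *em = emotion_object_add(evas);
 * evas_object_smart_callback_add(em, "playback_started",
 *                                _playback_started_cb, NULL);
 * @endcode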
49 *
50 * @section emotion_main_compiling How to compile
51 *
52 * Emotion is a library your application links to. The procedure for this is
53 * very simple. You simply have to compile your application with the
54 * appropriate compiler flags that the @c pkg-config script outputs. For
55 * example:
56 *
57 * Compiling C or C++ files into object files:
58 *
59 * @verbatim
60 gcc -c -o main.o main.c `pkg-config --cflags emotion`
61 @endverbatim
62 *
63 * Linking object files into a binary executable:
64 *
65 * @verbatim
66 gcc -o my_application main.o `pkg-config --libs emotion`
67 @endverbatim
68 *
69 * See @ref pkgconfig
70 *
71 * @section emotion_main_next_steps Next Steps
72 *
73 * After you have understood what Emotion is and installed it on your
74 * system, you should proceed to the programming
75 * interface. We recommend you take a while to learn @ref Ecore and
76 * @ref Evas before getting started.
77 *
78 * Recommended reading:
79 *
80 * @li @ref Emotion_Init to initialize the library.
81 * @li @ref Emotion_Video to control video parameters.
82 * @li @ref Emotion_Audio to control audio parameters.
83 * @li @ref Emotion_Play to control playback.
84 * @li @ref Emotion_Webcam to show cameras.
85 * @li @ref Emotion_API for general programming interface.
86 *
87 * @section emotion_main_intro_example Introductory Example
88 *
89 * @include emotion_basic_example.c
90 *
91 * More examples can be found at @ref emotion_examples.
92 */
93
94#include <Evas.h>
95
96#ifdef EAPI
97# undef EAPI
98#endif
99
100#ifdef _WIN32
101# ifdef EFL_EMOTION_BUILD
102# ifdef DLL_EXPORT
103# define EAPI __declspec(dllexport)
104# else
105# define EAPI
106# endif /* ! DLL_EXPORT */
107# else
108# define EAPI __declspec(dllimport)
109# endif /* ! EFL_EMOTION_BUILD */
110#else
111# ifdef __GNUC__
112# if __GNUC__ >= 4
113# define EAPI __attribute__ ((visibility("default")))
114# else
115# define EAPI
116# endif
117# else
118# define EAPI
119# endif
120#endif /* ! _WIN32 */
121
122/**
123 * @file Emotion.h
124 * @brief The file that provides Emotion the API, with functions available for
125 * play, seek, change volume, etc.
126 */
127
128enum _Emotion_Module
129{
130 EMOTION_MODULE_XINE,
131 EMOTION_MODULE_GSTREAMER
132};
133
134enum _Emotion_Event
135{
136 EMOTION_EVENT_MENU1, // Escape Menu
137 EMOTION_EVENT_MENU2, // Title Menu
138 EMOTION_EVENT_MENU3, // Root Menu
139 EMOTION_EVENT_MENU4, // Subpicture Menu
140 EMOTION_EVENT_MENU5, // Audio Menu
141 EMOTION_EVENT_MENU6, // Angle Menu
142 EMOTION_EVENT_MENU7, // Part Menu
143 EMOTION_EVENT_UP,
144 EMOTION_EVENT_DOWN,
145 EMOTION_EVENT_LEFT,
146 EMOTION_EVENT_RIGHT,
147 EMOTION_EVENT_SELECT,
148 EMOTION_EVENT_NEXT,
149 EMOTION_EVENT_PREV,
150 EMOTION_EVENT_ANGLE_NEXT,
151 EMOTION_EVENT_ANGLE_PREV,
152 EMOTION_EVENT_FORCE,
153 EMOTION_EVENT_0,
154 EMOTION_EVENT_1,
155 EMOTION_EVENT_2,
156 EMOTION_EVENT_3,
157 EMOTION_EVENT_4,
158 EMOTION_EVENT_5,
159 EMOTION_EVENT_6,
160 EMOTION_EVENT_7,
161 EMOTION_EVENT_8,
162 EMOTION_EVENT_9,
163 EMOTION_EVENT_10
164};
165
166/**
167 * @enum _Emotion_Meta_Info
168 *
169 * Used for retrieving information about the media file being played.
170 *
171 * @see emotion_object_meta_info_get()
172 *
173 * @ingroup Emotion_Info
174 */
175enum _Emotion_Meta_Info
176{
177 EMOTION_META_INFO_TRACK_TITLE, /**< track title */
178 EMOTION_META_INFO_TRACK_ARTIST, /**< artist name */
179 EMOTION_META_INFO_TRACK_ALBUM, /**< album name */
180 EMOTION_META_INFO_TRACK_YEAR, /**< track year */
181 EMOTION_META_INFO_TRACK_GENRE, /**< track genre */
182 EMOTION_META_INFO_TRACK_COMMENT, /**< track comments */
183 EMOTION_META_INFO_TRACK_DISC_ID, /**< track disc ID */
184 EMOTION_META_INFO_TRACK_COUNT /**< track count - number of the track in the album */
185};
186
187/**
188 * @enum _Emotion_Vis
189 *
190 * Used for displaying a visualization on the emotion object.
191 *
192 * @see emotion_object_vis_set()
193 *
194 * @ingroup Emotion_Visualization
195 */
196enum _Emotion_Vis
197{
198 EMOTION_VIS_NONE, /**< no visualization set */
199 EMOTION_VIS_GOOM, /**< goom */
200 EMOTION_VIS_LIBVISUAL_BUMPSCOPE, /**< bumpscope */
201 EMOTION_VIS_LIBVISUAL_CORONA, /**< corona */
202 EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES, /**< dancing particles */
203 EMOTION_VIS_LIBVISUAL_GDKPIXBUF, /**< gdkpixbuf */
204 EMOTION_VIS_LIBVISUAL_G_FORCE, /**< G force */
205 EMOTION_VIS_LIBVISUAL_GOOM, /**< goom */
206 EMOTION_VIS_LIBVISUAL_INFINITE, /**< infinite */
207 EMOTION_VIS_LIBVISUAL_JAKDAW, /**< jakdaw */
208 EMOTION_VIS_LIBVISUAL_JESS, /**< jess */
209 EMOTION_VIS_LIBVISUAL_LV_ANALYSER, /**< lv analyser */
210 EMOTION_VIS_LIBVISUAL_LV_FLOWER, /**< lv flower */
211 EMOTION_VIS_LIBVISUAL_LV_GLTEST, /**< lv gltest */
212 EMOTION_VIS_LIBVISUAL_LV_SCOPE, /**< lv scope */
213 EMOTION_VIS_LIBVISUAL_MADSPIN, /**< madspin */
214 EMOTION_VIS_LIBVISUAL_NEBULUS, /**< nebulus */
215 EMOTION_VIS_LIBVISUAL_OINKSIE, /**< oinksie */
216 EMOTION_VIS_LIBVISUAL_PLASMA, /**< plasma */
217 EMOTION_VIS_LAST /* sentinel */
218};
219
220/**
221 * @enum Emotion_Suspend
222 *
223 * Used for Emotion pipeline resource management.
224 *
225 * @see emotion_object_suspend_set()
226 * @see emotion_object_suspend_get()
227 *
228 * @ingroup Emotion_Ressource
229 */
230typedef enum
231{
232 EMOTION_WAKEUP, /**< pipeline is up and running */
233 EMOTION_SLEEP, /**< turn off hardware resource usage like overlay */
234 EMOTION_DEEP_SLEEP, /**< destroy the pipeline, but keep full resolution pixels output around */
235 EMOTION_HIBERNATE /**< destroy the pipeline, and keep half resolution or object resolution if lower */
236} Emotion_Suspend;
237
238/**
239 * @enum _Emotion_Aspect
240 * Defines the aspect ratio option.
241 */
242enum _Emotion_Aspect
243{
244 EMOTION_ASPECT_KEEP_NONE, /**< ignore video aspect ratio */
245 EMOTION_ASPECT_KEEP_WIDTH, /**< respect video aspect, fitting its width inside the object width */
246 EMOTION_ASPECT_KEEP_HEIGHT, /**< respect video aspect, fitting its height inside the object height */
247 EMOTION_ASPECT_KEEP_BOTH, /**< respect video aspect, fitting it inside the object area */
248 EMOTION_ASPECT_CROP, /**< respect video aspect, cropping exceeding area */
249 EMOTION_ASPECT_CUSTOM, /**< use custom borders/crop for the video */
250};
251
252typedef enum _Emotion_Module Emotion_Module;
253typedef enum _Emotion_Event Emotion_Event;
254typedef enum _Emotion_Meta_Info Emotion_Meta_Info; /**< Meta info type to be retrieved. */
255typedef enum _Emotion_Vis Emotion_Vis; /**< Type of visualization. */
256typedef enum _Emotion_Aspect Emotion_Aspect; /**< Aspect ratio option. */
257
258#define EMOTION_CHANNEL_AUTO -1
259#define EMOTION_CHANNEL_DEFAULT 0
260
261#ifdef __cplusplus
262extern "C" {
263#endif
264
265#define EMOTION_VERSION_MAJOR 1
266#define EMOTION_VERSION_MINOR 8
267
268 typedef struct _Emotion_Version
269 {
270 int major;
271 int minor;
272 int micro;
273 int revision;
274 } Emotion_Version;
275
276 EAPI extern Emotion_Version *emotion_version;
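
/* A runtime version check sketch (illustrative only; not part of the original
 * header):
 *
 *   if ((emotion_version->major > 1) ||
 *       ((emotion_version->major == 1) && (emotion_version->minor >= 8)))
 *     printf("Emotion %d.%d.%d (rev %d)\n", emotion_version->major,
 *            emotion_version->minor, emotion_version->micro,
 *            emotion_version->revision);
 */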
277
278/* api calls available */
279
280/**
281 * @brief How to create, initialize, manipulate and connect to signals of an
282 * Emotion object.
283 * @defgroup Emotion_API API available for manipulating Emotion object.
284 * @ingroup Emotion
285 *
286 * @{
287 *
288 * Emotion provides an Evas smart object that allows you to play, control and
289 * display a video or audio file. The API is synchronous but not everything
290 * happens immediately. There are also some signals to report changed states.
291 *
292 * Basically, once the object is created and initialized, a file will be set to
293 * it, and then it can be resized, moved, and controlled by other Evas object
294 * functions.
295 *
296 * However, the decoding of the music and video occurs not in the Ecore main
297 * loop, but usually in another thread (this depends on the module being used).
298 * The synchronization between this other thread and the main loop is not
299 * visible to the end user of the library. The user can just register callbacks
300 * to the available signals to receive information about the changed states, and
301 * can call other functions from the API to request more changes on the
302 * currently loaded file.
303 *
304 * There will be a delay between an API function being called and it actually
305 * being executed, since the request is made in the main thread and needs to
306 * be sent to the decoding thread. For this reason, always call functions like
307 * emotion_object_size_get() or emotion_object_play_length_get() after some
308 * signal has been sent, like "playback_started" or "open_done". @ref
309 * emotion_signals_example.c "This example demonstrates this behavior".
310 *
311 * @section signals Available signals
312 * The Evas_Object returned by emotion_object_add() has a number of signals that
313 * can be listened to using Evas' smart callback mechanism. All signals have
314 * NULL as event info. Interesting signals (a usage sketch follows the list):
315 * @li "playback_started" - Emitted when the playback starts
316 * @li "playback_finished" - Emitted when the playback finishes
317 * @li "frame_decode" - Emitted every time a frame is decoded
318 * @li "open_done" - Emitted when the media file is opened
319 * @li "position_update" - Emitted when emotion_object_position_set is called
320 * @li "decode_stop" - Emitted after the last frame is decoded
321 *
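 * A sketch of listening to one of these signals (it assumes an already created
 * emotion object @c em and an application-defined callback @c _open_done_cb):
 * @code
 * static void
 * _open_done_cb(void *data, Evas_Object *obj, void *event_info)
 * {
 *    // from here on it is safe to query emotion_object_play_length_get(obj)
 * }
 *
 * evas_object_smart_callback_add(em, "open_done", _open_done_cb, NULL);
 * @endcode
 *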
322 * @section Examples
323 *
324 * The following examples exemplify Emotion usage. There's also the
325 * emotion_test binary that is distributed with this library and covers the
326 * entire API, but since it is too long and repetitive to be explained, its code
327 * is just displayed as another example.
328 *
329 * @li @ref emotion_basic_example_c
330 * @li @ref emotion_signals_example.c "Emotion signals"
331 * @li @ref emotion_test_main.c "emotion_test - full API usage"
332 *
333 */
334
335/**
336 * @defgroup Emotion_Init Creation and initialization functions
337 */
338
339/**
340 * @defgroup Emotion_Audio Audio control functions
341 */
342
343/**
344 * @defgroup Emotion_Video Video control functions
345 */
346
347/**
348 * @defgroup Emotion_Visualization Visualization control functions
349 */
350
351/**
352 * @defgroup Emotion_Info Miscellaneous information retrieval functions
353 */
354
355/**
356 * @defgroup Emotion_Ressource Video resource management
357 */
358
359EAPI Eina_Bool emotion_init(void);
360EAPI Eina_Bool emotion_shutdown(void);
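
/* Typical setup/teardown pairing (a sketch; error handling and the rest of the
 * application are assumed):
 *
 *   if (!emotion_init()) return EINA_FALSE;
 *   // ... create emotion objects, run the Ecore main loop ...
 *   emotion_shutdown();
 */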
361
362/**
363 * @brief Add an emotion object to the canvas.
364 *
365 * @param evas The canvas where the object will be added to.
366 * @return The emotion object just created.
367 *
368 * This function creates an emotion object and adds it to the specified @p evas.
369 * The returned object can be manipulated as any other Evas object, using the
370 * default object manipulation functions - evas_object_*.
371 *
372 * After creating the object with this function, it's still necessary to
373 * initialize it with emotion_object_init(), and if an audio file is going to be
374 * played with this object instead of a video, use
375 * emotion_object_video_mute_set().
376 *
377 * The next step is to open the desired file with emotion_object_file_set(), and
378 * start playing it with emotion_object_play_set().
379 *
380 * @see emotion_object_init()
381 * @see emotion_object_video_mute_set()
382 * @see emotion_object_file_set()
383 * @see emotion_object_play_set()
384 *
385 * @ingroup Emotion_Init
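 *
 * A minimal creation sketch (the engine name and the file path are illustrative
 * assumptions):
 * @code
 * Evas_Object *em = emotion_object_add(evas);
 * if (emotion_object_init(em, "gstreamer"))
 *   {
 *      emotion_object_file_set(em, "/tmp/example.ogv");
 *      emotion_object_play_set(em, EINA_TRUE);
 *   }
 * @endcode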
386 */
387EAPI Evas_Object *emotion_object_add (Evas *evas);
388
389/**
390 * @brief Set the specified option for the current module.
391 *
392 * @param obj The emotion object on which the option is being set.
393 * @param opt The option that is being set. Currently supported options: "video"
394 * and "audio".
395 * @param val The value of the option. Currently only the value "off" is supported.
396 *
397 * This function allows one to mute the video or audio of the emotion object.
398 *
399 * @note Please don't use this function, consider using
400 * emotion_object_audio_mute_set() and emotion_object_video_mute_set() instead.
401 *
402 * @see emotion_object_audio_mute_set()
403 * @see emotion_object_video_mute_set()
404 *
405 * @ingroup Emotion_Init
406 */
407EAPI void emotion_object_module_option_set (Evas_Object *obj, const char *opt, const char *val);
408
409/**
410 * @brief Initializes an emotion object with the specified module.
411 *
412 * @param obj The emotion object to be initialized.
413 * @param module_filename The name of the module to be used (gstreamer or xine).
414 * @return @c EINA_TRUE if the specified module was successfully initialized for
415 * this object, @c EINA_FALSE otherwise.
416 *
417 * This function is required after creating the emotion object, in order to
418 * specify which module will be used with this object. Different objects can
419 * use different modules to play a media file. The current supported modules are
420 * @b gstreamer and @b xine.
421 *
422 * To use any of them, you need to make sure that support for them was compiled
423 * correctly.
424 *
425 * @note It's possible to disable the build of a module with
426 * --disable-module_name.
427 *
428 * @see emotion_object_add()
429 * @see emotion_object_file_set()
430 *
431 * @ingroup Emotion_Init
432 */
433EAPI Eina_Bool emotion_object_init (Evas_Object *obj, const char *module_filename);
434
435/**
436 * @brief Set borders for the emotion object.
437 *
438 * @param obj The emotion object where borders are being set.
439 * @param l The left border.
440 * @param r The right border.
441 * @param t The top border.
442 * @param b The bottom border.
443 *
444 * This function sets borders for the emotion video object (just when a video is
445 * present). When positive values are given to one of the parameters, a border
446 * will be added to the respective side of the object, representing that
447 * size relative to the original video size. However, if the video is scaled up
448 * or down (i.e. the emotion object size is different from the video size), the
449 * borders will be scaled accordingly.
450 *
451 * If a negative value is given to one of the parameters, instead of a border,
452 * that respective side of the video will be cropped.
453 *
454 * It's possible to set a color for the added borders (default is transparent)
455 * with emotion_object_bg_color_set(). By default, an Emotion object doesn't
456 * have any border.
457 *
458 * @see emotion_object_border_get()
459 * @see emotion_object_bg_color_set()
460 *
461 * @ingroup Emotion_Video
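 *
 * A sketch combining borders with a background color (the values are
 * arbitrary):
 * @code
 * emotion_object_border_set(em, 10, 10, 0, 0);   // 10 pixel borders left/right
 * emotion_object_bg_color_set(em, 0, 0, 0, 255); // fill them with opaque black
 * @endcode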
462 */
463EAPI void emotion_object_border_set(Evas_Object *obj, int l, int r, int t, int b);
464
465/**
466 * @brief Get the borders set for the emotion object.
467 *
468 * @param obj The emotion object from which the borders are being retrieved.
469 * @param l The left border.
470 * @param r The right border.
471 * @param t The top border.
472 * @param b The bottom border.
473 *
474 * @see emotion_object_border_set()
475 *
476 * @ingroup Emotion_Video
477 */
478EAPI void emotion_object_border_get(const Evas_Object *obj, int *l, int *r, int *t, int *b);
479
480/**
481 * @brief Set a color for the background rectangle of this emotion object.
482 *
483 * @param obj The emotion object where the background color is being set.
484 * @param r Red component of the color.
485 * @param g Green component of the color.
486 * @param b Blue component of the color.
487 * @param a Alpha channel of the color.
488 *
489 * This is useful when a border is added to any side of the Emotion object. The
490 * area between the edge of the video and the edge of the object will be filled
491 * with the specified color.
492 *
493 * The default color is 0, 0, 0, 0 (transparent).
494 *
495 * @see emotion_object_bg_color_get()
496 *
497 * @ingroup Emotion_Video
498 */
499EAPI void emotion_object_bg_color_set(Evas_Object *obj, int r, int g, int b, int a);
500
501/**
502 * @brief Get the background color set for the emotion object.
503 *
504 * @param obj The emotion object from which the background color is being retrieved.
505 * @param r Red component of the color.
506 * @param g Green component of the color.
507 * @param b Blue component of the color.
508 * @param a Alpha channel of the color.
509 *
510 * @see emotion_object_bg_color_set()
511 *
512 * @ingroup Emotion_Video
513 */
514EAPI void emotion_object_bg_color_get(const Evas_Object *obj, int *r, int *g, int *b, int *a);
515
516/**
517 * @brief Set whether emotion should keep the aspect ratio of the video.
518 *
519 * @param obj The emotion object where to set the aspect.
520 * @param a The aspect ratio policy.
521 *
522 * Instead of manually calculating the required border to set with
523 * emotion_object_border_set(), and using this to fix the aspect ratio of the
524 * video when the emotion object has a different aspect, it's possible to just
525 * set the policy to be used.
526 *
527 * The options are:
528 *
529 * - @b #EMOTION_ASPECT_KEEP_NONE - ignore the video aspect ratio, and reset any
530 * border set to 0, stretching the video inside the emotion object area. This
531 * option is similar to EVAS_ASPECT_CONTROL_NONE size hint.
532 * - @b #EMOTION_ASPECT_KEEP_WIDTH - respect the video aspect ratio, fitting the
533 * video width inside the object width. This option is similar to
534 * EVAS_ASPECT_CONTROL_HORIZONTAL size hint.
535 * - @b #EMOTION_ASPECT_KEEP_HEIGHT - respect the video aspect ratio, fitting
536 * the video height inside the object height. This option is similar to
537 * EVAS_ASPECT_CONTROL_VERTICAL size hint.
538 * - @b #EMOTION_ASPECT_KEEP_BOTH - respect the video aspect ratio, fitting both
539 * its width and height inside the object area. This option is similar to
540 * EVAS_ASPECT_CONTROL_BOTH size hint. It's the effect called letterboxing.
541 * - @b #EMOTION_ASPECT_CROP - respect the video aspect ratio, fitting the width
542 * or height inside the object area, and cropping the exceeding areas of the
543 * video in height or width. It's the effect called pan-and-scan.
544 * - @b #EMOTION_ASPECT_CUSTOM - ignore the video aspect ratio, and use the
545 * current borders set with emotion_object_border_set().
546 *
547 * @note Calling this function with any value except #EMOTION_ASPECT_CUSTOM will
548 * invalidate borders set with emotion_object_border_set().
549 *
550 * @note Calling emotion_object_border_set() will automatically set the aspect
551 * policy to #EMOTION_ASPECT_CUSTOM.
552 *
553 * @see emotion_object_border_set()
554 * @see emotion_object_keep_aspect_get()
555 *
556 * @ingroup Emotion_Video
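 *
 * A short sketch (assumes an emotion object @c em that already has a file set):
 * @code
 * emotion_object_keep_aspect_set(em, EMOTION_ASPECT_KEEP_BOTH); // letterbox
 * // or crop 8 pixels on each side manually, which switches the policy to
 * // EMOTION_ASPECT_CUSTOM:
 * emotion_object_border_set(em, -8, -8, 0, 0);
 * @endcode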
557 */
558EAPI void emotion_object_keep_aspect_set(Evas_Object *obj, Emotion_Aspect a);
559
560/**
561 * @brief Get the current emotion aspect ratio policy.
562 *
563 * @param obj The emotion object from which we are fetching the aspect ratio
564 * policy.
565 * @return The current aspect ratio policy.
566 *
567 * @see emotion_object_keep_aspect_set()
568 *
569 * @ingroup Emotion_Video
570 */
571EAPI Emotion_Aspect emotion_object_keep_aspect_get(const Evas_Object *obj);
572
573/**
574 * @brief Set the file to be played in the Emotion object.
575 *
576 * @param obj The emotion object where the file is being loaded.
577 * @param filename Path to the file to be loaded. It can be absolute or relative
578 * path.
579 * @return EINA_TRUE if the new file could be loaded successfully, and
580 * EINA_FALSE if the file could not be loaded. This happens when the file
581 * could not be found, when the module couldn't open the file, when no module is
582 * initialized in this object, or when the @p filename is the same as the
583 * one previously set.
584 *
585 * This function sets the file to be used with this emotion object. If the
586 * object already has another file set, this file will be unset and unloaded,
587 * and the new file will be loaded to this emotion object. The seek position
588 * will be set to 0, and the emotion object will be paused, instead of playing.
589 *
590 * If there was already a filename set, and it's the same as the one being set
591 * now, this function does nothing and returns EINA_FALSE.
592 *
593 * Use @c NULL as argument to @p filename if you want to unload the current file
594 * but don't want to load anything else.
595 *
596 * @see emotion_object_init()
597 * @see emotion_object_play_set()
598 * @see emotion_object_file_get()
599 *
600 * @ingroup Emotion_Init
601 */
602EAPI Eina_Bool emotion_object_file_set (Evas_Object *obj, const char *filename);
603
604/**
605 * @brief Get the filename of the file associated with the emotion object.
606 *
607 * @param obj The emotion object from which the filename will be retrieved.
608 * @return The path to the file loaded into this emotion object.
609 *
610 * This function returns the path of the file loaded in this emotion object. If
611 * no file is set, it will return @c NULL.
612 *
613 * @note Don't free or change the string returned by this function in any way.
614 * If you want to unset it, use @c emotion_object_file_set(obj, NULL).
615 *
616 * @see emotion_object_file_set()
617 *
618 * @ingroup Emotion_Init
619 */
620EAPI const char *emotion_object_file_get (const Evas_Object *obj);
621/**
622 * @defgroup Emotion_Play Play control functions
623 * @ingroup Emotion
624 *
625 * @{
626 */
627/**
628 *
629 * @brief Set play/pause state of the media file.
630 *
631 * @param obj The emotion object whose state will be changed.
632 * @param play EINA_TRUE to play, EINA_FALSE to pause.
633 *
634 * This function sets the currently playing status of the video. Using this
635 * function to play or pause the video doesn't alter its current position.
636 */
637EAPI void emotion_object_play_set (Evas_Object *obj, Eina_Bool play);
638/**
639 * @brief Get play/pause state of the media file.
640 *
641 * @param obj The emotion object from which the state will be retrieved.
642 * @return EINA_TRUE if playing. EINA_FALSE if not playing.
643 */
644EAPI Eina_Bool emotion_object_play_get (const Evas_Object *obj);
645/**
646 * @brief Set the position in the media file.
647 *
648 * @param obj The emotion object whose position will be changed.
649 * @param sec The position(in seconds) to which the media file will be set.
650 *
651 * This function sets the current position of the media file to @p sec. This
652 * only works on seekable streams. Setting the position doesn't change the
653 * playing state of the media file.
654 *
655 * @see emotion_object_seekable_get
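 *
 * A relative-seek sketch (jump 10 seconds forward on a seekable stream):
 * @code
 * if (emotion_object_seekable_get(em))
 *   emotion_object_position_set(em, emotion_object_position_get(em) + 10.0);
 * @endcode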
656 */
657EAPI void emotion_object_position_set (Evas_Object *obj, double sec);
658/**
659 * @brief Get the position in the media file.
660 *
661 * @param obj The emotion object from which the position will be retrieved.
662 * @return The position of the media file.
663 *
664 * The position is returned as the number of seconds since the beginning of the
665 * media file.
666 */
667EAPI double emotion_object_position_get (const Evas_Object *obj);
668
669/**
670 * @brief Get how full the buffering cache is.
671 *
672 * @param obj The emotion object from which the buffer size will be retrieved.
673 * @return The buffer percent size, ranging from 0.0 to 1.0
674 *
675 * The buffer fill level is returned as a number between 0.0 and 1.0: 0.0 means
676 * the buffer is empty, 1.0 means it is full.
677 * If no buffering is in progress, 1.0 is returned. The same holds when the
678 * backend doesn't support buffering, so you can always
679 * check for buffer_size < 1.0 to know whether buffering is in progress.
680 *
681 * @warning The generic backend doesn't implement this (it will return 1.0).
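 *
 * A sketch of the check described above:
 * @code
 * if (emotion_object_buffer_size_get(em) < 1.0)
 *   printf("buffering: %2.0f%%\n", emotion_object_buffer_size_get(em) * 100.0);
 * @endcode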
682 */
683EAPI double emotion_object_buffer_size_get (const Evas_Object *obj);
684
685/**
686 * @brief Get whether the media file is seekable.
687 *
688 * @param obj The emotion object from which the seekable status will be
689 * retrieved.
690 * @return EINA_TRUE if the media file is seekable, EINA_FALSE otherwise.
691 */
692EAPI Eina_Bool emotion_object_seekable_get (const Evas_Object *obj);
693/**
694 * @brief Get the length of play for the media file.
695 *
696 * @param obj The emotion object from which the length will be retrieved.
697 * @return The length of the media file in seconds.
698 *
699 * This function returns the length of the media file in seconds.
700 *
701 * @warning This will return 0 if called before the "length_change" signal has
702 * been emitted.
703 */
704EAPI double emotion_object_play_length_get (const Evas_Object *obj);
705
706/**
707 * @brief Set the play speed of the media file.
708 *
709 * @param obj The emotion object whose speed will be set.
710 * @param speed The speed to be set in the range [0,infinity)
711 *
712 * This function sets the speed with which the media file will be played. 1.0
713 * represents normal speed, 2.0 double speed, 0.5 half speed, and so on.
714 *
715 * @warning The only backend that implements this is the experimental VLC
716 * backend.
717 */
718EAPI void emotion_object_play_speed_set (Evas_Object *obj, double speed);
719/**
720 * @brief Get the play speed of the media file.
721 *
722 * @param obj The emotion object from which the play speed will be retrieved.
723 * @return The current speed of the media file.
724 *
725 * @see emotion_object_play_speed_set
726 */
727EAPI double emotion_object_play_speed_get (const Evas_Object *obj);
728/**
729 * @brief Get a textual description of how much of the file has been played.
730 *
731 * @param obj The emotion object from which the progress info will be retrieved.
732 * @return The progress of the media file.
733 *
734 * @warning Don't change or free the returned string.
735 * @warning The gstreamer and xine backends don't implement this (it will return NULL).
736 */
737EAPI const char *emotion_object_progress_info_get (const Evas_Object *obj);
738/**
739 * @brief Get how much of the file has been played.
740 *
741 * @param obj The emotion object from which the progress status will be retrieved.
742 * @return The progress of the media file.
743 *
744 * This function gets the progress of playing the file; the return value is in
745 * the [0, 1] range.
746 *
747 * @warning The gstreamer and xine backends don't implement this (it will return 0).
748 */
749EAPI double emotion_object_progress_status_get (const Evas_Object *obj);
750/**
751 * @}
752 */
753EAPI Eina_Bool emotion_object_video_handled_get (const Evas_Object *obj);
754EAPI Eina_Bool emotion_object_audio_handled_get (const Evas_Object *obj);
755
756/**
757 * @brief Retrieve the video aspect ratio of the media file loaded.
758 *
759 * @param obj The emotion object which the video aspect ratio will be retrieved
760 * from.
761 * @return The video aspect ratio of the file loaded.
762 *
763 * This function returns the video aspect ratio (width / height) of the file
764 * loaded. It can be used to adapt the size of the emotion object in the canvas,
765 * so the aspect won't be changed (by wrongly resizing the object). Or to crop
766 * the video correctly, if necessary.
767 *
768 * The described behavior can be applied as follows. Consider a given
769 * emotion object that we want to position inside an area, which we will
770 * represent by @c w and @c h. Since we may want to position this object either
771 * stretching it (ignoring the aspect ratio), or filling the entire area while
772 * overflowing the video, or fitting the video inside the area while keeping the
773 * aspect ratio, we must compare the video aspect ratio with the area aspect ratio:
774 * @code
775 * int w = 200, h = 300; // an arbitrary value which represents the area where
776 * // the video would be placed
777 * int vw, vh;
778 * double r, vr = emotion_object_ratio_get(obj);
779 * r = (double)w / h;
780 * @endcode
781 *
782 * Now, if we want to make the video fit inside the area, the following code
783 * would do it:
784 * @code
785 * if (vr > r) // the video is wider than the area
786 * {
787 * vw = w;
788 * vh = w / vr;
789 * }
790 * else // the video is taller than the area
791 * {
792 * vh = h;
793 * vw = h * vr;
794 * }
795 * evas_object_resize(obj, vw, vh);
796 * @endcode
797 *
798 * And for keeping the aspect ratio but making the video fill the entire area,
799 * overflowing the content which can't fit inside it, we would do:
800 * @code
801 * if (vr > r) // the video is wider than the area
802 * {
803 * vh = h;
804 * vw = h * vr;
805 * }
806 * else // the video is taller than the area
807 * {
808 * vw = w;
809 * vh = w / vr;
810 * }
811 * evas_object_resize(obj, vw, vh);
812 * @endcode
813 *
814 * Finally, by just resizing the video to the video area, we would have the
815 * video stretched:
816 * @code
817 * vw = w;
818 * vh = h;
819 * evas_object_resize(obj, vw, vh);
820 * @endcode
821 *
822 * The following diagram exemplifies what would happen to the video,
823 * respectively, in each case:
824 *
825 * @image html emotion_ratio.png
826 * @image latex emotion_ratio.eps width=\textwidth
827 *
828 * @note This function returns the aspect ratio that the video @b should be, but
829 * sometimes the reported size from emotion_object_size_get() represents a
830 * different aspect ratio. You can safely resize the video to respect the aspect
831 * ratio returned by @b this function.
832 *
833 * @see emotion_object_size_get()
834 *
835 * @ingroup Emotion_Video
836 */
837EAPI double emotion_object_ratio_get (const Evas_Object *obj);
838
839/**
840 * @brief Retrieve the video size of the loaded file.
841 *
842 * @param obj The object from which we are retrieving the video size.
843 * @param iw A pointer to a variable where the width will be stored.
844 * @param ih A pointer to a variable where the height will be stored.
845 *
846 * This function returns the reported size of the loaded video file. If a file
847 * that doesn't contain a video channel is loaded, then this size can be
848 * ignored.
849 *
850 * The value reported by this function should be consistent with the aspect
851 * ratio returned by emotion_object_ratio_get(), but sometimes the information
852 * stored in the file is wrong. So use the ratio reported by
853 * emotion_object_ratio_get(), since it is more likely to be accurate.
854 *
855 * @note Use @c NULL for @p iw or @p ih if you don't need one of these values.
856 *
857 * @see emotion_object_ratio_get()
858 *
859 * @ingroup Emotion_Video
860 */
861EAPI void emotion_object_size_get (const Evas_Object *obj, int *iw, int *ih);
862
863/**
864 * @brief Sets whether to use the high-quality image scaling algorithm
865 * for the given video object.
866 *
867 * When enabled, a higher quality video scaling algorithm is used when
868 * scaling videos to sizes other than the source video. This gives
869 * better results but is more computationally expensive.
870 *
871 * @param obj The given video object.
872 * @param smooth Whether to use smooth scale or not.
873 *
874 * @see emotion_object_smooth_scale_get()
875 *
876 * @ingroup Emotion_Video
877 */
878EAPI void emotion_object_smooth_scale_set (Evas_Object *obj, Eina_Bool smooth);
879
880/**
881 * @brief Gets whether the high-quality image scaling algorithm
882 * of the given video object is used.
883 *
884 * @param obj The given video object.
885 * @return Whether the smooth scale is used or not.
886 *
887 * @see emotion_object_smooth_scale_set()
888 *
889 * @ingroup Emotion_Video
890 */
891EAPI Eina_Bool emotion_object_smooth_scale_get (const Evas_Object *obj);
892EAPI void emotion_object_event_simple_send (Evas_Object *obj, Emotion_Event ev);
893
894/**
895 * @brief Set the audio volume.
896 *
897 * @param obj The object where the volume is being set.
898 * @param vol The new volume parameter. Range is from 0.0 to 1.0.
899 *
900 * Sets the audio volume of the stream being played. This has nothing to do with
901 * the system volume. This volume will be multiplied by the system volume. e.g.:
902 * if the current volume level is 0.5, and the system volume is 50%, it will be
903 * 0.5 * 0.5 = 0.25.
904 *
905 * The default value depends on the module used. This value doesn't get changed
906 * when another file is loaded.
907 *
908 * @see emotion_object_audio_volume_get()
909 *
910 * @ingroup Emotion_Audio
911 */
912EAPI void emotion_object_audio_volume_set (Evas_Object *obj, double vol);
913
914/**
915 * @brief Get the audio volume.
916 *
917 * @param obj The object from which we are retrieving the volume.
918 * @return The current audio volume level for this object.
919 *
920 * Get the current value for the audio volume level. Range is from 0.0 to 1.0.
921 * This volume is set with emotion_object_audio_volume_set().
922 *
923 * @see emotion_object_audio_volume_set()
924 *
925 * @ingroup Emotion_Audio
926 */
927EAPI double emotion_object_audio_volume_get (const Evas_Object *obj);
928
929/**
930 * @brief Set the mute audio option for this object.
931 *
932 * @param obj The object on which we are setting the mute audio option.
933 * @param mute Whether the audio should be muted (@c EINA_TRUE) or not (@c
934 * EINA_FALSE).
935 *
936 * This function sets the mute audio option for this emotion object. The current
937 * module used for this object can use this to avoid decoding the audio portion
938 * of the loaded media file.
939 *
940 * @see emotion_object_audio_mute_get()
941 * @see emotion_object_video_mute_set()
942 *
943 * @ingroup Emotion_Audio
944 */
945EAPI void emotion_object_audio_mute_set (Evas_Object *obj, Eina_Bool mute);
946
947/**
948 * @brief Get the mute audio option of this object.
949 *
950 * @param obj The object which we are retrieving the mute audio option from.
951 * @return Whether the audio is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
952 *
953 * This function returns the mute audio option from this emotion object. It can
954 * be set with emotion_object_audio_mute_set().
955 *
956 * @see emotion_object_audio_mute_set()
957 *
958 * @ingroup Emotion_Audio
959 */
960EAPI Eina_Bool emotion_object_audio_mute_get (const Evas_Object *obj);
961EAPI int emotion_object_audio_channel_count (const Evas_Object *obj);
962EAPI const char *emotion_object_audio_channel_name_get(const Evas_Object *obj, int channel);
963EAPI void emotion_object_audio_channel_set (Evas_Object *obj, int channel);
964EAPI int emotion_object_audio_channel_get (const Evas_Object *obj);
965
966/**
967 * @brief Set the mute video option for this object.
968 *
969 * @param obj The object on which we are setting the mute video option.
970 * @param mute Whether the video should be muted (@c EINA_TRUE) or not (@c
971 * EINA_FALSE).
972 *
973 * This function sets the mute video option for this emotion object. The
974 * current module used for this object can use this information to avoid
975 * decoding the video portion of the loaded media file.
976 *
977 * @see emotion_object_video_mute_get()
978 * @see emotion_object_audio_mute_set()
979 *
980 * @ingroup Emotion_Video
981 */
982EAPI void emotion_object_video_mute_set (Evas_Object *obj, Eina_Bool mute);
983
984/**
985 * @brief Get the mute video option of this object.
986 *
987 * @param obj The object which we are retrieving the mute video option from.
988 * @return Whether the video is muted (@c EINA_TRUE) or not (@c EINA_FALSE).
989 *
990 * This function returns the mute video option from this emotion object. It can
991 * be set with emotion_object_video_mute_set().
992 *
993 * @see emotion_object_video_mute_set()
994 *
995 * @ingroup Emotion_Video
996 */
997EAPI Eina_Bool emotion_object_video_mute_get (const Evas_Object *obj);
998
999/**
1000 * @brief Set the video's subtitle file path.
1001 *
1002 * @param obj The object for which we are setting a subtitle file path.
1003 * @param filepath The subtitle file path.
1004 *
1005 * This function sets a video's subtitle file path (e.g. an .srt file). For
1006 * supported subtitle formats, consult the backend's documentation.
1007 *
1008 * @see emotion_object_video_subtitle_file_get().
1009 *
1010 * @ingroup Emotion_Video
1011 * @since 1.8
1012 */
1013EAPI void emotion_object_video_subtitle_file_set (Evas_Object *obj, const char *filepath);
1014
1015/**
1016 * @brief Get the video's subtitle file path.
1017 *
1018 * @param obj The object which we are retrieving the subtitle file path from.
1019 * @return The video's subtitle file path previously set, NULL otherwise.
1020 *
1021 * This function returns the video's subtitle file path; if none was previously
1022 * set, or on error, NULL is returned.
1023 *
1024 * @see emotion_object_video_subtitle_file_set().
1025 *
1026 * @ingroup Emotion_Video
1027 * @since 1.8
1028 */
1029EAPI const char *emotion_object_video_subtitle_file_get (const Evas_Object *obj);
1030
1031/**
1032 * @brief Get the number of available video channels
1033 *
1034 * @param obj The object which we are retrieving the channel count from
1035 * @return the number of available channels.
1036 *
1037 * @see emotion_object_video_channel_name_get()
1038 *
1039 * @ingroup Emotion_Video
1040 */
1041EAPI int emotion_object_video_channel_count (const Evas_Object *obj);
1042EAPI const char *emotion_object_video_channel_name_get(const Evas_Object *obj, int channel);
1043EAPI void emotion_object_video_channel_set (Evas_Object *obj, int channel);
1044EAPI int emotion_object_video_channel_get (const Evas_Object *obj);
1045EAPI void emotion_object_spu_mute_set (Evas_Object *obj, Eina_Bool mute);
1046EAPI Eina_Bool emotion_object_spu_mute_get (const Evas_Object *obj);
1047EAPI int emotion_object_spu_channel_count (const Evas_Object *obj);
1048EAPI const char *emotion_object_spu_channel_name_get (const Evas_Object *obj, int channel);
1049EAPI void emotion_object_spu_channel_set (Evas_Object *obj, int channel);
1050EAPI int emotion_object_spu_channel_get (const Evas_Object *obj);
1051EAPI int emotion_object_chapter_count (const Evas_Object *obj);
1052EAPI void emotion_object_chapter_set (Evas_Object *obj, int chapter);
1053EAPI int emotion_object_chapter_get (const Evas_Object *obj);
1054EAPI const char *emotion_object_chapter_name_get (const Evas_Object *obj, int chapter);
1055EAPI void emotion_object_eject (Evas_Object *obj);
1056
1057/**
1058 * @brief Get the dvd title from this emotion object.
1059 *
1060 * @param obj The object which the title will be retrieved from.
1061 * @return A string containing the title.
1062 *
1063 * This function is only useful when playing a DVD.
1064 *
1065 * @note Don't change or free the string returned by this function.
1066 *
1067 * @ingroup Emotion_Info
1068 */
1069EAPI const char *emotion_object_title_get (const Evas_Object *obj);
1070EAPI const char *emotion_object_ref_file_get (const Evas_Object *obj);
1071EAPI int emotion_object_ref_num_get (const Evas_Object *obj);
1072EAPI int emotion_object_spu_button_count_get (const Evas_Object *obj);
1073EAPI int emotion_object_spu_button_get (const Evas_Object *obj);
1074
1075/**
1076 * @brief Retrieve meta information from this file being played.
1077 *
1078 * @param obj The object which the meta info will be extracted from.
1079 * @param meta The type of meta information that will be extracted.
1080 *
1081 * This function retrieves information about the file loaded. It can retrieve
1082 * the track title, artist name, album name, etc. See @ref Emotion_Meta_Info
1083 * for all the possibilities.
1084 *
1085 * The meta info may not be available on all types of files. It will return @c
1086 * NULL if the file doesn't have meta info, or if this specific field is
1087 * empty.
1088 *
1089 * @note Don't change or free the string returned by this function.
1090 *
1091 * @see Emotion_Meta_Info
1092 *
1093 * @ingroup Emotion_Info
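 *
 * A sketch of reading a couple of fields (remember the returned strings may be
 * NULL and must not be freed):
 * @code
 * const char *title = emotion_object_meta_info_get(em, EMOTION_META_INFO_TRACK_TITLE);
 * const char *artist = emotion_object_meta_info_get(em, EMOTION_META_INFO_TRACK_ARTIST);
 * printf("%s - %s\n", artist ? artist : "?", title ? title : "?");
 * @endcode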
1094 */
1095EAPI const char *emotion_object_meta_info_get (const Evas_Object *obj, Emotion_Meta_Info meta);
1096
1097/**
1098 * @brief Set the visualization to be used with this object.
1099 *
1100 * @param obj The object where the visualization will be set on.
1101 * @param visualization The type of visualization to be used.
1102 *
1103 * The @p visualization specified will be played instead of a video. This is
1104 * commonly used to display a visualization for audio only files (musics).
1105 *
1106 * The available visualizations are @ref Emotion_Vis.
1107 *
1108 * @see Emotion_Vis
1109 * @see emotion_object_vis_get()
1110 * @see emotion_object_vis_supported()
1111 *
1112 * @ingroup Emotion_Visualization
1113 */
1114EAPI void emotion_object_vis_set (Evas_Object *obj, Emotion_Vis visualization);
1115
1116/**
1117 * @brief Get the type of visualization in use by this emotion object.
1118 *
1119 * @param obj The emotion object which the visualization is being retrieved
1120 * from.
1121 * @return The type of visualization in use by this object.
1122 *
1123 * The type of visualization can be set by emotion_object_vis_set().
1124 *
1125 * @see Emotion_Vis
1126 * @see emotion_object_vis_set()
1127 * @see emotion_object_vis_supported()
1128 *
1129 * @ingroup Emotion_Visualization
1130 */
1131EAPI Emotion_Vis emotion_object_vis_get (const Evas_Object *obj);
1132
1133/**
1134 * @brief Query whether a type of visualization is supported by this object.
1135 *
1136 * @param obj The object which the query is being run on.
1137 * @param visualization The type of visualization that is being queried.
1138 * @return EINA_TRUE if the visualization is supported, EINA_FALSE otherwise.
1139 *
1140 * This can be used to check if a visualization is supported. e.g.: one wants to
1141 * display a list of available visualizations for a specific object.
1142 *
1143 * @see Emotion_Vis
1144 * @see emotion_object_vis_set()
1145 * @see emotion_object_vis_get()
1146 *
1147 * @ingroup Emotion_Visualization
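 *
 * A sketch of picking the first supported visualization (illustrative only):
 * @code
 * Emotion_Vis v;
 * for (v = EMOTION_VIS_GOOM; v < EMOTION_VIS_LAST; v++)
 *   if (emotion_object_vis_supported(em, v))
 *     {
 *        emotion_object_vis_set(em, v);
 *        break;
 *     }
 * @endcode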
1148 */
1149EAPI Eina_Bool emotion_object_vis_supported (const Evas_Object *obj, Emotion_Vis visualization);
1150
1151/**
1152 * @brief Raise the priority of an object so it gets privileged access to hardware resources.
1153 *
1154 * @param obj The object which the query is being run on.
1155 * @param priority EINA_TRUE means the object gets priority access to the hardware resource.
1156 *
1157 * Hardware often has a few dedicated pipelines that process video at no cost to the CPU.
1158 * Especially on SoCs, you mostly have one (on mobile phone SoCs) or two (on set-top-box SoCs,
1159 * where Picture in Picture is needed). Most applications have only a few video streams that
1160 * really deserve high frame rate, high quality output. That's what this call is for.
1161 *
1162 * Please note that if Emotion can't acquire a privileged hardware resource, it will fall back
1163 * to the no-priority path. This works on a first-ask, first-get basis.
1164 *
1165 * @see emotion_object_priority_get()
1166 *
1167 * @ingroup Emotion_Ressource
1168 */
1169EAPI void emotion_object_priority_set(Evas_Object *obj, Eina_Bool priority);
1170
1171/**
1172 * @brief Get the actual priority of an object.
1173 *
1174 * @param obj The object which the query is being run on.
1175 * @return EINA_TRUE if the object has priority access to the hardware.
1176 *
1177 * This returns the priority status of an object. If it failed to get privileged
1178 * access to the hardware, it will return EINA_FALSE.
1179 *
1180 * @see emotion_object_priority_set()
1181 *
1182 * @ingroup Emotion_Ressource
1183 */
1184EAPI Eina_Bool emotion_object_priority_get(const Evas_Object *obj);
1185
1186/**
1187 * @brief Change the state of an object pipeline.
1188 *
1189 * @param obj The object which the query is being run on.
1190 * @param state The new state for the object.
1191 *
1192 * Changing the state of a pipeline should help preserve the battery of an embedded device.
1193 * But it will only work sanely if the pipeline is not playing at the time you change its
1194 * state. Depending on the engine, not all states may be implemented.
1195 *
1196 * @see Emotion_Suspend
1197 * @see emotion_object_suspend_get()
1198 *
1199 * @ingroup Emotion_Ressource
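 *
 * A power-saving sketch (pause first, then relax the pipeline; purely
 * illustrative):
 * @code
 * emotion_object_play_set(em, EINA_FALSE);
 * emotion_object_suspend_set(em, EMOTION_SLEEP);
 * @endcode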
1200 */
1201EAPI void emotion_object_suspend_set(Evas_Object *obj, Emotion_Suspend state);
1202
1203/**
1204 * @brief Get the current state of the pipeline
1205 *
1206 * @param obj The object which the query is being run on.
1207 * @return the current state of the pipeline.
1208 *
1209 * @see Emotion_Suspend
1210 * @see emotion_object_suspend_set()
1211 *
1212 * @ingroup Emotion_Ressource
1213 */
1214EAPI Emotion_Suspend emotion_object_suspend_get(Evas_Object *obj);
1215
1216/**
1217 * @brief Load the last known position if available
1218 *
1219 * @param obj The object which the query is being run on.
1220 *
1221 * By using xattr, Emotion is able, if the system permits it, to store and retrieve
1222 * the latest playback position. It triggers a smart callback to let the application
1223 * know when it succeeds or fails. Every operation is fully asynchronous and not
1224 * linked to the actual engine used to play the video.
1225 *
1226 * @see emotion_object_last_position_save()
1227 *
1228 * @ingroup Emotion_Info
1229 */
1230EAPI void emotion_object_last_position_load(Evas_Object *obj);
1231
1232/**
1233 * @brief Save the latest position if possible
1234 *
1235 * @param obj The object which the query is being run on.
1236 *
1237 * By using xattr, Emotion is able, if the system permits it, to store and retrieve
1238 * the latest playback position. It triggers a smart callback to let the application
1239 * know when it succeeds or fails. Every operation is fully asynchronous and not
1240 * linked to the actual engine used to play the video.
1241 *
1242 * @see emotion_object_last_position_load()
1243 *
1244 * @ingroup Emotion_Info
1245 */
1246EAPI void emotion_object_last_position_save(Evas_Object *obj);
1247
1248/**
1249 * @brief Do we have a chance to play that file
1250 *
1251 * @param file A stringshared filename that we want to know if Emotion can play.
1252 *
1253 * This only looks at the extension of the file; it doesn't check the mime-type
1254 * nor whether the file is actually sane. So this is just a hint for your application.
1255 *
1256 * @see emotion_object_extension_may_play_get()
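 *
 * A filtering sketch (note this variant expects a stringshared filename; the
 * path is an arbitrary example):
 * @code
 * const char *f = eina_stringshare_add("/tmp/example.ogv");
 * if (emotion_object_extension_may_play_fast_get(f))
 *   printf("%s looks playable\n", f);
 * eina_stringshare_del(f);
 * @endcode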
1257 */
1258EAPI Eina_Bool emotion_object_extension_may_play_fast_get(const char *file);
1259
1260/**
1261 * @brief Do we have a chance to play that file
1262 *
1263 * @param file A filename that we want to know if Emotion can play.
1264 *
1265 * This only looks at the extension of the file; it doesn't check the mime-type
1266 * nor whether the file is actually sane. So this is just a hint for your application.
1267 *
1268 * @see emotion_object_extension_may_play_fast_get()
1269 */
1270EAPI Eina_Bool emotion_object_extension_may_play_get(const char *file);
1271
1272/**
1273 * @brief Get the actual image object that contains the pixels of the video stream
1274 *
1275 * @param obj The object which the query is being run on.
1276 *
1277 * This function is useful when you want direct access to the pixels.
1278 *
1279 * @see emotion_object_image_get()
1280 */
1281EAPI Evas_Object *emotion_object_image_get(const Evas_Object *obj);
1282
1283/**
1284 * @defgroup Emotion_Webcam API available for accessing webcam
1285 * @ingroup Emotion
1286 */
1287
1288typedef struct _Emotion_Webcam Emotion_Webcam; /**< Webcam description */
1289
1290EAPI extern int EMOTION_WEBCAM_UPDATE; /**< Ecore_Event triggered when a new webcam is plugged in */
1291
1292/**
1293 * @brief Get a list of active and available webcams
1294 *
1295 * @return the list of available webcams at the time of the call.
1296 *
1297 * It returns the current live list of webcams. It is updated before
1298 * EMOTION_WEBCAM_UPDATE is triggered and should never be modified.
1299 *
1300 * @ingroup Emotion_Webcam
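 *
 * A listing sketch (uses Eina's EINA_LIST_FOREACH iteration macro):
 * @code
 * const Eina_List *l;
 * const Emotion_Webcam *ew;
 * EINA_LIST_FOREACH(emotion_webcams_get(), l, ew)
 *   printf("%s -> %s\n", emotion_webcam_name_get(ew),
 *          emotion_webcam_device_get(ew));
 * @endcode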
1301 */
1302EAPI const Eina_List *emotion_webcams_get(void);
1303
1304/**
1305 * @brief Get the human-readable name of a webcam
1306 *
1307 * @param ew The webcam to get the name from.
1308 * @return the actual human-readable name.
1309 *
1310 * @ingroup Emotion_Webcam
1311 */
1312EAPI const char *emotion_webcam_name_get(const Emotion_Webcam *ew);
1313
1314/**
1315 * @brief Get the URI of a webcam that will be understood by emotion
1316 *
1317 * @param ew The webcam to get the URI from.
1318 * @return the actual URI that emotion will later understand.
1319 *
1320 * @ingroup Emotion_Webcam
1321 */
1322EAPI const char *emotion_webcam_device_get(const Emotion_Webcam *ew);
1323
1324/**
1325 * @}
1326 */
1327
1328#ifdef __cplusplus
1329}
1330#endif
1331
1332#endif