author     Rafael Antognolli <antognolli@gmail.com>    2011-07-04 14:31:01 +0000
committer  Rafael Antognolli <antognolli@gmail.com>    2011-07-04 14:31:01 +0000
commit     ffcf50987fa92e2007d3c6e7cf5c0ad6df65c882 (patch)
tree       b85b6d4c4800f2758226b7a35c5dbf34ddfeb4d1 /legacy
parent     1d4852f11e989a375fd23d355735ccc7f70571d2 (diff)
emotion - better introduction and link to emotion_test.
SVN revision: 61008
Diffstat
-rw-r--r--  legacy/emotion/doc/Doxyfile.in          |  2
-rw-r--r--  legacy/emotion/doc/emotion.dox.in       |  8
-rw-r--r--  legacy/emotion/doc/examples.dox         |  6
-rw-r--r--  legacy/emotion/src/lib/Emotion.h        | 41
-rw-r--r--  legacy/emotion/src/lib/emotion_smart.c  |  2
5 files changed, 51 insertions, 8 deletions
diff --git a/legacy/emotion/doc/Doxyfile.in b/legacy/emotion/doc/Doxyfile.in
index 853fce1390..33d7f3323f 100644
--- a/legacy/emotion/doc/Doxyfile.in
+++ b/legacy/emotion/doc/Doxyfile.in
@@ -69,7 +69,7 @@ RECURSIVE = NO
 EXCLUDE =
 EXCLUDE_SYMLINKS = NO
 EXCLUDE_PATTERNS =
-EXAMPLE_PATH = @top_srcdir@/src/examples
+EXAMPLE_PATH = @top_srcdir@/src/examples @top_srcdir@/src/bin
 EXAMPLE_PATTERNS =
 EXAMPLE_RECURSIVE = NO
 INPUT_FILTER =
diff --git a/legacy/emotion/doc/emotion.dox.in b/legacy/emotion/doc/emotion.dox.in
index dfbf4ce812..5d9000bc2f 100644
--- a/legacy/emotion/doc/emotion.dox.in
+++ b/legacy/emotion/doc/emotion.dox.in
@@ -12,10 +12,10 @@
  * @image html e.png
  *
  * Emotion is a library that allows playing audio and video files, using one of
- * its backends (gstreamer and xine).
+ * its backends (gstreamer or xine).
  *
  * It is integrated into Ecore through its mainloop, and is transparent to the
- * user of the library how the decoding of audio and video is happening. Once
+ * user of the library how the decoding of audio and video is being done. Once
  * the objects are created, the user can set callbacks to the specific events
  * and set options to this object, all in the main loop (no threads are needed).
  *
@@ -38,8 +38,8 @@
  * @section work How does Emotion work?
  *
  * The Emotion library uses Evas smart objects to allow you to manipulate the
- * created object as any other Evas object, and to connect to its signals and
- * process them when needed. It's also possible to swallow Emotion objects
+ * created object as any other Evas object, and to connect to its signals,
+ * handling them when needed. It's also possible to swallow Emotion objects
  * inside Edje themes, and expect it to behave as a normal image or rectangle
  * when regarding to its dimensions.
  *
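The swallowing into Edje themes described in this hunk can be sketched as follows. This is an illustrative sketch, not part of the commit; the theme file "theme.edj", its group "main" and the SWALLOW part "video_area" are hypothetical names:

#include <Ecore_Evas.h>
#include <Edje.h>
#include <Emotion.h>

int
main(void)
{
   Ecore_Evas *ee;
   Evas *evas;
   Evas_Object *edje, *video;

   ecore_evas_init();
   edje_init();

   ee = ecore_evas_new(NULL, 0, 0, 640, 480, NULL);
   evas = ecore_evas_get(ee);
   ecore_evas_show(ee);

   /* The Edje layout that will host the video. */
   edje = edje_object_add(evas);
   edje_object_file_set(edje, "theme.edj", "main");
   evas_object_resize(edje, 640, 480);
   evas_object_show(edje);

   /* The Emotion object behaves like any other Evas object... */
   video = emotion_object_add(evas);
   emotion_object_init(video, "gstreamer");
   emotion_object_file_set(video, "video.avi");
   emotion_object_play_set(video, EINA_TRUE);

   /* ...so it can be swallowed into a SWALLOW part and will follow
    * that part's geometry like a plain image or rectangle would. */
   edje_object_part_swallow(edje, "video_area", video);

   ecore_main_loop_begin();

   ecore_evas_shutdown();
   edje_shutdown();
   return 0;
}

Once swallowed, resizing the Edje object resizes the video with it, which is what the paragraph above means by behaving as a normal image or rectangle with regard to its dimensions.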
diff --git a/legacy/emotion/doc/examples.dox b/legacy/emotion/doc/examples.dox
index 054608b338..5e193f166a 100644
--- a/legacy/emotion/doc/examples.dox
+++ b/legacy/emotion/doc/examples.dox
@@ -5,6 +5,7 @@
  *
  * @li @ref emotion_basic_example_c
  * @li @ref emotion_signals_example.c "Emotion signals"
+ * @li @ref emotion_test_main.c "emotion_test - full API usage"
  *
  */
 
@@ -96,3 +97,8 @@
  * signals are emitted can change depending on the module being used. Following
  * is the full source code of this example:
  */
+
+/**
+ * @example emotion_test_main.c
+ * This example covers the entire emotion API. Use it as a reference.
+ */
diff --git a/legacy/emotion/src/lib/Emotion.h b/legacy/emotion/src/lib/Emotion.h
index a8f7b6ac59..15051317c7 100644
--- a/legacy/emotion/src/lib/Emotion.h
+++ b/legacy/emotion/src/lib/Emotion.h
@@ -156,8 +156,29 @@ extern "C" {
  *
  * @{
  *
- * @li Add the description of modules here.
- * @li Basic emotion example
+ * Emotion provides an Evas smart object that allows you to play, control and
+ * display a video or audio file. The API is synchronous, but not everything
+ * happens immediately. There are also some signals to report changed states.
+ *
+ * Basically, once the object is created and initialized, a file will be set to
+ * it, and then it can be resized, moved, and controlled by other Evas object
+ * functions.
+ *
+ * However, the decoding of the audio and video does not happen in the Ecore
+ * main loop, but usually in another thread (this depends on the module being
+ * used). The synchronization between this other thread and the main loop is
+ * not visible to the end user of the library. The user can just register
+ * callbacks to the available signals to receive information about the changed
+ * states, and can call other functions from the API to request more changes on
+ * the currently loaded file.
+ *
+ * There will be a delay between an API function being called and it really
+ * being executed, since the request is made in the main thread and has to be
+ * sent to the decoding thread. For this reason, always call functions like
+ * emotion_object_size_get() or emotion_object_length_get() after a signal such
+ * as "playback_started" or "open_done" has been emitted. @ref
+ * emotion_signals_example.c "This example demonstrates this behavior".
+ *
  * @section signals Available signals
  * The Evas_Object returned by emotion_object_add() has a number of signals that
  * can be listened to using evas' smart callbacks mechanism. All signals have
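A minimal sketch of the life cycle these new paragraphs describe: create and initialize the object, set a file, then drive it with ordinary Evas object functions from the main loop. The module name "gstreamer" and the file "video.avi" are placeholders, not values mandated by the API:

#include <Ecore.h>
#include <Ecore_Evas.h>
#include <Emotion.h>

int
main(void)
{
   Ecore_Evas *ee;
   Evas_Object *em;

   ecore_evas_init();

   ee = ecore_evas_new(NULL, 0, 0, 320, 240, NULL);
   ecore_evas_show(ee);

   /* Creation, initialization and file loading requests all happen in
    * the main loop thread; the decoding itself is done elsewhere. */
   em = emotion_object_add(ecore_evas_get(ee));
   emotion_object_init(em, "gstreamer");
   emotion_object_file_set(em, "video.avi");

   /* Plain Evas object functions control geometry and visibility. */
   evas_object_move(em, 0, 0);
   evas_object_resize(em, 320, 240);
   evas_object_show(em);

   emotion_object_play_set(em, EINA_TRUE);

   ecore_main_loop_begin();

   ecore_evas_shutdown();
   return 0;
}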
@@ -168,6 +189,18 @@ extern "C" {
  * @li "open_done" - Emitted when the media file is opened
  * @li "position_update" - Emitted when emotion_object_position_set is called
  * @li "decode_stop" - Emitted after the last frame is decoded
+ *
+ * @section Examples
+ *
+ * The following examples demonstrate how to use Emotion. There is also the
+ * emotion_test binary, which is distributed with this library and covers the
+ * entire API, but since it is too long and repetitive to be explained, its code
+ * is just displayed as another example.
+ *
+ * @li @ref emotion_basic_example_c
+ * @li @ref emotion_signals_example.c "Emotion signals"
+ * @li @ref emotion_test_main.c "emotion_test - full API usage"
+ *
  */
 
 /**
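The signals listed above are delivered through Evas smart callbacks. The sketch below (again with a hypothetical "video.avi") follows the recommendation in the new text: size and length are queried only after "open_done" has been emitted, since decoding runs outside the main loop:

#include <stdio.h>
#include <Ecore.h>
#include <Ecore_Evas.h>
#include <Emotion.h>

static void
_open_done_cb(void *data, Evas_Object *obj, void *event_info)
{
   int w, h;

   /* The file is now open, so metadata queries return real values. */
   emotion_object_size_get(obj, &w, &h);
   printf("video size: %dx%d, length: %0.3f s\n",
          w, h, emotion_object_length_get(obj));
}

static void
_decode_stop_cb(void *data, Evas_Object *obj, void *event_info)
{
   printf("last frame decoded\n");
   ecore_main_loop_quit();
}

int
main(void)
{
   Ecore_Evas *ee;
   Evas_Object *em;

   ecore_evas_init();
   ee = ecore_evas_new(NULL, 0, 0, 320, 240, NULL);
   ecore_evas_show(ee);

   em = emotion_object_add(ecore_evas_get(ee));
   emotion_object_init(em, "gstreamer");

   /* Register for the smart signals before setting the file. */
   evas_object_smart_callback_add(em, "open_done", _open_done_cb, NULL);
   evas_object_smart_callback_add(em, "decode_stop", _decode_stop_cb, NULL);

   emotion_object_file_set(em, "video.avi");
   emotion_object_play_set(em, EINA_TRUE);
   evas_object_resize(em, 320, 240);
   evas_object_show(em);

   ecore_main_loop_begin();
   ecore_evas_shutdown();
   return 0;
}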
@@ -488,6 +521,8 @@ EAPI void emotion_object_size_get (const Evas_Object *obj,
  * @param smooth Whether to use smooth scale or not.
  *
  * @see emotion_object_smooth_scale_get()
+ *
+ * @ingroup Emotion_Video
  */
 EAPI void emotion_object_smooth_scale_set (Evas_Object *obj, Eina_Bool smooth);
 
@@ -499,6 +534,8 @@ EAPI void emotion_object_smooth_scale_set (Evas_Object *obj, Eina_B
  * @return Whether the smooth scale is used or not.
  *
  * @see emotion_object_smooth_scale_set()
+ *
+ * @ingroup Emotion_Video
  */
 EAPI Eina_Bool emotion_object_smooth_scale_get (const Evas_Object *obj);
 EAPI void emotion_object_event_simple_send (Evas_Object *obj, Emotion_Event ev);
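As a usage note for the two functions these hunks document (both now grouped under Emotion_Video), a fragment like the following, assuming an already created Emotion object, toggles and reads back the smooth-scale flag:

#include <stdio.h>
#include <Emotion.h>

/* "em" is assumed to be an Emotion object created as in the sketches above. */
static void
enable_smooth_scale(Evas_Object *em)
{
   /* Use smooth (filtered) scaling when the video is not shown at its
    * native size... */
   emotion_object_smooth_scale_set(em, EINA_TRUE);

   /* ...and read the current setting back. */
   if (emotion_object_smooth_scale_get(em))
     printf("smooth scaling is now enabled\n");
}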
diff --git a/legacy/emotion/src/lib/emotion_smart.c b/legacy/emotion/src/lib/emotion_smart.c
index ebbfdef120..54ee4952fa 100644
--- a/legacy/emotion/src/lib/emotion_smart.c
+++ b/legacy/emotion/src/lib/emotion_smart.c
@@ -558,7 +558,7 @@ emotion_object_ratio_get(const Evas_Object *obj)
    return sd->ratio;
 }
 
-/**
+/*
  * Send a control event to the DVD.
  */
 EAPI void
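The comment demoted here belongs to emotion_object_event_simple_send(), declared in the Emotion.h hunk above. A hedged fragment of how it is typically used, assuming EMOTION_EVENT_UP from the Emotion_Event enum (DVD menu navigation):

#include <Emotion.h>

/* Forward a simple navigation event (e.g. for a DVD menu) to the
 * currently loaded media on the Emotion object "em". */
static void
dvd_menu_up(Evas_Object *em)
{
   emotion_object_event_simple_send(em, EMOTION_EVENT_UP);
}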