diff --git a/legacy/emotion/src/examples/Makefile.am b/legacy/emotion/src/examples/Makefile.am index a19a1ae2c3..15c7ef8785 100644 --- a/legacy/emotion/src/examples/Makefile.am +++ b/legacy/emotion/src/examples/Makefile.am @@ -17,6 +17,7 @@ LDADD = \ SRCS = \ emotion_basic_example.c \ emotion_generic_example.c \ + emotion_border_example.c \ emotion_signals_example.c EXTRA_DIST = $(SRCS) @@ -32,6 +33,7 @@ if EFL_BUILD_EXAMPLES pkglib_PROGRAMS += \ emotion_basic_example \ emotion_generic_example \ + emotion_border_example \ emotion_signals_example endif diff --git a/legacy/emotion/src/examples/emotion_border_example.c b/legacy/emotion/src/examples/emotion_border_example.c new file mode 100644 index 0000000000..dc562b4daf --- /dev/null +++ b/legacy/emotion/src/examples/emotion_border_example.c @@ -0,0 +1,212 @@ +#include <Ecore.h> +#include <Ecore_Evas.h> +#include <Emotion.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +#define WIDTH (320) +#define HEIGHT (240) + +static Eina_List *filenames = NULL; +static Eina_List *curfile = NULL; + +static void +_playback_started_cb(void *data, Evas_Object *o, void *event_info) +{ + printf("Emotion object started playback.\n"); +} + +static Evas_Object * +_create_emotion_object(Evas *e) +{ + Evas_Object *em = emotion_object_add(e); + + emotion_object_init(em, "gstreamer"); + + evas_object_smart_callback_add( + em, "playback_started", _playback_started_cb, NULL); + + return em; +} + +static void +_on_key_down(void *data, Evas *e, Evas_Object *o, void *event_info) +{ + Evas_Event_Key_Down *ev = event_info; + Evas_Object *em = data; + + if (!strcmp(ev->keyname, "Return")) + { + emotion_object_play_set(em, EINA_TRUE); + } + else if (!strcmp(ev->keyname, "space")) + { + emotion_object_play_set(em, EINA_FALSE); + } + else if (!strcmp(ev->keyname, "Escape")) + { + ecore_main_loop_quit(); + } + else if (!strcmp(ev->keyname, "n")) + { + const char *file; + if (!curfile) + curfile = filenames; + else + curfile = eina_list_next(curfile); + file = eina_list_data_get(curfile); + 
fprintf(stderr, "playing next file: %s\n", file); + emotion_object_file_set(em, file); + } + else if (!strcmp(ev->keyname, "p")) + { + const char *file; + if (!curfile) + curfile = eina_list_last(filenames); + else + curfile = eina_list_prev(curfile); + file = eina_list_data_get(curfile); + fprintf(stderr, "playing previous file: %s\n", file); + emotion_object_file_set(em, file); + } + else + { + fprintf(stderr, "unhandled key: %s\n", ev->keyname); + } +} + +static void +_frame_decode_cb(void *data, Evas_Object *o, void *event_info) +{ + // fprintf(stderr, "smartcb: frame_decode\n"); +} + +static void +_length_change_cb(void *data, Evas_Object *o, void *event_info) +{ + fprintf(stderr, "smartcb: length_change: %0.3f\n", emotion_object_play_length_get(o)); +} + +static void +_position_update_cb(void *data, Evas_Object *o, void *event_info) +{ + fprintf(stderr, "smartcb: position_update: %0.3f\n", emotion_object_position_get(o)); +} + +static void +_progress_change_cb(void *data, Evas_Object *o, void *event_info) +{ + fprintf(stderr, "smartcb: progress_change: %0.3f, %s\n", + emotion_object_progress_status_get(o), + emotion_object_progress_info_get(o)); +} + +static void +_frame_resize_cb(void *data, Evas_Object *o, void *event_info) +{ + int w, h; + emotion_object_size_get(o, &w, &h); + fprintf(stderr, "smartcb: frame_resize: %dx%d\n", w, h); +} + +static void /* adjust canvas' contents on resizes */ +_canvas_resize_cb(Ecore_Evas *ee) +{ + int w, h; + Evas_Object *bg, *em; + + ecore_evas_geometry_get(ee, NULL, NULL, &w, &h); + + bg = ecore_evas_data_get(ee, "bg"); + em = ecore_evas_data_get(ee, "emotion"); + + evas_object_resize(bg, w, h); + evas_object_resize(em, w, h); +} + +int +main(int argc, const char *argv[]) +{ + int err; + Ecore_Evas *ee; + Evas *e; + Evas_Object *bg, *em; + int i; + + if (argc < 2) + { + printf("One argument is necessary. 
Usage:\n"); + printf("\t%s <filename>\n", argv[0]); + } + + eina_init(); + for (i = 1; i < argc; i++) + filenames = eina_list_append(filenames, eina_stringshare_add(argv[i])); + + curfile = filenames; + + if (!ecore_evas_init()) + return EXIT_FAILURE; + + /* this will give you a window with an Evas canvas under the first + * engine available */ + ee = ecore_evas_new(NULL, 10, 10, WIDTH, HEIGHT, NULL); + if (!ee) + goto error; + + ecore_evas_callback_resize_set(ee, _canvas_resize_cb); + + ecore_evas_show(ee); + + /* the canvas pointer, de facto */ + e = ecore_evas_get(ee); + + /* adding a background to this example */ + bg = evas_object_rectangle_add(e); + evas_object_name_set(bg, "our dear rectangle"); + evas_object_color_set(bg, 255, 0, 0, 255); /* red bg */ + evas_object_move(bg, 0, 0); /* at canvas' origin */ + evas_object_resize(bg, WIDTH, HEIGHT); /* covers full canvas */ + evas_object_show(bg); + + ecore_evas_data_set(ee, "bg", bg); + + /* Creating the emotion object */ + em = _create_emotion_object(e); + emotion_object_file_set(em, eina_list_data_get(curfile)); + evas_object_move(em, 0, 0); + evas_object_resize(em, WIDTH, HEIGHT); + emotion_object_border_set(em, -30, -30, -30, -30); + evas_object_show(em); + + ecore_evas_data_set(ee, "emotion", em); + + evas_object_smart_callback_add(em, "frame_decode", _frame_decode_cb, NULL); + evas_object_smart_callback_add(em, "length_change", _length_change_cb, NULL); + evas_object_smart_callback_add(em, "position_update", _position_update_cb, NULL); + evas_object_smart_callback_add(em, "progress_change", _progress_change_cb, NULL); + evas_object_smart_callback_add(em, "frame_resize", _frame_resize_cb, NULL); + + evas_object_event_callback_add(bg, EVAS_CALLBACK_KEY_DOWN, _on_key_down, em); + evas_object_focus_set(bg, EINA_TRUE); + + emotion_object_play_set(em, EINA_TRUE); + + ecore_main_loop_begin(); + + ecore_evas_free(ee); + ecore_evas_shutdown(); + return 0; + +error: + fprintf(stderr, "you got to have at least one evas 
engine built and linked" + " up to ecore-evas for this example to run properly.\n"); + + EINA_LIST_FREE(filenames, curfile) + eina_stringshare_del(eina_list_data_get(curfile)); + + ecore_evas_shutdown(); + eina_shutdown(); + return -1; +} diff --git a/legacy/emotion/src/lib/Emotion.h b/legacy/emotion/src/lib/Emotion.h index b35125f1ff..af74eb84f5 100644 --- a/legacy/emotion/src/lib/Emotion.h +++ b/legacy/emotion/src/lib/Emotion.h @@ -375,6 +375,47 @@ EAPI void emotion_object_module_option_set (Evas_Object *obj, const */ EAPI Eina_Bool emotion_object_init (Evas_Object *obj, const char *module_filename); +/** + * @brief Set borders for the emotion object. + * + * @param obj The emotion object where borders are being set. + * @param l The left border. + * @param r The right border. + * @param t The top border. + * @param b The bottom border. + * + * This function sets borders for the emotion video object (just when a video is + * present). When positive values are given to one of the parameters, a border + * will be added to the respective position of the object, representing that + * size on the original video size. However, if the video is scaled up or down + * (i.e. the emotion object size is different from the video size), the borders + * will be scaled respectively too. + * + * If a negative value is given to one of the parameters, instead of a border, + * that respective side of the video will be cropped. + * + * It's possible to set a color for the added borders (default is transparent) + * with emotion_object_border_color_set(). By default, an Emotion object doesn't + * have any border. + * + * @see emotion_object_border_get() + * @see emotion_object_border_color_set() + */ +EAPI void emotion_object_border_set(Evas_Object *obj, int l, int r, int t, int b); + +/** + * @brief Get the borders set for the emotion object. + * + * @param obj The emotion object from which the borders are being retrieved. + * @param l The left border. + * @param r The right border. 
+ * @param t The top border. + * @param b The bottom border. + * + * @see emotion_object_border_set() + */ +EAPI void emotion_object_border_get(const Evas_Object *obj, int *l, int *r, int *t, int *b); + /** * @brief Set the file to be played in the Emotion object. * diff --git a/legacy/emotion/src/lib/emotion_smart.c b/legacy/emotion/src/lib/emotion_smart.c index 3fbcbea5f5..59ffb2a0a4 100644 --- a/legacy/emotion/src/lib/emotion_smart.c +++ b/legacy/emotion/src/lib/emotion_smart.c @@ -76,6 +76,13 @@ struct _Smart_Data int button_num; int button; } spu; + struct { + int l; /* left */ + int r; /* right */ + int t; /* top */ + int b; /* bottom */ + Evas_Object *clipper; + } crop; double ratio; double pos; @@ -200,6 +207,7 @@ _smart_data_free(Smart_Data *sd) if (sd->video) sd->module->file_close(sd->video); _emotion_module_close(sd->module, sd->video); evas_object_del(sd->obj); + evas_object_del(sd->crop.clipper); eina_stringshare_del(sd->file); free(sd->module_name); if (sd->job) ecore_job_del(sd->job); @@ -279,6 +287,27 @@ _emotion_module_open(const char *name, Evas_Object *obj, Emotion_Video_Module ** return NULL; } +static void +_clipper_position_size_update(Evas_Object *obj, int vid_w, int vid_h) +{ + Smart_Data *sd; + double scale_w, scale_h; + int x, y; + int w, h; + + E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); + + evas_object_geometry_get(obj, &x, &y, &w, &h); + evas_object_move(sd->crop.clipper, x, y); + scale_w = (double)w / (double)(vid_w - sd->crop.l - sd->crop.r); + scale_h = (double)h / (double)(vid_h - sd->crop.t - sd->crop.b); + + evas_object_image_fill_set(sd->obj, 0, 0, vid_w * scale_w, vid_h * scale_h); + evas_object_resize(sd->obj, vid_w * scale_w, vid_h * scale_h); + evas_object_move(sd->obj, x - sd->crop.l * scale_w, y - sd->crop.t * scale_h); + evas_object_resize(sd->crop.clipper, w, h); +} + /*******************************/ /* Externally accessible calls */ /*******************************/ @@ -420,6 +449,60 @@ emotion_object_file_get(const 
Evas_Object *obj) return sd->file; } +EAPI void +emotion_object_border_set(Evas_Object *obj, int l, int r, int t, int b) +{ + Smart_Data *sd; + + E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); + sd->crop.l = -l; + sd->crop.r = -r; + sd->crop.t = -t; + sd->crop.b = -b; + if (l == 0 && r == 0 && t == 0 && b == 0) + { + Evas_Object *old_clipper; + if (!sd->crop.clipper) + return; + old_clipper = evas_object_clip_get(sd->crop.clipper); + evas_object_clip_unset(sd->obj); + evas_object_clip_set(sd->obj, old_clipper); + evas_object_del(sd->crop.clipper); + sd->crop.clipper = NULL; + } + else + { + int vid_w, vid_h; + if (!sd->crop.clipper) + { + Evas_Object *old_clipper; + sd->crop.clipper = evas_object_rectangle_add( + evas_object_evas_get(obj)); + evas_object_color_set(sd->crop.clipper, 255, 255, 255, 255); + evas_object_smart_member_add(sd->crop.clipper, obj); + old_clipper = evas_object_clip_get(sd->obj); + evas_object_clip_set(sd->obj, sd->crop.clipper); + evas_object_clip_set(sd->crop.clipper, old_clipper); + if (evas_object_visible_get(sd->obj)) + evas_object_show(sd->crop.clipper); + } + sd->module->video_data_size_get(sd->video, &vid_w, &vid_h); + _clipper_position_size_update(obj, vid_w, vid_h); + } +} + +EAPI void +emotion_object_border_get(const Evas_Object *obj, int *l, int *r, int *t, int *b) +{ + Smart_Data *sd; + + E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); + if (l) *l = -sd->crop.l; + if (r) *r = -sd->crop.r; + if (t) *t = -sd->crop.t; + if (b) *b = -sd->crop.b; +} + EAPI void emotion_object_play_set(Evas_Object *obj, Eina_Bool play) { @@ -542,6 +625,8 @@ emotion_object_size_get(const Evas_Object *obj, int *iw, int *ih) if (ih) *ih = 0; E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); evas_object_image_size_get(sd->obj, iw, ih); + if (iw) *iw -= (sd->crop.l + sd->crop.r); + if (ih) *ih -= (sd->crop.t + sd->crop.b); } EAPI void @@ -1251,6 +1336,7 @@ _emotion_frame_resize(Evas_Object *obj, int w, int h, double ratio) { evas_object_size_hint_request_set(obj, w, h); evas_object_smart_callback_call(obj, SIG_FRAME_RESIZE, NULL); + 
_clipper_position_size_update(obj, w, h); } } @@ -1629,7 +1715,10 @@ _smart_move(Evas_Object * obj, Evas_Coord x, Evas_Coord y) sd = evas_object_smart_data_get(obj); if (!sd) return; - evas_object_move(sd->obj, x, y); + + int vid_w, vid_h; + sd->module->video_data_size_get(sd->video, &vid_w, &vid_h); + _clipper_position_size_update(obj, vid_w, vid_h); } static void @@ -1639,8 +1728,11 @@ _smart_resize(Evas_Object * obj, Evas_Coord w, Evas_Coord h) sd = evas_object_smart_data_get(obj); if (!sd) return; - evas_object_image_fill_set(sd->obj, 0, 0, w, h); - evas_object_resize(sd->obj, w, h); + + int vid_w, vid_h; + + sd->module->video_data_size_get(sd->video, &vid_w, &vid_h); + _clipper_position_size_update(obj, vid_w, vid_h); } static void @@ -1651,7 +1743,8 @@ _smart_show(Evas_Object * obj) sd = evas_object_smart_data_get(obj); if (!sd) return; evas_object_show(sd->obj); - + if (sd->crop.clipper) + evas_object_show(sd->crop.clipper); } static void @@ -1662,6 +1755,8 @@ _smart_hide(Evas_Object * obj) sd = evas_object_smart_data_get(obj); if (!sd) return; evas_object_hide(sd->obj); + if (sd->crop.clipper) + evas_object_hide(sd->crop.clipper); } static void @@ -1672,6 +1767,7 @@ _smart_color_set(Evas_Object * obj, int r, int g, int b, int a) sd = evas_object_smart_data_get(obj); if (!sd) return; evas_object_color_set(sd->obj, r, g, b, a); + if (sd->crop.clipper) evas_object_color_set(sd->crop.clipper, r, g, b, a); } static void @@ -1681,7 +1777,10 @@ _smart_clip_set(Evas_Object * obj, Evas_Object * clip) sd = evas_object_smart_data_get(obj); if (!sd) return; - evas_object_clip_set(sd->obj, clip); + if (sd->crop.clipper) + evas_object_clip_set(sd->crop.clipper, clip); + else + evas_object_clip_set(sd->obj, clip); } static void @@ -1691,6 +1790,9 @@ _smart_clip_unset(Evas_Object * obj) sd = evas_object_smart_data_get(obj); if (!sd) return; - evas_object_clip_unset(sd->obj); + if (sd->crop.clipper) + evas_object_clip_unset(sd->crop.clipper); + else + evas_object_clip_unset(sd->obj); }