Fix position reporting.

Always call pos_get(), and fix it in the xine and gstreamer backends as
well: query the current position instead of trusting the cached value.
For instance, when no video is present, no handoff/frame-decoded
callback fires, so the cache is never updated.



SVN revision: 36271
Gustavo Sverzut Barbieri 2008-09-26 23:17:57 +00:00
parent 2acd11a39e
commit aa3aac7dbf
4 changed files with 84 additions and 30 deletions
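
The frontend side of the change, as a minimal sketch (sd->module->pos_get(), sd->video and sd->pos are the names visible in the library diff below; the macro-based checks are simplified here):

double
emotion_object_position_get(Evas_Object *obj)
{
   Smart_Data *sd = evas_object_smart_data_get(obj);

   /* bail out when there is nothing to query */
   if ((!sd) || (!sd->module) || (!sd->video)) return 0.0;
   if (!sd->module->pos_get) return 0.0;

   /* always ask the backend and refresh the cached value,
    * instead of returning a possibly stale sd->pos */
   sd->pos = sd->module->pos_get(sd->video);
   return sd->pos;
}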

View File

@@ -467,18 +467,17 @@ video_obj_move_cb(void *data, Evas *ev, Evas_Object *obj, void *event_info)
}
static void
video_obj_frame_decode_cb(void *data, Evas_Object *obj, void *event_info)
video_obj_time_changed(Evas_Object *obj, Evas_Object *edje)
{
Evas_Object *oe;
double pos, len;
double pos, len, scale;
char buf[256];
int ph, pm, ps, pf, lh, lm, ls;
oe = data;
pos = emotion_object_position_get(obj);
len = emotion_object_play_length_get(obj);
// printf("%3.3f, %3.3f\n", pos, len);
edje_object_part_drag_value_set(oe, "video_progress", pos / len, 0.0);
scale = (len > 0.0) ? pos / len : 0.0;
edje_object_part_drag_value_set(edje, "video_progress", scale, 0.0);
lh = len / 3600;
lm = len / 60 - (lh * 60);
ls = len - (lm * 60);
@@ -488,7 +487,13 @@ video_obj_frame_decode_cb(void *data, Evas_Object *obj, void *event_info)
pf = pos * 100 - (ps * 100) - (pm * 60 * 100) - (ph * 60 * 60 * 100);
snprintf(buf, sizeof(buf), "%i:%02i:%02i.%02i / %i:%02i:%02i",
ph, pm, ps, pf, lh, lm, ls);
edje_object_part_text_set(oe, "video_progress_txt", buf);
edje_object_part_text_set(edje, "video_progress_txt", buf);
}
static void
video_obj_frame_decode_cb(void *data, Evas_Object *obj, void *event_info)
{
video_obj_time_changed(obj, data);
if (0)
{
@@ -524,25 +529,13 @@ video_obj_frame_resize_cb(void *data, Evas_Object *obj, void *event_info)
static void
video_obj_length_change_cb(void *data, Evas_Object *obj, void *event_info)
{
Evas_Object *oe;
double pos, len;
char buf[256];
int ph, pm, ps, pf, lh, lm, ls;
video_obj_time_changed(obj, data);
}
oe = data;
pos = emotion_object_position_get(obj);
len = emotion_object_play_length_get(obj);
edje_object_part_drag_value_set(oe, "video_progress", pos / len, 0.0);
lh = len / 3600;
lm = len / 60 - (lh * 60);
ls = len - (lm * 60);
ph = pos / 3600;
pm = pos / 60 - (ph * 60);
ps = pos - (pm * 60);
pf = pos * 100 - (ps * 100) - (pm * 60 * 100) - (ph * 60 * 60 * 100);
snprintf(buf, sizeof(buf), "%i:%02i:%02i.%02i / %i:%02i:%02i",
ph, pm, ps, pf, lh, lm, ls);
edje_object_part_text_set(oe, "video_progress_txt", buf);
static void
video_obj_position_update_cb(void *data, Evas_Object *obj, void *event_info)
{
video_obj_time_changed(obj, data);
}
static void
@@ -801,6 +794,7 @@ init_video_object(char *module_filename, char *filename)
evas_object_smart_callback_add(o, "frame_decode", video_obj_frame_decode_cb, oe);
evas_object_smart_callback_add(o, "frame_resize", video_obj_frame_resize_cb, oe);
evas_object_smart_callback_add(o, "length_change", video_obj_length_change_cb, oe);
evas_object_smart_callback_add(o, "position_update", video_obj_position_update_cb, oe);
evas_object_smart_callback_add(o, "decode_stop", video_obj_stopped_cb, oe);
evas_object_smart_callback_add(o, "channels_change", video_obj_channels_cb, oe);
@@ -848,6 +842,17 @@ enter_idle(void *data)
return 1;
}
static int
check_positions(void *data)
{
const Evas_List *lst;
for (lst = video_objs; lst != NULL; lst = lst->next)
video_obj_time_changed(lst->data, evas_object_smart_parent_get(lst->data));
return !!video_objs;
}
int
main(int argc, char **argv)
{
@@ -902,6 +907,7 @@ main(int argc, char **argv)
}
ecore_idle_enterer_add(enter_idle, NULL);
ecore_animator_add(check_positions, NULL);
ecore_main_loop_begin();
main_stop();

View File

@@ -319,6 +319,8 @@ emotion_object_position_get(Evas_Object *obj)
E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0.0);
if (!sd->module) return 0.0;
if (!sd->video) return 0.0;
if (!sd->module->pos_get) return 0.0;
sd->pos = sd->module->pos_get(sd->video);
return sd->pos;
}
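
With pos_get() queried on every call, callers can simply poll the position, which is what the animator added to the test program above does. A hypothetical caller-side sketch (poll_position is an illustrative name; it assumes the pre-1.0 Ecore animator convention of returning 1 to keep the callback running):

static int
poll_position(void *data)
{
   Evas_Object *video = data; /* an emotion object */
   double pos = emotion_object_position_get(video);
   double len = emotion_object_play_length_get(video);

   /* updates even when no "frame_decode"/"position_update"
    * callback fires, e.g. for audio-only files */
   printf("position: %0.2f / %0.2f\n", pos, len);
   return 1; /* keep the animator ticking */
}

/* after creating the emotion object: */
/* ecore_animator_add(poll_position, video); */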

View File

@@ -590,13 +590,42 @@ static double
em_len_get(void *video)
{
Emotion_Gstreamer_Video *ev;
Emotion_Video_Sink *vsink;
Emotion_Video_Sink *vsink;
Emotion_Audio_Sink *asink;
GstFormat fmt;
gint64 val;
gboolean ret;
ev = (Emotion_Gstreamer_Video *)video;
ev = video;
fmt = GST_FORMAT_TIME;
ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
if (!ret)
goto fallback;
vsink = (Emotion_Video_Sink *)ecore_list_index_goto(ev->video_sinks, ev->video_sink_nbr);
if (vsink)
return (double)vsink->length_time;
if (fmt != GST_FORMAT_TIME)
{
fprintf(stderr, "requrested duration in time, but got %s instead.",
gst_format_get_name(fmt));
goto fallback;
}
if (val <= 0.0)
goto fallback;
return val / 1000000000.0;
fallback:
fputs("Gstreamer reported no length, try existing sinks...\n", stderr);
ecore_list_first_goto(ev->audio_sinks);
while ((asink = ecore_list_next(ev->audio_sinks)) != NULL)
if (asink->length_time >= 0)
return asink->length_time;
ecore_list_first_goto(ev->video_sinks);
while ((vsink = ecore_list_next(ev->video_sinks)) != NULL)
if (vsink->length_time >= 0)
return vsink->length_time;
return 0.0;
}
@@ -650,9 +679,24 @@ static double
em_pos_get(void *video)
{
Emotion_Gstreamer_Video *ev;
GstFormat fmt;
gint64 val;
gboolean ret;
ev = (Emotion_Gstreamer_Video *)video;
ev = video;
fmt = GST_FORMAT_TIME;
ret = gst_element_query_position(ev->pipeline, &fmt, &val);
if (!ret)
return ev->position;
if (fmt != GST_FORMAT_TIME)
{
fprintf(stderr, "requrested position in time, but got %s instead.",
gst_format_get_name(fmt));
return ev->position;
}
ev->position = val / 1000000000.0;
return ev->position;
}
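
The 1000000000.0 divisor is simply nanoseconds per second; a hedged sketch of the same query-and-convert pattern using GStreamer 0.10's GST_SECOND constant (query_position_seconds is an illustrative helper, not part of the patch):

#include <gst/gst.h>

/* Query the pipeline position (GStreamer 0.10 API, matching the &fmt
 * usage above) and convert nanoseconds to seconds; on failure return
 * the caller's cached value. */
static double
query_position_seconds(GstElement *pipeline, double cached)
{
   GstFormat fmt = GST_FORMAT_TIME;
   gint64 pos = 0;

   if (!gst_element_query_position(pipeline, &fmt, &pos)) return cached;
   if (fmt != GST_FORMAT_TIME || pos < 0) return cached;
   return (double)pos / GST_SECOND; /* GST_SECOND == 1e9 nanoseconds */
}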

View File

@@ -551,6 +551,7 @@ em_pos_get(void *ef)
Emotion_Xine_Video *ev;
ev = (Emotion_Xine_Video *)ef;
_em_get_pos_len(ev);
return ev->pos;
}
@@ -1456,6 +1457,7 @@ _em_get_pos_len_th(void *par)
static void
_em_get_pos_len(Emotion_Xine_Video *ev)
{
if (!ev->play_ok) return;
if (ev->get_poslen) return;
ev->get_poslen = 1;
pthread_mutex_lock(&(ev->get_pos_len_mutex));