efl/src/lib/emotion/efl_canvas_video.eo

class @beta Efl.Canvas.Video extends Efl.Canvas.Group implements Efl.File, Efl.Player, Efl.Gfx.Image, Efl.Gfx.Image_Load_Controller
{
   [[Efl canvas video class]]
   methods {
      @property option {
         set {
            [[Set the specified option for the current module.
              This function allows one to mute the video or audio of the
              emotion object.
              Please don't use this function; consider using
              @Efl.Player.mute instead.
            ]]
         }
         values {
            opt: string; [[The option that is being set. Currently
                           supported options: "video" and "audio".]]
            val: string; [[The value of the option. Currently only
                           "off" is supported.]]
         }
      }
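      /* Usage sketch: driving this property from C. This is a minimal,
         hedged example that assumes the legacy Emotion binding
         emotion_object_module_option_set() is the C counterpart of "option";
         the helper name and "video_obj" are illustrative only.

           #include <Emotion.h>

           // Assumes video_obj was created with emotion_object_add() and
           // initialized with a module (see the engine property below).
           static void
           _disable_video_track(Evas_Object *video_obj)
           {
              // The only supported value is "off"; "audio" works the same way.
              emotion_object_module_option_set(video_obj, "video", "off");

              // Muting audio is better done through the dedicated mute API:
              emotion_object_audio_mute_set(video_obj, EINA_TRUE);
           }
      */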
      @property engine {
         set {
            [[Initializes an emotion object with the specified module.
              This function is required after creating the emotion object
              in order to specify which module will be used with this
              object. Different objects can use different modules to
              play a media file. The currently supported modules are
              gstreamer and xine.
              To use any of them, you need to make sure that support for
              them was compiled in correctly.
              It's possible to disable the build of a module with
              --disable-module_name.
              See also @Efl.File.file.
            ]]
            return: bool; [[$true if the specified module was successfully
                            initialized for this object, $false otherwise.]]
         }
         values {
            module_filename: string; [[The name of the module to be
                                       used (gstreamer or xine).]]
         }
      }
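      /* Usage sketch: a minimal C program that creates an emotion object,
         selects a module and starts playback, assuming the legacy Emotion
         and Ecore_Evas APIs; the file path and window size are placeholders.

           #include <Ecore.h>
           #include <Ecore_Evas.h>
           #include <Emotion.h>

           int
           main(void)
           {
              Ecore_Evas *ee;
              Evas_Object *video_obj;

              ecore_evas_init();
              ee = ecore_evas_new(NULL, 0, 0, 640, 360, NULL);
              ecore_evas_show(ee);

              video_obj = emotion_object_add(ecore_evas_get(ee));
              if (!emotion_object_init(video_obj, "gstreamer"))
                return 1; // module not built or not installed

              emotion_object_file_set(video_obj, "/path/to/movie.ogv");
              emotion_object_play_set(video_obj, EINA_TRUE);
              evas_object_resize(video_obj, 640, 360);
              evas_object_show(video_obj);

              ecore_main_loop_begin();
              ecore_evas_shutdown();
              return 0;
           }
      */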
   }
   implements {
      Efl.Object.constructor;
      Efl.Gfx.Entity.position { set; }
      Efl.Gfx.Entity.size { set; }
      Efl.File.load;
      Efl.File.unload;
      Efl.File.file { set; }
      Efl.File.loaded { get; }
      Efl.Player.play { get; set; }
      Efl.Player.pos { get; set; }
      Efl.Player.progress { get; }
      Efl.Player.volume { get; set; }
      Efl.Player.mute { get; set; }
      Efl.Player.length { get; }
      Efl.Player.seekable { get; }
      Efl.Gfx.Image_Load_Controller.load_size { get; }
      Efl.Gfx.Image.ratio { get; }
      Efl.Gfx.Image.smooth_scale { get; set; }
   }
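   /* Usage sketch for the Efl.Player-related overrides above, using the
      legacy Emotion C calls for position, length, volume and mute;
      "video_obj" and the helper name are illustrative.

        #include <Emotion.h>

        static void
        _player_controls(Evas_Object *video_obj)
        {
           double len, pos;

           emotion_object_position_set(video_obj, 60.0);    // seek, in seconds
           len = emotion_object_play_length_get(video_obj); // total length
           pos = emotion_object_position_get(video_obj);    // current position

           emotion_object_audio_volume_set(video_obj, 0.5); // range 0.0 .. 1.0
           emotion_object_audio_mute_set(video_obj, EINA_FALSE);
           (void)len; (void)pos;
        }
   */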
   events {
      frame,decode: void; [[Called when a frame was decoded]]
      position,change: void; [[Called when the position changed]]
      length,change: void; [[Called when the length changed]]
      frame,resize: void; [[Called when the frame was resized]]
      playback,start: void; [[Called when playback started]]
      playback,stop: void; [[Called when playback stopped]]
      volume,change: void; [[Called when the volume changed]]
      channels,change: void; [[Called when the channels changed]]
      title,change: void; [[Called when the title changed]]
      progress,change: void; [[Called when the progress changed]]
      ref,change: void; [[Called when the reference changed]]
      button,num,change: void; [[Called when the button number changed]]
      button,change: void; [[Called when the button changed]]
      open,done: void; [[Called when the file was opened]]
      position,save,done: void; [[Called when the position was saved]]
      position,save,fail: void; [[Called when saving the position failed]]
      position,load,done: void; [[Called when the position was loaded]]
      position,load,fail: void; [[Called when loading the position failed]]
   }
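   /* Usage sketch: reacting to the events above from C. With the legacy API
      they arrive as Evas smart callbacks, and the legacy signal names use
      underscores (e.g. "playback_started" for playback,start) rather than
      the comma-separated names listed here; check the Emotion docs for the
      exact strings. "video_obj" is an existing emotion object.

        #include <Emotion.h>

        static void
        _playback_started_cb(void *data, Evas_Object *obj, void *event_info)
        {
           // Playback has started; update application state here.
        }

        static void
        _track_playback(Evas_Object *video_obj)
        {
           evas_object_smart_callback_add(video_obj, "playback_started",
                                          _playback_started_cb, NULL);
        }
   */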
}
/* FIXME: Need to be added:
EAPI double emotion_object_buffer_size_get (const Evas_Object *obj);
EAPI const char *emotion_object_progress_info_get (const Evas_Object *obj);
Everything starting from the following still needs to be added:
EAPI int emotion_object_audio_channel_count (const Evas_Object *obj);
Should this be part of player or emotion object?
EAPI void emotion_object_event_simple_send (Evas_Object *obj, Emotion_Event ev);
Deliberations:
Should this really implement the image interface?
*/