Shut up stupid emotion debug!

to get them back, use EINA_LOG_LEVELS for each module or the catch-all:

   export EINA_LOG_LEVELS_GLOB='emotion*:4'

ah, that was making me sick while playing enjoy... :-D



SVN revision: 53170
This commit is contained in:
Gustavo Sverzut Barbieri 2010-10-07 22:16:43 +00:00
parent b11b2f95b5
commit 1520c60364
9 changed files with 180 additions and 114 deletions

View File

@ -25,6 +25,12 @@
if (strcmp(_e_smart_str, type)) return ret; \ if (strcmp(_e_smart_str, type)) return ret; \
} }
#define DBG(...) EINA_LOG_DOM_DBG(_log_domain, __VA_ARGS__)
#define INF(...) EINA_LOG_DOM_INFO(_log_domain, __VA_ARGS__)
#define WRN(...) EINA_LOG_DOM_WARN(_log_domain, __VA_ARGS__)
#define ERR(...) EINA_LOG_DOM_ERR(_log_domain, __VA_ARGS__)
#define CRITICAL(...) EINA_LOG_DOM_CRIT(_log_domain, __VA_ARGS__)
#define E_OBJ_NAME "emotion_object" #define E_OBJ_NAME "emotion_object"
typedef struct _Smart_Data Smart_Data; typedef struct _Smart_Data Smart_Data;
@ -88,6 +94,7 @@ static void _smart_clip_unset(Evas_Object * obj);
static Evas_Smart *smart = NULL; static Evas_Smart *smart = NULL;
static Eina_Hash *_backends = NULL; static Eina_Hash *_backends = NULL;
static Eina_Array *_modules = NULL; static Eina_Array *_modules = NULL;
static int _log_domain = -1;
static const char *_backend_priority[] = { static const char *_backend_priority[] = {
"xine", "xine",
@ -160,7 +167,7 @@ _emotion_module_open(const char *name, Evas_Object *obj, Emotion_Video_Module **
E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0); E_SMART_OBJ_GET_RETURN(sd, obj, E_OBJ_NAME, 0);
if (!_backends) if (!_backends)
{ {
fprintf(stderr, "No backend loaded\n"); ERR("No backend loaded");
return NULL; return NULL;
} }
@ -175,7 +182,7 @@ _emotion_module_open(const char *name, Evas_Object *obj, Emotion_Video_Module **
if (index != 0 && index < (sizeof (_backend_priority) / sizeof (char*))) if (index != 0 && index < (sizeof (_backend_priority) / sizeof (char*)))
goto retry; goto retry;
fprintf(stderr, "No backend loaded\n"); ERR("No backend loaded");
return EINA_FALSE; return EINA_FALSE;
} }
@ -191,7 +198,7 @@ _emotion_module_open(const char *name, Evas_Object *obj, Emotion_Video_Module **
if (index != 0 && index < (sizeof (_backend_priority) / sizeof (char*))) if (index != 0 && index < (sizeof (_backend_priority) / sizeof (char*)))
goto retry; goto retry;
fprintf (stderr, "Unable to load module %s\n", name); ERR("Unable to load module: %s", name);
return NULL; return NULL;
} }
@ -298,6 +305,7 @@ emotion_object_file_set(Evas_Object *obj, const char *file)
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("file=%s", file);
if (!sd->module) return; if (!sd->module) return;
if ((file) && (sd->file) && (!strcmp(file, sd->file))) return; if ((file) && (sd->file) && (!strcmp(file, sd->file))) return;
@ -344,6 +352,7 @@ emotion_object_play_set(Evas_Object *obj, Eina_Bool play)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("play=%hhu, was=%hhu", play, sd->play);
if (play == sd->play) return; if (play == sd->play) return;
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
@ -369,6 +378,7 @@ emotion_object_position_set(Evas_Object *obj, double sec)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("sec=%f", sec);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->seek_pos = sec; sd->seek_pos = sec;
@ -513,6 +523,7 @@ emotion_object_audio_volume_set(Evas_Object *obj, double vol)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("vol=%f", vol);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->audio_channel_volume_set(sd->video, vol); sd->module->audio_channel_volume_set(sd->video, vol);
@ -535,6 +546,7 @@ emotion_object_audio_mute_set(Evas_Object *obj, Eina_Bool mute)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("mute=%hhu", mute);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->audio_channel_mute_set(sd->video, mute); sd->module->audio_channel_mute_set(sd->video, mute);
@ -579,6 +591,7 @@ emotion_object_audio_channel_set(Evas_Object *obj, int channel)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("channel=%d", channel);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->audio_channel_set(sd->video, channel); sd->module->audio_channel_set(sd->video, channel);
@ -601,6 +614,7 @@ emotion_object_video_mute_set(Evas_Object *obj, Eina_Bool mute)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("mute=%hhu", mute);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->video_channel_mute_set(sd->video, mute); sd->module->video_channel_mute_set(sd->video, mute);
@ -645,6 +659,7 @@ emotion_object_video_channel_set(Evas_Object *obj, int channel)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("channel=%d", channel);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->video_channel_set(sd->video, channel); sd->module->video_channel_set(sd->video, channel);
@ -667,6 +682,7 @@ emotion_object_spu_mute_set(Evas_Object *obj, Eina_Bool mute)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("mute=%hhu", mute);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->spu_channel_mute_set(sd->video, mute); sd->module->spu_channel_mute_set(sd->video, mute);
@ -711,6 +727,7 @@ emotion_object_spu_channel_set(Evas_Object *obj, int channel)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("channel=%d", channel);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->spu_channel_set(sd->video, channel); sd->module->spu_channel_set(sd->video, channel);
@ -744,6 +761,7 @@ emotion_object_chapter_set(Evas_Object *obj, int chapter)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("chapter=%d", chapter);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->chapter_set(sd->video, chapter); sd->module->chapter_set(sd->video, chapter);
@ -777,6 +795,7 @@ emotion_object_play_speed_set(Evas_Object *obj, double speed)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("speed=%f", speed);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
sd->module->speed_set(sd->video, speed); sd->module->speed_set(sd->video, speed);
@ -910,6 +929,7 @@ emotion_object_vis_set(Evas_Object *obj, Emotion_Vis visualization)
Smart_Data *sd; Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME); E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
DBG("visualization=%d", visualization);
if (!sd->module) return; if (!sd->module) return;
if (!sd->video) return; if (!sd->video) return;
if (!sd->module->vis_set) return; if (!sd->module->vis_set) return;
@ -1315,6 +1335,14 @@ _smart_init(void)
{ {
eina_init(); eina_init();
_log_domain = eina_log_domain_register("emotion", EINA_COLOR_LIGHTCYAN);
if (_log_domain < 0)
{
EINA_LOG_CRIT("Could not register log domain 'emotion'");
eina_shutdown();
return;
}
_backends = eina_hash_string_small_new(free); _backends = eina_hash_string_small_new(free);
_modules = eina_module_list_get(NULL, PACKAGE_LIB_DIR "/emotion/", 0, NULL, NULL); _modules = eina_module_list_get(NULL, PACKAGE_LIB_DIR "/emotion/", 0, NULL, NULL);
@ -1333,8 +1361,8 @@ _smart_init(void)
if (!_modules) if (!_modules)
{ {
fprintf(stderr, "No module found !\n"); ERR("No module found!");
return ; return;
} }
eina_module_list_load(_modules); eina_module_list_load(_modules);

View File

@ -8,6 +8,8 @@
#include "emotion_gstreamer_pipeline.h" #include "emotion_gstreamer_pipeline.h"
#include "Emotion.h" #include "Emotion.h"
int _emotion_gstreamer_log_domain = -1;
/* Callbacks to get the eos */ /* Callbacks to get the eos */
static Eina_Bool _eos_timer_fct (void *data); static Eina_Bool _eos_timer_fct (void *data);
static void _em_buffer_read(void *data, void *buffer, unsigned int nbyte); static void _em_buffer_read(void *data, void *buffer, unsigned int nbyte);
@ -345,10 +347,10 @@ em_file_open(const char *file,
device = NULL; device = NULL;
sscanf(file, "cdda://%d", &track); sscanf(file, "cdda://%d", &track);
} }
fprintf(stderr, "[Emotion] [gst] build CD Audio pipeline\n"); DBG("Build CD Audio pipeline");
if (!(emotion_pipeline_cdda_build(ev, device, track))) if (!(emotion_pipeline_cdda_build(ev, device, track)))
{ {
fprintf(stderr, "[Emotion] [gst] error while building CD Audio pipeline\n"); ERR("Could not build CD Audio pipeline");
gst_object_unref(ev->pipeline); gst_object_unref(ev->pipeline);
return 0; return 0;
} }
@ -357,10 +359,10 @@ em_file_open(const char *file,
else if (strstr(file, "dvd://")) else if (strstr(file, "dvd://"))
{ {
fprintf(stderr, "[Emotion] [gst] build DVD pipeline\n"); DBG("Build DVD pipeline");
if (!(emotion_pipeline_dvd_build(ev, NULL))) if (!(emotion_pipeline_dvd_build(ev, NULL)))
{ {
fprintf(stderr, "[Emotion] [gst] error while building DVD pipeline\n"); ERR("Could not build DVD pipeline");
gst_object_unref(ev->pipeline); gst_object_unref(ev->pipeline);
return 0; return 0;
} }
@ -368,10 +370,10 @@ em_file_open(const char *file,
/* http */ /* http */
else if (strstr(file, "http://")) else if (strstr(file, "http://"))
{ {
fprintf(stderr, "[Emotion] [gst] build URI pipeline\n"); DBG("Build URI pipeline");
if (!(emotion_pipeline_uri_build(ev, file))) if (!(emotion_pipeline_uri_build(ev, file)))
{ {
fprintf(stderr, "[Emotion] [gst] error while building URI pipeline\n"); ERR("Could not build URI pipeline");
gst_object_unref(ev->pipeline); gst_object_unref(ev->pipeline);
return 0; return 0;
} }
@ -379,10 +381,10 @@ em_file_open(const char *file,
/* v4l */ /* v4l */
else if (strstr(file, "v4l://")) else if (strstr(file, "v4l://"))
{ {
fprintf(stderr, "[Emotion] [gst] build V4L pipeline\n"); DBG("Build V4L pipeline");
if (!(emotion_pipeline_v4l_build(ev, file))) if (!(emotion_pipeline_v4l_build(ev, file)))
{ {
fprintf(stderr, "[Emotion] [gst] error while building V4L pipeline\n"); ERR("Could not build V4L pipeline");
gst_object_unref(ev->pipeline); gst_object_unref(ev->pipeline);
return 0; return 0;
} }
@ -396,10 +398,10 @@ em_file_open(const char *file,
? file + strlen("file://") ? file + strlen("file://")
: file; : file;
fprintf(stderr, "[Emotion] [gst] build file pipeline\n"); DBG("Build file pipeline");
if (!(emotion_pipeline_file_build(ev, filename))) if (!(emotion_pipeline_file_build(ev, filename)))
{ {
fprintf(stderr, "[Emotion] [gst] error while building File pipeline\n"); ERR("Could not build File pipeline");
gst_object_unref(ev->pipeline); gst_object_unref(ev->pipeline);
return 0; return 0;
} }
@ -415,22 +417,19 @@ em_file_open(const char *file,
vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks); vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks);
if (vsink) if (vsink)
{ {
fprintf(stderr, "video : \n"); DBG("video size=%dx%d, fps=%d/%d, "
fprintf(stderr, " size : %dx%d\n", vsink->width, vsink->height); "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
fprintf(stderr, " fps : %d/%d\n", vsink->fps_num, vsink->fps_den); vsink->width, vsink->height, vsink->fps_num, vsink->fps_den,
fprintf(stderr, " fourcc : %" GST_FOURCC_FORMAT "\n", GST_FOURCC_ARGS(vsink->fourcc)); GST_FOURCC_ARGS(vsink->fourcc),
fprintf(stderr, " length : %" GST_TIME_FORMAT "\n\n", GST_TIME_ARGS((guint64)(vsink->length_time * GST_SECOND)));
GST_TIME_ARGS((guint64)(vsink->length_time * GST_SECOND)));
} }
asink = (Emotion_Audio_Sink *)eina_list_data_get(ev->audio_sinks); asink = (Emotion_Audio_Sink *)eina_list_data_get(ev->audio_sinks);
if (asink) if (asink)
{ {
fprintf(stderr, "audio : \n"); DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
fprintf(stderr, " chan : %d\n", asink->channels); asink->channels, asink->samplerate,
fprintf(stderr, " rate : %d\n", asink->samplerate); GST_TIME_ARGS((guint64)(asink->length_time * GST_SECOND)));
fprintf(stderr, " length : %" GST_TIME_FORMAT "\n\n",
GST_TIME_ARGS((guint64)(asink->length_time * GST_SECOND)));
} }
} }
@ -598,7 +597,7 @@ em_len_get(void *video)
if (fmt != GST_FORMAT_TIME) if (fmt != GST_FORMAT_TIME)
{ {
fprintf(stderr, "requrested duration in time, but got %s instead.", DBG("requrested duration in time, but got %s instead.",
gst_format_get_name(fmt)); gst_format_get_name(fmt));
goto fallback; goto fallback;
} }
@ -681,8 +680,8 @@ em_pos_get(void *video)
if (fmt != GST_FORMAT_TIME) if (fmt != GST_FORMAT_TIME)
{ {
fprintf(stderr, "requrested position in time, but got %s instead.", ERR("requrested position in time, but got %s instead.",
gst_format_get_name(fmt)); gst_format_get_name(fmt));
return ev->position; return ev->position;
} }
@ -1199,6 +1198,19 @@ module_open(Evas_Object *obj,
if (!module) if (!module)
return EINA_FALSE; return EINA_FALSE;
if (_emotion_gstreamer_log_domain < 0)
{
eina_threads_init();
eina_log_threads_enable();
_emotion_gstreamer_log_domain = eina_log_domain_register
("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
if (_emotion_gstreamer_log_domain < 0)
{
EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
return EINA_FALSE;
}
}
if (!em_module.init(obj, video, opt)) if (!em_module.init(obj, video, opt))
return EINA_FALSE; return EINA_FALSE;
@ -1404,7 +1416,7 @@ _eos_timer_fct(void *data)
gst_message_parse_error(msg, &err, &debug); gst_message_parse_error(msg, &err, &debug);
g_free(debug); g_free(debug);
g_print("Error: %s\n", err->message); ERR("Error: %s", err->message);
g_error_free(err); g_error_free(err);
break; break;

View File

@ -92,4 +92,11 @@ struct _Emotion_Gstreamer_Video
unsigned char audio_mute : 1; unsigned char audio_mute : 1;
}; };
extern int _emotion_gstreamer_log_domain;
#define DBG(...) EINA_LOG_DOM_DBG(_emotion_gstreamer_log_domain, __VA_ARGS__)
#define INF(...) EINA_LOG_DOM_INFO(_emotion_gstreamer_log_domain, __VA_ARGS__)
#define WRN(...) EINA_LOG_DOM_WARN(_emotion_gstreamer_log_domain, __VA_ARGS__)
#define ERR(...) EINA_LOG_DOM_ERR(_emotion_gstreamer_log_domain, __VA_ARGS__)
#define CRITICAL(...) EINA_LOG_DOM_CRIT(_emotion_gstreamer_log_domain, __VA_ARGS__)
#endif /* __EMOTION_GSTREAMER_H__ */ #endif /* __EMOTION_GSTREAMER_H__ */

View File

@ -14,14 +14,14 @@ emotion_pipeline_pause(GstElement *pipeline)
res = gst_element_set_state((pipeline), GST_STATE_PAUSED); res = gst_element_set_state((pipeline), GST_STATE_PAUSED);
if (res == GST_STATE_CHANGE_FAILURE) if (res == GST_STATE_CHANGE_FAILURE)
{ {
g_print("Emotion-Gstreamer ERROR: could not pause\n"); ERR("could not pause");
return 0; return 0;
} }
res = gst_element_get_state((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE); res = gst_element_get_state((pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
if (res != GST_STATE_CHANGE_SUCCESS) if (res != GST_STATE_CHANGE_SUCCESS)
{ {
g_print("Emotion-Gstreamer ERROR: could not complete pause\n"); ERR("could not complete pause");
return 0; return 0;
} }
@ -487,7 +487,7 @@ emotion_streams_sinks_get(Emotion_Gstreamer_Video *ev, GstElement *decoder)
caps = gst_pad_get_caps(pad); caps = gst_pad_get_caps(pad);
str = gst_caps_to_string(caps); str = gst_caps_to_string(caps);
g_print("caps !! %s\n", str); DBG("caps %s", str);
/* video stream */ /* video stream */
if (g_str_has_prefix(str, "video/")) if (g_str_has_prefix(str, "video/"))

View File

@ -21,7 +21,7 @@ emotion_pipeline_cdda_build(void *video, const char * device, unsigned int track
cdiocddasrc = gst_element_factory_make("cdiocddasrc", "src"); cdiocddasrc = gst_element_factory_make("cdiocddasrc", "src");
if (!cdiocddasrc) if (!cdiocddasrc)
{ {
g_print("cdiocddasrc element missing. Install it.\n"); ERR("cdiocddasrc gstreamer element missing. Install it.");
goto failure_cdiocddasrc; goto failure_cdiocddasrc;
} }

View File

@ -9,7 +9,7 @@ static void dvd_pad_added_cb (GstElement *dvddemuxer,
static void dvd_no_more_pads_cb (GstElement *dvddemuxer, static void dvd_no_more_pads_cb (GstElement *dvddemuxer,
gpointer user_data); gpointer user_data);
static int no_more_pads = 0; static volatile int no_more_pads = 0;
int int
@ -47,7 +47,7 @@ emotion_pipeline_dvd_build(void *video, const char *device)
while (no_more_pads == 0) while (no_more_pads == 0)
{ {
g_print("toto\n"); DBG("toto");
} }
no_more_pads = 0; no_more_pads = 0;
@ -70,7 +70,7 @@ emotion_pipeline_dvd_build(void *video, const char *device)
caps = gst_pad_get_caps(pad); caps = gst_pad_get_caps(pad);
str = gst_caps_to_string(caps); str = gst_caps_to_string(caps);
g_print("caps !! %s\n", str); DBG("caps %s", str);
/* video stream */ /* video stream */
if (g_str_has_prefix(str, "video/mpeg")) if (g_str_has_prefix(str, "video/mpeg"))
{ {
@ -83,7 +83,7 @@ emotion_pipeline_dvd_build(void *video, const char *device)
sink_pad = gst_element_get_pad(gst_bin_get_by_name(GST_BIN(ev->pipeline), "mpeg2dec"), "src"); sink_pad = gst_element_get_pad(gst_bin_get_by_name(GST_BIN(ev->pipeline), "mpeg2dec"), "src");
sink_caps = gst_pad_get_caps(sink_pad); sink_caps = gst_pad_get_caps(sink_pad);
str = gst_caps_to_string(sink_caps); str = gst_caps_to_string(sink_caps);
g_print(" ** caps v !! %s\n", str); DBG("caps video %s", str);
emotion_video_sink_fill(vsink, sink_pad, sink_caps); emotion_video_sink_fill(vsink, sink_pad, sink_caps);

View File

@ -2,6 +2,8 @@
#include "emotion_private.h" #include "emotion_private.h"
#include "emotion_xine.h" #include "emotion_xine.h"
int _emotion_xine_log_domain = -1;
/* module api */ /* module api */
static unsigned char em_init (Evas_Object *obj, void **emotion_video, Emotion_Module_Options *opt); static unsigned char em_init (Evas_Object *obj, void **emotion_video, Emotion_Module_Options *opt);
static int em_shutdown (void *ef); static int em_shutdown (void *ef);
@ -112,11 +114,11 @@ _em_slave(void *par)
xine_config_update_entry(ev->decoder, &cf); xine_config_update_entry(ev->decoder, &cf);
} }
} }
printf("OPEN VIDEO PLUGIN...\n"); DBG("OPEN VIDEO PLUGIN...");
if (!ev->opt_no_video) if (!ev->opt_no_video)
ev->video = xine_open_video_driver(ev->decoder, "emotion", ev->video = xine_open_video_driver(ev->decoder, "emotion",
XINE_VISUAL_TYPE_NONE, ev); XINE_VISUAL_TYPE_NONE, ev);
printf("RESULT: xine_open_video_driver() = %p\n", ev->video); DBG("RESULT: xine_open_video_driver() = %p", ev->video);
// Let xine autodetect the best audio output driver // Let xine autodetect the best audio output driver
if (!ev->opt_no_audio) if (!ev->opt_no_audio)
ev->audio = xine_open_audio_driver(ev->decoder, NULL, ev); ev->audio = xine_open_audio_driver(ev->decoder, NULL, ev);
@ -136,31 +138,31 @@ _em_slave(void *par)
case 3: /* shutdown */ case 3: /* shutdown */
{ {
_em_module_event(ev, 3); _em_module_event(ev, 3);
printf("EX shutdown stop\n"); DBG("shutdown stop");
xine_stop(ev->stream); xine_stop(ev->stream);
// pthread_mutex_lock(&(ev->get_pos_len_mutex)); // pthread_mutex_lock(&(ev->get_pos_len_mutex));
if (!ev->get_pos_thread_deleted) if (!ev->get_pos_thread_deleted)
{ {
printf("closing get_pos thread, %p\n", ev); DBG("closing get_pos thread, %p", ev);
pthread_mutex_lock(&(ev->get_pos_len_mutex)); pthread_mutex_lock(&(ev->get_pos_len_mutex));
pthread_cond_broadcast(&(ev->get_pos_len_cond)); pthread_cond_broadcast(&(ev->get_pos_len_cond));
pthread_mutex_unlock(&(ev->get_pos_len_mutex)); pthread_mutex_unlock(&(ev->get_pos_len_mutex));
while (ev->get_poslen); while (ev->get_poslen);
} }
printf("EX dispose %p\n", ev); DBG("dispose %p", ev);
xine_dispose(ev->stream); xine_dispose(ev->stream);
printf("EX dispose evq %p\n", ev); DBG("dispose evq %p", ev);
xine_event_dispose_queue(ev->queue); xine_event_dispose_queue(ev->queue);
printf("EX close video drv %p\n", ev); DBG("close video drv %p", ev);
if (ev->video) xine_close_video_driver(ev->decoder, ev->video); if (ev->video) xine_close_video_driver(ev->decoder, ev->video);
printf("EX wait for vo to go\n"); DBG("wait for vo to go");
while (ev->have_vo); while (ev->have_vo);
printf("EX vo gone\n"); DBG("vo gone");
printf("EX close audio drv %p\n", ev); DBG("close audio drv %p", ev);
if (ev->audio) xine_close_audio_driver(ev->decoder, ev->audio); if (ev->audio) xine_close_audio_driver(ev->decoder, ev->audio);
printf("EX xine exit %p\n", ev); DBG("xine exit %p", ev);
xine_exit(ev->decoder); xine_exit(ev->decoder);
printf("EX DONE %p\n", ev); DBG("DONE %p", ev);
close(ev->fd_write); close(ev->fd_write);
close(ev->fd_read); close(ev->fd_read);
close(ev->fd_ev_write); close(ev->fd_ev_write);
@ -183,7 +185,7 @@ _em_slave(void *par)
char *file; char *file;
file = eev->xine_event; file = eev->xine_event;
printf("OPN STREAM %s\n", file); DBG("OPEN STREAM %s", file);
if (xine_open(ev->stream, file)) if (xine_open(ev->stream, file))
{ {
if (xine_get_pos_length(ev->stream, &pos_stream, &pos_time, &length_time)) if (xine_get_pos_length(ev->stream, &pos_stream, &pos_time, &length_time))
@ -222,13 +224,13 @@ _em_slave(void *par)
break; break;
case 11: /* file close */ case 11: /* file close */
{ {
printf("EX done %p\n", ev); DBG("done %p", ev);
em_frame_done(ev); em_frame_done(ev);
printf("EX stop %p\n", ev); DBG("stop %p", ev);
xine_stop(ev->stream); xine_stop(ev->stream);
printf("EX close %p\n", ev); DBG("close %p", ev);
xine_close(ev->stream); xine_close(ev->stream);
printf("EX close done %p\n", ev); DBG("close done %p", ev);
_em_module_event(ev, 11); _em_module_event(ev, 11);
} }
break; break;
@ -428,7 +430,7 @@ em_shutdown(void *ef)
ev = (Emotion_Xine_Video *)ef; ev = (Emotion_Xine_Video *)ef;
ev->closing = 1; ev->closing = 1;
ev->delete_me = 1; ev->delete_me = 1;
printf("EXM del fds %p\n", ev); DBG("del fds %p", ev);
ecore_main_fd_handler_del(ev->fd_handler); ecore_main_fd_handler_del(ev->fd_handler);
ev->fd_handler = NULL; ev->fd_handler = NULL;
ecore_main_fd_handler_del(ev->fd_ev_handler); ecore_main_fd_handler_del(ev->fd_ev_handler);
@ -441,7 +443,7 @@ em_shutdown(void *ef)
ev->closing = 1; ev->closing = 1;
_em_slave_event(ev, 3, NULL); _em_slave_event(ev, 3, NULL);
printf("EXM done %p\n", ev); DBG("done %p", ev);
return 1; return 1;
} }
@ -1367,7 +1369,7 @@ _em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh)
break; break;
case XINE_EVENT_UI_MESSAGE: case XINE_EVENT_UI_MESSAGE:
{ {
printf("EV: UI Message [FIXME: break this out to emotion api]\n"); WRN("UI Message [FIXME: break this out to emotion api]");
// e->type = error type(XINE_MSG_NO_ERROR, XINE_MSG_GENERAL_WARNING, XINE_MSG_UNKNOWN_HOST etc.) // e->type = error type(XINE_MSG_NO_ERROR, XINE_MSG_GENERAL_WARNING, XINE_MSG_UNKNOWN_HOST etc.)
// e->messages is a list of messages DOUBLE null terminated // e->messages is a list of messages DOUBLE null terminated
} }
@ -1375,7 +1377,7 @@ _em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh)
case XINE_EVENT_AUDIO_LEVEL: case XINE_EVENT_AUDIO_LEVEL:
{ {
_emotion_audio_level_change(ev->obj); _emotion_audio_level_change(ev->obj);
printf("EV: Audio Level [FIXME: break this out to emotion api]\n"); WRN("Audio Level [FIXME: break this out to emotion api]");
// e->left (0->100) // e->left (0->100)
// e->right // e->right
// e->mute // e->mute
@ -1386,7 +1388,7 @@ _em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh)
xine_progress_data_t *e; xine_progress_data_t *e;
e = (xine_progress_data_t *)eev->xine_event; e = (xine_progress_data_t *)eev->xine_event;
printf("PROGRESS: %i\n", e->percent); DBG("PROGRESS: %i", e->percent);
_emotion_progress_set(ev->obj, (char *)e->description, (double)e->percent / 100.0); _emotion_progress_set(ev->obj, (char *)e->description, (double)e->percent / 100.0);
} }
break; break;
@ -1422,13 +1424,13 @@ _em_fd_ev_active(void *data, Ecore_Fd_Handler *fdh)
xine_dropped_frames_t *e; xine_dropped_frames_t *e;
e = (xine_dropped_frames_t *)eev->xine_event; e = (xine_dropped_frames_t *)eev->xine_event;
printf("EV: Dropped Frames (skipped %i) (discarded %i) [FIXME: break this out to the emotion api]\n", e->skipped_frames, e->discarded_frames); WRN("Dropped Frames (skipped %i) (discarded %i) [FIXME: break this out to the emotion api]", e->skipped_frames, e->discarded_frames);
// e->skipped_frames = % frames skipped * 10 // e->skipped_frames = % frames skipped * 10
// e->discarded_frames = % frames skipped * 10 // e->discarded_frames = % frames skipped * 10
} }
break; break;
default: default:
// printf("EV: unknown event type %i\n", eev->type); // DBG("unknown event type %i", eev->type);
break; break;
} }
} }
@ -1474,7 +1476,7 @@ _em_get_pos_len_th(void *par)
} }
ev->get_poslen = 0; ev->get_poslen = 0;
_em_module_event(ev, 15); /* event - getpos done */ _em_module_event(ev, 15); /* event - getpos done */
//printf("get pos %3.3f\n", ev->pos); //DBG("get pos %3.3f", ev->pos);
} }
if (ev->delete_me) if (ev->delete_me)
{ {
@ -1563,6 +1565,19 @@ module_open(Evas_Object *obj, const Emotion_Video_Module **module, void **video,
if (!module) if (!module)
return EINA_FALSE; return EINA_FALSE;
if (_emotion_xine_log_domain < 0)
{
eina_threads_init();
eina_log_threads_enable();
_emotion_xine_log_domain = eina_log_domain_register
("emotion-xine", EINA_COLOR_LIGHTCYAN);
if (_emotion_xine_log_domain < 0)
{
EINA_LOG_CRIT("Could not register log domain 'emotion-xine'");
return EINA_FALSE;
}
}
if (!em_module.init(obj, video, opt)) if (!em_module.init(obj, video, opt))
return EINA_FALSE; return EINA_FALSE;
@ -1637,48 +1652,46 @@ em_debug(Emotion_Xine_Video *ev)
spu_channel = xine_get_param(ev->stream, XINE_PARAM_SPU_CHANNEL); spu_channel = xine_get_param(ev->stream, XINE_PARAM_SPU_CHANNEL);
video_ratio = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_VIDEO_RATIO); video_ratio = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_VIDEO_RATIO);
audio_mode = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_AUDIO_MODE); audio_mode = xine_get_stream_info(ev->stream, XINE_STREAM_INFO_AUDIO_MODE);
printf("has_chapters = %i\n", has_chapters); DBG("has_chapters = %i", has_chapters);
printf("max_spu = %i\n", max_spu); DBG("max_spu = %i", max_spu);
printf("max_audio = %i\n", max_audio); DBG("max_audio = %i", max_audio);
printf("video_channels = %i\n", video_channels); DBG("video_channels = %i", video_channels);
printf("video_streams = %i\n", video_streams); DBG("video_streams = %i", video_streams);
printf("video_seekable = %i\n", video_seekable); DBG("video_seekable = %i", video_seekable);
printf("title = %s\n", title); DBG("title = %s", title);
printf("comment = %s\n", comment); DBG("comment = %s", comment);
printf("artist = %s\n", artist); DBG("artist = %s", artist);
printf("genre = %s\n", genre); DBG("genre = %s", genre);
printf("album = %s\n", album); DBG("album = %s", album);
printf("year = %s\n", year); DBG("year = %s", year);
printf("cdindex_discid = %s\n", cdindex_discid); DBG("cdindex_discid = %s", cdindex_discid);
printf("video_channel = %i\n", video_channel); DBG("video_channel = %i", video_channel);
printf("audio_channel = %i\n", audio_channel); DBG("audio_channel = %i", audio_channel);
printf("spu_channels = %i\n", spu_channel); DBG("spu_channels = %i", spu_channel);
printf("video_ratio = %i\n", video_ratio); DBG("video_ratio = %i", video_ratio);
printf("audio_mode = %i\n", audio_mode); DBG("audio_mode = %i", audio_mode);
{ {
int i; int i;
for (i = 0; i <= max_audio; i++) for (i = 0; i <= max_audio; i++)
{ {
char lang[XINE_LANG_MAX + 1]; char lang[XINE_LANG_MAX + 1];
char buf[128] = "NONE";
lang[0] = 0; lang[0] = 0;
printf(" AUDIO %i = ", i);
if (xine_get_audio_lang(ev->stream, i, lang)) if (xine_get_audio_lang(ev->stream, i, lang))
printf("%s\n", lang); eina_strlcpy(buf, lang, sizeof(buf));
else DBG(" AUDIO %i = %s", i, buf);
printf("NONE\n");
} }
for (i = 0; i <= max_spu; i++) for (i = 0; i <= max_spu; i++)
{ {
char lang[XINE_LANG_MAX + 1]; char lang[XINE_LANG_MAX + 1];
char buf[128] = "NONE";
lang[0] = 0; lang[0] = 0;
printf(" SPU %i = ", i);
if (xine_get_spu_lang(ev->stream, i, lang)) if (xine_get_spu_lang(ev->stream, i, lang))
printf("%s\n", lang); eina_strlcpy(buf, lang, sizeof(buf));
else DBG(" SPU %i = %s", i, buf);
printf("NONE\n");
} }
} }
} }

View File

@ -87,4 +87,11 @@ struct _Emotion_Xine_Event
int mtype; int mtype;
}; };
extern int _emotion_xine_log_domain;
#define DBG(...) EINA_LOG_DOM_DBG(_emotion_xine_log_domain, __VA_ARGS__)
#define INF(...) EINA_LOG_DOM_INFO(_emotion_xine_log_domain, __VA_ARGS__)
#define WRN(...) EINA_LOG_DOM_WARN(_emotion_xine_log_domain, __VA_ARGS__)
#define ERR(...) EINA_LOG_DOM_ERR(_emotion_xine_log_domain, __VA_ARGS__)
#define CRITICAL(...) EINA_LOG_DOM_CRIT(_emotion_xine_log_domain, __VA_ARGS__)
#endif #endif

View File

@ -115,7 +115,7 @@ _emotion_class_init(xine_t *xine, void *visual __UNUSED__)
{ {
Emotion_Class *cl; Emotion_Class *cl;
// printf("emotion: _emotion_class_init()\n"); // DBG("");
cl = (Emotion_Class *) malloc(sizeof(Emotion_Class)); cl = (Emotion_Class *) malloc(sizeof(Emotion_Class));
if (!cl) return NULL; if (!cl) return NULL;
cl->driver_class.open_plugin = _emotion_open; cl->driver_class.open_plugin = _emotion_open;
@ -158,7 +158,7 @@ _emotion_open(video_driver_class_t *driver_class, const void *visual)
cl = (Emotion_Class *)driver_class; cl = (Emotion_Class *)driver_class;
/* visual here is the data ptr passed to xine_open_video_driver() */ /* visual here is the data ptr passed to xine_open_video_driver() */
// printf("emotion: _emotion_open()\n"); // DBG("");
dv = (Emotion_Driver *)malloc(sizeof(Emotion_Driver)); dv = (Emotion_Driver *)malloc(sizeof(Emotion_Driver));
if (!dv) return NULL; if (!dv) return NULL;
@ -180,7 +180,7 @@ _emotion_open(video_driver_class_t *driver_class, const void *visual)
dv->vo_driver.redraw_needed = _emotion_redraw; dv->vo_driver.redraw_needed = _emotion_redraw;
dv->ev = (Emotion_Xine_Video *)visual; dv->ev = (Emotion_Xine_Video *)visual;
dv->ev->have_vo = 1; dv->ev->have_vo = 1;
printf("emotion: _emotion_open = %p\n", &dv->vo_driver); DBG("vo_driver = %p", &dv->vo_driver);
return &dv->vo_driver; return &dv->vo_driver;
} }
@ -191,7 +191,7 @@ _emotion_dispose(vo_driver_t *vo_driver)
dv = (Emotion_Driver *)vo_driver; dv = (Emotion_Driver *)vo_driver;
dv->ev->have_vo = 0; dv->ev->have_vo = 0;
printf("emotion: _emotion_dispose(%p)\n", dv); DBG("vo_driver = %p", dv);
free(dv); free(dv);
} }
@ -199,7 +199,7 @@ _emotion_dispose(vo_driver_t *vo_driver)
static int static int
_emotion_redraw(vo_driver_t *vo_driver __UNUSED__) _emotion_redraw(vo_driver_t *vo_driver __UNUSED__)
{ {
// printf("emotion: _emotion_redraw()\n"); // DBG("");
return 0; return 0;
} }
@ -207,7 +207,7 @@ _emotion_redraw(vo_driver_t *vo_driver __UNUSED__)
static uint32_t static uint32_t
_emotion_capabilities_get(vo_driver_t *vo_driver __UNUSED__) _emotion_capabilities_get(vo_driver_t *vo_driver __UNUSED__)
{ {
// printf("emotion: _emotion_capabilities_get()\n"); // DBG("");
return VO_CAP_YV12 | VO_CAP_YUY2; return VO_CAP_YV12 | VO_CAP_YUY2;
} }
@ -215,7 +215,7 @@ _emotion_capabilities_get(vo_driver_t *vo_driver __UNUSED__)
static int static int
_emotion_gui_data_exchange(vo_driver_t *vo_driver __UNUSED__, int data_type, void *data __UNUSED__) _emotion_gui_data_exchange(vo_driver_t *vo_driver __UNUSED__, int data_type, void *data __UNUSED__)
{ {
// printf("emotion: _emotion_gui_data_exchange()\n"); // DBG("");
switch (data_type) switch (data_type)
{ {
case XINE_GUI_SEND_COMPLETION_EVENT: case XINE_GUI_SEND_COMPLETION_EVENT:
@ -243,13 +243,13 @@ _emotion_property_set(vo_driver_t *vo_driver, int property, int value)
Emotion_Driver *dv; Emotion_Driver *dv;
dv = (Emotion_Driver *)vo_driver; dv = (Emotion_Driver *)vo_driver;
// printf("emotion: _emotion_property_set()\n"); // DBG("");
switch (property) switch (property)
{ {
case VO_PROP_ASPECT_RATIO: case VO_PROP_ASPECT_RATIO:
if (value >= XINE_VO_ASPECT_NUM_RATIOS) if (value >= XINE_VO_ASPECT_NUM_RATIOS)
value = XINE_VO_ASPECT_AUTO; value = XINE_VO_ASPECT_AUTO;
// printf("DRIVER RATIO SET %i!\n", value); // DBG("DRIVER RATIO SET %i!", value);
dv->ratio = value; dv->ratio = value;
break; break;
default: default:
@ -264,7 +264,7 @@ _emotion_property_get(vo_driver_t *vo_driver, int property)
Emotion_Driver *dv; Emotion_Driver *dv;
dv = (Emotion_Driver *)vo_driver; dv = (Emotion_Driver *)vo_driver;
// printf("emotion: _emotion_property_get()\n"); // DBG("");
switch (property) switch (property)
{ {
case VO_PROP_ASPECT_RATIO: case VO_PROP_ASPECT_RATIO:
@ -279,7 +279,7 @@ _emotion_property_get(vo_driver_t *vo_driver, int property)
static void static void
_emotion_property_min_max_get(vo_driver_t *vo_driver __UNUSED__, int property __UNUSED__, int *min, int *max) _emotion_property_min_max_get(vo_driver_t *vo_driver __UNUSED__, int property __UNUSED__, int *min, int *max)
{ {
// printf("emotion: _emotion_property_min_max_get()\n"); // DBG("");
*min = 0; *min = 0;
*max = 0; *max = 0;
} }
@ -290,7 +290,7 @@ _emotion_frame_alloc(vo_driver_t *vo_driver __UNUSED__)
{ {
Emotion_Frame *fr; Emotion_Frame *fr;
// printf("emotion: _emotion_frame_alloc()\n"); // DBG("");
fr = (Emotion_Frame *)calloc(1, sizeof(Emotion_Frame)); fr = (Emotion_Frame *)calloc(1, sizeof(Emotion_Frame));
if (!fr) return NULL; if (!fr) return NULL;
@ -313,7 +313,7 @@ _emotion_frame_dispose(vo_frame_t *vo_frame)
Emotion_Frame *fr; Emotion_Frame *fr;
fr = (Emotion_Frame *)vo_frame; fr = (Emotion_Frame *)vo_frame;
// printf("emotion: _emotion_frame_dispose()\n"); // DBG("");
_emotion_frame_data_free(fr); _emotion_frame_data_free(fr);
free(fr); free(fr);
} }
@ -330,7 +330,7 @@ _emotion_frame_format_update(vo_driver_t *vo_driver, vo_frame_t *vo_frame, uint3
if ((fr->width != width) || (fr->height != height) || if ((fr->width != width) || (fr->height != height) ||
(fr->format != format) || (!fr->vo_frame.base[0])) (fr->format != format) || (!fr->vo_frame.base[0]))
{ {
// printf("emotion: _emotion_frame_format_update()\n"); // DBG("");
_emotion_frame_data_free(fr); _emotion_frame_data_free(fr);
fr->width = width; fr->width = width;
@ -417,8 +417,7 @@ _emotion_frame_display(vo_driver_t *vo_driver, vo_frame_t *vo_frame)
dv = (Emotion_Driver *)vo_driver; dv = (Emotion_Driver *)vo_driver;
fr = (Emotion_Frame *)vo_frame; fr = (Emotion_Frame *)vo_frame;
// printf("emotion: _emotion_frame_display()\n"); // DBG("fq %i %p", dv->ev->fq, dv->ev);
// printf("EX VO: fq %i %p\n", dv->ev->fq, dv->ev);
// if my frame queue is too deep ( > 4 frames) simply block and wait for them // if my frame queue is too deep ( > 4 frames) simply block and wait for them
// to drain // to drain
// while (dv->ev->fq > 4) usleep(1); // while (dv->ev->fq > 4) usleep(1);
@ -436,9 +435,9 @@ _emotion_frame_display(vo_driver_t *vo_driver, vo_frame_t *vo_frame)
fr->frame.timestamp = (double)fr->vo_frame.vpts / 90000.0; fr->frame.timestamp = (double)fr->vo_frame.vpts / 90000.0;
fr->frame.done_func = _emotion_frame_data_unlock; fr->frame.done_func = _emotion_frame_data_unlock;
fr->frame.done_data = fr; fr->frame.done_data = fr;
// printf("FRAME FOR %p\n", dv->ev); // DBG("FRAME FOR %p", dv->ev);
write(dv->ev->fd_write, &buf, sizeof(void *)); write(dv->ev->fd_write, &buf, sizeof(void *));
// printf("-- FRAME DEC %p == %i\n", fr->frame.obj, ret); // DBG("-- FRAME DEC %p == %i", fr->frame.obj, ret);
fr->in_use = 1; fr->in_use = 1;
dv->ev->fq++; dv->ev->fq++;
} }
@ -449,7 +448,7 @@ _emotion_frame_display(vo_driver_t *vo_driver, vo_frame_t *vo_frame)
static void static void
_emotion_frame_field(vo_frame_t *vo_frame __UNUSED__, int which_field __UNUSED__) _emotion_frame_field(vo_frame_t *vo_frame __UNUSED__, int which_field __UNUSED__)
{ {
// printf("emotion: _emotion_frame_field()\n"); // DBG("");
} }
/***************************************************************************/ /***************************************************************************/
@ -476,7 +475,7 @@ _emotion_frame_data_free(Emotion_Frame *fr)
static void static void
_emotion_frame_data_unlock(Emotion_Frame *fr) _emotion_frame_data_unlock(Emotion_Frame *fr)
{ {
// printf("emotion: _emotion_frame_data_unlock()\n"); // DBG("");
if (fr->in_use) if (fr->in_use)
{ {
fr->vo_frame.free(&fr->vo_frame); fr->vo_frame.free(&fr->vo_frame);
@ -488,13 +487,13 @@ _emotion_frame_data_unlock(Emotion_Frame *fr)
static void static void
_emotion_overlay_begin(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame __UNUSED__, int changed __UNUSED__) _emotion_overlay_begin(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame __UNUSED__, int changed __UNUSED__)
{ {
// printf("emotion: _emotion_overlay_begin()\n"); // DBG("");
} }
static void static void
_emotion_overlay_end(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame __UNUSED__) _emotion_overlay_end(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame __UNUSED__)
{ {
// printf("emotion: _emotion_overlay_end()\n"); // DBG("");
} }
static void static void
@ -503,7 +502,7 @@ _emotion_overlay_blend(vo_driver_t *vo_driver __UNUSED__, vo_frame_t *vo_frame,
Emotion_Frame *fr; Emotion_Frame *fr;
fr = (Emotion_Frame *)vo_frame; fr = (Emotion_Frame *)vo_frame;
// printf("emotion: _emotion_overlay_blend()\n"); // DBG("");
_emotion_overlay_blend_yuv(fr->vo_frame.base, vo_overlay, _emotion_overlay_blend_yuv(fr->vo_frame.base, vo_overlay,
fr->width, fr->height, fr->width, fr->height,
fr->vo_frame.pitches); fr->vo_frame.pitches);