-rw-r--r--   ChangeLog                                     1
-rw-r--r--   NEWS                                          1
-rw-r--r--   src/lib/evas/cache/evas_cache_image.c        64
-rw-r--r--   src/lib/evas/canvas/evas_async_events.c      26
-rw-r--r--   src/lib/evas/common/evas_image_scalecache.c  96
-rw-r--r--   src/lib/evas/include/evas_common_private.h   15
6 files changed, 106 insertions, 97 deletions
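
This commit replaces Eina_Lock (a mutex) with Eina_Spinlock around several short critical sections in Evas. For reference, here is a minimal standalone sketch of the Eina spinlock calls the change relies on; the example is illustrative and is not part of the commit:

#include <Eina.h>

static Eina_Spinlock counter_lock;        /* guards 'counter' below */
static unsigned long long counter = 0;

static void
counter_bump(void)
{
   /* spinlocks suit very short critical sections like this one */
   eina_spinlock_take(&counter_lock);
   counter++;
   eina_spinlock_release(&counter_lock);
}

int
main(void)
{
   if (!eina_init()) return -1;
   eina_spinlock_new(&counter_lock);
   counter_bump();
   eina_spinlock_free(&counter_lock);
   eina_shutdown();
   return 0;
}
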
diff --git a/ChangeLog b/ChangeLog
index 9113fc174c..4109be5e7c 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -4,6 +4,7 @@
   use Eina_Spinlock in Eina_Log, replace Eina_Lock by Eina_Spinlock in Eina_Stringshare,
   Eina_Chained_Mempool.
 * Eet: replace Eina_Lock by Eina_Spinlock in Eet_Dictionnary.
+* Evas: replace Eina_Lock by Eina_Spinlock in Evas_ScaleCache, Evas_Async_Events and Image_Entry.
 
 2013-10-10 Carsten Haitzler (The Rasterman)
 
diff --git a/NEWS b/NEWS
index 64bb665ec2..f8895cc595 100644
--- a/NEWS
+++ b/NEWS
@@ -224,6 +224,7 @@ Improvements:
     - Add neon assembly for upscaling and map routines
     - Use mmap/munmap for image data allocation on system that have mmap.
     - Add iterator for walking child of smart objects, table and a box.
+    - Use Eina_Spinlock for Evas_ScaleCache, Evas_Async_Events and Image_Entry.
    * Ecore_Con:
     - Rebase dns.c against upstream
     - URL support now dynamically loads libcurl at runtime via eina_module.
diff --git a/src/lib/evas/cache/evas_cache_image.c b/src/lib/evas/cache/evas_cache_image.c
index d8ed6555c6..2e2e277cac 100644
--- a/src/lib/evas/cache/evas_cache_image.c
+++ b/src/lib/evas/cache/evas_cache_image.c
@@ -25,7 +25,7 @@ struct _Evas_Cache_Preload
    Image_Entry *ie;
 };
 
-static LK(engine_lock);
+static SLK(engine_lock);
 static LK(wakeup);
 static int _evas_cache_mutex_init = 0;
 
@@ -199,9 +199,9 @@ _evas_cache_image_entry_delete(Evas_Cache_Image *cache, Image_Entry *ie)
    ie->cache = NULL;
    cache->func.surface_delete(ie);
 
-   LKD(ie->lock);
-   LKD(ie->lock_cancel);
-   LKD(ie->lock_task);
+   SLKD(ie->lock);
+   SLKD(ie->lock_cancel);
+   SLKD(ie->lock_task);
    cache->func.dealloc(ie);
 }
 
@@ -271,9 +271,9 @@ _evas_cache_image_entry_new(Evas_Cache_Image *cache,
    if (tstamp) ie->tstamp = *tstamp;
    else memset(&ie->tstamp, 0, sizeof(Image_Timestamp));
 
-   LKI(ie->lock);
-   LKI(ie->lock_cancel);
-   LKI(ie->lock_task);
+   SLKI(ie->lock);
+   SLKI(ie->lock_cancel);
+   SLKI(ie->lock_task);
 
    if (lo) ie->load_opts = *lo;
    if (ie->file || ie->f)
@@ -315,9 +315,9 @@ _evas_cache_image_entry_surface_alloc(Evas_Cache_Image *cache,
 {
    int wmin = w > 0 ? w : 1;
    int hmin = h > 0 ? h : 1;
-   LKL(engine_lock);
+   SLKL(engine_lock);
    _evas_cache_image_entry_surface_alloc__locked(cache, ie, wmin, hmin);
-   LKU(engine_lock);
+   SLKU(engine_lock);
 }
 
 static void
@@ -331,7 +331,7 @@ _evas_cache_image_async_heavy(void *data)
 
    current = data;
 
-   LKL(current->lock);
+   SLKL(current->lock);
    pchannel = current->channel;
    current->channel++;
    cache = current->cache;
@@ -352,7 +352,7 @@ _evas_cache_image_async_heavy(void *data)
           {
              current->flags.loaded = 1;
 
-             LKL(current->lock_task);
+             SLKL(current->lock_task);
              EINA_LIST_FREE(current->tasks, task)
                {
                   if (task != &dummy_task)
@@ -361,12 +361,12 @@ _evas_cache_image_async_heavy(void *data)
                        free(task);
                     }
                }
-             LKU(current->lock_task);
+             SLKU(current->lock_task);
           }
      }
    current->channel = pchannel;
    // check the unload cancel flag
-   LKL(current->lock_cancel);
+   SLKL(current->lock_cancel);
    if (current->flags.unload_cancel)
      {
         current->flags.unload_cancel = EINA_FALSE;
@@ -374,8 +374,8 @@ _evas_cache_image_async_heavy(void *data)
         current->flags.loaded = 0;
         current->flags.preload_done = 0;
      }
-   LKU(current->lock_cancel);
-   LKU(current->lock);
+   SLKU(current->lock_cancel);
+   SLKU(current->lock);
 }
 
 static void
@@ -460,9 +460,9 @@ _evas_cache_image_entry_preload_add(Image_Entry *ie, const Eo *target,
 
    ie->targets = (Evas_Cache_Target *)
       eina_inlist_append(EINA_INLIST_GET(ie->targets), EINA_INLIST_GET(tg));
-   LKL(ie->lock_task);
+   SLKL(ie->lock_task);
    ie->tasks = eina_list_append(ie->tasks, task);
-   LKU(ie->lock_task);
+   SLKU(ie->lock_task);
 
    if (!ie->preload)
      {
@@ -485,7 +485,7 @@ _evas_cache_image_entry_preload_remove(Image_Entry *ie, const Eo *target)
 
    if (target)
      {
-        LKL(ie->lock_task);
+        SLKL(ie->lock_task);
         l = ie->tasks;
         EINA_INLIST_FOREACH(ie->targets, tg)
           {
@@ -499,7 +499,7 @@ _evas_cache_image_entry_preload_remove(Image_Entry *ie, const Eo *target)
                   task = eina_list_data_get(l);
                   ie->tasks = eina_list_remove_list(ie->tasks, l);
                   if (task != &dummy_task) free(task);
-                  LKU(ie->lock_task);
+                  SLKU(ie->lock_task);
 
                   free(tg);
                   break;
@@ -507,7 +507,7 @@ _evas_cache_image_entry_preload_remove(Image_Entry *ie, const Eo *target)
 
              l = eina_list_next(l);
           }
-        LKU(ie->lock_task);
+        SLKU(ie->lock_task);
      }
    else
      {
@@ -520,10 +520,10 @@ _evas_cache_image_entry_preload_remove(Image_Entry *ie, const Eo *target)
              free(tg);
           }
 
-        LKL(ie->lock_task);
+        SLKL(ie->lock_task);
         EINA_LIST_FREE(ie->tasks, task)
           if (task != &dummy_task) free(task);
-        LKU(ie->lock_task);
+        SLKU(ie->lock_task);
      }
 
    if ((!ie->targets) && (ie->preload) && (!ie->flags.pending))
@@ -565,7 +565,7 @@ evas_cache_image_init(const Evas_Cache_Image_Func *cb)
 
    if (_evas_cache_mutex_init++ == 0)
      {
-        LKI(engine_lock);
+        SLKI(engine_lock);
         LKI(wakeup);
         eina_condition_new(&cond_wakeup, &wakeup);
      }
@@ -649,7 +649,7 @@ evas_cache_image_shutdown(Evas_Cache_Image *cache)
    if (--_evas_cache_mutex_init == 0)
      {
         eina_condition_free(&cond_wakeup);
-        LKD(engine_lock);
+        SLKD(engine_lock);
         LKD(wakeup);
      }
 }
@@ -1170,11 +1170,11 @@ evas_cache_image_load_data(Image_Entry *im)
 
    if ((im->flags.loaded) && (!im->animated.animated)) return error;
 
-   LKL(im->lock);
+   SLKL(im->lock);
    im->flags.in_progress = EINA_TRUE;
    error = im->cache->func.load(im);
    im->flags.in_progress = EINA_FALSE;
-   LKU(im->lock);
+   SLKU(im->lock);
 
    im->flags.loaded = 1;
    if (im->cache->func.debug) im->cache->func.debug("load", im);
@@ -1193,23 +1193,23 @@ evas_cache_image_unload_data(Image_Entry *im)
    if (im->flags.in_progress) return;
    evas_cache_image_preload_cancel(im, NULL);
 
-   LKL(im->lock_cancel);
-   if (LKT(im->lock) == EINA_FALSE) /* can't get image lock - busy async load */
+   SLKL(im->lock_cancel);
+   if (SLKT(im->lock) == EINA_FALSE) /* can't get image lock - busy async load */
      {
        im->flags.unload_cancel = EINA_TRUE;
-       LKU(im->lock_cancel);
+       SLKU(im->lock_cancel);
        return;
      }
-   LKU(im->lock_cancel);
+   SLKU(im->lock_cancel);
 
    if ((!im->flags.loaded) || (!im->file && !im->f) || (!im->info.module) ||
        (im->flags.dirty))
      {
-        LKU(im->lock);
+        SLKU(im->lock);
        return;
      }
    im->cache->func.destructor(im);
-   LKU(im->lock);
+   SLKU(im->lock);
    //FIXME: imagedataunload - inform owners
 }
 
diff --git a/src/lib/evas/canvas/evas_async_events.c b/src/lib/evas/canvas/evas_async_events.c
index dd3c593a51..0990ec8a95 100644
--- a/src/lib/evas/canvas/evas_async_events.c
+++ b/src/lib/evas/canvas/evas_async_events.c
@@ -37,7 +37,7 @@ static Eina_Condition _thread_feedback_cond;
 
 static int _thread_loop = 0;
 
-static Eina_Lock _thread_id_lock;
+static Eina_Spinlock _thread_id_lock;
 static int _thread_id = -1;
 static int _thread_id_max = 0;
 static int _thread_id_update = 0;
@@ -46,7 +46,7 @@ static int _fd_write = -1;
 static int _fd_read = -1;
 static pid_t _fd_pid = 0;
 
-static Eina_Lock async_lock;
+static Eina_Spinlock async_lock;
 static Eina_Inarray async_queue;
 static Evas_Event_Async *async_queue_cache = NULL;
 static unsigned int async_queue_cache_max = 0;
@@ -99,7 +99,7 @@ evas_async_events_init(void)
    fcntl(_fd_read, F_SETFL, O_NONBLOCK);
 #endif
 
-   eina_lock_new(&async_lock);
+   eina_spinlock_new(&async_lock);
    eina_inarray_step_set(&async_queue, sizeof (Eina_Inarray), sizeof (Evas_Event_Async), 16);
 
    eina_lock_new(&_thread_mutex);
@@ -108,7 +108,7 @@ evas_async_events_init(void)
    eina_lock_new(&_thread_feedback_mutex);
    eina_condition_new(&_thread_feedback_cond, &_thread_feedback_mutex);
 
-   eina_lock_new(&_thread_id_lock);
+   eina_spinlock_new(&_thread_id_lock);
 
    return _init_evas_event;
 }
@@ -123,9 +123,9 @@ evas_async_events_shutdown(void)
    eina_lock_free(&_thread_mutex);
    eina_condition_free(&_thread_feedback_cond);
    eina_lock_free(&_thread_feedback_mutex);
-   eina_lock_free(&_thread_id_lock);
+   eina_spinlock_free(&_thread_id_lock);
 
-   eina_lock_free(&async_lock);
+   eina_spinlock_free(&async_lock);
    eina_inarray_flush(&async_queue);
    free(async_queue_cache);
 
@@ -180,7 +180,7 @@ _evas_async_events_process_single(void)
    unsigned int len, max;
    int nr;
 
-   eina_lock_take(&async_lock);
+   eina_spinlock_take(&async_lock);
 
    ev = async_queue.members;
    async_queue.members = async_queue_cache;
@@ -193,7 +193,7 @@ _evas_async_events_process_single(void)
    len = async_queue.len;
    async_queue.len = 0;
 
-   eina_lock_release(&async_lock);
+   eina_spinlock_release(&async_lock);
 
    DBG("Evas async events queue length: %u", len);
    nr = len;
@@ -269,13 +269,13 @@ evas_async_events_put(const void *target, Evas_Callback_Type type, void *event_i
 
    _evas_async_events_fork_handle();
 
-   eina_lock_take(&async_lock);
+   eina_spinlock_take(&async_lock);
 
    count = async_queue.len;
    ev = eina_inarray_grow(&async_queue, 1);
    if (!ev)
      {
-        eina_lock_release(&async_lock);
+        eina_spinlock_release(&async_lock);
        return EINA_FALSE;
      }
 
@@ -284,7 +284,7 @@ evas_async_events_put(const void *target, Evas_Callback_Type type, void *event_i
    ev->type = type;
    ev->event_info = event_info;
 
-   eina_lock_release(&async_lock);
+   eina_spinlock_release(&async_lock);
 
    if (count == 0)
      {
@@ -365,14 +365,14 @@ evas_thread_main_loop_begin(void)
    order = malloc(sizeof (Evas_Safe_Call));
    if (!order) return -1;
 
-   eina_lock_take(&_thread_id_lock);
+   eina_spinlock_take(&_thread_id_lock);
    order->current_id = ++_thread_id_max;
    if (order->current_id < 0)
      {
        _thread_id_max = 0;
        order->current_id = ++_thread_id_max;
      }
-   eina_lock_release(&_thread_id_lock);
+   eina_spinlock_release(&_thread_id_lock);
 
    eina_lock_new(&order->m);
    eina_condition_new(&order->c, &order->m);
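
In evas_thread_main_loop_begin() above, the spinlock now only guards a couple of integer operations (allocating the next id and handling overflow), which is the kind of short critical section where a spinlock is typically cheaper than a full mutex. A rough sketch of that idiom, using hypothetical names (_id_lock, next_id):

#include <Eina.h>

static Eina_Spinlock _id_lock;
static int _id_max = 0;

/* Returns a strictly positive id, wrapping back to 1 on overflow. */
static int
next_id(void)
{
   int id;

   eina_spinlock_take(&_id_lock);
   id = ++_id_max;
   if (id < 0)          /* overflowed: restart the sequence */
     {
        _id_max = 0;
        id = ++_id_max;
     }
   eina_spinlock_release(&_id_lock);
   return id;
}
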
diff --git a/src/lib/evas/common/evas_image_scalecache.c b/src/lib/evas/common/evas_image_scalecache.c
index 490d3c8d3f..fc907614c8 100644
--- a/src/lib/evas/common/evas_image_scalecache.c
+++ b/src/lib/evas/common/evas_image_scalecache.c
@@ -57,7 +57,7 @@ struct _Scaleitem
 #ifdef SCALECACHE
 static unsigned long long use_counter = 0;
 
-static LK(cache_lock);
+static SLK(cache_lock);
 static Eina_Inlist *cache_list = NULL;
 static unsigned int cache_size = 0;
 static int init = 0;
@@ -121,7 +121,7 @@ evas_common_scalecache_init(void)
    init++;
    if (init > 1) return;
    use_counter = 0;
-   LKI(cache_lock);
+   SLKI(cache_lock);
    s = getenv("EVAS_SCALECACHE_SIZE");
    if (s) max_cache_size = atoi(s) * 1024;
    s = getenv("EVAS_SCALECACHE_MAX_DIMENSION");
@@ -141,7 +141,7 @@ evas_common_scalecache_shutdown(void)
 #ifdef SCALECACHE
    init--;
    if (init ==0)
-     LKD(cache_lock);
+     SLKD(cache_lock);
 #endif
 }
 
@@ -151,7 +151,7 @@ evas_common_rgba_image_scalecache_init(Image_Entry *ie)
 #ifdef SCALECACHE
    RGBA_Image *im = (RGBA_Image *)ie;
    // NOTE: this conflicts with evas image cache init and del of lock
-   LKI(im->cache.lock);
+   SLKI(im->cache.lock);
 #endif
 }
 
@@ -162,7 +162,7 @@ evas_common_rgba_image_scalecache_shutdown(Image_Entry *ie)
    RGBA_Image *im = (RGBA_Image *)ie;
    evas_common_rgba_image_scalecache_dirty(ie);
    // NOTE: this conflicts with evas image cache init and del of lock
-   LKD(im->cache.lock);
+   SLKD(im->cache.lock);
 #endif
 }
 
@@ -172,7 +172,7 @@ evas_common_rgba_image_scalecache_dirty(Image_Entry *ie)
 #ifdef SCALECACHE
    RGBA_Image *im = (RGBA_Image *)ie;
 
-   LKL(im->cache.lock);
+   SLKL(im->cache.lock);
    while (im->cache.list)
      {
         Scaleitem *sci = im->cache.list->data;
@@ -180,7 +180,7 @@ evas_common_rgba_image_scalecache_dirty(Image_Entry *ie)
         im->cache.list = eina_list_remove(im->cache.list, sci);
         if ((sci->im) && (sci->im->cache_entry.references == 0))
           {
-             LKL(cache_lock);
+             SLKL(cache_lock);
 
              evas_common_rgba_image_free(&sci->im->cache_entry);
              sci->im = NULL;
@@ -191,7 +191,7 @@ evas_common_rgba_image_scalecache_dirty(Image_Entry *ie)
             cache_size -= sci->size_adjust;
             cache_list = eina_inlist_remove(cache_list, (Eina_Inlist *)sci);
 
-             LKU(cache_lock);
+             SLKU(cache_lock);
           }
 
         if (!sci->im)
@@ -199,7 +199,7 @@ evas_common_rgba_image_scalecache_dirty(Image_Entry *ie)
      }
    eina_hash_free(im->cache.hash);
    im->cache.hash = NULL;
-   LKU(im->cache.lock);
+   SLKU(im->cache.lock);
 #endif
 }
 
@@ -208,13 +208,13 @@ evas_common_rgba_image_scalecache_orig_use(Image_Entry *ie)
 {
 #ifdef SCALECACHE
    RGBA_Image *im = (RGBA_Image *)ie;
-   LKL(im->cache.lock);
+   SLKL(im->cache.lock);
    use_counter++;
    // FIXME: if orig not loaded, reload
    // FIXME: mark orig with current used counter
    im->cache.orig_usage++;
    im->cache.usage_count = use_counter;
-   LKU(im->cache.lock);
+   SLKU(im->cache.lock);
 #endif
 }
 
@@ -226,12 +226,12 @@ evas_common_rgba_image_scalecache_usage_get(Image_Entry *ie)
    int size = 0;
    Eina_List *l;
    Scaleitem *sci;
-   LKL(im->cache.lock);
+   SLKL(im->cache.lock);
    EINA_LIST_FOREACH(im->cache.list, l, sci)
      {
         if (sci->im) size += sci->key.dst_w * sci->key.dst_h * 4;
      }
-   LKU(im->cache.lock);
+   SLKU(im->cache.lock);
    return size;
 #else
    return 0;
@@ -247,7 +247,7 @@ evas_common_rgba_image_scalecache_items_ref(Image_Entry *ie, Eina_Array *ret)
    Eina_List *l;
    Scaleitem *sci;
 
-   LKL(im->cache.lock);
+   SLKL(im->cache.lock);
    EINA_LIST_FOREACH(im->cache.list, l, sci)
      {
         if (sci->im)
@@ -258,7 +258,7 @@ evas_common_rgba_image_scalecache_items_ref(Image_Entry *ie, Eina_Array *ret)
              eina_array_push(ret, scie);
           }
      }
-   LKU(im->cache.lock);
+   SLKU(im->cache.lock);
 #endif
 }
 
@@ -435,13 +435,13 @@ EAPI void
 evas_common_rgba_image_scalecache_size_set(unsigned int size)
 {
 #ifdef SCALECACHE
-   LKL(cache_lock);
+   SLKL(cache_lock);
    if (size != max_cache_size)
      {
         max_cache_size = size;
         _cache_prune(NULL, 1);
      }
-   LKU(cache_lock);
+   SLKU(cache_lock);
 #endif
 }
 
@@ -450,9 +450,9 @@ evas_common_rgba_image_scalecache_size_get(void)
 {
 #ifdef SCALECACHE
    int t;
-   LKL(cache_lock);
+   SLKL(cache_lock);
    t = max_cache_size;
-   LKU(cache_lock);
+   SLKU(cache_lock);
    return t;
 #else
    return 0;
@@ -463,9 +463,9 @@ EAPI void
 evas_common_rgba_image_scalecache_prune(void)
 {
 #ifdef SCALECACHE
-   LKL(cache_lock);
+   SLKL(cache_lock);
    _cache_prune(NULL, 0);
-   LKU(cache_lock);
+   SLKU(cache_lock);
 #endif
 }
 
@@ -474,12 +474,12 @@ evas_common_rgba_image_scalecache_dump(void)
 {
 #ifdef SCALECACHE
    int t;
-   LKL(cache_lock);
+   SLKL(cache_lock);
    t = max_cache_size;
    max_cache_size = 0;
    _cache_prune(NULL, 0);
    max_cache_size = t;
-   LKU(cache_lock);
+   SLKU(cache_lock);
 #endif
 }
 
@@ -488,12 +488,12 @@ evas_common_rgba_image_scalecache_flush(void)
 {
 #ifdef SCALECACHE
    int t;
-   LKL(cache_lock);
+   SLKL(cache_lock);
    t = max_cache_size;
    max_cache_size = 0;
    _cache_prune(NULL, 1);
    max_cache_size = t;
-   LKU(cache_lock);
+   SLKU(cache_lock);
 #endif
 }
 
@@ -513,11 +513,11 @@ evas_common_rgba_image_scalecache_prepare(Image_Entry *ie, RGBA_Image *dst EINA_
    if (!im->image.data) return;
    if ((dst_region_w == 0) || (dst_region_h == 0) ||
        (src_region_w == 0) || (src_region_h == 0)) return;
-   // was having major lock issues here - LKL was deadlocking. what was
+   // was having major lock issues here - SLKL was deadlocking. what was
    // going on? it may have been an eina treads badness but this will stay here
    // for now for debug
 #if 1
-   ret = LKT(im->cache.lock);
+   ret = SLKT(im->cache.lock);
    if (ret == EINA_FALSE) /* can't get image lock */
      {
         useconds_t slp = 1, slpt = 0;
@@ -531,7 +531,7 @@ evas_common_rgba_image_scalecache_prepare(Image_Entry *ie, RGBA_Image *dst EINA_
 #endif
              slpt += slp;
              slp++;
-             ret = LKT(im->cache.lock);
+             ret = SLKT(im->cache.lock);
              if (ret == EINA_LOCK_DEADLOCK)
                {
                   printf("WARNING: DEADLOCK on image %p (%s)\n", im, ie->file);
@@ -546,7 +546,7 @@ evas_common_rgba_image_scalecache_prepare(Image_Entry *ie, RGBA_Image *dst EINA_
           {
              printf("WARNING: lock still there after %i usec\n", slpt);
              printf("WARNING: stucklock on image %p (%s)\n", im, ie->file);
-             LKDBG(im->cache.lock);
+             /* SLKDBG(im->cache.lock); */
           }
      }
    else if (ret == EINA_LOCK_DEADLOCK)
@@ -555,14 +555,14 @@ evas_common_rgba_image_scalecache_prepare(Image_Entry *ie, RGBA_Image *dst EINA_
      }
    else locked = 1;
 #endif
-   if (!locked) { LKL(im->cache.lock); locked = 1; }
+   if (!locked) { SLKL(im->cache.lock); locked = 1; }
    use_counter++;
    if ((src_region_w == dst_region_w) && (src_region_h == dst_region_h))
      {
         // 1:1 scale.
         im->cache.orig_usage++;
         im->cache.usage_count = use_counter;
-        if (locked) LKU(im->cache.lock);
+        if (locked) SLKU(im->cache.lock);
         return;
      }
    if ((!im->cache_entry.flags.alpha) && (!smooth))
@@ -571,17 +571,17 @@ evas_common_rgba_image_scalecache_prepare(Image_Entry *ie, RGBA_Image *dst EINA_
         // or in some cases faster not cached
         im->cache.orig_usage++;
         im->cache.usage_count = use_counter;
-        if (locked) LKU(im->cache.lock);
+        if (locked) SLKU(im->cache.lock);
         return;
      }
-   LKL(cache_lock);
+   SLKL(cache_lock);
    sci = _sci_find(im, dc, smooth,
                    src_region_x, src_region_y, src_region_w, src_region_h,
                    dst_region_w, dst_region_h);
    if (!sci)
      {
-        LKU(cache_lock);
-        if (locked) LKU(im->cache.lock);
+        SLKU(cache_lock);
+        if (locked) SLKU(im->cache.lock);
         return;
      }
 // INF("%10i | %4i %4i %4ix%4i -> %4i %4i %4ix%4i | %i",
@@ -609,7 +609,7 @@ evas_common_rgba_image_scalecache_prepare(Image_Entry *ie, RGBA_Image *dst EINA_
      }
    sci->usage++;
    sci->usage_count = use_counter;
-   LKU(cache_lock);
+   SLKU(cache_lock);
    if (sci->usage > im->cache.newest_usage)
      im->cache.newest_usage = sci->usage;
 // INF("newset? %p %i > %i", im,
@@ -618,7 +618,7 @@ evas_common_rgba_image_scalecache_prepare(Image_Entry *ie, RGBA_Image *dst EINA_
    if (sci->usage_count > im->cache.newest_usage_count)
      im->cache.newest_usage_count = sci->usage_count;
 // INF(" -------------- used %8i#, %8i@", (int)sci->usage, (int)sci->usage_count);
-   if (locked) LKU(im->cache.lock);
+   if (locked) SLKU(im->cache.lock);
 #endif
 }
 
@@ -682,11 +682,11 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
           }
         return EINA_FALSE;
      }
-   LKL(cache_lock);
+   SLKL(cache_lock);
    sci = _sci_find(im, dc, smooth,
                    src_region_x, src_region_y, src_region_w, src_region_h,
                    dst_region_w, dst_region_h);
-   LKU(cache_lock);
+   SLKU(cache_lock);
    if (!sci)
      {
         if (im->cache_entry.space == EVAS_COLORSPACE_ARGB8888)
@@ -718,7 +718,7 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
           }
         return EINA_FALSE;
      }
-   LKL(im->cache.lock);
+   SLKL(im->cache.lock);
    if (sci->populate_me)
      {
         int size, osize, used;
@@ -768,7 +768,7 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
           {
              static RGBA_Draw_Context *ct = NULL;
 
-             LKL(cache_lock);
+             SLKL(cache_lock);
              im->cache.orig_usage++;
              im->cache.usage_count = use_counter;
              im->cache.populate_count--;
@@ -780,7 +780,7 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
                   ct = evas_common_draw_context_new();
                   evas_common_draw_context_set_render_op(ct, _EVAS_RENDER_COPY);
                }
-             LKU(im->cache.lock);
+             SLKU(im->cache.lock);
              if (im->cache_entry.space == EVAS_COLORSPACE_ARGB8888)
                {
 #ifdef EVAS_CSERVE2
@@ -790,7 +790,7 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
 #endif
                   evas_cache_image_load_data(&im->cache_entry);
                }
-             LKL(im->cache.lock);
+             SLKL(im->cache.lock);
              evas_common_image_colorspace_normalize(im);
              if (im->image.data)
                {
@@ -845,7 +845,7 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
 // sci->dst_w * sci->dst_h * 4, sci->flop,
 // sci->dst_w, sci->dst_h);
              cache_list = eina_inlist_append(cache_list, (Eina_Inlist *)sci);
-             LKU(cache_lock);
+             SLKU(cache_lock);
              didpop = 1;
           }
      }
@@ -853,17 +853,17 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
      {
         if (!didpop)
           {
-             LKL(cache_lock);
+             SLKL(cache_lock);
              cache_list = eina_inlist_remove(cache_list, (Eina_Inlist *)sci);
              cache_list = eina_inlist_append(cache_list, (Eina_Inlist *)sci);
-             LKU(cache_lock);
+             SLKU(cache_lock);
           }
         else
           {
             if (sci->flop >= FLOP_DEL) sci->flop -= FLOP_DEL;
           }
 // INF("use cached!");
-        LKU(im->cache.lock);
+        SLKU(im->cache.lock);
        ret |= cb_sample(sci->im, dst, dc,
                         0, 0,
                         dst_region_w, dst_region_h,
@@ -909,7 +909,7 @@ evas_common_rgba_image_scalecache_do_cbs(Image_Entry *ie, RGBA_Image *dst,
      }
    else
      {
-        LKU(im->cache.lock);
+        SLKU(im->cache.lock);
         if (im->cache_entry.space == EVAS_COLORSPACE_ARGB8888)
           {
 #ifdef EVAS_CSERVE2
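
evas_common_rgba_image_scalecache_prepare() keeps its defensive try-lock loop, now written with SLKT; note that the LKDBG() debug call has no spinlock counterpart and is simply commented out above. A reduced, hedged sketch of that back-off shape follows; the helper name and the 500000 usec threshold are illustrative, not taken from the tree:

#include <Eina.h>
#include <stdio.h>
#include <unistd.h>

/* Try to take 'lock', sleeping with a growing delay, and warn if it still
 * cannot be taken after roughly half a second.  Mirrors the source's
 * "take_try == EINA_FALSE" failure check. */
static Eina_Bool
take_with_backoff(Eina_Spinlock *lock)
{
   useconds_t slp = 1, slpt = 0;

   while (eina_spinlock_take_try(lock) == EINA_FALSE)
     {
        if (slpt > 500000)
          {
             fprintf(stderr, "WARNING: lock still held after %i usec\n", (int)slpt);
             return EINA_FALSE;
          }
        usleep(slp);
        slpt += slp;
        slp++;
     }
   return EINA_TRUE;
}
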
diff --git a/src/lib/evas/include/evas_common_private.h b/src/lib/evas/include/evas_common_private.h
index c07f6e617f..8d8b9f69e6 100644
--- a/src/lib/evas/include/evas_common_private.h
+++ b/src/lib/evas/include/evas_common_private.h
@@ -156,6 +156,13 @@ extern EAPI int _evas_log_dom_global
 # define __ARM_ARCH__ 73
 #endif
 
+#define SLK(x) Eina_Spinlock x
+#define SLKI(x) eina_spinlock_new(&(x))
+#define SLKD(x) eina_spinlock_free(&(x))
+#define SLKL(x) eina_spinlock_take(&(x))
+#define SLKT(x) eina_spinlock_take_try(&(x))
+#define SLKU(x) eina_spinlock_release(&(x))
+
 #define LK(x) Eina_Lock x
 #define LKI(x) eina_lock_new(&(x))
 #define LKD(x) eina_lock_free(&(x))
@@ -600,9 +607,9 @@ struct _Image_Entry
       Evas_Image_Load_Func *loader;
    } info;
 
-   LK(lock);
-   LK(lock_cancel);
-   LK(lock_task);
+   SLK(lock);
+   SLK(lock_cancel);
+   SLK(lock_task);
 
    /* for animation feature */
    Evas_Image_Animated animated;
@@ -796,7 +803,7 @@ struct _RGBA_Image
    } image;
 
    struct {
-      LK(lock);
+      SLK(lock);
       Eina_List *list;
       Eina_Hash *hash;
       unsigned long long orig_usage;
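
The SLK* macros added above mirror the existing LK* family one for one, so converted call sites keep their shape. A minimal usage sketch of the new macros; the translation unit and names below are hypothetical and assume evas_common_private.h is on the include path:

/* Hypothetical translation unit inside Evas; not part of this commit. */
#include "evas_common_private.h"

static SLK(demo_lock);          /* SLK()  declares an Eina_Spinlock */
static int demo_value = 0;

static void
demo_init(void)
{
   SLKI(demo_lock);             /* eina_spinlock_new()     */
}

static void
demo_touch(void)
{
   SLKL(demo_lock);             /* eina_spinlock_take()    */
   demo_value++;
   SLKU(demo_lock);             /* eina_spinlock_release() */
}

static void
demo_shutdown(void)
{
   SLKD(demo_lock);             /* eina_spinlock_free()    */
}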