GCC Code Coverage Report


Directory: src/gate/
File: src/gate/atomics.c
Date: 2025-09-14 13:10:38
              Exec   Total   Coverage
Lines:          99     108      91.7%
Functions:      30      31      96.8%
Branches:       19      28      67.9%

Line Branch Exec Source
1 /* GATE PROJECT LICENSE:
2 +----------------------------------------------------------------------------+
3 | Copyright(c) 2018-2025, Stefan Meislinger <sm@opengate.at> |
4 | All rights reserved. |
5 | |
6 | Redistribution and use in source and binary forms, with or without |
7 | modification, are permitted provided that the following conditions are met:|
8 | |
9 | 1. Redistributions of source code must retain the above copyright notice, |
10 | this list of conditions and the following disclaimer. |
11 | 2. Redistributions in binary form must reproduce the above copyright |
12 | notice, this list of conditions and the following disclaimer in the |
13 | documentation and/or other materials provided with the distribution. |
14 | |
15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"|
16 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
17 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
18 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE |
19 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
20 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
21 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
22 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
23 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
24 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF |
25 | THE POSSIBILITY OF SUCH DAMAGE. |
26 +----------------------------------------------------------------------------+
27 */
28
29 #include "gate/atomics.h"
30 #include "gate/results.h"
31 #include "gate/platforms.h"
32
33 #if !defined(GATE_COMPILER_MSVC15) || defined(GATE_COMPILER_WATCOM)
34 /*#define GATE_USE_STD_ATOMICS 1*/
35 #endif
36
37 #if defined(GATE_USE_STD_ATOMICS)
38 #define GATE_ATOMICS_STDC_IMPL 1
39 #elif defined(GATE_SYS_BEOS)
40 #define GATE_ATOMICS_BEOS_IMPL 1
41 #elif defined(GATE_SYS_DOS) || defined(GATE_SYS_EFI)
42 #define GATE_ATOMICS_VOLATILE_IMPL 1
43 #elif defined(GATE_SYS_WIN) && !defined(GATE_SYS_WIN16)
44 #define GATE_ATOMICS_INTERLOCKED_IMPL 1
45 #elif defined(GATE_COMPILER_GCC) || defined(GATE_SYS_DARWIN)
46 #define GATE_ATOMICS_SYNCLOCK_IMPL 1
47 #else
48 #define GATE_ATOMICS_VOLATILE_IMPL 1
49 #endif
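
For orientation, here is a minimal usage sketch of the atomic integer API whose platform-specific implementations follow. my_object_t and its functions are illustrative only, not part of the GATE sources; the sketch relies on the convention, visible in every backend below, that gate_atomic_int_inc/dec/add return the new value:

#include "gate/atomics.h"

typedef struct my_object
{
    gate_atomic_int_t refcount;
} my_object_t;

static void my_object_acquire(my_object_t* obj)
{
    gate_atomic_int_inc(&obj->refcount);
}

static gate_bool_t my_object_release(my_object_t* obj)
{
    /* gate_atomic_int_dec() returns the decremented value,
       so 0 means this caller dropped the last reference */
    return gate_atomic_int_dec(&obj->refcount) == 0;
}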
50
51 #if defined(GATE_ATOMICS_STDC_IMPL)
52
53 /*
54 #if defined(GATE_SYS_WIN)
55
56 #include <xatomic.h>
57
58 #define atomic_flag _Atomic_flag_t
59 #define atomic_flag_test_and_set(ptr_atomic_flag) _Atomic_flag_test_and_set(ptr_atomic_flag)
60 #define atomic_flag_clear(ptr_atomic_flag) _Atomic_flag_clear(ptr_atomic_flag, memory_order_seq_cst)
61
62 #define atomic_int32 _Uint4_t
63
64 #define atomic_load_int32(ptr_atom_i32) _Atomic_load_4(ptr_atom_i32, memory_order_seq_cst)
65 #define atomic_store_int32(ptr_atom_i32, value) _Atomic_store_4(ptr_atom_i32, value, memory_order_seq_cst)
66 #define atomic_fetch_add_int32(ptr_atom_i32, value) _Atomic_fetch_add_4(ptr_atom_i32, value,memory_order_seq_cst)
67 #define atomic_exchange_int32(ptr_atom_i32, value) _Atomic_exchange_4(ptr_atom_i32, value, memory_order_seq_cst)
68 #define atomic_compare_exchange_strong_int32(ptr_atom_i32, ptr_expected, desired) \
69 _Atomic_compare_exchange_strong_4(ptr_atom_i32, ptr_expected, desired, memory_order_seq_cst, memory_order_seq_cst)
70
71
72 #define atomic_int64 _Uint8_t
73
74 #define atomic_load_int64(ptr_atom_i64) _Atomic_load_8(ptr_atom_i64, memory_order_seq_cst)
75 #define atomic_store_int64(ptr_atom_i64, value) _Atomic_store_8(ptr_atom_i64, value, memory_order_seq_cst)
76 #define atomic_fetch_add_int64(ptr_atom_i64, value) _Atomic_fetch_add_8(ptr_atom_i64, value,memory_order_seq_cst)
77 #define atomic_exchange_int64(ptr_atom_i64, value) _Atomic_exchange_8(ptr_atom_i64, value, memory_order_seq_cst)
78 #define atomic_compare_exchange_strong_int64(ptr_atom_i64, ptr_expected, desired) \
79 _Atomic_compare_exchange_strong_8(ptr_atom_i64, ptr_expected, desired, memory_order_seq_cst, memory_order_seq_cst)
80
81 #if (GATE_ARCH == GATE_ARCH_X86X64) || (GATE_ARCH == GATE_ARCH_X86IA64) || (GATE_ARCH == GATE_ARCH_ARM64)
82
83 # define atomic_intptr atomic_int64
84 # define atomic_load_intptr atomic_load_int64
85 # define atomic_store_intptr atomic_store_int64
86 # define atomic_exchange_intptr atomic_exchange_int64
87 # define atomic_compare_exchange_strong_intptr atomic_compare_exchange_strong_int64
88
89 #else
90 # define atomic_intptr atomic_int32
91 # define atomic_load_intptr atomic_load_int32
92 # define atomic_store_intptr atomic_store_int32
93 # define atomic_exchange_intptr atomic_exchange_int32
94 # define atomic_compare_exchange_strong_intptr atomic_compare_exchange_strong_int32
95 #endif
96
97
98 #else */
99
100 #include <stdatomic.h>
101 #include <threads.h>
102
103 #define atomic_native_int32_t atomic_int_least32_t
104
105 #define atomic_load_int32(ptr_atom_i32) atomic_load(ptr_atom_i32)
106 #define atomic_store_int32(ptr_atom_i32, value) atomic_store(ptr_atom_i32, value)
107 #define atomic_fetch_add_int32(ptr_atom_i32, value) atomic_fetch_add(ptr_atom_i32, value)
108 #define atomic_exchange_int32(ptr_atom_i32, value) atomic_exchange(ptr_atom_i32, value)
109 #define atomic_compare_exchange_strong_int32(ptr_atom_i32, ptr_expected, desired) \
110 atomic_compare_exchange_strong(ptr_atom_i32, ptr_expected, desired)
111
112 #define atomic_native_int64_t atomic_int_least64_t
113 #define atomic_load_int64(ptr_atom_i64) atomic_load(ptr_atom_i64)
114 #define atomic_store_int64(ptr_atom_i64, value) atomic_store(ptr_atom_i64, value)
115 #define atomic_fetch_add_int64(ptr_atom_i64, value) atomic_fetch_add(ptr_atom_i64, value)
116 #define atomic_exchange_int64(ptr_atom_i64, value) atomic_exchange(ptr_atom_i64, value)
117 #define atomic_compare_exchange_strong_int64(ptr_atom_i64, ptr_expected, desired) \
118 atomic_compare_exchange_strong(ptr_atom_i64, ptr_expected, desired)
119
120 #define atomic_load_intptr(ptr_atom_iptr) atomic_load(ptr_atom_iptr)
121 #define atomic_store_intptr(ptr_atom_iptr, value) atomic_store(ptr_atom_iptr, value)
122 #define atomic_fetch_add_intptr(ptr_atom_iptr, value) atomic_fetch_add(ptr_atom_iptr, value)
123 #define atomic_exchange_intptr(ptr_atom_iptr, value) atomic_exchange(ptr_atom_iptr, value)
124 #define atomic_compare_exchange_strong_intptr(ptr_atom_iptr, ptr_expected, desired) \
125 atomic_compare_exchange_strong(ptr_atom_iptr, ptr_expected, desired)
126
127 /* #endif GATE_SYS_WIN */
128
129 static void* gate_atomic_get_thread_id()
130 {
131 thrd_t current_thread = thrd_current();
132 return (void*)current_thread;
133 }
134
135 static void gate_atomic_yield_thread()
136 {
137 thrd_yield();
138 }
139
140 gate_bool_t gate_atomic_flag_set(gate_atomic_flag_t* atom)
141 {
142 return atomic_flag_test_and_set((atomic_flag volatile*)atom);
143 }
144 void gate_atomic_flag_clear(gate_atomic_flag_t* atom)
145 {
146 atomic_flag_clear((atomic_flag volatile*)atom);
147 }
148
149 void gate_atomic_int_init(gate_atomic_int_t* atom, gate_int32_t value)
150 {
151 atomic_store_int32((atomic_native_int32_t volatile*)atom,
152 (atomic_native_int32_t)value);
153 }
154 gate_int32_t gate_atomic_int_set(gate_atomic_int_t* atom, gate_int32_t value)
155 {
156 return (gate_int32_t)atomic_exchange_int32((atomic_native_int32_t volatile*)atom,
157 (atomic_native_int32_t)value);
158 }
159 gate_int32_t gate_atomic_int_get(gate_atomic_int_t* atom)
160 {
161 return (gate_int32_t)atomic_load_int32((atomic_native_int32_t volatile*)atom);
162 }
163 gate_int32_t gate_atomic_int_inc(gate_atomic_int_t* atom)
164 {
165 return gate_atomic_int_add(atom, 1);
166 }
167 gate_int32_t gate_atomic_int_dec(gate_atomic_int_t* atom)
168 {
169 return gate_atomic_int_add(atom, -1);
170 }
171 gate_int32_t gate_atomic_int_add(gate_atomic_int_t* atom, gate_int32_t value)
172 {
173 gate_int32_t prev_value = (gate_int32_t)atomic_fetch_add_int32((atomic_native_int32_t volatile*)atom,
174 (atomic_native_int32_t)value);
175 return prev_value + value;
176 }
177 gate_int32_t gate_atomic_int_xchg_if(gate_atomic_int_t* atom, gate_int32_t comparand, gate_int32_t xchgvalue)
178 {
179 atomic_native_int32_t expected = (atomic_native_int32_t)comparand;
180 atomic_compare_exchange_strong_int32((atomic_native_int32_t volatile*)atom,
181 &expected,
182 (atomic_native_int32_t)xchgvalue);
183 return (gate_int32_t)expected;
184 }
185
186 void gate_atomic_int64_init(gate_atomic_int64_t* atom, gate_int64_t value)
187 {
188 atomic_store_int64((atomic_native_int64_t volatile*)atom,
189 (atomic_native_int64_t)value);
190 }
191 gate_int64_t gate_atomic_int64_set(gate_atomic_int64_t* atom, gate_int64_t value)
192 {
193 return (gate_int64_t)atomic_exchange_int64((atomic_native_int64_t volatile*)atom,
194 (atomic_native_int64_t)value);
195 }
196 gate_int64_t gate_atomic_int64_get(gate_atomic_int64_t* atom)
197 {
198 return (gate_int64_t)atomic_load_int64((atomic_native_int64_t volatile*)atom);
199 }
200 gate_int64_t gate_atomic_int64_inc(gate_atomic_int64_t* atom)
201 {
202 return gate_atomic_int64_add(atom, 1);
203 }
204 gate_int64_t gate_atomic_int64_dec(gate_atomic_int64_t* atom)
205 {
206 return gate_atomic_int64_add(atom, -1);
207 }
208 gate_int64_t gate_atomic_int64_add(gate_atomic_int64_t* atom, gate_int64_t value)
209 {
210 gate_int64_t prev_value = (gate_int64_t)atomic_fetch_add_int64((atomic_native_int64_t volatile*)atom,
211 (atomic_native_int64_t)value);
212 return prev_value + value;
213 }
214 gate_int64_t gate_atomic_int64_xchg_if(gate_atomic_int64_t* atom, gate_int64_t comparand, gate_int64_t xchgvalue)
215 {
216 atomic_native_int64_t expected = (atomic_native_int64_t)comparand;
217 atomic_compare_exchange_strong_int64((atomic_native_int64_t volatile*)atom,
218 &expected,
219 (atomic_native_int64_t)xchgvalue);
220 return (gate_int64_t)expected;
221 }
222
223 void gate_atomic_ptr_init(gate_atomic_ptr_t* atom, void* value)
224 {
225 atomic_store_intptr((atomic_intptr_t volatile*)atom,
226 (atomic_intptr_t)value);
227 }
228 void* gate_atomic_ptr_set(gate_atomic_ptr_t* atom, void* value)
229 {
230 return (void*)atomic_exchange_intptr((atomic_intptr_t volatile*)atom,
231 (atomic_intptr_t)value);
232 }
233 void* gate_atomic_ptr_get(gate_atomic_ptr_t* atom)
234 {
235 return (void*)atomic_load_intptr((atomic_intptr_t volatile*)atom);
236 }
237 void* gate_atomic_ptr_xchg_if(gate_atomic_ptr_t* atom, void* comparand, void* xchgvalue)
238 {
239 atomic_intptr_t expected = (atomic_intptr_t)comparand;
240 atomic_compare_exchange_strong_intptr((atomic_intptr_t volatile*)atom,
241 &expected,
242 (atomic_intptr_t)xchgvalue);
243 return (void*)expected;
244 }
245
246 #endif /* GATE_ATOMICS_STDC_IMPL */
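
A note on the compare-exchange wrappers above: C11's atomic_compare_exchange_strong stores the observed value into *expected on failure and leaves it equal to the comparand on success, so returning expected always yields the prior value of the atomic. A self-contained sketch of that contract (plain C11, not GATE code):

#include <stdatomic.h>
#include <stdint.h>
#include <assert.h>

int main(void)
{
    atomic_int_least32_t a;
    atomic_init(&a, 5);

    int_least32_t expected = 7;                  /* wrong guess: CAS fails */
    atomic_compare_exchange_strong(&a, &expected, 9);
    assert(expected == 5);                       /* 'expected' now holds the observed value */

    expected = 5;                                /* right guess: CAS succeeds */
    atomic_compare_exchange_strong(&a, &expected, 9);
    assert(expected == 5);
    assert(atomic_load(&a) == 9);
    return 0;
}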
247
248 #if defined(GATE_ATOMICS_INTERLOCKED_IMPL)
249
250 #if defined(GATE_SYS_WINCE)
251
252 #define IL_Exchange(target, value) InterlockedExchange((LPLONG)(target), value)
253 #define IL_CompareExchange(dest, exch, op) InterlockedCompareExchange((LPLONG)(dest), exch, op)
254 #define IL_Increment(addend) InterlockedIncrement((LPLONG)(addend))
255 #define IL_Decrement(addend) InterlockedDecrement((LPLONG)(addend))
256 #define IL_ExchangeAdd(addend, incr) InterlockedExchangeAdd((LPLONG)(addend), incr)
257 #define IL_ExchangePtr(target, value) InterlockedExchangePointer((PVOID *)(target), value)
258 #define IL_CompareExchangePtr(dest, exch, op) InterlockedCompareExchangePointer((PVOID *)(dest), exch, op)
259
260 #define GATE_WIN32_ATOMIC_64BIT_MISSING 1
261
262 #else
263
264 #if defined(GATE_COMPILER_MSVC05)
265 #include <intrin.h>
266 #define InterlockedExchange _InterlockedExchange
267 #define InterlockedCompareExchange _InterlockedCompareExchange
268 #define InterlockedIncrement _InterlockedIncrement
269 #define InterlockedDecrement _InterlockedDecrement
270 #define InterlockedExchangeAdd _InterlockedExchangeAdd
271 #endif
272
273 #define IL_Exchange InterlockedExchange
274
275 #if defined(GATE_WIN32_UNICODE)
276 #define IL_CompareExchange InterlockedCompareExchange
277 #define IL_ExchangeAdd InterlockedExchangeAdd
278 #define IL_Increment InterlockedIncrement
279 #define IL_Decrement InterlockedDecrement
280 #else
281 /* support Windows 95 and NT 3.5 */
282 #define IL_CompareExchange(ptr_target, xchg_value, comp_value) \
283 gate_win32_atomic_compare_exchange((gate_int32_t volatile *)(ptr_target), (xchg_value), (comp_value))
284 #define IL_ExchangeAdd(ptr_target, add_value) \
285 gate_win32_atomic_exchange_add((gate_int32_t volatile *)(ptr_target), (add_value))
286 #define IL_Increment(ptr_target) \
287 gate_win32_atomic_increment((gate_int32_t volatile *)ptr_target)
288 #define IL_Decrement(ptr_target) \
289 gate_win32_atomic_decrement((gate_int32_t volatile *)ptr_target)
290 #endif
291
292 #if defined(GATE_COMPILER_MSVC98) || (defined(GATE_WIN32_ANSI) && (GATE_ARCH == GATE_ARCH_X86IA32))
293 #define IL_ExchangePtr(target, value) ((void *)IL_Exchange((LONG volatile *)(target), (LONG)value))
294 #define IL_CompareExchangePtr(dest, exch, op) ((void *)IL_CompareExchange((LONG volatile *)(dest), (LONG)exch, (LONG)op))
295 #else
296 #define IL_ExchangePtr InterlockedExchangePointer
297 #define IL_CompareExchangePtr InterlockedCompareExchangePointer
298 #endif
299
300 #if (GATE_ARCH == GATE_ARCH_X86AMD64)
301 #define IL_Exchange64 InterlockedExchange64
302 #define IL_CompareExchange64 InterlockedCompareExchange64
303 #define IL_Increment64 InterlockedIncrement64
304 #define IL_Decrement64 InterlockedDecrement64
305 #define IL_ExchangeAdd64 InterlockedExchangeAdd64
306 #else
307 #define GATE_WIN32_ATOMIC_64BIT_MISSING 1
308 #endif
309 #endif
310
311 #if (GATE_WIN32_ATOMIC_64BIT_MISSING)
312
313 static gate_atomic_lock_t gate_win32_global_64bit_lock = GATE_ATOMIC_LOCK_INIT;
314
315 static LONGLONG IL_Exchange64(LONG64 volatile* target, LONG64 value)
316 {
317 LONGLONG ret;
318 gate_atomic_lock_acquire(&gate_win32_global_64bit_lock);
319 ret = *target;
320 *target = value;
321 gate_atomic_lock_release(&gate_win32_global_64bit_lock);
322 return ret;
323 }
324
325 static LONGLONG IL_CompareExchange64(LONG64 volatile* target, LONG64 exchange, LONG64 comparand)
326 {
327 LONGLONG ret;
328 gate_atomic_lock_acquire(&gate_win32_global_64bit_lock);
329 ret = *target;
330 if (*target == comparand)
331 {
332 *target = exchange;
333 }
334 gate_atomic_lock_release(&gate_win32_global_64bit_lock);
335 return ret;
336 }
337
338 static LONGLONG IL_Increment64(LONGLONG volatile* target)
339 {
340 LONGLONG ret;
341 gate_atomic_lock_acquire(&gate_win32_global_64bit_lock);
342 ret = ++(*target);
343 gate_atomic_lock_release(&gate_win32_global_64bit_lock);
344 return ret;
345 }
346 static LONGLONG IL_Decrement64(LONGLONG volatile* target)
347 {
348 LONGLONG ret;
349 gate_atomic_lock_acquire(&gate_win32_global_64bit_lock);
350 ret = --(*target);
351 gate_atomic_lock_release(&gate_win32_global_64bit_lock);
352 return ret;
353 }
354 static LONGLONG IL_ExchangeAdd64(LONGLONG volatile* target, LONGLONG value)
355 {
356 LONGLONG ret;
357 gate_atomic_lock_acquire(&gate_win32_global_64bit_lock);
358 ret = *target; /* return the value before the addition, matching InterlockedExchangeAdd64 */
359 *target += value;
360 gate_atomic_lock_release(&gate_win32_global_64bit_lock);
361 return ret;
362 }
363
364 #endif
365
366 static void* gate_atomic_get_thread_id()
367 {
368 return (void*)(gate_uintptr_t)gate_win32_get_thread_id();
369 }
370
371 static void gate_atomic_yield_thread()
372 {
373 Sleep(0);
374 }
375
376 gate_bool_t gate_atomic_flag_set(gate_atomic_flag_t* atom)
377 {
378 return IL_Exchange((LONG volatile*)atom, 1) ? true : false;
379 }
380 void gate_atomic_flag_clear(gate_atomic_flag_t* atom)
381 {
382 IL_Exchange((LONG volatile*)atom, 0);
383 }
384
385 void gate_atomic_int_init(gate_atomic_int_t* atom, gate_int32_t value)
386 {
387 *atom = value;
388 }
389 gate_int32_t gate_atomic_int_set(gate_atomic_int_t* atom, gate_int32_t value)
390 {
391 return (gate_int32_t)IL_Exchange((LONG volatile*)atom, (LONG)value);
392 }
393 gate_int32_t gate_atomic_int_get(gate_atomic_int_t* atom)
394 {
395 return (gate_int32_t)IL_CompareExchange((LONG volatile*)atom, 0, 0);
396 }
397 gate_int32_t gate_atomic_int_inc(gate_atomic_int_t* atom)
398 {
399 return (gate_int32_t)IL_Increment((LONG volatile*)atom);
400 }
401 gate_int32_t gate_atomic_int_dec(gate_atomic_int_t* atom)
402 {
403 return (gate_int32_t)IL_Decrement((LONG volatile*)atom);
404 }
405 gate_int32_t gate_atomic_int_add(gate_atomic_int_t* atom, gate_int32_t value)
406 {
407 return (gate_int32_t)IL_ExchangeAdd((LONG volatile*)atom, (LONG)value) + value;
408 }
409 gate_int32_t gate_atomic_int_xchg_if(gate_atomic_int_t* atom, gate_int32_t comparand, gate_int32_t xchgvalue)
410 {
411 return (gate_int32_t)IL_CompareExchange((LONG volatile*)atom, (LONG)xchgvalue, (LONG)comparand);
412 }
413
414 void gate_atomic_int64_init(gate_atomic_int64_t* atom, gate_int64_t value)
415 {
416 *atom = value;
417 }
418 gate_int64_t gate_atomic_int64_set(gate_atomic_int64_t* atom, gate_int64_t value)
419 {
420 return IL_Exchange64(atom, value);
421 }
422 gate_int64_t gate_atomic_int64_get(gate_atomic_int64_t* atom)
423 {
424 return IL_CompareExchange64(atom, 0, 0);
425 }
426 gate_int64_t gate_atomic_int64_inc(gate_atomic_int64_t* atom)
427 {
428 return IL_Increment64(atom);
429 }
430 gate_int64_t gate_atomic_int64_dec(gate_atomic_int64_t* atom)
431 {
432 return IL_Decrement64(atom);
433 }
434 gate_int64_t gate_atomic_int64_add(gate_atomic_int64_t* atom, gate_int64_t value)
435 {
436 return IL_ExchangeAdd64(atom, value) + value;
437 }
438 gate_int64_t gate_atomic_int64_xchg_if(gate_atomic_int64_t* atom, gate_int64_t comparand, gate_int64_t xchgvalue)
439 {
440 return IL_CompareExchange64(atom, xchgvalue, comparand);
441 }
442
443 void gate_atomic_ptr_init(gate_atomic_ptr_t* atom, void* value)
444 {
445 *atom = value;
446 }
447 void* gate_atomic_ptr_set(gate_atomic_ptr_t* atom, void* value)
448 {
449 return (void*)(IL_ExchangePtr(atom, value));
450 }
451 void* gate_atomic_ptr_get(gate_atomic_ptr_t* atom)
452 {
453 return (void*)(IL_CompareExchangePtr(atom, NULL, NULL));
454 }
455 void* gate_atomic_ptr_xchg_if(gate_atomic_ptr_t* atom, void* comparand, void* xchgvalue)
456 {
457 return (void*)(IL_CompareExchangePtr(atom, xchgvalue, comparand));
458 }
459
460 #endif /* GATE_ATOMICS_INTERLOCKED_IMPL */
461
462 #ifdef GATE_ATOMICS_SYNCLOCK_IMPL
463
464 #if defined(GATE_SYS_POSIX)
465
466 #include <pthread.h>
467 #include <sched.h>
468
469 12 static void* gate_atomic_get_thread_id()
470 {
471 12 return (void*)(gate_uintptr_t)pthread_self();
472 }
473
474 static void gate_atomic_yield_thread()
475 {
476 sched_yield();
477 }
478
479 #endif /* GATE_SYS_POSIX */
480
481 104 gate_bool_t gate_atomic_flag_set(gate_atomic_flag_t* atom)
482 {
483 104 return __sync_lock_test_and_set((int*)atom, (int)1) ? true : false;
484 }
485 3 void gate_atomic_flag_clear(gate_atomic_flag_t* atom)
486 {
487 3 __sync_lock_release((int*)atom); /* writes 0 with release semantics; test_and_set may only store 1 on some targets */
488 3 }
489
490 2120 void gate_atomic_int_init(gate_atomic_int_t* atom, gate_int32_t value)
491 {
492 2120 *atom = value;
493 2120 }
494 117 gate_int32_t gate_atomic_int_set(gate_atomic_int_t* atom, gate_int32_t value)
495 {
496 117 return (gate_int32_t)__sync_lock_test_and_set(atom, value);
497 }
498 271 gate_int32_t gate_atomic_int_get(gate_atomic_int_t* atom)
499 {
500 271 return (gate_int32_t)__sync_val_compare_and_swap(atom, 0, 0);
501 }
502 13194 gate_int32_t gate_atomic_int_inc(gate_atomic_int_t* atom)
503 {
504 13194 return (gate_int32_t)__sync_add_and_fetch(atom, 1);
505 }
506 15302 gate_int32_t gate_atomic_int_dec(gate_atomic_int_t* atom)
507 {
508 15302 return (gate_int32_t)__sync_sub_and_fetch(atom, 1);
509 }
510 3 gate_int32_t gate_atomic_int_add(gate_atomic_int_t* atom, gate_int32_t value)
511 {
512 3 return (gate_int32_t)__sync_add_and_fetch(atom, value);
513 }
514 43 gate_int32_t gate_atomic_int_xchg_if(gate_atomic_int_t* atom, gate_int32_t comparand, gate_int32_t xchgvalue)
515 {
516 43 return (gate_int32_t)__sync_val_compare_and_swap(atom, comparand, xchgvalue);
517 }
518
519 3 void gate_atomic_int64_init(gate_atomic_int64_t* atom, gate_int64_t value)
520 {
521 3 *atom = value;
522 3 }
523 4 gate_int64_t gate_atomic_int64_set(gate_atomic_int64_t* atom, gate_int64_t value)
524 {
525 4 return (gate_int64_t)__sync_lock_test_and_set(atom, value);
526 }
527 15 gate_int64_t gate_atomic_int64_get(gate_atomic_int64_t* atom)
528 {
529 15 return (gate_int64_t)__sync_val_compare_and_swap(atom, 0, 0);
530 }
531 3 gate_int64_t gate_atomic_int64_inc(gate_atomic_int64_t* atom)
532 {
533 3 return (gate_int64_t)__sync_add_and_fetch(atom, 1);
534 }
535 3 gate_int64_t gate_atomic_int64_dec(gate_atomic_int64_t* atom)
536 {
537 3 return (gate_int64_t)__sync_sub_and_fetch(atom, 1);
538 }
539 11 gate_int64_t gate_atomic_int64_add(gate_atomic_int64_t* atom, gate_int64_t value)
540 {
541 11 return (gate_int64_t)__sync_add_and_fetch(atom, value);
542 }
543 4 gate_int64_t gate_atomic_int64_xchg_if(gate_atomic_int64_t* atom, gate_int64_t comparand, gate_int64_t xchgvalue)
544 {
545 4 return (gate_int64_t)__sync_val_compare_and_swap(atom, comparand, xchgvalue);
546 }
547
548 7 void gate_atomic_ptr_init(gate_atomic_ptr_t* atom, void* value)
549 {
550 7 *atom = value;
551 7 }
552 32 void* gate_atomic_ptr_set(gate_atomic_ptr_t* atom, void* value)
553 {
554 32 return __sync_lock_test_and_set(atom, value);
555 }
556 21 void* gate_atomic_ptr_get(gate_atomic_ptr_t* atom)
557 {
558 21 return __sync_val_compare_and_swap(atom, NULL, NULL);
559 }
560 50 void* gate_atomic_ptr_xchg_if(gate_atomic_ptr_t* atom, void* comparand, void* xchgvalue)
561 {
562 50 return __sync_val_compare_and_swap(atom, comparand, xchgvalue);
563 }
564
565 #endif /* GATE_ATOMICS_SYNCLOCK_IMPL */
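
Why the two GCC-builtin flavours matter here: __sync_add_and_fetch returns the updated value, matching the GATE convention of returning the new value, whereas C11's atomic_fetch_add returns the previous value, which is why the stdatomic backend above has to add value to its result. A minimal illustration (plain GCC builtins, not GATE code):

static int builtin_return_conventions(int* counter) /* assume *counter starts at 0 */
{
    int new_value = __sync_add_and_fetch(counter, 1);  /* returns 1: value after the add  */
    int old_value = __sync_fetch_and_add(counter, 1);  /* returns 1: value before the add */
    return new_value + old_value;                      /* 2; *counter is now 2 */
}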
566
567 #if defined(GATE_ATOMICS_BEOS_IMPL)
568
569 #include <kernel/OS.h>
570 #include <support/SupportDefs.h>
571
572 static void* gate_atomic_get_thread_id()
573 {
574 thread_id tid = find_thread(NULL);
575 return (void*)(gate_uintptr_t)tid;
576 }
577
578 static void gate_atomic_yield_thread()
579 {
580 }
581
582 gate_bool_t gate_atomic_flag_set(gate_atomic_flag_t* atom)
583 {
584 int32 prev_value = atomic_get_and_set((int32*)atom, 1);
585 return prev_value == 1;
586 }
587
588 void gate_atomic_flag_clear(gate_atomic_flag_t* atom)
589 {
590 atomic_set((int32*)atom, 0);
591 }
592
593 void gate_atomic_int_init(gate_atomic_int_t* atom, gate_int32_t value)
594 {
595 atomic_set((int32*)atom, (int32)value);
596 }
597
598 gate_int32_t gate_atomic_int_set(gate_atomic_int_t* atom, gate_int32_t value)
599 {
600 return (gate_int32_t)atomic_get_and_set((int32*)atom, (int32)value);
601 }
602
603 gate_int32_t gate_atomic_int_get(gate_atomic_int_t* atom)
604 {
605 return (gate_int32_t)atomic_get((int32*)atom);
606 }
607
608 gate_int32_t gate_atomic_int_inc(gate_atomic_int_t* atom)
609 {
610 return (gate_int32_t)(atomic_add((int32*)atom, 1) + 1);
611 }
612
613 gate_int32_t gate_atomic_int_dec(gate_atomic_int_t* atom)
614 {
615 return (gate_int32_t)(atomic_add((int32*)atom, -1) - 1);
616 }
617
618 gate_int32_t gate_atomic_int_add(gate_atomic_int_t* atom, gate_int32_t value)
619 {
620 return (gate_int32_t)(atomic_add((int32*)atom, (int32)value) + value);
621 }
622
623 gate_int32_t gate_atomic_int_xchg_if(gate_atomic_int_t* atom, gate_int32_t comparand, gate_int32_t xchgvalue)
624 {
625 return (gate_int32_t)atomic_test_and_set((int32*)atom, (int32)xchgvalue, (int32)comparand);
626 }
627
628 void gate_atomic_int64_init(gate_atomic_int64_t* atom, gate_int64_t value)
629 {
630 atomic_set64((int64*)atom, (int64)value);
631 }
632
633 gate_int64_t gate_atomic_int64_set(gate_atomic_int64_t* atom, gate_int64_t value)
634 {
635 /* use the atomic 64-bit exchange so concurrent readers
636 never observe a torn or stale update */
637 return (gate_int64_t)atomic_get_and_set64((int64*)atom, (int64)value);
638 }
639
640 gate_int64_t gate_atomic_int64_get(gate_atomic_int64_t* atom)
641 {
642 return (gate_int64_t)atomic_get64((int64*)atom);
643 }
644
645 gate_int64_t gate_atomic_int64_inc(gate_atomic_int64_t* atom)
646 {
647 return (gate_int64_t)atomic_add64((int64*)atom, 1) + 1;
648 }
649
650 gate_int64_t gate_atomic_int64_dec(gate_atomic_int64_t* atom)
651 {
652 return (gate_int64_t)atomic_add64((int64*)atom, -1) - 1;
653 }
654
655 gate_int64_t gate_atomic_int64_add(gate_atomic_int64_t* atom, gate_int64_t value)
656 {
657 return (gate_int64_t)atomic_add64((int64*)atom, (int64)value) + value;
658 }
659
660 gate_int64_t gate_atomic_int64_xchg_if(gate_atomic_int64_t* atom, gate_int64_t comparand, gate_int64_t xchgvalue)
661 {
662 return (gate_int64_t)atomic_test_and_set64((int64*)atom, (int64)xchgvalue, (int64)comparand);
663 }
664
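/* The pointer operations below use plain loads and stores; the BeOS/Haiku
   atomic API used above covers int32 and int64 but is assumed to offer no
   pointer-sized primitive, so these calls are not atomic across threads. */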
665 void gate_atomic_ptr_init(gate_atomic_ptr_t* atom, void* value)
666 {
667 *atom = value;
668 }
669
670 void* gate_atomic_ptr_set(gate_atomic_ptr_t* atom, void* value)
671 {
672 void* old_value = *atom;
673 *atom = value;
674 return old_value;
675 }
676
677 void* gate_atomic_ptr_get(gate_atomic_ptr_t* atom)
678 {
679 return *atom;
680 }
681
682 void* gate_atomic_ptr_xchg_if(gate_atomic_ptr_t* atom, void* comparand, void* xchgvalue)
683 {
684 void* old_value = *atom;
685 if (old_value == comparand)
686 {
687 *atom = xchgvalue;
688 }
689 return old_value;
690 }
691
692 #endif /* GATE_ATOMICS_BEOS_IMPL */
693
694 #if defined(GATE_ATOMICS_VOLATILE_IMPL)
695
696 static void* gate_atomic_get_thread_id()
697 {
698 return (void*)(gate_uintptr_t)1;
699 }
700
701 static void gate_atomic_yield_thread()
702 {
703 }
704
705 gate_bool_t gate_atomic_flag_set(gate_atomic_flag_t* atom)
706 {
707 int old_value = *((int volatile*)atom);
708 *((int volatile*)atom) = 1;
709 return old_value ? true : false;
710 }
711
712 void gate_atomic_flag_clear(gate_atomic_flag_t* atom)
713 {
714 *((int volatile*)atom) = 0;
715 }
716
717 void gate_atomic_int_init(gate_atomic_int_t* atom, gate_int32_t value)
718 {
719 *atom = value;
720 }
721
722 gate_int32_t gate_atomic_int_set(gate_atomic_int_t* atom, gate_int32_t value)
723 {
724 gate_int32_t old_value = *atom;
725 *atom = value;
726 return old_value;
727 }
728
729 gate_int32_t gate_atomic_int_get(gate_atomic_int_t* atom)
730 {
731 return *atom;
732 }
733
734 gate_int32_t gate_atomic_int_inc(gate_atomic_int_t* atom)
735 {
736 return ++(*atom);
737 }
738
739 gate_int32_t gate_atomic_int_dec(gate_atomic_int_t* atom)
740 {
741 return --(*atom);
742 }
743
744 gate_int32_t gate_atomic_int_add(gate_atomic_int_t* atom, gate_int32_t value)
745 {
746 return *atom += value;
747 }
748
749 gate_int32_t gate_atomic_int_xchg_if(gate_atomic_int_t* atom, gate_int32_t comparand, gate_int32_t xchgvalue)
750 {
751 gate_int32_t old_value = *atom;
752 if (old_value == comparand)
753 {
754 *atom = xchgvalue;
755 }
756 return old_value;
757 }
758
759 void gate_atomic_int64_init(gate_atomic_int64_t* atom, gate_int64_t value)
760 {
761 *atom = value;
762 }
763
764 gate_int64_t gate_atomic_int64_set(gate_atomic_int64_t* atom, gate_int64_t value)
765 {
766 gate_int64_t old_value = *atom;
767 *atom = value;
768 return old_value;
769 }
770
771 gate_int64_t gate_atomic_int64_get(gate_atomic_int64_t* atom)
772 {
773 return *atom;
774 }
775
776 gate_int64_t gate_atomic_int64_inc(gate_atomic_int64_t* atom)
777 {
778 return ++(*atom);
779 }
780
781 gate_int64_t gate_atomic_int64_dec(gate_atomic_int64_t* atom)
782 {
783 return --(*atom);
784 }
785
786 gate_int64_t gate_atomic_int64_add(gate_atomic_int64_t* atom, gate_int64_t value)
787 {
788 return (*atom) += value;
789 }
790
791 gate_int64_t gate_atomic_int64_xchg_if(gate_atomic_int64_t* atom, gate_int64_t comparand, gate_int64_t xchgvalue)
792 {
793 gate_int64_t old_value = *atom;
794 if (old_value == comparand)
795 {
796 *atom = xchgvalue;
797 }
798 return old_value;
799 }
800
801 void gate_atomic_ptr_init(gate_atomic_ptr_t* atom, void* value)
802 {
803 *atom = value;
804 }
805
806 void* gate_atomic_ptr_set(gate_atomic_ptr_t* atom, void* value)
807 {
808 void* old_value = *atom;
809 *atom = value;
810 return old_value;
811 }
812
813 void* gate_atomic_ptr_get(gate_atomic_ptr_t* atom)
814 {
815 return *atom;
816 }
817
818 void* gate_atomic_ptr_xchg_if(gate_atomic_ptr_t* atom, void* comparand, void* xchgvalue)
819 {
820 void* old_value = *atom;
821 if (old_value == comparand)
822 {
823 *atom = xchgvalue;
824 }
825 return old_value;
826 }
827
828 #endif /* GATE_ATOMICS_VOLATILE_IMPL */
829
830 /****************************
831 * generic implementation *
832 ****************************/
833
834 3 GATE_CORE_API gate_bool_t gate_atomic_int_update_if(gate_atomic_int_t* atom, gate_int32_t fromvalue, gate_int32_t tovalue)
835 {
836 3 return gate_atomic_int_xchg_if(atom, fromvalue, tovalue) == fromvalue;
837 }
838
839 static void* const gate_atomic_lock_unlocked = ((void*)0);
840 static void* const gate_atomic_lock_locked = ((void*)1);
841
842 21 void gate_atomic_lock_acquire(gate_atomic_lock_t* atom)
843 {
844
1/2
✗ Branch 1 not taken.
✓ Branch 2 taken 21 times.
21 while (gate_atomic_lock_locked == gate_atomic_ptr_xchg_if(&atom->lock,
845 gate_atomic_lock_unlocked,
846 gate_atomic_lock_locked))
847 {
848 /* wait until previous state is unlocked */
849 gate_atomic_yield_thread();
850 }
851 21 }
852 3 gate_bool_t gate_atomic_lock_acquire_spin(gate_atomic_lock_t* atom, gate_uint32_t spincount)
853 {
854
2/2
✓ Branch 1 taken 11 times.
✓ Branch 2 taken 2 times.
13 while (gate_atomic_lock_locked == gate_atomic_ptr_xchg_if(&atom->lock, gate_atomic_lock_unlocked, gate_atomic_lock_locked))
855 {
856
2/2
✓ Branch 0 taken 1 times.
✓ Branch 1 taken 10 times.
11 if (spincount-- == 0)
857 {
858 1 return false;
859 }
860 }
861 2 return true;
862 }
863 23 void gate_atomic_lock_release(gate_atomic_lock_t* atom)
864 {
865 23 gate_atomic_ptr_set(&atom->lock, gate_atomic_lock_unlocked);
866 23 }
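
A usage sketch for the spin lock above (an assumed caller pattern, not taken from the GATE sources): gate_atomic_lock_acquire spins, yielding the thread, until the lock is free, while gate_atomic_lock_acquire_spin gives up after spincount failed attempts:

static gate_atomic_lock_t g_lock = GATE_ATOMIC_LOCK_INIT;

static void update_shared_state(void)
{
    gate_atomic_lock_acquire(&g_lock);
    /* ... short critical section ... */
    gate_atomic_lock_release(&g_lock);
}

static gate_bool_t try_update_shared_state(void)
{
    if (!gate_atomic_lock_acquire_spin(&g_lock, 100))
    {
        return false;   /* still contended after 100 attempts: back off */
    }
    /* ... short critical section ... */
    gate_atomic_lock_release(&g_lock);
    return true;
}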
867
868 4 void gate_atomic_rlock_acquire(gate_atomic_rlock_t* atom)
869 {
870 4 void* const currentThread = gate_atomic_get_thread_id();
871 void* tmpValue;
872 for (;;)
873 {
874 4 tmpValue = gate_atomic_ptr_xchg_if(&atom->lock, gate_atomic_lock_unlocked, currentThread);
875
2/2
✓ Branch 0 taken 3 times.
✓ Branch 1 taken 1 times.
4 if (tmpValue == gate_atomic_lock_unlocked)
876 {
877 /* lock now acquired */
878 3 gate_atomic_int_inc(&atom->count);
879 3 break;
880 }
881
1/2
✓ Branch 0 taken 1 times.
✗ Branch 1 not taken.
1 else if (tmpValue == currentThread)
882 {
883 /* lock was previously acquired by this thread */
884 1 gate_atomic_int_inc(&atom->count);
885 1 break;
886 }
887 else
888 {
889 /* continue spinning */
890 }
891 }
892 4 }
893 2 gate_bool_t gate_atomic_rlock_acquire_spin(gate_atomic_rlock_t* atom, gate_uint32_t spincount)
894 {
895 2 void* const currentThread = gate_atomic_get_thread_id();
896 void* tmpValue;
897 for (;;)
898 {
899 2 tmpValue = gate_atomic_ptr_xchg_if(&atom->lock, gate_atomic_lock_unlocked, currentThread);
900
2/2
✓ Branch 0 taken 1 times.
✓ Branch 1 taken 1 times.
2 if (tmpValue == gate_atomic_lock_unlocked)
901 {
902 /* lock now acquired */
903 1 gate_atomic_int_inc(&atom->count);
904 1 break;
905 }
906
1/2
✓ Branch 0 taken 1 times.
✗ Branch 1 not taken.
1 else if (tmpValue == currentThread)
907 {
908 /* lock was previously acquired by this thread */
909 1 gate_atomic_int_inc(&atom->count);
910 1 break;
911 }
912 else
913 {
914 if (spincount-- == 0)
915 {
916 return false;
917 }
918 /* continue spinning */
919 }
920 }
921 2 return true;
922 }
923 6 void gate_atomic_rlock_release(gate_atomic_rlock_t* atom)
924 {
925 6 void* const currentThread = gate_atomic_get_thread_id();
926 void* currentValue;
927 6 gate_int32_t lockcount = gate_atomic_int_dec(&atom->count);
928
2/2
✓ Branch 0 taken 4 times.
✓ Branch 1 taken 2 times.
6 if (lockcount == 0)
929 {
930 /* release lock */
931 4 currentValue = gate_atomic_ptr_xchg_if(&atom->lock, currentThread, gate_atomic_lock_unlocked);
932
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 4 times.
4 if (currentValue != currentThread)
933 {
934 /* error: invoked from the wrong thread */
935 gate_atomic_ptr_set(&atom->lock, currentThread);
936 gate_atomic_int_inc(&atom->count);
937 }
938 }
939 else
940 {
941 /* lock count is still positive: the lock stays held */
942 2 currentValue = gate_atomic_ptr_get(&atom->lock);
943
1/2
✗ Branch 0 not taken.
✓ Branch 1 taken 2 times.
2 if (currentValue != currentThread)
944 {
945 /* error: invoked from the wrong thread */
946 gate_atomic_int_inc(&atom->count);
947 }
948 }
949 6 }
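
A usage sketch for the recursive lock (illustrative; a zeroed gate_atomic_rlock_t is assumed to be the unlocked state): the owning thread may re-acquire the lock it already holds, and every acquire must be balanced by one release from the same thread:

static gate_atomic_rlock_t g_rlock;        /* assumed: zero-initialised == unlocked */

static void inner(void)
{
    gate_atomic_rlock_acquire(&g_rlock);   /* re-entry by the owning thread succeeds */
    /* ... */
    gate_atomic_rlock_release(&g_rlock);
}

static void outer(void)
{
    gate_atomic_rlock_acquire(&g_rlock);
    inner();
    gate_atomic_rlock_release(&g_rlock);   /* count drops to 0: lock is released */
}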
950
951 6 void gate_atomic_call_once(gate_atomic_flag_t* ptr_once_flag, void (*func)(void))
952 {
953
2/2
✓ Branch 1 taken 2 times.
✓ Branch 2 taken 4 times.
6 if (false == gate_atomic_flag_set(ptr_once_flag))
954 {
955
1/2
✓ Branch 0 taken 2 times.
✗ Branch 1 not taken.
2 if (func)
956 {
957 2 func();
958 }
959 }
960 6 }
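
A usage sketch for the once-guard (an assumed pattern, not from the GATE sources). Note that gate_atomic_call_once only guarantees func runs at most once; a caller that loses the race returns immediately and does not wait for the initialiser to finish. gate_atomic_call_once_exclusive below serialises callers through the platform lock to close that gap:

static gate_atomic_flag_t g_init_once;     /* assumed: zero-initialised == cleared */

static void init_subsystem(void)
{
    /* one-time setup */
}

void ensure_initialised(void)
{
    gate_atomic_call_once(&g_init_once, &init_subsystem);
}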
961
962 3 void gate_atomic_call_once_exclusive(gate_atomic_flag_t* ptr_once_flag, void (*func)(void))
963 {
964 3 gate_result_t result = gate_platform_lock();
965 3 gate_atomic_call_once(ptr_once_flag, func);
966
1/2
✓ Branch 0 taken 3 times.
✗ Branch 1 not taken.
3 if (GATE_SUCCEEDED(result))
967 {
968 3 gate_platform_unlock();
969 }
970 3 }
971