| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | /* GATE PROJECT LICENSE: | ||
| 2 | +----------------------------------------------------------------------------+ | ||
| 3 | | Copyright(c) 2018-2025, Stefan Meislinger <sm@opengate.at> | | ||
| 4 | | All rights reserved. | | ||
| 5 | | | | ||
| 6 | | Redistribution and use in source and binary forms, with or without | | ||
| 7 | | modification, are permitted provided that the following conditions are met:| | ||
| 8 | | | | ||
| 9 | | 1. Redistributions of source code must retain the above copyright notice, | | ||
| 10 | | this list of conditions and the following disclaimer. | | ||
| 11 | | 2. Redistributions in binary form must reproduce the above copyright | | ||
| 12 | | notice, this list of conditions and the following disclaimer in the | | ||
| 13 | | documentation and/or other materials provided with the distribution. | | ||
| 14 | | | | ||
| 15 | | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"| | ||
| 16 | | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE | | ||
| 17 | | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | | ||
| 18 | | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE | | ||
| 19 | | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | | ||
| 20 | | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | | ||
| 21 | | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | | ||
| 22 | | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | | ||
| 23 | | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | | ||
| 24 | | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF | | ||
| 25 | | THE POSSIBILITY OF SUCH DAMAGE. | | ||
| 26 | +----------------------------------------------------------------------------+ | ||
| 27 | */ | ||
| 28 | |||
| 29 | #include "gate/atomics.hpp" | ||
| 30 | #include "gate/memalloc.h" | ||
| 31 | |||
| 32 | namespace gate | ||
| 33 | { | ||
| 34 | 2 | AtomicFlag::AtomicFlag(bool init) noexcept | |
| 35 | { | ||
| 36 | 2 | impl.value = 0; | |
| 37 | 2 | if (init) | 2/2 ✓ Branch 0 taken 1 times. ✓ Branch 1 taken 1 times. |
| 38 | { | ||
| 39 | 1 | this->set(); | |
| 40 | } | ||
| 41 | 2 | } | |
| 42 | 2 | AtomicFlag::~AtomicFlag() noexcept | |
| 43 | { | ||
| 44 | 2 | } | |
| 45 | |||
| 46 | 1 | void AtomicFlag::clear() noexcept | |
| 47 | { | ||
| 48 | 1 | gate_atomic_flag_clear(const_cast<gate_atomic_flag_t*>(&this->impl)); | |
| 49 | 1 | } | |
| 50 | 8 | bool_t AtomicFlag::set() noexcept | |
| 51 | { | ||
| 52 | 8 | return gate_atomic_flag_set(&this->impl); | |
| 53 | } | ||
| 54 | |||
| 55 | |||
| 56 | |||
| 57 | |||
| 58 | |||
| 59 | |||
| 60 | |||
| 61 | 23 | AtomicInt::AtomicInt(int32_t init) noexcept | |
| 62 | { | ||
| 63 | 23 | gate_atomic_int_init(&this->impl, init); | |
| 64 | 23 | } | |
| 65 | 1 | AtomicInt::AtomicInt(AtomicInt const& src) noexcept | |
| 66 | { | ||
| 67 | 1 | gate_atomic_int_init(&this->impl, src.get()); | |
| 68 | 1 | } | |
| 69 | 1 | AtomicInt& AtomicInt::operator=(AtomicInt const& src) noexcept | |
| 70 | { | ||
| 71 | 1 | gate_atomic_int_set(&this->impl, src.get()); | |
| 72 | 1 | return *this; | |
| 73 | } | ||
| 74 | 24 | AtomicInt::~AtomicInt() noexcept | |
| 75 | { | ||
| 76 | 24 | } | |
| 77 | |||
| 78 | 6 | gate_atomic_int_t* AtomicInt::c_impl() | |
| 79 | { | ||
| 80 | 6 | return &this->impl; | |
| 81 | } | ||
| 82 | |||
| 83 | 38 | int32_t AtomicInt::get() const noexcept | |
| 84 | { | ||
| 85 | 38 | return gate_atomic_int_get(const_cast<gate_atomic_int_t*>(&this->impl)); | |
| 86 | } | ||
| 87 | 7 | int32_t AtomicInt::set(int32_t newvalue) noexcept | |
| 88 | { | ||
| 89 | 7 | return gate_atomic_int_set(&this->impl, newvalue); | |
| 90 | } | ||
| 91 | 16 | int32_t AtomicInt::changeIf(int32_t comparand, int32_t newvalue) noexcept | |
| 92 | { | ||
| 93 | 16 | return gate_atomic_int_xchg_if(&this->impl, comparand, newvalue); | |
| 94 | } | ||
| 95 | 3 | bool_t AtomicInt::updateIf(int32_t fromvalue, int32_t tovalue) noexcept | |
| 96 | { | ||
| 97 | 3 | return gate_atomic_int_update_if(&this->impl, fromvalue, tovalue); | |
| 98 | } | ||
| 99 | |||
| 100 | 30 | int32_t AtomicInt::operator++() noexcept | |
| 101 | { | ||
| 102 | 30 | return gate_atomic_int_inc(&this->impl); | |
| 103 | } | ||
| 104 | 1 | int32_t AtomicInt::operator++(int) noexcept | |
| 105 | { | ||
| 106 | 1 | return gate_atomic_int_inc(&this->impl) - 1; | |
| 107 | } | ||
| 108 | 27 | int32_t AtomicInt::operator--() noexcept | |
| 109 | { | ||
| 110 | 27 | return gate_atomic_int_dec(&this->impl); | |
| 111 | } | ||
| 112 | 1 | int32_t AtomicInt::operator--(int) noexcept | |
| 113 | { | ||
| 114 | 1 | return gate_atomic_int_dec(&this->impl) + 1; | |
| 115 | } | ||
| 116 | |||
| 117 | 1 | int32_t AtomicInt::operator+=(int32_t value) noexcept | |
| 118 | { | ||
| 119 | 1 | return gate_atomic_int_add(&this->impl, value); | |
| 120 | } | ||
| 121 | 1 | int32_t AtomicInt::operator-=(int32_t value) noexcept | |
| 122 | { | ||
| 123 | 1 | return gate_atomic_int_add(&this->impl, -value); | |
| 124 | } | ||
| 125 | |||
| 126 | |||
| 127 | |||
| 128 | |||
| 129 | |||
| 130 | 1 | AtomicInt64::AtomicInt64(int64_t init) noexcept | |
| 131 | { | ||
| 132 | 1 | gate_atomic_int64_init(&this->impl, init); | |
| 133 | 1 | } | |
| 134 | 1 | AtomicInt64::AtomicInt64(AtomicInt64 const& src) noexcept | |
| 135 | { | ||
| 136 | 1 | gate_atomic_int64_init(&this->impl, src.get()); | |
| 137 | 1 | } | |
| 138 | 1 | AtomicInt64& AtomicInt64::operator=(AtomicInt64 const& src) noexcept | |
| 139 | { | ||
| 140 | 1 | gate_atomic_int64_set(&this->impl, src.get()); | |
| 141 | 1 | return *this; | |
| 142 | } | ||
| 143 | 2 | AtomicInt64::~AtomicInt64() noexcept | |
| 144 | { | ||
| 145 | 2 | } | |
| 146 | |||
| 147 | 8 | int64_t AtomicInt64::get() const noexcept | |
| 148 | { | ||
| 149 | 8 | return gate_atomic_int64_get(const_cast<gate_atomic_int64_t*>(&this->impl)); | |
| 150 | } | ||
| 151 | 2 | int64_t AtomicInt64::set(int64_t newvalue) noexcept | |
| 152 | { | ||
| 153 | 2 | return gate_atomic_int64_set(&this->impl, newvalue); | |
| 154 | } | ||
| 155 | 2 | int64_t AtomicInt64::changeIf(int64_t comparand, int64_t newvalue) noexcept | |
| 156 | { | ||
| 157 | 2 | return gate_atomic_int64_xchg_if(&this->impl, comparand, newvalue); | |
| 158 | } | ||
| 159 | |||
| 160 | 1 | int64_t AtomicInt64::operator++() noexcept | |
| 161 | { | ||
| 162 | 1 | return gate_atomic_int64_inc(&this->impl); | |
| 163 | } | ||
| 164 | 1 | int64_t AtomicInt64::operator++(int) noexcept | |
| 165 | { | ||
| 166 | 1 | return gate_atomic_int64_inc(&this->impl) - 1; | |
| 167 | } | ||
| 168 | 1 | int64_t AtomicInt64::operator--() noexcept | |
| 169 | { | ||
| 170 | 1 | return gate_atomic_int64_dec(&this->impl); | |
| 171 | } | ||
| 172 | 1 | int64_t AtomicInt64::operator--(int) noexcept | |
| 173 | { | ||
| 174 | 1 | return gate_atomic_int64_dec(&this->impl) + 1; | |
| 175 | } | ||
| 176 | |||
| 177 | 1 | int64_t AtomicInt64::operator+=(int64_t value) noexcept | |
| 178 | { | ||
| 179 | 1 | return gate_atomic_int64_add(&this->impl, value); | |
| 180 | } | ||
| 181 | 1 | int64_t AtomicInt64::operator-=(int64_t value) noexcept | |
| 182 | { | ||
| 183 | 1 | return gate_atomic_int64_add(&this->impl, -value); | |
| 184 | } | ||
| 185 | |||
| 186 | |||
| 187 | |||
| 188 | |||
| 189 | |||
| 190 | 3 | AtomicPointer::AtomicPointer(void* init) noexcept | |
| 191 | { | ||
| 192 | 3 | gate_atomic_ptr_init(&this->impl, init); | |
| 193 | 3 | } | |
| 194 | 1 | AtomicPointer::AtomicPointer(AtomicPointer const& src) noexcept | |
| 195 | { | ||
| 196 | 1 | gate_atomic_ptr_init(&this->impl, src.get()); | |
| 197 | 1 | } | |
| 198 | 1 | AtomicPointer& AtomicPointer::operator=(AtomicPointer const& src) noexcept | |
| 199 | { | ||
| 200 | 1 | this->set(src.get()); | |
| 201 | 1 | return *this; | |
| 202 | } | ||
| 203 | 4 | AtomicPointer::~AtomicPointer() noexcept | |
| 204 | { | ||
| 205 | 4 | } | |
| 206 | |||
| 207 | 12 | void* AtomicPointer::get() const noexcept | |
| 208 | { | ||
| 209 | 12 | return gate_atomic_ptr_get(const_cast<gate_atomic_ptr_t*>(&this->impl)); | |
| 210 | } | ||
| 211 | 6 | void* AtomicPointer::set(void* newvalue) noexcept | |
| 212 | { | ||
| 213 | 6 | return gate_atomic_ptr_set(&this->impl, newvalue); | |
| 214 | } | ||
| 215 | 4 | void* AtomicPointer::changeIf(void* comparand, void* newvalue) noexcept | |
| 216 | { | ||
| 217 | 4 | return gate_atomic_ptr_xchg_if(&this->impl, comparand, newvalue); | |
| 218 | } | ||
| 219 | |||
| 220 | |||
| 221 | |||
| 222 | |||
| 223 | |||
| 224 | 1 | AtomicMutex::AtomicMutex() noexcept | |
| 225 | { | ||
| 226 | 1 | gate_mem_clear(&this->impl, sizeof(this->impl)); | |
| 227 | //this->impl = GATE_ATOMIC_LOCK_INIT; | ||
| 228 | 1 | } | |
| 229 | 1 | AtomicMutex::~AtomicMutex() noexcept | |
| 230 | { | ||
| 231 | 1 | } | |
| 232 | 1 | gate_atomic_lock_t* AtomicMutex::c_impl() noexcept | |
| 233 | { | ||
| 234 | 1 | return &this->impl; | |
| 235 | } | ||
| 236 | |||
| 237 | 1 | void AtomicMutex::lock() noexcept | |
| 238 | { | ||
| 239 | 1 | gate_atomic_lock_acquire(&this->impl); | |
| 240 | 1 | } | |
| 241 | 2 | bool AtomicMutex::lock(uint32_t spinCount) noexcept | |
| 242 | { | ||
| 243 | 2 | return gate_atomic_lock_acquire_spin(&this->impl, spinCount); | |
| 244 | } | ||
| 245 | 2 | void AtomicMutex::unlock() noexcept | |
| 246 | { | ||
| 247 | 2 | gate_atomic_lock_release(&this->impl); | |
| 248 | 2 | } | |
| 249 | |||
| 250 | |||
| 251 | 1 | AtomicLock::AtomicLock(AtomicMutex& mtx) noexcept | |
| 252 | 1 | : mutex(mtx.c_impl()) | |
| 253 | { | ||
| 254 | 1 | gate_atomic_lock_acquire(this->mutex); | |
| 255 | 1 | } | |
| 256 | 1 | AtomicLock::AtomicLock(gate_atomic_lock_t& ref_mtx) noexcept | |
| 257 | 1 | : mutex(&ref_mtx) | |
| 258 | { | ||
| 259 | 1 | gate_atomic_lock_acquire(this->mutex); | |
| 260 | 1 | } | |
| 261 | 4 | AtomicLock::~AtomicLock() noexcept | |
| 262 | { | ||
| 263 | 2 | gate_atomic_lock_release(this->mutex); | |
| 264 | 2 | } | |
| 265 | |||
| 266 | |||
| 267 | 1 | AtomicRecursiveMutex::AtomicRecursiveMutex() noexcept | |
| 268 | { | ||
| 269 | 1 | gate_mem_clear(&this->impl, sizeof(this->impl)); | |
| 270 | 1 | } | |
| 271 | 1 | AtomicRecursiveMutex::~AtomicRecursiveMutex() noexcept | |
| 272 | { | ||
| 273 | 1 | } | |
| 274 | 2 | gate_atomic_rlock_t* AtomicRecursiveMutex::c_impl() noexcept | |
| 275 | { | ||
| 276 | 2 | return &this->impl; | |
| 277 | } | ||
| 278 | |||
| 279 | 1 | void AtomicRecursiveMutex::lock() noexcept | |
| 280 | { | ||
| 281 | 1 | gate_atomic_rlock_acquire(&this->impl); | |
| 282 | 1 | } | |
| 283 | 2 | bool AtomicRecursiveMutex::lock(uint32_t spinCount) noexcept | |
| 284 | { | ||
| 285 | 2 | return gate_atomic_rlock_acquire_spin(&this->impl, spinCount); | |
| 286 | } | ||
| 287 | 3 | void AtomicRecursiveMutex::unlock() noexcept | |
| 288 | { | ||
| 289 | 3 | gate_atomic_rlock_release(&this->impl); | |
| 290 | 3 | } | |
| 291 | |||
| 292 | |||
| 293 | 2 | AtomicRecursiveLock::AtomicRecursiveLock(AtomicRecursiveMutex& mtx) noexcept | |
| 294 | 2 | : mutex(mtx.c_impl()) | |
| 295 | { | ||
| 296 | 2 | gate_atomic_rlock_acquire(this->mutex); | |
| 297 | 2 | } | |
| 298 | 1 | AtomicRecursiveLock::AtomicRecursiveLock(gate_atomic_rlock_t& ref_mtx) noexcept | |
| 299 | 1 | : mutex(&ref_mtx) | |
| 300 | { | ||
| 301 | 1 | gate_atomic_rlock_acquire(this->mutex); | |
| 302 | 1 | } | |
| 303 | 6 | AtomicRecursiveLock::~AtomicRecursiveLock() noexcept | |
| 304 | { | ||
| 305 | 3 | gate_atomic_rlock_release(this->mutex); | |
| 306 | 3 | } | |
| 307 | |||
| 308 | |||
| 309 | 1 | AtomicCallOnce::AtomicCallOnce() noexcept | |
| 310 | { | ||
| 311 | 1 | this->state.value = 0; | |
| 312 | 1 | } | |
| 313 | 1 | AtomicCallOnce::~AtomicCallOnce() noexcept | |
| 314 | { | ||
| 315 | 1 | } | |
| 316 | 3 | void AtomicCallOnce::operator()(void(*func)(void)) | |
| 317 | { | ||
| 318 | 3 | gate_atomic_call_once(&this->state, func); | |
| 319 | 3 | } | |
| 320 | |||
| 321 | |||
| 322 | 1 | AtomicCallOnceEx::AtomicCallOnceEx() noexcept | |
| 323 | { | ||
| 324 | 1 | this->state.value = 0; | |
| 325 | 1 | } | |
| 326 | 1 | AtomicCallOnceEx::~AtomicCallOnceEx() noexcept | |
| 327 | { | ||
| 328 | 1 | } | |
| 329 | 3 | void AtomicCallOnceEx::operator()(void(*func)(void)) | |
| 330 | { | ||
| 331 | 3 | gate_atomic_call_once_exclusive(&this->state, func); | |
| 332 | 3 | } | |
| 333 | |||
| 334 | } // end of namespace gate | ||
| 335 |||
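
The listing above shows only the implementation side of the `gate::Atomic*` value wrappers. As a usage sketch (not part of the coverage output), the following assumes the class and member signatures visible in the listing and that `gate/atomics.hpp` is on the include path; the function name `counter_example` and the specific values are illustrative only.

```cpp
#include "gate/atomics.hpp"

namespace
{
    void counter_example()
    {
        gate::AtomicInt counter(0);      // wraps a gate_atomic_int_t

        ++counter;                       // pre-increment yields the new value
        counter += 5;                    // counter.get() is now 6

        // changeIf() forwards to gate_atomic_int_xchg_if (compare-and-exchange);
        // updateIf() forwards to gate_atomic_int_update_if and yields a bool_t flag.
        counter.changeIf(6, 10);
        counter.updateIf(10, 42);

        gate::AtomicFlag flag(false);    // the constructor calls set() when init is true
        flag.set();                      // forwards to gate_atomic_flag_set
        flag.clear();                    // forwards to gate_atomic_flag_clear

        gate::AtomicPointer slot(nullptr);
        int local = 0;
        slot.set(&local);                // forwards to gate_atomic_ptr_set
        void* current = slot.get();      // forwards to gate_atomic_ptr_get
        (void)current;
    }
}
```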
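
A second sketch, under the same assumptions, for the RAII lock guards and the call-once helper. The spin-count and recursion semantics are inferred from the function names, not confirmed by the listing; `init_subsystem` and `locking_example` are hypothetical.

```cpp
#include "gate/atomics.hpp"

namespace
{
    gate::AtomicMutex          g_mutex;
    gate::AtomicRecursiveMutex g_rmutex;
    gate::AtomicCallOnce       g_init_once;

    void init_subsystem()
    {
        // one-time initialization goes here
    }

    void locking_example()
    {
        g_init_once(&init_subsystem);    // forwards to gate_atomic_call_once

        {
            // AtomicLock acquires in its constructor and releases in its
            // destructor, so the critical section is this scope.
            gate::AtomicLock guard(g_mutex);
            // ... work under the lock ...
        }

        // The spin overload returns a bool; presumably it gives up after
        // spinCount attempts instead of blocking (an assumption here).
        if (g_mutex.lock(1000u))
        {
            // ... work under the lock ...
            g_mutex.unlock();
        }

        {
            // Recursive variant: nested acquisition within the same scope.
            gate::AtomicRecursiveLock outer(g_rmutex);
            gate::AtomicRecursiveLock inner(g_rmutex);
        }
    }
}
```

Like `std::lock_guard`, `AtomicLock` and `AtomicRecursiveLock` rely on scope-bound acquire/release, which matches the constructor/destructor pairs in the listing above.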