mbed_atomic.h
1 
2 /*
3  * Copyright (c) 2015-2019, ARM Limited, All Rights Reserved
4  * SPDX-License-Identifier: Apache-2.0
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License"); you may
7  * not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  * http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  */
18 
19 #ifndef __MBED_UTIL_ATOMIC_H__
20 #define __MBED_UTIL_ATOMIC_H__
21 
22 #include "cmsis.h"
23 
24 #include <stdbool.h>
25 #include <stdint.h>
26 #include <stddef.h>
27 #include "platform/mbed_toolchain.h"
28 
29 /** \addtogroup platform-public-api */
30 /** @{*/
31 
32 /**
33  * \defgroup platform_atomic atomic functions
34  *
35  * Atomic functions behave analogously to C11 and C++11 atomics - loads have
36  * acquire semantics, stores have release semantics, and atomic operations
37  * are sequentially consistent. Atomicity is enforced both between threads and
38  * interrupt handlers.
39  *
40  * @{
41  */
42 
43 #ifdef __cplusplus
44 extern "C" {
45 #endif
46 
47 /**
48  * Memory order constraints for atomic operations. Intended semantics
49  * are as per C++11.
50  */
51 typedef enum mbed_memory_order {
52  /* Bits 0 = consume
53  * 1 = acquire (explicitly requested, or implied by seq.cst)
54  * 2 = release (explicitly requested, or implied by seq.cst)
55  * 4 = sequentially consistent
56  */
57  mbed_memory_order_relaxed = 0x00,
58  mbed_memory_order_consume = 0x01,
59  mbed_memory_order_acquire = 0x02,
60  mbed_memory_order_release = 0x04,
61  mbed_memory_order_acq_rel = 0x06,
62  mbed_memory_order_seq_cst = 0x16
63 } mbed_memory_order;
64 
65 // if __EXCLUSIVE_ACCESS rtx macro not defined, we need to get this via own-set architecture macros
66 #ifndef MBED_EXCLUSIVE_ACCESS
67 #ifndef __EXCLUSIVE_ACCESS
68 #if defined __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
69 #if ((__ARM_ARCH_7M__ == 1U) || \
70  (__ARM_ARCH_7EM__ == 1U) || \
71  (__ARM_ARCH_8M_BASE__ == 1U) || \
72  (__ARM_ARCH_8M_MAIN__ == 1U) || \
73  (__ARM_ARCH_8_1M_MAIN__ == 1U)) || \
74  (__ARM_ARCH_7A__ == 1U)
75 #define MBED_EXCLUSIVE_ACCESS 1U
76 #define MBED_EXCLUSIVE_ACCESS_THUMB1 (__ARM_ARCH_8M_BASE__ == 1U)
77 #ifdef __ICCARM__
78 #if __CPU_MODE__ == 2
79 #define MBED_EXCLUSIVE_ACCESS_ARM 1U
80 #else
81 #define MBED_EXCLUSIVE_ACCESS_ARM 0U
82 #endif
83 #else
84 #if !defined (__thumb__)
85 #define MBED_EXCLUSIVE_ACCESS_ARM 1U
86 #else
87 #define MBED_EXCLUSIVE_ACCESS_ARM 0U
88 #endif
89 #endif
90 #elif (__ARM_ARCH_6M__ == 1U)
91 #define MBED_EXCLUSIVE_ACCESS 0U
92 #elif defined __aarch64__ // Apple M1 Mac
93 #define MBED_EXCLUSIVE_ACCESS 0U
94 #else
95 #error "Unknown ARM architecture for exclusive access"
96 #endif // __ARM_ARCH_xxx
97 #else // __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
98 // Seems to be compiling for non-ARM, so stick with critical section implementations
99 #define MBED_EXCLUSIVE_ACCESS 0U
100 #endif
101 #else
102 #define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
103 #endif
104 #endif
105 
106 #if MBED_EXCLUSIVE_ACCESS
107 #define MBED_INLINE_IF_EX inline
108 #else
109 #define MBED_INLINE_IF_EX
110 #endif
111 
112 /**
113  * A lock-free, primitive atomic flag.
114  *
115  * Emulate C11's atomic_flag. The flag is initially in an indeterminate state
116  * unless explicitly initialized with CORE_UTIL_ATOMIC_FLAG_INIT.
117  */
118 typedef struct core_util_atomic_flag {
119  uint8_t _flag;
120 } core_util_atomic_flag;
121 
122 /**
123  * Initializer for a core_util_atomic_flag.
124  *
125  * Example:
126  * ~~~
127  * core_util_atomic_flag in_progress = CORE_UTIL_ATOMIC_FLAG_INIT;
128  * ~~~
129  */
130 #define CORE_UTIL_ATOMIC_FLAG_INIT { 0 }
131 
132 /**
133  * Atomic test and set.
134  *
135  * Atomically tests then sets the flag to true, returning the previous value.
136  *
137  * @param flagPtr Target flag being tested and set.
138  * @return The previous value.
139  */
140 MBED_INLINE_IF_EX bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr);
141 
142 /** \copydoc core_util_atomic_flag_test_and_set
143  * @param order memory ordering constraint
144  */
145 MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit(volatile core_util_atomic_flag *valuePtr, mbed_memory_order order);
146 
147 /**
148  * Atomic clear.
149  *
150  * @param flagPtr Target flag being cleared.
151  */
152 MBED_FORCEINLINE void core_util_atomic_flag_clear(volatile core_util_atomic_flag *flagPtr);
153 
154 /** \copydoc core_util_atomic_flag_clear
155  * @param order memory ordering constraint
156  */
157 MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order);
158 
159 
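/* Example: a minimal spin-lock sketch built on the atomic flag operations
 * declared above. core_util_atomic_flag_test_and_set() returns the previous
 * value, so looping until it returns false acquires the lock. The function
 * names used here are illustrative only.
 * ~~~
 *     static core_util_atomic_flag lock = CORE_UTIL_ATOMIC_FLAG_INIT;
 *
 *     void example_lock(void)
 *     {
 *         // Spin until the previous value was "clear"
 *         while (core_util_atomic_flag_test_and_set(&lock)) {
 *         }
 *     }
 *
 *     void example_unlock(void)
 *     {
 *         core_util_atomic_flag_clear(&lock);
 *     }
 * ~~~
 */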
160 /**
161  * Atomic compare and set. It compares the contents of a memory location to a
162  * given value and, only if they are the same, modifies the contents of that
163  * memory location to a given new value. This is done as a single atomic
164  * operation. The atomicity guarantees that the new value is calculated based on
165  * up-to-date information; if the value had been updated by another thread in
166  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
167  *
168  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
169  * you to the article on compare-and-swap].
170  *
171  * @param ptr The target memory location.
172  * @param[in,out] expectedCurrentValue A pointer to some location holding the
173  * expected current value of the data being set atomically.
174  * The computed 'desiredValue' should be a function of this current value.
175  * @note: This is an in-out parameter. In the
176  * failure case of atomic_cas (where the
177  * destination isn't set), the pointee of expectedCurrentValue is
178  * updated with the current value.
179  * @param[in] desiredValue The new value computed based on '*expectedCurrentValue'.
180  *
181  * @return true if the memory location was atomically
182  * updated with the desired value (after verifying
183  * that it contained the expectedCurrentValue),
184  * false otherwise. In the failure case,
185  * expectedCurrentValue is updated with the new
186  * value of the target memory location.
187  *
188  * pseudocode:
189  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
190  *     if *p != *old {
191  *         *old = *p
192  *         return false
193  *     }
194  *     *p = new
195  *     return true
196  * }
197  *
198  * @note: In the failure case (where the destination isn't set), the value
199  * pointed to by expectedCurrentValue is instead updated with the current value.
200  * This property helps writing concise code for the following incr:
201  *
202  * function incr(p : pointer to int, a : int) returns int {
203  *     done = false
204  *     value = atomic_load(p)
205  *     while not done {
206  *         done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
207  *     }
208  *     return value + a
209  * }
210  *
211  * However, if the call is made in a loop like this, the atomic_compare_exchange_weak
212  * functions are to be preferred.
213  *
214  * @note: This corresponds to the C11 "atomic_compare_exchange_strong" - it
215  * always succeeds if the current value is expected, as per the pseudocode
216  * above; it will not spuriously fail as "atomic_compare_exchange_weak" may.
217  * This call would normally be used when a fail return does not retry.
218  */
219 MBED_INLINE_IF_EX bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
220 
221 /** \copydoc core_util_atomic_cas_u8
222  * @param success memory ordering constraint for successful exchange
223  * @param failure memory ordering constraint for failure
224  */
225 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
226 
227 /** \copydoc core_util_atomic_cas_u8 */
228 MBED_INLINE_IF_EX bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
229 
230 /** \copydoc core_util_atomic_cas_explicit_u8 */
231 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
232 
233 /** \copydoc core_util_atomic_cas_u8 */
234 MBED_INLINE_IF_EX bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
235 
236 /** \copydoc core_util_atomic_cas_explicit_u8 */
237 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
238 
239 /** \copydoc core_util_atomic_cas_u8 */
240 bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
241 
242 /** \copydoc core_util_atomic_cas_explicit_u8 */
243 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
244 
245 /** \copydoc core_util_atomic_cas_u8 */
246 MBED_FORCEINLINE bool core_util_atomic_cas_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
247 
248 /** \copydoc core_util_atomic_cas_explicit_u8 */
249 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
250 
251 /** \copydoc core_util_atomic_cas_u8 */
252 MBED_FORCEINLINE bool core_util_atomic_cas_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
253 
254 /** \copydoc core_util_atomic_cas_explicit_u8 */
255 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
256 
257 /** \copydoc core_util_atomic_cas_u8 */
258 MBED_FORCEINLINE bool core_util_atomic_cas_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
259 
260 /** \copydoc core_util_atomic_cas_explicit_u8 */
261 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
262 
263 /** \copydoc core_util_atomic_cas_u8 */
264 MBED_FORCEINLINE bool core_util_atomic_cas_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
265 
266 /** \copydoc core_util_atomic_cas_explicit_u8 */
267 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
268 
269 /** \copydoc core_util_atomic_cas_u8 */
270 MBED_FORCEINLINE bool core_util_atomic_cas_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
271 
272 /** \copydoc core_util_atomic_cas_explicit_u8 */
273 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
274 
275 /** \copydoc core_util_atomic_cas_u8 */
276 inline bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
277 
278 /** \copydoc core_util_atomic_cas_explicit_u8 */
279 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
280 
281 
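/* Example: a minimal sketch of a one-shot "claim" operation using the strong
 * compare-and-set above. A strong CAS suits this case because a failed claim
 * is simply reported, not retried; on failure the pointee of the expected
 * value is refreshed with the current owner. Names here are illustrative.
 * ~~~
 *     static volatile uint32_t owner_id;   // 0 means "unclaimed"
 *
 *     bool example_try_claim(uint32_t my_id)
 *     {
 *         uint32_t expected = 0;
 *         // Returns true only if owner_id was 0 and is now my_id
 *         return core_util_atomic_cas_u32(&owner_id, &expected, my_id);
 *     }
 * ~~~
 */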
282 
283 /**
284  * Atomic compare and set. It compares the contents of a memory location to a
285  * given value and, only if they are the same, modifies the contents of that
286  * memory location to a given new value. This is done as a single atomic
287  * operation. The atomicity guarantees that the new value is calculated based on
288  * up-to-date information; if the value had been updated by another thread in
289  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
290  *
291  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
292  * you to the article on compare-and-swap].
293  *
294  * @param ptr The target memory location.
295  * @param[in,out] expectedCurrentValue A pointer to some location holding the
296  * expected current value of the data being set atomically.
297  * The computed 'desiredValue' should be a function of this current value.
298  * @note: This is an in-out parameter. In the
299  * failure case of atomic_cas (where the
300  * destination isn't set), the pointee of expectedCurrentValue is
301  * updated with the current value.
302  * @param[in] desiredValue The new value computed based on '*expectedCurrentValue'.
303  *
304  * @return true if the memory location was atomically
305  * updated with the desired value (after verifying
306  * that it contained the expectedCurrentValue),
307  * false otherwise. In the failure case,
308  * expectedCurrentValue is updated with the new
309  * value of the target memory location.
310  *
311  * pseudocode:
312  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
313  *     if *p != *old or spurious failure {
314  *         *old = *p
315  *         return false
316  *     }
317  *     *p = new
318  *     return true
319  * }
320  *
321  * @note: In the failure case (where the destination isn't set), the value
322  * pointed to by expectedCurrentValue is instead updated with the current value.
323  * This property helps writing concise code for the following incr:
324  *
325  * function incr(p : pointer to int, a : int) returns int {
326  *     done = false
327  *     value = *p // This fetch operation need not be atomic.
328  *     while not done {
329  *         done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success
330  *     }
331  *     return value + a
332  * }
333  *
334  * @note: This corresponds to the C11 "atomic_compare_exchange_weak" - it
335  * may spuriously fail if the current value is expected, as per the pseudocode
336  * above - unlike "atomic_compare_exchange_strong", which does not fail spuriously.
337  * This call would normally be used when a fail return will cause a retry anyway,
338  * saving the need for an extra loop inside the cas operation.
339  */
340 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
341 
342 /** \copydoc core_util_atomic_compare_exchange_weak_u8
343  * @param success memory ordering constraint for successful exchange
344  * @param failure memory ordering constraint for failure
345  */
346 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
347 
348 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
349 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
350 
351 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
352 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
353 
354 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
355 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
356 
357 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
358 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
359 
360 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
361 bool core_util_atomic_compare_exchange_weak_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
362 
363 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
364 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
365 
366 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
367 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
368 
369 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
370 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
371 
372 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
373 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
374 
375 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
376 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
377 
378 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
379 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
380 
381 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
382 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
383 
384 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
385 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
386 
387 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
388 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
389 
390 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
391 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
392 
393 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
394 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
395 
396 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
397 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
398 
399 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
400 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
401 
402 
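/* Example: a minimal "atomic maximum" sketch using the weak compare-exchange
 * above. Spurious failures just cause another pass around the loop, which is
 * why the weak form is the natural fit; a failed exchange refreshes `current`
 * automatically. example_store_max is an illustrative name.
 * ~~~
 *     void example_store_max(volatile uint32_t *dest, uint32_t value)
 *     {
 *         uint32_t current = *dest;   // seed value; this read need not be atomic
 *         while (current < value &&
 *                !core_util_atomic_compare_exchange_weak_u32(dest, &current, value)) {
 *             // current now holds the latest value; re-test and retry
 *         }
 *     }
 * ~~~
 */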
403 /**
404  * Atomic load.
405  * @param valuePtr Target memory location.
406  * @return The loaded value.
407  */
408 MBED_FORCEINLINE uint8_t core_util_atomic_load_u8(const volatile uint8_t *valuePtr);
409 
410 /**
411  * \copydoc core_util_atomic_load_u8
412  * @param order memory ordering constraint
413  */
414 MBED_FORCEINLINE uint8_t core_util_atomic_load_explicit_u8(const volatile uint8_t *valuePtr, mbed_memory_order order);
415 
416 /** \copydoc core_util_atomic_load_u8 */
417 MBED_FORCEINLINE uint16_t core_util_atomic_load_u16(const volatile uint16_t *valuePtr);
418 
419 /** \copydoc core_util_atomic_load_explicit_u8 */
420 MBED_FORCEINLINE uint16_t core_util_atomic_load_explicit_u16(const volatile uint16_t *valuePtr, mbed_memory_order order);
421 
422 /** \copydoc core_util_atomic_load_u8 */
423 MBED_FORCEINLINE uint32_t core_util_atomic_load_u32(const volatile uint32_t *valuePtr);
424 
425 /** \copydoc core_util_atomic_load_explicit_u8 */
426 MBED_FORCEINLINE uint32_t core_util_atomic_load_explicit_u32(const volatile uint32_t *valuePtr, mbed_memory_order order);
427 
428 /** \copydoc core_util_atomic_load_u8 */
429 uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr);
430 
431 /** \copydoc core_util_atomic_load_explicit_u8 */
432 MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64(const volatile uint64_t *valuePtr, mbed_memory_order order);
433 
434 /** \copydoc core_util_atomic_load_u8 */
435 MBED_FORCEINLINE int8_t core_util_atomic_load_s8(const volatile int8_t *valuePtr);
436 
437 /** \copydoc core_util_atomic_load_explicit_u8 */
438 MBED_FORCEINLINE int8_t core_util_atomic_load_explicit_s8(const volatile int8_t *valuePtr, mbed_memory_order order);
439 
440 /** \copydoc core_util_atomic_load_u8 */
441 MBED_FORCEINLINE int16_t core_util_atomic_load_s16(const volatile int16_t *valuePtr);
442 
443 /** \copydoc core_util_atomic_load_explicit_u8 */
444 MBED_FORCEINLINE int16_t core_util_atomic_load_explicit_s16(const volatile int16_t *valuePtr, mbed_memory_order order);
445 
446 /** \copydoc core_util_atomic_load_u8 */
447 MBED_FORCEINLINE int32_t core_util_atomic_load_s32(const volatile int32_t *valuePtr);
448 
449 /** \copydoc core_util_atomic_load_explicit_u8 */
450 MBED_FORCEINLINE int32_t core_util_atomic_load_explicit_s32(const volatile int32_t *valuePtr, mbed_memory_order order);
451 
452 /** \copydoc core_util_atomic_load_u8 */
453 MBED_FORCEINLINE int64_t core_util_atomic_load_s64(const volatile int64_t *valuePtr);
454 
455 /** \copydoc core_util_atomic_load_explicit_u8 */
456 MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64(const volatile int64_t *valuePtr, MBED_UNUSED mbed_memory_order order);
457 
458 /** \copydoc core_util_atomic_load_u8 */
459 MBED_FORCEINLINE bool core_util_atomic_load_bool(const volatile bool *valuePtr);
460 
461 /** \copydoc core_util_atomic_load_explicit_u8 */
462 MBED_FORCEINLINE bool core_util_atomic_load_explicit_bool(const volatile bool *valuePtr, mbed_memory_order order);
463 
464 /** \copydoc core_util_atomic_load_u8 */
465 MBED_FORCEINLINE void *core_util_atomic_load_ptr(void *const volatile *valuePtr);
466 
467 /** \copydoc core_util_atomic_load_explicit_u8 */
468 MBED_FORCEINLINE void *core_util_atomic_load_explicit_ptr(void *const volatile *valuePtr, mbed_memory_order order);
469 
470 /**
471  * Atomic store.
472  * @param valuePtr Target memory location.
473  * @param desiredValue The value to store.
474  */
475 MBED_FORCEINLINE void core_util_atomic_store_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
476 
477 /**
478  * \copydoc core_util_atomic_store_u8
479  * @param order memory ordering constraint
480  */
481 MBED_FORCEINLINE void core_util_atomic_store_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
482 
483 /** \copydoc core_util_atomic_store_u8 */
484 MBED_FORCEINLINE void core_util_atomic_store_u16(volatile uint16_t *valuePtr, uint16_t desiredValue);
485 
486 /** \copydoc core_util_atomic_store_explicit_u8 */
487 MBED_FORCEINLINE void core_util_atomic_store_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
488 
489 /** \copydoc core_util_atomic_store_u8 */
490 MBED_FORCEINLINE void core_util_atomic_store_u32(volatile uint32_t *valuePtr, uint32_t desiredValue);
491 
492 /** \copydoc core_util_atomic_store_explicit_u8 */
493 MBED_FORCEINLINE void core_util_atomic_store_explicit_u32(volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
494 
495 /** \copydoc core_util_atomic_store_u8 */
496 void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);
497 
498 /** \copydoc core_util_atomic_store_explicit_u8 */
499 MBED_FORCEINLINE void core_util_atomic_store_explicit_u64(volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
500 
501 /** \copydoc core_util_atomic_store_u8 */
502 MBED_FORCEINLINE void core_util_atomic_store_s8(volatile int8_t *valuePtr, int8_t desiredValue);
503 
504 /** \copydoc core_util_atomic_store_explicit_u8 */
505 MBED_FORCEINLINE void core_util_atomic_store_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
506 
507 /** \copydoc core_util_atomic_store_u8 */
508 MBED_FORCEINLINE void core_util_atomic_store_s16(volatile int16_t *valuePtr, int16_t desiredValue);
509 
510 /** \copydoc core_util_atomic_store_explicit_u8 */
511 MBED_FORCEINLINE void core_util_atomic_store_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
512 
513 /** \copydoc core_util_atomic_store_u8 */
514 MBED_FORCEINLINE void core_util_atomic_store_s32(volatile int32_t *valuePtr, int32_t desiredValue);
515 
516 /** \copydoc core_util_atomic_store_explicit_u8 */
517 MBED_FORCEINLINE void core_util_atomic_store_explicit_s32(volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
518 
519 /** \copydoc core_util_atomic_store_u8 */
520 MBED_FORCEINLINE void core_util_atomic_store_s64(volatile int64_t *valuePtr, int64_t desiredValue);
521 
522 /** \copydoc core_util_atomic_store_explicit_u8 */
523 MBED_FORCEINLINE void core_util_atomic_store_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
524 
525 /** \copydoc core_util_atomic_store_u8 */
526 MBED_FORCEINLINE void core_util_atomic_store_bool(volatile bool *valuePtr, bool desiredValue);
527 
528 /** \copydoc core_util_atomic_store_explicit_u8 */
529 MBED_FORCEINLINE void core_util_atomic_store_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
530 
531 /** \copydoc core_util_atomic_store_u8 */
532 MBED_FORCEINLINE void core_util_atomic_store_ptr(void *volatile *valuePtr, void *desiredValue);
533 
534 /** \copydoc core_util_atomic_store_explicit_u8 */
535 MBED_FORCEINLINE void core_util_atomic_store_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
536 
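/* Example: a minimal producer/consumer handoff sketch. As described at the
 * top of this file, plain atomic stores have release semantics and plain
 * atomic loads have acquire semantics, so once the consumer sees the flag as
 * true the previously written payload is visible too. Names are illustrative.
 * ~~~
 *     static uint32_t shared_payload;
 *     static bool payload_ready;
 *
 *     void example_produce(uint32_t value)
 *     {
 *         shared_payload = value;                            // ordinary write
 *         core_util_atomic_store_bool(&payload_ready, true); // release store
 *     }
 *
 *     bool example_consume(uint32_t *out)
 *     {
 *         if (core_util_atomic_load_bool(&payload_ready)) {  // acquire load
 *             *out = shared_payload;
 *             return true;
 *         }
 *         return false;
 *     }
 * ~~~
 */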
537 /**
538  * Atomic exchange.
539  * @param valuePtr Target memory location.
540  * @param desiredValue The value to store.
541  * @return The previous value.
542  */
543 MBED_INLINE_IF_EX uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
544 
545 /** \copydoc core_util_atomic_exchange_u8
546  * @param order memory ordering constraint
547  */
548 MBED_FORCEINLINE uint8_t core_util_atomic_exchange_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
549 
550 /** \copydoc core_util_atomic_exchange_u8 */
551 MBED_INLINE_IF_EX uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue);
552 
553 /** \copydoc core_util_atomic_exchange_explicit_u8 */
554 MBED_FORCEINLINE uint16_t core_util_atomic_exchange_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
555 
556 /** \copydoc core_util_atomic_exchange_u8 */
557 MBED_INLINE_IF_EX uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue);
558 
559 /** \copydoc core_util_atomic_exchange_explicit_u8 */
560 MBED_FORCEINLINE uint32_t core_util_atomic_exchange_explicit_u32(volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
561 
562 /** \copydoc core_util_atomic_exchange_u8 */
563 uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);
564 
565 /** \copydoc core_util_atomic_exchange_explicit_u8 */
566 MBED_FORCEINLINE uint64_t core_util_atomic_exchange_explicit_u64(volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
567 
568 /** \copydoc core_util_atomic_exchange_u8 */
569 MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8(volatile int8_t *valuePtr, int8_t desiredValue);
570 
571 /** \copydoc core_util_atomic_exchange_explicit_u8 */
572 MBED_FORCEINLINE int8_t core_util_atomic_exchange_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
573 
574 /** \copydoc core_util_atomic_exchange_u8 */
575 MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16(volatile int16_t *valuePtr, int16_t desiredValue);
576 
577 /** \copydoc core_util_atomic_exchange_explicit_u8 */
578 MBED_FORCEINLINE int16_t core_util_atomic_exchange_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
579 
580 /** \copydoc core_util_atomic_exchange_u8 */
581 MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32(volatile int32_t *valuePtr, int32_t desiredValue);
582 
583 /** \copydoc core_util_atomic_exchange_explicit_u8 */
584 MBED_FORCEINLINE int32_t core_util_atomic_exchange_explicit_s32(volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
585 
586 /** \copydoc core_util_atomic_exchange_u8 */
587 MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64(volatile int64_t *valuePtr, int64_t desiredValue);
588 
589 /** \copydoc core_util_atomic_exchange_explicit_u8 */
590 MBED_FORCEINLINE int64_t core_util_atomic_exchange_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
591 
592 /** \copydoc core_util_atomic_exchange_u8 */
593 MBED_FORCEINLINE bool core_util_atomic_exchange_bool(volatile bool *valuePtr, bool desiredValue);
594 
595 /** \copydoc core_util_atomic_exchange_explicit_u8 */
596 MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
597 
598 /** \copydoc core_util_atomic_exchange_u8 */
599 inline void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue);
600 
601 /** \copydoc core_util_atomic_exchange_explicit_u8 */
602 MBED_FORCEINLINE void *core_util_atomic_exchange_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
603 
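/* Example: a minimal single-slot mailbox sketch using the pointer exchange
 * above. The reader atomically swaps NULL in and receives whatever message
 * pointer was there, so each message is consumed exactly once. Names are
 * illustrative.
 * ~~~
 *     static void *volatile mailbox;
 *
 *     void example_post(void *msg)
 *     {
 *         core_util_atomic_store_ptr(&mailbox, msg);
 *     }
 *
 *     void *example_take(void)
 *     {
 *         // Returns the previous pointer, leaving NULL behind
 *         return core_util_atomic_exchange_ptr(&mailbox, NULL);
 *     }
 * ~~~
 */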
604 /**
605  * Atomic increment.
606  * @param valuePtr Target memory location being incremented.
607  * @param delta The amount being incremented.
608  * @return The new incremented value.
609  */
610 MBED_INLINE_IF_EX uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta);
611 
612 /** \copydoc core_util_atomic_incr_u8 */
613 MBED_INLINE_IF_EX uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta);
614 
615 /** \copydoc core_util_atomic_incr_u8 */
616 MBED_INLINE_IF_EX uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta);
617 
618 /** \copydoc core_util_atomic_incr_u8 */
619 uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta);
620 
621 /** \copydoc core_util_atomic_incr_u8 */
622 MBED_FORCEINLINE int8_t core_util_atomic_incr_s8(volatile int8_t *valuePtr, int8_t delta);
623 
624 /** \copydoc core_util_atomic_incr_u8 */
625 MBED_FORCEINLINE int16_t core_util_atomic_incr_s16(volatile int16_t *valuePtr, int16_t delta);
626 
627 /** \copydoc core_util_atomic_incr_u8 */
628 MBED_FORCEINLINE int32_t core_util_atomic_incr_s32(volatile int32_t *valuePtr, int32_t delta);
629 
630 /** \copydoc core_util_atomic_incr_u8 */
631 MBED_FORCEINLINE int64_t core_util_atomic_incr_s64(volatile int64_t *valuePtr, int64_t delta);
632 
633 /** \copydoc core_util_atomic_incr_u8 */
634 inline void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
635 
636 /**
637  * Atomic decrement.
638  * @param valuePtr Target memory location being decremented.
639  * @param delta The amount being decremented.
640  * @return The new decremented value.
641  */
642 MBED_INLINE_IF_EX uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta);
643 
644 /** \copydoc core_util_atomic_decr_u8 */
645 MBED_INLINE_IF_EX uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta);
646 
647 /** \copydoc core_util_atomic_decr_u8 */
648 MBED_INLINE_IF_EX uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta);
649 
650 /** \copydoc core_util_atomic_decr_u8 */
651 uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta);
652 
653 /** \copydoc core_util_atomic_decr_u8 */
654 MBED_FORCEINLINE int8_t core_util_atomic_decr_s8(volatile int8_t *valuePtr, int8_t delta);
655 
656 /** \copydoc core_util_atomic_decr_u8 */
657 MBED_FORCEINLINE int16_t core_util_atomic_decr_s16(volatile int16_t *valuePtr, int16_t delta);
658 
659 /** \copydoc core_util_atomic_decr_u8 */
660 MBED_FORCEINLINE int32_t core_util_atomic_decr_s32(volatile int32_t *valuePtr, int32_t delta);
661 
662 /** \copydoc core_util_atomic_decr_u8 */
663 MBED_FORCEINLINE int64_t core_util_atomic_decr_s64(volatile int64_t *valuePtr, int64_t delta);
664 
665 /** \copydoc core_util_atomic_decr_u8 */
666 inline void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
667 
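/* Example: a minimal reference-counting sketch. The increment and decrement
 * operations return the *new* value, so the caller that drops the last
 * reference (new count == 0) is the one that destroys the object. The
 * object_t type and function names are illustrative.
 * ~~~
 *     typedef struct {
 *         uint32_t refcount;
 *         // ... payload ...
 *     } object_t;
 *
 *     void example_acquire(object_t *obj)
 *     {
 *         core_util_atomic_incr_u32(&obj->refcount, 1);
 *     }
 *
 *     void example_release(object_t *obj)
 *     {
 *         if (core_util_atomic_decr_u32(&obj->refcount, 1) == 0) {
 *             // last reference released - safe to free/destroy obj here
 *         }
 *     }
 * ~~~
 */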
668 /**
669  * Atomic add.
670  * @param valuePtr Target memory location being modified.
671  * @param arg The argument for the addition.
672  * @return The original value.
673  */
674 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_add_u8(volatile uint8_t *valuePtr, uint8_t arg);
675 
676 /** \copydoc core_util_atomic_fetch_add_u8
677  * @param order memory ordering constraint
678  */
679 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_add_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
680 
681 /** \copydoc core_util_atomic_fetch_add_u8 */
682 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_add_u16(volatile uint16_t *valuePtr, uint16_t arg);
683 
684 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
685 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_add_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
686 
687 /** \copydoc core_util_atomic_fetch_add_u8 */
688 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_add_u32(volatile uint32_t *valuePtr, uint32_t arg);
689 
690 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
691 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_add_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
692 
693 /** \copydoc core_util_atomic_fetch_add_u8 */
694 uint64_t core_util_atomic_fetch_add_u64(volatile uint64_t *valuePtr, uint64_t arg);
695 
696 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
697 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_add_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
698 
699 /** \copydoc core_util_atomic_fetch_add_u8 */
700 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_s8(volatile int8_t *valuePtr, int8_t arg);
701 
702 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
703 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_explicit_s8(volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
704 
705 /** \copydoc core_util_atomic_fetch_add_u8 */
706 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_s16(volatile int16_t *valuePtr, int16_t arg);
707 
708 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
709 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_explicit_s16(volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
710 
711 /** \copydoc core_util_atomic_fetch_add_u8 */
712 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_s32(volatile int32_t *valuePtr, int32_t arg);
713 
714 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
715 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
716 
717 /** \copydoc core_util_atomic_fetch_add_u8 */
718 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_s64(volatile int64_t *valuePtr, int64_t arg);
719 
720 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
721 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
722 
723 /** \copydoc core_util_atomic_fetch_add_u8 */
724 MBED_FORCEINLINE void *core_util_atomic_fetch_add_ptr(void *volatile *valuePtr, ptrdiff_t arg);
725 
726 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
727 MBED_FORCEINLINE void *core_util_atomic_fetch_add_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
728 
729 /**
730  * Atomic subtract.
731  * @param valuePtr Target memory location being modified.
732  * @param arg The argument for the subtraction.
733  * @return The original value.
734  */
735 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_sub_u8(volatile uint8_t *valuePtr, uint8_t arg);
736 
737 /** \copydoc core_util_atomic_fetch_sub_u8
738  * @param order memory ordering constraint
739  */
740 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_sub_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
741 
742 /** \copydoc core_util_atomic_fetch_sub_u8 */
743 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_sub_u16(volatile uint16_t *valuePtr, uint16_t arg);
744 
745 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
746 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_sub_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
747 
748 /** \copydoc core_util_atomic_fetch_sub_u8 */
749 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_sub_u32(volatile uint32_t *valuePtr, uint32_t arg);
750 
751 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
752 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_sub_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
753 
754 /** \copydoc core_util_atomic_fetch_sub_u8 */
755 uint64_t core_util_atomic_fetch_sub_u64(volatile uint64_t *valuePtr, uint64_t arg);
756 
757 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
758 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_sub_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
759 
760 /** \copydoc core_util_atomic_fetch_sub_u8 */
761 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_s8(volatile int8_t *valuePtr, int8_t arg);
762 
763 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
764 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_explicit_s8(volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
765 
766 /** \copydoc core_util_atomic_fetch_sub_u8 */
767 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_s16(volatile int16_t *valuePtr, int16_t arg);
768 
769 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
770 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_explicit_s16(volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
771 
772 /** \copydoc core_util_atomic_fetch_sub_u8 */
773 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_s32(volatile int32_t *valuePtr, int32_t arg);
774 
775 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
776 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
777 
778 /** \copydoc core_util_atomic_fetch_sub_u8 */
779 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_s64(volatile int64_t *valuePtr, int64_t arg);
780 
781 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
782 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
783 
784 /** \copydoc core_util_atomic_fetch_sub_u8 */
785 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_ptr(void *volatile *valuePtr, ptrdiff_t arg);
786 
787 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
788 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
789 
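/* Example: a minimal ticket-dispenser sketch. Unlike core_util_atomic_incr_u32,
 * which returns the new value, the fetch_add family returns the value as it
 * was *before* the addition - exactly what "take the next ticket" needs. For a
 * statistics counter that needs atomicity but no ordering, the explicit form
 * with mbed_memory_order_relaxed could be used instead. Names are illustrative.
 * ~~~
 *     static volatile uint32_t next_ticket;
 *
 *     uint32_t example_take_ticket(void)
 *     {
 *         return core_util_atomic_fetch_add_u32(&next_ticket, 1);
 *     }
 *
 *     void example_count_event(volatile uint32_t *counter)
 *     {
 *         core_util_atomic_fetch_add_explicit_u32(counter, 1, mbed_memory_order_relaxed);
 *     }
 * ~~~
 */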
790 /**
791  * Atomic bitwise and.
792  * @param valuePtr Target memory location being modified.
793  * @param arg The argument for the bitwise operation.
794  * @return The original value.
795  */
796 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg);
797 
798 /** \copydoc core_util_atomic_fetch_and_u8
799  * @param order memory ordering constraint
800  */
801 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_and_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
802 
803 /** \copydoc core_util_atomic_fetch_and_u8 */
804 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg);
805 
806 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
807 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_and_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
808 
809 /** \copydoc core_util_atomic_fetch_and_u8 */
810 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg);
811 
812 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
813 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_and_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
814 
815 /** \copydoc core_util_atomic_fetch_and_u8 */
816 uint64_t core_util_atomic_fetch_and_u64(volatile uint64_t *valuePtr, uint64_t arg);
817 
818 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
819 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_and_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
820 
821 /**
822  * Atomic bitwise inclusive or.
823  * @param valuePtr Target memory location being modified.
824  * @param arg The argument for the bitwise operation.
825  * @return The original value.
826  */
827 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg);
828 
829 /** \copydoc core_util_atomic_fetch_or_u8
830  * @param order memory ordering constraint
831  */
832 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_or_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
833 
834 /** \copydoc core_util_atomic_fetch_or_u8 */
835 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg);
836 
837 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
838 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_or_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
839 
840 /** \copydoc core_util_atomic_fetch_or_u8 */
841 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg);
842 
843 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
844 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_or_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
845 
846 /** \copydoc core_util_atomic_fetch_or_u8 */
847 uint64_t core_util_atomic_fetch_or_u64(volatile uint64_t *valuePtr, uint64_t arg);
848 
849 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
850 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_or_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
851 
852 /**
853  * Atomic bitwise exclusive or.
854  * @param valuePtr Target memory location being modified.
855  * @param arg The argument for the bitwise operation.
856  * @return The original value.
857  */
858 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg);
859 
860 /** \copydoc core_util_atomic_fetch_xor_u8
861  * @param order memory ordering constraint
862  */
863 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_xor_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
864 
865 /** \copydoc core_util_atomic_fetch_xor_u8 */
866 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg);
867 
868 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
869 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_xor_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
870 
871 /** \copydoc core_util_atomic_fetch_xor_u8 */
872 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg);
873 
874 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
875 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_xor_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
876 
877 /** \copydoc core_util_atomic_fetch_xor_u8 */
878 uint64_t core_util_atomic_fetch_xor_u64(volatile uint64_t *valuePtr, uint64_t arg);
879 
880 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
881 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_xor_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
882 
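/* Example: a minimal sketch of maintaining a shared event mask with the
 * bitwise operations above: OR sets bits, AND with the complement clears them,
 * and XOR toggles them. Each call returns the mask as it was beforehand. The
 * event values and function names are illustrative.
 * ~~~
 *     #define EXAMPLE_EVENT_RX 0x01u
 *     #define EXAMPLE_EVENT_TX 0x02u
 *
 *     static volatile uint8_t event_mask;
 *
 *     void example_signal_rx(void)
 *     {
 *         core_util_atomic_fetch_or_u8(&event_mask, EXAMPLE_EVENT_RX);
 *     }
 *
 *     void example_clear_rx(void)
 *     {
 *         core_util_atomic_fetch_and_u8(&event_mask, (uint8_t)~EXAMPLE_EVENT_RX);
 *     }
 *
 *     void example_toggle_tx(void)
 *     {
 *         core_util_atomic_fetch_xor_u8(&event_mask, EXAMPLE_EVENT_TX);
 *     }
 * ~~~
 */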
883 #ifdef __cplusplus
884 } // extern "C"
885 
886 #include <mstd_type_traits>
887 
888 // For each operation, two overloaded templates:
889 // * one for non-pointer types, which has implementations based on the
890 // u8/u16/u32/u64/s8/s16/s32/s64/bool functions above. No base implementation.
891 // * one for any pointer type, generically implemented based on ptr function above.
892 //
893 // Templates use standard C/C++ naming - old incr/decr/cas forms are not provided.
894 //
895 // The `type_identity_t<T>` used here means "same type as T", blocking template
896 // argument deduction. It forces type selection based on the type of the actual pointer
897 // to the atomic. If just `T` was used, the following would be ambiguous:
898 // core_util_atomic_store(&my_uint8_t, 1) - it wouldn't be able to select between T
899 // being uint8_t and int.
900 
901 /** \copydoc core_util_atomic_load_u8 */
902 template<typename T> T core_util_atomic_load(const volatile T *valuePtr) noexcept;
903 /** \copydoc core_util_atomic_load_u8 */
904 template<typename T> T core_util_atomic_load(const T *valuePtr) noexcept;
905 /** \copydoc core_util_atomic_store_u8 */
906 template<typename T> void core_util_atomic_store(volatile T *valuePtr, mstd::type_identity_t<T> desiredValue) noexcept;
907 /** \copydoc core_util_atomic_store_u8 */
908 template<typename T> void core_util_atomic_store(T *valuePtr, mstd::type_identity_t<T> desiredValue) noexcept;
909 /** \copydoc core_util_atomic_exchange_u8 */
910 template<typename T> T core_util_atomic_exchange(volatile T *ptr, mstd::type_identity_t<T> desiredValue) noexcept;
911 /** \copydoc core_util_atomic_cas_u8 */
912 template<typename T> bool core_util_atomic_compare_exchange_strong(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue) noexcept;
913 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
914 template<typename T> bool core_util_atomic_compare_exchange_weak(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue) noexcept;
915 /** \copydoc core_util_atomic_fetch_add_u8 */
916 template<typename T> T core_util_atomic_fetch_add(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
917 /** \copydoc core_util_atomic_fetch_sub_u8 */
918 template<typename T> T core_util_atomic_fetch_sub(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
919 /** \copydoc core_util_atomic_fetch_and_u8 */
920 template<typename T> T core_util_atomic_fetch_and(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
921 /** \copydoc core_util_atomic_fetch_or_u8 */
922 template<typename T> T core_util_atomic_fetch_or(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
923 /** \copydoc core_util_atomic_fetch_xor_u8 */
924 template<typename T> T core_util_atomic_fetch_xor(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
925 
926 /** \copydoc core_util_atomic_load_explicit_u8 */
927 template<typename T> T core_util_atomic_load_explicit(const volatile T *valuePtr, mbed_memory_order order) noexcept;
928 /** \copydoc core_util_atomic_load_explicit_u8 */
929 template<typename T> T core_util_atomic_load_explicit(const T *valuePtr, mbed_memory_order order) noexcept;
930 /** \copydoc core_util_atomic_store_explicit_u8 */
931 template<typename T> void core_util_atomic_store_explicit(volatile T *valuePtr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
932 /** \copydoc core_util_atomic_store_explicit_u8 */
933 template<typename T> void core_util_atomic_store_explicit(T *valuePtr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
934 /** \copydoc core_util_atomic_exchange_explicit_u8 */
935 template<typename T> T core_util_atomic_exchange_explicit(volatile T *ptr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
936 /** \copydoc core_util_atomic_cas_explicit_u8 */
937 template<typename T> bool core_util_atomic_compare_exchange_strong_explicit(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
938 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
939 template<typename T> bool core_util_atomic_compare_exchange_weak_explicit(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
940 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
941 template<typename T> T core_util_atomic_fetch_add_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
942 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
943 template<typename T> T core_util_atomic_fetch_sub_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
944 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
945 template<typename T> T core_util_atomic_fetch_and_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
946 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
947 template<typename T> T core_util_atomic_fetch_or_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
948 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
949 template<typename T> T core_util_atomic_fetch_xor_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
950 
951 /** \copydoc core_util_atomic_load_ptr */
952 template<typename T> inline T *core_util_atomic_load(T *const volatile *valuePtr) noexcept;
953 /** \copydoc core_util_atomic_load_ptr */
954 template<typename T> inline T *core_util_atomic_load(T *const *valuePtr) noexcept;
955 /** \copydoc core_util_atomic_store_ptr */
956 template<typename T> inline void core_util_atomic_store(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
957 /** \copydoc core_util_atomic_store_ptr */
958 template<typename T> inline void core_util_atomic_store(T **valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
959 /** \copydoc core_util_atomic_exchange_ptr */
960 template<typename T> inline T *core_util_atomic_exchange(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
961 /** \copydoc core_util_atomic_cas_ptr */
962 template<typename T> inline bool core_util_atomic_compare_exchange_strong(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue) noexcept;
963 /** \copydoc core_util_atomic_compare_exchange_weak_ptr */
964 template<typename T> inline bool core_util_atomic_compare_exchange_weak(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue) noexcept;
965 /** \copydoc core_util_atomic_fetch_add_ptr */
966 template<typename T> inline T *core_util_atomic_fetch_add(T *volatile *valuePtr, ptrdiff_t arg) noexcept;
967 /** \copydoc core_util_atomic_fetch_sub_ptr */
968 template<typename T> inline T *core_util_atomic_fetch_sub(T *volatile *valuePtr, ptrdiff_t arg) noexcept;
969 
970 /** \copydoc core_util_atomic_load_explicit_ptr */
971 template<typename T> inline T *core_util_atomic_load_explicit(T *const volatile *valuePtr, mbed_memory_order order) noexcept;
972 /** \copydoc core_util_atomic_load_explicit_ptr */
973 template<typename T> inline T *core_util_atomic_load_explicit(T *const *valuePtr, mbed_memory_order order) noexcept;
974 /** \copydoc core_util_atomic_store_explicit_ptr */
975 template<typename T> inline void core_util_atomic_store_explicit(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
976 /** \copydoc core_util_atomic_store_explicit_ptr */
977 template<typename T> inline void core_util_atomic_store_explicit(T **valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
978 /** \copydoc core_util_atomic_exchange_explicit_ptr */
979 template<typename T> inline T *core_util_atomic_exchange_explicit(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
980 /** \copydoc core_util_atomic_cas_explicit_ptr */
981 template<typename T> inline bool core_util_atomic_compare_exchange_strong_explicit(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
982 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_ptr */
983 template<typename T> inline bool core_util_atomic_compare_exchange_weak_explicit(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
984 /** \copydoc core_util_atomic_fetch_add_explicit_ptr */
985 template<typename T> inline T *core_util_atomic_fetch_add_explicit(T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept;
986 /** \copydoc core_util_atomic_fetch_sub_explicit_ptr */
987 template<typename T> inline T *core_util_atomic_fetch_sub_explicit(T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept;
988 
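/* Example: a minimal C++ sketch of the template overloads above. The value
 * argument's type is taken from the pointed-to type (via type_identity_t), so
 * integer literals do not make the call ambiguous. The variables are
 * illustrative.
 * ~~~
 *     volatile uint32_t counter = 0;
 *
 *     core_util_atomic_store(&counter, 42);                 // uint32_t overload selected
 *     uint32_t old = core_util_atomic_fetch_add(&counter, 1);
 *
 *     uint32_t expected = 43;
 *     bool swapped = core_util_atomic_compare_exchange_strong(&counter, &expected, 0);
 * ~~~
 */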
989 #endif // __cplusplus
990 
991 /**@}*/
992 
993 /**@}*/
994 
995 /* Hide the implementation away */
996 #include "platform/internal/mbed_atomic_impl.h"
997 
998 #endif // __MBED_UTIL_ATOMIC_H__
999 
1000 
1001 