mbed_atomic.h
1 
2 /*
3  * Copyright (c) 2015-2019, ARM Limited, All Rights Reserved
4  * SPDX-License-Identifier: Apache-2.0
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License"); you may
7  * not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  * http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  */
18 
19 #ifndef __MBED_UTIL_ATOMIC_H__
20 #define __MBED_UTIL_ATOMIC_H__
21 
22 #include "cmsis.h"
23 
24 #include <stdbool.h>
25 #include <stdint.h>
26 #include <stddef.h>
27 #include "platform/mbed_toolchain.h"
28 
29 /** \addtogroup platform-public-api */
30 /** @{*/
31 
32 /**
33  * \defgroup platform_atomic atomic functions
34  *
35  * Atomic functions behave analogously to those in C11 and C++11 - loads have
36  * acquire semantics, stores have release semantics, and atomic operations
37  * are sequentially consistent. Atomicity is enforced both between threads and
38  * interrupt handlers.
39  *
40  * @{
41  */
42 
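/* Illustrative sketch only (the counter and handler names below are hypothetical,
 * not part of this API): a count shared between an interrupt handler and thread
 * code can be updated and drained without locking.
 * ~~~
 * static volatile uint32_t event_count;
 *
 * void my_irq_handler(void)                     // hypothetical ISR
 * {
 *     core_util_atomic_incr_u32(&event_count, 1);
 * }
 *
 * uint32_t drain_events(void)                   // hypothetical thread-side helper
 * {
 *     return core_util_atomic_exchange_u32(&event_count, 0);
 * }
 * ~~~
 */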
43 #ifdef __cplusplus
44 extern "C" {
45 #endif
46 
47 /**
48  * Memory order constraints for atomic operations. Intended semantics
49  * are as per C++11.
50  */
51 typedef enum mbed_memory_order {
52  /* Bits 0 = consume
53  * 1 = acquire (explicitly requested, or implied by seq.cst)
54  * 2 = release (explicitly requested, or implied by seq.cst)
55  * 4 = sequentially consistent
56  */
57  mbed_memory_order_relaxed = 0x00,
58  mbed_memory_order_consume = 0x01,
59  mbed_memory_order_acquire = 0x02,
60  mbed_memory_order_release = 0x04,
61  mbed_memory_order_acq_rel = 0x06,
62  mbed_memory_order_seq_cst = 0x16
63 } mbed_memory_order;
64 
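/* Illustrative sketch only: the plain functions are sequentially consistent, while
 * the _explicit variants accept one of the orderings above. For example, a
 * statistics counter that is only read in bulk later (stats_count is a hypothetical
 * variable) can use relaxed ordering:
 * ~~~
 * core_util_atomic_fetch_add_explicit_u32(&stats_count, 1, mbed_memory_order_relaxed);
 * ~~~
 */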
65 // If the RTX __EXCLUSIVE_ACCESS macro is not defined, we need to derive this from the architecture macros ourselves
66 #ifndef MBED_EXCLUSIVE_ACCESS
67 #ifndef __EXCLUSIVE_ACCESS
68 #if defined __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
69 #if ((__ARM_ARCH_7M__ == 1U) || \
70  (__ARM_ARCH_7EM__ == 1U) || \
71  (__ARM_ARCH_8M_BASE__ == 1U) || \
72  (__ARM_ARCH_8M_MAIN__ == 1U)) || \
73  (__ARM_ARCH_7A__ == 1U)
74 #define MBED_EXCLUSIVE_ACCESS 1U
75 #define MBED_EXCLUSIVE_ACCESS_THUMB1 (__ARM_ARCH_8M_BASE__ == 1U)
76 #ifdef __ICCARM__
77 #if __CPU_MODE__ == 2
78 #define MBED_EXCLUSIVE_ACCESS_ARM 1U
79 #else
80 #define MBED_EXCLUSIVE_ACCESS_ARM 0U
81 #endif
82 #else
83 #if !defined (__thumb__)
84 #define MBED_EXCLUSIVE_ACCESS_ARM 1U
85 #else
86 #define MBED_EXCLUSIVE_ACCESS_ARM 0U
87 #endif
88 #endif
89 #elif (__ARM_ARCH_6M__ == 1U)
90 #define MBED_EXCLUSIVE_ACCESS 0U
91 #elif defined __aarch64__ // Apple M1 Mac
92 #define MBED_EXCLUSIVE_ACCESS 0U
93 #else
94 #error "Unknown ARM architecture for exclusive access"
95 #endif // __ARM_ARCH_xxx
96 #else // __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
97 // We seem to be compiling for non-ARM, so stick with the critical-section implementations
98 #define MBED_EXCLUSIVE_ACCESS 0U
99 #endif
100 #else
101 #define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
102 #endif
103 #endif
104 
105 #if MBED_EXCLUSIVE_ACCESS
106 #define MBED_INLINE_IF_EX inline
107 #else
108 #define MBED_INLINE_IF_EX
109 #endif
110 
111 /**
112  * A lock-free, primitive atomic flag.
113  *
114  * Emulates C11's atomic_flag. The flag is initially in an indeterminate state
115  * unless explicitly initialized with CORE_UTIL_ATOMIC_FLAG_INIT.
116  */
117 typedef struct core_util_atomic_flag {
118  uint8_t _flag;
119 } core_util_atomic_flag;
120 
121 /**
122  * Initializer for a core_util_atomic_flag.
123  *
124  * Example:
125  * ~~~
126  * core_util_atomic_flag in_progress = CORE_UTIL_ATOMIC_FLAG_INIT;
127  * ~~~
128  */
129 #define CORE_UTIL_ATOMIC_FLAG_INIT { 0 }
130 
131 /**
132  * Atomic test and set.
133  *
134  * Atomically tests then sets the flag to true, returning the previous value.
135  *
136  * @param flagPtr Target flag being tested and set.
137  * @return The previous value.
138  */
139 MBED_INLINE_IF_EX bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr);
140 
141 /** \copydoc core_util_atomic_flag_test_and_set
142  * @param order memory ordering constraint
143  */
144 MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order);
145 
146 /**
147  * Atomic clear.
148  *
149  * @param flagPtr Target flag being cleared.
150  * @param flagPtr Target flag being cleared.
151  */
152 MBED_FORCEINLINE void core_util_atomic_flag_clear(volatile core_util_atomic_flag *flagPtr);
153 /** \copydoc core_util_atomic_flag_clear
154  * @param order memory ordering constraint
155  */
156 MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order);
157 
158 
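/* Illustrative sketch only (init_done, init_if_needed and do_init are hypothetical):
 * the flag's test-and-set can guard one-time initialization, because only the first
 * caller sees a previous value of false.
 * ~~~
 * static core_util_atomic_flag init_done = CORE_UTIL_ATOMIC_FLAG_INIT;
 *
 * void init_if_needed(void)
 * {
 *     if (!core_util_atomic_flag_test_and_set(&init_done)) {
 *         do_init();   // runs exactly once
 *     }
 * }
 * ~~~
 */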
159 /**
160  * Atomic compare and set. It compares the contents of a memory location to a
161  * given value and, only if they are the same, modifies the contents of that
162  * memory location to a given new value. This is done as a single atomic
163  * operation. The atomicity guarantees that the new value is calculated based on
164  * up-to-date information; if the value had been updated by another thread in
165  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
166  *
167  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
168  * you to the article on compare-and-swap].
169  *
170  * @param ptr The target memory location.
171  * @param[in,out] expectedCurrentValue A pointer to some location holding the
172  * expected current value of the data being set atomically.
173  * The computed 'desiredValue' should be a function of this current value.
174  * @note: This is an in-out parameter. In the
175  * failure case of atomic_cas (where the
176  * destination isn't set), the pointee of expectedCurrentValue is
177  * updated with the current value.
178  * @param[in] desiredValue The new value computed based on '*expectedCurrentValue'.
179  *
180  * @return true if the memory location was atomically
181  * updated with the desired value (after verifying
182  * that it contained the expectedCurrentValue),
183  * false otherwise. In the failure case,
184  * expectedCurrentValue is updated with the new
185  * value of the target memory location.
186  *
187  * pseudocode:
188  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
189  *     if *p != *old {
190  *         *old = *p
191  *         return false
192  *     }
193  *     *p = new
194  *     return true
195  * }
196  *
197  * @note: In the failure case (where the destination isn't set), the value
198  * pointed to by expectedCurrentValue is instead updated with the current value.
199  * This property helps in writing concise code for the following incr:
200  *
201  * function incr(p : pointer to int, a : int) returns int {
202  *     done = false
203  *     value = atomic_load(p)
204  *     while not done {
205  *         done = atomic_cas(p, &value, value + a) // value gets updated automatically until success
206  *     }
207  *     return value + a
208  * }
209  *
210  * However, if the call is made in a loop like this, the atomic_compare_exchange_weak
211  * functions are to be preferred.
212  *
213  * @note: This corresponds to the C11 "atomic_compare_exchange_strong" - it
214  * always succeeds if the current value is expected, as per the pseudocode
215  * above; it will not spuriously fail as "atomic_compare_exchange_weak" may.
216  * This call would normally be used when a fail return does not retry.
217  */
218 MBED_INLINE_IF_EX bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
219 
220 /** \copydoc core_util_atomic_cas_u8
221  * @param success memory ordering constraint for successful exchange
222  * @param failure memory ordering constraint for failure
223  */
224 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
225 
226 /** \copydoc core_util_atomic_cas_u8 */
227 MBED_INLINE_IF_EX bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
228 
229 /** \copydoc core_util_atomic_cas_explicit_u8 */
230 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
231 
232 /** \copydoc core_util_atomic_cas_u8 */
233 MBED_INLINE_IF_EX bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
234 
235 /** \copydoc core_util_atomic_cas_explicit_u8 */
236 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
237 
238 /** \copydoc core_util_atomic_cas_u8 */
239 bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
240 
241 /** \copydoc core_util_atomic_cas_explicit_u8 */
242 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
243 
244 /** \copydoc core_util_atomic_cas_u8 */
245 MBED_FORCEINLINE bool core_util_atomic_cas_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
246 
247 /** \copydoc core_util_atomic_cas_explicit_u8 */
248 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
249 
250 /** \copydoc core_util_atomic_cas_u8 */
251 MBED_FORCEINLINE bool core_util_atomic_cas_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
252 
253 /** \copydoc core_util_atomic_cas_explicit_u8 */
254 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
255 
256 /** \copydoc core_util_atomic_cas_u8 */
257 MBED_FORCEINLINE bool core_util_atomic_cas_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
258 
259 /** \copydoc core_util_atomic_cas_explicit_u8 */
260 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
261 
262 /** \copydoc core_util_atomic_cas_u8 */
263 MBED_FORCEINLINE bool core_util_atomic_cas_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
264 
265 /** \copydoc core_util_atomic_cas_explicit_u8 */
266 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
267 
268 /** \copydoc core_util_atomic_cas_u8 */
269 MBED_FORCEINLINE bool core_util_atomic_cas_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
270 
271 /** \copydoc core_util_atomic_cas_explicit_u8 */
272 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
273 
274 /** \copydoc core_util_atomic_cas_u8 */
275 inline bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
276 
277 /** \copydoc core_util_atomic_cas_explicit_u8 */
278 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
279 
280 
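/* Illustrative sketch only (update_max is a hypothetical helper): the strong CAS can
 * implement an "atomic maximum". On failure, expectedCurrentValue is refreshed with
 * the latest contents, so the loop simply re-tests against the new value.
 * ~~~
 * void update_max(volatile uint32_t *maxPtr, uint32_t sample)
 * {
 *     uint32_t current = core_util_atomic_load_u32(maxPtr);
 *     while (sample > current) {
 *         if (core_util_atomic_cas_u32(maxPtr, &current, sample)) {
 *             break;   // stored our larger sample
 *         }
 *         // failure refreshed 'current'; the loop condition decides whether to retry
 *     }
 * }
 * ~~~
 */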
281 
282 /**
283  * Atomic compare and set. It compares the contents of a memory location to a
284  * given value and, only if they are the same, modifies the contents of that
285  * memory location to a given new value. This is done as a single atomic
286  * operation. The atomicity guarantees that the new value is calculated based on
287  * up-to-date information; if the value had been updated by another thread in
288  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
289  *
290  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
291  * you to the article on compare-and-swap].
292  *
293  * @param ptr The target memory location.
294  * @param[in,out] expectedCurrentValue A pointer to some location holding the
295  * expected current value of the data being set atomically.
296  * The computed 'desiredValue' should be a function of this current value.
297  * @note: This is an in-out parameter. In the
298  * failure case of atomic_cas (where the
299  * destination isn't set), the pointee of expectedCurrentValue is
300  * updated with the current value.
301  * @param[in] desiredValue The new value computed based on '*expectedCurrentValue'.
302  *
303  * @return true if the memory location was atomically
304  * updated with the desired value (after verifying
305  * that it contained the expectedCurrentValue),
306  * false otherwise. In the failure case,
307  * expectedCurrentValue is updated with the new
308  * value of the target memory location.
309  *
310  * pseudocode:
311  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
312  *     if *p != *old or spurious failure {
313  *         *old = *p
314  *         return false
315  *     }
316  *     *p = new
317  *     return true
318  * }
319  *
320  * @note: In the failure case (where the destination isn't set), the value
321  * pointed to by expectedCurrentValue is instead updated with the current value.
322  * This property helps in writing concise code for the following incr:
323  *
324  * function incr(p : pointer to int, a : int) returns int {
325  *     done = false
326  *     value = *p // This fetch operation need not be atomic.
327  *     while not done {
328  *         done = atomic_compare_exchange_weak(p, &value, value + a) // value gets updated automatically until success
329  *     }
330  *     return value + a
331  * }
332  *
333  * @note: This corresponds to the C11 "atomic_compare_exchange_weak" - unlike
334  * "atomic_compare_exchange_strong", it may spuriously fail even if the current
335  * value is as expected, as per the pseudocode above.
336  * This call would normally be used when a fail return will cause a retry anyway,
337  * saving the need for an extra loop inside the cas operation.
338  */
339 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
340 
341 /** \copydoc core_util_atomic_compare_exchange_weak_u8
342  * @param success memory ordering constraint for successful exchange
343  * @param failure memory ordering constraint for failure
344  */
345 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
346 
347 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
348 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
349 
350 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
351 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
352 
353 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
354 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
355 
356 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
357 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
358 
359 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
360 bool core_util_atomic_compare_exchange_weak_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
361 
362 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
363 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
364 
365 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
366 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
367 
368 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
369 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
370 
371 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
372 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
373 
374 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
375 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
376 
377 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
378 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
379 
380 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
381 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
382 
383 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
384 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
385 
386 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
387 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
388 
389 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
390 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
391 
392 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
393 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
394 
395 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
396 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
397 
398 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
399 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
400 
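/* Illustrative sketch only: the incr pseudocode above written against this API for a
 * uint32_t, with p and a as in the pseudocode (core_util_atomic_fetch_add_u32 would
 * normally be used instead; this just shows the retry pattern).
 * ~~~
 * uint32_t value = core_util_atomic_load_u32(p);
 * bool done = false;
 * while (!done) {
 *     // on failure, 'value' is reloaded with the current contents of *p
 *     done = core_util_atomic_compare_exchange_weak_u32(p, &value, value + a);
 * }
 * // 'value' now holds the contents *p had immediately before our addition
 * ~~~
 */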
401 
402 /**
403  * Atomic load.
404  * @param valuePtr Target memory location.
405  * @return The loaded value.
406  */
407 MBED_FORCEINLINE uint8_t core_util_atomic_load_u8(const volatile uint8_t *valuePtr);
408 
409 /**
410  * \copydoc core_util_atomic_load_u8
411  * @param order memory ordering constraint
412  */
413 MBED_FORCEINLINE uint8_t core_util_atomic_load_explicit_u8(const volatile uint8_t *valuePtr, mbed_memory_order order);
414 
415 /** \copydoc core_util_atomic_load_u8 */
416 MBED_FORCEINLINE uint16_t core_util_atomic_load_u16(const volatile uint16_t *valuePtr);
417 
418 /** \copydoc core_util_atomic_load_explicit_u8 */
419 MBED_FORCEINLINE uint16_t core_util_atomic_load_explicit_u16(const volatile uint16_t *valuePtr, mbed_memory_order order);
420 
421 /** \copydoc core_util_atomic_load_u8 */
422 MBED_FORCEINLINE uint32_t core_util_atomic_load_u32(const volatile uint32_t *valuePtr);
423 
424 /** \copydoc core_util_atomic_load_explicit_u8 */
425 MBED_FORCEINLINE uint32_t core_util_atomic_load_explicit_u32(const volatile uint32_t *valuePtr, mbed_memory_order order);
426 
427 /** \copydoc core_util_atomic_load_u8 */
428 uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr);
429 
430 /** \copydoc core_util_atomic_load_explicit_u8 */
431 MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64(const volatile uint64_t *valuePtr, mbed_memory_order order);
432 
433 /** \copydoc core_util_atomic_load_u8 */
434 MBED_FORCEINLINE int8_t core_util_atomic_load_s8(const volatile int8_t *valuePtr);
435 
436 /** \copydoc core_util_atomic_load_explicit_u8 */
437 MBED_FORCEINLINE int8_t core_util_atomic_load_explicit_s8(const volatile int8_t *valuePtr, mbed_memory_order order);
438 
439 /** \copydoc core_util_atomic_load_u8 */
440 MBED_FORCEINLINE int16_t core_util_atomic_load_s16(const volatile int16_t *valuePtr);
441 
442 /** \copydoc core_util_atomic_load_explicit_u8 */
443 MBED_FORCEINLINE int16_t core_util_atomic_load_explicit_s16(const volatile int16_t *valuePtr, mbed_memory_order order);
444 
445 /** \copydoc core_util_atomic_load_u8 */
446 MBED_FORCEINLINE int32_t core_util_atomic_load_s32(const volatile int32_t *valuePtr);
447 
448 /** \copydoc core_util_atomic_load_explicit_u8 */
449 MBED_FORCEINLINE int32_t core_util_atomic_load_explicit_s32(const volatile int32_t *valuePtr, mbed_memory_order order);
450 
451 /** \copydoc core_util_atomic_load_u8 */
452 MBED_FORCEINLINE int64_t core_util_atomic_load_s64(const volatile int64_t *valuePtr);
453 
454 /** \copydoc core_util_atomic_load_explicit_u8 */
455 MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64(const volatile int64_t *valuePtr, MBED_UNUSED mbed_memory_order order);
456 
457 /** \copydoc core_util_atomic_load_u8 */
458 MBED_FORCEINLINE bool core_util_atomic_load_bool(const volatile bool *valuePtr);
459 
460 /** \copydoc core_util_atomic_load_explicit_u8 */
461 MBED_FORCEINLINE bool core_util_atomic_load_explicit_bool(const volatile bool *valuePtr, mbed_memory_order order);
462 
463 /** \copydoc core_util_atomic_load_u8 */
464 MBED_FORCEINLINE void *core_util_atomic_load_ptr(void *const volatile *valuePtr);
465 
466 /** \copydoc core_util_atomic_load_explicit_u8 */
467 MBED_FORCEINLINE void *core_util_atomic_load_explicit_ptr(void *const volatile *valuePtr, mbed_memory_order order);
468 
469 /**
470  * Atomic store.
471  * @param valuePtr Target memory location.
472  * @param desiredValue The value to store.
473  */
474 MBED_FORCEINLINE void core_util_atomic_store_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
475 
476 /**
477  * \copydoc core_util_atomic_store_u8
478  * @param order memory ordering constraint
479  */
480 MBED_FORCEINLINE void core_util_atomic_store_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
481 
482 /** \copydoc core_util_atomic_store_u8 */
483 MBED_FORCEINLINE void core_util_atomic_store_u16(volatile uint16_t *valuePtr, uint16_t desiredValue);
484 
485 /** \copydoc core_util_atomic_store_explicit_u8 */
486 MBED_FORCEINLINE void core_util_atomic_store_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
487 
488 /** \copydoc core_util_atomic_store_u8 */
489 MBED_FORCEINLINE void core_util_atomic_store_u32(volatile uint32_t *valuePtr, uint32_t desiredValue);
490 
491 /** \copydoc core_util_atomic_store_explicit_u8 */
492 MBED_FORCEINLINE void core_util_atomic_store_explicit_u32(volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
493 
494 /** \copydoc core_util_atomic_store_u8 */
495 void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);
496 
497 /** \copydoc core_util_atomic_store_explicit_u8 */
498 MBED_FORCEINLINE void core_util_atomic_store_explicit_u64(volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
499 
500 /** \copydoc core_util_atomic_store_u8 */
501 MBED_FORCEINLINE void core_util_atomic_store_s8(volatile int8_t *valuePtr, int8_t desiredValue);
502 
503 /** \copydoc core_util_atomic_store_explicit_u8 */
504 MBED_FORCEINLINE void core_util_atomic_store_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
505 
506 /** \copydoc core_util_atomic_store_u8 */
507 MBED_FORCEINLINE void core_util_atomic_store_s16(volatile int16_t *valuePtr, int16_t desiredValue);
508 
509 /** \copydoc core_util_atomic_store_explicit_u8 */
510 MBED_FORCEINLINE void core_util_atomic_store_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
511 
512 /** \copydoc core_util_atomic_store_u8 */
513 MBED_FORCEINLINE void core_util_atomic_store_s32(volatile int32_t *valuePtr, int32_t desiredValue);
514 
515 /** \copydoc core_util_atomic_store_explicit_u8 */
516 MBED_FORCEINLINE void core_util_atomic_store_explicit_s32(volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
517 
518 /** \copydoc core_util_atomic_store_u8 */
519 MBED_FORCEINLINE void core_util_atomic_store_s64(volatile int64_t *valuePtr, int64_t desiredValue);
520 
521 /** \copydoc core_util_atomic_store_explicit_u8 */
522 MBED_FORCEINLINE void core_util_atomic_store_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
523 
524 /** \copydoc core_util_atomic_store_u8 */
525 MBED_FORCEINLINE void core_util_atomic_store_bool(volatile bool *valuePtr, bool desiredValue);
526 
527 /** \copydoc core_util_atomic_store_explicit_u8 */
528 MBED_FORCEINLINE void core_util_atomic_store_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
529 
530 /** \copydoc core_util_atomic_store_u8 */
531 MBED_FORCEINLINE void core_util_atomic_store_ptr(void *volatile *valuePtr, void *desiredValue);
532 
533 /** \copydoc core_util_atomic_store_explicit_u8 */
534 MBED_FORCEINLINE void core_util_atomic_store_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
535 
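/* Illustrative sketch only (message_ready, prepare and consume are hypothetical): a
 * release store publishes data that an acquire load on another thread then safely
 * observes.
 * ~~~
 * static volatile bool message_ready;
 *
 * // producer
 * prepare(message);
 * core_util_atomic_store_explicit_bool(&message_ready, true, mbed_memory_order_release);
 *
 * // consumer
 * if (core_util_atomic_load_explicit_bool(&message_ready, mbed_memory_order_acquire)) {
 *     consume(message);   // guaranteed to see the prepared data
 * }
 * ~~~
 */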
536 /**
537  * Atomic exchange.
538  * @param valuePtr Target memory location.
539  * @param desiredValue The value to store.
540  * @return The previous value.
541  */
542 MBED_INLINE_IF_EX uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
543 
544 /** \copydoc core_util_atomic_exchange_u8
545  * @param order memory ordering constraint
546  */
547 MBED_FORCEINLINE uint8_t core_util_atomic_exchange_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
548 
549 /** \copydoc core_util_atomic_exchange_u8 */
550 MBED_INLINE_IF_EX uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue);
551 
552 /** \copydoc core_util_atomic_exchange_explicit_u8 */
553 MBED_FORCEINLINE uint16_t core_util_atomic_exchange_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
554 
555 /** \copydoc core_util_atomic_exchange_u8 */
556 MBED_INLINE_IF_EX uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue);
557 
558 /** \copydoc core_util_atomic_exchange_explicit_u8 */
559 MBED_FORCEINLINE uint32_t core_util_atomic_exchange_explicit_u32(volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
560 
561 /** \copydoc core_util_atomic_exchange_u8 */
562 uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);
563 
564 /** \copydoc core_util_atomic_exchange_explicit_u8 */
565 MBED_FORCEINLINE uint64_t core_util_atomic_exchange_explicit_u64(volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
566 
567 /** \copydoc core_util_atomic_exchange_u8 */
568 MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8(volatile int8_t *valuePtr, int8_t desiredValue);
569 
570 /** \copydoc core_util_atomic_exchange_explicit_u8 */
571 MBED_FORCEINLINE int8_t core_util_atomic_exchange_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
572 
573 /** \copydoc core_util_atomic_exchange_u8 */
574 MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16(volatile int16_t *valuePtr, int16_t desiredValue);
575 
576 /** \copydoc core_util_atomic_exchange_explicit_u8 */
577 MBED_FORCEINLINE int16_t core_util_atomic_exchange_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
578 
579 /** \copydoc core_util_atomic_exchange_u8 */
580 MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32(volatile int32_t *valuePtr, int32_t desiredValue);
581 
582 /** \copydoc core_util_atomic_exchange_explicit_u8 */
583 MBED_FORCEINLINE int32_t core_util_atomic_exchange_explicit_s32(volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
584 
585 /** \copydoc core_util_atomic_exchange_u8 */
586 MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64(volatile int64_t *valuePtr, int64_t desiredValue);
587 
588 /** \copydoc core_util_atomic_exchange_explicit_u8 */
589 MBED_FORCEINLINE int64_t core_util_atomic_exchange_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
590 
591 /** \copydoc core_util_atomic_exchange_u8 */
592 MBED_FORCEINLINE bool core_util_atomic_exchange_bool(volatile bool *valuePtr, bool desiredValue);
593 
594 /** \copydoc core_util_atomic_exchange_explicit_u8 */
595 MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
596 
597 /** \copydoc core_util_atomic_exchange_u8 */
598 inline void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue);
599 
600 /** \copydoc core_util_atomic_exchange_explicit_u8 */
601 MBED_FORCEINLINE void *core_util_atomic_exchange_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
602 
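/* Illustrative sketch only (pending_buffer is a hypothetical shared slot): an atomic
 * exchange claims whatever is in the slot and leaves NULL behind, so only one caller
 * ever owns the buffer.
 * ~~~
 * static void *volatile pending_buffer;
 *
 * void *take_buffer(void)
 * {
 *     return core_util_atomic_exchange_ptr(&pending_buffer, NULL);
 * }
 * ~~~
 */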
603 /**
604  * Atomic increment.
605  * @param valuePtr Target memory location being incremented.
606  * @param delta The amount being incremented.
607  * @return The new incremented value.
608  */
609 MBED_INLINE_IF_EX uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta);
610 
611 /** \copydoc core_util_atomic_incr_u8 */
612 MBED_INLINE_IF_EX uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta);
613 
614 /** \copydoc core_util_atomic_incr_u8 */
615 MBED_INLINE_IF_EX uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta);
616 
617 /** \copydoc core_util_atomic_incr_u8 */
618 uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta);
619 
620 /** \copydoc core_util_atomic_incr_u8 */
621 MBED_FORCEINLINE int8_t core_util_atomic_incr_s8(volatile int8_t *valuePtr, int8_t delta);
622 
623 /** \copydoc core_util_atomic_incr_u8 */
624 MBED_FORCEINLINE int16_t core_util_atomic_incr_s16(volatile int16_t *valuePtr, int16_t delta);
625 
626 /** \copydoc core_util_atomic_incr_u8 */
627 MBED_FORCEINLINE int32_t core_util_atomic_incr_s32(volatile int32_t *valuePtr, int32_t delta);
628 
629 /** \copydoc core_util_atomic_incr_u8 */
630 MBED_FORCEINLINE int64_t core_util_atomic_incr_s64(volatile int64_t *valuePtr, int64_t delta);
631 
632 /** \copydoc core_util_atomic_incr_u8 */
633 inline void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
634 
635 /**
636  * Atomic decrement.
637  * @param valuePtr Target memory location being decremented.
638  * @param delta The amount being decremented.
639  * @return The new decremented value.
640  */
641 MBED_INLINE_IF_EX uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta);
642 
643 /** \copydoc core_util_atomic_decr_u8 */
644 MBED_INLINE_IF_EX uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta);
645 
646 /** \copydoc core_util_atomic_decr_u8 */
647 MBED_INLINE_IF_EX uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta);
648 
649 /** \copydoc core_util_atomic_decr_u8 */
650 uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta);
651 
652 /** \copydoc core_util_atomic_decr_u8 */
653 MBED_FORCEINLINE int8_t core_util_atomic_decr_s8(volatile int8_t *valuePtr, int8_t delta);
654 
655 /** \copydoc core_util_atomic_decr_u8 */
656 MBED_FORCEINLINE int16_t core_util_atomic_decr_s16(volatile int16_t *valuePtr, int16_t delta);
657 
658 /** \copydoc core_util_atomic_decr_u8 */
659 MBED_FORCEINLINE int32_t core_util_atomic_decr_s32(volatile int32_t *valuePtr, int32_t delta);
660 
661 /** \copydoc core_util_atomic_decr_u8 */
662 MBED_FORCEINLINE int64_t core_util_atomic_decr_s64(volatile int64_t *valuePtr, int64_t delta);
663 
664 /** \copydoc core_util_atomic_decr_u8 */
665 inline void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
666 
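/* Illustrative sketch only (the object type, its refs field and obj_free are
 * hypothetical): because incr/decr return the new value, a reference count can detect
 * the final release without extra locking.
 * ~~~
 * void obj_ref(struct obj *o)
 * {
 *     core_util_atomic_incr_u32(&o->refs, 1);
 * }
 *
 * void obj_unref(struct obj *o)
 * {
 *     if (core_util_atomic_decr_u32(&o->refs, 1) == 0) {
 *         obj_free(o);   // we dropped the last reference
 *     }
 * }
 * ~~~
 */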
667 /**
668  * Atomic add.
669  * @param valuePtr Target memory location being modified.
670  * @param arg The argument for the addition.
671  * @return The original value.
672  */
673 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_add_u8(volatile uint8_t *valuePtr, uint8_t arg);
674 
675 /** \copydoc core_util_atomic_fetch_add_u8
676  * @param order memory ordering constraint
677  */
678 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_add_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
679 
680 /** \copydoc core_util_atomic_fetch_add_u8 */
681 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_add_u16(volatile uint16_t *valuePtr, uint16_t arg);
682 
683 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
684 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_add_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
685 
686 /** \copydoc core_util_atomic_fetch_add_u8 */
687 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_add_u32(volatile uint32_t *valuePtr, uint32_t arg);
688 
689 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
690 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_add_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
691 
692 /** \copydoc core_util_atomic_fetch_add_u8 */
693 uint64_t core_util_atomic_fetch_add_u64(volatile uint64_t *valuePtr, uint64_t arg);
694 
695 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
696 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_add_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
697 
698 /** \copydoc core_util_atomic_fetch_add_u8 */
699 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_s8(volatile int8_t *valuePtr, int8_t arg);
700 
701 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
702 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_explicit_s8(volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
703 
704 /** \copydoc core_util_atomic_fetch_add_u8 */
705 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_s16(volatile int16_t *valuePtr, int16_t arg);
706 
707 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
708 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_explicit_s16(volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
709 
710 /** \copydoc core_util_atomic_fetch_add_u8 */
711 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_s32(volatile int32_t *valuePtr, int32_t arg);
712 
713 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
714 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
715 
716 /** \copydoc core_util_atomic_fetch_add_u8 */
717 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_s64(volatile int64_t *valuePtr, int64_t arg);
718 
719 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
720 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
721 
722 /** \copydoc core_util_atomic_fetch_add_u8 */
723 MBED_FORCEINLINE void *core_util_atomic_fetch_add_ptr(void *volatile *valuePtr, ptrdiff_t arg);
724 
725 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
726 MBED_FORCEINLINE void *core_util_atomic_fetch_add_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
727 
728 /**
729  * Atomic subtract.
730  * @param valuePtr Target memory location being modified.
731  * @param arg The argument for the subtraction.
732  * @return The original value.
733  */
734 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_sub_u8(volatile uint8_t *valuePtr, uint8_t arg);
735 
736 /** \copydoc core_util_atomic_fetch_sub_u8
737  * @param order memory ordering constraint
738  */
739 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_sub_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
740 
741 /** \copydoc core_util_atomic_fetch_sub_u8 */
742 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_sub_u16(volatile uint16_t *valuePtr, uint16_t arg);
743 
744 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
745 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_sub_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
746 
747 /** \copydoc core_util_atomic_fetch_sub_u8 */
748 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_sub_u32(volatile uint32_t *valuePtr, uint32_t arg);
749 
750 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
751 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_sub_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
752 
753 /** \copydoc core_util_atomic_fetch_sub_u8 */
754 uint64_t core_util_atomic_fetch_sub_u64(volatile uint64_t *valuePtr, uint64_t arg);
755 
756 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
757 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_sub_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
758 
759 /** \copydoc core_util_atomic_fetch_sub_u8 */
760 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_s8(volatile int8_t *valuePtr, int8_t arg);
761 
762 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
763 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_explicit_s8(volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
764 
765 /** \copydoc core_util_atomic_fetch_sub_u8 */
766 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_s16(volatile int16_t *valuePtr, int16_t arg);
767 
768 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
769 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_explicit_s16(volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
770 
771 /** \copydoc core_util_atomic_fetch_sub_u8 */
772 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_s32(volatile int32_t *valuePtr, int32_t arg);
773 
774 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
775 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
776 
777 /** \copydoc core_util_atomic_fetch_sub_u8 */
778 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_s64(volatile int64_t *valuePtr, int64_t arg);
779 
780 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
781 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
782 
783 /** \copydoc core_util_atomic_fetch_sub_u8 */
784 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_ptr(void *volatile *valuePtr, ptrdiff_t arg);
785 
786 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
787 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
788 
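/* Illustrative sketch only (counter is a hypothetical variable): note the difference
 * in return value between the fetch_ functions (value before the operation) and
 * incr/decr (value after the operation).
 * ~~~
 * static volatile uint32_t counter;
 *
 * uint32_t previous = core_util_atomic_fetch_add_u32(&counter, 5); // value before the add
 * uint32_t updated  = core_util_atomic_incr_u32(&counter, 5);      // value after the add
 * ~~~
 */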
789 /**
790  * Atomic bitwise and.
791  * @param valuePtr Target memory location being modified.
792  * @param arg The argument for the bitwise operation.
793  * @return The original value.
794  */
795 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg);
796 
797 /** \copydoc core_util_atomic_fetch_and_u8
798  * @param order memory ordering constraint
799  */
800 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_and_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
801 
802 /** \copydoc core_util_atomic_fetch_and_u8 */
803 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg);
804 
805 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
806 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_and_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
807 
808 /** \copydoc core_util_atomic_fetch_and_u8 */
809 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg);
810 
811 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
812 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_and_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
813 
814 /** \copydoc core_util_atomic_fetch_and_u8 */
815 uint64_t core_util_atomic_fetch_and_u64(volatile uint64_t *valuePtr, uint64_t arg);
816 
817 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
818 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_and_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
819 
820 /**
821  * Atomic bitwise inclusive or.
822  * @param valuePtr Target memory location being modified.
823  * @param arg The argument for the bitwise operation.
824  * @return The original value.
825  */
826 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg);
827 
828 /** \copydoc core_util_atomic_fetch_or_u8
829  * @param order memory ordering constraint
830  */
831 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_or_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
832 
833 /** \copydoc core_util_atomic_fetch_or_u8 */
834 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg);
835 
836 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
837 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_or_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
838 
839 /** \copydoc core_util_atomic_fetch_or_u8 */
840 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg);
841 
842 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
843 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_or_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
844 
845 /** \copydoc core_util_atomic_fetch_or_u8 */
846 uint64_t core_util_atomic_fetch_or_u64(volatile uint64_t *valuePtr, uint64_t arg);
847 
848 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
849 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_or_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
850 
851 /**
852  * Atomic bitwise exclusive or.
853  * @param valuePtr Target memory location being modified.
854  * @param arg The argument for the bitwise operation.
855  * @return The original value.
856  */
857 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg);
858 
859 /** \copydoc core_util_atomic_fetch_xor_u8
860  * @param order memory ordering constraint
861  */
862 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_xor_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
863 
864 /** \copydoc core_util_atomic_fetch_xor_u8 */
865 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg);
866 
867 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
868 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_xor_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
869 
870 /** \copydoc core_util_atomic_fetch_xor_u8 */
871 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg);
872 
873 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
874 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_xor_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
875 
876 /** \copydoc core_util_atomic_fetch_xor_u8 */
877 uint64_t core_util_atomic_fetch_xor_u64(volatile uint64_t *valuePtr, uint64_t arg);
878 
879 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
880 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_xor_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
881 
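/* Illustrative sketch only (the flag byte and bit assignments are hypothetical): the
 * bitwise fetch operations set, clear and toggle bits in a shared flag word atomically,
 * each returning the value seen before the modification.
 * ~~~
 * static volatile uint8_t flags;
 *
 * core_util_atomic_fetch_or_u8(&flags, 0x01u);                    // set bit 0
 * core_util_atomic_fetch_and_u8(&flags, (uint8_t)~0x02u);         // clear bit 1
 * uint8_t before = core_util_atomic_fetch_xor_u8(&flags, 0x04u);  // toggle bit 2
 * ~~~
 */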
882 #ifdef __cplusplus
883 } // extern "C"
884 
885 #include <mstd_type_traits>
886 
887 // For each operation, two overloaded templates:
888 // * one for non-pointer types, which has implementations based on the
889 // u8/u16/u32/u64/s8/s16/s32/s64/bool functions above. No base implementation.
890 // * one for any pointer type, generically implemented based on ptr function above.
891 //
892 // Templates use standard C/C++ naming - old incr/decr/cas forms are not provided.
893 //
894 // The `type_identity_t<T>` used here means "same type as T", blocking template
895 // argument deduction. It forces type selection based on the type of the actual pointer
896 // to the atomic. If just `T` was used, the following would be ambiguous:
897 // core_util_atomic_store(&my_uint8_t, 1) - it wouldn't be able to select between T
898 // being uint8_t and int.
899 
900 /** \copydoc core_util_atomic_load_u8 */
901 template<typename T> T core_util_atomic_load(const volatile T *valuePtr) noexcept;
902 /** \copydoc core_util_atomic_load_u8 */
903 template<typename T> T core_util_atomic_load(const T *valuePtr) noexcept;
904 /** \copydoc core_util_atomic_store_u8 */
905 template<typename T> void core_util_atomic_store(volatile T *valuePtr, mstd::type_identity_t<T> desiredValue) noexcept;
906 /** \copydoc core_util_atomic_store_u8 */
907 template<typename T> void core_util_atomic_store(T *valuePtr, mstd::type_identity_t<T> desiredValue) noexcept;
908 /** \copydoc core_util_atomic_exchange_u8 */
909 template<typename T> T core_util_atomic_exchange(volatile T *ptr, mstd::type_identity_t<T> desiredValue) noexcept;
910 /** \copydoc core_util_atomic_cas_u8 */
911 template<typename T> bool core_util_atomic_compare_exchange_strong(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue) noexcept;
912 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
913 template<typename T> bool core_util_atomic_compare_exchange_weak(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue) noexcept;
914 /** \copydoc core_util_atomic_fetch_add_u8 */
915 template<typename T> T core_util_atomic_fetch_add(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
916 /** \copydoc core_util_atomic_fetch_sub_u8 */
917 template<typename T> T core_util_atomic_fetch_sub(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
918 /** \copydoc core_util_atomic_fetch_and_u8 */
919 template<typename T> T core_util_atomic_fetch_and(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
920 /** \copydoc core_util_atomic_fetch_or_u8 */
921 template<typename T> T core_util_atomic_fetch_or(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
922 /** \copydoc core_util_atomic_fetch_xor_u8 */
923 template<typename T> T core_util_atomic_fetch_xor(volatile T *valuePtr, mstd::type_identity_t<T> arg) noexcept;
924 
925 /** \copydoc core_util_atomic_load_explicit_u8 */
926 template<typename T> T core_util_atomic_load_explicit(const volatile T *valuePtr, mbed_memory_order order) noexcept;
927 /** \copydoc core_util_atomic_load_explicit_u8 */
928 template<typename T> T core_util_atomic_load_explicit(const T *valuePtr, mbed_memory_order order) noexcept;
929 /** \copydoc core_util_atomic_store_explicit_u8 */
930 template<typename T> void core_util_atomic_store_explicit(volatile T *valuePtr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
931 /** \copydoc core_util_atomic_store_explicit_u8 */
932 template<typename T> void core_util_atomic_store_explicit(T *valuePtr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
933 /** \copydoc core_util_atomic_exchange_explicit_u8 */
934 template<typename T> T core_util_atomic_exchange_explicit(volatile T *ptr, mstd::type_identity_t<T> desiredValue, mbed_memory_order order) noexcept;
935 /** \copydoc core_util_atomic_cas_explicit_u8 */
936 template<typename T> bool core_util_atomic_compare_exchange_strong_explicit(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
937 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
938 template<typename T> bool core_util_atomic_compare_exchange_weak_explicit(volatile T *ptr, mstd::type_identity_t<T> *expectedCurrentValue, mstd::type_identity_t<T> desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
939 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
940 template<typename T> T core_util_atomic_fetch_add_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
941 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
942 template<typename T> T core_util_atomic_fetch_sub_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
943 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
944 template<typename T> T core_util_atomic_fetch_and_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
945 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
946 template<typename T> T core_util_atomic_fetch_or_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
947 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
948 template<typename T> T core_util_atomic_fetch_xor_explicit(volatile T *valuePtr, mstd::type_identity_t<T> arg, mbed_memory_order order) noexcept;
949 
950 /** \copydoc core_util_atomic_load_ptr */
951 template<typename T> inline T *core_util_atomic_load(T *const volatile *valuePtr) noexcept;
952 /** \copydoc core_util_atomic_load_ptr */
953 template<typename T> inline T *core_util_atomic_load(T *const *valuePtr) noexcept;
954 /** \copydoc core_util_atomic_store_ptr */
955 template<typename T> inline void core_util_atomic_store(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
956 /** \copydoc core_util_atomic_store_ptr */
957 template<typename T> inline void core_util_atomic_store(T **valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
958 /** \copydoc core_util_atomic_exchange_ptr */
959 template<typename T> inline T *core_util_atomic_exchange(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue) noexcept;
960 /** \copydoc core_util_atomic_cas_ptr */
961 template<typename T> inline bool core_util_atomic_compare_exchange_strong(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue) noexcept;
962 /** \copydoc core_util_atomic_compare_exchange_weak_ptr */
963 template<typename T> inline bool core_util_atomic_compare_exchange_weak(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue) noexcept;
964 /** \copydoc core_util_atomic_fetch_add_ptr */
965 template<typename T> inline T *core_util_atomic_fetch_add(T *volatile *valuePtr, ptrdiff_t arg) noexcept;
966 /** \copydoc core_util_atomic_fetch_sub_ptr */
967 template<typename T> inline T *core_util_atomic_fetch_sub(T *volatile *valuePtr, ptrdiff_t arg) noexcept;
968 
969 /** \copydoc core_util_atomic_load_explicit_ptr */
970 template<typename T> inline T *core_util_atomic_load_explicit(T *const volatile *valuePtr, mbed_memory_order order) noexcept;
971 /** \copydoc core_util_atomic_load_explicit_ptr */
972 template<typename T> inline T *core_util_atomic_load_explicit(T *const *valuePtr, mbed_memory_order order) noexcept;
973 /** \copydoc core_util_atomic_store_explicit_ptr */
974 template<typename T> inline void core_util_atomic_store_explicit(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
975 /** \copydoc core_util_atomic_store_explicit_ptr */
976 template<typename T> inline void core_util_atomic_store_explicit(T **valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
977 /** \copydoc core_util_atomic_exchange_explicit_ptr */
978 template<typename T> inline T *core_util_atomic_exchange_explicit(T *volatile *valuePtr, mstd::type_identity_t<T> *desiredValue, mbed_memory_order order) noexcept;
979 /** \copydoc core_util_atomic_cas_explicit_ptr */
980 template<typename T> inline bool core_util_atomic_compare_exchange_strong_explicit(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
981 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_ptr */
982 template<typename T> inline bool core_util_atomic_compare_exchange_weak_explicit(T *volatile *ptr, mstd::type_identity_t<T> **expectedCurrentValue, mstd::type_identity_t<T> *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept;
983 /** \copydoc core_util_atomic_fetch_add_explicit_ptr */
984 template<typename T> inline T *core_util_atomic_fetch_add_explicit(T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept;
985 /** \copydoc core_util_atomic_fetch_sub_explicit_ptr */
986 template<typename T> inline T *core_util_atomic_fetch_sub_explicit(T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept;
987 
988 #endif // __cplusplus
989 
990 /**@}*/
991 
992 /**@}*/
993 
994 /* Hide the implementation away */
995 #include "platform/internal/mbed_atomic_impl.h"
996 
997 #endif // __MBED_UTIL_ATOMIC_H__
998 
999 
1000 