mbed_atomic.h
1 
2 /*
3  * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
4  * SPDX-License-Identifier: Apache-2.0
5  *
6  * Licensed under the Apache License, Version 2.0 (the "License"); you may
7  * not use this file except in compliance with the License.
8  * You may obtain a copy of the License at
9  *
10  * http://www.apache.org/licenses/LICENSE-2.0
11  *
12  * Unless required by applicable law or agreed to in writing, software
13  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
14  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15  * See the License for the specific language governing permissions and
16  * limitations under the License.
17  */
18 
19 #ifndef __MBED_UTIL_ATOMIC_H__
20 #define __MBED_UTIL_ATOMIC_H__
21 
22 #include "cmsis.h"
23 
24 #include <stdbool.h>
25 #include <stdint.h>
26 #include <stddef.h>
27 #include "mbed_toolchain.h"
28 
29 /** \addtogroup platform */
30 /** @{*/
31 
32 /**
33  * \defgroup platform_atomic atomic functions
34  *
35  * Atomic functions behave analogously to their C11 and C++11 equivalents - loads have
36  * acquire semantics, stores have release semantics, and atomic operations
37  * are sequentially consistent. Atomicity is enforced both between threads and
38  * interrupt handlers.
39  *
40  * @{
41  */
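/* A minimal usage sketch: a counter shared between an interrupt handler and
 * thread code. `events_seen`, `my_irq_handler` and `events_so_far` are
 * hypothetical application code; only the core_util_atomic_* calls are part
 * of this API.
 * ~~~
 * static uint32_t events_seen;
 *
 * void my_irq_handler(void)
 * {
 *     core_util_atomic_incr_u32(&events_seen, 1); // atomic against thread code
 * }
 *
 * uint32_t events_so_far(void)
 * {
 *     return core_util_atomic_load_u32(&events_seen); // acquire load
 * }
 * ~~~
 */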
42 
43 #ifdef __cplusplus
44 extern "C" {
45 #endif
46 
47 /**
48  * Memory order constraints for atomic operations. Intended semantics
49  * are as per C++11.
50  */
51 typedef enum mbed_memory_order {
52  /* Bits 0 = consume
53  * 1 = acquire (explicitly requested, or implied by seq.cst)
54  * 2 = release (explicitly requested, or implied by seq.cst)
55  * 4 = sequentially consistent
56  */
57  mbed_memory_order_relaxed = 0x00,
58  mbed_memory_order_consume = 0x01,
59  mbed_memory_order_acquire = 0x02,
60  mbed_memory_order_release = 0x04,
61  mbed_memory_order_acq_rel = 0x06,
62  mbed_memory_order_seq_cst = 0x16
63 } mbed_memory_order;
64 
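/* Usage sketch for the explicit orderings: a statistics counter that needs
 * atomicity but no ordering, so relaxed suffices. `dropped_packets` and
 * `note_drop` are hypothetical.
 * ~~~
 * static uint32_t dropped_packets;
 *
 * void note_drop(void)
 * {
 *     core_util_atomic_fetch_add_explicit_u32(&dropped_packets, 1,
 *                                             mbed_memory_order_relaxed);
 * }
 * ~~~
 */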
65 // If the RTX __EXCLUSIVE_ACCESS macro is not defined, derive this from our own architecture macros
66 #ifndef MBED_EXCLUSIVE_ACCESS
67 #ifndef __EXCLUSIVE_ACCESS
68 #if defined __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
69 #if ((__ARM_ARCH_7M__ == 1U) || \
70  (__ARM_ARCH_7EM__ == 1U) || \
71  (__ARM_ARCH_8M_BASE__ == 1U) || \
72  (__ARM_ARCH_8M_MAIN__ == 1U) || \
73  (__ARM_ARCH_7A__ == 1U))
74 #define MBED_EXCLUSIVE_ACCESS 1U
75 #define MBED_EXCLUSIVE_ACCESS_THUMB1 (__ARM_ARCH_8M_BASE__ == 1U)
76 #ifdef __ICCARM__
77 #if __CPU_MODE__ == 2
78 #define MBED_EXCLUSIVE_ACCESS_ARM 1U
79 #else
80 #define MBED_EXCLUSIVE_ACCESS_ARM 0U
81 #endif
82 #else
83 #if !defined (__thumb__)
84 #define MBED_EXCLUSIVE_ACCESS_ARM 1U
85 #else
86 #define MBED_EXCLUSIVE_ACCESS_ARM 0U
87 #endif
88 #endif
89 #elif (__ARM_ARCH_6M__ == 1U)
90 #define MBED_EXCLUSIVE_ACCESS 0U
91 #else
92 #error "Unknown ARM architecture for exclusive access"
93 #endif // __ARM_ARCH_xxx
94 #else // __arm__ || defined __ICC_ARM__ || defined __ARM_ARCH
95 // We seem to be compiling for non-ARM, so stick with the critical-section implementations
96 #define MBED_EXCLUSIVE_ACCESS 0U
97 #endif
98 #else
99 #define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
100 #endif
101 #endif
102 
103 #if MBED_EXCLUSIVE_ACCESS
104 #define MBED_INLINE_IF_EX inline
105 #else
106 #define MBED_INLINE_IF_EX
107 #endif
108 
109 /**
110  * A lock-free, primitive atomic flag.
111  *
112  * Emulate C11's atomic_flag. The flag is initially in an indeterminate state
113  * unless explicitly initialized with CORE_UTIL_ATOMIC_FLAG_INIT.
114  */
115 typedef struct core_util_atomic_flag {
116  uint8_t _flag;
117 } core_util_atomic_flag;
118 
119 /**
120  * Initializer for a core_util_atomic_flag.
121  *
122  * Example:
123  * ~~~
124  * core_util_atomic_flag in_progress = CORE_UTIL_ATOMIC_FLAG_INIT;
125  * ~~~
126  */
127 #define CORE_UTIL_ATOMIC_FLAG_INIT { 0 }
128 
129 /**
130  * Atomic test and set.
131  *
132  * Atomically tests then sets the flag to true, returning the previous value.
133  *
134  * @param flagPtr Target flag being tested and set.
135  * @return The previous value.
136  */
137 MBED_INLINE_IF_EX bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr);
138 
139 /** \copydoc core_util_atomic_flag_test_and_set
140  * @param order memory ordering constraint
141  */
142 MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit(volatile core_util_atomic_flag *valuePtr, mbed_memory_order order);
143 
144 /**
145  * Atomic clear.
146  *
147  * @param flagPtr Target flag being cleared.
148  */
149 MBED_FORCEINLINE void core_util_atomic_flag_clear(volatile core_util_atomic_flag *flagPtr);
150 
151 /** \copydoc core_util_atomic_flag_clear
152  * @param order memory ordering constraint
153  */
154 MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order);
155 
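/* Usage sketch: the flag as a non-blocking "busy" guard. `busy` and
 * `start_operation` are hypothetical application code.
 * ~~~
 * static core_util_atomic_flag busy = CORE_UTIL_ATOMIC_FLAG_INIT;
 *
 * bool start_operation(void)
 * {
 *     if (core_util_atomic_flag_test_and_set(&busy)) {
 *         return false; // another context already owns the operation
 *     }
 *     // ... perform the operation ...
 *     core_util_atomic_flag_clear(&busy);
 *     return true;
 * }
 * ~~~
 */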
156 
157 /**
158  * Atomic compare and set. It compares the contents of a memory location to a
159  * given value and, only if they are the same, modifies the contents of that
160  * memory location to a given new value. This is done as a single atomic
161  * operation. The atomicity guarantees that the new value is calculated based on
162  * up-to-date information; if the value had been updated by another thread in
163  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
164  *
165  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
166  * you to the article on compare-and-swap].
167  *
168  * @param ptr The target memory location.
169  * @param[in,out] expectedCurrentValue A pointer to some location holding the
170  * expected current value of the data being set atomically.
171  * The computed 'desiredValue' should be a function of this current value.
172  * @note: This is an in-out parameter. In the
173  * failure case of atomic_cas (where the
174  * destination isn't set), the pointee of expectedCurrentValue is
175  * updated with the current value.
176  * @param[in] desiredValue The new value computed based on '*expectedCurrentValue'.
177  *
178  * @return true if the memory location was atomically
179  * updated with the desired value (after verifying
180  * that it contained the expectedCurrentValue),
181  * false otherwise. In the failure case,
182  * expectedCurrentValue is updated with the new
183  * value of the target memory location.
184  *
185  * pseudocode:
186  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
187  * if *p != *old {
188  * *old = *p
189  * return false
190  * }
191  * *p = new
192  * return true
193  * }
194  *
195  * @note: In the failure case (where the destination isn't set), the value
196  * pointed to by expectedCurrentValue is instead updated with the current value.
197  * This property makes it easy to write concise code such as the following incr:
198  *
199  * function incr(p : pointer to int, a : int) returns int {
200  * done = false
201  * value = atomic_load(p)
202  * while not done {
203  * done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
204  * }
205  * return value + a
206  * }
207  *
208  * However, if the call is made in a loop like this, the atomic_compare_exchange_weak
209  * functions are to be preferred.
210  *
211  * @note: This corresponds to the C11 "atomic_compare_exchange_strong" - it
212  * always succeeds if the current value is expected, as per the pseudocode
213  * above; it will not spuriously fail as "atomic_compare_exchange_weak" may.
214  * This call would normally be used when a fail return does not retry.
215  */
216 MBED_INLINE_IF_EX bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
217 
218 /** \copydoc core_util_atomic_cas_u8
219  * @param success memory ordering constraint for successful exchange
220  * @param failure memory ordering constraint for failure
221  */
222 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
223 
224 /** \copydoc core_util_atomic_cas_u8 */
225 MBED_INLINE_IF_EX bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
226 
227 /** \copydoc core_util_atomic_cas_explicit_u8 */
228 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
229 
230 /** \copydoc core_util_atomic_cas_u8 */
231 MBED_INLINE_IF_EX bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
232 
233 /** \copydoc core_util_atomic_cas_explicit_u8 */
234 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
235 
236 /** \copydoc core_util_atomic_cas_u8 */
237 bool core_util_atomic_cas_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
238 
239 /** \copydoc core_util_atomic_cas_explicit_u8 */
240 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
241 
242 /** \copydoc core_util_atomic_cas_u8 */
243 MBED_FORCEINLINE bool core_util_atomic_cas_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
244 
245 /** \copydoc core_util_atomic_cas_explicit_u8 */
246 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
247 
248 /** \copydoc core_util_atomic_cas_u8 */
249 MBED_FORCEINLINE bool core_util_atomic_cas_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
250 
251 /** \copydoc core_util_atomic_cas_explicit_u8 */
252 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
253 
254 /** \copydoc core_util_atomic_cas_u8 */
255 MBED_FORCEINLINE bool core_util_atomic_cas_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
256 
257 /** \copydoc core_util_atomic_cas_explicit_u8 */
258 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
259 
260 /** \copydoc core_util_atomic_cas_u8 */
261 MBED_FORCEINLINE bool core_util_atomic_cas_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
262 
263 /** \copydoc core_util_atomic_cas_explicit_u8 */
264 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
265 
266 /** \copydoc core_util_atomic_cas_u8 */
267 MBED_FORCEINLINE bool core_util_atomic_cas_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
268 
269 /** \copydoc core_util_atomic_cas_explicit_u8 */
270 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
271 
272 /** \copydoc core_util_atomic_cas_u8 */
273 inline bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
274 
275 /** \copydoc core_util_atomic_cas_explicit_u8 */
276 MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
277 
278 
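/* Usage sketch for the strong form: claiming one-shot initialization, where a
 * failed attempt simply gives up rather than retrying. `init_state` and
 * `try_claim_init` are hypothetical.
 * ~~~
 * static uint8_t init_state; // 0 = unclaimed, 1 = claimed
 *
 * bool try_claim_init(void)
 * {
 *     uint8_t expected = 0;
 *     return core_util_atomic_cas_u8(&init_state, &expected, 1);
 * }
 * ~~~
 */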
279 
280 /**
281  * Atomic compare and set. It compares the contents of a memory location to a
282  * given value and, only if they are the same, modifies the contents of that
283  * memory location to a given new value. This is done as a single atomic
284  * operation. The atomicity guarantees that the new value is calculated based on
285  * up-to-date information; if the value had been updated by another thread in
286  * the meantime, the write would fail due to a mismatched expectedCurrentValue.
287  *
288  * Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect
289  * you to the article on compare-and-swap].
290  *
291  * @param ptr The target memory location.
292  * @param[in,out] expectedCurrentValue A pointer to some location holding the
293  * expected current value of the data being set atomically.
294  * The computed 'desiredValue' should be a function of this current value.
295  * @note: This is an in-out parameter. In the
296  * failure case of atomic_cas (where the
297  * destination isn't set), the pointee of expectedCurrentValue is
298  * updated with the current value.
299  * @param[in] desiredValue The new value computed based on '*expectedCurrentValue'.
300  *
301  * @return true if the memory location was atomically
302  * updated with the desired value (after verifying
303  * that it contained the expectedCurrentValue),
304  * false otherwise. In the failure case,
305  * expectedCurrentValue is updated with the new
306  * value of the target memory location.
307  *
308  * pseudocode:
309  * function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
310  * if *p != *old or spurious failure {
311  * *old = *p
312  * return false
313  * }
314  * *p = new
315  * return true
316  * }
317  *
318  * @note: In the failure case (where the destination isn't set), the value
319  * pointed to by expectedCurrentValue is instead updated with the current value.
320  * This property makes it easy to write concise code such as the following incr:
321  *
322  * function incr(p : pointer to int, a : int) returns int {
323  * done = false
324  * value = *p // This fetch operation need not be atomic.
325  * while not done {
326  * done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success
327  * }
328  * return value + a
329  * }
330  *
331  * @note: This corresponds to the C11 "atomic_compare_exchange_weak" - it
332  * may spuriously fail even if the current value is as expected, as per the
333  * pseudocode above, unlike the strong form.
334  * This call would normally be used when a fail return will cause a retry anyway,
335  * saving the need for an extra loop inside the cas operation.
336  */
337 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue);
338 
339 /** \copydoc core_util_atomic_compare_exchange_weak_u8
340  * @param success memory ordering constraint for successful exchange
341  * @param failure memory ordering constraint for failure
342  */
343 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
344 
345 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
346 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue);
347 
348 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
349 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
350 
351 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
352 MBED_INLINE_IF_EX bool core_util_atomic_compare_exchange_weak_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue);
353 
354 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
355 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
356 
357 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
358 bool core_util_atomic_compare_exchange_weak_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue);
359 
360 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
361 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
362 
363 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
364 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue);
365 
366 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
367 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
368 
369 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
370 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue);
371 
372 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
373 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
374 
375 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
376 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue);
377 
378 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
379 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
380 
381 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
382 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue);
383 
384 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
385 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure);
386 
387 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
388 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue);
389 
390 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
391 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure);
392 
393 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
394 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue);
395 
396 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
397 MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure);
398 
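/* Usage sketch for the weak form: the retry-loop idiom from the note above,
 * here maintaining an atomic maximum. `record_high_water` is hypothetical.
 * ~~~
 * void record_high_water(volatile uint32_t *mark, uint32_t sample)
 * {
 *     uint32_t current = core_util_atomic_load_u32(mark);
 *     while (sample > current) {
 *         // On failure (including spurious failure), `current` is refreshed
 *         // and the loop condition is re-tested.
 *         if (core_util_atomic_compare_exchange_weak_u32(mark, &current, sample)) {
 *             break;
 *         }
 *     }
 * }
 * ~~~
 */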
399 
400 /**
401  * Atomic load.
402  * @param valuePtr Target memory location.
403  * @return The loaded value.
404  */
405 MBED_FORCEINLINE uint8_t core_util_atomic_load_u8(const volatile uint8_t *valuePtr);
406 
407 /**
408  * \copydoc core_util_atomic_load_u8
409  * @param order memory ordering constraint
410  */
411 MBED_FORCEINLINE uint8_t core_util_atomic_load_explicit_u8(const volatile uint8_t *valuePtr, mbed_memory_order order);
412 
413 /** \copydoc core_util_atomic_load_u8 */
414 MBED_FORCEINLINE uint16_t core_util_atomic_load_u16(const volatile uint16_t *valuePtr);
415 
416 /** \copydoc core_util_atomic_load_explicit_u8 */
417 MBED_FORCEINLINE uint16_t core_util_atomic_load_explicit_u16(const volatile uint16_t *valuePtr, mbed_memory_order order);
418 
419 /** \copydoc core_util_atomic_load_u8 */
420 MBED_FORCEINLINE uint32_t core_util_atomic_load_u32(const volatile uint32_t *valuePtr);
421 
422 /** \copydoc core_util_atomic_load_explicit_u8 */
423 MBED_FORCEINLINE uint32_t core_util_atomic_load_explicit_u32(const volatile uint32_t *valuePtr, mbed_memory_order order);
424 
425 /** \copydoc core_util_atomic_load_u8 */
426 uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr);
427 
428 /** \copydoc core_util_atomic_load_explicit_u8 */
429 MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64(const volatile uint64_t *valuePtr, mbed_memory_order order);
430 
431 /** \copydoc core_util_atomic_load_u8 */
432 MBED_FORCEINLINE int8_t core_util_atomic_load_s8(const volatile int8_t *valuePtr);
433 
434 /** \copydoc core_util_atomic_load_explicit_u8 */
435 MBED_FORCEINLINE int8_t core_util_atomic_load_explicit_s8(const volatile int8_t *valuePtr, mbed_memory_order order);
436 
437 /** \copydoc core_util_atomic_load_u8 */
438 MBED_FORCEINLINE int16_t core_util_atomic_load_s16(const volatile int16_t *valuePtr);
439 
440 /** \copydoc core_util_atomic_load_explicit_u8 */
441 MBED_FORCEINLINE int16_t core_util_atomic_load_explicit_s16(const volatile int16_t *valuePtr, mbed_memory_order order);
442 
443 /** \copydoc core_util_atomic_load_u8 */
444 MBED_FORCEINLINE int32_t core_util_atomic_load_s32(const volatile int32_t *valuePtr);
445 
446 /** \copydoc core_util_atomic_load_explicit_u8 */
447 MBED_FORCEINLINE int32_t core_util_atomic_load_explicit_s32(const volatile int32_t *valuePtr, mbed_memory_order order);
448 
449 /** \copydoc core_util_atomic_load_u8 */
450 MBED_FORCEINLINE int64_t core_util_atomic_load_s64(const volatile int64_t *valuePtr);
451 
452 /** \copydoc core_util_atomic_load_explicit_u8 */
453 MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64(const volatile int64_t *valuePtr, MBED_UNUSED mbed_memory_order order);
454 
455 /** \copydoc core_util_atomic_load_u8 */
456 MBED_FORCEINLINE bool core_util_atomic_load_bool(const volatile bool *valuePtr);
457 
458 /** \copydoc core_util_atomic_load_explicit_u8 */
459 MBED_FORCEINLINE bool core_util_atomic_load_explicit_bool(const volatile bool *valuePtr, mbed_memory_order order);
460 
461 /** \copydoc core_util_atomic_load_u8 */
462 MBED_FORCEINLINE void *core_util_atomic_load_ptr(void *const volatile *valuePtr);
463 
464 /** \copydoc core_util_atomic_load_explicit_u8 */
465 MBED_FORCEINLINE void *core_util_atomic_load_explicit_ptr(void *const volatile *valuePtr, mbed_memory_order order);
466 
467 /**
468  * Atomic store.
469  * @param valuePtr Target memory location.
470  * @param desiredValue The value to store.
471  */
472 MBED_FORCEINLINE void core_util_atomic_store_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
473 
474 /**
475  * \copydoc core_util_atomic_store_u8
476  * @param order memory ordering constraint
477  */
478 MBED_FORCEINLINE void core_util_atomic_store_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
479 
480 /** \copydoc core_util_atomic_store_u8 */
481 MBED_FORCEINLINE void core_util_atomic_store_u16(volatile uint16_t *valuePtr, uint16_t desiredValue);
482 
483 /** \copydoc core_util_atomic_store_explicit_u8 */
484 MBED_FORCEINLINE void core_util_atomic_store_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
485 
486 /** \copydoc core_util_atomic_store_u8 */
487 MBED_FORCEINLINE void core_util_atomic_store_u32(volatile uint32_t *valuePtr, uint32_t desiredValue);
488 
489 /** \copydoc core_util_atomic_store_explicit_u8 */
490 MBED_FORCEINLINE void core_util_atomic_store_explicit_u32(volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
491 
492 /** \copydoc core_util_atomic_store_u8 */
493 void core_util_atomic_store_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);
494 
495 /** \copydoc core_util_atomic_store_explicit_u8 */
496 MBED_FORCEINLINE void core_util_atomic_store_explicit_u64(volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
497 
498 /** \copydoc core_util_atomic_store_u8 */
499 MBED_FORCEINLINE void core_util_atomic_store_s8(volatile int8_t *valuePtr, int8_t desiredValue);
500 
501 /** \copydoc core_util_atomic_store_explicit_u8 */
502 MBED_FORCEINLINE void core_util_atomic_store_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
503 
504 /** \copydoc core_util_atomic_store_u8 */
505 MBED_FORCEINLINE void core_util_atomic_store_s16(volatile int16_t *valuePtr, int16_t desiredValue);
506 
507 /** \copydoc core_util_atomic_store_explicit_u8 */
508 MBED_FORCEINLINE void core_util_atomic_store_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
509 
510 /** \copydoc core_util_atomic_store_u8 */
511 MBED_FORCEINLINE void core_util_atomic_store_s32(volatile int32_t *valuePtr, int32_t desiredValue);
512 
513 /** \copydoc core_util_atomic_store_explicit_u8 */
514 MBED_FORCEINLINE void core_util_atomic_store_explicit_s32(volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
515 
516 /** \copydoc core_util_atomic_store_u8 */
517 MBED_FORCEINLINE void core_util_atomic_store_s64(volatile int64_t *valuePtr, int64_t desiredValue);
518 
519 /** \copydoc core_util_atomic_store_explicit_u8 */
520 MBED_FORCEINLINE void core_util_atomic_store_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
521 
522 /** \copydoc core_util_atomic_store_u8 */
523 MBED_FORCEINLINE void core_util_atomic_store_bool(volatile bool *valuePtr, bool desiredValue);
524 
525 /** \copydoc core_util_atomic_store_explicit_u8 */
526 MBED_FORCEINLINE void core_util_atomic_store_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
527 
528 /** \copydoc core_util_atomic_store_u8 */
529 MBED_FORCEINLINE void core_util_atomic_store_ptr(void *volatile *valuePtr, void *desiredValue);
530 
531 /** \copydoc core_util_atomic_store_explicit_u8 */
532 MBED_FORCEINLINE void core_util_atomic_store_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
533 
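/* Usage sketch: publishing data through a store/load pair. The release store
 * to `ready` makes the preceding plain write to `result` visible to any
 * reader whose acquire load sees `ready` as true. Both variables are
 * hypothetical.
 * ~~~
 * static uint32_t result;
 * static bool ready;
 *
 * void producer(void)
 * {
 *     result = 42;                               // plain write
 *     core_util_atomic_store_bool(&ready, true); // release store
 * }
 *
 * bool try_consume(uint32_t *out)
 * {
 *     if (!core_util_atomic_load_bool(&ready)) { // acquire load
 *         return false;
 *     }
 *     *out = result; // ordered after the acquire load
 *     return true;
 * }
 * ~~~
 */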
534 /**
535  * Atomic exchange.
536  * @param valuePtr Target memory location.
537  * @param desiredValue The value to store.
538  * @return The previous value.
539  */
540 MBED_INLINE_IF_EX uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue);
541 
542 /** \copydoc core_util_atomic_exchange_u8
543  * @param order memory ordering constraint
544  */
545 MBED_FORCEINLINE uint8_t core_util_atomic_exchange_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order);
546 
547 /** \copydoc core_util_atomic_exchange_u8 */
548 MBED_INLINE_IF_EX uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue);
549 
550 /** \copydoc core_util_atomic_exchange_explicit_u8 */
551 MBED_FORCEINLINE uint16_t core_util_atomic_exchange_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order);
552 
553 /** \copydoc core_util_atomic_exchange_u8 */
554 MBED_INLINE_IF_EX uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue);
555 
556 /** \copydoc core_util_atomic_exchange_explicit_u8 */
557 MBED_FORCEINLINE uint32_t core_util_atomic_exchange_explicit_u32(volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order);
558 
559 /** \copydoc core_util_atomic_exchange_u8 */
560 uint64_t core_util_atomic_exchange_u64(volatile uint64_t *valuePtr, uint64_t desiredValue);
561 
562 /** \copydoc core_util_atomic_exchange_explicit_u8 */
563 MBED_FORCEINLINE uint64_t core_util_atomic_exchange_explicit_u64(volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order);
564 
565 /** \copydoc core_util_atomic_exchange_u8 */
566 MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8(volatile int8_t *valuePtr, int8_t desiredValue);
567 
568 /** \copydoc core_util_atomic_exchange_explicit_u8 */
569 MBED_FORCEINLINE int8_t core_util_atomic_exchange_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order);
570 
571 /** \copydoc core_util_atomic_exchange_u8 */
572 MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16(volatile int16_t *valuePtr, int16_t desiredValue);
573 
574 /** \copydoc core_util_atomic_exchange_explicit_u8 */
575 MBED_FORCEINLINE int16_t core_util_atomic_exchange_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order);
576 
577 /** \copydoc core_util_atomic_exchange_u8 */
578 MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32(volatile int32_t *valuePtr, int32_t desiredValue);
579 
580 /** \copydoc core_util_atomic_exchange_explicit_u8 */
581 MBED_FORCEINLINE int32_t core_util_atomic_exchange_explicit_s32(volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order);
582 
583 /** \copydoc core_util_atomic_exchange_u8 */
584 MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64(volatile int64_t *valuePtr, int64_t desiredValue);
585 
586 /** \copydoc core_util_atomic_exchange_explicit_u8 */
587 MBED_FORCEINLINE int64_t core_util_atomic_exchange_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order);
588 
589 /** \copydoc core_util_atomic_exchange_u8 */
590 MBED_FORCEINLINE bool core_util_atomic_exchange_bool(volatile bool *valuePtr, bool desiredValue);
591 
592 /** \copydoc core_util_atomic_exchange_explicit_u8 */
593 MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order);
594 
595 /** \copydoc core_util_atomic_exchange_u8 */
596 inline void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue);
597 
598 /** \copydoc core_util_atomic_exchange_explicit_u8 */
599 MBED_FORCEINLINE void *core_util_atomic_exchange_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order);
600 
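/* Usage sketch: atomically taking ownership of a pending buffer by swapping
 * in NULL; a concurrent taker receives NULL instead. `pending_buffer` and
 * `take_pending` are hypothetical.
 * ~~~
 * static void *volatile pending_buffer;
 *
 * void *take_pending(void)
 * {
 *     return core_util_atomic_exchange_ptr(&pending_buffer, NULL);
 * }
 * ~~~
 */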
601 /**
602  * Atomic increment.
603  * @param valuePtr Target memory location being incremented.
604  * @param delta The amount to increment by.
605  * @return The new incremented value.
606  */
607 MBED_INLINE_IF_EX uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta);
608 
609 /** \copydoc core_util_atomic_incr_u8 */
610 MBED_INLINE_IF_EX uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta);
611 
612 /** \copydoc core_util_atomic_incr_u8 */
613 MBED_INLINE_IF_EX uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta);
614 
615 /** \copydoc core_util_atomic_incr_u8 */
616 uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta);
617 
618 /** \copydoc core_util_atomic_incr_u8 */
619 MBED_FORCEINLINE int8_t core_util_atomic_incr_s8(volatile int8_t *valuePtr, int8_t delta);
620 
621 /** \copydoc core_util_atomic_incr_u8 */
622 MBED_FORCEINLINE int16_t core_util_atomic_incr_s16(volatile int16_t *valuePtr, int16_t delta);
623 
624 /** \copydoc core_util_atomic_incr_u8 */
625 MBED_FORCEINLINE int32_t core_util_atomic_incr_s32(volatile int32_t *valuePtr, int32_t delta);
626 
627 /** \copydoc core_util_atomic_incr_u8 */
628 MBED_FORCEINLINE int64_t core_util_atomic_incr_s64(volatile int64_t *valuePtr, int64_t delta);
629 
630 /** \copydoc core_util_atomic_incr_u8 */
631 inline void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
632 
633 /**
634  * Atomic decrement.
635  * @param valuePtr Target memory location being decremented.
636  * @param delta The amount to decrement by.
637  * @return The new decremented value.
638  */
639 MBED_INLINE_IF_EX uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta);
640 
641 /** \copydoc core_util_atomic_decr_u8 */
642 MBED_INLINE_IF_EX uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta);
643 
644 /** \copydoc core_util_atomic_decr_u8 */
645 MBED_INLINE_IF_EX uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta);
646 
647 /** \copydoc core_util_atomic_decr_u8 */
648 uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta);
649 
650 /** \copydoc core_util_atomic_decr_u8 */
651 MBED_FORCEINLINE int8_t core_util_atomic_decr_s8(volatile int8_t *valuePtr, int8_t delta);
652 
653 /** \copydoc core_util_atomic_decr_u8 */
654 MBED_FORCEINLINE int16_t core_util_atomic_decr_s16(volatile int16_t *valuePtr, int16_t delta);
655 
656 /** \copydoc core_util_atomic_decr_u8 */
657 MBED_FORCEINLINE int32_t core_util_atomic_decr_s32(volatile int32_t *valuePtr, int32_t delta);
658 
659 /** \copydoc core_util_atomic_decr_u8 */
660 MBED_FORCEINLINE int64_t core_util_atomic_decr_s64(volatile int64_t *valuePtr, int64_t delta);
661 
662 /** \copydoc core_util_atomic_decr_u8 */
663 inline void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta);
664 
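/* Usage sketch: reference-count release using the returned (new) value.
 * `refcount` and `release_ref` are hypothetical.
 * ~~~
 * static uint32_t refcount = 1;
 *
 * void release_ref(void)
 * {
 *     if (core_util_atomic_decr_u32(&refcount, 1) == 0) {
 *         // last reference dropped; safe to free resources here
 *     }
 * }
 * ~~~
 */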
665 /**
666  * Atomic add.
667  * @param valuePtr Target memory location being modified.
668  * @param arg The argument for the addition.
669  * @return The original value.
670  */
671 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_add_u8(volatile uint8_t *valuePtr, uint8_t arg);
672 
673 /** \copydoc core_util_atomic_fetch_add_u8
674  * @param order memory ordering constraint
675  */
676 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_add_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
677 
678 /** \copydoc core_util_atomic_fetch_add_u8 */
679 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_add_u16(volatile uint16_t *valuePtr, uint16_t arg);
680 
681 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
682 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_add_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
683 
684 /** \copydoc core_util_atomic_fetch_add_u8 */
685 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_add_u32(volatile uint32_t *valuePtr, uint32_t arg);
686 
687 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
688 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_add_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
689 
690 /** \copydoc core_util_atomic_fetch_add_u8 */
691 uint64_t core_util_atomic_fetch_add_u64(volatile uint64_t *valuePtr, uint64_t arg);
692 
693 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
694 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_add_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
695 
696 /** \copydoc core_util_atomic_fetch_add_u8 */
697 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_s8(volatile int8_t *valuePtr, int8_t arg);
698 
699 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
700 MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_explicit_s8(volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
701 
702 /** \copydoc core_util_atomic_fetch_add_u8 */
703 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_s16(volatile int16_t *valuePtr, int16_t arg);
704 
705 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
706 MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_explicit_s16(volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
707 
708 /** \copydoc core_util_atomic_fetch_add_u8 */
709 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_s32(volatile int32_t *valuePtr, int32_t arg);
710 
711 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
712 MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
713 
714 /** \copydoc core_util_atomic_fetch_add_u8 */
715 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_s64(volatile int64_t *valuePtr, int64_t arg);
716 
717 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
718 MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
719 
720 /** \copydoc core_util_atomic_fetch_add_u8 */
721 MBED_FORCEINLINE void *core_util_atomic_fetch_add_ptr(void *volatile *valuePtr, ptrdiff_t arg);
722 
723 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
724 MBED_FORCEINLINE void *core_util_atomic_fetch_add_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
725 
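/* Usage sketch: reserving a slot index with fetch-add, which returns the
 * value *before* the addition. Names are hypothetical and wrap-around
 * handling is omitted.
 * ~~~
 * #define LOG_SIZE 64
 * static uint32_t log_entries[LOG_SIZE];
 * static uint32_t next_slot;
 *
 * void log_value(uint32_t v)
 * {
 *     uint32_t slot = core_util_atomic_fetch_add_u32(&next_slot, 1);
 *     log_entries[slot % LOG_SIZE] = v;
 * }
 * ~~~
 */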
726 /**
727  * Atomic subtract.
728  * @param valuePtr Target memory location being modified.
729  * @param arg The argument for the subtraction.
730  * @return The original value.
731  */
732 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_sub_u8(volatile uint8_t *valuePtr, uint8_t arg);
733 
734 /** \copydoc core_util_atomic_fetch_sub_u8
735  * @param order memory ordering constraint
736  */
737 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_sub_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
738 
739 /** \copydoc core_util_atomic_fetch_sub_u8 */
740 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_sub_u16(volatile uint16_t *valuePtr, uint16_t arg);
741 
742 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
743 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_sub_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
744 
745 /** \copydoc core_util_atomic_fetch_sub_u8 */
746 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_sub_u32(volatile uint32_t *valuePtr, uint32_t arg);
747 
748 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
749 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_sub_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
750 
751 /** \copydoc core_util_atomic_fetch_sub_u8 */
752 uint64_t core_util_atomic_fetch_sub_u64(volatile uint64_t *valuePtr, uint64_t arg);
753 
754 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
755 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_sub_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
756 
757 /** \copydoc core_util_atomic_fetch_sub_u8 */
758 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_s8(volatile int8_t *valuePtr, int8_t arg);
759 
760 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
761 MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_explicit_s8(volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order);
762 
763 /** \copydoc core_util_atomic_fetch_sub_u8 */
764 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_s16(volatile int16_t *valuePtr, int16_t arg);
765 
766 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
767 MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_explicit_s16(volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order);
768 
769 /** \copydoc core_util_atomic_fetch_sub_u8 */
770 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_s32(volatile int32_t *valuePtr, int32_t arg);
771 
772 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
773 MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order);
774 
775 /** \copydoc core_util_atomic_fetch_sub_u8 */
776 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_s64(volatile int64_t *valuePtr, int64_t arg);
777 
778 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
779 MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order);
780 
781 /** \copydoc core_util_atomic_fetch_sub_u8 */
782 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_ptr(void *volatile *valuePtr, ptrdiff_t arg);
783 
784 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
785 MBED_FORCEINLINE void *core_util_atomic_fetch_sub_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
786 
787 /**
788  * Atomic bitwise and.
789  * @param valuePtr Target memory location being modified.
790  * @param arg The argument for the bitwise operation.
791  * @return The original value.
792  */
793 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg);
794 
795 /** \copydoc core_util_atomic_fetch_and_u8
796  * @param order memory ordering constraint
797  */
798 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_and_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
799 
800 /** \copydoc core_util_atomic_fetch_and_u8 */
801 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg);
802 
803 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
804 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_and_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
805 
806 /** \copydoc core_util_atomic_fetch_and_u8 */
807 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg);
808 
809 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
810 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_and_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
811 
812 /** \copydoc core_util_atomic_fetch_and_u8 */
813 uint64_t core_util_atomic_fetch_and_u64(volatile uint64_t *valuePtr, uint64_t arg);
814 
815 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
816 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_and_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
817 
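/* Usage sketch: consuming event bits by clearing them with fetch-and; the
 * return value reports which of the requested bits were pending. `event_bits`
 * and `consume_events` are hypothetical.
 * ~~~
 * static uint32_t event_bits;
 *
 * uint32_t consume_events(uint32_t mask)
 * {
 *     return core_util_atomic_fetch_and_u32(&event_bits, ~mask) & mask;
 * }
 * ~~~
 */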
818 /**
819  * Atomic bitwise inclusive or.
820  * @param valuePtr Target memory location being modified.
821  * @param arg The argument for the bitwise operation.
822  * @return The original value.
823  */
824 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg);
825 
826 /** \copydoc core_util_atomic_fetch_or_u8
827  * @param order memory ordering constraint
828  */
829 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_or_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
830 
831 /** \copydoc core_util_atomic_fetch_or_u8 */
832 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_or_u16(volatile uint16_t *valuePtr, uint16_t arg);
833 
834 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
835 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_or_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
836 
837 /** \copydoc core_util_atomic_fetch_or_u8 */
838 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_or_u32(volatile uint32_t *valuePtr, uint32_t arg);
839 
840 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
841 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_or_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
842 
843 /** \copydoc core_util_atomic_fetch_or_u8 */
844 uint64_t core_util_atomic_fetch_or_u64(volatile uint64_t *valuePtr, uint64_t arg);
845 
846 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
847 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_or_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
848 
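/* Usage sketch: an interrupt handler posting an event bit with fetch-or,
 * pairing with the fetch-and consumer sketched above. `uart_rx_irq` is
 * hypothetical.
 * ~~~
 * static uint32_t event_bits;
 *
 * void uart_rx_irq(void)
 * {
 *     core_util_atomic_fetch_or_u32(&event_bits, 0x1);
 * }
 * ~~~
 */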
849 /**
850  * Atomic bitwise exclusive or.
851  * @param valuePtr Target memory location being modified.
852  * @param arg The argument for the bitwise operation.
853  * @return The original value.
854  */
855 MBED_INLINE_IF_EX uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg);
856 
857 /** \copydoc core_util_atomic_fetch_xor_u8
858  * @param order memory ordering constraint
859  */
860 MBED_FORCEINLINE uint8_t core_util_atomic_fetch_xor_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order);
861 
862 /** \copydoc core_util_atomic_fetch_xor_u8 */
863 MBED_INLINE_IF_EX uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg);
864 
865 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
866 MBED_FORCEINLINE uint16_t core_util_atomic_fetch_xor_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order);
867 
868 /** \copydoc core_util_atomic_fetch_xor_u8 */
869 MBED_INLINE_IF_EX uint32_t core_util_atomic_fetch_xor_u32(volatile uint32_t *valuePtr, uint32_t arg);
870 
871 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
872 MBED_FORCEINLINE uint32_t core_util_atomic_fetch_xor_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order);
873 
874 /** \copydoc core_util_atomic_fetch_xor_u8 */
875 uint64_t core_util_atomic_fetch_xor_u64(volatile uint64_t *valuePtr, uint64_t arg);
876 
877 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
878 MBED_FORCEINLINE uint64_t core_util_atomic_fetch_xor_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order);
879 
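/* Usage sketch: toggling a state bit with fetch-xor; the return value gives
 * the state before the toggle. `led_state` and `toggle_led` are hypothetical.
 * ~~~
 * static uint8_t led_state;
 *
 * bool toggle_led(void)
 * {
 *     return (core_util_atomic_fetch_xor_u8(&led_state, 0x1) & 0x1) != 0;
 * }
 * ~~~
 */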
880 #ifdef __cplusplus
881 } // extern "C"
882 
883 // For each operation, two overloaded templates:
884 // * one for non-pointer types, which has implementations based on the
885 // u8/u16/u32/u64/s8/s16/s32/s64/bool functions above. No base implementation.
886 // * one for any pointer type, generically implemented based on ptr function above.
887 //
888 // Templates use standard C/C++ naming - old incr/decr/cas forms are not provided.
889 //
890 // Note that C++ template selection somewhat inhibits the ease of use of these templates.
891 // Ambiguities arise with setting pointers to NULL, or adding constants to integers.
892 // It may be necessary to cast the argument or desired value to the correct type, or
893 // explicitly specify the type - e.g. core_util_atomic_store<FileHandle>(&fh, NULL) or
894 // core_util_atomic_store(&val, (uint8_t)1).
895 // A proper mbed::Atomic<T> class would solve the issue.
896 
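// Usage sketch for the casts suggested above; `flags` and `obj` are
// hypothetical C++ application variables.
// ~~~
// static volatile uint8_t flags;
// static MyObject *volatile obj;
//
// core_util_atomic_store(&flags, (uint8_t) 1);     // cast selects the u8 overload
// core_util_atomic_store(&obj, (MyObject *) NULL); // give NULL a concrete type
// uint8_t f = core_util_atomic_load(&flags);
// ~~~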
897 /** \copydoc core_util_atomic_load_u8 */
898 template<typename T> T core_util_atomic_load(const volatile T *valuePtr);
899 /** \copydoc core_util_atomic_load_u8 */
900 template<typename T> T core_util_atomic_load(const T *valuePtr);
901 /** \copydoc core_util_atomic_store_u8 */
902 template<typename T> void core_util_atomic_store(volatile T *valuePtr, T desiredValue);
903 /** \copydoc core_util_atomic_store_u8 */
904 template<typename T> void core_util_atomic_store(T *valuePtr, T desiredValue);
905 /** \copydoc core_util_atomic_exchange_u8 */
906 template<typename T> T core_util_atomic_exchange(volatile T *ptr, T desiredValue);
907 /** \copydoc core_util_atomic_cas_u8 */
908 template<typename T> bool core_util_atomic_compare_exchange_strong(volatile T *ptr, T *expectedCurrentValue, T desiredValue);
909 /** \copydoc core_util_atomic_compare_exchange_weak_u8 */
910 template<typename T> bool core_util_atomic_compare_exchange_weak(volatile T *ptr, T *expectedCurrentValue, T desiredValue);
911 /** \copydoc core_util_atomic_fetch_add_u8 */
912 template<typename T> T core_util_atomic_fetch_add(volatile T *valuePtr, T arg);
913 /** \copydoc core_util_atomic_fetch_sub_u8 */
914 template<typename T> T core_util_atomic_fetch_sub(volatile T *valuePtr, T arg);
915 /** \copydoc core_util_atomic_fetch_and_u8 */
916 template<typename T> T core_util_atomic_fetch_and(volatile T *valuePtr, T arg);
917 /** \copydoc core_util_atomic_fetch_or_u8 */
918 template<typename T> T core_util_atomic_fetch_or(volatile T *valuePtr, T arg);
919 /** \copydoc core_util_atomic_fetch_xor_u8 */
920 template<typename T> T core_util_atomic_fetch_xor(volatile T *valuePtr, T arg);
921 
922 /** \copydoc core_util_atomic_load_explicit_u8 */
923 template<typename T> T core_util_atomic_load_explicit(const volatile T *valuePtr, mbed_memory_order order);
924 /** \copydoc core_util_atomic_load_explicit_u8 */
925 template<typename T> T core_util_atomic_load_explicit(const T *valuePtr, mbed_memory_order order);
926 /** \copydoc core_util_atomic_store_explicit_u8 */
927 template<typename T> void core_util_atomic_store_explicit(volatile T *valuePtr, T desiredValue, mbed_memory_order order);
928 /** \copydoc core_util_atomic_store_explicit_u8 */
929 template<typename T> void core_util_atomic_store_explicit(T *valuePtr, T desiredValue, mbed_memory_order order);
930 /** \copydoc core_util_atomic_exchange_explicit_u8 */
931 template<typename T> T core_util_atomic_exchange_explicit(volatile T *ptr, T desiredValue, mbed_memory_order order);
932 /** \copydoc core_util_atomic_cas_explicit_u8 */
933 template<typename T> bool core_util_atomic_compare_exchange_strong_explicit(volatile T *ptr, T *expectedCurrentValue, T desiredValue, mbed_memory_order success, mbed_memory_order failure);
934 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_u8 */
935 template<typename T> bool core_util_atomic_compare_exchange_weak_explicit(volatile T *ptr, T *expectedCurrentValue, T desiredValue, mbed_memory_order success, mbed_memory_order failure);
936 /** \copydoc core_util_atomic_fetch_add_explicit_u8 */
937 template<typename T> T core_util_atomic_fetch_add_explicit(volatile T *valuePtr, T arg, mbed_memory_order order);
938 /** \copydoc core_util_atomic_fetch_sub_explicit_u8 */
939 template<typename T> T core_util_atomic_fetch_sub_explicit(volatile T *valuePtr, T arg, mbed_memory_order order);
940 /** \copydoc core_util_atomic_fetch_and_explicit_u8 */
941 template<typename T> T core_util_atomic_fetch_and_explicit(volatile T *valuePtr, T arg, mbed_memory_order order);
942 /** \copydoc core_util_atomic_fetch_or_explicit_u8 */
943 template<typename T> T core_util_atomic_fetch_or_explicit(volatile T *valuePtr, T arg, mbed_memory_order order);
944 /** \copydoc core_util_atomic_fetch_xor_explicit_u8 */
945 template<typename T> T core_util_atomic_fetch_xor_explicit(volatile T *valuePtr, T arg, mbed_memory_order order);
946 
947 /** \copydoc core_util_atomic_load_ptr */
948 template<typename T> inline T *core_util_atomic_load(T *const volatile *valuePtr);
949 /** \copydoc core_util_atomic_load_ptr */
950 template<typename T> inline T *core_util_atomic_load(T *const *valuePtr);
951 /** \copydoc core_util_atomic_store_ptr */
952 template<typename T> inline void core_util_atomic_store(T *volatile *valuePtr, T *desiredValue);
953 /** \copydoc core_util_atomic_store_ptr */
954 template<typename T> inline void core_util_atomic_store(T **valuePtr, T *desiredValue);
955 /** \copydoc core_util_atomic_exchange_ptr */
956 template<typename T> inline T *core_util_atomic_exchange(T *volatile *valuePtr, T *desiredValue);
957 /** \copydoc core_util_atomic_cas_ptr */
958 template<typename T> inline bool core_util_atomic_compare_exchange_strong(T *volatile *ptr, T **expectedCurrentValue, T *desiredValue);
959 /** \copydoc core_util_atomic_compare_exchange_weak_ptr */
960 template<typename T> inline bool core_util_atomic_compare_exchange_weak(T *volatile *ptr, T **expectedCurrentValue, T *desiredValue);
961 /** \copydoc core_util_atomic_fetch_add_ptr */
962 template<typename T> inline T *core_util_atomic_fetch_add(T *volatile *valuePtr, ptrdiff_t arg);
963 /** \copydoc core_util_atomic_fetch_sub_ptr */
964 template<typename T> inline T *core_util_atomic_fetch_sub(T *volatile *valuePtr, ptrdiff_t arg);
965 
966 /** \copydoc core_util_atomic_load_explicit_ptr */
967 template<typename T> inline T *core_util_atomic_load_explicit(T *const volatile *valuePtr, mbed_memory_order order);
968 /** \copydoc core_util_atomic_load_explicit_ptr */
969 template<typename T> inline T *core_util_atomic_load_explicit(T *const *valuePtr, mbed_memory_order order);
970 /** \copydoc core_util_atomic_store_explicit_ptr */
971 template<typename T> inline void core_util_atomic_store_explicit(T *volatile *valuePtr, T *desiredValue, mbed_memory_order order);
972 /** \copydoc core_util_atomic_store_explicit_ptr */
973 template<typename T> inline void core_util_atomic_store_explicit(T **valuePtr, T *desiredValue, mbed_memory_order order);
974 /** \copydoc core_util_atomic_exchange_explicit_ptr */
975 template<typename T> inline T *core_util_atomic_exchange_explicit(T *volatile *valuePtr, T *desiredValue, mbed_memory_order order);
976 /** \copydoc core_util_atomic_cas_explicit_ptr */
977 template<typename T> inline bool core_util_atomic_compare_exchange_strong_explicit(T *volatile *ptr, T **expectedCurrentValue, T *desiredValue, mbed_memory_order success, mbed_memory_order failure);
978 /** \copydoc core_util_atomic_compare_exchange_weak_explicit_ptr */
979 template<typename T> inline bool core_util_atomic_compare_exchange_weak_explicit(T *volatile *ptr, T **expectedCurrentValue, T *desiredValue, mbed_memory_order success, mbed_memory_order failure);
980 /** \copydoc core_util_atomic_fetch_add_explicit_ptr */
981 template<typename T> inline T *core_util_atomic_fetch_add_explicit(T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
982 /** \copydoc core_util_atomic_fetch_sub_explicit_ptr */
983 template<typename T> inline T *core_util_atomic_fetch_sub_explicit(T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order);
984 
985 #endif // __cplusplus
986 
987 /**@}*/
988 
989 /**@}*/
990 
991 /* Hide the implementation away */
992 #include "platform/internal/mbed_atomic_impl.h"
993 
994 #endif // __MBED_UTIL_ATOMIC_H__
995 
996 
997 
Atomic subtract.
bool core_util_atomic_compare_exchange_weak_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue)
Atomic compare and set.
void core_util_atomic_flag_clear_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order)
Atomic clear.
core_util_atomic_flag
A lock-free, primitive atomic flag.
Definition: mbed_atomic.h:115
void core_util_atomic_store_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order)
Atomic store.
void core_util_atomic_store_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order)
Atomic store.
void core_util_atomic_store_u16(volatile uint16_t *valuePtr, uint16_t desiredValue)
Atomic store.
uint16_t core_util_atomic_exchange_u16(volatile uint16_t *valuePtr, uint16_t desiredValue)
Atomic exchange.
void * core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
Atomic decrement.
void core_util_atomic_store_bool(volatile bool *valuePtr, bool desiredValue)
Atomic store.
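
Because the plain (non-explicit) stores have release semantics and loads have acquire semantics, a store/load pair can publish data between contexts. A minimal handoff sketch, with illustrative names:

    #include <stdbool.h>
    #include <stdint.h>
    #include "mbed_atomic.h"

    static uint32_t payload;       /* written before the flag is set */
    static volatile bool ready;

    void producer(void)
    {
        payload = 42U;                              /* plain write */
        core_util_atomic_store_bool(&ready, true);  /* release: publishes payload */
    }

    bool consumer(uint32_t *out)
    {
        if (core_util_atomic_load_bool(&ready)) {   /* acquire: pairs with the store */
            *out = payload; /* guaranteed to see the producer's write */
            return true;
        }
        return false;
    }
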
uint16_t core_util_atomic_fetch_sub_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
Atomic subtract.
bool core_util_atomic_compare_exchange_weak_explicit_s16(volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
int16_t core_util_atomic_load_s16(const volatile int16_t *valuePtr)
Atomic load.
void * core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
Atomic increment.
uint64_t core_util_atomic_incr_u64(volatile uint64_t *valuePtr, uint64_t delta)
Atomic increment.
uint8_t core_util_atomic_fetch_and_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
Atomic bitwise and.
int64_t core_util_atomic_fetch_add_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order)
Atomic add.
int64_t core_util_atomic_incr_s64(volatile int64_t *valuePtr, int64_t delta)
Atomic increment.
int32_t core_util_atomic_load_s32(const volatile int32_t *valuePtr)
Atomic load.
uint16_t core_util_atomic_fetch_xor_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
Atomic bitwise exclusive or.
bool core_util_atomic_compare_exchange_weak_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
Atomic compare and set.
uint64_t core_util_atomic_decr_u64(volatile uint64_t *valuePtr, uint64_t delta)
Atomic decrement.
uint32_t core_util_atomic_exchange_u32(volatile uint32_t *valuePtr, uint32_t desiredValue)
Atomic exchange.
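
A common post/drain pattern combining the bitwise-or and exchange operations: an interrupt handler posts event bits, and a thread drains all pending bits in one atomic step by exchanging in zero. A sketch with hypothetical names:

    #include <stdint.h>
    #include "mbed_atomic.h"

    static volatile uint32_t pending_events; /* illustrative event mask */

    void isr_post_event(uint32_t bit)
    {
        core_util_atomic_fetch_or_u32(&pending_events, bit);
    }

    uint32_t thread_take_events(void)
    {
        /* Returns and clears everything posted so far, atomically. */
        return core_util_atomic_exchange_u32(&pending_events, 0);
    }
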
void core_util_atomic_store_s8(volatile int8_t *valuePtr, int8_t desiredValue)
Atomic store.
void core_util_atomic_store_s16(volatile int16_t *valuePtr, int16_t desiredValue)
Atomic store.
uint8_t core_util_atomic_fetch_or_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
Atomic bitwise inclusive or.
uint16_t core_util_atomic_exchange_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order)
Atomic exchange.
int32_t core_util_atomic_fetch_sub_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order)
Atomic subtract.
uint32_t core_util_atomic_fetch_or_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
Atomic bitwise inclusive or.
void * core_util_atomic_fetch_sub_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order)
Atomic subtract.
uint8_t core_util_atomic_fetch_add_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
Atomic add.
bool core_util_atomic_cas_explicit_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
bool core_util_atomic_compare_exchange_weak_explicit_s32(volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
mbed_memory_order
Memory order constraints for atomic operations.
Definition: mbed_atomic.h:51
uint16_t core_util_atomic_fetch_and_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
Atomic bitwise and.
void * core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue)
Atomic exchange.
uint16_t core_util_atomic_fetch_add_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
Atomic add.
uint8_t core_util_atomic_fetch_or_u8(volatile uint8_t *valuePtr, uint8_t arg)
Atomic bitwise inclusive or.
bool core_util_atomic_compare_exchange_weak_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
Atomic compare and set.
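
The pointer compare-and-set supports classic lock-free structures. A push-only LIFO sketch; the node type and function name are illustrative, and (as with C11 strong compare-exchange) it assumes *expectedCurrentValue is refreshed with the current value when the operation fails:

    #include "mbed_atomic.h"

    struct node {
        struct node *next;
    };

    static struct node *volatile head; /* illustrative list head */

    void lifo_push(struct node *n)
    {
        /* Link to the observed head, then try to swing 'head' to the
         * new node; on failure 'old' is refreshed and we retry. */
        struct node *old = core_util_atomic_load_ptr((void *const volatile *)&head);
        do {
            n->next = old;
        } while (!core_util_atomic_cas_ptr((void *volatile *)&head,
                                           (void **)&old, n));
    }
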
bool core_util_atomic_cas_explicit_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
void core_util_atomic_store_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order)
Atomic store.
uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
Atomic increment.
uint64_t core_util_atomic_fetch_or_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
Atomic bitwise inclusive or.
int64_t core_util_atomic_fetch_sub_explicit_s64(volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order)
Atomic subtract.
uint8_t core_util_atomic_fetch_sub_u8(volatile uint8_t *valuePtr, uint8_t arg)
Atomic subtract.
uint32_t core_util_atomic_fetch_sub_u32(volatile uint32_t *valuePtr, uint32_t arg)
Atomic subtract.
int64_t core_util_atomic_exchange_s64(volatile int64_t *valuePtr, int64_t desiredValue)
Atomic exchange.
uint64_t core_util_atomic_fetch_add_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
Atomic add.
void * core_util_atomic_load_explicit_ptr(void *const volatile *valuePtr, mbed_memory_order order)
Atomic load.
bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
Atomic compare and set.
void core_util_atomic_store_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order)
Atomic store.
int32_t core_util_atomic_decr_s32(volatile int32_t *valuePtr, int32_t delta)
Atomic decrement.
uint8_t core_util_atomic_exchange_u8(volatile uint8_t *valuePtr, uint8_t desiredValue)
Atomic exchange.
bool core_util_atomic_cas_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
Atomic decrement.
bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
Atomic test and set.
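
Since the flag emulates C11's atomic_flag, test-and-set returns the flag's previous value, which is enough to build a minimal spinlock. A sketch, sensible only for very short critical sections:

    #include "mbed_atomic.h"

    static core_util_atomic_flag lock = CORE_UTIL_ATOMIC_FLAG_INIT;

    void spin_lock(void)
    {
        /* Spin until the previous value was clear, i.e. we set it. */
        while (core_util_atomic_flag_test_and_set(&lock)) {
            /* busy-wait; a real implementation might yield here */
        }
    }

    void spin_unlock(void)
    {
        core_util_atomic_flag_clear(&lock);
    }
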
void core_util_atomic_store_u32(volatile uint32_t *valuePtr, uint32_t desiredValue)
Atomic store.
uint32_t core_util_atomic_exchange_explicit_u32(volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order)
Atomic exchange.
uint64_t core_util_atomic_fetch_xor_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
Atomic bitwise exclusive or.
uint16_t core_util_atomic_fetch_or_explicit_u16(volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
Atomic bitwise inclusive or.
int64_t core_util_atomic_fetch_sub_s64(volatile int64_t *valuePtr, int64_t arg)
Atomic subtract.
bool core_util_atomic_flag_test_and_set_explicit(volatile core_util_atomic_flag *valuePtr, mbed_memory_order order)
Atomic test and set.
int64_t core_util_atomic_fetch_add_s64(volatile int64_t *valuePtr, int64_t arg)
Atomic add.
bool core_util_atomic_compare_exchange_weak_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue)
Atomic compare and set.
uint32_t core_util_atomic_load_u32(const volatile uint32_t *valuePtr)
Atomic load.
void * core_util_atomic_exchange_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order)
Atomic exchange.
void core_util_atomic_store_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order)
Atomic store.
uint32_t core_util_atomic_fetch_and_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
Atomic bitwise and.
uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
Atomic increment.
uint8_t core_util_atomic_fetch_and_u8(volatile uint8_t *valuePtr, uint8_t arg)
Atomic bitwise and.
void core_util_atomic_store_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order)
Atomic store.
bool core_util_atomic_exchange_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order)
Atomic exchange.
bool core_util_atomic_cas_explicit_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
int8_t core_util_atomic_incr_s8(volatile int8_t *valuePtr, int8_t delta)
Atomic increment.
int8_t core_util_atomic_exchange_explicit_s8(volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order)
Atomic exchange.
uint8_t core_util_atomic_fetch_add_u8(volatile uint8_t *valuePtr, uint8_t arg)
Atomic add.
uint64_t core_util_atomic_load_u64(const volatile uint64_t *valuePtr)
Atomic load.
int64_t core_util_atomic_load_s64(const volatile int64_t *valuePtr)
Atomic load.
uint64_t core_util_atomic_fetch_and_u64(volatile uint64_t *valuePtr, uint64_t arg)
Atomic bitwise and.
uint32_t core_util_atomic_fetch_add_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
Atomic add.
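
The _explicit variants let you relax the sequentially consistent default where no ordering is needed. A sketch for a pure statistics counter, with illustrative names, that needs atomicity but orders nothing against other data:

    #include <stdint.h>
    #include "mbed_atomic.h"

    static volatile uint32_t rx_drops; /* illustrative statistics counter */

    void note_rx_drop(void)
    {
        core_util_atomic_fetch_add_explicit_u32(&rx_drops, 1,
                                                mbed_memory_order_relaxed);
    }

    uint32_t read_rx_drops(void)
    {
        return core_util_atomic_load_explicit_u32(&rx_drops,
                                                  mbed_memory_order_relaxed);
    }
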
bool core_util_atomic_compare_exchange_weak_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue)
Atomic compare and set.
uint8_t core_util_atomic_exchange_explicit_u8(volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order)
Atomic exchange.
uint16_t core_util_atomic_fetch_xor_u16(volatile uint16_t *valuePtr, uint16_t arg)
Atomic bitwise exclusive or.
uint8_t core_util_atomic_fetch_xor_u8(volatile uint8_t *valuePtr, uint8_t arg)
Atomic bitwise exclusive or.
uint8_t core_util_atomic_load_u8(const volatile uint8_t *valuePtr)
Atomic load.
uint32_t core_util_atomic_fetch_add_u32(volatile uint32_t *valuePtr, uint32_t arg)
Atomic add.
uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
Atomic decrement.
uint64_t core_util_atomic_fetch_and_explicit_u64(volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
Atomic bitwise and.
bool core_util_atomic_compare_exchange_weak_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
Atomic compare and set.
int8_t core_util_atomic_fetch_add_explicit_s8(volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order)
Atomic add.
int16_t core_util_atomic_fetch_add_explicit_s16(volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order)
Atomic add.
int8_t core_util_atomic_load_s8(const volatile int8_t *valuePtr)
Atomic load.
uint64_t core_util_atomic_load_explicit_u64(const volatile uint64_t *valuePtr, mbed_memory_order order)
Atomic load.
bool core_util_atomic_compare_exchange_weak_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue)
Atomic compare and set.
uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
Atomic increment.
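
A reference-count sketch built on incr/decr. This assumes incr and decr return the resulting (new) value, which is what distinguishes them from the fetch_ forms above, whose C11 analogues return the old value; names are illustrative.

    #include <stdbool.h>
    #include <stdint.h>
    #include "mbed_atomic.h"

    static volatile uint32_t refcount = 1; /* illustrative object refcount */

    void obj_ref(void)
    {
        core_util_atomic_incr_u32(&refcount, 1);
    }

    bool obj_unref(void)
    {
        /* True when the last reference was just dropped; the caller
         * would free the object at that point. */
        return core_util_atomic_decr_u32(&refcount, 1) == 0;
    }
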
bool core_util_atomic_cas_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue)
Atomic compare and set.
void core_util_atomic_store_explicit_s32(volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order)
Atomic store.
void core_util_atomic_store_explicit_u16(volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order)
Atomic store.
uint64_t core_util_atomic_fetch_xor_u64(volatile uint64_t *valuePtr, uint64_t arg)
Atomic bitwise exclusive or.
bool core_util_atomic_exchange_bool(volatile bool *valuePtr, bool desiredValue)
Atomic exchange.
bool core_util_atomic_cas_explicit_s64(volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
bool core_util_atomic_cas_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
uint32_t core_util_atomic_fetch_xor_explicit_u32(volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
Atomic bitwise exclusive or.
void * core_util_atomic_fetch_add_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order)
Atomic add.
int32_t core_util_atomic_fetch_add_s32(volatile int32_t *valuePtr, int32_t arg)
Atomic add.
uint32_t core_util_atomic_fetch_and_u32(volatile uint32_t *valuePtr, uint32_t arg)
Atomic bitwise and.
int32_t core_util_atomic_incr_s32(volatile int32_t *valuePtr, int32_t delta)
Atomic increment.
uint16_t core_util_atomic_fetch_and_u16(volatile uint16_t *valuePtr, uint16_t arg)
Atomic bitwise and.
bool core_util_atomic_compare_exchange_weak_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
Atomic compare and set.
int16_t core_util_atomic_incr_s16(volatile int16_t *valuePtr, int16_t delta)
Atomic increment.
int8_t core_util_atomic_fetch_sub_s8(volatile int8_t *valuePtr, int8_t arg)
Atomic subtract.
uint8_t core_util_atomic_fetch_sub_explicit_u8(volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
Atomic subtract.
void core_util_atomic_store_ptr(void *volatile *valuePtr, void *desiredValue)
Atomic store.
int16_t core_util_atomic_exchange_explicit_s16(volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order)
Atomic exchange.
bool core_util_atomic_cas_s8(volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue)
Atomic compare and set.
int32_t core_util_atomic_fetch_add_explicit_s32(volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order)
Atomic add.
void core_util_atomic_flag_clear(volatile core_util_atomic_flag *flagPtr)
Atomic clear.
uint32_t core_util_atomic_load_explicit_u32(const volatile uint32_t *valuePtr, mbed_memory_order order)
Atomic load.
#define MBED_UNUSED
Declare a function argument to be unused, suppressing compiler warnings.
bool core_util_atomic_compare_exchange_weak_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue)
Atomic compare and set.
void core_util_atomic_store_s32(volatile int32_t *valuePtr, int32_t desiredValue)
Atomic store.
uint64_t core_util_atomic_fetch_sub_u64(volatile uint64_t *valuePtr, uint64_t arg)
Atomic subtract.
bool core_util_atomic_load_bool(const volatile bool *valuePtr)
Atomic load.
bool core_util_atomic_compare_exchange_weak_explicit_u64(volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
Atomic compare and set.
int64_t core_util_atomic_load_explicit_s64(const volatile int64_t *valuePtr, mbed_memory_order order)
Atomic load.