Mirror of https://github.com/RIOT-OS/RIOT.git, synced 2024-12-29 04:50:03 +01:00

Merge pull request #15465 from maribu/atomic-utils-volatile

sys/atomic_utils: Use volatile qualifier

This commit is contained in commit 575189510d.
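The change adds the `volatile` qualifier to the pointer parameters of the atomic_utils API, across the CPU-specific backends and the generic fallbacks. As an illustration (hypothetical caller code, not part of this diff), this lets ISR-shared `volatile` data be passed without casting the qualifier away:

    #include <stdint.h>
    #include "atomic_utils.h"

    /* Hypothetical flag shared between an ISR and thread context. */
    static volatile uint8_t event_flag;

    void some_isr(void)
    {
        /* With the volatile-qualified signatures from this PR this compiles
         * cleanly; previously it needed a cast such as (uint8_t *)&event_flag,
         * which discards the qualifier and draws -Wdiscarded-qualifiers. */
        atomic_store_u8(&event_flag, 1);
    }

    uint8_t poll_event(void)
    {
        return atomic_load_u8(&event_flag);
    }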
@@ -30,37 +30,37 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U16
-static inline uint16_t atomic_load_u16(const uint16_t *var)
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var)
 {
     return __atomic_load_2(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U32
-static inline uint32_t atomic_load_u32(const uint32_t *var)
+static inline uint32_t atomic_load_u32(const volatile uint32_t *var)
 {
     return __atomic_load_4(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U16
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val)
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
 {
     __atomic_store_2(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U32
-static inline void atomic_store_u32(uint32_t *dest, uint32_t val)
+static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
 {
     __atomic_store_4(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -30,13 +30,13 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -31,37 +31,37 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U16
-static inline uint16_t atomic_load_u16(const uint16_t *var)
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var)
 {
     return __atomic_load_2(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U32
-static inline uint32_t atomic_load_u32(const uint32_t *var)
+static inline uint32_t atomic_load_u32(const volatile uint32_t *var)
 {
     return __atomic_load_4(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U16
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val)
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
 {
     __atomic_store_2(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U32
-static inline void atomic_store_u32(uint32_t *dest, uint32_t val)
+static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
 {
     __atomic_store_4(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -86,7 +86,7 @@ static inline void __attribute__((always_inline)) _bit_barrier_post(void)
     __asm__ volatile ("" : : : "memory");
 }

-static inline bool _is_addr_valid_for_bitbanding(void *_addr)
+static inline bool _is_addr_valid_for_bitbanding(volatile void *_addr)
 {
     /* SRAM bit-band region goes from 0x20000000 to 0x200fffff,
      * peripheral bit-band region goes from 0x40000000 to 0x400fffff */
@@ -102,25 +102,25 @@ static inline bool _is_addr_valid_for_bitbanding(void *_addr)
     return true;
 }

-static inline atomic_bit_u8_t atomic_bit_u8(uint8_t *dest, uint8_t bit)
+static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest, uint8_t bit)
 {
     assert(_is_addr_valid_for_bitbanding(dest));
     return bitband_addr(dest, bit);
 }

-static inline atomic_bit_u16_t atomic_bit_u16(uint16_t *dest, uint8_t bit)
+static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest, uint8_t bit)
 {
     assert(_is_addr_valid_for_bitbanding(dest));
     return bitband_addr(dest, bit);
 }

-static inline atomic_bit_u32_t atomic_bit_u32(uint32_t *dest, uint8_t bit)
+static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest, uint8_t bit)
 {
     assert(_is_addr_valid_for_bitbanding(dest));
     return bitband_addr(dest, bit);
 }

-static inline atomic_bit_u64_t atomic_bit_u64(uint64_t *dest, uint8_t bit)
+static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest, uint8_t bit)
 {
     assert(_is_addr_valid_for_bitbanding(dest));
     return bitband_addr(dest, bit);
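For background (an illustrative sketch, not part of the diff): Cortex-M bit-banding maps every bit of the SRAM and peripheral bit-band regions to its own word in an alias region, so writing that word sets or clears the single bit atomically. Roughly the address arithmetic a helper like `bitband_addr()` performs, assuming the standard Cortex-M memory map:

    #include <stdint.h>

    /* Sketch only: the SRAM bit-band region 0x20000000..0x200fffff is
     * aliased at 0x22000000, one 32-bit alias word per source bit. */
    static volatile uint32_t *sram_bitband_alias(volatile void *addr, uint8_t bit)
    {
        uintptr_t byte_offset = (uintptr_t)addr - 0x20000000UL;
        return (volatile uint32_t *)(0x22000000UL + (byte_offset << 5) + (bit << 2));
    }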
@@ -30,37 +30,37 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U16
-static inline uint16_t atomic_load_u16(const uint16_t *var)
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var)
 {
     return __atomic_load_2(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U32
-static inline uint32_t atomic_load_u32(const uint32_t *var)
+static inline uint32_t atomic_load_u32(const volatile uint32_t *var)
 {
     return __atomic_load_4(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U16
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val)
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
 {
     __atomic_store_2(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U32
-static inline void atomic_store_u32(uint32_t *dest, uint32_t val)
+static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
 {
     __atomic_store_4(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -30,37 +30,37 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U16
-static inline uint16_t atomic_load_u16(const uint16_t *var)
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var)
 {
     return __atomic_load_2(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U32
-static inline uint32_t atomic_load_u32(const uint32_t *var)
+static inline uint32_t atomic_load_u32(const volatile uint32_t *var)
 {
     return __atomic_load_4(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U16
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val)
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
 {
     __atomic_store_2(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U32
-static inline void atomic_store_u32(uint32_t *dest, uint32_t val)
+static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
 {
     __atomic_store_4(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -30,37 +30,37 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U16
-static inline uint16_t atomic_load_u16(const uint16_t *var)
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var)
 {
     return __atomic_load_2(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U32
-static inline uint32_t atomic_load_u32(const uint32_t *var)
+static inline uint32_t atomic_load_u32(const volatile uint32_t *var)
 {
     return __atomic_load_4(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U16
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val)
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
 {
     __atomic_store_2(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U32
-static inline void atomic_store_u32(uint32_t *dest, uint32_t val)
+static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
 {
     __atomic_store_4(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -30,25 +30,25 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U16
-static inline uint16_t atomic_load_u16(const uint16_t *var)
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var)
 {
     return __atomic_load_2(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U16
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val)
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
 {
     __atomic_store_2(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -30,37 +30,37 @@ extern "C" {
 #ifndef __clang__

 #define HAS_ATOMIC_LOAD_U8
-static inline uint8_t atomic_load_u8(const uint8_t *var)
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 {
     return __atomic_load_1(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U16
-static inline uint16_t atomic_load_u16(const uint16_t *var)
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var)
 {
     return __atomic_load_2(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_LOAD_U32
-static inline uint32_t atomic_load_u32(const uint32_t *var)
+static inline uint32_t atomic_load_u32(const volatile uint32_t *var)
 {
     return __atomic_load_4(var, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U8
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val)
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
 {
     __atomic_store_1(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U16
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val)
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
 {
     __atomic_store_2(dest, val, __ATOMIC_SEQ_CST);
 }

 #define HAS_ATOMIC_STORE_U32
-static inline void atomic_store_u32(uint32_t *dest, uint32_t val)
+static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
 {
     __atomic_store_4(dest, val, __ATOMIC_SEQ_CST);
 }
@@ -168,8 +168,8 @@ extern "C" {
  * @warning This is an implementation specific type!
  */
 typedef struct {
-    uint8_t *dest;              /**< Memory containing the bit to set/clear */
-    uint8_t mask;               /**< Bitmask used for setting the bit */
+    volatile uint8_t *dest;     /**< Memory containing the bit to set/clear */
+    uint8_t mask;               /**< Bitmask used for setting the bit */
 } atomic_bit_u8_t;

 /**
@@ -178,8 +178,8 @@ typedef struct {
  * @warning This is an implementation specific type!
  */
 typedef struct {
-    uint16_t *dest;             /**< Memory containing the bit to set/clear */
-    uint16_t mask;              /**< Bitmask used for setting the bit */
+    volatile uint16_t *dest;    /**< Memory containing the bit to set/clear */
+    uint16_t mask;              /**< Bitmask used for setting the bit */
 } atomic_bit_u16_t;

 /**
@@ -188,8 +188,8 @@ typedef struct {
  * @warning This is an implementation specific type!
  */
 typedef struct {
-    uint32_t *dest;             /**< Memory containing the bit to set/clear */
-    uint32_t mask;              /**< Bitmask used for setting the bit */
+    volatile uint32_t *dest;    /**< Memory containing the bit to set/clear */
+    uint32_t mask;              /**< Bitmask used for setting the bit */
 } atomic_bit_u32_t;

 /**
@@ -198,8 +198,8 @@ typedef struct {
  * @warning This is an implementation specific type!
  */
 typedef struct {
-    uint64_t *dest;             /**< Memory containing the bit to set/clear */
-    uint64_t mask;              /**< Bitmask used for setting the bit */
+    volatile uint64_t *dest;    /**< Memory containing the bit to set/clear */
+    uint64_t mask;              /**< Bitmask used for setting the bit */
 } atomic_bit_u64_t;
 /** @} */
 #endif /* HAS_ATOMIC_BIT */
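As a usage sketch (hypothetical caller, not from the diff), the now volatile-qualified `dest` member means a bit reference can be created directly on ISR-shared data, assuming the companion atomic_set_bit_u32() from this API:

    #include <stdint.h>
    #include "atomic_utils.h"

    static volatile uint32_t status_word;

    void mark_ready(void)
    {
        /* Take a reference to bit 3 of status_word and set it atomically. */
        atomic_bit_u32_t ready = atomic_bit_u32(&status_word, 3);
        atomic_set_bit_u32(ready);
    }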
@@ -214,28 +214,28 @@ typedef struct {
  * @param[in]   var     Variable to load atomically
  * @return      The value stored in @p var
  */
-static inline uint8_t atomic_load_u8(const uint8_t *var);
+static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
 /**
  * @brief   Load an `uint16_t` atomically
  *
  * @param[in]   var     Variable to load atomically
  * @return      The value stored in @p var
  */
-static inline uint16_t atomic_load_u16(const uint16_t *var);
+static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
 /**
  * @brief   Load an `uint32_t` atomically
  *
  * @param[in]   var     Variable to load atomically
  * @return      The value stored in @p var
  */
-static inline uint32_t atomic_load_u32(const uint32_t *var);
+static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
 /**
  * @brief   Load an `uint64_t` atomically
  *
  * @param[in]   var     Variable to load atomically
  * @return      The value stored in @p var
  */
-static inline uint64_t atomic_load_u64(const uint64_t *var);
+static inline uint64_t atomic_load_u64(const volatile uint64_t *var);
 /** @} */

 /**
@@ -247,25 +247,25 @@ static inline uint64_t atomic_load_u64(const uint64_t *var);
  * @param[out]  dest    Location to atomically write the new value to
  * @param[in]   val     Value to write
  */
-static inline void atomic_store_u8(uint8_t *dest, uint8_t val);
+static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
 /**
  * @brief   Store an `uint16_t` atomically
  * @param[out]  dest    Location to atomically write the new value to
  * @param[in]   val     Value to write
  */
-static inline void atomic_store_u16(uint16_t *dest, uint16_t val);
+static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
 /**
  * @brief   Store an `uint32_t` atomically
  * @param[out]  dest    Location to atomically write the new value to
  * @param[in]   val     Value to write
  */
-static inline void atomic_store_u32(uint32_t *dest, uint32_t val);
+static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
 /**
  * @brief   Store an `uint64_t` atomically
  * @param[out]  dest    Location to atomically write the new value to
  * @param[in]   val     Value to write
  */
-static inline void atomic_store_u64(uint64_t *dest, uint64_t val);
+static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);
 /** @} */

 /**
@@ -277,25 +277,28 @@ static inline void atomic_store_u64(uint64_t *dest, uint64_t val);
  * @param[in,out]   dest    Add @p summand onto this value atomically in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void atomic_fetch_add_u8(uint8_t *dest, uint8_t summand);
+static inline void atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand);
 /**
  * @brief   Atomically add a value onto a given value
  * @param[in,out]   dest    Add @p summand onto this value atomically in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void atomic_fetch_add_u16(uint16_t *dest, uint16_t summand);
+static inline void atomic_fetch_add_u16(volatile uint16_t *dest,
+                                        uint16_t summand);
 /**
  * @brief   Atomically add a value onto a given value
  * @param[in,out]   dest    Add @p summand onto this value atomically in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void atomic_fetch_add_u32(uint32_t *dest, uint32_t summand);
+static inline void atomic_fetch_add_u32(volatile uint32_t *dest,
+                                        uint32_t summand);
 /**
  * @brief   Atomically add a value onto a given value
  * @param[in,out]   dest    Add @p summand onto this value atomically in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void atomic_fetch_add_u64(uint64_t *dest, uint64_t summand);
+static inline void atomic_fetch_add_u64(volatile uint64_t *dest,
+                                        uint64_t summand);
 /** @} */

 /**
@@ -308,28 +311,32 @@ static inline void atomic_fetch_add_u64(uint64_t *dest, uint64_t summand);
  *                              atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void atomic_fetch_sub_u8(uint8_t *dest, uint8_t subtrahend);
+static inline void atomic_fetch_sub_u8(volatile uint8_t *dest,
+                                       uint8_t subtrahend);
 /**
  * @brief   Atomically subtract a value from a given value
  * @param[in,out]   dest        Subtract @p subtrahend from this value
  *                              atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void atomic_fetch_sub_u16(uint16_t *dest, uint16_t subtrahend);
+static inline void atomic_fetch_sub_u16(volatile uint16_t *dest,
+                                        uint16_t subtrahend);
 /**
  * @brief   Atomically subtract a value from a given value
  * @param[in,out]   dest        Subtract @p subtrahend from this value
  *                              atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void atomic_fetch_sub_u32(uint32_t *dest, uint32_t subtrahend);
+static inline void atomic_fetch_sub_u32(volatile uint32_t *dest,
+                                        uint32_t subtrahend);
 /**
  * @brief   Atomically subtract a value from a given value
  * @param[in,out]   dest        Subtract @p subtrahend from this value
  *                              atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void atomic_fetch_sub_u64(uint64_t *dest, uint64_t subtrahend);
+static inline void atomic_fetch_sub_u64(volatile uint64_t *dest,
+                                        uint64_t subtrahend);
 /** @} */

 /**
@@ -342,28 +349,28 @@ static inline void atomic_fetch_sub_u64(uint64_t *dest, uint64_t subtrahend);
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void atomic_fetch_or_u8(uint8_t *dest, uint8_t val);
+static inline void atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
 /**
  * @brief   Atomic version of `*dest |= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void atomic_fetch_or_u16(uint16_t *dest, uint16_t val);
+static inline void atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val);
 /**
  * @brief   Atomic version of `*dest |= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void atomic_fetch_or_u32(uint32_t *dest, uint32_t val);
+static inline void atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val);
 /**
  * @brief   Atomic version of `*dest |= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void atomic_fetch_or_u64(uint64_t *dest, uint64_t val);
+static inline void atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val);
 /** @} */

 /**
@@ -376,28 +383,28 @@ static inline void atomic_fetch_or_u64(uint64_t *dest, uint64_t val);
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void atomic_fetch_xor_u8(uint8_t *dest, uint8_t val);
+static inline void atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
 /**
  * @brief   Atomic version of `*dest ^= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void atomic_fetch_xor_u16(uint16_t *dest, uint16_t val);
+static inline void atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val);
 /**
  * @brief   Atomic version of `*dest ^= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void atomic_fetch_xor_u32(uint32_t *dest, uint32_t val);
+static inline void atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val);
 /**
  * @brief   Atomic version of `*dest ^= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void atomic_fetch_xor_u64(uint64_t *dest, uint64_t val);
+static inline void atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val);
 /** @} */

 /**
@@ -410,28 +417,28 @@ static inline void atomic_fetch_xor_u64(uint64_t *dest, uint64_t val);
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void atomic_fetch_and_u8(uint8_t *dest, uint8_t val);
+static inline void atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
 /**
  * @brief   Atomic version of `*dest &= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void atomic_fetch_and_u16(uint16_t *dest, uint16_t val);
+static inline void atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val);
 /**
  * @brief   Atomic version of `*dest &= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void atomic_fetch_and_u32(uint32_t *dest, uint32_t val);
+static inline void atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val);
 /**
  * @brief   Atomic version of `*dest &= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void atomic_fetch_and_u64(uint64_t *dest, uint64_t val);
+static inline void atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val);
 /** @} */

 /**
@@ -443,28 +450,32 @@ static inline void atomic_fetch_and_u64(uint64_t *dest, uint64_t val);
  * @param[in]       dest    Memory containing the bit
  * @param[in]       bit     Bit number (`0` refers to the least significant)
  */
-static inline atomic_bit_u8_t atomic_bit_u8(uint8_t *dest, uint8_t bit);
+static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
+                                            uint8_t bit);

 /**
  * @brief   Create a reference to a bit in an `uint16_t`
  * @param[in]       dest    Memory containing the bit
  * @param[in]       bit     Bit number (`0` refers to the least significant)
  */
-static inline atomic_bit_u16_t atomic_bit_u16(uint16_t *dest, uint8_t bit);
+static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
+                                              uint8_t bit);

 /**
  * @brief   Create a reference to a bit in an `uint32_t`
  * @param[in]       dest    Memory containing the bit
  * @param[in]       bit     Bit number (`0` refers to the least significant)
  */
-static inline atomic_bit_u32_t atomic_bit_u32(uint32_t *dest, uint8_t bit);
+static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
+                                              uint8_t bit);

 /**
  * @brief   Create a reference to a bit in an `uint64_t`
  * @param[in]       dest    Memory containing the bit
  * @param[in]       bit     Bit number (`0` refers to the least significant)
  */
-static inline atomic_bit_u64_t atomic_bit_u64(uint64_t *dest, uint8_t bit);
+static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
+                                              uint8_t bit);
 /** @} */

 /**
@@ -529,28 +540,32 @@ static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
  *                          in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void semi_atomic_fetch_add_u8(uint8_t *dest, uint8_t summand);
+static inline void semi_atomic_fetch_add_u8(volatile uint8_t *dest,
+                                            uint8_t summand);
 /**
  * @brief   Semi-atomically add a value onto a given value
  * @param[in,out]   dest    Add @p summand onto this value semi-atomically
  *                          in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void semi_atomic_fetch_add_u16(uint16_t *dest, uint16_t summand);
+static inline void semi_atomic_fetch_add_u16(volatile uint16_t *dest,
+                                             uint16_t summand);
 /**
  * @brief   Semi-atomically add a value onto a given value
  * @param[in,out]   dest    Add @p summand onto this value semi-atomically
  *                          in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void semi_atomic_fetch_add_u32(uint32_t *dest, uint32_t summand);
+static inline void semi_atomic_fetch_add_u32(volatile uint32_t *dest,
+                                             uint32_t summand);
 /**
  * @brief   Semi-atomically add a value onto a given value
  * @param[in,out]   dest    Add @p summand onto this value semi-atomically
  *                          in-place
  * @param[in]       summand Value to add onto @p dest
  */
-static inline void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t summand);
+static inline void semi_atomic_fetch_add_u64(volatile uint64_t *dest,
+                                             uint64_t summand);
 /** @} */

 /**
@@ -563,14 +578,15 @@ static inline void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t summand);
  *                              semi-atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void semi_atomic_fetch_sub_u8(uint8_t *dest, uint8_t subtrahend);
+static inline void semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
+                                            uint8_t subtrahend);
 /**
  * @brief   Semi-atomically subtract a value from a given value
  * @param[in,out]   dest        Subtract @p subtrahend from this value
  *                              semi-atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void semi_atomic_fetch_sub_u16(uint16_t *dest,
+static inline void semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                              uint16_t subtrahend);
 /**
  * @brief   Semi-atomically subtract a value from a given value
@@ -578,7 +594,7 @@ static inline void semi_atomic_fetch_sub_u16(uint16_t *dest,
  *                              semi-atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void semi_atomic_fetch_sub_u32(uint32_t *dest,
+static inline void semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                              uint32_t subtrahend);
 /**
  * @brief   Semi-atomically subtract a value from a given value
@@ -586,7 +602,7 @@ static inline void semi_atomic_fetch_sub_u32(uint32_t *dest,
  *                              semi-atomically in-place
  * @param[in]       subtrahend  Value to subtract from @p dest
  */
-static inline void semi_atomic_fetch_sub_u64(uint64_t *dest,
+static inline void semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                              uint64_t subtrahend);
 /** @} */

@@ -600,28 +616,31 @@ static inline void semi_atomic_fetch_sub_u64(uint64_t *dest,
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void semi_atomic_fetch_or_u8(uint8_t *dest, uint8_t val);
+static inline void semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
 /**
  * @brief   Semi-atomic version of `*dest |= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void semi_atomic_fetch_or_u16(uint16_t *dest, uint16_t val);
+static inline void semi_atomic_fetch_or_u16(volatile uint16_t *dest,
+                                            uint16_t val);
 /**
  * @brief   Semi-atomic version of `*dest |= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void semi_atomic_fetch_or_u32(uint32_t *dest, uint32_t val);
+static inline void semi_atomic_fetch_or_u32(volatile uint32_t *dest,
+                                            uint32_t val);
 /**
  * @brief   Semi-atomic version of `*dest |= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest | val`
  * @param[in]       val     Value to bitwise or into @p dest in-place
  */
-static inline void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val);
+static inline void semi_atomic_fetch_or_u64(volatile uint64_t *dest,
+                                            uint64_t val);
 /** @} */

 /**
@@ -634,28 +653,32 @@ static inline void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val);
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void semi_atomic_fetch_xor_u8(uint8_t *dest, uint8_t val);
+static inline void semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
+                                            uint8_t val);
 /**
  * @brief   Semi-atomic version of `*dest ^= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void semi_atomic_fetch_xor_u16(uint16_t *dest, uint16_t val);
+static inline void semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
+                                             uint16_t val);
 /**
  * @brief   Semi-atomic version of `*dest ^= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void semi_atomic_fetch_xor_u32(uint32_t *dest, uint32_t val);
+static inline void semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
+                                             uint32_t val);
 /**
  * @brief   Semi-atomic version of `*dest ^= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest ^ val`
  * @param[in]       val     Value to bitwise xor into @p dest in-place
  */
-static inline void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val);
+static inline void semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
+                                             uint64_t val);
 /** @} */

 /**
@@ -668,28 +691,32 @@ static inline void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val);
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void semi_atomic_fetch_and_u8(uint8_t *dest, uint8_t val);
+static inline void semi_atomic_fetch_and_u8(volatile uint8_t *dest,
+                                            uint8_t val);
 /**
  * @brief   Semi-atomic version of `*dest &= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void semi_atomic_fetch_and_u16(uint16_t *dest, uint16_t val);
+static inline void semi_atomic_fetch_and_u16(volatile uint16_t *dest,
+                                             uint16_t val);
 /**
  * @brief   Semi-atomic version of `*dest &= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void semi_atomic_fetch_and_u32(uint32_t *dest, uint32_t val);
+static inline void semi_atomic_fetch_and_u32(volatile uint32_t *dest,
+                                             uint32_t val);
 /**
  * @brief   Semi-atomic version of `*dest &= val`
  * @param[in,out]   dest    Replace this value with the result of
  *                          `*dest & val`
  * @param[in]       val     Value to bitwise and into @p dest in-place
  */
-static inline void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val);
+static inline void semi_atomic_fetch_and_u64(volatile uint64_t *dest,
+                                             uint64_t val);
 /** @} */

 /* Fallback implementations of atomic utility functions: */
@@ -712,14 +739,10 @@ static inline void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val);
  * @param   type    Variable type, e.g. `uint8_t`
  */
 #define ATOMIC_LOAD_IMPL(name, type)                                           \
-    static inline type CONCAT(atomic_load_, name)(const type *var)            \
+    static inline type CONCAT(atomic_load_, name)(const volatile type *var)   \
     {                                                                          \
         unsigned state = irq_disable();                                        \
-        /* var can be register allocated, hence the memory barrier of         \
-         * irq_disable() and irq_restore() may not apply here. Using volatile \
-         * ensures that the compiler allocates it in memory and that the      \
-         * memory access is not optimized out. */                             \
-        type result = *((const volatile type *)var);                          \
+        type result = *var;                                                   \
        irq_restore(state);                                                    \
        return result;                                                         \
    }
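To make the macro change concrete, a sketch (not part of the diff) of roughly what ATOMIC_LOAD_IMPL(u8, uint8_t) expands to after this commit; with the parameter itself volatile-qualified, the dereference is already a volatile access, which is why the explicit cast and the comment justifying it could be dropped:

    static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
    {
        unsigned state = irq_disable();  /* mask IRQs so the read is not torn */
        uint8_t result = *var;           /* volatile read: cannot be optimized out */
        irq_restore(state);
        return result;
    }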
@@ -744,15 +767,12 @@ ATOMIC_LOAD_IMPL(u64, uint64_t)
  * @param   name    Name of the variable type, e.g. "u8"
  * @param   type    Variable type, e.g. `uint8_t`
  */
-#define ATOMIC_STORE_IMPL(name, type)                                          \
-    static inline void CONCAT(atomic_store_, name)(type *dest, type val)      \
+#define ATOMIC_STORE_IMPL(name, type)                                         \
+    static inline void CONCAT(atomic_store_, name)                            \
+        (volatile type *dest, type val)                                       \
     {                                                                          \
         unsigned state = irq_disable();                                        \
-        /* dest can be register allocated, hence the memory barrier of        \
-         * irq_disable() and irq_restore() may not apply here. Using volatile \
-         * ensures that the compiler allocates it in memory and that the      \
-         * memory access is not optimized out. */                             \
-        *((volatile type *)dest) = val;                                       \
+        *dest = val;                                                          \
         irq_restore(state);                                                   \
     }

@@ -779,16 +799,11 @@ ATOMIC_STORE_IMPL(u64, uint64_t)
  * @param   type    Variable type, e.g. `uint8_t`
  */
 #define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)                           \
-    static inline void CONCAT4(atomic_fetch_, opname, _, name)(type *dest,    \
-                                                               type val)      \
+    static inline void CONCAT4(atomic_fetch_, opname, _, name)                \
+        (volatile type *dest, type val)                                       \
     {                                                                          \
         unsigned state = irq_disable();                                        \
-        /* dest can be register allocated, hence the memory barrier of        \
-         * irq_disable() and irq_restore() may not apply here. Using volatile \
-         * ensures that the compiler allocates it in memory and that the      \
-         * memory access is not optimized out. */                             \
-        volatile type *tmp = dest;                                            \
-        *tmp = *tmp op val;                                                   \
+        *dest = *dest op val;                                                 \
         irq_restore(state);                                                   \
     }

@@ -858,22 +873,26 @@ ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
 #endif

 #ifndef HAS_ATOMIC_BIT
-static inline atomic_bit_u8_t atomic_bit_u8(uint8_t *dest, uint8_t bit)
+static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
+                                            uint8_t bit)
 {
     atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
     return result;
 }
-static inline atomic_bit_u16_t atomic_bit_u16(uint16_t *dest, uint8_t bit)
+static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
+                                              uint8_t bit)
 {
     atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
     return result;
 }
-static inline atomic_bit_u32_t atomic_bit_u32(uint32_t *dest, uint8_t bit)
+static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
+                                              uint8_t bit)
 {
     atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
     return result;
 }
-static inline atomic_bit_u64_t atomic_bit_u64(uint64_t *dest, uint8_t bit)
+static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
+                                              uint8_t bit)
 {
     atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
     return result;
@@ -926,205 +945,275 @@ static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)

 /* FETCH_ADD */
 #if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
-static inline void semi_atomic_fetch_add_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_fetch_add_u8(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_add_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_store_u8(dest, *dest + val);
 }
 #endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

 #if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
-static inline void semi_atomic_fetch_add_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_add_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_fetch_add_u16(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_add_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_add_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_store_u16(dest, *dest + val);
 }
 #endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

 #if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
-static inline void semi_atomic_fetch_add_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_add_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_fetch_add_u32(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_add_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_add_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_store_u32(dest, *dest + val);
 }
 #endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

 #if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
-static inline void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_add_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_fetch_add_u64(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_add_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_add_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_store_u64(dest, *dest + val);
 }
 #endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

 /* FETCH_SUB */
 #if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
-static inline void semi_atomic_fetch_sub_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_fetch_sub_u8(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_sub_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_store_u8(dest, *dest - val);
 }
 #endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

 #if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
-static inline void semi_atomic_fetch_sub_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_fetch_sub_u16(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_sub_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_store_u16(dest, *dest - val);
 }
 #endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

 #if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
-static inline void semi_atomic_fetch_sub_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_fetch_sub_u32(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_sub_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_store_u32(dest, *dest - val);
 }
 #endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U64 */

 #if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
-static inline void semi_atomic_fetch_sub_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_fetch_sub_u64(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_sub_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_store_u64(dest, *dest - val);
 }
 #endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */

 /* FETCH_OR */
 #if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
-static inline void semi_atomic_fetch_or_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_fetch_or_u8(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_or_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_store_u8(dest, *dest | val);
 }
 #endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

 #if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
-static inline void semi_atomic_fetch_or_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_or_u16(volatile uint16_t *dest,
+                                            uint16_t val)
+{
     atomic_fetch_or_u16(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_or_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_or_u16(volatile uint16_t *dest,
+                                            uint16_t val)
+{
     atomic_store_u16(dest, *dest | val);
 }
 #endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

 #if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
-static inline void semi_atomic_fetch_or_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_or_u32(volatile uint32_t *dest,
+                                            uint32_t val)
+{
     atomic_fetch_or_u32(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_or_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_or_u32(volatile uint32_t *dest,
+                                            uint32_t val)
+{
     atomic_store_u32(dest, *dest | val);
 }
 #endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

 #if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
-static inline void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_or_u64(volatile uint64_t *dest,
+                                            uint64_t val)
+{
     atomic_fetch_or_u64(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_or_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_or_u64(volatile uint64_t *dest,
+                                            uint64_t val)
+{
     atomic_store_u64(dest, *dest | val);
 }
 #endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */

 /* FETCH_XOR */
 #if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
-static inline void semi_atomic_fetch_xor_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_fetch_xor_u8(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_xor_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_store_u8(dest, *dest ^ val);
 }
 #endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

 #if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
-static inline void semi_atomic_fetch_xor_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_fetch_xor_u16(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_xor_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_store_u16(dest, *dest ^ val);
 }
 #endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

 #if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
-static inline void semi_atomic_fetch_xor_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_fetch_xor_u32(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_xor_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_store_u32(dest, *dest ^ val);
 }
 #endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

 #if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
-static inline void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_fetch_xor_u64(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_xor_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_store_u64(dest, *dest ^ val);
 }
 #endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */

 /* FETCH_AND */
 #if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
-static inline void semi_atomic_fetch_and_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_fetch_and_u8(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_and_u8(uint8_t *dest, uint8_t val) {
+static inline void semi_atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
+{
     atomic_store_u8(dest, *dest & val);
 }
 #endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */

 #if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
-static inline void semi_atomic_fetch_and_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_and_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_fetch_and_u16(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_and_u16(uint16_t *dest, uint16_t val) {
+static inline void semi_atomic_fetch_and_u16(volatile uint16_t *dest,
+                                             uint16_t val)
+{
     atomic_store_u16(dest, *dest & val);
 }
 #endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */

 #if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
-static inline void semi_atomic_fetch_and_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_and_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_fetch_and_u32(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_and_u32(uint32_t *dest, uint32_t val) {
+static inline void semi_atomic_fetch_and_u32(volatile uint32_t *dest,
+                                             uint32_t val)
+{
     atomic_store_u32(dest, *dest & val);
 }
 #endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */

 #if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
-static inline void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_and_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_fetch_and_u64(dest, val);
 }
 #else
-static inline void semi_atomic_fetch_and_u64(uint64_t *dest, uint64_t val) {
+static inline void semi_atomic_fetch_and_u64(volatile uint64_t *dest,
+                                             uint64_t val)
+{
     atomic_store_u64(dest, *dest & val);
 }
 #endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */
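A note on the "semi-atomic" fallbacks above (illustration, not part of the diff): where only the store is atomic, the read-modify-write decomposes into a plain read followed by an atomic store, so a concurrent update between the two steps can be lost, but a reader never observes a torn value. Spelled out for one case:

    /* The fallback pattern used above, for the u8 case: */
    void semi_atomic_increment_sketch(volatile uint8_t *dest)
    {
        uint8_t tmp = *dest;             /* plain read */
        atomic_store_u8(dest, tmp + 1);  /* atomic write of the updated value */
    }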
sys/include/volatile_utils.h (new file, 300 lines)
@@ -0,0 +1,300 @@
+/*
+ * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
+ *
+ * This file is subject to the terms and conditions of the GNU Lesser General
+ * Public License v2.1. See the file LICENSE in the top level directory for more
+ * details.
+ */
+
+/**
+ * @defgroup    sys_atomic_utils_volatile   Helpers for volatile accesses
+ * @ingroup     sys_atomic_utils
+ * @{
+ *
+ * @file
+ * @brief       Utility functions for non-atomic but volatile access
+ * @author      Marian Buschsieweke <marian.buschsieweke@ovgu.de>
+ *
+ * This functions intentionally have the same signature as the
+ * @ref sys_atomic_utils but don't guarantee atomic access and sequential
+ * consistency (or any other consistency). They basically only guarantee that
+ * no compiler optimization is applied to that operation and that the compiler
+ * will not reorder these operations in regard to each other or other `volatile`
+ * accesses.
+ *
+ * @warning     In most cases using this over @ref sys_atomic_utils is a bug!
+ */
+
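As an illustration of that warning (hypothetical caller, not part of this file): a busy-wait on a flag with a single writer is a fitting use of these helpers, while a shared read-modify-write is not:

    static volatile uint8_t done;  /* single writer (ISR), single reader */

    void wait_for_completion(void)
    {
        while (volatile_load_u8(&done) == 0) {
            /* spin: the volatile access forces a fresh read each iteration */
        }
    }

    /* NOT OK: the increment is a read-modify-write, so an interrupt between
     * the read and the write loses an update. Use atomic_fetch_add_u8(). */
    void lossy_counter_bump(volatile uint8_t *counter)
    {
        volatile_fetch_add_u8(counter, 1);
    }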
+#ifndef VOLATILE_UTILS_H
+#define VOLATILE_UTILS_H
+
+#include <stdint.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @brief   Load an 8 bit value completely unoptimized
+ * @param   var     Address to load the value from
+ * @return  The value read unoptimized from address @p var
+ */
+static inline uint8_t volatile_load_u8(const volatile uint8_t *var)
+{
+    return *var;
+}
+/**
+ * @brief   Load an 16 bit value completely unoptimized
+ * @param   var     Address to load the value from
+ * @return  The value read unoptimized from address @p var
+ */
+static inline uint16_t volatile_load_u16(const volatile uint16_t *var)
+{
+    return *var;
+}
+/**
+ * @brief   Load an 32 bit value completely unoptimized
+ * @param   var     Address to load the value from
+ * @return  The value read unoptimized from address @p var
+ */
+static inline uint32_t volatile_load_u32(const volatile uint32_t *var)
+{
+    return *var;
+}
+/**
+ * @brief   Load an 64 bit value completely unoptimized
+ * @param   var     Address to load the value from
+ * @return  The value read unoptimized from address @p var
+ */
+static inline uint64_t volatile_load_u64(const volatile uint64_t *var)
+{
+    return *var;
+}
+
+/**
+ * @brief   Store an 8 bit value completely unoptimized
+ * @param   dest    Address to write the given value unoptimized to
+ * @param   val     Value to write unoptimized
+ */
+static inline void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
+{
+    *dest = val;
+}
+/**
+ * @brief   Store a 16 bit value completely unoptimized
+ * @param   dest    Address to write the given value unoptimized to
+ * @param   val     Value to write unoptimized
+ */
+static inline void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
+{
+    *dest = val;
+}
+/**
+ * @brief   Store a 32 bit value completely unoptimized
+ * @param   dest    Address to write the given value unoptimized to
+ * @param   val     Value to write unoptimized
+ */
+static inline void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
+{
+    *dest = val;
+}
+/**
+ * @brief   Store a 64 bit value completely unoptimized
+ * @param   dest    Address to write the given value unoptimized to
+ * @param   val     Value to write unoptimized
+ */
+static inline void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
+{
+    *dest = val;
+}
+
+/**
+ * @brief   Unoptimized version of `*dest += val`
+ * @param   dest    Address of the value to add to
+ * @param   val     Value to add
+ */
+static inline void volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
+{
+    *dest += val;
+}
+/**
+ * @brief   Unoptimized version of `*dest -= val`
+ * @param   dest    Address of the value to apply the operation on
+ * @param   val     Second operand
+ */
+static inline void volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
+{
+    *dest -= val;
+}
+/**
+ * @brief   Unoptimized version of `*dest |= val`
+ * @param   dest    Address of the value to apply the operation on
+ * @param   val     Second operand
+ */
+static inline void volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
+{
+    *dest |= val;
+}
+/**
+ * @brief   Unoptimized version of `*dest ^= val`
+ * @param   dest    Address of the value to apply the operation on
+ * @param   val     Second operand
+ */
+static inline void volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
+{
+    *dest ^= val;
+}
+/**
+ * @brief   Unoptimized version of `*dest &= val`
+ * @param   dest    Address of the value to apply the operation on
+ * @param   val     Second operand
+ */
+static inline void volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
+{
+    *dest &= val;
+}
+
/**
 * @brief Unoptimized version of `*dest += val`
 * @param dest Address of the value to add to
 * @param val Value to add
 */
static inline void volatile_fetch_add_u16(volatile uint16_t *dest, uint16_t val)
{
    *dest += val;
}
/**
 * @brief Unoptimized version of `*dest -= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_sub_u16(volatile uint16_t *dest, uint16_t val)
{
    *dest -= val;
}
/**
 * @brief Unoptimized version of `*dest |= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
{
    *dest |= val;
}
/**
 * @brief Unoptimized version of `*dest ^= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
{
    *dest ^= val;
}
/**
 * @brief Unoptimized version of `*dest &= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
{
    *dest &= val;
}

/**
 * @brief Unoptimized version of `*dest += val`
 * @param dest Address of the value to add to
 * @param val Value to add
 */
static inline void volatile_fetch_add_u32(volatile uint32_t *dest, uint32_t val)
{
    *dest += val;
}
/**
 * @brief Unoptimized version of `*dest -= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_sub_u32(volatile uint32_t *dest, uint32_t val)
{
    *dest -= val;
}
/**
 * @brief Unoptimized version of `*dest |= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
{
    *dest |= val;
}
/**
 * @brief Unoptimized version of `*dest ^= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
{
    *dest ^= val;
}
/**
 * @brief Unoptimized version of `*dest &= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
{
    *dest &= val;
}

/**
 * @brief Unoptimized version of `*dest += val`
 * @param dest Address of the value to add to
 * @param val Value to add
 */
static inline void volatile_fetch_add_u64(volatile uint64_t *dest, uint64_t val)
{
    *dest += val;
}
/**
 * @brief Unoptimized version of `*dest -= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_sub_u64(volatile uint64_t *dest, uint64_t val)
{
    *dest -= val;
}
/**
 * @brief Unoptimized version of `*dest |= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
{
    *dest |= val;
}
/**
 * @brief Unoptimized version of `*dest ^= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
{
    *dest ^= val;
}
/**
 * @brief Unoptimized version of `*dest &= val`
 * @param dest Address of the value to apply the operation to
 * @param val Second operand
 */
static inline void volatile_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
{
    *dest &= val;
}

#ifdef __cplusplus
}
#endif

#endif /* VOLATILE_UTILS_H */
/** @} */
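
As an illustration of where these helpers fit (a minimal sketch, not part of this patch; `event_flag`, `some_isr()`, and `main_loop_step()` are hypothetical names): a coalescing event flag that an ISR sets and thread context polls only needs `volatile`'s no-optimization/no-reordering guarantee, whereas anything requiring an atomic read-modify-write should use the `atomic_*()` family instead.

#include <stdint.h>
#include "volatile_utils.h"

/* Hypothetical flag shared between an ISR and thread context. On typical
 * MCUs an aligned uint8_t load or store is a single bus access, so only
 * the guarantees of volatile are needed here. */
static uint8_t event_flag;

void some_isr(void)
{
    volatile_store_u8(&event_flag, 1);  /* signal the event */
}

void main_loop_step(void)
{
    if (volatile_load_u8(&event_flag)) {
        volatile_store_u8(&event_flag, 0); /* clear before handling: an event
                                            * arriving in between is coalesced
                                            * with the current one, not lost */
        /* ... handle the event ... */
    }
}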
@ -47,10 +47,10 @@ typedef enum {
    TEST_WIDTH_NUMOF
} test_width_t;

typedef void (*fetch_op_u8_t)(uint8_t *dest, uint8_t val);
typedef void (*fetch_op_u16_t)(uint16_t *dest, uint16_t val);
typedef void (*fetch_op_u32_t)(uint32_t *dest, uint32_t val);
typedef void (*fetch_op_u64_t)(uint64_t *dest, uint64_t val);
typedef void (*fetch_op_u8_t)(volatile uint8_t *dest, uint8_t val);
typedef void (*fetch_op_u16_t)(volatile uint16_t *dest, uint16_t val);
typedef void (*fetch_op_u32_t)(volatile uint32_t *dest, uint32_t val);
typedef void (*fetch_op_u64_t)(volatile uint64_t *dest, uint64_t val);

typedef struct {
    const char *name;

@ -1,192 +0,0 @@
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */

/**
 * @ingroup tests
 * @{
 *
 * @file
 * @brief For comparison: "Atomic" accesses using volatile
 * @author Marian Buschsieweke <marian.buschsieweke@ovgu.de>
 *
 * This header implements the `atomic_*()` family of functions of
 * @ref sys_atomic_utils with `volatile_` instead of `atomic_` as prefix.
 * These implementations rely on the `volatile` type qualifier to implement
 * "atomic" accesses, which in many cases will not result in atomic operations.
 * So this implementation is known to be ***BROKEN***. Its sole purpose is
 * to verify that the tests do detect broken implementations. Do not use
 * these functions for anything but testing ;-)
 */

#ifndef VOLATILE_UTILS_H
#define VOLATILE_UTILS_H

#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

static inline uint8_t volatile_load_u8(const uint8_t *var)
{
    return *((const volatile uint8_t *)var);
}
static inline uint16_t volatile_load_u16(const uint16_t *var)
{
    return *((const volatile uint16_t *)var);
}
static inline uint32_t volatile_load_u32(const uint32_t *var)
{
    return *((const volatile uint32_t *)var);
}
static inline uint64_t volatile_load_u64(const uint64_t *var)
{
    return *((const volatile uint64_t *)var);
}

static inline void volatile_store_u8(uint8_t *dest, uint8_t val)
{
    *((volatile uint8_t *)dest) = val;
}
static inline void volatile_store_u16(uint16_t *dest, uint16_t val)
{
    *((volatile uint16_t *)dest) = val;
}
static inline void volatile_store_u32(uint32_t *dest, uint32_t val)
{
    *((volatile uint32_t *)dest) = val;
}
static inline void volatile_store_u64(uint64_t *dest, uint64_t val)
{
    *((volatile uint64_t *)dest) = val;
}

static inline void volatile_fetch_add_u8(uint8_t *dest, uint8_t val)
{
    *((volatile uint8_t *)dest) += val;
}
static inline void volatile_fetch_sub_u8(uint8_t *dest, uint8_t val)
{
    *((volatile uint8_t *)dest) -= val;
}
static inline void volatile_fetch_or_u8(uint8_t *dest, uint8_t val)
{
    *((volatile uint8_t *)dest) |= val;
}
static inline void volatile_fetch_xor_u8(uint8_t *dest, uint8_t val)
{
    *((volatile uint8_t *)dest) ^= val;
}
static inline void volatile_fetch_and_u8(uint8_t *dest, uint8_t val)
{
    *((volatile uint8_t *)dest) &= val;
}

static inline void volatile_fetch_add_u16(uint16_t *dest, uint16_t val)
{
    *((volatile uint16_t *)dest) += val;
}
static inline void volatile_fetch_sub_u16(uint16_t *dest, uint16_t val)
{
    *((volatile uint16_t *)dest) -= val;
}
static inline void volatile_fetch_or_u16(uint16_t *dest, uint16_t val)
{
    *((volatile uint16_t *)dest) |= val;
}
static inline void volatile_fetch_xor_u16(uint16_t *dest, uint16_t val)
{
    *((volatile uint16_t *)dest) ^= val;
}
static inline void volatile_fetch_and_u16(uint16_t *dest, uint16_t val)
{
    *((volatile uint16_t *)dest) &= val;
}

static inline void volatile_fetch_add_u32(uint32_t *dest, uint32_t val)
{
    *((volatile uint32_t *)dest) += val;
}
static inline void volatile_fetch_sub_u32(uint32_t *dest, uint32_t val)
{
    *((volatile uint32_t *)dest) -= val;
}
static inline void volatile_fetch_or_u32(uint32_t *dest, uint32_t val)
{
    *((volatile uint32_t *)dest) |= val;
}
static inline void volatile_fetch_xor_u32(uint32_t *dest, uint32_t val)
{
    *((volatile uint32_t *)dest) ^= val;
}
static inline void volatile_fetch_and_u32(uint32_t *dest, uint32_t val)
{
    *((volatile uint32_t *)dest) &= val;
}

static inline void volatile_fetch_add_u64(uint64_t *dest, uint64_t val)
{
    *((volatile uint64_t *)dest) += val;
}
static inline void volatile_fetch_sub_u64(uint64_t *dest, uint64_t val)
{
    *((volatile uint64_t *)dest) -= val;
}
static inline void volatile_fetch_or_u64(uint64_t *dest, uint64_t val)
{
    *((volatile uint64_t *)dest) |= val;
}
static inline void volatile_fetch_xor_u64(uint64_t *dest, uint64_t val)
{
    *((volatile uint64_t *)dest) ^= val;
}
static inline void volatile_fetch_and_u64(uint64_t *dest, uint64_t val)
{
    *((volatile uint64_t *)dest) &= val;
}

static inline void volatile_set_bit_u8(uint8_t *mask, uint8_t bit)
{
    *((volatile uint8_t *)mask) |= 1 << bit;
}
static inline void volatile_set_bit_u16(uint16_t *mask, uint8_t bit)
{
    *((volatile uint16_t *)mask) |= 1 << bit;
}
static inline void volatile_set_bit_u32(uint32_t *mask, uint8_t bit)
{
    *((volatile uint32_t *)mask) |= 1UL << bit;
}
static inline void volatile_set_bit_u64(uint64_t *mask, uint8_t bit)
{
    *((volatile uint64_t *)mask) |= 1ULL << bit;
}

static inline void volatile_clear_bit_u8(uint8_t *mask, uint8_t bit)
{
    *((volatile uint8_t *)mask) &= ~(1 << bit);
}
static inline void volatile_clear_bit_u16(uint16_t *mask, uint8_t bit)
{
    *((volatile uint16_t *)mask) &= ~(1 << bit);
}
static inline void volatile_clear_bit_u32(uint32_t *mask, uint8_t bit)
{
    *((volatile uint32_t *)mask) &= ~(1UL << bit);
}
static inline void volatile_clear_bit_u64(uint64_t *mask, uint8_t bit)
{
    *((volatile uint64_t *)mask) &= ~(1ULL << bit);
}

#ifdef __cplusplus
}
#endif

#endif /* VOLATILE_UTILS_H */
/** @} */
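
For context on why such an implementation is broken (an illustrative sketch, not part of this patch; `broken_fetch_add_u8` is a hypothetical name): `volatile` only keeps the compiler from caching, eliding, or reordering the accesses. A read-modify-write like `volatile_fetch_add_u8()` still compiles to a separate load, modify, and store, so an interrupt that updates the value between the load and the store has its update silently overwritten, which is exactly the lost-update failure the test suite is meant to detect.

#include <stdint.h>

/* Roughly what volatile_fetch_add_u8() above expands to: three separate steps. */
void broken_fetch_add_u8(uint8_t *dest, uint8_t val)
{
    volatile uint8_t *vdest = (volatile uint8_t *)dest;

    uint8_t tmp = *vdest; /* 1: load  -- an ISR may fire here...           */
    tmp += val;           /* 2: add   -- ...or here...                     */
    *vdest = tmp;         /* 3: store -- ...and its update is overwritten. */
}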
@ -23,40 +23,16 @@
#include "embUnit.h"

#include "atomic_utils.h"
#include "volatile_utils.h"
#include "random.h"

#define ENABLE_DEBUG 0
#include "debug.h"

typedef void (*fetch_op_u8_t)(uint8_t *dest, uint8_t val);
typedef void (*fetch_op_u16_t)(uint16_t *dest, uint16_t val);
typedef void (*fetch_op_u32_t)(uint32_t *dest, uint32_t val);
typedef void (*fetch_op_u64_t)(uint64_t *dest, uint64_t val);

static void fetch_add_u8(uint8_t *dest, uint8_t val){ *dest += val; }
static void fetch_add_u16(uint16_t *dest, uint16_t val){ *dest += val; }
static void fetch_add_u32(uint32_t *dest, uint32_t val){ *dest += val; }
static void fetch_add_u64(uint64_t *dest, uint64_t val){ *dest += val; }

static void fetch_sub_u8(uint8_t *dest, uint8_t val){ *dest -= val; }
static void fetch_sub_u16(uint16_t *dest, uint16_t val){ *dest -= val; }
static void fetch_sub_u32(uint32_t *dest, uint32_t val){ *dest -= val; }
static void fetch_sub_u64(uint64_t *dest, uint64_t val){ *dest -= val; }

static void fetch_or_u8(uint8_t *dest, uint8_t val){ *dest |= val; }
static void fetch_or_u16(uint16_t *dest, uint16_t val){ *dest |= val; }
static void fetch_or_u32(uint32_t *dest, uint32_t val){ *dest |= val; }
static void fetch_or_u64(uint64_t *dest, uint64_t val){ *dest |= val; }

static void fetch_xor_u8(uint8_t *dest, uint8_t val){ *dest ^= val; }
static void fetch_xor_u16(uint16_t *dest, uint16_t val){ *dest ^= val; }
static void fetch_xor_u32(uint32_t *dest, uint32_t val){ *dest ^= val; }
static void fetch_xor_u64(uint64_t *dest, uint64_t val){ *dest ^= val; }

static void fetch_and_u8(uint8_t *dest, uint8_t val){ *dest &= val; }
static void fetch_and_u16(uint16_t *dest, uint16_t val){ *dest &= val; }
static void fetch_and_u32(uint32_t *dest, uint32_t val){ *dest &= val; }
static void fetch_and_u64(uint64_t *dest, uint64_t val){ *dest &= val; }
typedef void (*fetch_op_u8_t)(volatile uint8_t *dest, uint8_t val);
typedef void (*fetch_op_u16_t)(volatile uint16_t *dest, uint16_t val);
typedef void (*fetch_op_u32_t)(volatile uint32_t *dest, uint32_t val);
typedef void (*fetch_op_u64_t)(volatile uint64_t *dest, uint64_t val);

static void test_load_store(void)
{
@ -91,16 +67,16 @@ static void test_fetch_op_u8(fetch_op_u8_t atomic_op, fetch_op_u8_t op)

static void test_fetch_ops_u8(void)
{
    test_fetch_op_u8(atomic_fetch_add_u8, fetch_add_u8);
    test_fetch_op_u8(atomic_fetch_sub_u8, fetch_sub_u8);
    test_fetch_op_u8(atomic_fetch_or_u8, fetch_or_u8);
    test_fetch_op_u8(atomic_fetch_xor_u8, fetch_xor_u8);
    test_fetch_op_u8(atomic_fetch_and_u8, fetch_and_u8);
    test_fetch_op_u8(semi_atomic_fetch_add_u8, fetch_add_u8);
    test_fetch_op_u8(semi_atomic_fetch_sub_u8, fetch_sub_u8);
    test_fetch_op_u8(semi_atomic_fetch_or_u8, fetch_or_u8);
    test_fetch_op_u8(semi_atomic_fetch_xor_u8, fetch_xor_u8);
    test_fetch_op_u8(semi_atomic_fetch_and_u8, fetch_and_u8);
    test_fetch_op_u8(atomic_fetch_add_u8, volatile_fetch_add_u8);
    test_fetch_op_u8(atomic_fetch_sub_u8, volatile_fetch_sub_u8);
    test_fetch_op_u8(atomic_fetch_or_u8, volatile_fetch_or_u8);
    test_fetch_op_u8(atomic_fetch_xor_u8, volatile_fetch_xor_u8);
    test_fetch_op_u8(atomic_fetch_and_u8, volatile_fetch_and_u8);
    test_fetch_op_u8(semi_atomic_fetch_add_u8, volatile_fetch_add_u8);
    test_fetch_op_u8(semi_atomic_fetch_sub_u8, volatile_fetch_sub_u8);
    test_fetch_op_u8(semi_atomic_fetch_or_u8, volatile_fetch_or_u8);
    test_fetch_op_u8(semi_atomic_fetch_xor_u8, volatile_fetch_xor_u8);
    test_fetch_op_u8(semi_atomic_fetch_and_u8, volatile_fetch_and_u8);
}

static void test_fetch_op_u16(fetch_op_u16_t atomic_op, fetch_op_u16_t op)
@ -118,16 +94,16 @@ static void test_fetch_op_u16(fetch_op_u16_t atomic_op, fetch_op_u16_t op)

static void test_fetch_ops_u16(void)
{
    test_fetch_op_u16(atomic_fetch_add_u16, fetch_add_u16);
    test_fetch_op_u16(atomic_fetch_sub_u16, fetch_sub_u16);
    test_fetch_op_u16(atomic_fetch_or_u16, fetch_or_u16);
    test_fetch_op_u16(atomic_fetch_xor_u16, fetch_xor_u16);
    test_fetch_op_u16(atomic_fetch_and_u16, fetch_and_u16);
    test_fetch_op_u16(semi_atomic_fetch_add_u16, fetch_add_u16);
    test_fetch_op_u16(semi_atomic_fetch_sub_u16, fetch_sub_u16);
    test_fetch_op_u16(semi_atomic_fetch_or_u16, fetch_or_u16);
    test_fetch_op_u16(semi_atomic_fetch_xor_u16, fetch_xor_u16);
    test_fetch_op_u16(semi_atomic_fetch_and_u16, fetch_and_u16);
    test_fetch_op_u16(atomic_fetch_add_u16, volatile_fetch_add_u16);
    test_fetch_op_u16(atomic_fetch_sub_u16, volatile_fetch_sub_u16);
    test_fetch_op_u16(atomic_fetch_or_u16, volatile_fetch_or_u16);
    test_fetch_op_u16(atomic_fetch_xor_u16, volatile_fetch_xor_u16);
    test_fetch_op_u16(atomic_fetch_and_u16, volatile_fetch_and_u16);
    test_fetch_op_u16(semi_atomic_fetch_add_u16, volatile_fetch_add_u16);
    test_fetch_op_u16(semi_atomic_fetch_sub_u16, volatile_fetch_sub_u16);
    test_fetch_op_u16(semi_atomic_fetch_or_u16, volatile_fetch_or_u16);
    test_fetch_op_u16(semi_atomic_fetch_xor_u16, volatile_fetch_xor_u16);
    test_fetch_op_u16(semi_atomic_fetch_and_u16, volatile_fetch_and_u16);
}

static void test_fetch_op_u32(fetch_op_u32_t atomic_op, fetch_op_u32_t op)
@ -145,16 +121,16 @@ static void test_fetch_op_u32(fetch_op_u32_t atomic_op, fetch_op_u32_t op)

static void test_fetch_ops_u32(void)
{
    test_fetch_op_u32(atomic_fetch_add_u32, fetch_add_u32);
    test_fetch_op_u32(atomic_fetch_sub_u32, fetch_sub_u32);
    test_fetch_op_u32(atomic_fetch_or_u32, fetch_or_u32);
    test_fetch_op_u32(atomic_fetch_xor_u32, fetch_xor_u32);
    test_fetch_op_u32(atomic_fetch_and_u32, fetch_and_u32);
    test_fetch_op_u32(semi_atomic_fetch_add_u32, fetch_add_u32);
    test_fetch_op_u32(semi_atomic_fetch_sub_u32, fetch_sub_u32);
    test_fetch_op_u32(semi_atomic_fetch_or_u32, fetch_or_u32);
    test_fetch_op_u32(semi_atomic_fetch_xor_u32, fetch_xor_u32);
    test_fetch_op_u32(semi_atomic_fetch_and_u32, fetch_and_u32);
    test_fetch_op_u32(atomic_fetch_add_u32, volatile_fetch_add_u32);
    test_fetch_op_u32(atomic_fetch_sub_u32, volatile_fetch_sub_u32);
    test_fetch_op_u32(atomic_fetch_or_u32, volatile_fetch_or_u32);
    test_fetch_op_u32(atomic_fetch_xor_u32, volatile_fetch_xor_u32);
    test_fetch_op_u32(atomic_fetch_and_u32, volatile_fetch_and_u32);
    test_fetch_op_u32(semi_atomic_fetch_add_u32, volatile_fetch_add_u32);
    test_fetch_op_u32(semi_atomic_fetch_sub_u32, volatile_fetch_sub_u32);
    test_fetch_op_u32(semi_atomic_fetch_or_u32, volatile_fetch_or_u32);
    test_fetch_op_u32(semi_atomic_fetch_xor_u32, volatile_fetch_xor_u32);
    test_fetch_op_u32(semi_atomic_fetch_and_u32, volatile_fetch_and_u32);
}

static void test_fetch_op_u64(fetch_op_u64_t atomic_op, fetch_op_u64_t op)
@ -173,16 +149,16 @@ static void test_fetch_op_u64(fetch_op_u64_t atomic_op, fetch_op_u64_t op)

static void test_fetch_ops_u64(void)
{
    test_fetch_op_u64(atomic_fetch_add_u64, fetch_add_u64);
    test_fetch_op_u64(atomic_fetch_sub_u64, fetch_sub_u64);
    test_fetch_op_u64(atomic_fetch_or_u64, fetch_or_u64);
    test_fetch_op_u64(atomic_fetch_xor_u64, fetch_xor_u64);
    test_fetch_op_u64(atomic_fetch_and_u64, fetch_and_u64);
    test_fetch_op_u64(semi_atomic_fetch_add_u64, fetch_add_u64);
    test_fetch_op_u64(semi_atomic_fetch_sub_u64, fetch_sub_u64);
    test_fetch_op_u64(semi_atomic_fetch_or_u64, fetch_or_u64);
    test_fetch_op_u64(semi_atomic_fetch_xor_u64, fetch_xor_u64);
    test_fetch_op_u64(semi_atomic_fetch_and_u64, fetch_and_u64);
    test_fetch_op_u64(atomic_fetch_add_u64, volatile_fetch_add_u64);
    test_fetch_op_u64(atomic_fetch_sub_u64, volatile_fetch_sub_u64);
    test_fetch_op_u64(atomic_fetch_or_u64, volatile_fetch_or_u64);
    test_fetch_op_u64(atomic_fetch_xor_u64, volatile_fetch_xor_u64);
    test_fetch_op_u64(atomic_fetch_and_u64, volatile_fetch_and_u64);
    test_fetch_op_u64(semi_atomic_fetch_add_u64, volatile_fetch_add_u64);
    test_fetch_op_u64(semi_atomic_fetch_sub_u64, volatile_fetch_sub_u64);
    test_fetch_op_u64(semi_atomic_fetch_or_u64, volatile_fetch_or_u64);
    test_fetch_op_u64(semi_atomic_fetch_xor_u64, volatile_fetch_xor_u64);
    test_fetch_op_u64(semi_atomic_fetch_and_u64, volatile_fetch_and_u64);
}

static void test_atomic_set_bit(void)