Have rte_bit_[test|set|clear|assign|flip]() and rte_bit_atomic_*()
handle volatile-marked pointers.

Signed-off-by: Mattias Rönnblom <mattias.ronnb...@ericsson.com>
---
 app/test/test_bitops.c       |  30 ++-
 lib/eal/include/rte_bitops.h | 427 ++++++++++++++++++++++-------------
 2 files changed, 289 insertions(+), 168 deletions(-)

diff --git a/app/test/test_bitops.c b/app/test/test_bitops.c
index b80216a0a1..e6e9f7ec44 100644
--- a/app/test/test_bitops.c
+++ b/app/test/test_bitops.c
@@ -14,13 +14,13 @@
 #include "test.h"
 
 #define GEN_TEST_BIT_ACCESS(test_name, set_fun, clear_fun, assign_fun, \
-                           flip_fun, test_fun, size)                   \
+                           flip_fun, test_fun, size, mod)              \
        static int                                                      \
        test_name(void)                                                 \
        {                                                               \
                uint ## size ## _t reference = (uint ## size ## _t)rte_rand(); \
                unsigned int bit_nr;                                    \
-               uint ## size ## _t word = (uint ## size ## _t)rte_rand(); \
+               mod uint ## size ## _t word = (uint ## size ## _t)rte_rand(); \
                                                                        \
                for (bit_nr = 0; bit_nr < size; bit_nr++) {             \
                        bool reference_bit = (reference >> bit_nr) & 1; \
@@ -41,7 +41,7 @@
                                    "Bit %d had unflipped value", bit_nr); \
                        flip_fun(&word, bit_nr);                        \
                                                                        \
-                       const uint ## size ## _t *const_ptr = &word;    \
+                       const mod uint ## size ## _t *const_ptr = &word; \
                        TEST_ASSERT(test_fun(const_ptr, bit_nr) ==      \
                                    reference_bit,                      \
                                    "Bit %d had unexpected value", bit_nr); \
@@ -59,10 +59,16 @@
        }
 
 GEN_TEST_BIT_ACCESS(test_bit_access32, rte_bit_set, rte_bit_clear,
-                   rte_bit_assign, rte_bit_flip, rte_bit_test, 32)
+                   rte_bit_assign, rte_bit_flip, rte_bit_test, 32,)
 
 GEN_TEST_BIT_ACCESS(test_bit_access64, rte_bit_set, rte_bit_clear,
-                   rte_bit_assign, rte_bit_flip, rte_bit_test, 64)
+                   rte_bit_assign, rte_bit_flip, rte_bit_test, 64,)
+
+GEN_TEST_BIT_ACCESS(test_bit_v_access32, rte_bit_set, rte_bit_clear,
+                   rte_bit_assign, rte_bit_flip, rte_bit_test, 32, volatile)
+
+GEN_TEST_BIT_ACCESS(test_bit_v_access64, rte_bit_set, rte_bit_clear,
+                   rte_bit_assign, rte_bit_flip, rte_bit_test, 64, volatile)
 
 #define bit_atomic_set(addr, nr)                               \
        rte_bit_atomic_set(addr, nr, rte_memory_order_relaxed)
@@ -81,11 +87,19 @@ GEN_TEST_BIT_ACCESS(test_bit_access64, rte_bit_set, rte_bit_clear,
 
 GEN_TEST_BIT_ACCESS(test_bit_atomic_access32, bit_atomic_set,
                    bit_atomic_clear, bit_atomic_assign,
-                   bit_atomic_flip, bit_atomic_test, 32)
+                   bit_atomic_flip, bit_atomic_test, 32,)
 
 GEN_TEST_BIT_ACCESS(test_bit_atomic_access64, bit_atomic_set,
                    bit_atomic_clear, bit_atomic_assign,
-                   bit_atomic_flip, bit_atomic_test, 64)
+                   bit_atomic_flip, bit_atomic_test, 64,)
+
+GEN_TEST_BIT_ACCESS(test_bit_atomic_v_access32, bit_atomic_set,
+                   bit_atomic_clear, bit_atomic_assign,
+                   bit_atomic_flip, bit_atomic_test, 32, volatile)
+
+GEN_TEST_BIT_ACCESS(test_bit_atomic_v_access64, bit_atomic_set,
+                   bit_atomic_clear, bit_atomic_assign,
+                   bit_atomic_flip, bit_atomic_test, 64, volatile)
 
 #define PARALLEL_TEST_RUNTIME 0.25
 
@@ -480,6 +494,8 @@ static struct unit_test_suite test_suite = {
                TEST_CASE(test_bit_access64),
                TEST_CASE(test_bit_access32),
                TEST_CASE(test_bit_access64),
+               TEST_CASE(test_bit_v_access32),
+               TEST_CASE(test_bit_v_access64),
                TEST_CASE(test_bit_atomic_access32),
                TEST_CASE(test_bit_atomic_access64),
                TEST_CASE(test_bit_atomic_parallel_assign32),
diff --git a/lib/eal/include/rte_bitops.h b/lib/eal/include/rte_bitops.h
index 4d878099ed..1355949fb6 100644
--- a/lib/eal/include/rte_bitops.h
+++ b/lib/eal/include/rte_bitops.h
@@ -127,12 +127,16 @@ extern "C" {
  * @param nr
  *   The index of the bit.
  */
-#define rte_bit_test(addr, nr)                                 \
-       _Generic((addr),                                        \
-               uint32_t *: __rte_bit_test32,                   \
-               const uint32_t *: __rte_bit_test32,             \
-               uint64_t *: __rte_bit_test64,                   \
-               const uint64_t *: __rte_bit_test64)(addr, nr)
+#define rte_bit_test(addr, nr)                                         \
+       _Generic((addr),                                                \
+                uint32_t *: __rte_bit_test32,                          \
+                const uint32_t *: __rte_bit_test32,                    \
+                volatile uint32_t *: __rte_bit_v_test32,               \
+                const volatile uint32_t *: __rte_bit_v_test32,         \
+                uint64_t *: __rte_bit_test64,                          \
+                const uint64_t *: __rte_bit_test64,                    \
+                volatile uint64_t *: __rte_bit_v_test64,               \
+                const volatile uint64_t *: __rte_bit_v_test64)(addr, nr)
 
 /**
  * @warning
@@ -152,10 +156,12 @@ extern "C" {
  * @param nr
  *   The index of the bit.
  */
-#define rte_bit_set(addr, nr)                          \
-       _Generic((addr),                                \
-                uint32_t *: __rte_bit_set32,           \
-                uint64_t *: __rte_bit_set64)(addr, nr)
+#define rte_bit_set(addr, nr)                                          \
+       _Generic((addr),                                                \
+                uint32_t *: __rte_bit_set32,                           \
+                volatile uint32_t *: __rte_bit_v_set32,                \
+                uint64_t *: __rte_bit_set64,                           \
+                volatile uint64_t *: __rte_bit_v_set64)(addr, nr)
 
 /**
  * @warning
@@ -175,10 +181,12 @@ extern "C" {
  * @param nr
  *   The index of the bit.
  */
-#define rte_bit_clear(addr, nr)                                        \
-       _Generic((addr),                                        \
-                uint32_t *: __rte_bit_clear32,                 \
-                uint64_t *: __rte_bit_clear64)(addr, nr)
+#define rte_bit_clear(addr, nr)                                        \
+       _Generic((addr),                                                \
+                uint32_t *: __rte_bit_clear32,                         \
+                volatile uint32_t *: __rte_bit_v_clear32,              \
+                uint64_t *: __rte_bit_clear64,                         \
+                volatile uint64_t *: __rte_bit_v_clear64)(addr, nr)
 
 /**
  * @warning
@@ -202,7 +210,9 @@ extern "C" {
 #define rte_bit_assign(addr, nr, value)                                \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_assign32,                        \
-                uint64_t *: __rte_bit_assign64)(addr, nr, value)
+                volatile uint32_t *: __rte_bit_v_assign32,             \
+                uint64_t *: __rte_bit_assign64,                        \
+                volatile uint64_t *: __rte_bit_v_assign64)(addr, nr, value)
 
 /**
  * @warning
@@ -225,7 +235,9 @@ extern "C" {
 #define rte_bit_flip(addr, nr)                                         \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_flip32,                          \
-                uint64_t *: __rte_bit_flip64)(addr, nr)
+                volatile uint32_t *: __rte_bit_v_flip32,               \
+                uint64_t *: __rte_bit_flip64,                          \
+                volatile uint64_t *: __rte_bit_v_flip64)(addr, nr)
 
 /**
  * @warning
@@ -250,9 +262,13 @@ extern "C" {
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_test32,                   \
                 const uint32_t *: __rte_bit_atomic_test32,             \
+                volatile uint32_t *: __rte_bit_atomic_v_test32,        \
+                const volatile uint32_t *: __rte_bit_atomic_v_test32,  \
                 uint64_t *: __rte_bit_atomic_test64,                   \
-                const uint64_t *: __rte_bit_atomic_test64)(addr, nr,   \
-                                                           memory_order)
+                const uint64_t *: __rte_bit_atomic_test64,             \
+                volatile uint64_t *: __rte_bit_atomic_v_test64,        \
+                const volatile uint64_t *: __rte_bit_atomic_v_test64) \
+                                                   (addr, nr, memory_order)
 
 /**
  * @warning
@@ -274,7 +290,10 @@ extern "C" {
 #define rte_bit_atomic_set(addr, nr, memory_order)                     \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_set32,                    \
-                uint64_t *: __rte_bit_atomic_set64)(addr, nr, memory_order)
+                volatile uint32_t *: __rte_bit_atomic_v_set32,         \
+                uint64_t *: __rte_bit_atomic_set64,                    \
+                volatile uint64_t *: __rte_bit_atomic_v_set64)(addr, nr, \
+                                                               memory_order)
 
 /**
  * @warning
@@ -296,7 +315,10 @@ extern "C" {
 #define rte_bit_atomic_clear(addr, nr, memory_order)                   \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_clear32,                  \
-                uint64_t *: __rte_bit_atomic_clear64)(addr, nr, memory_order)
+                volatile uint32_t *: __rte_bit_atomic_v_clear32,       \
+                uint64_t *: __rte_bit_atomic_clear64,                  \
+                volatile uint64_t *: __rte_bit_atomic_v_clear64)(addr, nr, \
+                                                                 memory_order)
 
 /**
  * @warning
@@ -320,8 +342,11 @@ extern "C" {
 #define rte_bit_atomic_assign(addr, nr, value, memory_order)           \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_assign32,                 \
-                uint64_t *: __rte_bit_atomic_assign64)(addr, nr, value, \
-                                                       memory_order)
+                volatile uint32_t *: __rte_bit_atomic_v_assign32,      \
+                uint64_t *: __rte_bit_atomic_assign64,                 \
+                volatile uint64_t *: __rte_bit_atomic_v_assign64)(addr, nr, \
+                                                                  value, \
+                                                                  memory_order)
 
 /**
  * @warning
@@ -344,7 +369,10 @@ extern "C" {
 #define rte_bit_atomic_flip(addr, nr, memory_order)                    \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_flip32,                   \
-                uint64_t *: __rte_bit_atomic_flip64)(addr, nr, memory_order)
+                volatile uint32_t *: __rte_bit_atomic_v_flip32,        \
+                uint64_t *: __rte_bit_atomic_flip64,                   \
+                volatile uint64_t *: __rte_bit_atomic_v_flip64)(addr, nr, \
+                                                                memory_order)
 
 /**
  * @warning
@@ -368,8 +396,10 @@ extern "C" {
 #define rte_bit_atomic_test_and_set(addr, nr, memory_order)            \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_test_and_set32,           \
-                uint64_t *: __rte_bit_atomic_test_and_set64)(addr, nr, \
-                                                             memory_order)
+                volatile uint32_t *: __rte_bit_atomic_v_test_and_set32, \
+                uint64_t *: __rte_bit_atomic_test_and_set64,           \
+                volatile uint64_t *: __rte_bit_atomic_v_test_and_set64) \
+                                                   (addr, nr, memory_order)
 
 /**
  * @warning
@@ -393,8 +423,10 @@ extern "C" {
 #define rte_bit_atomic_test_and_clear(addr, nr, memory_order)          \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_test_and_clear32,         \
-                uint64_t *: __rte_bit_atomic_test_and_clear64)(addr, nr, \
-                                                               memory_order)
+                volatile uint32_t *: __rte_bit_atomic_v_test_and_clear32, \
+                uint64_t *: __rte_bit_atomic_test_and_clear64,         \
+                volatile uint64_t *: __rte_bit_atomic_v_test_and_clear64) \
+                                                      (addr, nr, memory_order)
 
 /**
  * @warning
@@ -421,9 +453,10 @@ extern "C" {
 #define rte_bit_atomic_test_and_assign(addr, nr, value, memory_order)  \
        _Generic((addr),                                                \
                 uint32_t *: __rte_bit_atomic_test_and_assign32,        \
-                uint64_t *: __rte_bit_atomic_test_and_assign64)(addr, nr, \
-                                                                value, \
-                                                                memory_order)
+                volatile uint32_t *: __rte_bit_atomic_v_test_and_assign32, \
+                uint64_t *: __rte_bit_atomic_test_and_assign64,        \
+                volatile uint64_t *: __rte_bit_atomic_v_test_and_assign64) \
+                                               (addr, nr, value, memory_order)
 
 #define __RTE_GEN_BIT_TEST(family, fun, qualifier, size)               \
        __rte_experimental                                              \
@@ -491,93 +524,105 @@ __RTE_GEN_BIT_CLEAR(, clear,, 32)
 __RTE_GEN_BIT_ASSIGN(, assign,, 32)
 __RTE_GEN_BIT_FLIP(, flip,, 32)
 
+__RTE_GEN_BIT_TEST(v_, test, volatile, 32)
+__RTE_GEN_BIT_SET(v_, set, volatile, 32)
+__RTE_GEN_BIT_CLEAR(v_, clear, volatile, 32)
+__RTE_GEN_BIT_ASSIGN(v_, assign, volatile, 32)
+__RTE_GEN_BIT_FLIP(v_, flip, volatile, 32)
+
 __RTE_GEN_BIT_TEST(, test,, 64)
 __RTE_GEN_BIT_SET(, set,, 64)
 __RTE_GEN_BIT_CLEAR(, clear,, 64)
 __RTE_GEN_BIT_ASSIGN(, assign,, 64)
 __RTE_GEN_BIT_FLIP(, flip,, 64)
 
-#define __RTE_GEN_BIT_ATOMIC_TEST(size)                                \
+__RTE_GEN_BIT_TEST(v_, test, volatile, 64)
+__RTE_GEN_BIT_SET(v_, set, volatile, 64)
+__RTE_GEN_BIT_CLEAR(v_, clear, volatile, 64)
+__RTE_GEN_BIT_ASSIGN(v_, assign, volatile, 64)
+__RTE_GEN_BIT_FLIP(v_, flip, volatile, 64)
+
+#define __RTE_GEN_BIT_ATOMIC_TEST(v, qualifier, size)                  \
        __rte_experimental                                              \
        static inline bool                                              \
-       __rte_bit_atomic_test ## size(const uint ## size ## _t *addr,   \
-                                     unsigned int nr, int memory_order) \
+       __rte_bit_atomic_ ## v ## test ## size(const qualifier uint ## size ## _t *addr, \
+                                              unsigned int nr, int memory_order) \
        {                                                               \
                RTE_ASSERT(nr < size);                                  \
                                                                        \
-               const RTE_ATOMIC(uint ## size ## _t) *a_addr =          \
-                       (const RTE_ATOMIC(uint ## size ## _t) *)addr;   \
+               const qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
+                       (const qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
                uint ## size ## _t mask = (uint ## size ## _t)1 << nr;  \
                return rte_atomic_load_explicit(a_addr, memory_order) & mask; \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_SET(size)                                 \
+#define __RTE_GEN_BIT_ATOMIC_SET(v, qualifier, size)                   \
        __rte_experimental                                              \
        static inline void                                              \
-       __rte_bit_atomic_set ## size(uint ## size ## _t *addr,          \
-                                    unsigned int nr, int memory_order) \
+       __rte_bit_atomic_ ## v ## set ## size(qualifier uint ## size ## _t *addr, \
+                                             unsigned int nr, int memory_order) \
        {                                                               \
                RTE_ASSERT(nr < size);                                  \
                                                                        \
-               RTE_ATOMIC(uint ## size ## _t) *a_addr =                \
-                       (RTE_ATOMIC(uint ## size ## _t) *)addr;         \
+               qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr =      \
+                       (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
                uint ## size ## _t mask = (uint ## size ## _t)1 << nr;  \
                rte_atomic_fetch_or_explicit(a_addr, mask, memory_order); \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_CLEAR(size)                               \
+#define __RTE_GEN_BIT_ATOMIC_CLEAR(v, qualifier, size)                 \
        __rte_experimental                                              \
        static inline void                                              \
-       __rte_bit_atomic_clear ## size(uint ## size ## _t *addr,        \
-                                      unsigned int nr, int memory_order) \
+       __rte_bit_atomic_ ## v ## clear ## size(qualifier uint ## size ## _t *addr, \
+                                               unsigned int nr, int memory_order) \
        {                                                               \
                RTE_ASSERT(nr < size);                                  \
                                                                        \
-               RTE_ATOMIC(uint ## size ## _t) *a_addr =                \
-                       (RTE_ATOMIC(uint ## size ## _t) *)addr;         \
+               qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr =      \
+                       (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
                uint ## size ## _t mask = (uint ## size ## _t)1 << nr;  \
                rte_atomic_fetch_and_explicit(a_addr, ~mask, memory_order); \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_FLIP(size)                                \
+#define __RTE_GEN_BIT_ATOMIC_FLIP(v, qualifier, size)                  \
        __rte_experimental                                              \
        static inline void                                              \
-       __rte_bit_atomic_flip ## size(uint ## size ## _t *addr,         \
-                                      unsigned int nr, int memory_order) \
+       __rte_bit_atomic_ ## v ## flip ## size(qualifier uint ## size ## _t *addr, \
+                                              unsigned int nr, int memory_order) \
        {                                                               \
                RTE_ASSERT(nr < size);                                  \
                                                                        \
-               RTE_ATOMIC(uint ## size ## _t) *a_addr =                \
-                       (RTE_ATOMIC(uint ## size ## _t) *)addr;         \
+               qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr =      \
+                       (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
                uint ## size ## _t mask = (uint ## size ## _t)1 << nr;  \
                rte_atomic_fetch_xor_explicit(a_addr, mask, memory_order); \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_ASSIGN(size)                              \
+#define __RTE_GEN_BIT_ATOMIC_ASSIGN(v, qualifier, size)                \
        __rte_experimental                                              \
        static inline void                                              \
-       __rte_bit_atomic_assign ## size(uint ## size ## _t *addr,       \
-                                       unsigned int nr, bool value,    \
-                                       int memory_order)               \
+       __rte_bit_atomic_## v ## assign ## size(qualifier uint ## size ## _t *addr, \
+                                               unsigned int nr, bool value, \
+                                               int memory_order)       \
        {                                                               \
                if (value)                                              \
-                       __rte_bit_atomic_set ## size(addr, nr, memory_order); \
+                       __rte_bit_atomic_ ## v ## set ## size(addr, nr, memory_order); \
                else                                                    \
-                       __rte_bit_atomic_clear ## size(addr, nr,        \
-                                                      memory_order);   \
+                       __rte_bit_atomic_ ## v ## clear ## size(addr, nr, \
+                                                               memory_order); \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(size)                        \
+#define __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(v, qualifier, size)          \
        __rte_experimental                                              \
        static inline bool                                              \
-       __rte_bit_atomic_test_and_set ## size(uint ## size ## _t *addr, \
-                                             unsigned int nr,          \
-                                             int memory_order)         \
+       __rte_bit_atomic_ ## v ## test_and_set ## size(qualifier uint ## size ## _t *addr, \
+                                                      unsigned int nr, \
+                                                      int memory_order) \
        {                                                               \
                RTE_ASSERT(nr < size);                                  \
                                                                        \
-               RTE_ATOMIC(uint ## size ## _t) *a_addr =                \
-                       (RTE_ATOMIC(uint ## size ## _t) *)addr;         \
+               qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr =      \
+                       (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
                uint ## size ## _t mask = (uint ## size ## _t)1 << nr;  \
                uint ## size ## _t prev;                                \
                                                                        \
@@ -587,17 +632,17 @@ __RTE_GEN_BIT_FLIP(, flip,, 64)
                return prev & mask;                                     \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(size)                      \
+#define __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(v, qualifier, size)        \
        __rte_experimental                                              \
        static inline bool                                              \
-       __rte_bit_atomic_test_and_clear ## size(uint ## size ## _t *addr, \
-                                               unsigned int nr,        \
-                                               int memory_order)       \
+       __rte_bit_atomic_ ## v ## test_and_clear ## size(qualifier uint ## size ## _t *addr, \
+                                                        unsigned int nr, \
+                                                        int memory_order) \
        {                                                               \
                RTE_ASSERT(nr < size);                                  \
                                                                        \
-               RTE_ATOMIC(uint ## size ## _t) *a_addr =                \
-                       (RTE_ATOMIC(uint ## size ## _t) *)addr;         \
+               qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr =      \
+                       (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
                uint ## size ## _t mask = (uint ## size ## _t)1 << nr;  \
                uint ## size ## _t prev;                                \
                                                                        \
@@ -607,34 +652,36 @@ __RTE_GEN_BIT_FLIP(, flip,, 64)
                return prev & mask;                                     \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(size)                     \
+#define __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(v, qualifier, size)       \
        __rte_experimental                                              \
        static inline bool                                              \
-       __rte_bit_atomic_test_and_assign ## size(uint ## size ## _t *addr, \
-                                                unsigned int nr,       \
-                                                bool value,            \
-                                                int memory_order)      \
+       __rte_bit_atomic_ ## v ## test_and_assign ## size(qualifier uint ## size ## _t *addr, \
+                                                         unsigned int nr, \
+                                                         bool value,   \
+                                                         int memory_order) \
        {                                                               \
                if (value)                                              \
-                       return __rte_bit_atomic_test_and_set ## size(addr, nr, \
-                                                                    memory_order); \
+                       return __rte_bit_atomic_ ## v ## test_and_set ## size(addr, nr, memory_order); \
                 else                                                    \
-                       return __rte_bit_atomic_test_and_clear ## size(addr, nr, \
-                                                                      memory_order); \
+                       return __rte_bit_atomic_ ## v ## test_and_clear ## size(addr, nr, memory_order); \
        }
 
-#define __RTE_GEN_BIT_ATOMIC_OPS(size)                 \
-       __RTE_GEN_BIT_ATOMIC_TEST(size)                 \
-       __RTE_GEN_BIT_ATOMIC_SET(size)                  \
-       __RTE_GEN_BIT_ATOMIC_CLEAR(size)                \
-       __RTE_GEN_BIT_ATOMIC_ASSIGN(size)               \
-       __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(size)         \
-       __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(size)       \
-       __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(size)      \
-       __RTE_GEN_BIT_ATOMIC_FLIP(size)
+#define __RTE_GEN_BIT_ATOMIC_OPS(v, qualifier, size)   \
+       __RTE_GEN_BIT_ATOMIC_TEST(v, qualifier, size)   \
+       __RTE_GEN_BIT_ATOMIC_SET(v, qualifier, size)    \
+       __RTE_GEN_BIT_ATOMIC_CLEAR(v, qualifier, size)  \
+       __RTE_GEN_BIT_ATOMIC_ASSIGN(v, qualifier, size) \
+       __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(v, qualifier, size) \
+       __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(v, qualifier, size) \
+       __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(v, qualifier, size) \
+       __RTE_GEN_BIT_ATOMIC_FLIP(v, qualifier, size)
 
-__RTE_GEN_BIT_ATOMIC_OPS(32)
-__RTE_GEN_BIT_ATOMIC_OPS(64)
+#define __RTE_GEN_BIT_ATOMIC_OPS_SIZE(size) \
+       __RTE_GEN_BIT_ATOMIC_OPS(,, size) \
+       __RTE_GEN_BIT_ATOMIC_OPS(v_, volatile, size)
+
+__RTE_GEN_BIT_ATOMIC_OPS_SIZE(32)
+__RTE_GEN_BIT_ATOMIC_OPS_SIZE(64)
 
 /*------------------------ 32-bit relaxed operations ------------------------*/
 
@@ -1340,120 +1387,178 @@ rte_log2_u64(uint64_t v)
 #undef rte_bit_atomic_test_and_clear
 #undef rte_bit_atomic_test_and_assign
 
-#define __RTE_BIT_OVERLOAD_SZ_2(fun, qualifier, size, arg1_type, arg1_name) \
+#define __RTE_BIT_OVERLOAD_V_2(family, v, fun, c, size, arg1_type, arg1_name) \
        static inline void                                              \
-       rte_bit_ ## fun(qualifier uint ## size ## _t *addr,             \
-                       arg1_type arg1_name)                            \
+       rte_bit_ ## family ## fun(c uint ## size ## _t *addr,           \
+                                 arg1_type arg1_name)                  \
        {                                                               \
-               __rte_bit_ ## fun ## size(addr, arg1_name);             \
+               __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name); \
        }
 
-#define __RTE_BIT_OVERLOAD_2(fun, qualifier, arg1_type, arg1_name)     \
-       __RTE_BIT_OVERLOAD_SZ_2(fun, qualifier, 32, arg1_type, arg1_name) \
-       __RTE_BIT_OVERLOAD_SZ_2(fun, qualifier, 64, arg1_type, arg1_name)
+#define __RTE_BIT_OVERLOAD_SZ_2(family, fun, c, size, arg1_type, arg1_name) \
+       __RTE_BIT_OVERLOAD_V_2(family,, fun, c, size, arg1_type,        \
+                              arg1_name)                               \
+       __RTE_BIT_OVERLOAD_V_2(family, v_, fun, c volatile, size, \
+                              arg1_type, arg1_name)
 
-#define __RTE_BIT_OVERLOAD_SZ_2R(fun, qualifier, size, ret_type, arg1_type, \
-                                arg1_name)                             \
+#define __RTE_BIT_OVERLOAD_2(family, fun, c, arg1_type, arg1_name)     \
+       __RTE_BIT_OVERLOAD_SZ_2(family, fun, c, 32, arg1_type, arg1_name) \
+       __RTE_BIT_OVERLOAD_SZ_2(family, fun, c, 64, arg1_type, arg1_name)
+
+#define __RTE_BIT_OVERLOAD_V_2R(family, v, fun, c, size, ret_type, arg1_type, \
+                               arg1_name)                              \
        static inline ret_type                                          \
-       rte_bit_ ## fun(qualifier uint ## size ## _t *addr,             \
+       rte_bit_ ## family ## fun(c uint ## size ## _t *addr,           \
                        arg1_type arg1_name)                            \
        {                                                               \
-               return __rte_bit_ ## fun ## size(addr, arg1_name);      \
+               return __rte_bit_ ## family ## v ## fun ## size(addr,   \
+                                                               arg1_name); \
        }
 
-#define __RTE_BIT_OVERLOAD_2R(fun, qualifier, ret_type, arg1_type, arg1_name) \
-       __RTE_BIT_OVERLOAD_SZ_2R(fun, qualifier, 32, ret_type, arg1_type, \
+#define __RTE_BIT_OVERLOAD_SZ_2R(family, fun, c, size, ret_type, arg1_type, \
+                                arg1_name)                             \
+       __RTE_BIT_OVERLOAD_V_2R(family,, fun, c, size, ret_type, arg1_type, \
+                               arg1_name)                              \
+       __RTE_BIT_OVERLOAD_V_2R(family, v_, fun, c volatile,            \
+                               size, ret_type, arg1_type, arg1_name)
+
+#define __RTE_BIT_OVERLOAD_2R(family, fun, c, ret_type, arg1_type, arg1_name) \
+       __RTE_BIT_OVERLOAD_SZ_2R(family, fun, c, 32, ret_type, arg1_type, \
                                 arg1_name)                             \
-       __RTE_BIT_OVERLOAD_SZ_2R(fun, qualifier, 64, ret_type, arg1_type, \
+       __RTE_BIT_OVERLOAD_SZ_2R(family, fun, c, 64, ret_type, arg1_type, \
                                 arg1_name)
 
-#define __RTE_BIT_OVERLOAD_SZ_3(fun, qualifier, size, arg1_type, arg1_name, \
-                               arg2_type, arg2_name)                   \
+#define __RTE_BIT_OVERLOAD_V_3(family, v, fun, c, size, arg1_type, arg1_name, \
+                              arg2_type, arg2_name)                    \
        static inline void                                              \
-       rte_bit_ ## fun(uint ## size ## _t *addr, arg1_type arg1_name,  \
-                       arg2_type arg2_name)                            \
+       rte_bit_ ## family ## fun(c uint ## size ## _t *addr,           \
+                                 arg1_type arg1_name, arg2_type arg2_name) \
        {                                                               \
-               __rte_bit_ ## fun ## size(addr, arg1_name, arg2_name);  \
+               __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, \
+                                                        arg2_name);    \
        }
 
-#define __RTE_BIT_OVERLOAD_3(fun, qualifier, arg1_type, arg1_name, arg2_type, \
+#define __RTE_BIT_OVERLOAD_SZ_3(family, fun, c, size, arg1_type, arg1_name, \
+                               arg2_type, arg2_name)                   \
+       __RTE_BIT_OVERLOAD_V_3(family,, fun, c, size, arg1_type, arg1_name, \
+                              arg2_type, arg2_name)                    \
+       __RTE_BIT_OVERLOAD_V_3(family, v_, fun, c volatile, size, arg1_type, \
+                              arg1_name, arg2_type, arg2_name)
+
+#define __RTE_BIT_OVERLOAD_3(family, fun, c, arg1_type, arg1_name, arg2_type, \
                             arg2_name)                                 \
-       __RTE_BIT_OVERLOAD_SZ_3(fun, qualifier, 32, arg1_type, arg1_name, \
+       __RTE_BIT_OVERLOAD_SZ_3(family, fun, c, 32, arg1_type, arg1_name, \
                                arg2_type, arg2_name)                   \
-       __RTE_BIT_OVERLOAD_SZ_3(fun, qualifier, 64, arg1_type, arg1_name, \
+       __RTE_BIT_OVERLOAD_SZ_3(family, fun, c, 64, arg1_type, arg1_name, \
                                arg2_type, arg2_name)
 
-#define __RTE_BIT_OVERLOAD_SZ_3R(fun, qualifier, size, ret_type, arg1_type, \
-                                arg1_name, arg2_type, arg2_name)       \
+#define __RTE_BIT_OVERLOAD_V_3R(family, v, fun, c, size, ret_type, arg1_type, \
+                               arg1_name, arg2_type, arg2_name)        \
        static inline ret_type                                          \
-       rte_bit_ ## fun(uint ## size ## _t *addr, arg1_type arg1_name,  \
-                       arg2_type arg2_name)                            \
+       rte_bit_ ## family ## fun(c uint ## size ## _t *addr,           \
+                                 arg1_type arg1_name, arg2_type arg2_name) \
        {                                                               \
-               return __rte_bit_ ## fun ## size(addr, arg1_name, arg2_name); \
+               return __rte_bit_ ## family ## v ## fun ## size(addr,   \
+                                                               arg1_name, \
+                                                               arg2_name); \
        }
 
-#define __RTE_BIT_OVERLOAD_3R(fun, qualifier, ret_type, arg1_type, arg1_name, \
-                             arg2_type, arg2_name)                     \
-       __RTE_BIT_OVERLOAD_SZ_3R(fun, qualifier, 32, ret_type, arg1_type, \
+#define __RTE_BIT_OVERLOAD_SZ_3R(family, fun, c, size, ret_type, arg1_type, \
                                 arg1_name, arg2_type, arg2_name)       \
-       __RTE_BIT_OVERLOAD_SZ_3R(fun, qualifier, 64, ret_type, arg1_type, \
-                                arg1_name, arg2_type, arg2_name)
+       __RTE_BIT_OVERLOAD_V_3R(family,, fun, c, size, ret_type, \
+                               arg1_type, arg1_name, arg2_type, arg2_name) \
+       __RTE_BIT_OVERLOAD_V_3R(family, v_, fun, c volatile, size, \
+                               ret_type, arg1_type, arg1_name, arg2_type, \
+                               arg2_name)
 
-#define __RTE_BIT_OVERLOAD_SZ_4(fun, qualifier, size, arg1_type, arg1_name, \
-                               arg2_type, arg2_name, arg3_type, arg3_name) \
+#define __RTE_BIT_OVERLOAD_3R(family, fun, c, ret_type, arg1_type, arg1_name, \
+                             arg2_type, arg2_name)                     \
+       __RTE_BIT_OVERLOAD_SZ_3R(family, fun, c, 32, ret_type,          \
+                                arg1_type, arg1_name, arg2_type, arg2_name) \
+       __RTE_BIT_OVERLOAD_SZ_3R(family, fun, c, 64, ret_type, \
+                                arg1_type, arg1_name, arg2_type, arg2_name)
+
+#define __RTE_BIT_OVERLOAD_V_4(family, v, fun, c, size, arg1_type, arg1_name, \
+                              arg2_type, arg2_name, arg3_type, arg3_name) \
        static inline void                                              \
-       rte_bit_ ## fun(uint ## size ## _t *addr, arg1_type arg1_name,  \
-                       arg2_type arg2_name, arg3_type arg3_name)       \
+       rte_bit_ ## family ## fun(c uint ## size ## _t *addr,           \
+                                 arg1_type arg1_name, arg2_type arg2_name, \
+                                 arg3_type arg3_name)                  \
        {                                                               \
-               __rte_bit_ ## fun ## size(addr, arg1_name, arg2_name,   \
-                                         arg3_name);                 \
+               __rte_bit_ ## family ## v ## fun ## size(addr, arg1_name, \
+                                                        arg2_name,     \
+                                                        arg3_name);    \
        }
 
-#define __RTE_BIT_OVERLOAD_4(fun, qualifier, arg1_type, arg1_name, arg2_type, \
-                            arg2_name, arg3_type, arg3_name)           \
-       __RTE_BIT_OVERLOAD_SZ_4(fun, qualifier, 32, arg1_type, arg1_name, \
+#define __RTE_BIT_OVERLOAD_SZ_4(family, fun, c, size, arg1_type, arg1_name, \
                                arg2_type, arg2_name, arg3_type, arg3_name) \
-       __RTE_BIT_OVERLOAD_SZ_4(fun, qualifier, 64, arg1_type, arg1_name, \
-                               arg2_type, arg2_name, arg3_type, arg3_name)
-
-#define __RTE_BIT_OVERLOAD_SZ_4R(fun, qualifier, size, ret_type, arg1_type, \
-                                arg1_name, arg2_type, arg2_name, arg3_type, \
-                                arg3_name)                             \
+       __RTE_BIT_OVERLOAD_V_4(family,, fun, c, size, arg1_type,        \
+                              arg1_name, arg2_type, arg2_name, arg3_type, \
+                              arg3_name)                               \
+       __RTE_BIT_OVERLOAD_V_4(family, v_, fun, c volatile, size,       \
+                              arg1_type, arg1_name, arg2_type, arg2_name, \
+                              arg3_type, arg3_name)
+
+#define __RTE_BIT_OVERLOAD_4(family, fun, c, arg1_type, arg1_name, arg2_type, \
+                            arg2_name, arg3_type, arg3_name)           \
+       __RTE_BIT_OVERLOAD_SZ_4(family, fun, c, 32, arg1_type,          \
+                               arg1_name, arg2_type, arg2_name, arg3_type, \
+                               arg3_name)                              \
+       __RTE_BIT_OVERLOAD_SZ_4(family, fun, c, 64, arg1_type,          \
+                               arg1_name, arg2_type, arg2_name, arg3_type, \
+                               arg3_name)
+
+#define __RTE_BIT_OVERLOAD_V_4R(family, v, fun, c, size, ret_type, arg1_type, \
+                               arg1_name, arg2_type, arg2_name, arg3_type, \
+                               arg3_name)                              \
        static inline ret_type                                          \
-       rte_bit_ ## fun(uint ## size ## _t *addr, arg1_type arg1_name,  \
-                       arg2_type arg2_name, arg3_type arg3_name)       \
+       rte_bit_ ## family ## fun(c uint ## size ## _t *addr,           \
+                                 arg1_type arg1_name, arg2_type arg2_name, \
+                                 arg3_type arg3_name)                  \
        {                                                               \
-               return __rte_bit_ ## fun ## size(addr, arg1_name, arg2_name, \
-                                                arg3_name);            \
+               return __rte_bit_ ## family ## v ## fun ## size(addr,   \
+                                                               arg1_name, \
+                                                               arg2_name, \
+                                                               arg3_name); \
        }
 
-#define __RTE_BIT_OVERLOAD_4R(fun, qualifier, ret_type, arg1_type, arg1_name, \
-                             arg2_type, arg2_name, arg3_type, arg3_name) \
-       __RTE_BIT_OVERLOAD_SZ_4R(fun, qualifier, 32, ret_type, arg1_type, \
+#define __RTE_BIT_OVERLOAD_SZ_4R(family, fun, c, size, ret_type, arg1_type, \
                                 arg1_name, arg2_type, arg2_name, arg3_type, \
                                 arg3_name)                             \
-       __RTE_BIT_OVERLOAD_SZ_4R(fun, qualifier, 64, ret_type, arg1_type, \
-                                arg1_name, arg2_type, arg2_name, arg3_type, \
-                                arg3_name)
-
-__RTE_BIT_OVERLOAD_2R(test, const, bool, unsigned int, nr)
-__RTE_BIT_OVERLOAD_2(set,, unsigned int, nr)
-__RTE_BIT_OVERLOAD_2(clear,, unsigned int, nr)
-__RTE_BIT_OVERLOAD_3(assign,, unsigned int, nr, bool, value)
-__RTE_BIT_OVERLOAD_2(flip,, unsigned int, nr)
-
-__RTE_BIT_OVERLOAD_3R(atomic_test, const, bool, unsigned int, nr,
+       __RTE_BIT_OVERLOAD_V_4R(family,, fun, c, size, ret_type, arg1_type, \
+                               arg1_name, arg2_type, arg2_name, arg3_type, \
+                               arg3_name)                              \
+       __RTE_BIT_OVERLOAD_V_4R(family, v_, fun, c volatile, size,      \
+                               ret_type, arg1_type, arg1_name, arg2_type, \
+                               arg2_name, arg3_type, arg3_name)
+
+#define __RTE_BIT_OVERLOAD_4R(family, fun, c, ret_type, arg1_type, arg1_name, \
+                             arg2_type, arg2_name, arg3_type, arg3_name) \
+       __RTE_BIT_OVERLOAD_SZ_4R(family, fun, c, 32, ret_type,          \
+                                arg1_type, arg1_name, arg2_type, arg2_name, \
+                                arg3_type, arg3_name)                  \
+       __RTE_BIT_OVERLOAD_SZ_4R(family, fun, c, 64, ret_type,          \
+                                arg1_type, arg1_name, arg2_type, arg2_name, \
+                                arg3_type, arg3_name)
+
+__RTE_BIT_OVERLOAD_2R(, test, const, bool, unsigned int, nr)
+__RTE_BIT_OVERLOAD_2(, set,, unsigned int, nr)
+__RTE_BIT_OVERLOAD_2(, clear,, unsigned int, nr)
+__RTE_BIT_OVERLOAD_3(, assign,, unsigned int, nr, bool, value)
+__RTE_BIT_OVERLOAD_2(, flip,, unsigned int, nr)
+
+__RTE_BIT_OVERLOAD_3R(atomic_, test, const, bool, unsigned int, nr,
                      int, memory_order)
-__RTE_BIT_OVERLOAD_3(atomic_set,, unsigned int, nr, int, memory_order)
-__RTE_BIT_OVERLOAD_3(atomic_clear,, unsigned int, nr, int, memory_order)
-__RTE_BIT_OVERLOAD_4(atomic_assign,, unsigned int, nr, bool, value,
+__RTE_BIT_OVERLOAD_3(atomic_, set,, unsigned int, nr, int, memory_order)
+__RTE_BIT_OVERLOAD_3(atomic_, clear,, unsigned int, nr, int, memory_order)
+__RTE_BIT_OVERLOAD_4(atomic_, assign,, unsigned int, nr, bool, value,
                     int, memory_order)
-__RTE_BIT_OVERLOAD_3(atomic_flip,, unsigned int, nr, int, memory_order)
-__RTE_BIT_OVERLOAD_3R(atomic_test_and_set,, bool, unsigned int, nr,
+__RTE_BIT_OVERLOAD_3(atomic_, flip,, unsigned int, nr, int, memory_order)
+__RTE_BIT_OVERLOAD_3R(atomic_, test_and_set,, bool, unsigned int, nr,
                      int, memory_order)
-__RTE_BIT_OVERLOAD_3R(atomic_test_and_clear,, bool, unsigned int, nr,
+__RTE_BIT_OVERLOAD_3R(atomic_, test_and_clear,, bool, unsigned int, nr,
                      int, memory_order)
-__RTE_BIT_OVERLOAD_4R(atomic_test_and_assign,, bool, unsigned int, nr,
+__RTE_BIT_OVERLOAD_4R(atomic_, test_and_assign,, bool, unsigned int, nr,
                      bool, value, int, memory_order)
 
 #endif
-- 
2.34.1


Reply via email to