/*
 * Memory barrier macros (x86 only in this fragment).
 *
 * On i386/x86_64 the hardware memory model is strong enough that the
 * virt_* read/write barriers only need to stop the *compiler* from
 * reordering; hence virt_rmb()/virt_wmb() expand to a pure compiler
 * barrier that emits no machine instruction.
 */
#if defined(__i386__) || defined(__x86_64__)

/*
 * Compiler-only barrier: the "memory" clobber forbids the compiler from
 * caching memory values across this point or reordering accesses around
 * it. The __asm__/__volatile__ spellings (rather than asm/volatile) keep
 * this valid under strict -std=c11 as well as the GNU dialects.
 */
#define barrier() __asm__ __volatile__("" ::: "memory")

/* Full memory barrier: compiler barrier + CPU fence. */
#define virt_mb() __sync_synchronize()

/* On x86, loads are not reordered with loads and stores are not
 * reordered with stores, so a compiler barrier suffices here. */
#define virt_rmb() barrier()
#define virt_wmb() barrier()

/* Atomic store should be enough, but gcc generates worse code in that case. */
/*
 * Store `value` into `var` with sequentially-consistent ordering.
 * Implemented as an exchange (result discarded) because a plain
 * SEQ_CST atomic store produced worse code — see comment above.
 * __typeof__ is used instead of typeof for strict-C11 compatibility.
 * Note: the do/while(0) deliberately has NO trailing semicolon, so
 * "if (c) virt_store_mb(a, b); else ..." parses correctly (the
 * original had a stray ';' here, which broke that pattern).
 */
#define virt_store_mb(var, value) do { \
	__typeof__(var) virt_store_mb_value = (value); \
	__atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
			  __ATOMIC_SEQ_CST); \
	barrier(); \
} while (0)

/* Weak barriers should be used. If not - it's a bug */
# define mb() abort()
# define rmb() abort()
# define wmb() abort()

#else

#error Please fill in barrier macros

#endif