/*
 * Memory barrier macros for userspace builds on x86 (i386 / x86_64).
 *
 * barrier()   - compiler-only barrier: forbids the compiler from moving
 *               memory accesses across it; emits no machine instruction.
 * mb()        - full hardware memory barrier via the GCC builtin
 *               __sync_synchronize().
 * smp_mb()    - full barrier for CPU-to-CPU ordering; same as mb() here.
 * smp_rmb()/smp_wmb(), dma_rmb()/dma_wmb()
 *             - mapped to a compiler barrier only: x86's strong (TSO)
 *               memory model already orders load/load and store/store
 *               at the hardware level, so no fence instruction is needed.
 * rmb()/wmb() - deliberately defined to abort(): on this platform code
 *               must use the weak (smp_/dma_) variants, so reaching a
 *               strong rmb()/wmb() at runtime is a bug.
 */
#if defined(__i386__) || defined(__x86_64__)
/* __asm__/__volatile__ spellings stay valid under strict ISO C modes
 * (-std=c11), where the plain `asm` keyword is not recognized. */
#define barrier() __asm__ __volatile__("" ::: "memory")
#define mb() __sync_synchronize()

#define smp_mb() mb()
#define dma_rmb() barrier()
#define dma_wmb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
/* Weak barriers should be used. If not - it's a bug */
#define rmb() abort()
#define wmb() abort()
#else
#error Please fill in barrier macros
#endif
| 16 | |