Changeset View
Standalone View
contrib/ofed/libibverbs/include/infiniband/arch.h
(first 28 lines — license header — not shown)
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef INFINIBAND_ARCH_H | #ifndef INFINIBAND_ARCH_H | ||||
#define INFINIBAND_ARCH_H | #define INFINIBAND_ARCH_H | ||||
#include <stdint.h> | #include <stdint.h> | ||||
#include <infiniband/endian.h> | #include <endian.h> | ||||
#include <infiniband/byteswap.h> | #include <byteswap.h> | ||||
/*
 * Drop any pre-existing macro definitions of htonll/ntohll so that the
 * inline functions below can be defined without clashing.
 */
#ifdef htonll
#undef htonll
#endif
#ifdef ntohll
#undef ntohll
#endif

/*
 * 64-bit host <-> network (big-endian) byte-order conversion.
 * Uses the unprefixed BYTE_ORDER macros from <endian.h>.
 */
#if BYTE_ORDER == LITTLE_ENDIAN
static inline uint64_t htonll(uint64_t x) { return bswap_64(x); }
static inline uint64_t ntohll(uint64_t x) { return bswap_64(x); }
#elif BYTE_ORDER == BIG_ENDIAN
/* Host order is already network order; conversions are identity. */
static inline uint64_t htonll(uint64_t x) { return x; }
static inline uint64_t ntohll(uint64_t x) { return x; }
#else
#error BYTE_ORDER is neither LITTLE_ENDIAN nor BIG_ENDIAN
#endif
/*
 * Architecture-specific defines.  Currently, an architecture is
 * required to implement the following operations:
 *
 * mb() - memory barrier.  No loads or stores may be reordered across
 *     this macro by either the compiler or the CPU.
 * rmb() - read memory barrier.  No loads may be reordered across this
 *     macro by either the compiler or the CPU.
 * wmb() - write memory barrier.  No stores may be reordered across
 *     this macro by either the compiler or the CPU.
 * wc_wmb() - flush write combine buffers.  No write-combined writes
 *     will be reordered across this macro by either the compiler or
 *     the CPU.
 */
/*
 * __asm__/__volatile__ (not asm/volatile) so this public header also
 * compiles under -ansi/-std=c11, where GCC disables the plain keywords.
 */
#if defined(__i386__)
/* Locked RMW on the stack acts as a full barrier on i386. */
#define mb()	 __asm__ __volatile__("lock; addl $0,0(%%esp) " ::: "memory")
#define rmb()	 mb()
#define wmb()	 __asm__ __volatile__("" ::: "memory")
#define wc_wmb() mb()
#elif defined(__x86_64__)
/*
 * NOTE(review): mb() is only a compiler barrier here; this relies on
 * x86 TSO — confirm it is strong enough for StoreLoad ordering against
 * the device before relying on it.
 */
#define mb()	 __asm__ __volatile__("" ::: "memory")
#define rmb()	 mb()
#define wmb()	 __asm__ __volatile__("" ::: "memory")
#define wc_wmb() __asm__ __volatile__("sfence" ::: "memory")
#elif defined(__PPC64__)
#define mb()	 __asm__ __volatile__("sync" ::: "memory")
#define rmb()	 __asm__ __volatile__("lwsync" ::: "memory")
#define wmb()	 rmb()
#define wc_wmb() mb()
#elif defined(__ia64__)
#define mb()	 __asm__ __volatile__("mf" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() __asm__ __volatile__("fwb" ::: "memory")
#elif defined(__PPC__)
#define mb()	 __asm__ __volatile__("sync" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()
#elif defined(__sparc_v9__)
#define mb()	 __asm__ __volatile__("membar #LoadLoad | #LoadStore | #StoreStore | #StoreLoad" ::: "memory")
#define rmb()	 __asm__ __volatile__("membar #LoadLoad" ::: "memory")
#define wmb()	 __asm__ __volatile__("membar #StoreStore" ::: "memory")
#define wc_wmb() wmb()
#elif defined(__sparc__)
#define mb()	 __asm__ __volatile__("" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()
#else
/* Fallback: compiler-only barriers; no CPU ordering is enforced. */
#warning No architecture specific defines found.  Using generic implementation.
#define mb()	 __asm__ __volatile__("" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()
#endif
#endif /* INFINIBAND_ARCH_H */