1/*
2 * Copyright (c) 2005 Topspin Communications.  All rights reserved.
3 *
4 * This software is available to you under a choice of one of two
5 * licenses.  You may choose to be licensed under the terms of the GNU
6 * General Public License (GPL) Version 2, available from the file
7 * COPYING in the main directory of this source tree, or the
8 * OpenIB.org BSD license below:
9 *
10 *     Redistribution and use in source and binary forms, with or
11 *     without modification, are permitted provided that the following
12 *     conditions are met:
13 *
14 *      - Redistributions of source code must retain the above
15 *        copyright notice, this list of conditions and the following
16 *        disclaimer.
17 *
18 *      - Redistributions in binary form must reproduce the above
19 *        copyright notice, this list of conditions and the following
20 *        disclaimer in the documentation and/or other materials
21 *        provided with the distribution.
22 *
23 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
24 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
25 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
26 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
27 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
28 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
29 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
30 * SOFTWARE.
31 */
32
33#ifndef INFINIBAND_ARCH_H
34#define INFINIBAND_ARCH_H
35
36#include <stdint.h>
37#include <infiniband/endian.h>
38#include <infiniband/byteswap.h>
39
#if __BYTE_ORDER != __LITTLE_ENDIAN && __BYTE_ORDER != __BIG_ENDIAN
#error __BYTE_ORDER is neither __LITTLE_ENDIAN nor __BIG_ENDIAN
#endif

/* Convert a 64-bit value from host byte order to network (big-endian) order. */
static inline uint64_t htonll(uint64_t x)
{
#if __BYTE_ORDER == __LITTLE_ENDIAN
	return bswap_64(x);
#else
	return x;
#endif
}

/* Convert a 64-bit value from network (big-endian) order to host byte order. */
static inline uint64_t ntohll(uint64_t x)
{
#if __BYTE_ORDER == __LITTLE_ENDIAN
	return bswap_64(x);
#else
	return x;
#endif
}
49
50/*
51 * Architecture-specific defines.  Currently, an architecture is
52 * required to implement the following operations:
53 *
54 * mb() - memory barrier.  No loads or stores may be reordered across
55 *     this macro by either the compiler or the CPU.
56 * rmb() - read memory barrier.  No loads may be reordered across this
57 *     macro by either the compiler or the CPU.
58 * wmb() - write memory barrier.  No stores may be reordered across
59 *     this macro by either the compiler or the CPU.
60 * wc_wmb() - flush write combine buffers.  No write-combined writes
61 *     will be reordered across this macro by either the compiler or
62 *     the CPU.
63 */
64
#if defined(__i386__)

/*
 * i386: a locked read-modify-write on the stack acts as a full barrier.
 * Plain stores are not reordered with other stores on x86, so wmb() only
 * needs to stop the compiler ("memory" clobber with no instruction).
 */
#define mb()	 asm volatile("lock; addl $0,0(%%esp) " ::: "memory")
#define rmb()	 mb()
#define wmb()	 asm volatile("" ::: "memory")
#define wc_wmb() mb()

#elif defined(__x86_64__)

/*
 * Only use lfence for mb() and rmb() because we don't care about
 * ordering against non-temporal stores (for now at least).
 *
 * NOTE(review): lfence orders loads but does not prevent store->load
 * reordering; a fully general mb() on x86-64 would normally use mfence
 * or a locked instruction.  The comment above suggests this is a
 * deliberate tradeoff — confirm before relying on mb() to order stores
 * against later loads.
 */
#define mb()	 asm volatile("lfence" ::: "memory")
#define rmb()	 mb()
#define wmb()	 asm volatile("" ::: "memory")
/* sfence makes earlier stores (including write-combined ones) globally visible. */
#define wc_wmb() asm volatile("sfence" ::: "memory")

#elif defined(__PPC64__)

/* PPC64: "sync" is a full barrier; "lwsync" is the lighter-weight barrier
 * sufficient for load ordering. */
#define mb()	 asm volatile("sync" ::: "memory")
#define rmb()	 asm volatile("lwsync" ::: "memory")
#define wmb()	 mb()
#define wc_wmb() wmb()

#elif defined(__ia64__)

/* ia64: "mf" is the memory fence; "fwb" flushes the write buffer,
 * used here to drain write-combining. */
#define mb()	 asm volatile("mf" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() asm volatile("fwb" ::: "memory")

#elif defined(__PPC__)

/* 32-bit PPC: only "sync" is used; no lwsync optimization for rmb(). */
#define mb()	 asm volatile("sync" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()

#elif defined(__sparc_v9__)

/* SPARC v9: membar with explicit ordering masks; mb() combines all four
 * load/store orderings into a full barrier. */
#define mb()	 asm volatile("membar #LoadLoad | #LoadStore | #StoreStore | #StoreLoad" ::: "memory")
#define rmb()	 asm volatile("membar #LoadLoad" ::: "memory")
#define wmb()	 asm volatile("membar #StoreStore" ::: "memory")
#define wc_wmb() wmb()

#elif defined(__sparc__)

/* Pre-v9 SPARC: compiler barrier only — presumably relies on the
 * hardware's TSO memory model; confirm for the targets in use. */
#define mb()	 asm volatile("" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()

#else

#warning No architecture specific defines found.  Using generic implementation.

/* Fallback: compiler-only barriers.  This does NOT constrain CPU
 * reordering and is only safe on architectures where that suffices. */
#define mb()	 asm volatile("" ::: "memory")
#define rmb()	 mb()
#define wmb()	 mb()
#define wc_wmb() wmb()

#endif
128
129#endif /* INFINIBAND_ARCH_H */
130