/* ck_pr.h, revision 343494 */
/*
 * Copyright 2009, 2010 Samy Al Bahra.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#ifndef CK_PR_SPARCV9_H
#define CK_PR_SPARCV9_H

#ifndef CK_PR_H
#error Do not include this file directly, use ck_pr.h
#endif

#include <ck_cc.h>
#include <ck_md.h>

/*
 * The following represent supported atomic operations.
 * These operations may be emulated.
 */
#include "ck_f_pr.h"

/*
 * Minimum interface requirement met.
 */
#define CK_F_PR

/*
 * Order loads at the least.
 */
CK_CC_INLINE static void
ck_pr_stall(void)
{

	__asm__ __volatile__("membar #LoadLoad" ::: "memory");
	return;
}

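/*
 * Illustrative sketch (hypothetical "ready" flag, not part of this
 * header): ck_pr_stall() is meant for the body of busy-wait loops,
 * e.g. spinning until another thread publishes a flag:
 *
 *	while (ck_pr_load_uint(&ready) == 0)
 *		ck_pr_stall();
 *	ck_pr_fence_acquire();
 */
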
#define CK_PR_FENCE(T, I)				\
	CK_CC_INLINE static void			\
	ck_pr_fence_strict_##T(void)			\
	{						\
		__asm__ __volatile__(I ::: "memory");   \
	}

/*
 * Atomic operations are treated as both load and store
 * operations on SPARCv9.
 */
CK_PR_FENCE(atomic, "membar #StoreStore")
CK_PR_FENCE(atomic_store, "membar #StoreStore")
CK_PR_FENCE(atomic_load, "membar #StoreLoad")
CK_PR_FENCE(store_atomic, "membar #StoreStore")
CK_PR_FENCE(load_atomic, "membar #LoadStore")
CK_PR_FENCE(store, "membar #StoreStore")
CK_PR_FENCE(store_load, "membar #StoreLoad")
CK_PR_FENCE(load, "membar #LoadLoad")
CK_PR_FENCE(load_store, "membar #LoadStore")
CK_PR_FENCE(memory, "membar #MemIssue")
CK_PR_FENCE(acquire, "membar #LoadLoad | #LoadStore")
CK_PR_FENCE(release, "membar #LoadStore | #StoreStore")
CK_PR_FENCE(acqrel, "membar #LoadLoad | #LoadStore | #StoreStore")
CK_PR_FENCE(lock, "membar #LoadLoad | #LoadStore | #StoreStore | #StoreLoad")
CK_PR_FENCE(unlock, "membar #LoadStore | #StoreStore")

#undef CK_PR_FENCE
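
/*
 * For reference, CK_PR_FENCE(load, "membar #LoadLoad") above expands to
 * roughly the following (illustration only, no additional code is
 * generated here):
 *
 *	CK_CC_INLINE static void
 *	ck_pr_fence_strict_load(void)
 *	{
 *		__asm__ __volatile__("membar #LoadLoad" ::: "memory");
 *	}
 *
 * ck_pr.h then layers the non-strict ck_pr_fence_*() wrappers on top of
 * these strict variants, depending on the configured memory model.
 */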

#define CK_PR_LOAD(S, M, T, C, I)				\
	CK_CC_INLINE static T					\
	ck_pr_md_load_##S(const M *target)			\
	{							\
		T r;						\
		__asm__ __volatile__(I " [%1], %0"		\
					: "=&r" (r)		\
					: "r"   (target)	\
					: "memory");		\
		return (r);					\
	}

CK_PR_LOAD(ptr, void, void *, uint64_t, "ldx")

#define CK_PR_LOAD_S(S, T, I) CK_PR_LOAD(S, T, T, T, I)

CK_PR_LOAD_S(64, uint64_t, "ldx")
CK_PR_LOAD_S(32, uint32_t, "lduw")
CK_PR_LOAD_S(uint, unsigned int, "lduw")
CK_PR_LOAD_S(double, double, "ldx")
CK_PR_LOAD_S(int, int, "ldsw")

#undef CK_PR_LOAD_S
#undef CK_PR_LOAD
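
/*
 * As an illustration, CK_PR_LOAD_S(32, uint32_t, "lduw") above expands
 * to roughly:
 *
 *	CK_CC_INLINE static uint32_t
 *	ck_pr_md_load_32(const uint32_t *target)
 *	{
 *		uint32_t r;
 *		__asm__ __volatile__("lduw [%1], %0"
 *					: "=&r" (r)
 *					: "r"   (target)
 *					: "memory");
 *		return (r);
 *	}
 *
 * The generic ck_pr_load_*() interface in ck_pr.h is built on top of
 * these ck_pr_md_load_*() helpers.
 */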
112
113#define CK_PR_STORE(S, M, T, C, I)				\
114	CK_CC_INLINE static void				\
115	ck_pr_md_store_##S(M *target, T v)			\
116	{							\
117		__asm__ __volatile__(I " %0, [%1]"		\
118					:			\
119					: "r" (v),		\
120					  "r" (target)		\
121					: "memory");		\
122		return;						\
123	}
124
125CK_PR_STORE(ptr, void, const void *, uint64_t, "stx")
126
127#define CK_PR_STORE_S(S, T, I) CK_PR_STORE(S, T, T, T, I)
128
129CK_PR_STORE_S(8, uint8_t, "stub")
130CK_PR_STORE_S(64, uint64_t, "stx")
131CK_PR_STORE_S(32, uint32_t, "stuw")
132CK_PR_STORE_S(uint, unsigned int, "stuw")
133CK_PR_STORE_S(double, double, "stx")
134CK_PR_STORE_S(int, int, "stsw")
135
136#undef CK_PR_STORE_S
137#undef CK_PR_STORE
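
/*
 * Sketch of how the loads, stores and fences are intended to combine
 * (hypothetical "datum" and "flag" variables, not part of this header).
 * A producer publishing a value for a consumer might do:
 *
 *	ck_pr_store_64(&datum, value);
 *	ck_pr_fence_store();
 *	ck_pr_store_uint(&flag, 1);
 *
 * while the consumer pairs its loads of flag and datum with
 * ck_pr_fence_load() (or an acquire fence).
 */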

/* Use the appropriate address space for atomics within the FreeBSD kernel. */
#if defined(__FreeBSD__) && defined(_KERNEL)
#include <sys/cdefs.h>
#include <machine/atomic.h>
#define CK_PR_INS_CAS "casa"
#define CK_PR_INS_CASX "casxa"
#define CK_PR_INS_SWAP "swapa"
#define CK_PR_ASI_ATOMIC __XSTRING(__ASI_ATOMIC)
#else
#define CK_PR_INS_CAS "cas"
#define CK_PR_INS_CASX "casx"
#define CK_PR_INS_SWAP "swap"
#define CK_PR_ASI_ATOMIC ""
#endif
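
/*
 * Notes on the compare-and-swap templates below (descriptive only):
 * in userspace CK_PR_ASI_ATOMIC expands to the empty string, so the
 * generated code uses plain "cas"/"casx"/"swap"; the FreeBSD kernel
 * build instead uses the alternate-space forms "casa"/"casxa"/"swapa"
 * with the address-space identifier produced by __XSTRING(__ASI_ATOMIC).
 *
 * cas/casx compare the word at [target] with the "compare" register and,
 * if they are equal, store the "set" register to [target].  In either
 * case the old memory contents end up in the "set" register, which is
 * why "set" is an in/out ("+&r") operand and why the *_value variants
 * can return the witnessed value through *value.
 */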

CK_CC_INLINE static bool
ck_pr_cas_64_value(uint64_t *target, uint64_t compare, uint64_t set, uint64_t *value)
{

	__asm__ __volatile__(CK_PR_INS_CASX " [%1] " CK_PR_ASI_ATOMIC ", %2, %0"
				: "+&r" (set)
				: "r"   (target),
				  "r"   (compare)
				: "memory");

	*value = set;
	return (compare == set);
}

CK_CC_INLINE static bool
ck_pr_cas_64(uint64_t *target, uint64_t compare, uint64_t set)
{

	__asm__ __volatile__(CK_PR_INS_CASX " [%1] " CK_PR_ASI_ATOMIC ", %2, %0"
				: "+&r" (set)
				: "r" (target),
				  "r" (compare)
				: "memory");

	return (compare == set);
}

CK_CC_INLINE static bool
ck_pr_cas_ptr(void *target, void *compare, void *set)
{

	return ck_pr_cas_64(target, (uint64_t)compare, (uint64_t)set);
}

CK_CC_INLINE static bool
ck_pr_cas_ptr_value(void *target, void *compare, void *set, void *previous)
{

	return ck_pr_cas_64_value(target, (uint64_t)compare, (uint64_t)set, previous);
}

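/*
 * Usage sketch (illustration only; "counter" is a hypothetical variable,
 * not part of this header): a retry loop that atomically increments a
 * 64-bit counter.  The _value variant refreshes "old" on failure, so no
 * separate reload is needed between attempts.
 *
 *	uint64_t old = ck_pr_load_64(&counter);
 *	while (ck_pr_cas_64_value(&counter, old, old + 1, &old) == false)
 *		ck_pr_stall();
 */
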
#define CK_PR_CAS(N, T)							\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N##_value(T *target, T compare, T set, T *value)	\
	{								\
		__asm__ __volatile__(CK_PR_INS_CAS " [%1] " CK_PR_ASI_ATOMIC ", %2, %0" \
					: "+&r" (set)			\
					: "r"   (target),		\
					  "r"   (compare)		\
					: "memory");			\
		*value = set;						\
		return (compare == set);				\
	} 								\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N(T *target, T compare, T set)			\
	{								\
		__asm__ __volatile__(CK_PR_INS_CAS " [%1] " CK_PR_ASI_ATOMIC ", %2, %0" \
					: "+&r" (set)			\
					: "r" (target),			\
					  "r" (compare)			\
					: "memory");			\
		return (compare == set);				\
	}

CK_PR_CAS(32, uint32_t)
CK_PR_CAS(uint, unsigned int)
CK_PR_CAS(int, int)

#undef CK_PR_CAS

#define CK_PR_FAS(N, T)						\
	CK_CC_INLINE static T 					\
	ck_pr_fas_##N(T *target, T update)			\
	{							\
								\
		__asm__ __volatile__(CK_PR_INS_SWAP " [%1] " CK_PR_ASI_ATOMIC ", %0"		\
					: "+&r" (update)	\
					: "r"   (target)	\
					: "memory");		\
		return (update);				\
	}

CK_PR_FAS(int, int)
CK_PR_FAS(uint, unsigned int)
CK_PR_FAS(32, uint32_t)

#undef CK_PR_FAS
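
/*
 * Illustrative sketch (hypothetical "lock" variable, not part of this
 * header): one way a caller might build a test-and-set spinlock from
 * fetch-and-store plus the fence interface.
 *
 *	while (ck_pr_fas_uint(&lock, 1) == 1)
 *		ck_pr_stall();
 *	ck_pr_fence_lock();
 *	... critical section ...
 *	ck_pr_fence_unlock();
 *	ck_pr_store_uint(&lock, 0);
 */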

#undef CK_PR_INS_CAS
#undef CK_PR_INS_CASX
#undef CK_PR_INS_SWAP
#undef CK_PR_ASI_ATOMIC

#endif /* CK_PR_SPARCV9_H */