/*
 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP
#define OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP

// Implementation of class Atomic

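// Plain, direct stores; one overload per primitive and pointer-sized type,
// with and without a volatile destination.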
inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, jlong*    dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }

inline void Atomic::inc    (volatile jint*     dest) { (void)add    (1, dest); }
inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); }
inline void Atomic::inc_ptr(volatile void*     dest) { (void)add_ptr(1, dest); }

inline void Atomic::dec    (volatile jint*     dest) { (void)add    (-1, dest); }
inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); }
inline void Atomic::dec_ptr(volatile void*     dest) { (void)add_ptr(-1, dest); }

inline jlong Atomic::load(volatile jlong* src) { return *src; }

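// Atomic add of a jint via a load/add/cas retry loop: the current value is
// loaded into %o2, the candidate sum is built in %o3, and cas installs it only
// if the location still holds %o2; otherwise the loop retries. The final add
// recomputes and returns the new value (add_value plus the value that was
// replaced).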
inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  intptr_t rv;
  __asm__ volatile(
    "1: \n\t"
    " ld     [%2], %%o2\n\t"
    " add    %1, %%o2, %%o3\n\t"
    " cas    [%2], %%o2, %%o3\n\t"
    " cmp    %%o2, %%o3\n\t"
    " bne    1b\n\t"
    "  nop\n\t"
    " add    %1, %%o2, %0\n\t"
    : "=r" (rv)
    : "r" (add_value), "r" (dest)
    : "memory", "o2", "o3");
  return rv;
}

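// Pointer-sized atomic add: the same cas retry loop, using ldx/casx and a
// branch on %xcc for 64-bit (_LP64) builds, and ld/cas for 32-bit builds.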
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t rv;
#ifdef _LP64
  __asm__ volatile(
    "1: \n\t"
    " ldx    [%2], %%o2\n\t"
    " add    %1, %%o2, %%o3\n\t"
    " casx   [%2], %%o2, %%o3\n\t"
    " cmp    %%o2, %%o3\n\t"
    " bne    %%xcc, 1b\n\t"
    "  nop\n\t"
    " add    %1, %%o2, %0\n\t"
    : "=r" (rv)
    : "r" (add_value), "r" (dest)
    : "memory", "o2", "o3");
#else
  __asm__ volatile(
    "1: \n\t"
    " ld     [%2], %%o2\n\t"
    " add    %1, %%o2, %%o3\n\t"
    " cas    [%2], %%o2, %%o3\n\t"
    " cmp    %%o2, %%o3\n\t"
    " bne    1b\n\t"
    "  nop\n\t"
    " add    %1, %%o2, %0\n\t"
    : "=r" (rv)
    : "r" (add_value), "r" (dest)
    : "memory", "o2", "o3");
#endif // _LP64
  return rv;
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)add_ptr((intptr_t)add_value, (volatile intptr_t*)dest);
}

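// Atomic exchange of a jint: the swap instruction atomically exchanges the
// register holding exchange_value with the 32-bit word at dest; the "0"
// constraint ties the input to the output register, so rv ends up holding the
// previous contents of dest.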
inline jint     Atomic::xchg    (jint     exchange_value, volatile jint*     dest) {
  intptr_t rv = exchange_value;
  __asm__ volatile(
    " swap   [%2],%1\n\t"
    : "=r" (rv)
    : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
    : "memory");
  return rv;
}

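// Pointer-sized atomic exchange: 64-bit builds loop on casx (there is no
// 64-bit swap instruction), retrying until the exchange value is installed
// and returning the previous contents; 32-bit builds use swap as above.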
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  intptr_t rv = exchange_value;
#ifdef _LP64
  __asm__ volatile(
    "1:\n\t"
    " mov    %1, %%o3\n\t"
    " ldx    [%2], %%o2\n\t"
    " casx   [%2], %%o2, %%o3\n\t"
    " cmp    %%o2, %%o3\n\t"
    " bne    %%xcc, 1b\n\t"
    "  nop\n\t"
    " mov    %%o2, %0\n\t"
    : "=r" (rv)
    : "r" (exchange_value), "r" (dest)
    : "memory", "o2", "o3");
#else
  __asm__ volatile(
    "swap    [%2],%1\n\t"
    : "=r" (rv)
    : "0" (exchange_value) /* we use same register as for return value */, "r" (dest)
    : "memory");
#endif // _LP64
  return rv;
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
}

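// Compare-and-swap of a jint: cas does the whole operation in one instruction.
// rv enters holding exchange_value and leaves holding the previous contents of
// dest, which equals compare_value exactly when the swap succeeded.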
inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value, cmpxchg_memory_order order) {
  jint rv;
  __asm__ volatile(
    " cas    [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
}

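// Compare-and-swap of a jlong: a single casx on 64-bit builds. On 32-bit
// builds the exchange and compare values arrive as pairs of 32-bit words, so
// the asm first assembles each pair into one 64-bit register (high word
// shifted up, low word zero-extended, then or-ed together), performs casx,
// and finally splits the returned 64-bit old value back into two words.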
inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value, cmpxchg_memory_order order) {
#ifdef _LP64
  jlong rv;
  __asm__ volatile(
    " casx   [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
  return rv;
#else
  volatile jlong_accessor evl, cvl, rv;
  evl.long_value = exchange_value;
  cvl.long_value = compare_value;

  __asm__ volatile(
    " sllx   %2, 32, %2\n\t"
    " srl    %3, 0,  %3\n\t"
    " or     %2, %3, %2\n\t"
    " sllx   %5, 32, %5\n\t"
    " srl    %6, 0,  %6\n\t"
    " or     %5, %6, %5\n\t"
    " casx   [%4], %5, %2\n\t"
    " srl    %2, 0, %1\n\t"
    " srlx   %2, 32, %0\n\t"
    : "=r" (rv.words[0]), "=r" (rv.words[1])
    : "r"  (evl.words[0]), "r" (evl.words[1]), "r" (dest), "r" (cvl.words[0]), "r" (cvl.words[1])
    : "memory");

  return rv.long_value;
#endif
}

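// Pointer-sized compare-and-swap: casx on 64-bit builds, cas on 32-bit builds.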
inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  intptr_t rv;
#ifdef _LP64
  __asm__ volatile(
    " casx    [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
#else
  __asm__ volatile(
    " cas     [%2], %3, %0"
    : "=r" (rv)
    : "0" (exchange_value), "r" (dest), "r" (compare_value)
    : "memory");
#endif // _LP64
  return rv;
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
  return (void*)cmpxchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest, (intptr_t)compare_value, order);
}

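// Illustrative sketch (not part of the original sources): the cmpxchg
// primitives above are typically combined into a load/compute/compare-and-swap
// retry loop, assuming the memory-order parameter has a default supplied by
// the declaration in atomic.hpp, e.g.:
//
//   volatile jint* counter = ...;
//   jint old_value, new_value;
//   do {
//     old_value = *counter;
//     new_value = old_value + 2;   // some arbitrary update
//   } while (Atomic::cmpxchg(new_value, counter, old_value) != old_value);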
#endif // OS_CPU_LINUX_SPARC_VM_ATOMIC_LINUX_SPARC_INLINE_HPP