atomic_solaris_x86.hpp revision 11857:d0fbf661cc16
/*
 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_HPP
#define OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_HPP

#include "runtime/os.hpp"

inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }


inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }

inline void Atomic::inc    (volatile jint*     dest) { (void)add    (1, dest); }
inline void Atomic::inc_ptr(volatile intptr_t* dest) { (void)add_ptr(1, dest); }
inline void Atomic::inc_ptr(volatile void*     dest) { (void)add_ptr(1, dest); }

inline void Atomic::dec    (volatile jint*     dest) { (void)add    (-1, dest); }
inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); }
inline void Atomic::dec_ptr(volatile void*     dest) { (void)add_ptr(-1, dest); }

// For Sun Studio - implementation is in solaris_x86_[32/64].il.
// For gcc - implementation is just below.

// The lock prefix can be omitted for certain instructions on uniprocessors; to
// facilitate this, os::is_MP() is passed as an additional argument.  64-bit
// processors are assumed to be multi-threaded and/or multi-core, so the extra
// argument is unnecessary.
#ifndef _LP64
#define IS_MP_DECL() , int is_mp
#define IS_MP_ARG()  , (int) os::is_MP()
#else
#define IS_MP_DECL()
#define IS_MP_ARG()
#endif // _LP64
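
// For illustration: with the macros above, a declaration such as
//
//   jint _Atomic_add(jint add_value, volatile jint* dest IS_MP_DECL());
//
// expands on 32-bit builds to
//
//   jint _Atomic_add(jint add_value, volatile jint* dest, int is_mp);
//
// and call sites append IS_MP_ARG(), i.e. ", (int) os::is_MP()".  On _LP64
// builds both macros expand to nothing, so the extra parameter disappears.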

extern "C" {
  jint _Atomic_add(jint add_value, volatile jint* dest IS_MP_DECL());
  jint _Atomic_xchg(jint exchange_value, volatile jint* dest);
  jbyte _Atomic_cmpxchg_byte(jbyte exchange_value, volatile jbyte* dest,
                             jbyte compare_value IS_MP_DECL());
  jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest,
                       jint compare_value IS_MP_DECL());
  jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest,
                             jlong compare_value IS_MP_DECL());
}

inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  return _Atomic_add(add_value, dest IS_MP_ARG());
}

inline jint     Atomic::xchg       (jint     exchange_value, volatile jint*     dest) {
  return _Atomic_xchg(exchange_value, dest);
}

#define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
inline jbyte    Atomic::cmpxchg    (jbyte    exchange_value, volatile jbyte*    dest, jbyte    compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg_byte(exchange_value, dest, compare_value IS_MP_ARG());
}

inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg(exchange_value, dest, compare_value IS_MP_ARG());
}

inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value IS_MP_ARG());
}
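
// A typical caller uses cmpxchg in a retry loop.  The sketch below is
// illustrative only (the names are hypothetical and assume the default
// cmpxchg_memory_order declared in atomic.hpp); it is not part of this file's
// interface:
//
//   volatile jint counter = 0;
//   jint old;
//   do {
//     old = counter;
//   } while (Atomic::cmpxchg(old + 1, &counter, old) != old);
//
// cmpxchg returns the value actually found at dest, so the loop succeeds
// exactly when that value still matched the expected one.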


#ifdef AMD64
inline void Atomic::store    (jlong    store_value, jlong*             dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
extern "C" jlong _Atomic_add_long(jlong add_value, volatile jlong* dest);
extern "C" jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest);

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest);
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)_Atomic_add_long((jlong)add_value, (volatile jlong*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest);
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)_Atomic_xchg_long((jlong)exchange_value, (volatile jlong*)dest);
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  return (intptr_t)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
  return (void*)_Atomic_cmpxchg_long((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}

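// On AMD64 a naturally aligned 64-bit load is atomic, so a plain volatile
// dereference is sufficient here.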
inline jlong Atomic::load(volatile jlong* src) { return *src; }

#else // !AMD64

inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  return (intptr_t)add((jint)add_value, (volatile jint*)dest);
}

inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
  return (void*)add((jint)add_value, (volatile jint*)dest);
}

inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
}

inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
  return (void*)xchg((jint)exchange_value, (volatile jint*)dest);
}

inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}

inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
}

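// On 32-bit x86 an ordinary 64-bit load or store is not guaranteed to be
// atomic, so jlong accesses are routed through _Atomic_move_long, presumably a
// single 64-bit move implemented in the platform-specific .il/.s files
// mentioned above.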
extern "C" void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);

inline jlong Atomic::load(volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}

#endif // AMD64

#ifdef _GNU_SOURCE
// Add a lock prefix to an instruction on an MP machine
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "
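// For example, LOCK_IF_MP(%3) "xaddl %0,(%2)" yields the assembler template
//   "cmp $0, %3; je 1f; lock; 1: xaddl %0,(%2)"
// so the lock prefix is skipped at runtime when the is_mp operand is zero.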

extern "C" {
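  // xadd leaves the previous value of *dest in the addend register, so
  // addend + add_value reproduces the updated value that Atomic::add returns.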
  inline jint _Atomic_add(jint add_value, volatile jint* dest, int mp) {
    jint addend = add_value;
    __asm__ volatile (LOCK_IF_MP(%3) "xaddl %0,(%2)"
                    : "=r" (addend)
                    : "0" (addend), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return addend + add_value;
  }

#ifdef AMD64
  inline jlong _Atomic_add_long(jlong add_value, volatile jlong* dest, int mp) {
    intptr_t addend = add_value;
    __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
    return addend + add_value;
  }

  inline jlong _Atomic_xchg_long(jlong exchange_value, volatile jlong* dest) {
    __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

#endif // AMD64

  inline jint _Atomic_xchg(jint exchange_value, volatile jint* dest) {
    __asm__ __volatile__ ("xchgl (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
    return exchange_value;
  }

  inline jint _Atomic_cmpxchg(jint exchange_value, volatile jint* dest, jint compare_value, int mp) {
    __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return exchange_value;
  }


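  // cmpxchgb needs a byte-addressable register for its operand, hence the gcc
  // "q" constraint on exchange_value (%eax/%ebx/%ecx/%edx on 32-bit x86).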
  inline jbyte _Atomic_cmpxchg_byte(jbyte exchange_value, volatile jbyte* dest, jbyte compare_value, int mp) {
    __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
    return exchange_value;
  }

  // This is the interface to the atomic instruction in solaris_i486.s.
  jlong _Atomic_cmpxchg_long_gcc(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp);

  inline jlong _Atomic_cmpxchg_long(jlong exchange_value, volatile jlong* dest, jlong compare_value, int mp) {
#ifdef AMD64
    __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                        : "cc", "memory");
    return exchange_value;
#else
    return _Atomic_cmpxchg_long_gcc(exchange_value, dest, compare_value, os::is_MP());

    #if 0
    // The code below does not work, presumably because of a bug in gcc.
    // The error message says:
    //   can't find a register in class BREG while reloading asm
    // It is kept here so that _Atomic_cmpxchg_long_gcc can later be replaced
    // with inline asm along these lines:

    volatile jlong_accessor evl, cvl, rv;
    evl.long_value = exchange_value;
    cvl.long_value = compare_value;
    int mp = os::is_MP();

    __asm__ volatile ("cmp $0, %%esi\n\t"
       "je 1f \n\t"
       "lock\n\t"
       "1: cmpxchg8b (%%edi)\n\t"
       : "=a"(cvl.words[0]), "=d"(cvl.words[1])
       : "a"(cvl.words[0]), "d"(cvl.words[1]),
         "b"(evl.words[0]), "c"(evl.words[1]),
         "D"(dest), "S"(mp)
       : "cc", "memory");
    return cvl.long_value;
    #endif // if 0
#endif // AMD64
  }
}
#undef LOCK_IF_MP

#endif // _GNU_SOURCE

#endif // OS_CPU_SOLARIS_X86_VM_ATOMIC_SOLARIS_X86_HPP