1/*
2 * Copyright (c) 1999, 2016, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
26#define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
27
28#include "runtime/os.hpp"
29
30// Implementation of class atomic
31
// Plain stores (not read-modify-write).  On x86, naturally aligned loads
// and stores of at most the native word size are performed atomically by
// the hardware, so a simple assignment suffices.  These provide no
// ordering guarantees beyond the compiler's; callers needing fences must
// add them separately.
inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
43
44
// Adding a lock prefix to an instruction on MP machine
//
// Expands to asm text that tests the runtime flag bound to `mp`
// (the result of os::is_MP()): if it is zero (uniprocessor) the code
// jumps over the LOCK prefix, avoiding its cost; otherwise the LOCK
// prefix is executed, making the following instruction atomic with
// respect to other processors.  `1:` is a local numeric asm label;
// `1f` means "label 1, searching forward".  `#mp` stringizes the
// operand placeholder (e.g. %3) into the asm template.
#define LOCK_IF_MP(mp) "cmp $0, " #mp "; je 1f; lock; 1: "
47
// Atomically add add_value to *dest and return the NEW value.
// Implemented with (lock-prefixed) XADD: the asm leaves the *previous*
// value of *dest in %0, so the updated value is reconstructed as
// addend + add_value on return.  The "memory" clobber makes the asm a
// compiler-level barrier; the locked instruction supplies the hardware
// ordering.
inline jint     Atomic::add    (jint     add_value, volatile jint*     dest) {
  jint addend = add_value;
  int mp = os::is_MP();   // LOCK prefix only needed on multiprocessor
  __asm__ volatile (  LOCK_IF_MP(%3) "xaddl %0,(%2)"
                    : "=r" (addend)
                    : "0" (addend), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return addend + add_value;
}
57
// Atomically increment *dest by one ((lock-prefixed) ADDL).
// No value is returned; use add() if the result is needed.
inline void Atomic::inc    (volatile jint*     dest) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%1) "addl $1,(%0)" :
                    : "r" (dest), "r" (mp) : "cc", "memory");
}
63
64inline void Atomic::inc_ptr(volatile void*     dest) {
65  inc_ptr((volatile intptr_t*)dest);
66}
67
// Atomically decrement *dest by one ((lock-prefixed) SUBL).
// No value is returned; use add() with a negative value if needed.
inline void Atomic::dec    (volatile jint*     dest) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%1) "subl $1,(%0)" :
                    : "r" (dest), "r" (mp) : "cc", "memory");
}
73
74inline void Atomic::dec_ptr(volatile void*     dest) {
75  dec_ptr((volatile intptr_t*)dest);
76}
77
// Atomically store exchange_value into *dest and return the previous
// contents.  XCHG with a memory operand asserts the processor's lock
// semantics implicitly, so no explicit LOCK prefix (and hence no MP
// test) is required here.
inline jint     Atomic::xchg    (jint     exchange_value, volatile jint*     dest) {
  __asm__ volatile (  "xchgl (%2),%0"
                    : "=r" (exchange_value)
                    : "0" (exchange_value), "r" (dest)
                    : "memory");
  return exchange_value;
}
85
86inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
87  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
88}
89
// Tells shared code that this platform implements a native single-byte
// CAS, so no word-sized emulation is needed for jbyte cmpxchg.
#define VM_HAS_SPECIALIZED_CMPXCHG_BYTE
// Compare-and-exchange on a byte: if *dest == compare_value, store
// exchange_value; either way, return the value *dest held before the
// operation (equal to compare_value exactly when the CAS succeeded).
// "q" constrains the new value to a byte-addressable register; the
// comparand/result travel in %al via the "a" constraints, as CMPXCHG
// requires.  The `order` argument is unused: the locked instruction
// already provides full ordering on x86.
inline jbyte    Atomic::cmpxchg    (jbyte    exchange_value, volatile jbyte*    dest, jbyte    compare_value, cmpxchg_memory_order order) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)
                    : "q" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return exchange_value;
}
99
// 32-bit compare-and-exchange; same contract as the jbyte form above:
// returns the pre-operation value of *dest, which equals compare_value
// iff the exchange happened.  CMPXCHG uses %eax implicitly for the
// comparand and result ("a" constraints); `order` is unused because
// x86 locked operations are already fully ordered.
inline jint     Atomic::cmpxchg    (jint     exchange_value, volatile jint*     dest, jint     compare_value, cmpxchg_memory_order order) {
  int mp = os::is_MP();
  __asm__ volatile (LOCK_IF_MP(%4) "cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)
                    : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                    : "cc", "memory");
  return exchange_value;
}
108
109#ifdef AMD64
// On AMD64, naturally aligned 8-byte stores are atomic, so plain
// assignment is sufficient (contrast the IA-32 branch below, which must
// go through an asm stub for 64-bit accesses).
inline void Atomic::store    (jlong    store_value, jlong*    dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
112
// Atomically add add_value to the pointer-sized value at *dest and
// return the NEW value.  64-bit XADD leaves the previous value in %0;
// the new value is reconstructed as addend + add_value.
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
  intptr_t addend = add_value;
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%3) "xaddq %0,(%2)"
                        : "=r" (addend)
                        : "0" (addend), "r" (dest), "r" (mp)
                        : "cc", "memory");
  return addend + add_value;
}
122
123inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
124  return (void*)add_ptr(add_value, (volatile intptr_t*)dest);
125}
126
// Atomically increment the 64-bit value at *dest ((lock-prefixed) ADDQ).
inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%1) "addq $1,(%0)"
                        :
                        : "r" (dest), "r" (mp)
                        : "cc", "memory");
}
134
// Atomically decrement the 64-bit value at *dest ((lock-prefixed) SUBQ).
inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%1) "subq $1,(%0)"
                        :
                        : "r" (dest), "r" (mp)
                        : "cc", "memory");
}
142
// Atomically store exchange_value into the 64-bit *dest and return the
// previous contents.  XCHG with a memory operand locks implicitly, so
// no LOCK prefix or MP test is needed.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)
                        : "0" (exchange_value), "r" (dest)
                        : "memory");
  return exchange_value;
}
150
// 64-bit compare-and-exchange (CMPXCHGQ): returns the pre-operation
// value of *dest; the exchange happened iff that equals compare_value.
// `order` is unused — x86 locked operations are already fully ordered.
inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value, cmpxchg_memory_order order) {
  bool mp = os::is_MP();
  __asm__ __volatile__ (LOCK_IF_MP(%4) "cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)
                        : "r" (exchange_value), "a" (compare_value), "r" (dest), "r" (mp)
                        : "cc", "memory");
  return exchange_value;
}
159
160inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
161  return (intptr_t)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
162}
163
164inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
165  return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value, order);
166}
167
// Plain 64-bit load: naturally aligned 8-byte loads are atomic on AMD64.
inline jlong Atomic::load(volatile jlong* src) { return *src; }
169
170#else // !AMD64
171
172inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
173  return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
174}
175
176inline void*    Atomic::add_ptr(intptr_t add_value, volatile void*     dest) {
177  return (void*)Atomic::add((jint)add_value, (volatile jint*)dest);
178}
179
180
181inline void Atomic::inc_ptr(volatile intptr_t* dest) {
182  inc((volatile jint*)dest);
183}
184
185inline void Atomic::dec_ptr(volatile intptr_t* dest) {
186  dec((volatile jint*)dest);
187}
188
189inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
190  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
191}
192
// Assembly stubs for 64-bit operations that IA-32 cannot express as a
// single C-level instruction sequence.
extern "C" {
  // defined in linux_x86.s
  // 64-bit CAS; the trailing bool says whether a LOCK prefix is needed (MP).
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
  // Copies 8 bytes from *src to *dst as a single atomic 64-bit move.
  void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
}
198
// 64-bit CAS on IA-32: delegated to the _Atomic_cmpxchg_long asm stub
// (presumably CMPXCHG8B — defined in linux_x86.s; confirm there).
// Returns the pre-operation value of *dest.  `order` is unused: the
// locked instruction provides full ordering on x86.
inline jlong    Atomic::cmpxchg    (jlong    exchange_value, volatile jlong*    dest, jlong    compare_value, cmpxchg_memory_order order) {
  return _Atomic_cmpxchg_long(exchange_value, dest, compare_value, os::is_MP());
}
202
203inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t* dest, intptr_t compare_value, cmpxchg_memory_order order) {
204  return (intptr_t)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
205}
206
207inline void*    Atomic::cmpxchg_ptr(void*    exchange_value, volatile void*     dest, void*    compare_value, cmpxchg_memory_order order) {
208  return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value, order);
209}
210
// 64-bit load on IA-32.  A plain 8-byte load is not atomic on 32-bit
// x86, so the value is moved through the _Atomic_move_long stub, which
// performs a single atomic 64-bit memory access (implementation lives
// in linux_x86.s — see there for the exact instruction used).
inline jlong Atomic::load(volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

// 64-bit store on IA-32, routed through the same atomic-move stub.
// The source is the address of the by-value parameter on the stack.
inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}
224
225#endif // AMD64
226
227#endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
228