1/*
2 * Copyright (c) 1999, 2017, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
26#define OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
27
// Implementation of class Atomic
29
// Stores to non-volatile destinations: a plain assignment suffices, and no
// memory barrier is implied by any of these.
inline void Atomic::store    (jbyte    store_value, jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, void*     dest) { *(void**)dest = store_value; }

// Volatile-destination variants: still plain assignments.  The volatile
// qualifier keeps the compiler from caching/eliding the store; no special
// instruction is used for these store sizes on this platform.
inline void Atomic::store    (jbyte    store_value, volatile jbyte*    dest) { *dest = store_value; }
inline void Atomic::store    (jshort   store_value, volatile jshort*   dest) { *dest = store_value; }
inline void Atomic::store    (jint     store_value, volatile jint*     dest) { *dest = store_value; }
inline void Atomic::store_ptr(intptr_t store_value, volatile intptr_t* dest) { *dest = store_value; }
inline void Atomic::store_ptr(void*    store_value, volatile void*     dest) { *(void* volatile *)dest = store_value; }
41
42
// Platform hook for atomic add, selected by operand size.  Only
// fetch_and_add (returning the PRE-add value) is supplied here; the
// FetchAndAdd CRTP base -- declared in the shared Atomic code, presumably
// deriving add_and_fetch from fetch_and_add; see atomic.hpp to confirm --
// provides the rest of the interface.
template<size_t byte_size>
struct Atomic::PlatformAdd
  : Atomic::FetchAndAdd<Atomic::PlatformAdd<byte_size> >
{
  // Atomically add add_value to *dest; returns the value *dest held
  // immediately before the addition.
  template<typename I, typename D>
  D fetch_and_add(I add_value, D volatile* dest) const;
};
50
// 4-byte fetch_and_add.  LOCK XADD exchanges the register with the memory
// operand and stores their sum back to memory, so the register returns
// holding the pre-add value.
template<>
template<typename I, typename D>
inline D Atomic::PlatformAdd<4>::fetch_and_add(I add_value, D volatile* dest) const {
  STATIC_ASSERT(4 == sizeof(I));
  STATIC_ASSERT(4 == sizeof(D));
  D old_value;
  __asm__ volatile (  "lock xaddl %0,(%2)"
                    : "=r" (old_value)              // %0 out: old value of *dest
                    : "0" (add_value), "r" (dest)   // "0": same register as %0, preloaded with add_value
                    : "cc", "memory");              // XADD sets flags; "memory" = compiler barrier
  return old_value;
}
63
// Atomically increment *dest by one.  No return value and no old value is
// observed, so a locked add directly on memory is all that is needed.
inline void Atomic::inc    (volatile jint*     dest) {
  __asm__ volatile (  "lock addl $1,(%0)" :
                    : "r" (dest) : "cc", "memory");
}
68
69inline void Atomic::inc_ptr(volatile void*     dest) {
70  inc_ptr((volatile intptr_t*)dest);
71}
72
// Atomically decrement *dest by one; mirror image of Atomic::inc.
inline void Atomic::dec    (volatile jint*     dest) {
  __asm__ volatile (  "lock subl $1,(%0)" :
                    : "r" (dest) : "cc", "memory");
}
77
78inline void Atomic::dec_ptr(volatile void*     dest) {
79  dec_ptr((volatile intptr_t*)dest);
80}
81
// Atomically exchange *dest with exchange_value; returns the previous
// contents of *dest.  XCHG with a memory operand asserts the bus lock
// implicitly, so no explicit "lock" prefix is required, and it acts as a
// full memory fence.
inline jint     Atomic::xchg    (jint     exchange_value, volatile jint*     dest) {
  __asm__ volatile (  "xchgl (%2),%0"
                    : "=r" (exchange_value)              // %0 out: old value of *dest
                    : "0" (exchange_value), "r" (dest)   // "0": %0 preloaded with new value
                    : "memory");                         // no "cc": XCHG leaves flags untouched
  return exchange_value;
}
89
90inline void*    Atomic::xchg_ptr(void*    exchange_value, volatile void*     dest) {
91  return (void*)xchg_ptr((intptr_t)exchange_value, (volatile intptr_t*)dest);
92}
93
// 1-byte compare-and-exchange.  CMPXCHGB compares AL (loaded from
// compare_value) with *dest: on a match *dest is replaced with
// exchange_value, otherwise AL is loaded from *dest.  Either way EAX/AL
// finishes holding the old value of *dest, which is returned.
// The memory order argument is ignored on this platform.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<1>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(1 == sizeof(T));
  __asm__ volatile ("lock cmpxchgb %1,(%3)"
                    : "=a" (exchange_value)   // out: old *dest, delivered in AL
                    : "q" (exchange_value), "a" (compare_value), "r" (dest)   // "q": must be a byte-addressable register
                    : "cc", "memory");
  return exchange_value;
}
107
// 4-byte compare-and-exchange; same protocol as the 1-byte form, with the
// comparand in EAX and the old value of *dest returned through EAX.
// The memory order argument is ignored on this platform.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<4>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(4 == sizeof(T));
  __asm__ volatile ("lock cmpxchgl %1,(%3)"
                    : "=a" (exchange_value)   // out: old *dest, via EAX
                    : "r" (exchange_value), "a" (compare_value), "r" (dest)
                    : "cc", "memory");
  return exchange_value;
}
121
122#ifdef AMD64
// On AMD64 an aligned 64-bit store is a single instruction, so jlong stores
// are plain assignments as well (contrast the 32-bit path below, which must
// go through an assembly helper).
inline void Atomic::store    (jlong    store_value, jlong*    dest) { *dest = store_value; }
inline void Atomic::store    (jlong    store_value, volatile jlong*    dest) { *dest = store_value; }
125
// 8-byte fetch_and_add (AMD64 only): identical in structure to the 4-byte
// version, using the quad-word form of LOCK XADD.
template<>
template<typename I, typename D>
inline D Atomic::PlatformAdd<8>::fetch_and_add(I add_value, D volatile* dest) const {
  STATIC_ASSERT(8 == sizeof(I));
  STATIC_ASSERT(8 == sizeof(D));
  D old_value;
  __asm__ __volatile__ ("lock xaddq %0,(%2)"
                        : "=r" (old_value)              // %0 out: old value of *dest
                        : "0" (add_value), "r" (dest)   // "0": same register as %0, preloaded with add_value
                        : "cc", "memory");
  return old_value;
}
138
// Pointer-sized (64-bit) atomic increment for AMD64.
inline void Atomic::inc_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ ("lock addq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}
145
// Pointer-sized (64-bit) atomic decrement for AMD64.
inline void Atomic::dec_ptr(volatile intptr_t* dest) {
  __asm__ __volatile__ ("lock subq $1,(%0)"
                        :
                        : "r" (dest)
                        : "cc", "memory");
}
152
// Pointer-sized (64-bit) atomic exchange for AMD64; as with the 32-bit
// form, XCHG against memory locks implicitly and needs no "lock" prefix.
inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
  __asm__ __volatile__ ("xchgq (%2),%0"
                        : "=r" (exchange_value)              // %0 out: old value of *dest
                        : "0" (exchange_value), "r" (dest)   // "0": %0 preloaded with new value
                        : "memory");
  return exchange_value;
}
160
// 8-byte compare-and-exchange for AMD64; comparand in RAX, old value of
// *dest returned through RAX.  The memory order argument is ignored.
template<>
template<typename T>
inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
                                                T volatile* dest,
                                                T compare_value,
                                                cmpxchg_memory_order /* order */) const {
  STATIC_ASSERT(8 == sizeof(T));
  __asm__ __volatile__ ("lock cmpxchgq %1,(%3)"
                        : "=a" (exchange_value)   // out: old *dest, via RAX
                        : "r" (exchange_value), "a" (compare_value), "r" (dest)
                        : "cc", "memory");
  return exchange_value;
}
174
// An aligned 64-bit load is a single instruction on AMD64, so a plain read
// is sufficient.
inline jlong Atomic::load(const volatile jlong* src) { return *src; }
176
177#else // !AMD64
178
179inline void Atomic::inc_ptr(volatile intptr_t* dest) {
180  inc((volatile jint*)dest);
181}
182
183inline void Atomic::dec_ptr(volatile intptr_t* dest) {
184  dec((volatile jint*)dest);
185}
186
187inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* dest) {
188  return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
189}
190
extern "C" {
  // defined in linux_x86.s
  // Assembly helpers for 64-bit atomics, which 32-bit x86 cannot express
  // with ordinary loads/stores.
  // Compare-and-exchange of a jlong; argument/return protocol is adapted by
  // cmpxchg_using_helper below.
  jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong);
  // Copies *src to *dst -- presumably as one atomic 64-bit transfer; see
  // linux_x86.s to confirm the exact mechanism.
  void _Atomic_move_long(const volatile jlong* src, volatile jlong* dst);
}
196
197template<>
198template<typename T>
199inline T Atomic::PlatformCmpxchg<8>::operator()(T exchange_value,
200                                                T volatile* dest,
201                                                T compare_value,
202                                                cmpxchg_memory_order order) const {
203  STATIC_ASSERT(8 == sizeof(T));
204  return cmpxchg_using_helper<jlong>(_Atomic_cmpxchg_long, exchange_value, dest, compare_value);
205}
206
// 64-bit load on 32-bit x86: a plain load would compile to two 32-bit
// loads and could observe a torn value, so the assembly helper is used to
// move the value in one atomic transfer.
inline jlong Atomic::load(const volatile jlong* src) {
  volatile jlong dest;
  _Atomic_move_long(src, &dest);
  return dest;
}

// 64-bit store on 32-bit x86: routed through the same helper to avoid a
// torn two-instruction store.
inline void Atomic::store(jlong store_value, jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}

inline void Atomic::store(jlong store_value, volatile jlong* dest) {
  _Atomic_move_long((volatile jlong*)&store_value, dest);
}
220
221#endif // AMD64
222
223#endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_HPP
224