/* atomic.h revision 290001 */
1/*
2 * Copyright (C) 2005, 2007, 2008  Internet Systems Consortium, Inc. ("ISC")
3 *
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
7 *
8 * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
9 * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
10 * AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
11 * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
12 * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
13 * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
14 * PERFORMANCE OF THIS SOFTWARE.
15 */
16
17/* $Id: atomic.h,v 1.10 2008/01/24 23:47:00 tbox Exp $ */
18
19#ifndef ISC_ATOMIC_H
20#define ISC_ATOMIC_H 1
21
22#include <isc/platform.h>
23#include <isc/types.h>
24
25#ifdef ISC_PLATFORM_USEGCCASM
/*
 * This routine atomically increments the value stored in 'p' by 'val', and
 * returns the previous value.
 */
static __inline__ isc_int32_t
isc_atomic_xadd(isc_int32_t *p, isc_int32_t val) {
	/* xadd swaps its operands before adding, so after the instruction
	 * the register operand ('prev') holds the pre-increment value. */
	isc_int32_t prev = val;

	__asm__ volatile(
#ifdef ISC_PLATFORM_USETHREADS
		/* "lock" makes the read-modify-write atomic across CPUs;
		 * omitted in the single-threaded build. */
		"lock;"
#endif
		"xadd %0, %1"
		/* "0"(prev) ties the input to output operand 0, and the
		 * "memory" clobber tells the compiler *p is modified. */
		:"=q"(prev)
		:"m"(*p), "0"(prev)
		:"memory", "cc");

	return (prev);
}
45
46#ifdef ISC_PLATFORM_HAVEXADDQ
/*
 * 64-bit variant of isc_atomic_xadd(): atomically increments the value
 * stored in 'p' by 'val' and returns the previous value.  Only compiled
 * when the platform provides the xaddq instruction (x86-64).
 */
static __inline__ isc_int64_t
isc_atomic_xaddq(isc_int64_t *p, isc_int64_t val) {
	/* As in the 32-bit version, xaddq leaves the old *p in 'prev'. */
	isc_int64_t prev = val;

	__asm__ volatile(
#ifdef ISC_PLATFORM_USETHREADS
	    "lock;"
#endif
	    "xaddq %0, %1"
	    :"=q"(prev)
	    :"m"(*p), "0"(prev)
	    :"memory", "cc");

	return (prev);
}
62#endif /* ISC_PLATFORM_HAVEXADDQ */
63
/*
 * This routine atomically stores the value 'val' in 'p'.
 * The previous contents of *p are discarded (xchgl's register result is
 * not read back).
 */
static __inline__ void
isc_atomic_store(isc_int32_t *p, isc_int32_t val) {
	__asm__ volatile(
#ifdef ISC_PLATFORM_USETHREADS
		/*
		 * xchg should automatically lock memory, but we add it
		 * explicitly just in case (it at least doesn't harm)
		 */
		"lock;"
#endif

		"xchgl %1, %0"
		/* No outputs; the "memory" clobber tells the compiler that
		 * *p has been written. */
		:
		: "r"(val), "m"(*p)
		: "memory");
}
83
/*
 * This routine atomically replaces the value in 'p' with 'val', if the
 * original value is equal to 'cmpval'.  The original value is returned in any
 * case.
 */
static __inline__ isc_int32_t
isc_atomic_cmpxchg(isc_int32_t *p, isc_int32_t cmpval, isc_int32_t val) {
	__asm__ volatile(
#ifdef ISC_PLATFORM_USETHREADS
		"lock;"
#endif
		"cmpxchgl %1, %2"
		/* cmpxchgl requires the comparand in %eax, hence the "a"
		 * constraints; after the instruction %eax holds the old *p,
		 * which becomes the return value. */
		: "=a"(cmpval)
		: "r"(val), "m"(*p), "a"(cmpval)
		: "memory");

	return (cmpval);
}
102
103#elif defined(ISC_PLATFORM_USESTDASM)
104/*
105 * The followings are "generic" assembly code which implements the same
106 * functionality in case the gcc extension cannot be used.  It should be
107 * better to avoid inlining below, since we directly refer to specific
108 * positions of the stack frame, which would not actually point to the
109 * intended address in the embedded mnemonic.
110 */
111#include <isc/util.h>		/* for 'UNUSED' macro */
112
static isc_int32_t
isc_atomic_xadd(isc_int32_t *p, isc_int32_t val) {
	/*
	 * The arguments are read directly off the stack frame in the asm
	 * below, so they are unused at the C level.
	 */
	UNUSED(p);
	UNUSED(val);

	__asm (
		"movl 8(%ebp), %ecx\n"	/* %ecx = p (1st arg, cdecl frame) */
		"movl 12(%ebp), %edx\n"	/* %edx = val (2nd arg) */
#ifdef ISC_PLATFORM_USETHREADS
		"lock;"
#endif
		"xadd %edx, (%ecx)\n"	/* %edx = old *p; *p += val */

		/*
		 * set the return value directly in the register so that we
		 * can avoid guessing the correct position in the stack for a
		 * local variable.
		 */
		"movl %edx, %eax"
		);
	/*
	 * No C return statement: the return value was placed in %eax above,
	 * relying on the x86 calling convention.  This is also why this
	 * function must not be inlined (see the section comment above).
	 */
}
134
static void
isc_atomic_store(isc_int32_t *p, isc_int32_t val) {
	/* Arguments are fetched from the stack frame in the asm below. */
	UNUSED(p);
	UNUSED(val);

	__asm (
		"movl 8(%ebp), %ecx\n"	/* %ecx = p (1st arg, cdecl frame) */
		"movl 12(%ebp), %edx\n"	/* %edx = val (2nd arg) */
#ifdef ISC_PLATFORM_USETHREADS
		"lock;"
#endif
		"xchgl (%ecx), %edx\n"	/* *p = val; old value discarded */
		);
}
149
static isc_int32_t
isc_atomic_cmpxchg(isc_int32_t *p, isc_int32_t cmpval, isc_int32_t val) {
	/* Arguments are fetched from the stack frame in the asm below. */
	UNUSED(p);
	UNUSED(cmpval);
	UNUSED(val);

	__asm (
		"movl 8(%ebp), %ecx\n"	/* %ecx = p (1st arg, cdecl frame) */
		"movl 12(%ebp), %eax\n"	/* must be %eax for cmpxchgl */
		"movl 16(%ebp), %edx\n"	/* %edx = val (3rd arg) */
#ifdef ISC_PLATFORM_USETHREADS
		"lock;"
#endif

		/*
		 * If (%ecx) == %eax then (%ecx) := %edx.
		 * %eax is set to old (%ecx), which will be the return value.
		 */
		"cmpxchgl %edx, (%ecx)"
		);
	/*
	 * No C return statement: the old *p is left in %eax by cmpxchgl,
	 * relying on the x86 calling convention (see the section comment
	 * above on why inlining must be avoided).
	 */
}
171#else /* !ISC_PLATFORM_USEGCCASM && !ISC_PLATFORM_USESTDASM */
172
173#error "unsupported compiler.  disable atomic ops by --disable-atomic"
174
175#endif
176#endif /* ISC_ATOMIC_H */
177