/*
 * Copyright (C) 2005, 2007, 2008  Internet Systems Consortium, Inc. ("ISC")
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
 * REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
 * AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
 * INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
 * LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
 * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
 * PERFORMANCE OF THIS SOFTWARE.
 */

/* $Id: atomic.h,v 1.10 2008/01/24 23:47:00 tbox Exp $ */

#ifndef ISC_ATOMIC_H
#define ISC_ATOMIC_H 1

#include <isc/platform.h>
#include <isc/types.h>

#ifdef ISC_PLATFORM_USEGCCASM
26258945Sroberto/*
27258945Sroberto * This routine atomically increments the value stored in 'p' by 'val', and
28258945Sroberto * returns the previous value.
29258945Sroberto */
30258945Srobertostatic __inline__ isc_int32_t
31258945Srobertoisc_atomic_xadd(isc_int32_t *p, isc_int32_t val) {
32258945Sroberto	isc_int32_t prev = val;
33258945Sroberto
34258945Sroberto	__asm__ volatile(
35258945Sroberto#ifdef ISC_PLATFORM_USETHREADS
36258945Sroberto		"lock;"
37258945Sroberto#endif
38258945Sroberto		"xadd %0, %1"
39258945Sroberto		:"=q"(prev)
40258945Sroberto		:"m"(*p), "0"(prev)
41258945Sroberto		:"memory", "cc");
42258945Sroberto
43258945Sroberto	return (prev);
44258945Sroberto}
45258945Sroberto
#ifdef ISC_PLATFORM_HAVEXADDQ
/*
 * 64-bit variant of isc_atomic_xadd(): atomically adds 'val' to '*p' and
 * returns the previous value.
 *
 * NOTE(review): as with the 32-bit version, '*p' was an input-only "m"
 * operand despite being written by xaddq; now declared read-write.
 */
static __inline__ isc_int64_t
isc_atomic_xaddq(isc_int64_t *p, isc_int64_t val) {
	isc_int64_t prev = val;

	__asm__ volatile(
#ifdef ISC_PLATFORM_USETHREADS
	    "lock;"
#endif
	    /* xaddq: *p += prev; prev = old *p. */
	    "xaddq %0, %1"
	    : "+q"(prev), "+m"(*p)
	    :
	    : "memory", "cc");

	return (prev);
}
#endif /* ISC_PLATFORM_HAVEXADDQ */
63258945Sroberto
64258945Sroberto/*
65258945Sroberto * This routine atomically stores the value 'val' in 'p'.
66258945Sroberto */
67258945Srobertostatic __inline__ void
68258945Srobertoisc_atomic_store(isc_int32_t *p, isc_int32_t val) {
69258945Sroberto	__asm__ volatile(
70258945Sroberto#ifdef ISC_PLATFORM_USETHREADS
71258945Sroberto		/*
72258945Sroberto		 * xchg should automatically lock memory, but we add it
73258945Sroberto		 * explicitly just in case (it at least doesn't harm)
74258945Sroberto		 */
75258945Sroberto		"lock;"
76258945Sroberto#endif
77258945Sroberto
78258945Sroberto		"xchgl %1, %0"
79258945Sroberto		:
80258945Sroberto		: "r"(val), "m"(*p)
81258945Sroberto		: "memory");
82258945Sroberto}
83258945Sroberto
84258945Sroberto/*
85258945Sroberto * This routine atomically replaces the value in 'p' with 'val', if the
86258945Sroberto * original value is equal to 'cmpval'.  The original value is returned in any
87258945Sroberto * case.
88258945Sroberto */
89258945Srobertostatic __inline__ isc_int32_t
90258945Srobertoisc_atomic_cmpxchg(isc_int32_t *p, isc_int32_t cmpval, isc_int32_t val) {
91258945Sroberto	__asm__ volatile(
92258945Sroberto#ifdef ISC_PLATFORM_USETHREADS
93258945Sroberto		"lock;"
94258945Sroberto#endif
95258945Sroberto		"cmpxchgl %1, %2"
96258945Sroberto		: "=a"(cmpval)
97258945Sroberto		: "r"(val), "m"(*p), "a"(cmpval)
98258945Sroberto		: "memory");
99258945Sroberto
100258945Sroberto	return (cmpval);
101258945Sroberto}
102258945Sroberto
#elif defined(ISC_PLATFORM_USESTDASM)
/*
 * The following is "generic" assembly code which implements the same
 * functionality in case the gcc extension cannot be used.  It is
 * better to avoid inlining below, since we directly refer to specific
 * positions of the stack frame, which would not actually point to the
 * intended address in the embedded mnemonic.
 */
#include <isc/util.h>		/* for 'UNUSED' macro */

/*
 * Atomically add 'val' to '*p' and return the previous value.
 *
 * This variant fetches its arguments directly from the stack frame
 * (8(%ebp) == p, 12(%ebp) == val), so it must NOT be inlined and it
 * assumes an i386 cdecl frame with %ebp as the frame pointer.
 * NOTE(review): this presumably breaks under -fomit-frame-pointer or
 * frame layouts changed by optimization — confirm the build flags used
 * with ISC_PLATFORM_USESTDASM.  The UNUSED() calls only silence
 * "unused parameter" warnings; the asm reads the parameters itself.
 */
static isc_int32_t
isc_atomic_xadd(isc_int32_t *p, isc_int32_t val) {
	UNUSED(p);
	UNUSED(val);

	__asm (
		"movl 8(%ebp), %ecx\n"	/* %ecx = p */
		"movl 12(%ebp), %edx\n"	/* %edx = val */
#ifdef ISC_PLATFORM_USETHREADS
		"lock;"
#endif
		"xadd %edx, (%ecx)\n"	/* *p += val; %edx = old *p */

		/*
		 * set the return value directly in the register so that we
		 * can avoid guessing the correct position in the stack for a
		 * local variable.
		 */
		"movl %edx, %eax"
		);
}
134258945Sroberto
/*
 * Atomically store 'val' in '*p'.
 *
 * Like isc_atomic_xadd() above, arguments are read straight from the
 * stack frame (8(%ebp) == p, 12(%ebp) == val), so this relies on a
 * cdecl frame with %ebp as frame pointer and must not be inlined.
 */
static void
isc_atomic_store(isc_int32_t *p, isc_int32_t val) {
	UNUSED(p);
	UNUSED(val);

	__asm (
		"movl 8(%ebp), %ecx\n"	/* %ecx = p */
		"movl 12(%ebp), %edx\n"	/* %edx = val */
#ifdef ISC_PLATFORM_USETHREADS
		"lock;"
#endif
		"xchgl (%ecx), %edx\n"	/* swap %edx with *p: stores val */
		);
}
149258945Sroberto
/*
 * Atomically replace '*p' with 'val' if '*p' equals 'cmpval'; the
 * original value of '*p' is returned in any case.
 *
 * Arguments are read from the stack frame (8/12/16(%ebp)), so the same
 * frame-pointer assumptions as the routines above apply; the return
 * value is produced implicitly in %eax by cmpxchgl.
 */
static isc_int32_t
isc_atomic_cmpxchg(isc_int32_t *p, isc_int32_t cmpval, isc_int32_t val) {
	UNUSED(p);
	UNUSED(cmpval);
	UNUSED(val);

	__asm (
		"movl 8(%ebp), %ecx\n"	/* %ecx = p */
		"movl 12(%ebp), %eax\n"	/* must be %eax for cmpxchgl */
		"movl 16(%ebp), %edx\n"	/* %edx = val */
#ifdef ISC_PLATFORM_USETHREADS
		"lock;"
#endif

		/*
		 * If (%ecx) == %eax then (%ecx) := %edx.
		 * %eax is set to old (%ecx), which will be the return value.
		 */
		"cmpxchgl %edx, (%ecx)"
		);
}
#else /* !ISC_PLATFORM_USEGCCASM && !ISC_PLATFORM_USESTDASM */

#error "unsupported compiler.  disable atomic ops by --disable-atomic"

#endif
#endif /* ISC_ATOMIC_H */