/*
 * Copyright (c) 2003-2007 Apple Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */

#include <machine/cpu_capabilities.h>


/*
 * extern void	commpage_sched_gen_inc(void);
 */
	.text
	.align  2, 0x90
	.globl	_commpage_sched_gen_inc

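/*
 * Atomically increment the _COMM_PAGE_SCHED_GEN field in each commpage
 * (32-bit and/or 64-bit) that has been populated, so that user-mode code
 * can observe that scheduler activity has occurred since it last looked.
 */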
_commpage_sched_gen_inc:
	push	%ebp
	mov	%esp,%ebp

	/* Increment 32-bit commpage field if present */
	mov	_commPagePtr32,%edx
	testl	%edx,%edx
	je	1f
	sub	$(_COMM_PAGE32_BASE_ADDRESS),%edx
	lock
	incl	_COMM_PAGE_SCHED_GEN(%edx)

	/* Increment 64-bit commpage field if present */
1:
	mov	_commPagePtr64,%edx
	testl	%edx,%edx
	je	2f
	sub	$(_COMM_PAGE32_START_ADDRESS),%edx	/* because kernel is 32-bit */
	lock
	incl	_COMM_PAGE_SCHED_GEN(%edx)
2:
	pop	%ebp
	ret

#define	CPN(routine)	_commpage_ ## routine
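
/*
 * CPN(x) expands to _commpage_<x>, the descriptor symbol each commpage
 * routine defines for itself.  The tables below are NULL-terminated lists
 * of pointers to those descriptors; the commpage population code
 * (commpage_populate() in commpage.c) walks them to copy each routine
 * into its assigned commpage address.
 */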

/* pointers to the 32-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
	.const_data
	.align	2
	.globl	_commpage_32_routines
_commpage_32_routines:
	.long	CPN(compare_and_swap32_mp)
	.long	CPN(compare_and_swap32_up)
	.long	CPN(compare_and_swap64_mp)
	.long	CPN(compare_and_swap64_up)
	.long	CPN(AtomicEnqueue)
	.long	CPN(AtomicDequeue)
	.long	CPN(memory_barrier)
	.long	CPN(memory_barrier_sse2)
	.long	CPN(atomic_add32_mp)
	.long	CPN(atomic_add32_up)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp)
	.long	CPN(spin_lock_try_up)
	.long	CPN(spin_lock_mp)
	.long	CPN(spin_lock_up)
	.long	CPN(spin_unlock)
	.long	CPN(pthread_getspecific)
	.long	CPN(gettimeofday)
	.long	CPN(sys_flush_dcache)
	.long	CPN(sys_icache_invalidate)
	.long	CPN(pthread_self)
//	.long	CPN(relinquish)
	.long	CPN(bit_test_and_set_mp)
	.long	CPN(bit_test_and_set_up)
	.long	CPN(bit_test_and_clear_mp)
	.long	CPN(bit_test_and_clear_up)
	.long	CPN(bzero_scalar)
	.long	CPN(bzero_sse2)
	.long	CPN(bzero_sse42)
	.long	CPN(bcopy_scalar)
	.long	CPN(bcopy_sse2)
	.long	CPN(bcopy_sse3x)
	.long	CPN(bcopy_sse42)
	.long	CPN(memset_pattern_sse2)
	.long	CPN(longcopy_sse3x)
	.long	CPN(nanotime)
	.long	CPN(nanotime_slow)
	.long	0


/* pointers to the 64-bit commpage routine descriptors */
/* WARNING: these must be sorted by commpage address! */
	.const_data
	.align	2
	.globl	_commpage_64_routines
_commpage_64_routines:
	.long	CPN(compare_and_swap32_mp_64)
	.long	CPN(compare_and_swap32_up_64)
	.long	CPN(compare_and_swap64_mp_64)
	.long	CPN(compare_and_swap64_up_64)
	.long	CPN(AtomicEnqueue_64)
	.long	CPN(AtomicDequeue_64)
	.long	CPN(memory_barrier_sse2)	/* same routine as 32-bit version */
	.long	CPN(atomic_add32_mp_64)
	.long	CPN(atomic_add32_up_64)
	.long	CPN(atomic_add64_mp_64)
	.long	CPN(atomic_add64_up_64)
	.long	CPN(mach_absolute_time)
	.long	CPN(spin_lock_try_mp_64)
	.long	CPN(spin_lock_try_up_64)
	.long	CPN(spin_lock_mp_64)
	.long	CPN(spin_lock_up_64)
	.long	CPN(spin_unlock_64)
	.long	CPN(pthread_getspecific_64)
	.long	CPN(gettimeofday_64)
	.long	CPN(sys_flush_dcache_64)
	.long	CPN(sys_icache_invalidate)	/* same routine as 32-bit version, just a "ret" */
	.long	CPN(pthread_self_64)
	.long	CPN(bit_test_and_set_mp_64)
	.long	CPN(bit_test_and_set_up_64)
	.long	CPN(bit_test_and_clear_mp_64)
	.long	CPN(bit_test_and_clear_up_64)
	.long	CPN(bzero_sse2_64)
	.long	CPN(bzero_sse42_64)
	.long	CPN(bcopy_sse3x_64)
	.long	CPN(bcopy_sse42_64)
	.long	CPN(memset_pattern_sse2_64)
	.long	CPN(longcopy_sse3x_64)
	.long	CPN(nanotime_64)
	.long	0
