/*
 * Copyright (c) 2000-2006 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
/*
 * @OSF_COPYRIGHT@
 */
/* CMU_ENDHIST */
/*
 * Mach Operating System
 * Copyright (c) 1991,1990 Carnegie Mellon University
 * All Rights Reserved.
 *
 * Permission to use, copy, modify and distribute this software and its
 * documentation is hereby granted, provided that both the copyright
 * notice and this permission notice appear in all copies of the
 * software, derivative works or modified versions, and any portions
 * thereof, and that both notices appear in supporting documentation.
 *
 * CARNEGIE MELLON ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS"
 * CONDITION.  CARNEGIE MELLON DISCLAIMS ANY LIABILITY OF ANY KIND FOR
 * ANY DAMAGES WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE.
 *
 * Carnegie Mellon requests users of this software to return to
 *
 *  Software Distribution Coordinator  or  Software.Distribution@CS.CMU.EDU
 *  School of Computer Science
 *  Carnegie Mellon University
 *  Pittsburgh PA 15213-3890
 *
 * any improvements or extensions that they make and grant Carnegie Mellon
 * the rights to redistribute these changes.
 */

/*
 */

/*
 * Processor registers for i386 and i486.
 */
#ifndef	_I386_PROC_REG_H_
#define	_I386_PROC_REG_H_

/*
 * Model Specific Registers
 */
#define	MSR_P5_TSC		0x10	/* Time Stamp Register */
#define	MSR_P5_CESR		0x11	/* Control and Event Select Register */
#define	MSR_P5_CTR0		0x12	/* Counter #0 */
#define	MSR_P5_CTR1		0x13	/* Counter #1 */

#define	MSR_P5_CESR_PC		0x0200	/* Pin Control */
#define	MSR_P5_CESR_CC		0x01C0	/* Counter Control mask */
#define	MSR_P5_CESR_ES		0x003F	/* Event Control mask */

#define	MSR_P5_CESR_SHIFT	16		/* Shift to get Counter 1 */
#define	MSR_P5_CESR_MASK	(MSR_P5_CESR_PC|\
				 MSR_P5_CESR_CC|\
				 MSR_P5_CESR_ES) /* Mask Counter */

#define	MSR_P5_CESR_CC_CLOCK	0x0100	/* Clock Counting (otherwise Event) */
#define	MSR_P5_CESR_CC_DISABLE	0x0000	/* Disable counter */
#define	MSR_P5_CESR_CC_CPL012	0x0040	/* Count if the CPL == 0, 1, 2 */
#define	MSR_P5_CESR_CC_CPL3	0x0080	/* Count if the CPL == 3 */
#define	MSR_P5_CESR_CC_CPL	0x00C0	/* Count regardless of the CPL */

#define	MSR_P5_CESR_ES_DATA_READ       0x000000	/* Data Read */
#define	MSR_P5_CESR_ES_DATA_WRITE      0x000001	/* Data Write */
#define	MSR_P5_CESR_ES_DATA_RW	       0x101000	/* Data Read or Write */
#define	MSR_P5_CESR_ES_DATA_TLB_MISS   0x000010	/* Data TLB Miss */
#define	MSR_P5_CESR_ES_DATA_READ_MISS  0x000011	/* Data Read Miss */
#define	MSR_P5_CESR_ES_DATA_WRITE_MISS 0x000100	/* Data Write Miss */
#define	MSR_P5_CESR_ES_DATA_RW_MISS    0x101001	/* Data Read or Write Miss */
#define	MSR_P5_CESR_ES_HIT_EM	       0x000101	/* Write (hit) to M|E state */
#define	MSR_P5_CESR_ES_DATA_CACHE_WB   0x000110	/* Cache lines written back */
#define	MSR_P5_CESR_ES_EXTERNAL_SNOOP  0x000111	/* External Snoop */
#define	MSR_P5_CESR_ES_CACHE_SNOOP_HIT 0x001000	/* Data cache snoop hits */
#define	MSR_P5_CESR_ES_MEM_ACCESS_PIPE 0x001001	/* Mem. access in both pipes */
#define	MSR_P5_CESR_ES_BANK_CONFLICTS  0x001010	/* Bank conflicts */
#define	MSR_P5_CESR_ES_MISALIGNED      0x001011	/* Misaligned Memory or I/O */
#define	MSR_P5_CESR_ES_CODE_READ       0x001100	/* Code Read */
#define	MSR_P5_CESR_ES_CODE_TLB_MISS   0x001101	/* Code TLB miss */
#define	MSR_P5_CESR_ES_CODE_CACHE_MISS 0x001110	/* Code Cache miss */
#define	MSR_P5_CESR_ES_SEGMENT_LOADED  0x001111	/* Any segment reg. loaded */
#define	MSR_P5_CESR_ES_BRANCHE	       0x010010	/* Branches */
#define	MSR_P5_CESR_ES_BTB_HIT	       0x010011	/* BTB Hits */
#define	MSR_P5_CESR_ES_BRANCHE_BTB     0x010100	/* Taken branch or BTB Hit */
#define	MSR_P5_CESR_ES_PIPELINE_FLUSH  0x010101	/* Pipeline Flushes */
#define	MSR_P5_CESR_ES_INSTRUCTION     0x010110	/* Instruction executed */
#define	MSR_P5_CESR_ES_INSTRUCTION_V   0x010111	/* Inst. executed (v-pipe) */
#define	MSR_P5_CESR_ES_BUS_CYCLE       0x011000	/* Clocks while bus cycle */
#define	MSR_P5_CESR_ES_FULL_WRITE_BUF  0x011001	/* Clocks while full wrt buf. */
#define	MSR_P5_CESR_ES_DATA_MEM_READ   0x011010	/* Pipeline waiting for read */
#define	MSR_P5_CESR_ES_WRITE_EM        0x011011	/* Stall on write E|M state */
#define	MSR_P5_CESR_ES_LOCKED_CYCLE    0x011100	/* Locked bus cycles */
#define	MSR_P5_CESR_ES_IO_CYCLE	       0x011101	/* I/O Read or Write cycles */
#define	MSR_P5_CESR_ES_NON_CACHEABLE   0x011110	/* Non-cacheable Mem. read */
#define	MSR_P5_CESR_ES_AGI	       0x011111	/* Stall because of AGI */
#define	MSR_P5_CESR_ES_FLOP	       0x100010	/* Floating Point operations */
#define	MSR_P5_CESR_ES_BREAK_DR0       0x100011	/* Breakpoint matches on DR0 */
#define	MSR_P5_CESR_ES_BREAK_DR1       0x100100	/* Breakpoint matches on DR1 */
#define	MSR_P5_CESR_ES_BREAK_DR2       0x100101	/* Breakpoint matches on DR2 */
#define	MSR_P5_CESR_ES_BREAK_DR3       0x100110	/* Breakpoint matches on DR3 */
#define	MSR_P5_CESR_ES_HARDWARE_IT     0x100111	/* Hardware interrupts */

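/*
 * Illustrative sketch (not part of the original interface): a CESR value
 * for counter 0 is formed by OR-ing one CC_* privilege selector with one
 * ES_* event code; the fields for counter 1 use the same layout shifted
 * left by MSR_P5_CESR_SHIFT.  For example, counting data reads at every
 * privilege level on counter 0, assuming the wrmsr() macro defined later
 * in this header:
 *
 *	wrmsr(MSR_P5_CESR, MSR_P5_CESR_CC_CPL | MSR_P5_CESR_ES_DATA_READ, 0);
 */
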
/*
 * CR0
 */
#define	CR0_PG	0x80000000	/*	 Enable paging */
#define	CR0_CD	0x40000000	/* i486: Cache disable */
#define	CR0_NW	0x20000000	/* i486: No write-through */
#define	CR0_AM	0x00040000	/* i486: Alignment check mask */
#define	CR0_WP	0x00010000	/* i486: Write-protect kernel access */
#define	CR0_NE	0x00000020	/* i486: Handle numeric exceptions */
#define	CR0_ET	0x00000010	/*	 Extension type is 80387 */
				/*	 (not official) */
#define	CR0_TS	0x00000008	/*	 Task switch */
#define	CR0_EM	0x00000004	/*	 Emulate coprocessor */
#define	CR0_MP	0x00000002	/*	 Monitor coprocessor */
#define	CR0_PE	0x00000001	/*	 Enable protected mode */

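/*
 * Illustrative sketch: CR0 bits are normally changed with a
 * read-modify-write of the register, e.g. using the get_cr0()/set_cr0()
 * accessors defined below to enable write protection of kernel accesses:
 *
 *	set_cr0(get_cr0() | CR0_WP);
 */
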
/*
 * CR4
 */
#define CR4_VMXE 0x00002000	/* Enable VMX operation */
#define CR4_FXS 0x00000200    	/* SSE/SSE2 OS supports FXSave */
#define CR4_XMM 0x00000400    	/* SSE/SSE2 instructions supported in OS */
#define CR4_PGE 0x00000080    	/* p6:   Page Global Enable */
#define	CR4_MCE	0x00000040	/* p5:   Machine Check Exceptions */
#define CR4_PAE 0x00000020      /* p5:   Physical Address Extensions */
#define	CR4_PSE	0x00000010	/* p5:   Page Size Extensions */
#define	CR4_DE	0x00000008	/* p5:   Debugging Extensions */
#define	CR4_TSD	0x00000004	/* p5:   Time Stamp Disable */
#define	CR4_PVI	0x00000002	/* p5:   Protected-mode Virtual Interrupts */
#define	CR4_VME	0x00000001	/* p5:   Virtual-8086 Mode Extensions */

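/*
 * Illustrative sketch: CR4 features are enabled the same way, e.g.
 * turning on global pages with the get_cr4()/set_cr4() accessors below:
 *
 *	set_cr4(get_cr4() | CR4_PGE);
 */
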
#ifndef	ASSEMBLER

#include <sys/cdefs.h>
__BEGIN_DECLS

#define	set_ts() \
	set_cr0(get_cr0() | CR0_TS)

static inline unsigned int get_cr0(void)
{
	register unsigned int cr0;
	__asm__ volatile("mov %%cr0, %0" : "=r" (cr0));
	return(cr0);
}

static inline void set_cr0(unsigned int value)
{
	__asm__ volatile("mov %0, %%cr0" : : "r" (value));
}

static inline unsigned int get_cr2(void)
{
	register unsigned int cr2;
	__asm__ volatile("mov %%cr2, %0" : "=r" (cr2));
	return(cr2);
}

static inline unsigned int get_cr3(void)
{
	register unsigned int cr3;
	__asm__ volatile("mov %%cr3, %0" : "=r" (cr3));
	return(cr3);
}

static inline void set_cr3(unsigned int value)
{
	__asm__ volatile("mov %0, %%cr3" : : "r" (value));
}

static inline uint32_t get_cr4(void)
{
	uint32_t cr4;
	__asm__ volatile("mov %%cr4, %0" : "=r" (cr4));
	return(cr4);
}

static inline void set_cr4(uint32_t value)
{
	__asm__ volatile("mov %0, %%cr4" : : "r" (value));
}

static inline void clear_ts(void)
{
	__asm__ volatile("clts");
}

static inline unsigned short get_tr(void)
{
	unsigned short seg;
	__asm__ volatile("str %0" : "=rm" (seg));
	return(seg);
}

static inline void set_tr(unsigned int seg)
{
	__asm__ volatile("ltr %0" : : "rm" ((unsigned short)(seg)));
}

static inline unsigned short sldt(void)
{
	unsigned short seg;
	__asm__ volatile("sldt %0" : "=rm" (seg));
	return(seg);
}

static inline void lldt(unsigned int seg)
{
	__asm__ volatile("lldt %0" : : "rm" ((unsigned short)(seg)));
}

#ifdef MACH_KERNEL_PRIVATE
extern void flush_tlb64(void);
extern uint64_t get64_cr3(void);
extern void set64_cr3(uint64_t);
static inline void flush_tlb(void)
{
	unsigned long	cr3_temp;
	if (cpu_mode_is64bit()) {
		flush_tlb64();
		return;
	}
	__asm__ volatile("movl %%cr3, %0; movl %0, %%cr3" : "=r" (cr3_temp) :: "memory");
}
#endif	/* MACH_KERNEL_PRIVATE */

static inline void wbinvd(void)
{
	__asm__ volatile("wbinvd");
}

static inline void invlpg(unsigned long addr)
{
	__asm__  volatile("invlpg (%0)" :: "r" (addr) : "memory");
}

/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */

#define rdmsr(msr,lo,hi) \
	__asm__ volatile("rdmsr" : "=a" (lo), "=d" (hi) : "c" (msr))

#define wrmsr(msr,lo,hi) \
	__asm__ volatile("wrmsr" : : "c" (msr), "a" (lo), "d" (hi))

#define rdtsc(lo,hi) \
	__asm__ volatile("lfence; rdtsc; lfence" : "=a" (lo), "=d" (hi))

#define write_tsc(lo,hi) wrmsr(0x10, lo, hi)

#define rdpmc(counter,lo,hi) \
	__asm__ volatile("rdpmc" : "=a" (lo), "=d" (hi) : "c" (counter))

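/*
 * Illustrative sketch: the macros above write directly into their lo/hi
 * arguments, so a caller reads an MSR into two 32-bit halves and may
 * recombine them, e.g.:
 *
 *	uint32_t lo, hi;
 *	rdmsr(MSR_IA32_PLATFORM_ID, lo, hi);
 *	uint64_t value = ((uint64_t)hi << 32) | lo;
 */
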
static inline uint64_t rdmsr64(uint32_t msr)
{
	uint64_t ret;
	__asm__ volatile("rdmsr" : "=A" (ret) : "c" (msr));
	return ret;
}

static inline void wrmsr64(uint32_t msr, uint64_t val)
{
	__asm__ volatile("wrmsr" : : "c" (msr), "A" (val));
}

static inline uint64_t rdtsc64(void)
{
	uint64_t ret;
	__asm__ volatile("lfence; rdtsc; lfence" : "=A" (ret));
	return ret;
}

static inline uint64_t rdtscp64(uint32_t *aux)
{
	uint64_t ret;
	__asm__ volatile("rdtscp; mov %%ecx, %1"
				: "=A" (ret), "=m" (*aux)
				:
				: "ecx");
	return ret;
}

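/*
 * Illustrative sketch: a simple cycle-count measurement of a code region
 * using rdtsc64() (the lfences around rdtsc keep the reads from being
 * reordered across the measured code):
 *
 *	uint64_t start = rdtsc64();
 *	... code being measured ...
 *	uint64_t cycles = rdtsc64() - start;
 */
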
/*
 * rdmsr_carefully() returns 0 when the MSR has been read successfully,
 * or non-zero (1) if the MSR does not exist.
 * The implementation is in locore.s.
 */
extern int rdmsr_carefully(uint32_t msr, uint32_t *lo, uint32_t *hi);

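/*
 * Illustrative sketch: probing for an MSR that may not be implemented,
 * without taking an unhandled #GP fault:
 *
 *	uint32_t lo, hi;
 *	if (rdmsr_carefully(MSR_IA32_EBL_CR_POWERON, &lo, &hi) == 0) {
 *		... the MSR exists and lo/hi hold its value ...
 *	}
 */
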
__END_DECLS

#endif	/* ASSEMBLER */

#define MSR_IA32_P5_MC_ADDR		0
#define MSR_IA32_P5_MC_TYPE		1
#define MSR_IA32_PLATFORM_ID		0x17
#define MSR_IA32_EBL_CR_POWERON		0x2a

#define MSR_IA32_APIC_BASE		0x1b
#define MSR_IA32_APIC_BASE_BSP		(1<<8)
#define MSR_IA32_APIC_BASE_ENABLE	(1<<11)
#define MSR_IA32_APIC_BASE_BASE		(0xfffff<<12)

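/*
 * Illustrative sketch: the local APIC enable bit and base physical
 * address can be extracted from the value read with rdmsr64() above:
 *
 *	uint64_t apic_base = rdmsr64(MSR_IA32_APIC_BASE);
 *	int enabled = (apic_base & MSR_IA32_APIC_BASE_ENABLE) != 0;
 *	uint32_t base = (uint32_t)(apic_base & MSR_IA32_APIC_BASE_BASE);
 */
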
#define MSR_IA32_FEATURE_CONTROL	0x3a
#define MSR_IA32_FEATCTL_LOCK		(1<<0)
#define MSR_IA32_FEATCTL_VMXON_SMX	(1<<1)
#define MSR_IA32_FEATCTL_VMXON		(1<<2)
#define MSR_IA32_FEATCTL_CSTATE_SMI	(1<<16)

#define MSR_IA32_UCODE_WRITE		0x79
#define MSR_IA32_UCODE_REV		0x8b

#define MSR_IA32_PERFCTR0		0xc1
#define MSR_IA32_PERFCTR1		0xc2

#define MSR_PMG_CST_CONFIG_CONTROL	0xe2

#define MSR_IA32_BBL_CR_CTL		0x119

#define MSR_IA32_SYSENTER_CS		0x174
#define MSR_IA32_SYSENTER_ESP		0x175
#define MSR_IA32_SYSENTER_EIP		0x176

#define MSR_IA32_MCG_CAP		0x179
#define MSR_IA32_MCG_STATUS		0x17a
#define MSR_IA32_MCG_CTL		0x17b

#define MSR_IA32_EVNTSEL0		0x186
#define MSR_IA32_EVNTSEL1		0x187

#define MSR_IA32_PERF_STS		0x198
#define MSR_IA32_PERF_CTL		0x199

#define MSR_IA32_MISC_ENABLE		0x1a0

#define MSR_IA32_DEBUGCTLMSR		0x1d9
#define MSR_IA32_LASTBRANCHFROMIP	0x1db
#define MSR_IA32_LASTBRANCHTOIP		0x1dc
#define MSR_IA32_LASTINTFROMIP		0x1dd
#define MSR_IA32_LASTINTTOIP		0x1de

#define MSR_IA32_CR_PAT 		0x277

#define MSR_IA32_MC0_CTL		0x400
#define MSR_IA32_MC0_STATUS		0x401
#define MSR_IA32_MC0_ADDR		0x402
#define MSR_IA32_MC0_MISC		0x403

#define MSR_IA32_MTRRCAP		0xfe
#define MSR_IA32_MTRR_DEF_TYPE		0x2ff
#define MSR_IA32_MTRR_PHYSBASE(n)	(0x200 + 2*(n))
#define MSR_IA32_MTRR_PHYSMASK(n)	(0x200 + 2*(n) + 1)
#define MSR_IA32_MTRR_FIX64K_00000	0x250
#define MSR_IA32_MTRR_FIX16K_80000	0x258
#define MSR_IA32_MTRR_FIX16K_A0000	0x259
#define MSR_IA32_MTRR_FIX4K_C0000	0x268
#define MSR_IA32_MTRR_FIX4K_C8000	0x269
#define MSR_IA32_MTRR_FIX4K_D0000	0x26a
#define MSR_IA32_MTRR_FIX4K_D8000	0x26b
#define MSR_IA32_MTRR_FIX4K_E0000	0x26c
#define MSR_IA32_MTRR_FIX4K_E8000	0x26d
#define MSR_IA32_MTRR_FIX4K_F0000	0x26e
#define MSR_IA32_MTRR_FIX4K_F8000	0x26f

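/*
 * Illustrative sketch: the variable-range MTRR macros compute the MSR
 * addresses for base/mask pair n, e.g. pair 0 maps to MSRs 0x200/0x201:
 *
 *	uint64_t base = rdmsr64(MSR_IA32_MTRR_PHYSBASE(0));
 *	uint64_t mask = rdmsr64(MSR_IA32_MTRR_PHYSMASK(0));
 */
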
#define MSR_IA32_VMX_BASE		0x480
#define MSR_IA32_VMX_BASIC		MSR_IA32_VMX_BASE
#define MSR_IA32_VMXPINBASED_CTLS		MSR_IA32_VMX_BASE+1
#define MSR_IA32_PROCBASED_CTLS		MSR_IA32_VMX_BASE+2
#define MSR_IA32_VMX_EXIT_CTLS		MSR_IA32_VMX_BASE+3
#define MSR_IA32_VMX_ENTRY_CTLS		MSR_IA32_VMX_BASE+4
#define MSR_IA32_VMX_MISC		MSR_IA32_VMX_BASE+5
#define MSR_IA32_VMX_CR0_FIXED0		MSR_IA32_VMX_BASE+6
#define MSR_IA32_VMX_CR0_FIXED1		MSR_IA32_VMX_BASE+7
#define MSR_IA32_VMX_CR4_FIXED0		MSR_IA32_VMX_BASE+8
#define MSR_IA32_VMX_CR4_FIXED1		MSR_IA32_VMX_BASE+9

#define	MSR_IA32_EFER		0xC0000080
#define	MSR_IA32_EFER_SCE	0x00000001
#define	MSR_IA32_EFER_LME	0x00000100
#define	MSR_IA32_EFER_LMA	0x00000400
#define	MSR_IA32_EFER_NXE	0x00000800

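/*
 * Illustrative sketch: EFER is updated with a read-modify-write through
 * the 64-bit MSR accessors, e.g. enabling the no-execute (NX) feature:
 *
 *	wrmsr64(MSR_IA32_EFER, rdmsr64(MSR_IA32_EFER) | MSR_IA32_EFER_NXE);
 */
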
#define	MSR_IA32_STAR		0xC0000081
#define	MSR_IA32_LSTAR		0xC0000082
#define	MSR_IA32_CSTAR		0xC0000083
#define	MSR_IA32_FMASK		0xC0000084

#define MSR_IA32_FS_BASE	0xC0000100
#define MSR_IA32_GS_BASE	0xC0000101
#define MSR_IA32_KERNEL_GS_BASE	0xC0000102

#define MSR_IA32_BIOS_SIGN_ID	0x08B

#define MSR_FLEX_RATIO		0x194
#define MSR_PLATFORM_INFO	0x0ce
#define MSR_CORE_THREAD_COUNT	0x035

#endif	/* _I386_PROC_REG_H_ */