/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * VM - Hardware Address Translation management.
 *
 * This file describes the contents of the sun reference mmu (sfmmu)
 * specific hat data structures and the sfmmu specific hat procedures.
 * The machine independent interface is described in <vm/hat.h>.
 */

#ifndef _VM_MACH_SFMMU_H
#define	_VM_MACH_SFMMU_H

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#include <sys/x_call.h>
#include <sys/cheetahregs.h>
#include <sys/spitregs.h>
#include <sys/opl_olympus_regs.h>
#include <sys/mmu.h>

#ifdef	__cplusplus
extern "C" {
#endif

/*
 * On sun4u platforms, user TSBs are accessed via virtual address by default.
 * Platforms that support ASI_SCRATCHPAD registers can define UTSB_PHYS in the
 * platform Makefile to access user TSBs via physical address but must also
 * designate one ASI_SCRATCHPAD register to hold the second user TSB.  To
 * designate the user TSB scratchpad register, platforms must provide a
 * definition for SCRATCHPAD_UTSBREG2 below.
 *
 * Platforms that use UTSB_PHYS do not allocate 2 locked TLB entries to access
 * the user TSBs.
 */
#if defined(UTSB_PHYS)

#if defined(_OPL)
#define	SCRATCHPAD_UTSBREG2	OPL_SCRATCHPAD_UTSBREG4 /* 4M-256M pages */
#define	SCRATCHPAD_UTSBREG3	OPL_SCRATCHPAD_UTSBREG5 /* 8K-512K pages */
#define	SCRATCHPAD_UTSBREG4	OPL_SCRATCHPAD_UTSBREG6 /* 4M-256M pages */
#else
#error "Compiling UTSB_PHYS but no SCRATCHPAD_UTSBREG2 specified"
#endif /* _OPL */

#endif /* UTSB_PHYS */


#ifdef _ASM

/*
 * This macro is used to set the private/shared secondary context register
 * in sfmmu_alloc_ctx().
 * If is_shctx = 0, we set SCONTEXT to cnum and invalidate the
 * SHARED_CONTEXT register.  If is_shctx = 1, only the SHARED_CONTEXT
 * register is set.
 *  (See additional comments in sfmmu_alloc_ctx)
 * Input:
 * cnum     = context number to load
 * is_shctx = sfmmu private/shared flag (0: private, 1: shared)
 * tmp1 :    %o4 scratch
 * tmp2 :    %o5 scratch
 * label:    used as local branch targets
 */
#define	SET_SECCTX(cnum, is_shctx, tmp1, tmp2, label)	   \
	/* BEGIN CSTYLED */				   \
	brnz,pn is_shctx, label/**/1			  ;\
	  sethi   %hi(FLUSH_ADDR), tmp2			  ;\
	mov     MMU_SCONTEXT, tmp1			  ;\
	stxa    cnum, [tmp1]ASI_MMU_CTX			  ;\
	flush   tmp2					  ;\
	sethi   %hi(shctx_on), tmp1			  ;\
	ld      [tmp1 + %lo(shctx_on)], tmp1		  ;\
	brz,pt  tmp1, label/**/3			  ;\
	mov    %g0, cnum				  ;\
	ba,pt    %xcc, label/**/2			  ;\
label/**/1:						  ;\
	set     SHCTXREG_VALID_BIT, tmp1		  ;\
	sllx    cnum, CTXREG_CTX_SHIFT, cnum		  ;\
	srlx    cnum, CTXREG_CTX_SHIFT, cnum		  ;\
	or      cnum, tmp1, cnum			  ;\
	mov     cnum, tmp1				  ;\
	sllx    cnum, 32, cnum				  ;\
	or      cnum, tmp1, cnum			  ;\
label/**/2:					          ;\
	mov     MMU_SHARED_CONTEXT, tmp1		  ;\
	stxa    cnum, [tmp1]ASI_MMU_CTX			  ;\
	flush   tmp2					  ;\
label/**/3:
	/* END CSTYLED */

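/*
 * Usage sketch (illustrative only; the register choices below are
 * hypothetical, following the %o4/%o5 scratch convention noted above):
 *
 *	mov	42, %o0			! cnum = private context 42
 *	mov	%g0, %o1		! is_shctx = 0 (private)
 *	SET_SECCTX(%o0, %o1, %o4, %o5, ctxset)
 */
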
/*
 * This macro is used in the MMU code to check if TL should be lowered from
 * 2 to 1 to pop trapstat's state.  See the block comment in trapstat.c
 * for details.
 */

#define	TSTAT_CHECK_TL1(label, scr1, scr2)			\
	rdpr	%tpc, scr1;					\
	sethi	%hi(KERNELBASE), scr2;				\
	or	scr2, %lo(KERNELBASE), scr2;			\
	cmp	scr1, scr2;					\
	bgeu	%xcc, 9f;					\
	    nop;						\
	ba	label;						\
	wrpr	%g0, 1, %tl;					\
9:

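/*
 * In other words (a reading of the code above, see trapstat.c for the
 * authoritative description): a trapping %tpc below KERNELBASE means the
 * trap came from trapstat's interposing trap table, so TL is lowered from
 * 2 to 1 (in the branch delay slot) before jumping to label; otherwise
 * control falls through at 9:.
 */
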

/*
 * The following macros allow us to share the majority of the
 * SFMMU code between sun4u and sun4v platforms.
 */

#define	SETUP_TSB_ASI(qlp, tmp)					\
	movrz	qlp, ASI_N, tmp;				\
	movrnz	qlp, ASI_MEM, tmp;				\
	mov	tmp, %asi

/*
 * Macro to switch to the alternate globals on sun4u platforms
 * (not applicable to sun4v platforms)
 */
#define	USE_ALTERNATE_GLOBALS(scr)				\
	rdpr	%pstate, scr;					\
	wrpr	scr, PSTATE_MG | PSTATE_AG, %pstate

/*
 * Macro to set %gl register value on sun4v platforms
 * (not applicable to sun4u platforms)
 */
#define	SET_GL_REG(val)

/*
 * Get MMU data tag access register value
 *
 * In:
 *   tagacc, scr1 = scratch registers
 * Out:
 *   tagacc = MMU data tag access register value
 */
#define	GET_MMU_D_TAGACC(tagacc, scr1)				\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, tagacc

/*
 * Get MMU data tag target register
 *
 * In:
 *   ttarget, scr1 = scratch registers
 * Out:
 *   ttarget = MMU data tag target register value
 */
#define	GET_MMU_D_TTARGET(ttarget, scr1)			\
	ldxa	[%g0]ASI_DMMU, ttarget

/*
 * Get MMU data/instruction tag access register values
 *
 * In:
 *   dtagacc, itagacc, scr1, scr2 = scratch registers
 * Out:
 *   dtagacc = MMU data tag access register value
 *   itagacc = MMU instruction tag access register value
 */
#define	GET_MMU_BOTH_TAGACC(dtagacc, itagacc, scr1, scr2)	\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, dtagacc;			\
	ldxa	[scr1]ASI_IMMU, itagacc

/*
 * Get MMU data fault address from the tag access register
 *
 * In:
 *   daddr, scr1 = scratch registers
 * Out:
 *   daddr = MMU data fault address
 */
#define	GET_MMU_D_ADDR(daddr, scr1)				\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, daddr;				\
	set	TAGACC_CTX_MASK, scr1;				\
	andn	daddr, scr1, daddr

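/*
 * For reference, the tag access register packs the faulting VA and the
 * context number together, so masking off TAGACC_CTX_MASK recovers the
 * page-aligned fault address.  A sketch with made-up values:
 *
 *	tagacc = VA[63:13] | ctx[12:0]
 *	e.g. tagacc 0x10002005 -> daddr 0x10002000, ctx 5
 */
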

/*
 * Load ITLB entry
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers (not used)
 */
#define	ITLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	stxa	tte, [%g0]ASI_ITLB_IN

/*
 * Load DTLB entry
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers (not used)
 */
#define	DTLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	stxa	tte, [%g0]ASI_DTLB_IN

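/*
 * Both macros assume the MMU tag access register has already been set up
 * with the VA and context of the mapping; the stxa to ASI_[ID]TLB_IN then
 * lets the hardware pick a replacement TLB slot for the TTE, e.g.:
 *
 *	DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)	! %g5 holds the TTE
 */
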

/*
 * Returns PFN given the TTE and vaddr
 *
 * In:
 *   tte = reg containing tte
 *   vaddr = reg containing vaddr
 *   label = temporary label
 *   scr1, scr2, scr3 = scratch registers
 * Out:
 *   tte = PFN value
 */
#define	TTETOPFN(tte, vaddr, label, scr1, scr2, scr3)			\
	srlx	tte, TTE_SZ_SHFT, scr1;					\
	and	scr1, TTE_SZ_BITS, scr1;	/* scr1 = tte_size */	\
	srlx	tte, TTE_SZ2_SHFT, scr3;				\
	and	scr3, TTE_SZ2_BITS, scr3;	/* scr3 = tte_size2 */	\
	or	scr1, scr3, scr1;					\
	sllx	scr1, 1, scr2;						\
	add	scr2, scr1, scr2;		/* mulx 3 */		\
	sllx	tte, TTE_PA_LSHIFT, tte;				\
	add	scr2, MMU_PAGESHIFT + TTE_PA_LSHIFT, scr3;		\
	/* BEGIN CSTYLED */						\
	brz,pt	scr2, label/**/1;					\
	  srlx	tte, scr3, tte;						\
	/* END CSTYLED */						\
	sllx	tte, scr2, tte;						\
	set	1, scr1;						\
	add	scr2, MMU_PAGESHIFT, scr3;				\
	sllx	scr1, scr3, scr1;					\
	sub	scr1, 1, scr1;	/* scr1 = TTE_PAGE_OFFSET(ttesz) */	\
	and	vaddr, scr1, scr2;					\
	srln	scr2, MMU_PAGESHIFT, scr2;				\
	or	tte, scr2, tte;						\
	/* CSTYLED */							\
label/**/1:


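/*
 * The size arithmetic above computes the page shift as
 * MMU_PAGESHIFT + 3 * ttesz, since each successive page size is 8x the
 * previous one (8K, 64K, 512K, 4M, ...).  A worked example, assuming the
 * standard 8K base page: for a 4M TTE, ttesz = 3, so the extra shift is
 * 9 and the page offset mask covers bits 21:0; the low bits of the PFN
 * are then filled in from vaddr bits 21:13.
 */
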

/*
 * TTE_SET_REF_ML is a macro that updates the reference bit if it is
 * not already set.
 *
 * Parameters:
 * tte      = reg containing tte
 * ttepa    = physical pointer to tte
 * tteva    = virtual ptr to tte
 * tsbarea  = tsb miss area
 * tmp1     = tmp reg
 * label    = temporary label
 */

#define	TTE_SET_REF_ML(tte, ttepa, tteva, tsbarea, tmp1, label)		\
	/* BEGIN CSTYLED */						\
	/* check reference bit */					\
	andcc	tte, TTE_REF_INT, %g0;					\
	bnz,pt	%xcc, label/**/4;	/* if ref bit set-skip ahead */	\
	  nop;								\
	GET_CPU_IMPL(tmp1);						\
	cmp	tmp1, SPITFIRE_IMPL;					\
	blt	%icc, label/**/2;	/* skip flush if FJ-OPL cpus */	\
	cmp	tmp1, CHEETAH_IMPL;					\
	bl,a	%icc, label/**/1;					\
	/* update reference bit */					\
	lduh	[tsbarea + TSBMISS_DMASK], tmp1;			\
	stxa	%g0, [ttepa]ASI_DC_INVAL; /* flush line from dcache */	\
	membar	#Sync;							\
	ba	label/**/2;						\
label/**/1:								\
	and	tteva, tmp1, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line from dcache */	\
	membar	#Sync;							\
label/**/2:								\
	or	tte, TTE_REF_INT, tmp1;					\
	casxa	[ttepa]ASI_MEM, tte, tmp1; 	/* update ref bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label/**/2;					\
	ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_REF_INT, tte;					\
label/**/4:								\
	/* END CSTYLED */


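/*
 * The casxa loop above is logically equivalent to this C sketch
 * (illustrative only; the real update must go through the physical
 * address to stay coherent with other CPUs and the MMU):
 *
 *	uint64_t old, new;
 *	do {
 *		old = *ttepa;
 *		new = old | TTE_REF_INT;
 *	} while (casxa(ttepa, old, new) != old);
 */
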

/*
 * TTE_SET_REFMOD_ML is a macro that updates the reference and modify bits
 * if they are not already set.
 *
 * Parameters:
 * tte      = reg containing tte
 * ttepa    = physical pointer to tte
 * tteva    = virtual ptr to tte
 * tsbarea  = tsb miss area
 * tmp1     = tmp reg
 * label    = temporary label
 * exitlabel = label to jump to if the write permission bit is not set
 */

#define	TTE_SET_REFMOD_ML(tte, ttepa, tteva, tsbarea, tmp1, label,	\
	exitlabel)							\
	/* BEGIN CSTYLED */						\
	/* check write permission bit */				\
	andcc	tte, TTE_WRPRM_INT, %g0;				\
	bz,pn	%xcc, exitlabel;	/* exit if wr_perm not set */	\
	  nop;								\
	andcc	tte, TTE_HWWR_INT, %g0;					\
	bnz,pn	%xcc, label/**/4;	/* nothing to do */		\
	  nop;								\
	GET_CPU_IMPL(tmp1);						\
	cmp	tmp1, SPITFIRE_IMPL;					\
	blt	%icc, label/**/2;	/* skip flush if FJ-OPL cpus */	\
	cmp	tmp1, CHEETAH_IMPL;					\
	bl,a	%icc, label/**/1;					\
	/* update reference bit */					\
	lduh	[tsbarea + TSBMISS_DMASK], tmp1;			\
	stxa	%g0, [ttepa]ASI_DC_INVAL; /* flush line from dcache */	\
	membar	#Sync;							\
	ba	label/**/2;						\
label/**/1:								\
	and	tteva, tmp1, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line from dcache */	\
	membar	#Sync;							\
label/**/2:								\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tmp1;			\
	casxa	[ttepa]ASI_MEM, tte, tmp1; /* update ref/mod bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label/**/2;					\
	  ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tte;			\
label/**/4:								\
	/* END CSTYLED */


#ifndef UTSB_PHYS

/*
 * Synthesize TSB base register contents for a process with
 * a single TSB.
 *
 * We patch the virtual address mask in at runtime since the
 * number of significant virtual address bits in the TSB VA
 * can vary depending upon the TSB slab size being used on the
 * machine.
 *
 * In:
 *   tsbinfo = TSB info pointer (ro)
 *   vabase = value of utsb_vabase (ro)
 * Out:
 *   tsbreg = value to program into TSB base register
 */

#define	MAKE_TSBREG(tsbreg, tsbinfo, vabase, tmp1, tmp2, label)		\
	/* BEGIN CSTYLED */						\
	ldx	[tsbinfo + TSBINFO_VADDR], tmp1;			\
	.global	label/**/_tsbreg_vamask					;\
label/**/_tsbreg_vamask:						\
	or	%g0, RUNTIME_PATCH, tsbreg;				\
	lduh	[tsbinfo + TSBINFO_SZCODE], tmp2;			\
	sllx	tsbreg, TSBREG_VAMASK_SHIFT, tsbreg;			\
	or	vabase, tmp2, tmp2;					\
	and	tmp1, tsbreg, tsbreg;					\
	or	tsbreg, tmp2, tsbreg;					\
	/* END CSTYLED */


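/*
 * The resulting single-TSB register value is, roughly (see the layout
 * note at LOAD_TSBREG below):
 *
 *	tsbreg = utsb_vabase | (tsb_va & vamask) | szcode
 *
 * where vamask is the runtime-patched virtual address mask.
 */
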
/*
 * Synthesize TSB base register contents for a process with
 * two TSBs.  See hat_sfmmu.h for the layout of the TSB base
 * register in this case.
 *
 * In:
 *   tsb1 = pointer to first TSB info (ro)
 *   tsb2 = pointer to second TSB info (ro)
 * Out:
 *   tsbreg = value to program into TSB base register
 */
#define	MAKE_TSBREG_SECTSB(tsbreg, tsb1, tsb2, tmp1, tmp2, tmp3, label)	\
	/* BEGIN CSTYLED */						\
	set	TSBREG_MSB_CONST, tmp3					;\
	sllx	tmp3, TSBREG_MSB_SHIFT, tsbreg				;\
	.global	label/**/_tsbreg_vamask					;\
label/**/_tsbreg_vamask:						;\
	or	%g0, RUNTIME_PATCH, tmp3				;\
	sll	tmp3, TSBREG_VAMASK_SHIFT, tmp3				;\
	ldx	[tsb1 + TSBINFO_VADDR], tmp1				;\
	ldx	[tsb2 + TSBINFO_VADDR], tmp2				;\
	and	tmp1, tmp3, tmp1					;\
	and	tmp2, tmp3, tmp2					;\
	sllx	tmp2, TSBREG_SECTSB_MKSHIFT, tmp2			;\
	or	tmp1, tmp2, tmp3					;\
	or	tsbreg, tmp3, tsbreg					;\
	lduh	[tsb1 + TSBINFO_SZCODE], tmp1				;\
	lduh	[tsb2 + TSBINFO_SZCODE], tmp2				;\
	and	tmp1, TSB_SOFTSZ_MASK, tmp1				;\
	and	tmp2, TSB_SOFTSZ_MASK, tmp2				;\
	sllx	tmp2, TSBREG_SECSZ_SHIFT, tmp2				;\
	or	tmp1, tmp2, tmp3					;\
	or	tsbreg, tmp3, tsbreg					;\
	/* END CSTYLED */


/*
 * Load the locked TSB TLB entry.
 *
 * In:
 *   tsbinfo = tsb_info pointer as va (ro)
 *   tteidx = shifted index into TLB to load the locked entry (ro)
 *   va = virtual address at which to load the locked TSB entry (ro)
 * Out:
 * Scratch:
 *   tmp
 */
#define	LOAD_TSBTTE(tsbinfo, tteidx, va, tmp)				\
	mov	MMU_TAG_ACCESS, tmp;					\
	stxa	va, [tmp]ASI_DMMU;		/* set tag access */	\
	membar	#Sync;							\
	ldx	[tsbinfo + TSBINFO_TTE], tmp;	/* fetch locked tte */	\
	stxa	tmp, [tteidx]ASI_DTLB_ACCESS;	/* load locked tte */	\
	membar	#Sync


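/*
 * Unlike ITLB_STUFF/DTLB_STUFF above, which write ASI_[ID]TLB_IN and let
 * the hardware choose a replacement slot, LOAD_TSBTTE writes the TTE to
 * an explicit, locked TLB index via ASI_DTLB_ACCESS; the tag access
 * register is primed with va first so the entry is tagged with the
 * intended VA and context.
 */
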
/*
 * In the current implementation, TSBs usually come from physically
 * contiguous chunks of memory up to 4MB in size, but 8K TSBs may be
 * allocated from 8K chunks of memory under certain conditions.  To
 * prevent aliasing in the virtual address cache when the TSB slab is
 * 8K in size we must align the reserved (TL>0) TSB virtual address to
 * have the same low-order bits as the kernel (TL=0) TSB virtual address,
 * and map 8K TSBs with an 8K TTE.  In cases where the TSB reserved VA
 * range is smaller than the assumed 4M we will patch the shift at
 * runtime; otherwise we leave it alone (which is why the RUNTIME_PATCH
 * constant doesn't appear below).
 *
 * In:
 *   tsbinfo (ro)
 *   resva: reserved VA base for this TSB
 * Out:
 *   resva: corrected VA for this TSB
 */
#define	RESV_OFFSET(tsbinfo, resva, tmp1, label)			\
	/* BEGIN CSTYLED */						\
	lduh	[tsbinfo + TSBINFO_SZCODE], tmp1			;\
	brgz,pn	tmp1, label/**/9					;\
	  nop								;\
	ldx	[tsbinfo + TSBINFO_VADDR], tmp1				;\
	.global	label/**/_resv_offset					;\
label/**/_resv_offset:							;\
	sllx	tmp1, (64 - MMU_PAGESHIFT4M), tmp1			;\
	srlx	tmp1, (64 - MMU_PAGESHIFT4M), tmp1			;\
	or	tmp1, resva, resva					;\
label/**/9:								\
	/* END CSTYLED */

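/*
 * The sllx/srlx pair above simply isolates the low MMU_PAGESHIFT4M
 * (4M-offset) bits of the kernel TSB VA.  A sketch with made-up values:
 *
 *	tsb_va = 0x2a000123000
 *	offset = tsb_va & (4M - 1)	-> 0x123000
 *	resva  = resva | offset
 */
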
/*
 * Determine the pointer of the entry in the first TSB to probe given
 * the 8K TSB pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 *   tmp = scratch register
 *   label = label for hot patching of utsb_vabase
 *
 * Out: tsbe_ptr = TSB entry address
 *
 * Note: This function is patched at runtime for performance reasons.
 *	 Any changes here require sfmmu_patch_utsb to be fixed up as well.
 */

#define	GET_1ST_TSBE_PTR(tsbp8k, tsbe_ptr, tmp, label)			\
	/* BEGIN CSTYLED */						\
label/**/_get_1st_tsbe_ptr:						;\
	RUNTIME_PATCH_SETX(tsbe_ptr, tmp)				;\
	/* tsbe_ptr = contents of utsb_vabase */			;\
	/* clear upper bits leaving just bits 21:0 of TSB ptr. */	;\
	sllx	tsbp8k, TSBREG_FIRTSB_SHIFT, tmp			;\
	/* finish clear */						;\
	srlx	tmp, TSBREG_FIRTSB_SHIFT, tmp				;\
	/* or-in bits 41:22 of the VA to form the real pointer. */	;\
	or	tsbe_ptr, tmp, tsbe_ptr					\
	/* END CSTYLED */

/*
 * Determine the base address of the second TSB given the 8K TSB
 * pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 *   tmp = scratch register
 *   label = label for hot patching of utsb_vabase
 *
 * Out:
 *   tsbbase = TSB base address
 *
 * Note: This function is patched at runtime for performance reasons.
 *	 Any changes here require sfmmu_patch_utsb to be fixed up as well.
 */

#define	GET_2ND_TSB_BASE(tsbp8k, tsbbase, tmp, label)			\
	/* BEGIN CSTYLED */						\
label/**/_get_2nd_tsb_base:						;\
	RUNTIME_PATCH_SETX(tsbbase, tmp)				;\
	/* tsbbase = contents of utsb4m_vabase */			;\
	/* clear upper bits leaving just bits 21:xx of TSB addr. */	;\
	sllx	tsbp8k, TSBREG_SECTSB_LSHIFT, tmp			;\
	/* clear lower bits leaving just 21:13 in 8:0 */		;\
	srlx	tmp, (TSBREG_SECTSB_RSHIFT + MMU_PAGESHIFT), tmp	;\
	/* adjust TSB offset to bits 21:13 */				;\
	sllx	tmp, MMU_PAGESHIFT, tmp					;\
	or	tsbbase, tmp, tsbbase					;\
	/* END CSTYLED */

/*
 * Determine the size code of the second TSB given the 8K TSB
 * pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 * Out:
 *   size = TSB size code
 */

#define	GET_2ND_TSB_SIZE(tsbp8k, size)					\
	srlx	tsbp8k, TSBREG_SECSZ_SHIFT, size;			\
	and	size, TSB_SOFTSZ_MASK, size

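/*
 * Worked example (made-up register value): a size code of 2 extracted
 * here denotes a TSB 1 << 2 = 4 times the minimum (8K) TSB size, since
 * the soft size code is the log2 multiplier of the minimum TSB.
 */
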
/*
 * Get the location in the 2nd TSB of the tsbe for this fault.
 * Assumes that the second TSB only contains 4M mappings.
 *
 * In:
 *   tagacc = tag access register (clobbered)
 *   tsbp8k = contents of TSB8K pointer register (ro)
 *   tmp1, tmp2 = scratch registers
 *   label = label at which to patch in reserved TSB 4M VA range
 * Out:
 *   tsbe_ptr = pointer to the tsbe in the 2nd TSB
 */
#define	GET_2ND_TSBE_PTR(tagacc, tsbp8k, tsbe_ptr, tmp1, tmp2, label)	\
	GET_2ND_TSB_BASE(tsbp8k, tsbe_ptr, tmp2, label);		\
	/* tsbe_ptr = TSB base address, tmp2 = junk */			\
	GET_2ND_TSB_SIZE(tsbp8k, tmp1);					\
	/* tmp1 = TSB size code */					\
	GET_TSBE_POINTER(MMU_PAGESHIFT4M, tsbe_ptr, tagacc, tmp1, tmp2)


#else /* !UTSB_PHYS */


/*
 * Determine the pointer of the entry in the first TSB to probe given
 * the 8K TSB pointer register contents.
 *
 * In:
 *   tagacc = tag access register
 *   tsbe_ptr = 8K TSB pointer register
 *   tmp1, tmp2 = scratch registers
 *
 * Out: tsbe_ptr = TSB entry address
 *
 * Note: This macro is a nop since the 8K TSB pointer register
 *	 is the entry pointer and does not need to be decoded.
 *	 It is defined to allow for code sharing with sun4v.
 */

#define	GET_1ST_TSBE_PTR(tagacc, tsbe_ptr, tmp1, tmp2)

#endif /* !UTSB_PHYS */


/*
 * Load TSB base register.  In the single TSB case this register
 * contains utsb_vabase, bits 21:13 of tsbinfo->tsb_va, and the
 * TSB size code in bits 2:0.  See hat_sfmmu.h for the layout in
 * the case where we have multiple TSBs per process.
 *
 * In:
 *   tsbreg = value to load (ro)
 */
#define	LOAD_TSBREG(tsbreg, tmp1, tmp2)					\
	mov	MMU_TSB, tmp1;						\
	sethi	%hi(FLUSH_ADDR), tmp2;					\
	stxa	tsbreg, [tmp1]ASI_DMMU;		/* dtsb reg */		\
	stxa	tsbreg, [tmp1]ASI_IMMU;		/* itsb reg */		\
	flush	tmp2

#ifdef UTSB_PHYS
#define	UTSB_PROBE_ASI	ASI_QUAD_LDD_PHYS
#else
#define	UTSB_PROBE_ASI	ASI_NQUAD_LD
#endif
#define	PROBE_TSB(tsbe_ptr, tag, tsbtag, label)				\
	/* BEGIN CSTYLED */                                             \
        ldda    [tsbe_ptr]UTSB_PROBE_ASI, tsbtag                        ;\
        cmp     tsbtag, tag             /* compare tag w/ TSB */        ;\
        bne,pn  %xcc, label/**/1        /* branch if !match */          ;\
          nop                                                           \
	/* END CSTYLED */
/*
 * Probe a TSB.  On a miss, most probes continue from the end of the
 * macro; the 3rd ITSB probe instead jumps to the TSB miss handler.  On
 * a hit, DTSB probes retry the faulted instruction.  ITSB probes check
 * the execute bit on a hit and branch to exec_fault if it is not set;
 * otherwise they retry the faulted instruction.  On a hit in the second
 * ITSB, do ITLB synthesis if the synthesis bit is set.
 *
 * tsbe_ptr = precomputed TSB entry pointer (in, ro)
 * vpg_4m = 4M virtual page number for tag matching  (in, ro)
 * label = where to branch to if this is a miss (text)
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = location tag will be retrieved into from TSB (out)
 * g5 = location data(tte) will be retrieved into from TSB (out)
 *
 * In case of the first TSB probe, tsbe_ptr is %g1.  For other TSB
 * probes, move tsbe_ptr into %g1 on a hit for traptrace.
 *
 * If the probe fails and we continue from the call site, %g4-%g5 are
 * clobbered.  The 2nd ITSB probe macro will also clobber %g6 in this case.
 */
#define	PROBE_1ST_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_2ND_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        mov     tsbe_ptr, %g1       /* trace_tsbhit wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_1ST_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,pn   %icc, exec_fault                                        ;\
          nop                                                           ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                           /* retry faulted instruction */ ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_2ND_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        ldda    [tsbe_ptr]UTSB_PROBE_ASI, %g4 /* g4 = tag, g5 = data */ ;\
        cmp     %g4, vpg_4m             /* compare tag w/ TSB */        ;\
        bne,pn  %xcc, label/**/2        /* branch if !match */          ;\
          or    %g0, TTE4M, %g6                                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,a,pn %icc, label/**/1                                        ;\
          sllx  %g6, TTE_SZ_SHFT, %g6                                   ;\
        mov     tsbe_ptr, %g1         /* trap trace wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                        /* retry faulted instruction */    ;\
label/**/1:                                                             ;\
        andcc %g5, TTE_E_SYNTH_INT, %g0                                 ;\
        bz,pn   %icc, exec_fault                                        ;\
          mov   tsbe_ptr, %g1       /* trap trace wants ptr in %g1 */   ;\
        or      %g5, %g6, %g5                                           ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/2:
	/* END CSTYLED */

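/*
 * Note on the synthesis path above: when the execute bit is clear but
 * TTE_E_SYNTH_INT is set, the TTE4M size bits staged in %g6 are ORed
 * into the TTE before the ITLB load, synthesizing a 4M entry for the
 * ITLB instead of taking exec_fault.
 */
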
#ifdef UTSB_PHYS

/*
 * Updates the context field in the tag access register with the shared
 * context to force the next i/DTLB_STUFF() to load this mapping into
 * the TLB with the shared context.
 */
#define	SET_SHCTX_TAGACC(tmp1, tmp2, asi)                               \
	/* BEGIN CSTYLED */                                             \
        mov     MMU_TAG_ACCESS, tmp2                                    ;\
        ldxa    [tmp2]asi, tmp2                 /* tmp2 = VA|CTX */     ;\
        srlx    tmp2, TAGACC_SHIFT, tmp2                                ;\
        sllx    tmp2, TAGACC_SHIFT, tmp2        /* tmp2 = VA */         ;\
        mov     MMU_SHARED_CONTEXT, tmp1        /* clobber tsbe_ptr */  ;\
        ldxa    [tmp1]ASI_MMU_CTX, tmp1         /* tmp1 = shctx reg */  ;\
        sllx    tmp1, SHCTXREG_CTX_LSHIFT, tmp1                         ;\
        srlx    tmp1, SHCTXREG_CTX_LSHIFT, tmp1 /* tmp1 = SHCTX */      ;\
        or      tmp1, tmp2, tmp1                /* tmp1 = VA|SHCTX */   ;\
        mov     MMU_TAG_ACCESS, tmp2                                    ;\
        stxa    tmp1, [tmp2]asi                 /* asi = VA|SHCTX */
	/* END CSTYLED */

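/*
 * Sketch of the transformation with made-up values:
 *
 *	tag access in:   VA | private ctx, e.g. 0x10002000 | 0x5
 *	shared ctx reg:  shctx number,     e.g. 0xa
 *	tag access out:  VA | shared ctx,  i.e. 0x10002000 | 0xa
 */
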
#define	PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                       \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        mov     tsbe_ptr, %g1       /* trace_tsbhit wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        SET_SHCTX_TAGACC(%g3, %g4, ASI_DMMU)                            ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_3RD_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                       ;\
	/* END CSTYLED */

#define	PROBE_4TH_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                       ;\
	/* END CSTYLED */

#define	PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, label)                       \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,pn %icc, exec_fault                                          ;\
         mov     tsbe_ptr, %g1          /* for traptrace sake */        ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        SET_SHCTX_TAGACC(%g3, %g4, ASI_IMMU)                            ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                           /* retry faulted instruction */ ;\
label/**/1:
	/* END CSTYLED */

#define	PROBE_3RD_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, sfmmu_tsb_miss_tt)           ;\
	/* END CSTYLED */

#define	PROBE_4TH_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, label)                       ;\
	/* END CSTYLED */


/*
 * The traptype is supplied by the caller.
 *
 * If iTSB miss, store shctx into IMMU TAG ACCESS REG
 * If dTSB miss, store shctx into DMMU TAG ACCESS REG
 * Thus the [D|I]TLB_STUFF will work as expected.
 */
#define	SAVE_CTX1(traptype, tmp1, tmp2, label)                          \
	/* BEGIN CSTYLED */                                             \
        cmp     traptype, FAST_IMMU_MISS_TT                             ;\
        be,pn %icc, label/**/1                                          ;\
          nop                                                           ;\
        SET_SHCTX_TAGACC(tmp1, tmp2, ASI_DMMU)                          ;\
        membar  #Sync                                                   ;\
        ba,a    label/**/2                                              ;\
label/**/1:                                                             ;\
        SET_SHCTX_TAGACC(tmp1, tmp2, ASI_IMMU)                          ;\
        sethi   %hi(FLUSH_ADDR), tmp1                                   ;\
        flush   tmp1                                                    ;\
label/**/2:
	/* END CSTYLED */

#endif /* UTSB_PHYS */

#endif /* _ASM */

#ifdef	__cplusplus
}
#endif

#endif	/* _VM_MACH_SFMMU_H */
