/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * VM - Hardware Address Translation management.
 *
 * This file describes the contents of the sun reference mmu (sfmmu)
 * specific hat data structures and the sfmmu specific hat procedures.
 * The machine independent interface is described in <vm/hat.h>.
 */

#ifndef _VM_MACH_SFMMU_H
#define	_VM_MACH_SFMMU_H

#include <sys/x_call.h>
#include <sys/cheetahregs.h>
#include <sys/spitregs.h>
#include <sys/opl_olympus_regs.h>
#include <sys/mmu.h>

#ifdef	__cplusplus
extern "C" {
#endif

/*
 * On sun4u platforms, user TSBs are accessed via virtual address by default.
 * Platforms that support ASI_SCRATCHPAD registers can define UTSB_PHYS in the
 * platform Makefile to access user TSBs via physical address but must also
 * designate one ASI_SCRATCHPAD register to hold the second user TSB.  To
 * designate the user TSB scratchpad register, platforms must provide a
 * definition for SCRATCHPAD_UTSBREG2 below.
 *
 * Platforms that use UTSB_PHYS do not allocate 2 locked TLB entries to access
 * the user TSBs.
 */
#if defined(UTSB_PHYS)

#if defined(_OPL)
#define	SCRATCHPAD_UTSBREG2	OPL_SCRATCHPAD_UTSBREG4 /* 4M-256M pages */
#define	SCRATCHPAD_UTSBREG3	OPL_SCRATCHPAD_UTSBREG5 /* 8K-512K pages */
#define	SCRATCHPAD_UTSBREG4	OPL_SCRATCHPAD_UTSBREG6 /* 4M-256M pages */
#else
#error "Compiling UTSB_PHYS but no SCRATCHPAD_UTSBREG2 specified"
#endif /* _OPL */

#endif /* UTSB_PHYS */


#ifdef _ASM

/*
 * This macro is used to set the private/shared secondary context register
 * in sfmmu_alloc_ctx().
 * If is_shctx = 0, we set SCONTEXT to cnum and invalidate the
 * SHARED_CONTEXT register.  If is_shctx = 1, only the SHARED_CONTEXT
 * register is set.
 * (See additional comments in sfmmu_alloc_ctx.)
 * Input:
 * cnum     = context number
 * is_shctx = sfmmu private/shared flag (0: private, 1: shared)
 * tmp1     = scratch register (%o4)
 * tmp2     = scratch register (%o5)
 * label    = used to generate local branch targets
 */
#define	SET_SECCTX(cnum, is_shctx, tmp1, tmp2, label)	   \
	/* BEGIN CSTYLED */				   \
	brnz,pn is_shctx, label/**/1			  ;\
	  sethi   %hi(FLUSH_ADDR), tmp2			  ;\
	mov     MMU_SCONTEXT, tmp1			  ;\
	stxa    cnum, [tmp1]ASI_MMU_CTX			  ;\
	flush   tmp2					  ;\
	sethi   %hi(shctx_on), tmp1			  ;\
	ld      [tmp1 + %lo(shctx_on)], tmp1		  ;\
	brz,pt  tmp1, label/**/3			  ;\
	mov    %g0, cnum				  ;\
	ba,pt    %xcc, label/**/2			  ;\
label/**/1:						  ;\
	set     SHCTXREG_VALID_BIT, tmp1		  ;\
	sllx    cnum, CTXREG_CTX_SHIFT, cnum		  ;\
	srlx    cnum, CTXREG_CTX_SHIFT, cnum		  ;\
	or      cnum, tmp1, cnum			  ;\
	mov     cnum, tmp1				  ;\
	sllx    cnum, 32, cnum				  ;\
	or      cnum, tmp1, cnum			  ;\
label/**/2:					          ;\
	mov     MMU_SHARED_CONTEXT, tmp1		  ;\
	stxa    cnum, [tmp1]ASI_MMU_CTX			  ;\
	flush   tmp2					  ;\
label/**/3:
	/* END CSTYLED */

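/*
 * A rough C model of what SET_SECCTX writes (illustrative only, not part
 * of the build; write_asi() is a hypothetical stand-in for the stxa/flush
 * pair, and the 32-bit replication follows the SHARED_CONTEXT register
 * layout assumed by the assembly above):
 *
 *	if (!is_shctx) {
 *		write_asi(ASI_MMU_CTX, MMU_SCONTEXT, cnum);
 *		if (shctx_on)		// invalidate the shared context
 *			write_asi(ASI_MMU_CTX, MMU_SHARED_CONTEXT, 0);
 *	} else {
 *		uint64_t v = (cnum << CTXREG_CTX_SHIFT >> CTXREG_CTX_SHIFT) |
 *		    SHCTXREG_VALID_BIT;
 *		write_asi(ASI_MMU_CTX, MMU_SHARED_CONTEXT, (v << 32) | v);
 *	}
 */
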
/*
 * This macro is used in the MMU code to check if TL should be lowered from
 * 2 to 1 to pop trapstat's state.  See the block comment in trapstat.c
 * for details.
 */

#define	TSTAT_CHECK_TL1(label, scr1, scr2)			\
	rdpr	%tpc, scr1;					\
	sethi	%hi(KERNELBASE), scr2;				\
	or	scr2, %lo(KERNELBASE), scr2;			\
	cmp	scr1, scr2;					\
	bgeu	%xcc, 9f;					\
	    nop;						\
	ba	label;						\
	wrpr	%g0, 1, %tl;					\
9:

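/*
 * Illustrative C equivalent (not part of the build): branch to "label"
 * with TL lowered to 1 whenever the trapping PC is a user address, i.e.
 *
 *	if (tpc < KERNELBASE) {
 *		set_tl(1);	// hypothetical helper for "wrpr %g0, 1, %tl"
 *		goto label;
 *	}
 */
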

/*
 * The following macros allow us to share the majority of the
 * SFMMU code between sun4u and sun4v platforms.
 */

#define	SETUP_TSB_ASI(qlp, tmp)					\
	movrz	qlp, ASI_N, tmp;				\
	movrnz	qlp, ASI_MEM, tmp;				\
	mov	tmp, %asi

/*
 * Macro to switch to the alternate global registers on sun4u platforms
 * (not applicable to sun4v platforms)
 */
#define	USE_ALTERNATE_GLOBALS(scr)				\
	rdpr	%pstate, scr;					\
	wrpr	scr, PSTATE_MG | PSTATE_AG, %pstate

/*
 * Macro to set %gl register value on sun4v platforms
 * (not applicable to sun4u platforms)
 */
#define	SET_GL_REG(val)

/*
 * Get MMU data tag access register value
 *
 * In:
 *   tagacc, scr1 = scratch registers
 * Out:
 *   tagacc = MMU data tag access register value
 */
#define	GET_MMU_D_TAGACC(tagacc, scr1)				\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, tagacc

/*
 * Get MMU data tag target register
 *
 * In:
 *   ttarget, scr1 = scratch registers
 * Out:
 *   ttarget = MMU data tag target register value
 */
#define	GET_MMU_D_TTARGET(ttarget, scr1)			\
	ldxa	[%g0]ASI_DMMU, ttarget

/*
 * Get MMU data/instruction tag access register values
 *
 * In:
 *   dtagacc, itagacc, scr1, scr2 = scratch registers
 * Out:
 *   dtagacc = MMU data tag access register value
 *   itagacc = MMU instruction tag access register value
 */
#define	GET_MMU_BOTH_TAGACC(dtagacc, itagacc, scr1, scr2)	\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, dtagacc;			\
	ldxa	[scr1]ASI_IMMU, itagacc

/*
 * Get MMU data fault address from the tag access register
 *
 * In:
 *   daddr, scr1 = scratch registers
 * Out:
 *   daddr = MMU data fault address
 */
#define	GET_MMU_D_ADDR(daddr, scr1)				\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, daddr;				\
	set	TAGACC_CTX_MASK, scr1;				\
	andn	daddr, scr1, daddr


/*
 * Load ITLB entry
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers (not used)
 */
#define	ITLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	stxa	tte, [%g0]ASI_ITLB_IN

/*
 * Load DTLB entry
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers (not used)
 */
#define	DTLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	stxa	tte, [%g0]ASI_DTLB_IN


/*
 * Returns PFN given the TTE and vaddr
 *
 * In:
 *   tte = reg containing tte
 *   vaddr = reg containing vaddr
 *   label = temporary label
 *   scr1, scr2, scr3 = scratch registers
 * Out:
 *   tte = PFN value
 */
#define	TTETOPFN(tte, vaddr, label, scr1, scr2, scr3)			\
	srlx	tte, TTE_SZ_SHFT, scr1;					\
	and	scr1, TTE_SZ_BITS, scr1;	/* scr1 = tte_size */	\
	srlx	tte, TTE_SZ2_SHFT, scr3;				\
	and	scr3, TTE_SZ2_BITS, scr3;	/* scr3 = tte_size2 */	\
	or	scr1, scr3, scr1;					\
	sllx	scr1, 1, scr2;						\
	add	scr2, scr1, scr2;		/* mulx 3 */		\
	sllx	tte, TTE_PA_LSHIFT, tte;				\
	add	scr2, MMU_PAGESHIFT + TTE_PA_LSHIFT, scr3;		\
	/* BEGIN CSTYLED */						\
	brz,pt	scr2, label/**/1;					\
	  srlx	tte, scr3, tte;						\
	/* END CSTYLED */						\
	sllx	tte, scr2, tte;						\
	set	1, scr1;						\
	add	scr2, MMU_PAGESHIFT, scr3;				\
	sllx	scr1, scr3, scr1;					\
	sub	scr1, 1, scr1;	/* scr1 = TTE_PAGE_OFFSET(ttesz) */	\
	and	vaddr, scr1, scr2;					\
	srln	scr2, MMU_PAGESHIFT, scr2;				\
	or	tte, scr2, tte;						\
	/* CSTYLED */							\
label/**/1:

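/*
 * A rough C model of the PFN computation above (illustrative only, not
 * part of the build; pa_from_tte() is a hypothetical helper for the
 * sllx/srlx extraction of the PA bits).  Each increment of the size code
 * multiplies the page size by 8, hence the "mulx 3" shift arithmetic:
 *
 *	szc = ((tte >> TTE_SZ_SHFT) & TTE_SZ_BITS) |
 *	    ((tte >> TTE_SZ2_SHFT) & TTE_SZ2_BITS);
 *	pgshift = MMU_PAGESHIFT + 3 * szc;		// 8K << (3 * szc)
 *	pfn = ((pa_from_tte(tte) >> pgshift) << (3 * szc)) |
 *	    ((vaddr & ((1ULL << pgshift) - 1)) >> MMU_PAGESHIFT);
 */
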

/*
 * TTE_SET_REF_ML is a macro that updates the reference bit if it is
 * not already set.  Older sun4u platforms use the virtual address to
 * flush entries from the dcache; that address is not available here,
 * but there are only two positions in the 64K dcache where the cache
 * line can reside, so we flush both of them.
 *
 * Parameters:
 * tte      = reg containing tte
 * ttepa    = physical pointer to tte
 * tsbarea  = tsb miss area
 * tmp1     = tmp reg
 * tmp2     = tmp reg
 * label    = temporary label
 */

#define	TTE_SET_REF_ML(tte, ttepa, tsbarea, tmp1, tmp2, label)	\
	/* BEGIN CSTYLED */						\
	/* check reference bit */					\
	andcc	tte, TTE_REF_INT, %g0;					\
	bnz,pt	%xcc, label/**/4;	/* if ref bit set-skip ahead */	\
	  nop;								\
	GET_CPU_IMPL(tmp1);						\
	cmp	tmp1, SPITFIRE_IMPL;					\
	blt	%icc, label/**/2;	/* skip flush if FJ-OPL cpus */	\
	cmp	tmp1, CHEETAH_IMPL;					\
	bl,a	%icc, label/**/1;					\
	/* update reference bit */					\
	lduh	[tsbarea + TSBMISS_DMASK], tmp1;			\
	stxa	%g0, [ttepa]ASI_DC_INVAL; /* flush line from dcache */	\
	membar	#Sync;							\
	ba	label/**/2;						\
label/**/1:								\
	and	ttepa, tmp1, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line1 from dcache */	\
	or	%g0, 1, tmp2;						\
	sllx	tmp2, MMU_PAGESHIFT, tmp2;				\
	xor	tmp1, tmp2, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line2 from dcache */	\
	membar	#Sync;							\
label/**/2:								\
	or	tte, TTE_REF_INT, tmp1;					\
	casxa	[ttepa]ASI_MEM, tte, tmp1;	/* update ref bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label/**/2;					\
	  ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_REF_INT, tte;					\
label/**/4:								\
	/* END CSTYLED */

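/*
 * The update itself is the usual compare-and-swap retry loop; the same
 * pattern is reused by TTE_SET_REFMOD_ML below.  A rough C equivalent
 * (illustrative only; casxa_mem() is a hypothetical stand-in for the
 * casxa ASI_MEM instruction and returns the old memory value):
 *
 *	do {
 *		old = tte;
 *		tte = casxa_mem(ttepa, old, old | TTE_REF_INT);
 *	} while (tte != old);		// raced: reread TTE and retry
 *	tte |= TTE_REF_INT;
 */
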

/*
 * TTE_SET_REFMOD_ML is a macro that updates the reference and modify bits
 * if not already set.
 *
 * Parameters:
 * tte      = reg containing tte
 * ttepa    = physical pointer to tte
 * tsbarea  = tsb miss area
 * tmp1     = tmp reg
 * tmp2     = tmp reg
 * label    = temporary label
 * exitlabel = label to jump to if the write perm bit is not set.
 */

#define	TTE_SET_REFMOD_ML(tte, ttepa, tsbarea, tmp1, tmp2, label,	\
    exitlabel)								\
	/* BEGIN CSTYLED */						\
	/* check write permission bit */				\
	andcc	tte, TTE_WRPRM_INT, %g0;				\
	bz,pn	%xcc, exitlabel;	/* exit if wr_perm not set */	\
	  nop;								\
	andcc	tte, TTE_HWWR_INT, %g0;					\
	bnz,pn	%xcc, label/**/4;	/* nothing to do */		\
	  nop;								\
	GET_CPU_IMPL(tmp1);						\
	cmp	tmp1, SPITFIRE_IMPL;					\
	blt	%icc, label/**/2;	/* skip flush if FJ-OPL cpus */	\
	cmp	tmp1, CHEETAH_IMPL;					\
	bl,a	%icc, label/**/1;					\
	/* update reference bit */					\
	lduh	[tsbarea + TSBMISS_DMASK], tmp1;			\
	stxa	%g0, [ttepa]ASI_DC_INVAL; /* flush line from dcache */	\
	membar	#Sync;							\
	ba	label/**/2;						\
label/**/1:								\
	and	ttepa, tmp1, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line1 from dcache */	\
	or	%g0, 1, tmp2;						\
	sllx	tmp2, MMU_PAGESHIFT, tmp2;				\
	xor	tmp1, tmp2, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line2 from dcache */	\
	membar	#Sync;							\
label/**/2:								\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tmp1;			\
	casxa	[ttepa]ASI_MEM, tte, tmp1; /* update ref/mod bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label/**/2;					\
	  ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tte;			\
label/**/4:								\
	/* END CSTYLED */


#ifndef UTSB_PHYS

/*
 * Synthesize TSB base register contents for a process with
 * a single TSB.
 *
 * We patch the virtual address mask in at runtime since the
 * number of significant virtual address bits in the TSB VA
 * can vary depending upon the TSB slab size being used on the
 * machine.
 *
 * In:
 *   tsbinfo = TSB info pointer (ro)
 *   vabase = value of utsb_vabase (ro)
 * Out:
 *   tsbreg = value to program into TSB base register
 */

#define	MAKE_TSBREG(tsbreg, tsbinfo, vabase, tmp1, tmp2, label)		\
	/* BEGIN CSTYLED */						\
	ldx	[tsbinfo + TSBINFO_VADDR], tmp1;			\
	.global	label/**/_tsbreg_vamask					;\
label/**/_tsbreg_vamask:						\
	or	%g0, RUNTIME_PATCH, tsbreg;				\
	lduh	[tsbinfo + TSBINFO_SZCODE], tmp2;			\
	sllx	tsbreg, TSBREG_VAMASK_SHIFT, tsbreg;			\
	or	vabase, tmp2, tmp2;					\
	and	tmp1, tsbreg, tsbreg;					\
	or	tsbreg, tmp2, tsbreg;					\
	/* END CSTYLED */

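/*
 * A rough C model of the single-TSB synthesis (illustrative only;
 * "vamask" is the value patched over RUNTIME_PATCH at the label generated
 * above, and the variables correspond to the TSBINFO_VADDR and
 * TSBINFO_SZCODE fields loaded by the assembly):
 *
 *	tsbreg = (tsb_va & (vamask << TSBREG_VAMASK_SHIFT)) |
 *	    vabase | tsb_szcode;
 */
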

/*
 * Synthesize TSB base register contents for a process with
 * two TSBs.  See hat_sfmmu.h for the layout of the TSB base
 * register in this case.
 *
 * In:
 *   tsb1 = pointer to first TSB info (ro)
 *   tsb2 = pointer to second TSB info (ro)
 * Out:
 *   tsbreg = value to program into TSB base register
 */
#define	MAKE_TSBREG_SECTSB(tsbreg, tsb1, tsb2, tmp1, tmp2, tmp3, label)	\
	/* BEGIN CSTYLED */						\
	set	TSBREG_MSB_CONST, tmp3					;\
	sllx	tmp3, TSBREG_MSB_SHIFT, tsbreg				;\
	.global	label/**/_tsbreg_vamask					;\
label/**/_tsbreg_vamask:						;\
	or	%g0, RUNTIME_PATCH, tmp3				;\
	sll	tmp3, TSBREG_VAMASK_SHIFT, tmp3				;\
	ldx	[tsb1 + TSBINFO_VADDR], tmp1				;\
	ldx	[tsb2 + TSBINFO_VADDR], tmp2				;\
	and	tmp1, tmp3, tmp1					;\
	and	tmp2, tmp3, tmp2					;\
	sllx	tmp2, TSBREG_SECTSB_MKSHIFT, tmp2			;\
	or	tmp1, tmp2, tmp3					;\
	or	tsbreg, tmp3, tsbreg					;\
	lduh	[tsb1 + TSBINFO_SZCODE], tmp1				;\
	lduh	[tsb2 + TSBINFO_SZCODE], tmp2				;\
	and	tmp1, TSB_SOFTSZ_MASK, tmp1				;\
	and	tmp2, TSB_SOFTSZ_MASK, tmp2				;\
	sllx	tmp2, TSBREG_SECSZ_SHIFT, tmp2				;\
	or	tmp1, tmp2, tmp3					;\
	or	tsbreg, tmp3, tsbreg					;\
	/* END CSTYLED */


/*
 * Load the locked TSB TLB entry.
 *
 * In:
 *   tsbinfo = tsb_info pointer as va (ro)
 *   tteidx = shifted index into TLB to load the locked entry (ro)
 *   va = virtual address at which to load the locked TSB entry (ro)
 * Out:
 * Scratch:
 *   tmp
 */
#define	LOAD_TSBTTE(tsbinfo, tteidx, va, tmp)				\
	mov	MMU_TAG_ACCESS, tmp;					\
	stxa	va, [tmp]ASI_DMMU;		/* set tag access */	\
	membar	#Sync;							\
	ldx	[tsbinfo + TSBINFO_TTE], tmp;	/* fetch locked tte */	\
	stxa	tmp, [tteidx]ASI_DTLB_ACCESS;	/* load locked tte */	\
	membar	#Sync


/*
 * In the current implementation, TSBs usually come from physically
 * contiguous chunks of memory up to 4MB in size, but 8K TSBs may be
 * allocated from 8K chunks of memory under certain conditions.  To
 * prevent aliasing in the virtual address cache when the TSB slab is
 * 8K in size we must align the reserved (TL>0) TSB virtual address to
 * have the same low-order bits as the kernel (TL=0) TSB virtual address,
 * and map 8K TSBs with an 8K TTE.  In cases where the TSB reserved VA
 * range is smaller than the assumed 4M we will patch the shift at
 * runtime; otherwise we leave it alone (which is why the RUNTIME_PATCH
 * constant doesn't appear below).
 *
 * In:
 *   tsbinfo (ro)
 *   resva: reserved VA base for this TSB
 * Out:
 *   resva: corrected VA for this TSB
 */
#define	RESV_OFFSET(tsbinfo, resva, tmp1, label)			\
	/* BEGIN CSTYLED */						\
	lduh	[tsbinfo + TSBINFO_SZCODE], tmp1			;\
	brgz,pn	tmp1, label/**/9					;\
	  nop								;\
	ldx	[tsbinfo + TSBINFO_VADDR], tmp1				;\
	.global	label/**/_resv_offset					;\
label/**/_resv_offset:							;\
	sllx	tmp1, (64 - MMU_PAGESHIFT4M), tmp1			;\
	srlx	tmp1, (64 - MMU_PAGESHIFT4M), tmp1			;\
	or	tmp1, resva, resva					;\
label/**/9:								\
	/* END CSTYLED */

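/*
 * Illustrative C equivalent (not part of the build): only an 8K TSB
 * (size code 0) needs the adjustment, which copies the TSB VA's offset
 * within the assumed 4M slab into the reserved VA:
 *
 *	if (tsb_szcode == 0)
 *		resva |= tsb_va & (MMU_PAGESIZE4M - 1);
 */
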
/*
 * Determine the pointer of the entry in the first TSB to probe given
 * the 8K TSB pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 *   tmp = scratch register
 *   label = label for hot patching of utsb_vabase
 *
 * Out: tsbe_ptr = TSB entry address
 *
 * Note: This macro is patched at runtime for performance reasons.
 *	 Any changes here require sfmmu_patch_utsb to be fixed as well.
 */

#define	GET_1ST_TSBE_PTR(tsbp8k, tsbe_ptr, tmp, label)			\
	/* BEGIN CSTYLED */						\
label/**/_get_1st_tsbe_ptr:						;\
	RUNTIME_PATCH_SETX(tsbe_ptr, tmp)				;\
	/* tsbe_ptr = contents of utsb_vabase */			;\
	/* clear upper bits leaving just bits 21:0 of TSB ptr. */	;\
	sllx	tsbp8k, TSBREG_FIRTSB_SHIFT, tmp			;\
	/* finish clear */						;\
	srlx	tmp, TSBREG_FIRTSB_SHIFT, tmp				;\
	/* or-in bits 41:22 of the VA to form the real pointer. */	;\
	or	tsbe_ptr, tmp, tsbe_ptr					\
	/* END CSTYLED */

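/*
 * A rough C model (illustrative only; "utsb_vabase" is the value patched
 * in by RUNTIME_PATCH_SETX): the low-order TSB-pointer bits from the 8K
 * TSB pointer register are combined with the patched VA base:
 *
 *	tsbe_ptr = utsb_vabase |
 *	    (tsbp8k & ((1ULL << (64 - TSBREG_FIRTSB_SHIFT)) - 1));
 */
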
/*
 * Determine the base address of the second TSB given the 8K TSB
 * pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 *   tmp = scratch register
 *   label = label for hot patching of utsb_vabase
 *
 * Out:
 *   tsbbase = TSB base address
 *
 * Note: This macro is patched at runtime for performance reasons.
 *	 Any changes here require sfmmu_patch_utsb to be fixed as well.
 */

#define	GET_2ND_TSB_BASE(tsbp8k, tsbbase, tmp, label)			\
	/* BEGIN CSTYLED */						\
label/**/_get_2nd_tsb_base:						;\
	RUNTIME_PATCH_SETX(tsbbase, tmp)				;\
	/* tsbbase = contents of utsb4m_vabase */			;\
	/* clear upper bits leaving just bits 21:xx of TSB addr. */	;\
	sllx	tsbp8k, TSBREG_SECTSB_LSHIFT, tmp			;\
	/* clear lower bits leaving just 21:13 in 8:0 */		;\
	srlx	tmp, (TSBREG_SECTSB_RSHIFT + MMU_PAGESHIFT), tmp	;\
	/* adjust TSB offset to bits 21:13 */				;\
	sllx	tmp, MMU_PAGESHIFT, tmp					;\
	or	tsbbase, tmp, tsbbase					;\
	/* END CSTYLED */

/*
 * Determine the size code of the second TSB given the 8K TSB
 * pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 * Out:
 *   size = TSB size code
 */

#define	GET_2ND_TSB_SIZE(tsbp8k, size)					\
	srlx	tsbp8k, TSBREG_SECSZ_SHIFT, size;			\
	and	size, TSB_SOFTSZ_MASK, size

/*
 * Get the location in the 2nd TSB of the tsbe for this fault.
 * Assumes that the second TSB only contains 4M mappings.
 *
 * In:
 *   tagacc = tag access register (clobbered)
 *   tsbp8k = contents of TSB8K pointer register (ro)
 *   tmp1, tmp2 = scratch registers
 *   label = label at which to patch in reserved TSB 4M VA range
 * Out:
 *   tsbe_ptr = pointer to the tsbe in the 2nd TSB
 */
#define	GET_2ND_TSBE_PTR(tagacc, tsbp8k, tsbe_ptr, tmp1, tmp2, label)	\
	GET_2ND_TSB_BASE(tsbp8k, tsbe_ptr, tmp2, label);		\
	/* tsbe_ptr = TSB base address, tmp2 = junk */			\
	GET_2ND_TSB_SIZE(tsbp8k, tmp1);					\
	/* tmp1 = TSB size code */					\
	GET_TSBE_POINTER(MMU_PAGESHIFT4M, tsbe_ptr, tagacc, tmp1, tmp2)


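/*
 * Put together, a rough C model of the second-TSB probe pointer
 * (illustrative only; this assumes GET_TSBE_POINTER indexes an array of
 * 16-byte TSB entries, so an 8K TSB at size code 0 holds 512 entries):
 *
 *	nentries = 512 << tsb_szcode;
 *	tsbe_ptr = tsbbase +
 *	    ((tagacc >> MMU_PAGESHIFT4M) & (nentries - 1)) * 16;
 */
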
#else /* !UTSB_PHYS */


/*
 * Determine the pointer of the entry in the first TSB to probe given
 * the 8K TSB pointer register contents.
 *
 * In:
 *   tagacc = tag access register
 *   tsbe_ptr = 8K TSB pointer register
 *   tmp1, tmp2 = scratch registers
 *
 * Out: tsbe_ptr = TSB entry address
 *
 * Note: This macro is a nop since the 8K TSB pointer register
 *	 is the entry pointer and does not need to be decoded.
 *	 It is defined to allow for code sharing with sun4v.
 */

#define	GET_1ST_TSBE_PTR(tagacc, tsbe_ptr, tmp1, tmp2)

#endif /* !UTSB_PHYS */


/*
 * Load TSB base register.  In the single TSB case this register
 * contains utsb_vabase, bits 21:13 of tsbinfo->tsb_va, and the
 * TSB size code in bits 2:0.  See hat_sfmmu.h for the layout in
 * the case where we have multiple TSBs per process.
 *
 * In:
 *   tsbreg = value to load (ro)
 */
#define	LOAD_TSBREG(tsbreg, tmp1, tmp2)					\
	mov	MMU_TSB, tmp1;						\
	sethi	%hi(FLUSH_ADDR), tmp2;					\
	stxa	tsbreg, [tmp1]ASI_DMMU;		/* dtsb reg */		\
	stxa	tsbreg, [tmp1]ASI_IMMU;		/* itsb reg */		\
	flush	tmp2

#ifdef UTSB_PHYS
#define	UTSB_PROBE_ASI	ASI_QUAD_LDD_PHYS
#else
#define	UTSB_PROBE_ASI	ASI_NQUAD_LD
#endif
#define	PROBE_TSB(tsbe_ptr, tag, tsbtag, label)                         \
	/* BEGIN CSTYLED */                                             \
        ldda    [tsbe_ptr]UTSB_PROBE_ASI, tsbtag                        ;\
        cmp     tsbtag, tag             /* compare tag w/ TSB */        ;\
        bne,pn  %xcc, label/**/1        /* branch if !match */          ;\
          nop                                                           \
	/* END CSTYLED */
/*
 * Probe a TSB.  On a miss, execution continues at the end of the macro
 * for most probes; the exception is the 3rd ITSB probe, which jumps to
 * the TSB miss handler.  On a hit, DTSB probes retry the faulted
 * instruction.  ITSB probes first check the execute bit and branch to
 * exec_fault if it is not set; otherwise they retry the faulted
 * instruction.  On a hit in the second ITSB, ITLB synthesis is done if
 * the synthesis bit is set.
 *
 * tsbe_ptr = precomputed TSB entry pointer (in, ro)
 * vpg_4m = 4M virtual page number for tag matching  (in, ro)
 * label = where to branch to if this is a miss (text)
 *
 * For trapstat, we have to use these registers explicitly:
 * g4 = location tag will be retrieved into from TSB (out)
 * g5 = location data(tte) will be retrieved into from TSB (out)
 *
 * For the first TSB probe, tsbe_ptr is %g1.  For the other TSB probes,
 * tsbe_ptr is moved into %g1 on a hit for traptrace.
 *
 * If the probe fails and we continue from the call site, %g4 and %g5 are
 * clobbered; the 2nd ITSB probe macro also clobbers %g6 in this case.
 */
#define	PROBE_1ST_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_2ND_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        mov     tsbe_ptr, %g1       /* trace_tsbhit wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_1ST_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,pn   %icc, exec_fault                                        ;\
          nop                                                           ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                           /* retry faulted instruction */ ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_2ND_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        ldda    [tsbe_ptr]UTSB_PROBE_ASI, %g4 /* g4 = tag, g5 = data */ ;\
        cmp     %g4, vpg_4m             /* compare tag w/ TSB */        ;\
        bne,pn  %xcc, label/**/2        /* branch if !match */          ;\
          or    %g0, TTE4M, %g6                                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,a,pn %icc, label/**/1                                        ;\
          sllx  %g6, TTE_SZ_SHFT, %g6                                   ;\
        mov     tsbe_ptr, %g1         /* trap trace wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                        /* retry faulted instruction */    ;\
label/**/1:                                                             ;\
        andcc %g5, TTE_E_SYNTH_INT, %g0                                 ;\
        bz,pn   %icc, exec_fault                                        ;\
          mov   tsbe_ptr, %g1       /* trap trace wants ptr in %g1 */   ;\
        or      %g5, %g6, %g5                                           ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/2:
	/* END CSTYLED */

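/*
 * A rough C model of a DTSB probe (illustrative only; ldda_quad(),
 * dtlb_load(), and retry() are hypothetical stand-ins for the ldda,
 * DTLB_STUFF, and retry steps above):
 *
 *	ldda_quad(tsbe_ptr, &tsb_tag, &tsb_data);  // tag and data atomically
 *	if (tsb_tag != vpg_4m)
 *		goto miss;		// fall through to the next probe
 *	dtlb_load(tsb_data);		// DTLB_STUFF
 *	retry();			// re-execute the faulted instruction
 *
 * ITSB probes additionally test TTE_EXECPRM_INT in tsb_data and branch
 * to exec_fault when it is clear.
 */
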
#ifdef UTSB_PHYS

/*
 * Updates the context field in the tag access register with the shared
 * context to force the next I/DTLB_STUFF() to load this mapping into
 * the TLB with the shared context.
 */
#define	SET_SHCTX_TAGACC(tmp1, tmp2, asi)                               \
	/* BEGIN CSTYLED */                                             \
        mov     MMU_TAG_ACCESS, tmp2                                    ;\
        ldxa    [tmp2]asi, tmp2                 /* tmp2 = VA|CTX */     ;\
        srlx    tmp2, TAGACC_SHIFT, tmp2                                ;\
        sllx    tmp2, TAGACC_SHIFT, tmp2        /* tmp2 = VA */         ;\
        mov     MMU_SHARED_CONTEXT, tmp1        /* clobber tsbe_ptr */  ;\
        ldxa    [tmp1]ASI_MMU_CTX, tmp1         /* tmp1 = shctx reg */  ;\
        sllx    tmp1, SHCTXREG_CTX_LSHIFT, tmp1                         ;\
        srlx    tmp1, SHCTXREG_CTX_LSHIFT, tmp1 /* tmp1 = SHCTX */      ;\
        or      tmp1, tmp2, tmp1                /* tmp1 = VA|SHCTX */   ;\
        mov     MMU_TAG_ACCESS, tmp2                                    ;\
        stxa    tmp1, [tmp2]asi                 /* tag acc = VA|SHCTX */
	/* END CSTYLED */

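/*
 * Roughly, in C (illustrative only; read_asi()/write_asi() are
 * hypothetical stand-ins for the ldxa/stxa ASI accesses):
 *
 *	va = read_asi(asi, MMU_TAG_ACCESS) & ~TAGACC_CTX_MASK;
 *	shctx = read_asi(ASI_MMU_CTX, MMU_SHARED_CONTEXT);
 *	shctx &= (1ULL << (64 - SHCTXREG_CTX_LSHIFT)) - 1;  // ctx bits
 *	write_asi(asi, MMU_TAG_ACCESS, va | shctx);
 */
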
#define	PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                       \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        mov     tsbe_ptr, %g1       /* trace_tsbhit wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        SET_SHCTX_TAGACC(%g3, %g4, ASI_DMMU)                            ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_3RD_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                  ;\
	/* END CSTYLED */

#define	PROBE_4TH_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                  ;\
	/* END CSTYLED */

#define	PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, label)                       \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,pn %icc, exec_fault                                          ;\
         mov     tsbe_ptr, %g1          /* for traptrace sake */        ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        SET_SHCTX_TAGACC(%g3, %g4, ASI_IMMU)                            ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                           /* retry faulted instruction */ ;\
label/**/1:
	/* END CSTYLED */

#define	PROBE_3RD_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, sfmmu_tsb_miss_tt)      ;\
	/* END CSTYLED */

#define	PROBE_4TH_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, label)                  ;\
	/* END CSTYLED */


/*
 * The trap type is supplied by the caller.
 *
 * On an ITSB miss, store the shared context into the IMMU tag access
 * register; on a DTSB miss, store it into the DMMU tag access register.
 * Thus the subsequent [D|I]TLB_STUFF() will work as expected.
 */
#define	SAVE_CTX1(traptype, tmp1, tmp2, label)                          \
	/* BEGIN CSTYLED */                                             \
        cmp     traptype, FAST_IMMU_MISS_TT                             ;\
        be,pn %icc, label/**/1                                          ;\
          nop                                                           ;\
        SET_SHCTX_TAGACC(tmp1, tmp2, ASI_DMMU)                          ;\
        membar  #Sync                                                   ;\
        ba,a    label/**/2                                              ;\
label/**/1:                                                             ;\
        SET_SHCTX_TAGACC(tmp1, tmp2, ASI_IMMU)                          ;\
        sethi   %hi(FLUSH_ADDR), tmp1                                   ;\
        flush   tmp1                                                    ;\
label/**/2:
	/* END CSTYLED */

#endif /* UTSB_PHYS */

#endif /* _ASM */

#ifdef	__cplusplus
}
#endif

#endif	/* _VM_MACH_SFMMU_H */
