/***********************license start***************
 * Copyright (c) 2003-2010  Cavium Networks (support@cavium.com). All rights
 * reserved.
 *
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.

 *   * Neither the name of Cavium Networks nor the names of
 *     its contributors may be used to endorse or promote products
 *     derived from this software without specific prior written
 *     permission.

 * This Software, including technical data, may be subject to U.S. export  control
 * laws, including the U.S. Export Administration Act and its  associated
 * regulations, and may be subject to export or import  regulations in other
 * countries.

 * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS"
 * AND WITH ALL FAULTS AND CAVIUM  NETWORKS MAKES NO PROMISES, REPRESENTATIONS OR
 * WARRANTIES, EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO
 * THE SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR
 * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM
 * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE,
 * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF
 * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR
 * CORRESPONDENCE TO DESCRIPTION. THE ENTIRE  RISK ARISING OUT OF USE OR
 * PERFORMANCE OF THE SOFTWARE LIES WITH YOU.
 ***********************license end**************************************/


/**
 * @file
 *
 * cvmx-tlb supplies per core TLB access functions for simple executive
 * applications.
 *
 * <hr>$Revision: 41586 $<hr>
 */
#include "cvmx.h"
#include "cvmx-tlb.h"

//#define DEBUG

/**
 * @INTERNAL
 * Convert a page mask to a human-readable string
 */
static inline const char* __mask_to_str(uint64_t mask)
{
    /* Most OCTEON processors do not support 1KB page sizes; adding
       CVMX_TLB_PAGEMASK_4K folds the (possibly zero) 1KB mask bits back in
       so the read-back value matches the constants below. */
    uint64_t non_1k_mask = mask + CVMX_TLB_PAGEMASK_4K;

    switch (non_1k_mask) {
    case CVMX_TLB_PAGEMASK_4K:     return "4kb";
    case CVMX_TLB_PAGEMASK_16K:    return "16kb";
    case CVMX_TLB_PAGEMASK_64K:    return "64kb";
    case CVMX_TLB_PAGEMASK_256K:   return "256kb";
    case CVMX_TLB_PAGEMASK_1M:     return "1Mb";
    case CVMX_TLB_PAGEMASK_4M:     return "4Mb";
    case CVMX_TLB_PAGEMASK_16M:    return "16Mb";
    case CVMX_TLB_PAGEMASK_64M:    return "64Mb";
    case CVMX_TLB_PAGEMASK_256M:   return "256Mb";
    }

    return "";
}

/**
 * @INTERNAL
 * Issue the TLB read instruction
 */
static inline void __tlb_read(void){
    CVMX_EHB;
    CVMX_TLBR;
    CVMX_EHB;
}

/**
 * @INTERNAL
 * Issue the TLB write instruction
 */
static inline void __tlb_write(void){
    CVMX_EHB;
    CVMX_TLBWI;
    CVMX_EHB;
}

/**
 * @INTERNAL
 * Issue the TLB probe instruction
 *
 * return the index of the matching entry, or -1 if there is no match
 */
static inline int __tlb_probe(uint64_t hi){
    int index;
    CVMX_EHB;
    CVMX_MT_ENTRY_HIGH(hi);
    CVMX_TLBP;
    CVMX_EHB;

    CVMX_MF_TLB_INDEX(index);

    /* On a probe miss the P bit (bit 31) is set, making the index negative */
    if (index < 0) index = -1;

    return index;
}

/**
 * @INTERNAL
 * Read a single TLB entry
 *
 * return  0: tlb entry is read
 *        -1: index is invalid
 */
static inline int __tlb_read_index(uint32_t tlbi){

    if (tlbi >= cvmx_tlb_size_limit()) {
        return -1;
    }

    CVMX_MT_TLB_INDEX(tlbi);
    __tlb_read();

    return 0;
}

/**
 * @INTERNAL
 * Write a single TLB entry
 *
 * return  0: tlb entry is written
 *        -1: index is invalid
 */
static inline int __tlb_write_index(uint32_t tlbi,
                                    uint64_t hi, uint64_t lo0,
                                    uint64_t lo1, uint64_t pagemask)
{

    if (tlbi >= cvmx_tlb_size_limit()) {
        return -1;
    }

#ifdef DEBUG
    cvmx_dprintf("cvmx-tlb-dbg: "
            "write TLB %d: hi %lx, lo0 %lx, lo1 %lx, pagemask %lx \n",
                tlbi, hi, lo0, lo1, pagemask);
#endif

    CVMX_MT_TLB_INDEX(tlbi);
    CVMX_MT_ENTRY_HIGH(hi);
    CVMX_MT_ENTRY_LO_0(lo0);
    CVMX_MT_ENTRY_LO_1(lo1);
    CVMX_MT_PAGEMASK(pagemask);
    __tlb_write();

    return 0;
}

/**
 * @INTERNAL
 * Determine if a TLB entry is free to use
 */
static inline int __tlb_entry_is_free(uint32_t tlbi) {
    int ret = 0;
    uint64_t lo0 = 0, lo1 = 0;

    if (tlbi < cvmx_tlb_size_limit()) {

        __tlb_read_index(tlbi);

        /* Unused entries have neither even nor odd page mapped */
        CVMX_MF_ENTRY_LO_0(lo0);
        CVMX_MF_ENTRY_LO_1(lo1);

        if (!(lo0 & TLB_VALID) && !(lo1 & TLB_VALID)) {
            ret = 1;
        }
    }

    return ret;
}


/**
 * @INTERNAL
 * Dump a single TLB entry
 */
static inline void __tlb_dump_index(uint32_t tlbi)
{
    if (tlbi < cvmx_tlb_size_limit()) {

        if (__tlb_entry_is_free(tlbi)) {
#ifdef DEBUG
            cvmx_dprintf("Index: %3d Free \n", tlbi);
#endif
        } else {
            uint64_t lo0, lo1, pgmask;
            uint64_t hi;        /* EntryHi is a 64-bit register */
            uint32_t c0, c1;
#ifdef DEBUG
            int width = 13;
#endif

            __tlb_read_index(tlbi);

            CVMX_MF_ENTRY_HIGH(hi);
            CVMX_MF_ENTRY_LO_0(lo0);
            CVMX_MF_ENTRY_LO_1(lo1);
            CVMX_MF_PAGEMASK(pgmask);


#ifdef DEBUG
            cvmx_dprintf("Index: %3d pgmask=%s ", tlbi, __mask_to_str(pgmask));
#endif

            /* Cache coherency attributes of the even and odd pages */
            c0 = (lo0 >> 3) & 7;
            c1 = (lo1 >> 3) & 7;

#ifdef DEBUG
            cvmx_dprintf("va=%0*lx asid=%02x\n",
                               width, (hi & ~0x1fffUL), (uint32_t)(hi & 0xff));

            cvmx_dprintf("\t[pa=%0*lx c=%d d=%d v=%d g=%d] ",
                               width,
                               (lo0 << 6) & PAGE_MASK, c0,
                               (lo0 & 4) ? 1 : 0,
                               (lo0 & 2) ? 1 : 0,
                               (lo0 & 1) ? 1 : 0);
            cvmx_dprintf("[pa=%0*lx c=%d d=%d v=%d g=%d]\n",
                               width,
                               (lo1 << 6) & PAGE_MASK, c1,
                               (lo1 & 4) ? 1 : 0,
                               (lo1 & 2) ? 1 : 0,
                               (lo1 & 1) ? 1 : 0);

#endif
        }
    }
}

/**
 * @INTERNAL
 * Read the C0 Wired register, the index of the first non-wired TLB entry
 */
static inline uint32_t __tlb_wired_index(void) {
    uint32_t  tlbi;

    CVMX_MF_TLB_WIRED(tlbi);
    return tlbi;
}

/**
 *  Set up a wired entry. This function is designed to be used by the Simple
 *  Executive to set up its virtual to physical address mappings at start-up
 *  time. After the mappings are set up, the remaining unused TLB entries can
 *  be used for run-time shared memory mappings.
 *
 *  Calling this function increments the C0 Wired index register. The Wired
 *  index register marks the boundary between the fixed TLB mappings and the
 *  run-time shared memory mappings.
 *
 *  @param  hi      Entry Hi
 *  @param  lo0     Entry Low0
 *  @param  lo1     Entry Low1
 *  @param  pagemask Pagemask
 *
 *  @return 0: the entry was added
 *  @return -1: out of TLB entries
 */
int cvmx_tlb_add_wired_entry( uint64_t hi, uint64_t lo0,
                              uint64_t lo1, uint64_t pagemask)
{
    uint64_t index;
    int ret = -1;

    index = __tlb_wired_index();

    /* Check that the index is free to use */
    if (index < cvmx_tlb_size_limit() && __tlb_entry_is_free(index)) {
        /* Write the entry, then increase the wired index by 1 */
        __tlb_write_index(index, hi, lo0, lo1, pagemask);
        CVMX_MT_TLB_WIRED(index + 1);
        ret = 0;
    }

    return ret;
}

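/*
 * Example (illustrative sketch, not part of the original file): add a wired
 * mapping for one 4KB page pair at start-up.  The addresses are hypothetical
 * and the EntryHi/EntryLo encoding simply mirrors what cvmx_tlb_write_entry()
 * below builds: VPN2 = (vaddr >> 12) << 12, PFN field = (paddr >> 12) << 6
 * OR'ed with the flag bits.  TLB_VALID is the only flag macro referenced in
 * this file; real mappings normally also need the dirty/global/cache bits
 * defined in cvmx-tlb.h, and CVMX_TLB_PAGEMASK_4K is assumed to be a valid
 * PageMask encoding for 4KB pages.  lo0 maps the even 4KB page at paddr,
 * lo1 the odd page at paddr + 4KB:
 *
 *   uint64_t vaddr = 0x41000000ULL;
 *   uint64_t paddr = 0x20000000ULL;
 *   uint64_t hi    = (vaddr >> 12) << 12;
 *   uint64_t lo0   = ((paddr >> 12) << 6) | TLB_VALID;
 *   uint64_t lo1   = (((paddr + 0x1000) >> 12) << 6) | TLB_VALID;
 *
 *   if (cvmx_tlb_add_wired_entry(hi, lo0, lo1, CVMX_TLB_PAGEMASK_4K) != 0)
 *       cvmx_dprintf("cvmx-tlb: out of wired TLB entries\n");
 */
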
/**
 *  Find a free entry that can be used for a shared memory mapping.
 *
 *  @return -1: no free entry found
 *  @return >=0: index of a free entry
 */
int cvmx_tlb_allocate_runtime_entry(void)
{
    uint32_t i;
    int ret = -1;

    for (i = __tlb_wired_index(); i < cvmx_tlb_size_limit(); i++) {

        /* Check that the index is free to use */
        if (__tlb_entry_is_free(i)) {
            /* Found one, return it */
            ret = i;
            break;
        }
    }

    return ret;
}

/**
 *  Invalidate the TLB entry. Remove previous mapping if one was set up
 */
void cvmx_tlb_free_runtime_entry(uint32_t tlbi)
{
    /* Invalidate an unwired TLB entry */
    if ((tlbi < cvmx_tlb_size_limit()) && (tlbi >= __tlb_wired_index())) {
        __tlb_write_index(tlbi, 0xffffffff80000000ULL, 0, 0, 0);
    }
}


/**
 *  Program a single TLB entry to enable the provided vaddr to paddr mapping.
 *
 *  @param index  Index of the TLB entry
 *  @param vaddr  The virtual address for this mapping
 *  @param paddr  The physical address for this mapping
 *  @param size   Size of the mapping
 *  @param tlb_flags  Entry mapping flags
 */
void cvmx_tlb_write_entry(int index, uint64_t vaddr, uint64_t paddr,
                          uint64_t size, uint64_t tlb_flags) {
    uint64_t lo0, lo1, hi, pagemask;

    if (__is_power_of_two(size)) {
        if ((__log2(size) & 1) == 0) {
            /* log2(size) is even, so size is a single page size; map only
               one page and figure out whether it is the even or odd page
               of the pair */
            if ((vaddr >> __log2(size) & 1)) {
                lo0 = 0;
                lo1 = ((paddr >> 12) << 6) | tlb_flags;
                hi  = ((vaddr - size) >> 12) << 12;
            } else {
                lo0 = ((paddr >> 12) << 6) | tlb_flags;
                lo1 = 0;
                hi  = ((vaddr) >> 12) << 12;
            }
            pagemask = (size - 1) & (~1 << 11);
        } else {
            /* log2(size) is odd: split the mapping across the even and odd
               pages of a single entry */
            lo0 = ((paddr >> 12) << 6) | tlb_flags;
            lo1 = (((paddr + size / 2) >> 12) << 6) | tlb_flags;
            hi  = ((vaddr) >> 12) << 12;
            pagemask = ((size / 2) - 1) & (~1 << 11);
        }

        __tlb_write_index(index, hi, lo0, lo1, pagemask);

    }
}

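/*
 * Worked example (illustrative, not part of the original file; idx is some
 * valid TLB index and the addresses and flags are hypothetical):
 *
 *   cvmx_tlb_write_entry(idx, 0x41001000ULL, 0x20001000ULL, 0x1000, TLB_VALID);
 *     size = 4KB, log2(size) = 12 (even) -> map a single page.
 *     Bit 12 of vaddr is 1, so this is the odd page of the pair:
 *     hi = 0x41000000 (vaddr - size), lo0 = 0, lo1 maps 0x20001000.
 *
 *   cvmx_tlb_write_entry(idx, 0x41000000ULL, 0x20000000ULL, 0x2000, TLB_VALID);
 *     size = 8KB, log2(size) = 13 (odd) -> split across both 4KB pages:
 *     hi = 0x41000000, lo0 maps 0x20000000, lo1 maps 0x20001000.
 */
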
/**
 *  Program a single TLB entry to enable the provided vaddr to paddr mapping.
 *  This version adds a wired entry that should not be changed at run time.
 *
 *  @param vaddr  The virtual address for this mapping
 *  @param paddr  The physical address for this mapping
 *  @param size   Size of the mapping
 *  @param tlb_flags  Entry mapping flags
 *  @return 1: fixed entry added
 *          0: TLB out of entries, or the entry could not be written
 */
int cvmx_tlb_add_fixed_entry( uint64_t vaddr, uint64_t paddr, uint64_t size, uint64_t tlb_flags) {

    uint64_t index;
    int ret = 0;

    CVMX_MF_TLB_WIRED(index);

    /* Check that the index is free to use */
    if (index < cvmx_tlb_size_limit() && __tlb_entry_is_free(index)) {
        cvmx_tlb_write_entry(index, vaddr, paddr, size, tlb_flags);

        if (!__tlb_entry_is_free(index)) {
            /* Bump up the wired register */
            CVMX_MT_TLB_WIRED(index + 1);
            ret = 1;
        }
    }
    return ret;
}


/**
 *  Program a single TLB entry to enable the provided vaddr to paddr mapping.
 *  This version writes a runtime entry. It checks the index to make sure it
 *  does not overwrite any fixed entries.
 *
 *  @param index  Index of the TLB entry
 *  @param vaddr  The virtual address for this mapping
 *  @param paddr  The physical address for this mapping
 *  @param size   Size of the mapping
 *  @param tlb_flags  Entry mapping flags
 */
void cvmx_tlb_write_runtime_entry(int index, uint64_t vaddr, uint64_t paddr,
                          uint64_t size, uint64_t tlb_flags)
{
    int wired_index;
    CVMX_MF_TLB_WIRED(wired_index);

    if (index >= wired_index) {
        cvmx_tlb_write_entry(index, vaddr, paddr, size, tlb_flags);
    }
}

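/*
 * Example (illustrative sketch, not part of the original file): typical
 * life cycle of a run-time shared-memory mapping.  Addresses, size and
 * flags are hypothetical; TLB_VALID is the only flag macro referenced in
 * this file, real mappings usually need additional bits from cvmx-tlb.h.
 *
 *   int idx = cvmx_tlb_allocate_runtime_entry();
 *   if (idx >= 0) {
 *       cvmx_tlb_write_runtime_entry(idx, 0x41000000ULL, 0x20000000ULL,
 *                                    0x2000, TLB_VALID);
 *       ... access the shared memory through 0x41000000 ...
 *       cvmx_tlb_free_runtime_entry(idx);
 *   }
 */
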
/**
 * Find the TLB index of a given virtual address
 *
 *  @param vaddr  The virtual address to look up
 *  @return  -1:  not TLB mapped
 *          >=0:  the TLB index
 */
int cvmx_tlb_lookup(uint64_t vaddr) {
    uint64_t hi = (vaddr >> 12) << 12; /* We always use ASID 0 */

    return __tlb_probe(hi);
}

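/*
 * Example (illustrative, not part of the original file):
 *
 *   int idx = cvmx_tlb_lookup(0x41000000ULL);
 *   if (idx < 0)
 *       cvmx_dprintf("address is not TLB mapped\n");
 *   else
 *       cvmx_dprintf("address is mapped by TLB entry %d\n", idx);
 */
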
/**
 *  Debug routine to show all shared memory mappings
 */
void cvmx_tlb_dump_shared_mapping(void) {
    uint32_t tlbi;

    for (tlbi = __tlb_wired_index(); tlbi < cvmx_tlb_size_limit(); tlbi++) {
        __tlb_dump_index(tlbi);
    }
}

/**
 *  Debug routine to show all TLB entries of this core
 *
 */
void cvmx_tlb_dump_all(void) {

    uint32_t tlbi;

    for (tlbi = 0; tlbi < cvmx_tlb_size_limit(); tlbi++) {
        __tlb_dump_index(tlbi);
    }
}