Lines Matching refs:self

24 def __init__(self):
28 self.ops = aarch64_page_ops()
33 def __init__(self):
34 self.SUBSECTION_SHIFT = 21
35 self.SUBSECTION_SIZE = 1 << self.SUBSECTION_SHIFT
36 self.MODULES_VSIZE = 128 * 1024 * 1024
39 self.SECTION_SIZE_BITS = 29
41 self.SECTION_SIZE_BITS = 27
42 self.MAX_PHYSMEM_BITS = constants.LX_CONFIG_ARM64_VA_BITS
44 self.PAGE_SHIFT = constants.LX_CONFIG_PAGE_SHIFT
45 self.PAGE_SIZE = 1 << self.PAGE_SHIFT
46 self.PAGE_MASK = (~(self.PAGE_SIZE - 1)) & ((1 << 64) - 1)
48 self.VA_BITS = constants.LX_CONFIG_ARM64_VA_BITS
49 if self.VA_BITS > 48:
50 self.VA_BITS_MIN = 48
51 self.vabits_actual = gdb.parse_and_eval('vabits_actual')
53 self.VA_BITS_MIN = self.VA_BITS
54 self.vabits_actual = self.VA_BITS
55 self.kimage_voffset = gdb.parse_and_eval('kimage_voffset') & ((1 << 64) - 1)
57 self.SECTIONS_SHIFT = self.MAX_PHYSMEM_BITS - self.SECTION_SIZE_BITS
60 self.MAX_ORDER = constants.LX_CONFIG_ARCH_FORCE_MAX_ORDER
62 self.MAX_ORDER = 11
64 self.MAX_ORDER_NR_PAGES = 1 << (self.MAX_ORDER - 1)
65 self.PFN_SECTION_SHIFT = self.SECTION_SIZE_BITS - self.PAGE_SHIFT
66 self.NR_MEM_SECTIONS = 1 << self.SECTIONS_SHIFT
67 self.PAGES_PER_SECTION = 1 << self.PFN_SECTION_SHIFT
68 self.PAGE_SECTION_MASK = (~(self.PAGES_PER_SECTION - 1)) & ((1 << 64) - 1)
71 self.SECTIONS_PER_ROOT = self.PAGE_SIZE // gdb.lookup_type("struct mem_section").sizeof
73 self.SECTIONS_PER_ROOT = 1
75 self.NR_SECTION_ROOTS = DIV_ROUND_UP(self.NR_MEM_SECTIONS, self.SECTIONS_PER_ROOT)
76 self.SECTION_ROOT_MASK = self.SECTIONS_PER_ROOT - 1
77 self.SUBSECTION_SHIFT = 21
78 self.SUBSECTION_SIZE = 1 << self.SUBSECTION_SHIFT
79 self.PFN_SUBSECTION_SHIFT = self.SUBSECTION_SHIFT - self.PAGE_SHIFT
80 self.PAGES_PER_SUBSECTION = 1 << self.PFN_SUBSECTION_SHIFT
82 self.SECTION_HAS_MEM_MAP = 1 << int(gdb.parse_and_eval('SECTION_HAS_MEM_MAP_BIT'))
83 self.SECTION_IS_EARLY = 1 << int(gdb.parse_and_eval('SECTION_IS_EARLY_BIT'))
85 self.struct_page_size = utils.get_page_type().sizeof
86 self.STRUCT_PAGE_MAX_SHIFT = int(math.log(self.struct_page_size, 2))
88 self.PAGE_OFFSET = self._PAGE_OFFSET(self.VA_BITS)
89 self.MODULES_VADDR = self._PAGE_END(self.VA_BITS_MIN)
90 self.MODULES_END = self.MODULES_VADDR + self.MODULES_VSIZE
92 self.VMEMMAP_SHIFT = (self.PAGE_SHIFT - self.STRUCT_PAGE_MAX_SHIFT)
93 self.VMEMMAP_SIZE = ((self._PAGE_END(self.VA_BITS_MIN) - self.PAGE_OFFSET) >> self.VMEMMAP_SHIFT)
94 self.VMEMMAP_START = (-(1 << (self.VA_BITS - self.VMEMMAP_SHIFT))) & 0xffffffffffffffff
95 self.VMEMMAP_END = self.VMEMMAP_START + self.VMEMMAP_SIZE
97 self.VMALLOC_START = self.MODULES_END
98 self.VMALLOC_END = self.VMEMMAP_START - 256 * 1024 * 1024
100 self.memstart_addr = gdb.parse_and_eval("memstart_addr")
101 self.PHYS_OFFSET = self.memstart_addr
102 self.vmemmap = gdb.Value(self.VMEMMAP_START).cast(utils.get_page_type().pointer()) - (self.memstart_addr >> self.PAGE_SHIFT)
104 self.KERNEL_START = gdb.parse_and_eval("_text")
105 self.KERNEL_END = gdb.parse_and_eval("_end")
109 self.KASAN_SHADOW_SCALE_SHIFT = 3
111 self.KASAN_SHADOW_SCALE_SHIFT = 4
112 self.KASAN_SHADOW_OFFSET = constants.LX_CONFIG_KASAN_SHADOW_OFFSET
113 self.KASAN_SHADOW_END = (1 << (64 - self.KASAN_SHADOW_SCALE_SHIFT)) + self.KASAN_SHADOW_OFFSET
114 self.PAGE_END = self.KASAN_SHADOW_END - (1 << (self.vabits_actual - self.KASAN_SHADOW_SCALE_SHIFT))
116 self.PAGE_END = self._PAGE_END(self.VA_BITS_MIN)
119 self.NODE_SHIFT = constants.LX_CONFIG_NODES_SHIFT
121 self.NODE_SHIFT = 0
123 self.MAX_NUMNODES = 1 << self.NODE_SHIFT
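
To make the section geometry above concrete, here is a small standalone sketch of the derived values for one assumed configuration (4 KiB pages, i.e. PAGE_SHIFT = 12, and the non-64K branch where SECTION_SIZE_BITS = 27); these config values are illustrative assumptions, not read from a particular build:

    # assumed config: 4 KiB pages, SECTION_SIZE_BITS = 27 (non-64K-pages case)
    PAGE_SHIFT = 12
    SECTION_SIZE_BITS = 27
    PFN_SECTION_SHIFT = SECTION_SIZE_BITS - PAGE_SHIFT   # 15
    PAGES_PER_SECTION = 1 << PFN_SECTION_SHIFT           # 32768 pages per section
    SECTION_SIZE = 1 << SECTION_SIZE_BITS                # 128 MiB covered by one mem_section
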
125 def SECTION_NR_TO_ROOT(self, sec):
126 return sec // self.SECTIONS_PER_ROOT
128 def __nr_to_section(self, nr):
129 root = self.SECTION_NR_TO_ROOT(nr)
131 return mem_section[root][nr & self.SECTION_ROOT_MASK]
133 def pfn_to_section_nr(self, pfn):
134 return pfn >> self.PFN_SECTION_SHIFT
136 def section_nr_to_pfn(self, sec):
137 return sec << self.PFN_SECTION_SHIFT
139 def __pfn_to_section(self, pfn):
140 return self.__nr_to_section(self.pfn_to_section_nr(pfn))
142 def pfn_to_section(self, pfn):
143 return self.__pfn_to_section(pfn)
145 def subsection_map_index(self, pfn):
146 return (pfn & ~(self.PAGE_SECTION_MASK)) // self.PAGES_PER_SUBSECTION
148 def pfn_section_valid(self, ms, pfn):
150 idx = self.subsection_map_index(pfn)
155 def valid_section(self, mem_section):
156 if mem_section is not None and (mem_section['section_mem_map'] & self.SECTION_HAS_MEM_MAP):
160 def early_section(self, mem_section):
161 if mem_section is not None and (mem_section['section_mem_map'] & self.SECTION_IS_EARLY):
165 def pfn_valid(self, pfn):
167 if self.PHYS_PFN(self.PFN_PHYS(pfn)) != pfn:
169 if self.pfn_to_section_nr(pfn) >= self.NR_MEM_SECTIONS:
171 ms = self.__pfn_to_section(pfn)
173 if not self.valid_section(ms):
175 return self.early_section(ms) or self.pfn_section_valid(ms, pfn)
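
Because the refs:self filter drops lines that do not mention self (the bare return statements, for instance), the control flow of pfn_valid() is easier to read as a consolidated sketch; this is a hedged restatement using the helpers listed above, not the verbatim kernel code:

    # sketch of the pfn_valid() flow against an aarch64_page_ops instance `ops`
    def pfn_valid_sketch(ops, pfn):
        if ops.PHYS_PFN(ops.PFN_PHYS(pfn)) != pfn:            # pfn must survive the round trip
            return False
        if ops.pfn_to_section_nr(pfn) >= ops.NR_MEM_SECTIONS:
            return False
        ms = ops.pfn_to_section(pfn)
        if not ops.valid_section(ms):                         # needs SECTION_HAS_MEM_MAP
            return False
        return ops.early_section(ms) or ops.pfn_section_valid(ms, pfn)
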
177 def _PAGE_OFFSET(self, va):
180 def _PAGE_END(self, va):
183 def kasan_reset_tag(self, addr):
189 def __is_lm_address(self, addr):
190 if (addr - self.PAGE_OFFSET) < (self.PAGE_END - self.PAGE_OFFSET):
194 def __lm_to_phys(self, addr):
195 return addr - self.PAGE_OFFSET + self.PHYS_OFFSET
197 def __kimg_to_phys(self, addr):
198 return addr - self.kimage_voffset
200 def __virt_to_phys_nodebug(self, va):
201 untagged_va = self.kasan_reset_tag(va)
202 if self.__is_lm_address(untagged_va):
203 return self.__lm_to_phys(untagged_va)
205 return self.__kimg_to_phys(untagged_va)
207 def __virt_to_phys(self, va):
209 if not self.__is_lm_address(self.kasan_reset_tag(va)):
211 return self.__virt_to_phys_nodebug(va)
213 def virt_to_phys(self, va):
214 return self.__virt_to_phys(va)
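
virt_to_phys() distinguishes linear-map addresses, translated via PAGE_OFFSET and PHYS_OFFSET, from kernel-image addresses, translated via kimage_voffset. A minimal numeric sketch of the linear-map case, with made-up values for PAGE_OFFSET and PHYS_OFFSET:

    # __lm_to_phys(): pa = va - PAGE_OFFSET + PHYS_OFFSET
    PAGE_OFFSET = 0xffff000000000000     # hypothetical value for a 48-bit VA kernel
    PHYS_OFFSET = 0x40000000             # hypothetical memstart_addr
    va = 0xffff000000100000
    pa = va - PAGE_OFFSET + PHYS_OFFSET  # 0x40100000
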
216 def PFN_PHYS(self, pfn):
217 return pfn << self.PAGE_SHIFT
219 def PHYS_PFN(self, phys):
220 return phys >> self.PAGE_SHIFT
222 def __phys_to_virt(self, pa):
223 return (pa - self.PHYS_OFFSET) | self.PAGE_OFFSET
225 def __phys_to_pfn(self, pa):
226 return self.PHYS_PFN(pa)
228 def __pfn_to_phys(self, pfn):
229 return self.PFN_PHYS(pfn)
231 def __pa_symbol_nodebug(self, x):
232 return self.__kimg_to_phys(x)
234 def __phys_addr_symbol(self, x):
236 if x < self.KERNEL_START or x > self.KERNEL_END:
238 return self.__pa_symbol_nodebug(x)
240 def __pa_symbol(self, x):
241 return self.__phys_addr_symbol(x)
243 def __va(self, pa):
244 return self.__phys_to_virt(pa)
246 def pfn_to_kaddr(self, pfn):
247 return self.__va(pfn << self.PAGE_SHIFT)
249 def virt_to_pfn(self, va):
250 return self.__phys_to_pfn(self.__virt_to_phys(va))
252 def sym_to_pfn(self, x):
253 return self.__phys_to_pfn(self.__pa_symbol(x))
255 def page_to_pfn(self, page):
256 return int(page.cast(utils.get_page_type().pointer()) - self.vmemmap.cast(utils.get_page_type().pointer()))
258 def page_to_phys(self, page):
259 return self.__pfn_to_phys(self.page_to_pfn(page))
261 def pfn_to_page(self, pfn):
262 return (self.vmemmap + pfn).cast(utils.get_page_type().pointer())
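
page_to_pfn() and pfn_to_page() are plain pointer arithmetic against the vmemmap base: the struct page for PFN n lives at vmemmap + n. An illustration with example numbers (the vmemmap address and struct page size are assumptions, not from a real build):

    struct_page_size = 64                         # assumed sizeof(struct page)
    vmemmap = 0xfffffc0000000000                  # hypothetical vmemmap base address
    pfn = 0x80000
    page_addr = vmemmap + pfn * struct_page_size             # struct page for this pfn
    pfn_back = (page_addr - vmemmap) // struct_page_size     # recovers 0x80000
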
264 def page_to_virt(self, page):
266 return self.__va(self.page_to_phys(page))
268 __idx = int((page.cast(gdb.lookup_type("unsigned long")) - self.VMEMMAP_START).cast(utils.get_ulong_type())) // self.struct_page_size
269 return self.PAGE_OFFSET + (__idx * self.PAGE_SIZE)
271 def virt_to_page(self, va):
273 return self.pfn_to_page(self.virt_to_pfn(va))
275 __idx = int(self.kasan_reset_tag(va) - self.PAGE_OFFSET) // self.PAGE_SIZE
276 addr = self.VMEMMAP_START + (__idx * self.struct_page_size)
279 def page_address(self, page):
280 return self.page_to_virt(page)
282 def folio_address(self, folio):
283 return self.page_address(folio['page'].address)
288 def __init__(self):
289 super(LxPFN2Page, self).__init__("lx-pfn_to_page", gdb.COMMAND_USER)
291 def invoke(self, arg, from_tty):
302 def __init__(self):
303 super(LxPage2PFN, self).__init__("lx-page_to_pfn", gdb.COMMAND_USER)
305 def invoke(self, arg, from_tty):
317 def __init__(self):
318 super(LxPageAddress, self).__init__("lx-page_address", gdb.COMMAND_USER)
320 def invoke(self, arg, from_tty):
332 def __init__(self):
333 super(LxPage2Phys, self).__init__("lx-page_to_phys", gdb.COMMAND_USER)
335 def invoke(self, arg, from_tty):
347 def __init__(self):
348 super(LxVirt2Phys, self).__init__("lx-virt_to_phys", gdb.COMMAND_USER)
350 def invoke(self, arg, from_tty):
361 def __init__(self):
362 super(LxVirt2Page, self).__init__("lx-virt_to_page", gdb.COMMAND_USER)
364 def invoke(self, arg, from_tty):
375 def __init__(self):
376 super(LxSym2PFN, self).__init__("lx-sym_to_pfn", gdb.COMMAND_USER)
378 def invoke(self, arg, from_tty):
389 def __init__(self):
390 super(LxPFN2Kaddr, self).__init__("lx-pfn_to_kaddr", gdb.COMMAND_USER)
392 def invoke(self, arg, from_tty):
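
The command classes at the bottom (LxPFN2Page, LxPage2PFN, LxPageAddress, LxPage2Phys, LxVirt2Phys, LxVirt2Page, LxSym2PFN, LxPFN2Kaddr) register lx-* user commands on top of these helpers. Example invocations, with placeholder argument values (real PFNs and addresses depend on the target):

    (gdb) lx-pfn_to_page 0x80000
    (gdb) lx-page_to_pfn 0xfffffc0000002000
    (gdb) lx-page_address 0xfffffc0000002000
    (gdb) lx-page_to_phys 0xfffffc0000002000
    (gdb) lx-virt_to_phys 0xffff000000100000
    (gdb) lx-virt_to_page 0xffff000000100000
    (gdb) lx-sym_to_pfn _text
    (gdb) lx-pfn_to_kaddr 0x80000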