(*
 * Copyright 2014, General Dynamics C4 Systems
 *
 * This software may be distributed and modified according to the terms of
 * the GNU General Public License version 2. Note that NO WARRANTY is provided.
 * See "LICENSE_GPLv2.txt" for details.
 *
 * @TAG(GD_GPL)
 *)

chapter {* ARM-specific definitions for abstract datatype for the abstract specification *}

theory ArchADT_AI
imports
  "Lib.Simulation"
  "../Invariants_AI"
begin
context Arch begin global_naming ARM

lemma word_1FF_is_mask:
  "0x1FF = mask 9"
  by (simp add: mask_def)

(*** FIXME end ***)

subsection {* Constructing a virtual-memory view *}

text {*
  Function @{text get_pd_of_thread} takes three parameters:
  the kernel heap, the architecture-specific state, and
  a thread identifier.
  It returns the identifier of the corresponding page directory.
  Note that this function is total.
  If the traversal stops before a page directory can be found
  (e.g. because the VTable entry is not set or a reference is invalid),
  the function returns 0.

  Looking up the page directory for a thread reference involves the following
  steps:

  First, we check that the reference actually points to a TCB object in
  the kernel heap.
  If so, we check whether the vtable entry of the TCB contains a capability
  to a page directory with valid mapping data.
  Note that the mapping data might become stale.
  Hence, we have to follow the mapping data through the ASID table.
*}
definition
  get_pd_of_thread :: "kheap \<Rightarrow> arch_state \<Rightarrow> obj_ref \<Rightarrow> obj_ref"
where
  get_pd_of_thread_def:
  "get_pd_of_thread khp astate tcb_ref \<equiv>
   case khp tcb_ref of Some (TCB tcb) \<Rightarrow>
     (case tcb_vtable tcb of
        ArchObjectCap (PageDirectoryCap pd_ref (Some asid))
          \<Rightarrow> (case arm_asid_table astate (asid_high_bits_of asid) of
                None \<Rightarrow> 0
              | Some p \<Rightarrow> (case khp p of
                            Some (ArchObj ako) \<Rightarrow>
                               if (VSRef (asid && mask asid_low_bits)
                                         (Some AASIDPool), pd_ref)
                                  \<in> vs_refs_arch ako
                                 then pd_ref
                               else 0
                             | _ \<Rightarrow> 0))
      | _ \<Rightarrow>  0)
   | _ \<Rightarrow> 0"

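(* Illustrative sketch only (not used in any proof below): suppose khp tcb_ref
   yields a TCB whose tcb_vtable is ArchObjectCap (PageDirectoryCap pd_ref (Some asid)),
   and that, with hypothetical names ap_ref and ap,
     arm_asid_table astate (asid_high_bits_of asid) = Some ap_ref,
     khp ap_ref = Some (ArchObj (ASIDPool ap)),
     ap (ucast (asid && mask asid_low_bits)) = Some pd_ref.
   Then get_pd_of_thread khp astate tcb_ref = pd_ref.  If any of these steps
   fails, or the ASID-pool slot points to a different page directory, the
   result is 0 (cf. get_pd_of_thread_def2 below). *)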

lemma VSRef_AASIDPool_in_vs_refs:
  "(VSRef (asid && mask asid_low_bits) (Some AASIDPool), r) \<in> vs_refs_arch ko =
   (\<exists>apool. ko = arch_kernel_obj.ASIDPool apool \<and>
            apool (ucast (asid && mask asid_low_bits)) = Some r)"
  apply simp
  apply (case_tac ko, simp_all add: image_def graph_of_def)
  apply (rename_tac arch_kernel_obj)
  apply (rule iffI)
   apply clarsimp
   apply (subst ucast_up_ucast_id, simp add: is_up, assumption)
  apply (intro exI conjI, assumption)
  apply (rule sym, rule ucast_ucast_len)
  apply (cut_tac and_mask_less'[of asid_low_bits asid])
   apply (simp_all add: asid_low_bits_def)
  done

context
notes vs_refs_arch_def[simp del]
begin

lemma get_pd_of_thread_def2:
  "get_pd_of_thread khp astate tcb_ref \<equiv>
        case khp tcb_ref of Some (TCB tcb) \<Rightarrow>
          (case tcb_vtable tcb of
             ArchObjectCap (PageDirectoryCap pd_ref (Some asid))
               \<Rightarrow> if (\<exists>p apool.
                        arm_asid_table astate (asid_high_bits_of asid) = Some p \<and>
                        khp p = Some (ArchObj (ASIDPool apool)) \<and>
                        apool (ucast (asid && mask asid_low_bits)) = Some pd_ref)
                    then pd_ref
                    else 0
           | _ \<Rightarrow>  0)
        | _ \<Rightarrow>  0"
  apply (rule eq_reflection)
  apply (clarsimp simp: get_pd_of_thread_def
                 split: kernel_object.splits option.splits)
  apply (rename_tac tcb)
  apply (case_tac "tcb_vtable tcb",
         simp_all split: cap.splits arch_cap.splits kernel_object.splits
                         arch_kernel_obj.splits option.splits)
  apply (auto simp: VSRef_AASIDPool_in_vs_refs)
  done

lemma the_arch_cap_simp[simp]: "the_arch_cap (ArchObjectCap x) = x"
  by (simp add: the_arch_cap_def)

lemma get_pd_of_thread_vs_lookup:
  "get_pd_of_thread (kheap s) (arch_state s) tcb_ref =
   (case kheap s tcb_ref of
      Some (TCB tcb) \<Rightarrow>
        (case tcb_vtable tcb of
           ArchObjectCap (PageDirectoryCap pd_ref (Some asid)) \<Rightarrow>
             if (the (vs_cap_ref (tcb_vtable tcb)) \<rhd> pd_ref) s then pd_ref
             else 0
         | _ \<Rightarrow> 0)
    | _ \<Rightarrow> 0)"
  apply (clarsimp simp: get_pd_of_thread_def split: option.splits)
  apply (case_tac "the (kheap s tcb_ref)", simp_all, clarsimp)
  apply (rename_tac tcb)
  apply (case_tac "\<not> is_pd_cap (tcb_vtable tcb)")
   apply (clarsimp simp: is_pd_cap_def split: cap.split arch_cap.split)
  apply (clarsimp simp: is_pd_cap_def vs_cap_ref_def)
  apply (case_tac asid, simp_all, clarsimp)
  apply (intro conjI impI)

   apply (erule vs_lookupE)
   apply (clarsimp simp: vs_asid_refs_def split_def image_def graph_of_def)
   apply (erule rtranclE, simp)
   apply (clarsimp dest!: vs_lookup1D)
   apply (clarsimp simp: vs_refs_def vs_refs_arch_def graph_of_def
                  split: kernel_object.split_asm arch_kernel_obj.split_asm)
   apply (erule rtranclE)
    apply (clarsimp simp: up_ucast_inj_eq obj_at_def vs_refs_def vs_refs_arch_def graph_of_def
                          image_def
                   split: arch_kernel_obj.split_asm)
   apply (clarsimp dest!: vs_lookup1D)
   apply (clarsimp simp: vs_refs_def vs_refs_arch_def graph_of_def
                   split: kernel_object.split_asm arch_kernel_obj.split_asm)

  apply (erule swap)
  apply (clarsimp split: kernel_object.split_asm arch_kernel_obj.split_asm
                         option.split_asm if_split_asm)
  apply (rule vs_lookupI)
   apply (fastforce simp: vs_asid_refs_def image_def graph_of_def)
  apply (rule rtrancl_into_rtrancl)
   apply (rule rtrancl_refl)
  apply (rule vs_lookup1I, (simp add: obj_at_def vs_refs_def vs_refs_arch_def)+)
  done

end

(* NOTE: This statement would clearly be nicer for a partial function
         but later on, we really want the function to be total. *)
lemma get_pd_of_thread_eq: (* ARMHYP *)
  "pd_ref \<noteq> 0 \<Longrightarrow>
   get_pd_of_thread (kheap s) (arch_state s) tcb_ref = pd_ref \<longleftrightarrow>
   (\<exists>tcb. kheap s tcb_ref = Some (TCB tcb) \<and>
          (\<exists>asid. tcb_vtable tcb =
                  cap.ArchObjectCap (PageDirectoryCap
                                       pd_ref (Some asid)) \<and>
                  (the (vs_cap_ref_arch (the_arch_cap (tcb_vtable tcb))) \<rhd> pd_ref) s))"
  by (auto simp: get_pd_of_thread_vs_lookup vs_cap_ref_def
          split: option.splits Structures_A.kernel_object.splits
                 cap.splits arch_cap.splits)


text {* Non-monad versions of @{term get_pte} and @{term get_pde}.
  The parameters are:
  \begin{description}
  \item[@{term ahp}] a heap of architecture-specific objects,
  \item[@{term pt_ref}] a page-table reference,
  \item[@{term pd_ref}] a page-directory reference, and
  \item[@{term vptr}] a virtual address.
  \end{description}
*}

definition
  "get_pt_entry ahp pt_ref vptr \<equiv>
   case ahp pt_ref of
     Some (PageTable pt) \<Rightarrow>
       Some (pt (ucast ((vptr >> 12) && mask 9)))
   | _ \<Rightarrow> None"

definition
  "get_pd_entry ahp pd_ref vptr \<equiv>
   case ahp pd_ref of
     Some (PageDirectory pd) \<Rightarrow> Some (pd (ucast (vptr >> pageBits + pt_bits - pte_bits)))
   | _ \<Rightarrow> None"
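
(* Index arithmetic for the two definitions above, assuming the ARMHYP values
   unfolded by vspace_bits_defs (pageBits = 12, pt_bits = 12, pte_bits = 3;
   these concrete numbers are stated here only for illustration):
     pageBits + pt_bits - pte_bits = 12 + 12 - 3 = 21,
   so get_pd_entry indexes the page directory with vptr bits [21..31], while
   get_pt_entry indexes the page table with the nine bits [12..20] via
   (vptr >> 12) && mask 9; these are the same shifts and masks that appear in
   the get_pde/get_pte lemmas below. *)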

text {* The following function is used to extract the
  architecture-specific objects from the kernel heap. *}
definition
  "get_arch_obj ==
   case_option None (%x. case x of ArchObj a \<Rightarrow> Some a | _ \<Rightarrow> None)"
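
(* For example, get_arch_obj (Some (ArchObj (PageDirectory pd))) = Some (PageDirectory pd),
   while get_arch_obj None = None and get_arch_obj (Some (TCB tcb)) = None. *)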

lemma get_pd_entry_None_iff_get_pde_fail:
  "is_aligned pd_ref pd_bits \<Longrightarrow>
   get_pd_entry (\<lambda>obj. get_arch_obj (kheap s obj)) pd_ref vptr = None \<longleftrightarrow>
   get_pde (pd_ref + (vptr >> 21 << 3)) s = ({}, True)"
apply (subgoal_tac "(vptr >> 21 << 3) && ~~ mask pd_bits = 0")
 apply (clarsimp simp add: get_pd_entry_def get_arch_obj_def
            split: option.splits Structures_A.kernel_object.splits
                   arch_kernel_obj.splits)
 apply (clarsimp simp add: get_pde_def get_pd_def bind_def return_def assert_def
  get_object_def simpler_gets_def fail_def split_def mask_out_sub_mask mask_eqs)
 apply (subgoal_tac "pd_ref + (vptr >> 21 << 3) -
                    (pd_ref + (vptr >> 21 << 3) && mask pd_bits) = pd_ref")
  apply (simp (no_asm_simp) add: fail_def return_def)
  apply clarsimp
 apply (simp add: mask_add_aligned pd_bits_def pageBits_def)
apply (simp add: pd_bits_def pageBits_def)
apply (simp add: and_not_mask)
apply (simp add: shiftl_shiftr3 word_size shiftr_shiftr)
apply (subgoal_tac "vptr >> 32 = 0", simp add: pde_bits_def)
apply (cut_tac shiftr_less_t2n'[of vptr 32 0], simp)
 apply (simp add: mask_eq_iff)
 apply (cut_tac lt2p_lem[of 32 vptr])
  apply (cut_tac word_bits_len_of, simp+)
done

lemma get_pd_entry_Some_eq_get_pde:
  "is_aligned pd_ref pd_bits \<Longrightarrow>
   get_pd_entry (\<lambda>obj. get_arch_obj (kheap s obj)) pd_ref vptr = Some x \<longleftrightarrow>
   get_pde (pd_ref + (vptr >> 21 << 3)) s = ({(x,s)}, False)"
apply (subgoal_tac "(vptr >> 21 << 3) && ~~ mask pd_bits = 0")
 apply (clarsimp simp add: get_pd_entry_def get_arch_obj_def
            split: option.splits Structures_A.kernel_object.splits
                   arch_kernel_obj.splits)
 apply (clarsimp simp add: get_pde_def get_pd_def bind_def return_def assert_def
            get_object_def simpler_gets_def fail_def split_def mask_out_sub_mask
            mask_eqs)
 apply (subgoal_tac "pd_ref + (vptr >> 21 << 3) -
                    (pd_ref + (vptr >> 21 << 3) && mask pd_bits) = pd_ref")
  apply (simp (no_asm_simp) add: fail_def return_def)
  apply (clarsimp simp add: mask_add_aligned pd_bits_def pageBits_def)
  apply (cut_tac shiftl_shiftr_id[of 3 "vptr >> 21"])
    apply (simp add: vspace_bits_defs)+
  apply (cut_tac shiftr_less_t2n'[of vptr 21 29])
    apply (simp add: word_bits_def)
   apply (simp add: mask_eq_iff)
   apply (cut_tac lt2p_lem[of 50 vptr])
    apply (cut_tac word_bits_len_of, simp+)
 apply (simp add: mask_add_aligned pd_bits_def pageBits_def)
apply (simp add: pd_bits_def pageBits_def)
apply (simp add: and_not_mask)
apply (simp add: shiftl_shiftr3 word_size shiftr_shiftr)
apply (subgoal_tac "vptr >> 32 = 0", simp add: pde_bits_def)
apply (cut_tac shiftr_less_t2n'[of vptr 32 0], simp)
 apply (simp add: mask_eq_iff)
 apply (cut_tac lt2p_lem[of 32 vptr])
  apply (cut_tac word_bits_len_of, simp+)
done

lemma get_pt_entry_None_iff_get_pte_fail:
  "is_aligned pt_ref pt_bits \<Longrightarrow>
   get_pt_entry (\<lambda>obj. get_arch_obj (kheap s obj)) pt_ref vptr = None \<longleftrightarrow>
   get_pte (pt_ref + ((vptr >> 12) && 0x1FF << 3)) s = ({}, True)"
apply (clarsimp simp add: get_pt_entry_def get_arch_obj_def
             split: option.splits Structures_A.kernel_object.splits
                    arch_kernel_obj.splits)
apply (clarsimp simp add: get_pte_def get_pt_def bind_def return_def assert_def
  get_object_def simpler_gets_def fail_def split_def mask_out_sub_mask mask_eqs)
apply (subgoal_tac "pt_ref + ((vptr >> 12) && 0x1FF << 3) -
                    (pt_ref + ((vptr >> 12) && 0x1FF << 3) && mask pt_bits) =
                    pt_ref")
 apply (simp (no_asm_simp) add: fail_def return_def)
 apply clarsimp
apply (simp add: mask_add_aligned pt_bits_def pageBits_def pte_bits_def)
apply (cut_tac and_mask_shiftl_comm[of 9 3 "vptr >> 12"])
 apply (simp_all add: word_size mask_def AND_twice)
done

lemma get_pt_entry_Some_eq_get_pte:
  "is_aligned pt_ref pt_bits \<Longrightarrow>
   get_pt_entry (\<lambda>obj. get_arch_obj (kheap s obj)) pt_ref vptr = Some x \<longleftrightarrow>
   get_pte (pt_ref + ((vptr >> 12) && mask 9 << 3)) s = ({(x,s)}, False)"
  apply (clarsimp simp add: get_pt_entry_def get_arch_obj_def
             split: option.splits Structures_A.kernel_object.splits
                    arch_kernel_obj.splits)
  apply (clarsimp simp add: get_pte_def get_pt_def bind_def return_def
            assert_def get_object_def simpler_gets_def fail_def split_def
            mask_out_sub_mask mask_eqs)
  apply (subgoal_tac "pt_ref + ((vptr >> 12) && mask 9 << 3) -
                      (pt_ref + ((vptr >> 12) && mask 9 << 3) && mask pt_bits) =
                      pt_ref")
   apply (simp (no_asm_simp) add: fail_def return_def)
   apply (clarsimp simp add: mask_add_aligned vspace_bits_defs
              word_size
              and_mask_shiftr_comm and_mask_shiftl_comm shiftr_shiftr AND_twice)
   apply (cut_tac shiftl_shiftr_id[of 3 "(vptr >> 12)"])
     apply (simp add: vspace_bits_defs)+
   apply (cut_tac shiftr_less_t2n'[of vptr 12 29])
     apply (simp add: vspace_bits_defs)
    apply (simp add: mask_eq_iff)
    apply (cut_tac lt2p_lem[of 32 vptr])
     apply (cut_tac word_bits_len_of, simp+)
  apply (simp add: mask_add_aligned vspace_bits_defs
                   word_size and_mask_shiftl_comm AND_twice)
done

definition
  "get_pt_info ahp pt_ref vptr \<equiv>
   case get_pt_entry ahp pt_ref vptr of
     Some (SmallPagePTE base attrs rights) \<Rightarrow> Some (base, 12, attrs, rights)
   | Some (LargePagePTE base attrs rights) \<Rightarrow> Some (base, 16, attrs, rights)
   | _ \<Rightarrow> None"
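
(* The second component of the result is the page size in bits: a small page
   covers 2^12 bytes (4 KiB), a large page 2^16 bytes (64 KiB). *)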

text {*
  @{text get_page_info} takes the architecture-specific part of the kernel heap,
  a reference to the page directory, and a virtual memory address.
  It returns a tuple containing
  (a) the physical address at which the associated page starts,
  (b) the page's size in bits,
  (c) the page attributes (cachable, XNever, etc.), and
  (d) the access rights (a subset of @{term "{AllowRead, AllowWrite}"}).
*}
definition
  get_page_info :: "(obj_ref \<rightharpoonup> arch_kernel_obj) \<Rightarrow> obj_ref \<Rightarrow>
                    word32 \<rightharpoonup> (word32 \<times> nat \<times> vm_attributes \<times> vm_rights)"
where
  get_page_info_def:
  "get_page_info ahp pd_ref vptr \<equiv>
   if pd_ref = 0 then None else
   case get_pd_entry ahp pd_ref vptr of
     Some (PageTablePDE p) \<Rightarrow>
       get_pt_info ahp (ptrFromPAddr p) vptr
   | Some (SectionPDE base attrs rights) \<Rightarrow> Some (base, 21, attrs, rights)
   | Some (SuperSectionPDE base attrs rights) \<Rightarrow> Some (base, 25, attrs, rights)
   | _ \<Rightarrow> None"
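
(* Worked example with hypothetical values: for pd_ref \<noteq> 0, if
     get_pd_entry ahp pd_ref vptr = Some (PageTablePDE p)  and
     get_pt_entry ahp (ptrFromPAddr p) vptr = Some (SmallPagePTE base attrs rights),
   then get_page_info ahp pd_ref vptr = Some (base, 12, attrs, rights), i.e. a
   2^12-byte page starting at physical address base.  Sections and super
   sections are resolved directly at the page-directory level, with sizes
   2^21 and 2^25 bytes respectively. *)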


(* FIXME: Lemma can be found in Untyped_R;
   proof mostly copied from ArchAcc_R.pd_shifting *)
lemma pd_shifting':
   "is_aligned pd pd_bits \<Longrightarrow>
    (pd + (vptr >> 21 << 3) && ~~ mask pd_bits) = (pd::word32)"
  apply (simp add: pd_bits_def pageBits_def pde_bits_def)
  apply (rule word_eqI[rule_format])
  apply (subst word_plus_and_or_coroll)
   apply (rule word_eqI)
   apply (clarsimp simp: word_size nth_shiftr nth_shiftl is_aligned_nth)
   apply (erule_tac x=na in allE)
   apply (simp add: linorder_not_less)
   apply (drule test_bit_size)+
   apply (simp add: word_size)
  apply (clarsimp simp: word_size nth_shiftr nth_shiftl is_aligned_nth
                        word_ops_nth_size pd_bits_def linorder_not_less)
  apply (rule iffI)
   apply clarsimp
   apply (drule test_bit_size)+
   apply (simp add: word_size)
  apply clarsimp
  apply (erule_tac x=n in allE)
  apply simp
  done

lemma lookup_pt_slot_fail:
  "is_aligned pd pd_bits \<Longrightarrow>
   lookup_pt_slot pd vptr s = ({},True) \<longleftrightarrow>
   (\<forall>pdo. kheap s pd \<noteq> Some (ArchObj (PageDirectory pdo)))"
  apply (frule pd_shifting'[of _ vptr])
by (auto simp add: lookup_pt_slot_def lookup_pd_slot_def liftE_def bindE_def
        returnOk_def lift_def bind_def split_def throwError_def return_def
        get_pde_def get_pd_def Union_eq get_object_def simpler_gets_def
        assert_def fail_def mask_eqs vspace_bits_defs
      split: sum.splits if_split_asm Structures_A.kernel_object.splits
             arch_kernel_obj.splits pde.splits)

(* FIXME: Lemma can be found in ArchAcc_R *)
lemma shiftr_shiftl_mask_pd_bits:
  "(((vptr :: word32) >> 21) << 3) && mask pd_bits = (vptr >> 21) << 3"
  apply (rule iffD2 [OF mask_eq_iff_w2p])
   apply (simp add: vspace_bits_defs word_size)
  apply (rule shiftl_less_t2n)
   apply (simp_all add: vspace_bits_defs word_bits_def word_size)
  apply (cut_tac shiftr_less_t2n'[of vptr 21 11])
    apply simp
   apply (simp add: mask_eq_iff)
  apply simp
  done

lemma lookup_pt_slot_no_fail:
  "is_aligned pd pd_bits \<Longrightarrow>
   kheap s pd = Some (ArchObj (PageDirectory pdo)) \<Longrightarrow>
   lookup_pt_slot pd vptr s =
   (case pdo (ucast (vptr >> 21)) of
      InvalidPDE \<Rightarrow>
        ({(Inl (ExceptionTypes_A.MissingCapability 21),s)},False)
    | PageTablePDE p \<Rightarrow>
        ({(Inr (ptrFromPAddr p + ((vptr >> 12) && 0x1FF << 3)),s)},
         False)
    | SectionPDE _ _ _ \<Rightarrow>
        ({(Inl (ExceptionTypes_A.MissingCapability 21),s)},False)
    | SuperSectionPDE _ _ _ \<Rightarrow>
        ({(Inl (ExceptionTypes_A.MissingCapability 21),s)},False)  )"
  apply (frule pd_shifting'[of _ vptr])
  apply (cut_tac shiftr_shiftl_mask_pd_bits[of vptr])
  apply (subgoal_tac "vptr >> 21 << 3 >> 3 = vptr >> 21")
  defer
   apply (rule shiftl_shiftr_id)
    apply (simp_all add: word_bits_def)
    apply (cut_tac shiftr_less_t2n'[of vptr 21 29])
      apply (simp add: word_bits_def)
     apply (simp add: mask_eq_iff)
     apply (cut_tac lt2p_lem[of 32 vptr])
      apply (cut_tac word_bits_len_of, simp_all)
by (clarsimp simp add: lookup_pt_slot_def lookup_pd_slot_def liftE_def bindE_def
        returnOk_def lift_def bind_def split_def throwError_def return_def
        get_pde_def get_pd_def Union_eq get_object_def simpler_gets_def
        assert_def fail_def mask_add_aligned vspace_bits_defs word_1FF_is_mask
      split: sum.splits if_split_asm kernel_object.splits arch_kernel_obj.splits
             pde.splits)

lemma get_page_info_pte:
  "is_aligned pd_ref pd_bits \<Longrightarrow>
   lookup_pt_slot pd_ref vptr s = ({(Inr x,s)},False) \<Longrightarrow>
   is_aligned (x - ((vptr >> 12) && 0x1FF << 3)) pt_bits \<Longrightarrow>
   get_pte x s = ({(pte,s)},False) \<Longrightarrow>
   get_page_info (\<lambda>obj. get_arch_obj (kheap s obj)) pd_ref vptr =
   (if pd_ref = 0 then None else
    case pte of
     SmallPagePTE base attrs rights \<Rightarrow> Some (base, 12, attrs, rights)
   | LargePagePTE base attrs rights \<Rightarrow> Some (base, 16, attrs, rights)
   | _ \<Rightarrow> None)"
  apply (clarsimp simp add: get_page_info_def get_pd_entry_def
                split: option.splits)
  apply (intro conjI impI allI)
    apply (frule lookup_pt_slot_fail[of _ vptr s],
         clarsimp simp add: get_arch_obj_def)
   apply (frule lookup_pt_slot_fail[of _ vptr s],
        clarsimp simp add: get_arch_obj_def)
  apply (frule lookup_pt_slot_fail[of _ vptr s],
       clarsimp simp add: get_arch_obj_def)
  apply (frule (1) lookup_pt_slot_no_fail[where vptr=vptr])
  apply (clarsimp simp: vspace_bits_defs split: pde.splits option.splits)
  apply (clarsimp simp add: get_pt_info_def split: option.splits)
  apply (intro conjI impI)
   apply (drule get_pt_entry_None_iff_get_pte_fail[simplified vspace_bits_defs, simplified, where s=s and vptr=vptr])
   apply (simp add: pt_bits_def pageBits_def mask_def)
  apply clarsimp
  apply (rename_tac z)
  apply (drule_tac x=z in get_pt_entry_Some_eq_get_pte[simplified vspace_bits_defs, simplified, where s=s and vptr=vptr])
  apply (simp add: pt_bits_def pageBits_def mask_def)
done

lemma get_page_info_section:
  "is_aligned pd_ref pd_bits \<Longrightarrow>
   get_pde (lookup_pd_slot pd_ref vptr) s =
     ({(SectionPDE base attrs rights, s)},False) \<Longrightarrow>
   get_page_info (\<lambda>obj. get_arch_obj (kheap s obj)) pd_ref vptr =
     (if pd_ref = 0 then None else Some (base, 21, attrs, rights))"
  apply (simp add: lookup_pd_slot_def get_page_info_def split: option.splits)
  apply (intro conjI impI allI)
   apply (drule get_pd_entry_None_iff_get_pde_fail[where s=s and vptr=vptr])
   apply (simp add: vspace_bits_defs split: option.splits)
  apply (drule_tac x=x2 in get_pd_entry_Some_eq_get_pde[where s=s and vptr=vptr])
  apply (clarsimp simp: vspace_bits_defs)
done

lemma get_page_info_super_section:
  "is_aligned pd_ref pd_bits \<Longrightarrow>
   get_pde (lookup_pd_slot pd_ref vptr) s =
     ({(SuperSectionPDE base attrs rights,s)},False) \<Longrightarrow>
   get_page_info (\<lambda>obj. get_arch_obj (kheap s obj)) pd_ref vptr =
     (if pd_ref = 0 then None else Some (base, 25, attrs, rights))"
  apply (simp add: lookup_pd_slot_def get_page_info_def split: option.splits)
  apply (intro conjI impI allI)
   apply (drule get_pd_entry_None_iff_get_pde_fail[where s=s and vptr=vptr])
   apply (simp add: vspace_bits_defs split: option.splits)
  apply (drule_tac x=x2 in get_pd_entry_Some_eq_get_pde[where s=s and vptr=vptr])
  apply (clarsimp simp: vspace_bits_defs)
done

text {*
  Both functions, @{text ptable_lift} and @{text ptable_rights},
  take a thread reference, a kernel state, and a virtual address.
  The former returns the physical address, the latter the associated rights.
*}
definition
  ptable_lift :: "obj_ref \<Rightarrow> 'z state \<Rightarrow> word32 \<rightharpoonup> word32" where
  "ptable_lift tcb s \<equiv> \<lambda>addr.
   case_option None (\<lambda>(base, bits, rights). Some (base + (addr && mask bits)))
     (get_page_info (\<lambda>obj. get_arch_obj (kheap s obj))
        (get_pd_of_thread (kheap s) (arch_state s) tcb) addr)"

definition
  ptable_rights :: "obj_ref \<Rightarrow> 'z state \<Rightarrow> word32 \<Rightarrow> vm_rights" where
  "ptable_rights tcb s \<equiv> \<lambda>addr.
   case_option {} (snd o snd o snd)
     (get_page_info (\<lambda>obj. get_arch_obj (kheap s obj))
        (get_pd_of_thread (kheap s) (arch_state s) tcb) addr)"
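
(* Putting the pieces together (hypothetical values): if the page directory of
   thread tcb maps addr via a small page, so that get_page_info returns
   Some (base, 12, attrs, rights), then
     ptable_lift tcb s addr = Some (base + (addr && mask 12))   and
     ptable_rights tcb s addr = rights.
   For instance, base = 0x80210000 and addr && mask 12 = 0x234 give the
   physical address 0x80210234.  If no page is mapped at addr, ptable_lift
   returns None and ptable_rights returns the empty set of rights. *)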

end
end