Lines Matching defs:VmAddressRegion (two simplified, self-contained sketches of the lookup and spot-allocation logic follow the listing)

24 VmAddressRegion::VmAddressRegion(VmAspace& aspace, vaddr_t base, size_t size, uint32_t vmar_flags)
35 VmAddressRegion::VmAddressRegion(VmAddressRegion& parent, vaddr_t base, size_t size,
44-45 VmAddressRegion::VmAddressRegion(VmAspace& kernel_aspace) : VmAddressRegion(kernel_aspace, kernel_aspace.base(), kernel_aspace.size(),
52 VmAddressRegion::VmAddressRegion()
59-60 zx_status_t VmAddressRegion::CreateRoot(VmAspace& aspace, uint32_t vmar_flags, fbl::RefPtr<VmAddressRegion>* out) {
64 auto vmar = new (&ac) VmAddressRegion(aspace, aspace.base(), aspace.size(), vmar_flags);
74 zx_status_t VmAddressRegion::CreateSubVmarInternal(size_t offset, size_t size, uint8_t align_pow2,
171 VmAddressRegion(*this, new_base, size, vmar_flags, name));
193, 195 zx_status_t VmAddressRegion::CreateSubVmar(size_t offset, size_t size, uint8_t align_pow2, ... fbl::RefPtr<VmAddressRegion>* out) {
218 zx_status_t VmAddressRegion::CreateVmMapping(size_t mapping_offset, size_t size, uint8_t align_pow2,
269 zx_status_t VmAddressRegion::OverwriteVmMapping(
299 zx_status_t VmAddressRegion::DestroyLocked() {
306 fbl::RefPtr<VmAddressRegion> cur(this);
310 fbl::RefPtr<VmAddressRegion> child_region = nullptr;
335 VmAddressRegion* cur_parent = cur->parent_;
346 void VmAddressRegion::RemoveSubregion(VmAddressRegionOrMapping* region) {
350 fbl::RefPtr<VmAddressRegionOrMapping> VmAddressRegion::FindRegion(vaddr_t addr) {
358 fbl::RefPtr<VmAddressRegionOrMapping> VmAddressRegion::FindRegionLocked(vaddr_t addr) {
371 size_t VmAddressRegion::AllocatedPagesLocked() const {
386 zx_status_t VmAddressRegion::PageFault(vaddr_t va, uint pf_flags) {
401 bool VmAddressRegion::IsRangeAvailableLocked(vaddr_t base, size_t size) {
434 bool VmAddressRegion::CheckGapLocked(const ChildList::iterator& prev,
518 zx_status_t VmAddressRegion::AllocSpotLocked(size_t size, uint8_t align_pow2, uint arch_mmu_flags,
538 bool VmAddressRegion::EnumerateChildrenLocked(VmEnumerator* ve, uint depth) {
547 VmAddressRegion* up = curr->parent_;
556 VmAddressRegion* vmar = curr->as_vm_address_region().get();
590 bool VmAddressRegion::has_parent() const {
595 void VmAddressRegion::Dump(uint depth, bool verbose) const {
607 void VmAddressRegion::Activate() {
615 zx_status_t VmAddressRegion::Unmap(vaddr_t base, size_t size) {
632 zx_status_t VmAddressRegion::UnmapAllowPartial(vaddr_t base, size_t size) {
649 VmAddressRegion::ChildList::iterator VmAddressRegion::UpperBoundInternalLocked(vaddr_t base) {
662 zx_status_t VmAddressRegion::UnmapInternalLocked(vaddr_t base, size_t size,
702 VmAddressRegion* up = curr->parent_;
738 fbl::RefPtr<VmAddressRegion> vmar = curr->as_vm_address_region();
775 zx_status_t VmAddressRegion::Protect(vaddr_t base, size_t size, uint new_arch_mmu_flags) {
855 zx_status_t VmAddressRegion::LinearRegionAllocatorLocked(size_t size, uint8_t align_pow2,
888 void VmAddressRegion::ForEachGap(F func, uint8_t align_pow2) {
927 zx_status_t VmAddressRegion::NonCompactRandomizedRegionAllocatorLocked(size_t size, uint8_t align_pow2,
998 zx_status_t VmAddressRegion::CompactRandomizedRegionAllocatorLocked(size_t size, uint8_t align_pow2,
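
The matching lines above only show signatures. As a reading aid, the sketch below is a small, self-contained C++17 model, not the Zircon code, of what FindRegionLocked, UpperBoundInternalLocked, and IsRangeAvailableLocked appear to do from their names and parameters: children are kept ordered by base address, a lookup walks back from the first child starting above the address, and a candidate [base, base+size) range is available only if it lies inside the parent region and overlaps no child. Every type and name in the sketch (RegionModel, Child, and so on) is illustrative.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <map>

using vaddr_t = std::uintptr_t;

struct Child { std::size_t size; };                 // child region or mapping, keyed by its base

struct RegionModel {
    vaddr_t base;
    std::size_t size;
    std::map<vaddr_t, Child> children;              // kept sorted by base address

    // Rough analogue of UpperBoundInternalLocked: first child whose base is > addr.
    std::map<vaddr_t, Child>::const_iterator UpperBound(vaddr_t addr) const {
        return children.upper_bound(addr);
    }

    // Rough analogue of FindRegionLocked: the child containing addr, if any.
    const Child* FindRegion(vaddr_t addr) const {
        auto it = UpperBound(addr);
        if (it == children.begin()) return nullptr;
        --it;                                       // last child with base <= addr
        if (addr < it->first + it->second.size) return &it->second;
        return nullptr;
    }

    // Rough analogue of IsRangeAvailableLocked: the range must lie inside this
    // region and must not overlap any existing child.
    bool IsRangeAvailable(vaddr_t range_base, std::size_t range_size) const {
        if (range_base < base || range_base + range_size > base + size) return false;
        auto it = UpperBound(range_base);
        if (it != children.end() && it->first < range_base + range_size) return false;
        if (it != children.begin()) {
            --it;
            if (it->first + it->second.size > range_base) return false;
        }
        return true;
    }
};

int main() {
    RegionModel r{0x1000, 0x10000, {{0x2000, {0x1000}}, {0x8000, {0x2000}}}};
    std::printf("0x2800 mapped: %d\n", static_cast<int>(r.FindRegion(0x2800) != nullptr));        // 1
    std::printf("[0x4000,0x5000) free: %d\n", static_cast<int>(r.IsRangeAvailable(0x4000, 0x1000))); // 1
    std::printf("[0x7800,0x9000) free: %d\n", static_cast<int>(r.IsRangeAvailable(0x7800, 0x1800))); // 0: overlaps the child at 0x8000
}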
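
Similarly, the LinearRegionAllocatorLocked, NonCompactRandomizedRegionAllocatorLocked, and CompactRandomizedRegionAllocatorLocked entries, together with ForEachGap and AllocSpotLocked, suggest how a free spot is chosen inside a region: walk the gaps between existing children, align each candidate up to 1 << align_pow2, and either take the first gap that fits (linear) or pick among the fitting candidates at random (the randomized variants, presumably backing address-space layout randomization). The standalone toy model below illustrates that idea under those assumptions only; it does not reproduce the kernel's compact versus non-compact placement policies, and all helper names are made up.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <map>
#include <optional>
#include <random>
#include <vector>

using vaddr_t = std::uintptr_t;

struct Gap { vaddr_t base; std::size_t size; };

// Rough analogue of ForEachGap: collect the free ranges between children of [base, base+size).
static std::vector<Gap> CollectGaps(vaddr_t base, std::size_t size,
                                    const std::map<vaddr_t, std::size_t>& children) {
    std::vector<Gap> gaps;
    vaddr_t cursor = base;
    for (const auto& [child_base, child_size] : children) {
        if (child_base > cursor) gaps.push_back({cursor, child_base - cursor});
        cursor = child_base + child_size;
    }
    vaddr_t end = base + size;
    if (cursor < end) gaps.push_back({cursor, end - cursor});
    return gaps;
}

static vaddr_t AlignUp(vaddr_t v, uint8_t align_pow2) {
    vaddr_t align = vaddr_t{1} << align_pow2;
    return (v + align - 1) & ~(align - 1);
}

// Rough analogue of LinearRegionAllocatorLocked: first gap with an aligned spot that fits.
static std::optional<vaddr_t> AllocLinear(const std::vector<Gap>& gaps,
                                          std::size_t size, uint8_t align_pow2) {
    for (const Gap& g : gaps) {
        vaddr_t spot = AlignUp(g.base, align_pow2);
        if (spot + size <= g.base + g.size) return spot;
    }
    return std::nullopt;
}

// Very rough stand-in for the randomized allocators: pick uniformly among all
// aligned spots that fit, across every gap.
static std::optional<vaddr_t> AllocRandomized(const std::vector<Gap>& gaps,
                                              std::size_t size, uint8_t align_pow2,
                                              std::mt19937_64& rng) {
    std::vector<vaddr_t> candidates;
    for (const Gap& g : gaps) {
        for (vaddr_t spot = AlignUp(g.base, align_pow2);
             spot + size <= g.base + g.size;
             spot += vaddr_t{1} << align_pow2) {
            candidates.push_back(spot);
        }
    }
    if (candidates.empty()) return std::nullopt;
    std::uniform_int_distribution<std::size_t> pick(0, candidates.size() - 1);
    return candidates[pick(rng)];
}

int main() {
    // Region [0x10000, 0x50000) with two children already placed.
    std::map<vaddr_t, std::size_t> children = {{0x12000, 0x2000}, {0x30000, 0x4000}};
    auto gaps = CollectGaps(0x10000, 0x40000, children);

    if (auto spot = AllocLinear(gaps, 0x3000, /*align_pow2=*/12))
        std::printf("linear spot:     0x%zx\n", static_cast<std::size_t>(*spot));

    std::mt19937_64 rng(0);
    if (auto spot = AllocRandomized(gaps, 0x3000, /*align_pow2=*/12, rng))
        std::printf("randomized spot: 0x%zx\n", static_cast<std::size_t>(*spot));
}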