Lines Matching defs:obj (xnu osfmk/vm/vm_map.c)

3485 			panic("vm_allocate_cpm:  obj %p off 0x%llx no page",
3502 printf("obj %p off 0x%llx\n", cpm_obj, (uint64_t)offset);
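
Lines 3485 and 3502 are from the debug/MACH_ASSERT checks in vm_allocate_cpm(): after the contiguous pages have been inserted into cpm_obj, the range is re-walked, panicking if any offset has no page and dumping the object/offset when the physical pages turn out not to be contiguous. A minimal sketch of that kind of check, assuming size and cpm_obj are in scope and using the classic vm_page phys_page field; this is an illustration, not the verbatim xnu loop:

	vm_object_offset_t	offset;
	vm_page_t		m;
	ppnum_t			prev_page = 0;

	for (offset = 0; offset < size; offset += PAGE_SIZE) {
		vm_object_lock(cpm_obj);
		m = vm_page_lookup(cpm_obj, offset);
		vm_object_unlock(cpm_obj);

		/* every offset of a CPM object must be backed by a page */
		if (m == VM_PAGE_NULL)
			panic("vm_allocate_cpm:  obj %p off 0x%llx no page",
			      cpm_obj, (uint64_t)offset);

		/* and the backing pages must be physically contiguous */
		if (offset != 0 && m->phys_page != prev_page + 1) {
			printf("obj %p off 0x%llx\n", cpm_obj, (uint64_t)offset);
			panic("vm_allocate_cpm:  pages not contig!");
		}
		prev_page = m->phys_page;
	}
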
8889 XPR(XPR_VM_MAP, "vm_map_copyin_common src_obj 0x%x ent 0x%x obj 0x%x was_wired %d\n",
11128 #define OBJ_RESIDENT_COUNT(obj, entry_size) \
11130 ((obj)->all_reusable ? \
11131 (obj)->wired_page_count : \
11132 (obj)->resident_page_count - (obj)->reusable_page_count))
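
Lines 11128-11132 are the OBJ_RESIDENT_COUNT() macro, but the listing shows only the lines containing "obj", so the intermediate line (~11129) is elided and the parentheses above look unbalanced. Given the macro's (obj, entry_size) parameters, the missing line presumably clamps the count to the entry size; a reconstruction consistent with the fragments shown (one line is inferred):

#define OBJ_RESIDENT_COUNT(obj, entry_size)				\
	MIN((entry_size),						\
	    ((obj)->all_reusable ?					\
	     (obj)->wired_page_count :					\
	     (obj)->resident_page_count - (obj)->reusable_page_count))

In other words, an object's resident-page charge for a map entry is capped at the entry's size in pages, and an all_reusable object is charged only for its wired pages.
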
11148 struct vm_object *obj, *tmp_obj;
11154 obj = entry->object.vm_object;
11156 vm_object_lock(obj);
11158 if ((ref_count = obj->ref_count) > 1 && obj->paging_in_progress)
11161 assert(obj->reusable_page_count <= obj->resident_page_count);
11162 if (obj->shadow) {
11165 OBJ_RESIDENT_COUNT(obj, entry_size);
11168 OBJ_RESIDENT_COUNT(obj, entry_size);
11172 while ((tmp_obj = obj->shadow)) {
11174 vm_object_unlock(obj);
11175 obj = tmp_obj;
11177 if ((ref_count = obj->ref_count) > 1 && obj->paging_in_progress)
11180 assert(obj->reusable_page_count <= obj->resident_page_count);
11182 OBJ_RESIDENT_COUNT(obj, entry_size);
11193 OBJ_RESIDENT_COUNT(obj, entry_size);
11196 (ref_count == 2 && !(obj->pager_trusted) && !(obj->internal))) {
11199 OBJ_RESIDENT_COUNT(obj,
11204 OBJ_RESIDENT_COUNT(obj,
11211 top->obj_id = (unsigned int) (uintptr_t)VM_KERNEL_ADDRPERM(obj);
11213 vm_object_unlock(obj);
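
Lines 11148-11213 are fragments of the per-entry "top" walk (vm_map_region_top_walk-style code): lock the entry's top object, discount a reference held only for paging_in_progress, and, when the object has a shadow, follow the COW shadow chain charging each level's OBJ_RESIDENT_COUNT() to the shared total before reporting a permuted object id. A condensed sketch of how the fragments fit together, using the vm_region_top_info fields and SM_* constants from <mach/vm_region.h>; it is a reconstruction that omits the needs_copy and superpage cases, not the verbatim function:

	obj = entry->object.vm_object;
	vm_object_lock(obj);

	/* a ref held only for paging in progress is not a real sharer */
	if ((ref_count = obj->ref_count) > 1 && obj->paging_in_progress)
		ref_count--;

	assert(obj->reusable_page_count <= obj->resident_page_count);

	if (obj->shadow) {
		/* COW chain: the top level is private iff we hold its only ref */
		if (ref_count == 1)
			top->private_pages_resident =
				OBJ_RESIDENT_COUNT(obj, entry_size);
		else
			top->shared_pages_resident =
				OBJ_RESIDENT_COUNT(obj, entry_size);
		top->ref_count = ref_count;
		top->share_mode = SM_COW;

		while ((tmp_obj = obj->shadow)) {
			vm_object_lock(tmp_obj);
			vm_object_unlock(obj);
			obj = tmp_obj;

			if ((ref_count = obj->ref_count) > 1 &&
			    obj->paging_in_progress)
				ref_count--;

			assert(obj->reusable_page_count <=
			       obj->resident_page_count);
			top->shared_pages_resident +=
				OBJ_RESIDENT_COUNT(obj, entry_size);
			top->ref_count += ref_count - 1;
		}
	} else {
		/* no shadow: private if we (plus an untrusted pager) own it */
		if (ref_count == 1 ||
		    (ref_count == 2 && !(obj->pager_trusted) &&
		     !(obj->internal))) {
			top->share_mode = SM_PRIVATE;
			top->private_pages_resident =
				OBJ_RESIDENT_COUNT(obj, entry_size);
		} else {
			top->share_mode = SM_SHARED;
			top->shared_pages_resident =
				OBJ_RESIDENT_COUNT(obj, entry_size);
		}
		top->ref_count = ref_count;
	}

	top->obj_id = (unsigned int) (uintptr_t)VM_KERNEL_ADDRPERM(obj);
	vm_object_unlock(obj);
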
11228 register struct vm_object *obj, *tmp_obj;
11255 obj = entry->object.vm_object;
11257 vm_object_lock(obj);
11259 if ((ref_count = obj->ref_count) > 1 && obj->paging_in_progress)
11266 vm_map_region_look_for_page(map, va, obj,
11271 shadow_object = obj->shadow;
11274 if ( !(obj->pager_trusted) && !(obj->internal))
11305 if (obj->true_share)
11314 if ((tmp_obj = obj->shadow) == 0)
11317 vm_object_unlock(obj);
11323 obj = tmp_obj;
11325 vm_object_unlock(obj);
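
Lines 11228-11325 are fragments of the extended region walk: with the entry's object locked and the paging_in_progress reference discounted, either each page of the range is handed to vm_map_region_look_for_page() or, when page detail is not wanted, only the shadow chain is measured so that share_mode, shadow_depth, and external_pager can be reported. A compressed sketch of the non-page path, assuming the vm_region_extended_info fields from <mach/vm_region.h>; the per-page call is only referenced in a comment and lock handoff along the chain is elided:

	struct vm_object *obj, *tmp_obj, *shadow_object;
	int ref_count, shadow_depth, i;

	obj = entry->object.vm_object;
	vm_object_lock(obj);

	if ((ref_count = obj->ref_count) > 1 && obj->paging_in_progress)
		ref_count--;

	/*
	 * When page-level detail is wanted, each page of the range would be
	 * fed to vm_map_region_look_for_page() instead of the scan below.
	 */
	if (!(obj->pager_trusted) && !(obj->internal))
		extended->external_pager = 1;
	shadow_depth = 0;
	for (shadow_object = obj->shadow;
	     shadow_object != VM_OBJECT_NULL;
	     shadow_object = shadow_object->shadow)
		shadow_depth++;
	extended->shadow_depth = shadow_depth;

	if (extended->shadow_depth || entry->needs_copy)
		extended->share_mode = SM_COW;
	else if (ref_count == 1)
		extended->share_mode = SM_PRIVATE;
	else
		extended->share_mode = obj->true_share ? SM_TRUESHARED
						       : SM_SHARED;
	extended->ref_count = ref_count - extended->shadow_depth;

	/* add in the refs held by each level of the shadow chain */
	for (i = 0; i < extended->shadow_depth; i++) {
		if ((tmp_obj = obj->shadow) == 0)
			break;
		vm_object_lock(tmp_obj);
		vm_object_unlock(obj);
		if ((ref_count = tmp_obj->ref_count) > 1 &&
		    tmp_obj->paging_in_progress)
			ref_count--;
		extended->ref_count += ref_count;
		obj = tmp_obj;
	}
	vm_object_unlock(obj);
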
11332 obj = entry->object.vm_object;
11336 if ((ref_count = obj->ref_count) > 1 && obj->paging_in_progress)
11339 my_refs += vm_map_region_count_obj_refs(cur, obj);
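
Lines 11332-11339 are the follow-up pass that decides whether a shared object is shared only within this map: the entry's object is taken again, paging_in_progress is discounted as before, and then the map's entries are scanned, totalling vm_map_region_count_obj_refs(cur, obj) into my_refs. If every reference to the object is accounted for by this map's own entries, the mapping is reported as privately aliased rather than shared. A short sketch of that accumulation, using SM_PRIVATE_ALIASED / SM_SHARED_ALIASED from <mach/vm_region.h> (the condition that triggers this pass and the locking are simplified):

	int		my_refs = 0;
	vm_map_entry_t	cur;

	obj = entry->object.vm_object;

	if ((ref_count = obj->ref_count) > 1 && obj->paging_in_progress)
		ref_count--;

	/* total up how many entries in this map reference the same object */
	for (cur = vm_map_first_entry(map);
	     cur != vm_map_to_entry(map);
	     cur = cur->vme_next)
		my_refs += vm_map_region_count_obj_refs(cur, obj);

	if (my_refs == ref_count)
		extended->share_mode = SM_PRIVATE_ALIASED; /* all refs are ours */
	else if (my_refs > 1)
		extended->share_mode = SM_SHARED_ALIASED;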