Lines Matching refs:kvm_memory_slot

267 struct kvm_memory_slot *slot;
398 struct kvm_memory_slot *last_used_slot;
584 struct kvm_memory_slot {
605 static inline bool kvm_slot_can_be_private(const struct kvm_memory_slot *slot)
610 static inline bool kvm_slot_dirty_track_enabled(const struct kvm_memory_slot *slot)
615 static inline unsigned long kvm_dirty_bitmap_bytes(struct kvm_memory_slot *memslot)
620 static inline unsigned long *kvm_second_dirty_bitmap(struct kvm_memory_slot *memslot)
1059 struct kvm_memory_slot *id_to_memslot(struct kvm_memslots *slots, int id)
1061 struct kvm_memory_slot *slot;
1076 struct kvm_memory_slot *slot;
1085 iter->slot = container_of(iter->node, struct kvm_memory_slot, gfn_node[iter->slots->node_idx]);
1094 struct kvm_memory_slot *slot;
1104 slot = container_of(tmp, struct kvm_memory_slot, gfn_node[idx]);
1132 iter->slot = container_of(iter->node, struct kvm_memory_slot, gfn_node[idx]);
1188 void kvm_arch_free_memslot(struct kvm *kvm, struct kvm_memory_slot *slot);
1191 const struct kvm_memory_slot *old,
1192 struct kvm_memory_slot *new,
1195 struct kvm_memory_slot *old,
1196 const struct kvm_memory_slot *new,
1202 struct kvm_memory_slot *slot);
1204 int gfn_to_page_many_atomic(struct kvm_memory_slot *slot, gfn_t gfn,
1210 unsigned long gfn_to_hva_memslot(struct kvm_memory_slot *slot, gfn_t gfn);
1211 unsigned long gfn_to_hva_memslot_prot(struct kvm_memory_slot *slot, gfn_t gfn,
1219 kvm_pfn_t gfn_to_pfn_memslot(const struct kvm_memory_slot *slot, gfn_t gfn);
1220 kvm_pfn_t gfn_to_pfn_memslot_atomic(const struct kvm_memory_slot *slot, gfn_t gfn);
1221 kvm_pfn_t __gfn_to_pfn_memslot(const struct kvm_memory_slot *slot, gfn_t gfn,
1294 struct kvm_memory_slot *gfn_to_memslot(struct kvm *kvm, gfn_t gfn);
1298 void mark_page_dirty_in_slot(struct kvm *kvm, const struct kvm_memory_slot *memslot, gfn_t gfn);
1302 struct kvm_memory_slot *kvm_vcpu_gfn_to_memslot(struct kvm_vcpu *vcpu, gfn_t gfn);
1438 const struct kvm_memory_slot *memslot);
1462 struct kvm_memory_slot *slot,
1465 void kvm_arch_sync_dirty_log(struct kvm *kvm, struct kvm_memory_slot *memslot);
1470 int *is_dirty, struct kvm_memory_slot **memslot);
1701 static inline struct kvm_memory_slot *
1702 try_get_memslot(struct kvm_memory_slot *slot, gfn_t gfn)
1720 static inline struct kvm_memory_slot *
1723 struct kvm_memory_slot *slot;
1729 slot = container_of(node, struct kvm_memory_slot, gfn_node[idx]);
1741 static inline struct kvm_memory_slot *
1744 struct kvm_memory_slot *slot;
1746 slot = (struct kvm_memory_slot *)atomic_long_read(&slots->last_used_slot);
1765 static inline struct kvm_memory_slot *
1772 __gfn_to_hva_memslot(const struct kvm_memory_slot *slot, gfn_t gfn)
1791 hva_to_gfn_memslot(unsigned long hva, struct kvm_memory_slot *slot)
2288 static inline bool kvm_is_visible_memslot(struct kvm_memory_slot *memslot)
2436 int kvm_gmem_get_pfn(struct kvm *kvm, struct kvm_memory_slot *slot,
2440 struct kvm_memory_slot *slot, gfn_t gfn,
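
The declarations above appear to come from KVM's kvm_host.h header. As a rough illustration of how a few of them fit together, the sketch below resolves a guest frame number (gfn) to a host virtual address through its memslot and records the write in the slot's dirty bitmap. The function name example_gfn_to_hva_and_dirty is hypothetical; gfn_to_memslot, gfn_to_hva_memslot, mark_page_dirty_in_slot, kvm_is_error_hva and KVM_HVA_ERR_BAD are existing KVM helpers (referenced at lines 1294, 1210 and 1298 of the listing), and the caller is assumed to hold the kvm->srcu read lock, as memslot lookups normally require.

/*
 * Minimal sketch, not part of the listing above: walk from a gfn to a
 * host virtual address and mark the page dirty.  The function name is
 * hypothetical; the caller is assumed to hold kvm->srcu for reading.
 */
#include <linux/kvm_host.h>

static unsigned long example_gfn_to_hva_and_dirty(struct kvm *kvm, gfn_t gfn)
{
	struct kvm_memory_slot *slot;
	unsigned long hva;

	/* Look up the memslot covering this guest frame (line 1294 above). */
	slot = gfn_to_memslot(kvm, gfn);
	if (!slot)
		return KVM_HVA_ERR_BAD;

	/* Translate the gfn to a host virtual address within that slot (line 1210). */
	hva = gfn_to_hva_memslot(slot, gfn);
	if (kvm_is_error_hva(hva))
		return hva;

	/* Record the access in the slot's dirty bitmap, if dirty tracking is enabled (line 1298). */
	mark_page_dirty_in_slot(kvm, slot, gfn);

	return hva;
}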