Searched refs:gpa (Results 76 - 100 of 119) sorted by relevance


/linux-master/arch/arm64/kvm/vgic/
vgic.h
136 static inline int vgic_write_guest_lock(struct kvm *kvm, gpa_t gpa, argument
143 ret = kvm_write_guest_lock(kvm, gpa, data, len);
/linux-master/arch/powerpc/include/asm/
kvm_book3s.h
164 unsigned long gpa, gva_t ea, int is_store);
196 extern void kvmppc_unmap_pte(struct kvm *kvm, pte_t *pte, unsigned long gpa,
201 bool writing, unsigned long gpa,
204 unsigned long gpa,
237 extern kvm_pfn_t kvmppc_gpa_to_pfn(struct kvm_vcpu *vcpu, gpa_t gpa,
250 unsigned long gpa, bool dirty);
kvm_book3s_64.h
626 unsigned long gpa, unsigned int level,
636 unsigned long gpa, unsigned long hpa,
/linux-master/arch/powerpc/kvm/
book3s_hv_rm_mmu.c
188 unsigned long i, pa, gpa, gfn, psize; local
226 gpa = (ptel & HPTE_R_RPN) & ~(psize - 1);
227 gfn = gpa >> PAGE_SHIFT;
274 pa |= gpa & ~PAGE_MASK;
884 unsigned long gpa, int writing, unsigned long *hpa,
894 gfn = gpa >> PAGE_SHIFT;
915 pa |= gpa & ~PAGE_MASK;
883 kvmppc_get_hpa(struct kvm_vcpu *vcpu, unsigned long mmu_seq, unsigned long gpa, int writing, unsigned long *hpa, struct kvm_memory_slot **memslot_p) argument
book3s.c
424 kvm_pfn_t kvmppc_gpa_to_pfn(struct kvm_vcpu *vcpu, gpa_t gpa, bool writing, argument
428 gfn_t gfn = gpa >> PAGE_SHIFT;
434 gpa &= ~0xFFFULL;
435 if (unlikely(mp_pa) && unlikely((gpa & KVM_PAM) == mp_pa)) {
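Several of these PowerPC snippets repeat the same address arithmetic: shift a guest physical address right by PAGE_SHIFT to get the guest frame number (gfn), and mask with ~PAGE_MASK to get the byte offset inside the page. A minimal standalone sketch of that split, assuming 4 KiB pages; the example address is arbitrary and not taken from the listing:

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))

int main(void)
{
	uint64_t gpa = 0x123456789ULL;        /* arbitrary example guest physical address */
	uint64_t gfn = gpa >> PAGE_SHIFT;     /* guest frame number, as in gfn = gpa >> PAGE_SHIFT */
	uint64_t off = gpa & ~PAGE_MASK;      /* offset within the page, as in pa |= gpa & ~PAGE_MASK */

	printf("gpa=%#llx gfn=%#llx offset=%#llx\n",
	       (unsigned long long)gpa,
	       (unsigned long long)gfn,
	       (unsigned long long)off);
	return 0;
}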
book3s_hv.c
682 unsigned long gpa; local
693 gpa = vpap->next_gpa;
697 if (gpa)
698 va = kvmppc_pin_guest_page(kvm, gpa, &nb);
700 if (gpa == vpap->next_gpa)
704 kvmppc_unpin_guest_page(kvm, va, gpa, false);
714 kvmppc_unpin_guest_page(kvm, va, gpa, false);
719 vpap->gpa = gpa;
742 kvmppc_unpin_guest_page(kvm, old_vpa.pinned_addr, old_vpa.gpa,
[all...]
book3s_pr.c
667 static bool kvmppc_visible_gpa(struct kvm_vcpu *vcpu, gpa_t gpa) argument
674 gpa &= ~0xFFFULL;
675 if (unlikely(mp_pa) && unlikely((mp_pa & KVM_PAM) == (gpa & KVM_PAM))) {
679 return kvm_is_visible_gfn(vcpu->kvm, gpa >> PAGE_SHIFT);
/linux-master/arch/x86/kvm/vmx/
nested.c
733 if (ghc->gpa != vmcs12->vmcs_link_pointer &&
752 if (ghc->gpa != vmcs12->vmcs_link_pointer &&
960 static u32 nested_vmx_load_msr(struct kvm_vcpu *vcpu, u64 gpa, u32 count) argument
970 if (kvm_vcpu_read_guest(vcpu, gpa + i * sizeof(e),
974 __func__, i, gpa + i * sizeof(e));
1027 static bool read_and_check_msr_entry(struct kvm_vcpu *vcpu, u64 gpa, int i, argument
1031 gpa + i * sizeof(*e),
1035 __func__, i, gpa + i * sizeof(*e));
1047 static int nested_vmx_store_msr(struct kvm_vcpu *vcpu, u64 gpa, u32 count) argument
1058 if (!read_and_check_msr_entry(vcpu, gpa,
1081 u64 gpa = vmcs12->vm_exit_msr_store_addr; local
3372 nested_vmx_write_pml_buffer(struct kvm_vcpu *vcpu, gpa_t gpa) argument
4687 gpa_t gpa; local
5736 u64 eptp, gpa; member in struct:__anon35
[all...]
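The nested VMX snippets above walk MSR lists that the guest places in its own memory: entry i lives at gpa + i * sizeof(entry) and is fetched with kvm_vcpu_read_guest(). A simplified, kernel-context sketch of that loop; this is not the actual nested_vmx_load_msr() implementation, and validation plus the MSR write itself are elided:

/* Sketch only: assumes kernel/KVM context (struct kvm_vcpu, struct vmx_msr_entry,
 * kvm_vcpu_read_guest() as seen in the snippets above); not buildable standalone.
 */
static int walk_guest_msr_list_sketch(struct kvm_vcpu *vcpu, u64 gpa, u32 count)
{
	struct vmx_msr_entry e;
	u32 i;

	for (i = 0; i < count; i++) {
		/* entry i of the guest-provided array starts at gpa + i * sizeof(e) */
		if (kvm_vcpu_read_guest(vcpu, gpa + i * sizeof(e), &e, sizeof(e)))
			return i + 1;	/* report which entry failed, mirroring the VMX code */
		/* ... validate e.index/e.value and load the MSR here ... */
	}
	return 0;
}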
/linux-master/drivers/gpu/drm/i915/gvt/
gtt.c
303 unsigned long index, bool hypervisor_access, unsigned long gpa,
313 ret = intel_gvt_read_gpa(vgpu, gpa +
328 unsigned long index, bool hypervisor_access, unsigned long gpa,
338 ret = intel_gvt_write_gpa(vgpu, gpa +
785 u64 gpa, void *data, int bytes)
794 ret = ppgtt_handle_guest_write_page_table_bytes(spt, gpa, data, bytes);
2075 unsigned long gpa = INTEL_GVT_INVALID_ADDR; local
2091 gpa = (pte_ops->get_pfn(&e) << I915_GTT_PAGE_SHIFT)
2094 trace_gma_translate(vgpu->id, "ggtt", 0, 0, gma, gpa);
2118 /* walk the shadow page table and get gpa fro
301 gtt_get_entry64(void *pt, struct intel_gvt_gtt_entry *e, unsigned long index, bool hypervisor_access, unsigned long gpa, struct intel_vgpu *vgpu) argument
326 gtt_set_entry64(void *pt, struct intel_gvt_gtt_entry *e, unsigned long index, bool hypervisor_access, unsigned long gpa, struct intel_vgpu *vgpu) argument
783 ppgtt_write_protection_handler( struct intel_vgpu_page_track *page_track, u64 gpa, void *data, int bytes) argument
[all...]
scheduler.c
136 unsigned long gpa_base; /* first gpa of consecutive GPAs */
190 gvt_dbg_sched("ring %s workload lrca %x, ctx_id %x, ctx gpa %llx",
909 u64 gpa; local
912 gpa = ring_context_gpa + RING_CTX_OFF(pdps[0].val);
915 intel_gvt_write_gpa(vgpu, gpa + i * 8, &pdp[7 - i], 4);
945 unsigned long gpa_base; /* first gpa of consecutive GPAs */
1570 u64 gpa; local
1573 gpa = ring_context_gpa + RING_CTX_OFF(pdps[0].val);
1577 gpa + i * 8, &pdp[7 - i], 4);
kvmgt.c
109 static void kvmgt_page_track_write(gpa_t gpa, const u8 *val, int len,
1581 static void kvmgt_page_track_write(gpa_t gpa, const u8 *val, int len, argument
1589 if (kvmgt_gfn_is_write_protected(info, gpa >> PAGE_SHIFT))
1590 intel_vgpu_page_track_handler(info, gpa,
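kvmgt's write notifier above shows the page-tracking dispatch pattern: the notifier receives the gpa of a guest write, reduces it to a gfn, and only forwards writes that hit a write-protected (tracked) page. A kernel-context sketch of that shape; is_tracked_gfn_sketch() and handle_tracked_write_sketch() are hypothetical placeholders for the checks kvmgt actually performs:

/* Sketch only: kernel context assumed; the two helper names are hypothetical. */
static void page_track_write_sketch(struct intel_vgpu *vgpu, gpa_t gpa,
				    const u8 *val, int len)
{
	gfn_t gfn = gpa >> PAGE_SHIFT;		/* page frame containing the write */

	if (is_tracked_gfn_sketch(vgpu, gfn))	/* only tracked pages are forwarded */
		handle_tracked_write_sketch(vgpu, gpa, val, len);
}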
/linux-master/include/asm-generic/
hyperv-tlfs.h
858 u64 gpa; member in struct:hv_mmio_read_input
868 u64 gpa; member in struct:hv_mmio_write_input
/linux-master/arch/x86/kvm/
x86.h
266 static inline bool vcpu_match_mmio_gpa(struct kvm_vcpu *vcpu, gpa_t gpa) argument
269 vcpu->arch.mmio_gfn == gpa >> PAGE_SHIFT)
/linux-master/arch/x86/kvm/mmu/
mmu.c
303 u64 gpa = spte & shadow_nonpresent_or_rsvd_lower_gfn_mask; local
305 gpa |= (spte >> SHADOW_NONPRESENT_OR_RSVD_MASK_LEN)
308 return gpa >> PAGE_SHIFT;
2737 gpa_t gpa; local
2743 gpa = kvm_mmu_gva_to_gpa_read(vcpu, gva, NULL);
2745 r = kvm_mmu_unprotect_page(vcpu->kvm, gpa >> PAGE_SHIFT);
3390 * gpa, and sets *spte to the spte value. This spte may be non-present. If no
3397 static u64 *fast_pf_get_last_sptep(struct kvm_vcpu *vcpu, gpa_t gpa, u64 *spte) argument
3403 for_each_shadow_entry_lockless(vcpu, gpa, iterator, old_spte) {
5702 static u64 mmu_pte_write_fetch_gpte(struct kvm_vcpu *vcpu, gpa_t *gpa, argument
5749 detect_write_misaligned(struct kvm_mmu_page *sp, gpa_t gpa, int bytes) argument
5770 get_written_sptes(struct kvm_mmu_page *sp, gpa_t gpa, int *nspte) argument
5801 kvm_mmu_track_write(struct kvm_vcpu *vcpu, gpa_t gpa, const u8 *new, int bytes) argument
5932 kvm_mmu_print_sptes(struct kvm_vcpu *vcpu, gpa_t gpa, const char *msg) argument
[all...]
paging_tmpl.h
873 gpa_t gpa = INVALID_GPA; local
884 gpa = gfn_to_gpa(walker.gfn);
885 gpa |= addr & ~PAGE_MASK;
889 return gpa;
/linux-master/kernel/rcu/
tree_stall.h
595 unsigned long gpa; local
645 gpa = data_race(READ_ONCE(rcu_state.gp_activity));
647 rcu_state.name, j - gpa, j, gpa,
/linux-master/arch/loongarch/include/asm/
kvm_host.h
236 void kvm_flush_tlb_gpa(struct kvm_vcpu *vcpu, unsigned long gpa);
/linux-master/tools/testing/selftests/kvm/x86_64/
xen_shinfo_test.c
524 .u.gpa = VCPU_INFO_ADDR,
530 .u.gpa = PVTIME_ADDR,
545 .u.gpa = RUNSTATE_ADDR,
1098 .u.gpa = runstate_addr,
/linux-master/arch/s390/kvm/
kvm-s390.c
4580 * @gpa: Guest physical address
4587 long kvm_arch_fault_in_page(struct kvm_vcpu *vcpu, gpa_t gpa, int writable) argument
4589 return gmap_fault(vcpu->arch.gmap, gpa,
5104 int kvm_s390_store_status_unloaded(struct kvm_vcpu *vcpu, unsigned long gpa) argument
5113 if (gpa == KVM_S390_STORE_STATUS_NOADDR) {
5116 gpa = 0;
5117 } else if (gpa == KVM_S390_STORE_STATUS_PREFIXED) {
5120 gpa = px;
5122 gpa -= __LC_FPREGS_SAVE_AREA;
5127 rc = write_guest_abs(vcpu, gpa
[all...]
priv.c
1473 unsigned long gpa; local
1490 ret = guest_translate_address_with_key(vcpu, address, ar, &gpa,
1493 gfn_to_hva_prot(vcpu->kvm, gpa_to_gfn(gpa), &writable);
1497 ret = guest_translate_address_with_key(vcpu, address, ar, &gpa,
/linux-master/arch/mips/kvm/
vz.c
199 /* VZ guest has already converted gva to gpa */
681 * @gpa: Output guest physical address.
690 unsigned long *gpa)
742 *gpa = pa;
746 *gpa = gva32 & 0x1fffffff;
772 *gpa = gva & 0x07ffffffffffffff;
778 return kvm_vz_guest_tlb_lookup(vcpu, gva, gpa);
785 * @gpa: Output guest physical address.
797 unsigned long *gpa)
804 *gpa
689 kvm_vz_gva_to_gpa(struct kvm_vcpu *vcpu, unsigned long gva, unsigned long *gpa) argument
796 kvm_vz_badvaddr_to_gpa(struct kvm_vcpu *vcpu, unsigned long badvaddr, unsigned long *gpa) argument
[all...]
/linux-master/arch/arm64/kvm/
mmu.c
978 gpa_t gpa = addr + (vm_start - memslot->userspace_addr); local
979 unmap_stage2_range(&kvm->arch.mmu, gpa, vm_end - vm_start);
2033 gpa_t gpa = slot->base_gfn << PAGE_SHIFT; local
2037 unmap_stage2_range(&kvm->arch.mmu, gpa, size);
/linux-master/tools/testing/selftests/kvm/include/x86_64/
processor.h
1294 static inline uint64_t __kvm_hypercall_map_gpa_range(uint64_t gpa, argument
1297 return kvm_hypercall(KVM_HC_MAP_GPA_RANGE, gpa, size >> PAGE_SHIFT, flags, 0);
1300 static inline void kvm_hypercall_map_gpa_range(uint64_t gpa, uint64_t size, argument
1303 uint64_t ret = __kvm_hypercall_map_gpa_range(gpa, size, flags);
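These selftest helpers wrap the KVM_HC_MAP_GPA_RANGE hypercall, passing the start gpa and the size converted to a page count. A guest-side usage sketch under the selftest framework shown above; TEST_REGION_GPA, TEST_REGION_SIZE, and the zero flags value are illustrative placeholders, not values from the listing:

/* Sketch only: assumes it runs as guest code in a KVM selftest that includes
 * the processor.h helpers above.  Address, size, and flags are placeholders.
 */
#define TEST_REGION_GPA		(1ULL << 30)	/* hypothetical guest address */
#define TEST_REGION_SIZE	(16ULL << 12)	/* 16 pages */

static void guest_map_gpa_range_sketch(void)
{
	uint64_t flags = 0;	/* placeholder; real flag bits come from the kvm_para UAPI */

	/* ask the host to act on [TEST_REGION_GPA, TEST_REGION_GPA + TEST_REGION_SIZE) */
	kvm_hypercall_map_gpa_range(TEST_REGION_GPA, TEST_REGION_SIZE, flags);
}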
/linux-master/drivers/misc/sgi-gru/
grukservices.c
905 int gru_read_gpa(unsigned long *value, unsigned long gpa) argument
914 iaa = gpa >> 62;
915 gru_vload_phys(cb, gpa, gru_get_tri(dsr), iaa, IMA);
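gru_read_gpa() above derives iaa from the top two bits of the 64-bit gpa before issuing the physical load. A standalone sketch of that bit split; the example value is arbitrary:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t gpa = (2ULL << 62) | 0x1000;		/* arbitrary example: top two bits = 2 */
	unsigned int iaa = gpa >> 62;			/* same shift as iaa = gpa >> 62 above */
	uint64_t rest = gpa & ((1ULL << 62) - 1);	/* remaining address bits */

	printf("iaa=%u rest=%#llx\n", iaa, (unsigned long long)rest);
	return 0;
}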
/linux-master/arch/x86/kernel/
sev.c
2266 u64 gpa; local
2271 gpa = get_secrets_page();
2272 if (!gpa)
2275 data.secrets_gpa = gpa;

Completed in 386 milliseconds
