Searched refs:vgic_cpu (Results 1 - 17 of 17) sorted by relevance

/linux-master/arch/arm64/kvm/vgic/
vgic.c
31 * vgic_cpu->ap_list_lock must be taken with IRQs disabled
49 * raw_spin_lock(vcpuX->arch.vgic_cpu.ap_list_lock);
50 * raw_spin_lock(vcpuY->arch.vgic_cpu.ap_list_lock);
89 return &vcpu->arch.vgic_cpu.private_irqs[intid];
135 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
139 raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
141 list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
151 raw_spin_unlock_irqrestore(&vgic_cpu
282 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
617 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
756 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
781 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
955 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
[all...]
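
The vgic.c matches above capture the core locking rule for the per-vCPU active/pending list: ap_list_lock is taken with IRQs disabled, and when two vCPUs' locks nest they are taken in a fixed order (lines 49-50). A minimal sketch of the walk pattern, assuming kernel context and the types referenced in the listing (the helper name is made up for illustration):

/*
 * Illustrative only: walks the active/pending list the way the matches
 * above do. ap_list_lock is held with IRQs disabled, and the _safe
 * iterator is used because entries may be unlinked while walking.
 */
static void walk_ap_list_sketch(struct kvm_vcpu *vcpu)
{
        struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
        struct vgic_irq *irq, *tmp;
        unsigned long flags;

        raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

        list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
                /* inspect or prune the queued interrupt here */
        }

        raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
}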
vgic-v2.c
31 struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2;
51 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
52 struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;
59 for (lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++) {
197 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val;
202 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0;
207 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
234 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu
[all...]
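
The vgic-v2.c matches show the shadow list-register array of the v2 CPU interface being walked. A short sketch of that loop, assuming kernel context (the function name is illustrative):

/*
 * Illustrative only: iterates the LRs the way vgic-v2.c:51-59 does.
 * vgic_v2.vgic_lr[] holds the software copies that the hyp code
 * programs into the hardware list registers.
 */
static void walk_v2_lrs_sketch(struct kvm_vcpu *vcpu)
{
        struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2;
        int lr;

        for (lr = 0; lr < cpuif->used_lrs; lr++) {
                /* fold cpuif->vgic_lr[lr] back into the software model ... */
                cpuif->vgic_lr[lr] = 0; /* then clear the consumed copy */
        }
}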
vgic-mmio-v3.c
240 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
242 return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS;
248 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
251 val = atomic_read(&vgic_cpu->ctlr);
262 struct vgic_cpu *vgic_cpu local
295 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
483 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
495 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
750 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
[all...]
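
The vgic-mmio-v3.c matches around lines 240-251 show the redistributor CTLR being mirrored in vgic_cpu->ctlr and read atomically. A minimal sketch of the LPI-enable check, assuming kernel context (GICR_CTLR_ENABLE_LPIS comes from the GICv3 header; the helper name is illustrative):

/*
 * Illustrative only: LPIs count as enabled once the mirrored GICR_CTLR
 * value reads back as GICR_CTLR_ENABLE_LPIS.
 */
static bool lpis_enabled_sketch(struct kvm_vcpu *vcpu)
{
        struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;

        return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS;
}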
vgic-init.c
196 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
201 vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;
203 INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
204 raw_spin_lock_init(&vgic_cpu->ap_list_lock);
205 atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);
212 struct vgic_irq *irq = &vgic_cpu->private_irqs[i];
288 struct vgic_cpu *vgic_cpu local
375 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
[all...]
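
The vgic-init.c matches around lines 196-212 show the per-vCPU setup: the redistributor MMIO base starts out undefined, the ap_list and its lock are initialised, and the private-interrupt descriptors are filled in a loop. A sketch, assuming kernel context (VGIC_ADDR_UNDEF and VGIC_NR_PRIVATE_IRQS come from the vgic headers; the function name is illustrative):

/* Illustrative only: per-vCPU vgic_cpu initialisation pattern. */
static void init_vgic_cpu_sketch(struct kvm_vcpu *vcpu)
{
        struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
        int i;

        vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;

        INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
        raw_spin_lock_init(&vgic_cpu->ap_list_lock);
        atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);

        for (i = 0; i < VGIC_NR_PRIVATE_IRQS; i++) {
                struct vgic_irq *irq = &vgic_cpu->private_irqs[i];

                /* remaining per-IRQ fields (config, group, ...) go here */
                irq->intid = i;
        }
}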
vgic-v3.c
24 struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3;
37 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
38 struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3;
186 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val;
191 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0;
196 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
226 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
262 struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu
546 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; local
[all...]
vgic-v4.c
98 raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);
99 vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true;
100 raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);
117 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
215 struct its_vpe *vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
264 dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
341 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
351 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
387 struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
451 .vpe = &irq->target_vcpu->arch.vgic_cpu
[all...]
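
The vgic-v4.c matches at lines 98-100 show the doorbell path taking the vPE lock before flagging pending_last. A sketch, assuming kernel context (struct its_vpe comes from the GICv4 header; the helper name is illustrative):

/*
 * Illustrative only: vpe_lock serialises against the vPE being
 * (de)scheduled while pending_last is flipped.
 */
static void mark_vpe_pending_sketch(struct kvm_vcpu *vcpu)
{
        struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;

        raw_spin_lock(&vpe->vpe_lock);
        vpe->pending_last = true;
        raw_spin_unlock(&vpe->vpe_lock);
}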
vgic-mmio-v2.c
373 return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr;
375 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
399 vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val;
401 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
vgic.h
284 struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu;
vgic-its.c
384 map.vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
448 gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);
1392 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.its_vm)
1393 its_invall_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe);
1885 if (!(vcpu->arch.vgic_cpu.pendbaser & GICR_PENDBASER_PTZ))
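
The vgic-its.c matches at lines 448 and 1885 show how the cached GICR_PENDBASER value is used: its address field locates the guest's LPI pending table, and the PTZ bit lets the scan be skipped. A sketch, assuming kernel context (GICR_PENDBASER_ADDRESS and GICR_PENDBASER_PTZ come from the GICv3 header; the helper name is illustrative):

/*
 * Illustrative only: locate the LPI pending table unless the guest
 * declared it zero via GICR_PENDBASER.PTZ.
 */
static void scan_pending_table_sketch(struct kvm_vcpu *vcpu)
{
        u64 pendbaser = vcpu->arch.vgic_cpu.pendbaser;

        if (!(pendbaser & GICR_PENDBASER_PTZ)) {
                gpa_t pendbase = GICR_PENDBASER_ADDRESS(pendbaser);

                /* read the table at pendbase and sync LPI pending state */
                (void)pendbase;
        }
}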
/linux-master/arch/arm64/kvm/
vgic-sys-reg-v3.c
17 struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;
62 struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;
210 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
220 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
293 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
sys_regs.c
477 p->regval = vcpu->arch.vgic_cpu.vgic_v3.vgic_sre;
/linux-master/arch/arm64/kvm/hyp/nvhe/
switch.c
119 __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
120 __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
128 __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
129 __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
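
The switch.c matches at lines 119-129 show the GICv3 world-switch ordering in the nVHE hyp code: state is saved and traps are deactivated on the way out of the guest, and the mirror sequence runs on the way in. A sketch, assuming the hyp helpers from vgic-v3-sr.c (the wrapper names are illustrative):

/* Illustrative only: exit-side sequence. */
static void hyp_vgic_save_sketch(struct kvm_vcpu *vcpu)
{
        __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
        __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
}

/* Illustrative only: entry-side sequence, in the opposite order. */
static void hyp_vgic_restore_sketch(struct kvm_vcpu *vcpu)
{
        __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
        __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
}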
hyp-main.c
49 hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3 = host_vcpu->arch.vgic_cpu.vgic_v3;
55 struct vgic_v3_cpu_if *hyp_cpu_if = &hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3;
56 struct vgic_v3_cpu_if *host_cpu_if = &host_vcpu->arch.vgic_cpu.vgic_v3;
/linux-master/include/kvm/
arm_vgic.h
326 struct vgic_cpu { struct
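
The match above is the definition of struct vgic_cpu itself, at include/kvm/arm_vgic.h:326. For orientation, a partial sketch limited to the members that appear elsewhere in this listing (the real structure has more fields, and the sketch name is deliberately different):

/* Illustrative only: subset of struct vgic_cpu as used above. */
struct vgic_cpu_sketch {
        /* CPU interface state for whichever GIC version is in use */
        union {
                struct vgic_v2_cpu_if vgic_v2;
                struct vgic_v3_cpu_if vgic_v3;
        };

        /* active/pending list, walked under ap_list_lock with IRQs off */
        raw_spinlock_t ap_list_lock;
        struct list_head ap_list_head;

        /* per-vCPU private (SGI/PPI) interrupt descriptors, indexed by intid */
        struct vgic_irq private_irqs[VGIC_NR_PRIVATE_IRQS];

        /* GICv3 redistributor: MMIO frame and mirrored registers */
        struct vgic_io_device rd_iodev;
        u64 pendbaser;
        atomic_t ctlr;
};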
/linux-master/arch/arm64/include/asm/
kvm_emulate.h
115 if (atomic_read(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count) ||
kvm_host.h
656 struct vgic_cpu vgic_cpu; member in struct:kvm_vcpu_arch
/linux-master/arch/arm64/kvm/hyp/
vgic-v3-sr.c
487 unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
526 unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;

Completed in 279 milliseconds