
Searched refs:vgic_cpu (Results 1 – 11 of 11) sorted by relevance

/linux-4.19.296/virt/kvm/arm/vgic/
vgic.c
107 return &vcpu->arch.vgic_cpu.private_irqs[intid]; in vgic_get_irq()
281 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_sort_ap_list() local
283 DEBUG_SPINLOCK_BUG_ON(!spin_is_locked(&vgic_cpu->ap_list_lock)); in vgic_sort_ap_list()
285 list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp); in vgic_sort_ap_list()
361 spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags); in vgic_queue_irq_unlock()
378 spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags); in vgic_queue_irq_unlock()
389 list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head); in vgic_queue_irq_unlock()
393 spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags); in vgic_queue_irq_unlock()
601 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_prune_ap_list() local
607 spin_lock(&vgic_cpu->ap_list_lock); in vgic_prune_ap_list()
[all …]
vgic-v2.c
42 struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2; in vgic_v2_set_underflow()
62 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_v2_fold_lr_state() local
63 struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2; in vgic_v2_fold_lr_state()
70 for (lr = 0; lr < vgic_cpu->used_lrs; lr++) { in vgic_v2_fold_lr_state()
134 vgic_cpu->used_lrs = 0; in vgic_v2_fold_lr_state()
223 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val; in vgic_v2_populate_lr()
228 vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0; in vgic_v2_clear_lr()
233 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; in vgic_v2_set_vmcr()
260 struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; in vgic_v2_get_vmcr()
293 vcpu->arch.vgic_cpu.vgic_v2.vgic_vmcr = 0; in vgic_v2_enable()
[all …]
vgic-mmio-v3.c
185 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_mmio_read_v3r_ctlr() local
187 return vgic_cpu->lpis_enabled ? GICR_CTLR_ENABLE_LPIS : 0; in vgic_mmio_read_v3r_ctlr()
195 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_mmio_write_v3r_ctlr() local
196 bool was_enabled = vgic_cpu->lpis_enabled; in vgic_mmio_write_v3r_ctlr()
201 vgic_cpu->lpis_enabled = val & GICR_CTLR_ENABLE_LPIS; in vgic_mmio_write_v3r_ctlr()
203 if (!was_enabled && vgic_cpu->lpis_enabled) in vgic_mmio_write_v3r_ctlr()
211 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_mmio_read_v3r_typer() local
212 struct vgic_redist_region *rdreg = vgic_cpu->rdreg; in vgic_mmio_read_v3r_typer()
419 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_mmio_write_propbase() local
423 if (vgic_cpu->lpis_enabled) in vgic_mmio_write_propbase()
[all …]
vgic-init.c
208 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in kvm_vgic_vcpu_init() local
213 vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF; in kvm_vgic_vcpu_init()
214 vgic_cpu->sgi_iodev.base_addr = VGIC_ADDR_UNDEF; in kvm_vgic_vcpu_init()
216 INIT_LIST_HEAD(&vgic_cpu->ap_list_head); in kvm_vgic_vcpu_init()
217 spin_lock_init(&vgic_cpu->ap_list_lock); in kvm_vgic_vcpu_init()
224 struct vgic_irq *irq = &vgic_cpu->private_irqs[i]; in kvm_vgic_vcpu_init()
298 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_init() local
301 struct vgic_irq *irq = &vgic_cpu->private_irqs[i]; in vgic_init()
366 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in kvm_vgic_vcpu_destroy() local
368 INIT_LIST_HEAD(&vgic_cpu->ap_list_head); in kvm_vgic_vcpu_destroy()
vgic-v3.c
32 struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_set_underflow()
45 struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu; in vgic_v3_fold_lr_state() local
46 struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3; in vgic_v3_fold_lr_state()
54 for (lr = 0; lr < vgic_cpu->used_lrs; lr++) { in vgic_v3_fold_lr_state()
126 vgic_cpu->used_lrs = 0; in vgic_v3_fold_lr_state()
209 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val; in vgic_v3_populate_lr()
214 vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0; in vgic_v3_clear_lr()
219 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_set_vmcr()
249 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_get_vmcr()
285 struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_v3_enable()
[all …]
vgic-v4.c
99 vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true; in vgic_v4_doorbell_handler()
137 dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in vgic_v4_init()
211 return its_schedule_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe, false); in vgic_v4_sync_hwstate()
216 int irq = vcpu->arch.vgic_cpu.vgic_v3.its_vpe.irq; in vgic_v4_flush_hwstate()
232 err = its_schedule_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe, true); in vgic_v4_flush_hwstate()
294 .vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe, in kvm_vgic_v4_set_forwarding()
353 int irq = vcpu->arch.vgic_cpu.vgic_v3.its_vpe.irq; in kvm_vgic_v4_enable_doorbell()
362 int irq = vcpu->arch.vgic_cpu.vgic_v3.its_vpe.irq; in kvm_vgic_v4_disable_doorbell()
vgic-mmio-v2.c
370 return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr; in vgic_mmio_read_apr()
372 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_mmio_read_apr()
396 vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val; in vgic_mmio_write_apr()
398 struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3; in vgic_mmio_write_apr()
vgic.h
269 struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu; in vgic_v3_max_apr_idx()
vgic-its.c
380 map.vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe; in update_affinity()
437 gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser); in its_sync_lpi_pending_table()
572 if (!vcpu->arch.vgic_cpu.lpis_enabled) in vgic_its_resolve_lpi()
1192 if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.its_vm) in vgic_its_cmd_handle_invall()
1193 its_invall_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe); in vgic_its_cmd_handle_invall()
1662 if (!(vcpu->arch.vgic_cpu.pendbaser & GICR_PENDBASER_PTZ)) in vgic_enable_lpis()
/linux-4.19.296/virt/kvm/arm/hyp/
vgic-v3-sr.c
210 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in __vgic_v3_save_state()
211 u64 used_lrs = vcpu->arch.vgic_cpu.used_lrs; in __vgic_v3_save_state()
246 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in __vgic_v3_restore_state()
247 u64 used_lrs = vcpu->arch.vgic_cpu.used_lrs; in __vgic_v3_restore_state()
273 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in __vgic_v3_activate_traps()
322 struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in __vgic_v3_deactivate_traps()
354 cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in __vgic_v3_save_aprs()
387 cpu_if = &vcpu->arch.vgic_cpu.vgic_v3; in __vgic_v3_restore_aprs()
459 unsigned int used_lrs = vcpu->arch.vgic_cpu.used_lrs; in __vgic_v3_highest_priority_lr()
498 unsigned int used_lrs = vcpu->arch.vgic_cpu.used_lrs; in __vgic_v3_find_active_lr()
/linux-4.19.296/include/kvm/
arm_vgic.h
300 struct vgic_cpu { struct
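
For orientation, the matches above all touch a small, consistent set of per-VCPU fields. Below is a rough sketch of struct vgic_cpu reconstructed only from the accesses listed in these results; the field names come from the snippets, but the types, grouping, and comments are assumptions, not the verbatim 4.19 definition at include/kvm/arm_vgic.h:300.

/*
 * Sketch of the per-VCPU vgic state implied by the references above.
 * Field names are taken from the matches; types and comments are
 * assumptions for illustration only.
 */
struct vgic_cpu {
	/* CPU interface state used for world switch; only one is live. */
	union {
		struct vgic_v2_cpu_if vgic_v2;	/* vgic-v2.c, vgic-mmio-v2.c */
		struct vgic_v3_cpu_if vgic_v3;	/* vgic-v3.c, vgic-v4.c, vgic-v3-sr.c */
	};

	unsigned int used_lrs;			/* list registers in use (fold/populate paths) */

	struct vgic_irq private_irqs[VGIC_NR_PRIVATE_IRQS];	/* SGIs/PPIs, see vgic_get_irq() */

	spinlock_t ap_list_lock;		/* protects ap_list_head */
	struct list_head ap_list_head;		/* active/pending IRQs for this VCPU */

	/* GICv3 redistributor pieces (vgic-mmio-v3.c, vgic-init.c) */
	struct vgic_io_device rd_iodev;
	struct vgic_io_device sgi_iodev;
	struct vgic_redist_region *rdreg;

	u64 pendbaser;				/* LPI pending table base (vgic-its.c) */
	bool lpis_enabled;			/* GICR_CTLR.EnableLPIs state */
};

Read this way, the hits split cleanly: vgic.c and vgic-init.c use the generic AP-list machinery (ap_list_head, ap_list_lock, private_irqs), the v2/v3 back ends and the hyp save/restore code only touch their respective cpu_if member plus used_lrs, and the v3-only redistributor/ITS fields (rdreg, pendbaser, lpis_enabled, its_vpe) appear only in vgic-mmio-v3.c, vgic-its.c, and vgic-v4.c.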