
Searched refs:gfn (Results 1 – 14 of 14) sorted by relevance

/linux-4.19.296/include/linux/
kvm_host.h 224 kvm_pfn_t gfn; member
704 int gfn_to_page_many_atomic(struct kvm_memory_slot *slot, gfn_t gfn,
707 struct page *gfn_to_page(struct kvm *kvm, gfn_t gfn);
708 unsigned long gfn_to_hva(struct kvm *kvm, gfn_t gfn);
709 unsigned long gfn_to_hva_prot(struct kvm *kvm, gfn_t gfn, bool *writable);
710 unsigned long gfn_to_hva_memslot(struct kvm_memory_slot *slot, gfn_t gfn);
711 unsigned long gfn_to_hva_memslot_prot(struct kvm_memory_slot *slot, gfn_t gfn,
717 kvm_pfn_t gfn_to_pfn_atomic(struct kvm *kvm, gfn_t gfn);
718 kvm_pfn_t gfn_to_pfn(struct kvm *kvm, gfn_t gfn);
719 kvm_pfn_t gfn_to_pfn_prot(struct kvm *kvm, gfn_t gfn, bool write_fault,
[all …]
kvm_types.h 68 gfn_t gfn; member
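
The gfn_to_* prototypes from kvm_host.h above form the core guest-frame-number translation API. Below is a minimal sketch of a caller using gfn_to_pfn() and gfn_to_hva_prot() exactly as declared in the listing; the helper name and the logging are illustrative, not from the indexed tree.

#include <linux/kvm_host.h>

/* Illustrative helper: translate one gfn to a host pfn and a host
 * virtual address, using the kvm_host.h prototypes listed above.
 */
static int demo_translate_gfn(struct kvm *kvm, gfn_t gfn)
{
        kvm_pfn_t pfn;
        unsigned long hva;
        bool writable;

        pfn = gfn_to_pfn(kvm, gfn);             /* may sleep; pins the page */
        if (is_error_pfn(pfn))
                return -EFAULT;

        hva = gfn_to_hva_prot(kvm, gfn, &writable);
        if (kvm_is_error_hva(hva)) {
                kvm_release_pfn_clean(pfn);
                return -EFAULT;
        }

        pr_info("gfn %llx -> pfn %llx, hva %lx (%s)\n",
                (u64)gfn, (u64)pfn, hva, writable ? "rw" : "ro");

        kvm_release_pfn_clean(pfn);             /* drop the reference taken above */
        return 0;
}
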
/linux-4.19.296/include/trace/events/
kvm.h 259 TP_PROTO(ulong gfn, int level, struct kvm_memory_slot *slot, int ref),
260 TP_ARGS(gfn, level, slot, ref),
264 __field( u64, gfn )
270 __entry->gfn = gfn;
272 __entry->hva = ((gfn - slot->base_gfn) <<
278 __entry->hva, __entry->gfn, __entry->level,
285 TP_PROTO(u64 gva, u64 gfn),
287 TP_ARGS(gva, gfn),
291 __field(u64, gfn)
296 __entry->gfn = gfn;
[all …]
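
The truncated assignment at line 272 fills in the host virtual address for the traced gfn; presumably it continues with PAGE_SHIFT and slot->userspace_addr, the same slot-relative arithmetic as __gfn_to_hva_memslot(). A sketch of that arithmetic, with an illustrative helper name:

/* Illustrative: host virtual address of a gfn, assuming the memslot
 * covers it. Offset of the gfn within the slot, scaled to bytes,
 * plus the slot's userspace mapping base.
 */
static unsigned long demo_gfn_to_hva(struct kvm_memory_slot *slot, gfn_t gfn)
{
        return slot->userspace_addr + ((gfn - slot->base_gfn) << PAGE_SHIFT);
}
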
/linux-4.19.296/virt/kvm/
kvm_main.c 133 static void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot, gfn_t gfn);
1295 struct kvm_memory_slot *gfn_to_memslot(struct kvm *kvm, gfn_t gfn) in gfn_to_memslot() argument
1297 return __gfn_to_memslot(kvm_memslots(kvm), gfn); in gfn_to_memslot()
1301 struct kvm_memory_slot *kvm_vcpu_gfn_to_memslot(struct kvm_vcpu *vcpu, gfn_t gfn) in kvm_vcpu_gfn_to_memslot() argument
1303 return __gfn_to_memslot(kvm_vcpu_memslots(vcpu), gfn); in kvm_vcpu_gfn_to_memslot()
1306 bool kvm_is_visible_gfn(struct kvm *kvm, gfn_t gfn) in kvm_is_visible_gfn() argument
1308 struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn); in kvm_is_visible_gfn()
1318 unsigned long kvm_host_page_size(struct kvm_vcpu *vcpu, gfn_t gfn) in kvm_host_page_size() argument
1325 addr = kvm_vcpu_gfn_to_hva_prot(vcpu, gfn, NULL); in kvm_host_page_size()
1347 static unsigned long __gfn_to_hva_many(struct kvm_memory_slot *slot, gfn_t gfn, in __gfn_to_hva_many() argument
[all …]
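
gfn_to_memslot() and kvm_is_visible_gfn() above are the building blocks for deciding whether a gfn is backed by a user memslot at all. A minimal sketch of a caller; the helper name is illustrative:

/* Illustrative: true if the gfn falls in a user-visible, valid memslot. */
static bool demo_gfn_has_memslot(struct kvm *kvm, gfn_t gfn)
{
        struct kvm_memory_slot *slot;

        if (!kvm_is_visible_gfn(kvm, gfn))
                return false;

        slot = gfn_to_memslot(kvm, gfn);
        return slot && !(slot->flags & KVM_MEMSLOT_INVALID);
}
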
/linux-4.19.296/drivers/xen/
xlate_mmu.c 45 typedef void (*xen_gfn_fn_t)(unsigned long gfn, void *data);
84 static void setup_hparams(unsigned long gfn, void *data) in setup_hparams() argument
89 info->h_gpfns[info->h_iter] = gfn; in setup_hparams()
146 xen_pfn_t *gfn, int nr, in xen_xlate_remap_gfn_array() argument
159 data.fgfn = gfn; in xen_xlate_remap_gfn_array()
175 static void unmap_gfn(unsigned long gfn, void *data) in unmap_gfn() argument
180 xrp.gpfn = gfn; in unmap_gfn()
198 static void setup_balloon_gfn(unsigned long gfn, void *data) in setup_balloon_gfn() argument
202 info->pfns[info->idx++] = gfn; in setup_balloon_gfn()
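
xen_gfn_fn_t at line 45 is the per-gfn callback type that xlate_mmu.c invokes while walking a page range; setup_hparams(), unmap_gfn() and setup_balloon_gfn() above are all instances of it. A sketch of another such callback that simply records each reported gfn into caller-supplied state (struct and function names illustrative):

/* Illustrative xen_gfn_fn_t instance in the style of setup_balloon_gfn():
 * the walker calls it once per gfn and it appends the gfn to an array.
 */
struct demo_gfn_list {
        xen_pfn_t *gfns;
        unsigned int idx;
};

static void demo_collect_gfn(unsigned long gfn, void *data)
{
        struct demo_gfn_list *list = data;

        list->gfns[list->idx++] = gfn;
}
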
gntdev.c 925 unsigned long *gfn) in gntdev_get_page() argument
939 *gfn = pfn_to_gfn(xen_pfn); in gntdev_get_page()
1020 unsigned long gfn; in gntdev_grant_copy_seg() local
1045 ret = gntdev_get_page(batch, virt, &gfn); in gntdev_grant_copy_seg()
1049 op->source.u.gmfn = gfn; in gntdev_grant_copy_seg()
1065 ret = gntdev_get_page(batch, virt, &gfn); in gntdev_grant_copy_seg()
1069 op->dest.u.gmfn = gfn; in gntdev_grant_copy_seg()
privcmd.c 375 xen_pfn_t gfn; in mmap_return_error() local
377 ret = get_user(gfn, st->user_gfn); in mmap_return_error()
385 gfn |= (err == -ENOENT) ? in mmap_return_error()
388 return __put_user(gfn, st->user_gfn++); in mmap_return_error()
grant-table.c 410 unsigned long gfn = gnttab_interface->read_frame(ref); in gnttab_add_deferred() local
412 page = pfn_to_page(gfn_to_pfn(gfn)); in gnttab_add_deferred()
/linux-4.19.296/include/xen/
xen-ops.h 84 xen_pfn_t *gfn, int nr,
125 xen_pfn_t gfn, int nr,
134 xen_pfn_t *gfn, int nr,
147 xen_pfn_t *gfn, int nr, in xen_xlate_remap_gfn_array() argument
grant_table.h 286 typedef void (*xen_grant_fn_t)(unsigned long gfn, unsigned int offset,
/linux-4.19.296/include/xen/arm/
page.h 52 static inline unsigned long gfn_to_pfn(unsigned long gfn) in gfn_to_pfn() argument
54 return gfn; in gfn_to_pfn()
/linux-4.19.296/drivers/xen/xenbus/
xenbus_client.c 381 unsigned long gfn; in xenbus_grant_ring() local
384 gfn = pfn_to_gfn(vmalloc_to_pfn(vaddr)); in xenbus_grant_ring()
386 gfn = virt_to_gfn(vaddr); in xenbus_grant_ring()
390 gfn, 0); in xenbus_grant_ring()
548 static void xenbus_map_ring_setup_grant_hvm(unsigned long gfn, in xenbus_map_ring_setup_grant_hvm() argument
554 unsigned long vaddr = (unsigned long)gfn_to_virt(gfn); in xenbus_map_ring_setup_grant_hvm()
821 static void xenbus_unmap_ring_setup_grant_hvm(unsigned long gfn, in xenbus_unmap_ring_setup_grant_hvm() argument
828 info->addrs[info->idx] = (unsigned long)gfn_to_virt(gfn); in xenbus_unmap_ring_setup_grant_hvm()
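
Lines 381–390 show xenbus_grant_ring() converting a ring address to a gfn before granting it: vmalloc'd memory goes through vmalloc_to_pfn() and pfn_to_gfn(), while directly mapped memory can use virt_to_gfn(). A sketch of that address-to-gfn step; the helper name is illustrative and the is_vmalloc_addr() test is an assumption about the elided condition:

#include <linux/mm.h>
#include <linux/vmalloc.h>
#include <xen/page.h>

/* Illustrative: kernel virtual address -> gfn, choosing the translation
 * path by whether the address lives in the vmalloc area.
 */
static unsigned long demo_vaddr_to_gfn(void *vaddr)
{
        if (is_vmalloc_addr(vaddr))
                return pfn_to_gfn(vmalloc_to_pfn(vaddr));

        return virt_to_gfn(vaddr);
}
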
/linux-4.19.296/virt/kvm/arm/
mmu.c 1239 gfn_t gfn = *ipap >> PAGE_SHIFT; in transparent_hugepage_adjust() local
1268 VM_BUG_ON((gfn & mask) != (pfn & mask)); in transparent_hugepage_adjust()
1492 gfn_t gfn = fault_ipa >> PAGE_SHIFT; in user_mem_abort() local
1521 gfn = (fault_ipa & PMD_MASK) >> PAGE_SHIFT; in user_mem_abort()
1556 pfn = gfn_to_pfn_prot(kvm, gfn, write_fault, &writable); in user_mem_abort()
1618 mark_page_dirty(kvm, gfn); in user_mem_abort()
1704 gfn_t gfn; in kvm_handle_guest_abort() local
1742 gfn = fault_ipa >> PAGE_SHIFT; in kvm_handle_guest_abort()
1743 memslot = gfn_to_memslot(vcpu->kvm, gfn); in kvm_handle_guest_abort()
1744 hva = gfn_to_hva_memslot_prot(memslot, gfn, &writable); in kvm_handle_guest_abort()
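
In kvm_handle_guest_abort() above, the faulting IPA is shifted down by PAGE_SHIFT to a gfn, the gfn is resolved to a memslot, and the memslot is translated to a host virtual address. A condensed sketch of that lookup, built only from the prototypes shown in this listing; the helper name is illustrative:

/* Illustrative: faulting guest IPA -> gfn -> memslot -> host VA, mirroring
 * the kvm_handle_guest_abort() sequence above. Callers should check the
 * result with kvm_is_error_hva(); a missing or invalid slot yields an
 * error hva.
 */
static unsigned long demo_fault_ipa_to_hva(struct kvm *kvm, phys_addr_t fault_ipa,
                                           bool *writable)
{
        gfn_t gfn = fault_ipa >> PAGE_SHIFT;
        struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn);

        return gfn_to_hva_memslot_prot(memslot, gfn, writable);
}
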
/linux-4.19.296/virt/kvm/arm/vgic/
vgic-its.c 771 gfn_t gfn; in vgic_its_check_id() local
795 gfn = addr >> PAGE_SHIFT; in vgic_its_check_id()
830 gfn = indirect_ptr >> PAGE_SHIFT; in vgic_its_check_id()
837 ret = kvm_is_visible_gfn(its->dev->kvm, gfn); in vgic_its_check_id()