Lines Matching refs:pmd

85 static void kvm_flush_dcache_pmd(pmd_t pmd)  in kvm_flush_dcache_pmd()  argument
87 __kvm_flush_dcache_pmd(pmd); in kvm_flush_dcache_pmd()
109 static void stage2_dissolve_pmd(struct kvm *kvm, phys_addr_t addr, pmd_t *pmd) in stage2_dissolve_pmd() argument
111 if (!pmd_thp_or_huge(*pmd)) in stage2_dissolve_pmd()
114 pmd_clear(pmd); in stage2_dissolve_pmd()
116 put_page(virt_to_page(pmd)); in stage2_dissolve_pmd()
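
stage2_dissolve_pmd() tears a huge (THP or hugetlbfs) block mapping out of the stage-2 PMD so the range can be remapped at page granularity, which dirty-page logging requires. A minimal sketch assembled from the referenced lines; the TLB invalidation between the clear and the put_page() is not in the listing and is assumed from the usual break-before-make sequence:

        static void stage2_dissolve_pmd(struct kvm *kvm, phys_addr_t addr, pmd_t *pmd)
        {
                if (!pmd_thp_or_huge(*pmd))
                        return;                         /* only block entries need dissolving */

                pmd_clear(pmd);
                kvm_tlb_flush_vmid_ipa(kvm, addr);      /* assumed: invalidate the stale entry */
                put_page(virt_to_page(pmd));            /* drop the reference the mapping held */
        }
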
170 static void clear_stage2_pmd_entry(struct kvm *kvm, pmd_t *pmd, phys_addr_t addr) in clear_stage2_pmd_entry() argument
172 pte_t *pte_table = pte_offset_kernel(pmd, 0); in clear_stage2_pmd_entry()
173 VM_BUG_ON(pmd_thp_or_huge(*pmd)); in clear_stage2_pmd_entry()
174 pmd_clear(pmd); in clear_stage2_pmd_entry()
177 put_page(virt_to_page(pmd)); in clear_stage2_pmd_entry()
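
clear_stage2_pmd_entry() is the companion teardown for a table (non-block) entry: it detaches the PTE table below the PMD and releases it. A sketch under the same assumptions; the TLB flush and the free of the detached table are inferred, not listed:

        static void clear_stage2_pmd_entry(struct kvm *kvm, pmd_t *pmd, phys_addr_t addr)
        {
                pte_t *pte_table = pte_offset_kernel(pmd, 0);

                VM_BUG_ON(pmd_thp_or_huge(*pmd));       /* must be a table, never a block */
                pmd_clear(pmd);
                kvm_tlb_flush_vmid_ipa(kvm, addr);      /* assumed */
                free_page((unsigned long)pte_table);    /* assumed: release the PTE table page */
                put_page(virt_to_page(pmd));
        }
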
233 static void unmap_stage2_ptes(struct kvm *kvm, pmd_t *pmd, in unmap_stage2_ptes() argument
239 start_pte = pte = pte_offset_kernel(pmd, addr); in unmap_stage2_ptes()
256 clear_stage2_pmd_entry(kvm, pmd, start_addr); in unmap_stage2_ptes()
263 pmd_t *pmd, *start_pmd; in unmap_stage2_pmds() local
265 start_pmd = pmd = stage2_pmd_offset(pud, addr); in unmap_stage2_pmds()
268 if (!pmd_none(*pmd)) { in unmap_stage2_pmds()
269 if (pmd_thp_or_huge(*pmd)) { in unmap_stage2_pmds()
270 pmd_t old_pmd = *pmd; in unmap_stage2_pmds()
272 pmd_clear(pmd); in unmap_stage2_pmds()
277 put_page(virt_to_page(pmd)); in unmap_stage2_pmds()
279 unmap_stage2_ptes(kvm, pmd, addr, next); in unmap_stage2_pmds()
282 } while (pmd++, addr = next, addr != end); in unmap_stage2_pmds()
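
unmap_stage2_pmds() shows the walk shape this file uses at every level: iterate the PMD slots covering [addr, end), unmap block entries in place, and recurse into unmap_stage2_ptes() for tables. Reconstructed from the referenced lines; the address-step helper, the flushes, and the trailing empty-table cleanup are assumptions:

        static void unmap_stage2_pmds(struct kvm *kvm, pud_t *pud,
                                      phys_addr_t addr, phys_addr_t end)
        {
                phys_addr_t next, start_addr = addr;
                pmd_t *pmd, *start_pmd;

                start_pmd = pmd = stage2_pmd_offset(pud, addr);
                do {
                        next = stage2_pmd_addr_end(addr, end);  /* assumed helper */
                        if (!pmd_none(*pmd)) {
                                if (pmd_thp_or_huge(*pmd)) {
                                        pmd_t old_pmd = *pmd;

                                        pmd_clear(pmd);
                                        kvm_tlb_flush_vmid_ipa(kvm, addr);  /* assumed */
                                        kvm_flush_dcache_pmd(old_pmd);      /* assumed */
                                        put_page(virt_to_page(pmd));
                                } else {
                                        unmap_stage2_ptes(kvm, pmd, addr, next);
                                }
                        }
                } while (pmd++, addr = next, addr != end);

                /* assumed: free this level if the walk emptied it */
                if (stage2_pmd_table_empty(start_pmd))
                        clear_stage2_pud_entry(kvm, pud, start_addr);
        }

Note how old_pmd is snapshotted before pmd_clear(): the dcache flush needs the old block entry after the slot has been wiped.
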
362 static void stage2_flush_ptes(struct kvm *kvm, pmd_t *pmd, in stage2_flush_ptes() argument
367 pte = pte_offset_kernel(pmd, addr); in stage2_flush_ptes()
377 pmd_t *pmd; in stage2_flush_pmds() local
380 pmd = stage2_pmd_offset(pud, addr); in stage2_flush_pmds()
383 if (!pmd_none(*pmd)) { in stage2_flush_pmds()
384 if (pmd_thp_or_huge(*pmd)) in stage2_flush_pmds()
385 kvm_flush_dcache_pmd(*pmd); in stage2_flush_pmds()
387 stage2_flush_ptes(kvm, pmd, addr, next); in stage2_flush_pmds()
389 } while (pmd++, addr = next, addr != end); in stage2_flush_pmds()
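
stage2_flush_pmds() reuses the same iteration but only performs data-cache maintenance, flushing a huge entry in one call and descending to the PTE level otherwise. A sketch from the listed lines; only the address-step helper is assumed:

        static void stage2_flush_pmds(struct kvm *kvm, pud_t *pud,
                                      phys_addr_t addr, phys_addr_t end)
        {
                pmd_t *pmd;
                phys_addr_t next;

                pmd = stage2_pmd_offset(pud, addr);
                do {
                        next = stage2_pmd_addr_end(addr, end);  /* assumed helper */
                        if (!pmd_none(*pmd)) {
                                if (pmd_thp_or_huge(*pmd))
                                        kvm_flush_dcache_pmd(*pmd);
                                else
                                        stage2_flush_ptes(kvm, pmd, addr, next);
                        }
                } while (pmd++, addr = next, addr != end);
        }
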
467 static void clear_hyp_pmd_entry(pmd_t *pmd) in clear_hyp_pmd_entry() argument
469 pte_t *pte_table = pte_offset_kernel(pmd, 0); in clear_hyp_pmd_entry()
470 VM_BUG_ON(pmd_thp_or_huge(*pmd)); in clear_hyp_pmd_entry()
471 pmd_clear(pmd); in clear_hyp_pmd_entry()
473 put_page(virt_to_page(pmd)); in clear_hyp_pmd_entry()
476 static void unmap_hyp_ptes(pmd_t *pmd, phys_addr_t addr, phys_addr_t end) in unmap_hyp_ptes() argument
480 start_pte = pte = pte_offset_kernel(pmd, addr); in unmap_hyp_ptes()
489 clear_hyp_pmd_entry(pmd); in unmap_hyp_ptes()
495 pmd_t *pmd, *start_pmd; in unmap_hyp_pmds() local
497 start_pmd = pmd = pmd_offset(pud, addr); in unmap_hyp_pmds()
501 if (!pmd_none(*pmd)) in unmap_hyp_pmds()
502 unmap_hyp_ptes(pmd, addr, next); in unmap_hyp_pmds()
503 } while (pmd++, addr = next, addr != end); in unmap_hyp_pmds()
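
The hyp (EL2) side mirrors the stage-2 teardown using the kernel's generic pmd_offset() walker; unmap_hyp_ptes() invokes clear_hyp_pmd_entry() itself once its PTE table is empty, so this level only dispatches. A sketch; the address step is assumed:

        static void unmap_hyp_pmds(pud_t *pud, phys_addr_t addr, phys_addr_t end)
        {
                phys_addr_t next;
                pmd_t *pmd, *start_pmd;

                start_pmd = pmd = pmd_offset(pud, addr);
                do {
                        next = pmd_addr_end(addr, end);         /* assumed */
                        /* hyp entries walked here are tables, not blocks */
                        if (!pmd_none(*pmd))
                                unmap_hyp_ptes(pmd, addr, next);
                } while (pmd++, addr = next, addr != end);
        }
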
607 static void create_hyp_pte_mappings(pmd_t *pmd, unsigned long start, in create_hyp_pte_mappings() argument
616 pte = pte_offset_kernel(pmd, addr); in create_hyp_pte_mappings()
627 pmd_t *pmd; in create_hyp_pmd_mappings() local
633 pmd = pmd_offset(pud, addr); in create_hyp_pmd_mappings()
635 BUG_ON(pmd_sect(*pmd)); in create_hyp_pmd_mappings()
637 if (pmd_none(*pmd)) { in create_hyp_pmd_mappings()
643 kvm_pmd_populate(pmd, pte); in create_hyp_pmd_mappings()
644 get_page(virt_to_page(pmd)); in create_hyp_pmd_mappings()
649 create_hyp_pte_mappings(pmd, addr, next, pfn, prot); in create_hyp_pmd_mappings()
661 pmd_t *pmd; in create_hyp_pud_mappings() local
670 pmd = pmd_alloc_one(NULL, addr); in create_hyp_pud_mappings()
671 if (!pmd) { in create_hyp_pud_mappings()
675 kvm_pud_populate(pud, pmd); in create_hyp_pud_mappings()
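
Creating hyp mappings inverts the teardown: an empty PMD slot gets a freshly allocated PTE table hooked in via kvm_pmd_populate(), plus a get_page() on the PMD page that the later put_page() in clear_hyp_pmd_entry() balances. A sketch from the referenced lines; the allocator call, the error path, and the pfn advance are assumptions:

        static int create_hyp_pmd_mappings(pud_t *pud, unsigned long start,
                                           unsigned long end, unsigned long pfn,
                                           pgprot_t prot)
        {
                pmd_t *pmd;
                pte_t *pte;
                unsigned long addr, next;

                addr = start;
                do {
                        pmd = pmd_offset(pud, addr);

                        BUG_ON(pmd_sect(*pmd));             /* no block entries expected */

                        if (pmd_none(*pmd)) {
                                pte = pte_alloc_one_kernel(NULL);   /* assumed allocator */
                                if (!pte)
                                        return -ENOMEM;             /* assumed error path */
                                kvm_pmd_populate(pmd, pte);
                                get_page(virt_to_page(pmd));
                        }

                        next = pmd_addr_end(addr, end);             /* assumed */
                        create_hyp_pte_mappings(pmd, addr, next, pfn, prot);
                        pfn += (next - addr) >> PAGE_SHIFT;         /* assumed advance */
                } while (addr = next, addr != end);

                return 0;
        }

One level up, create_hyp_pud_mappings() repeats the shape with pmd_alloc_one(NULL, addr) and kvm_pud_populate(), as the listed lines show.
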
1029 pmd_t *pmd; in stage2_get_pmd() local
1038 pmd = mmu_memory_cache_alloc(cache); in stage2_get_pmd()
1039 stage2_pud_populate(pud, pmd); in stage2_get_pmd()
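
stage2_get_pmd() walks, and lazily builds, the stage-2 table down to PMD level. Table pages come from a pre-filled MMU memory cache (mmu_memory_cache_alloc()) so no allocation sleeps while the fault path holds mmu_lock. A sketch; the PUD lookup, its none-check, and the refcounting are assumptions:

        static pmd_t *stage2_get_pmd(struct kvm *kvm,
                                     struct kvm_mmu_memory_cache *cache,
                                     phys_addr_t addr)
        {
                pud_t *pud;
                pmd_t *pmd;

                pud = stage2_get_pud(kvm, cache, addr); /* assumed helper */
                if (!pud)
                        return NULL;

                if (stage2_pud_none(*pud)) {            /* assumed check */
                        if (!cache)
                                return NULL;            /* lookup-only callers pass NULL */
                        pmd = mmu_memory_cache_alloc(cache);
                        stage2_pud_populate(pud, pmd);
                        get_page(virt_to_page(pud));    /* assumed */
                }

                return stage2_pmd_offset(pud, addr);
        }

The NULL-cache convention matters below: handle_access_fault() and the aging handlers pass NULL precisely because they must never create entries.
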
1049 pmd_t *pmd, old_pmd; in stage2_set_pmd_huge() local
1051 pmd = stage2_get_pmd(kvm, cache, addr); in stage2_set_pmd_huge()
1052 VM_BUG_ON(!pmd); in stage2_set_pmd_huge()
1054 old_pmd = *pmd; in stage2_set_pmd_huge()
1083 pmd_clear(pmd); in stage2_set_pmd_huge()
1086 get_page(virt_to_page(pmd)); in stage2_set_pmd_huge()
1089 kvm_set_pmd(pmd, *new_pmd); in stage2_set_pmd_huge()
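
stage2_set_pmd_huge() installs a huge mapping under break-before-make: an already-present entry is cleared and its TLB entry invalidated before the new value is written, and only a previously empty slot takes a new page reference. The listing skips the middle of the function; the presence check, the same-pfn short-circuit, and the flush are filled in as assumptions:

        static int stage2_set_pmd_huge(struct kvm *kvm,
                                       struct kvm_mmu_memory_cache *cache,
                                       phys_addr_t addr, const pmd_t *new_pmd)
        {
                pmd_t *pmd, old_pmd;

                pmd = stage2_get_pmd(kvm, cache, addr);
                VM_BUG_ON(!pmd);

                old_pmd = *pmd;
                if (pmd_present(old_pmd)) {                     /* assumed */
                        /* assumed: concurrent vcpu faults on the same block;
                         * re-writing an identical entry is skipped */
                        if (pmd_pfn(old_pmd) == pmd_pfn(*new_pmd))
                                return 0;

                        /* break-before-make: clear, then invalidate */
                        pmd_clear(pmd);
                        kvm_tlb_flush_vmid_ipa(kvm, addr);      /* assumed */
                } else {
                        get_page(virt_to_page(pmd));
                }

                kvm_set_pmd(pmd, *new_pmd);
                return 0;
        }
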
1116 pmd_t *pmd; in stage2_set_pte() local
1124 pmd = stage2_get_pmd(kvm, cache, addr); in stage2_set_pte()
1125 if (!pmd) { in stage2_set_pte()
1138 stage2_dissolve_pmd(kvm, addr, pmd); in stage2_set_pte()
1141 if (pmd_none(*pmd)) { in stage2_set_pte()
1145 kvm_pmd_populate(pmd, pte); in stage2_set_pte()
1146 get_page(virt_to_page(pmd)); in stage2_set_pte()
1149 pte = pte_offset_kernel(pmd, addr); in stage2_set_pte()
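
stage2_set_pte() is the page-granularity installer. Its listed prologue does three things: bail out if the PMD isn't there (callers like kvm_set_spte_hva probe unallocated ranges), dissolve a huge PMD when dirty logging needs PTE granularity, and populate an empty slot with a PTE table from the cache. A sketch of that prologue only; the logging flag and both early returns are assumptions:

        pmd = stage2_get_pmd(kvm, cache, addr);
        if (!pmd)
                return 0;                       /* assumed: unallocated range, nothing to do */

        if (logging_active)                     /* assumed flag derived from the caller */
                stage2_dissolve_pmd(kvm, addr, pmd);

        if (pmd_none(*pmd)) {
                if (!cache)
                        return 0;               /* assumed lookup-only path */
                pte = mmu_memory_cache_alloc(cache);
                kvm_pmd_populate(pmd, pte);
                get_page(virt_to_page(pmd));
        }

        pte = pte_offset_kernel(pmd, addr);     /* the actual PTE write follows */
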
1187 static int stage2_pmdp_test_and_clear_young(pmd_t *pmd) in stage2_pmdp_test_and_clear_young() argument
1189 return stage2_ptep_test_and_clear_young((pte_t *)pmd); in stage2_pmdp_test_and_clear_young()
1300 static void stage2_wp_ptes(pmd_t *pmd, phys_addr_t addr, phys_addr_t end) in stage2_wp_ptes() argument
1304 pte = pte_offset_kernel(pmd, addr); in stage2_wp_ptes()
1321 pmd_t *pmd; in stage2_wp_pmds() local
1324 pmd = stage2_pmd_offset(pud, addr); in stage2_wp_pmds()
1328 if (!pmd_none(*pmd)) { in stage2_wp_pmds()
1329 if (pmd_thp_or_huge(*pmd)) { in stage2_wp_pmds()
1330 if (!kvm_s2pmd_readonly(pmd)) in stage2_wp_pmds()
1331 kvm_set_s2pmd_readonly(pmd); in stage2_wp_pmds()
1333 stage2_wp_ptes(pmd, addr, next); in stage2_wp_pmds()
1336 } while (pmd++, addr = next, addr != end); in stage2_wp_pmds()
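
stage2_wp_pmds() write-protects a range for dirty tracking: a huge entry is made read-only in place (tightening permissions doesn't require break-before-make), otherwise the walk descends to stage2_wp_ptes(). Sketch from the listed lines; only the address step is assumed:

        static void stage2_wp_pmds(pud_t *pud, phys_addr_t addr, phys_addr_t end)
        {
                pmd_t *pmd;
                phys_addr_t next;

                pmd = stage2_pmd_offset(pud, addr);
                do {
                        next = stage2_pmd_addr_end(addr, end);  /* assumed */
                        if (!pmd_none(*pmd)) {
                                if (pmd_thp_or_huge(*pmd)) {
                                        if (!kvm_s2pmd_readonly(pmd))
                                                kvm_set_s2pmd_readonly(pmd);
                                } else {
                                        stage2_wp_ptes(pmd, addr, next);
                                }
                        }
                } while (pmd++, addr = next, addr != end);
        }
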
1652 pmd_t *pmd; in handle_access_fault() local
1661 pmd = stage2_get_pmd(vcpu->kvm, NULL, fault_ipa); in handle_access_fault()
1662 if (!pmd || pmd_none(*pmd)) /* Nothing there */ in handle_access_fault()
1665 if (pmd_thp_or_huge(*pmd)) { /* THP, HugeTLB */ in handle_access_fault()
1666 *pmd = pmd_mkyoung(*pmd); in handle_access_fault()
1667 pfn = pmd_pfn(*pmd); in handle_access_fault()
1672 pte = pte_offset_kernel(pmd, fault_ipa); in handle_access_fault()
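
handle_access_fault() resolves a hardware access-flag fault by marking young whichever entry maps the faulting IPA, huge or not, and records the pfn so the host page can be marked accessed. A sketch of the listed lookup; the pte branch details and the final kvm_set_pfn_accessed() are assumptions:

        pmd = stage2_get_pmd(vcpu->kvm, NULL, fault_ipa);   /* NULL cache: never allocate */
        if (!pmd || pmd_none(*pmd))     /* nothing there */
                goto out;

        if (pmd_thp_or_huge(*pmd)) {    /* THP or HugeTLB */
                *pmd = pmd_mkyoung(*pmd);
                pfn = pmd_pfn(*pmd);
                pfn_valid = true;       /* assumed */
                goto out;
        }

        pte = pte_offset_kernel(pmd, fault_ipa);
        if (pte_none(*pte))             /* assumed */
                goto out;
        *pte = pte_mkyoung(*pte);       /* assumed */
        pfn = pte_pfn(*pte);            /* assumed */
        pfn_valid = true;
out:
        if (pfn_valid)
                kvm_set_pfn_accessed(pfn);  /* assumed follow-up */
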
1888 pmd_t *pmd; in kvm_age_hva_handler() local
1892 pmd = stage2_get_pmd(kvm, NULL, gpa); in kvm_age_hva_handler()
1893 if (!pmd || pmd_none(*pmd)) /* Nothing there */ in kvm_age_hva_handler()
1896 if (pmd_thp_or_huge(*pmd)) /* THP, HugeTLB */ in kvm_age_hva_handler()
1897 return stage2_pmdp_test_and_clear_young(pmd); in kvm_age_hva_handler()
1899 pte = pte_offset_kernel(pmd, gpa); in kvm_age_hva_handler()
1908 pmd_t *pmd; in kvm_test_age_hva_handler() local
1912 pmd = stage2_get_pmd(kvm, NULL, gpa); in kvm_test_age_hva_handler()
1913 if (!pmd || pmd_none(*pmd)) /* Nothing there */ in kvm_test_age_hva_handler()
1916 if (pmd_thp_or_huge(*pmd)) /* THP, HugeTLB */ in kvm_test_age_hva_handler()
1917 return pmd_young(*pmd); in kvm_test_age_hva_handler()
1919 pte = pte_offset_kernel(pmd, gpa); in kvm_test_age_hva_handler()
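
The two aging handlers share this lookup. kvm_age_hva_handler() tests and clears the young bit, using the pte-cast wrapper from line 1189 for huge PMDs; kvm_test_age_hva_handler() only reads it via pmd_young()/pte_young(). A sketch of the first; the pte fallback is an assumption:

        static int kvm_age_hva_handler(struct kvm *kvm, gpa_t gpa, u64 size, void *data)
        {
                pmd_t *pmd;
                pte_t *pte;

                pmd = stage2_get_pmd(kvm, NULL, gpa);   /* NULL cache: lookup only */
                if (!pmd || pmd_none(*pmd))             /* nothing there */
                        return 0;

                if (pmd_thp_or_huge(*pmd))              /* THP or HugeTLB */
                        return stage2_pmdp_test_and_clear_young(pmd);

                pte = pte_offset_kernel(pmd, gpa);
                if (pte_none(*pte))                     /* assumed */
                        return 0;
                return stage2_ptep_test_and_clear_young(pte);  /* assumed counterpart */
        }

The test-only variant ends in pmd_young(*pmd) or pte_young(*pte) instead, leaving the bit intact.
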