Searched refs:pgd_page (Results 1 – 14 of 14) sorted by relevance
390 static inline struct page *pgd_page(pgd_t pgd) in pgd_page() function
394 #define pgd_page(pgd) pgd_page(pgd) macro
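The pair of hits above shows a common kernel idiom: the arch supplies pgd_page() as a real inline function and then #defines the name to itself, so generic headers can probe #ifndef pgd_page and install a fallback only when no override exists. A minimal user-space sketch of that convention follows; struct page, pgd_t and the fallback body are simplified stand-ins, not the kernel's real definitions.

#include <stdio.h>

struct page { unsigned long flags; };
typedef struct { unsigned long pgd; } pgd_t;

static struct page fake_page;

/* "arch" override: a real inline function... */
static inline struct page *pgd_page(pgd_t pgd)
{
        (void)pgd;
        return &fake_page;
}
/* ...then the name is defined to itself so #ifndef sees it */
#define pgd_page(pgd) pgd_page(pgd)

#ifndef pgd_page        /* generic fallback: skipped thanks to the define above */
#define pgd_page(pgd) ((struct page *)0)
#endif

int main(void)
{
        pgd_t g = { 0 };
        printf("pgd_page -> %p\n", (void *)pgd_page(g));
        return 0;
}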
17 int kernel_ident_mapping_init(struct x86_mapping_info *info, pgd_t *pgd_page,
1199 #define pgd_page(pgd) pfn_to_page(pgd_pfn(pgd)) macro
693 struct page *pgd_page; in kvm_riscv_gstage_alloc_pgd() local
700 pgd_page = alloc_pages(GFP_KERNEL | __GFP_ZERO, in kvm_riscv_gstage_alloc_pgd()
702 if (!pgd_page) in kvm_riscv_gstage_alloc_pgd()
704 kvm->arch.pgd = page_to_virt(pgd_page); in kvm_riscv_gstage_alloc_pgd()
705 kvm->arch.pgd_phys = page_to_phys(pgd_page); in kvm_riscv_gstage_alloc_pgd()
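These RISC-V KVM hits allocate the guest-stage root table as whole zeroed pages and record both a virtual and a physical view of it. Below is a rough user-space analogue of the pattern, hedged accordingly: aligned_alloc() plus memset() stand in for alloc_pages(GFP_KERNEL | __GFP_ZERO, ...), fake_kvm_arch is invented for the sketch, and the "physical" address is just the pointer value, since user space has no real one.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SIZE 4096UL

struct fake_kvm_arch {
        void *pgd;              /* virtual view, cf. page_to_virt() */
        uint64_t pgd_phys;      /* physical view, cf. page_to_phys() */
};

int main(void)
{
        struct fake_kvm_arch arch;
        void *pgd_page = aligned_alloc(PAGE_SIZE, PAGE_SIZE);

        if (!pgd_page)          /* cf. the !pgd_page check above */
                return 1;
        memset(pgd_page, 0, PAGE_SIZE);         /* cf. __GFP_ZERO */

        arch.pgd = pgd_page;
        arch.pgd_phys = (uint64_t)(uintptr_t)pgd_page;  /* stand-in PA */

        printf("pgd virt=%p phys=0x%llx\n", arch.pgd,
               (unsigned long long)arch.pgd_phys);
        free(pgd_page);
        return 0;
}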
183 int kernel_ident_mapping_init(struct x86_mapping_info *info, pgd_t *pgd_page, in kernel_ident_mapping_init() argument
199 pgd_t *pgd = pgd_page + pgd_index(addr); in kernel_ident_mapping_init()
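The second hit here is the heart of the identity-map builder: pgd_index() extracts the top address bits and uses them as an array index into the pgd page. A small runnable sketch of that arithmetic, assuming the usual x86-64 4-level constants (PGDIR_SHIFT = 39, PTRS_PER_PGD = 512):

#include <stdio.h>

#define PGDIR_SHIFT     39      /* assumed: x86-64, 4-level paging */
#define PTRS_PER_PGD    512

typedef struct { unsigned long pgd; } pgd_t;

static unsigned long pgd_index(unsigned long addr)
{
        return (addr >> PGDIR_SHIFT) & (PTRS_PER_PGD - 1);
}

int main(void)
{
        static pgd_t pgd_page[PTRS_PER_PGD];    /* stand-in root table */
        unsigned long addr = 0xffff888000000000UL;
        pgd_t *pgd = pgd_page + pgd_index(addr);

        printf("addr 0x%lx -> pgd index %lu (entry %p)\n",
               addr, pgd_index(addr), (void *)pgd);
        return 0;
}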
1576 get_page_bootmem(section_nr, pgd_page(*pgd), MIX_SECTION_INFO); in register_page_bootmem_memmap()
43 #define pgd_page(pgd) (p4d_page((p4d_t){ pgd })) macro
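This definition is the folded-p4d case: when the p4d level does not exist, a pgd_t is rewrapped as a p4d_t via a compound literal and the call simply delegates to p4d_page(). A compilable sketch of the delegation, with simplified stand-in types:

#include <stdio.h>

struct page { int id; };
typedef struct { unsigned long pgd; } pgd_t;
typedef struct { pgd_t pgd; } p4d_t;    /* the folded level just wraps a pgd_t */

static struct page the_page;

static struct page *p4d_page(p4d_t p4d)
{
        (void)p4d;
        return &the_page;
}

#define pgd_page(pgd) (p4d_page((p4d_t){ pgd }))

int main(void)
{
        pgd_t g = { 42 };
        printf("pgd_page -> %p\n", (void *)pgd_page(g));
        return 0;
}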
35 return pgd_page(pgd) == virt_to_page(lm_alias(kasan_early_shadow_p4d)); in kasan_p4d_table()
354 p4d_free(&init_mm, (p4d_t *)page_to_virt(pgd_page(*pgd))); in kasan_free_p4d()
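The kasan_p4d_table() hit works because KASAN's early shadow tables are one shared page, so "does this pgd still point at the early shadow?" reduces to comparing page identities. A user-space sketch of the idea, with virt_to_page() modeled as rounding an address down to its page and a static array standing in for kasan_early_shadow_p4d/lm_alias():

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096UL

static unsigned char early_shadow[PAGE_SIZE] __attribute__((aligned(4096)));

static void *virt_to_page(const void *addr)     /* stand-in: page identity */
{
        return (void *)((uintptr_t)addr & ~(PAGE_SIZE - 1));
}

static int kasan_p4d_table(void *table)
{
        return virt_to_page(table) == virt_to_page(early_shadow);
}

int main(void)
{
        int other;
        printf("%d\n", kasan_p4d_table(early_shadow + 128));    /* 1: same page */
        printf("%d\n", kasan_p4d_table(&other));                /* 0: different page */
        return 0;
}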
37 if (pgd_page(val) == virt_to_page(lm_alias(kasan_early_shadow_p4d))) in ptdump_pgd_entry()
3194 page = pgd_page(orig); in gup_fast_pgd_leaf()
463 pgd_t *pgd_page = (pgd_t *)(((unsigned long)pgd) & PAGE_MASK); in xen_get_user_pgd() local
464 unsigned offset = pgd - pgd_page; in xen_get_user_pgd()
468 struct page *page = virt_to_page(pgd_page); in xen_get_user_pgd()
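These Xen hits recover the page-aligned table from a pointer to one of its entries by masking with PAGE_MASK, then get the entry's index by pointer subtraction (in element units, not bytes). A runnable sketch of that arithmetic, assuming 4 KiB pages:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SIZE 4096UL
#define PAGE_MASK (~(PAGE_SIZE - 1))

typedef struct { unsigned long pgd; } pgd_t;

int main(void)
{
        pgd_t *table = aligned_alloc(PAGE_SIZE, PAGE_SIZE);
        if (!table)
                return 1;

        pgd_t *pgd = table + 7;         /* some entry inside the table */

        pgd_t *pgd_page = (pgd_t *)(((uintptr_t)pgd) & PAGE_MASK);
        unsigned offset = pgd - pgd_page;       /* element index: 7 */

        printf("table=%p recovered=%p offset=%u\n",
               (void *)table, (void *)pgd_page, offset);
        free(table);
        return 0;
}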
1550 #define pgd_page(pgd) pfn_to_page(pgd_pfn(pgd)) macro
1106 #define pgd_page(pgd) pfn_to_page(__phys_to_pfn(__pgd_to_phys(pgd))) macro
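The pfn-based definitions here and at results 1199/1550 all reduce to the same chain: mask the physical address out of the entry, shift it down to a page frame number, and index the memmap. A FLATMEM-style sketch with assumed constants (PTE_ADDR_MASK, PAGE_SHIFT, a tiny static memmap), not any particular arch's real layout:

#include <stdio.h>

#define PAGE_SHIFT      12
#define PTE_ADDR_MASK   0x000ffffffffff000UL    /* assumed address bits */

typedef struct { unsigned long pgd; } pgd_t;
struct page { int id; };

static struct page memmap[1024];                /* stand-in for the memmap */

#define __pgd_to_phys(pgd)      ((pgd).pgd & PTE_ADDR_MASK)
#define __phys_to_pfn(pa)       ((pa) >> PAGE_SHIFT)
#define pfn_to_page(pfn)        (&memmap[pfn])
#define pgd_page(pgd)           pfn_to_page(__phys_to_pfn(__pgd_to_phys(pgd)))

int main(void)
{
        pgd_t g = { (5UL << PAGE_SHIFT) | 0x67 };       /* pfn 5 plus low flag bits */

        printf("entry 0x%lx -> page %p (memmap[5] = %p)\n",
               g.pgd, (void *)pgd_page(g), (void *)&memmap[5]);
        return 0;
}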
1075 struct page *pgd_page = virt_to_page(domain->pgd); in domain_unmap() local
1076 list_add_tail(&pgd_page->lru, freelist); in domain_unmap()
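The domain_unmap() hits show deferred freeing: the page backing the table is not freed on the spot but chained onto a caller-provided freelist through its embedded lru node. A user-space sketch of that intrusive-list pattern, with minimal list_head/list_add_tail stand-ins for the kernel's:

#include <stdio.h>

struct list_head { struct list_head *next, *prev; };

static void list_add_tail(struct list_head *node, struct list_head *head)
{
        node->prev = head->prev;
        node->next = head;
        head->prev->next = node;
        head->prev = node;
}

struct page { struct list_head lru; int id; };  /* lru kept as first member */

int main(void)
{
        struct list_head freelist = { &freelist, &freelist };
        struct page pgd_page = { .id = 1 };

        list_add_tail(&pgd_page.lru, &freelist);        /* queue for later freeing */

        /* recover the page from its node; valid because lru is the first member */
        struct page *p = (struct page *)freelist.next;
        printf("queued page id=%d\n", p->id);
        return 0;
}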