Home
last modified time | relevance | path

Searched refs:vaddr_end (Results 1 – 11 of 11) sorted by relevance

/linux-6.12.1/arch/x86/mm/
mem_encrypt_identity.c
82 unsigned long vaddr_end; member
104 pgd_end = ppd->vaddr_end & PGDIR_MASK; in sme_clear_pgd()
194 while (ppd->vaddr < ppd->vaddr_end) { in __sme_map_range_pmd()
204 while (ppd->vaddr < ppd->vaddr_end) { in __sme_map_range_pte()
215 unsigned long vaddr_end; in __sme_map_range() local
221 vaddr_end = ppd->vaddr_end; in __sme_map_range()
224 ppd->vaddr_end = ALIGN(ppd->vaddr, PMD_SIZE); in __sme_map_range()
228 ppd->vaddr_end = vaddr_end & PMD_MASK; in __sme_map_range()
232 ppd->vaddr_end = vaddr_end; in __sme_map_range()
392 ppd.vaddr_end = workarea_end; in sme_encrypt_kernel()
[all …]
mem_encrypt_amd.c
259 unsigned long vaddr_end = vaddr + size; in enc_dec_hypercall() local
261 while (vaddr < vaddr_end) { in enc_dec_hypercall()
372 unsigned long vaddr_end, vaddr_next, start; in early_set_memory_enc_dec() local
380 vaddr_end = vaddr + size; in early_set_memory_enc_dec()
382 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in early_set_memory_enc_dec()
405 ((vaddr_end - vaddr) >= psize)) { in early_set_memory_enc_dec()
427 __pa((vaddr_end & pmask) + psize), in early_set_memory_enc_dec()
523 unsigned long vaddr, vaddr_end, npages; in mem_encrypt_free_decrypted_mem() local
527 vaddr_end = (unsigned long)__end_bss_decrypted; in mem_encrypt_free_decrypted_mem()
528 npages = (vaddr_end - vaddr) >> PAGE_SHIFT; in mem_encrypt_free_decrypted_mem()
[all …]
kaslr.c
41 static const unsigned long vaddr_end = CPU_ENTRY_AREA_BASE; variable
92 BUILD_BUG_ON(vaddr_start >= vaddr_end); in kernel_randomize_memory()
93 BUILD_BUG_ON(vaddr_end != CPU_ENTRY_AREA_BASE); in kernel_randomize_memory()
94 BUILD_BUG_ON(vaddr_end > __START_KERNEL_map); in kernel_randomize_memory()
125 remain_entropy = vaddr_end - vaddr_start; in kernel_randomize_memory()
init_64.c
429 unsigned long vaddr_end = __START_KERNEL_map + KERNEL_IMAGE_SIZE; in cleanup_highmap() local
439 vaddr_end = __START_KERNEL_map + (max_pfn_mapped << PAGE_SHIFT); in cleanup_highmap()
441 for (; vaddr + PMD_SIZE - 1 < vaddr_end; pmd++, vaddr += PMD_SIZE) { in cleanup_highmap()
684 unsigned long vaddr, vaddr_end, vaddr_next, paddr_next, paddr_last; in phys_p4d_init() local
688 vaddr_end = (unsigned long)__va(paddr_end); in phys_p4d_init()
694 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in phys_p4d_init()
716 paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end), in phys_p4d_init()
722 paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end), in phys_p4d_init()
740 unsigned long vaddr, vaddr_start, vaddr_end, vaddr_next, paddr_last; in __kernel_physical_mapping_init() local
744 vaddr_end = (unsigned long)__va(paddr_end); in __kernel_physical_mapping_init()
[all …]
/linux-6.12.1/arch/x86/kernel/
head64.c
96 unsigned long vaddr, vaddr_end; in sme_postprocess_startup() local
110 vaddr_end = (unsigned long)__end_bss_decrypted; in sme_postprocess_startup()
112 for (; vaddr < vaddr_end; vaddr += PMD_SIZE) { in sme_postprocess_startup()
/linux-6.12.1/arch/x86/coco/sev/
core.c
897 unsigned long vaddr_end, int op) in __set_pages_state() argument
914 while (vaddr < vaddr_end && i < ARRAY_SIZE(data->entries)) { in __set_pages_state()
929 (vaddr_end - vaddr) >= PMD_SIZE) { in __set_pages_state()
971 unsigned long vaddr_end; in set_pages_state() local
978 vaddr_end = vaddr + (npages << PAGE_SHIFT); in set_pages_state()
980 while (vaddr < vaddr_end) in set_pages_state()
981 vaddr = __set_pages_state(&desc, vaddr, vaddr_end, op); in set_pages_state()
shared.c
1323 unsigned long vaddr_end = vaddr + PMD_SIZE; in pval_pages() local
1325 for (; vaddr < vaddr_end; vaddr += PAGE_SIZE, pfn++) { in pval_pages()
/linux-6.12.1/Documentation/arch/x86/x86_64/
mm.rst
58 | | | | vaddr_end for KASLR
117 | | | | vaddr_end for KASLR
/linux-6.12.1/arch/x86/xen/
mmu_pv.c
1012 unsigned long vaddr_end) in xen_cleanhighmap() argument
1019 for (; vaddr <= vaddr_end && (pmd < (level2_kernel_pgt + PTRS_PER_PMD)); in xen_cleanhighmap()
1037 void *vaddr_end = vaddr + size; in xen_free_ro_pages() local
1039 for (; vaddr < vaddr_end; vaddr += PAGE_SIZE) in xen_free_ro_pages()
/linux-6.12.1/mm/
huge_memory.c
3886 unsigned long vaddr_end, unsigned int new_order) in split_huge_pages_pid() argument
3895 vaddr_end &= PAGE_MASK; in split_huge_pages_pid()
3913 pid, vaddr_start, vaddr_end); in split_huge_pages_pid()
3920 for (addr = vaddr_start; addr < vaddr_end; addr += PAGE_SIZE) { in split_huge_pages_pid()
4075 unsigned long vaddr_start, vaddr_end; in split_huge_pages_write() local
4117 ret = sscanf(input_buf, "%d,0x%lx,0x%lx,%d", &pid, &vaddr_start, &vaddr_end, &new_order); in split_huge_pages_write()
4127 ret = split_huge_pages_pid(pid, vaddr_start, vaddr_end, new_order); in split_huge_pages_write()
/linux-6.12.1/arch/x86/kvm/svm/
sev.c
741 unsigned long vaddr, vaddr_end, next_vaddr, npages, pages, size, i; in sev_launch_update_data() local
756 vaddr_end = vaddr + size; in sev_launch_update_data()
772 for (i = 0; vaddr < vaddr_end; vaddr = next_vaddr, i += pages) { in sev_launch_update_data()
1226 unsigned long vaddr, vaddr_end, next_vaddr; in sev_dbg_crypt() local
1247 vaddr_end = vaddr + size; in sev_dbg_crypt()
1250 for (; vaddr < vaddr_end; vaddr = next_vaddr) { in sev_dbg_crypt()