/linux-6.12.1/arch/microblaze/kernel/ |
D | asm-offsets.c |
    89   DEFINE(TI_CPU_CONTEXT, offsetof(struct thread_info, cpu_context));  in main()
    94   DEFINE(CC_R1, offsetof(struct cpu_context, r1)); /* r1 */  in main()
    95   DEFINE(CC_R2, offsetof(struct cpu_context, r2));  in main()
    97   DEFINE(CC_R13, offsetof(struct cpu_context, r13));  in main()
    98   DEFINE(CC_R14, offsetof(struct cpu_context, r14));  in main()
    99   DEFINE(CC_R15, offsetof(struct cpu_context, r15));  in main()
    100  DEFINE(CC_R16, offsetof(struct cpu_context, r16));  in main()
    101  DEFINE(CC_R17, offsetof(struct cpu_context, r17));  in main()
    102  DEFINE(CC_R18, offsetof(struct cpu_context, r18));  in main()
    104  DEFINE(CC_R19, offsetof(struct cpu_context, r19));  in main()
    [all …]
|
D | process.c |
    67   memset(&ti->cpu_context, 0, sizeof(struct cpu_context));  in copy_thread()
    68   ti->cpu_context.r1 = (unsigned long)childregs;  in copy_thread()
    69   ti->cpu_context.r20 = (unsigned long)args->fn;  in copy_thread()
    70   ti->cpu_context.r19 = (unsigned long)args->fn_arg;  in copy_thread()
    73   ti->cpu_context.msr = childregs->msr & ~MSR_IE;  in copy_thread()
    74   ti->cpu_context.r15 = (unsigned long)ret_from_kernel_thread - 8;  in copy_thread()
    81   memset(&ti->cpu_context, 0, sizeof(struct cpu_context));  in copy_thread()
    82   ti->cpu_context.r1 = (unsigned long)childregs;  in copy_thread()
    101  ti->cpu_context.msr = (childregs->msr|MSR_VM);  in copy_thread()
    102  ti->cpu_context.msr &= ~MSR_UMS; /* switch_to to kernel mode */  in copy_thread()
    [all …]
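The copy_thread() hits above (and the arm64/arm ones further down) share one pattern: the child's cpu_context is zeroed, its saved stack pointer is aimed at the child's pt_regs, the thread function and its argument are parked in two callee-saved registers, and the saved return address points at a fork/kernel-thread trampoline. A minimal userspace sketch of that idea follows; all names are stand-ins and this is a model, not the kernel's code.

/*
 * Minimal userspace model of the copy_thread() pattern above: the child's
 * saved context is pre-seeded so the first context switch "returns" into a
 * trampoline that calls fn(fn_arg).  Illustrative only, not kernel code.
 */
#include <stdio.h>

struct cpu_context {                /* callee-saved state kept per task   */
	unsigned long sp;           /* saved stack pointer                 */
	unsigned long pc;           /* where the first switch "returns" to */
	unsigned long fn;           /* r19/x19-style slot: thread function */
	unsigned long fn_arg;       /* r20/x20-style slot: its argument    */
};

static void ret_from_kernel_thread(struct cpu_context *ctx)
{
	/* trampoline: call fn(fn_arg) out of the pre-seeded slots */
	((int (*)(void *))ctx->fn)((void *)ctx->fn_arg);
}

static int worker(void *arg)
{
	printf("thread function called with \"%s\"\n", (const char *)arg);
	return 0;
}

int main(void)
{
	static unsigned long child_stack[256];
	struct cpu_context ctx = {
		.sp     = (unsigned long)&child_stack[256],   /* stack top */
		.pc     = (unsigned long)ret_from_kernel_thread,
		.fn     = (unsigned long)worker,
		.fn_arg = (unsigned long)"hello",
	};

	/* a real switch_to() would load ctx.sp and jump to ctx.pc; here the
	 * trampoline is simply called on the current stack */
	((void (*)(struct cpu_context *))ctx.pc)(&ctx);
	return 0;
}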
|
D | unwind.c |
    284  const struct cpu_context *cpu_context =  in microblaze_unwind() local
    285  &thread_info->cpu_context;  in microblaze_unwind()
    289  cpu_context->r1,  in microblaze_unwind()
    290  cpu_context->r15,  in microblaze_unwind()
|
D | traps.c |
    43   (task->stack))->cpu_context.r1;  in show_stack()
|
/linux-6.12.1/arch/arm64/kernel/ |
D | kgdb.c |
    131  struct cpu_context *cpu_context = &task->thread.cpu_context;  in sleeping_thread_to_gdb_regs() local
    136  gdb_regs[19] = cpu_context->x19;  in sleeping_thread_to_gdb_regs()
    137  gdb_regs[20] = cpu_context->x20;  in sleeping_thread_to_gdb_regs()
    138  gdb_regs[21] = cpu_context->x21;  in sleeping_thread_to_gdb_regs()
    139  gdb_regs[22] = cpu_context->x22;  in sleeping_thread_to_gdb_regs()
    140  gdb_regs[23] = cpu_context->x23;  in sleeping_thread_to_gdb_regs()
    141  gdb_regs[24] = cpu_context->x24;  in sleeping_thread_to_gdb_regs()
    142  gdb_regs[25] = cpu_context->x25;  in sleeping_thread_to_gdb_regs()
    143  gdb_regs[26] = cpu_context->x26;  in sleeping_thread_to_gdb_regs()
    144  gdb_regs[27] = cpu_context->x27;  in sleeping_thread_to_gdb_regs()
    [all …]
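For a task that is not currently on a CPU, the debugger can only report what switch_to() saved: the callee-saved registers plus sp and pc held in cpu_context; everything else is unknowable and is reported as zero. A rough, self-contained model of the sleeping_thread_to_gdb_regs() idea follows; the struct layout and register indices here are assumptions for illustration, not the real arm64 ones.

#include <stdio.h>
#include <string.h>

#define GDB_MAX_REGS 34

struct cpu_context { unsigned long x19, x20, fp, sp, pc; };

static void sleeping_thread_to_gdb_regs(unsigned long *gdb_regs,
					const struct cpu_context *ctx)
{
	/* caller-saved registers were not preserved across the switch */
	memset(gdb_regs, 0, GDB_MAX_REGS * sizeof(*gdb_regs));

	gdb_regs[19] = ctx->x19;   /* callee-saved regs survive switch_to() */
	gdb_regs[20] = ctx->x20;
	gdb_regs[29] = ctx->fp;    /* frame pointer slot (index assumed)    */
	gdb_regs[31] = ctx->sp;    /* stack pointer slot (index assumed)    */
	gdb_regs[32] = ctx->pc;    /* resume address slot (index assumed)   */
}

int main(void)
{
	struct cpu_context ctx = { .x19 = 1, .x20 = 2, .fp = 3, .sp = 4, .pc = 5 };
	unsigned long regs[GDB_MAX_REGS];

	sleeping_thread_to_gdb_regs(regs, &ctx);
	printf("pc slot = %lu\n", regs[32]);
	return 0;
}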
|
D | process.c |
    359  memset(&p->thread.cpu_context, 0, sizeof(struct cpu_context));  in copy_thread()
    413  p->thread.cpu_context.x19 = (unsigned long)args->fn;  in copy_thread()
    414  p->thread.cpu_context.x20 = (unsigned long)args->fn_arg;  in copy_thread()
    419  p->thread.cpu_context.pc = (unsigned long)ret_from_fork;  in copy_thread()
    420  p->thread.cpu_context.sp = (unsigned long)childregs;  in copy_thread()
    425  p->thread.cpu_context.fp = (unsigned long)childregs->stackframe;  in copy_thread()
|
/linux-6.12.1/arch/mips/mm/ |
D | tlb-r3k.c |
    74   if (cpu_context(cpu, mm) != 0) {  in local_flush_tlb_range()
    79   cpu_context(cpu, mm) & asid_mask, start, end);  in local_flush_tlb_range()
    85   int newpid = cpu_context(cpu, mm) & asid_mask;  in local_flush_tlb_range()
    152  if (cpu_context(cpu, vma->vm_mm) != 0) {  in local_flush_tlb_page()
    157  printk("[tlbpage<%lu,0x%08lx>]", cpu_context(cpu, vma->vm_mm), page);  in local_flush_tlb_page()
    159  newpid = cpu_context(cpu, vma->vm_mm) & asid_mask;  in local_flush_tlb_page()
    194  if ((pid != (cpu_context(cpu, vma->vm_mm) & asid_mask)) || (cpu_context(cpu, vma->vm_mm) == 0)) {  in __update_tlb()
    196  (cpu_context(cpu, vma->vm_mm)), pid);  in __update_tlb()
|
D | context.c |
    59   if (!asid_versions_eq(cpu, cpu_context(cpu, mm), asid_cache(cpu)))  in check_mmu_context()
    129  mmid = cpu_context(0, mm);  in get_new_mmid()
    209  ctx = cpu_context(cpu, mm);  in check_switch_mmu_context()
    216  ctx = cpu_context(cpu, mm);  in check_switch_mmu_context()
|
D | c-r3k.c |
    246  cpu_context(smp_processor_id(), mm), addr);  in r3k_flush_cache_page()
    249  if (cpu_context(smp_processor_id(), mm) == 0)  in r3k_flush_cache_page()
|
/linux-6.12.1/arch/arm/kernel/ |
D | kgdb.c |
    91   gdb_regs[_R4] = ti->cpu_context.r4;  in sleeping_thread_to_gdb_regs()
    92   gdb_regs[_R5] = ti->cpu_context.r5;  in sleeping_thread_to_gdb_regs()
    93   gdb_regs[_R6] = ti->cpu_context.r6;  in sleeping_thread_to_gdb_regs()
    94   gdb_regs[_R7] = ti->cpu_context.r7;  in sleeping_thread_to_gdb_regs()
    95   gdb_regs[_R8] = ti->cpu_context.r8;  in sleeping_thread_to_gdb_regs()
    96   gdb_regs[_R9] = ti->cpu_context.r9;  in sleeping_thread_to_gdb_regs()
    97   gdb_regs[_R10] = ti->cpu_context.sl;  in sleeping_thread_to_gdb_regs()
    98   gdb_regs[_FP] = ti->cpu_context.fp;  in sleeping_thread_to_gdb_regs()
    99   gdb_regs[_SPT] = ti->cpu_context.sp;  in sleeping_thread_to_gdb_regs()
    100  gdb_regs[_PC] = ti->cpu_context.pc;  in sleeping_thread_to_gdb_regs()
|
D | xscale-cp0.c |
    39   thread->cpu_context.extra[0] = 0;  in dsp_do()
    40   thread->cpu_context.extra[1] = 0;  in dsp_do()
    44   dsp_save_state(current_thread_info()->cpu_context.extra);  in dsp_do()
    45   dsp_load_state(thread->cpu_context.extra);  in dsp_do()
|
D | process.c |
    243  memset(&thread->cpu_context, 0, sizeof(struct cpu_context_save));  in copy_thread()
    262  thread->cpu_context.r4 = (unsigned long)args->fn_arg;  in copy_thread()
    263  thread->cpu_context.r5 = (unsigned long)args->fn;  in copy_thread()
    266  thread->cpu_context.pc = (unsigned long)ret_from_fork;  in copy_thread()
    267  thread->cpu_context.sp = (unsigned long)childregs;  in copy_thread()
|
/linux-6.12.1/arch/loongarch/include/asm/ |
D | mmu_context.h |
    34   #define cpu_context(cpu, mm) ((mm)->context.asid[cpu])  macro
    36   #define cpu_asid(cpu, mm) (cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu]))
    40   if ((cpu_context(cpu, mm) ^ asid_cache(cpu)) & asid_version_mask(cpu))  in asid_valid()
    59   cpu_context(cpu, mm) = asid_cache(cpu) = asid;  in get_new_mmu_context()
    72   cpu_context(i, mm) = 0;  in init_new_context()
    165  cpu_context(cpu, mm) = 0;  in drop_mmu_context()
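These LoongArch macros, the SH definitions below and the MIPS helpers further down all implement the same per-CPU ASID scheme: cpu_context(cpu, mm) holds a value whose low bits are the hardware ASID and whose high bits are a generation counter; the ASID is usable only while its generation matches the CPU's asid_cache, and exhausting the ASID space forces a full TLB flush plus a generation bump. The following is a self-contained model of that scheme; the mask width, starting generation and the choice to skip ASID 0 are modeling assumptions, not the kernel's constants.

#include <stdint.h>
#include <stdio.h>

#define ASID_BITS     8
#define ASID_MASK     ((1ull << ASID_BITS) - 1)
#define VERSION_MASK  (~ASID_MASK)

/* per-CPU "generation | last ASID handed out"; starts in generation 1 so a
 * zero-initialised mm context (see init_new_context above) is never valid */
static uint64_t asid_cache = 1ull << ASID_BITS;

struct mm { uint64_t context; };        /* models cpu_context(cpu, mm) */

static int asid_valid(const struct mm *mm)
{
	/* usable only while the mm's generation matches this CPU's */
	return !((mm->context ^ asid_cache) & VERSION_MASK);
}

static void get_new_mmu_context(struct mm *mm)
{
	uint64_t asid = asid_cache + 1;

	if (!(asid & ASID_MASK)) {
		/* ASIDs exhausted: flush the TLB, start a new generation */
		puts("local_flush_tlb_all()");
		asid |= 1;              /* skip ASID 0 (modeling choice) */
	}
	mm->context = asid_cache = asid;
}

static uint64_t cpu_asid(struct mm *mm)
{
	if (!asid_valid(mm))
		get_new_mmu_context(mm);
	return mm->context & ASID_MASK; /* value programmed into the MMU */
}

int main(void)
{
	struct mm a = { 0 }, b = { 0 };

	printf("a -> ASID %llu\n", (unsigned long long)cpu_asid(&a));
	printf("b -> ASID %llu\n", (unsigned long long)cpu_asid(&b));
	printf("a again -> ASID %llu\n", (unsigned long long)cpu_asid(&a));
	return 0;
}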
|
/linux-6.12.1/arch/sh/include/asm/ |
D | mmu_context.h |
    40   #define cpu_context(cpu, mm) ((mm)->context.id[cpu])  macro
    43   (cpu_context((cpu), (mm)) & MMU_CONTEXT_ASID_MASK)
    60   if (((cpu_context(cpu, mm) ^ asid) & MMU_CONTEXT_VERSION_MASK) == 0)  in get_mmu_context()
    80   cpu_context(cpu, mm) = asid_cache(cpu) = asid;  in get_mmu_context()
    94   cpu_context(i, mm) = NO_CONTEXT;  in init_new_context()
|
/linux-6.12.1/arch/arm/include/asm/ |
D | thread_info.h |
    68   struct cpu_context_save cpu_context; /* cpu context */  member
    90   ((unsigned long)(task_thread_info(tsk)->cpu_context.pc))
    92   ((unsigned long)(task_thread_info(tsk)->cpu_context.sp))
    96   ((unsigned long)(task_thread_info(tsk)->cpu_context.fp))
    99   ((unsigned long)(task_thread_info(tsk)->cpu_context.r7))
|
/linux-6.12.1/arch/sh/mm/ |
D | tlbflush_32.c |
    19   if (vma->vm_mm && cpu_context(cpu, vma->vm_mm) != NO_CONTEXT) {  in local_flush_tlb_page()
    45   if (cpu_context(cpu, mm) != NO_CONTEXT) {  in local_flush_tlb_range()
    52   cpu_context(cpu, mm) = NO_CONTEXT;  in local_flush_tlb_range()
    112  if (cpu_context(cpu, mm) != NO_CONTEXT) {  in local_flush_tlb_mm()
    116  cpu_context(cpu, mm) = NO_CONTEXT;  in local_flush_tlb_mm()
|
D | cache-sh4.c |
    202  if (cpu_context(smp_processor_id(), mm) == NO_CONTEXT)  in sh4_flush_cache_mm()
    231  if (cpu_context(smp_processor_id(), vma->vm_mm) == NO_CONTEXT)  in sh4_flush_cache_page()
    293  if (cpu_context(smp_processor_id(), vma->vm_mm) == NO_CONTEXT)  in sh4_flush_cache_range()
|
/linux-6.12.1/drivers/hv/ |
D | hv.c |
    36   hv_context.cpu_context = alloc_percpu(struct hv_per_cpu_context);  in hv_init()
    37   if (!hv_context.cpu_context)  in hv_init()
    67   aligned_msg = this_cpu_ptr(hv_context.cpu_context)->post_msg_page;  in hv_post_message()
    108  hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu);  in hv_synic_alloc()
    120  hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu);  in hv_synic_alloc()
    212  per_cpu_ptr(hv_context.cpu_context, cpu);  in hv_synic_free()
    265  per_cpu_ptr(hv_context.cpu_context, cpu);  in hv_synic_enable_regs()
    348  per_cpu_ptr(hv_context.cpu_context, cpu);  in hv_synic_disable_regs()
    412  struct hv_per_cpu_context *hv_cpu = this_cpu_ptr(hv_context.cpu_context);  in hv_synic_event_pending()
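The hv.c lines use the kernel's generic per-cpu allocator: one hv_per_cpu_context per CPU is allocated up front, per_cpu_ptr() reaches a given CPU's copy when iterating, and this_cpu_ptr() reaches the current CPU's copy on hot paths. A bare-bones module sketch of that allocation pattern is shown below; my_per_cpu_context and its field are placeholders, not the Hyper-V structures, and a real driver would do considerably more in init.

#include <linux/module.h>
#include <linux/percpu.h>
#include <linux/cpumask.h>

/* placeholder per-CPU payload (not the Hyper-V structure) */
struct my_per_cpu_context {
	void *post_msg_page;
};

static struct my_per_cpu_context __percpu *my_cpu_context;

static int __init my_init(void)
{
	int cpu;

	/* one instance per possible CPU, allocated in a single call */
	my_cpu_context = alloc_percpu(struct my_per_cpu_context);
	if (!my_cpu_context)
		return -ENOMEM;

	for_each_possible_cpu(cpu) {
		struct my_per_cpu_context *ctx =
			per_cpu_ptr(my_cpu_context, cpu);

		ctx->post_msg_page = NULL;  /* real code allocates pages here */
	}
	return 0;
}

static void __exit my_exit(void)
{
	free_percpu(my_cpu_context);
}

module_init(my_init);
module_exit(my_exit);
MODULE_LICENSE("GPL");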
|
/linux-6.12.1/arch/microblaze/include/asm/ |
D | thread_info.h |
    28   struct cpu_context {  struct
    66   struct cpu_context cpu_context;  argument
|
/linux-6.12.1/arch/arm64/include/asm/ |
D | thread_info.h |
    49   ((unsigned long)(tsk->thread.cpu_context.pc))
    51   ((unsigned long)(tsk->thread.cpu_context.sp))
    53   ((unsigned long)(tsk->thread.cpu_context.fp))
|
D | processor.h |
    131  struct cpu_context {  struct
    148  struct cpu_context cpu_context; /* cpu context */  argument
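Both struct cpu_context definitions (microblaze above, arm64 here) hold only what the switch code itself must preserve: callee-saved registers, stack/frame pointers and a resume address; everything the exception entry saved stays in pt_regs on the kernel stack. POSIX ucontext gives a rough userspace analogue of the save-one-context, resume-another step that the kernel's switch code performs; the sketch below is that analogue, not the kernel's implementation.

#define _XOPEN_SOURCE 700       /* ucontext needs this on some systems */
#include <stdio.h>
#include <ucontext.h>

static ucontext_t main_ctx, thread_ctx;     /* play the role of cpu_context */
static char thread_stack[64 * 1024];

static void thread_fn(void)
{
	puts("child runs on its own stack, from its own saved context");
	swapcontext(&thread_ctx, &main_ctx);    /* "switch_to(main)" */
}

int main(void)
{
	getcontext(&thread_ctx);
	thread_ctx.uc_stack.ss_sp = thread_stack;       /* like cpu_context.sp */
	thread_ctx.uc_stack.ss_size = sizeof(thread_stack);
	thread_ctx.uc_link = &main_ctx;
	makecontext(&thread_ctx, thread_fn, 0);         /* like cpu_context.pc */

	swapcontext(&main_ctx, &thread_ctx);            /* "switch_to(child)" */
	puts("back in main after the switch");
	return 0;
}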
|
/linux-6.12.1/arch/mips/include/asm/ |
D | mmu_context.h |
    106  static inline u64 cpu_context(unsigned int cpu, const struct mm_struct *mm)  in cpu_context() function
    125  (cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu]))
    197  ctx = cpu_context(cpu, mm);  in drop_mmu_context()
|
/linux-6.12.1/arch/loongarch/kernel/ |
D | smp.c |
    651  if (cpu != smp_processor_id() && cpu_context(cpu, mm))  in flush_tlb_mm()
    652  cpu_context(cpu, mm) = 0;  in flush_tlb_mm()
    690  if (cpu != smp_processor_id() && cpu_context(cpu, mm))  in flush_tlb_range()
    691  cpu_context(cpu, mm) = 0;  in flush_tlb_range()
    736  if (cpu != smp_processor_id() && cpu_context(cpu, vma->vm_mm))  in flush_tlb_page()
    737  cpu_context(cpu, vma->vm_mm) = 0;  in flush_tlb_page()
|
/linux-6.12.1/arch/sh/kernel/ |
D | smp.c |
    372  cpu_context(i, mm) = 0;  in flush_tlb_mm()
    409  cpu_context(i, mm) = 0;  in flush_tlb_range()
    452  cpu_context(i, vma->vm_mm) = 0;  in flush_tlb_page()
|
/linux-6.12.1/arch/mips/kernel/ |
D | smp.c |
    559  if (cpu != smp_processor_id() && cpu_context(cpu, mm))  in flush_tlb_mm()
    623  if (cpu != smp_processor_id() && cpu_context(cpu, mm))  in flush_tlb_range()
    689  if (cpu != smp_processor_id() && cpu_context(cpu, vma->vm_mm))  in flush_tlb_page()
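The flush_tlb_mm()/flush_tlb_range()/flush_tlb_page() hits for LoongArch, SH and MIPS all avoid cross-CPU flush IPIs the same way: the current CPU flushes its own TLB, and every other CPU merely has its cpu_context(cpu, mm) zeroed, which forces it to take a fresh ASID (and therefore see no stale translations) the next time it switches to that mm. A small userspace model of this lazy shootdown:

#include <stdio.h>

#define NR_CPUS 4

struct mm { unsigned long context[NR_CPUS]; };   /* cpu_context(cpu, mm) */

static int smp_processor_id(void) { return 0; }  /* pretend we are CPU 0 */

static void local_flush_tlb_mm(struct mm *mm)
{
	(void)mm;
	printf("CPU %d: flushing its own TLB for this mm\n", smp_processor_id());
}

static void flush_tlb_mm(struct mm *mm)
{
	int cpu;

	for (cpu = 0; cpu < NR_CPUS; cpu++) {
		/* remote CPUs: just invalidate their ASID for this mm */
		if (cpu != smp_processor_id() && mm->context[cpu])
			mm->context[cpu] = 0;
	}
	local_flush_tlb_mm(mm);          /* this CPU flushes for real */
}

int main(void)
{
	struct mm mm = { .context = { 7, 9, 0, 12 } };

	flush_tlb_mm(&mm);
	for (int cpu = 0; cpu < NR_CPUS; cpu++)
		printf("cpu_context(%d, mm) = %lu\n", cpu, mm.context[cpu]);
	return 0;
}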
|