Searched refs: AARCH64_INSN_SIZE (Results 1 – 16 of 16) sorted by relevance
/linux-6.12.1/arch/arm64/include/asm/ |
D | uprobes.h | 14 #define UPROBE_SWBP_INSN_SIZE AARCH64_INSN_SIZE
              | 15 #define UPROBE_XOL_SLOT_BYTES AARCH64_INSN_SIZE
|
D | insn-def.h | 9 #define AARCH64_INSN_SIZE 4   (macro definition)
|
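Note: AARCH64_INSN_SIZE is 4 because every A64 instruction is exactly 4 bytes wide, which is why the per-subsystem sizes in the other headers here (UPROBE_SWBP_INSN_SIZE, JUMP_LABEL_NOP_SIZE, BREAK_INSTR_SIZE, MCOUNT_INSN_SIZE) can all alias this one constant. A minimal sketch, not kernel code, with next_insn() as a hypothetical helper:

    #include <stdint.h>

    #define AARCH64_INSN_SIZE 4   /* value from insn-def.h line 9 above */

    /* Hypothetical helper: on a fixed-width ISA, "the next instruction"
     * is always a constant offset away, with no decoding required. */
    static inline uint64_t next_insn(uint64_t pc)
    {
            return pc + AARCH64_INSN_SIZE;
    }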
D | jump_label.h | 17 #define JUMP_LABEL_NOP_SIZE AARCH64_INSN_SIZE
|
D | debug-monitors.h | 35 #define BREAK_INSTR_SIZE AARCH64_INSN_SIZE
|
D | ftrace.h | 22 #define MCOUNT_INSN_SIZE AARCH64_INSN_SIZE
|
D | alternative-macros.h | 203 nops (662b-661b) / AARCH64_INSN_SIZE
|
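The alternative-macros.h hit divides the byte span between two local assembler labels by AARCH64_INSN_SIZE to get an instruction count (how many NOPs to emit as padding). A hedged C rendering of that arithmetic, with illustrative names:

    /* Illustrative only: number of NOPs needed to pad out a byte span,
     * one per remaining 4-byte instruction slot. */
    static unsigned int nops_needed(unsigned long start, unsigned long end)
    {
            return (end - start) / AARCH64_INSN_SIZE;
    }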
D | kvm_asm.h | 37 #define KVM_VECTOR_PREAMBLE (2 * AARCH64_INSN_SIZE)
|
/linux-6.12.1/arch/arm64/kernel/ |
D | ftrace.c | 92 return addr + AARCH64_INSN_SIZE; in ftrace_call_adjust()
             | 126 addr += 2 * AARCH64_INSN_SIZE; in ftrace_call_adjust()
             | 133 addr += AARCH64_INSN_SIZE; in ftrace_call_adjust()
             | 141 addr += AARCH64_INSN_SIZE; in ftrace_call_adjust()
             | 393 unsigned long pc = rec->ip - AARCH64_INSN_SIZE; in ftrace_init_nop()
|
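The ftrace.c hits all step a call-site address by whole instructions; how many depends on the kernel configuration (what precedes the patched branch), so the sketch below only shows the shape of the arithmetic, with skip_insns as a hypothetical parameter:

    /* Sketch, not the real ftrace_call_adjust(): move a recorded address
     * forward by a configuration-dependent number of instructions. */
    static unsigned long adjust_callsite(unsigned long addr, int skip_insns)
    {
            return addr + skip_insns * AARCH64_INSN_SIZE;
    }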
D | patching.c | 62 ret = copy_from_kernel_nofault(&val, addr, AARCH64_INSN_SIZE); in aarch64_insn_read()
               | 78 ret = copy_to_kernel_nofault(waddr, &insn, AARCH64_INSN_SIZE); in __aarch64_insn_write()
               | 195 (uintptr_t)tp + AARCH64_INSN_SIZE); in aarch64_insn_patch_text_nosync()
|
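Both patching.c hits move exactly one instruction word with the *_nofault copy helpers. A condensed sketch of the read side, closely following the line-62 snippet above (simplified, not the verbatim kernel function):

    #include <linux/uaccess.h>   /* copy_from_kernel_nofault() */
    #include <asm/insn.h>        /* AARCH64_INSN_SIZE via insn-def.h */

    static int read_one_insn(void *addr, u32 *insnp)
    {
            __le32 val;
            int ret;

            /* Fault-tolerant 4-byte read of the instruction word. */
            ret = copy_from_kernel_nofault(&val, addr, AARCH64_INSN_SIZE);
            if (!ret)
                    *insnp = le32_to_cpu(val);   /* A64 instructions are stored little-endian */
            return ret;
    }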
D | traps.c | 586 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in user_cache_maint_handler()
            | 605 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in ctr_read_handler()
            | 616 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in cntvct_read_handler()
            | 628 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in cntfrq_read_handler()
            | 645 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in wfi_handler()
            | 1006 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in bug_handler()
            | 1036 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in cfi_handler()
            | 1097 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in kasan_handler()
|
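Every traps.c hit ends the same way: the handler emulates (or ignores) the trapped instruction and then retires it by advancing the PC exactly one instruction. A minimal sketch of that pattern; do_emulate_foo() is a hypothetical handler, not one of the functions listed:

    static void do_emulate_foo(struct pt_regs *regs, u32 insn)
    {
            /* ... emulate the effect of 'insn' on 'regs' ... */

            /* Resume execution at the instruction after the one that trapped. */
            arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE);
    }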
D | entry-ftrace.S | 52 ldr x11, [x11, #-(4 * AARCH64_INSN_SIZE)] // op
                   | 98 sub x0, x30, #AARCH64_INSN_SIZE // ip (callsite's BL insn)
                   | 226 sub \rd, \rn, #AARCH64_INSN_SIZE
|
D | alternative.c | 167 nr_inst = alt->orig_len / AARCH64_INSN_SIZE; in __apply_alternatives()
|
D | kgdb.c | 344 BUILD_BUG_ON(AARCH64_INSN_SIZE != BREAK_INSTR_SIZE); in kgdb_arch_set_breakpoint()
|
D | cpufeature.c | 3720 arm64_skip_faulting_instruction(regs, AARCH64_INSN_SIZE); in do_emulate_mrs()
|
/linux-6.12.1/arch/arm64/kernel/probes/ |
D | uprobes.c | 42 else if (!IS_ALIGNED(addr, AARCH64_INSN_SIZE)) in arch_uprobe_analyze_insn()
|
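The uprobes.c hit rejects probe addresses that do not land on an instruction boundary; with a fixed-width ISA this is a plain alignment test. Illustrative sketch (probe_addr_valid() is not a kernel function):

    static bool probe_addr_valid(unsigned long addr)
    {
            /* A valid probe must point at the start of a 4-byte A64 instruction. */
            return IS_ALIGNED(addr, AARCH64_INSN_SIZE);
    }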
/linux-6.12.1/arch/arm64/net/ |
D | bpf_jit_comp.c | 848 if ((ctx->idx + PLT_TARGET_OFFSET / AARCH64_INSN_SIZE) % 2) in build_plt()
                   | 853 emit(A64_LDR64LIT(tmp, 2 * AARCH64_INSN_SIZE), ctx); in build_plt()
                   | 951 fixup_offset = (long)&ex->fixup - (pc + AARCH64_INSN_SIZE); in add_exception_handler()
                   | 1940 ctx.offset[i] *= AARCH64_INSN_SIZE; in bpf_int_jit_compile()
                   | 2250 emit(A64_ADR(A64_LR, AARCH64_INSN_SIZE * 2), ctx); in prepare_trampoline()
                   | 2346 return ret < 0 ? ret : ret * AARCH64_INSN_SIZE; in arch_bpf_trampoline_size()
                   | 2401 ret *= AARCH64_INSN_SIZE; in arch_prepare_bpf_trampoline()
                   | 2546 ip = image + POKE_OFFSET * AARCH64_INSN_SIZE; in bpf_arch_text_poke()
|
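The BPF JIT hits mostly convert between instruction indices and byte offsets inside the generated image, which on a fixed-width ISA is a multiply or divide by AARCH64_INSN_SIZE. A hedged sketch of both directions, with illustrative names rather than the JIT's own:

    /* Instruction index -> byte offset within the JITed image. */
    static unsigned long insn_idx_to_offset(unsigned int idx)
    {
            return (unsigned long)idx * AARCH64_INSN_SIZE;
    }

    /* Byte offset -> instruction index (assumes a 4-byte-aligned offset). */
    static unsigned int offset_to_insn_idx(unsigned long off)
    {
            return off / AARCH64_INSN_SIZE;
    }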