/linux-6.12.1/arch/loongarch/mm/

D  tlbex.S
      51  csrwr t1, EXCEPTION_KS1
      59  csrrd t1, LOONGARCH_CSR_PGDL
      64  alsl.d t1, ra, t1, 3
      66  ld.d t1, t1, 0
      68  alsl.d t1, ra, t1, 3
      71  ld.d t1, t1, 0
      73  alsl.d t1, ra, t1, 3
      75  ld.d ra, t1, 0
      87  alsl.d t1, t0, ra, _PTE_T_LOG2
      91  ll.d t0, t1, 0
          [all …]
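The tlbex.S hits are the software TLB-refill handler's page-table walk: csrrd fetches the page-global directory from CSR.PGDL, each alsl.d (shift-and-add) indexes one directory level, each ld.d descends to the next, and line 91's ll.d re-reads the final PTE for an atomic update. A minimal C sketch of such a walk, three levels for concreteness; the shift and width constants below are assumptions, not values from the file:

    #include <stdint.h>

    /* Assumed layout: 9 index bits per level, 4 KiB pages (illustrative only). */
    #define PTE_SHIFT 12
    #define PMD_SHIFT 21
    #define PGD_SHIFT 30
    #define IDX_MASK  0x1ffu

    /* Each level mirrors one alsl.d (scale index by 8, add table base)
     * followed by one ld.d (fetch the next-level table pointer). */
    static uint64_t walk(const uint64_t *pgd, uint64_t vaddr)
    {
        const uint64_t *pmd, *pte;

        pmd = (const uint64_t *)pgd[(vaddr >> PGD_SHIFT) & IDX_MASK];
        pte = (const uint64_t *)pmd[(vaddr >> PMD_SHIFT) & IDX_MASK];
        return pte[(vaddr >> PTE_SHIFT) & IDX_MASK];   /* the PTE itself */
    }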
/linux-6.12.1/arch/loongarch/kernel/

D  lbt.S
      27  movscr2gr t1, $scr0   # save scr
      28  stptr.d t1, a0, THREAD_SCR0
      29  movscr2gr t1, $scr1
      30  stptr.d t1, a0, THREAD_SCR1
      31  movscr2gr t1, $scr2
      32  stptr.d t1, a0, THREAD_SCR2
      33  movscr2gr t1, $scr3
      34  stptr.d t1, a0, THREAD_SCR3
      36  x86mfflag t1, 0x3f    # save eflags
      37  stptr.d t1, a0, THREAD_EFLAGS
          [all …]
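Each movscr2gr/stptr.d pair bounces one LBT scratch register through t1 into the thread structure at the THREAD_SCR* offsets, then x86mfflag does the same for the emulated x86 eflags. A sketch of a save area such a sequence could target; the struct below is hypothetical (the real offsets are asm-offsets generated from the kernel's thread struct):

    #include <stdint.h>

    /* Hypothetical mirror of the LBT save slots THREAD_SCR0..THREAD_SCR3
     * and THREAD_EFLAGS. */
    struct lbt_state {
        uint64_t scr[4];    /* LBT scratch registers $scr0..$scr3 */
        uint64_t eflags;    /* emulated x86 eflags; 0x3f is the flag mask
                             * passed to x86mfflag above */
    };

    /* C-level equivalent of the save sequence; reading the real scratch
     * registers needs the assembly instructions, so they arrive as inputs. */
    static void save_lbt(struct lbt_state *st, const uint64_t scr[4],
                         uint64_t eflags)
    {
        for (int i = 0; i < 4; i++)
            st->scr[i] = scr[i];
        st->eflags = eflags & 0x3f;
    }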
D  fpu.S
     308  fpu_save_csr a0 t1
     309  fpu_save_double a0 t1       # clobbers t1
     310  fpu_save_cc a0 t1 t2        # clobbers t1, t2
     319  fpu_restore_double a0 t1    # clobbers t1
     320  fpu_restore_csr a0 t1 t2
     321  fpu_restore_cc a0 t1 t2     # clobbers t1, t2
     331  lsx_save_all a0 t1 t2
     340  lsx_restore_all a0 t1 t2
     345  lsx_save_all_upper a0 t0 t1
     350  lsx_restore_all_upper a0 t0 t1
          [all …]
/linux-6.12.1/arch/arm/crypto/

D  sha512-armv4.pl
      74  $t1="r10";
      99  mov $t1,$Ehi,lsr#14
     103  eor $t1,$t1,$Elo,lsl#18
     106  eor $t1,$t1,$Ehi,lsr#18
     108  eor $t1,$t1,$Elo,lsl#14
     110  eor $t1,$t1,$Elo,lsr#9
     112  eor $t1,$t1,$Ehi,lsl#23    @ Sigma1(e)
     115  adc $Thi,$Thi,$t1          @ T += Sigma1(e)
     116  ldr $t1,[sp,#$Foff+4]      @ f.hi
     124  eor $t1,$t1,$t3
          [all …]
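The eor chain assembles the high word of SHA-512's Sigma1(e) = ROTR64(e,14) ^ ROTR64(e,18) ^ ROTR64(e,41) from 32-bit shifts of the two halves of e, since ARMv4 has no 64-bit rotate. The same decomposition in C:

    #include <stdint.h>

    /* High 32 bits of Sigma1(e), built from the hi/lo halves of e exactly
     * as the lsr/lsl pairs above do. For n < 32 the hi half of ROTR64(e,n)
     * is (ehi >> n) | (elo << (32-n)); ROTR64(e,41) is ROTR64(e,32+9), so
     * the halves swap roles. */
    static uint32_t sigma1_hi(uint32_t ehi, uint32_t elo)
    {
        uint32_t t;

        t  = (ehi >> 14) | (elo << 18);   /* hi half of ROTR64(e,14) */
        t ^= (ehi >> 18) | (elo << 14);   /* hi half of ROTR64(e,18) */
        t ^= (elo >> 9)  | (ehi << 23);   /* hi half of ROTR64(e,41) */
        return t;
    }

    /* For reference: this equals the top half of
     * ror64(e,14) ^ ror64(e,18) ^ ror64(e,41) with e = ((uint64_t)ehi << 32) | elo. */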
D  sha256-armv4.pl
      52  $len="r2"; $t1="r2";
      76  @ ldr $t1,[$inp],#4        @ $i
      84  rev $t1,$t1
      87  @ ldrb $t1,[$inp,#3]       @ $i
      91  orr $t1,$t1,$t2,lsl#8
      93  orr $t1,$t1,$t0,lsl#16
      98  orr $t1,$t1,$t2,lsl#24
     104  add $h,$h,$t1              @ h+=X[i]
     105  str $t1,[sp,#`$i%16`*4]
     106  eor $t1,$f,$g
          [all …]
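Two input paths show up here: a word load followed by rev (byte swap) on cores that can do it, and a byte-at-a-time fallback that ORs four bytes into a big-endian word for any alignment. The fallback in C, the usual load-big-endian idiom (the function name is ours):

    #include <stdint.h>

    /* Byte-at-a-time big-endian 32-bit load, what the ldrb/orr-with-shift
     * sequence implements; safe at any alignment. */
    static uint32_t load_be32(const uint8_t *p)
    {
        return ((uint32_t)p[0] << 24) |
               ((uint32_t)p[1] << 16) |
               ((uint32_t)p[2] << 8)  |
                (uint32_t)p[3];
    }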
/linux-6.12.1/arch/mips/kernel/

D  cps-vec.S
     148  1: PTR_L t1, VPEBOOTCFG_PC(v1)
     151  jr t1
     207  PTR_LA t1, 1f
     208  jr.hb t1
     238  sll t1, ta1, VPECONF0_XTC_SHIFT
     239  or t0, t0, t1
     275  li t1, COREBOOTCFG_SIZE
     276  mul t0, t0, t1
     277  PTR_LA t1, mips_cps_core_bootcfg
     278  PTR_L t1, 0(t1)
          [all …]
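Lines 275-278 compute this core's slot in the boot-configuration array: core index times COREBOOTCFG_SIZE, added to the base pointer loaded from mips_cps_core_bootcfg. The address arithmetic in C (the size value below is illustrative; the real one is an asm-offsets constant):

    #include <stdint.h>

    #define COREBOOTCFG_SIZE 16   /* assumption: illustrative value only */

    /* li/mul then PTR_LA/PTR_L: cfg = base + core * COREBOOTCFG_SIZE. */
    static void *core_cfg(void *bootcfg_base, unsigned int core)
    {
        return (char *)bootcfg_base + (uintptr_t)core * COREBOOTCFG_SIZE;
    }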
D  octeon_switch.S
      26  mfc0 t1, CP0_STATUS
      27  LONG_S t1, THREAD_STATUS(a0)
      41  li t1, -32768          /* Base address of CVMSEG */
      46  LONG_L t8, 0(t1)       /* Load from CVMSEG */
      48  LONG_L t9, LONGSIZE(t1)/* Load from CVMSEG */
      49  LONG_ADDU t1, LONGSIZE*2 /* Increment loc in CVMSEG */
      77  set_saved_sp t0, t1, t2
      79  mfc0 t1, CP0_STATUS    /* Do we really need this? */
      81  and t1, a3
      85  or a2, t1
          [all …]
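The li t1, -32768 loads the sign-extended base of the Octeon CVMSEG scratchpad, and the paired LONG_L/LONG_ADDU lines walk it two machine words per iteration so its contents survive the context switch. A C sketch of that copy loop; the function and parameter names are ours:

    #include <stdint.h>
    #include <stddef.h>

    /* Hypothetical: drain the CVMSEG scratchpad two words at a time, like
     * the LONG_L t8/t9 + LONG_ADDU loop. `cvmseg` stands for the
     * sign-extended -32768 base; `words` must be even. */
    static void save_cvmseg(uint64_t *dst, const volatile uint64_t *cvmseg,
                            size_t words)
    {
        for (size_t i = 0; i < words; i += 2) {
            uint64_t w0 = cvmseg[i];        /* LONG_L t8, 0(t1) */
            uint64_t w1 = cvmseg[i + 1];    /* LONG_L t9, LONGSIZE(t1) */
            dst[i]     = w0;
            dst[i + 1] = w1;
        }
    }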
D  bmips_5xxx_init.S
      30  addu t1, kva, size ; \
      34  addiu t1, t1, -1 ; \
      35  and t1, t2 ; \
      37  bne t0, t1, 9b ; \
     421  li t1, 0x4
     422  or t0, t1
     427  li t1, 0x4
     428  or t0, t1
     433  li t1, 0x4
     434  or t0, t1
          [all …]
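Lines 30-37 are the tail of a cache-op macro: compute the end address kva + size, round it down to a line boundary with the addiu -1/and pair, then loop (bne ... 9b) until the op cursor reaches that line. The boundary math in C; the line size is an assumption:

    #include <stdint.h>

    #define LINE_SIZE 32UL   /* assumption: illustrative cache-line size */

    /* addu/addiu -1/and: cache-line address of the last byte of
     * [kva, kva + size), the loop's stop value. */
    static uintptr_t last_line(uintptr_t kva, uintptr_t size)
    {
        return (kva + size - 1) & ~(LINE_SIZE - 1);
    }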
/linux-6.12.1/arch/riscv/lib/

D  strlen.S
      23  mv t1, a0
      25  lbu t0, 0(t1)
      27  addi t1, t1, 1
      30  sub a0, t1, a0
      72  REG_L t1, 0(t0)
      79  SHIFT t1, t1, t2
      82  orc.b t1, t1
      85  not t1, t1
      91  CZ t1, t1
      97  srli a0, t1, 3
          [all …]
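Lines 72-97 are the Zbb fast path: load a whole word, orc.b turns every nonzero byte into 0xff and every zero byte into 0x00, not inverts that, a count-zeros (CZ, ctz or clz depending on endianness) locates the first zero byte, and srli by 3 converts the bit index to a byte index. The same scan in C with orc.b written out portably:

    #include <stdint.h>

    /* Portable model of Zbb orc.b: each byte becomes 0xff if nonzero, else 0. */
    static uint64_t orc_b(uint64_t x)
    {
        uint64_t r = 0;
        for (int i = 0; i < 64; i += 8)
            if ((x >> i) & 0xff)
                r |= 0xffULL << i;
        return r;
    }

    /* Index of the first zero byte of a little-endian word, as in strlen.S:
     * ~orc_b(x) has 0xff exactly at zero bytes; ctz/8 picks the first.
     * Caller must guarantee the word contains a zero byte. */
    static unsigned first_zero_byte(uint64_t word)
    {
        uint64_t m = ~orc_b(word);
        return (unsigned)(__builtin_ctzll(m) >> 3);   /* CZ, then srli by 3 */
    }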
D  memmove.S
      88  andi t1, t0, (SZREG - 1)
      89  beqz t1, .Lcoaligned_copy
     134  REG_L t1, (1 * SZREG)(a1)
     137  sll t2, t1, a7
     145  srl t1, t1, a6
     147  or t2, t1, t2
     194  REG_L t1, ( 0 * SZREG)(a4)
     198  sll t1, t1, a7
     200  or t2, t1, t2
     205  REG_L t1, (-2 * SZREG)(a4)
          [all …]
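When source and destination are not co-aligned (the andi/beqz test at lines 88-89), the loop reads aligned source words and splices each destination word out of two neighbours with complementary shifts held in a6 and a7. A sketch of that splice in C; which word takes which shift depends on copy direction, so this shows one orientation:

    #include <stdint.h>

    /* Build one aligned destination word from two adjacent aligned source
     * words when the source is `off` bytes past alignment (1..7); the
     * sll/srl/or triple in memmove.S. Little-endian, 64-bit. */
    static uint64_t splice(uint64_t lo_word, uint64_t hi_word, unsigned off)
    {
        unsigned right = off * 8;        /* a6's role */
        unsigned left  = 64 - right;     /* a7's role */
        return (lo_word >> right) | (hi_word << left);
    }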
/linux-6.12.1/arch/powerpc/crypto/

D  ghashp10-ppc.pl
      57  my ($zero,$t0,$t1,$t2,$xC2,$H,$Hh,$Hl,$lemask)=map("v$_",(4..12));
      88  vsldoi $t1,$zero,$t0,1     # ...1
      91  vor $xC2,$xC2,$t1          # 0xc2....01
      92  vspltb $t1,$H,0            # most significant byte
      94  vsrab $t1,$t1,$t2          # broadcast carry bit
      95  vand $t1,$t1,$xC2
      96  vxor $H,$H,$t1             # twisted H
     130  vsldoi $t1,$zero,$t0,1     # ...1
     133  vor $xC2,$xC2,$t1          # 0xc2....01
     134  vspltb $t1,$H,0            # most significant byte
          [all …]
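This is the GHASH key-setup "twist": splat H's most significant byte, arithmetic-shift it to broadcast the carry bit across the vector, AND with the constant 0xc2...01, and XOR into H. The doubling of H itself happens on lines that don't mention t1, so the search omits them; with that step included, the setup conditionally folds the reduction constant into H<<1. A scalar sketch over two 64-bit halves (the bit-reflected representation's finer points are glossed over); ghashp8-ppc.pl below performs the same setup:

    #include <stdint.h>

    struct u128 { uint64_t hi, lo; };

    /* Twisted H: H <<= 1, then XOR 0xc2...01 iff H's old top bit was set
     * (vspltb + vsrab broadcast the carry; vand/vxor apply the constant). */
    static struct u128 twist_h(struct u128 h)
    {
        uint64_t carry = (uint64_t)((int64_t)h.hi >> 63);  /* all-ones if MSB set */

        h.hi = (h.hi << 1) | (h.lo >> 63);   /* the H<<=1 absent from the hits */
        h.lo <<= 1;

        h.hi ^= carry & 0xc200000000000000ULL;   /* top byte of 0xc2....01 */
        h.lo ^= carry & 1ULL;                    /* ...and its low bit */
        return h;
    }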
D  ghashp8-ppc.pl
      57  my ($zero,$t0,$t1,$t2,$xC2,$H,$Hh,$Hl,$lemask)=map("v$_",(4..12));
      86  vsldoi $t1,$zero,$t0,1     # ...1
      89  vor $xC2,$xC2,$t1          # 0xc2....01
      90  vspltb $t1,$H,0            # most significant byte
      92  vsrab $t1,$t1,$t2          # broadcast carry bit
      93  vand $t1,$t1,$xC2
      94  vxor $H,$H,$t1             # twisted H
     139  vsldoi $t1,$zero,$Xm,8
     141  vxor $Xh,$Xh,$t1
     146  vsldoi $t1,$Xl,$Xl,8       # 2nd phase
          [all …]
/linux-6.12.1/include/crypto/

D  aria.h
     343  static inline void aria_sbox_layer1_with_pre_diff(u32 *t0, u32 *t1, u32 *t2,   in aria_sbox_layer1_with_pre_diff() argument
     350  *t1 = s1[get_u8(*t1, 0)] ^    in aria_sbox_layer1_with_pre_diff()
     351       s2[get_u8(*t1, 1)] ^     in aria_sbox_layer1_with_pre_diff()
     352       x1[get_u8(*t1, 2)] ^     in aria_sbox_layer1_with_pre_diff()
     353       x2[get_u8(*t1, 3)];      in aria_sbox_layer1_with_pre_diff()
     365  static inline void aria_sbox_layer2_with_pre_diff(u32 *t0, u32 *t1, u32 *t2,   in aria_sbox_layer2_with_pre_diff() argument
     372  *t1 = x1[get_u8(*t1, 0)] ^    in aria_sbox_layer2_with_pre_diff()
     373       x2[get_u8(*t1, 1)] ^     in aria_sbox_layer2_with_pre_diff()
     374       s1[get_u8(*t1, 2)] ^     in aria_sbox_layer2_with_pre_diff()
     375       s2[get_u8(*t1, 3)];      in aria_sbox_layer2_with_pre_diff()
          [all …]
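Each 32-bit word is rewritten by sending its four bytes through four different 8-to-32-bit tables (s1, s2 and the "extended" x1, x2, swapped between layer 1 and layer 2) and XORing the results, which performs ARIA's substitution and part of its diffusion in one pass. A self-contained sketch of the pattern; the stand-in get_u8 assumes byte 0 is the most significant, and the tables are placeholders:

    #include <stdint.h>
    typedef uint32_t u32;
    typedef uint8_t u8;

    /* Stand-in for the header's get_u8(): byte Y of X, numbered from the
     * most significant end (an assumption for this sketch). */
    static u8 get_u8(u32 x, int y)
    {
        return (u8)(x >> ((3 - y) * 8));
    }

    /* One word of an ARIA-style S-box layer: four table lookups XORed.
     * The real s1/s2/x1/x2 tables fold the S-box together with the
     * diffusion matrix. */
    static u32 sbox_word(u32 t, const u32 s1[256], const u32 s2[256],
                         const u32 x1[256], const u32 x2[256])
    {
        return s1[get_u8(t, 0)] ^ s2[get_u8(t, 1)] ^
               x1[get_u8(t, 2)] ^ x2[get_u8(t, 3)];
    }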
/linux-6.12.1/arch/csky/abiv2/

D  strcmp.S
      13  andi t1, a0, 0x3
      14  bnez t1, 5f
      19  ldw t1, (a1, 0)
      21  cmpne t0, t1
      29  ldw t1, (a1, 4)
      30  cmpne t0, t1
      36  ldw t1, (a1, 8)
      37  cmpne t0, t1
      43  ldw t1, (a1, 12)
      44  cmpne t0, t1
          [all …]
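After the andi/bnez alignment check, the fast path compares the strings a word at a time, unrolled at offsets 0, 4, 8 and 12; any differing word (or one containing the terminator) drops to a byte loop for the final answer. The word loop in C, with the classic zero-byte test written out (helper names are ours):

    #include <stdint.h>

    /* True if the 32-bit word contains a zero byte:
     * (x - 0x01010101) & ~x & 0x80808080. */
    static int has_zero_byte(uint32_t x)
    {
        return ((x - 0x01010101u) & ~x & 0x80808080u) != 0;
    }

    /* Word-at-a-time core for co-aligned strings: advance while the words
     * match and hold no terminator; return the word index where the byte
     * loop must take over. */
    static unsigned words_equal_prefix(const uint32_t *a, const uint32_t *b)
    {
        unsigned i = 0;
        while (a[i] == b[i] && !has_zero_byte(a[i]))
            i++;
        return i;
    }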
/linux-6.12.1/arch/alpha/lib/

D  stxcpy.S
      49  mskqh t1, a1, t3           # e0 :
      50  ornot t1, t2, t2           # .. e1 :
      53  or t0, t3, t1              # e0 :
      61  stq_u t1, 0(a0)            # e0 :
      63  ldq_u t1, 0(a1)            # e0 :
      65  cmpbge zero, t1, t8        # e0 (stall)
      85  zapnot t1, t6, t1          # e0 : clear src bytes >= null
      88  or t0, t1, t1              # e1 :
      90  1: stq_u t1, 0(a0)         # e0 :
     109  ldq_u t1, 0(a1)            # e0 : load first src word
          [all …]
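cmpbge zero, t1, t8 is Alpha's zero-byte detector: it compares each byte of t1 against zero (unsigned >=) and sets one result bit per byte, so a nonzero t8 means the loaded word contains the string terminator. A portable model of the instruction as used here:

    #include <stdint.h>

    /* Model of `cmpbge zero, x, mask`: bit i of the result is set when
     * byte i of x is zero (0 >= byte, unsigned). Nonzero result = NUL
     * somewhere in the word; ev6-stxcpy.S below uses it the same way. */
    static unsigned cmpbge_zero(uint64_t x)
    {
        unsigned mask = 0;
        for (int i = 0; i < 8; i++)
            if (((x >> (8 * i)) & 0xff) == 0)
                mask |= 1u << i;
        return mask;
    }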
D  ev6-stxcpy.S
      60  mskqh t1, a1, t3           # U :
      61  ornot t1, t2, t2           # E : (stall)
      65  or t0, t3, t1              # E : (stall)
      74  stq_u t1, 0(a0)            # L :
      79  ldq_u t1, 0(a1)            # L : Latency=3
      81  cmpbge zero, t1, t8        # E : (3 cycle stall)
     100  zapnot t1, t6, t1          # U : clear src bytes >= null (stall)
     104  or t0, t1, t1              # E : (stall)
     108  1: stq_u t1, 0(a0)         # L :
     129  ldq_u t1, 0(a1)            # L : load first src word
          [all …]
/linux-6.12.1/drivers/soc/bcm/brcmstb/pm/

D  s2-mips.S
      52  addiu t1, s3, -1
      53  not t1
      56  and t0, t1
      59  and t2, t1
      68  2: move t1, s4
      69  cache 0x1c, 0(t1)
      70  addu t1, s3
      89  li t1, ~(ST0_IM | ST0_IE)
      90  and t0, t1
     121  lw t1, TIMER_TIMER1_STAT(s2)
          [all …]
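Lines 52-59 build an alignment mask from the cache-line size in s3: line size minus one, inverted, then ANDed into the addresses, i.e. addr & ~(linesz - 1); the cache 0x1c loop then steps through the range one line (s3 bytes) at a time. The mask idiom in C:

    #include <stdint.h>

    /* addiu -1 / not / and: round an address down to its cache-line start.
     * linesz must be a power of two. */
    static uintptr_t line_align(uintptr_t addr, uintptr_t linesz)
    {
        return addr & ~(linesz - 1);
    }

s3-mips.S below applies the identical addiu/not/and sequence to its own range before programming the AON power-management registers.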
D  s3-mips.S
      40  mfc0 t1, CP0_STATUS
      41  sw t1, 48(t0)
      44  addiu t1, a1, -1
      45  not t1
      46  and t0, t1
      50  and t2, t1
      57  li t1, PM_WARM_CONFIG
      60  sw t1, AON_CTRL_PM_CTRL(a0)
      61  lw t1, AON_CTRL_PM_CTRL(a0)
      63  li t1, (PM_WARM_CONFIG | PM_PWR_DOWN)
          [all …]
/linux-6.12.1/arch/loongarch/net/

D  bpf_jit.c
     217  u8 t1 = LOONGARCH_GPR_T1;                                     in emit_bpf_tail_call() local
     234  emit_insn(ctx, ldwu, t1, a1, off);                            in emit_bpf_tail_call()
     236  if (emit_tailcall_jmp(ctx, BPF_JGE, a2, t1, jmp_offset) < 0)  in emit_bpf_tail_call()
     284  const u8 t1 = LOONGARCH_GPR_T1;                               in emit_atomic() local
     294  move_imm(ctx, t1, off, false);                                in emit_atomic()
     295  emit_insn(ctx, addd, t1, dst, t1);                            in emit_atomic()
     302  emit_insn(ctx, amaddd, t2, t1, src);                          in emit_atomic()
     304  emit_insn(ctx, amaddw, t2, t1, src);                          in emit_atomic()
     308  emit_insn(ctx, amandd, t2, t1, src);                          in emit_atomic()
     310  emit_insn(ctx, amandw, t2, t1, src);                          in emit_atomic()
          [all …]
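In emit_atomic() the JIT first materialises the operand address as t1 = dst + off (move_imm plus addd), then emits a single LoongArch AM* instruction (amadd.d/amadd.w, amand.d/amand.w, ...) that performs the read-modify-write in memory. The semantics being compiled correspond to a C11 atomic fetch-op on *(dst + off); a minimal model, with the memory ordering shown as seq_cst for simplicity:

    #include <stdint.h>

    /* What the emitted amadd.d computes: atomically add src to the 64-bit
     * value at dst + off and return the old value (t2's role above). */
    static uint64_t model_amadd_d(void *dst, int32_t off, uint64_t src)
    {
        uint64_t *addr = (uint64_t *)((char *)dst + off);  /* move_imm + addd */
        return __atomic_fetch_add(addr, src, __ATOMIC_SEQ_CST);
    }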
/linux-6.12.1/arch/mips/dec/

D  int-handler.S
     133  mfc0 t1,CP0_STATUS
     138  and t0,t1                  # isolate allowed ones
     150  # open coded PTR_LA t1, cpu_mask_nr_tbl
     152  # open coded la t1, cpu_mask_nr_tbl
     153  lui t1, %hi(cpu_mask_nr_tbl)
     154  addiu t1, %lo(cpu_mask_nr_tbl)
     214  2: lw t2,(t1)
     218  addu t1,2*PTRSIZE          # delay slot
     223  lw a0,%lo(-PTRSIZE)(t1)
     239  li t1,CAUSEF_IP>>CAUSEB_IP # mask
          [all …]
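After masking the pending-interrupt bits against CP0_STATUS, the handler walks cpu_mask_nr_tbl, a table of (mask, handler-selector) pairs spaced 2*PTRSIZE apart, taking the first entry whose mask matches. A C sketch of that scan; the entry layout is inferred from the stride, and a zero-mask terminator is assumed:

    #include <stdint.h>

    struct mask_nr {           /* inferred from the 2*PTRSIZE stride */
        uint32_t mask;         /* pending-IRQ bits this entry services */
        long     nr;           /* handler selector for those bits */
    };

    /* Walk the table like the lw/addu loop: first matching mask wins. */
    static long pick_handler(const struct mask_nr *tbl, uint32_t pending)
    {
        for (; tbl->mask; tbl++)
            if (tbl->mask & pending)
                return tbl->nr;
        return -1;             /* no match: spurious interrupt */
    }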
/linux-6.12.1/arch/mips/include/asm/mach-cavium-octeon/

D  kernel-entry-init.h
      47  and t1, v1, 0xfff8
      48  xor t1, t1, 0x9000         # 63-P1
      49  beqz t1, 4f
      50  and t1, v1, 0xfff8
      51  xor t1, t1, 0x9008         # 63-P2
      52  beqz t1, 4f
      53  and t1, v1, 0xfff8
      54  xor t1, t1, 0x9100         # 68-P1
      55  beqz t1, 4f
      56  and t1, v1, 0xff00
          [all …]
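Each and/xor/beqz triple tests the processor ID in v1 against one known Octeon model: mask off the revision bits, XOR with the expected value, and branch if the result is zero (xor-then-branch-on-zero is MIPS's equality test against a constant). The same test in C:

    #include <stdint.h>

    /* The and/xor/beqz idiom: equality on the masked PRID. */
    static int is_model(uint32_t prid, uint32_t mask, uint32_t expect)
    {
        return ((prid & mask) ^ expect) == 0;   /* beqz taken when equal */
    }

    /* Usage mirroring the hits: CN63XX pass 1, CN63XX pass 2, CN68XX pass 1. */
    static int matches_listed_models(uint32_t prid)
    {
        return is_model(prid, 0xfff8, 0x9000) ||
               is_model(prid, 0xfff8, 0x9008) ||
               is_model(prid, 0xfff8, 0x9100);
    }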
/linux-6.12.1/arch/parisc/lib/

D  lusercopy.S
      93  t1 = r19  define
     101  a1 = t1
     119  extru t0,31,2,t1
     120  cmpib,<>,n 0,t1,.Lunaligned_copy
     124  extru t0,31,3,t1
     125  cmpib,<>,n 0,t1,.Lalign_loop32
     129  extru dst,31,3,t1
     130  cmpib,=,n 0,t1,.Lcopy_loop_16_start
     131  20: ldb,ma 1(srcspc,src),t1
     132  21: stb,ma t1,1(dstspc,dst)
          [all …]
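extru r,31,n,t1 extracts the n bits ending at position 31; PA-RISC numbers bits from the most significant end, so position 31 is the LSB and the instruction yields the low n bits, i.e. the address's alignment. The cmpib lines then branch to the unaligned or aligned copy loops, and ldb,ma/stb,ma is the post-incrementing byte fallback. The extract in C:

    #include <stdint.h>

    /* extru x,31,n,t: the n low-order bits of x (its alignment remainder). */
    static unsigned extru_low(uintptr_t x, unsigned n)
    {
        return (unsigned)(x & ((1u << n) - 1));
    }

    /* E.g. extru t0,31,2,t1 followed by cmpib,<> 0 is:
     *     if (extru_low(t0, 2) != 0) goto unaligned_copy;
     * (which quantity t0 holds at that point is not visible in the hits). */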
/linux-6.12.1/crypto/

D  sha512_generic.c
     101  u64 a, b, c, d, e, f, g, h, t1, t2;                            in sha512_transform() local
     126  t1 = h + e1(e) + Ch(e,f,g) + sha512_K[i ] + W[(i & 15)];       in sha512_transform()
     127  t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;                       in sha512_transform()
     128  t1 = g + e1(d) + Ch(d,e,f) + sha512_K[i+1] + W[(i & 15) + 1];  in sha512_transform()
     129  t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;                       in sha512_transform()
     130  t1 = f + e1(c) + Ch(c,d,e) + sha512_K[i+2] + W[(i & 15) + 2];  in sha512_transform()
     131  t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;                       in sha512_transform()
     132  t1 = e + e1(b) + Ch(b,c,d) + sha512_K[i+3] + W[(i & 15) + 3];  in sha512_transform()
     133  t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;                       in sha512_transform()
     134  t1 = d + e1(a) + Ch(a,b,c) + sha512_K[i+4] + W[(i & 15) + 4];  in sha512_transform()
          [all …]
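sha512_transform() unrolls the compression loop by rotating the eight working-variable names instead of moving values, so each pair of lines above is the same round with the letters permuted. The rolled equivalent makes the recurrence easier to see; Ch, Maj, e0 and e1 are written out as the file defines them:

    #include <stdint.h>
    typedef uint64_t u64;

    static u64 ror64(u64 x, int n) { return (x >> n) | (x << (64 - n)); }
    static u64 Ch(u64 x, u64 y, u64 z)  { return z ^ (x & (y ^ z)); }
    static u64 Maj(u64 x, u64 y, u64 z) { return (x & y) | (z & (x | y)); }
    static u64 e0(u64 x) { return ror64(x, 28) ^ ror64(x, 34) ^ ror64(x, 39); }
    static u64 e1(u64 x) { return ror64(x, 14) ^ ror64(x, 18) ^ ror64(x, 41); }

    /* One rolled SHA-512 round over state s = {a..h}; the unrolled source
     * is eight copies of this with the names a..h rotated, not the values. */
    static void sha512_round(u64 s[8], u64 k, u64 w)
    {
        u64 t1 = s[7] + e1(s[4]) + Ch(s[4], s[5], s[6]) + k + w;
        u64 t2 = e0(s[0]) + Maj(s[0], s[1], s[2]);

        s[7] = s[6]; s[6] = s[5]; s[5] = s[4];
        s[4] = s[3] + t1;                        /* d += t1 */
        s[3] = s[2]; s[2] = s[1]; s[1] = s[0];
        s[0] = t1 + t2;                          /* h = t1 + t2 */
    }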
/linux-6.12.1/arch/loongarch/kvm/

D  switch.S
      68  ld.d t1, a2, KVM_VCPU_KVM - KVM_VCPU_ARCH
      72  ldx.d t0, t1, t0
      76  csrrd t1, LOONGARCH_CSR_GSTAT
      77  bstrpick.w t1, t1, CSR_GSTAT_GID_SHIFT_END, CSR_GSTAT_GID_SHIFT
      79  bstrins.w t0, t1, CSR_GTLBC_TGID_SHIFT_END, CSR_GTLBC_TGID_SHIFT
     141  ld.d t1, a2, KVM_ARCH_HECFG
     142  or t0, t0, t1
     230  fpu_save_csr a0 t1
     231  fpu_save_double a0 t1
     232  fpu_save_cc a0 t1 t2
          [all …]
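bstrpick.w extracts bit field [end:start] into the low bits, and bstrins.w writes the source's low bits into the destination's field, so lines 76-79 copy the guest ID out of CSR.GSTAT into the TGID field of CSR.GTLBC. The pair in C, with generic field positions (fields narrower than 32 bits assumed):

    #include <stdint.h>

    /* bstrpick.w rd, rs, end, start: bits [end:start] of rs, right-justified. */
    static uint32_t bstrpick_w(uint32_t rs, int end, int start)
    {
        return (rs >> start) & ((1u << (end - start + 1)) - 1);
    }

    /* bstrins.w rd, rs, end, start: replace bits [end:start] of rd with the
     * low bits of rs. */
    static uint32_t bstrins_w(uint32_t rd, uint32_t rs, int end, int start)
    {
        uint32_t m = ((1u << (end - start + 1)) - 1) << start;
        return (rd & ~m) | ((rs << start) & m);
    }

    /* Lines 76-79 in these terms: gtlbc.TGID = gstat.GID, using the
     * CSR_GSTAT_GID_* and CSR_GTLBC_TGID_* positions from the file. */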
/linux-6.12.1/arch/mips/include/asm/sibyte/

D  board.h
      25  #define setleds(t0, t1, c0, c1, c2, c3) \
      27      li t1, c0; \
      28      sb t1, 0x18(t0); \
      29      li t1, c1; \
      30      sb t1, 0x10(t0); \
      31      li t1, c2; \
      32      sb t1, 0x08(t0); \
      33      li t1, c3; \
      34      sb t1, 0x00(t0)
      36  #define setleds(t0, t1, c0, c1, c2, c3)
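setleds stores four characters to byte registers at descending offsets 0x18, 0x10, 0x08 and 0x00 from the LED base held in t0; the second, empty definition at line 36 is the stub for configurations without LEDs. A C equivalent of the four li/sb pairs (the function signature is ours):

    #include <stdint.h>

    /* Write a 4-character status code to the LED byte registers, one byte
     * per character at offsets 0x18, 0x10, 0x08, 0x00 from the base. */
    static void setleds_c(volatile uint8_t *base,
                          char c0, char c1, char c2, char c3)
    {
        base[0x18] = (uint8_t)c0;
        base[0x10] = (uint8_t)c1;
        base[0x08] = (uint8_t)c2;
        base[0x00] = (uint8_t)c3;
    }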