/linux-6.12.1/arch/x86/virt/vmx/tdx/
D | tdxcall.S
     52  movq TDX_MODULE_rcx(%rsi), %rcx
     53  movq TDX_MODULE_rdx(%rsi), %rdx
     54  movq TDX_MODULE_r8(%rsi), %r8
     55  movq TDX_MODULE_r9(%rsi), %r9
     56  movq TDX_MODULE_r10(%rsi), %r10
     57  movq TDX_MODULE_r11(%rsi), %r11
     72  movq TDX_MODULE_r12(%rsi), %r12
     73  movq TDX_MODULE_r13(%rsi), %r13
     74  movq TDX_MODULE_r14(%rsi), %r14
     75  movq TDX_MODULE_r15(%rsi), %r15
    [all …]

/linux-6.12.1/arch/x86/lib/
D | memmove_64.S
     34  cmp %rdi, %rsi
     36  mov %rsi, %r8
     67  movq 0*8(%rsi), %r11
     68  movq 1*8(%rsi), %r10
     69  movq 2*8(%rsi), %r9
     70  movq 3*8(%rsi), %r8
     71  leaq 4*8(%rsi), %rsi
     87  movq -8(%rsi, %rdx), %r11
    101  movq (%rsi), %r11
    103  leaq -8(%rsi, %rdx), %rsi
    [all …]

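The memmove_64.S hits above are the overlap test (`cmp %rdi, %rsi`) that selects a forward or backward copy, plus the unrolled 8-byte moves of each path. A minimal C sketch of that direction choice (memmove_sketch is a hypothetical name; in kernel context these are flat addresses, so the pointer comparison is meaningful):

    #include <stddef.h>

    /*
     * Sketch of the overlap logic behind memmove_64.S: copy forward
     * when the destination starts below the source, backward otherwise,
     * so overlapping ranges are never clobbered mid-copy.
     */
    static void *memmove_sketch(void *dst, const void *src, size_t n)
    {
        unsigned char *d = dst;
        const unsigned char *s = src;

        if (d < s) {
            while (n--)         /* forward, like the 0*8..3*8(%rsi) loads */
                *d++ = *s++;
        } else {
            d += n;
            s += n;
            while (n--)         /* backward, like movq -8(%rsi, %rdx), ... */
                *--d = *--s;
        }
        return dst;
    }
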
D | copy_page_64.S
     33  movq 0x8*0(%rsi), %rax
     34  movq 0x8*1(%rsi), %rbx
     35  movq 0x8*2(%rsi), %rdx
     36  movq 0x8*3(%rsi), %r8
     37  movq 0x8*4(%rsi), %r9
     38  movq 0x8*5(%rsi), %r10
     39  movq 0x8*6(%rsi), %r11
     40  movq 0x8*7(%rsi), %r12
     42  prefetcht0 5*64(%rsi)
     53  leaq 64 (%rsi), %rsi
    [all …]

D | memcpy_64.S
     65  movq 0*8(%rsi), %r8
     66  movq 1*8(%rsi), %r9
     67  movq 2*8(%rsi), %r10
     68  movq 3*8(%rsi), %r11
     69  leaq 4*8(%rsi), %rsi
     84  addq %rdx, %rsi
     94  movq -1*8(%rsi), %r8
     95  movq -2*8(%rsi), %r9
     96  movq -3*8(%rsi), %r10
     97  movq -4*8(%rsi), %r11
    [all …]

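memcpy_64.S, like copy_page_64.S above it, moves data in unrolled groups of four 8-byte words per iteration. An illustrative C equivalent of that inner loop, assuming for brevity that the length is a multiple of 32 (the real routine handles heads and tails separately):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /*
     * Illustrative 4x8-byte unrolled copy loop, mirroring the
     * movq/leaq pattern in memcpy_64.S.
     */
    static void copy_unrolled32(void *dst, const void *src, size_t n)
    {
        unsigned char *d = dst;
        const unsigned char *s = src;
        uint64_t w0, w1, w2, w3;

        while (n >= 32) {
            memcpy(&w0, s +  0, 8);   /* movq 0*8(%rsi), %r8  */
            memcpy(&w1, s +  8, 8);   /* movq 1*8(%rsi), %r9  */
            memcpy(&w2, s + 16, 8);   /* movq 2*8(%rsi), %r10 */
            memcpy(&w3, s + 24, 8);   /* movq 3*8(%rsi), %r11 */
            memcpy(d +  0, &w0, 8);
            memcpy(d +  8, &w1, 8);
            memcpy(d + 16, &w2, 8);
            memcpy(d + 24, &w3, 8);
            s += 32;                  /* leaq 4*8(%rsi), %rsi */
            d += 32;
            n -= 32;
        }
    }
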
D | csum-copy_64.S
    107  movq %rbx, (%rsi)
    109  movq %r8, 8(%rsi)
    111  movq %r11, 16(%rsi)
    113  movq %rdx, 24(%rsi)
    116  movq %r10, 32(%rsi)
    118  movq %r15, 40(%rsi)
    120  movq %r14, 48(%rsi)
    122  movq %r13, 56(%rsi)
    125  leaq 64(%rsi), %rsi
    146  movq %rbx, (%rsi)
    [all …]

D | copy_user_uncached_64.S
     40  10: movq (%rsi),%r8
     41  11: movq 8(%rsi),%r9
     42  12: movq 16(%rsi),%r10
     43  13: movq 24(%rsi),%r11
     48  30: movq 32(%rsi),%r8
     49  31: movq 40(%rsi),%r9
     50  32: movq 48(%rsi),%r10
     51  33: movq 56(%rsi),%r11
     57  addq $64,%rsi
    102  50: movq (%rsi),%rax
    [all …]

D | cmpxchg16b_emu.S
     26  cmpq __percpu (%rsi), %rax
     28  cmpq __percpu 8(%rsi), %rdx
     32  movq %rbx, __percpu (%rsi)
     33  movq %rcx, __percpu 8(%rsi)
     45  movq __percpu (%rsi), %rax
     46  movq __percpu 8(%rsi), %rdx

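cmpxchg16b_emu.S emulates a 16-byte compare-and-exchange on per-CPU data: compare both 8-byte halves against %rax/%rdx, store the new %rbx/%rcx pair on a match, otherwise report the current contents back. A non-atomic C sketch of just that logic (struct u128_pair and cmpxchg16b_sketch are illustrative names; the real emulation stays atomic on one CPU by running with interrupts disabled):

    #include <stdbool.h>
    #include <stdint.h>

    /* Two halves of a 16-byte value, as the emulation sees them. */
    struct u128_pair {
        uint64_t lo;    /* compared against %rax, replaced by %rbx */
        uint64_t hi;    /* compared against %rdx, replaced by %rcx */
    };

    static bool cmpxchg16b_sketch(struct u128_pair *ptr,
                                  struct u128_pair *old,
                                  const struct u128_pair *new)
    {
        if (ptr->lo == old->lo && ptr->hi == old->hi) {
            ptr->lo = new->lo;      /* movq %rbx, (%rsi)  */
            ptr->hi = new->hi;      /* movq %rcx, 8(%rsi) */
            return true;
        }
        old->lo = ptr->lo;          /* movq (%rsi), %rax  */
        old->hi = ptr->hi;          /* movq 8(%rsi), %rdx */
        return false;
    }
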
D | copy_mc_64.S
     37  movb (%rsi), %al
     40  incq %rsi
     52  movq (%rsi), %r8
     55  addq $8, %rsi
     68  movb (%rsi), %al
     71  incq %rsi

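copy_mc_64.S shows the classic head/body/tail shape: byte copies (`movb`/`incq`) up to alignment, 8-byte words (`movq`/`addq $8`) through the middle, then byte copies for the remainder. A C sketch of that shape; what it cannot express is the exception-table entry on each access that makes the copy recoverable after a machine check:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    static void copy_head_body_tail(void *dst, const void *src, size_t n)
    {
        unsigned char *d = dst;
        const unsigned char *s = src;

        /* head: byte copies until the source is 8-byte aligned */
        while (n && ((uintptr_t)s & 7)) {
            *d++ = *s++;            /* movb (%rsi), %al */
            n--;
        }
        /* body: 8 bytes at a time */
        while (n >= 8) {
            uint64_t w;
            memcpy(&w, s, 8);       /* movq (%rsi), %r8 */
            memcpy(d, &w, 8);
            s += 8;                 /* addq $8, %rsi */
            d += 8;
            n -= 8;
        }
        /* tail: remaining bytes */
        while (n--)
            *d++ = *s++;
    }
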
D | copy_user_64.S
     43  0: movb (%rsi),%al
     46  inc %rsi
     57  2: movq (%rsi),%rax
     59  addq $8,%rsi

/linux-6.12.1/tools/arch/x86/lib/
D | memcpy_64.S
     64  movq 0*8(%rsi), %r8
     65  movq 1*8(%rsi), %r9
     66  movq 2*8(%rsi), %r10
     67  movq 3*8(%rsi), %r11
     68  leaq 4*8(%rsi), %rsi
     83  addq %rdx, %rsi
     93  movq -1*8(%rsi), %r8
     94  movq -2*8(%rsi), %r9
     95  movq -3*8(%rsi), %r10
     96  movq -4*8(%rsi), %r11
    [all …]

/linux-6.12.1/arch/x86/kernel/
D | sev_verify_cbit.S
     25  movq sme_me_mask(%rip), %rsi
     26  testq %rsi, %rsi
     30  movq sev_status(%rip), %rsi
     31  testq %rsi, %rsi
     35  movq %cr4, %rsi
     38  movq %rsi, %rdx
     71  movq %rsi, %cr4

D | relocate_kernel_64.S
     67  movq PTR(VA_CONTROL_PAGE)(%rsi), %r11
     90  movq PTR(PA_CONTROL_PAGE)(%rsi), %r8
     93  movq PTR(PA_TABLE_PAGE)(%rsi), %r9
     96  movq PTR(PA_SWAP_PAGE)(%rsi), %r10
    290  movq %rcx, %rsi /* For every source page do a copy */
    291  andq $0xfffffffffffff000, %rsi
    294  movq %rsi, %rax /* Save source page to %rax */
    303  movq %rdx, %rsi
    309  movq %r10, %rsi
    313  lea PAGE_SIZE(%rax), %rsi

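In relocate_kernel_64.S, `andq $0xfffffffffffff000, %rsi` strips the low 12 bits from a list entry: kexec's indirection pages store a page-aligned address with flag bits packed into the low bits of each 64-bit word. A hypothetical decoder for that encoding (names are illustrative):

    #include <stdint.h>

    #define PAGE_SIZE_SKETCH 4096u   /* illustrative; matches x86 4K pages */

    static inline uint64_t entry_page_addr(uint64_t entry)
    {
        /* andq $0xfffffffffffff000, %rsi */
        return entry & ~(uint64_t)(PAGE_SIZE_SKETCH - 1);
    }

    static inline unsigned int entry_flags(uint64_t entry)
    {
        return entry & (PAGE_SIZE_SKETCH - 1);
    }
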
/linux-6.12.1/arch/x86/crypto/
D | blake2s-core.S
     71  movd (%rsi,%rax,4),%xmm4
     73  movd (%rsi,%rax,4),%xmm5
     75  movd (%rsi,%rax,4),%xmm6
     77  movd (%rsi,%rax,4),%xmm7
     92  movd (%rsi,%rax,4),%xmm5
     94  movd (%rsi,%rax,4),%xmm6
     96  movd (%rsi,%rax,4),%xmm7
     98  movd (%rsi,%rax,4),%xmm4
    116  movd (%rsi,%rax,4),%xmm6
    118  movd (%rsi,%rax,4),%xmm7
    [all …]

D | sm4-aesni-avx-asm_64.S
    235  vmovdqu RA0, 0*16(%rsi);
    238  vmovdqu RA1, 1*16(%rsi);
    240  vmovdqu RA2, 2*16(%rsi);
    243  vmovdqu RA3, 3*16(%rsi);
    396  vmovdqu RA0, (0 * 16)(%rsi);
    397  vmovdqu RA1, (1 * 16)(%rsi);
    398  vmovdqu RA2, (2 * 16)(%rsi);
    399  vmovdqu RA3, (3 * 16)(%rsi);
    400  vmovdqu RB0, (4 * 16)(%rsi);
    402  vmovdqu RB1, (5 * 16)(%rsi);
    [all …]

D | aes-gcm-aesni-x86_64.S
    258  mov %rax, (\dst, %rsi) // Store last LEN - 8 bytes
    271  mov %eax, (\dst, %rsi) // Store last LEN - 4 bytes
    500  .set RNDKEYLAST_PTR, %rsi
    575  .set GHASH_ACC_PTR, %rsi
    686  .set LE_CTR_PTR, %rsi // Note: overlaps with usage as temp reg
    748  lea 16(KEY), %rsi
    751  movdqa (%rsi), TMP0
    753  add $16, %rsi
    754  cmp %rsi, RNDKEYLAST_PTR
    756  movdqa (%rsi), TMP0
    [all …]

D | chacha-avx512vl-x86_64.S
     29  # %rsi: up to 2 data blocks output, o
    118  vmovdqu %xmm6,0x00(%rsi)
    125  vmovdqu %xmm6,0x10(%rsi)
    132  vmovdqu %xmm6,0x20(%rsi)
    139  vmovdqu %xmm6,0x30(%rsi)
    147  vmovdqu %xmm6,0x40(%rsi)
    153  vmovdqu %xmm6,0x50(%rsi)
    159  vmovdqu %xmm6,0x60(%rsi)
    165  vmovdqu %xmm6,0x70(%rsi)
    186  vmovdqu8 %xmm1,(%rsi,%r9){%k1}
    [all …]

D | chacha-avx2-x86_64.S
     39  # %rsi: up to 2 data blocks output, o
    145  vmovdqu %xmm6,0x00(%rsi)
    152  vmovdqu %xmm6,0x10(%rsi)
    159  vmovdqu %xmm6,0x20(%rsi)
    166  vmovdqu %xmm6,0x30(%rsi)
    174  vmovdqu %xmm6,0x40(%rsi)
    180  vmovdqu %xmm6,0x50(%rsi)
    186  vmovdqu %xmm6,0x60(%rsi)
    192  vmovdqu %xmm6,0x70(%rsi)
    205  mov %rsi,%r11
    [all …]

D | sm4-aesni-avx2-asm_64.S
    376  vmovdqu RA0, (0 * 32)(%rsi);
    377  vmovdqu RA1, (1 * 32)(%rsi);
    378  vmovdqu RA2, (2 * 32)(%rsi);
    379  vmovdqu RA3, (3 * 32)(%rsi);
    380  vmovdqu RB0, (4 * 32)(%rsi);
    381  vmovdqu RB1, (5 * 32)(%rsi);
    382  vmovdqu RB2, (6 * 32)(%rsi);
    383  vmovdqu RB3, (7 * 32)(%rsi);
    429  vmovdqu RA0, (0 * 32)(%rsi);
    430  vmovdqu RA1, (1 * 32)(%rsi);
    [all …]

D | chacha-ssse3-x86_64.S
    116  # %rsi: up to 1 data block output, o
    141  movdqu %xmm0,0x00(%rsi)
    149  movdqu %xmm0,0x10(%rsi)
    157  movdqu %xmm0,0x20(%rsi)
    165  movdqu %xmm0,0x30(%rsi)
    178  mov %rsi,%r11
    184  lea (%rdx,%rax),%rsi
    192  mov %rsp,%rsi
    204  # %rsi: output (8 32-bit words)
    216  movdqu %xmm0,0x00(%rsi)
    [all …]

/linux-6.12.1/arch/x86/mm/
D | mem_encrypt_boot.S
     43  movq %rsi, %r11 /* Decrypted area */
     48  leaq __enc_copy(%rip), %rsi /* Encryption routine */
     54  movq %r11, %rsi /* Decrypted area */
    113  movq %rsi, %r11 /* Save decrypted area address */
    133  movq %r11, %rsi /* Source - decrypted area */
    138  movq %r8, %rsi /* Source - intermediate copy buffer */

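The mem_encrypt_boot.S hits trace __enc_copy's data flow: read through the decrypted mapping, stage the data in an intermediate buffer, then write it back through the encrypted mapping of the same physical pages. A C sketch of that bounce-buffer loop (all names illustrative; the dual mappings are set up by page-table code not shown here):

    #include <stddef.h>
    #include <string.h>

    static void enc_copy_sketch(void *encrypted_va, const void *decrypted_va,
                                void *workarea, size_t len, size_t chunk)
    {
        size_t off;

        for (off = 0; off < len; off += chunk) {
            size_t c = len - off < chunk ? len - off : chunk;

            /* Source - decrypted area */
            memcpy(workarea, (const char *)decrypted_va + off, c);
            /* Source - intermediate copy buffer */
            memcpy((char *)encrypted_va + off, workarea, c);
        }
    }
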
/linux-6.12.1/net/sunrpc/auth_gss/
D | svcauth_gss.c
     98  struct rsi { struct
    106  static struct rsi *rsi_update(struct cache_detail *cd, struct rsi *new, struct rsi *old); argument
    107  static struct rsi *rsi_lookup(struct cache_detail *cd, struct rsi *item);
    109  static void rsi_free(struct rsi *rsii) in rsi_free()
    119  struct rsi *rsii = container_of(head, struct rsi, rcu_head); in rsi_free_rcu()
    127  struct rsi *rsii = container_of(ref, struct rsi, h.ref); in rsi_put()
    132  static inline int rsi_hash(struct rsi *item) in rsi_hash()
    140  struct rsi *item = container_of(a, struct rsi, h); in rsi_match()
    141  struct rsi *tmp = container_of(b, struct rsi, h); in rsi_match()
    162  struct rsi *new = container_of(cnew, struct rsi, h); in rsi_init()
    [all …]

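Most of the svcauth_gss.c hits recover a struct rsi from a pointer to one of its embedded members (its rcu_head, or its cache_head h) via container_of(). A self-contained illustration of that pattern with stand-in types:

    #include <stddef.h>

    struct member { int dummy; };

    struct outer {
        int payload;
        struct member m;    /* embedded, like rcu_head in struct rsi */
    };

    /* Same arithmetic as the kernel's container_of(): step back from
     * the member's address by its offset within the containing type. */
    #define container_of_sketch(ptr, type, field) \
        ((type *)((char *)(ptr) - offsetof(type, field)))

    static struct outer *outer_from_member(struct member *mp)
    {
        return container_of_sketch(mp, struct outer, m);
    }
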
/linux-6.12.1/arch/x86/boot/compressed/
D | idt_handlers_64.S
     24  pushq %rsi
     42  movq ORIG_RAX(%rsp), %rsi
     59  popq %rsi

/linux-6.12.1/arch/x86/um/
D | setjmp_64.S
     25  pop %rsi # Return address, and adjust the stack
     29  push %rsi # Make the call/return stack happy
     35  movq %rsi,56(%rdi) # Return address

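setjmp_64.S pops the caller's return address into %rsi and stores it at offset 56 of the jump buffer, alongside the callee-saved registers; longjmp restores it to resume at the setjmp site. Standard C usage of the pair looks like this:

    #include <setjmp.h>
    #include <stdio.h>

    static jmp_buf env;

    int main(void)
    {
        if (setjmp(env) == 0) {     /* records registers + return address */
            puts("first pass, jumping back");
            longjmp(env, 1);        /* resumes at the setjmp site */
        }
        puts("resumed after longjmp");
        return 0;
    }
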
/linux-6.12.1/arch/x86/entry/
D | calling.h
     70  pushq %rsi /* pt_regs->si */
     71  movq 8(%rsp), %rsi /* temporarily store the return address in %rsi */
     75  pushq %rsi /* pt_regs->si */
     96  pushq %rsi /* return address on top of stack */
    144  popq %rsi
    438  pushq %rsi
    456  popq %rsi

/linux-6.12.1/arch/x86/kernel/cpu/
D | vmware.c
     73  unsigned long out0, rbx, rcx, rdx, rsi, rdi; in vmware_hypercall_slow() local
     79  "=d" (rdx), "=S" (rsi), "=D" (rdi) in vmware_hypercall_slow()
     91  "=d" (rdx), "=S" (rsi), "=D" (rdi) in vmware_hypercall_slow()
    103  "=d" (rdx), "=S" (rsi), "=D" (rdi) in vmware_hypercall_slow()
    122  *out4 = rsi; in vmware_hypercall_slow()
    517  args.rsi = in4; in vmware_tdx_hypercall()
    535  *out4 = args.rsi; in vmware_tdx_hypercall()

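vmware_hypercall_slow() binds C locals to fixed registers with GCC inline-asm constraint letters: "=S" is %rsi, "=D" is %rdi, "=d" is %rdx. A minimal, x86-64/GCC-specific illustration of the same binding (the asm body here only writes a constant; the real code executes the hypercall instruction):

    static inline unsigned long read_via_rsi(void)
    {
        unsigned long rsi;

        /* "=S" forces the output operand into %rsi, so %0 expands to %rsi */
        asm volatile("movq $42, %0" : "=S" (rsi));
        return rsi;
    }
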