/linux-6.12.1/drivers/s390/cio/

airq.c
    129  struct airq_iv *iv;   in airq_iv_create() local
    132  iv = kzalloc(sizeof(*iv), GFP_KERNEL);   in airq_iv_create()
    133  if (!iv)   in airq_iv_create()
    135  iv->bits = bits;   in airq_iv_create()
    136  iv->flags = flags;   in airq_iv_create()
    144  iv->vector = dma_pool_zalloc(airq_iv_cache, GFP_KERNEL,   in airq_iv_create()
    145          &iv->vector_dma);   in airq_iv_create()
    146  if (!iv->vector)   in airq_iv_create()
    149  iv->vector = vec;   in airq_iv_create()
    151  iv->vector = cio_dma_zalloc(size);   in airq_iv_create()
    [all …]
/linux-6.12.1/arch/s390/include/asm/

airq.h
    53  void airq_iv_release(struct airq_iv *iv);
    54  unsigned long airq_iv_alloc(struct airq_iv *iv, unsigned long num);
    55  void airq_iv_free(struct airq_iv *iv, unsigned long bit, unsigned long num);
    56  unsigned long airq_iv_scan(struct airq_iv *iv, unsigned long start,
    59  static inline unsigned long airq_iv_alloc_bit(struct airq_iv *iv)   in airq_iv_alloc_bit() argument
    61  return airq_iv_alloc(iv, 1);   in airq_iv_alloc_bit()
    64  static inline void airq_iv_free_bit(struct airq_iv *iv, unsigned long bit)   in airq_iv_free_bit() argument
    66  airq_iv_free(iv, bit, 1);   in airq_iv_free_bit()
    69  static inline unsigned long airq_iv_end(struct airq_iv *iv)   in airq_iv_end() argument
    71  return iv->end;   in airq_iv_end()
    [all …]
/linux-6.12.1/crypto/

testmgr.h
       68  const char *iv;   member
      109  const char *iv;   member
    10434  .iv = "\xfe\xdc\xba\x98\x76\x54\x32\x10",
    10446  .iv = "\x12\x34\x56\x78\x90\xab\xcd\xef",
    10454  .iv = "\xe5\xc7\xcd\xde\x87\x2b\xf2\x7c",
    10462  .iv = "\x43\xe9\x34\x00\x8c\x38\x9c\x0f",
    10470  .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
    10542  .iv = "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFD",
    10610  .iv = "\xE7\x82\x1D\xB8\x53\x11\xAC\x47",
    10842  .iv = "\x7D\x33\x88\x93\x0F\x93\xB2\x42",
    [all …]
cbc.c
    17  u8 *iv)   in crypto_cbc_encrypt_segment() argument
    22  crypto_xor(iv, src, bsize);   in crypto_cbc_encrypt_segment()
    23  crypto_lskcipher_encrypt(tfm, iv, dst, bsize, NULL);   in crypto_cbc_encrypt_segment()
    24  memcpy(iv, dst, bsize);   in crypto_cbc_encrypt_segment()
    34  u8 *iv = oiv;   in crypto_cbc_encrypt_inplace() local
    40  crypto_xor(src, iv, bsize);   in crypto_cbc_encrypt_inplace()
    42  iv = src;   in crypto_cbc_encrypt_inplace()
    47  memcpy(oiv, iv, bsize);   in crypto_cbc_encrypt_inplace()
    54  u8 *dst, unsigned len, u8 *iv, u32 flags)   in crypto_cbc_encrypt() argument
    62  rem = crypto_cbc_encrypt_inplace(cipher, dst, len, iv);   in crypto_cbc_encrypt()
    [all …]
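The cbc.c hits above are the software CBC encrypt path: each plaintext block is XORed into the running IV, the result is encrypted, and that ciphertext becomes the IV for the next block. A minimal user-space sketch of the same chaining follows; block_encrypt(), cbc_encrypt() and BSIZE are illustrative stand-ins, not kernel code, and the toy cipher is only there so the loop compiles.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define BSIZE 16

    /* Toy stand-in for a real block cipher (not secure); present only so
     * the chaining structure below can be compiled and traced. */
    static void block_encrypt(uint8_t out[BSIZE], const uint8_t in[BSIZE])
    {
        for (int i = 0; i < BSIZE; i++)
            out[i] = (uint8_t)((in[i] ^ 0xA5) + i);
    }

    /* CBC encryption as in crypto_cbc_encrypt_segment(): XOR the plaintext
     * into the running IV, encrypt that block, then carry the ciphertext
     * forward as the IV for the next block. */
    static void cbc_encrypt(uint8_t *dst, const uint8_t *src, size_t len,
                            uint8_t iv[BSIZE])
    {
        for (size_t off = 0; off + BSIZE <= len; off += BSIZE) {
            for (int i = 0; i < BSIZE; i++)
                iv[i] ^= src[off + i];     /* crypto_xor(iv, src, bsize)      */
            block_encrypt(dst + off, iv);  /* encrypt the chained block       */
            memcpy(iv, dst + off, BSIZE);  /* next IV = this ciphertext block */
        }
    }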
pcbc.c
    28  u8 * const iv = walk->iv;   in crypto_pcbc_encrypt_segment() local
    31  crypto_xor(iv, src, bsize);   in crypto_pcbc_encrypt_segment()
    32  crypto_cipher_encrypt_one(tfm, dst, iv);   in crypto_pcbc_encrypt_segment()
    33  crypto_xor_cpy(iv, dst, src, bsize);   in crypto_pcbc_encrypt_segment()
    49  u8 * const iv = walk->iv;   in crypto_pcbc_encrypt_inplace() local
    54  crypto_xor(iv, src, bsize);   in crypto_pcbc_encrypt_inplace()
    55  crypto_cipher_encrypt_one(tfm, src, iv);   in crypto_pcbc_encrypt_inplace()
    56  crypto_xor_cpy(iv, tmpbuf, src, bsize);   in crypto_pcbc_encrypt_inplace()
    95  u8 * const iv = walk->iv;   in crypto_pcbc_decrypt_segment() local
    99  crypto_xor(dst, iv, bsize);   in crypto_pcbc_decrypt_segment()
    [all …]
/linux-6.12.1/arch/riscv/crypto/

chacha-riscv64-glue.c
    17  size_t len, const u32 iv[4]);
    21  u32 iv[CHACHA_IV_SIZE / sizeof(u32)];   in riscv64_chacha20_crypt() local
    30  iv[0] = get_unaligned_le32(req->iv);   in riscv64_chacha20_crypt()
    31  iv[1] = get_unaligned_le32(req->iv + 4);   in riscv64_chacha20_crypt()
    32  iv[2] = get_unaligned_le32(req->iv + 8);   in riscv64_chacha20_crypt()
    33  iv[3] = get_unaligned_le32(req->iv + 12);   in riscv64_chacha20_crypt()
    42  walk.dst.virt.addr, nbytes, iv);   in riscv64_chacha20_crypt()
    43  iv[0] += nbytes / CHACHA_BLOCK_SIZE;   in riscv64_chacha20_crypt()
    49  CHACHA_BLOCK_SIZE, iv);   in riscv64_chacha20_crypt()
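The glue code above treats the 16-byte skcipher IV as four little-endian words: word 0 is the block counter, words 1 to 3 the nonce, and the counter word is advanced by the number of full blocks already processed before the next chunk is handed to the assembly routine. A standalone sketch of that bookkeeping; load_le32() and the helper names are illustrative.

    #include <stddef.h>
    #include <stdint.h>

    #define CHACHA_BLOCK_SIZE 64

    /* Illustrative equivalent of get_unaligned_le32(). */
    static uint32_t load_le32(const uint8_t *p)
    {
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
    }

    /* 16-byte ChaCha IV: 32-bit little-endian block counter || 96-bit nonce. */
    static void chacha_iv_setup(uint32_t iv[4], const uint8_t req_iv[16])
    {
        iv[0] = load_le32(req_iv);       /* block counter */
        iv[1] = load_le32(req_iv + 4);   /* nonce word 0  */
        iv[2] = load_le32(req_iv + 8);   /* nonce word 1  */
        iv[3] = load_le32(req_iv + 12);  /* nonce word 2  */
    }

    /* Bump the counter by the number of whole blocks already consumed. */
    static void chacha_iv_advance(uint32_t iv[4], size_t bytes_done)
    {
        iv[0] += (uint32_t)(bytes_done / CHACHA_BLOCK_SIZE);
    }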
/linux-6.12.1/drivers/crypto/nx/

nx-aes-ccm.c
    123  static inline int crypto_ccm_check_iv(const u8 *iv)   in crypto_ccm_check_iv() argument
    126  if (1 > iv[0] || iv[0] > 7)   in crypto_ccm_check_iv()
    133  static int generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize,   in generate_b0() argument
    138  memcpy(b0, iv, 16);   in generate_b0()
    153  static int generate_pat(u8 *iv,   in generate_pat() argument
    169  memset(iv + 15 - iv[0], 0, iv[0] + 1);   in generate_pat()
    208  rc = generate_b0(iv, assoclen, authsize, nbytes, b0);   in generate_pat()
    327  u8 *iv,   in ccm_nx_decrypt() argument
    348  rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen,   in ccm_nx_decrypt()
    367  rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src,   in ccm_nx_decrypt()
    [all …]
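In CCM the first IV byte carries L' = L - 1, the width of the message-length field, which is why crypto_ccm_check_iv() rejects anything outside 1..7, and generate_b0() turns the IV into the B0 block that starts the CBC-MAC. A rough user-space sketch of that B0 construction following RFC 3610; ccm_build_b0() is an illustrative name, and it assumes the IV's flags byte holds only L', as the driver expects.

    #include <stdint.h>
    #include <string.h>

    /* B0 per RFC 3610: flags || nonce || big-endian message length.
     * Flags = 64*Adata + 8*M' + L', with M' = (authsize - 2) / 2. */
    static int ccm_build_b0(uint8_t b0[16], const uint8_t iv[16],
                            unsigned int assoclen, unsigned int authsize,
                            uint64_t msglen)
    {
        unsigned int l;

        if (iv[0] < 1 || iv[0] > 7)
            return -1;
        l = iv[0] + 1;                      /* width of the length field */

        memcpy(b0, iv, 16);                 /* flags byte + nonce from the IV */
        if (assoclen)
            b0[0] |= 64;                    /* Adata bit */
        b0[0] |= 8 * ((authsize - 2) / 2);  /* M' field  */

        /* encode the message length big-endian in the last l bytes */
        for (unsigned int i = 0; i < l; i++)
            b0[15 - i] = (uint8_t)(msglen >> (8 * i));

        return 0;
    }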
nx-aes-ctr.c
    72  static int ctr_aes_nx_crypt(struct skcipher_request *req, u8 *iv)   in ctr_aes_nx_crypt() argument
    86  rc = nx_build_sg_lists(nx_ctx, iv, req->dst, req->src,   in ctr_aes_nx_crypt()
    88  csbcpb->cpb.aes_ctr.iv);   in ctr_aes_nx_crypt()
    102  memcpy(iv, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);   in ctr_aes_nx_crypt()
    119  u8 iv[16];   in ctr3686_aes_nx_crypt() local
    121  memcpy(iv, nx_ctx->priv.ctr.nonce, CTR_RFC3686_NONCE_SIZE);   in ctr3686_aes_nx_crypt()
    122  memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);   in ctr3686_aes_nx_crypt()
    123  iv[12] = iv[13] = iv[14] = 0;   in ctr3686_aes_nx_crypt()
    124  iv[15] = 1;   in ctr3686_aes_nx_crypt()
    126  return ctr_aes_nx_crypt(req, iv);   in ctr3686_aes_nx_crypt()
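ctr3686_aes_nx_crypt() above assembles the RFC 3686 counter block: a 4-byte nonce taken from the key material, the 8-byte per-request IV, and a 32-bit big-endian block counter that starts at 1. The same layout as a small standalone helper; rfc3686_counter_block() is an illustrative name.

    #include <stdint.h>
    #include <string.h>

    #define CTR_RFC3686_NONCE_SIZE 4
    #define CTR_RFC3686_IV_SIZE    8

    /* RFC 3686 counter block: nonce || IV || 32-bit counter = 1. */
    static void rfc3686_counter_block(uint8_t ctr[16],
                                      const uint8_t nonce[CTR_RFC3686_NONCE_SIZE],
                                      const uint8_t iv[CTR_RFC3686_IV_SIZE])
    {
        memcpy(ctr, nonce, CTR_RFC3686_NONCE_SIZE);
        memcpy(ctr + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
        ctr[12] = ctr[13] = ctr[14] = 0;
        ctr[15] = 1;                        /* counter starts at 1 */
    }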
nx-aes-gcm.c
    169  static int gmac(struct aead_request *req, const u8 *iv, unsigned int assoclen)   in gmac() argument
    192  memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, iv, AES_BLOCK_SIZE);   in gmac()
    242  static int gcm_empty(struct aead_request *req, const u8 *iv, int enc)   in gcm_empty() argument
    269  in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) iv,   in gcm_empty()
    322  *(u32 *)&rctx->iv[NX_GCM_CTR_OFFSET] = 1;   in gcm_aes_nx_crypt()
    326  rc = gcm_empty(req, rctx->iv, enc);   in gcm_aes_nx_crypt()
    328  rc = gmac(req, rctx->iv, assoclen);   in gcm_aes_nx_crypt()
    357  rc = nx_build_sg_lists(nx_ctx, rctx->iv, req->dst,   in gcm_aes_nx_crypt()
    376  memcpy(rctx->iv, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE);   in gcm_aes_nx_crypt()
    419  char *iv = rctx->iv;   in gcm_aes_nx_encrypt() local
    [all …]
/linux-6.12.1/lib/crypto/

aescfb.c
    43  int len, const u8 iv[AES_BLOCK_SIZE])   in aescfb_encrypt()
    46  const u8 *v = iv;   in aescfb_encrypt()
    72  int len, const u8 iv[AES_BLOCK_SIZE])   in aescfb_decrypt()
    76  aescfb_encrypt_block(ctx, ks[0], iv);   in aescfb_decrypt()
    113  u8 iv[AES_BLOCK_SIZE];   member
    122  .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
    146  .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
    171  .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
    194  .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
    207  .iv = "\x00\x01\x02\x03\x04\x05\x06\x07"
    [all …]
chacha20poly1305.c
    39  u8 iv[CHACHA_IV_SIZE];   in xchacha_init() local
    41  memset(iv, 0, 8);   in xchacha_init()
    42  memcpy(iv + 8, nonce + 16, 8);   in xchacha_init()
    50  chacha_init(chacha_state, k, iv);   in xchacha_init()
    53  memzero_explicit(iv, sizeof(iv));   in xchacha_init()
    97  __le64 iv[2];   in chacha20poly1305_encrypt() local
    101  iv[0] = 0;   in chacha20poly1305_encrypt()
    102  iv[1] = cpu_to_le64(nonce);   in chacha20poly1305_encrypt()
    104  chacha_init(chacha_state, k, (u8 *)iv);   in chacha20poly1305_encrypt()
    107  memzero_explicit(iv, sizeof(iv));   in chacha20poly1305_encrypt()
    [all …]
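Both functions pack a caller-supplied nonce into ChaCha's 16-byte IV (32-bit block counter followed by a 96-bit nonce): chacha20poly1305_encrypt() places a 64-bit little-endian nonce after 8 zero bytes, while xchacha_init() feeds the first 16 bytes of the 24-byte XChaCha nonce to HChaCha to derive a subkey and keeps only the last 8 bytes for the IV. A sketch of just the packing; the function names are illustrative.

    #include <stdint.h>
    #include <string.h>

    /* 64-bit nonce flavour: counter and leading nonce word are zero,
     * the nonce itself goes into bytes 8..15 little-endian. */
    static void pack_iv_64(uint8_t iv[16], uint64_t nonce)
    {
        memset(iv, 0, 8);
        for (int i = 0; i < 8; i++)
            iv[8 + i] = (uint8_t)(nonce >> (8 * i));
    }

    /* XChaCha flavour: nonce[0..15] has already been consumed by HChaCha,
     * only nonce[16..23] is carried into the IV. */
    static void pack_iv_xchacha(uint8_t iv[16], const uint8_t nonce[24])
    {
        memset(iv, 0, 8);
        memcpy(iv + 8, nonce + 16, 8);
    }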
aesgcm.c
    150  const u8 iv[GCM_AES_IV_SIZE], u8 *authtag)   in aesgcm_encrypt()
    154  memcpy(ctr, iv, GCM_AES_IV_SIZE);   in aesgcm_encrypt()
    180  int assoc_len, const u8 iv[GCM_AES_IV_SIZE],   in aesgcm_decrypt()
    186  memcpy(ctr, iv, GCM_AES_IV_SIZE);   in aesgcm_decrypt()
    563  u8 iv[GCM_AES_IV_SIZE];   member
    585  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    595  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    620  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    642  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    654  .iv = "\xca\xfe\xba\xbe\xfa\xce\xdb\xad"
    [all …]
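aesgcm_encrypt() copies the 12-byte GCM IV into the head of a 16-byte counter block. With a 96-bit IV the initial counter block J0 is simply IV || 0x00000001: E(J0) masks the authentication tag, and the payload itself is encrypted with counter values 2, 3, ... (the put_unaligned_be32(2, iv + GCM_AES_IV_SIZE) hits in the arm64 ghash-ce-glue.c entry further down do exactly that). A sketch of the J0 construction; gcm_j0() is an illustrative name.

    #include <stdint.h>
    #include <string.h>

    #define GCM_AES_IV_SIZE 12
    #define AES_BLOCK_SIZE  16

    /* J0 for a 96-bit GCM IV: the IV followed by a big-endian 1. */
    static void gcm_j0(uint8_t ctr[AES_BLOCK_SIZE],
                       const uint8_t iv[GCM_AES_IV_SIZE])
    {
        memcpy(ctr, iv, GCM_AES_IV_SIZE);
        ctr[12] = 0;
        ctr[13] = 0;
        ctr[14] = 0;
        ctr[15] = 1;
    }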
/linux-6.12.1/arch/x86/crypto/

sm4_aesni_avx_glue.c
    27  const u8 *src, u8 *iv);
    29  const u8 *src, u8 *iv);
    102  const u8 *iv = walk.iv;   in sm4_cbc_encrypt() local
    107  crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE);   in sm4_cbc_encrypt()
    109  iv = dst;   in sm4_cbc_encrypt()
    114  if (iv != walk.iv)   in sm4_cbc_encrypt()
    115  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);   in sm4_cbc_encrypt()
    142  func(ctx->rkey_dec, dst, src, walk.iv);   in sm4_avx_cbc_decrypt()
    150  u8 iv[SM4_BLOCK_SIZE];   in sm4_avx_cbc_decrypt() local
    159  memcpy(iv, src + SM4_BLOCK_SIZE, SM4_BLOCK_SIZE);   in sm4_avx_cbc_decrypt()
    [all …]
/linux-6.12.1/block/

blk-integrity.c
    29  struct bio_vec iv, ivprv = { NULL };   in blk_rq_count_integrity_sg() local
    35  bio_for_each_integrity_vec(iv, bio, iter) {   in blk_rq_count_integrity_sg()
    38  if (!biovec_phys_mergeable(q, &ivprv, &iv))   in blk_rq_count_integrity_sg()
    40  if (seg_size + iv.bv_len > queue_max_segment_size(q))   in blk_rq_count_integrity_sg()
    43  seg_size += iv.bv_len;   in blk_rq_count_integrity_sg()
    47  seg_size = iv.bv_len;   in blk_rq_count_integrity_sg()
    51  ivprv = iv;   in blk_rq_count_integrity_sg()
    69  struct bio_vec iv, ivprv = { NULL };   in blk_rq_map_integrity_sg() local
    77  bio_for_each_integrity_vec(iv, bio, iter) {   in blk_rq_map_integrity_sg()
    79  if (!biovec_phys_mergeable(q, &ivprv, &iv))   in blk_rq_map_integrity_sg()
    [all …]
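Here iv is not an initialization vector at all but a bio_vec iterator: blk_rq_count_integrity_sg() walks the integrity vectors and merges adjacent, physically contiguous ones into a single scatter-gather segment as long as the running size stays under the queue's segment limit. A simplified sketch of that counting loop; struct vec and count_segments() are illustrative, and the boundary-mask checks that the real biovec_phys_mergeable() performs are omitted.

    #include <stdbool.h>
    #include <stddef.h>

    /* Illustrative stand-in for struct bio_vec. */
    struct vec {
        unsigned long phys;   /* physical address of the chunk */
        unsigned int  len;
    };

    static unsigned int count_segments(const struct vec *v, size_t n,
                                       unsigned int max_seg)
    {
        unsigned int segs = 0, seg_size = 0;
        struct vec prev = { 0, 0 };
        bool have_prev = false;

        for (size_t i = 0; i < n; i++) {
            if (have_prev &&
                prev.phys + prev.len == v[i].phys &&   /* contiguous?  */
                seg_size + v[i].len <= max_seg) {      /* still fits?  */
                seg_size += v[i].len;                  /* extend       */
            } else {
                segs++;                                /* new segment  */
                seg_size = v[i].len;
            }
            prev = v[i];
            have_prev = true;
        }
        return segs;
    }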
/linux-6.12.1/include/crypto/

chacha.h
    65  void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
    66  static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)   in chacha_init_generic() argument
    77  state[12] = get_unaligned_le32(iv + 0);   in chacha_init_generic()
    78  state[13] = get_unaligned_le32(iv + 4);   in chacha_init_generic()
    79  state[14] = get_unaligned_le32(iv + 8);   in chacha_init_generic()
    80  state[15] = get_unaligned_le32(iv + 12);   in chacha_init_generic()
    83  static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)   in chacha_init() argument
    86  chacha_init_arch(state, key, iv);   in chacha_init()
    88  chacha_init_generic(state, key, iv);   in chacha_init()
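chacha_init_generic() fills the last four state words from the 16-byte IV; the rest of the 4x4 word state is the "expand 32-byte k" constant block and the key. A standalone sketch of the full layout; chacha_state_init() is an illustrative name and assumes the key and IV have already been broken into little-endian words.

    #include <stdint.h>

    /* ChaCha state: state[0..3] constants, state[4..11] 256-bit key,
     * state[12] block counter, state[13..15] nonce. */
    static void chacha_state_init(uint32_t state[16],
                                  const uint32_t key[8], const uint32_t iv[4])
    {
        state[0] = 0x61707865;  /* "expa" */
        state[1] = 0x3320646e;  /* "nd 3" */
        state[2] = 0x79622d32;  /* "2-by" */
        state[3] = 0x6b206574;  /* "te k" */
        for (int i = 0; i < 8; i++)
            state[4 + i] = key[i];
        state[12] = iv[0];      /* block counter */
        state[13] = iv[1];
        state[14] = iv[2];
        state[15] = iv[3];
    }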
/linux-6.12.1/arch/arm64/crypto/

sm4-neon-glue.c
    24  u8 *iv, unsigned int nblocks);
    26  u8 *iv, unsigned int nblocks);
    91  const u8 *iv = walk.iv;   in sm4_cbc_encrypt() local
    96  crypto_xor_cpy(dst, src, iv, SM4_BLOCK_SIZE);   in sm4_cbc_encrypt()
    98  iv = dst;   in sm4_cbc_encrypt()
    103  if (iv != walk.iv)   in sm4_cbc_encrypt()
    104  memcpy(walk.iv, iv, SM4_BLOCK_SIZE);   in sm4_cbc_encrypt()
    132  walk.iv, nblocks);   in sm4_cbc_decrypt()
    163  walk.iv, nblocks);   in sm4_ctr_crypt()
    176  sm4_crypt_block(ctx->rkey_enc, keystream, walk.iv);   in sm4_ctr_crypt()
    [all …]
ghash-ce-glue.c
    335  static int gcm_encrypt(struct aead_request *req, char *iv, int assoclen)   in gcm_encrypt() argument
    353  put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);   in gcm_encrypt()
    374  dg, iv, ctx->aes_key.key_enc, nrounds,   in gcm_encrypt()
    398  static int gcm_decrypt(struct aead_request *req, char *iv, int assoclen)   in gcm_decrypt() argument
    419  put_unaligned_be32(2, iv + GCM_AES_IV_SIZE);   in gcm_decrypt()
    444  dg, iv, ctx->aes_key.key_enc,   in gcm_decrypt()
    466  u8 iv[AES_BLOCK_SIZE];   in gcm_aes_encrypt() local
    468  memcpy(iv, req->iv, GCM_AES_IV_SIZE);   in gcm_aes_encrypt()
    469  return gcm_encrypt(req, iv, req->assoclen);   in gcm_aes_encrypt()
    474  u8 iv[AES_BLOCK_SIZE];   in gcm_aes_decrypt() local
    [all …]
/linux-6.12.1/drivers/crypto/ccp/

ccp-crypto-aes.c
    33  memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);   in ccp_aes_complete()
    83  if (!req->iv)   in ccp_aes_crypt()
    86  memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);   in ccp_aes_crypt()
    89  sg_init_one(iv_sg, rctx->iv, iv_len);   in ccp_aes_crypt()
    101  rctx->cmd.u.aes.iv = iv_sg;   in ccp_aes_crypt()
    139  req->iv = rctx->rfc3686_info;   in ccp_aes_rfc3686_complete()
    163  u8 *iv;   in ccp_aes_rfc3686_crypt() local
    166  iv = rctx->rfc3686_iv;   in ccp_aes_rfc3686_crypt()
    167  memcpy(iv, ctx->u.aes.nonce, CTR_RFC3686_NONCE_SIZE);   in ccp_aes_rfc3686_crypt()
    169  iv += CTR_RFC3686_NONCE_SIZE;   in ccp_aes_rfc3686_crypt()
    [all …]
/linux-6.12.1/net/mac80211/

wep.c
    32  static inline bool ieee80211_wep_weak_iv(u32 iv, int keylen)   in ieee80211_wep_weak_iv() argument
    39  if ((iv & 0xff00) == 0xff00) {   in ieee80211_wep_weak_iv()
    40  u8 B = (iv >> 16) & 0xff;   in ieee80211_wep_weak_iv()
    49  int keylen, int keyidx, u8 *iv)   in ieee80211_wep_get_iv() argument
    55  if (!iv)   in ieee80211_wep_get_iv()
    58  *iv++ = (local->wep_iv >> 16) & 0xff;   in ieee80211_wep_get_iv()
    59  *iv++ = (local->wep_iv >> 8) & 0xff;   in ieee80211_wep_get_iv()
    60  *iv++ = local->wep_iv & 0xff;   in ieee80211_wep_get_iv()
    61  *iv++ = keyidx << 6;   in ieee80211_wep_get_iv()
    136  u8 *iv;   in ieee80211_wep_encrypt() local
    [all …]
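ieee80211_wep_get_iv() emits the classic 4-byte WEP header: three IV bytes, most significant first, then the key index in the top two bits of the fourth byte, while ieee80211_wep_weak_iv() screens out IVs of the FMS "(B+3, 255, x)" form that leak RC4 key bytes. A sketch of both; since the listing truncates before the range test, the condition shown is the standard FMS one and is an assumption here.

    #include <stdint.h>

    /* 4-byte WEP header: IV (big-endian order) followed by key ID bits. */
    static void wep_write_iv_header(uint8_t hdr[4], uint32_t iv, int keyidx)
    {
        hdr[0] = (iv >> 16) & 0xff;
        hdr[1] = (iv >> 8) & 0xff;
        hdr[2] = iv & 0xff;
        hdr[3] = (uint8_t)(keyidx << 6);   /* key ID lives in bits 6-7 */
    }

    /* Assumed FMS weak-IV test: middle byte 0xff and leading byte in the
     * range that exposes a key byte for the given key length. */
    static int wep_weak_iv(uint32_t iv, int keylen)
    {
        if ((iv & 0xff00) == 0xff00) {
            uint8_t b = (iv >> 16) & 0xff;
            if (b >= 3 && b < 3 + (uint8_t)keylen)
                return 1;
        }
        return 0;
    }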
/linux-6.12.1/tools/testing/crypto/chacha20-s390/

test-cipher.c
    54  u8 iv[16], key[32];   in test_lib_chacha() local
    58  memset(iv, 'I', sizeof(iv));   in test_lib_chacha()
    65  16, 1, iv, 16, 1);   in test_lib_chacha()
    69  chacha_init_arch(chacha_state, (u32*)key, iv);   in test_lib_chacha()
    84  chacha_init_arch(chacha_state, (u32 *)key, iv);   in test_lib_chacha()
    131  u8 iv[16], key[32];   in test_skcipher() local
    153  memset(iv, 'I', sizeof(iv));   in test_skcipher()
    166  16, 1, iv, 16, 1);   in test_skcipher()
    176  data_size, iv);   in test_skcipher()
    195  memset(iv, 'I', sizeof(iv));   in test_skcipher()
    [all …]
/linux-6.12.1/net/qrtr/

ns.c
    114  struct kvec iv;   in service_announce_new() local
    119  iv.iov_base = &pkt;   in service_announce_new()
    120  iv.iov_len = sizeof(pkt);   in service_announce_new()
    132  return kernel_sendmsg(qrtr_ns.sock, &msg, &iv, 1, sizeof(pkt));   in service_announce_new()
    140  struct kvec iv;   in service_announce_del() local
    146  iv.iov_base = &pkt;   in service_announce_del()
    147  iv.iov_len = sizeof(pkt);   in service_announce_del()
    159  ret = kernel_sendmsg(qrtr_ns.sock, &msg, &iv, 1, sizeof(pkt));   in service_announce_del()
    171  struct kvec iv;   in lookup_notify() local
    174  iv.iov_base = &pkt;   in lookup_notify()
    [all …]
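Another non-crypto match: in the qrtr name service, iv is a struct kvec that points at an on-stack packet and is handed to kernel_sendmsg() in a single call. A user-space analogue of that pattern with struct iovec and sendmsg(); send_packet() is an illustrative name, not part of the qrtr code.

    #include <sys/socket.h>
    #include <sys/uio.h>

    /* Send one packet described by a single iovec, mirroring the
     * kvec + kernel_sendmsg() idiom used by the name service. */
    static ssize_t send_packet(int sock, const void *pkt, size_t len,
                               const struct sockaddr *dst, socklen_t dstlen)
    {
        struct iovec iov = {
            .iov_base = (void *)pkt,
            .iov_len  = len,
        };
        struct msghdr msg = {
            .msg_name    = (void *)dst,
            .msg_namelen = dstlen,
            .msg_iov     = &iov,
            .msg_iovlen  = 1,
        };

        return sendmsg(sock, &msg, 0);
    }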
/linux-6.12.1/drivers/crypto/amcc/

crypto4xx_alg.c
    75  __le32 iv[AES_IV_SIZE];   in crypto4xx_crypt() local
    81  crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);   in crypto4xx_crypt()
    84  req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,   in crypto4xx_crypt()
    212  __le32 iv[AES_IV_SIZE / 4] = {   in crypto4xx_rfc3686_encrypt() local
    214  cpu_to_le32p((u32 *) req->iv),   in crypto4xx_rfc3686_encrypt()
    215  cpu_to_le32p((u32 *) (req->iv + 4)),   in crypto4xx_rfc3686_encrypt()
    219  req->cryptlen, iv, AES_IV_SIZE,   in crypto4xx_rfc3686_encrypt()
    227  __le32 iv[AES_IV_SIZE / 4] = {   in crypto4xx_rfc3686_decrypt() local
    229  cpu_to_le32p((u32 *) req->iv),   in crypto4xx_rfc3686_decrypt()
    230  cpu_to_le32p((u32 *) (req->iv + 4)),   in crypto4xx_rfc3686_decrypt()
    [all …]
/linux-6.12.1/arch/powerpc/crypto/

aes-gcm-p10-glue.c
    37  void *rkey, u8 *iv, void *Xi);
    39  void *rkey, u8 *iv, void *Xi);
    50  u8 iv[16];   member
    113  static void gcmp10_init(struct gcm_ctx *gctx, u8 *iv, unsigned char *rdkey,   in gcmp10_init() argument
    122  *((__be32 *)(iv+12)) = counter;   in gcmp10_init()
    129  aes_p10_encrypt(iv, gctx->ivtag, rdkey);   in gcmp10_init()
    132  *((__be32 *)(iv+12)) = counter;   in gcmp10_init()
    133  memcpy(gctx->iv, iv, 16);   in gcmp10_init()
    216  unsigned char *iv = PTR_ALIGN((void *)ivbuf, PPC_ALIGN);   in p10_aes_gcm_crypt() local
    225  memcpy(iv, req->iv, GCM_IV_SIZE);   in p10_aes_gcm_crypt()
    [all …]
/linux-6.12.1/include/uapi/linux/

tls.h
    127  unsigned char iv[TLS_CIPHER_AES_GCM_128_IV_SIZE];   member
    135  unsigned char iv[TLS_CIPHER_AES_GCM_256_IV_SIZE];   member
    143  unsigned char iv[TLS_CIPHER_AES_CCM_128_IV_SIZE];   member
    151  unsigned char iv[TLS_CIPHER_CHACHA20_POLY1305_IV_SIZE];   member
    159  unsigned char iv[TLS_CIPHER_SM4_GCM_IV_SIZE];   member
    167  unsigned char iv[TLS_CIPHER_SM4_CCM_IV_SIZE];   member
    175  unsigned char iv[TLS_CIPHER_ARIA_GCM_128_IV_SIZE];   member
    183  unsigned char iv[TLS_CIPHER_ARIA_GCM_256_IV_SIZE];   member
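These iv[] members live in the per-cipher tls12_crypto_info_* structures that userspace fills and hands to the kernel TLS ULP. A sketch of the AES-128-GCM case following the pattern documented in Documentation/networking/tls.rst; enable_ktls_tx() and the SOL_TLS fallback define are illustrative, and the key material would normally come from the TLS handshake.

    #include <linux/tls.h>
    #include <netinet/in.h>
    #include <netinet/tcp.h>
    #include <string.h>
    #include <sys/socket.h>

    #ifndef SOL_TLS
    #define SOL_TLS 282   /* value from include/linux/socket.h */
    #endif

    /* Attach the "tls" ULP to a connected TCP socket and install TX
     * crypto state, including the per-record IV, via setsockopt(). */
    static int enable_ktls_tx(int sock,
                              const unsigned char key[TLS_CIPHER_AES_GCM_128_KEY_SIZE],
                              const unsigned char iv[TLS_CIPHER_AES_GCM_128_IV_SIZE],
                              const unsigned char salt[TLS_CIPHER_AES_GCM_128_SALT_SIZE],
                              const unsigned char rec_seq[TLS_CIPHER_AES_GCM_128_REC_SEQ_SIZE])
    {
        struct tls12_crypto_info_aes_gcm_128 ci = { 0 };

        ci.info.version = TLS_1_2_VERSION;
        ci.info.cipher_type = TLS_CIPHER_AES_GCM_128;
        memcpy(ci.iv, iv, TLS_CIPHER_AES_GCM_128_IV_SIZE);
        memcpy(ci.key, key, TLS_CIPHER_AES_GCM_128_KEY_SIZE);
        memcpy(ci.salt, salt, TLS_CIPHER_AES_GCM_128_SALT_SIZE);
        memcpy(ci.rec_seq, rec_seq, TLS_CIPHER_AES_GCM_128_REC_SEQ_SIZE);

        if (setsockopt(sock, IPPROTO_TCP, TCP_ULP, "tls", sizeof("tls")) < 0)
            return -1;
        return setsockopt(sock, SOL_TLS, TLS_TX, &ci, sizeof(ci));
    }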
/linux-6.12.1/tools/testing/selftests/bpf/progs/

linked_list_fail.c
    11  struct map_value *v, *v2, *iv, *iv2; \
    25  iv = bpf_map_lookup_elem(map, &(int){ 0 }); \
    26  if (!iv) \
    66  CHECK(inner_map, pop_front, &iv->head);
    67  CHECK(inner_map, pop_back, &iv->head);
    89  CHECK(inner_map, push_front, &iv->head, &f->node2);
    90  CHECK(inner_map, push_back, &iv->head, &f->node2);
    109  CHECK(kptr_inner_map, op, &f1->lock, &iv->head); \
    114  CHECK(global_inner_map, op, &glock, &iv->head); \
    119  CHECK(map_inner_map, op, &v->lock, &iv->head); \
    [all …]