Searched refs:dst_len (Results 1 – 25 of 97) sorted by relevance

/linux-6.12.1/crypto/
rsa-pkcs1pad.c
209 len = req_ctx->child_req.dst_len; in pkcs1pad_encrypt_sign_complete()
229 req->dst_len = ctx->key_size; in pkcs1pad_encrypt_sign_complete()
263 if (req->dst_len < ctx->key_size) { in pkcs1pad_encrypt()
264 req->dst_len = ctx->key_size; in pkcs1pad_encrypt()
288 req->dst, ctx->key_size - 1, req->dst_len); in pkcs1pad_encrypt()
302 unsigned int dst_len; in pkcs1pad_decrypt_complete() local
310 dst_len = req_ctx->child_req.dst_len; in pkcs1pad_decrypt_complete()
311 if (dst_len < ctx->key_size - 1) in pkcs1pad_decrypt_complete()
315 if (dst_len == ctx->key_size) { in pkcs1pad_decrypt_complete()
320 dst_len--; in pkcs1pad_decrypt_complete()
[all …]
ecrdsa.c
77 unsigned int ndigits = req->dst_len / sizeof(u64); in ecrdsa_verify()
96 req->dst_len != ctx->digest_len || in ecrdsa_verify()
97 req->dst_len != ctx->curve->g.ndigits * sizeof(u64) || in ecrdsa_verify()
99 req->dst_len * 2 != req->src_len || in ecrdsa_verify()
101 WARN_ON(req->dst_len > sizeof(digest))) in ecrdsa_verify()
108 req->src_len + req->dst_len), in ecrdsa_verify()
109 digest, req->dst_len, req->src_len); in ecrdsa_verify()
ecdsa.c
144 buffer = kmalloc(req->src_len + req->dst_len, GFP_KERNEL); in ecdsa_verify()
149 sg_nents_for_len(req->src, req->src_len + req->dst_len), in ecdsa_verify()
150 buffer, req->src_len + req->dst_len, 0); in ecdsa_verify()
157 if (bufsize > req->dst_len) in ecdsa_verify()
158 bufsize = req->dst_len; in ecdsa_verify()
/linux-6.12.1/fs/bcachefs/
compress.c
157 size_t dst_len = crc.uncompressed_size << 9; in __bio_uncompress() local
167 src_len, dst_len, dst_len); in __bio_uncompress()
168 if (ret != dst_len) in __bio_uncompress()
176 .avail_out = dst_len, in __bio_uncompress()
202 dst_data, dst_len, in __bio_uncompress()
207 if (ret != dst_len) in __bio_uncompress()
227 size_t dst_len = crc->uncompressed_size << 9; in bch2_bio_uncompress_inplace() local
239 data = __bounce_alloc(c, dst_len, WRITE); in bch2_bio_uncompress_inplace()
272 size_t dst_len = crc.uncompressed_size << 9; in bch2_bio_uncompress() local
279 dst_data = dst_len == dst_iter.bi_size in bch2_bio_uncompress()
[all …]
/linux-6.12.1/lib/
decompress_unlzo.c
105 u32 src_len, dst_len; in unlzo() local
181 dst_len = get_unaligned_be32(in_buf); in unlzo()
186 if (dst_len == 0) { in unlzo()
192 if (dst_len > LZO_BLOCK_SIZE) { in unlzo()
211 if (src_len <= 0 || src_len > dst_len) { in unlzo()
226 tmp = dst_len; in unlzo()
231 if (unlikely(dst_len == src_len)) in unlzo()
237 if (r != LZO_E_OK || dst_len != tmp) { in unlzo()
243 if (flush && flush(out_buf, dst_len) != dst_len) in unlzo()
246 out_buf += dst_len; in unlzo()
/linux-6.12.1/net/smc/
smc_tx.c
358 size_t dst_off, size_t dst_len, in smcr_tx_rdma_writes() argument
366 int src_len_sum = src_len, dst_len_sum = dst_len; in smcr_tx_rdma_writes()
377 if (dst_len < link->qp_attr.cap.max_inline_data) { in smcr_tx_rdma_writes()
398 if (src_len_sum == dst_len) in smcr_tx_rdma_writes()
401 src_len = dst_len - src_len; /* remainder */ in smcr_tx_rdma_writes()
411 dst_len = len - dst_len; /* remainder */ in smcr_tx_rdma_writes()
412 dst_len_sum += dst_len; in smcr_tx_rdma_writes()
413 src_len = min_t(int, dst_len, conn->sndbuf_desc->len - in smcr_tx_rdma_writes()
423 size_t dst_off, size_t dst_len) in smcd_tx_rdma_writes() argument
425 int src_len_sum = src_len, dst_len_sum = dst_len; in smcd_tx_rdma_writes()
[all …]
/linux-6.12.1/net/ipv4/
fib_rules.c
37 u8 dst_len; member
54 if (r->dst_len || r->src_len || r->dscp) in fib4_rule_matchall()
290 if (frh->dst_len) in fib4_rule_configure()
306 rule4->dst_len = frh->dst_len; in fib4_rule_configure()
307 rule4->dstmask = inet_make_mask(rule4->dst_len); in fib4_rule_configure()
347 if (frh->dst_len && (rule4->dst_len != frh->dst_len)) in fib4_rule_compare()
371 if (frh->dst_len && (rule4->dst != nla_get_in_addr(tb[FRA_DST]))) in fib4_rule_compare()
382 frh->dst_len = rule4->dst_len; in fib4_rule_fill()
394 if ((rule4->dst_len && in fib4_rule_fill()
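
In the fib_rules.c hits above, dst_len is not a buffer length but an IPv4 destination prefix length (0-32) that fib4_rule_configure() turns into a netmask with inet_make_mask(). A minimal sketch of that conversion, assuming inet_make_mask() from <net/route.h>; the wrapper function itself is illustrative, not taken from these results:

#include <net/route.h>

/* Illustrative wrapper: prefix length -> IPv4 netmask, as fib4_rule_configure() does. */
static __be32 example_prefix_to_mask(u8 dst_len)
{
	/* e.g. dst_len = 24 yields htonl(0xffffff00), i.e. 255.255.255.0 */
	return inet_make_mask(dst_len);
}
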
/linux-6.12.1/drivers/block/zram/
backend_lz4hc.c
78 req->dst_len, params->level, in lz4hc_compress()
88 req->src_len, req->dst_len); in lz4hc_compress()
92 req->dst_len = ret; in lz4hc_compress()
104 req->dst_len); in lz4hc_decompress()
113 req->dst_len); in lz4hc_decompress()
backend_lz4.c
78 req->dst_len, params->level, in lz4_compress()
87 req->dst_len, params->level); in lz4_compress()
91 req->dst_len = ret; in lz4_compress()
103 req->dst_len); in lz4_decompress()
112 req->dst_len); in lz4_decompress()
backend_842.c
35 unsigned int dlen = req->dst_len; in compress_842()
41 req->dst_len = dlen; in compress_842()
48 unsigned int dlen = req->dst_len; in decompress_842()
backend_zstd.c
186 ret = zstd_compress_cctx(zctx->cctx, req->dst, req->dst_len, in zstd_compress()
190 req->dst_len, req->src, in zstd_compress()
195 req->dst_len = ret; in zstd_compress()
207 ret = zstd_decompress_dctx(zctx->dctx, req->dst, req->dst_len, in zstd_decompress()
211 req->dst_len, req->src, in zstd_decompress()
zcomp.c
122 const void *src, unsigned int *dst_len) in zcomp_compress() argument
128 .dst_len = 2 * PAGE_SIZE, in zcomp_compress()
134 *dst_len = req.dst_len; in zcomp_compress()
145 .dst_len = PAGE_SIZE, in zcomp_decompress()
backend_deflate.c
102 deflate->avail_out = req->dst_len; in deflate_compress()
108 req->dst_len = deflate->total_out; in deflate_compress()
129 inflate->avail_out = req->dst_len; in deflate_decompress()
backend_lzo.c
37 &req->dst_len, ctx->context); in lzo_compress()
47 req->dst, &req->dst_len); in lzo_decompress()
backend_lzorle.c
37 &req->dst_len, ctx->context); in lzorle_compress()
47 req->dst, &req->dst_len); in lzorle_decompress()
zcomp.h
45 size_t dst_len; member
83 const void *src, unsigned int *dst_len);
/linux-6.12.1/include/linux/
lzo.h
25 unsigned char *dst, size_t *dst_len, void *wrkmem);
29 unsigned char *dst, size_t *dst_len, void *wrkmem);
33 unsigned char *dst, size_t *dst_len);
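
The lzo.h prototypes above take dst_len by pointer because it is an in/out parameter: callers pass the destination capacity and the routine writes back the number of bytes it produced. A minimal sketch of that convention; the wrapper, buffer sizes, and error handling are illustrative assumptions, not taken from these results:

#include <linux/lzo.h>
#include <linux/slab.h>

/* Illustrative: compress src into a worst-case-sized buffer. */
static int example_lzo_compress(const unsigned char *src, size_t src_len)
{
	size_t dst_len = lzo1x_worst_compress(src_len);	/* capacity on input */
	unsigned char *dst = kmalloc(dst_len, GFP_KERNEL);
	void *wrkmem = kmalloc(LZO1X_1_MEM_COMPRESS, GFP_KERNEL);
	int ret = -ENOMEM;

	if (!dst || !wrkmem)
		goto out;

	ret = lzo1x_1_compress(src, src_len, dst, &dst_len, wrkmem);
	/* on LZO_E_OK, dst_len now holds the compressed size written to dst */
out:
	kfree(wrkmem);
	kfree(dst);
	return ret;
}
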
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/lag/
mp.c
103 static void mlx5_lag_fib_set(struct lag_mp *mp, struct fib_info *fi, u32 dst, int dst_len) in mlx5_lag_fib_set() argument
108 mp->fib.dst_len = dst_len; in mlx5_lag_fib_set()
167 (mp->fib.dst != fen_info->dst || mp->fib.dst_len != fen_info->dst_len) && in mlx5_lag_fib_route_event()
176 if (mp->fib.dst == fen_info->dst && mp->fib.dst_len == fen_info->dst_len) in mlx5_lag_fib_route_event()
193 mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len); in mlx5_lag_fib_route_event()
208 mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len); in mlx5_lag_fib_route_event()
/linux-6.12.1/lib/crypto/
chacha20poly1305.c
130 size_t dst_len; in __chacha20poly1305_decrypt() local
148 dst_len = src_len - POLY1305_DIGEST_SIZE; in __chacha20poly1305_decrypt()
149 poly1305_update(&poly1305_state, src, dst_len); in __chacha20poly1305_decrypt()
150 if (dst_len & 0xf) in __chacha20poly1305_decrypt()
151 poly1305_update(&poly1305_state, pad0, 0x10 - (dst_len & 0xf)); in __chacha20poly1305_decrypt()
154 b.lens[1] = cpu_to_le64(dst_len); in __chacha20poly1305_decrypt()
159 ret = crypto_memneq(b.mac, src + dst_len, POLY1305_DIGEST_SIZE); in __chacha20poly1305_decrypt()
161 chacha20_crypt(chacha_state, dst, src, dst_len); in __chacha20poly1305_decrypt()
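
In __chacha20poly1305_decrypt() above, dst_len is simply the ciphertext length minus the 16-byte Poly1305 tag that trails the data in src. The exported helper hides that bookkeeping; a hedged usage sketch (the wrapper, buffers, nonce and key handling are illustrative):

#include <crypto/chacha20poly1305.h>

/* Illustrative: dst must have room for src_len - POLY1305_DIGEST_SIZE bytes. */
static bool example_aead_decrypt(u8 *dst, const u8 *src, size_t src_len,
				 u64 nonce, const u8 key[CHACHA20POLY1305_KEY_SIZE])
{
	/* returns false if the trailing tag does not verify */
	return chacha20poly1305_decrypt(dst, src, src_len, NULL, 0, nonce, key);
}
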
/linux-6.12.1/include/crypto/
akcipher.h
40 unsigned int dst_len; member
255 unsigned int dst_len) in akcipher_request_set_crypt() argument
260 req->dst_len = dst_len; in akcipher_request_set_crypt()
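
For the akcipher API above, the caller supplies dst_len as the capacity of the destination scatterlist via akcipher_request_set_crypt(), and the implementation overwrites req->dst_len with the actual output length (as the rsa-pkcs1pad.c and ecrdsa.c hits earlier on this page show). A hedged sketch of that calling pattern, assuming a transform whose key has already been set; the wrapper function and names are illustrative:

#include <crypto/akcipher.h>
#include <linux/crypto.h>
#include <linux/printk.h>
#include <linux/scatterlist.h>

/* Illustrative: encrypt src_sg into dst_sg and report the produced length. */
static int example_akcipher_encrypt(struct crypto_akcipher *tfm,
				    struct scatterlist *src_sg, unsigned int src_len,
				    struct scatterlist *dst_sg, unsigned int dst_len)
{
	DECLARE_CRYPTO_WAIT(wait);
	struct akcipher_request *req;
	int ret;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	/* dst_len in: capacity of dst_sg; req->dst_len out: bytes actually written */
	akcipher_request_set_crypt(req, src_sg, dst_sg, src_len, dst_len);

	ret = crypto_wait_req(crypto_akcipher_encrypt(req), &wait);
	if (!ret)
		pr_info("produced %u bytes\n", req->dst_len);

	akcipher_request_free(req);
	return ret;
}
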
/linux-6.12.1/drivers/crypto/virtio/
virtio_crypto_skcipher_algs.c
337 u64 dst_len; in __virtio_crypto_skcipher_do_req() local
387 dst_len = virtio_crypto_alg_sg_nents_length(req->dst); in __virtio_crypto_skcipher_do_req()
388 if (unlikely(dst_len > U32_MAX)) { in __virtio_crypto_skcipher_do_req()
394 dst_len = min_t(unsigned int, req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
396 req->cryptlen, dst_len); in __virtio_crypto_skcipher_do_req()
398 if (unlikely(req->cryptlen + dst_len + ivsize + in __virtio_crypto_skcipher_do_req()
406 cpu_to_le32((uint32_t)dst_len); in __virtio_crypto_skcipher_do_req()
/linux-6.12.1/security/keys/
dh.c
271 if (copy_from_user(outbuf + req->dst_len, kdfcopy->otherinfo, in __keyctl_dh_compute()
278 req->dst_len + kdfcopy->otherinfolen); in __keyctl_dh_compute()
279 } else if (copy_to_user(buffer, outbuf, req->dst_len) == 0) { in __keyctl_dh_compute()
280 ret = req->dst_len; in __keyctl_dh_compute()
/linux-6.12.1/drivers/misc/mei/
vsc-tp.c
161 int ret, offset = 0, cpy_len, src_len, dst_len = sizeof(struct vsc_tp_packet); in vsc_tp_xfer_helper() local
189 cpy_len = min(src_len, dst_len); in vsc_tp_xfer_helper()
195 dst_len -= cpy_len; in vsc_tp_xfer_helper()
206 dst_len = min(ilen, le16_to_cpu(ack.len)); in vsc_tp_xfer_helper()
209 dst_len = sizeof(recv_crc); in vsc_tp_xfer_helper()
225 dst_len = sizeof(recv_crc); in vsc_tp_xfer_helper()
/linux-6.12.1/net/sched/
act_tunnel_key.c
88 tunnel_key_copy_geneve_opt(const struct nlattr *nla, void *dst, int dst_len, in tunnel_key_copy_geneve_opt() argument
123 WARN_ON(dst_len < opt_len); in tunnel_key_copy_geneve_opt()
140 tunnel_key_copy_vxlan_opt(const struct nlattr *nla, void *dst, int dst_len, in tunnel_key_copy_vxlan_opt() argument
167 tunnel_key_copy_erspan_opt(const struct nlattr *nla, void *dst, int dst_len, in tunnel_key_copy_erspan_opt() argument
220 int dst_len, struct netlink_ext_ack *extack) in tunnel_key_copy_opts() argument
238 dst_len, extack); in tunnel_key_copy_opts()
247 dst_len -= opt_len; in tunnel_key_copy_opts()
258 dst_len, extack); in tunnel_key_copy_opts()
270 dst_len, extack); in tunnel_key_copy_opts()
/linux-6.12.1/kernel/bpf/
crypto.c
270 u32 src_len, dst_len, siv_len; in bpf_crypto_crypt() local
280 dst_len = __bpf_dynptr_size(dst); in bpf_crypto_crypt()
281 if (!src_len || !dst_len) in bpf_crypto_crypt()
290 pdst = __bpf_dynptr_data_rw(dst, dst_len); in bpf_crypto_crypt()
