/linux-6.12.1/arch/mips/crypto/
    poly1305-glue.c
        20  void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 key[POLY1305_KEY_SIZE])
        22  poly1305_init_mips(&dctx->h, key);
        23  dctx->s[0] = get_unaligned_le32(key + 16);
        24  dctx->s[1] = get_unaligned_le32(key + 20);
        25  dctx->s[2] = get_unaligned_le32(key + 24);
        26  dctx->s[3] = get_unaligned_le32(key + 28);
        27  dctx->buflen = 0;
        33  struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);      in mips_poly1305_init()
        35  dctx->buflen = 0;
        36  dctx->rset = 0;
        [all …]
/linux-6.12.1/arch/arm64/crypto/
    poly1305-glue.c
        28  void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 key[POLY1305_KEY_SIZE])
        30  poly1305_init_arm64(&dctx->h, key);
        31  dctx->s[0] = get_unaligned_le32(key + 16);
        32  dctx->s[1] = get_unaligned_le32(key + 20);
        33  dctx->s[2] = get_unaligned_le32(key + 24);
        34  dctx->s[3] = get_unaligned_le32(key + 28);
        35  dctx->buflen = 0;
        41  struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);      in neon_poly1305_init()
        43  dctx->buflen = 0;
        44  dctx->rset = 0;
        [all …]
    polyval-ce-glue.c
        94  struct polyval_desc_ctx *dctx = shash_desc_ctx(desc);      in polyval_arm64_init()
        96  memset(dctx, 0, sizeof(*dctx));
       104  struct polyval_desc_ctx *dctx = shash_desc_ctx(desc);      in polyval_arm64_update()
       110  if (dctx->bytes) {
       111  n = min(srclen, dctx->bytes);
       112  pos = dctx->buffer + POLYVAL_BLOCK_SIZE - dctx->bytes;
       114  dctx->bytes -= n;
       120  if (!dctx->bytes)
       121  internal_polyval_mul(dctx->buffer,
       128  internal_polyval_update(tctx, src, nblocks, dctx->buffer);
        [all …]
/linux-6.12.1/arch/arm/crypto/
    poly1305-glue.c
        32  void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 key[POLY1305_KEY_SIZE])
        34  poly1305_init_arm(&dctx->h, key);
        35  dctx->s[0] = get_unaligned_le32(key + 16);
        36  dctx->s[1] = get_unaligned_le32(key + 20);
        37  dctx->s[2] = get_unaligned_le32(key + 24);
        38  dctx->s[3] = get_unaligned_le32(key + 28);
        39  dctx->buflen = 0;
        45  struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);      in arm_poly1305_init()
        47  dctx->buflen = 0;
        48  dctx->rset = 0;
        [all …]
/linux-6.12.1/arch/powerpc/crypto/
    poly1305-p10-glue.c
        39  struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);      in crypto_poly1305_p10_init()
        41  poly1305_core_init(&dctx->h);
        42  dctx->buflen = 0;
        43  dctx->rset = 0;
        44  dctx->sset = false;
        49  static unsigned int crypto_poly1305_setdctxkey(struct poly1305_desc_ctx *dctx,
        54  if (unlikely(!dctx->sset)) {
        55  if (!dctx->rset && len >= POLY1305_BLOCK_SIZE) {
        56  struct poly1305_core_key *key = &dctx->core_r;
        63  dctx->rset = 1;
        [all …]
    ghash.c
        48  struct p8_ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in p8_ghash_init()
        50  dctx->bytes = 0;
        51  memset(dctx->shash, 0, GHASH_DIGEST_SIZE);
        77  struct p8_ghash_desc_ctx *dctx)                              in __ghash_block()
        83  gcm_ghash_p8(dctx->shash, ctx->htable,
        84  dctx->buffer, GHASH_DIGEST_SIZE);
        89  crypto_xor((u8 *)dctx->shash, dctx->buffer, GHASH_BLOCK_SIZE);
        90  gf128mul_lle((be128 *)dctx->shash, &ctx->key);
        95  struct p8_ghash_desc_ctx *dctx,                              in __ghash_blocks()
       102  gcm_ghash_p8(dctx->shash, ctx->htable,
        [all …]
/linux-6.12.1/lib/zstd/decompress/
    zstd_decompress.c
       213  size_t ZSTD_sizeof_DCtx (const ZSTD_DCtx* dctx)
       215  if (dctx==NULL) return 0;  /* support sizeof NULL */
       216  return sizeof(*dctx)
       217  + ZSTD_sizeof_DDict(dctx->ddictLocal)
       218  + dctx->inBuffSize + dctx->outBuffSize;
       232  static void ZSTD_DCtx_resetParameters(ZSTD_DCtx* dctx)
       234  assert(dctx->streamStage == zdss_init);
       235  dctx->format = ZSTD_f_zstd1;
       236  dctx->maxWindowSize = ZSTD_MAXWINDOWSIZE_DEFAULT;
       237  dctx->outBufferMode = ZSTD_bm_buffered;
        [all …]
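For reference, this is the userspace shape of the same DCtx lifecycle, written against the upstream libzstd API that the kernel's vendored copy mirrors. It is a hedged sketch, not kernel code: ZSTD_sizeof_DCtx() is an advanced API that needs ZSTD_STATIC_LINKING_ONLY, and error handling is trimmed to keep it short.

    #include <stdio.h>
    #include <stdlib.h>
    #define ZSTD_STATIC_LINKING_ONLY   /* exposes ZSTD_sizeof_DCtx() */
    #include <zstd.h>

    /* Decompress one zstd frame into a malloc'd buffer, reusing a single DCtx,
     * and report how much memory the DCtx itself holds -- the same accounting
     * ZSTD_sizeof_DCtx() performs in zstd_decompress.c above. */
    void *decompress_with_dctx(const void *src, size_t srcSize, size_t *dstSize)
    {
        unsigned long long const rSize = ZSTD_getFrameContentSize(src, srcSize);
        if (rSize == ZSTD_CONTENTSIZE_ERROR || rSize == ZSTD_CONTENTSIZE_UNKNOWN)
            return NULL;

        void *dst = malloc((size_t)rSize);
        if (!dst)
            return NULL;

        ZSTD_DCtx *dctx = ZSTD_createDCtx();
        size_t const ret = ZSTD_decompressDCtx(dctx, dst, (size_t)rSize, src, srcSize);

        printf("DCtx footprint: %zu bytes\n", ZSTD_sizeof_DCtx(dctx));
        ZSTD_freeDCtx(dctx);

        if (ZSTD_isError(ret)) {
            free(dst);
            return NULL;
        }
        *dstSize = ret;
        return dst;
    }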
    zstd_decompress_block.c
        73  static void ZSTD_allocateLiteralsBuffer(ZSTD_DCtx* dctx, void* const dst, const size_t dstCapacity,…
        79  dctx->litBuffer = (BYTE*)dst + ZSTD_BLOCKSIZE_MAX + WILDCOPY_OVERLENGTH;
        80  dctx->litBufferEnd = dctx->litBuffer + litSize;
        81  dctx->litBufferLocation = ZSTD_in_dst;
        88  …dctx->litBuffer = (BYTE*)dst + expectedWriteSize - litSize + ZSTD_LITBUFFEREXTRASIZE - WILDCOPY_OV…
        89  dctx->litBufferEnd = dctx->litBuffer + litSize - ZSTD_LITBUFFEREXTRASIZE;
        93  dctx->litBuffer = (BYTE*)dst + expectedWriteSize - litSize;
        94  dctx->litBufferEnd = (BYTE*)dst + expectedWriteSize;
        96  dctx->litBufferLocation = ZSTD_split;
       101  dctx->litBuffer = dctx->litExtraBuffer;
        [all …]
    zstd_ddict.c
        55  void ZSTD_copyDDictParameters(ZSTD_DCtx* dctx, const ZSTD_DDict* ddict)
        58  assert(dctx != NULL);
        60  dctx->dictID = ddict->dictID;
        61  dctx->prefixStart = ddict->dictContent;
        62  dctx->virtualStart = ddict->dictContent;
        63  dctx->dictEnd = (const BYTE*)ddict->dictContent + ddict->dictSize;
        64  dctx->previousDstEnd = dctx->dictEnd;
        66  dctx->dictContentBeginForFuzzing = dctx->prefixStart;
        67  dctx->dictContentEndForFuzzing = dctx->previousDstEnd;
        70  dctx->litEntropy = 1;
        [all …]
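ZSTD_copyDDictParameters() is the step that loads a prebuilt dictionary's content window and entropy tables into a DCtx. A hedged sketch of the caller-side pattern with upstream libzstd: build the DDict once, then reuse it for every frame that was compressed against that dictionary.

    #include <assert.h>
    #include <zstd.h>

    /* Each ZSTD_decompress_usingDDict() call wires the dictionary into the DCtx,
     * which is exactly what ZSTD_copyDDictParameters() does internally. */
    size_t decompress_with_dict(ZSTD_DCtx *dctx,
                                void *dst, size_t dstCapacity,
                                const void *src, size_t srcSize,
                                const ZSTD_DDict *ddict)
    {
        size_t const ret = ZSTD_decompress_usingDDict(dctx, dst, dstCapacity,
                                                      src, srcSize, ddict);
        assert(!ZSTD_isError(ret));   /* real code: report ZSTD_getErrorName(ret) */
        return ret;
    }

    /* Typical setup, done once and reused for every frame:
     *   ZSTD_DDict *ddict = ZSTD_createDDict(dictBuf, dictSize);
     *   ZSTD_DCtx  *dctx  = ZSTD_createDCtx();
     *   ... decompress_with_dict(dctx, ...) per frame ...
     *   ZSTD_freeDDict(ddict);  ZSTD_freeDCtx(dctx);
     */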
/linux-6.12.1/crypto/
    poly1305_generic.c
        24  struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);      in crypto_poly1305_init()
        26  poly1305_core_init(&dctx->h);
        27  dctx->buflen = 0;
        28  dctx->rset = 0;
        29  dctx->sset = false;
        34  static unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
        37  if (!dctx->sset) {
        38  if (!dctx->rset && srclen >= POLY1305_BLOCK_SIZE) {
        39  poly1305_core_setkey(&dctx->core_r, src);
        42  dctx->rset = 2;
        [all …]
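The rset/sset flags above exist because the shash form of Poly1305 absorbs its 32-byte one-time key from the front of the data stream: the first 16 bytes become r (clamped), the next 16 become s, which is what dctx->core_r and dctx->s[] hold in the glue files in this listing. For comparison, a hedged userspace sketch of the same one-shot MAC using libsodium's crypto_onetimeauth(); this assumes libsodium and is an illustration, not the kernel API.

    #include <stdio.h>
    #include <sodium.h>

    int main(void)
    {
        unsigned char key[crypto_onetimeauth_KEYBYTES];   /* 32 bytes: r || s */
        unsigned char tag[crypto_onetimeauth_BYTES];      /* 16-byte tag */
        const unsigned char msg[] = "message to authenticate";

        if (sodium_init() < 0)
            return 1;
        randombytes_buf(key, sizeof(key));                /* Poly1305 keys are one-time */
        crypto_onetimeauth(tag, msg, sizeof(msg) - 1, key);

        for (size_t i = 0; i < sizeof(tag); i++)
            printf("%02x", tag[i]);
        putchar('\n');
        return 0;
    }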
    ghash-generic.c
        48  struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in ghash_init()
        50  memset(dctx, 0, sizeof(*dctx));
        81  struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in ghash_update()
        83  u8 *dst = dctx->buffer;
        85  if (dctx->bytes) {
        86  int n = min(srclen, dctx->bytes);
        87  u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
        89  dctx->bytes -= n;
        95  if (!dctx->bytes)
       107  dctx->bytes = GHASH_BLOCK_SIZE - srclen;
        [all …]
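ghash_update() above is one instance of the partial-block buffering idiom that recurs throughout this listing (GHASH, POLYVAL, and in a slightly different form Poly1305): dctx->bytes counts how many bytes are still missing from the buffered block, a short tail is stashed, completed on the next call, and full blocks are processed straight from the input. A minimal standalone sketch of that bookkeeping follows; process_block() is a stand-in, not real GHASH.

    #include <stddef.h>
    #include <string.h>

    #define BLOCK_SIZE 16

    struct desc_ctx {
        unsigned char buffer[BLOCK_SIZE];
        size_t bytes;               /* bytes still MISSING from buffer; 0 = no partial block */
    };

    /* Stand-in for the per-block transform (gf128mul_lle() etc. in the kernel). */
    static void process_block(const unsigned char *block) { (void)block; }

    void ctx_update(struct desc_ctx *ctx, const unsigned char *src, size_t len)
    {
        if (ctx->bytes) {                               /* finish a buffered partial block */
            size_t n = len < ctx->bytes ? len : ctx->bytes;

            memcpy(ctx->buffer + (BLOCK_SIZE - ctx->bytes), src, n);
            ctx->bytes -= n;
            src += n;
            len -= n;
            if (!ctx->bytes)
                process_block(ctx->buffer);
        }
        while (len >= BLOCK_SIZE) {                     /* whole blocks straight from src */
            process_block(src);
            src += BLOCK_SIZE;
            len -= BLOCK_SIZE;
        }
        if (len) {                                      /* stash the new tail */
            memcpy(ctx->buffer, src, len);
            ctx->bytes = BLOCK_SIZE - len;
        }
    }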
    des_generic.c
        22  struct des_ctx *dctx = crypto_tfm_ctx(tfm);               in des_setkey()
        25  err = des_expand_key(dctx, key, keylen);
        33  memset(dctx, 0, sizeof(*dctx));
        39  const struct des_ctx *dctx = crypto_tfm_ctx(tfm);         in crypto_des_encrypt()
        41  des_encrypt(dctx, dst, src);
        46  const struct des_ctx *dctx = crypto_tfm_ctx(tfm);         in crypto_des_decrypt()
        48  des_decrypt(dctx, dst, src);
        54  struct des3_ede_ctx *dctx = crypto_tfm_ctx(tfm);          in des3_ede_setkey()
        57  err = des3_ede_expand_key(dctx, key, keylen);
        65  memset(dctx, 0, sizeof(*dctx));
        [all …]
    polyval-generic.c
       145  struct polyval_desc_ctx *dctx = shash_desc_ctx(desc);      in polyval_init()
       147  memset(dctx, 0, sizeof(*dctx));
       155  struct polyval_desc_ctx *dctx = shash_desc_ctx(desc);      in polyval_update()
       161  if (dctx->bytes) {
       162  n = min(srclen, dctx->bytes);
       163  pos = dctx->buffer + dctx->bytes - 1;
       165  dctx->bytes -= n;
       171  if (!dctx->bytes)
       172  gf128mul_4k_lle(&dctx->buffer128, ctx->gf128);
       177  crypto_xor(dctx->buffer, tmp, POLYVAL_BLOCK_SIZE);
        [all …]
    vmac.c
       400  struct vmac_desc_ctx *dctx,                                 in vhash_blocks()
       406  u64 ch = dctx->polytmp[0];
       407  u64 cl = dctx->polytmp[1];
       410  if (!dctx->first_block_processed) {
       411  dctx->first_block_processed = true;
       426  dctx->polytmp[0] = ch;
       427  dctx->polytmp[1] = cl;
       483  struct vmac_desc_ctx *dctx = shash_desc_ctx(desc);          in vmac_init()
       485  dctx->partial_size = 0;
       486  dctx->first_block_processed = false;
        [all …]
/linux-6.12.1/arch/x86/crypto/
    poly1305_glue.c
       132  void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 key[POLY1305_KEY_SIZE])
       134  poly1305_simd_init(&dctx->h, key);
       135  dctx->s[0] = get_unaligned_le32(&key[16]);
       136  dctx->s[1] = get_unaligned_le32(&key[20]);
       137  dctx->s[2] = get_unaligned_le32(&key[24]);
       138  dctx->s[3] = get_unaligned_le32(&key[28]);
       139  dctx->buflen = 0;
       140  dctx->sset = true;
       144  static unsigned int crypto_poly1305_setdctxkey(struct poly1305_desc_ctx *dctx,
       148  if (unlikely(!dctx->sset)) {
        [all …]
    polyval-clmulni_glue.c
       103  struct polyval_desc_ctx *dctx = shash_desc_ctx(desc);      in polyval_x86_init()
       105  memset(dctx, 0, sizeof(*dctx));
       113  struct polyval_desc_ctx *dctx = shash_desc_ctx(desc);      in polyval_x86_update()
       119  if (dctx->bytes) {
       120  n = min(srclen, dctx->bytes);
       121  pos = dctx->buffer + POLYVAL_BLOCK_SIZE - dctx->bytes;
       123  dctx->bytes -= n;
       129  if (!dctx->bytes)
       130  internal_polyval_mul(dctx->buffer,
       137  internal_polyval_update(tctx, src, nblocks, dctx->buffer);
        [all …]
    ghash-clmulni-intel_glue.c
        47  struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in ghash_init()
        49  memset(dctx, 0, sizeof(*dctx));
        98  struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in ghash_update()
       100  u8 *dst = dctx->buffer;
       103  if (dctx->bytes) {
       104  int n = min(srclen, dctx->bytes);
       105  u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
       107  dctx->bytes -= n;
       113  if (!dctx->bytes)
       123  dctx->bytes = GHASH_BLOCK_SIZE - srclen;
        [all …]
/linux-6.12.1/arch/riscv/crypto/
    ghash-riscv64-glue.c
        48  struct riscv64_ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in riscv64_ghash_init()
        50  *dctx = (struct riscv64_ghash_desc_ctx){};
        57  struct riscv64_ghash_desc_ctx *dctx,                             in riscv64_ghash_blocks()
        63  ghash_zvkg(&dctx->accumulator, &tctx->key, src, srclen);
        67  crypto_xor((u8 *)&dctx->accumulator, src,
        69  gf128mul_lle(&dctx->accumulator, &tctx->key);
        80  struct riscv64_ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in riscv64_ghash_update()
        83  if (dctx->bytes) {
        84  if (dctx->bytes + srclen < GHASH_BLOCK_SIZE) {
        85  memcpy(dctx->buffer + dctx->bytes, src, srclen);
        [all …]
/linux-6.12.1/arch/s390/crypto/
    ghash_s390.c
        32  struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in ghash_init()
        35  memset(dctx, 0, sizeof(*dctx));
        36  memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);
        57  struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);      in ghash_update()
        59  u8 *buf = dctx->buffer;
        61  if (dctx->bytes) {
        62  u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes);
        64  n = min(srclen, dctx->bytes);
        65  dctx->bytes -= n;
        71  if (!dctx->bytes) {
        [all …]
/linux-6.12.1/fs/erofs/
    decompressor.c
       374  int z_erofs_stream_switch_bufs(struct z_erofs_stream_dctx *dctx, void **dst,
       377  struct z_erofs_decompress_req *rq = dctx->rq;
       382  if (!dctx->avail_out) {
       383  if (++dctx->no >= dctx->outpages || !rq->outputsize) {
       388  if (dctx->kout)
       389  kunmap_local(dctx->kout);
       390  dctx->avail_out = min(rq->outputsize, PAGE_SIZE - rq->pageofs_out);
       391  rq->outputsize -= dctx->avail_out;
       392  pgo = &rq->out[dctx->no];
       396  dctx->kout = NULL;
        [all …]
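z_erofs_stream_switch_bufs() refills dctx->avail_out by mapping the next rq->out[] page whenever the previous one fills, so the deflate/LZMA/zstd decoders below can keep streaming. A hedged userspace analogue of that output-switching loop, written against plain zlib; the kernel deflate path uses the in-kernel zlib with raw-deflate framing, so the framing and buffer sources differ.

    #include <stdio.h>
    #include <string.h>
    #include <zlib.h>

    /* Inflate `src` while repeatedly "switching" to a fresh fixed-size output
     * buffer whenever the previous one fills up. */
    int inflate_in_chunks(const unsigned char *src, size_t srclen)
    {
        unsigned char out[4096];          /* stands in for one output page */
        z_stream z;
        int ret;

        memset(&z, 0, sizeof(z));
        if (inflateInit(&z) != Z_OK)      /* raw deflate would use inflateInit2(&z, -MAX_WBITS) */
            return -1;
        z.next_in  = (unsigned char *)src;
        z.avail_in = srclen;

        do {
            z.next_out  = out;            /* switch buffers: fresh page, full avail_out */
            z.avail_out = sizeof(out);
            ret = inflate(&z, Z_NO_FLUSH);
            if (ret != Z_OK && ret != Z_STREAM_END) {
                inflateEnd(&z);
                return -1;
            }
            fwrite(out, 1, sizeof(out) - z.avail_out, stdout);
        } while (ret != Z_STREAM_END);

        return inflateEnd(&z) == Z_OK ? 0 : -1;
    }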
    decompressor_zstd.c
       142  struct z_erofs_stream_dctx dctx = {                          in z_erofs_zstd_decompress()
       156  dctx.kin = kmap_local_page(*rq->in);
       157  err = z_erofs_fixup_insize(rq, dctx.kin + rq->pageofs_in,
       160  kunmap_local(dctx.kin);
       177  in_buf.src = dctx.kin + rq->pageofs_in;
       178  dctx.bounce = strm->bounce;
       181  dctx.avail_out = out_buf.size - out_buf.pos;
       182  dctx.inbuf_sz = in_buf.size;
       183  dctx.inbuf_pos = in_buf.pos;
       184  err = z_erofs_stream_switch_bufs(&dctx, &out_buf.dst,
        [all …]
    decompressor_deflate.c
       104  struct z_erofs_stream_dctx dctx = {                          in z_erofs_deflate_decompress()
       115  dctx.kin = kmap_local_page(*rq->in);
       116  err = z_erofs_fixup_insize(rq, dctx.kin + rq->pageofs_in,
       119  kunmap_local(dctx.kin);
       145  strm->z.next_in = dctx.kin + rq->pageofs_in;
       147  dctx.bounce = strm->bounce;
       150  dctx.avail_out = strm->z.avail_out;
       151  dctx.inbuf_sz = strm->z.avail_in;
       152  err = z_erofs_stream_switch_bufs(&dctx,
       157  strm->z.avail_out = dctx.avail_out;
        [all …]
    decompressor_lzma.c
       153  struct z_erofs_stream_dctx dctx = {                          in z_erofs_lzma_decompress()
       166  dctx.kin = kmap_local_page(*rq->in);
       167  err = z_erofs_fixup_insize(rq, dctx.kin + rq->pageofs_in,
       170  kunmap_local(dctx.kin);
       191  buf.in = dctx.kin + rq->pageofs_in;
       192  dctx.bounce = strm->bounce;
       194  dctx.avail_out = buf.out_size - buf.out_pos;
       195  dctx.inbuf_sz = buf.in_size;
       196  dctx.inbuf_pos = buf.in_pos;
       197  err = z_erofs_stream_switch_bufs(&dctx, (void **)&buf.out,
        [all …]
/linux-6.12.1/drivers/crypto/xilinx/
    zynqmp-sha.c
        83  struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);      in zynqmp_sha_init()
        86  dctx->fbk_req.tfm = tctx->fbk_tfm;
        87  return crypto_shash_init(&dctx->fbk_req);
        92  struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);      in zynqmp_sha_update()
        94  return crypto_shash_update(&dctx->fbk_req, data, length);
        99  struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);      in zynqmp_sha_final()
       101  return crypto_shash_final(&dctx->fbk_req, out);
       106  struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);      in zynqmp_sha_finup()
       108  return crypto_shash_finup(&dctx->fbk_req, data, length, out);
       113  struct zynqmp_sha_desc_ctx *dctx = shash_desc_ctx(desc);      in zynqmp_sha_import()
        [all …]
/linux-6.12.1/drivers/crypto/
    padlock-sha.c
        34  struct padlock_sha_desc *dctx = shash_desc_ctx(desc);      in padlock_sha_init()
        37  dctx->fallback.tfm = ctx->fallback;
        38  return crypto_shash_init(&dctx->fallback);
        44  struct padlock_sha_desc *dctx = shash_desc_ctx(desc);      in padlock_sha_update()
        46  return crypto_shash_update(&dctx->fallback, data, length);
        51  struct padlock_sha_desc *dctx = shash_desc_ctx(desc);      in padlock_sha_export()
        53  return crypto_shash_export(&dctx->fallback, out);
        58  struct padlock_sha_desc *dctx = shash_desc_ctx(desc);      in padlock_sha_import()
        61  dctx->fallback.tfm = ctx->fallback;
        62  return crypto_shash_import(&dctx->fallback, in);
        [all …]
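Both zynqmp-sha.c and padlock-sha.c above show the same delegation pattern: the per-request descriptor carries a handle to a software fallback, and most operations simply forward to it (only selected steps run on the accelerator). A minimal standalone sketch of that shape; every type and name below is illustrative, not kernel API.

    #include <stddef.h>

    struct fallback_ops {
        int (*init)(void *state);
        int (*update)(void *state, const unsigned char *data, size_t len);
    };

    struct sha_desc {
        const struct fallback_ops *fallback;   /* plays the role of dctx->fallback.tfm */
        void *fallback_state;
    };

    /* Mirrors padlock_sha_init(): bind the fallback, then delegate to it. */
    int sha_init(struct sha_desc *dctx, const struct fallback_ops *fb, void *fb_state)
    {
        dctx->fallback = fb;
        dctx->fallback_state = fb_state;
        return dctx->fallback->init(dctx->fallback_state);
    }

    /* Mirrors padlock_sha_update(): pure forwarding. */
    int sha_update(struct sha_desc *dctx, const unsigned char *data, size_t len)
    {
        return dctx->fallback->update(dctx->fallback_state, data, len);
    }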