Lines Matching refs:cc — uses of struct crypt_config *cc in drivers/md/dm-crypt.c. Each entry gives the source line number, the matched line, and the enclosing function (with argument/local/member annotations).
76 struct crypt_config *cc; member
104 int (*ctr)(struct crypt_config *cc, struct dm_target *ti,
106 void (*dtr)(struct crypt_config *cc);
107 int (*init)(struct crypt_config *cc);
108 int (*wipe)(struct crypt_config *cc);
109 int (*generator)(struct crypt_config *cc, u8 *iv,
111 int (*post)(struct crypt_config *cc, u8 *iv,
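
The six callbacks above form dm-crypt's per-IV-mode operations table: constructor, destructor, key-derived init, wipe, the IV generator itself, and a post hook run after the cipher. Below is a minimal userspace sketch of that shape with a null-style generator wired in; the demo_cfg/demo_req types and the iv_ops name are stand-ins for this sketch, not the kernel's definitions.

/*
 * Sketch of the IV-generation ops table suggested by the callbacks above.
 * Types are cut down so the example builds standalone; the real table in
 * dm-crypt takes struct crypt_config, struct dm_target and
 * struct dm_crypt_request.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct demo_cfg { unsigned int iv_size; };
struct demo_req { uint64_t iv_sector; };

struct iv_ops {
	int  (*ctr)(struct demo_cfg *cc, const char *opts);  /* parse opts, allocate state */
	void (*dtr)(struct demo_cfg *cc);                     /* free state                 */
	int  (*init)(struct demo_cfg *cc);                    /* derive state from the key  */
	int  (*wipe)(struct demo_cfg *cc);                    /* clear key-derived state    */
	int  (*generator)(struct demo_cfg *cc, uint8_t *iv, struct demo_req *req);
	int  (*post)(struct demo_cfg *cc, uint8_t *iv, struct demo_req *req);
};

/* "null"-style generator: the IV is all zeroes, as in crypt_iv_null_gen(). */
static int null_gen(struct demo_cfg *cc, uint8_t *iv, struct demo_req *req)
{
	(void)req;
	memset(iv, 0, cc->iv_size);
	return 0;
}

static const struct iv_ops null_ops = { .generator = null_gen };

int main(void)
{
	struct demo_cfg cc = { .iv_size = 16 };
	struct demo_req req = { .iv_sector = 42 };
	uint8_t iv[16];

	null_ops.generator(&cc, iv, &req);
	printf("iv[0]=%u iv[15]=%u\n", (unsigned)iv[0], (unsigned)iv[15]);
	return 0;
}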
255 static unsigned get_max_request_size(struct crypt_config *cc, bool wrt) in get_max_request_size() argument
261 if (wrt || cc->used_tag_size) { in get_max_request_size()
265 sector_align = max(bdev_logical_block_size(cc->dev->bdev), (unsigned)cc->sector_size); in get_max_request_size()
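
get_max_request_size() caps how much of a bio is processed at once and keeps the cap aligned; the line above picks the alignment as the larger of the device's logical block size and the crypto sector size. A hedged sketch of rounding a request down to that alignment follows; only the max() rule is taken from the line above, the cap itself depends on kernel state not shown here.

/* Round a request (in 512-byte sectors) down to a multiple of the larger of
 * the device's logical block size and the crypto sector size, mirroring the
 * sector_align computation above.  The example numbers are illustrative. */
#include <stdio.h>

static unsigned int max_u(unsigned int a, unsigned int b) { return a > b ? a : b; }

static unsigned int align_request_sectors(unsigned int sectors,
					  unsigned int logical_block_size,
					  unsigned int crypto_sector_size)
{
	unsigned int sector_align = max_u(logical_block_size, crypto_sector_size);
	unsigned int align_sectors = sector_align >> 9;   /* bytes -> 512B sectors */

	if (align_sectors)
		sectors -= sectors % align_sectors;        /* round down */
	return sectors;
}

int main(void)
{
	/* 4KiB crypto sectors on a 512B-logical-block device: */
	printf("%u\n", align_request_sectors(1023, 512, 4096)); /* -> 1016 */
	return 0;
}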
274 static struct scatterlist *crypt_get_sg_data(struct crypt_config *cc,
277 static bool crypt_integrity_aead(struct crypt_config *cc);
282 static struct crypto_skcipher *any_tfm(struct crypt_config *cc) in any_tfm() argument
284 return cc->cipher_tfm.tfms[0]; in any_tfm()
287 static struct crypto_aead *any_tfm_aead(struct crypt_config *cc) in any_tfm_aead() argument
289 return cc->cipher_tfm.tfms_aead[0]; in any_tfm_aead()
348 static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain_gen() argument
351 memset(iv, 0, cc->iv_size); in crypt_iv_plain_gen()
357 static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain64_gen() argument
360 memset(iv, 0, cc->iv_size); in crypt_iv_plain64_gen()
366 static int crypt_iv_plain64be_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_plain64be_gen() argument
369 memset(iv, 0, cc->iv_size); in crypt_iv_plain64be_gen()
371 *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); in crypt_iv_plain64be_gen()
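
The plain, plain64 and plain64be generators differ only in how the sector number lands in the zeroed IV: 32-bit little endian, 64-bit little endian at offset 0, or 64-bit big endian in the last eight bytes (the last case is the line above). A standalone sketch of the three layouts; put_le32/put_le64/put_be64 are local stand-ins for the kernel's cpu_to_le32/le64/be64 stores.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Explicit byte stores, independent of host endianness. */
static void put_le32(uint8_t *p, uint32_t v) { for (int i = 0; i < 4; i++) p[i] = v >> (8 * i); }
static void put_le64(uint8_t *p, uint64_t v) { for (int i = 0; i < 8; i++) p[i] = v >> (8 * i); }
static void put_be64(uint8_t *p, uint64_t v) { for (int i = 0; i < 8; i++) p[i] = v >> (8 * (7 - i)); }

static void iv_plain(uint8_t *iv, unsigned iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_le32(iv, (uint32_t)sector);              /* truncated to 32 bits */
}

static void iv_plain64(uint8_t *iv, unsigned iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_le64(iv, sector);                        /* full 64 bits, LE, at offset 0 */
}

static void iv_plain64be(uint8_t *iv, unsigned iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	put_be64(iv + iv_size - 8, sector);          /* BE, in the last 8 bytes */
}

int main(void)
{
	uint8_t iv[16];
	uint64_t sector = 0x0102030405060708ULL;

	iv_plain(iv, sizeof(iv), sector);
	iv_plain64(iv, sizeof(iv), sector);
	iv_plain64be(iv, sizeof(iv), sector);
	for (unsigned i = 0; i < sizeof(iv); i++)
		printf("%02x", iv[i]);
	printf("\n");   /* plain64be: 00000000000000000102030405060708 */
	return 0;
}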
376 static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_essiv_gen() argument
383 memset(iv, 0, cc->iv_size); in crypt_iv_essiv_gen()
389 static int crypt_iv_benbi_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_benbi_ctr() argument
395 if (crypt_integrity_aead(cc)) in crypt_iv_benbi_ctr()
396 bs = crypto_aead_blocksize(any_tfm_aead(cc)); in crypt_iv_benbi_ctr()
398 bs = crypto_skcipher_blocksize(any_tfm(cc)); in crypt_iv_benbi_ctr()
415 cc->iv_gen_private.benbi.shift = 9 - log; in crypt_iv_benbi_ctr()
420 static void crypt_iv_benbi_dtr(struct crypt_config *cc) in crypt_iv_benbi_dtr() argument
424 static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_benbi_gen() argument
429 memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */ in crypt_iv_benbi_gen()
431 val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); in crypt_iv_benbi_gen()
432 put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64))); in crypt_iv_benbi_gen()
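
benbi ("big-endian narrow block count") stores a 1-based count of cipher blocks, big endian, in the last eight bytes of the IV. The constructor derives shift = 9 - log2(cipher block size), so with a 16-byte block cipher a 512-byte sector starts at block (sector << 5) + 1, which is the value built on the lines above. A sketch of that arithmetic:

#include <stdint.h>
#include <stdio.h>

/* benbi setup: shift = 9 - log2(cipher block size), mirroring
 * crypt_iv_benbi_ctr(); returns -1 for an unusable block size. */
static int benbi_shift(unsigned int block_size)
{
	unsigned int log = 0;

	while ((1u << log) < block_size)
		log++;
	if ((1u << log) != block_size || log > 9)  /* not a power of 2, or > 512 */
		return -1;
	return 9 - (int)log;
}

/* benbi value for a sector: 1-based block count, as in crypt_iv_benbi_gen();
 * the kernel stores it big endian in the IV's last 8 bytes. */
static uint64_t benbi_value(uint64_t sector, int shift)
{
	return (sector << shift) + 1;
}

int main(void)
{
	int shift = benbi_shift(16);               /* AES block size */

	printf("shift=%d value(sector=3)=%llu\n",
	       shift, (unsigned long long)benbi_value(3, shift));
	/* shift=5, value = 3*32 + 1 = 97 */
	return 0;
}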
437 static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_null_gen() argument
440 memset(iv, 0, cc->iv_size); in crypt_iv_null_gen()
445 static void crypt_iv_lmk_dtr(struct crypt_config *cc) in crypt_iv_lmk_dtr() argument
447 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_dtr()
457 static int crypt_iv_lmk_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_lmk_ctr() argument
460 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_ctr()
462 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_lmk_ctr()
475 if (cc->key_parts == cc->tfms_count) { in crypt_iv_lmk_ctr()
482 crypt_iv_lmk_dtr(cc); in crypt_iv_lmk_ctr()
490 static int crypt_iv_lmk_init(struct crypt_config *cc) in crypt_iv_lmk_init() argument
492 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_init()
493 int subkey_size = cc->key_size / cc->key_parts; in crypt_iv_lmk_init()
497 memcpy(lmk->seed, cc->key + (cc->tfms_count * subkey_size), in crypt_iv_lmk_init()
503 static int crypt_iv_lmk_wipe(struct crypt_config *cc) in crypt_iv_lmk_wipe() argument
505 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_wipe()
513 static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_one() argument
517 struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; in crypt_iv_lmk_one()
556 memcpy(iv, &md5state.hash, cc->iv_size); in crypt_iv_lmk_one()
561 static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_gen() argument
569 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_lmk_gen()
571 r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset); in crypt_iv_lmk_gen()
574 memset(iv, 0, cc->iv_size); in crypt_iv_lmk_gen()
579 static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv, in crypt_iv_lmk_post() argument
589 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_lmk_post()
591 r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); in crypt_iv_lmk_post()
595 crypto_xor(dst + sg->offset, iv, cc->iv_size); in crypt_iv_lmk_post()
601 static void crypt_iv_tcw_dtr(struct crypt_config *cc) in crypt_iv_tcw_dtr() argument
603 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_dtr()
615 static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_tcw_ctr() argument
618 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_ctr()
620 if (cc->sector_size != (1 << SECTOR_SHIFT)) { in crypt_iv_tcw_ctr()
625 if (cc->key_size <= (cc->iv_size + TCW_WHITENING_SIZE)) { in crypt_iv_tcw_ctr()
637 tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL); in crypt_iv_tcw_ctr()
640 crypt_iv_tcw_dtr(cc); in crypt_iv_tcw_ctr()
648 static int crypt_iv_tcw_init(struct crypt_config *cc) in crypt_iv_tcw_init() argument
650 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_init()
651 int key_offset = cc->key_size - cc->iv_size - TCW_WHITENING_SIZE; in crypt_iv_tcw_init()
653 memcpy(tcw->iv_seed, &cc->key[key_offset], cc->iv_size); in crypt_iv_tcw_init()
654 memcpy(tcw->whitening, &cc->key[key_offset + cc->iv_size], in crypt_iv_tcw_init()
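
The tcw size check and the two memcpy() calls above imply the key layout for this mode: the per-volume IV seed and the whitening block sit at the tail of the supplied key, so the cipher key proper must be strictly larger than iv_size + TCW_WHITENING_SIZE. A sketch of that split; the 16-byte whitening size is stated here as an assumption matching dm-crypt's constant.

#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define TCW_WHITENING_SIZE 16   /* assumption: matches dm-crypt's constant */

/* Split a tcw key into cipher key, iv_seed and whitening, following the
 * key_offset arithmetic shown above.  Returns -1 if the key is too short,
 * the same condition crypt_iv_tcw_ctr() rejects. */
static int tcw_split_key(const uint8_t *key, unsigned key_size,
			 unsigned iv_size,
			 uint8_t *iv_seed, uint8_t *whitening,
			 unsigned *cipher_key_size)
{
	unsigned tail = iv_size + TCW_WHITENING_SIZE;

	if (key_size <= tail)
		return -1;                       /* "wrong key size" case */

	*cipher_key_size = key_size - tail;      /* == key_offset */
	memcpy(iv_seed,   key + *cipher_key_size, iv_size);
	memcpy(whitening, key + *cipher_key_size + iv_size, TCW_WHITENING_SIZE);
	return 0;
}

int main(void)
{
	uint8_t key[32 + 16 + 16] = { 0 };       /* e.g. 32-byte cipher key + seed + whitening */
	uint8_t seed[16], white[16];
	unsigned ck;

	if (!tcw_split_key(key, sizeof(key), 16, seed, white, &ck))
		printf("cipher key bytes: %u\n", ck);   /* 32 */
	return 0;
}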
660 static int crypt_iv_tcw_wipe(struct crypt_config *cc) in crypt_iv_tcw_wipe() argument
662 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_wipe()
664 memset(tcw->iv_seed, 0, cc->iv_size); in crypt_iv_tcw_wipe()
670 static int crypt_iv_tcw_whitening(struct crypt_config *cc, in crypt_iv_tcw_whitening() argument
674 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_whitening()
702 static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_gen() argument
706 struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; in crypt_iv_tcw_gen()
713 sg = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_tcw_gen()
715 r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); in crypt_iv_tcw_gen()
721 if (cc->iv_size > 8) in crypt_iv_tcw_gen()
723 cc->iv_size - 8); in crypt_iv_tcw_gen()
728 static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv, in crypt_iv_tcw_post() argument
739 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_tcw_post()
741 r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); in crypt_iv_tcw_post()
747 static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_random_gen() argument
751 get_random_bytes(iv, cc->iv_size); in crypt_iv_random_gen()
755 static int crypt_iv_eboiv_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_eboiv_ctr() argument
758 if (crypt_integrity_aead(cc)) { in crypt_iv_eboiv_ctr()
763 if (crypto_skcipher_blocksize(any_tfm(cc)) != cc->iv_size) { in crypt_iv_eboiv_ctr()
771 static int crypt_iv_eboiv_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_eboiv_gen() argument
774 struct crypto_skcipher *tfm = any_tfm(cc); in crypt_iv_eboiv_gen()
785 req = kmalloc(reqsize + cc->iv_size, GFP_NOIO); in crypt_iv_eboiv_gen()
792 memset(buf, 0, cc->iv_size); in crypt_iv_eboiv_gen()
793 *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_eboiv_gen()
795 sg_init_one(&src, page_address(ZERO_PAGE(0)), cc->iv_size); in crypt_iv_eboiv_gen()
796 sg_init_one(&dst, iv, cc->iv_size); in crypt_iv_eboiv_gen()
797 skcipher_request_set_crypt(req, &src, &dst, cc->iv_size, buf); in crypt_iv_eboiv_gen()
805 static void crypt_iv_elephant_dtr(struct crypt_config *cc) in crypt_iv_elephant_dtr() argument
807 struct iv_elephant_private *elephant = &cc->iv_gen_private.elephant; in crypt_iv_elephant_dtr()
813 static int crypt_iv_elephant_ctr(struct crypt_config *cc, struct dm_target *ti, in crypt_iv_elephant_ctr() argument
816 struct iv_elephant_private *elephant = &cc->iv_gen_private.elephant; in crypt_iv_elephant_ctr()
827 r = crypt_iv_eboiv_ctr(cc, ti, NULL); in crypt_iv_elephant_ctr()
829 crypt_iv_elephant_dtr(cc); in crypt_iv_elephant_ctr()
977 static int crypt_iv_elephant(struct crypt_config *cc, struct dm_crypt_request *dmreq) in crypt_iv_elephant() argument
979 struct iv_elephant_private *elephant = &cc->iv_gen_private.elephant; in crypt_iv_elephant()
995 *(__le64 *)es = cpu_to_le64(dmreq->iv_sector * cc->sector_size); in crypt_iv_elephant()
1013 sg = crypt_get_sg_data(cc, dmreq->sg_out); in crypt_iv_elephant()
1019 sg2 = crypt_get_sg_data(cc, dmreq->sg_in); in crypt_iv_elephant()
1021 memcpy(data_offset, data2 + sg2->offset, cc->sector_size); in crypt_iv_elephant()
1026 diffuser_disk_to_cpu((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1027 diffuser_b_decrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1028 diffuser_a_decrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1029 diffuser_cpu_to_disk((__le32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1032 for (i = 0; i < (cc->sector_size / 32); i++) in crypt_iv_elephant()
1036 diffuser_disk_to_cpu((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1037 diffuser_a_encrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1038 diffuser_b_encrypt((u32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1039 diffuser_cpu_to_disk((__le32 *)data_offset, cc->sector_size / sizeof(u32)); in crypt_iv_elephant()
1050 static int crypt_iv_elephant_gen(struct crypt_config *cc, u8 *iv, in crypt_iv_elephant_gen() argument
1056 r = crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_gen()
1061 return crypt_iv_eboiv_gen(cc, iv, dmreq); in crypt_iv_elephant_gen()
1064 static int crypt_iv_elephant_post(struct crypt_config *cc, u8 *iv, in crypt_iv_elephant_post() argument
1068 return crypt_iv_elephant(cc, dmreq); in crypt_iv_elephant_post()
1073 static int crypt_iv_elephant_init(struct crypt_config *cc) in crypt_iv_elephant_init() argument
1075 struct iv_elephant_private *elephant = &cc->iv_gen_private.elephant; in crypt_iv_elephant_init()
1076 int key_offset = cc->key_size - cc->key_extra_size; in crypt_iv_elephant_init()
1078 return crypto_skcipher_setkey(elephant->tfm, &cc->key[key_offset], cc->key_extra_size); in crypt_iv_elephant_init()
1081 static int crypt_iv_elephant_wipe(struct crypt_config *cc) in crypt_iv_elephant_wipe() argument
1083 struct iv_elephant_private *elephant = &cc->iv_gen_private.elephant; in crypt_iv_elephant_wipe()
1086 memset(key, 0, cc->key_extra_size); in crypt_iv_elephant_wipe()
1087 return crypto_skcipher_setkey(elephant->tfm, key, cc->key_extra_size); in crypt_iv_elephant_wipe()
1155 static bool crypt_integrity_aead(struct crypt_config *cc) in crypt_integrity_aead() argument
1157 return test_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags); in crypt_integrity_aead()
1160 static bool crypt_integrity_hmac(struct crypt_config *cc) in crypt_integrity_hmac() argument
1162 return crypt_integrity_aead(cc) && cc->key_mac_size; in crypt_integrity_hmac()
1166 static struct scatterlist *crypt_get_sg_data(struct crypt_config *cc, in crypt_get_sg_data() argument
1169 if (unlikely(crypt_integrity_aead(cc))) in crypt_get_sg_data()
1181 if (!bio_sectors(bio) || !io->cc->tuple_size) in dm_crypt_integrity_io_alloc()
1188 tag_len = io->cc->tuple_size * (bio_sectors(bio) >> io->cc->sector_shift); in dm_crypt_integrity_io_alloc()
1190 bip->bip_iter.bi_sector = io->cc->start + io->sector; in dm_crypt_integrity_io_alloc()
1200 static int crypt_integrity_ctr(struct crypt_config *cc, struct dm_target *ti) in crypt_integrity_ctr() argument
1203 struct blk_integrity *bi = blk_get_integrity(cc->dev->bdev->bd_disk); in crypt_integrity_ctr()
1212 if (bi->tuple_size < cc->used_tag_size) { in crypt_integrity_ctr()
1216 cc->tuple_size = bi->tuple_size; in crypt_integrity_ctr()
1217 if (1 << bi->interval_exp != cc->sector_size) { in crypt_integrity_ctr()
1222 if (crypt_integrity_aead(cc)) { in crypt_integrity_ctr()
1223 cc->integrity_tag_size = cc->used_tag_size - cc->integrity_iv_size; in crypt_integrity_ctr()
1225 cc->integrity_tag_size, cc->integrity_iv_size); in crypt_integrity_ctr()
1227 if (crypto_aead_setauthsize(any_tfm_aead(cc), cc->integrity_tag_size)) { in crypt_integrity_ctr()
1231 } else if (cc->integrity_iv_size) in crypt_integrity_ctr()
1233 cc->integrity_iv_size); in crypt_integrity_ctr()
1235 if ((cc->integrity_tag_size + cc->integrity_iv_size) > cc->tuple_size) { in crypt_integrity_ctr()
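
crypt_integrity_ctr() cross-checks dm-crypt's expectations against what the underlying dm-integrity device advertises: the metadata tuple must hold the requested tag, the protection interval must equal the encryption sector size, and (for AEAD) authentication tag plus stored IV must fit in the tuple. A standalone sketch of those checks; the struct and the error strings are illustrative, not the kernel's.

#include <stdio.h>

/* Geometry checks in the spirit of crypt_integrity_ctr() above (AEAD case). */
struct integrity_geom {
	unsigned tuple_size;        /* metadata bytes per sector, from dm-integrity */
	unsigned interval;          /* protection interval in bytes                 */
	unsigned used_tag_size;     /* tag bytes requested via integrity:<n>:...    */
	unsigned integrity_iv_size; /* per-sector stored IV bytes (0 if none)       */
	unsigned sector_size;       /* dm-crypt encryption sector size              */
};

static int check_integrity_geom(const struct integrity_geom *g)
{
	unsigned tag = g->used_tag_size - g->integrity_iv_size;  /* auth tag share */

	if (g->tuple_size < g->used_tag_size) {
		fprintf(stderr, "integrity tuple too small for requested tag\n");
		return -1;
	}
	if (g->interval != g->sector_size) {
		fprintf(stderr, "integrity interval != encryption sector size\n");
		return -1;
	}
	if (tag + g->integrity_iv_size > g->tuple_size) {
		fprintf(stderr, "tag + stored IV do not fit in the tuple\n");
		return -1;
	}
	return 0;
}

int main(void)
{
	struct integrity_geom g = {
		.tuple_size = 28, .interval = 512, .used_tag_size = 28,
		.integrity_iv_size = 12, .sector_size = 512,
	};
	return check_integrity_geom(&g) ? 1 : 0;
}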
1247 static void crypt_convert_init(struct crypt_config *cc, in crypt_convert_init() argument
1258 ctx->cc_sector = sector + cc->iv_offset; in crypt_convert_init()
1262 static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc, in dmreq_of_req() argument
1265 return (struct dm_crypt_request *)((char *)req + cc->dmreq_start); in dmreq_of_req()
1268 static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq) in req_of_dmreq() argument
1270 return (void *)((char *)dmreq - cc->dmreq_start); in req_of_dmreq()
1273 static u8 *iv_of_dmreq(struct crypt_config *cc, in iv_of_dmreq() argument
1276 if (crypt_integrity_aead(cc)) in iv_of_dmreq()
1278 crypto_aead_alignmask(any_tfm_aead(cc)) + 1); in iv_of_dmreq()
1281 crypto_skcipher_alignmask(any_tfm(cc)) + 1); in iv_of_dmreq()
1284 static u8 *org_iv_of_dmreq(struct crypt_config *cc, in org_iv_of_dmreq() argument
1287 return iv_of_dmreq(cc, dmreq) + cc->iv_size; in org_iv_of_dmreq()
1290 static __le64 *org_sector_of_dmreq(struct crypt_config *cc, in org_sector_of_dmreq() argument
1293 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; in org_sector_of_dmreq()
1298 static unsigned int *org_tag_of_dmreq(struct crypt_config *cc, in org_tag_of_dmreq() argument
1301 u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + in org_tag_of_dmreq()
1302 cc->iv_size + sizeof(uint64_t); in org_tag_of_dmreq()
1307 static void *tag_from_dmreq(struct crypt_config *cc, in tag_from_dmreq() argument
1313 return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) * in tag_from_dmreq()
1314 cc->tuple_size]; in tag_from_dmreq()
1317 static void *iv_tag_from_dmreq(struct crypt_config *cc, in iv_tag_from_dmreq() argument
1320 return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size; in iv_tag_from_dmreq()
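
iv_of_dmreq() and the org_*_of_dmreq()/*_tag_from_dmreq() helpers above carve a fixed scratch layout out of every request: an aligned working IV, a pristine copy of it for the post hook, the original sector number as a __le64, and a tag index used to address integrity_metadata in tuple_size steps. The sketch below just computes those offsets; alignment and sizes are plain parameters here, where the kernel takes them from the crypto tfm and crypt_config.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/* Per-request scratch layout implied by iv_of_dmreq() and friends:
 *   [ cipher request | dm_crypt_request | pad | iv | org_iv | org_sector | org_tag ]
 * Offsets are measured from the end of dm_crypt_request; the aligned base
 * is what iv_of_dmreq() returns. */
struct dmreq_layout {
	size_t iv;         /* working IV                       */
	size_t org_iv;     /* pristine IV, kept for ->post()   */
	size_t org_sector; /* original sector as a __le64      */
	size_t org_tag;    /* tag index as an unsigned int     */
	size_t total;      /* scratch bytes including padding  */
};

static size_t align_up(size_t off, size_t align)
{
	return (off + align - 1) & ~(align - 1);   /* align must be a power of 2 */
}

static struct dmreq_layout compute_dmreq_layout(size_t after_req, size_t align,
						unsigned iv_size)
{
	struct dmreq_layout l;
	size_t base = align_up(after_req, align);

	l.iv         = base;
	l.org_iv     = base + iv_size;
	l.org_sector = base + 2 * (size_t)iv_size;
	l.org_tag    = l.org_sector + sizeof(uint64_t);
	l.total      = l.org_tag + sizeof(unsigned int) - after_req;
	return l;
}

int main(void)
{
	/* e.g. 16-byte IV, 16-byte alignment, scratch starting at offset 200 */
	struct dmreq_layout l = compute_dmreq_layout(200, 16, 16);

	printf("iv@%zu org_iv@%zu sector@%zu tag@%zu (+%zu bytes)\n",
	       l.iv, l.org_iv, l.org_sector, l.org_tag, l.total);
	return 0;
}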
1323 static int crypt_convert_block_aead(struct crypt_config *cc, in crypt_convert_block_aead() argument
1335 BUG_ON(cc->integrity_iv_size && cc->integrity_iv_size != cc->iv_size); in crypt_convert_block_aead()
1338 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_aead()
1341 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_aead()
1343 if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) in crypt_convert_block_aead()
1344 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_aead()
1347 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_aead()
1349 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1350 *sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset); in crypt_convert_block_aead()
1352 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1353 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_aead()
1354 tag = tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1355 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_aead()
1364 sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1365 sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_aead()
1366 sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1370 sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); in crypt_convert_block_aead()
1371 sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_aead()
1372 sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size); in crypt_convert_block_aead()
1374 if (cc->iv_gen_ops) { in crypt_convert_block_aead()
1376 if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) { in crypt_convert_block_aead()
1377 memcpy(org_iv, tag_iv, cc->iv_size); in crypt_convert_block_aead()
1379 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_aead()
1383 if (cc->integrity_iv_size) in crypt_convert_block_aead()
1384 memcpy(tag_iv, org_iv, cc->iv_size); in crypt_convert_block_aead()
1387 memcpy(iv, org_iv, cc->iv_size); in crypt_convert_block_aead()
1390 aead_request_set_ad(req, sizeof(uint64_t) + cc->iv_size); in crypt_convert_block_aead()
1393 cc->sector_size, iv); in crypt_convert_block_aead()
1395 if (cc->integrity_tag_size + cc->integrity_iv_size != cc->tuple_size) in crypt_convert_block_aead()
1396 memset(tag + cc->integrity_tag_size + cc->integrity_iv_size, 0, in crypt_convert_block_aead()
1397 cc->tuple_size - (cc->integrity_tag_size + cc->integrity_iv_size)); in crypt_convert_block_aead()
1400 cc->sector_size + cc->integrity_tag_size, iv); in crypt_convert_block_aead()
1416 if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) in crypt_convert_block_aead()
1417 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_aead()
1419 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_aead()
1420 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_aead()
1425 static int crypt_convert_block_skcipher(struct crypt_config *cc, in crypt_convert_block_skcipher() argument
1439 if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) in crypt_convert_block_skcipher()
1442 dmreq = dmreq_of_req(cc, req); in crypt_convert_block_skcipher()
1444 if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) in crypt_convert_block_skcipher()
1445 dmreq->iv_sector >>= cc->sector_shift; in crypt_convert_block_skcipher()
1448 *org_tag_of_dmreq(cc, dmreq) = tag_offset; in crypt_convert_block_skcipher()
1450 iv = iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1451 org_iv = org_iv_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1452 tag_iv = iv_tag_from_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1454 sector = org_sector_of_dmreq(cc, dmreq); in crypt_convert_block_skcipher()
1455 *sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset); in crypt_convert_block_skcipher()
1462 sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset); in crypt_convert_block_skcipher()
1465 sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset); in crypt_convert_block_skcipher()
1467 if (cc->iv_gen_ops) { in crypt_convert_block_skcipher()
1469 if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) { in crypt_convert_block_skcipher()
1470 memcpy(org_iv, tag_iv, cc->integrity_iv_size); in crypt_convert_block_skcipher()
1472 r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1476 if (test_bit(CRYPT_ENCRYPT_PREPROCESS, &cc->cipher_flags)) in crypt_convert_block_skcipher()
1479 if (cc->integrity_iv_size) in crypt_convert_block_skcipher()
1480 memcpy(tag_iv, org_iv, cc->integrity_iv_size); in crypt_convert_block_skcipher()
1483 memcpy(iv, org_iv, cc->iv_size); in crypt_convert_block_skcipher()
1486 skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv); in crypt_convert_block_skcipher()
1493 if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) in crypt_convert_block_skcipher()
1494 r = cc->iv_gen_ops->post(cc, org_iv, dmreq); in crypt_convert_block_skcipher()
1496 bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); in crypt_convert_block_skcipher()
1497 bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); in crypt_convert_block_skcipher()
1504 static int crypt_alloc_req_skcipher(struct crypt_config *cc, in crypt_alloc_req_skcipher() argument
1507 unsigned int key_index = ctx->cc_sector & (cc->tfms_count - 1); in crypt_alloc_req_skcipher()
1510 ctx->r.req = mempool_alloc(&cc->req_pool, in_interrupt() ? GFP_ATOMIC : GFP_NOIO); in crypt_alloc_req_skcipher()
1515 skcipher_request_set_tfm(ctx->r.req, cc->cipher_tfm.tfms[key_index]); in crypt_alloc_req_skcipher()
1523 kcryptd_async_done, dmreq_of_req(cc, ctx->r.req)); in crypt_alloc_req_skcipher()
1528 static int crypt_alloc_req_aead(struct crypt_config *cc, in crypt_alloc_req_aead() argument
1532 ctx->r.req_aead = mempool_alloc(&cc->req_pool, in_interrupt() ? GFP_ATOMIC : GFP_NOIO); in crypt_alloc_req_aead()
1537 aead_request_set_tfm(ctx->r.req_aead, cc->cipher_tfm.tfms_aead[0]); in crypt_alloc_req_aead()
1545 kcryptd_async_done, dmreq_of_req(cc, ctx->r.req_aead)); in crypt_alloc_req_aead()
1550 static int crypt_alloc_req(struct crypt_config *cc, in crypt_alloc_req() argument
1553 if (crypt_integrity_aead(cc)) in crypt_alloc_req()
1554 return crypt_alloc_req_aead(cc, ctx); in crypt_alloc_req()
1556 return crypt_alloc_req_skcipher(cc, ctx); in crypt_alloc_req()
1559 static void crypt_free_req_skcipher(struct crypt_config *cc, in crypt_free_req_skcipher() argument
1562 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req_skcipher()
1565 mempool_free(req, &cc->req_pool); in crypt_free_req_skcipher()
1568 static void crypt_free_req_aead(struct crypt_config *cc, in crypt_free_req_aead() argument
1571 struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); in crypt_free_req_aead()
1574 mempool_free(req, &cc->req_pool); in crypt_free_req_aead()
1577 static void crypt_free_req(struct crypt_config *cc, void *req, struct bio *base_bio) in crypt_free_req() argument
1579 if (crypt_integrity_aead(cc)) in crypt_free_req()
1580 crypt_free_req_aead(cc, req, base_bio); in crypt_free_req()
1582 crypt_free_req_skcipher(cc, req, base_bio); in crypt_free_req()
1588 static blk_status_t crypt_convert(struct crypt_config *cc, in crypt_convert() argument
1592 unsigned int sector_step = cc->sector_size >> SECTOR_SHIFT; in crypt_convert()
1605 r = crypt_alloc_req(cc, ctx); in crypt_convert()
1613 if (crypt_integrity_aead(cc)) in crypt_convert()
1614 r = crypt_convert_block_aead(cc, ctx, ctx->r.req_aead, tag_offset); in crypt_convert()
1616 r = crypt_convert_block_skcipher(cc, ctx, ctx->r.req, tag_offset); in crypt_convert()
1682 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone);
1706 struct crypt_config *cc = io->cc; in crypt_alloc_buffer() local
1715 mutex_lock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
1717 clone = bio_alloc_bioset(cc->dev->bdev, nr_iovecs, io->base_bio->bi_opf, in crypt_alloc_buffer()
1718 GFP_NOIO, &cc->bs); in crypt_alloc_buffer()
1732 if (unlikely(percpu_counter_read_positive(&cc->n_allocated_pages) + in crypt_alloc_buffer()
1739 percpu_counter_add(&cc->n_allocated_pages, 1 << order); in crypt_alloc_buffer()
1746 pages = mempool_alloc(&cc->page_pool, gfp_mask); in crypt_alloc_buffer()
1748 crypt_free_buffer_pages(cc, clone); in crypt_alloc_buffer()
1763 crypt_free_buffer_pages(cc, clone); in crypt_alloc_buffer()
1769 mutex_unlock(&cc->bio_alloc_lock); in crypt_alloc_buffer()
1774 static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone) in crypt_free_buffer_pages() argument
1781 percpu_counter_sub(&cc->n_allocated_pages, in crypt_free_buffer_pages()
1785 mempool_free(&fi.folio->page, &cc->page_pool); in crypt_free_buffer_pages()
1791 static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc, in crypt_io_init() argument
1794 io->cc = cc; in crypt_io_init()
1819 struct crypt_config *cc = io->cc; in crypt_dec_pending() local
1827 cc->used_tag_size && bio_data_dir(base_bio) == READ) { in crypt_dec_pending()
1836 crypt_free_req(cc, io->ctx.r.req, base_bio); in crypt_dec_pending()
1839 mempool_free(io->integrity_metadata, &io->cc->tag_pool); in crypt_dec_pending()
1868 struct crypt_config *cc = io->cc; in crypt_endio() local
1881 crypt_free_buffer_pages(cc, clone); in crypt_endio()
1900 struct crypt_config *cc = io->cc; in kcryptd_io_read() local
1912 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_io_read()
1913 crypt_convert_init(cc, &io->ctx, clone, clone, io->sector); in kcryptd_io_read()
1925 clone = bio_alloc_clone(cc->dev->bdev, io->base_bio, gfp, &cc->bs); in kcryptd_io_read()
1933 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_io_read()
1957 struct crypt_config *cc = io->cc; in kcryptd_queue_read() local
1960 queue_work(cc->io_queue, &io->work); in kcryptd_queue_read()
1974 struct crypt_config *cc = data; in dmcrypt_write() local
1981 spin_lock_irq(&cc->write_thread_lock); in dmcrypt_write()
1984 if (!RB_EMPTY_ROOT(&cc->write_tree)) in dmcrypt_write()
1989 spin_unlock_irq(&cc->write_thread_lock); in dmcrypt_write()
1998 spin_lock_irq(&cc->write_thread_lock); in dmcrypt_write()
2002 write_tree = cc->write_tree; in dmcrypt_write()
2003 cc->write_tree = RB_ROOT; in dmcrypt_write()
2004 spin_unlock_irq(&cc->write_thread_lock); in dmcrypt_write()
2027 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_io_submit() local
2033 crypt_free_buffer_pages(cc, clone); in kcryptd_crypt_write_io_submit()
2042 clone->bi_iter.bi_sector = cc->start + io->sector; in kcryptd_crypt_write_io_submit()
2044 if ((likely(!async) && test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) || in kcryptd_crypt_write_io_submit()
2045 test_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags)) { in kcryptd_crypt_write_io_submit()
2050 spin_lock_irqsave(&cc->write_thread_lock, flags); in kcryptd_crypt_write_io_submit()
2051 if (RB_EMPTY_ROOT(&cc->write_tree)) in kcryptd_crypt_write_io_submit()
2052 wake_up_process(cc->write_thread); in kcryptd_crypt_write_io_submit()
2053 rbp = &cc->write_tree.rb_node; in kcryptd_crypt_write_io_submit()
2064 rb_insert_color(&io->rb_node, &cc->write_tree); in kcryptd_crypt_write_io_submit()
2065 spin_unlock_irqrestore(&cc->write_thread_lock, flags); in kcryptd_crypt_write_io_submit()
2068 static bool kcryptd_crypt_write_inline(struct crypt_config *cc, in kcryptd_crypt_write_inline() argument
2072 if (!test_bit(DM_CRYPT_WRITE_INLINE, &cc->flags)) in kcryptd_crypt_write_inline()
2092 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_continue() local
2101 r = crypt_convert(cc, &io->ctx, true, false); in kcryptd_crypt_write_continue()
2105 if (!crypt_finished && kcryptd_crypt_write_inline(cc, ctx)) { in kcryptd_crypt_write_continue()
2122 struct crypt_config *cc = io->cc; in kcryptd_crypt_write_convert() local
2133 crypt_convert_init(cc, ctx, NULL, io->base_bio, sector); in kcryptd_crypt_write_convert()
2144 if (crypt_integrity_aead(cc)) { in kcryptd_crypt_write_convert()
2153 r = crypt_convert(cc, ctx, in kcryptd_crypt_write_convert()
2154 test_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags), true); in kcryptd_crypt_write_convert()
2162 queue_work(cc->crypt_queue, &io->work); in kcryptd_crypt_write_convert()
2168 if (!crypt_finished && kcryptd_crypt_write_inline(cc, ctx)) { in kcryptd_crypt_write_convert()
2191 crypt_free_buffer_pages(io->cc, io->ctx.bio_in); in kcryptd_crypt_read_done()
2200 struct crypt_config *cc = io->cc; in kcryptd_crypt_read_continue() local
2206 r = crypt_convert(cc, &io->ctx, true, false); in kcryptd_crypt_read_continue()
2218 struct crypt_config *cc = io->cc; in kcryptd_crypt_read_convert() local
2224 io->ctx.cc_sector = io->sector + cc->iv_offset; in kcryptd_crypt_read_convert()
2225 r = crypt_convert(cc, &io->ctx, in kcryptd_crypt_read_convert()
2226 test_bit(DM_CRYPT_NO_READ_WORKQUEUE, &cc->flags), true); in kcryptd_crypt_read_convert()
2228 crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio, in kcryptd_crypt_read_convert()
2231 r = crypt_convert(cc, &io->ctx, in kcryptd_crypt_read_convert()
2232 test_bit(DM_CRYPT_NO_READ_WORKQUEUE, &cc->flags), true); in kcryptd_crypt_read_convert()
2240 queue_work(cc->crypt_queue, &io->work); in kcryptd_crypt_read_convert()
2257 struct crypt_config *cc = io->cc; in kcryptd_async_done() local
2269 if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post) in kcryptd_async_done()
2270 error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq); in kcryptd_async_done()
2273 sector_t s = le64_to_cpu(*org_sector_of_dmreq(cc, dmreq)); in kcryptd_async_done()
2286 crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); in kcryptd_async_done()
2300 if (kcryptd_crypt_write_inline(cc, ctx)) { in kcryptd_async_done()
2320 struct crypt_config *cc = io->cc; in kcryptd_queue_crypt() local
2322 if ((bio_data_dir(io->base_bio) == READ && test_bit(DM_CRYPT_NO_READ_WORKQUEUE, &cc->flags)) || in kcryptd_queue_crypt()
2323 (bio_data_dir(io->base_bio) == WRITE && test_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags))) { in kcryptd_queue_crypt()
2340 queue_work(cc->crypt_queue, &io->work); in kcryptd_queue_crypt()
2343 static void crypt_free_tfms_aead(struct crypt_config *cc) in crypt_free_tfms_aead() argument
2345 if (!cc->cipher_tfm.tfms_aead) in crypt_free_tfms_aead()
2348 if (cc->cipher_tfm.tfms_aead[0] && !IS_ERR(cc->cipher_tfm.tfms_aead[0])) { in crypt_free_tfms_aead()
2349 crypto_free_aead(cc->cipher_tfm.tfms_aead[0]); in crypt_free_tfms_aead()
2350 cc->cipher_tfm.tfms_aead[0] = NULL; in crypt_free_tfms_aead()
2353 kfree(cc->cipher_tfm.tfms_aead); in crypt_free_tfms_aead()
2354 cc->cipher_tfm.tfms_aead = NULL; in crypt_free_tfms_aead()
2357 static void crypt_free_tfms_skcipher(struct crypt_config *cc) in crypt_free_tfms_skcipher() argument
2361 if (!cc->cipher_tfm.tfms) in crypt_free_tfms_skcipher()
2364 for (i = 0; i < cc->tfms_count; i++) in crypt_free_tfms_skcipher()
2365 if (cc->cipher_tfm.tfms[i] && !IS_ERR(cc->cipher_tfm.tfms[i])) { in crypt_free_tfms_skcipher()
2366 crypto_free_skcipher(cc->cipher_tfm.tfms[i]); in crypt_free_tfms_skcipher()
2367 cc->cipher_tfm.tfms[i] = NULL; in crypt_free_tfms_skcipher()
2370 kfree(cc->cipher_tfm.tfms); in crypt_free_tfms_skcipher()
2371 cc->cipher_tfm.tfms = NULL; in crypt_free_tfms_skcipher()
2374 static void crypt_free_tfms(struct crypt_config *cc) in crypt_free_tfms() argument
2376 if (crypt_integrity_aead(cc)) in crypt_free_tfms()
2377 crypt_free_tfms_aead(cc); in crypt_free_tfms()
2379 crypt_free_tfms_skcipher(cc); in crypt_free_tfms()
2382 static int crypt_alloc_tfms_skcipher(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms_skcipher() argument
2387 cc->cipher_tfm.tfms = kcalloc(cc->tfms_count, in crypt_alloc_tfms_skcipher()
2390 if (!cc->cipher_tfm.tfms) in crypt_alloc_tfms_skcipher()
2393 for (i = 0; i < cc->tfms_count; i++) { in crypt_alloc_tfms_skcipher()
2394 cc->cipher_tfm.tfms[i] = crypto_alloc_skcipher(ciphermode, 0, in crypt_alloc_tfms_skcipher()
2396 if (IS_ERR(cc->cipher_tfm.tfms[i])) { in crypt_alloc_tfms_skcipher()
2397 err = PTR_ERR(cc->cipher_tfm.tfms[i]); in crypt_alloc_tfms_skcipher()
2398 crypt_free_tfms(cc); in crypt_alloc_tfms_skcipher()
2409 crypto_skcipher_alg(any_tfm(cc))->base.cra_driver_name); in crypt_alloc_tfms_skcipher()
2413 static int crypt_alloc_tfms_aead(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms_aead() argument
2417 cc->cipher_tfm.tfms = kmalloc(sizeof(struct crypto_aead *), GFP_KERNEL); in crypt_alloc_tfms_aead()
2418 if (!cc->cipher_tfm.tfms) in crypt_alloc_tfms_aead()
2421 cc->cipher_tfm.tfms_aead[0] = crypto_alloc_aead(ciphermode, 0, in crypt_alloc_tfms_aead()
2423 if (IS_ERR(cc->cipher_tfm.tfms_aead[0])) { in crypt_alloc_tfms_aead()
2424 err = PTR_ERR(cc->cipher_tfm.tfms_aead[0]); in crypt_alloc_tfms_aead()
2425 crypt_free_tfms(cc); in crypt_alloc_tfms_aead()
2430 crypto_aead_alg(any_tfm_aead(cc))->base.cra_driver_name); in crypt_alloc_tfms_aead()
2434 static int crypt_alloc_tfms(struct crypt_config *cc, char *ciphermode) in crypt_alloc_tfms() argument
2436 if (crypt_integrity_aead(cc)) in crypt_alloc_tfms()
2437 return crypt_alloc_tfms_aead(cc, ciphermode); in crypt_alloc_tfms()
2439 return crypt_alloc_tfms_skcipher(cc, ciphermode); in crypt_alloc_tfms()
2442 static unsigned int crypt_subkey_size(struct crypt_config *cc) in crypt_subkey_size() argument
2444 return (cc->key_size - cc->key_extra_size) >> ilog2(cc->tfms_count); in crypt_subkey_size()
2447 static unsigned int crypt_authenckey_size(struct crypt_config *cc) in crypt_authenckey_size() argument
2449 return crypt_subkey_size(cc) + RTA_SPACE(sizeof(struct crypto_authenc_key_param)); in crypt_authenckey_size()
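
crypt_subkey_size() strips the IV-related extra bytes from the supplied key and splits the rest evenly across the tfms (the keycount syntax, e.g. aes-xts-plain64 with several keys); since tfms_count is a power of two, the divide is a right shift by log2. crypt_authenckey_size() then adds rtattr framing for the authenc() key blob, which is left out of this sketch.

#include <stdio.h>

/* Per-tfm key size as in crypt_subkey_size(): strip the IV-related extra
 * bytes, then split what is left evenly across tfms_count ciphers. */
static unsigned subkey_size(unsigned key_size, unsigned key_extra_size,
			    unsigned tfms_count)
{
	unsigned log = 0;

	while ((1u << (log + 1)) <= tfms_count)    /* log2 of a power of two */
		log++;
	return (key_size - key_extra_size) >> log;
}

int main(void)
{
	/* single tfm, 64-byte key, no extra bytes: */
	printf("%u\n", subkey_size(64, 0, 1));     /* 64 */
	/* keycount of 2, same key material: */
	printf("%u\n", subkey_size(64, 0, 2));     /* 32 per tfm */
	return 0;
}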
2474 static int crypt_setkey(struct crypt_config *cc) in crypt_setkey() argument
2480 subkey_size = crypt_subkey_size(cc); in crypt_setkey()
2482 if (crypt_integrity_hmac(cc)) { in crypt_setkey()
2483 if (subkey_size < cc->key_mac_size) in crypt_setkey()
2486 crypt_copy_authenckey(cc->authenc_key, cc->key, in crypt_setkey()
2487 subkey_size - cc->key_mac_size, in crypt_setkey()
2488 cc->key_mac_size); in crypt_setkey()
2491 for (i = 0; i < cc->tfms_count; i++) { in crypt_setkey()
2492 if (crypt_integrity_hmac(cc)) in crypt_setkey()
2493 r = crypto_aead_setkey(cc->cipher_tfm.tfms_aead[i], in crypt_setkey()
2494 cc->authenc_key, crypt_authenckey_size(cc)); in crypt_setkey()
2495 else if (crypt_integrity_aead(cc)) in crypt_setkey()
2496 r = crypto_aead_setkey(cc->cipher_tfm.tfms_aead[i], in crypt_setkey()
2497 cc->key + (i * subkey_size), in crypt_setkey()
2500 r = crypto_skcipher_setkey(cc->cipher_tfm.tfms[i], in crypt_setkey()
2501 cc->key + (i * subkey_size), in crypt_setkey()
2507 if (crypt_integrity_hmac(cc)) in crypt_setkey()
2508 memzero_explicit(cc->authenc_key, crypt_authenckey_size(cc)); in crypt_setkey()
2523 static int set_key_user(struct crypt_config *cc, struct key *key) in set_key_user() argument
2531 if (cc->key_size != ukp->datalen) in set_key_user()
2534 memcpy(cc->key, ukp->data, cc->key_size); in set_key_user()
2539 static int set_key_encrypted(struct crypt_config *cc, struct key *key) in set_key_encrypted() argument
2547 if (cc->key_size != ekp->decrypted_datalen) in set_key_encrypted()
2550 memcpy(cc->key, ekp->decrypted_data, cc->key_size); in set_key_encrypted()
2555 static int set_key_trusted(struct crypt_config *cc, struct key *key) in set_key_trusted() argument
2563 if (cc->key_size != tkp->key_len) in set_key_trusted()
2566 memcpy(cc->key, tkp->key, cc->key_size); in set_key_trusted()
2571 static int crypt_set_keyring_key(struct crypt_config *cc, const char *key_string) in crypt_set_keyring_key() argument
2577 int (*set_key)(struct crypt_config *cc, struct key *key); in crypt_set_keyring_key()
2622 ret = set_key(cc, key); in crypt_set_keyring_key()
2629 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_keyring_key()
2631 ret = crypt_setkey(cc); in crypt_set_keyring_key()
2635 set_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_keyring_key()
2636 kfree_sensitive(cc->key_string); in crypt_set_keyring_key()
2637 cc->key_string = new_key_string; in crypt_set_keyring_key()
2670 static int crypt_set_keyring_key(struct crypt_config *cc, const char *key_string) in crypt_set_keyring_key() argument
2682 static int crypt_set_key(struct crypt_config *cc, char *key) in crypt_set_key() argument
2688 if (!cc->key_size && strcmp(key, "-")) in crypt_set_key()
2693 r = crypt_set_keyring_key(cc, key + 1); in crypt_set_key()
2698 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_key()
2701 kfree_sensitive(cc->key_string); in crypt_set_key()
2702 cc->key_string = NULL; in crypt_set_key()
2705 if (cc->key_size && hex2bin(cc->key, key, cc->key_size) < 0) in crypt_set_key()
2708 r = crypt_setkey(cc); in crypt_set_key()
2710 set_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_set_key()
2719 static int crypt_wipe_key(struct crypt_config *cc) in crypt_wipe_key() argument
2723 clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); in crypt_wipe_key()
2724 get_random_bytes(&cc->key, cc->key_size); in crypt_wipe_key()
2727 if (cc->iv_gen_ops && cc->iv_gen_ops->wipe) { in crypt_wipe_key()
2728 r = cc->iv_gen_ops->wipe(cc); in crypt_wipe_key()
2733 kfree_sensitive(cc->key_string); in crypt_wipe_key()
2734 cc->key_string = NULL; in crypt_wipe_key()
2735 r = crypt_setkey(cc); in crypt_wipe_key()
2736 memset(&cc->key, 0, cc->key_size * sizeof(u8)); in crypt_wipe_key()
2756 struct crypt_config *cc = pool_data; in crypt_page_alloc() local
2764 if (unlikely(percpu_counter_read_positive(&cc->n_allocated_pages) >= dm_crypt_pages_per_client) && in crypt_page_alloc()
2770 percpu_counter_add(&cc->n_allocated_pages, 1); in crypt_page_alloc()
2777 struct crypt_config *cc = pool_data; in crypt_page_free() local
2780 percpu_counter_sub(&cc->n_allocated_pages, 1); in crypt_page_free()
2785 struct crypt_config *cc = ti->private; in crypt_dtr() local
2789 if (!cc) in crypt_dtr()
2792 if (cc->write_thread) in crypt_dtr()
2793 kthread_stop(cc->write_thread); in crypt_dtr()
2795 if (cc->io_queue) in crypt_dtr()
2796 destroy_workqueue(cc->io_queue); in crypt_dtr()
2797 if (cc->crypt_queue) in crypt_dtr()
2798 destroy_workqueue(cc->crypt_queue); in crypt_dtr()
2800 if (cc->workqueue_id) in crypt_dtr()
2801 ida_free(&workqueue_ida, cc->workqueue_id); in crypt_dtr()
2803 crypt_free_tfms(cc); in crypt_dtr()
2805 bioset_exit(&cc->bs); in crypt_dtr()
2807 mempool_exit(&cc->page_pool); in crypt_dtr()
2808 mempool_exit(&cc->req_pool); in crypt_dtr()
2809 mempool_exit(&cc->tag_pool); in crypt_dtr()
2811 WARN_ON(percpu_counter_sum(&cc->n_allocated_pages) != 0); in crypt_dtr()
2812 percpu_counter_destroy(&cc->n_allocated_pages); in crypt_dtr()
2814 if (cc->iv_gen_ops && cc->iv_gen_ops->dtr) in crypt_dtr()
2815 cc->iv_gen_ops->dtr(cc); in crypt_dtr()
2817 if (cc->dev) in crypt_dtr()
2818 dm_put_device(ti, cc->dev); in crypt_dtr()
2820 kfree_sensitive(cc->cipher_string); in crypt_dtr()
2821 kfree_sensitive(cc->key_string); in crypt_dtr()
2822 kfree_sensitive(cc->cipher_auth); in crypt_dtr()
2823 kfree_sensitive(cc->authenc_key); in crypt_dtr()
2825 mutex_destroy(&cc->bio_alloc_lock); in crypt_dtr()
2828 kfree_sensitive(cc); in crypt_dtr()
2841 struct crypt_config *cc = ti->private; in crypt_ctr_ivmode() local
2843 if (crypt_integrity_aead(cc)) in crypt_ctr_ivmode()
2844 cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); in crypt_ctr_ivmode()
2846 cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); in crypt_ctr_ivmode()
2848 if (cc->iv_size) in crypt_ctr_ivmode()
2850 cc->iv_size = max(cc->iv_size, in crypt_ctr_ivmode()
2859 cc->iv_gen_ops = NULL; in crypt_ctr_ivmode()
2861 cc->iv_gen_ops = &crypt_iv_plain_ops; in crypt_ctr_ivmode()
2863 cc->iv_gen_ops = &crypt_iv_plain64_ops; in crypt_ctr_ivmode()
2865 cc->iv_gen_ops = &crypt_iv_plain64be_ops; in crypt_ctr_ivmode()
2867 cc->iv_gen_ops = &crypt_iv_essiv_ops; in crypt_ctr_ivmode()
2869 cc->iv_gen_ops = &crypt_iv_benbi_ops; in crypt_ctr_ivmode()
2871 cc->iv_gen_ops = &crypt_iv_null_ops; in crypt_ctr_ivmode()
2873 cc->iv_gen_ops = &crypt_iv_eboiv_ops; in crypt_ctr_ivmode()
2875 cc->iv_gen_ops = &crypt_iv_elephant_ops; in crypt_ctr_ivmode()
2876 cc->key_parts = 2; in crypt_ctr_ivmode()
2877 cc->key_extra_size = cc->key_size / 2; in crypt_ctr_ivmode()
2878 if (cc->key_extra_size > ELEPHANT_MAX_KEY_SIZE) in crypt_ctr_ivmode()
2880 set_bit(CRYPT_ENCRYPT_PREPROCESS, &cc->cipher_flags); in crypt_ctr_ivmode()
2882 cc->iv_gen_ops = &crypt_iv_lmk_ops; in crypt_ctr_ivmode()
2889 if (cc->key_size % cc->key_parts) { in crypt_ctr_ivmode()
2890 cc->key_parts++; in crypt_ctr_ivmode()
2891 cc->key_extra_size = cc->key_size / cc->key_parts; in crypt_ctr_ivmode()
2894 cc->iv_gen_ops = &crypt_iv_tcw_ops; in crypt_ctr_ivmode()
2895 cc->key_parts += 2; /* IV + whitening */ in crypt_ctr_ivmode()
2896 cc->key_extra_size = cc->iv_size + TCW_WHITENING_SIZE; in crypt_ctr_ivmode()
2898 cc->iv_gen_ops = &crypt_iv_random_ops; in crypt_ctr_ivmode()
2900 cc->integrity_iv_size = cc->iv_size; in crypt_ctr_ivmode()
2914 static int crypt_ctr_auth_cipher(struct crypt_config *cc, char *cipher_api) in crypt_ctr_auth_cipher() argument
2937 if (!test_bit(CRYPT_KEY_MAC_SIZE_SET, &cc->cipher_flags)) in crypt_ctr_auth_cipher()
2938 cc->key_mac_size = crypto_ahash_digestsize(mac); in crypt_ctr_auth_cipher()
2941 cc->authenc_key = kmalloc(crypt_authenckey_size(cc), GFP_KERNEL); in crypt_ctr_auth_cipher()
2942 if (!cc->authenc_key) in crypt_ctr_auth_cipher()
2951 struct crypt_config *cc = ti->private; in crypt_ctr_cipher_new() local
2955 cc->tfms_count = 1; in crypt_ctr_cipher_new()
2979 if (crypt_integrity_aead(cc)) { in crypt_ctr_cipher_new()
2980 ret = crypt_ctr_auth_cipher(cc, cipher_api); in crypt_ctr_cipher_new()
2988 cc->tfms_count = 64; in crypt_ctr_cipher_new()
3004 cc->key_parts = cc->tfms_count; in crypt_ctr_cipher_new()
3007 ret = crypt_alloc_tfms(cc, cipher_api); in crypt_ctr_cipher_new()
3013 if (crypt_integrity_aead(cc)) in crypt_ctr_cipher_new()
3014 cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); in crypt_ctr_cipher_new()
3016 cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); in crypt_ctr_cipher_new()
3024 struct crypt_config *cc = ti->private; in crypt_ctr_cipher_old() local
3030 if (strchr(cipher_in, '(') || crypt_integrity_aead(cc)) { in crypt_ctr_cipher_old()
3044 cc->tfms_count = 1; in crypt_ctr_cipher_old()
3045 else if (sscanf(keycount, "%u%c", &cc->tfms_count, &dummy) != 1 || in crypt_ctr_cipher_old()
3046 !is_power_of_2(cc->tfms_count)) { in crypt_ctr_cipher_old()
3050 cc->key_parts = cc->tfms_count; in crypt_ctr_cipher_old()
3092 ret = crypt_alloc_tfms(cc, cipher_api); in crypt_ctr_cipher_old()
3108 struct crypt_config *cc = ti->private; in crypt_ctr_cipher() local
3112 cc->cipher_string = kstrdup(cipher_in, GFP_KERNEL); in crypt_ctr_cipher()
3113 if (!cc->cipher_string) { in crypt_ctr_cipher()
3131 ret = crypt_set_key(cc, key); in crypt_ctr_cipher()
3138 if (cc->iv_gen_ops && cc->iv_gen_ops->ctr) { in crypt_ctr_cipher()
3139 ret = cc->iv_gen_ops->ctr(cc, ti, ivopts); in crypt_ctr_cipher()
3147 if (cc->iv_gen_ops && cc->iv_gen_ops->init) { in crypt_ctr_cipher()
3148 ret = cc->iv_gen_ops->init(cc); in crypt_ctr_cipher()
3156 if (cc->key_string) in crypt_ctr_cipher()
3157 memset(cc->key, 0, cc->key_size * sizeof(u8)); in crypt_ctr_cipher()
3164 struct crypt_config *cc = ti->private; in crypt_ctr_optional() local
3193 set_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_ctr_optional()
3195 set_bit(DM_CRYPT_HIGH_PRIORITY, &cc->flags); in crypt_ctr_optional()
3198 set_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_ctr_optional()
3200 set_bit(DM_CRYPT_NO_READ_WORKQUEUE, &cc->flags); in crypt_ctr_optional()
3202 set_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags); in crypt_ctr_optional()
3208 cc->used_tag_size = val; in crypt_ctr_optional()
3211 set_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags); in crypt_ctr_optional()
3217 cc->cipher_auth = kstrdup(sval, GFP_KERNEL); in crypt_ctr_optional()
3218 if (!cc->cipher_auth) in crypt_ctr_optional()
3225 cc->key_mac_size = val; in crypt_ctr_optional()
3226 set_bit(CRYPT_KEY_MAC_SIZE_SET, &cc->cipher_flags); in crypt_ctr_optional()
3227 } else if (sscanf(opt_string, "sector_size:%hu%c", &cc->sector_size, &dummy) == 1) { in crypt_ctr_optional()
3228 if (cc->sector_size < (1 << SECTOR_SHIFT) || in crypt_ctr_optional()
3229 cc->sector_size > 4096 || in crypt_ctr_optional()
3230 (cc->sector_size & (cc->sector_size - 1))) { in crypt_ctr_optional()
3234 if (ti->len & ((cc->sector_size >> SECTOR_SHIFT) - 1)) { in crypt_ctr_optional()
3238 cc->sector_shift = __ffs(cc->sector_size) - SECTOR_SHIFT; in crypt_ctr_optional()
3240 set_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags); in crypt_ctr_optional()
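
sector_size is only accepted when it is a power of two between 512 and 4096 bytes and the target length is a multiple of it; sector_shift then records how many 512-byte sectors one encryption sector spans, and iv_large_sectors switches IV numbering to those larger units. A sketch of the validation and shift derivation (the __ffs() loop is a portable stand-in):

#include <stdio.h>
#include <stdint.h>

#define SECTOR_SHIFT 9   /* 512-byte block-layer sectors */

/* Validate a sector_size:<n> option the way crypt_ctr_optional() does and
 * derive sector_shift; returns -1 on the conditions the kernel rejects. */
static int check_sector_size(unsigned sector_size, uint64_t target_len_sectors,
			     unsigned *sector_shift)
{
	unsigned shift = 0;

	if (sector_size < (1u << SECTOR_SHIFT) || sector_size > 4096 ||
	    (sector_size & (sector_size - 1)))
		return -1;                               /* range / power-of-2 check */

	if (target_len_sectors & ((sector_size >> SECTOR_SHIFT) - 1))
		return -1;                               /* device length not aligned */

	while (!((sector_size >> shift) & 1))            /* __ffs() stand-in */
		shift++;
	*sector_shift = shift - SECTOR_SHIFT;            /* 0 for 512, 3 for 4096 */
	return 0;
}

int main(void)
{
	unsigned shift;

	if (!check_sector_size(4096, 8 * 1024 * 1024, &shift))
		printf("sector_shift=%u\n", shift);      /* 3 */
	return 0;
}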
3254 struct crypt_config *cc = ti->private; in crypt_report_zones() local
3256 return dm_report_zones(cc->dev->bdev, cc->start, in crypt_report_zones()
3257 cc->start + dm_target_offset(ti, args->next_sector), in crypt_report_zones()
3270 struct crypt_config *cc; in crypt_ctr() local
3291 cc = kzalloc(struct_size(cc, key, key_size), GFP_KERNEL); in crypt_ctr()
3292 if (!cc) { in crypt_ctr()
3296 cc->key_size = key_size; in crypt_ctr()
3297 cc->sector_size = (1 << SECTOR_SHIFT); in crypt_ctr()
3298 cc->sector_shift = 0; in crypt_ctr()
3300 ti->private = cc; in crypt_ctr()
3307 ret = percpu_counter_init(&cc->n_allocated_pages, 0, GFP_KERNEL); in crypt_ctr()
3322 if (crypt_integrity_aead(cc)) { in crypt_ctr()
3323 cc->dmreq_start = sizeof(struct aead_request); in crypt_ctr()
3324 cc->dmreq_start += crypto_aead_reqsize(any_tfm_aead(cc)); in crypt_ctr()
3325 align_mask = crypto_aead_alignmask(any_tfm_aead(cc)); in crypt_ctr()
3327 cc->dmreq_start = sizeof(struct skcipher_request); in crypt_ctr()
3328 cc->dmreq_start += crypto_skcipher_reqsize(any_tfm(cc)); in crypt_ctr()
3329 align_mask = crypto_skcipher_alignmask(any_tfm(cc)); in crypt_ctr()
3331 cc->dmreq_start = ALIGN(cc->dmreq_start, __alignof__(struct dm_crypt_request)); in crypt_ctr()
3335 iv_size_padding = -(cc->dmreq_start + sizeof(struct dm_crypt_request)) in crypt_ctr()
3348 iv_size_padding + cc->iv_size + in crypt_ctr()
3349 cc->iv_size + in crypt_ctr()
3353 ret = mempool_init_kmalloc_pool(&cc->req_pool, MIN_IOS, cc->dmreq_start + additional_req_size); in crypt_ctr()
3359 cc->per_bio_data_size = ti->per_io_data_size = in crypt_ctr()
3360 ALIGN(sizeof(struct dm_crypt_io) + cc->dmreq_start + additional_req_size, in crypt_ctr()
3363 ret = mempool_init(&cc->page_pool, BIO_MAX_VECS, crypt_page_alloc, crypt_page_free, cc); in crypt_ctr()
3369 ret = bioset_init(&cc->bs, MIN_IOS, 0, BIOSET_NEED_BVECS); in crypt_ctr()
3375 mutex_init(&cc->bio_alloc_lock); in crypt_ctr()
3379 (tmpll & ((cc->sector_size >> SECTOR_SHIFT) - 1))) { in crypt_ctr()
3383 cc->iv_offset = tmpll; in crypt_ctr()
3385 ret = dm_get_device(ti, argv[3], dm_table_get_mode(ti->table), &cc->dev); in crypt_ctr()
3396 cc->start = tmpll; in crypt_ctr()
3398 if (bdev_is_zoned(cc->dev->bdev)) { in crypt_ctr()
3404 set_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags); in crypt_ctr()
3405 set_bit(DM_CRYPT_WRITE_INLINE, &cc->flags); in crypt_ctr()
3422 if (crypt_integrity_aead(cc) || cc->integrity_iv_size) { in crypt_ctr()
3423 ret = crypt_integrity_ctr(cc, ti); in crypt_ctr()
3427 cc->tag_pool_max_sectors = POOL_ENTRY_SIZE / cc->tuple_size; in crypt_ctr()
3428 if (!cc->tag_pool_max_sectors) in crypt_ctr()
3429 cc->tag_pool_max_sectors = 1; in crypt_ctr()
3431 ret = mempool_init_kmalloc_pool(&cc->tag_pool, MIN_IOS, in crypt_ctr()
3432 cc->tag_pool_max_sectors * cc->tuple_size); in crypt_ctr()
3438 cc->tag_pool_max_sectors <<= cc->sector_shift; in crypt_ctr()
3447 cc->workqueue_id = wq_id; in crypt_ctr()
3451 if (test_bit(DM_CRYPT_HIGH_PRIORITY, &cc->flags)) in crypt_ctr()
3454 cc->io_queue = alloc_workqueue("kcryptd_io-%s-%d", common_wq_flags, 1, devname, wq_id); in crypt_ctr()
3455 if (!cc->io_queue) { in crypt_ctr()
3460 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) { in crypt_ctr()
3461 cc->crypt_queue = alloc_workqueue("kcryptd-%s-%d", in crypt_ctr()
3469 cc->crypt_queue = alloc_workqueue("kcryptd-%s-%d", in crypt_ctr()
3473 if (!cc->crypt_queue) { in crypt_ctr()
3478 spin_lock_init(&cc->write_thread_lock); in crypt_ctr()
3479 cc->write_tree = RB_ROOT; in crypt_ctr()
3481 cc->write_thread = kthread_run(dmcrypt_write, cc, "dmcrypt_write/%s", devname); in crypt_ctr()
3482 if (IS_ERR(cc->write_thread)) { in crypt_ctr()
3483 ret = PTR_ERR(cc->write_thread); in crypt_ctr()
3484 cc->write_thread = NULL; in crypt_ctr()
3488 if (test_bit(DM_CRYPT_HIGH_PRIORITY, &cc->flags)) in crypt_ctr()
3489 set_user_nice(cc->write_thread, MIN_NICE); in crypt_ctr()
3507 struct crypt_config *cc = ti->private; in crypt_map() local
3517 bio_set_dev(bio, cc->dev->bdev); in crypt_map()
3519 bio->bi_iter.bi_sector = cc->start + in crypt_map()
3527 max_sectors = get_max_request_size(cc, bio_data_dir(bio) == WRITE); in crypt_map()
3535 if (unlikely((bio->bi_iter.bi_sector & ((cc->sector_size >> SECTOR_SHIFT) - 1)) != 0)) in crypt_map()
3538 if (unlikely(bio->bi_iter.bi_size & (cc->sector_size - 1))) in crypt_map()
3541 io = dm_per_bio_data(bio, cc->per_bio_data_size); in crypt_map()
3542 crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector)); in crypt_map()
3544 if (cc->tuple_size) { in crypt_map()
3545 unsigned int tag_len = cc->tuple_size * (bio_sectors(bio) >> cc->sector_shift); in crypt_map()
3553 if (bio_sectors(bio) > cc->tag_pool_max_sectors) in crypt_map()
3554 dm_accept_partial_bio(bio, cc->tag_pool_max_sectors); in crypt_map()
3555 io->integrity_metadata = mempool_alloc(&cc->tag_pool, GFP_NOIO); in crypt_map()
3560 if (crypt_integrity_aead(cc)) in crypt_map()
3582 struct crypt_config *cc = ti->private; in crypt_status() local
3592 DMEMIT("%s ", cc->cipher_string); in crypt_status()
3594 if (cc->key_size > 0) { in crypt_status()
3595 if (cc->key_string) in crypt_status()
3596 DMEMIT(":%u:%s", cc->key_size, cc->key_string); in crypt_status()
3598 for (i = 0; i < cc->key_size; i++) { in crypt_status()
3599 DMEMIT("%c%c", hex2asc(cc->key[i] >> 4), in crypt_status()
3600 hex2asc(cc->key[i] & 0xf)); in crypt_status()
3606 DMEMIT(" %llu %s %llu", (unsigned long long)cc->iv_offset, in crypt_status()
3607 cc->dev->name, (unsigned long long)cc->start); in crypt_status()
3610 num_feature_args += test_bit(DM_CRYPT_SAME_CPU, &cc->flags); in crypt_status()
3611 num_feature_args += test_bit(DM_CRYPT_HIGH_PRIORITY, &cc->flags); in crypt_status()
3612 num_feature_args += test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); in crypt_status()
3613 num_feature_args += test_bit(DM_CRYPT_NO_READ_WORKQUEUE, &cc->flags); in crypt_status()
3614 num_feature_args += test_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags); in crypt_status()
3615 num_feature_args += !!cc->used_tag_size; in crypt_status()
3616 num_feature_args += cc->sector_size != (1 << SECTOR_SHIFT); in crypt_status()
3617 num_feature_args += test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags); in crypt_status()
3618 num_feature_args += test_bit(CRYPT_KEY_MAC_SIZE_SET, &cc->cipher_flags); in crypt_status()
3623 if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) in crypt_status()
3625 if (test_bit(DM_CRYPT_HIGH_PRIORITY, &cc->flags)) in crypt_status()
3627 if (test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) in crypt_status()
3629 if (test_bit(DM_CRYPT_NO_READ_WORKQUEUE, &cc->flags)) in crypt_status()
3631 if (test_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags)) in crypt_status()
3633 if (cc->used_tag_size) in crypt_status()
3634 DMEMIT(" integrity:%u:%s", cc->used_tag_size, cc->cipher_auth); in crypt_status()
3635 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
3636 DMEMIT(" sector_size:%d", cc->sector_size); in crypt_status()
3637 if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) in crypt_status()
3639 if (test_bit(CRYPT_KEY_MAC_SIZE_SET, &cc->cipher_flags)) in crypt_status()
3640 DMEMIT(" integrity_key_size:%u", cc->key_mac_size); in crypt_status()
3647 DMEMIT(",same_cpu_crypt=%c", test_bit(DM_CRYPT_SAME_CPU, &cc->flags) ? 'y' : 'n'); in crypt_status()
3648 DMEMIT(",high_priority=%c", test_bit(DM_CRYPT_HIGH_PRIORITY, &cc->flags) ? 'y' : 'n'); in crypt_status()
3649 DMEMIT(",submit_from_crypt_cpus=%c", test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags) ? in crypt_status()
3651 DMEMIT(",no_read_workqueue=%c", test_bit(DM_CRYPT_NO_READ_WORKQUEUE, &cc->flags) ? in crypt_status()
3653 DMEMIT(",no_write_workqueue=%c", test_bit(DM_CRYPT_NO_WRITE_WORKQUEUE, &cc->flags) ? in crypt_status()
3655 DMEMIT(",iv_large_sectors=%c", test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags) ? in crypt_status()
3658 if (cc->used_tag_size) in crypt_status()
3660 cc->used_tag_size, cc->cipher_auth); in crypt_status()
3661 if (cc->sector_size != (1 << SECTOR_SHIFT)) in crypt_status()
3662 DMEMIT(",sector_size=%d", cc->sector_size); in crypt_status()
3663 if (cc->cipher_string) in crypt_status()
3664 DMEMIT(",cipher_string=%s", cc->cipher_string); in crypt_status()
3666 DMEMIT(",key_size=%u", cc->key_size); in crypt_status()
3667 DMEMIT(",key_parts=%u", cc->key_parts); in crypt_status()
3668 DMEMIT(",key_extra_size=%u", cc->key_extra_size); in crypt_status()
3669 DMEMIT(",key_mac_size=%u", cc->key_mac_size); in crypt_status()
3677 struct crypt_config *cc = ti->private; in crypt_postsuspend() local
3679 set_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_postsuspend()
3684 struct crypt_config *cc = ti->private; in crypt_preresume() local
3686 if (!test_bit(DM_CRYPT_KEY_VALID, &cc->flags)) { in crypt_preresume()
3696 struct crypt_config *cc = ti->private; in crypt_resume() local
3698 clear_bit(DM_CRYPT_SUSPENDED, &cc->flags); in crypt_resume()
3708 struct crypt_config *cc = ti->private; in crypt_message() local
3715 if (!test_bit(DM_CRYPT_SUSPENDED, &cc->flags)) { in crypt_message()
3722 if (key_size < 0 || cc->key_size != key_size) { in crypt_message()
3727 ret = crypt_set_key(cc, argv[2]); in crypt_message()
3730 if (cc->iv_gen_ops && cc->iv_gen_ops->init) in crypt_message()
3731 ret = cc->iv_gen_ops->init(cc); in crypt_message()
3733 if (cc->key_string) in crypt_message()
3734 memset(cc->key, 0, cc->key_size * sizeof(u8)); in crypt_message()
3738 return crypt_wipe_key(cc); in crypt_message()
3749 struct crypt_config *cc = ti->private; in crypt_iterate_devices() local
3751 return fn(ti, cc->dev, cc->start, ti->len, data); in crypt_iterate_devices()
3756 struct crypt_config *cc = ti->private; in crypt_io_hints() local
3759 max_t(unsigned int, limits->logical_block_size, cc->sector_size); in crypt_io_hints()
3761 max_t(unsigned int, limits->physical_block_size, cc->sector_size); in crypt_io_hints()
3762 limits->io_min = max_t(unsigned int, limits->io_min, cc->sector_size); in crypt_io_hints()