Lines Matching refs:crypt

351 static void finish_scattered_hmac(struct crypt_ctl *crypt)  in finish_scattered_hmac()  argument
353 struct aead_request *req = crypt->data.aead_req; in finish_scattered_hmac()
363 dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes); in finish_scattered_hmac()
369 struct crypt_ctl *crypt; in one_packet() local
375 crypt = crypt_phys2virt(phys); in one_packet()
377 switch (crypt->ctl_flags & CTL_FLAG_MASK) { in one_packet()
379 struct aead_request *req = crypt->data.aead_req; in one_packet()
382 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet()
383 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet()
385 finish_scattered_hmac(crypt); in one_packet()
391 struct skcipher_request *req = crypt->data.ablk_req; in one_packet()
409 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet()
411 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet()
416 ctx = crypto_tfm_ctx(crypt->data.tfm); in one_packet()
417 dma_pool_free(ctx_pool, crypt->regist_ptr, in one_packet()
418 crypt->regist_buf->phys_addr); in one_packet()
419 dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf); in one_packet()
424 ctx = crypto_tfm_ctx(crypt->data.tfm); in one_packet()
432 crypt->ctl_flags = CTL_FLAG_UNUSED; in one_packet()
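The entries above are the receive-side completion path: finish_scattered_hmac() releases the out-of-line HMAC buffer whose DMA handle was stashed in crypt->icv_rev_aes, and one_packet() turns the queue entry back into a virtual descriptor, dispatches on the control flags, frees the buffer chains, and finally marks the descriptor slot unused. A minimal userspace sketch of that dispatch pattern follows; the flag values, struct layout, and finish_* helpers are simplified stand-ins, not the driver's definitions.

#include <stdio.h>

#define CTL_FLAG_MASK          0x00ff  /* made-up values, not the driver's */
#define CTL_FLAG_UNUSED        0x0000
#define CTL_FLAG_PERFORM_AEAD  0x0001
#define CTL_FLAG_PERFORM_ABLK  0x0002
#define CTL_FLAG_GEN_ICV       0x0003
#define CTL_FLAG_GEN_REVAES    0x0004

struct crypt_ctl {                     /* simplified stand-in descriptor */
	unsigned int ctl_flags;
	void *request;                 /* aead/skcipher request, opaque here */
};

static void finish_aead(struct crypt_ctl *c)    { (void)c; puts("AEAD done"); }
static void finish_ablk(struct crypt_ctl *c)    { (void)c; puts("skcipher done"); }
static void finish_gen_icv(struct crypt_ctl *c) { (void)c; puts("HMAC pad hashed"); }
static void finish_revaes(struct crypt_ctl *c)  { (void)c; puts("reverse AES key ready"); }

static void one_packet_sketch(struct crypt_ctl *crypt)
{
	/* dispatch on the request type recorded at submit time */
	switch (crypt->ctl_flags & CTL_FLAG_MASK) {
	case CTL_FLAG_PERFORM_AEAD: finish_aead(crypt);    break;
	case CTL_FLAG_PERFORM_ABLK: finish_ablk(crypt);    break;
	case CTL_FLAG_GEN_ICV:      finish_gen_icv(crypt); break;
	case CTL_FLAG_GEN_REVAES:   finish_revaes(crypt);  break;
	}
	crypt->ctl_flags = CTL_FLAG_UNUSED;    /* recycle the descriptor slot */
}

int main(void)
{
	struct crypt_ctl c = { .ctl_flags = CTL_FLAG_PERFORM_ABLK };
	one_packet_sketch(&c);
	return 0;
}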
685 struct crypt_ctl *crypt; in register_chain_var() local
700 crypt = get_crypt_desc_emerg(); in register_chain_var()
701 if (!crypt) { in register_chain_var()
712 crypt->data.tfm = tfm; in register_chain_var()
713 crypt->regist_ptr = pad; in register_chain_var()
714 crypt->regist_buf = buf; in register_chain_var()
716 crypt->auth_offs = 0; in register_chain_var()
717 crypt->auth_len = HMAC_PAD_BLOCKLEN; in register_chain_var()
718 crypt->crypto_ctx = ctx_addr; in register_chain_var()
719 crypt->src_buf = buf_phys; in register_chain_var()
720 crypt->icv_rev_aes = target; in register_chain_var()
721 crypt->mode = NPE_OP_HASH_GEN_ICV; in register_chain_var()
722 crypt->init_len = init_len; in register_chain_var()
723 crypt->ctl_flags |= CTL_FLAG_GEN_ICV; in register_chain_var()
731 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in register_chain_var()
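register_chain_var() grabs an emergency descriptor, points it at an HMAC pad block, asks the NPE to hash it into the target address, and enqueues the descriptor's bus address via crypt_virt2phys(); one_packet() performs the inverse lookup with crypt_phys2virt(). A plausible shape for that translation over a contiguous, DMA-coherent descriptor array is sketched below; the pool layout and names are assumptions, since the real helpers are not part of this listing.

#include <stddef.h>
#include <stdint.h>

#define NPE_QLEN 16                    /* pool size, made up for the sketch */

struct crypt_ctl { uint32_t ctl_flags; /* ... other fields elided ... */ };

static struct crypt_ctl crypt_virt[NPE_QLEN];   /* CPU view of the pool       */
static uintptr_t crypt_phys = 0x40000000u;      /* device view (fake address) */

static uintptr_t crypt_virt2phys_sketch(struct crypt_ctl *virt)
{
	return crypt_phys + (uintptr_t)(virt - crypt_virt) * sizeof(*virt);
}

static struct crypt_ctl *crypt_phys2virt_sketch(uintptr_t phys)
{
	return crypt_virt + (phys - crypt_phys) / sizeof(struct crypt_ctl);
}

int main(void)
{
	struct crypt_ctl *desc = &crypt_virt[3];
	/* a descriptor must survive the round trip through the queue */
	return crypt_phys2virt_sketch(crypt_virt2phys_sketch(desc)) == desc ? 0 : 1;
}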
785 struct crypt_ctl *crypt; in gen_rev_aes_key() local
789 crypt = get_crypt_desc_emerg(); in gen_rev_aes_key()
790 if (!crypt) in gen_rev_aes_key()
795 crypt->data.tfm = tfm; in gen_rev_aes_key()
796 crypt->crypt_offs = 0; in gen_rev_aes_key()
797 crypt->crypt_len = AES_BLOCK128; in gen_rev_aes_key()
798 crypt->src_buf = 0; in gen_rev_aes_key()
799 crypt->crypto_ctx = dir->npe_ctx_phys; in gen_rev_aes_key()
800 crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32); in gen_rev_aes_key()
801 crypt->mode = NPE_OP_ENC_GEN_KEY; in gen_rev_aes_key()
802 crypt->init_len = dir->npe_ctx_idx; in gen_rev_aes_key()
803 crypt->ctl_flags |= CTL_FLAG_GEN_REVAES; in gen_rev_aes_key()
806 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in gen_rev_aes_key()
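gen_rev_aes_key() fills a descriptor that runs a single AES block through the NPE purely so the engine emits the reverse (decryption) key schedule just past the first word of its context area, then submits it the same way. A compilable sketch of that fill-then-enqueue pattern follows; the opcode and flag values, the descriptor layout, and enqueue_phys() (standing in for qmgr_put_entry()) are illustrative assumptions.

#include <stdint.h>
#include <stdio.h>

#define NPE_OP_ENC_GEN_KEY  0x90       /* made-up opcode value */
#define CTL_FLAG_GEN_REVAES 0x0008     /* made-up flag value   */

struct crypt_ctl {                     /* simplified stand-in layout */
	uint32_t crypto_ctx;           /* bus address of the NPE context  */
	uint32_t icv_rev_aes;          /* where the generated data lands  */
	uint16_t crypt_offs, crypt_len;
	uint8_t  mode, init_len;
	uint32_t ctl_flags;
};

/* stand-in for qmgr_put_entry(send_qid, ...): just log the hand-off */
static void enqueue_phys(uint32_t phys)
{
	printf("submit descriptor @0x%08x to the NPE queue\n", (unsigned int)phys);
}

static void gen_rev_aes_key_sketch(struct crypt_ctl *crypt, uint32_t crypt_phys,
				   uint32_t npe_ctx_phys, uint8_t npe_ctx_idx)
{
	crypt->crypt_offs = 0;
	crypt->crypt_len = 16;                                /* one AES block */
	crypt->crypto_ctx = npe_ctx_phys;
	crypt->icv_rev_aes = npe_ctx_phys + sizeof(uint32_t); /* just past the first context word */
	crypt->mode = NPE_OP_ENC_GEN_KEY;
	crypt->init_len = npe_ctx_idx;
	crypt->ctl_flags |= CTL_FLAG_GEN_REVAES;
	enqueue_phys(crypt_phys);
}

int main(void)
{
	struct crypt_ctl c = { 0 };
	gen_rev_aes_key_sketch(&c, 0x40000000u, 0x50000000u, 24);
	return 0;
}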
979 struct crypt_ctl *crypt; in ablk_perform() local
1000 crypt = get_crypt_desc(); in ablk_perform()
1001 if (!crypt) in ablk_perform()
1004 crypt->data.ablk_req = req; in ablk_perform()
1005 crypt->crypto_ctx = dir->npe_ctx_phys; in ablk_perform()
1006 crypt->mode = dir->npe_mode; in ablk_perform()
1007 crypt->init_len = dir->npe_ctx_idx; in ablk_perform()
1009 crypt->crypt_offs = 0; in ablk_perform()
1010 crypt->crypt_len = nbytes; in ablk_perform()
1013 memcpy(crypt->iv, req->iv, ivsize); in ablk_perform()
1021 crypt->mode |= NPE_OP_NOT_IN_PLACE; in ablk_perform()
1030 crypt->dst_buf = dst_hook.phys_next; in ablk_perform()
1040 crypt->src_buf = src_hook.phys_next; in ablk_perform()
1041 crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK; in ablk_perform()
1042 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in ablk_perform()
1047 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in ablk_perform()
1050 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in ablk_perform()
1052 crypt->ctl_flags = CTL_FLAG_UNUSED; in ablk_perform()
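ablk_perform() is the skcipher submit path: the IV is copied into the descriptor, NPE_OP_NOT_IN_PLACE is set when source and destination differ, the scatterlists are chained into DMA-visible buffer lists, and any failure unwinds the chains before the descriptor is returned to the pool as CTL_FLAG_UNUSED. The rough error-unwind shape is sketched below; the chain helpers and return codes are illustrative stand-ins, and the label names merely mirror the pattern visible in the listing.

#include <stdbool.h>
#include <stdio.h>

struct buf_chain { int dummy; };       /* stand-in for the DMA buffer chain */

static bool chain_src(struct buf_chain *b) { (void)b; return true; }
static bool chain_dst(struct buf_chain *b) { (void)b; return true; }
static void free_chain(struct buf_chain *b, const char *which)
{
	(void)b;
	printf("freeing %s chain\n", which);
}

static int ablk_perform_sketch(bool in_place)
{
	struct buf_chain src = { 0 }, dst = { 0 };

	if (!in_place && !chain_dst(&dst))     /* only chain dst when src != dst */
		goto free_buf_dest;
	if (!chain_src(&src))
		goto free_buf_src;

	/* ... fill the descriptor, copy the IV, enqueue its bus address ... */
	return 0;                              /* -EINPROGRESS in the driver */

free_buf_src:
	free_chain(&src, "src");
free_buf_dest:
	if (!in_place)
		free_chain(&dst, "dst");
	/* the descriptor would be marked CTL_FLAG_UNUSED here */
	return -1;                             /* -ENOMEM in the driver */
}

int main(void)
{
	return ablk_perform_sketch(true) ? 1 : 0;
}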
1096 struct crypt_ctl *crypt; in aead_perform() local
1120 crypt = get_crypt_desc(); in aead_perform()
1121 if (!crypt) in aead_perform()
1124 crypt->data.aead_req = req; in aead_perform()
1125 crypt->crypto_ctx = dir->npe_ctx_phys; in aead_perform()
1126 crypt->mode = dir->npe_mode; in aead_perform()
1127 crypt->init_len = dir->npe_ctx_idx; in aead_perform()
1129 crypt->crypt_offs = cryptoffset; in aead_perform()
1130 crypt->crypt_len = eff_cryptlen; in aead_perform()
1132 crypt->auth_offs = 0; in aead_perform()
1133 crypt->auth_len = req->assoclen + cryptlen; in aead_perform()
1135 memcpy(crypt->iv, req->iv, ivsize); in aead_perform()
1137 buf = chainup_buffers(dev, req->src, crypt->auth_len, in aead_perform()
1140 crypt->src_buf = src_hook.phys_next; in aead_perform()
1146 crypt->icv_rev_aes = buf->phys_addr + in aead_perform()
1154 crypt->mode |= NPE_OP_NOT_IN_PLACE; in aead_perform()
1157 buf = chainup_buffers(dev, req->dst, crypt->auth_len, in aead_perform()
1160 crypt->dst_buf = dst_hook.phys_next; in aead_perform()
1168 crypt->icv_rev_aes = buf->phys_addr + in aead_perform()
1180 crypt->icv_rev_aes = dma; in aead_perform()
1190 crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD; in aead_perform()
1191 qmgr_put_entry(send_qid, crypt_virt2phys(crypt)); in aead_perform()
1196 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in aead_perform()
1198 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in aead_perform()
1199 crypt->ctl_flags = CTL_FLAG_UNUSED; in aead_perform()
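aead_perform() hashes the associated data plus the payload (auth_offs 0, auth_len = assoclen + cryptlen) while confining the cipher to the crypt_offs/crypt_len window, and points icv_rev_aes at wherever the ICV must be read or written: inside the chained destination when possible, otherwise at a separate DMA buffer. The small sketch below shows how the two windows relate; the decrypt-side authsize handling and the crypt_offs value are assumptions for illustration, not quotes from the driver.

#include <stdio.h>

struct aead_windows {
	unsigned int auth_offs, auth_len;      /* region that gets hashed       */
	unsigned int crypt_offs, crypt_len;    /* region that gets (de)ciphered */
};

static struct aead_windows aead_windows_sketch(unsigned int assoclen,
					       unsigned int req_cryptlen,
					       unsigned int authsize,
					       int encrypt)
{
	struct aead_windows w;
	/* on decrypt, req_cryptlen includes the ICV, which is neither
	 * decrypted nor covered by the hash */
	unsigned int cryptlen = encrypt ? req_cryptlen : req_cryptlen - authsize;

	w.auth_offs = 0;
	w.auth_len = assoclen + cryptlen;      /* associated data + payload        */
	w.crypt_offs = assoclen;               /* payload follows the AAD (assumed) */
	w.crypt_len = cryptlen;
	return w;
}

int main(void)
{
	/* decrypt: 16 bytes AAD, 64-byte payload, 12-byte ICV appended */
	struct aead_windows w = aead_windows_sketch(16, 64 + 12, 12, 0);
	printf("auth %u+%u, crypt %u+%u\n",
	       w.auth_offs, w.auth_len, w.crypt_offs, w.crypt_len);
	return 0;
}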