Lines matching refs:dd (uses of struct atmel_aes_dev *dd in drivers/crypto/atmel-aes.c; each entry: source line number, matching code, enclosing function)

92 	struct atmel_aes_dev	*dd;  member
335 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset) in atmel_aes_read() argument
337 u32 value = readl_relaxed(dd->io_base + offset); in atmel_aes_read()
340 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_read()
343 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_aes_read()
351 static inline void atmel_aes_write(struct atmel_aes_dev *dd, in atmel_aes_write() argument
355 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_write()
358 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_aes_write()
363 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
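
The two accessors above wrap relaxed MMIO with an optional trace gated by AES_FLAGS_DUMP_REG. A minimal userspace model of the same pattern, assuming a mapped 32-bit register window (the real driver resolves a register name for the trace; this model just prints the offset, and all names here are illustrative):

    #include <stdint.h>
    #include <stdio.h>

    struct aes_model {
        volatile uint32_t *io_base;   /* mapped register window */
        unsigned long flags;          /* stands in for dd->flags */
    };
    #define MODEL_DUMP_REG 0x1UL

    static inline uint32_t model_read(struct aes_model *m, uint32_t offset)
    {
        uint32_t value = m->io_base[offset / 4];   /* readl_relaxed() in the driver */
        if (m->flags & MODEL_DUMP_REG)
            fprintf(stderr, "read 0x%08x from 0x%02x\n", value, (unsigned int)offset);
        return value;
    }

    static inline void model_write(struct aes_model *m, uint32_t offset, uint32_t value)
    {
        if (m->flags & MODEL_DUMP_REG)
            fprintf(stderr, "write 0x%08x into 0x%02x\n", value, (unsigned int)offset);
        m->io_base[offset / 4] = value;            /* writel_relaxed() in the driver */
    }
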
366 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_n() argument
370 *value = atmel_aes_read(dd, offset); in atmel_aes_read_n()
373 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_n() argument
377 atmel_aes_write(dd, offset, *value); in atmel_aes_write_n()
380 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_block() argument
383 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
386 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_block() argument
389 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
392 static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd, in atmel_aes_wait_for_data_ready() argument
395 u32 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_wait_for_data_ready()
398 return resume(dd); in atmel_aes_wait_for_data_ready()
400 dd->resume = resume; in atmel_aes_wait_for_data_ready()
401 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_wait_for_data_ready()
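
atmel_aes_wait_for_data_ready() is the heart of the driver's continuation style: if AES_ISR already reports DATARDY, the next step runs synchronously; otherwise it is parked in dd->resume and the DATARDY interrupt is unmasked, and the done tasklet (lines 2177-2180 below) later invokes dd->resume. A sketch of that contract, with illustrative names:

    #include <errno.h>
    #include <stdint.h>

    struct aes_fsm;
    typedef int (*resume_fn)(struct aes_fsm *);

    struct aes_fsm {
        uint32_t isr;        /* latest AES_ISR snapshot */
        resume_fn resume;    /* parked continuation (dd->resume) */
    };
    #define INT_DATARDY 0x1u

    static int wait_for_data_ready(struct aes_fsm *m, resume_fn resume)
    {
        if (m->isr & INT_DATARDY)
            return resume(m);        /* data already there: continue now */
        m->resume = resume;          /* park it for the IRQ/tasklet path */
        /* driver: atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); */
        return -EINPROGRESS;
    }
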
423 static int atmel_aes_hw_init(struct atmel_aes_dev *dd) in atmel_aes_hw_init() argument
427 err = clk_enable(dd->iclk); in atmel_aes_hw_init()
431 atmel_aes_write(dd, AES_CR, AES_CR_SWRST); in atmel_aes_hw_init()
432 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET); in atmel_aes_hw_init()
437 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd) in atmel_aes_get_version() argument
439 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff; in atmel_aes_get_version()
442 static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd) in atmel_aes_hw_version_init() argument
446 err = atmel_aes_hw_init(dd); in atmel_aes_hw_version_init()
450 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
452 dev_info(dd->dev, "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
454 clk_disable(dd->iclk); in atmel_aes_hw_version_init()
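
hw_init/hw_version_init bracket every hardware touch with clk_enable()/clk_disable(): reset the IP (AES_CR_SWRST), program AES_MR, latch the 12-bit version once, then gate the clock again. A generic model of that bracket, names illustrative:

    /* Enable the peripheral clock only around the access, as
     * atmel_aes_hw_version_init() does for its one-shot version read. */
    static int with_clock(int (*clk_enable)(void *), void (*clk_disable)(void *),
                          void *clk, int (*body)(void *), void *arg)
    {
        int err = clk_enable(clk);
        if (err)
            return err;
        err = body(arg);    /* e.g. SWRST + read AES_HW_VERSION & 0xfff */
        clk_disable(clk);
        return err;
    }
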
458 static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd, in atmel_aes_set_mode() argument
462 dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode; in atmel_aes_set_mode()
465 static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd) in atmel_aes_is_encrypt() argument
467 return (dd->flags & AES_FLAGS_ENCRYPT); in atmel_aes_is_encrypt()
471 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
474 static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd) in atmel_aes_set_iv_as_last_ciphertext_block() argument
476 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_set_iv_as_last_ciphertext_block()
497 static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd) in atmel_aes_ctr_update_req_iv() argument
499 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_update_req_iv()
500 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_update_req_iv()
517 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_complete() argument
519 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_complete()
523 if (dd->ctx->is_aead) in atmel_aes_complete()
524 atmel_aes_authenc_complete(dd, err); in atmel_aes_complete()
527 clk_disable(dd->iclk); in atmel_aes_complete()
528 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_complete()
530 if (!err && !dd->ctx->is_aead && in atmel_aes_complete()
533 atmel_aes_set_iv_as_last_ciphertext_block(dd); in atmel_aes_complete()
535 atmel_aes_ctr_update_req_iv(dd); in atmel_aes_complete()
538 if (dd->is_async) in atmel_aes_complete()
539 crypto_request_complete(dd->areq, err); in atmel_aes_complete()
541 tasklet_schedule(&dd->queue_task); in atmel_aes_complete()
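
atmel_aes_complete() gates the clock off, clears AES_FLAGS_BUSY, and, for non-AEAD requests, refreshes req->iv so chained requests work: CBC-style modes take the last ciphertext block (lines 474 ff.), while CTR recomputes the counter (lines 497 ff.). A model of the CBC chaining rule, assuming ct points at wherever the ciphertext tail lives (dst on encrypt; a stashed copy of src on decrypt, since decryption overwrites dst):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Next-IV rule for CBC-style modes: the last ciphertext block of this
     * request seeds the next one. */
    static void set_next_iv(uint8_t *iv, size_t ivsize,
                            const uint8_t *ct, size_t ctlen)
    {
        if (ctlen < ivsize)
            return;                  /* nothing to chain from */
        memcpy(iv, ct + ctlen - ivsize, ivsize);
    }
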
546 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl_key() argument
559 valmr |= dd->flags & AES_FLAGS_MODE_MASK; in atmel_aes_write_ctrl_key()
563 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl_key()
569 atmel_aes_write(dd, AES_MR, valmr); in atmel_aes_write_ctrl_key()
571 atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen)); in atmel_aes_write_ctrl_key()
574 atmel_aes_write_block(dd, AES_IVR(0), iv); in atmel_aes_write_ctrl_key()
577 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl() argument
581 atmel_aes_write_ctrl_key(dd, use_dma, iv, in atmel_aes_write_ctrl()
582 dd->ctx->key, dd->ctx->keylen); in atmel_aes_write_ctrl()
587 static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd) in atmel_aes_cpu_transfer() argument
593 atmel_aes_read_block(dd, AES_ODATAR(0), dd->data); in atmel_aes_cpu_transfer()
594 dd->data += 4; in atmel_aes_cpu_transfer()
595 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
597 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
600 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_transfer()
602 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_cpu_transfer()
604 dd->resume = atmel_aes_cpu_transfer; in atmel_aes_cpu_transfer()
605 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_cpu_transfer()
610 if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_cpu_transfer()
611 dd->buf, dd->total)) in atmel_aes_cpu_transfer()
615 return atmel_aes_complete(dd, err); in atmel_aes_cpu_transfer()
617 return dd->cpu_transfer_complete(dd); in atmel_aes_cpu_transfer()
620 static int atmel_aes_cpu_start(struct atmel_aes_dev *dd, in atmel_aes_cpu_start() argument
631 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_cpu_start()
633 dd->total = len; in atmel_aes_cpu_start()
634 dd->real_dst = dst; in atmel_aes_cpu_start()
635 dd->cpu_transfer_complete = resume; in atmel_aes_cpu_start()
636 dd->datalen = len + padlen; in atmel_aes_cpu_start()
637 dd->data = (u32 *)dd->buf; in atmel_aes_cpu_start()
638 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_start()
639 return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer); in atmel_aes_cpu_start()
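
The CPU path stages everything through dd->buf and moves one 16-byte block per DATARDY round trip, advancing dd->data by four u32s and decrementing dd->datalen (lines 593-605). One round trip, modelled with memory-mapped register arrays (illustrative names):

    #include <stdint.h>

    #define WORDS_PER_BLOCK 4   /* SIZE_IN_WORDS(AES_BLOCK_SIZE) */

    /* Push a block into IDATAR, then, after DATARDY (handled by the resume
     * machinery above), pull the result from ODATAR. */
    static void pio_round_trip(volatile uint32_t *idatar, volatile uint32_t *odatar,
                               const uint32_t *in, uint32_t *out)
    {
        for (int i = 0; i < WORDS_PER_BLOCK; i++)
            idatar[i] = in[i];
        /* ...interrupt or ISR poll signals AES_INT_DATARDY here... */
        for (int i = 0; i < WORDS_PER_BLOCK; i++)
            out[i] = odatar[i];
    }
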
647 static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd, in atmel_aes_check_aligned() argument
654 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
662 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
671 if (!IS_ALIGNED(sg->length, dd->ctx->block_size)) in atmel_aes_check_aligned()
697 static int atmel_aes_map(struct atmel_aes_dev *dd, in atmel_aes_map() argument
705 dd->total = len; in atmel_aes_map()
706 dd->src.sg = src; in atmel_aes_map()
707 dd->dst.sg = dst; in atmel_aes_map()
708 dd->real_dst = dst; in atmel_aes_map()
710 src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src); in atmel_aes_map()
714 dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst); in atmel_aes_map()
716 padlen = atmel_aes_padlen(len, dd->ctx->block_size); in atmel_aes_map()
718 if (dd->buflen < len + padlen) in atmel_aes_map()
722 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_map()
723 dd->src.sg = &dd->aligned_sg; in atmel_aes_map()
724 dd->src.nents = 1; in atmel_aes_map()
725 dd->src.remainder = 0; in atmel_aes_map()
729 dd->dst.sg = &dd->aligned_sg; in atmel_aes_map()
730 dd->dst.nents = 1; in atmel_aes_map()
731 dd->dst.remainder = 0; in atmel_aes_map()
734 sg_init_table(&dd->aligned_sg, 1); in atmel_aes_map()
735 sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen); in atmel_aes_map()
738 if (dd->src.sg == dd->dst.sg) { in atmel_aes_map()
739 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
741 dd->dst.sg_len = dd->src.sg_len; in atmel_aes_map()
742 if (!dd->src.sg_len) in atmel_aes_map()
745 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
747 if (!dd->src.sg_len) in atmel_aes_map()
750 dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_map()
752 if (!dd->dst.sg_len) { in atmel_aes_map()
753 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
762 static void atmel_aes_unmap(struct atmel_aes_dev *dd) in atmel_aes_unmap() argument
764 if (dd->src.sg == dd->dst.sg) { in atmel_aes_unmap()
765 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
768 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
769 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
771 dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_unmap()
774 if (dd->dst.sg != &dd->aligned_sg) in atmel_aes_unmap()
775 atmel_aes_restore_sg(&dd->dst); in atmel_aes_unmap()
777 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
780 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
781 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
784 if (dd->dst.sg == &dd->aligned_sg) in atmel_aes_unmap()
785 sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_unmap()
786 dd->buf, dd->total); in atmel_aes_unmap()
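
atmel_aes_map() only hands scatterlists straight to DMA when every segment is a whole number of cipher blocks; otherwise the data is bounced through dd->buf, described by the single dd->aligned_sg entry, and (for a bounced dst) copied back in atmel_aes_unmap() via sg_copy_from_buffer(). The per-segment test, in essence (the driver's atmel_aes_check_aligned() also tracks a trailing remainder, not shown here):

    #include <stddef.h>

    /* A segment may go to DMA only if it is block-size aligned,
     * i.e. IS_ALIGNED(len, dd->ctx->block_size) in the driver. */
    static int segment_dma_ok(size_t seg_len, size_t block_size)
    {
        return (seg_len % block_size) == 0;
    }
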
789 static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd, in atmel_aes_dma_transfer_start() argument
808 dma = &dd->src; in atmel_aes_dma_transfer_start()
810 config.dst_addr = dd->phys_base + AES_IDATAR(0); in atmel_aes_dma_transfer_start()
814 dma = &dd->dst; in atmel_aes_dma_transfer_start()
816 config.src_addr = dd->phys_base + AES_ODATAR(0); in atmel_aes_dma_transfer_start()
833 desc->callback_param = dd; in atmel_aes_dma_transfer_start()
840 static int atmel_aes_dma_start(struct atmel_aes_dev *dd, in atmel_aes_dma_start() argument
850 switch (dd->ctx->block_size) { in atmel_aes_dma_start()
853 maxburst = dd->caps.max_burst_size; in atmel_aes_dma_start()
861 err = atmel_aes_map(dd, src, dst, len); in atmel_aes_dma_start()
865 dd->resume = resume; in atmel_aes_dma_start()
868 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM, in atmel_aes_dma_start()
874 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV, in atmel_aes_dma_start()
882 dmaengine_terminate_sync(dd->dst.chan); in atmel_aes_dma_start()
884 atmel_aes_unmap(dd); in atmel_aes_dma_start()
886 return atmel_aes_complete(dd, err); in atmel_aes_dma_start()
891 struct atmel_aes_dev *dd = data; in atmel_aes_dma_callback() local
893 atmel_aes_unmap(dd); in atmel_aes_dma_callback()
894 dd->is_async = true; in atmel_aes_dma_callback()
895 (void)dd->resume(dd); in atmel_aes_dma_callback()
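
atmel_aes_dma_transfer_start() drives the generic dmaengine API: a slave config pointing at the AES FIFO, a prep over the mapped sg list, a completion callback, submit, issue. A kernel-context sketch of the MEM_TO_DEV half (the DEV_TO_MEM half mirrors it with src_addr/src_addr_width/src_maxburst); not standalone, error paths trimmed:

    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>

    static int start_mem_to_dev(struct dma_chan *chan, struct scatterlist *sg,
                                unsigned int sg_len, dma_addr_t fifo,
                                u32 maxburst, dma_async_tx_callback cb, void *param)
    {
        struct dma_slave_config cfg = {
            .direction      = DMA_MEM_TO_DEV,
            .dst_addr       = fifo,                  /* phys_base + AES_IDATAR(0) */
            .dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
            .dst_maxburst   = maxburst,
        };
        struct dma_async_tx_descriptor *desc;
        int err;

        err = dmaengine_slave_config(chan, &cfg);
        if (err)
            return err;

        desc = dmaengine_prep_slave_sg(chan, sg, sg_len, DMA_MEM_TO_DEV,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
            return -ENOMEM;

        desc->callback = cb;            /* atmel_aes_dma_callback */
        desc->callback_param = param;   /* dd (line 833) */
        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
    }
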
898 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, in atmel_aes_handle_queue() argument
907 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
909 ret = crypto_enqueue_request(&dd->queue, new_areq); in atmel_aes_handle_queue()
910 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
911 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
914 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
915 areq = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
917 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
918 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
928 dd->areq = areq; in atmel_aes_handle_queue()
929 dd->ctx = ctx; in atmel_aes_handle_queue()
931 dd->is_async = start_async; in atmel_aes_handle_queue()
934 err = ctx->start(dd); in atmel_aes_handle_queue()
941 static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd) in atmel_aes_transfer_complete() argument
943 return atmel_aes_complete(dd, 0); in atmel_aes_transfer_complete()
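
atmel_aes_handle_queue() serializes all requests on one engine: under dd->lock it enqueues the new request, bails with the enqueue status if AES_FLAGS_BUSY is set, otherwise dequeues, marks busy, notifies any backlogged request, and calls the mode's ctx->start(). Both tasklets re-enter it with a NULL request (line 2172). A stripped-down model of the handoff, locking and backlog elided to comments:

    #include <errno.h>

    struct engine {
        int busy;                        /* AES_FLAGS_BUSY */
        int (*start)(struct engine *);   /* ctx->start, e.g. atmel_aes_start */
    };

    static int handle_queue(struct engine *e, int new_req_enqueued)
    {
        int ret = new_req_enqueued ? -EINPROGRESS : 0;

        /* spin_lock_irqsave(&dd->lock, flags); enqueue; */
        if (e->busy)
            return ret;                  /* completion path reschedules us */
        e->busy = 1;
        /* dequeue + backlog notification; spin_unlock_irqrestore(...); */

        return e->start(e);              /* errors surface via atmel_aes_complete() */
    }
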
946 static int atmel_aes_start(struct atmel_aes_dev *dd) in atmel_aes_start() argument
948 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_start()
951 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
954 atmel_aes_set_mode(dd, rctx); in atmel_aes_start()
956 err = atmel_aes_hw_init(dd); in atmel_aes_start()
958 return atmel_aes_complete(dd, err); in atmel_aes_start()
960 atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv); in atmel_aes_start()
962 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_start()
966 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_start()
970 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd) in atmel_aes_ctr_transfer() argument
972 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_transfer()
973 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_transfer()
981 ctx->offset += dd->total; in atmel_aes_ctr_transfer()
983 return atmel_aes_transfer_complete(dd); in atmel_aes_ctr_transfer()
1008 atmel_aes_write_ctrl(dd, use_dma, ctx->iv); in atmel_aes_ctr_transfer()
1019 return atmel_aes_dma_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1022 return atmel_aes_cpu_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1026 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd) in atmel_aes_ctr_start() argument
1028 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_start()
1029 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_start()
1033 atmel_aes_set_mode(dd, rctx); in atmel_aes_ctr_start()
1035 err = atmel_aes_hw_init(dd); in atmel_aes_ctr_start()
1037 return atmel_aes_complete(dd, err); in atmel_aes_ctr_start()
1041 dd->total = 0; in atmel_aes_ctr_start()
1042 return atmel_aes_ctr_transfer(dd); in atmel_aes_ctr_start()
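
atmel_aes_ctr_transfer() walks the request in chunks: ctx->offset tracks progress, and the elided lines (985-1007) cap each chunk so the hardware's counter word (the low bits of the big-endian ctx->iv) cannot wrap mid-transfer, with the function resuming itself as each chunk completes. The sizing rule, with the counter width W left as a parameter because the exact width is in the elided code:

    #include <stdint.h>

    /* Blocks that fit before the low W bits of the counter wrap; a request
     * larger than this gets fragmented and the IV recomputed in between. */
    static uint64_t blocks_before_wrap(uint32_t ctr, unsigned int w)
    {
        uint64_t span = 1ULL << w;            /* counter period */
        return span - (ctr & (span - 1));
    }
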
1104 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_crypt()
1156 struct atmel_aes_dev *dd; in atmel_aes_init_tfm() local
1158 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_init_tfm()
1159 if (!dd) in atmel_aes_init_tfm()
1163 ctx->base.dd = dd; in atmel_aes_init_tfm()
1172 struct atmel_aes_dev *dd; in atmel_aes_ctr_init_tfm() local
1174 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_ctr_init_tfm()
1175 if (!dd) in atmel_aes_ctr_init_tfm()
1179 ctx->base.dd = dd; in atmel_aes_ctr_init_tfm()
1232 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
1236 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
1237 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
1239 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
1240 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
1241 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
1242 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
1243 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
1244 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
1245 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
1253 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd, in atmel_aes_gcm_ghash() argument
1258 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash()
1260 dd->data = (u32 *)data; in atmel_aes_gcm_ghash()
1261 dd->datalen = datalen; in atmel_aes_gcm_ghash()
1266 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_ghash()
1267 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init); in atmel_aes_gcm_ghash()
1270 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_init() argument
1272 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_init()
1275 atmel_aes_write(dd, AES_AADLENR, dd->total); in atmel_aes_gcm_ghash_init()
1276 atmel_aes_write(dd, AES_CLENR, 0); in atmel_aes_gcm_ghash_init()
1280 atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in); in atmel_aes_gcm_ghash_init()
1282 return atmel_aes_gcm_ghash_finalize(dd); in atmel_aes_gcm_ghash_init()
1285 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_finalize() argument
1287 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_finalize()
1291 while (dd->datalen > 0) { in atmel_aes_gcm_ghash_finalize()
1292 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_ghash_finalize()
1293 dd->data += 4; in atmel_aes_gcm_ghash_finalize()
1294 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1296 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_ghash_finalize()
1298 dd->resume = atmel_aes_gcm_ghash_finalize; in atmel_aes_gcm_ghash_finalize()
1299 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_ghash_finalize()
1305 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out); in atmel_aes_gcm_ghash_finalize()
1307 return ctx->ghash_resume(dd); in atmel_aes_gcm_ghash_finalize()
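
The GHASH helpers reuse the PIO machinery: blocks are pushed whole through IDATAR (loop at lines 1291-1299), so the caller must zero-pad the AAD to a 16-byte boundary in dd->buf first, while AES_AADLENR/AES_CLENR (lines 1275-1276) carry the driver-tracked lengths. The padding rule, as a sketch:

    #include <stddef.h>

    /* Bytes of zero padding needed to reach the next AES block boundary. */
    static size_t ghash_padlen(size_t len)
    {
        size_t rem = len & 15;      /* len % AES_BLOCK_SIZE */
        return rem ? 16 - rem : 0;
    }
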
1311 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd) in atmel_aes_gcm_start() argument
1313 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_start()
1314 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_start()
1320 u8 *data = dd->buf; in atmel_aes_gcm_start()
1323 atmel_aes_set_mode(dd, rctx); in atmel_aes_gcm_start()
1325 err = atmel_aes_hw_init(dd); in atmel_aes_gcm_start()
1327 return atmel_aes_complete(dd, err); in atmel_aes_gcm_start()
1332 return atmel_aes_gcm_process(dd); in atmel_aes_gcm_start()
1337 if (datalen > dd->buflen) in atmel_aes_gcm_start()
1338 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_start()
1344 return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen, in atmel_aes_gcm_start()
1348 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd) in atmel_aes_gcm_process() argument
1350 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_process()
1351 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_process()
1353 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_process()
1365 dd->flags |= AES_FLAGS_GTAGEN; in atmel_aes_gcm_process()
1367 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_process()
1368 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length); in atmel_aes_gcm_process()
1371 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd) in atmel_aes_gcm_length() argument
1373 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_length()
1374 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_length()
1381 atmel_aes_write_block(dd, AES_IVR(0), j0); in atmel_aes_gcm_length()
1385 atmel_aes_write(dd, AES_AADLENR, req->assoclen); in atmel_aes_gcm_length()
1386 atmel_aes_write(dd, AES_CLENR, ctx->textlen); in atmel_aes_gcm_length()
1390 dd->datalen = 0; in atmel_aes_gcm_length()
1391 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1396 if (unlikely(req->assoclen + padlen > dd->buflen)) in atmel_aes_gcm_length()
1397 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_length()
1398 sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen); in atmel_aes_gcm_length()
1401 dd->data = (u32 *)dd->buf; in atmel_aes_gcm_length()
1402 dd->datalen = req->assoclen + padlen; in atmel_aes_gcm_length()
1403 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1406 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd) in atmel_aes_gcm_data() argument
1408 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_data()
1409 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_data()
1415 while (dd->datalen > 0) { in atmel_aes_gcm_data()
1416 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_data()
1417 dd->data += 4; in atmel_aes_gcm_data()
1418 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1420 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_data()
1422 dd->resume = atmel_aes_gcm_data; in atmel_aes_gcm_data()
1423 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_data()
1430 return atmel_aes_gcm_tag_init(dd); in atmel_aes_gcm_data()
1439 mr = atmel_aes_read(dd, AES_MR); in atmel_aes_gcm_data()
1442 if (dd->caps.has_dualbuff) in atmel_aes_gcm_data()
1444 atmel_aes_write(dd, AES_MR, mr); in atmel_aes_gcm_data()
1446 return atmel_aes_dma_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1450 return atmel_aes_cpu_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1454 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag_init() argument
1456 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag_init()
1457 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_tag_init()
1458 __be64 *data = dd->buf; in atmel_aes_gcm_tag_init()
1460 if (likely(dd->flags & AES_FLAGS_GTAGEN)) { in atmel_aes_gcm_tag_init()
1461 if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) { in atmel_aes_gcm_tag_init()
1462 dd->resume = atmel_aes_gcm_tag_init; in atmel_aes_gcm_tag_init()
1463 atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY); in atmel_aes_gcm_tag_init()
1467 return atmel_aes_gcm_finalize(dd); in atmel_aes_gcm_tag_init()
1471 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash); in atmel_aes_gcm_tag_init()
1476 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE, in atmel_aes_gcm_tag_init()
1480 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag() argument
1482 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag()
1489 flags = dd->flags; in atmel_aes_gcm_tag()
1490 dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN); in atmel_aes_gcm_tag()
1491 dd->flags |= AES_FLAGS_CTR; in atmel_aes_gcm_tag()
1492 atmel_aes_write_ctrl(dd, false, ctx->j0); in atmel_aes_gcm_tag()
1493 dd->flags = flags; in atmel_aes_gcm_tag()
1495 atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash); in atmel_aes_gcm_tag()
1496 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize); in atmel_aes_gcm_tag()
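
gcm_tag_init() feeds GHASH one final block holding the AAD and ciphertext bit lengths, then gcm_tag() encrypts the resulting GHASH with a single CTR block keyed on J0 (the flags save/restore at lines 1489-1493), which is exactly GCM's T = E_K(J0) XOR GHASH. Building the length block, as standard GCM defines it:

    #include <stdint.h>

    /* Final GHASH block: 64-bit big-endian bit lengths of AAD then text. */
    static void gcm_len_block(uint8_t out[16], uint64_t aadlen, uint64_t textlen)
    {
        uint64_t abits = aadlen * 8, cbits = textlen * 8;
        for (int i = 0; i < 8; i++) {
            out[i]     = (uint8_t)(abits >> (56 - 8 * i));
            out[8 + i] = (uint8_t)(cbits >> (56 - 8 * i));
        }
    }
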
1499 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_finalize() argument
1501 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_finalize()
1502 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_finalize()
1504 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_finalize()
1509 if (likely(dd->flags & AES_FLAGS_GTAGEN)) in atmel_aes_gcm_finalize()
1510 atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag); in atmel_aes_gcm_finalize()
1512 atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag); in atmel_aes_gcm_finalize()
1524 return atmel_aes_complete(dd, err); in atmel_aes_gcm_finalize()
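
On encrypt, finalize copies ctx->tag out as the authentication tag; on decrypt it must compare against the transmitted tag without leaking timing, which the kernel does with crypto_memneq() and maps to -EBADMSG per the AEAD convention. A portable model of that comparison:

    #include <stddef.h>
    #include <stdint.h>

    /* Constant-time tag check: accumulate all differences so timing does
     * not reveal the first mismatching byte. Nonzero means reject. */
    static int tag_mismatch(const uint8_t *itag, const uint8_t *otag, size_t n)
    {
        uint8_t diff = 0;
        for (size_t i = 0; i < n; i++)
            diff |= itag[i] ^ otag[i];
        return diff != 0;
    }
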
1540 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_gcm_crypt()
1578 struct atmel_aes_dev *dd; in atmel_aes_gcm_init() local
1580 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_gcm_init()
1581 if (!dd) in atmel_aes_gcm_init()
1585 ctx->base.dd = dd; in atmel_aes_gcm_init()
1617 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
1619 static int atmel_aes_xts_start(struct atmel_aes_dev *dd) in atmel_aes_xts_start() argument
1621 struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx); in atmel_aes_xts_start()
1622 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_start()
1627 atmel_aes_set_mode(dd, rctx); in atmel_aes_xts_start()
1629 err = atmel_aes_hw_init(dd); in atmel_aes_xts_start()
1631 return atmel_aes_complete(dd, err); in atmel_aes_xts_start()
1634 flags = dd->flags; in atmel_aes_xts_start()
1635 dd->flags &= ~AES_FLAGS_MODE_MASK; in atmel_aes_xts_start()
1636 dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT); in atmel_aes_xts_start()
1637 atmel_aes_write_ctrl_key(dd, false, NULL, in atmel_aes_xts_start()
1639 dd->flags = flags; in atmel_aes_xts_start()
1641 atmel_aes_write_block(dd, AES_IDATAR(0), req->iv); in atmel_aes_xts_start()
1642 return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data); in atmel_aes_xts_start()
1645 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd) in atmel_aes_xts_process_data() argument
1647 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_process_data()
1655 atmel_aes_read_block(dd, AES_ODATAR(0), tweak); in atmel_aes_xts_process_data()
1665 atmel_aes_write_ctrl(dd, use_dma, NULL); in atmel_aes_xts_process_data()
1666 atmel_aes_write_block(dd, AES_TWR(0), tweak); in atmel_aes_xts_process_data()
1667 atmel_aes_write_block(dd, AES_ALPHAR(0), one); in atmel_aes_xts_process_data()
1669 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_xts_process_data()
1673 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_xts_process_data()
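
XTS runs in two phases: start() ECB-encrypts req->iv under the second key half to obtain the initial tweak (lines 1634-1641), then process_data() reads it back, programs AES_TWR with it and AES_ALPHAR with the constant one (apparently alpha^0, i.e. begin at the first block), letting the hardware advance the tweak itself. That per-block advance is multiplication by alpha in GF(2^128); for reference:

    #include <stdint.h>

    /* Multiply an XTS tweak by alpha (x) over x^128 + x^7 + x^2 + x + 1,
     * little-endian block layout per IEEE P1619. */
    static void xts_mul_alpha(uint8_t t[16])
    {
        uint8_t carry = 0;
        for (int i = 0; i < 16; i++) {
            uint8_t msb = t[i] >> 7;
            t[i] = (uint8_t)((t[i] << 1) | carry);
            carry = msb;
        }
        if (carry)
            t[0] ^= 0x87;           /* reduce by the field polynomial */
    }
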
1714 struct atmel_aes_dev *dd; in atmel_aes_xts_init_tfm() local
1717 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_xts_init_tfm()
1718 if (!dd) in atmel_aes_xts_init_tfm()
1728 ctx->base.dd = dd; in atmel_aes_xts_init_tfm()
1761 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
1762 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
1764 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
1766 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
1767 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
1770 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_authenc_complete() argument
1772 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_complete()
1775 if (err && (dd->flags & AES_FLAGS_OWN_SHA)) in atmel_aes_authenc_complete()
1777 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_complete()
1780 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd) in atmel_aes_authenc_start() argument
1782 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_start()
1788 atmel_aes_set_mode(dd, &rctx->base); in atmel_aes_authenc_start()
1790 err = atmel_aes_hw_init(dd); in atmel_aes_authenc_start()
1792 return atmel_aes_complete(dd, err); in atmel_aes_authenc_start()
1795 atmel_aes_authenc_init, dd); in atmel_aes_authenc_start()
1798 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_init() argument
1801 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_init()
1805 dd->is_async = true; in atmel_aes_authenc_init()
1807 return atmel_aes_complete(dd, err); in atmel_aes_authenc_init()
1810 dd->flags |= AES_FLAGS_OWN_SHA; in atmel_aes_authenc_init()
1816 atmel_aes_authenc_transfer, dd); in atmel_aes_authenc_init()
1819 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_transfer() argument
1822 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_transfer()
1824 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_transfer()
1830 dd->is_async = true; in atmel_aes_authenc_transfer()
1832 return atmel_aes_complete(dd, err); in atmel_aes_authenc_transfer()
1851 atmel_aes_write_ctrl(dd, true, iv); in atmel_aes_authenc_transfer()
1855 atmel_aes_write(dd, AES_EMR, emr); in atmel_aes_authenc_transfer()
1858 return atmel_aes_dma_start(dd, src, dst, rctx->textlen, in atmel_aes_authenc_transfer()
1862 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd) in atmel_aes_authenc_digest() argument
1864 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_digest()
1868 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_digest()
1871 atmel_aes_authenc_final, dd); in atmel_aes_authenc_digest()
1874 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_final() argument
1877 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_final()
1880 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_final()
1885 dd->is_async = true; in atmel_aes_authenc_final()
1900 return atmel_aes_complete(dd, err); in atmel_aes_authenc_final()
1942 struct atmel_aes_dev *dd; in atmel_aes_authenc_init_tfm() local
1944 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_authenc_init_tfm()
1945 if (!dd) in atmel_aes_authenc_init_tfm()
1954 ctx->base.dd = dd; in atmel_aes_authenc_init_tfm()
2018 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_authenc_crypt()
2117 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) in atmel_aes_buff_init() argument
2119 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER); in atmel_aes_buff_init()
2120 dd->buflen = ATMEL_AES_BUFFER_SIZE; in atmel_aes_buff_init()
2121 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
2123 if (!dd->buf) { in atmel_aes_buff_init()
2124 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
2131 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd) in atmel_aes_buff_cleanup() argument
2133 free_page((unsigned long)dd->buf); in atmel_aes_buff_cleanup()
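
One pairing worth flagging: dd->buf comes from __get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER) at line 2119, but the cleanup above calls free_page(), which frees order 0. The two only match if ATMEL_AES_BUFFER_ORDER is 0; its definition is outside this listing, so treat this as something to verify rather than a confirmed leak. The conventional pairing:

    #include <linux/gfp.h>

    /* Release an order-N allocation made with __get_free_pages(). */
    static void buf_free(unsigned long buf, unsigned int order)
    {
        free_pages(buf, order);
    }
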
2136 static int atmel_aes_dma_init(struct atmel_aes_dev *dd) in atmel_aes_dma_init() argument
2141 dd->src.chan = dma_request_chan(dd->dev, "tx"); in atmel_aes_dma_init()
2142 if (IS_ERR(dd->src.chan)) { in atmel_aes_dma_init()
2143 ret = PTR_ERR(dd->src.chan); in atmel_aes_dma_init()
2147 dd->dst.chan = dma_request_chan(dd->dev, "rx"); in atmel_aes_dma_init()
2148 if (IS_ERR(dd->dst.chan)) { in atmel_aes_dma_init()
2149 ret = PTR_ERR(dd->dst.chan); in atmel_aes_dma_init()
2156 dma_release_channel(dd->src.chan); in atmel_aes_dma_init()
2158 dev_err(dd->dev, "no DMA channel available\n"); in atmel_aes_dma_init()
2162 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) in atmel_aes_dma_cleanup() argument
2164 dma_release_channel(dd->dst.chan); in atmel_aes_dma_cleanup()
2165 dma_release_channel(dd->src.chan); in atmel_aes_dma_cleanup()
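
Channel setup requests the two named dmaengine channels and unwinds "tx" if "rx" fails; teardown releases them in reverse order. A kernel-context sketch of the same pairing:

    #include <linux/device.h>
    #include <linux/dmaengine.h>
    #include <linux/err.h>

    static int request_channels(struct device *dev,
                                struct dma_chan **tx, struct dma_chan **rx)
    {
        *tx = dma_request_chan(dev, "tx");
        if (IS_ERR(*tx))
            return PTR_ERR(*tx);

        *rx = dma_request_chan(dev, "rx");
        if (IS_ERR(*rx)) {
            dma_release_channel(*tx);    /* unwind the first request */
            return PTR_ERR(*rx);
        }
        return 0;
    }
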
2170 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_queue_task() local
2172 atmel_aes_handle_queue(dd, NULL); in atmel_aes_queue_task()
2177 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_done_task() local
2179 dd->is_async = true; in atmel_aes_done_task()
2180 (void)dd->resume(dd); in atmel_aes_done_task()
2201 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd) in atmel_aes_unregister_algs() argument
2206 if (dd->caps.has_authenc) in atmel_aes_unregister_algs()
2211 if (dd->caps.has_xts) in atmel_aes_unregister_algs()
2214 if (dd->caps.has_gcm) in atmel_aes_unregister_algs()
2229 static int atmel_aes_register_algs(struct atmel_aes_dev *dd) in atmel_aes_register_algs() argument
2241 if (dd->caps.has_gcm) { in atmel_aes_register_algs()
2249 if (dd->caps.has_xts) { in atmel_aes_register_algs()
2258 if (dd->caps.has_authenc) { in atmel_aes_register_algs()
2289 static void atmel_aes_get_cap(struct atmel_aes_dev *dd) in atmel_aes_get_cap() argument
2291 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
2292 dd->caps.has_gcm = 0; in atmel_aes_get_cap()
2293 dd->caps.has_xts = 0; in atmel_aes_get_cap()
2294 dd->caps.has_authenc = 0; in atmel_aes_get_cap()
2295 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
2298 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
2302 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2303 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2304 dd->caps.has_xts = 1; in atmel_aes_get_cap()
2305 dd->caps.has_authenc = 1; in atmel_aes_get_cap()
2306 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2309 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2310 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2311 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2314 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2315 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2320 dev_warn(dd->dev, in atmel_aes_get_cap()
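
Capability detection starts from the most conservative feature set and widens it per IP revision, keyed on (hw_version & 0xff0); unknown revisions fall through to the dev_warn() above. The revision constants live in the elided case labels, so the REV_* comments below are placeholders, not the real values:

    struct caps_model {
        int has_dualbuff, has_gcm, has_xts, has_authenc;
        int max_burst_size;
    };

    static struct caps_model get_caps(unsigned int hw_version)
    {
        struct caps_model c = {
            .max_burst_size = 1,    /* conservative defaults (lines 2291-2295) */
        };

        switch (hw_version & 0xff0) {
        /* case REV_NEWEST: everything on, max_burst_size = 4 (lines 2302-2306) */
        /* case REV_MID:    dualbuff + gcm, max_burst_size = 4 (lines 2309-2311) */
        /* case REV_OLD:    dualbuff only,  max_burst_size = 4 (lines 2314-2315) */
        default:
            break;                  /* unknown IP: warn and keep defaults */
        }
        return c;
    }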