Lines matching "engine" in the crypto hardware engine framework (crypto_engine.c)
3 * Handle async block requests by the crypto hardware engine.
12 #include <crypto/internal/engine.h>
36 * @engine: the hardware engine
40 static void crypto_finalize_request(struct crypto_engine *engine, in crypto_finalize_request() argument
50 if (!engine->retry_support) { in crypto_finalize_request()
51 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_finalize_request()
52 if (engine->cur_req == req) { in crypto_finalize_request()
53 engine->cur_req = NULL; in crypto_finalize_request()
55 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_finalize_request()
61 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_finalize_request()
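To show where this lands in a driver, here is a minimal sketch of the completion side, assuming a hypothetical per-device context (struct my_dev, dd->engine and dd->req are illustrative names, not part of this file). The driver's "hardware done" path hands the request back through one of the crypto_finalize_*_request() wrappers listed further down, all of which funnel into crypto_finalize_request():

/* Hypothetical per-device context; names are illustrative only. */
struct my_dev {
	struct crypto_engine *engine;
	struct skcipher_request *req;	/* request currently on the hardware */
};

/* Called from the driver's "hardware done" path (e.g. a threaded IRQ). */
static void my_dev_xfer_done(struct my_dev *dd, int err)
{
	struct skcipher_request *req = dd->req;

	dd->req = NULL;
	/*
	 * Completes req->base, clears engine->cur_req when the engine runs
	 * without retry support, and queues pump_requests so the next
	 * request gets dequeued.
	 */
	crypto_finalize_skcipher_request(dd->engine, req, err);
}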
65 * crypto_pump_requests - dequeue one request from engine queue to process
66 * @engine: the hardware engine
69 * This function checks if there is any request in the engine queue that
73 static void crypto_pump_requests(struct crypto_engine *engine, in crypto_pump_requests() argument
83 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
86 if (!engine->retry_support && engine->cur_req) in crypto_pump_requests()
90 if (engine->idling) { in crypto_pump_requests()
91 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_pump_requests()
95 /* Check if the engine queue is idle */ in crypto_pump_requests()
96 if (!crypto_queue_len(&engine->queue) || !engine->running) { in crypto_pump_requests()
97 if (!engine->busy) in crypto_pump_requests()
102 kthread_queue_work(engine->kworker, in crypto_pump_requests()
103 &engine->pump_requests); in crypto_pump_requests()
107 engine->busy = false; in crypto_pump_requests()
108 engine->idling = true; in crypto_pump_requests()
109 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
111 if (engine->unprepare_crypt_hardware && in crypto_pump_requests()
112 engine->unprepare_crypt_hardware(engine)) in crypto_pump_requests()
113 dev_err(engine->dev, "failed to unprepare crypt hardware\n"); in crypto_pump_requests()
115 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
116 engine->idling = false; in crypto_pump_requests()
121 /* Get the first request from the engine queue to handle */ in crypto_pump_requests()
122 backlog = crypto_get_backlog(&engine->queue); in crypto_pump_requests()
123 async_req = crypto_dequeue_request(&engine->queue); in crypto_pump_requests()
132 if (!engine->retry_support) in crypto_pump_requests()
133 engine->cur_req = async_req; in crypto_pump_requests()
135 if (engine->busy) in crypto_pump_requests()
138 engine->busy = true; in crypto_pump_requests()
140 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
143 if (!was_busy && engine->prepare_crypt_hardware) { in crypto_pump_requests()
144 ret = engine->prepare_crypt_hardware(engine); in crypto_pump_requests()
146 dev_err(engine->dev, "failed to prepare crypt hardware\n"); in crypto_pump_requests()
156 dev_err(engine->dev, "failed to do request\n"); in crypto_pump_requests()
161 ret = op->do_one_request(engine, async_req); in crypto_pump_requests()
170 if (!engine->retry_support || in crypto_pump_requests()
172 dev_err(engine->dev, in crypto_pump_requests()
177 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
180 * back in front of crypto-engine queue, to keep the order in crypto_pump_requests()
183 crypto_enqueue_request_head(&engine->queue, async_req); in crypto_pump_requests()
185 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_pump_requests()
198 /* If retry mechanism is supported, send new requests to engine */ in crypto_pump_requests()
199 if (engine->retry_support) { in crypto_pump_requests()
200 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_pump_requests()
206 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_pump_requests()
212 if (engine->do_batch_requests) { in crypto_pump_requests()
213 ret = engine->do_batch_requests(engine); in crypto_pump_requests()
215 dev_err(engine->dev, "failed to do batch requests: %d\n", in crypto_pump_requests()
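The callback invoked above as op->do_one_request(engine, async_req) comes from the driver. Below is a hedged sketch of such a handler, reusing the hypothetical my_dev context and illustrative my_dev_* helpers: it either programs the hardware and returns 0 (the request is finalized later from the completion path), or, when the engine was created with retry_support, returns -ENOSPC so that the request is put back at the head of the queue; any other error completes the request with that error.

/* Hypothetical do_one_request handler; helper names are illustrative. */
static int my_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct my_dev *dd = my_dev_from_request(req);	/* illustrative lookup */

	/* With retry_support, -ENOSPC re-queues the request at the head. */
	if (!my_dev_hw_has_room(dd))
		return -ENOSPC;

	dd->req = req;
	my_dev_program_hw(dd, req);	/* the completion path finalizes req */

	return 0;	/* accepted; do not finalize it here */
}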
224 struct crypto_engine *engine = in crypto_pump_work() local
227 crypto_pump_requests(engine, true); in crypto_pump_work()
231 * crypto_transfer_request - transfer the new request into the engine queue
232 * @engine: the hardware engine
233 * @req: the request that needs to be listed into the engine queue
236 static int crypto_transfer_request(struct crypto_engine *engine, in crypto_transfer_request() argument
243 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_transfer_request()
245 if (!engine->running) { in crypto_transfer_request()
246 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_transfer_request()
250 ret = crypto_enqueue_request(&engine->queue, req); in crypto_transfer_request()
252 if (!engine->busy && need_pump) in crypto_transfer_request()
253 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_transfer_request()
255 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_transfer_request()
261 * into the engine queue
262 * @engine: the hardware engine
263 * @req: the request that needs to be listed into the engine queue
265 static int crypto_transfer_request_to_engine(struct crypto_engine *engine, in crypto_transfer_request_to_engine() argument
268 return crypto_transfer_request(engine, req, true); in crypto_transfer_request_to_engine()
273 * to list into the engine queue
274 * @engine: the hardware engine
275 * @req: the request that needs to be listed into the engine queue
277 int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine, in crypto_transfer_aead_request_to_engine() argument
280 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_aead_request_to_engine()
286 * to list into the engine queue
287 * @engine: the hardware engine
288 * @req: the request that needs to be listed into the engine queue
290 int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine, in crypto_transfer_akcipher_request_to_engine() argument
293 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_akcipher_request_to_engine()
299 * to list into the engine queue
300 * @engine: the hardware engine
301 * @req: the request that needs to be listed into the engine queue
303 int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine, in crypto_transfer_hash_request_to_engine() argument
306 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_hash_request_to_engine()
312 * into the engine queue
313 * @engine: the hardware engine
314 * @req: the request that needs to be listed into the engine queue
316 int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine, in crypto_transfer_kpp_request_to_engine() argument
319 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_kpp_request_to_engine()
325 * to list into the engine queue
326 * @engine: the hardware engine
327 * @req: the request that needs to be listed into the engine queue
329 int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine, in crypto_transfer_skcipher_request_to_engine() argument
332 return crypto_transfer_request_to_engine(engine, &req->base); in crypto_transfer_skcipher_request_to_engine()
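On the submission side, a driver's algorithm entry points normally do nothing except hand the request to one of these wrappers. A hedged sketch for the skcipher case, again with illustrative my_dev helpers:

/* Hypothetical .encrypt entry point; the lookup helper is illustrative. */
static int my_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_dev *dd = my_dev_from_tfm(tfm);

	/*
	 * Enqueues the request and, if the engine is not already busy,
	 * queues pump_requests; returns -EINPROGRESS when queued, or
	 * -EBUSY when accepted as a backlog request.
	 */
	return crypto_transfer_skcipher_request_to_engine(dd->engine, req);
}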
339 * @engine: the hardware engine
343 void crypto_finalize_aead_request(struct crypto_engine *engine, in crypto_finalize_aead_request() argument
346 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_aead_request()
353 * @engine: the hardware engine
357 void crypto_finalize_akcipher_request(struct crypto_engine *engine, in crypto_finalize_akcipher_request() argument
360 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_akcipher_request()
367 * @engine: the hardware engine
371 void crypto_finalize_hash_request(struct crypto_engine *engine, in crypto_finalize_hash_request() argument
374 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_hash_request()
380 * @engine: the hardware engine
384 void crypto_finalize_kpp_request(struct crypto_engine *engine, in crypto_finalize_kpp_request() argument
387 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_kpp_request()
394 * @engine: the hardware engine
398 void crypto_finalize_skcipher_request(struct crypto_engine *engine, in crypto_finalize_skcipher_request() argument
401 return crypto_finalize_request(engine, &req->base, err); in crypto_finalize_skcipher_request()
406 * crypto_engine_start - start the hardware engine
407 * @engine: the hardware engine that needs to be started
411 int crypto_engine_start(struct crypto_engine *engine) in crypto_engine_start() argument
415 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_engine_start()
417 if (engine->running || engine->busy) { in crypto_engine_start()
418 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_start()
422 engine->running = true; in crypto_engine_start()
423 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_start()
425 kthread_queue_work(engine->kworker, &engine->pump_requests); in crypto_engine_start()
432 * crypto_engine_stop - stop the hardware engine
433 * @engine: the hardware engine that needs to be stopped
437 int crypto_engine_stop(struct crypto_engine *engine) in crypto_engine_stop() argument
443 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_engine_stop()
446 * If the engine queue is not empty or the engine is busy, we need in crypto_engine_stop()
447 * to wait for a while so the requests in the engine queue get pumped. in crypto_engine_stop()
449 while ((crypto_queue_len(&engine->queue) || engine->busy) && limit--) { in crypto_engine_stop()
450 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_stop()
452 spin_lock_irqsave(&engine->queue_lock, flags); in crypto_engine_stop()
455 if (crypto_queue_len(&engine->queue) || engine->busy) in crypto_engine_stop()
458 engine->running = false; in crypto_engine_stop()
460 spin_unlock_irqrestore(&engine->queue_lock, flags); in crypto_engine_stop()
463 dev_warn(engine->dev, "could not stop engine\n"); in crypto_engine_stop()
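Because the drain loop above gives up after a bounded number of retries, crypto_engine_stop() can fail with -EBUSY. A hedged sketch of a caller quiescing the engine, for example around a hypothetical low-power transition (the my_dev_hw_* hooks are illustrative):

/* Hypothetical quiesce/resume pair; the hardware hooks are illustrative. */
static int my_dev_quiesce(struct my_dev *dd)
{
	int ret;

	ret = crypto_engine_stop(dd->engine);	/* -EBUSY if requests remain */
	if (ret)
		return ret;

	my_dev_hw_disable(dd);
	return 0;
}

static int my_dev_wakeup(struct my_dev *dd)
{
	my_dev_hw_enable(dd);
	return crypto_engine_start(dd->engine);	/* restarts the pump kworker */
}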
470 * crypto_engine_alloc_init_and_set - allocate crypto hardware engine structure
472 * crypto-engine queue.
473 * @dev: the device the hardware engine is attached to
478 * callback(struct crypto_engine *engine)
480 * engine: the crypto engine structure.
482 * @qlen: maximum size of the crypto-engine queue
485 * Return: the crypto engine structure on success, else NULL.
489 int (*cbk_do_batch)(struct crypto_engine *engine), in crypto_engine_alloc_init_and_set() argument
492 struct crypto_engine *engine; in crypto_engine_alloc_init_and_set() local
497 engine = devm_kzalloc(dev, sizeof(*engine), GFP_KERNEL); in crypto_engine_alloc_init_and_set()
498 if (!engine) in crypto_engine_alloc_init_and_set()
501 engine->dev = dev; in crypto_engine_alloc_init_and_set()
502 engine->rt = rt; in crypto_engine_alloc_init_and_set()
503 engine->running = false; in crypto_engine_alloc_init_and_set()
504 engine->busy = false; in crypto_engine_alloc_init_and_set()
505 engine->idling = false; in crypto_engine_alloc_init_and_set()
506 engine->retry_support = retry_support; in crypto_engine_alloc_init_and_set()
507 engine->priv_data = dev; in crypto_engine_alloc_init_and_set()
512 engine->do_batch_requests = retry_support ? cbk_do_batch : NULL; in crypto_engine_alloc_init_and_set()
514 snprintf(engine->name, sizeof(engine->name), in crypto_engine_alloc_init_and_set()
515 "%s-engine", dev_name(dev)); in crypto_engine_alloc_init_and_set()
517 crypto_init_queue(&engine->queue, qlen); in crypto_engine_alloc_init_and_set()
518 spin_lock_init(&engine->queue_lock); in crypto_engine_alloc_init_and_set()
520 engine->kworker = kthread_create_worker(0, "%s", engine->name); in crypto_engine_alloc_init_and_set()
521 if (IS_ERR(engine->kworker)) { in crypto_engine_alloc_init_and_set()
525 kthread_init_work(&engine->pump_requests, crypto_pump_work); in crypto_engine_alloc_init_and_set()
527 if (engine->rt) { in crypto_engine_alloc_init_and_set()
529 sched_set_fifo(engine->kworker->task); in crypto_engine_alloc_init_and_set()
532 return engine; in crypto_engine_alloc_init_and_set()
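A hedged sketch of a caller that opts into the retry/batch behaviour, assuming a hypothetical driver whose hardware queues several descriptors and is kicked with a single doorbell write (MY_QUEUE_LEN, my_dev_from_engine() and my_dev_kick_ring() are illustrative). The batch callback runs once crypto_pump_requests() stops pulling requests, typically because the queue drained or the hardware reported -ENOSPC, so the doorbell is written once per batch rather than once per request:

#define MY_QUEUE_LEN	16	/* illustrative software queue depth */

/* Hypothetical batch callback: one doorbell write for the whole batch. */
static int my_do_batch(struct crypto_engine *engine)
{
	struct my_dev *dd = my_dev_from_engine(engine);	/* illustrative lookup */

	return my_dev_kick_ring(dd);
}

/* Hypothetical probe-time setup; dd is the driver's private context. */
static int my_setup_engine(struct device *dev, struct my_dev *dd)
{
	dd->engine = crypto_engine_alloc_init_and_set(dev, true, my_do_batch,
						      false, MY_QUEUE_LEN);
	if (!dd->engine)
		return -ENOMEM;

	return crypto_engine_start(dd->engine);
}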
537 * crypto_engine_alloc_init - allocate crypto hardware engine structure and
539 * @dev: the device the hardware engine is attached to
543 * Return: the crypto engine structure on success, else NULL.
553 * crypto_engine_exit - free the resources of the hardware engine on exit
554 * @engine: the hardware engine that needs to be freed
556 void crypto_engine_exit(struct crypto_engine *engine) in crypto_engine_exit() argument
560 ret = crypto_engine_stop(engine); in crypto_engine_exit()
564 kthread_destroy_worker(engine->kworker); in crypto_engine_exit()
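A note on ordering at teardown: crypto_engine_exit() already calls crypto_engine_stop() before destroying the kworker, so a remove path only needs to make sure no new requests can arrive and then call it once. A hedged sketch, with the algorithm unregistration step left as an illustrative placeholder:

/* Hypothetical teardown helper; my_unregister_algs() is illustrative. */
static void my_dev_teardown(struct my_dev *dd)
{
	my_unregister_algs(dd);		/* stop new requests from arriving */
	crypto_engine_exit(dd->engine);	/* stops the engine, destroys the kworker */
}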
741 MODULE_DESCRIPTION("Crypto hardware engine framework");