Lines matching "tx" and "device": excerpts from the Linux kernel's async_tx XOR implementation (crypto/async_tx/async_xor.c). Only the matching lines are shown; elided code is marked /* ... */.

// SPDX-License-Identifier: GPL-2.0-only

#include <linux/dma-mapping.h>
/* do_async_xor - dma map the pages and perform the xor with an engine */

From do_async_xor():

	struct dma_device *dma = chan->device;
	struct dma_async_tx_descriptor *tx = NULL;
	dma_async_tx_callback cb_fn_orig = submit->cb_fn;
	void *cb_param_orig = submit->cb_param;
	enum async_tx_flags flags_orig = submit->flags;
	int src_cnt = unmap->to_cnt;
	dma_addr_t dma_dest = unmap->addr[unmap->to_cnt];
	dma_addr_t *src_list = unmap->addr;

	/* ... */
		submit->flags = flags_orig;
		xor_src_cnt = min(src_cnt, (int)dma->max_xor);
		/* ... */
			submit->flags &= ~ASYNC_TX_ACK;
			submit->flags |= ASYNC_TX_FENCE;
			submit->cb_fn = NULL;
			submit->cb_param = NULL;
		/* ... */
			submit->cb_fn = cb_fn_orig;
			submit->cb_param = cb_param_orig;
		/* ... */
		if (submit->cb_fn)
			dma_flags |= DMA_PREP_INTERRUPT;
		if (submit->flags & ASYNC_TX_FENCE)
			dma_flags |= DMA_PREP_FENCE;
		/* ... */
		if (src_list > unmap->addr)
			src_list[0] = dma_dest;
		tx = dma->device_prep_dma_xor(chan, dma_dest, src_list,
					      xor_src_cnt, unmap->len,
					      dma_flags);

		if (unlikely(!tx))
			async_tx_quiesce(&submit->depend_tx);

		while (unlikely(!tx)) {
			/* ... */
			tx = dma->device_prep_dma_xor(chan, dma_dest,
						      src_list,
						      xor_src_cnt, unmap->len,
						      dma_flags);
		}
		/* ... */
		dma_set_unmap(tx, unmap);
		async_tx_submit(chan, tx, submit);
		submit->depend_tx = tx;
		/* ... */
		src_cnt -= xor_src_cnt;
		/* ... */
		src_list += xor_src_cnt - 1;
	/* ... */

	return tx;
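The loop above splits one logical XOR into hardware-sized passes: each pass takes at most dma->max_xor sources, and its result re-enters the next pass as a source (which is also why the destination page is mapped bidirectionally). A minimal sketch, not taken from the kernel source and with an invented helper name, of the descriptor-count accounting that loop implies:

/* Illustrative only (invented helper): mirrors the accounting of the loop
 * above, where completed sources drop out and the partial result re-enters
 * as one new source of the next pass. */
static int xor_segments_needed(int src_cnt, int max_xor)
{
	int segments = 0;

	while (src_cnt) {
		int xor_src_cnt = src_cnt < max_xor ? src_cnt : max_xor;

		segments++;
		if (src_cnt > xor_src_cnt)
			src_cnt = src_cnt - xor_src_cnt + 1;
		else
			break;
	}
	return segments;
}

For example, 7 sources on an engine with max_xor = 4 need two descriptors: the first consumes 4 sources, the second consumes the intermediate result plus the remaining 3.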
From do_sync_xor_offs():

	if (submit->scribble)
		srcs = submit->scribble;
	/* ... */
	if (submit->flags & ASYNC_TX_XOR_ZERO_DST)
		memset(dest_buf, 0, len);
	/* ... */
		src_cnt -= xor_src_cnt;
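The synchronous path resolves the pages to CPU addresses and hands them to the generic xor_blocks() helper in batches of at most MAX_XOR_BLOCKS, zeroing the destination first when ASYNC_TX_XOR_ZERO_DST is set. A minimal sketch of that shape, assuming plain kernel virtual buffers instead of page/offset pairs (the function name is invented):

#include <linux/minmax.h>
#include <linux/raid/xor.h>
#include <linux/string.h>
#include <linux/types.h>

/* Illustrative sketch of the CPU fallback: XOR src_cnt buffers into dest,
 * batching at most MAX_XOR_BLOCKS sources per xor_blocks() call. */
static void sync_xor_sketch(void *dest, void **srcs, int src_cnt,
			    size_t len, bool zero_dest)
{
	int src_off = 0;

	if (zero_dest)	/* ASYNC_TX_XOR_ZERO_DST semantics */
		memset(dest, 0, len);

	while (src_cnt > 0) {
		int n = min(src_cnt, MAX_XOR_BLOCKS);

		xor_blocks(n, len, dest, &srcs[src_off]);
		src_cnt -= n;
		src_off += n;
	}
}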
From dma_xor_aligned_offsets(struct dma_device *device, unsigned int offset, ...):

	if (!is_dma_xor_aligned(device, offset, 0, len))
		return false;
	/* ... */
	if (!is_dma_xor_aligned(device, src_offs[i], 0, len))
		return false;
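is_dma_xor_aligned() reduces to a mask test against the alignment the DMA device advertises for XOR: every offset and the length must be a multiple of that power-of-two granule. A rough restatement of the check, with an invented helper name:

#include <linux/types.h>

/* Illustrative restatement: true if 'off' and 'len' are both multiples of
 * the 1 << align_shift granule a DMA engine advertises for xor transfers. */
static bool xor_params_aligned(unsigned int align_shift, size_t off, size_t len)
{
	size_t mask = ((size_t)1 << align_shift) - 1;

	return ((off | len) & mask) == 0;
}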
/**
 * async_xor_offs - attempt to xor a set of blocks with a dma engine.
 */

From async_xor_offs():

	struct dma_device *device = chan ? chan->device : NULL;
	/* ... */
	if (device)
		unmap = dmaengine_get_unmap_data(device->dev, src_cnt+1, GFP_NOWAIT);

	if (unmap && dma_xor_aligned_offsets(device, offset,
					     src_offs, src_cnt, len)) {
		struct dma_async_tx_descriptor *tx;
		/* ... */
		unmap->len = len;
		/* ... */
			unmap->to_cnt++;
			unmap->addr[j++] = dma_map_page(device->dev, src_list[i],
					src_offs ? src_offs[i] : offset,
					len, DMA_TO_DEVICE);
		/* map it bidirectional as it may be re-used as a source */
		unmap->addr[j] = dma_map_page(device->dev, dest, offset, len,
					      DMA_BIDIRECTIONAL);
		unmap->bidi_cnt = 1;
		tx = do_async_xor(chan, unmap, submit);
		return tx;
	} else {
		/* run the xor synchronously */
		if (submit->flags & ASYNC_TX_XOR_DROP_DST) {
			src_cnt--;
			/* ... */
		}
		async_tx_quiesce(&submit->depend_tx);
	}
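A hedged usage sketch for async_xor_offs(): generate parity into a zeroed destination from sources whose data sits at per-page offsets. The callback, the completion, and every name outside the async_tx API are illustrative assumptions, not taken from the listing:

#include <linux/async_tx.h>
#include <linux/completion.h>

/* Illustrative completion callback, not part of the listing above. */
static void xor_done(void *param)
{
	complete(param);
}

static void start_parity_xor(struct page *dest, struct page **srcs,
			     unsigned int *src_offs, int src_cnt,
			     size_t len, struct completion *done)
{
	struct async_submit_ctl submit;

	/* zero the destination, XOR every source into it, signal 'done' */
	init_async_submit(&submit, ASYNC_TX_XOR_ZERO_DST | ASYNC_TX_ACK,
			  NULL, xor_done, done, NULL);
	async_xor_offs(dest, 0, srcs, src_offs, src_cnt, len, &submit);

	/* flush queued descriptors; the caller waits on 'done' before
	 * reading dest */
	async_tx_issue_pending_all();
}

With a capable dmaengine channel the XOR is offloaded; otherwise the call falls through to the synchronous path shown earlier, and the callback still runs.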
/**
 * async_xor - attempt to xor a set of blocks with a dma engine.
 */
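async_xor() is the common-offset wrapper around the same path (it passes src_offs == NULL). A sketch of the other flag variant, where the destination also participates as a source: per the async_tx flag semantics the destination must then be src_list[0] and the caller sets ASYNC_TX_XOR_DROP_DST instead of ASYNC_TX_XOR_ZERO_DST. Everything outside the async_tx API here is an invented name:

#include <linux/async_tx.h>
#include <linux/kernel.h>

/* Illustrative: XOR 'new_blocks' into 'acc', which already holds data. */
static struct dma_async_tx_descriptor *
fold_into_accumulator(struct page *acc, struct page **new_blocks, int n,
		      size_t len, struct dma_async_tx_descriptor *depend_tx)
{
	struct page *srcs[8];	/* illustrative fixed bound */
	struct async_submit_ctl submit;
	int i;

	if (n + 1 > (int)ARRAY_SIZE(srcs))
		return NULL;

	/* when the destination is also a source it must be src_list[0] */
	srcs[0] = acc;
	for (i = 0; i < n; i++)
		srcs[i + 1] = new_blocks[i];

	init_async_submit(&submit, ASYNC_TX_XOR_DROP_DST, depend_tx,
			  NULL, NULL, NULL);
	return async_xor(acc, srcs, 0, n + 1, len, &submit);
}

No callback is set and ASYNC_TX_ACK is not passed, so the returned descriptor is meant to be consumed as the depend_tx of a follow-on operation, which is how the raid code typically chains these.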
/**
 * async_xor_val_offs - attempt a xor parity check with a dma engine.
 * ...
 * @result: 0 if sum == 0 else non-zero
 */
From async_xor_val_offs():

	struct dma_device *device = chan ? chan->device : NULL;
	struct dma_async_tx_descriptor *tx = NULL;
	/* ... */
	if (device)
		unmap = dmaengine_get_unmap_data(device->dev, src_cnt, GFP_NOWAIT);

	if (unmap && src_cnt <= device->max_xor &&
	    dma_xor_aligned_offsets(device, offset, src_offs, src_cnt, len)) {
		/* ... */
		if (submit->cb_fn)
			dma_prep_flags |= DMA_PREP_INTERRUPT;
		if (submit->flags & ASYNC_TX_FENCE)
			dma_prep_flags |= DMA_PREP_FENCE;
		/* ... */
			unmap->addr[i] = dma_map_page(device->dev, src_list[i],
					src_offs ? src_offs[i] : offset,
					len, DMA_TO_DEVICE);
			unmap->to_cnt++;
		/* ... */
		unmap->len = len;

		tx = device->device_prep_dma_xor_val(chan, unmap->addr, src_cnt,
						     len, result, dma_prep_flags);
		if (unlikely(!tx)) {
			async_tx_quiesce(&submit->depend_tx);

			while (!tx) {
				/* ... */
				tx = device->device_prep_dma_xor_val(chan,
					unmap->addr, src_cnt, len, result,
					dma_prep_flags);
			}
		}
		dma_set_unmap(tx, unmap);
		async_tx_submit(chan, tx, submit);
	} else {
		enum async_tx_flags flags_orig = submit->flags;
		/* ... */
		WARN_ONCE(device && src_cnt <= device->max_xor,
			  "%s: no space for dma address conversion\n", __func__);
		submit->flags |= ASYNC_TX_XOR_DROP_DST;
		submit->flags &= ~ASYNC_TX_ACK;

		tx = async_xor_offs(dest, offset, src_list, src_offs,
				    src_cnt, len, submit);

		async_tx_quiesce(&tx);
		/* ... */
		submit->flags = flags_orig;
	}
	/* ... */
	return tx;
/**
 * async_xor_val - attempt a xor parity check with a dma engine.
 * ...
 * @result: 0 if sum == 0 else non-zero
 */
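A hedged usage sketch for async_xor_val(): check that a parity block is consistent with its data blocks by validating that their combined XOR is zero. The SUM_CHECK_P_RESULT test mirrors how the result flags are consumed elsewhere in the kernel; the surrounding names and the fixed array bound are illustrative:

#include <linux/async_tx.h>
#include <linux/kernel.h>

/* Illustrative: returns true when parity checks out. Blocks until the
 * (possibly offloaded) validation completes. */
static bool parity_is_good(struct page *parity, struct page **data,
			   int data_cnt, size_t len)
{
	struct page *srcs[8];	/* illustrative fixed bound */
	struct async_submit_ctl submit;
	struct dma_async_tx_descriptor *tx;
	enum sum_check_flags result = 0;
	int i;

	if (data_cnt + 1 > (int)ARRAY_SIZE(srcs))
		return false;

	/* XOR of parity plus all data blocks is zero when parity is good */
	srcs[0] = parity;
	for (i = 0; i < data_cnt; i++)
		srcs[i + 1] = data[i];

	init_async_submit(&submit, ASYNC_TX_ACK, NULL, NULL, NULL, NULL);
	tx = async_xor_val(parity, srcs, 0, data_cnt + 1, len, &result,
			   &submit);

	async_tx_issue_pending_all();
	async_tx_quiesce(&tx);	/* block until 'result' is valid */

	return !(result & SUM_CHECK_P_RESULT);
}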
MODULE_DESCRIPTION("asynchronous xor/xor-zero-sum api");