Lines matching references to `xfer`

130 	void *xfer, *resp, *ibi_status, *ibi_data;  member
167 if (rh->xfer) in hci_dma_cleanup()
170 rh->xfer, rh->xfer_dma); in hci_dma_cleanup()
242 rh->xfer = dma_alloc_coherent(&hci->master.dev, xfers_sz, in hci_dma_init()
250 if (!rh->xfer || !rh->resp || !rh->src_xfers) in hci_dma_init()
345 struct hci_xfer *xfer; in hci_dma_unmap_xfer() local
349 xfer = xfer_list + i; in hci_dma_unmap_xfer()
350 if (!xfer->data) in hci_dma_unmap_xfer()
353 xfer->data_dma, xfer->data_len, in hci_dma_unmap_xfer()
354 xfer->rnw ? DMA_FROM_DEVICE : DMA_TO_DEVICE); in hci_dma_unmap_xfer()
374 struct hci_xfer *xfer = xfer_list + i; in hci_dma_queue_xfer() local
375 u32 *ring_data = rh->xfer + rh->xfer_struct_sz * enqueue_ptr; in hci_dma_queue_xfer()
378 *ring_data++ = xfer->cmd_desc[0]; in hci_dma_queue_xfer()
379 *ring_data++ = xfer->cmd_desc[1]; in hci_dma_queue_xfer()
381 *ring_data++ = xfer->cmd_desc[2]; in hci_dma_queue_xfer()
382 *ring_data++ = xfer->cmd_desc[3]; in hci_dma_queue_xfer()
386 if (!xfer->data) in hci_dma_queue_xfer()
387 xfer->data_len = 0; in hci_dma_queue_xfer()
389 FIELD_PREP(DATA_BUF_BLOCK_SIZE, xfer->data_len) | in hci_dma_queue_xfer()
393 if (xfer->data) { in hci_dma_queue_xfer()
394 buf = xfer->bounce_buf ? xfer->bounce_buf : xfer->data; in hci_dma_queue_xfer()
395 xfer->data_dma = in hci_dma_queue_xfer()
398 xfer->data_len, in hci_dma_queue_xfer()
399 xfer->rnw ? in hci_dma_queue_xfer()
403 xfer->data_dma)) { in hci_dma_queue_xfer()
407 *ring_data++ = lower_32_bits(xfer->data_dma); in hci_dma_queue_xfer()
408 *ring_data++ = upper_32_bits(xfer->data_dma); in hci_dma_queue_xfer()
415 rh->src_xfers[enqueue_ptr] = xfer; in hci_dma_queue_xfer()
417 xfer->ring_number = ring; in hci_dma_queue_xfer()
418 xfer->ring_entry = enqueue_ptr; in hci_dma_queue_xfer()
465 struct hci_xfer *xfer = xfer_list + i; in hci_dma_dequeue_xfer() local
466 int idx = xfer->ring_entry; in hci_dma_dequeue_xfer()
474 u32 *ring_data = rh->xfer + rh->xfer_struct_sz * idx; in hci_dma_dequeue_xfer()
488 hci_dma_unmap_xfer(hci, xfer, 1); in hci_dma_dequeue_xfer()
504 struct hci_xfer *xfer; in hci_dma_xfer_done() local
516 xfer = rh->src_xfers[done_ptr]; in hci_dma_xfer_done()
517 if (!xfer) { in hci_dma_xfer_done()
520 hci_dma_unmap_xfer(hci, xfer, 1); in hci_dma_xfer_done()
521 xfer->ring_entry = -1; in hci_dma_xfer_done()
522 xfer->response = resp; in hci_dma_xfer_done()
523 if (tid != xfer->cmd_tid) { in hci_dma_xfer_done()
526 tid, xfer->cmd_tid); in hci_dma_xfer_done()
529 if (xfer->completion) in hci_dma_xfer_done()
530 complete(xfer->completion); in hci_dma_xfer_done()