Lines Matching full:rh

55 #define rh_reg_read(r)		readl(rh->regs + (RH_##r))
56 #define rh_reg_write(r, v)	writel(v, rh->regs + (RH_##r))
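
The two accessors above paste the register name onto an RH_ prefix, so every read and write is implicitly offset into the current ring header's MMIO window (rh->regs). A minimal userspace model of the same token-pasting pattern, with made-up offsets and a plain array standing in for readl()/writel() on an ioremapped region:

#include <stdio.h>
#include <stdint.h>

#define RH_RING_CONTROL		0x00	/* hypothetical offset, not the spec's */
#define RH_CMD_RING_BASE_LO	0x04	/* hypothetical offset, not the spec's */

static uint32_t regs[16];		/* stands in for the ioremapped rh->regs */

/* Same ##-pasting shape as the driver's macros, minus readl()/writel(). */
#define rh_reg_read(r)		(regs[(RH_##r) / 4])
#define rh_reg_write(r, v)	(regs[(RH_##r) / 4] = (v))

int main(void)
{
	rh_reg_write(CMD_RING_BASE_LO, 0x12345678);
	printf("%#x\n", (unsigned int)rh_reg_read(CMD_RING_BASE_LO));
	return 0;
}

The payoff is that call sites like rh_reg_write(CMD_RING_BASE_LO, ...) stay terse while still binding to the per-ring register block.
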
153 struct hci_rh_data *rh; in hci_dma_cleanup() local
160 rh = &rings->headers[i]; in hci_dma_cleanup()
167 if (rh->xfer) in hci_dma_cleanup()
169 rh->xfer_struct_sz * rh->xfer_entries, in hci_dma_cleanup()
170 rh->xfer, rh->xfer_dma); in hci_dma_cleanup()
171 if (rh->resp) in hci_dma_cleanup()
173 rh->resp_struct_sz * rh->xfer_entries, in hci_dma_cleanup()
174 rh->resp, rh->resp_dma); in hci_dma_cleanup()
175 kfree(rh->src_xfers); in hci_dma_cleanup()
176 if (rh->ibi_status) in hci_dma_cleanup()
178 rh->ibi_status_sz * rh->ibi_status_entries, in hci_dma_cleanup()
179 rh->ibi_status, rh->ibi_status_dma); in hci_dma_cleanup()
180 if (rh->ibi_data_dma) in hci_dma_cleanup()
181 dma_unmap_single(&hci->master.dev, rh->ibi_data_dma, in hci_dma_cleanup()
182 rh->ibi_chunk_sz * rh->ibi_chunks_total, in hci_dma_cleanup()
184 kfree(rh->ibi_data); in hci_dma_cleanup()
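
Read together, the cleanup matches show each resource released with the counterpart of its allocation: the coherent xfer/resp/ibi_status rings via dma_free_coherent(), the streaming-mapped IBI data pool via dma_unmap_single() then kfree(), and the plain src_xfers array via kfree(). A hedged reconstruction of the per-ring teardown, with the call heads the matcher elided filled in from the standard dma_free_coherent()/dma_unmap_single() signatures (the DMA_FROM_DEVICE direction is an assumption based on the device-to-memory IBI flow):

/* Hedged reconstruction of the per-ring teardown, not verbatim driver code. */
if (rh->xfer)
	dma_free_coherent(&hci->master.dev,
			  rh->xfer_struct_sz * rh->xfer_entries,
			  rh->xfer, rh->xfer_dma);
if (rh->resp)
	dma_free_coherent(&hci->master.dev,
			  rh->resp_struct_sz * rh->xfer_entries,
			  rh->resp, rh->resp_dma);
kfree(rh->src_xfers);
if (rh->ibi_status)
	dma_free_coherent(&hci->master.dev,
			  rh->ibi_status_sz * rh->ibi_status_entries,
			  rh->ibi_status, rh->ibi_status_dma);
if (rh->ibi_data_dma)
	dma_unmap_single(&hci->master.dev, rh->ibi_data_dma,
			 rh->ibi_chunk_sz * rh->ibi_chunks_total,
			 DMA_FROM_DEVICE);	/* direction assumed: device writes IBI data */
kfree(rh->ibi_data);
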
196 struct hci_rh_data *rh; in hci_dma_init() local
227 rh = &rings->headers[i]; in hci_dma_init()
228 rh->regs = hci->base_regs + offset; in hci_dma_init()
229 spin_lock_init(&rh->lock); in hci_dma_init()
230 init_completion(&rh->op_done); in hci_dma_init()
232 rh->xfer_entries = XFER_RING_ENTRIES; in hci_dma_init()
235 rh->xfer_struct_sz = FIELD_GET(CR_XFER_STRUCT_SIZE, regval); in hci_dma_init()
236 rh->resp_struct_sz = FIELD_GET(CR_RESP_STRUCT_SIZE, regval); in hci_dma_init()
238 rh->xfer_struct_sz, rh->resp_struct_sz); in hci_dma_init()
239 xfers_sz = rh->xfer_struct_sz * rh->xfer_entries; in hci_dma_init()
240 resps_sz = rh->resp_struct_sz * rh->xfer_entries; in hci_dma_init()
242 rh->xfer = dma_alloc_coherent(&hci->master.dev, xfers_sz, in hci_dma_init()
243 &rh->xfer_dma, GFP_KERNEL); in hci_dma_init()
244 rh->resp = dma_alloc_coherent(&hci->master.dev, resps_sz, in hci_dma_init()
245 &rh->resp_dma, GFP_KERNEL); in hci_dma_init()
246 rh->src_xfers = in hci_dma_init()
247 kmalloc_array(rh->xfer_entries, sizeof(*rh->src_xfers), in hci_dma_init()
250 if (!rh->xfer || !rh->resp || !rh->src_xfers) in hci_dma_init()
253 rh_reg_write(CMD_RING_BASE_LO, lower_32_bits(rh->xfer_dma)); in hci_dma_init()
254 rh_reg_write(CMD_RING_BASE_HI, upper_32_bits(rh->xfer_dma)); in hci_dma_init()
255 rh_reg_write(RESP_RING_BASE_LO, lower_32_bits(rh->resp_dma)); in hci_dma_init()
256 rh_reg_write(RESP_RING_BASE_HI, upper_32_bits(rh->resp_dma)); in hci_dma_init()
258 regval = FIELD_PREP(CR_RING_SIZE, rh->xfer_entries); in hci_dma_init()
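
Up to this point hci_dma_init() sizes both rings from the controller-reported descriptor sizes (CR_XFER_STRUCT_SIZE / CR_RESP_STRUCT_SIZE), allocates them coherently alongside a parallel src_xfers[] lookup array, and programs each 64-bit DMA base as a LO/HI register pair. A runnable sketch of just the size arithmetic and the 32-bit split; the concrete struct sizes and the bus address are invented for illustration:

#include <stdio.h>
#include <stdint.h>

#define XFER_RING_ENTRIES	16	/* stand-in for the driver's constant */

int main(void)
{
	/* Pretend CR_XFER_STRUCT_SIZE / CR_RESP_STRUCT_SIZE decoded to: */
	unsigned int xfer_struct_sz = 64, resp_struct_sz = 16;
	size_t xfers_sz = (size_t)xfer_struct_sz * XFER_RING_ENTRIES;
	size_t resps_sz = (size_t)resp_struct_sz * XFER_RING_ENTRIES;

	/* A fabricated 64-bit bus address, split as CMD_RING_BASE_LO/HI expect. */
	uint64_t xfer_dma = 0x00000001a2b4f000ULL;
	uint32_t lo = (uint32_t)xfer_dma;		/* lower_32_bits() */
	uint32_t hi = (uint32_t)(xfer_dma >> 32);	/* upper_32_bits() */

	printf("xfers_sz=%zu resps_sz=%zu base lo=%#x hi=%#x\n",
	       xfers_sz, resps_sz, (unsigned int)lo, (unsigned int)hi);
	return 0;
}
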
276 rh->ibi_status_sz = FIELD_GET(IBI_STATUS_STRUCT_SIZE, regval); in hci_dma_init()
277 rh->ibi_status_entries = IBI_STATUS_RING_ENTRIES; in hci_dma_init()
278 rh->ibi_chunks_total = IBI_CHUNK_POOL_SIZE; in hci_dma_init()
280 rh->ibi_chunk_sz = dma_get_cache_alignment(); in hci_dma_init()
281 rh->ibi_chunk_sz *= IBI_CHUNK_CACHELINES; in hci_dma_init()
287 rh->ibi_chunk_sz = umax(4, rh->ibi_chunk_sz); in hci_dma_init()
288 rh->ibi_chunk_sz = roundup_pow_of_two(rh->ibi_chunk_sz); in hci_dma_init()
289 if (rh->ibi_chunk_sz > 256) { in hci_dma_init()
294 ibi_status_ring_sz = rh->ibi_status_sz * rh->ibi_status_entries; in hci_dma_init()
295 ibi_data_ring_sz = rh->ibi_chunk_sz * rh->ibi_chunks_total; in hci_dma_init()
297 rh->ibi_status = in hci_dma_init()
299 &rh->ibi_status_dma, GFP_KERNEL); in hci_dma_init()
300 rh->ibi_data = kmalloc(ibi_data_ring_sz, GFP_KERNEL); in hci_dma_init()
302 if (!rh->ibi_status || !rh->ibi_data) in hci_dma_init()
304 rh->ibi_data_dma = in hci_dma_init()
305 dma_map_single(&hci->master.dev, rh->ibi_data, in hci_dma_init()
307 if (dma_mapping_error(&hci->master.dev, rh->ibi_data_dma)) { in hci_dma_init()
308 rh->ibi_data_dma = 0; in hci_dma_init()
313 rh_reg_write(IBI_STATUS_RING_BASE_LO, lower_32_bits(rh->ibi_status_dma)); in hci_dma_init()
314 rh_reg_write(IBI_STATUS_RING_BASE_HI, upper_32_bits(rh->ibi_status_dma)); in hci_dma_init()
315 rh_reg_write(IBI_DATA_RING_BASE_LO, lower_32_bits(rh->ibi_data_dma)); in hci_dma_init()
316 rh_reg_write(IBI_DATA_RING_BASE_HI, upper_32_bits(rh->ibi_data_dma)); in hci_dma_init()
319 rh->ibi_status_entries) | in hci_dma_init()
321 ilog2(rh->ibi_chunk_sz) - 2) | in hci_dma_init()
323 rh->ibi_chunks_total); in hci_dma_init()
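
The IBI chunk size computation is worth spelling out: it starts at one DMA cache line times IBI_CHUNK_CACHELINES, is clamped to at least 4 bytes, rounded up to a power of two, and finally encoded in the ring setup register as log2(size) - 2, which is why anything above 256 bytes is rejected. A runnable walk-through of that arithmetic, with the cache line size and the IBI_CHUNK_CACHELINES value assumed:

#include <stdio.h>

/* Userspace stand-ins for roundup_pow_of_two() and ilog2(). */
static unsigned int roundup_p2(unsigned int v)
{
	unsigned int p = 1;

	while (p < v)
		p <<= 1;
	return p;
}

static unsigned int ilog2u(unsigned int v)
{
	unsigned int l = 0;

	while (v >>= 1)
		l++;
	return l;
}

int main(void)
{
	unsigned int cacheline = 64;		/* dma_get_cache_alignment() stand-in */
	unsigned int chunk_sz = cacheline * 1;	/* IBI_CHUNK_CACHELINES assumed 1 */

	chunk_sz = chunk_sz < 4 ? 4 : chunk_sz;	/* umax(4, chunk_sz) */
	chunk_sz = roundup_p2(chunk_sz);
	if (chunk_sz > 256) {
		fprintf(stderr, "chunk size not encodable in the register field\n");
		return 1;
	}
	/* The chunk-size register field stores log2(size) - 2. */
	printf("chunk_sz=%u encoded=%u\n", chunk_sz, ilog2u(chunk_sz) - 2);
	return 0;
}
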
362 struct hci_rh_data *rh; in hci_dma_queue_xfer() local
369 rh = &rings->headers[ring]; in hci_dma_queue_xfer()
375 u32 *ring_data = rh->xfer + rh->xfer_struct_sz * enqueue_ptr; in hci_dma_queue_xfer()
415 rh->src_xfers[enqueue_ptr] = xfer; in hci_dma_queue_xfer()
420 enqueue_ptr = (enqueue_ptr + 1) % rh->xfer_entries; in hci_dma_queue_xfer()
435 spin_lock_irq(&rh->lock); in hci_dma_queue_xfer()
440 spin_unlock_irq(&rh->lock); in hci_dma_queue_xfer()
449 struct hci_rh_data *rh = &rings->headers[xfer_list[0].ring_number]; in hci_dma_dequeue_xfer() local
455 if (wait_for_completion_timeout(&rh->op_done, HZ) == 0) { in hci_dma_dequeue_xfer()
474 u32 *ring_data = rh->xfer + rh->xfer_struct_sz * idx; in hci_dma_dequeue_xfer()
485 rh->src_xfers[idx] = NULL; in hci_dma_dequeue_xfer()
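
Both the enqueue path and the timeout-recovery walk in hci_dma_dequeue_xfer() address a slot as ring base + struct size * index and advance indices modulo xfer_entries, while src_xfers[] carries the software-side pointer for each in-flight slot. A runnable model of that slot addressing and wraparound (entry count and slot size are made up):

#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define ENTRIES		8
#define STRUCT_SZ	16	/* bytes per ring slot, fabricated */

static uint8_t ring[ENTRIES * STRUCT_SZ];
static void *src_xfers[ENTRIES];	/* completion back-pointers */

int main(void)
{
	unsigned int enqueue_ptr = 6;
	int payload = 42;

	/* Two enqueues; the second wraps the index back to slot 0. */
	for (int i = 0; i < 2; i++) {
		uint8_t *slot = ring + (size_t)STRUCT_SZ * enqueue_ptr;

		memcpy(slot, &payload, sizeof(payload));
		src_xfers[enqueue_ptr] = &payload;
		enqueue_ptr = (enqueue_ptr + 1) % ENTRIES;
		printf("wrote slot, next enqueue_ptr=%u\n", enqueue_ptr);
	}
	return 0;
}
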
500 static void hci_dma_xfer_done(struct i3c_hci *hci, struct hci_rh_data *rh) in hci_dma_xfer_done() argument
503 unsigned int tid, done_ptr = rh->done_ptr; in hci_dma_xfer_done()
511 ring_resp = rh->resp + rh->resp_struct_sz * done_ptr; in hci_dma_xfer_done()
516 xfer = rh->src_xfers[done_ptr]; in hci_dma_xfer_done()
533 done_ptr = (done_ptr + 1) % rh->xfer_entries; in hci_dma_xfer_done()
534 rh->done_ptr = done_ptr; in hci_dma_xfer_done()
538 spin_lock(&rh->lock); in hci_dma_xfer_done()
543 spin_unlock(&rh->lock); in hci_dma_xfer_done()
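
On completion, hci_dma_xfer_done() reads response descriptors at done_ptr, recovers the originating request from src_xfers[done_ptr], clears the slot, and advances done_ptr modulo the ring size. A runnable model of that consumer side; a tid variable is visibly extracted in the matches above, but the cross-check shown here is an assumption about how it is used, and all values are fabricated:

#include <stdio.h>

#define ENTRIES 4

struct xfer { unsigned int tid; };

static struct xfer pending[2] = { { .tid = 7 }, { .tid = 8 } };
static struct xfer *src_xfers[ENTRIES] = { &pending[0], &pending[1] };
static unsigned int resp_tid[ENTRIES] = { 7, 8 };	/* as read from the resp ring */

int main(void)
{
	unsigned int done_ptr = 0;

	for (int i = 0; i < 2; i++) {
		struct xfer *xfer = src_xfers[done_ptr];

		if (xfer && xfer->tid != resp_tid[done_ptr])
			printf("TID mismatch at slot %u\n", done_ptr);
		else
			printf("slot %u: tid %u completed\n",
			       done_ptr, resp_tid[done_ptr]);
		src_xfers[done_ptr] = NULL;	/* slot is free again */
		done_ptr = (done_ptr + 1) % ENTRIES;
	}
	return 0;
}
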
587 static void hci_dma_process_ibi(struct i3c_hci *hci, struct hci_rh_data *rh) in hci_dma_process_ibi() argument
614 ptr = (ptr + 1) % rh->ibi_status_entries) { in hci_dma_process_ibi()
618 ring_ibi_status = rh->ibi_status + rh->ibi_status_sz * ptr; in hci_dma_process_ibi()
636 ibi_size += chunks * rh->ibi_chunk_sz; in hci_dma_process_ibi()
652 deq_ptr %= rh->ibi_status_entries; in hci_dma_process_ibi()
692 ibi_data_offset = rh->ibi_chunk_sz * rh->ibi_chunk_ptr; in hci_dma_process_ibi()
693 ring_ibi_data = rh->ibi_data + ibi_data_offset; in hci_dma_process_ibi()
694 ring_ibi_data_dma = rh->ibi_data_dma + ibi_data_offset; in hci_dma_process_ibi()
695 first_part = (rh->ibi_chunks_total - rh->ibi_chunk_ptr) in hci_dma_process_ibi()
696 * rh->ibi_chunk_sz; in hci_dma_process_ibi()
706 ring_ibi_data = rh->ibi_data; in hci_dma_process_ibi()
707 ring_ibi_data_dma = rh->ibi_data_dma; in hci_dma_process_ibi()
721 spin_lock(&rh->lock); in hci_dma_process_ibi()
726 spin_unlock(&rh->lock); in hci_dma_process_ibi()
729 rh->ibi_chunk_ptr += ibi_chunks; in hci_dma_process_ibi()
730 rh->ibi_chunk_ptr %= rh->ibi_chunks_total; in hci_dma_process_ibi()
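
IBI payload bytes live in a circular pool of fixed-size chunks. When a payload's chunks run past the pool's end, the matches above show the copy split in two: first_part covers chunk_ptr up to the last chunk, and the remainder restarts at rh->ibi_data; afterwards ibi_chunk_ptr advances modulo ibi_chunks_total. A runnable model of the wrap-around copy, with the pool geometry invented:

#include <stdio.h>
#include <string.h>

#define CHUNK_SZ	4
#define CHUNKS_TOTAL	8

static char pool[CHUNK_SZ * CHUNKS_TOTAL];
static char out[CHUNK_SZ * CHUNKS_TOTAL];

int main(void)
{
	unsigned int chunk_ptr = 6;	/* near the end, forcing a wrap */
	unsigned int ibi_size = 10;	/* payload bytes, spans 3 chunks */
	unsigned int ibi_chunks = (ibi_size + CHUNK_SZ - 1) / CHUNK_SZ;
	unsigned int first_part = (CHUNKS_TOTAL - chunk_ptr) * CHUNK_SZ;

	for (unsigned int i = 0; i < sizeof(pool); i++)
		pool[i] = (char)i;	/* make each pool byte identifiable */

	if (first_part >= ibi_size) {
		memcpy(out, pool + chunk_ptr * CHUNK_SZ, ibi_size);
	} else {
		/* Two-piece copy across the pool boundary. */
		memcpy(out, pool + chunk_ptr * CHUNK_SZ, first_part);
		memcpy(out + first_part, pool, ibi_size - first_part);
	}
	chunk_ptr = (chunk_ptr + ibi_chunks) % CHUNKS_TOTAL;

	/* out[8] and out[9] came from pool[0] and pool[1] after the wrap. */
	printf("copied %u bytes, chunk_ptr now %u, out[9]=%d\n",
	       ibi_size, chunk_ptr, out[9]);
	return 0;
}
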
743 struct hci_rh_data *rh; in hci_dma_irq_handler() local
750 rh = &rings->headers[i]; in hci_dma_irq_handler()
752 DBG("rh%d status: %#x", i, status); in hci_dma_irq_handler()
758 hci_dma_process_ibi(hci, rh); in hci_dma_irq_handler()
760 hci_dma_xfer_done(hci, rh); in hci_dma_irq_handler()
762 complete(&rh->op_done); in hci_dma_irq_handler()
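
The interrupt handler visits every ring header, reads its status, and fans out: IBI activity goes to hci_dma_process_ibi(), transfer completion to hci_dma_xfer_done(), and ring-operation events wake whoever is blocked on op_done in hci_dma_dequeue_xfer(). A runnable model of that per-ring dispatch loop; the status bit names are hypothetical stand-ins for the driver's real INTR_* masks, and the ack-by-clearing step is an assumption about write-1-to-clear status registers:

#include <stdio.h>

/* Hypothetical status bits; the driver decodes its real INTR_* masks. */
#define ST_IBI		(1u << 0)
#define ST_XFER_DONE	(1u << 1)
#define ST_RING_OP	(1u << 2)

static unsigned int ring_status[3] = { ST_IBI, ST_XFER_DONE | ST_RING_OP, 0 };

int main(void)
{
	for (int i = 0; i < 3; i++) {
		unsigned int status = ring_status[i];

		printf("rh%d status: %#x\n", i, status);
		if (!status)
			continue;
		ring_status[i] = 0;	/* ack, like writing the status back */
		if (status & ST_IBI)
			printf("  -> process IBI\n");
		if (status & ST_XFER_DONE)
			printf("  -> xfer done\n");
		if (status & ST_RING_OP)
			printf("  -> complete(op_done)\n");
	}
	return 0;
}
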