/linux-6.12.1/tools/virtio/ringtest/ |
D | virtio_ring_0_9.c |
      70  ret = posix_memalign(&p, 0x1000, vring_size(ring_size, 0x1000));  in alloc_ring()
      75  memset(p, 0, vring_size(ring_size, 0x1000));  in alloc_ring()
      76  vring_init(&ring, ring_size, p, 0x1000);  in alloc_ring()
      85  for (i = 0; i < ring_size - 1; i++)  in alloc_ring()
      89  guest.num_free = ring_size;  in alloc_ring()
      90  data = malloc(ring_size * sizeof *data);  in alloc_ring()
      95  memset(data, 0, ring_size * sizeof *data);  in alloc_ring()
     111  head = (ring_size - 1) & (guest.avail_idx++);  in add_inbuf()
     136  ring.avail->ring[avail & (ring_size - 1)] =  in add_inbuf()
     137  (head | (avail & ~(ring_size - 1))) ^ 0x8000;  in add_inbuf()
     [all …]
|
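The virtio_ring_0_9.c hits above all lean on one idiom: the free-running avail/used indices are folded into a slot number with "& (ring_size - 1)", which is only valid when the ring size is a power of two (the "^ 0x8000" on line 137 additionally encodes a wrap marker in a spare high bit, which the sketch below does not model). A minimal userspace sketch of the basic masking, with illustrative names (RING_SIZE, slot_of) not taken from the file:

    #include <assert.h>
    #include <stdio.h>

    #define RING_SIZE 256u          /* must be a power of two for the mask trick */

    /* Fold a free-running index into a slot number inside the ring. */
    static unsigned slot_of(unsigned idx)
    {
        return idx & (RING_SIZE - 1);
    }

    int main(void)
    {
        unsigned avail_idx = 0;
        unsigned i;

        /* The index itself never wraps explicitly; the mask wraps it. */
        for (i = 0; i < 3 * RING_SIZE; i++) {
            unsigned head = slot_of(avail_idx++);

            assert(head < RING_SIZE);
            if (i % RING_SIZE == 0)
                printf("index %u -> slot %u\n", i, head);
        }
        return 0;
    }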
D | ring.c |
      82  ret = posix_memalign((void **)&ring, 0x1000, ring_size * sizeof *ring);  in alloc_ring()
      97  for (i = 0; i < ring_size; ++i) {  in alloc_ring()
     103  guest.num_free = ring_size;  in alloc_ring()
     104  data = calloc(ring_size, sizeof(*data));  in alloc_ring()
     120  head = (ring_size - 1) & (guest.avail_idx++);  in add_inbuf()
     145  unsigned head = (ring_size - 1) & guest.last_used_idx;  in get_buf()
     154  index = ring[head].index & (ring_size - 1);  in get_buf()
     166  unsigned head = (ring_size - 1) & guest.last_used_idx;  in used_empty()
     221  unsigned head = (ring_size - 1) & host.used_idx;  in avail_empty()
     228  unsigned head = (ring_size - 1) & host.used_idx;  in use_buf()
|
D | main.c |
      29  unsigned ring_size = 256;  variable
     288  ring_size,  in help()
     322  ring_size = strtol(optarg, &endptr, 0);  in main()
     323  assert(ring_size && !(ring_size & (ring_size - 1)));  in main()
|
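main.c line 323 checks that the ring size supplied on the command line is a nonzero power of two: clearing the lowest set bit of a power of two leaves zero, so "ring_size && !(ring_size & (ring_size - 1))" is the usual test. A small standalone sketch of the same validation (option handling simplified; this is not the ringtest CLI itself):

    #include <stdio.h>
    #include <stdlib.h>

    /* Nonzero when x is a power of two: a power of two has exactly one
     * set bit, so clearing its lowest set bit leaves zero. */
    static int is_pow2(unsigned long x)
    {
        return x && !(x & (x - 1));
    }

    int main(int argc, char **argv)
    {
        unsigned long ring_size = 256;          /* default, as in main.c above */
        char *endptr;

        if (argc > 1) {
            ring_size = strtoul(argv[1], &endptr, 0);
            if (*endptr || !is_pow2(ring_size)) {
                fprintf(stderr, "ring size must be a nonzero power of two\n");
                return EXIT_FAILURE;
            }
        }
        printf("using a ring of %lu entries\n", ring_size);
        return 0;
    }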
/linux-6.12.1/include/xen/interface/io/ |
D | ring.h |
     357  static inline RING_IDX name##_mask(RING_IDX idx, RING_IDX ring_size) \
     359  return idx & (ring_size - 1); \
     364  RING_IDX ring_size) \
     366  return buf + name##_mask(idx, ring_size); \
     374  RING_IDX ring_size) \
     377  size <= ring_size - *masked_cons) { \
     380  memcpy(opaque, buf + *masked_cons, ring_size - *masked_cons); \
     381  memcpy((unsigned char *)opaque + ring_size - *masked_cons, buf, \
     382  size - (ring_size - *masked_cons)); \
     384  *masked_cons = name##_mask(*masked_cons + size, ring_size); \
     [all …]
|
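The flex-ring helpers generated by this header copy data out of a circular buffer and, when the requested span crosses the end of the ring, split the copy into two memcpy calls (lines 380-382). A self-contained sketch of that wrap-around split, assuming a power-of-two ring and using illustrative names (ring_read, cons) rather than the generated Xen helpers:

    #include <stdio.h>
    #include <string.h>

    /* Copy `size` bytes out of a power-of-two circular buffer, splitting the
     * copy in two when it wraps past the end of the ring. */
    static void ring_read(void *dst, const unsigned char *buf,
                          unsigned ring_size, unsigned *cons, size_t size)
    {
        unsigned masked = *cons & (ring_size - 1);

        if (size <= ring_size - masked) {
            memcpy(dst, buf + masked, size);            /* contiguous case */
        } else {
            size_t first = ring_size - masked;

            memcpy(dst, buf + masked, first);           /* up to the end of the ring */
            memcpy((unsigned char *)dst + first, buf, size - first);  /* wrapped tail */
        }
        *cons += size;
    }

    int main(void)
    {
        const unsigned char ring[8] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H' };
        char out[6] = { 0 };
        unsigned cons = 5;              /* read starts near the end, so it wraps */

        ring_read(out, ring, sizeof(ring), &cons, 5);
        printf("%s\n", out);            /* prints FGHAB */
        return 0;
    }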
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_ih.c |
      42  unsigned ring_size, bool use_bus_addr)  in amdgpu_ih_ring_init()  argument
      48  rb_bufsz = order_base_2(ring_size / 4);  in amdgpu_ih_ring_init()
      49  ring_size = (1 << rb_bufsz) * 4;  in amdgpu_ih_ring_init()
      50  ih->ring_size = ring_size;  in amdgpu_ih_ring_init()
      51  ih->ptr_mask = ih->ring_size - 1;  in amdgpu_ih_ring_init()
      64  ih->ring = dma_alloc_coherent(adev->dev, ih->ring_size + 8,  in amdgpu_ih_ring_init()
      70  ih->wptr_addr = dma_addr + ih->ring_size;  in amdgpu_ih_ring_init()
      71  ih->wptr_cpu = &ih->ring[ih->ring_size / 4];  in amdgpu_ih_ring_init()
      72  ih->rptr_addr = dma_addr + ih->ring_size + 4;  in amdgpu_ih_ring_init()
      73  ih->rptr_cpu = &ih->ring[(ih->ring_size / 4) + 1];  in amdgpu_ih_ring_init()
     [all …]
|
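amdgpu_ih_ring_init() does not trust the caller's ring_size: lines 48-51 round it to a power of two via order_base_2() so that ptr_mask = ring_size - 1 is a valid wrap mask. A rough userspace stand-in for that rounding, with a loop-based replacement for order_base_2() (the kernel helper is fls-based; the requested size below is just an example value):

    #include <stdio.h>

    /* Userspace stand-in for order_base_2(): smallest n with (1 << n) >= x.
     * Assumes x >= 1. */
    static unsigned order_base_2(unsigned x)
    {
        unsigned n = 0;

        while ((1u << n) < x)
            n++;
        return n;
    }

    int main(void)
    {
        unsigned requested = 4096 + 100;                  /* not a power of two */
        unsigned rb_bufsz  = order_base_2(requested / 4); /* size in dwords, rounded up */
        unsigned ring_size = (1u << rb_bufsz) * 4;        /* back to bytes, now a power of two */
        unsigned ptr_mask  = ring_size - 1;               /* valid only because of the rounding */

        printf("requested %u -> ring_size %u, ptr_mask 0x%x\n",
               requested, ring_size, ptr_mask);
        return 0;
    }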
D | navi10_ih.c |
      53  if (adev->irq.ih.ring_size) {  in navi10_ih_init_register_offset()
      66  if (adev->irq.ih1.ring_size) {  in navi10_ih_init_register_offset()
      77  if (adev->irq.ih2.ring_size) {  in navi10_ih_init_register_offset()
     203  if (ih[i]->ring_size) {  in navi10_ih_toggle_interrupts()
     215  int rb_bufsz = order_base_2(ih->ring_size / 4);  in navi10_ih_rb_cntl()
     353  if (ih[i]->ring_size) {  in navi10_ih_irq_init()
     373  if (adev->irq.ih_soft.ring_size)  in navi10_ih_irq_init()
     474  if ((v < ih->ring_size) && (v != ih->rptr))  in navi10_ih_irq_rearm()
     581  adev->irq.ih1.ring_size = 0;  in navi10_ih_sw_init()
     582  adev->irq.ih2.ring_size = 0;  in navi10_ih_sw_init()
|
D | vega10_ih.c |
      51  if (adev->irq.ih.ring_size) {  in vega10_ih_init_register_offset()
      64  if (adev->irq.ih1.ring_size) {  in vega10_ih_init_register_offset()
      75  if (adev->irq.ih2.ring_size) {  in vega10_ih_init_register_offset()
     148  if (ih[i]->ring_size) {  in vega10_ih_toggle_interrupts()
     160  int rb_bufsz = order_base_2(ih->ring_size / 4);  in vega10_ih_rb_cntl()
     285  if (ih[i]->ring_size) {  in vega10_ih_irq_init()
     303  if (adev->irq.ih_soft.ring_size)  in vega10_ih_irq_init()
     404  if ((v < ih->ring_size) && (v != ih->rptr))  in vega10_ih_irq_rearm()
|
D | vega20_ih.c |
      59  if (adev->irq.ih.ring_size) {  in vega20_ih_init_register_offset()
      72  if (adev->irq.ih1.ring_size) {  in vega20_ih_init_register_offset()
      83  if (adev->irq.ih2.ring_size) {  in vega20_ih_init_register_offset()
     157  if (ih[i]->ring_size) {  in vega20_ih_toggle_interrupts()
     169  int rb_bufsz = order_base_2(ih->ring_size / 4);  in vega20_ih_rb_cntl()
     321  if (ih[i]->ring_size) {  in vega20_ih_irq_init()
     355  if (adev->irq.ih_soft.ring_size)  in vega20_ih_irq_init()
     457  if ((v < ih->ring_size) && (v != ih->rptr))  in vega20_ih_irq_rearm()
|
D | ih_v7_0.c |
      52  if (adev->irq.ih.ring_size) {  in ih_v7_0_init_register_offset()
      65  if (adev->irq.ih1.ring_size) {  in ih_v7_0_init_register_offset()
     177  if (ih[i]->ring_size) {  in ih_v7_0_toggle_interrupts()
     189  int rb_bufsz = order_base_2(ih->ring_size / 4);  in ih_v7_0_rb_cntl()
     319  if (ih[i]->ring_size) {  in ih_v7_0_irq_init()
     350  if (adev->irq.ih1.ring_size) {  in ih_v7_0_irq_init()
     373  if (adev->irq.ih_soft.ring_size)  in ih_v7_0_irq_init()
     465  if ((v < ih->ring_size) && (v != ih->rptr))  in ih_v7_0_irq_rearm()
|
/linux-6.12.1/tools/testing/selftests/kvm/ |
D | coalesced_io_test.c |
      17  uint32_t ring_size;  member
      47  for (i = 0; i < io->ring_size - 1; i++) {  in guest_code()
      97  *(uint64_t *)run->mmio.data == io->mmio_gpa + io->ring_size - 1)) ||  in vcpu_run_and_verify_io_exit()
     100  pio_value == io->pio_port + io->ring_size - 1)),  in vcpu_run_and_verify_io_exit()
     106  (want_pio ? io->pio_port : io->mmio_gpa) + io->ring_size - 1, run->exit_reason,  in vcpu_run_and_verify_io_exit()
     122  TEST_ASSERT((ring->last + 1) % io->ring_size == ring->first,  in vcpu_run_and_verify_coalesced_io()
     124  ring->first, ring->last, io->ring_size, ring_start);  in vcpu_run_and_verify_coalesced_io()
     126  for (i = 0; i < io->ring_size - 1; i++) {  in vcpu_run_and_verify_coalesced_io()
     127  uint32_t idx = (ring->first + i) % io->ring_size;  in vcpu_run_and_verify_coalesced_io()
     214  .ring_size = (getpagesize() - sizeof(struct kvm_coalesced_mmio_ring)) /  in main()
     [all …]
|
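The assertion on line 122 encodes the classic "keep one slot empty" ring convention: the ring counts as full when advancing last by one would collide with first, so a ring of ring_size slots holds at most ring_size - 1 entries. A minimal sketch of that convention (the struct and field names are illustrative, not the kvm_coalesced_mmio_ring layout):

    #include <stdbool.h>
    #include <stdio.h>

    /* A first/last-indexed ring that deliberately leaves one slot unused so
     * that "full" and "empty" remain distinguishable. */
    struct ring {
        unsigned first;         /* oldest valid entry */
        unsigned last;          /* next slot to write */
        unsigned size;          /* total slots; capacity is size - 1 */
    };

    static bool ring_empty(const struct ring *r) { return r->first == r->last; }
    static bool ring_full(const struct ring *r)  { return (r->last + 1) % r->size == r->first; }

    static bool ring_push(struct ring *r)
    {
        if (ring_full(r))
            return false;
        r->last = (r->last + 1) % r->size;
        return true;
    }

    int main(void)
    {
        struct ring r = { .first = 0, .last = 0, .size = 8 };
        unsigned pushed = 0;

        while (ring_push(&r))
            pushed++;
        printf("pushed %u entries into a %u-slot ring\n", pushed, r.size);  /* 7 of 8 */
        printf("empty=%d full=%d\n", ring_empty(&r), ring_full(&r));
        return 0;
    }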
/linux-6.12.1/drivers/crypto/ccp/ |
D | tee-dev.c |
      25  static int tee_alloc_ring(struct psp_tee_device *tee, int ring_size)  in tee_alloc_ring()  argument
      30  if (!ring_size)  in tee_alloc_ring()
      36  start_addr = (void *)__get_free_pages(GFP_KERNEL, get_order(ring_size));  in tee_alloc_ring()
      40  memset(start_addr, 0x0, ring_size);  in tee_alloc_ring()
      42  rb_mgr->ring_size = ring_size;  in tee_alloc_ring()
      57  get_order(rb_mgr->ring_size));  in tee_free_ring()
      60  rb_mgr->ring_size = 0;  in tee_free_ring()
      76  cmd->size = tee->rb_mgr.ring_size;  in tee_alloc_cmd_buffer()
      91  int ring_size = MAX_RING_BUFFER_ENTRIES * sizeof(struct tee_ring_cmd);  in tee_init_ring()  local
      98  ret = tee_alloc_ring(tee, ring_size);  in tee_init_ring()
     [all …]
|
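tee_init_ring() sizes the ring from a fixed entry count and tee_alloc_ring() then grabs whole pages with __get_free_pages(GFP_KERNEL, get_order(ring_size)). A hedged userspace approximation of get_order(), to show how a byte count maps to a page-allocation order (PAGE_SIZE and the example entry count/size below are assumptions, not values from the driver):

    #include <stdio.h>

    #define PAGE_SIZE 4096u

    /* Userspace approximation of the kernel's get_order(): the smallest
     * order n such that (PAGE_SIZE << n) covers `size`. */
    static unsigned get_order(unsigned long size)
    {
        unsigned order = 0;

        while ((unsigned long)(PAGE_SIZE << order) < size)
            order++;
        return order;
    }

    int main(void)
    {
        unsigned long ring_size = 512 * 64;     /* hypothetical: entries * entry size */
        unsigned order = get_order(ring_size);

        printf("ring_size=%lu bytes -> order %u (%u bytes allocated)\n",
               ring_size, order, PAGE_SIZE << order);
        return 0;
    }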
/linux-6.12.1/drivers/net/ethernet/intel/idpf/ |
D | idpf_controlq.c |
      39  wr32(hw, cq->reg.tail, (u32)(cq->ring_size - 1));  in idpf_ctlq_init_regs()
      51  wr32(hw, cq->reg.len, (cq->ring_size | cq->reg.len_ena_mask));  in idpf_ctlq_init_regs()
      65  for (i = 0; i < cq->ring_size; i++) {  in idpf_ctlq_init_rxq_bufs()
     105  cq->ring_size = 0;  in idpf_ctlq_shutdown()
     138  cq->ring_size = qinfo->len;  in idpf_ctlq_add()
     142  cq->next_to_post = cq->ring_size - 1;  in idpf_ctlq_add()
     323  if (cq->next_to_use == cq->ring_size)  in idpf_ctlq_send()
     364  if (*clean_count > cq->ring_size)  in idpf_ctlq_clean_sq()
     391  if (ntc == cq->ring_size)  in idpf_ctlq_clean_sq()
     429  if (*buff_count > cq->ring_size)  in idpf_ctlq_post_rx_buffs()
     [all …]
|
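Unlike the masked rings above, the idpf control queue wraps its cursors by explicit comparison (lines 323 and 391 reset the index once it reaches ring_size), so the ring length does not have to be a power of two. A small sketch of that wrap style:

    #include <stdio.h>

    /* Advance a ring cursor by comparison instead of masking; this works
     * for any ring_size, not just powers of two. */
    static unsigned ring_next(unsigned idx, unsigned ring_size)
    {
        if (++idx == ring_size)
            idx = 0;
        return idx;
    }

    int main(void)
    {
        unsigned ring_size = 6;         /* deliberately not a power of two */
        unsigned next_to_use = 0;
        int i;

        for (i = 0; i < 14; i++) {
            printf("%u ", next_to_use);
            next_to_use = ring_next(next_to_use, ring_size);
        }
        printf("\n");                   /* 0 1 2 3 4 5 0 1 2 3 4 5 0 1 */
        return 0;
    }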
D | idpf_controlq_setup.c |
      14  size_t size = cq->ring_size * sizeof(struct idpf_ctlq_desc);  in idpf_ctlq_alloc_desc_ring()
      43  cq->bi.rx_buff = kcalloc(cq->ring_size, sizeof(struct idpf_dma_mem *),  in idpf_ctlq_alloc_bufs()
      49  for (i = 0; i < cq->ring_size - 1; i++) {  in idpf_ctlq_alloc_bufs()
     112  for (i = 0; i < cq->ring_size; i++) {  in idpf_ctlq_free_bufs()
|
/linux-6.12.1/drivers/gpu/drm/radeon/ |
D | radeon_ring.c |
      88  ring->ring_free_dw = rptr + (ring->ring_size / 4);  in radeon_ring_free_size()
      93  ring->ring_free_dw = ring->ring_size / 4;  in radeon_ring_free_size()
     114  if (ndw > (ring->ring_size / 4))  in radeon_ring_alloc()
     316  size = ring->wptr + (ring->ring_size / 4);  in radeon_ring_backup()
     383  int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size,  in radeon_ring_init()  argument
     388  ring->ring_size = ring_size;  in radeon_ring_init()
     394  r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true,  in radeon_ring_init()
     420  ring->ptr_mask = (ring->ring_size / 4) - 1;  in radeon_ring_init()
     421  ring->ring_free_dw = ring->ring_size / 4;  in radeon_ring_init()
     476  count = (ring->ring_size / 4) - ring->ring_free_dw;  in radeon_debugfs_ring_info_show()
|
/linux-6.12.1/drivers/xen/ |
D | evtchn.c |
      67  unsigned int ring_size;  member
      99  return idx & (u->ring_size - 1);  in evtchn_ring_offset()
     183  if ((prod - cons) < u->ring_size) {  in evtchn_interrupt()
     240  if (((c ^ p) & u->ring_size) != 0) {  in evtchn_read()
     241  bytes1 = (u->ring_size - evtchn_ring_offset(u, c)) *  in evtchn_read()
     327  if (u->nr_evtchns <= u->ring_size)  in evtchn_resize_ring()
     330  if (u->ring_size == 0)  in evtchn_resize_ring()
     333  new_size = 2 * u->ring_size;  in evtchn_resize_ring()
     359  memcpy(new_ring, old_ring, u->ring_size * sizeof(*u->ring));  in evtchn_resize_ring()
     360  memcpy(new_ring + u->ring_size, old_ring,  in evtchn_resize_ring()
     [all …]
|
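evtchn_resize_ring() doubles a power-of-two ring while the producer/consumer indices keep running freely; copying the old contents into the new buffer twice, back to back (lines 359-360), keeps every queued entry reachable at idx & (new_size - 1) without touching the indices. A standalone sketch of why the double copy works, using made-up index values:

    #include <stdio.h>
    #include <string.h>

    /* Grow a power-of-two ring to twice its size.  Duplicating the old
     * contents back to back means any free-running index lands on the same
     * value whether it is masked by the old size or by the new one. */
    int main(void)
    {
        unsigned old_size = 4, new_size = 8;
        unsigned old_ring[4] = { 10, 11, 12, 13 };
        unsigned new_ring[8];
        unsigned cons = 6, prod = 9;            /* free-running indices, mid-wrap */
        unsigned i;

        memcpy(new_ring, old_ring, old_size * sizeof(old_ring[0]));
        memcpy(new_ring + old_size, old_ring, old_size * sizeof(old_ring[0]));

        for (i = cons; i != prod; i++)
            printf("idx %u: old slot %u = %u, new slot %u = %u\n",
                   i, i & (old_size - 1), old_ring[i & (old_size - 1)],
                   i & (new_size - 1), new_ring[i & (new_size - 1)]);
        return 0;
    }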
/linux-6.12.1/drivers/net/ethernet/amazon/ena/ |
D | ena_netdev.h |
      88  #define ENA_TX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1))  argument
      90  #define ENA_RX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1))  argument
      91  #define ENA_RX_RING_IDX_ADD(idx, n, ring_size) \  argument
      92  (((idx) + (n)) & ((ring_size) - 1))
     265  int ring_size;  member
|
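The ENA helpers step and offset ring indices by masking with ring_size - 1, again assuming a power-of-two ring. A compilable sketch of equivalent macros (renamed for illustration; the ENA versions are exactly as listed above):

    #include <stdio.h>

    /* Same shape as the ENA ring-index helpers: add, then mask with
     * (ring_size - 1); ring_size must be a power of two. */
    #define RING_IDX_NEXT(idx, ring_size)   (((idx) + 1) & ((ring_size) - 1))
    #define RING_IDX_ADD(idx, n, ring_size) (((idx) + (n)) & ((ring_size) - 1))

    int main(void)
    {
        unsigned ring_size = 8, idx = 6;

        printf("next of %u is %u\n", idx, RING_IDX_NEXT(idx, ring_size));       /* 7 */
        printf("%u + 5 wraps to %u\n", idx, RING_IDX_ADD(idx, 5, ring_size));   /* 3 */
        return 0;
    }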
/linux-6.12.1/drivers/net/ethernet/renesas/ |
D | rswitch.c |
     212  if (index + num >= gq->ring_size)  in rswitch_next_queue_index()
     213  index = (index + num) % gq->ring_size;  in rswitch_next_queue_index()
     225  return gq->ring_size - gq->dirty + gq->cur;  in rswitch_get_num_cur_queues()
     245  index = (i + start_index) % gq->ring_size;  in rswitch_gwca_queue_alloc_rx_buf()
     257  index = (i + start_index) % gq->ring_size;  in rswitch_gwca_queue_alloc_rx_buf()
     273  (gq->ring_size + 1), gq->rx_ring, gq->ring_dma);  in rswitch_gwca_queue_free()
     276  for (i = 0; i < gq->ring_size; i++)  in rswitch_gwca_queue_free()
     283  (gq->ring_size + 1), gq->tx_ring, gq->ring_dma);  in rswitch_gwca_queue_free()
     297  sizeof(struct rswitch_ts_desc) * (gq->ring_size + 1),  in rswitch_gwca_ts_queue_free()
     305  bool dir_tx, unsigned int ring_size)  in rswitch_gwca_queue_alloc()  argument
     [all …]
|
/linux-6.12.1/drivers/crypto/intel/qat/qat_common/ |
D | adf_transport.c |
      93  ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size)) {  in adf_send_message()
     103  ADF_RING_SIZE_MODULO(ring->ring_size));  in adf_send_message()
     124  ADF_RING_SIZE_MODULO(ring->ring_size));  in adf_handle_response()
     139  u32 ring_config = BUILD_RING_CONFIG(ring->ring_size);  in adf_configure_tx_ring()
     151  BUILD_RESP_RING_CONFIG(ring->ring_size,  in adf_configure_rx_ring()
     168  ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);  in adf_init_ring()
     194  ring->ring_size);  in adf_init_ring()
     206  ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);  in adf_cleanup_ring()
     272  ring->ring_size = adf_verify_ring_size(msg_size, num_msgs);  in adf_create_ring()
     275  max_inflights = ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size);  in adf_create_ring()
|
/linux-6.12.1/drivers/bus/mhi/ep/ |
D | ring.c |
      26  ring->rd_offset = (ring->rd_offset + 1) % ring->ring_size;  in mhi_ep_ring_inc_index()
      55  buf_info.size = (ring->ring_size - start) * sizeof(struct mhi_ring_element);  in __mhi_ep_cache_ring()
     125  num_free_elem = ((ring->ring_size - ring->rd_offset) + ring->wr_offset) - 1;  in mhi_ep_ring_add_element()
     183  ring->ring_size = mhi_ep_ring_num_elems(ring);  in mhi_ep_ring_start()
     204  ring->ring_cache = kcalloc(ring->ring_size, sizeof(struct mhi_ring_element), GFP_KERNEL);  in mhi_ep_ring_start()
|
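Line 125 is one branch of a free-slot computation for a ring addressed by read/write offsets that keeps one slot unused. A hedged sketch of both branches, treating rd as the next slot to fill and wr as the boundary it must not overrun (this interpretation is mine, not text from the driver):

    #include <stdio.h>

    /* Free slots in a ring of ring_size elements, with one slot reserved so
     * a full ring is distinguishable from an empty one.  The second branch
     * matches the expression visible on line 125 above. */
    static unsigned ring_free_elems(unsigned rd, unsigned wr, unsigned ring_size)
    {
        if (rd < wr)
            return (wr - rd) - 1;
        return ((ring_size - rd) + wr) - 1;
    }

    int main(void)
    {
        unsigned ring_size = 8;

        printf("%u\n", ring_free_elems(2, 5, ring_size));  /* prints 2 */
        printf("%u\n", ring_free_elems(5, 2, ring_size));  /* wrapped case: prints 4 */
        printf("%u\n", ring_free_elems(0, 0, ring_size));  /* empty ring: prints 7 */
        return 0;
    }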
/linux-6.12.1/drivers/net/ethernet/atheros/ |
D | ag71xx.c |
     774  int ring_mask, ring_size;  in ag71xx_tx_packets()  local
     778  ring_size = BIT(ring->order);  in ag71xx_tx_packets()
     831  if ((ring->curr - ring->dirty) < (ring_size * 3) / 4)  in ag71xx_tx_packets()
    1153  int ring_size = BIT(ring->order);  in ag71xx_ring_tx_init()  local
    1154  int ring_mask = ring_size - 1;  in ag71xx_ring_tx_init()
    1157  for (i = 0; i < ring_size; i++) {  in ag71xx_ring_tx_init()
    1178  int ring_size = BIT(ring->order);  in ag71xx_ring_rx_clean()  local
    1184  for (i = 0; i < ring_size; i++)  in ag71xx_ring_rx_clean()
    1225  int ring_size = BIT(ring->order);  in ag71xx_ring_rx_init()  local
    1230  for (i = 0; i < ring_size; i++) {  in ag71xx_ring_rx_init()
     [all …]
|
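ag71xx stores only the ring order; the size and mask are derived as BIT(order) and BIT(order) - 1, and line 831 compares the difference of the free-running curr/dirty cursors against three quarters of the ring. A short sketch of those derivations with made-up cursor values (the "occupancy" reading of curr - dirty is an assumption for illustration):

    #include <stdio.h>

    #define BIT(n) (1u << (n))

    int main(void)
    {
        unsigned order = 7;
        unsigned ring_size = BIT(order);        /* 128 descriptors */
        unsigned ring_mask = ring_size - 1;

        /* curr and dirty are free-running cursors; their difference is the
         * number of descriptors not yet reclaimed. */
        unsigned curr = 300, dirty = 210;
        unsigned in_flight = curr - dirty;

        printf("slot of curr: %u\n", curr & ring_mask);
        if (in_flight < (ring_size * 3) / 4)
            printf("occupancy %u of %u is below the 3/4 threshold\n",
                   in_flight, ring_size);
        return 0;
    }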
/linux-6.12.1/arch/powerpc/platforms/pasemi/ |
D | dma_lib.c |
     242  int pasemi_dma_alloc_ring(struct pasemi_dmachan *chan, int ring_size)  in pasemi_dma_alloc_ring()  argument
     246  chan->ring_size = ring_size;  in pasemi_dma_alloc_ring()
     249  ring_size * sizeof(u64),  in pasemi_dma_alloc_ring()
     268  dma_free_coherent(&dma_pdev->dev, chan->ring_size * sizeof(u64),  in pasemi_dma_free_ring()
     271  chan->ring_size = 0;  in pasemi_dma_free_ring()
|
/linux-6.12.1/include/linux/ |
D | pipe_fs_i.h |
      64  unsigned int ring_size;  member
     183  return &pipe->bufs[slot & (pipe->ring_size - 1)];  in pipe_buf()
     251  unsigned int mask = pipe->ring_size - 1;  in pipe_discard_from()
|
/linux-6.12.1/tools/testing/selftests/bpf/progs/ |
D | test_ringbuf.c |
      32  long ring_size = 0;  variable
      72  ring_size = bpf_ringbuf_query(&ringbuf, BPF_RB_RING_SIZE);  in test_ringbuf()
|
/linux-6.12.1/drivers/net/ethernet/intel/ice/ |
D | ice_fwlog.c |
      83  int status, ring_size;  in ice_fwlog_realloc_rings()  local
      94  ring_size = ICE_FWLOG_INDEX_TO_BYTES(index) / ICE_AQ_MAX_BUF_LEN;  in ice_fwlog_realloc_rings()
      95  if (ring_size == hw->fwlog_ring.size)  in ice_fwlog_realloc_rings()
     102  ring.rings = kcalloc(ring_size, sizeof(*ring.rings), GFP_KERNEL);  in ice_fwlog_realloc_rings()
     106  ring.size = ring_size;  in ice_fwlog_realloc_rings()
|
/linux-6.12.1/drivers/uio/ |
D | uio_hv_generic.c |
     254  size_t ring_size = hv_dev_ring_size(channel);  in hv_uio_probe()  local
     256  if (!ring_size)  in hv_uio_probe()
     257  ring_size = SZ_2M;  in hv_uio_probe()
     263  ret = vmbus_alloc_ring(channel, ring_size, ring_size);  in hv_uio_probe()
|