Searched refs:queue_size (Results 1 – 25 of 93) sorted by relevance

/linux-6.12.1/drivers/gpu/drm/amd/amdkfd/
kfd_kernel_queue.c
43 enum kfd_queue_type type, unsigned int queue_size) in kq_initialize() argument
53 queue_size); in kq_initialize()
86 retval = kfd_gtt_sa_allocate(dev, queue_size, &kq->pq); in kq_initialize()
89 queue_size); in kq_initialize()
126 memset(kq->pq_kernel_addr, 0, queue_size); in kq_initialize()
130 prop.queue_size = queue_size; in kq_initialize()
250 queue_size_dwords = kq->queue->properties.queue_size / 4; in kq_acquire_packet_buffer()
327 (kq->queue->properties.queue_size / 4); in kq_rollback_packet()
kfd_mqd_manager_cik.c
165 uint32_t wptr_mask = (uint32_t)((p->queue_size / 4) - 1); in load_mqd()
191 m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1; in __update_mqd()
230 m->sdma_rlc_rb_cntl = order_base_2(q->queue_size / 4) in update_mqd_sdma()
351 m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1; in update_mqd_hiq()
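
The kfd_mqd_manager_cik.c hits above encode the same ring geometry twice: once as a log2-of-dwords field OR'd into cp_hqd_pq_control, once as a power-of-two wrap mask for the write pointer. A minimal userspace sketch of that arithmetic follows; the order_base_2() helper and the register semantics are only imitated here, not taken from the kernel headers.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* roughly what the kernel's order_base_2() returns: ceil(log2(n)) for n >= 1 */
static unsigned int order_base_2(uint64_t n)
{
        unsigned int order = 0;

        while ((1ULL << order) < n)
                order++;
        return order;
}

int main(void)
{
        uint64_t queue_size = 1 << 20;                   /* ring size in bytes, power of two */
        uint64_t dwords = queue_size / 4;                /* the HQD counts 32-bit words */
        uint32_t size_field = order_base_2(dwords) - 1;  /* value OR'd into the PQ control field */
        uint32_t wptr_mask = (uint32_t)(dwords - 1);     /* wrap mask as in load_mqd() above */

        assert((dwords & (dwords - 1)) == 0);            /* mask/log2 tricks need a power of two */
        printf("size field %u, wptr mask 0x%x\n", size_field, wptr_mask);
        return 0;
}
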
/linux-6.12.1/drivers/vdpa/ifcvf/
ifcvf_base.c
74 u16 queue_size; in ifcvf_get_vq_size() local
80 queue_size = vp_ioread16(&hw->common_cfg->queue_size); in ifcvf_get_vq_size()
82 return queue_size; in ifcvf_get_vq_size()
87 u16 queue_size, max_size, qid; in ifcvf_get_max_vq_size() local
91 queue_size = ifcvf_get_vq_size(hw, qid); in ifcvf_get_max_vq_size()
93 if (!queue_size) in ifcvf_get_max_vq_size()
96 max_size = max(queue_size, max_size); in ifcvf_get_max_vq_size()
352 vp_iowrite16(num, &cfg->queue_size); in ifcvf_set_vq_num()
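
The ifcvf hits above read each virtqueue's advertised size from the device's common configuration space and keep the largest non-zero value. A small sketch of that scan, with the vp_ioread16() register access replaced by a made-up table:

#include <stdint.h>
#include <stdio.h>

#define NR_QUEUES 4

static uint16_t fake_vq_size[NR_QUEUES] = { 256, 0, 1024, 512 }; /* 0 = queue absent */

static uint16_t get_vq_size(unsigned int qid)
{
        return fake_vq_size[qid];       /* stands in for reading common_cfg->queue_size */
}

int main(void)
{
        uint16_t max_size = 0;

        for (unsigned int qid = 0; qid < NR_QUEUES; qid++) {
                uint16_t queue_size = get_vq_size(qid);

                if (!queue_size)        /* unrealized queues report size 0 */
                        continue;
                if (queue_size > max_size)
                        max_size = queue_size;
        }
        printf("max vq size: %u\n", (unsigned int)max_size);
        return 0;
}
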
/linux-6.12.1/drivers/net/ethernet/microsoft/mana/
hw_channel.c
264 enum gdma_queue_type type, u64 queue_size, in mana_hwc_create_gdma_wq() argument
274 spec.queue_size = queue_size; in mana_hwc_create_gdma_wq()
280 u64 queue_size, in mana_hwc_create_gdma_cq() argument
289 spec.queue_size = queue_size; in mana_hwc_create_gdma_cq()
298 u64 queue_size, in mana_hwc_create_gdma_eq() argument
306 spec.queue_size = queue_size; in mana_hwc_create_gdma_eq()
496 u32 queue_size; in mana_hwc_create_wq() local
502 queue_size = roundup_pow_of_two(GDMA_MAX_RQE_SIZE * q_depth); in mana_hwc_create_wq()
504 queue_size = roundup_pow_of_two(GDMA_MAX_SQE_SIZE * q_depth); in mana_hwc_create_wq()
506 if (queue_size < MANA_MIN_QSIZE) in mana_hwc_create_wq()
[all …]
gdma_main.c
224 req.queue_size = queue->queue_size; in mana_gd_create_hw_eq()
336 u32 num_cqe = cq->queue_size / GDMA_CQE_SIZE; in mana_gd_ring_cq()
346 u32 head = eq->head % (eq->queue_size / GDMA_EQE_SIZE); in mana_gd_process_eqe()
410 num_eqe = eq->queue_size / GDMA_EQE_SIZE; in mana_gd_process_eq_events()
590 log2_num_entries = ilog2(queue->queue_size / GDMA_EQE_SIZE); in mana_gd_create_eq()
629 u32 log2_num_entries = ilog2(spec->queue_size / GDMA_CQE_SIZE); in mana_gd_create_cq()
665 err = mana_gd_alloc_memory(gc, spec->queue_size, gmi); in mana_gd_create_hwc_queue()
672 queue->queue_size = spec->queue_size; in mana_gd_create_hwc_queue()
792 err = mana_gd_alloc_memory(gc, spec->queue_size, gmi); in mana_gd_create_mana_eq()
803 queue->queue_size = spec->queue_size; in mana_gd_create_mana_eq()
[all …]
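
The hw_channel.c and gdma_main.c hits above size a queue by rounding entry size times depth up to a power of two, clamping it to a minimum, and later handing the entry count to hardware as a log2. A sketch of that arithmetic; the constants and the roundup_pow_of_two()/ilog2() stand-ins below are illustrative, not the GDMA definitions.

#include <stdint.h>
#include <stdio.h>

#define SQE_SIZE   64u          /* hypothetical work-queue entry size in bytes */
#define CQE_SIZE   64u          /* hypothetical completion entry size in bytes */
#define MIN_QSIZE  4096u        /* hypothetical minimum queue size */

static uint32_t roundup_pow_of_two(uint32_t n)
{
        uint32_t r = 1;

        while (r < n)
                r <<= 1;
        return r;
}

static uint32_t ilog2(uint32_t n)
{
        uint32_t log = 0;

        while (n >>= 1)
                log++;
        return log;
}

int main(void)
{
        uint32_t q_depth = 100;                               /* requested number of entries */
        uint32_t queue_size = roundup_pow_of_two(SQE_SIZE * q_depth);

        if (queue_size < MIN_QSIZE)                           /* clamp to the minimum */
                queue_size = MIN_QSIZE;

        printf("queue_size %u bytes, log2(entries) %u\n",
               queue_size, ilog2(queue_size / CQE_SIZE));
        return 0;
}
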
/linux-6.12.1/drivers/firmware/tegra/
ivc.c
562 unsigned tegra_ivc_total_queue_size(unsigned queue_size) in tegra_ivc_total_queue_size() argument
564 if (!IS_ALIGNED(queue_size, TEGRA_IVC_ALIGN)) { in tegra_ivc_total_queue_size()
566 __func__, queue_size, TEGRA_IVC_ALIGN); in tegra_ivc_total_queue_size()
570 return queue_size + sizeof(struct tegra_ivc_header); in tegra_ivc_total_queue_size()
652 size_t queue_size; in tegra_ivc_init() local
670 queue_size = tegra_ivc_total_queue_size(num_frames * frame_size); in tegra_ivc_init()
673 ivc->rx.phys = dma_map_single(peer, iosys_map_get_vaddr(rx), queue_size, in tegra_ivc_init()
678 ivc->tx.phys = dma_map_single(peer, iosys_map_get_vaddr(tx), queue_size, in tegra_ivc_init()
681 dma_unmap_single(peer, ivc->rx.phys, queue_size, in tegra_ivc_init()
bpmp-tegra186.c
112 size_t message_size, queue_size; in tegra186_bpmp_channel_init() local
123 queue_size = tegra_ivc_total_queue_size(message_size); in tegra186_bpmp_channel_init()
124 offset = queue_size * index; in tegra186_bpmp_channel_init()
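
tegra_ivc_total_queue_size() above adds a fixed header to an already-aligned payload size, and tegra186_bpmp_channel_init() then places channel N at N times that total. A sketch of the layout math; the header struct and alignment below are placeholders rather than the real tegra_ivc_header.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define IVC_ALIGN 64u

struct ivc_header {                     /* stand-in for struct tegra_ivc_header */
        uint32_t count;
        uint8_t pad[IVC_ALIGN - sizeof(uint32_t)];
};

static size_t total_queue_size(size_t queue_size)
{
        if (queue_size % IVC_ALIGN)     /* the kernel version warns and bails out here */
                return 0;
        return queue_size + sizeof(struct ivc_header);
}

int main(void)
{
        size_t num_frames = 16, frame_size = 128;
        size_t queue_size = total_queue_size(num_frames * frame_size);

        for (unsigned int index = 0; index < 4; index++)
                printf("channel %u at offset %zu\n", index, queue_size * index);
        return 0;
}
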
/linux-6.12.1/sound/firewire/
amdtp-stream.c
668 if (++s->packet_index >= s->queue_size) in queue_packet()
928 unsigned int queue_size) in compute_ohci_it_cycle() argument
931 return increment_ohci_cycle_count(cycle, queue_size); in compute_ohci_it_cycle()
941 unsigned int queue_size = s->queue_size; in generate_tx_packet_descs() local
1010 packet_index = (packet_index + 1) % queue_size; in generate_tx_packet_descs()
1043 unsigned int index = (s->packet_index + i) % s->queue_size; in generate_rx_packet_descs()
1046 desc->cycle = compute_ohci_it_cycle(*ctx_header, s->queue_size); in generate_rx_packet_descs()
1257 cycle = compute_ohci_it_cycle(ctx_header[packets - 1], s->queue_size); in skip_rx_packets()
1283 const unsigned int queue_size = s->queue_size; in process_rx_packets_intermediately() local
1294 unsigned int cycle = compute_ohci_it_cycle(ctx_header[offset], queue_size); in process_rx_packets_intermediately()
[all …]
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/steering/hws/
mlx5hws_send.h
196 u16 queue_size);
202 u16 queue_size);
210 u16 queue_size);
/linux-6.12.1/drivers/net/wireless/intel/iwlwifi/pcie/
rx.c
122 WARN_ON(rxq->queue_size & (rxq->queue_size - 1)); in iwl_rxq_space()
130 return (rxq->read - rxq->write - 1) & (rxq->queue_size - 1); in iwl_rxq_space()
272 rxq->write = (rxq->write + 1) & (rxq->queue_size - 1); in iwl_pcie_rxmq_restock()
685 free_size * rxq->queue_size, in iwl_pcie_free_rxq_dma()
696 rxq->queue_size, in iwl_pcie_free_rxq_dma()
724 rxq->queue_size = trans->cfg->num_rbds; in iwl_pcie_alloc_rxq_dma()
726 rxq->queue_size = RX_QUEUE_SIZE; in iwl_pcie_alloc_rxq_dma()
734 rxq->bd = dma_alloc_coherent(dev, free_size * rxq->queue_size, in iwl_pcie_alloc_rxq_dma()
742 rxq->queue_size, in iwl_pcie_alloc_rxq_dma()
1088 int i, err, queue_size, allocator_pool_size, num_alloc; in _iwl_pcie_rx_init() local
[all …]
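
The rx.c hits above depend on queue_size being a power of two: both the free-space calculation and the write-pointer advance use "& (queue_size - 1)" in place of a modulo, and the WARN_ON above enforces that precondition. A minimal sketch of the ring arithmetic:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

struct rxq {
        uint32_t read;
        uint32_t write;
        uint32_t queue_size;
};

static uint32_t rxq_space(const struct rxq *q)
{
        /* the mask trick below is only valid when queue_size is a power of two */
        assert((q->queue_size & (q->queue_size - 1)) == 0);
        return (q->read - q->write - 1) & (q->queue_size - 1);
}

int main(void)
{
        struct rxq q = { .read = 0, .write = 0, .queue_size = 256 };

        printf("free: %u\n", rxq_space(&q));              /* 255: one slot stays empty */
        q.write = (q.write + 1) & (q.queue_size - 1);     /* advance write pointer with wrap */
        printf("free: %u\n", rxq_space(&q));              /* 254 */
        return 0;
}
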
/linux-6.12.1/kernel/bpf/
queue_stack_maps.c
68 u64 size, queue_size; in queue_stack_map_alloc() local
71 queue_size = sizeof(*qs) + size * attr->value_size; in queue_stack_map_alloc()
73 qs = bpf_map_area_alloc(queue_size, numa_node); in queue_stack_map_alloc()
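
queue_stack_map_alloc() above sizes one allocation that holds the map header plus all of its element slots. A sketch of that layout using a flexible array member; the struct below is a simplified stand-in, and the "+ 1 spare slot" derivation of size is an assumption about code not shown in the excerpt.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct queue_stack_map {                /* simplified stand-in for the kernel's map struct */
        uint32_t head, tail;
        uint32_t size;                  /* number of element slots */
        uint32_t value_size;
        char elements[];                /* flexible array: size * value_size bytes */
};

static struct queue_stack_map *queue_stack_map_alloc(uint32_t max_entries,
                                                     uint32_t value_size)
{
        uint64_t size = (uint64_t)max_entries + 1;      /* assumed: one spare slot for the ring */
        uint64_t queue_size = sizeof(struct queue_stack_map) + size * value_size;
        struct queue_stack_map *qs = calloc(1, (size_t)queue_size);

        if (!qs)
                return NULL;
        qs->size = (uint32_t)size;
        qs->value_size = value_size;
        return qs;
}

int main(void)
{
        struct queue_stack_map *qs = queue_stack_map_alloc(128, 8);

        if (qs)
                printf("allocated %u slots of %u bytes\n", qs->size, qs->value_size);
        free(qs);
        return 0;
}
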
/linux-6.12.1/drivers/media/pci/intel/ipu6/
ipu6-fw-isys.c
287 input_queue_cfg[i].queue_size = IPU6_ISYS_SIZE_PROXY_SEND_QUEUE; in ipu6_isys_fwcom_cfg_init()
293 input_queue_cfg[base_dev_send + i].queue_size = max_devq_size; in ipu6_isys_fwcom_cfg_init()
299 input_queue_cfg[base_msg_send + i].queue_size = in ipu6_isys_fwcom_cfg_init()
306 output_queue_cfg[i].queue_size = in ipu6_isys_fwcom_cfg_init()
313 output_queue_cfg[base_msg_recv + i].queue_size = in ipu6_isys_fwcom_cfg_init()
ipu6-fw-com.c
200 sizeinput += size_mul(cfg->input[i].queue_size + 1, in ipu6_fw_com_prepare()
204 sizeoutput += size_mul(cfg->output[i].queue_size + 1, in ipu6_fw_com_prepare()
249 cfg->input[i].queue_size, in ipu6_fw_com_prepare()
258 cfg->output[i].queue_size, in ipu6_fw_com_prepare()
ipu6-fw-com.h
11 unsigned int queue_size; /* tokens per queue */ member
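
ipu6-fw-com.c above reserves queue_size + 1 token slots per queue when laying out the shared-memory region, presumably so a full ring can be distinguished from an empty one. A small sizing sketch with example numbers:

#include <stddef.h>
#include <stdio.h>

struct queue_cfg {
        unsigned int queue_size;        /* tokens per queue */
        unsigned int token_size;        /* bytes per token */
};

int main(void)
{
        struct queue_cfg input[] = { { 8, 64 }, { 32, 256 } };   /* example queues */
        size_t total = 0;

        for (size_t i = 0; i < sizeof(input) / sizeof(input[0]); i++)
                total += (size_t)(input[i].queue_size + 1) * input[i].token_size;

        printf("shared memory for input queues: %zu bytes\n", total);
        return 0;
}
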
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd_gc_9_4_3.c
328 uint32_t queue_size = in kgd_gfx_v9_4_3_hqd_load() local
331 uint64_t guessed_wptr = m->cp_hqd_pq_rptr & (queue_size - 1); in kgd_gfx_v9_4_3_hqd_load()
333 if ((m->cp_hqd_pq_wptr_lo & (queue_size - 1)) < guessed_wptr) in kgd_gfx_v9_4_3_hqd_load()
334 guessed_wptr += queue_size; in kgd_gfx_v9_4_3_hqd_load()
335 guessed_wptr += m->cp_hqd_pq_wptr_lo & ~(queue_size - 1); in kgd_gfx_v9_4_3_hqd_load()
amdgpu_amdkfd_gfx_v10_3.c
239 uint32_t queue_size = in hqd_load_v10_3() local
242 uint64_t guessed_wptr = m->cp_hqd_pq_rptr & (queue_size - 1); in hqd_load_v10_3()
244 if ((m->cp_hqd_pq_wptr_lo & (queue_size - 1)) < guessed_wptr) in hqd_load_v10_3()
245 guessed_wptr += queue_size; in hqd_load_v10_3()
246 guessed_wptr += m->cp_hqd_pq_wptr_lo & ~(queue_size - 1); in hqd_load_v10_3()
amdgpu_amdkfd_gfx_v11.c
224 uint32_t queue_size = in hqd_load_v11() local
227 uint64_t guessed_wptr = m->cp_hqd_pq_rptr & (queue_size - 1); in hqd_load_v11()
229 if ((m->cp_hqd_pq_wptr_lo & (queue_size - 1)) < guessed_wptr) in hqd_load_v11()
230 guessed_wptr += queue_size; in hqd_load_v11()
231 guessed_wptr += m->cp_hqd_pq_wptr_lo & ~(queue_size - 1); in hqd_load_v11()
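
The three hqd_load() variants above repeat the same reconstruction: the write pointer is a free-running counter, so when a saved MQD is reloaded a plausible 64-bit value is rebuilt from the read pointer's in-ring offset, the saved 32-bit wptr_lo and the power-of-two queue_size. A hedged sketch of that guess; the field names follow the snippets, while the surrounding code and the wptr_hi term are assumptions about lines not shown.

#include <stdint.h>
#include <stdio.h>

static uint64_t guess_wptr(uint32_t rptr, uint32_t wptr_lo, uint32_t wptr_hi,
                           uint32_t queue_size)
{
        /* start from the read pointer's offset inside the ring */
        uint64_t guessed_wptr = rptr & (queue_size - 1);

        /* if wptr_lo's in-ring offset is behind rptr's, the counter wrapped once more */
        if ((wptr_lo & (queue_size - 1)) < guessed_wptr)
                guessed_wptr += queue_size;
        /* add wptr_lo's whole-ring multiples and the saved high word */
        guessed_wptr += wptr_lo & ~(uint64_t)(queue_size - 1);
        guessed_wptr += (uint64_t)wptr_hi << 32;
        return guessed_wptr;
}

int main(void)
{
        /* 1 MiB ring; rptr sits a little ahead of wptr_lo's in-ring offset */
        printf("0x%llx\n",
               (unsigned long long)guess_wptr(0x120, 0x100040, 0, 0x100000));
        return 0;
}
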
/linux-6.12.1/include/linux/
vmw_vmci_defs.h
866 u64 queue_size) in vmci_q_header_add_producer_tail() argument
868 vmci_qp_add_pointer(&q_header->producer_tail, add, queue_size); in vmci_q_header_add_producer_tail()
878 u64 queue_size) in vmci_q_header_add_consumer_head() argument
880 vmci_qp_add_pointer(&q_header->consumer_head, add, queue_size); in vmci_q_header_add_consumer_head()
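
The vmci_q_header_add_*() helpers above advance a queue-pair head or tail pointer by a byte count that wraps at queue_size. A plain, non-atomic sketch of that wraparound add (not the header's exact helper):

#include <stdint.h>
#include <stdio.h>

static void qp_add_pointer(uint64_t *ptr, uint64_t add, uint64_t queue_size)
{
        uint64_t new_val = *ptr + add;

        if (new_val >= queue_size)      /* wrap back into [0, queue_size) */
                new_val -= queue_size;
        *ptr = new_val;
}

int main(void)
{
        uint64_t producer_tail = 4000, queue_size = 4096;

        qp_add_pointer(&producer_tail, 256, queue_size);        /* wraps to 160 */
        printf("producer_tail = %llu\n", (unsigned long long)producer_tail);
        return 0;
}
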
/linux-6.12.1/drivers/nvme/host/
rdma.c
87 int queue_size; member
266 init_attr.cap.max_send_wr = factor * queue->queue_size + 1; in nvme_rdma_create_qp()
268 init_attr.cap.max_recv_wr = queue->queue_size + 1; in nvme_rdma_create_qp()
444 nvme_rdma_free_ring(ibdev, queue->rsp_ring, queue->queue_size, in nvme_rdma_destroy_queue_ib()
505 queue->cq_size = cq_factor * queue->queue_size + 1; in nvme_rdma_create_queue_ib()
515 queue->rsp_ring = nvme_rdma_alloc_ring(ibdev, queue->queue_size, in nvme_rdma_create_queue_ib()
529 queue->queue_size, in nvme_rdma_create_queue_ib()
535 queue->queue_size, nvme_rdma_queue_idx(queue)); in nvme_rdma_create_queue_ib()
541 queue->queue_size, IB_MR_TYPE_INTEGRITY, in nvme_rdma_create_queue_ib()
546 queue->queue_size, nvme_rdma_queue_idx(queue)); in nvme_rdma_create_queue_ib()
[all …]
/linux-6.12.1/sound/core/
timer.c
79 int queue_size; member
1305 prev = tu->qtail == 0 ? tu->queue_size - 1 : tu->qtail - 1; in snd_timer_user_interrupt()
1312 if (tu->qused >= tu->queue_size) { in snd_timer_user_interrupt()
1316 tu->qtail %= tu->queue_size; in snd_timer_user_interrupt()
1329 if (tu->qused >= tu->queue_size) { in snd_timer_user_append_to_tqueue()
1333 tu->qtail %= tu->queue_size; in snd_timer_user_append_to_tqueue()
1406 prev = tu->qtail == 0 ? tu->queue_size - 1 : tu->qtail - 1; in snd_timer_user_tinterrupt()
1447 tu->queue_size = size; in realloc_user_queue()
1809 if (params.queue_size > 0 && in snd_timer_user_params()
1810 (params.queue_size < 32 || params.queue_size > 1024)) { in snd_timer_user_params()
[all …]
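
The timer.c hits above keep user-space timer events in a plain circular array: appends drop events once qused reaches queue_size, qtail wraps with a modulo (the size only has to fall in the 32..1024 range checked by snd_timer_user_params(), not be a power of two), and the interrupt paths look one slot behind qtail to coalesce with the previous entry. A simplified sketch:

#include <stdio.h>

#define QUEUE_SIZE 128                  /* user-supplied; the driver accepts 32..1024 */

struct tread { int event, val; };       /* stand-in for the timer read record */

static struct tread tqueue[QUEUE_SIZE];
static int qtail, qused, overrun;

static void append_to_tqueue(const struct tread *ev)
{
        if (qused >= QUEUE_SIZE) {      /* queue full: count the overrun, drop the event */
                overrun++;
                return;
        }
        tqueue[qtail++] = *ev;
        qtail %= QUEUE_SIZE;            /* wrap; size need not be a power of two */
        qused++;
}

int main(void)
{
        struct tread ev = { 1, 42 };
        int prev;

        append_to_tqueue(&ev);
        /* most recently queued slot, one position behind qtail */
        prev = qtail == 0 ? QUEUE_SIZE - 1 : qtail - 1;
        printf("qused=%d overrun=%d prev=%d val=%d\n",
               qused, overrun, prev, tqueue[prev].val);
        return 0;
}
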
/linux-6.12.1/drivers/misc/genwqe/
card_ddcb.c
1026 unsigned int queue_size; in setup_ddcb_queue() local
1032 queue_size = roundup(GENWQE_DDCB_MAX * sizeof(struct ddcb), PAGE_SIZE); in setup_ddcb_queue()
1042 queue->ddcb_vaddr = __genwqe_alloc_consistent(cd, queue_size, in setup_ddcb_queue()
1090 __genwqe_free_consistent(cd, queue_size, queue->ddcb_vaddr, in setup_ddcb_queue()
1105 unsigned int queue_size; in free_ddcb_queue() local
1107 queue_size = roundup(queue->ddcb_max * sizeof(struct ddcb), PAGE_SIZE); in free_ddcb_queue()
1113 __genwqe_free_consistent(cd, queue_size, queue->ddcb_vaddr, in free_ddcb_queue()
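
setup_ddcb_queue() and free_ddcb_queue() above size the descriptor ring as descriptor count times descriptor size, rounded up to whole pages before the coherent DMA allocation. A tiny sketch of that rounding with illustrative constants:

#include <stdio.h>

#define PAGE_SIZE  4096u
#define DDCB_MAX   32u                  /* hypothetical descriptor count */
#define DDCB_SIZE  256u                 /* hypothetical sizeof(struct ddcb) */

/* next multiple of y that is >= x */
#define roundup(x, y) ((((x) + (y) - 1) / (y)) * (y))

int main(void)
{
        unsigned int queue_size = roundup(DDCB_MAX * DDCB_SIZE, PAGE_SIZE);

        printf("allocate %u bytes (%u pages)\n", queue_size, queue_size / PAGE_SIZE);
        return 0;
}
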
/linux-6.12.1/drivers/platform/chrome/wilco_ec/
event.c
102 static int queue_size = 64; variable
103 module_param(queue_size, int, 0644);
468 dev_data->events = event_queue_new(queue_size); in event_device_add()
/linux-6.12.1/include/soc/tegra/
ivc.h
95 unsigned tegra_ivc_total_queue_size(unsigned queue_size);
/linux-6.12.1/include/uapi/rdma/
mana-abi.h
50 __u32 queue_size[4]; member
/linux-6.12.1/drivers/i2c/busses/
i2c-qcom-cci.c
122 u16 queue_size[NUM_QUEUES]; member
323 if (val == cci->data->queue_size[queue]) in cci_validate_queue()
697 .queue_size = { 64, 16 },
731 .queue_size = { 64, 16 },
765 .queue_size = { 64, 16 },
