Searched refs:cb_list (Results 1 – 25 of 26) sorted by relevance

/linux-6.12.1/drivers/dma-buf/
dma-fence.c
371 struct list_head cb_list; in dma_fence_signal_timestamp_locked() local
380 list_replace(&fence->cb_list, &cb_list); in dma_fence_signal_timestamp_locked()
386 list_for_each_entry_safe(cur, tmp, &cb_list, node) { in dma_fence_signal_timestamp_locked()
538 if (WARN(!list_empty(&fence->cb_list) && in dma_fence_release()
661 list_add_tail(&cb->node, &fence->cb_list); in dma_fence_add_callback()
784 list_add(&cb.base.node, &fence->cb_list); in dma_fence_default_wait()
1016 INIT_LIST_HEAD(&fence->cb_list); in dma_fence_init()
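Note: the dma-fence.c hits above all work on the same structure. Callbacks are queued on fence->cb_list under the fence lock, and signalling detaches the whole list onto a local head before notifying each entry. Below is a minimal sketch of that detach-and-notify step, assuming only the dma_fence/dma_fence_cb fields visible in the hits (cb_list, node) plus the public func and lock members; it is not the kernel's exact code.

#include <linux/dma-fence.h>
#include <linux/list.h>

/*
 * Sketch of the list_replace()/list_for_each_entry_safe() pattern from
 * dma_fence_signal_timestamp_locked(): the caller holds fence->lock and
 * has already set DMA_FENCE_FLAG_SIGNALED_BIT, so no new callbacks can
 * be added and fence->cb_list is never touched again after the replace.
 */
static void sketch_signal_callbacks(struct dma_fence *fence)
{
	struct dma_fence_cb *cur, *tmp;
	struct list_head cb_list;

	lockdep_assert_held(fence->lock);

	/* Detach every queued callback onto the local head. */
	list_replace(&fence->cb_list, &cb_list);

	list_for_each_entry_safe(cur, tmp, &cb_list, node) {
		INIT_LIST_HEAD(&cur->node);
		cur->func(fence, cur);
	}
}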
/linux-6.12.1/net/netfilter/
nf_tables_offload.c
199 struct list_head *cb_list) in nft_setup_cb_call() argument
204 list_for_each_entry(block_cb, cb_list, list) { in nft_setup_cb_call()
281 &basechain->flow_block.cb_list); in nft_flow_offload_cmd()
317 list_splice(&bo->cb_list, &basechain->flow_block.cb_list); in nft_flow_offload_bind()
335 nft_setup_cb_call(TC_SETUP_CLSFLOWER, &cls_flow, &bo->cb_list); in nft_flow_offload_unbind()
338 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in nft_flow_offload_unbind()
379 bo->cb_list_head = &basechain->flow_block.cb_list; in nft_flow_block_offload_init()
380 INIT_LIST_HEAD(&bo->cb_list); in nft_flow_block_offload_init()
414 list_move(&block_cb->list, &bo.cb_list); in nft_indr_block_cleanup()
434 if (list_empty(&bo.cb_list)) in nft_indr_block_offload_cmd()
nf_flow_table_offload.c
879 &offload->flowtable->flow_block.cb_list); in flow_offload_tuple_add()
888 &offload->flowtable->flow_block.cb_list); in flow_offload_tuple_del()
942 &offload->flowtable->flow_block.cb_list); in flow_offload_tuple_stats()
1108 list_splice(&bo->cb_list, &flowtable->flow_block.cb_list); in nf_flow_table_block_setup()
1111 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in nf_flow_table_block_setup()
1137 bo->cb_list_head = &flowtable->flow_block.cb_list; in nf_flow_table_block_offload_init()
1138 INIT_LIST_HEAD(&bo->cb_list); in nf_flow_table_block_offload_init()
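Note: both netfilter files above use the same flow_block machinery: the bind path splices the per-request bo->cb_list into the persistent flow_block.cb_list, and rule/tuple operations walk that list invoking each driver callback. A rough sketch of the walk, mirroring the nft_setup_cb_call() signature shown in the first hit and assuming the flow_block_cb fields cb, cb_priv and list from <net/flow_offload.h>:

#include <linux/netdevice.h>
#include <net/flow_offload.h>

/* Hand type_data to every registered block callback; stop on error. */
static int sketch_setup_cb_call(enum tc_setup_type type, void *type_data,
				struct list_head *cb_list)
{
	struct flow_block_cb *block_cb;
	int err;

	list_for_each_entry(block_cb, cb_list, list) {
		err = block_cb->cb(type, type_data, block_cb->cb_priv);
		if (err < 0)
			return err;
	}
	return 0;
}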
/linux-6.12.1/drivers/dma/
bcm2835-dma.c
95 struct bcm2835_cb_entry cb_list[]; member
209 dma_pool_free(desc->c->cb_pool, desc->cb_list[i].cb, in bcm2835_dma_free_cb_chain()
210 desc->cb_list[i].paddr); in bcm2835_dma_free_cb_chain()
317 d = kzalloc(struct_size(d, cb_list, frames), gfp); in bcm2835_dma_create_cb_chain()
330 cb_entry = &d->cb_list[frame]; in bcm2835_dma_create_cb_chain()
357 d->cb_list[frame - 1].cb->next = cb_entry->paddr; in bcm2835_dma_create_cb_chain()
370 d->cb_list[d->frames - 1].cb->info |= finalextrainfo; in bcm2835_dma_create_cb_chain()
452 writel(d->cb_list[0].paddr, c->chan_base + BCM2835_DMA_ADDR); in bcm2835_dma_start_desc()
544 struct bcm2835_dma_cb *control_block = d->cb_list[i].cb; in bcm2835_dma_desc_size_pos()
686 bcm2835_dma_fill_cb_chain_with_sg(chan, direction, d->cb_list, in bcm2835_dma_prep_slave_sg()
[all …]
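Note: in bcm2835-dma.c, cb_list is not a list_head at all but a flexible array member sized with struct_size() at allocation time; the per-frame control blocks are then chained through each block's next bus address. A simplified sketch of that allocate-and-chain step, using hypothetical sketch_* types in place of the driver's bcm2835_dma_cb/bcm2835_cb_entry layout:

#include <linux/dmapool.h>
#include <linux/overflow.h>
#include <linux/slab.h>

struct sketch_cb {
	u32 info;
	u32 next;	/* bus address of the next control block */
};

struct sketch_cb_entry {
	struct sketch_cb *cb;
	dma_addr_t paddr;
};

struct sketch_desc {
	unsigned int frames;
	struct sketch_cb_entry cb_list[];	/* flexible array member */
};

/* Allocate the descriptor plus one cb_list[] slot per frame, then link
 * each control block to its successor by bus address. */
static struct sketch_desc *sketch_create_cb_chain(struct dma_pool *pool,
						  unsigned int frames,
						  gfp_t gfp)
{
	struct sketch_desc *d;
	unsigned int frame;

	d = kzalloc(struct_size(d, cb_list, frames), gfp);
	if (!d)
		return NULL;
	d->frames = frames;

	for (frame = 0; frame < frames; frame++) {
		struct sketch_cb_entry *e = &d->cb_list[frame];

		e->cb = dma_pool_alloc(pool, gfp, &e->paddr);
		if (!e->cb)
			goto free_chain;
		if (frame)
			d->cb_list[frame - 1].cb->next = e->paddr;
	}
	return d;

free_chain:
	while (frame--)
		dma_pool_free(pool, d->cb_list[frame].cb, d->cb_list[frame].paddr);
	kfree(d);
	return NULL;
}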
/linux-6.12.1/net/core/
flow_offload.c
294 list_for_each_entry(block_cb, &block->cb_list, list) { in flow_block_cb_lookup()
414 struct list_head *cb_list; member
426 INIT_LIST_HEAD(&bo.cb_list); in existing_qdiscs_register()
428 list_splice(&bo.cb_list, cur->cb_list); in existing_qdiscs_register()
585 info->cb_list = bo->cb_list_head; in indir_dev_add()
630 return (bo && list_empty(&bo->cb_list)) ? -EOPNOTSUPP : count; in flow_indr_dev_setup_offload()
/linux-6.12.1/include/net/
flow_offload.h
583 struct list_head cb_list; member
595 struct list_head cb_list; member
652 list_add_tail(&block_cb->list, &offload->cb_list); in flow_block_cb_add()
658 list_move(&block_cb->list, &offload->cb_list); in flow_block_cb_remove()
665 list_move(&block_cb->list, &offload->cb_list); in flow_indr_block_cb_remove()
727 INIT_LIST_HEAD(&flow_block->cb_list); in flow_block_init()
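Note: the flow_offload.h hits are the registration side of the flow block mechanism: flow_block_init() prepares the empty cb_list and flow_block_cb_add() queues a callback entry onto the offload request, which the core later splices into the block (see the net/sched and netfilter hits). A hedged sketch of how a driver's FLOW_BLOCK_BIND handler typically uses these helpers; the sketch_* names are made up and duplicate-registration checks are omitted:

#include <linux/err.h>
#include <net/flow_offload.h>

struct sketch_priv {
	int dummy;	/* driver state handed back via cb_priv */
};

/* Allocate a flow_block_cb and append it to the request's cb_list so the
 * core can move it onto the block's own cb_list when the bind completes. */
static int sketch_setup_block_bind(struct flow_block_offload *f,
				   flow_setup_cb_t *cb,
				   struct sketch_priv *priv)
{
	struct flow_block_cb *block_cb;

	if (f->command != FLOW_BLOCK_BIND)
		return -EOPNOTSUPP;

	block_cb = flow_block_cb_alloc(cb, priv, priv, NULL);
	if (IS_ERR(block_cb))
		return PTR_ERR(block_cb);

	flow_block_cb_add(block_cb, f);
	return 0;
}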
/linux-6.12.1/drivers/gpu/drm/i915/gt/
intel_breadcrumbs.c
252 struct list_head cb_list; in signal_irq_work() local
258 list_replace(&rq->fence.cb_list, &cb_list); in signal_irq_work()
260 __dma_fence_signal__notify(&rq->fence, &cb_list); in signal_irq_work()
selftest_hangcheck.c
1544 if (wait_for(!list_empty(&rq->fence.cb_list), 10)) { in __igt_reset_evict_vma()
/linux-6.12.1/block/
blk-core.c
1115 INIT_LIST_HEAD(&plug->cb_list); in blk_start_plug_nr_ios()
1157 while (!list_empty(&plug->cb_list)) { in flush_plug_callbacks()
1158 list_splice_init(&plug->cb_list, &callbacks); in flush_plug_callbacks()
1179 list_for_each_entry(cb, &plug->cb_list, list) in blk_check_plugged()
1189 list_add(&cb->list, &plug->cb_list); in blk_check_plugged()
1197 if (!list_empty(&plug->cb_list)) in __blk_flush_plug()
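Note: blk-core.c uses plug->cb_list for per-plug unplug callbacks. blk_check_plugged() (hits 1179/1189) reuses an existing entry with the same callback and data before allocating a new one, and flush_plug_callbacks() splices the list off and runs each entry when the plug is flushed. A minimal sketch of the lookup-or-add step, using the blk_plug_cb fields list, callback and data from <linux/blkdev.h>:

#include <linux/blkdev.h>
#include <linux/sched.h>
#include <linux/slab.h>

/* Reuse an existing unplug callback on the current plug if one with the
 * same function and data is already queued, otherwise allocate and add it. */
static struct blk_plug_cb *sketch_check_plugged(blk_plug_cb_fn unplug,
						void *data, int size)
{
	struct blk_plug *plug = current->plug;
	struct blk_plug_cb *cb;

	if (!plug)
		return NULL;

	list_for_each_entry(cb, &plug->cb_list, list)
		if (cb->callback == unplug && cb->data == data)
			return cb;

	/* Callers may embed blk_plug_cb in a larger, "size"-byte structure. */
	BUG_ON(size < sizeof(*cb));
	cb = kzalloc(size, GFP_ATOMIC);
	if (cb) {
		cb->data = data;
		cb->callback = unplug;
		list_add(&cb->list, &plug->cb_list);
	}
	return cb;
}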
/linux-6.12.1/arch/s390/kernel/
vtime.c
288 LIST_HEAD(cb_list); in virt_timer_expire()
296 list_move_tail(&timer->entry, &cb_list); in virt_timer_expire()
309 list_for_each_entry_safe(timer, tmp, &cb_list, entry) { in virt_timer_expire()
/linux-6.12.1/include/drm/
drm_syncobj.h
55 struct list_head cb_list; member
/linux-6.12.1/drivers/infiniband/hw/mlx4/
alias_GUID.c
499 &dev->sriov.alias_guid.ports_guid[port - 1].cb_list; in set_guid_rec()
809 while (!list_empty(&det->cb_list)) { in mlx4_ib_destroy_alias_guid_service()
810 cb_ctx = list_entry(det->cb_list.next, in mlx4_ib_destroy_alias_guid_service()
866 INIT_LIST_HEAD(&dev->sriov.alias_guid.ports_guid[i].cb_list); in mlx4_ib_init_alias_guid_service()
mlx4_ib.h
436 struct list_head cb_list; member
/linux-6.12.1/drivers/gpu/drm/
drm_syncobj.c
281 list_add_tail(&wait->node, &syncobj->cb_list); in drm_syncobj_fence_add_wait()
352 list_for_each_entry_safe(wait_cur, wait_tmp, &syncobj->cb_list, node) in drm_syncobj_add_point()
388 list_for_each_entry_safe(wait_cur, wait_tmp, &syncobj->cb_list, node) in drm_syncobj_replace_fence()
564 INIT_LIST_HEAD(&syncobj->cb_list); in drm_syncobj_create()
/linux-6.12.1/include/linux/
dma-fence.h
86 struct list_head cb_list; member
blkdev.h
1014 struct list_head cb_list; /* md requires an unplug callback */ member
/linux-6.12.1/fs/xfs/
xfs_log.c
476 LIST_HEAD(cb_list); in xlog_state_shutdown_callbacks()
484 list_splice_init(&iclog->ic_callbacks, &cb_list); in xlog_state_shutdown_callbacks()
487 xlog_cil_process_committed(&cb_list); in xlog_state_shutdown_callbacks()
2513 LIST_HEAD(cb_list); in xlog_state_do_iclog_callbacks()
2521 list_splice_init(&iclog->ic_callbacks, &cb_list); in xlog_state_do_iclog_callbacks()
2525 xlog_cil_process_committed(&cb_list); in xlog_state_do_iclog_callbacks()
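Note: xfs_log.c (like the s390 vtime hits above) declares an on-stack LIST_HEAD(cb_list) and uses list_splice_init()/list_move_tail() to collect pending callbacks under a lock, then runs them with the lock dropped. A generic sketch of that drain-then-process idiom, with hypothetical item and lock names:

#include <linux/list.h>
#include <linux/spinlock.h>

struct sketch_item {
	struct list_head entry;
	void (*fn)(struct sketch_item *item);
};

/* Move every queued item onto a private on-stack list while holding the
 * lock, then invoke the callbacks without the lock held. */
static void sketch_run_pending(spinlock_t *lock, struct list_head *pending)
{
	LIST_HEAD(cb_list);
	struct sketch_item *item, *tmp;

	spin_lock(lock);
	list_splice_init(pending, &cb_list);
	spin_unlock(lock);

	list_for_each_entry_safe(item, tmp, &cb_list, entry) {
		list_del_init(&item->entry);
		item->fn(item);
	}
}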
/linux-6.12.1/net/sched/
cls_api.c
797 bo->cb_list_head = &flow_block->cb_list; in tcf_block_offload_init()
798 INIT_LIST_HEAD(&bo->cb_list); in tcf_block_offload_init()
819 list_move(&block_cb->list, &bo.cb_list); in tc_block_indr_cleanup()
1641 list_for_each_entry(block_cb, &bo->cb_list, list) { in tcf_block_bind()
1653 list_splice(&bo->cb_list, &block->flow_block.cb_list); in tcf_block_bind()
1658 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in tcf_block_bind()
1682 list_for_each_entry_safe(block_cb, next, &bo->cb_list, list) { in tcf_block_unbind()
3578 list_for_each_entry(block_cb, &block->flow_block.cb_list, list) { in __tc_setup_cb_call()
act_ct.c
384 WARN_ON(!list_empty(&block->cb_list)); in tcf_ct_flow_table_cleanup_work()
/linux-6.12.1/include/net/netfilter/
nf_flow_table.h
245 list_add_tail(&block_cb->list, &block->cb_list); in nf_flow_table_offload_add_cb()
/linux-6.12.1/drivers/gpu/drm/i915/
i915_active.c
1009 list_add_tail((struct list_head *)node, &rq->fence.cb_list); in i915_request_add_active_barriers()
1112 list_add_tail(&active->cb.node, &fence->cb_list); in __i915_active_fence_set()
/linux-6.12.1/drivers/accel/habanalabs/common/
debugfs.c
113 list_for_each_entry(cb, &dev_entry->cb_list, debugfs_list) { in command_buffers_show()
1752 INIT_LIST_HEAD(&dev_entry->cb_list); in hl_debugfs_device_init()
1820 list_add(&cb->debugfs_list, &dev_entry->cb_list); in hl_debugfs_add_cb()
habanalabs.h
2413 struct list_head cb_list; member
/linux-6.12.1/drivers/gpu/drm/vmwgfx/
vmwgfx_fence.c
204 list_add(&cb.base.node, &f->cb_list); in vmw_fence_wait()
/linux-6.12.1/drivers/net/wireless/intel/ipw2x00/
ipw2200.h
752 struct command_block cb_list[CB_NUMBER_OF_ELEMENTS_SMALL]; member
