Searched refs: page_pool (Results 1 – 25 of 97) sorted by relevance

/linux-6.12.1/include/net/page_pool/
types.h
159 struct page_pool { struct
244 struct page *page_pool_alloc_pages(struct page_pool *pool, gfp_t gfp); argument
245 netmem_ref page_pool_alloc_netmem(struct page_pool *pool, gfp_t gfp);
246 struct page *page_pool_alloc_frag(struct page_pool *pool, unsigned int *offset,
248 netmem_ref page_pool_alloc_frag_netmem(struct page_pool *pool,
251 struct page_pool *page_pool_create(const struct page_pool_params *params);
252 struct page_pool *page_pool_create_percpu(const struct page_pool_params *params,
258 void page_pool_disable_direct_recycling(struct page_pool *pool);
259 void page_pool_destroy(struct page_pool *pool);
260 void page_pool_use_xdp_mem(struct page_pool *pool, void (*disconnect)(void *),
[all …]
helpers.h
67 bool page_pool_get_stats(const struct page_pool *pool,
92 static inline struct page *page_pool_dev_alloc_pages(struct page_pool *pool) in page_pool_dev_alloc_pages()
110 static inline struct page *page_pool_dev_alloc_frag(struct page_pool *pool, in page_pool_dev_alloc_frag()
119 static inline struct page *page_pool_alloc(struct page_pool *pool, in page_pool_alloc()
161 static inline struct page *page_pool_dev_alloc(struct page_pool *pool, in page_pool_dev_alloc()
170 static inline void *page_pool_alloc_va(struct page_pool *pool, in page_pool_alloc_va()
196 static inline void *page_pool_dev_alloc_va(struct page_pool *pool, in page_pool_dev_alloc_va()
212 page_pool_get_dma_dir(const struct page_pool *pool) in page_pool_get_dma_dir()
311 static inline void page_pool_put_netmem(struct page_pool *pool, in page_pool_put_netmem()
340 static inline void page_pool_put_page(struct page_pool *pool, in page_pool_put_page()
[all …]
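
The allocation and teardown entry points listed above pair up in a simple pattern. The following is a minimal sketch, not taken from the tree: my_page_pool_demo() and the parameter values are hypothetical, and only the page_pool_* calls mirror the prototypes shown in types.h and helpers.h.

#include <net/page_pool/helpers.h>

static int my_page_pool_demo(struct device *dev)
{
        struct page_pool_params pp_params = {
                .order     = 0,                 /* order-0 (single) pages */
                .pool_size = 256,               /* roughly the RX ring size */
                .nid       = NUMA_NO_NODE,
                .dev       = dev,
                .dma_dir   = DMA_FROM_DEVICE,
                .flags     = PP_FLAG_DMA_MAP,   /* pool maps pages for DMA */
        };
        struct page_pool *pool;
        struct page *page;

        pool = page_pool_create(&pp_params);
        if (IS_ERR(pool))
                return PTR_ERR(pool);

        /* GFP_ATOMIC allocation helper, suitable for NAPI refill paths. */
        page = page_pool_dev_alloc_pages(pool);
        if (page)
                /* Whole page no longer needed: recycle it into the pool. */
                page_pool_put_full_page(pool, page, false);

        /* The pool is released once every in-flight page has come back. */
        page_pool_destroy(pool);
        return 0;
}
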
/linux-6.12.1/net/core/
mp_dmabuf_devmem.h
14 int mp_dmabuf_devmem_init(struct page_pool *pool);
16 netmem_ref mp_dmabuf_devmem_alloc_netmems(struct page_pool *pool, gfp_t gfp);
18 void mp_dmabuf_devmem_destroy(struct page_pool *pool);
20 bool mp_dmabuf_devmem_release_page(struct page_pool *pool, netmem_ref netmem);
22 static inline int mp_dmabuf_devmem_init(struct page_pool *pool) in mp_dmabuf_devmem_init()
28 mp_dmabuf_devmem_alloc_netmems(struct page_pool *pool, gfp_t gfp) in mp_dmabuf_devmem_alloc_netmems()
33 static inline void mp_dmabuf_devmem_destroy(struct page_pool *pool) in mp_dmabuf_devmem_destroy()
38 mp_dmabuf_devmem_release_page(struct page_pool *pool, netmem_ref netmem) in mp_dmabuf_devmem_release_page()
page_pool.c
82 bool page_pool_get_stats(const struct page_pool *pool, in page_pool_get_stats()
158 static bool page_pool_producer_lock(struct page_pool *pool) in page_pool_producer_lock()
171 static void page_pool_producer_unlock(struct page_pool *pool, in page_pool_producer_unlock()
183 CACHELINE_ASSERT_GROUP_MEMBER(struct page_pool, frag, frag_users); in page_pool_struct_check()
184 CACHELINE_ASSERT_GROUP_MEMBER(struct page_pool, frag, frag_page); in page_pool_struct_check()
185 CACHELINE_ASSERT_GROUP_MEMBER(struct page_pool, frag, frag_offset); in page_pool_struct_check()
186 CACHELINE_ASSERT_GROUP_SIZE(struct page_pool, frag, in page_pool_struct_check()
190 static int page_pool_init(struct page_pool *pool, in page_pool_init()
311 static void page_pool_uninit(struct page_pool *pool) in page_pool_uninit()
329 struct page_pool *
[all …]
page_pool_priv.h
10 s32 page_pool_inflight(const struct page_pool *pool, bool strict);
12 int page_pool_list(struct page_pool *pool);
13 void page_pool_detached(struct page_pool *pool);
14 void page_pool_unlist(struct page_pool *pool);
39 void page_pool_set_pp_info(struct page_pool *pool, netmem_ref netmem);
44 static inline void page_pool_set_pp_info(struct page_pool *pool, in page_pool_set_pp_info()
page_pool_user.c
34 typedef int (*pp_nl_fill_cb)(struct sk_buff *rsp, const struct page_pool *pool,
40 struct page_pool *pool; in netdev_nl_page_pool_get_do()
86 struct page_pool *pool; in netdev_nl_page_pool_get_dump()
112 page_pool_nl_stats_fill(struct sk_buff *rsp, const struct page_pool *pool, in page_pool_nl_stats_fill()
214 page_pool_nl_fill(struct sk_buff *rsp, const struct page_pool *pool, in page_pool_nl_fill()
258 static void netdev_nl_page_pool_event(const struct page_pool *pool, u32 cmd) in netdev_nl_page_pool_event()
307 int page_pool_list(struct page_pool *pool) in page_pool_list()
335 void page_pool_detached(struct page_pool *pool) in page_pool_detached()
343 void page_pool_unlist(struct page_pool *pool) in page_pool_unlist()
357 struct page_pool *pool; in page_pool_check_memory_provider()
[all …]
devmem.c
327 int mp_dmabuf_devmem_init(struct page_pool *pool) in mp_dmabuf_devmem_init()
347 netmem_ref mp_dmabuf_devmem_alloc_netmems(struct page_pool *pool, gfp_t gfp) in mp_dmabuf_devmem_alloc_netmems()
366 void mp_dmabuf_devmem_destroy(struct page_pool *pool) in mp_dmabuf_devmem_destroy()
373 bool mp_dmabuf_devmem_release_page(struct page_pool *pool, netmem_ref netmem) in mp_dmabuf_devmem_release_page()
netmem_priv.h
21 static inline void netmem_set_pp(netmem_ref netmem, struct page_pool *pool) in netmem_set_pp()
/linux-6.12.1/include/trace/events/
page_pool.h
3 #define TRACE_SYSTEM page_pool
16 TP_PROTO(const struct page_pool *pool,
22 __field(const struct page_pool *, pool)
44 TP_PROTO(const struct page_pool *pool,
50 __field(const struct page_pool *, pool)
70 TP_PROTO(const struct page_pool *pool,
76 __field(const struct page_pool *, pool)
96 TP_PROTO(const struct page_pool *pool, int new_nid),
101 __field(const struct page_pool *, pool)
/linux-6.12.1/Documentation/networking/
page_pool.rst
7 .. kernel-doc:: include/net/page_pool/helpers.h
8 :doc: page_pool allocator
58 .. kernel-doc:: net/core/page_pool.c
61 .. kernel-doc:: include/net/page_pool/types.h
64 .. kernel-doc:: include/net/page_pool/helpers.h
71 .. kernel-doc:: net/core/page_pool.c
112 It takes a pointer to a ``struct page_pool`` and a pointer to a struct
119 .. kernel-doc:: include/net/page_pool/types.h
138 /* internal DMA mapping in page_pool */
145 page_pool = page_pool_create(&pp_params);
[all …]
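
The documentation hits above come from the driver example in page_pool.rst: create a pool that does its own DMA mapping, then attach it to an RX queue as the XDP memory model. A hedged sketch of that flow follows; struct my_priv and its fields are invented for the example, and it assumes the current four-argument xdp_rxq_info_reg().

#include <net/xdp.h>
#include <net/page_pool/helpers.h>

struct my_priv {
        struct device *dev;
        struct net_device *ndev;
        struct xdp_rxq_info xdp_rxq;
        struct page_pool *page_pool;
};

static int my_rxq_register(struct my_priv *priv, struct napi_struct *napi)
{
        struct page_pool_params pp_params = {
                .order     = 0,
                .flags     = PP_FLAG_DMA_MAP,   /* internal DMA mapping in page_pool */
                .pool_size = 256,
                .nid       = NUMA_NO_NODE,
                .dev       = priv->dev,
                .napi      = napi,
                .dma_dir   = DMA_FROM_DEVICE,
        };
        int err;

        priv->page_pool = page_pool_create(&pp_params);
        if (IS_ERR(priv->page_pool))
                return PTR_ERR(priv->page_pool);

        err = xdp_rxq_info_reg(&priv->xdp_rxq, priv->ndev, 0, napi->napi_id);
        if (err < 0)
                goto err_destroy;

        /* Tell the XDP core that buffers on this queue come from the pool. */
        err = xdp_rxq_info_reg_mem_model(&priv->xdp_rxq, MEM_TYPE_PAGE_POOL,
                                         priv->page_pool);
        if (err < 0)
                goto err_unreg;

        return 0;

err_unreg:
        xdp_rxq_info_unreg(&priv->xdp_rxq);
err_destroy:
        page_pool_destroy(priv->page_pool);
        return err;
}
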
/linux-6.12.1/drivers/net/ethernet/apm/xgene/
xgene_enet_main.c
672 struct xgene_enet_desc_ring *buf_pool, *page_pool; in xgene_enet_rx_frame() local
690 page_pool = rx_ring->page_pool; in xgene_enet_rx_frame()
719 xgene_enet_free_pagepool(page_pool, raw_desc, exp_desc); in xgene_enet_rx_frame()
729 slots = page_pool->slots - 1; in xgene_enet_rx_frame()
730 head = page_pool->head; in xgene_enet_rx_frame()
741 page = page_pool->frag_page[head]; in xgene_enet_rx_frame()
747 page_pool->frag_page[head] = NULL; in xgene_enet_rx_frame()
751 page_pool->head = head; in xgene_enet_rx_frame()
764 ret = xgene_enet_refill_pagepool(page_pool, NUM_NXTBUFPOOL); in xgene_enet_rx_frame()
1062 struct xgene_enet_desc_ring *buf_pool, *page_pool; in xgene_enet_delete_desc_rings() local
[all …]
/linux-6.12.1/drivers/net/vmxnet3/
vmxnet3_xdp.c
272 page_pool_recycle_direct(rq->page_pool, page); in vmxnet3_run_xdp()
280 page_pool_recycle_direct(rq->page_pool, page); in vmxnet3_run_xdp()
297 page_pool_recycle_direct(rq->page_pool, page); in vmxnet3_run_xdp()
310 page_pool_recycle_direct(rq->page_pool, page); in vmxnet3_build_skb()
335 page = page_pool_alloc_pages(rq->page_pool, GFP_ATOMIC); in vmxnet3_process_xdp_small()
342 xdp_prepare_buff(&xdp, page_address(page), rq->page_pool->p.offset, in vmxnet3_process_xdp_small()
385 rq->page_pool->p.offset, rbi->len, in vmxnet3_process_xdp()
386 page_pool_get_dma_dir(rq->page_pool)); in vmxnet3_process_xdp()
389 xdp_prepare_buff(&xdp, page_address(page), rq->page_pool->p.offset, in vmxnet3_process_xdp()
407 new_data = vmxnet3_pp_get_buff(rq->page_pool, &new_dma_addr, in vmxnet3_process_xdp()
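
The vmxnet3 hits show the common XDP receive pattern: sync the pool-owned buffer for the CPU, run the program, and recycle dropped pages straight back into the pool. A hedged sketch of that pattern, not the vmxnet3 code itself: my_run_xdp() and its parameters are hypothetical, and page_pool_dma_sync_for_cpu() is the helper also seen in the mlxsw hits further down.

#include <linux/filter.h>
#include <net/xdp.h>
#include <net/page_pool/helpers.h>

static u32 my_run_xdp(struct bpf_prog *prog, struct page_pool *pool,
                      struct xdp_rxq_info *rxq, struct page *page,
                      unsigned int headroom, unsigned int len)
{
        struct xdp_buff xdp;
        u32 act;

        /* Bring the DMA-mapped part of the page up to date for the CPU. */
        page_pool_dma_sync_for_cpu(pool, page, headroom, len);

        xdp_init_buff(&xdp, PAGE_SIZE, rxq);
        xdp_prepare_buff(&xdp, page_address(page), headroom, len, false);

        act = bpf_prog_run_xdp(prog, &xdp);
        if (act == XDP_DROP)
                /* Page is still mapped; return it to the pool's lockless
                 * cache. Only legal from the NAPI context owning the pool. */
                page_pool_recycle_direct(pool, page);

        return act;
}
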
/linux-6.12.1/drivers/net/ethernet/microchip/lan966x/
lan966x_fdma.c
16 page = page_pool_dev_alloc_pages(rx->page_pool); in lan966x_fdma_rx_dataptr_cb()
58 page_pool_put_full_page(rx->page_pool, in lan966x_fdma_rx_free_pages()
72 page_pool_recycle_direct(rx->page_pool, page); in lan966x_fdma_rx_free_page()
93 rx->page_pool = page_pool_create(&pp_params); in lan966x_fdma_rx_alloc_page_pool()
104 rx->page_pool); in lan966x_fdma_rx_alloc_page_pool()
107 return PTR_ERR_OR_ZERO(rx->page_pool); in lan966x_fdma_rx_alloc_page_pool()
117 return PTR_ERR(rx->page_pool); in lan966x_fdma_rx_alloc()
376 page_pool_recycle_direct(rx->page_pool, in lan966x_fdma_tx_clear_buf()
469 page_pool_recycle_direct(rx->page_pool, page); in lan966x_fdma_rx_get_frame()
812 struct page_pool *page_pool; in lan966x_fdma_reload() local
[all …]
/linux-6.12.1/include/net/
xdp_priv.h
13 struct page_pool *page_pool; member
netmem.h
26 struct page_pool *pp;
130 static inline struct page_pool *netmem_get_pp(netmem_ref netmem) in netmem_get_pp()
/linux-6.12.1/block/
bounce.c
32 static mempool_t page_pool; variable
61 ret = mempool_init_page_pool(&page_pool, POOL_SIZE, 0); in init_emergency_pool()
116 mempool_free(bvec->bv_page, &page_pool); in bounce_end_io()
248 bounce_page = mempool_alloc(&page_pool, GFP_NOIO); in __blk_queue_bounce()
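
Note that this block-layer page_pool is unrelated to the networking allocator above: it is a mempool of order-0 pages kept as an emergency reserve for bounce buffering. A minimal sketch of that mempool pattern, with hypothetical names:

#include <linux/init.h>
#include <linux/mempool.h>
#include <linux/gfp.h>

static mempool_t emergency_pages;

static int __init my_emergency_pool_init(void)
{
        /* Pre-reserve 64 order-0 pages that allocations can fall back on. */
        return mempool_init_page_pool(&emergency_pages, 64, 0);
}

static struct page *my_bounce_page_alloc(void)
{
        /* GFP_NOIO: never recurse into the I/O path while bouncing I/O. */
        return mempool_alloc(&emergency_pages, GFP_NOIO);
}

static void my_bounce_page_free(struct page *page)
{
        mempool_free(page, &emergency_pages);
}
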
/linux-6.12.1/drivers/net/netdevsim/
netdev.c
373 rq->page_pool = page_pool_create(&p); in nsim_create_page_pool()
374 if (IS_ERR(rq->page_pool)) { in nsim_create_page_pool()
375 int err = PTR_ERR(rq->page_pool); in nsim_create_page_pool()
377 rq->page_pool = NULL; in nsim_create_page_pool()
407 page_pool_destroy(ns->rq[i].page_pool); in nsim_init_napi()
408 ns->rq[i].page_pool = NULL; in nsim_init_napi()
458 page_pool_destroy(ns->rq[i].page_pool); in nsim_del_napi()
459 ns->rq[i].page_pool = NULL; in nsim_del_napi()
592 ns->page = page_pool_dev_alloc_pages(ns->rq[0].page_pool); in nsim_pp_hold_write()
/linux-6.12.1/drivers/net/ethernet/meta/fbnic/
fbnic_txrx.h
102 struct page_pool *page_pool; member
/linux-6.12.1/drivers/net/ethernet/mellanox/mlxsw/
pci.c
88 struct page_pool *page_pool; member
398 struct page_pool *page_pool; in mlxsw_pci_rdq_build_skb() local
409 page_pool = cq->u.cq.page_pool; in mlxsw_pci_rdq_build_skb()
410 page_pool_dma_sync_for_cpu(page_pool, pages[page_index], in mlxsw_pci_rdq_build_skb()
435 page_pool_dma_sync_for_cpu(page_pool, page, 0, frag_size); in mlxsw_pci_rdq_build_skb()
453 page = page_pool_dev_alloc_pages(cq->u.cq.page_pool); in mlxsw_pci_rdq_page_alloc()
468 page_pool_put_page(cq->u.cq.page_pool, elem_info->pages[index], -1, in mlxsw_pci_rdq_page_free()
520 page_pool_recycle_direct(cq->u.cq.page_pool, old_pages[i]); in mlxsw_pci_rdq_pages_alloc()
533 page_pool_recycle_direct(cq->u.cq.page_pool, pages[i]); in mlxsw_pci_rdq_pages_recycle()
994 struct page_pool *page_pool; in mlxsw_pci_cq_page_pool_init() local
[all …]
/linux-6.12.1/drivers/net/ethernet/marvell/octeontx2/nic/
otx2_txrx.h
126 struct page_pool *page_pool; member
/linux-6.12.1/drivers/net/ethernet/socionext/
netsec.c
286 struct page_pool *page_pool; member
742 page = page_pool_dev_alloc_pages(dring->page_pool); in netsec_alloc_rx_data()
863 page_pool_get_dma_dir(rx_ring->page_pool); in netsec_xdp_queue_one()
922 page_pool_put_page(dring->page_pool, page, sync, true); in netsec_run_xdp()
932 page_pool_put_page(dring->page_pool, page, sync, true); in netsec_run_xdp()
944 page_pool_put_page(dring->page_pool, page, sync, true); in netsec_run_xdp()
966 dma_dir = page_pool_get_dma_dir(dring->page_pool); in netsec_process_rx()
1041 page_pool_put_page(dring->page_pool, page, pkt_len, in netsec_process_rx()
1215 page_pool_put_full_page(dring->page_pool, page, false); in netsec_uninit_pkt_dring()
1227 page_pool_destroy(dring->page_pool); in netsec_uninit_pkt_dring()
[all …]
/linux-6.12.1/drivers/net/
veth.c
70 struct page_pool *page_pool; member
188 if (!priv->rq[i].page_pool) in veth_get_page_pool_stats()
190 page_pool_get_stats(priv->rq[i].page_pool, &pp_stats); in veth_get_page_pool_stats()
733 if (skb_pp_cow_data(rq->page_pool, pskb, XDP_PACKET_HEADROOM)) in veth_convert_skb_to_xdp_buff()
974 rq->page_pool = page_pool_create(&pp_params); in veth_create_page_pool()
975 if (IS_ERR(rq->page_pool)) { in veth_create_page_pool()
976 int err = PTR_ERR(rq->page_pool); in veth_create_page_pool()
978 rq->page_pool = NULL; in veth_create_page_pool()
1019 page_pool_destroy(priv->rq[i].page_pool); in __veth_napi_enable_range()
1020 priv->rq[i].page_pool = NULL; in __veth_napi_enable_range()
[all …]
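
veth_get_page_pool_stats() above aggregates per-queue pool counters for ethtool. A hedged sketch of that pattern: struct my_rq and the queue count are invented, and the helpers are only built with CONFIG_PAGE_POOL_STATS enabled.

#include <net/page_pool/helpers.h>

struct my_rq {
        struct page_pool *page_pool;
};

static void my_get_page_pool_stats(struct my_rq *rq, int num_queues, u64 *data)
{
        struct page_pool_stats pp_stats = {};
        int i;

        for (i = 0; i < num_queues; i++) {
                if (!rq[i].page_pool)
                        continue;
                /* Accumulate this pool's alloc/recycle counters into pp_stats. */
                page_pool_get_stats(rq[i].page_pool, &pp_stats);
        }

        /* Flatten the aggregate into the ethtool stats array. */
        page_pool_ethtool_stats_get(data, &pp_stats);
}
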
/linux-6.12.1/drivers/net/ethernet/ti/
cpsw_priv.c
1106 struct page_pool *pool; in cpsw_fill_rx_channels()
1113 pool = cpsw->page_pool[ch]; in cpsw_fill_rx_channels()
1147 static struct page_pool *cpsw_create_page_pool(struct cpsw_common *cpsw, in cpsw_create_page_pool()
1151 struct page_pool *pool; in cpsw_create_page_pool()
1169 struct page_pool *pool; in cpsw_create_rx_pool()
1177 cpsw->page_pool[ch] = pool; in cpsw_create_rx_pool()
1186 struct page_pool *pool; in cpsw_ndev_create_xdp_rxq()
1189 pool = cpsw->page_pool[ch]; in cpsw_ndev_create_xdp_rxq()
1227 page_pool_destroy(cpsw->page_pool[ch]); in cpsw_destroy_xdp_rxqs()
1228 cpsw->page_pool[ch] = NULL; in cpsw_destroy_xdp_rxqs()
[all …]
am65-cpsw-nuss.h
103 struct page_pool *page_pool; member
/linux-6.12.1/drivers/net/ethernet/tehuti/
tn40.h
114 struct page_pool *page_pool; member
