Searched for refs:xdp_buff (results 1 – 25 of 101), sorted by relevance

/linux-6.12.1/include/net/
xdp_sock_drv.h
85 static inline dma_addr_t xsk_buff_xdp_get_dma(struct xdp_buff *xdp) in xsk_buff_xdp_get_dma()
92 static inline dma_addr_t xsk_buff_xdp_get_frame_dma(struct xdp_buff *xdp) in xsk_buff_xdp_get_frame_dma()
99 static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool) in xsk_buff_alloc()
110 static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max) in xsk_buff_alloc_batch()
120 static inline void xsk_buff_free(struct xdp_buff *xdp) in xsk_buff_free()
139 static inline void xsk_buff_add_frag(struct xdp_buff *xdp) in xsk_buff_add_frag()
146 static inline struct xdp_buff *xsk_buff_get_frag(struct xdp_buff *first) in xsk_buff_get_frag()
149 struct xdp_buff *ret = NULL; in xsk_buff_get_frag()
162 static inline void xsk_buff_del_tail(struct xdp_buff *tail) in xsk_buff_del_tail()
169 static inline struct xdp_buff *xsk_buff_get_tail(struct xdp_buff *first) in xsk_buff_get_tail()
[all …]
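
As a reading aid (not part of the search results): a minimal, hedged sketch of how a zero-copy driver might use the xsk_buff_* helpers listed above. The ring structure and function names (my_zc_ring, my_zc_refill_one, my_zc_drop) are hypothetical.

#include <linux/errno.h>
#include <net/xdp_sock_drv.h>

/* Hypothetical per-ring state, invented for illustration only. */
struct my_zc_ring {
        struct xsk_buff_pool *pool;     /* XSK pool bound to this RX ring */
        struct xdp_buff **bufs;         /* one xdp_buff pointer per descriptor */
};

/* Refill one RX descriptor slot from the XSK fill ring. */
static int my_zc_refill_one(struct my_zc_ring *ring, u32 idx, dma_addr_t *dma)
{
        struct xdp_buff *xdp = xsk_buff_alloc(ring->pool);

        if (!xdp)
                return -ENOMEM;         /* fill ring empty, retry later */

        ring->bufs[idx] = xdp;
        /* DMA address to program into the hardware RX descriptor. */
        *dma = xsk_buff_xdp_get_dma(xdp);
        return 0;
}

/* Drop path: hand a buffer back to the pool. */
static void my_zc_drop(struct my_zc_ring *ring, u32 idx)
{
        xsk_buff_free(ring->bufs[idx]);
        ring->bufs[idx] = NULL;
}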
xdp.h
80 struct xdp_buff { struct
91 static __always_inline bool xdp_buff_has_frags(struct xdp_buff *xdp) in xdp_buff_has_frags() argument
96 static __always_inline void xdp_buff_set_frags_flag(struct xdp_buff *xdp) in xdp_buff_set_frags_flag()
101 static __always_inline void xdp_buff_clear_frags_flag(struct xdp_buff *xdp) in xdp_buff_clear_frags_flag()
106 static __always_inline bool xdp_buff_is_frag_pfmemalloc(struct xdp_buff *xdp) in xdp_buff_is_frag_pfmemalloc()
111 static __always_inline void xdp_buff_set_frag_pfmemalloc(struct xdp_buff *xdp) in xdp_buff_set_frag_pfmemalloc()
117 xdp_init_buff(struct xdp_buff *xdp, u32 frame_sz, struct xdp_rxq_info *rxq) in xdp_init_buff()
125 xdp_prepare_buff(struct xdp_buff *xdp, unsigned char *hard_start, in xdp_prepare_buff()
147 xdp_get_shared_info_from_buff(struct xdp_buff *xdp) in xdp_get_shared_info_from_buff()
152 static __always_inline unsigned int xdp_get_buff_len(struct xdp_buff *xdp) in xdp_get_buff_len()
[all …]
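
For orientation (not taken from the hits above): a hedged sketch of the per-packet pattern most drivers build around xdp_init_buff()/xdp_prepare_buff() before handing the buffer to bpf_prog_run_xdp(). The wrapper name my_run_xdp and its parameters are illustrative; real drivers take these values from their RX descriptors.

#include <linux/filter.h>
#include <net/xdp.h>

static u32 my_run_xdp(struct bpf_prog *prog, struct xdp_rxq_info *rxq,
                      unsigned char *hard_start, u32 headroom, u32 len,
                      u32 frame_sz)
{
        struct xdp_buff xdp;
        u32 act;

        /* Tie the buffer to this RX queue and record the frame geometry. */
        xdp_init_buff(&xdp, frame_sz, rxq);
        /* Point data/data_end at the received bytes; no metadata yet. */
        xdp_prepare_buff(&xdp, hard_start, headroom, len, false);

        act = bpf_prog_run_xdp(prog, &xdp);

        /* The program may have moved xdp.data/xdp.data_end via
         * bpf_xdp_adjust_head()/bpf_xdp_adjust_tail(); xdp_get_buff_len()
         * reports the total length including any fragments.
         */
        return act;     /* XDP_PASS, XDP_TX, XDP_REDIRECT, XDP_DROP, ... */
}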
xdp_sock.h
21 struct xdp_buff;
122 int xsk_generic_rcv(struct xdp_sock *xs, struct xdp_buff *xdp);
123 int __xsk_map_redirect(struct xdp_sock *xs, struct xdp_buff *xdp);
199 static inline int xsk_generic_rcv(struct xdp_sock *xs, struct xdp_buff *xdp) in xsk_generic_rcv()
204 static inline int __xsk_map_redirect(struct xdp_sock *xs, struct xdp_buff *xdp) in __xsk_map_redirect()
xsk_buff_pool.h
26 struct xdp_buff xdp;
141 struct xdp_buff *xp_alloc(struct xsk_buff_pool *pool);
142 u32 xp_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max);
/linux-6.12.1/drivers/net/ethernet/broadcom/bnxt/
bnxt_xdp.h
18 struct xdp_buff *xdp);
21 struct xdp_buff *xdp, struct page *page, u8 **data_ptr,
31 struct xdp_buff *xdp);
33 struct xdp_buff *xdp);
36 struct xdp_buff *xdp,
bnxt_xdp.c
28 struct xdp_buff *xdp) in bnxt_xmit_bd()
97 struct xdp_buff *xdp) in __bnxt_xmit_xdp()
184 struct xdp_buff *xdp) in bnxt_xdp_buff_init()
204 struct xdp_buff *xdp) in bnxt_xdp_buff_frags_free()
225 struct xdp_buff *xdp, struct page *page, u8 **data_ptr, in bnxt_rx_xdp()
466 struct page_pool *pool, struct xdp_buff *xdp, in bnxt_xdp_build_skb()
/linux-6.12.1/drivers/net/ethernet/freescale/dpaa2/
dpaa2-xsk.c
39 struct xdp_buff *xdp_buff; in dpaa2_xsk_run_xdp() local
50 xdp_buff = swa->xsk.xdp_buff; in dpaa2_xsk_run_xdp()
52 xdp_buff->data_hard_start = vaddr; in dpaa2_xsk_run_xdp()
53 xdp_buff->data = vaddr + dpaa2_fd_get_offset(fd); in dpaa2_xsk_run_xdp()
54 xdp_buff->data_end = xdp_buff->data + dpaa2_fd_get_len(fd); in dpaa2_xsk_run_xdp()
55 xdp_set_data_meta_invalid(xdp_buff); in dpaa2_xsk_run_xdp()
56 xdp_buff->rxq = &ch->xdp_rxq; in dpaa2_xsk_run_xdp()
58 xsk_buff_dma_sync_for_cpu(xdp_buff); in dpaa2_xsk_run_xdp()
59 xdp_act = bpf_prog_run_xdp(xdp_prog, xdp_buff); in dpaa2_xsk_run_xdp()
62 dpaa2_fd_set_offset(fd, xdp_buff->data - vaddr); in dpaa2_xsk_run_xdp()
[all …]
/linux-6.12.1/drivers/net/ethernet/intel/ice/
ice_txrx_lib.h
18 ice_set_rx_bufs_act(struct xdp_buff *xdp, const struct ice_rx_ring *rx_ring, in ice_set_rx_bufs_act()
157 int ice_xmit_xdp_buff(struct xdp_buff *xdp, struct ice_tx_ring *xdp_ring);
158 int __ice_xmit_xdp_ring(struct xdp_buff *xdp, struct ice_tx_ring *xdp_ring,
169 ice_xdp_meta_set_desc(struct xdp_buff *xdp, in ice_xdp_meta_set_desc()
173 xdp_buff); in ice_xdp_meta_set_desc()
ice_txrx.h
174 struct xdp_buff *xdp; /* used for XDP_TX ZC */
266 struct xdp_buff xdp_buff; member
272 static_assert(offsetof(struct ice_xdp_buff, xdp_buff) == 0);
329 struct xdp_buff **xdp_buf;
334 struct xdp_buff xdp;
ice_xsk.c
15 static struct xdp_buff **ice_xdp_buf(struct ice_rx_ring *rx_ring, u32 idx) in ice_xdp_buf()
437 static u16 ice_fill_rx_descs(struct xsk_buff_pool *pool, struct xdp_buff **xdp, in ice_fill_rx_descs()
481 struct xdp_buff **xdp; in __ice_alloc_rx_bufs_zc()
549 ice_construct_skb_zc(struct ice_rx_ring *rx_ring, struct xdp_buff *xdp) in ice_construct_skb_zc()
677 static int ice_xmit_xdp_tx_zc(struct xdp_buff *xdp, in ice_xmit_xdp_tx_zc()
686 struct xdp_buff *head; in ice_xmit_xdp_tx_zc()
760 ice_run_xdp_zc(struct ice_rx_ring *rx_ring, struct xdp_buff *xdp, in ice_run_xdp_zc()
805 ice_add_xsk_frag(struct ice_rx_ring *rx_ring, struct xdp_buff *first, in ice_add_xsk_frag()
806 struct xdp_buff *xdp, const unsigned int size) in ice_add_xsk_frag()
848 struct xdp_buff *first = NULL; in ice_clean_rx_irq_zc()
[all …]
/linux-6.12.1/drivers/net/ethernet/intel/i40e/
i40e_xsk.c
15 static struct xdp_buff **i40e_rx_bi(struct i40e_ring *rx_ring, u32 idx) in i40e_rx_bi()
197 static int i40e_run_xdp_zc(struct i40e_ring *rx_ring, struct xdp_buff *xdp, in i40e_run_xdp_zc()
244 struct xdp_buff **xdp; in i40e_alloc_rx_buffers_zc()
289 struct xdp_buff *xdp) in i40e_construct_skb_zc()
344 struct xdp_buff *xdp_buff, in i40e_handle_xdp_result_zc() argument
354 *rx_bytes = xdp_get_buff_len(xdp_buff); in i40e_handle_xdp_result_zc()
365 xsk_buff_free(xdp_buff); in i40e_handle_xdp_result_zc()
374 skb = i40e_construct_skb_zc(rx_ring, xdp_buff); in i40e_handle_xdp_result_zc()
399 i40e_add_xsk_frag(struct i40e_ring *rx_ring, struct xdp_buff *first, in i40e_add_xsk_frag()
400 struct xdp_buff *xdp, const unsigned int size) in i40e_add_xsk_frag()
[all …]
i40e_trace.h
165 struct xdp_buff *xdp),
193 struct xdp_buff *xdp),
201 struct xdp_buff *xdp),
/linux-6.12.1/tools/testing/selftests/bpf/progs/
test_xdp_bpf2bpf.c
23 struct xdp_buff { struct
45 int BPF_PROG(trace_on_entry, struct xdp_buff *xdp) in BPF_PROG() argument
62 int BPF_PROG(trace_on_exit, struct xdp_buff *xdp, int ret) in BPF_PROG()
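
Context for the selftest above (a hedged sketch, not the file's actual contents): tracing programs can attach to an XDP program's entry and exit with fentry/fexit and receive the struct xdp_buff pointer directly. The partial struct definitions mirror the selftest's CO-RE approach; the fields printed are illustrative, and FUNC is resolved to the traced XDP program by the loader at attach time.

// SPDX-License-Identifier: GPL-2.0
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>
#include <bpf/bpf_tracing.h>

/* Partial, relocatable views of the kernel structs; only the members that
 * are actually read need to be declared.
 */
struct net_device {
        int ifindex;
} __attribute__((preserve_access_index));

struct xdp_rxq_info {
        struct net_device *dev;
        __u32 queue_index;
} __attribute__((preserve_access_index));

struct xdp_buff {
        void *data;
        void *data_end;
        void *data_meta;
        void *data_hard_start;
        struct xdp_rxq_info *rxq;
} __attribute__((preserve_access_index));

SEC("fentry/FUNC")      /* FUNC = traced XDP program, set by the loader */
int BPF_PROG(trace_on_entry, struct xdp_buff *xdp)
{
        bpf_printk("xdp entry: ifindex=%d queue=%u",
                   xdp->rxq->dev->ifindex, xdp->rxq->queue_index);
        return 0;
}

SEC("fexit/FUNC")
int BPF_PROG(trace_on_exit, struct xdp_buff *xdp, int ret)
{
        bpf_printk("xdp exit: action=%d", ret);
        return 0;
}

char _license[] SEC("license") = "GPL";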
/linux-6.12.1/net/xfrm/
xfrm_state_bpf.c
67 struct xdp_buff *xdp = (struct xdp_buff *)ctx; in bpf_xdp_get_xfrm_state()
/linux-6.12.1/Documentation/networking/
xdp-rx-metadata.rst
42 Within an XDP frame, the metadata layout (accessed via ``xdp_buff``) is
50 xdp_buff->data_meta xdp_buff->data
83 into the kernel. The kernel creates the ``skb`` out of the ``xdp_buff``
85 the descriptors and populate ``skb`` metadata when doing this ``xdp_buff->skb``
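
To make that layout concrete (a hedged sketch, not quoted from the document): the metadata area sits immediately in front of xdp_buff->data, and an XDP program reserves it with bpf_xdp_adjust_meta(). The my_meta struct and the stored value are illustrative.

// SPDX-License-Identifier: GPL-2.0
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct my_meta {
        __u32 rx_hint;          /* hypothetical per-packet hint */
};

SEC("xdp")
int xdp_reserve_meta(struct xdp_md *ctx)
{
        struct my_meta *meta;
        void *data;

        /* Negative delta grows the metadata area: data_meta moves towards
         * data_hard_start while data stays put.
         */
        if (bpf_xdp_adjust_meta(ctx, -(int)sizeof(*meta)))
                return XDP_PASS;

        data = (void *)(long)ctx->data;
        meta = (void *)(long)ctx->data_meta;
        /* Bounds check required by the verifier before dereferencing. */
        if ((void *)(meta + 1) > data)
                return XDP_PASS;

        meta->rx_hint = 42;     /* consumed later, e.g. when building the skb */
        return XDP_PASS;
}

char _license[] SEC("license") = "GPL";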
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c
11 static struct mlx5e_xdp_buff *xsk_buff_to_mxbuf(struct xdp_buff *xdp) in xsk_buff_to_mxbuf()
25 struct xdp_buff **xsk_buffs; in mlx5e_xsk_alloc_rx_mpwqe()
34 xsk_buffs = (struct xdp_buff **)wi->alloc_units.xsk_buffs; in mlx5e_xsk_alloc_rx_mpwqe()
163 struct xdp_buff **buffs; in mlx5e_xsk_alloc_rx_wqes_batched()
225 static struct sk_buff *mlx5e_xsk_construct_skb(struct mlx5e_rq *rq, struct xdp_buff *xdp) in mlx5e_xsk_construct_skb()
/linux-6.12.1/net/netfilter/
nf_flow_table_bpf.c
62 struct xdp_buff *xdp = (struct xdp_buff *)ctx; in bpf_xdp_flow_lookup()
nf_conntrack_bpf.c
296 struct xdp_buff *ctx = (struct xdp_buff *)xdp_ctx; in bpf_xdp_ct_alloc()
330 struct xdp_buff *ctx = (struct xdp_buff *)xdp_ctx; in bpf_xdp_ct_lookup()
/linux-6.12.1/include/linux/
filter.h
37 struct xdp_buff;
974 u32 xdp_master_redirect(struct xdp_buff *xdp);
1181 struct xdp_buff *xdp, struct bpf_prog *prog);
1183 struct xdp_buff *xdp,
1186 struct xdp_buff *xdp,
1747 int __bpf_xdp_load_bytes(struct xdp_buff *xdp, u32 offset, void *buf, u32 len);
1748 int __bpf_xdp_store_bytes(struct xdp_buff *xdp, u32 offset, void *buf, u32 len);
1749 void *bpf_xdp_pointer(struct xdp_buff *xdp, u32 offset, u32 len);
1750 void bpf_xdp_copy_buf(struct xdp_buff *xdp, unsigned long off,
1765 static inline int __bpf_xdp_load_bytes(struct xdp_buff *xdp, u32 offset, in __bpf_xdp_load_bytes()
[all …]
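
The kernel-internal __bpf_xdp_load_bytes()/__bpf_xdp_store_bytes() listed above back the BPF helpers bpf_xdp_load_bytes() and bpf_xdp_store_bytes(). A hedged program sketch (offset and program name chosen for illustration) of why they exist: they copy through fragments, which direct data/data_end access cannot.

// SPDX-License-Identifier: GPL-2.0
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

SEC("xdp.frags")        /* declare multi-buffer support */
int xdp_copy_bytes(struct xdp_md *ctx)
{
        __u8 buf[4];

        /* Copy 4 bytes at a (hypothetical) offset that may land in a frag. */
        if (bpf_xdp_load_bytes(ctx, 128, buf, sizeof(buf)))
                return XDP_PASS;        /* frame shorter than offset + len */

        /* Write the bytes back unchanged, just to show the store direction. */
        bpf_xdp_store_bytes(ctx, 128, buf, sizeof(buf));

        return XDP_PASS;
}

char _license[] SEC("license") = "GPL";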
/linux-6.12.1/drivers/net/ethernet/netronome/nfp/
nfp_net_xsk.c
18 struct xdp_buff *xdp) in nfp_net_xsk_rx_bufs_stash()
62 struct xdp_buff *xdp; in nfp_net_xsk_rx_ring_fill_freelist()
/linux-6.12.1/net/xdp/
xsk_buff_pool.c
542 struct xdp_buff *xp_alloc(struct xsk_buff_pool *pool) in xp_alloc()
568 static u32 xp_alloc_new_from_fq(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max) in xp_alloc_new_from_fq()
611 static u32 xp_alloc_reused(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 nb_entries) in xp_alloc_reused()
631 static u32 xp_alloc_slow(struct xsk_buff_pool *pool, struct xdp_buff **xdp, in xp_alloc_slow()
637 struct xdp_buff *buff; in xp_alloc_slow()
649 u32 xp_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max) in xp_alloc_batch()
/linux-6.12.1/drivers/net/vmxnet3/
vmxnet3_xdp.c
251 vmxnet3_run_xdp(struct vmxnet3_rx_queue *rq, struct xdp_buff *xdp, in vmxnet3_run_xdp()
304 const struct xdp_buff *xdp) in vmxnet3_build_skb()
331 struct xdp_buff xdp; in vmxnet3_process_xdp_small()
377 struct xdp_buff xdp; in vmxnet3_process_xdp()
/linux-6.12.1/drivers/net/ethernet/engleder/
tsnep.h
109 struct xdp_buff *xdp;
132 struct xdp_buff **xdp_batch;
/linux-6.12.1/drivers/net/ethernet/freescale/enetc/
enetc.c
1421 struct xdp_buff *xdp_buff, u16 size) in enetc_map_rx_buff_to_xdp() argument
1429 xdp_prepare_buff(xdp_buff, hard_start - rx_ring->buffer_offset, in enetc_map_rx_buff_to_xdp()
1434 u16 size, struct xdp_buff *xdp_buff) in enetc_add_rx_buff_to_xdp() argument
1436 struct skb_shared_info *shinfo = xdp_get_shared_info_from_buff(xdp_buff); in enetc_add_rx_buff_to_xdp()
1443 if (!xdp_buff_has_frags(xdp_buff)) { in enetc_add_rx_buff_to_xdp()
1444 xdp_buff_set_frags_flag(xdp_buff); in enetc_add_rx_buff_to_xdp()
1452 xdp_buff_set_frag_pfmemalloc(xdp_buff); in enetc_add_rx_buff_to_xdp()
1463 int *cleaned_cnt, struct xdp_buff *xdp_buff) in enetc_build_xdp_buff() argument
1467 xdp_init_buff(xdp_buff, ENETC_RXB_TRUESIZE, &rx_ring->xdp.rxq); in enetc_build_xdp_buff()
1469 enetc_map_rx_buff_to_xdp(rx_ring, *i, xdp_buff, size); in enetc_build_xdp_buff()
[all …]
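
For context on the enetc hits above, a hedged sketch of the general multi-buffer pattern they follow: appending an RX page as a fragment of an xdp_buff through its skb_shared_info. The helper name my_xdp_add_frag is hypothetical and checks such as the MAX_SKB_FRAGS limit are omitted.

#include <linux/skbuff.h>
#include <net/xdp.h>

static void my_xdp_add_frag(struct xdp_buff *xdp, struct page *page,
                            u32 offset, u32 size)
{
        struct skb_shared_info *shinfo = xdp_get_shared_info_from_buff(xdp);

        if (!xdp_buff_has_frags(xdp)) {
                /* First fragment: mark the buff as multi-buffer and reset the
                 * counters kept in the shared_info area at the frame tail.
                 */
                xdp_buff_set_frags_flag(xdp);
                shinfo->nr_frags = 0;
                shinfo->xdp_frags_size = 0;
        }

        skb_frag_fill_page_desc(&shinfo->frags[shinfo->nr_frags++],
                                page, offset, size);
        shinfo->xdp_frags_size += size;

        /* Propagate pfmemalloc so the eventual skb inherits it. */
        if (page_is_pfmemalloc(page))
                xdp_buff_set_frag_pfmemalloc(xdp);
}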
/linux-6.12.1/net/bpf/
test_run.c
102 struct xdp_buff orig_ctx;
103 struct xdp_buff ctx;
112 struct xdp_buff *orig_ctx;
134 struct xdp_buff *new_ctx, *orig_ctx; in xdp_test_run_init_page()
161 static int xdp_test_run_setup(struct xdp_test_data *xdp, struct xdp_buff *orig_ctx) in xdp_test_run_setup()
295 struct xdp_buff *ctx; in xdp_test_run_batch()
372 static int bpf_test_run_xdp_live(struct bpf_prog *prog, struct xdp_buff *ctx, in bpf_test_run_xdp_live()
1143 static int xdp_convert_md_to_buff(struct xdp_md *xdp_md, struct xdp_buff *xdp) in xdp_convert_md_to_buff()
1189 static void xdp_convert_buff_to_md(struct xdp_buff *xdp, struct xdp_md *xdp_md) in xdp_convert_buff_to_md()
1213 struct xdp_buff xdp = {}; in bpf_prog_test_run_xdp()
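
test_run.c above is the kernel side of BPF_PROG_TEST_RUN for XDP; a hedged userspace sketch of driving it through libbpf follows. prog_fd is assumed to come from bpf_program__fd() on an already loaded program, and the dummy frame is illustrative (real callers build proper Ethernet/IP headers).

#include <stdio.h>
#include <bpf/bpf.h>

int run_xdp_once(int prog_fd)
{
        unsigned char pkt[64] = { 0 };  /* dummy frame */
        unsigned char out[256];
        LIBBPF_OPTS(bpf_test_run_opts, opts,
                .data_in = pkt,
                .data_size_in = sizeof(pkt),
                .data_out = out,
                .data_size_out = sizeof(out),
                .repeat = 1,
        );
        int err = bpf_prog_test_run_opts(prog_fd, &opts);

        if (err)
                return err;

        /* retval is the XDP action the program returned (XDP_PASS, ...). */
        printf("retval=%u out_len=%u\n", opts.retval, opts.data_size_out);
        return 0;
}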
