/linux-6.12.1/drivers/hwtracing/coresight/ |
D | ultrasoc-smb.c |
      339  long pg_idx, pg_offset;                          in smb_sync_perf_buffer() local
      341  pg_idx = head >> PAGE_SHIFT;                     in smb_sync_perf_buffer()
      353  memcpy(dst_pages[pg_idx] + pg_offset,            in smb_sync_perf_buffer()
      359  pg_idx++;                                        in smb_sync_perf_buffer()
      360  pg_idx %= buf->nr_pages;                         in smb_sync_perf_buffer()
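
    The hits above show the usual split of a linear buffer position into a page index
    and an in-page offset, with the index wrapped over the number of ring pages. A
    minimal userspace sketch of the same arithmetic; head, nr_pages and the 4 KiB
    page size are illustrative assumptions, not values taken from the driver:

        #include <stdio.h>

        #define PAGE_SHIFT 12                      /* assume 4 KiB pages */
        #define PAGE_SIZE  (1UL << PAGE_SHIFT)

        int main(void)
        {
                unsigned long head = 9000;         /* hypothetical read position */
                unsigned long nr_pages = 4;        /* hypothetical ring size in pages */

                unsigned long pg_idx    = head >> PAGE_SHIFT;       /* which page */
                unsigned long pg_offset = head & (PAGE_SIZE - 1);   /* where in that page */

                pg_idx %= nr_pages;                /* wrap for a ring of nr_pages pages */

                printf("page %lu, offset %lu\n", pg_idx, pg_offset);
                return 0;
        }
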
|
D | coresight-catu.c |
      121  unsigned int table_nr, pg_idx, pg_offset;            in catu_get_table() local
      134  pg_idx = table_nr / CATU_PAGES_PER_SYSPAGE;          in catu_get_table()
      137  *daddrp = table_pages->daddrs[pg_idx] + pg_offset;   in catu_get_table()
      138  ptr = page_address(table_pages->pages[pg_idx]);      in catu_get_table()
|
D | coresight-tmc-etr.c |
      400  int pg_idx = offset >> PAGE_SHIFT;                              in tmc_sg_table_get_data() local
      413  *bufpp = page_address(data_pages->pages[pg_idx]) + pg_offset;   in tmc_sg_table_get_data()
     1505  long pg_idx, pg_offset;                                         in tmc_etr_sync_perf_buffer() local
     1510  pg_idx = head >> PAGE_SHIFT;                                    in tmc_etr_sync_perf_buffer()
     1531  memcpy(dst_pages[pg_idx] + pg_offset, src_buf, bytes);          in tmc_etr_sync_perf_buffer()
     1539  if (++pg_idx == etr_perf->nr_pages)                             in tmc_etr_sync_perf_buffer()
     1540          pg_idx = 0;                                             in tmc_etr_sync_perf_buffer()
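
    The tmc_etr_sync_perf_buffer() hits show the common copy-out loop shape: fill
    the current destination page from its offset onward, then advance pg_idx and
    wrap it at nr_pages. A standalone sketch of that loop using heap buffers in
    place of real pages; all names, sizes and the helper itself are illustrative:

        #include <stdlib.h>
        #include <string.h>

        #define PAGE_SHIFT 12
        #define PAGE_SIZE  (1UL << PAGE_SHIFT)

        /* Copy 'total' bytes from 'src' into a ring of nr_pages page-sized buffers,
         * starting at byte position 'head' within the ring. */
        static void sync_buffer(void **dst_pages, unsigned long nr_pages,
                                unsigned long head, const char *src, size_t total)
        {
                unsigned long pg_idx    = (head >> PAGE_SHIFT) % nr_pages;
                unsigned long pg_offset = head & (PAGE_SIZE - 1);

                while (total) {
                        size_t bytes = PAGE_SIZE - pg_offset;   /* room left in this page */

                        if (bytes > total)
                                bytes = total;

                        memcpy((char *)dst_pages[pg_idx] + pg_offset, src, bytes);

                        src   += bytes;
                        total -= bytes;
                        pg_offset = 0;                          /* next page starts at 0 */
                        if (++pg_idx == nr_pages)               /* wrap around the ring */
                                pg_idx = 0;
                }
        }

        int main(void)
        {
                void *pages[2] = { malloc(PAGE_SIZE), malloc(PAGE_SIZE) };
                char data[6000] = { 0 };

                sync_buffer(pages, 2, 4000, data, sizeof(data));
                free(pages[0]);
                free(pages[1]);
                return 0;
        }
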
|
/linux-6.12.1/drivers/pci/controller/mobiveil/ |
D | pcie-mobiveil.c |
       28  static void mobiveil_pcie_sel_page(struct mobiveil_pcie *pcie, u8 pg_idx)   in mobiveil_pcie_sel_page() argument
       34  val |= (pg_idx & PAGE_SEL_MASK) << PAGE_SEL_SHIFT;                          in mobiveil_pcie_sel_page()
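
    Here pg_idx is masked and shifted into a page-select field of a control
    register. A generic read-modify-write sketch of that kind of field update; the
    register variable, mask and shift values are made up for illustration, and the
    clear-before-set step is part of the sketch rather than quoted from the driver:

        #include <stdint.h>
        #include <stdio.h>

        #define PAGE_SEL_MASK  0x3f          /* hypothetical 6-bit page-select field */
        #define PAGE_SEL_SHIFT 13            /* hypothetical field position */

        static uint32_t csr = 0xdeadbeef;    /* stand-in for the memory-mapped register */

        static void sel_page(uint8_t pg_idx)
        {
                uint32_t val = csr;

                val &= ~((uint32_t)PAGE_SEL_MASK << PAGE_SEL_SHIFT);      /* clear old index */
                val |= (uint32_t)(pg_idx & PAGE_SEL_MASK) << PAGE_SEL_SHIFT;
                csr = val;
        }

        int main(void)
        {
                sel_page(5);
                printf("csr = 0x%08x\n", (unsigned)csr);
                return 0;
        }
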
|
/linux-6.12.1/drivers/infiniband/hw/bnxt_re/ |
D | qplib_res.h |
      384  u32 pg_num, pg_idx;                                                   in bnxt_qplib_get_qe() local
      387  pg_idx = (indx % hwq->qe_ppg);                                        in bnxt_qplib_get_qe()
      390  return (void *)(hwq->pbl_ptr[pg_num] + hwq->element_size * pg_idx);   in bnxt_qplib_get_qe()
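
    bnxt_qplib_get_qe() turns a queue-element index into a page number plus an
    index within that page, given qe_ppg elements per page. A userspace sketch of
    the same lookup; the struct layout, sizes and helper name here are assumptions
    for illustration, not the driver's real types:

        #include <stdio.h>
        #include <stdlib.h>
        #include <stdint.h>

        struct hwq {                    /* simplified stand-in for the driver's queue */
                uint8_t **pbl_ptr;      /* one pointer per backing page */
                uint32_t qe_ppg;        /* queue elements per page */
                uint32_t element_size;  /* bytes per element */
        };

        static void *get_qe(struct hwq *hwq, uint32_t indx)
        {
                uint32_t pg_num = indx / hwq->qe_ppg;   /* which backing page */
                uint32_t pg_idx = indx % hwq->qe_ppg;   /* which slot in that page */

                return hwq->pbl_ptr[pg_num] + (size_t)hwq->element_size * pg_idx;
        }

        int main(void)
        {
                uint8_t *pages[2] = { calloc(1, 4096), calloc(1, 4096) };
                struct hwq q = { .pbl_ptr = pages, .qe_ppg = 64, .element_size = 64 };

                printf("element 70 lives at %p\n", get_qe(&q, 70));
                free(pages[0]);
                free(pages[1]);
                return 0;
        }
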
|
/linux-6.12.1/drivers/net/ethernet/chelsio/libcxgb/ |
D | libcxgb_ppm.h |
      113  unsigned char pg_idx:2;   member
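
    In this header pg_idx is a 2-bit bitfield, so it can only encode the values
    0..3. A tiny sketch of that storage restriction; the struct below is
    illustrative and not the cxgb layout:

        #include <stdio.h>

        struct tag {                    /* illustrative struct, not the cxgb layout */
                unsigned char pg_idx:2; /* page-size index, can hold 0..3 */
                unsigned char flags:6;
        };

        int main(void)
        {
                struct tag t = { .pg_idx = 3 };

                t.pg_idx++;             /* wraps to 0: a 2-bit field cannot hold 4 */
                printf("pg_idx = %d, sizeof = %zu\n", t.pg_idx, sizeof(t));
                return 0;
        }
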
|
/linux-6.12.1/drivers/scsi/cxgbi/cxgb3i/ |
D | cxgb3i.c |
     1154  unsigned int tid, int pg_idx)                            in ddp_setup_conn_pgidx() argument
     1159  u64 val = pg_idx < DDP_PGIDX_MAX ? pg_idx : 0;           in ddp_setup_conn_pgidx()
     1162  "csk 0x%p, tid %u, pg_idx %d.\n", csk, tid, pg_idx);     in ddp_setup_conn_pgidx()
|
/linux-6.12.1/drivers/scsi/cxgbi/cxgb4i/ |
D | cxgb4i.c |
     2078  int pg_idx)                                                  in ddp_setup_conn_pgidx() argument
     2083  if (!pg_idx || pg_idx >= DDP_PGIDX_MAX)                      in ddp_setup_conn_pgidx()
     2097  req->val = cpu_to_be64(pg_idx << 8);                         in ddp_setup_conn_pgidx()
     2101  "csk 0x%p, tid 0x%x, pg_idx %u.\n", csk, csk->tid, pg_idx);  in ddp_setup_conn_pgidx()
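
    cxgb4i (here) and cxgbit_cm.c (further down) program the page index the same
    way: range-check it, then pack it, shifted left by 8, into a big-endian 64-bit
    request word. A userspace sketch of that encoding; the shift and the range
    check are taken from the snippets above, while the DDP_PGIDX_MAX value and the
    helper are illustrative, with htobe64() standing in for cpu_to_be64():

        #include <stdint.h>
        #include <stdio.h>
        #include <endian.h>             /* htobe64(), a glibc/BSD extension */

        #define DDP_PGIDX_MAX 4         /* illustrative bound, not the driver's value */

        /* Encode a DDP page index: reject out-of-range values, then place the
         * index at bit 8 of a big-endian 64-bit word. */
        static int encode_pgidx(unsigned int pg_idx, uint64_t *val)
        {
                if (!pg_idx || pg_idx >= DDP_PGIDX_MAX)
                        return -1;                      /* nothing to program */

                *val = htobe64((uint64_t)pg_idx << 8);
                return 0;
        }

        int main(void)
        {
                uint64_t val;

                if (!encode_pgidx(2, &val))
                        printf("wire value: 0x%016llx\n", (unsigned long long)val);
                return 0;
        }
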
|
/linux-6.12.1/drivers/infiniband/hw/irdma/ |
D | verbs.c |
     2366  u32 pg_idx;                                        in irdma_check_mem_contiguous() local
     2368  for (pg_idx = 0; pg_idx < npages; pg_idx++) {      in irdma_check_mem_contiguous()
     2369  if ((*arr + (pg_size * pg_idx)) != arr[pg_idx])    in irdma_check_mem_contiguous()
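
    irdma_check_mem_contiguous() walks a page-address array and checks that each
    entry sits exactly pg_size * pg_idx past the first one, i.e. that the region
    is one contiguous run of pages. A sketch of the same check over plain
    integers; the function name and sample values are illustrative:

        #include <stdbool.h>
        #include <stdint.h>
        #include <stdio.h>

        /* Return true if arr[0..npages-1] describes one contiguous run of pages,
         * i.e. arr[i] == arr[0] + i * pg_size for every i. */
        static bool mem_contiguous(const uint64_t *arr, uint32_t npages, uint32_t pg_size)
        {
                uint32_t pg_idx;

                for (pg_idx = 0; pg_idx < npages; pg_idx++) {
                        if (arr[0] + (uint64_t)pg_size * pg_idx != arr[pg_idx])
                                return false;
                }
                return true;
        }

        int main(void)
        {
                uint64_t pages[] = { 0x10000, 0x11000, 0x12000, 0x14000 };

                printf("first 3 contiguous: %d\n", mem_contiguous(pages, 3, 0x1000));
                printf("all 4 contiguous:   %d\n", mem_contiguous(pages, 4, 0x1000));
                return 0;
        }
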
|
/linux-6.12.1/drivers/block/ |
D | ublk_drv.c |
      762  unsigned pg_idx = 0;                                    in ublk_copy_io_pages() local
      769  void *pg_buf = kmap_local_page(data->pages[pg_idx]);    in ublk_copy_io_pages()
      782  pg_idx += 1;                                            in ublk_copy_io_pages()
|
/linux-6.12.1/drivers/net/ethernet/meta/fbnic/ |
D | fbnic_txrx.c |
      619  unsigned int pg_idx = FIELD_GET(FBNIC_RCD_AL_BUFF_PAGE_MASK, rcd);   in fbnic_add_rx_frag() local
      622  struct page *page = fbnic_page_pool_get(&qt->sub1, pg_idx);          in fbnic_add_rx_frag()
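
    fbnic_add_rx_frag() pulls the page index out of a receive completion
    descriptor with FIELD_GET(). A userspace sketch of that mask-and-shift
    extraction; the mask, descriptor value and the cut-down FIELD_GET() below are
    made up in place of FBNIC_RCD_AL_BUFF_PAGE_MASK and the kernel macro:

        #include <stdint.h>
        #include <stdio.h>

        /* Hypothetical descriptor layout: bits 45..32 hold the buffer page index. */
        #define RCD_BUFF_PAGE_MASK 0x00003fff00000000ULL

        /* Minimal FIELD_GET(): shift the field down by the mask's trailing zeros
         * (uses a GCC/Clang builtin). */
        #define FIELD_GET(mask, reg) \
                (((reg) & (mask)) >> __builtin_ctzll(mask))

        int main(void)
        {
                uint64_t rcd = 0x0000002a00001234ULL;   /* example completion descriptor */
                unsigned int pg_idx = FIELD_GET(RCD_BUFF_PAGE_MASK, rcd);

                printf("pg_idx = %u\n", pg_idx);        /* prints 42 */
                return 0;
        }
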
|
/linux-6.12.1/drivers/target/iscsi/cxgbit/ |
D | cxgbit_cm.c |
     1545  int cxgbit_setup_conn_pgidx(struct cxgbit_sock *csk, u32 pg_idx)   in cxgbit_setup_conn_pgidx() argument
     1563  req->val = cpu_to_be64(pg_idx << 8);                               in cxgbit_setup_conn_pgidx()
|
/linux-6.12.1/net/core/ |
D | skbuff.c |
     4385  unsigned int pg_idx, pg_off, pg_sz;                            in skb_seq_read() local
     4389  pg_idx = 0;                                                    in skb_seq_read()
     4394  pg_idx = (pg_off + st->frag_off) >> PAGE_SHIFT;                in skb_seq_read()
     4403  st->frag_data = kmap_atomic(skb_frag_page(frag) + pg_idx);     in skb_seq_read()
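
    skb_seq_read() handles fragments that span multiple pages: it converts the
    current position within the frag into a page index, then maps
    skb_frag_page(frag) + pg_idx. A userspace sketch of just the index arithmetic;
    the offsets and the in-page variable name are made up for illustration:

        #include <stdio.h>

        #define PAGE_SHIFT 12
        #define PAGE_SIZE  (1UL << PAGE_SHIFT)

        int main(void)
        {
                unsigned long pg_off   = 3000;   /* frag's starting offset in its first page */
                unsigned long frag_off = 5000;   /* how far we have read into the frag */

                /* Which page of the (possibly multi-page) frag the next byte lives in,
                 * and where inside that page it sits. */
                unsigned long pg_idx  = (pg_off + frag_off) >> PAGE_SHIFT;
                unsigned long in_page = (pg_off + frag_off) & (PAGE_SIZE - 1);

                printf("page %lu, in-page offset %lu\n", pg_idx, in_page);
                return 0;
        }
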
|