Lines Matching refs:gle
84 struct fun_dataop_gl *gle; in fun_write_gl() local
87 req->len8 = (sizeof(*req) + ngle * sizeof(*gle)) / 8; in fun_write_gl()
89 for (i = 0, gle = (struct fun_dataop_gl *)req->dataop.imm; in fun_write_gl()
90 i < ngle && txq_to_end(q, gle); i++, gle++) in fun_write_gl()
91 fun_dataop_gl_init(gle, 0, 0, lens[i], addrs[i]); in fun_write_gl()
93 if (txq_to_end(q, gle) == 0) { in fun_write_gl()
94 gle = (struct fun_dataop_gl *)q->desc; in fun_write_gl()
95 for ( ; i < ngle; i++, gle++) in fun_write_gl()
96 fun_dataop_gl_init(gle, 0, 0, lens[i], addrs[i]); in fun_write_gl()
99 return gle; in fun_write_gl()
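
The fun_write_gl() lines above write the packet's gather-list entries (struct fun_dataop_gl) into the Tx request's immediate-data area and, when txq_to_end() reports no room left before the physical end of the ring, wrap to the start of the ring at q->desc and continue there. Below is a minimal sketch of that wrap-around pattern only; struct gl_entry, struct tx_ring, ring_space_to_end() and write_gl() are stand-ins invented for illustration, not the driver's real types or API, and the byte-order handling of the real driver is elided.

    #include <stdint.h>

    struct gl_entry {               /* stand-in for struct fun_dataop_gl */
            uint64_t sgl_data;      /* DMA address of the segment */
            uint32_t sgl_len;       /* length of the segment */
            uint32_t sgl_flags;
    };

    struct tx_ring {                /* stand-in for struct funeth_txq */
            struct gl_entry *desc;  /* start of the descriptor ring */
            unsigned int mask;      /* ring size - 1 */
    };

    /* Entries that still fit between @p and the physical end of the ring. */
    static unsigned int ring_space_to_end(const struct tx_ring *q,
                                          const struct gl_entry *p)
    {
            return q->mask + 1 - (unsigned int)(p - q->desc);
    }

    /* Write @ngle gather-list entries starting at @gle, wrapping to the ring
     * start when the physical end is reached; returns one past the last entry.
     */
    static struct gl_entry *write_gl(const struct tx_ring *q,
                                     struct gl_entry *gle,
                                     const uint64_t *addrs,
                                     const uint32_t *lens, unsigned int ngle)
    {
            unsigned int i;

            for (i = 0; i < ngle && ring_space_to_end(q, gle); i++, gle++) {
                    gle->sgl_data = addrs[i];
                    gle->sgl_len = lens[i];
            }

            if (ring_space_to_end(q, gle) == 0) {   /* hit the ring end */
                    gle = q->desc;                  /* continue from the start */
                    for (; i < ngle; i++, gle++) {
                            gle->sgl_data = addrs[i];
                            gle->sgl_len = lens[i];
                    }
            }

            return gle;
    }

Note that the returned pointer is one entry past the last one written and may itself have been wrapped to q->desc, which is what makes the cast in write_pkt_desc() below safe.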
158 struct fun_dataop_gl *gle; in write_pkt_desc() local
277 gle = fun_write_gl(q, req, addrs, lens, ngle); in write_pkt_desc()
280 struct fun_eth_tls *tls = (struct fun_eth_tls *)gle; in write_pkt_desc()
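
In write_pkt_desc(), the pointer returned by fun_write_gl() (one past the last gather-list entry, possibly wrapped) is reused as the location of a struct fun_eth_tls, i.e. the TLS metadata is placed immediately after the gather list within the same request. A brief sketch, continuing the stand-in types above; struct tls_meta and its tlsid field are purely illustrative assumptions, not the driver's real layout.

    struct tls_meta {               /* stand-in for struct fun_eth_tls */
            uint64_t tlsid;         /* illustrative field, an assumption */
    };

    static void append_tls_meta(struct gl_entry *gle, uint64_t tlsid)
    {
            /* @gle is the "one past the gather list" pointer returned by
             * write_gl(); because write_gl() wraps it at the ring end, it
             * always points at a valid slot to write into.
             */
            struct tls_meta *tls = (struct tls_meta *)gle;

            tls->tlsid = tlsid;
    }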
397 struct fun_dataop_gl *gle; in fun_unmap_pkt() local
400 gle = (struct fun_dataop_gl *)req->dataop.imm; in fun_unmap_pkt()
401 dma_unmap_single(q->dma_dev, be64_to_cpu(gle->sgl_data), in fun_unmap_pkt()
402 be32_to_cpu(gle->sgl_len), DMA_TO_DEVICE); in fun_unmap_pkt()
404 for (gle++; --ngle && txq_to_end(q, gle); gle++) in fun_unmap_pkt()
405 dma_unmap_page(q->dma_dev, be64_to_cpu(gle->sgl_data), in fun_unmap_pkt()
406 be32_to_cpu(gle->sgl_len), in fun_unmap_pkt()
409 for (gle = (struct fun_dataop_gl *)q->desc; ngle; ngle--, gle++) in fun_unmap_pkt()
410 dma_unmap_page(q->dma_dev, be64_to_cpu(gle->sgl_data), in fun_unmap_pkt()
411 be32_to_cpu(gle->sgl_len), in fun_unmap_pkt()
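
fun_unmap_pkt() walks the same gather list to undo the DMA mappings: the first entry is released with dma_unmap_single() (the packet's linear data), the remaining entries with dma_unmap_page() (page fragments), again wrapping to q->desc when txq_to_end() reaches the end of the ring; addresses and lengths are stored big-endian, hence the be64_to_cpu()/be32_to_cpu() conversions. A sketch of the same walk, continuing the stand-in types above; dma_unmap_head() and dma_unmap_frag() are hypothetical placeholders for the two unmap calls, and the byte-order conversion is elided.

    /* Hypothetical stubs standing in for dma_unmap_single()/dma_unmap_page(). */
    static void dma_unmap_head(uint64_t addr, uint32_t len) { (void)addr; (void)len; }
    static void dma_unmap_frag(uint64_t addr, uint32_t len) { (void)addr; (void)len; }

    static void unmap_gl(const struct tx_ring *q, struct gl_entry *gle,
                         unsigned int ngle)
    {
            /* First entry: the packet's linear (head) buffer. */
            dma_unmap_head(gle->sgl_data, gle->sgl_len);

            /* Remaining entries are page fragments; walk until the list is
             * exhausted or the physical end of the ring is reached...
             */
            for (gle++; --ngle && ring_space_to_end(q, gle); gle++)
                    dma_unmap_frag(gle->sgl_data, gle->sgl_len);

            /* ...then finish any wrapped entries from the start of the ring. */
            for (gle = q->desc; ngle; ngle--, gle++)
                    dma_unmap_frag(gle->sgl_data, gle->sgl_len);
    }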