References to struct nvkm_runl under /linux-6.12.1/drivers/gpu/drm/nouveau/nvkm/engine/fifo/, listed per file as: line number, matching source line, and (for .c files) the enclosing function. Entries ending in "[all …]" were truncated by the cross-reference tool.

runl.h
     25  struct nvkm_runl *runl;
     44  struct nvkm_runl {
     46  void (*init)(struct nvkm_runl *);
     47  void (*fini)(struct nvkm_runl *);
     50  int (*update)(struct nvkm_runl *);
     53  void (*commit)(struct nvkm_runl *, struct nvkm_memory *, u32 start, int count);
     54  int (*wait)(struct nvkm_runl *);
     55  bool (*pending)(struct nvkm_runl *);
     56  void (*block)(struct nvkm_runl *, u32 engm);
     57  void (*allow)(struct nvkm_runl *, u32 engm);
    [all …]

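The runl.h lines above are the per-runlist hook table that each chip generation fills in with its own implementations. Below is a minimal, compilable sketch of that shape, using the signatures exactly as listed; the table name runl_hooks and the trailing comments are my reading of the member names, not taken from the header, and nvkm_runl/nvkm_memory are left opaque.

/* Sketch only: the hook-table shape implied by runl.h lines 46-57. */
#include <stdbool.h>
#include <stdint.h>

typedef uint32_t u32;

struct nvkm_runl;                /* one hardware runlist (opaque here)      */
struct nvkm_memory;              /* buffer holding the runlist entries      */

struct runl_hooks {              /* name invented for this sketch           */
        void (*init)(struct nvkm_runl *);
        void (*fini)(struct nvkm_runl *);
        int  (*update)(struct nvkm_runl *);                 /* rebuild the list   */
        void (*commit)(struct nvkm_runl *, struct nvkm_memory *,
                       u32 start, int count);               /* hand it to HW      */
        int  (*wait)(struct nvkm_runl *);
        bool (*pending)(struct nvkm_runl *);                /* switch still busy? */
        void (*block)(struct nvkm_runl *, u32 engm);        /* mask engines       */
        void (*allow)(struct nvkm_runl *, u32 engm);        /* unmask engines     */
};

Grouping the hooks this way lets the common fifo code drive any chip generation through the same small set of entry points.
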
runl.c
     58  nvkm_runl_rc(struct nvkm_runl *runl)  (in nvkm_runl_rc())
    132  nvkm_runl_rc_runl(struct nvkm_runl *runl)  (in nvkm_runl_rc_runl())
    155  nvkm_runl_rc_engn(struct nvkm_runl *runl, struct nvkm_engn *engn)  (in nvkm_runl_rc_engn())
    174  struct nvkm_runl *runl = container_of(work, typeof(*runl), work);  (in nvkm_runl_work())
    183  nvkm_runl_chan_get_inst(struct nvkm_runl *runl, u64 inst, unsigned long *pirqflags)  (in nvkm_runl_chan_get_inst())
    207  nvkm_runl_chan_get_chid(struct nvkm_runl *runl, int id, unsigned long *pirqflags)  (in nvkm_runl_chan_get_chid())
    228  nvkm_runl_cgrp_get_cgid(struct nvkm_runl *runl, int id, unsigned long *pirqflags)  (in nvkm_runl_cgrp_get_cgid())
    249  nvkm_runl_preempt_wait(struct nvkm_runl *runl)  (in nvkm_runl_preempt_wait())
    261  nvkm_runl_update_pending(struct nvkm_runl *runl)  (in nvkm_runl_update_pending())
    271  nvkm_runl_update_locked(struct nvkm_runl *runl, bool wait)  (in nvkm_runl_update_locked())
    [all …]

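The nvkm_runl_chan_get_inst(), nvkm_runl_chan_get_chid() and nvkm_runl_cgrp_get_cgid() lookups above all take an unsigned long *pirqflags, the usual sign of a "return the object with a lock already held" contract: the saved IRQ flags travel back through the pointer and the caller drops the lock with a matching put once it is done with the object. The standalone model below illustrates that contract only; the lock, the put helper and every name in it are stand-ins, not the driver's API.

#include <stdio.h>

struct chan { int chid; };

static struct chan channels[] = { { 3 }, { 7 } };
static int lock_taken;                          /* stand-in for a spinlock */

static struct chan *chan_get_chid(int chid, unsigned long *pirqflags)
{
        *pirqflags = 0;                         /* "spin_lock_irqsave(...)" */
        lock_taken = 1;
        for (unsigned int i = 0; i < sizeof(channels) / sizeof(channels[0]); i++) {
                if (channels[i].chid == chid)
                        return &channels[i];    /* returned with lock held  */
        }
        lock_taken = 0;                         /* not found: drop the lock */
        return NULL;
}

static void chan_put(struct chan **pchan, unsigned long irqflags)
{
        (void)irqflags;                         /* "spin_unlock_irqrestore" */
        lock_taken = 0;
        *pchan = NULL;
}

int main(void)
{
        unsigned long flags;
        struct chan *chan = chan_get_chid(7, &flags);

        if (chan) {
                printf("found chid %d, lock held: %d\n", chan->chid, lock_taken);
                chan_put(&chan, flags);
        }
        printf("lock held after put: %d\n", lock_taken);
        return 0;
}
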
priv.h
     11  struct nvkm_runl;
     90  int nv50_runl_update(struct nvkm_runl *);
     91  int nv50_runl_wait(struct nvkm_runl *);
    116  bool gf100_runl_preempt_pending(struct nvkm_runl *);
    118  bool gf100_runq_intr(struct nvkm_runq *, struct nvkm_runl *);
    139  void gk104_runl_commit(struct nvkm_runl *, struct nvkm_memory *, u32, int);
    140  bool gk104_runl_pending(struct nvkm_runl *);
    141  void gk104_runl_block(struct nvkm_runl *, u32);
    142  void gk104_runl_allow(struct nvkm_runl *, u32);
    143  void gk104_runl_fault_clear(struct nvkm_runl *);
    [all …]

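priv.h exports shared hook implementations (nv50_runl_*, gf100_runl_*, gk104_runl_*) so that later chips can reuse them rather than re-implement every hook. The sketch below shows what such a mix-and-match table could look like; the prototypes are copied from the lines listed above, but the table type is a reduced copy of the earlier runl_hooks sketch and the particular combination of helpers is illustrative, not any real chip's table. It compiles as a standalone translation unit but naturally cannot link outside the driver.

#include <stdbool.h>
#include <stdint.h>

typedef uint32_t u32;
struct nvkm_runl;
struct nvkm_memory;

/* prototypes copied from the priv.h lines listed above */
int  nv50_runl_update(struct nvkm_runl *);
int  nv50_runl_wait(struct nvkm_runl *);
void gk104_runl_commit(struct nvkm_runl *, struct nvkm_memory *, u32, int);
bool gk104_runl_pending(struct nvkm_runl *);
void gk104_runl_block(struct nvkm_runl *, u32);
void gk104_runl_allow(struct nvkm_runl *, u32);

/* reduced copy of the earlier runl_hooks sketch: only the hooks assigned below */
struct runl_hooks {
        int  (*update)(struct nvkm_runl *);
        void (*commit)(struct nvkm_runl *, struct nvkm_memory *, u32, int);
        int  (*wait)(struct nvkm_runl *);
        bool (*pending)(struct nvkm_runl *);
        void (*block)(struct nvkm_runl *, u32);
        void (*allow)(struct nvkm_runl *, u32);
};

/* illustrative combination only, not taken from any chip file */
const struct runl_hooks example_runl = {
        .update  = nv50_runl_update,
        .commit  = gk104_runl_commit,
        .wait    = nv50_runl_wait,
        .pending = gk104_runl_pending,
        .block   = gk104_runl_block,
        .allow   = gk104_runl_allow,
};
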
ga100.c
     45  struct nvkm_runl *runl = chan->cgrp->runl;  (in ga100_chan_stop())
     53  struct nvkm_runl *runl = chan->cgrp->runl;  (in ga100_chan_start())
     64  struct nvkm_runl *runl = chan->cgrp->runl;  (in ga100_chan_unbind())
    112  struct nvkm_runl *runl = cgrp->runl;  (in ga100_cgrp_preempt())
    125  struct nvkm_runl *runl = engn->runl;  (in ga100_engn_cxid())
    185  ga100_runq_intr_1(struct nvkm_runq *runq, struct nvkm_runl *runl)  (in ga100_runq_intr_1())
    223  ga100_runq_intr_0(struct nvkm_runq *runq, struct nvkm_runl *runl)  (in ga100_runq_intr_0())
    261  ga100_runq_intr(struct nvkm_runq *runq, struct nvkm_runl *runl)  (in ga100_runq_intr())
    288  ga100_runl_preempt_pending(struct nvkm_runl *runl)  (in ga100_runl_preempt_pending())
    294  ga100_runl_preempt(struct nvkm_runl *runl)  (in ga100_runl_preempt())
    [all …]

chan.c
     44  struct nvkm_runl *runl = cgrp->runl;  (in nvkm_chan_cctx_bind())
    138  struct nvkm_runl *runl = chan->cgrp->runl;  (in nvkm_chan_preempt_locked())
    166  struct nvkm_runl *runl = cgrp->runl;  (in nvkm_chan_remove_locked())
    184  struct nvkm_runl *runl = chan->cgrp->runl;  (in nvkm_chan_remove())
    198  struct nvkm_runl *runl = cgrp->runl;  (in nvkm_chan_insert())
    314  struct nvkm_runl *runl;  (in nvkm_chan_get_inst())
    335  struct nvkm_runl *runl;  (in nvkm_chan_get_chid())
    349  nvkm_chan_new_(const struct nvkm_chan_func *func, struct nvkm_runl *runl, int runq,  (in nvkm_chan_new_())

gf100.c
    204  struct nvkm_runl *runl = engn->runl;  (in gf100_engn_mmu_fault_triggered())
    224  struct nvkm_runl *runl = engn->runl;  (in gf100_engn_mmu_fault_trigger())
    310  gf100_runq_intr(struct nvkm_runq *runq, struct nvkm_runl *null)  (in gf100_runq_intr())
    370  gf100_runl_preempt_pending(struct nvkm_runl *runl)  (in gf100_runl_preempt_pending())
    376  gf100_runl_fault_clear(struct nvkm_runl *runl)  (in gf100_runl_fault_clear())
    382  gf100_runl_allow(struct nvkm_runl *runl, u32 engm)  (in gf100_runl_allow())
    388  gf100_runl_block(struct nvkm_runl *runl, u32 engm)  (in gf100_runl_block())
    394  gf100_runl_pending(struct nvkm_runl *runl)  (in gf100_runl_pending())
    400  gf100_runl_commit(struct nvkm_runl *runl, struct nvkm_memory *memory, u32 start, int count)  (in gf100_runl_commit())
    540  struct nvkm_runl *runl;  (in gf100_fifo_mmu_fault_recover())
    [all …]

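gf100_runl_block() and gf100_runl_allow() (like their gk104 counterparts in priv.h) take a u32 engm, which reads as a bitmask of engines whose scheduling is to be stopped or re-enabled; the MMU-fault recovery path listed above (gf100_fifo_mmu_fault_recover()) is a natural user of such masking. A tiny standalone model of the mask handling, with invented state in place of the hardware register:

#include <stdint.h>
#include <stdio.h>

/* stand-in for the per-runlist "engines blocked from scheduling" state */
static uint32_t blocked;

static void runl_block(uint32_t engm) { blocked |=  engm; }
static void runl_allow(uint32_t engm) { blocked &= ~engm; }

int main(void)
{
        runl_block(1u << 0 | 1u << 3);   /* stop engines 0 and 3 during recovery    */
        printf("blocked mask: 0x%08x\n", blocked);

        runl_allow(1u << 0 | 1u << 3);   /* recovery done: let them schedule again  */
        printf("blocked mask: 0x%08x\n", blocked);
        return 0;
}
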
nv50.c
    218  nv50_runl_pending(struct nvkm_runl *runl)  (in nv50_runl_pending())
    224  nv50_runl_wait(struct nvkm_runl *runl)  (in nv50_runl_wait())
    238  nv50_runl_commit(struct nvkm_runl *runl, struct nvkm_memory *memory, u32 start, int count)  (in nv50_runl_commit())
    254  nv50_runl_alloc(struct nvkm_runl *runl, u32 *offset)  (in nv50_runl_alloc())
    285  nv50_runl_update(struct nvkm_runl *runl)  (in nv50_runl_update())
    341  struct nvkm_runl *runl = nvkm_runl_first(fifo);  (in nv50_fifo_init())

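Taken together, nv50_runl_update(), nv50_runl_alloc() and nv50_runl_commit() suggest a two-phase flow: rebuild the runlist in a scratch buffer, then hand that buffer plus a start offset and entry count to the commit hook. The standalone model below mirrors only that flow; the entry format, channel IDs and commit behaviour are all invented for the sketch.

#include <stdint.h>
#include <stdio.h>

#define MAX_ENTRIES 8

/* stand-in for the commit hook: in the driver this points hardware at the
 * freshly written buffer; here it only prints what would be submitted */
static void commit(const uint32_t *mem, uint32_t start, int count)
{
        printf("commit %d entries starting at slot %u\n", count, (unsigned)start);
        for (int i = 0; i < count; i++)
                printf("  entry %d: chid %u\n", i, (unsigned)mem[start + i]);
}

int main(void)
{
        uint32_t runlist[MAX_ENTRIES];
        int nr = 0;

        /* "update": walk the channels considered runnable, append one entry each */
        for (uint32_t chid = 3; chid <= 5; chid++)
                runlist[nr++] = chid;

        commit(runlist, 0, nr);
        return 0;
}
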
base.c
     41  struct nvkm_runl *runl;  (in nvkm_fifo_ctxsw_in_progress())
    128  struct nvkm_runl *runl;  (in nvkm_fifo_fini())
    143  struct nvkm_runl *runl;  (in nvkm_fifo_init())
    172  struct nvkm_runl *runl;  (in nvkm_fifo_info())
    246  struct nvkm_runl *runl;  (in nvkm_fifo_oneinit())
    337  struct nvkm_runl *runl, *runt;  (in nvkm_fifo_dtor())

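nvkm_fifo_init(), nvkm_fifo_fini() and nvkm_fifo_oneinit() above each declare a struct nvkm_runl *runl cursor, which matches the expected shape of those paths: walk every runlist the fifo owns and invoke the corresponding hook on each. A standalone model of that walk, with a plain array standing in for the driver's runlist list:

#include <stdio.h>

struct runl {
        int id;
        void (*init)(struct runl *);
        void (*fini)(struct runl *);
};

static void runl_init(struct runl *runl) { printf("runl %d: init\n", runl->id); }
static void runl_fini(struct runl *runl) { printf("runl %d: fini\n", runl->id); }

int main(void)
{
        struct runl runls[] = {
                { 0, runl_init, runl_fini },
                { 1, runl_init, runl_fini },
        };
        const int nr = sizeof(runls) / sizeof(runls[0]);

        /* fifo init: bring up every runlist */
        for (int i = 0; i < nr; i++)
                runls[i].init(&runls[i]);

        /* fifo fini: tear them down again */
        for (int i = 0; i < nr; i++)
                runls[i].fini(&runls[i]);
        return 0;
}
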
tu102.c
     63  tu102_runl_pending(struct nvkm_runl *runl)  (in tu102_runl_pending())
     71  tu102_runl_commit(struct nvkm_runl *runl, struct nvkm_memory *memory, u32 start, int count)  (in tu102_runl_commit())
    139  struct nvkm_runl *runl = engn->runl;  (in tu102_fifo_intr_ctxsw_timeout_info())
    172  struct nvkm_runl *runl;  (in tu102_fifo_intr_ctxsw_timeout())

gk104.c
     74  struct nvkm_runl *runl = chan->cgrp->runl;  (in gk104_chan_bind())
    370  gk104_runq_intr(struct nvkm_runq *runq, struct nvkm_runl *null)  (in gk104_runq_intr())
    404  gk104_runl_fault_clear(struct nvkm_runl *runl)  (in gk104_runl_fault_clear())
    410  gk104_runl_allow(struct nvkm_runl *runl, u32 engm)  (in gk104_runl_allow())
    416  gk104_runl_block(struct nvkm_runl *runl, u32 engm)  (in gk104_runl_block())
    422  gk104_runl_pending(struct nvkm_runl *runl)  (in gk104_runl_pending())
    430  gk104_runl_commit(struct nvkm_runl *runl, struct nvkm_memory *memory, u32 start, int count)  (in gk104_runl_commit())
    643  struct nvkm_runl *runl;  (in gk104_fifo_intr_runlist())
    760  struct nvkm_runl *runl;  (in gk104_fifo_runl_ctor())

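gk104_runl_commit() paired with gk104_runl_pending() reads as the usual two-step submission: point the hardware at the new runlist, then poll the pending state until the scheduler has taken it (or a timeout expires). The model below only illustrates that bounded-poll idea; the fake "hardware", the flag and the timeout are invented.

#include <stdbool.h>
#include <stdio.h>

static int hw_busy_polls;                       /* fake hardware: busy for N polls */

static void runl_commit(void)  { hw_busy_polls = 3; }            /* kick the switch */
static bool runl_pending(void) { return hw_busy_polls-- > 0; }   /* still busy?     */

int main(void)
{
        runl_commit();

        /* bounded poll, standing in for the driver's timeout-based wait */
        int timeout = 10;
        while (runl_pending() && --timeout)
                ;

        printf(timeout ? "runlist switch completed\n"
                       : "timed out waiting for runlist switch\n");
        return 0;
}
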
runq.h
      5  struct nvkm_runl;
     10  bool (*intr)(struct nvkm_runq *, struct nvkm_runl *);

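runq.h gives every runqueue an intr hook that takes the owning runlist and returns bool, i.e. "did this queue report anything". The sketch below models one plausible way such hooks get driven, fanning out over the queues and OR-ing the results; the fan-out itself is an assumption about usage, not taken from the listing.

#include <stdbool.h>
#include <stdio.h>

struct runl;                                    /* opaque in this sketch */

struct runq {
        int id;
        bool (*intr)(struct runq *, struct runl *);
};

static bool runq_intr_quiet(struct runq *runq, struct runl *runl)
{
        (void)runq; (void)runl;
        return false;                           /* nothing pending on this queue */
}

static bool runq_intr_noisy(struct runq *runq, struct runl *runl)
{
        (void)runl;
        printf("runq %d: handled an interrupt\n", runq->id);
        return true;
}

int main(void)
{
        struct runq runqs[] = {
                { 0, runq_intr_quiet },
                { 1, runq_intr_noisy },
        };
        bool handled = false;

        for (unsigned int i = 0; i < sizeof(runqs) / sizeof(runqs[0]); i++)
                handled |= runqs[i].intr(&runqs[i], NULL);

        printf("any runq handled: %s\n", handled ? "yes" : "no");
        return 0;
}
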
cgrp.h
     34  struct nvkm_runl *runl;
     57  int nvkm_cgrp_new(struct nvkm_runl *, const char *name, struct nvkm_vmm *, bool hw,

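cgrp.h (line 34 above) embeds a struct nvkm_runl *runl pointer in the channel group, which is why almost every .c entry in this listing reaches the runlist as chan->cgrp->runl: a channel belongs to a group, and the group is tied to one runlist. A minimal model of that ownership chain, with the types stripped down to just the pointers involved:

#include <stdio.h>

struct runl { int id; };

struct cgrp {
        struct runl *runl;       /* the runlist this group is scheduled on */
};

struct chan {
        struct cgrp *cgrp;       /* the group this channel belongs to      */
};

int main(void)
{
        struct runl runl = { .id = 2 };
        struct cgrp cgrp = { .runl = &runl };
        struct chan chan = { .cgrp = &cgrp };

        /* the chan->cgrp->runl walk seen throughout the listing */
        printf("channel is on runlist %d\n", chan.cgrp->runl->id);
        return 0;
}
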
chan.h
      7  struct nvkm_runl;
     59  int nvkm_chan_new_(const struct nvkm_chan_func *, struct nvkm_runl *, int runq, struct nvkm_cgrp *,

r535.c
    230  struct nvkm_runl *runl = chan->cgrp->runl;  (in r535_chan_id_put())
    259  struct nvkm_runl *runl = chan->cgrp->runl;  (in r535_chan_id_get_locked())
    437  r535_runl_allow(struct nvkm_runl *runl, u32 engm)  (in r535_runl_allow())
    442  r535_runl_block(struct nvkm_runl *runl, u32 engm)  (in r535_runl_block())
    510  struct nvkm_runl *runl;  (in r535_fifo_ectx_size())
    540  struct nvkm_runl *runl;  (in r535_fifo_runl_ctor())

g98.c
     31  struct nvkm_runl *runl;  (in g98_fifo_runl_ctor())

cgrp.c
    178  struct nvkm_runl *runl = cgrp->runl;  (in nvkm_cgrp_del())
    222  nvkm_cgrp_new(struct nvkm_runl *runl, const char *name, struct nvkm_vmm *vmm, bool hw,  (in nvkm_cgrp_new())

gv100.c
    177  gv100_runl_preempt(struct nvkm_runl *runl)  (in gv100_runl_preempt())
    458  struct nvkm_runl *runl;  (in gv100_fifo_intr_ctxsw_timeout())

ucgrp.c
     85  struct nvkm_runl *runl;  (in nvkm_ucgrp_new())

uchan.c
     45  struct nvkm_runl *runl = chan->cgrp->runl;  (in nvkm_uchan_uevent())
    334  struct nvkm_runl *runl;  (in nvkm_uchan_new())

g84.c
    192  struct nvkm_runl *runl;  (in g84_fifo_runl_ctor())

nv04.c
    496  struct nvkm_runl *runl;  (in nv04_fifo_runl_ctor())