Lines Matching +full:device +full:- +full:addr
(nouveau NVKM Xtensa engine core, drivers/gpu/drm/nouveau/nvkm/engine/xtensa.c;
only matching lines are shown, with elided code marked "...")
In nvkm_xtensa_oclass_get():

	struct nvkm_xtensa *xtensa = nvkm_xtensa(oclass->engine);
	...
	while (xtensa->func->sclass[c].oclass) {
		...
		oclass->base = xtensa->func->sclass[index];
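
/*
 * For context, a minimal sketch of the enumeration contract assumed by the
 * lines above (illustrative reconstruction, not verbatim): oclass_get()
 * walks the zero-terminated func->sclass[] table, fills in oclass->base
 * when the running count hits the requested index, and otherwise returns
 * the number of classes found.
 */
static int
sketch_oclass_get(struct nvkm_oclass *oclass, int index)
{
	struct nvkm_xtensa *xtensa = nvkm_xtensa(oclass->engine);
	int c = 0;

	while (xtensa->func->sclass[c].oclass) {
		if (c++ == index) {
			oclass->base = xtensa->func->sclass[index];
			return index;
		}
	}
	return c;
}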

In nvkm_xtensa_cclass_bind():

	return nvkm_gpuobj_new(object->engine->subdev.device, 0x10000, align,
			       ...
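
/*
 * The bind callback above carves a 64 KiB context object out of instance
 * memory. A sketch of the full callback, assuming the usual NVKM
 * nvkm_gpuobj_new(device, size, align, zero, parent, pgpuobj) signature
 * (the listing only shows the first line of the call):
 */
static int
sketch_cclass_bind(struct nvkm_object *object, struct nvkm_gpuobj *parent,
		   int align, struct nvkm_gpuobj **pgpuobj)
{
	return nvkm_gpuobj_new(object->engine->subdev.device, 0x10000, align,
			       true, parent, pgpuobj);
}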

In nvkm_xtensa_intr():

	struct nvkm_subdev *subdev = &xtensa->engine.subdev;
	struct nvkm_device *device = subdev->device;
	const u32 base = xtensa->addr;
	u32 unk104 = nvkm_rd32(device, base + 0xd04);
	u32 intr = nvkm_rd32(device, base + 0xc20);
	u32 chan = nvkm_rd32(device, base + 0xc28);
	u32 unk10c = nvkm_rd32(device, base + 0xd0c);
	...
	nvkm_wr32(device, base + 0xc20, intr); /* ack, then re-read for still-pending bits */
	intr = nvkm_rd32(device, base + 0xc20);
	...
	nvkm_mask(device, xtensa->addr + 0xd94, 0, xtensa->func->fifo_val);
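
/*
 * nvkm_mask(device, addr, mask, data) is NVKM's read-modify-write helper;
 * with a zero mask nothing is cleared, so the FIFO_CTRL poke above is
 * equivalent to this sketch:
 */
static void
sketch_fifo_ctrl_on(struct nvkm_xtensa *xtensa)
{
	struct nvkm_device *device = xtensa->engine.subdev.device;
	u32 ctrl = nvkm_rd32(device, xtensa->addr + 0xd94);

	nvkm_wr32(device, xtensa->addr + 0xd94, ctrl | xtensa->func->fifo_val);
}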

In nvkm_xtensa_fini():

	struct nvkm_device *device = xtensa->engine.subdev.device;
	const u32 base = xtensa->addr;
	...
	nvkm_wr32(device, base + 0xd84, 0); /* INTR_EN */
	nvkm_wr32(device, base + 0xd94, 0); /* FIFO_CTRL */
	...
	nvkm_memory_unref(&xtensa->gpu_fw);
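	/*
	 * The unref above presumably sits under the standard
	 * fini(engine, suspend) flag, i.e.
	 * "if (!suspend) nvkm_memory_unref(&xtensa->gpu_fw);", so the
	 * uploaded firmware survives suspend/resume and is only released on
	 * full teardown (assumption; the guard line is not part of the match).
	 */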

In nvkm_xtensa_init():

	struct nvkm_subdev *subdev = &xtensa->engine.subdev;
	struct nvkm_device *device = subdev->device;
	const u32 base = xtensa->addr;
	...
	u64 addr, size;
	...
	if (!xtensa->gpu_fw) {
		...
			 xtensa->addr >> 12); /* tail of the snprintf() that builds the firmware name */
		...
		ret = request_firmware(&fw, name, device->dev);
		...
		if (fw->size > 0x40000) {
			...
			return -EINVAL;
		...
		ret = nvkm_memory_new(device, NVKM_MEM_TARGET_INST,
				      ...
				      &xtensa->gpu_fw);
		...
		nvkm_kmap(xtensa->gpu_fw);
		for (i = 0; i < fw->size / 4; i++)
			nvkm_wo32(xtensa->gpu_fw, i * 4, *((u32 *)fw->data + i));
		nvkm_done(xtensa->gpu_fw);
	...
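
/*
 * Distilled shape of the firmware load above (illustrative sketch, not
 * verbatim; error paths abbreviated, and the 0x40000 size / 0x1000
 * alignment are assumptions matching the fw->size check). Here
 * request_firmware() pins the blob in host memory, nvkm_memory_new()
 * allocates GPU-visible instance memory, and the nvkm_kmap() /
 * nvkm_wo32() / nvkm_done() trio copies it one 32-bit word at a time.
 */
static int
sketch_upload_fw(struct nvkm_device *device, const char *name,
		 struct nvkm_memory **pmem)
{
	const struct firmware *fw;
	int i, ret;

	ret = request_firmware(&fw, name, device->dev);
	if (ret)
		return ret;

	ret = nvkm_memory_new(device, NVKM_MEM_TARGET_INST, 0x40000, 0x1000,
			      false, pmem);
	if (ret == 0) {
		nvkm_kmap(*pmem);
		for (i = 0; i < fw->size / 4; i++)
			nvkm_wo32(*pmem, i * 4, ((u32 *)fw->data)[i]);
		nvkm_done(*pmem);
	}
	release_firmware(fw);
	return ret;
}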

	addr = nvkm_memory_addr(xtensa->gpu_fw);
	size = nvkm_memory_size(xtensa->gpu_fw);
	...
	nvkm_wr32(device, base + 0xd10, 0x1fffffff); /* ?? */
	nvkm_wr32(device, base + 0xd08, 0x0fffffff); /* ?? */
	...
	nvkm_wr32(device, base + 0xd28, xtensa->func->unkd28); /* ?? */
	nvkm_wr32(device, base + 0xc20, 0x3f); /* INTR */
	nvkm_wr32(device, base + 0xd84, 0x3f); /* INTR_EN */
	...
	nvkm_wr32(device, base + 0xcc0, addr >> 8); /* XT_REGION_BASE */
	nvkm_wr32(device, base + 0xcc4, 0x1c); /* XT_REGION_SETUP */
	nvkm_wr32(device, base + 0xcc8, size >> 8); /* XT_REGION_LIMIT */
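	/*
	 * XT_REGION_BASE/LIMIT take addresses in 256-byte units (hence the
	 * >> 8 above), so the firmware allocation must be at least 256-byte
	 * aligned for the programmed region to cover it exactly.
	 */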
	...
	tmp = nvkm_rd32(device, 0x0);
	nvkm_wr32(device, base + 0xde0, tmp); /* SCRATCH_H2X */
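	/*
	 * MMIO offset 0x0 is PMC_BOOT_0, the chip ID register; the value is
	 * apparently handed to the microcode through the host-to-Xtensa
	 * scratch register.
	 */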
	...
	nvkm_wr32(device, base + 0xce8, 0xf); /* XT_REGION_SETUP */
	...
	nvkm_wr32(device, base + 0xc20, 0x3f); /* INTR */
	nvkm_wr32(device, base + 0xd84, 0x3f); /* INTR_EN */
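	/*
	 * The INTR-ack/INTR-enable pair (0x3f covers six interrupt bits) is
	 * written both before and after the region setup, presumably to clear
	 * anything raised while the regions were being programmed
	 * (assumption; the hardware is undocumented).
	 */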

Definition of nvkm_xtensa_new_():

int
nvkm_xtensa_new_(const struct nvkm_xtensa_func *func, struct nvkm_device *device,
		 enum nvkm_subdev_type type, int inst, bool enable, u32 addr,
		 ...
	...
		return -ENOMEM;
	xtensa->func = func;
	xtensa->addr = addr;
	*pengine = &xtensa->engine;
	...
	return nvkm_engine_ctor(&nvkm_xtensa, device, type, inst, enable, &xtensa->engine);
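
/*
 * Hypothetical caller, to show how a chip-specific engine would wrap the
 * common constructor. The func-table values, the names, and the 0xf000
 * MMIO base are illustrative assumptions, not taken from the listing.
 */
static const struct nvkm_xtensa_func
sketch_vp_func = {
	.fifo_val = 0x111,     /* assumed; value OR'd into FIFO_CTRL by the intr handler */
	.unkd28 = 0x9c544,     /* assumed; value written to the unknown 0xd28 register */
	.sclass = {
		{}             /* zero terminator; oclass_get() stops here */
	},
};

int
sketch_vp_new(struct nvkm_device *device, enum nvkm_subdev_type type,
	      int inst, struct nvkm_engine **pengine)
{
	return nvkm_xtensa_new_(&sketch_vp_func, device, type, inst,
				true, 0xf000, pengine);
}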