Lines matching refs: a6xx_state
102 static void *state_kcalloc(struct a6xx_gpu_state *a6xx_state, int nr, size_t objsize) in state_kcalloc() argument
110 list_add_tail(&obj->node, &a6xx_state->objs); in state_kcalloc()
114 static void *state_kmemdup(struct a6xx_gpu_state *a6xx_state, void *src, in state_kmemdup() argument
117 void *dst = state_kcalloc(a6xx_state, 1, size); in state_kmemdup()
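
The two helpers above implement this file's tracked-allocation pattern: every capture buffer is allocated through state_kcalloc(), which wraps the payload in an object linked onto a6xx_state->objs so that teardown can free all captures in one pass (see the destroy fragments near the end of this listing). A minimal sketch of the pair, reconstructed from the fragments; the a6xx_state_memobj wrapper layout is an assumption based on the visible list_add_tail() call:

	/* Hypothetical wrapper: list node followed by the payload. */
	struct a6xx_state_memobj {
		struct list_head node;
		unsigned long long data[];
	};

	static void *state_kcalloc(struct a6xx_gpu_state *a6xx_state,
			int nr, size_t objsize)
	{
		struct a6xx_state_memobj *obj =
			kvzalloc(sizeof(*obj) + nr * objsize, GFP_KERNEL);

		if (!obj)
			return NULL;

		/* Track the allocation so the destroy path can free it */
		list_add_tail(&obj->node, &a6xx_state->objs);
		return &obj->data;
	}

	static void *state_kmemdup(struct a6xx_gpu_state *a6xx_state,
			void *src, size_t size)
	{
		void *dst = state_kcalloc(a6xx_state, 1, size);

		if (dst)
			memcpy(dst, src, size);
		return dst;
	}
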
247 struct a6xx_gpu_state *a6xx_state, in a6xx_get_vbif_debugbus_block() argument
253 obj->data = state_kcalloc(a6xx_state, VBIF_DEBUGBUS_BLOCK_SIZE, in a6xx_get_vbif_debugbus_block()
301 struct a6xx_gpu_state *a6xx_state, in a6xx_get_debugbus_block() argument
308 obj->data = state_kcalloc(a6xx_state, block->count, sizeof(u64)); in a6xx_get_debugbus_block()
319 struct a6xx_gpu_state *a6xx_state, in a6xx_get_cx_debugbus_block() argument
326 obj->data = state_kcalloc(a6xx_state, block->count, sizeof(u64)); in a6xx_get_cx_debugbus_block()
337 struct a6xx_gpu_state *a6xx_state) in a6xx_get_debugbus_blocks() argument
345 a6xx_state->debugbus = state_kcalloc(a6xx_state, nr_debugbus_blocks, in a6xx_get_debugbus_blocks()
346 sizeof(*a6xx_state->debugbus)); in a6xx_get_debugbus_blocks()
348 if (a6xx_state->debugbus) { in a6xx_get_debugbus_blocks()
353 a6xx_state, in a6xx_get_debugbus_blocks()
355 &a6xx_state->debugbus[i]); in a6xx_get_debugbus_blocks()
357 a6xx_state->nr_debugbus = ARRAY_SIZE(a6xx_debugbus_blocks); in a6xx_get_debugbus_blocks()
365 a6xx_get_debugbus_block(gpu, a6xx_state, in a6xx_get_debugbus_blocks()
367 &a6xx_state->debugbus[i]); in a6xx_get_debugbus_blocks()
369 a6xx_state->nr_debugbus += 1; in a6xx_get_debugbus_blocks()
376 a6xx_state, in a6xx_get_debugbus_blocks()
378 &a6xx_state->debugbus[i]); in a6xx_get_debugbus_blocks()
384 struct a6xx_gpu_state *a6xx_state) in a7xx_get_debugbus_blocks() argument
411 a6xx_state->debugbus = state_kcalloc(a6xx_state, total_debugbus_blocks, in a7xx_get_debugbus_blocks()
412 sizeof(*a6xx_state->debugbus)); in a7xx_get_debugbus_blocks()
414 if (a6xx_state->debugbus) { in a7xx_get_debugbus_blocks()
417 a6xx_state, &a7xx_debugbus_blocks[debugbus_blocks[i]], in a7xx_get_debugbus_blocks()
418 &a6xx_state->debugbus[i]); in a7xx_get_debugbus_blocks()
423 a6xx_state, &a7xx_debugbus_blocks[gbif_debugbus_blocks[i]], in a7xx_get_debugbus_blocks()
424 &a6xx_state->debugbus[i + debugbus_blocks_count]); in a7xx_get_debugbus_blocks()
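
Both debugbus collectors (a6xx and a7xx) follow the same allocate-once, fill-per-block shape: size the array from the block table, capture each block into its slot, then record the count. A condensed sketch using the names visible above; the block tables themselves are elided:

	a6xx_state->debugbus = state_kcalloc(a6xx_state, nr_debugbus_blocks,
			sizeof(*a6xx_state->debugbus));

	if (a6xx_state->debugbus) {
		int i;

		for (i = 0; i < nr_debugbus_blocks; i++)
			a6xx_get_debugbus_block(gpu, a6xx_state,
					&a6xx_debugbus_blocks[i],
					&a6xx_state->debugbus[i]);

		a6xx_state->nr_debugbus = nr_debugbus_blocks;
	}
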
431 struct a6xx_gpu_state *a6xx_state) in a6xx_get_debugbus() argument
491 a7xx_get_debugbus_blocks(gpu, a6xx_state); in a6xx_get_debugbus()
493 a6xx_get_debugbus_blocks(gpu, a6xx_state); in a6xx_get_debugbus()
498 a6xx_state->vbif_debugbus = in a6xx_get_debugbus()
499 state_kcalloc(a6xx_state, 1, in a6xx_get_debugbus()
500 sizeof(*a6xx_state->vbif_debugbus)); in a6xx_get_debugbus()
502 if (a6xx_state->vbif_debugbus) in a6xx_get_debugbus()
503 a6xx_get_vbif_debugbus_block(gpu, a6xx_state, in a6xx_get_debugbus()
504 a6xx_state->vbif_debugbus); in a6xx_get_debugbus()
520 a6xx_state->cx_debugbus = in a6xx_get_debugbus()
521 state_kcalloc(a6xx_state, in a6xx_get_debugbus()
523 sizeof(*a6xx_state->cx_debugbus)); in a6xx_get_debugbus()
525 if (a6xx_state->cx_debugbus) { in a6xx_get_debugbus()
530 a6xx_state, in a6xx_get_debugbus()
532 &a6xx_state->cx_debugbus[i]); in a6xx_get_debugbus()
534 a6xx_state->nr_cx_debugbus = in a6xx_get_debugbus()
546 struct a6xx_gpu_state *a6xx_state, in a6xx_get_dbgahb_cluster() argument
587 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a6xx_get_dbgahb_cluster()
592 struct a6xx_gpu_state *a6xx_state, in a7xx_get_dbgahb_cluster() argument
629 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a7xx_get_dbgahb_cluster()
634 struct a6xx_gpu_state *a6xx_state, in a6xx_get_dbgahb_clusters() argument
639 a6xx_state->dbgahb_clusters = state_kcalloc(a6xx_state, in a6xx_get_dbgahb_clusters()
641 sizeof(*a6xx_state->dbgahb_clusters)); in a6xx_get_dbgahb_clusters()
643 if (!a6xx_state->dbgahb_clusters) in a6xx_get_dbgahb_clusters()
646 a6xx_state->nr_dbgahb_clusters = ARRAY_SIZE(a6xx_dbgahb_clusters); in a6xx_get_dbgahb_clusters()
649 a6xx_get_dbgahb_cluster(gpu, a6xx_state, in a6xx_get_dbgahb_clusters()
651 &a6xx_state->dbgahb_clusters[i], dumper); in a6xx_get_dbgahb_clusters()
655 struct a6xx_gpu_state *a6xx_state, in a7xx_get_dbgahb_clusters() argument
675 a6xx_state->dbgahb_clusters = state_kcalloc(a6xx_state, in a7xx_get_dbgahb_clusters()
677 sizeof(*a6xx_state->dbgahb_clusters)); in a7xx_get_dbgahb_clusters()
679 if (!a6xx_state->dbgahb_clusters) in a7xx_get_dbgahb_clusters()
682 a6xx_state->nr_dbgahb_clusters = dbgahb_clusters_size; in a7xx_get_dbgahb_clusters()
685 a7xx_get_dbgahb_cluster(gpu, a6xx_state, in a7xx_get_dbgahb_clusters()
687 &a6xx_state->dbgahb_clusters[i], dumper); in a7xx_get_dbgahb_clusters()
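
The dbgahb, cluster, shader, and crashdumper-register collectors in this range all share one tail: a dump script is written into the crashdumper bounce buffer, the dumper is run, and the captured block is duplicated into tracked state from a fixed data offset. A sketch of that tail; the a6xx_crashdumper_run() call is an assumption about how the dumper is kicked off, and the script construction and datasize computation are elided:

	if (a6xx_crashdumper_run(gpu, dumper))
		return;

	/* Copy the dumped block out of the bounce buffer into tracked state */
	obj->data = state_kmemdup(a6xx_state,
			dumper->ptr + A6XX_CD_DATA_OFFSET, datasize);
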
692 struct a6xx_gpu_state *a6xx_state, in a6xx_get_cluster() argument
747 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a6xx_get_cluster()
752 struct a6xx_gpu_state *a6xx_state, in a7xx_get_cluster() argument
792 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a7xx_get_cluster()
797 struct a6xx_gpu_state *a6xx_state, in a6xx_get_clusters() argument
802 a6xx_state->clusters = state_kcalloc(a6xx_state, in a6xx_get_clusters()
803 ARRAY_SIZE(a6xx_clusters), sizeof(*a6xx_state->clusters)); in a6xx_get_clusters()
805 if (!a6xx_state->clusters) in a6xx_get_clusters()
808 a6xx_state->nr_clusters = ARRAY_SIZE(a6xx_clusters); in a6xx_get_clusters()
811 a6xx_get_cluster(gpu, a6xx_state, &a6xx_clusters[i], in a6xx_get_clusters()
812 &a6xx_state->clusters[i], dumper); in a6xx_get_clusters()
816 struct a6xx_gpu_state *a6xx_state, in a7xx_get_clusters() argument
836 a6xx_state->clusters = state_kcalloc(a6xx_state, in a7xx_get_clusters()
837 clusters_size, sizeof(*a6xx_state->clusters)); in a7xx_get_clusters()
839 if (!a6xx_state->clusters) in a7xx_get_clusters()
842 a6xx_state->nr_clusters = clusters_size; in a7xx_get_clusters()
845 a7xx_get_cluster(gpu, a6xx_state, &clusters[i], in a7xx_get_clusters()
846 &a6xx_state->clusters[i], dumper); in a7xx_get_clusters()
851 struct a6xx_gpu_state *a6xx_state, in a6xx_get_shader_block() argument
880 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a6xx_get_shader_block()
885 struct a6xx_gpu_state *a6xx_state, in a7xx_get_shader_block() argument
925 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a7xx_get_shader_block()
935 struct a6xx_gpu_state *a6xx_state, in a6xx_get_shaders() argument
940 a6xx_state->shaders = state_kcalloc(a6xx_state, in a6xx_get_shaders()
941 ARRAY_SIZE(a6xx_shader_blocks), sizeof(*a6xx_state->shaders)); in a6xx_get_shaders()
943 if (!a6xx_state->shaders) in a6xx_get_shaders()
946 a6xx_state->nr_shaders = ARRAY_SIZE(a6xx_shader_blocks); in a6xx_get_shaders()
949 a6xx_get_shader_block(gpu, a6xx_state, &a6xx_shader_blocks[i], in a6xx_get_shaders()
950 &a6xx_state->shaders[i], dumper); in a6xx_get_shaders()
954 struct a6xx_gpu_state *a6xx_state, in a7xx_get_shaders() argument
974 a6xx_state->shaders = state_kcalloc(a6xx_state, in a7xx_get_shaders()
975 num_shader_blocks, sizeof(*a6xx_state->shaders)); in a7xx_get_shaders()
977 if (!a6xx_state->shaders) in a7xx_get_shaders()
980 a6xx_state->nr_shaders = num_shader_blocks; in a7xx_get_shaders()
983 a7xx_get_shader_block(gpu, a6xx_state, &shader_blocks[i], in a7xx_get_shaders()
984 &a6xx_state->shaders[i], dumper); in a7xx_get_shaders()
989 struct a6xx_gpu_state *a6xx_state, in a6xx_get_crashdumper_hlsq_registers() argument
1021 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a6xx_get_crashdumper_hlsq_registers()
1027 struct a6xx_gpu_state *a6xx_state, in a6xx_get_crashdumper_registers() argument
1064 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a6xx_get_crashdumper_registers()
1069 struct a6xx_gpu_state *a6xx_state, in a7xx_get_crashdumper_registers() argument
1101 obj->data = state_kmemdup(a6xx_state, dumper->ptr + A6XX_CD_DATA_OFFSET, in a7xx_get_crashdumper_registers()
1108 struct a6xx_gpu_state *a6xx_state, in a6xx_get_ahb_gpu_registers() argument
1123 obj->data = state_kcalloc(a6xx_state, regcount, sizeof(u32)); in a6xx_get_ahb_gpu_registers()
1138 struct a6xx_gpu_state *a6xx_state, in a7xx_get_ahb_gpu_registers() argument
1148 obj->data = state_kcalloc(a6xx_state, regcount, sizeof(u32)); in a7xx_get_ahb_gpu_registers()
1162 struct a6xx_gpu_state *a6xx_state, in a7xx_get_ahb_gpu_reglist() argument
1169 a7xx_get_ahb_gpu_registers(gpu, a6xx_state, regs->regs, obj); in a7xx_get_ahb_gpu_reglist()
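
The AHB collectors read registers directly from the CPU rather than through the crashdumper: count the registers described by the list, allocate a flat u32 array, and gpu_read() each one. A simplified sketch; the (first, last) pairing of the register list is an assumption:

	int i, index = 0, regcount = 0;

	/* Register lists are assumed to be (first, last) pairs */
	for (i = 0; i < regs->count; i += 2)
		regcount += regs->registers[i + 1] - regs->registers[i] + 1;

	obj->data = state_kcalloc(a6xx_state, regcount, sizeof(u32));
	if (!obj->data)
		return;

	for (i = 0; i < regs->count; i += 2) {
		u32 reg;

		for (reg = regs->registers[i]; reg <= regs->registers[i + 1]; reg++)
			obj->data[index++] = gpu_read(gpu, reg);
	}
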
1174 struct a6xx_gpu_state *a6xx_state, in _a6xx_get_gmu_registers() argument
1188 obj->data = state_kcalloc(a6xx_state, regcount, sizeof(u32)); in _a6xx_get_gmu_registers()
1211 struct a6xx_gpu_state *a6xx_state) in a6xx_get_gmu_registers() argument
1216 a6xx_state->gmu_registers = state_kcalloc(a6xx_state, in a6xx_get_gmu_registers()
1217 3, sizeof(*a6xx_state->gmu_registers)); in a6xx_get_gmu_registers()
1219 if (!a6xx_state->gmu_registers) in a6xx_get_gmu_registers()
1222 a6xx_state->nr_gmu_registers = 3; in a6xx_get_gmu_registers()
1225 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[0], in a6xx_get_gmu_registers()
1226 &a6xx_state->gmu_registers[0], false); in a6xx_get_gmu_registers()
1227 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[1], in a6xx_get_gmu_registers()
1228 &a6xx_state->gmu_registers[1], true); in a6xx_get_gmu_registers()
1236 _a6xx_get_gmu_registers(gpu, a6xx_state, &a6xx_gmu_reglist[2], in a6xx_get_gmu_registers()
1237 &a6xx_state->gmu_registers[2], false); in a6xx_get_gmu_registers()
1241 struct a6xx_gpu_state *a6xx_state, struct a6xx_gmu_bo *bo) in a6xx_snapshot_gmu_bo() argument
1248 snapshot = state_kcalloc(a6xx_state, 1, sizeof(*snapshot)); in a6xx_snapshot_gmu_bo()
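
a6xx_snapshot_gmu_bo() copies a whole GMU buffer object into the dump. Note the asymmetry: the snapshot descriptor goes through state_kcalloc() and is tracked, but its data payload is not, which is why the destroy fragments below kvfree() the gmu_log/gmu_hfi/gmu_debug data explicitly. A sketch, assuming conventional iova/size/virt fields on struct a6xx_gmu_bo:

	snapshot = state_kcalloc(a6xx_state, 1, sizeof(*snapshot));
	if (!snapshot)
		return NULL;

	snapshot->iova = bo->iova;
	snapshot->size = bo->size;
	snapshot->data = kvzalloc(snapshot->size, GFP_KERNEL);
	if (!snapshot->data)
		return NULL;

	memcpy(snapshot->data, bo->virt, bo->size);
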
1264 struct a6xx_gpu_state *a6xx_state) in a6xx_snapshot_gmu_hfi_history() argument
1271 BUILD_BUG_ON(ARRAY_SIZE(gmu->queues) != ARRAY_SIZE(a6xx_state->hfi_queue_history)); in a6xx_snapshot_gmu_hfi_history()
1277 a6xx_state->hfi_queue_history[i][j] = queue->history[idx]; in a6xx_snapshot_gmu_hfi_history()
1285 struct a6xx_gpu_state *a6xx_state, in a6xx_get_registers() argument
1294 a6xx_state->registers = state_kcalloc(a6xx_state, in a6xx_get_registers()
1295 count, sizeof(*a6xx_state->registers)); in a6xx_get_registers()
1297 if (!a6xx_state->registers) in a6xx_get_registers()
1300 a6xx_state->nr_registers = count; in a6xx_get_registers()
1303 a6xx_state, &a6xx_ahb_reglist, in a6xx_get_registers()
1304 &a6xx_state->registers[index++]); in a6xx_get_registers()
1308 a6xx_state, &a6xx_gbif_reglist, in a6xx_get_registers()
1309 &a6xx_state->registers[index++]); in a6xx_get_registers()
1312 a6xx_state, &a6xx_vbif_reglist, in a6xx_get_registers()
1313 &a6xx_state->registers[index++]); in a6xx_get_registers()
1324 a6xx_state, &a6xx_reglist[i], in a6xx_get_registers()
1325 &a6xx_state->registers[index++]); in a6xx_get_registers()
1331 a6xx_state, &a6xx_reglist[i], in a6xx_get_registers()
1332 &a6xx_state->registers[index++], in a6xx_get_registers()
1337 a6xx_state, &a6xx_hlsq_reglist[i], in a6xx_get_registers()
1338 &a6xx_state->registers[index++], in a6xx_get_registers()
1345 struct a6xx_gpu_state *a6xx_state, in a7xx_get_registers() argument
1380 a6xx_state->registers = state_kcalloc(a6xx_state, in a7xx_get_registers()
1381 count, sizeof(*a6xx_state->registers)); in a7xx_get_registers()
1383 if (!a6xx_state->registers) in a7xx_get_registers()
1386 a6xx_state->nr_registers = count; in a7xx_get_registers()
1388 a7xx_get_ahb_gpu_registers(gpu, a6xx_state, pre_crashdumper_regs, in a7xx_get_registers()
1389 &a6xx_state->registers[index++]); in a7xx_get_registers()
1393 a6xx_state, ®list[0], in a7xx_get_registers()
1394 &a6xx_state->registers[index++]); in a7xx_get_registers()
1400 a6xx_state, ®list[i], in a7xx_get_registers()
1401 &a6xx_state->registers[index++], in a7xx_get_registers()
1406 struct a6xx_gpu_state *a6xx_state) in a7xx_get_post_crashdumper_registers() argument
1415 a6xx_state, regs, in a7xx_get_post_crashdumper_registers()
1416 &a6xx_state->registers[a6xx_state->nr_registers - 1]); in a7xx_get_post_crashdumper_registers()
1439 struct a6xx_gpu_state *a6xx_state, in a6xx_get_indexed_regs() argument
1450 obj->data = state_kcalloc(a6xx_state, count, sizeof(u32)); in a6xx_get_indexed_regs()
1464 struct a6xx_gpu_state *a6xx_state) in a6xx_get_indexed_registers() argument
1470 a6xx_state->indexed_regs = state_kcalloc(a6xx_state, count, in a6xx_get_indexed_registers()
1471 sizeof(*a6xx_state->indexed_regs)); in a6xx_get_indexed_registers()
1472 if (!a6xx_state->indexed_regs) in a6xx_get_indexed_registers()
1476 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_indexed_reglist[i], in a6xx_get_indexed_registers()
1477 &a6xx_state->indexed_regs[i]); in a6xx_get_indexed_registers()
1486 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed, in a6xx_get_indexed_registers()
1487 &a6xx_state->indexed_regs[i]); in a6xx_get_indexed_registers()
1490 a6xx_state->nr_indexed_regs = count; in a6xx_get_indexed_registers()
1499 a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed, in a6xx_get_indexed_registers()
1500 &a6xx_state->indexed_regs[i]); in a6xx_get_indexed_registers()
1506 a6xx_state->indexed_regs[i].data[0x2000] = mempool_size; in a6xx_get_indexed_registers()
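
The data[0x2000] write at source line 1506 is a fix-up, not a capture: the CP mempool size register is zeroed to freeze the pool while it is dumped, and offset 0x2000 of the dumped pool is where the size lives, so the saved value is patched back into the snapshot before the hardware register is restored. A reconstructed sketch around the fragments above:

	/* Zero the pool size to stabilize the mempool while dumping */
	mempool_size = gpu_read(gpu, REG_A6XX_CP_MEM_POOL_SIZE);
	gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, 0);

	a6xx_get_indexed_regs(gpu, a6xx_state, &a6xx_cp_mempool_indexed,
			&a6xx_state->indexed_regs[i]);

	/* Offset 0x2000 in the dump holds the size; keep it consistent */
	a6xx_state->indexed_regs[i].data[0x2000] = mempool_size;
	gpu_write(gpu, REG_A6XX_CP_MEM_POOL_SIZE, mempool_size);
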
1513 struct a6xx_gpu_state *a6xx_state) in a7xx_get_indexed_registers() argument
1530 a6xx_state->indexed_regs = state_kcalloc(a6xx_state, in a7xx_get_indexed_registers()
1532 sizeof(*a6xx_state->indexed_regs)); in a7xx_get_indexed_registers()
1533 if (!a6xx_state->indexed_regs) in a7xx_get_indexed_registers()
1536 a6xx_state->nr_indexed_regs = indexed_count + mempool_count; in a7xx_get_indexed_registers()
1540 a6xx_get_indexed_regs(gpu, a6xx_state, &indexed_regs[i], in a7xx_get_indexed_registers()
1541 &a6xx_state->indexed_regs[i]); in a7xx_get_indexed_registers()
1548 a6xx_get_indexed_regs(gpu, a6xx_state, &a7xx_cp_bv_mempool_indexed[i], in a7xx_get_indexed_registers()
1549 &a6xx_state->indexed_regs[indexed_count + i]); in a7xx_get_indexed_registers()
1561 struct a6xx_gpu_state *a6xx_state = kzalloc(sizeof(*a6xx_state), in a6xx_gpu_state_get() local
1566 if (!a6xx_state) in a6xx_gpu_state_get()
1569 INIT_LIST_HEAD(&a6xx_state->objs); in a6xx_gpu_state_get()
1572 adreno_gpu_state_get(gpu, &a6xx_state->base); in a6xx_gpu_state_get()
1575 a6xx_get_gmu_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
1577 a6xx_state->gmu_log = a6xx_snapshot_gmu_bo(a6xx_state, &a6xx_gpu->gmu.log); in a6xx_gpu_state_get()
1578 a6xx_state->gmu_hfi = a6xx_snapshot_gmu_bo(a6xx_state, &a6xx_gpu->gmu.hfi); in a6xx_gpu_state_get()
1579 a6xx_state->gmu_debug = a6xx_snapshot_gmu_bo(a6xx_state, &a6xx_gpu->gmu.debug); in a6xx_gpu_state_get()
1581 a6xx_snapshot_gmu_hfi_history(gpu, a6xx_state); in a6xx_gpu_state_get()
1586 return &a6xx_state->base; in a6xx_gpu_state_get()
1590 a7xx_get_indexed_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
1592 a6xx_get_indexed_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
1606 a7xx_get_registers(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1609 a7xx_get_shaders(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1610 a7xx_get_clusters(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1611 a7xx_get_dbgahb_clusters(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1616 a7xx_get_post_crashdumper_registers(gpu, a6xx_state); in a6xx_gpu_state_get()
1618 a6xx_get_registers(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1621 a6xx_get_shaders(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1622 a6xx_get_clusters(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1623 a6xx_get_dbgahb_clusters(gpu, a6xx_state, dumper); in a6xx_gpu_state_get()
1630 a6xx_get_debugbus(gpu, a6xx_state); in a6xx_gpu_state_get()
1632 a6xx_state->gpu_initialized = !gpu->needs_hw_init; in a6xx_gpu_state_get()
1634 return &a6xx_state->base; in a6xx_gpu_state_get()
1642 struct a6xx_gpu_state *a6xx_state = container_of(state, in a6xx_gpu_state_destroy() local
1645 if (a6xx_state->gmu_log) in a6xx_gpu_state_destroy()
1646 kvfree(a6xx_state->gmu_log->data); in a6xx_gpu_state_destroy()
1648 if (a6xx_state->gmu_hfi) in a6xx_gpu_state_destroy()
1649 kvfree(a6xx_state->gmu_hfi->data); in a6xx_gpu_state_destroy()
1651 if (a6xx_state->gmu_debug) in a6xx_gpu_state_destroy()
1652 kvfree(a6xx_state->gmu_debug->data); in a6xx_gpu_state_destroy()
1654 list_for_each_entry_safe(obj, tmp, &a6xx_state->objs, node) { in a6xx_gpu_state_destroy()
1660 kfree(a6xx_state); in a6xx_gpu_state_destroy()
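
The destroy path is the payoff of the tracked-allocation pattern: after the explicitly kvfree()d GMU payloads, one walk of a6xx_state->objs releases every buffer handed out by state_kcalloc()/state_kmemdup(). A sketch of the loop body elided from the match above:

	list_for_each_entry_safe(obj, tmp, &a6xx_state->objs, node) {
		list_del(&obj->node);
		kvfree(obj);
	}
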
1916 static void a6xx_show_debugbus(struct a6xx_gpu_state *a6xx_state, in a6xx_show_debugbus() argument
1921 for (i = 0; i < a6xx_state->nr_debugbus; i++) { in a6xx_show_debugbus()
1922 struct a6xx_gpu_state_obj *obj = &a6xx_state->debugbus[i]; in a6xx_show_debugbus()
1927 if (a6xx_state->vbif_debugbus) { in a6xx_show_debugbus()
1928 struct a6xx_gpu_state_obj *obj = a6xx_state->vbif_debugbus; in a6xx_show_debugbus()
1937 for (i = 0; i < a6xx_state->nr_cx_debugbus; i++) { in a6xx_show_debugbus()
1938 struct a6xx_gpu_state_obj *obj = &a6xx_state->cx_debugbus[i]; in a6xx_show_debugbus()
1948 struct a6xx_gpu_state *a6xx_state = container_of(state, in a6xx_show() local
1955 drm_printf(p, "gpu-initialized: %d\n", a6xx_state->gpu_initialized); in a6xx_show()
1960 if (a6xx_state->gmu_log) { in a6xx_show()
1961 struct msm_gpu_state_bo *gmu_log = a6xx_state->gmu_log; in a6xx_show()
1970 if (a6xx_state->gmu_hfi) { in a6xx_show()
1971 struct msm_gpu_state_bo *gmu_hfi = a6xx_state->gmu_hfi; in a6xx_show()
1976 for (i = 0; i < ARRAY_SIZE(a6xx_state->hfi_queue_history); i++) { in a6xx_show()
1979 drm_printf(p, " %d", a6xx_state->hfi_queue_history[i][j]); in a6xx_show()
1988 if (a6xx_state->gmu_debug) { in a6xx_show()
1989 struct msm_gpu_state_bo *gmu_debug = a6xx_state->gmu_debug; in a6xx_show()
1998 for (i = 0; i < a6xx_state->nr_registers; i++) { in a6xx_show()
1999 struct a6xx_gpu_state_obj *obj = &a6xx_state->registers[i]; in a6xx_show()
2014 for (i = 0; i < a6xx_state->nr_gmu_registers; i++) { in a6xx_show()
2015 struct a6xx_gpu_state_obj *obj = &a6xx_state->gmu_registers[i]; in a6xx_show()
2025 for (i = 0; i < a6xx_state->nr_indexed_regs; i++) in a6xx_show()
2026 a6xx_show_indexed_regs(&a6xx_state->indexed_regs[i], p); in a6xx_show()
2029 for (i = 0; i < a6xx_state->nr_shaders; i++) { in a6xx_show()
2031 a7xx_show_shader(&a6xx_state->shaders[i], p); in a6xx_show()
2033 a6xx_show_shader(&a6xx_state->shaders[i], p); in a6xx_show()
2037 for (i = 0; i < a6xx_state->nr_clusters; i++) { in a6xx_show()
2039 a7xx_show_cluster(&a6xx_state->clusters[i], p); in a6xx_show()
2041 a6xx_show_cluster(&a6xx_state->clusters[i], p); in a6xx_show()
2044 for (i = 0; i < a6xx_state->nr_dbgahb_clusters; i++) { in a6xx_show()
2046 a7xx_show_dbgahb_cluster(&a6xx_state->dbgahb_clusters[i], p); in a6xx_show()
2048 a6xx_show_dbgahb_cluster(&a6xx_state->dbgahb_clusters[i], p); in a6xx_show()
2052 a6xx_show_debugbus(a6xx_state, p); in a6xx_show()