Searched refs: num_xcc (Results 1 – 9 of 9) sorted by relevance

/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
aqua_vanjaram.c
315 int num_xcc, num_xcc_per_xcp = 0, mode = 0; in __aqua_vanjaram_calc_xcp_mode() local
317 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_calc_xcp_mode()
320 if ((num_xcc_per_xcp) && (num_xcc % num_xcc_per_xcp == 0)) in __aqua_vanjaram_calc_xcp_mode()
321 mode = num_xcc / num_xcc_per_xcp; in __aqua_vanjaram_calc_xcp_mode()
367 int num_xcc, num_xcc_per_xcp = 0; in __aqua_vanjaram_get_xcc_per_xcp() local
369 num_xcc = NUM_XCC(xcp_mgr->adev->gfx.xcc_mask); in __aqua_vanjaram_get_xcc_per_xcp()
373 num_xcc_per_xcp = num_xcc; in __aqua_vanjaram_get_xcc_per_xcp()
376 num_xcc_per_xcp = num_xcc / 2; in __aqua_vanjaram_get_xcc_per_xcp()
379 num_xcc_per_xcp = num_xcc / 3; in __aqua_vanjaram_get_xcc_per_xcp()
382 num_xcc_per_xcp = num_xcc / 4; in __aqua_vanjaram_get_xcc_per_xcp()
[all …]
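The two aqua_vanjaram.c helpers above derive the partition arithmetic from the XCC mask. Below is a minimal userspace sketch of that arithmetic, assuming NUM_XCC() simply counts the set bits of xcc_mask; the helper names are illustrative, not the driver's.

#include <stdio.h>

/* Stand-in for the kernel's NUM_XCC(): number of XCC instances in the mask. */
static int num_xcc_from_mask(unsigned int xcc_mask)
{
	return __builtin_popcount(xcc_mask);
}

/* Mirrors the divisibility check shown in __aqua_vanjaram_calc_xcp_mode():
 * a partition mode is only valid when the XCCs split evenly across XCPs. */
static int calc_xcp_mode(unsigned int xcc_mask, int num_xcc_per_xcp)
{
	int num_xcc = num_xcc_from_mask(xcc_mask);
	int mode = 0;

	if (num_xcc_per_xcp && (num_xcc % num_xcc_per_xcp == 0))
		mode = num_xcc / num_xcc_per_xcp;

	return mode;
}

int main(void)
{
	/* 8 XCCs present: 2 per partition -> 4 partitions; 3 per partition -> invalid (0). */
	printf("mode(0xff, 2) = %d\n", calc_xcp_mode(0xff, 2));
	printf("mode(0xff, 3) = %d\n", calc_xcp_mode(0xff, 3));
	return 0;
}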
gfx_v9_4_3.c
335 int i, num_xcc; in gfx_v9_4_3_set_kiq_pm4_funcs() local
337 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_set_kiq_pm4_funcs()
338 for (i = 0; i < num_xcc; i++) in gfx_v9_4_3_set_kiq_pm4_funcs()
344 int i, num_xcc, dev_inst; in gfx_v9_4_3_init_golden_registers() local
346 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_init_golden_registers()
347 for (i = 0; i < num_xcc; i++) { in gfx_v9_4_3_init_golden_registers()
621 int r, i, num_xcc; in gfx_v9_4_3_mec_init() local
630 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gfx_v9_4_3_mec_init()
631 for (i = 0; i < num_xcc; i++) in gfx_v9_4_3_mec_init()
638 adev->gfx.num_compute_rings * num_xcc * GFX9_MEC_HPD_SIZE; in gfx_v9_4_3_mec_init()
[all …]
amdgpu_gfx.c
218 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_compute_queue_acquire() local
223 for (j = 0; j < num_xcc; j++) { in amdgpu_gfx_compute_queue_acquire()
235 for (j = 0; j < num_xcc; j++) { in amdgpu_gfx_compute_queue_acquire()
241 for (j = 0; j < num_xcc; j++) { in amdgpu_gfx_compute_queue_acquire()
1002 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_gfx_ras_error_func() local
1003 uint32_t xcc_mask = GENMASK(num_xcc - 1, 0); in amdgpu_gfx_ras_error_func()
1328 int ret = 0, num_xcc; in amdgpu_gfx_set_compute_partition() local
1330 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in amdgpu_gfx_set_compute_partition()
1331 if (num_xcc % 2 != 0) in amdgpu_gfx_set_compute_partition()
1341 if (num_xcc%4) in amdgpu_gfx_set_compute_partition()
[all …]
gmc_v9_0.c
1863 int num_xcc, xcc_id; in gmc_v9_0_init_acpi_mem_ranges() local
1866 num_xcc = NUM_XCC(adev->gfx.xcc_mask); in gmc_v9_0_init_acpi_mem_ranges()
1867 xcc_mask = (1U << num_xcc) - 1; in gmc_v9_0_init_acpi_mem_ranges()
amdgpu_ttm.c
862 int num_xcc = max(1U, adev->gfx.num_xcc_per_xcp); in amdgpu_ttm_gart_bind_gfx9_mqd() local
868 do_div(pages_per_xcc, num_xcc); in amdgpu_ttm_gart_bind_gfx9_mqd()
870 for (i = 0, page_idx = 0; i < num_xcc; i++, page_idx += pages_per_xcc) { in amdgpu_ttm_gart_bind_gfx9_mqd()
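The amdgpu_ttm.c hit splits the MQD backing pages evenly across XCCs. A rough sketch of that loop follows, with do_div() replaced by plain 64-bit division and made-up page counts; it is illustrative only, not the driver's code.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t total_pages = 64;	/* hypothetical MQD buffer size, in GART pages */
	unsigned int num_xcc = 4;	/* max(1, num_xcc_per_xcp) in the snippet above */
	uint64_t pages_per_xcc = total_pages / num_xcc;
	uint64_t page_idx;
	unsigned int i;

	/* Each XCC gets a contiguous, equally sized slice of the pages. */
	for (i = 0, page_idx = 0; i < num_xcc; i++, page_idx += pages_per_xcc)
		printf("xcc %u: pages [%llu, %llu)\n", i,
		       (unsigned long long)page_idx,
		       (unsigned long long)(page_idx + pages_per_xcc));
	return 0;
}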
amdgpu_ras.c
359 int num_xcc = adev->gfx.xcc_mask ? NUM_XCC(adev->gfx.xcc_mask) : 1; in amdgpu_ras_instance_mask_check() local
363 if (num_xcc <= 1 && inst_mask) { in amdgpu_ras_instance_mask_check()
374 mask = GENMASK(num_xcc - 1, 0); in amdgpu_ras_instance_mask_check()
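Both the gmc_v9_0.c and amdgpu_ras.c hits build an "all XCC instances" mask from num_xcc: GENMASK(num_xcc - 1, 0) and (1U << num_xcc) - 1 produce the same value. A small userspace stand-in (not the kernel macro):

#include <stdio.h>

/* Equivalent of GENMASK(num_xcc - 1, 0) for num_xcc in [1, 31]. */
static unsigned int all_xcc_mask(int num_xcc)
{
	return (1U << num_xcc) - 1;
}

int main(void)
{
	printf("num_xcc = 4 -> mask = 0x%x\n", all_xcc_mask(4));	/* 0xf */
	printf("num_xcc = 8 -> mask = 0x%x\n", all_xcc_mask(8));	/* 0xff */
	return 0;
}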
/linux-6.12.1/include/uapi/linux/
kfd_ioctl.h
142 __u32 num_xcc; member
/linux-6.12.1/drivers/gpu/drm/amd/amdkfd/
kfd_topology.c
1689 int num_xcc = NUM_XCC(knode->xcc_mask); in fill_in_l2_l3_pcache() local
1695 end = start + num_xcc; in fill_in_l2_l3_pcache()
kfd_debug.c
1081 device_info.num_xcc = NUM_XCC(pdd->dev->xcc_mask); in kfd_dbg_trap_device_snapshot()