/linux-6.12.1/drivers/gpu/drm/radeon/
si_dma.c
   78  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in si_dma_vm_copy_pages()
   80  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pages()
   81  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in si_dma_vm_copy_pages()
   82  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pages()
   83  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in si_dma_vm_copy_pages()
  119  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);  in si_dma_vm_write_pages()
  120  ib->ptr[ib->length_dw++] = pe;  in si_dma_vm_write_pages()
  121  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_write_pages()
  132  ib->ptr[ib->length_dw++] = value;  in si_dma_vm_write_pages()
  133  ib->ptr[ib->length_dw++] = upper_32_bits(value);  in si_dma_vm_write_pages()
  [all …]
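Every hit above follows the same idiom: length_dw is a running index into the IB's dword array, post-incremented as each packet word is stored. Below is a minimal user-space sketch of the 5-dword SI-style copy packet; sketch_ib, SKETCH_DMA_PACKET and its bit layout are placeholders for the driver's real radeon_ib and DMA_PACKET() definitions, and the field positions are illustrative only.

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the driver's indirect buffer (IB): a dword array plus a
 * fill index.  The real radeon_ib carries much more state. */
struct sketch_ib {
    uint32_t ptr[64];
    uint32_t length_dw;   /* number of dwords written so far */
};

/* Rough approximation of the SI DMA_PACKET() header: opcode in the top
 * nibble, count in the low bits.  Field positions are illustrative. */
#define SKETCH_DMA_PACKET(op, count)  (((uint32_t)(op) << 28) | ((count) & 0xfffff))
#define SKETCH_DMA_COPY  0x3

/* Emit one "copy PTEs" packet the way si_dma_vm_copy_pages() does:
 * header, low 32 bits of dst and src, then the upper address bits
 * masked to 8 bits because SI DMA uses 40-bit addresses. */
static void sketch_copy_pte_packet(struct sketch_ib *ib, uint64_t pe,
                                   uint64_t src, unsigned int ndw)
{
    ib->ptr[ib->length_dw++] = SKETCH_DMA_PACKET(SKETCH_DMA_COPY, ndw);
    ib->ptr[ib->length_dw++] = (uint32_t)pe;
    ib->ptr[ib->length_dw++] = (uint32_t)src;
    ib->ptr[ib->length_dw++] = (uint32_t)(pe >> 32) & 0xff;
    ib->ptr[ib->length_dw++] = (uint32_t)(src >> 32) & 0xff;
}

int main(void)
{
    struct sketch_ib ib = { .length_dw = 0 };

    sketch_copy_pte_packet(&ib, 0x12345678000ULL, 0xabcd000ULL, 8);
    printf("packet uses %u dwords\n", ib.length_dw);   /* prints 5 */
    return 0;
}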
radeon_vce.c
  362  ib.length_dw = 0;  in radeon_vce_get_create_msg()
  363  ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */  in radeon_vce_get_create_msg()
  364  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */  in radeon_vce_get_create_msg()
  365  ib.ptr[ib.length_dw++] = cpu_to_le32(handle);  in radeon_vce_get_create_msg()
  367  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */  in radeon_vce_get_create_msg()
  368  ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */  in radeon_vce_get_create_msg()
  369  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000);  in radeon_vce_get_create_msg()
  370  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000042);  in radeon_vce_get_create_msg()
  371  ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000a);  in radeon_vce_get_create_msg()
  372  ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001);  in radeon_vce_get_create_msg()
  [all …]
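Here the IB carries a VCE message rather than DMA packets: a series of length-prefixed command blocks, one dword at a time, each wrapped in cpu_to_le32() in the real driver. A sketch of how the message starts; the command ids and lengths are copied from the hits, plain stores stand in for cpu_to_le32() (assuming a little-endian host), and the buffer/handle handling is illustrative.

#include <stdint.h>

static uint32_t sketch_vce_begin_create_msg(uint32_t *msg, uint32_t handle)
{
    uint32_t length_dw = 0;

    msg[length_dw++] = 0x0000000c;  /* block length: session cmd */
    msg[length_dw++] = 0x00000001;  /* session command           */
    msg[length_dw++] = handle;      /* session handle            */

    msg[length_dw++] = 0x00000030;  /* block length: create cmd  */
    msg[length_dw++] = 0x01000001;  /* create command            */
    /* ... the remaining create parameters follow ... */

    return length_dw;               /* dwords written so far     */
}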
ni_dma.c
  145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
  326  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in cayman_dma_vm_copy_pages()
  328  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cayman_dma_vm_copy_pages()
  329  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cayman_dma_vm_copy_pages()
  330  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_copy_pages()
  331  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in cayman_dma_vm_copy_pages()
  367  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE,  in cayman_dma_vm_write_pages()
  369  ib->ptr[ib->length_dw++] = pe;  in cayman_dma_vm_write_pages()
  370  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_write_pages()
  381  ib->ptr[ib->length_dw++] = value;  in cayman_dma_vm_write_pages()
  [all …]
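The first hit here shows how the finished IB is launched rather than filled: the ring packet carries the IB size and the top byte of its GPU address packed into a single dword. A small sketch of just that packing (the surrounding INDIRECT_BUFFER packet is omitted and the helper name is made up):

#include <stdint.h>
#include <assert.h>

/* Mirror of the expression in cayman_dma_ring_ib_execute(): length in
 * bits 12 and up, bits 39:32 of the IB address in the low byte. */
static uint32_t sketch_pack_ib_size_dword(uint32_t length_dw, uint64_t gpu_addr)
{
    return (length_dw << 12) | ((uint32_t)(gpu_addr >> 32) & 0xFF);
}

int main(void)
{
    uint32_t dw = sketch_pack_ib_size_dword(64, 0x0000001234567000ULL);

    assert((dw >> 12) == 64);      /* IB length in dwords        */
    assert((dw & 0xFF) == 0x12);   /* bits 39:32 of the address  */
    return 0;
}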
radeon_cs.c
   95  p->nrelocs = chunk->length_dw / 4;  in radeon_cs_parser_relocs()
  317  p->chunks[i].length_dw = user_chunk.length_dw;  in radeon_cs_parser_init()
  324  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
  330  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
  336  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
  340  size = p->chunks[i].length_dw;  in radeon_cs_parser_init()
  361  if (p->chunks[i].length_dw > 1)  in radeon_cs_parser_init()
  363  if (p->chunks[i].length_dw > 2)  in radeon_cs_parser_init()
  563  if (parser->const_ib.length_dw) {  in radeon_cs_ib_vm_chunk()
  629  if (ib_chunk->length_dw > RADEON_IB_VM_MAX_SIZE) {  in radeon_cs_ib_fill()
  [all …]
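On the submission side, length_dw is untrusted user input, so the parser checks it before sizing any copies. A compact sketch of those checks; SKETCH_MAX_IB_DW and the struct are placeholders (the real limit is RADEON_IB_VM_MAX_SIZE and the real chunk carries more fields).

#include <stdint.h>
#include <errno.h>

#define SKETCH_MAX_IB_DW 4096u   /* made-up value for the sketch */

struct sketch_chunk {
    uint32_t length_dw;          /* size reported by user space, in dwords */
};

/* Reject obviously bad sizes before copying anything, roughly as
 * radeon_cs_parser_init() and radeon_cs_ib_fill() do. */
static int sketch_validate_ib_chunk(const struct sketch_chunk *chunk)
{
    if (chunk->length_dw == 0)
        return -EINVAL;          /* mandatory chunk must not be empty */
    if (chunk->length_dw > SKETCH_MAX_IB_DW)
        return -EINVAL;          /* larger than one IB may be         */
    return 0;
}

/* Everything copied from user space is then sized as length_dw * 4 bytes. */
static uint64_t sketch_chunk_bytes(const struct sketch_chunk *chunk)
{
    return (uint64_t)chunk->length_dw * sizeof(uint32_t);
}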
cik_sdma.c
  156  radeon_ring_write(ring, ib->length_dw);  in cik_sdma_ring_ib_execute()
  731  ib.length_dw = 5;  in cik_sdma_ib_test()
  812  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY,  in cik_sdma_vm_copy_pages()
  814  ib->ptr[ib->length_dw++] = bytes;  in cik_sdma_vm_copy_pages()
  815  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in cik_sdma_vm_copy_pages()
  816  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cik_sdma_vm_copy_pages()
  817  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in cik_sdma_vm_copy_pages()
  818  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cik_sdma_vm_copy_pages()
  819  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in cik_sdma_vm_copy_pages()
  855  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE,  in cik_sdma_vm_write_pages()
  [all …]
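CIK's SDMA engine takes full 64-bit addresses, so the equivalent copy packet grows to 7 dwords, as the cik_sdma_vm_copy_pages() hits show. A sketch of that layout, contrasting with the 5-dword SI packet above; the header constant only mimics SDMA_PACKET(), it is not the real encoding.

#include <stdint.h>

struct sketch_ib {
    uint32_t ptr[64];
    uint32_t length_dw;
};

#define SKETCH_SDMA_COPY_HDR 0x01000000u   /* illustrative header value */

/* Header, byte count, endian-swap flags, then full 64-bit source and
 * destination addresses. */
static void sketch_cik_copy_pte_packet(struct sketch_ib *ib, uint64_t pe,
                                       uint64_t src, uint32_t bytes)
{
    ib->ptr[ib->length_dw++] = SKETCH_SDMA_COPY_HDR;
    ib->ptr[ib->length_dw++] = bytes;
    ib->ptr[ib->length_dw++] = 0;               /* src/dst endian swap */
    ib->ptr[ib->length_dw++] = (uint32_t)src;
    ib->ptr[ib->length_dw++] = (uint32_t)(src >> 32);
    ib->ptr[ib->length_dw++] = (uint32_t)pe;
    ib->ptr[ib->length_dw++] = (uint32_t)(pe >> 32);
}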
radeon_uvd.c
  574  if (idx >= relocs_chunk->length_dw) {  in radeon_uvd_cs_reloc()
  576  idx, relocs_chunk->length_dw);  in radeon_uvd_cs_reloc()
  689  if (p->chunk_ib->length_dw % 16) {  in radeon_uvd_cs_parse()
  691  p->chunk_ib->length_dw);  in radeon_uvd_cs_parse()
  719  } while (p->idx < p->chunk_ib->length_dw);  in radeon_uvd_cs_parse()
  750  ib.length_dw = 16;  in radeon_uvd_send_msg()
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/
amdgpu_vce.c
  469  ib->length_dw = 0;  in amdgpu_vce_get_create_msg()
  470  ib->ptr[ib->length_dw++] = 0x0000000c; /* len */  in amdgpu_vce_get_create_msg()
  471  ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */  in amdgpu_vce_get_create_msg()
  472  ib->ptr[ib->length_dw++] = handle;  in amdgpu_vce_get_create_msg()
  475  ib->ptr[ib->length_dw++] = 0x00000040; /* len */  in amdgpu_vce_get_create_msg()
  477  ib->ptr[ib->length_dw++] = 0x00000030; /* len */  in amdgpu_vce_get_create_msg()
  478  ib->ptr[ib->length_dw++] = 0x01000001; /* create cmd */  in amdgpu_vce_get_create_msg()
  479  ib->ptr[ib->length_dw++] = 0x00000000;  in amdgpu_vce_get_create_msg()
  480  ib->ptr[ib->length_dw++] = 0x00000042;  in amdgpu_vce_get_create_msg()
  481  ib->ptr[ib->length_dw++] = 0x0000000a;  in amdgpu_vce_get_create_msg()
  [all …]
si_dma.c
   75  amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in si_dma_ring_emit_ib()
  270  ib.length_dw = 4;  in si_dma_ring_test_ib()
  312  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in si_dma_vm_copy_pte()
  314  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pte()
  315  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in si_dma_vm_copy_pte()
  316  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pte()
  317  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in si_dma_vm_copy_pte()
  337  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);  in si_dma_vm_write_pte()
  338  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_write_pte()
  339  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in si_dma_vm_write_pte()
  [all …]
sdma_v2_4.c
  260  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v2_4_ring_emit_ib()
  614  ib.length_dw = 8;  in sdma_v2_4_ring_test_ib()
  657  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v2_4_vm_copy_pte()
  659  ib->ptr[ib->length_dw++] = bytes;  in sdma_v2_4_vm_copy_pte()
  660  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v2_4_vm_copy_pte()
  661  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v2_4_vm_copy_pte()
  662  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v2_4_vm_copy_pte()
  663  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v2_4_vm_copy_pte()
  664  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v2_4_vm_copy_pte()
  684  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v2_4_vm_write_pte()
  [all …]
cik_sdma.c
  235  amdgpu_ring_write(ring, ib->length_dw);  in cik_sdma_ring_emit_ib()
  680  ib.length_dw = 5;  in cik_sdma_ring_test_ib()
  722  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY,  in cik_sdma_vm_copy_pte()
  724  ib->ptr[ib->length_dw++] = bytes;  in cik_sdma_vm_copy_pte()
  725  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in cik_sdma_vm_copy_pte()
  726  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cik_sdma_vm_copy_pte()
  727  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in cik_sdma_vm_copy_pte()
  728  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cik_sdma_vm_copy_pte()
  729  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in cik_sdma_vm_copy_pte()
  749  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE,  in cik_sdma_vm_write_pte()
  [all …]
amdgpu_vcn.c
  571  ib->length_dw = 16;  in amdgpu_vcn_dec_send_msg()
  691  ib->ptr[ib->length_dw++] = 0x00000010; /* single queue checksum */  in amdgpu_vcn_unified_ring_ib_header()
  692  ib->ptr[ib->length_dw++] = 0x30000002;  in amdgpu_vcn_unified_ring_ib_header()
  693  ib_checksum = &ib->ptr[ib->length_dw++];  in amdgpu_vcn_unified_ring_ib_header()
  694  ib->ptr[ib->length_dw++] = ib_pack_in_dw;  in amdgpu_vcn_unified_ring_ib_header()
  696  ib->ptr[ib->length_dw++] = 0x00000010; /* engine info */  in amdgpu_vcn_unified_ring_ib_header()
  697  ib->ptr[ib->length_dw++] = 0x30000001;  in amdgpu_vcn_unified_ring_ib_header()
  698  ib->ptr[ib->length_dw++] = enc ? 0x2 : 0x3;  in amdgpu_vcn_unified_ring_ib_header()
  699  ib->ptr[ib->length_dw++] = ib_pack_in_dw * sizeof(uint32_t);  in amdgpu_vcn_unified_ring_ib_header()
  741  ib->length_dw = 0;  in amdgpu_vcn_dec_sw_send_msg()
  [all …]
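The amdgpu_vcn_unified_ring_ib_header() hits show a slightly different trick: one dword is reserved by remembering its address (ib_checksum) so it can be patched after the rest of the IB is built. A sketch of that reserve-then-patch idiom; the two header constants come from the hits, while the checksum formula below is only a placeholder, not VCN's real one.

#include <stdint.h>

struct sketch_ib {
    uint32_t ptr[64];
    uint32_t length_dw;
};

/* Emit the "single queue checksum" block and return the address of the
 * reserved dword so the caller can patch it once the IB is complete. */
static uint32_t *sketch_emit_unified_header(struct sketch_ib *ib,
                                            uint32_t ib_pack_in_dw)
{
    uint32_t *checksum_slot;

    ib->ptr[ib->length_dw++] = 0x00000010;   /* single queue checksum */
    ib->ptr[ib->length_dw++] = 0x30000002;
    checksum_slot = &ib->ptr[ib->length_dw];
    ib->ptr[ib->length_dw++] = 0;            /* patched later */
    ib->ptr[ib->length_dw++] = ib_pack_in_dw;

    return checksum_slot;
}

/* Fill in the reserved dword; a plain sum stands in for the real formula. */
static void sketch_patch_checksum(struct sketch_ib *ib, uint32_t *slot)
{
    uint32_t sum = 0;
    uint32_t i;

    for (i = 0; i < ib->length_dw; i++)
        sum += ib->ptr[i];
    *slot = sum;
}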
sdma_v3_0.c
  436  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v3_0_ring_emit_ib()
  888  ib.length_dw = 8;  in sdma_v3_0_ring_test_ib()
  930  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v3_0_vm_copy_pte()
  932  ib->ptr[ib->length_dw++] = bytes;  in sdma_v3_0_vm_copy_pte()
  933  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v3_0_vm_copy_pte()
  934  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v3_0_vm_copy_pte()
  935  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v3_0_vm_copy_pte()
  936  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v3_0_vm_copy_pte()
  937  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v3_0_vm_copy_pte()
  957  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v3_0_vm_write_pte()
  [all …]
sdma_v7_0.c
  316  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v7_0_ring_emit_ib()
 1036  ib.length_dw = 8;  in sdma_v7_0_ring_test_ib()
 1088  ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COPY) |  in sdma_v7_0_vm_copy_pte()
 1092  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v7_0_vm_copy_pte()
 1093  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v7_0_vm_copy_pte()
 1094  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v7_0_vm_copy_pte()
 1095  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v7_0_vm_copy_pte()
 1096  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v7_0_vm_copy_pte()
 1097  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v7_0_vm_copy_pte()
 1098  ib->ptr[ib->length_dw++] = 0;  in sdma_v7_0_vm_copy_pte()
  [all …]
sdma_v6_0.c
  284  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v6_0_ring_emit_ib()
 1017  ib.length_dw = 8;  in sdma_v6_0_ring_test_ib()
 1069  ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COPY) |  in sdma_v6_0_vm_copy_pte()
 1071  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v6_0_vm_copy_pte()
 1072  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v6_0_vm_copy_pte()
 1073  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v6_0_vm_copy_pte()
 1074  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v6_0_vm_copy_pte()
 1075  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v6_0_vm_copy_pte()
 1076  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v6_0_vm_copy_pte()
 1097  ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v6_0_vm_write_pte()
  [all …]
uvd_v6_0.c
  227  ib->length_dw = 0;  in uvd_v6_0_enc_get_create_msg()
  228  ib->ptr[ib->length_dw++] = 0x00000018;  in uvd_v6_0_enc_get_create_msg()
  229  ib->ptr[ib->length_dw++] = 0x00000001; /* session info */  in uvd_v6_0_enc_get_create_msg()
  230  ib->ptr[ib->length_dw++] = handle;  in uvd_v6_0_enc_get_create_msg()
  231  ib->ptr[ib->length_dw++] = 0x00010000;  in uvd_v6_0_enc_get_create_msg()
  232  ib->ptr[ib->length_dw++] = upper_32_bits(addr);  in uvd_v6_0_enc_get_create_msg()
  233  ib->ptr[ib->length_dw++] = addr;  in uvd_v6_0_enc_get_create_msg()
  235  ib->ptr[ib->length_dw++] = 0x00000014;  in uvd_v6_0_enc_get_create_msg()
  236  ib->ptr[ib->length_dw++] = 0x00000002; /* task info */  in uvd_v6_0_enc_get_create_msg()
  237  ib->ptr[ib->length_dw++] = 0x0000001c;  in uvd_v6_0_enc_get_create_msg()
  [all …]
sdma_v5_2.c
  299  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v5_2_ring_emit_ib()
  996  ib.length_dw = 8;  in sdma_v5_2_ring_test_ib()
 1048  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v5_2_vm_copy_pte()
 1050  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v5_2_vm_copy_pte()
 1051  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v5_2_vm_copy_pte()
 1052  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v5_2_vm_copy_pte()
 1053  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v5_2_vm_copy_pte()
 1054  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v5_2_vm_copy_pte()
 1055  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v5_2_vm_copy_pte()
 1076  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v5_2_vm_write_pte()
  [all …]
sdma_v5_0.c
  482  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v5_0_ring_emit_ib()
 1147  ib.length_dw = 8;  in sdma_v5_0_ring_test_ib()
 1199  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v5_0_vm_copy_pte()
 1201  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v5_0_vm_copy_pte()
 1202  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v5_0_vm_copy_pte()
 1203  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v5_0_vm_copy_pte()
 1204  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v5_0_vm_copy_pte()
 1205  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v5_0_vm_copy_pte()
 1206  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v5_0_vm_copy_pte()
 1227  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v5_0_vm_write_pte()
  [all …]
sdma_v4_4_2.c
  381  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v4_4_2_ring_emit_ib()
 1093  ib.length_dw = 8;  in sdma_v4_4_2_ring_test_ib()
 1137  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v4_4_2_vm_copy_pte()
 1139  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v4_4_2_vm_copy_pte()
 1140  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v4_4_2_vm_copy_pte()
 1141  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v4_4_2_vm_copy_pte()
 1142  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v4_4_2_vm_copy_pte()
 1143  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v4_4_2_vm_copy_pte()
 1144  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v4_4_2_vm_copy_pte()
 1165  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v4_4_2_vm_write_pte()
  [all …]
uvd_v7_0.c
  235  ib->length_dw = 0;  in uvd_v7_0_enc_get_create_msg()
  236  ib->ptr[ib->length_dw++] = 0x00000018;  in uvd_v7_0_enc_get_create_msg()
  237  ib->ptr[ib->length_dw++] = 0x00000001; /* session info */  in uvd_v7_0_enc_get_create_msg()
  238  ib->ptr[ib->length_dw++] = handle;  in uvd_v7_0_enc_get_create_msg()
  239  ib->ptr[ib->length_dw++] = 0x00000000;  in uvd_v7_0_enc_get_create_msg()
  240  ib->ptr[ib->length_dw++] = upper_32_bits(addr);  in uvd_v7_0_enc_get_create_msg()
  241  ib->ptr[ib->length_dw++] = addr;  in uvd_v7_0_enc_get_create_msg()
  243  ib->ptr[ib->length_dw++] = 0x00000014;  in uvd_v7_0_enc_get_create_msg()
  244  ib->ptr[ib->length_dw++] = 0x00000002; /* task info */  in uvd_v7_0_enc_get_create_msg()
  245  ib->ptr[ib->length_dw++] = 0x0000001c;  in uvd_v7_0_enc_get_create_msg()
  [all …]
sdma_v4_0.c
  822  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v4_0_ring_emit_ib()
 1548  ib.length_dw = 8;  in sdma_v4_0_ring_test_ib()
 1592  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v4_0_vm_copy_pte()
 1594  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v4_0_vm_copy_pte()
 1595  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v4_0_vm_copy_pte()
 1596  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v4_0_vm_copy_pte()
 1597  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v4_0_vm_copy_pte()
 1598  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v4_0_vm_copy_pte()
 1599  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v4_0_vm_copy_pte()
 1620  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v4_0_vm_write_pte()
  [all …]
amdgpu_vm_sdma.c
  115  WARN_ON(ib->length_dw == 0);  in amdgpu_vm_sdma_commit()
  121  WARN_ON(ib->length_dw > p->num_dw_left);  in amdgpu_vm_sdma_commit()
  245  ndw -= p->job->ibs->length_dw;  in amdgpu_vm_sdma_update()
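Here length_dw is checked against a pre-reserved dword budget rather than used to build packets: commit warns if the IB is empty or overran the reservation, and the update path subtracts what was consumed. A tiny sketch of that accounting (struct and names are illustrative):

#include <stdint.h>
#include <assert.h>

struct sketch_update_state {
    uint32_t ib_length_dw;   /* dwords already written into the IB        */
    uint32_t num_dw_left;    /* dwords still available in the reservation */
};

/* Commit-time sanity checks: an empty IB or one that overran the
 * reservation indicates a driver bug (WARN_ON in the kernel). */
static void sketch_commit_checks(const struct sketch_update_state *s)
{
    assert(s->ib_length_dw != 0);
    assert(s->ib_length_dw <= s->num_dw_left);
}

/* The update path tracks how much of the reservation remains. */
static uint32_t sketch_dwords_remaining(const struct sketch_update_state *s)
{
    return s->num_dw_left - s->ib_length_dw;
}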
gfx_v9_4_2.c
  377  ib->length_dw = 0;  in gfx_v9_4_2_run_shader()
  381  ib->ptr[ib->length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1);  in gfx_v9_4_2_run_shader()
  382  ib->ptr[ib->length_dw++] = SOC15_REG_ENTRY_OFFSET(init_regs[i])  in gfx_v9_4_2_run_shader()
  384  ib->ptr[ib->length_dw++] = init_regs[i].reg_value;  in gfx_v9_4_2_run_shader()
  389  ib->ptr[ib->length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2);  in gfx_v9_4_2_run_shader()
  390  ib->ptr[ib->length_dw++] = SOC15_REG_OFFSET(GC, 0, regCOMPUTE_PGM_LO)  in gfx_v9_4_2_run_shader()
  392  ib->ptr[ib->length_dw++] = lower_32_bits(gpu_addr);  in gfx_v9_4_2_run_shader()
  393  ib->ptr[ib->length_dw++] = upper_32_bits(gpu_addr);  in gfx_v9_4_2_run_shader()
  396  ib->ptr[ib->length_dw++] = PACKET3(PACKET3_SET_SH_REG, 3);  in gfx_v9_4_2_run_shader()
  397  ib->ptr[ib->length_dw++] = SOC15_REG_OFFSET(GC, 0, regCOMPUTE_USER_DATA_0)  in gfx_v9_4_2_run_shader()
  [all …]
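gfx_v9_4_2_run_shader() fills the IB with PACKET3 SET_SH_REG packets: a header carrying a dword count, a register offset relative to the SH-register window, then the values. The sketch below mirrors that shape; the PACKET3 encoding, opcode and window base are approximations and should be treated as illustrative, not the driver's real definitions.

#include <stdint.h>

struct sketch_ib {
    uint32_t ptr[256];
    uint32_t length_dw;
};

#define SKETCH_PACKET3(op, count)  ((3u << 30) | ((uint32_t)(op) << 8) | (count))
#define SKETCH_OP_SET_SH_REG       0x76
#define SKETCH_SH_REG_BASE         0x2c00   /* illustrative window start */

/* Program a 64-bit value (e.g. COMPUTE_PGM_LO/HI) with one SET_SH_REG
 * packet: header, window-relative register offset, low dword, high dword. */
static void sketch_set_sh_reg64(struct sketch_ib *ib, uint32_t reg,
                                uint64_t value)
{
    ib->ptr[ib->length_dw++] = SKETCH_PACKET3(SKETCH_OP_SET_SH_REG, 2);
    ib->ptr[ib->length_dw++] = reg - SKETCH_SH_REG_BASE;
    ib->ptr[ib->length_dw++] = (uint32_t)value;
    ib->ptr[ib->length_dw++] = (uint32_t)(value >> 32);
}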
gfx_v8_0.c
  896  ib.length_dw = 5;  in gfx_v8_0_ring_test_ib()
 1535  ib.length_dw = 0;  in gfx_v8_0_do_edc_gpr_workarounds()
 1540  ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 1);  in gfx_v8_0_do_edc_gpr_workarounds()
 1541  ib.ptr[ib.length_dw++] = vgpr_init_regs[i] - PACKET3_SET_SH_REG_START;  in gfx_v8_0_do_edc_gpr_workarounds()
 1542  ib.ptr[ib.length_dw++] = vgpr_init_regs[i + 1];  in gfx_v8_0_do_edc_gpr_workarounds()
 1546  ib.ptr[ib.length_dw++] = PACKET3(PACKET3_SET_SH_REG, 2);  in gfx_v8_0_do_edc_gpr_workarounds()
 1547  ib.ptr[ib.length_dw++] = mmCOMPUTE_PGM_LO - PACKET3_SET_SH_REG_START;  in gfx_v8_0_do_edc_gpr_workarounds()
 1548  ib.ptr[ib.length_dw++] = lower_32_bits(gpu_addr);  in gfx_v8_0_do_edc_gpr_workarounds()
 1549  ib.ptr[ib.length_dw++] = upper_32_bits(gpu_addr);  in gfx_v8_0_do_edc_gpr_workarounds()
 1552  ib.ptr[ib.length_dw++] = PACKET3(PACKET3_DISPATCH_DIRECT, 3);  in gfx_v8_0_do_edc_gpr_workarounds()
  [all …]
amdgpu_cs.c
  222  p->chunks[i].length_dw = user_chunk.length_dw;  in amdgpu_cs_pass1()
  224  size = p->chunks[i].length_dw;  in amdgpu_cs_pass1()
  381  ib->length_dw = chunk_ib->ib_bytes / 4;  in amdgpu_cs_p2_ib()
  394  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_dependencies()
  465  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_in()
  483  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_timeline_wait()
  503  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_out()
  537  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_timeline_signal()
 1045  if ((va_start + ib->length_dw * 4) >  in amdgpu_cs_patch_ibs()
 1059  memcpy(ib->ptr, kptr, ib->length_dw * 4);  in amdgpu_cs_patch_ibs()
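In amdgpu_cs.c the recurring operation is converting between dwords and bytes: an IB chunk is sized in bytes (ib_bytes / 4 dwords), dependency and syncobj chunks report length_dw and the parser derives how many fixed-size records they hold, and patching checks that the IB still fits its VA mapping. A sketch with a stand-in record type; the real one is drm_amdgpu_cs_chunk_dep and only its size matters here.

#include <stdint.h>
#include <stddef.h>

struct sketch_chunk_dep {          /* placeholder record, 16 bytes */
    uint32_t ip_type;
    uint32_t ip_instance;
    uint32_t ring;
    uint32_t handle;
};

/* An IB chunk reports its size in bytes; the driver stores it in dwords. */
static uint32_t sketch_ib_length_dw(uint32_t ib_bytes)
{
    return ib_bytes / 4;
}

/* Dependency-style chunks report length_dw; the parser derives how many
 * fixed-size records that is. */
static uint32_t sketch_num_deps(uint32_t chunk_length_dw)
{
    return (uint32_t)(chunk_length_dw * 4 / sizeof(struct sketch_chunk_dep));
}

/* Patching rejects an IB whose dwords would run past the end of its
 * GPU VA mapping. */
static int sketch_ib_fits_mapping(uint64_t va_start, uint32_t length_dw,
                                  uint64_t va_end)
{
    return (va_start + (uint64_t)length_dw * 4) <= va_end;
}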
/linux-6.12.1/drivers/net/ethernet/qlogic/qed/
qed_hw.c
  480  le16_to_cpu(p_command->length_dw),  in qed_dmae_post_command()
  495  le16_to_cpu(p_command->length_dw),  in qed_dmae_post_command()
  626  u32 length_dw)  in qed_dmae_execute_sub_operation()  argument
  644  length_dw * sizeof(u32));  in qed_dmae_execute_sub_operation()
  665  cmd->length_dw = cpu_to_le16((u16)length_dw);  in qed_dmae_execute_sub_operation()
  674  src_addr, dst_addr, length_dw);  in qed_dmae_execute_sub_operation()
  681  length_dw * sizeof(u32));  in qed_dmae_execute_sub_operation()
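Outside the GPU drivers the name means the same thing: in qed's DMAE engine, length_dw is the transfer size in 32-bit words, stored little-endian in the hardware command, and the byte count is always length_dw * sizeof(u32). A minimal sketch; the struct layout and helper names are illustrative only.

#include <stdint.h>
#include <string.h>

/* Stripped-down stand-in for the qed DMAE command; the real structure is
 * defined by the firmware interface and has many more fields. */
struct sketch_dmae_cmd {
    uint16_t length_dw;          /* little-endian on the device */
};

/* The kernel uses cpu_to_le16(); on a little-endian host that is a no-op,
 * which is all this sketch assumes. */
static uint16_t sketch_cpu_to_le16(uint16_t v)
{
    return v;
}

/* Size the command and stage the payload roughly the way
 * qed_dmae_execute_sub_operation() does: length_dw counts 32-bit words,
 * so the byte count is length_dw * sizeof(uint32_t). */
static void sketch_prepare_dmae(struct sketch_dmae_cmd *cmd, void *staging,
                                const void *src, uint32_t length_dw)
{
    cmd->length_dw = sketch_cpu_to_le16((uint16_t)length_dw);
    memcpy(staging, src, (size_t)length_dw * sizeof(uint32_t));
}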