Home
last modified time | relevance | path

Searched refs:vpu_addr (Results 1 – 12 of 12) sorted by relevance

/linux-6.12.1/drivers/accel/ivpu/
ivpu_mmu_context.c:232 u64 vpu_addr, dma_addr_t dma_addr, u64 prot) in ivpu_mmu_context_map_page() argument
235 int pgd_idx = FIELD_GET(IVPU_MMU_PGD_INDEX_MASK, vpu_addr); in ivpu_mmu_context_map_page()
236 int pud_idx = FIELD_GET(IVPU_MMU_PUD_INDEX_MASK, vpu_addr); in ivpu_mmu_context_map_page()
237 int pmd_idx = FIELD_GET(IVPU_MMU_PMD_INDEX_MASK, vpu_addr); in ivpu_mmu_context_map_page()
238 int pte_idx = FIELD_GET(IVPU_MMU_PTE_INDEX_MASK, vpu_addr); in ivpu_mmu_context_map_page()
260 ivpu_mmu_context_map_cont_64k(struct ivpu_device *vdev, struct ivpu_mmu_context *ctx, u64 vpu_addr, in ivpu_mmu_context_map_cont_64k() argument
265 drm_WARN_ON(&vdev->drm, !IS_ALIGNED(vpu_addr, size)); in ivpu_mmu_context_map_cont_64k()
271 int ret = ivpu_mmu_context_map_page(vdev, ctx, vpu_addr, dma_addr, prot); in ivpu_mmu_context_map_cont_64k()
277 vpu_addr += IVPU_MMU_PAGE_SIZE; in ivpu_mmu_context_map_cont_64k()
284 static void ivpu_mmu_context_unmap_page(struct ivpu_mmu_context *ctx, u64 vpu_addr) in ivpu_mmu_context_unmap_page() argument
[all …]
ivpu_gem.h:21 u64 vpu_addr; member
76 static inline void *ivpu_to_cpu_addr(struct ivpu_bo *bo, u32 vpu_addr) in ivpu_to_cpu_addr() argument
78 if (vpu_addr < bo->vpu_addr) in ivpu_to_cpu_addr()
81 if (vpu_addr >= (bo->vpu_addr + ivpu_bo_size(bo))) in ivpu_to_cpu_addr()
84 return ivpu_bo_vaddr(bo) + (vpu_addr - bo->vpu_addr); in ivpu_to_cpu_addr()
95 return bo->vpu_addr + (cpu_addr - ivpu_bo_vaddr(bo)); in cpu_to_vpu_addr()
ivpu_ipc.c:27 struct ivpu_ipc_hdr *ipc_hdr, u32 vpu_addr) in ivpu_ipc_msg_dump() argument
31 c, vpu_addr, ipc_hdr->data_addr, ipc_hdr->data_size, ipc_hdr->channel, in ivpu_ipc_msg_dump()
36 struct vpu_jsm_msg *jsm_msg, u32 vpu_addr) in ivpu_jsm_msg_dump() argument
42 c, vpu_addr, ivpu_jsm_msg_type_to_str(jsm_msg->type), in ivpu_jsm_msg_dump()
122 static void ivpu_ipc_tx_release(struct ivpu_device *vdev, u32 vpu_addr) in ivpu_ipc_tx_release() argument
126 if (vpu_addr) in ivpu_ipc_tx_release()
127 gen_pool_free(ipc->mm_tx, vpu_addr, sizeof(struct ivpu_ipc_tx_buf)); in ivpu_ipc_tx_release()
130 static void ivpu_ipc_tx(struct ivpu_device *vdev, u32 vpu_addr) in ivpu_ipc_tx() argument
132 ivpu_hw_ipc_tx_set(vdev, vpu_addr); in ivpu_ipc_tx()
388 u32 vpu_addr; in ivpu_ipc_irq_handler() local
[all …]
ivpu_gem.c:29 action, bo, bo->vpu_addr, ivpu_bo_size(bo), bo->ctx ? bo->ctx->id : 0, in ivpu_dbg_bo()
60 ret = ivpu_mmu_context_map_sgt(vdev, bo->ctx, bo->vpu_addr, sgt, in ivpu_bo_pin()
90 bo->vpu_addr = bo->mm_node.start; in ivpu_bo_alloc_vpu_addr()
112 drm_WARN_ON(&vdev->drm, !bo->vpu_addr); in ivpu_bo_unbind_locked()
114 ivpu_mmu_context_unmap_sgt(vdev, bo->ctx, bo->vpu_addr, bo->base.sgt); in ivpu_bo_unbind_locked()
283 args->vpu_addr = bo->vpu_addr; in ivpu_bo_create_ioctl()
370 args->vpu_addr = bo->vpu_addr; in ivpu_bo_info_ioctl()
409 bo, bo->ctx->id, bo->vpu_addr, bo->base.base.size, in ivpu_bo_print_info()
ivpu_mmu_context.h:46 u64 vpu_addr, struct sg_table *sgt, bool llc_coherent);
48 u64 vpu_addr, struct sg_table *sgt);
50 u64 vpu_addr, size_t size);
ivpu_hw.h:141 static inline void ivpu_hw_ipc_tx_set(struct ivpu_device *vdev, u32 vpu_addr) in ivpu_hw_ipc_tx_set() argument
143 ivpu_hw_ip_ipc_tx_set(vdev, vpu_addr); in ivpu_hw_ipc_tx_set()
ivpu_ms.c:78 ms->active_buff_vpu_addr = ms->bo->vpu_addr; in ivpu_ms_start_ioctl()
79 ms->inactive_buff_vpu_addr = ms->bo->vpu_addr + ms->buff_size; in ivpu_ms_start_ioctl()
261 ret = ivpu_jsm_metric_streamer_info(vdev, args->metric_group_mask, bo->vpu_addr, in ivpu_ms_get_info_ioctl()
ivpu_job.c:135 cmdq->mem->vpu_addr, ivpu_bo_size(cmdq->mem)); in ivpu_hws_cmdq_init()
154 cmdq->mem->vpu_addr, ivpu_bo_size(cmdq->mem)); in ivpu_register_db()
157 cmdq->mem->vpu_addr, ivpu_bo_size(cmdq->mem)); in ivpu_register_db()
366 entry->primary_preempt_buf_addr = cmdq->primary_preempt_buf->vpu_addr; in ivpu_cmdq_push_job()
368 entry->secondary_preempt_buf_addr = cmdq->secondary_preempt_buf->vpu_addr; in ivpu_cmdq_push_job()
628 job->cmd_buf_vpu_addr = bo->vpu_addr + commands_offset; in ivpu_job_prepare_bos_for_submit()
ivpu_fw.c:564 boot_params->ipc_header_area_start = ipc_mem_rx->vpu_addr; in ivpu_fw_boot_params_setup()
567 boot_params->ipc_payload_area_start = ipc_mem_rx->vpu_addr + ivpu_bo_size(ipc_mem_rx) / 2; in ivpu_fw_boot_params_setup()
582 boot_params->shave_nn_fw_base = vdev->fw->mem_shave_nn->vpu_addr; in ivpu_fw_boot_params_setup()
599 boot_params->crit_tracing_buff_addr = vdev->fw->mem_log_crit->vpu_addr; in ivpu_fw_boot_params_setup()
601 boot_params->verbose_tracing_buff_addr = vdev->fw->mem_log_verb->vpu_addr; in ivpu_fw_boot_params_setup()
ivpu_hw_ip.h:29 void ivpu_hw_ip_ipc_tx_set(struct ivpu_device *vdev, u32 vpu_addr);
ivpu_hw_ip.c:1171 void ivpu_hw_ip_ipc_tx_set(struct ivpu_device *vdev, u32 vpu_addr) in ivpu_hw_ip_ipc_tx_set() argument
1174 REGV_WR32(VPU_37XX_CPU_SS_TIM_IPC_FIFO, vpu_addr); in ivpu_hw_ip_ipc_tx_set()
1176 REGV_WR32(VPU_40XX_CPU_SS_TIM_IPC_FIFO, vpu_addr); in ivpu_hw_ip_ipc_tx_set()
/linux-6.12.1/include/uapi/drm/
ivpu_accel.h:235 __u64 vpu_addr; member
249 __u64 vpu_addr; member