Lines Matching refs:pool
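The matches below all touch the same handful of struct panthor_heap_pool members, so the struct's shape can be read off the listing itself. The following is only a reconstruction inferred from those accesses, not the driver's actual definition; field order, exact types and any members the listing never references are unknown:

    /* Inferred from the member accesses in the listing below (assumption). */
    struct panthor_heap_pool {
            struct kref refcount;             /* panthor_heap_pool_get()/_put() */
            struct panthor_device *ptdev;     /* passed to panthor_heap_ctx_stride() */
            struct panthor_vm *vm;            /* VM backing the heap chunks; cleared on destroy */
            struct rw_semaphore lock;         /* protects xa and vm */
            struct xarray xa;                 /* heap id -> heap, XA_FLAGS_ALLOC */
            struct panthor_kernel_bo *gpu_contexts; /* per-heap GPU contexts, one stride each */
    };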
110 static int panthor_get_heap_ctx_offset(struct panthor_heap_pool *pool, int id) in panthor_get_heap_ctx_offset() argument
112 return panthor_heap_ctx_stride(pool->ptdev) * id; in panthor_get_heap_ctx_offset()
115 static void *panthor_get_heap_ctx(struct panthor_heap_pool *pool, int id) in panthor_get_heap_ctx() argument
117 return pool->gpu_contexts->kmap + in panthor_get_heap_ctx()
118 panthor_get_heap_ctx_offset(pool, id); in panthor_get_heap_ctx()
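These two helpers pack heap contexts back to back inside pool->gpu_contexts, one panthor_heap_ctx_stride() per id. The same offset also gives the GPU-visible address of a context, as panthor_heap_create() does at lines 332-333. A minimal usage sketch (the surrounding declarations are glue, not part of the listing):

    /* Both views of heap context 'id' derive from the same offset. */
    int offset = panthor_get_heap_ctx_offset(pool, id);
    void *cpu_ctx = panthor_get_heap_ctx(pool, id);              /* kmap + offset */
    u64 gpu_ctx_va = panthor_kernel_bo_gpuva(pool->gpu_contexts) + offset;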
221 panthor_heap_destroy_locked(struct panthor_heap_pool *pool, u32 handle) in panthor_heap_destroy_locked() argument
225 heap = xa_erase(&pool->xa, handle); in panthor_heap_destroy_locked()
229 panthor_free_heap_chunks(pool->vm, heap); in panthor_heap_destroy_locked()
240 int panthor_heap_destroy(struct panthor_heap_pool *pool, u32 handle) in panthor_heap_destroy() argument
244 down_write(&pool->lock); in panthor_heap_destroy()
245 ret = panthor_heap_destroy_locked(pool, handle); in panthor_heap_destroy()
246 up_write(&pool->lock); in panthor_heap_destroy()
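The destroy path splits into a locked helper and a public wrapper: panthor_heap_destroy() only takes pool->lock for writing and delegates to panthor_heap_destroy_locked(), which removes the heap from the xarray and releases its chunks against pool->vm. A sketch of that pattern filled in from the fragments above; the -EINVAL return and the final kfree() are assumptions, as they are not part of the listing:

    static int panthor_heap_destroy_locked(struct panthor_heap_pool *pool, u32 handle)
    {
            struct panthor_heap *heap;

            /* Drop the id -> heap mapping; an unknown handle is a caller error (assumed). */
            heap = xa_erase(&pool->xa, handle);
            if (!heap)
                    return -EINVAL;

            /* Give the heap's chunks back to the pool's VM, then free the bookkeeping. */
            panthor_free_heap_chunks(pool->vm, heap);
            kfree(heap);
            return 0;
    }

    int panthor_heap_destroy(struct panthor_heap_pool *pool, u32 handle)
    {
            int ret;

            down_write(&pool->lock);
            ret = panthor_heap_destroy_locked(pool, handle);
            up_write(&pool->lock);
            return ret;
    }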
267 int panthor_heap_create(struct panthor_heap_pool *pool, in panthor_heap_create() argument
291 down_read(&pool->lock); in panthor_heap_create()
292 vm = panthor_vm_get(pool->vm); in panthor_heap_create()
293 up_read(&pool->lock); in panthor_heap_create()
311 ret = panthor_alloc_heap_chunks(pool->ptdev, vm, heap, in panthor_heap_create()
321 down_write(&pool->lock); in panthor_heap_create()
323 if (!pool->vm) { in panthor_heap_create()
326 ret = xa_alloc(&pool->xa, &id, heap, in panthor_heap_create()
329 void *gpu_ctx = panthor_get_heap_ctx(pool, id); in panthor_heap_create()
331 memset(gpu_ctx, 0, panthor_heap_ctx_stride(pool->ptdev)); in panthor_heap_create()
332 *heap_ctx_gpu_va = panthor_kernel_bo_gpuva(pool->gpu_contexts) + in panthor_heap_create()
333 panthor_get_heap_ctx_offset(pool, id); in panthor_heap_create()
336 up_write(&pool->lock); in panthor_heap_create()
345 panthor_free_heap_chunks(pool->vm, heap); in panthor_heap_create()
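panthor_heap_create() shows the pool's locking discipline: pin the VM under the read lock, allocate the initial chunks with no pool lock held, then take the write lock only to publish the heap in the xarray and initialize its context slot; if the pool was destroyed in the meantime (pool->vm cleared), or anything later fails, the chunks are released again. A condensed sketch of that flow; the error codes, the xa_alloc() limit/gfp arguments and the chunk-allocation arguments are elided in the listing and assumed here:

    /* 1. Pin the VM while only holding the read lock. */
    down_read(&pool->lock);
    vm = panthor_vm_get(pool->vm);
    up_read(&pool->lock);

    /* 2. Allocate the initial chunks without pool->lock held. */
    ret = panthor_alloc_heap_chunks(pool->ptdev, vm, heap,
                                    initial_chunk_count /* argument assumed */);

    /* 3. Publish the heap under the write lock, unless the pool went away. */
    if (!ret) {
            down_write(&pool->lock);
            if (!pool->vm) {
                    ret = -EINVAL;          /* error code assumed */
            } else {
                    ret = xa_alloc(&pool->xa, &id, heap,
                                   xa_limit_32b, GFP_KERNEL);   /* limit/gfp assumed */
                    if (!ret) {
                            void *gpu_ctx = panthor_get_heap_ctx(pool, id);

                            /* Fresh context slot, plus its GPU address for the caller. */
                            memset(gpu_ctx, 0, panthor_heap_ctx_stride(pool->ptdev));
                            *heap_ctx_gpu_va = panthor_kernel_bo_gpuva(pool->gpu_contexts) +
                                               panthor_get_heap_ctx_offset(pool, id);
                    }
            }
            up_write(&pool->lock);
    }

    /* 4. On any failure, give the chunks back. */
    if (ret)
            panthor_free_heap_chunks(pool->vm, heap);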
364 int panthor_heap_return_chunk(struct panthor_heap_pool *pool, in panthor_heap_return_chunk() argument
368 u64 offset = heap_gpu_va - panthor_kernel_bo_gpuva(pool->gpu_contexts); in panthor_heap_return_chunk()
369 u32 heap_id = (u32)offset / panthor_heap_ctx_stride(pool->ptdev); in panthor_heap_return_chunk()
377 down_read(&pool->lock); in panthor_heap_return_chunk()
378 heap = xa_load(&pool->xa, heap_id); in panthor_heap_return_chunk()
406 up_read(&pool->lock); in panthor_heap_return_chunk()
425 int panthor_heap_grow(struct panthor_heap_pool *pool, in panthor_heap_grow() argument
431 u64 offset = heap_gpu_va - panthor_kernel_bo_gpuva(pool->gpu_contexts); in panthor_heap_grow()
432 u32 heap_id = (u32)offset / panthor_heap_ctx_stride(pool->ptdev); in panthor_heap_grow()
440 down_read(&pool->lock); in panthor_heap_grow()
441 heap = xa_load(&pool->xa, heap_id); in panthor_heap_grow()
469 ret = panthor_alloc_heap_chunk(pool->ptdev, pool->vm, heap, false); in panthor_heap_grow()
481 up_read(&pool->lock); in panthor_heap_grow()
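Both panthor_heap_return_chunk() and panthor_heap_grow() are called with the heap's context GPU address rather than its id, so they invert the computation done by panthor_get_heap_ctx_offset(): subtract the base GPU VA of pool->gpu_contexts, divide by the per-context stride, then look the heap up under the read lock. A small sketch of that shared lookup, using the grow case as the body; the error handling is an assumption:

    /* Recover the heap id from the context GPU VA handed back to the driver. */
    u64 offset = heap_gpu_va - panthor_kernel_bo_gpuva(pool->gpu_contexts);
    u32 heap_id = (u32)offset / panthor_heap_ctx_stride(pool->ptdev);

    down_read(&pool->lock);
    heap = xa_load(&pool->xa, heap_id);
    if (!heap)
            ret = -EINVAL;          /* unknown context address; error code assumed */
    else
            ret = panthor_alloc_heap_chunk(pool->ptdev, pool->vm, heap, false); /* grow case */
    up_read(&pool->lock);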
487 struct panthor_heap_pool *pool = in panthor_heap_pool_release() local
490 xa_destroy(&pool->xa); in panthor_heap_pool_release()
491 kfree(pool); in panthor_heap_pool_release()
498 void panthor_heap_pool_put(struct panthor_heap_pool *pool) in panthor_heap_pool_put() argument
500 if (pool) in panthor_heap_pool_put()
501 kref_put(&pool->refcount, panthor_heap_pool_release); in panthor_heap_pool_put()
511 panthor_heap_pool_get(struct panthor_heap_pool *pool) in panthor_heap_pool_get() argument
513 if (pool) in panthor_heap_pool_get()
514 kref_get(&pool->refcount); in panthor_heap_pool_get()
516 return pool; in panthor_heap_pool_get()
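Pool lifetime is reference counted: panthor_heap_pool_get()/_put() wrap kref_get()/kref_put(), and the release callback tears down the xarray and frees the structure once the last reference drops. A sketch of the release side plus a typical caller pattern; the container_of() is implied by the local pool variable at line 487, and the caller shown is hypothetical:

    static void panthor_heap_pool_release(struct kref *refcount)
    {
            struct panthor_heap_pool *pool =
                    container_of(refcount, struct panthor_heap_pool, refcount);

            /* Every heap is gone by now, so the xarray is already empty. */
            xa_destroy(&pool->xa);
            kfree(pool);
    }

    /* Hypothetical user of the pool API: */
    pool = panthor_heap_pool_get(some_pool);   /* 'some_pool' is a placeholder */
    /* ... use the pool ... */
    panthor_heap_pool_put(pool);               /* may invoke the release above */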
534 struct panthor_heap_pool *pool; in panthor_heap_pool_create() local
537 pool = kzalloc(sizeof(*pool), GFP_KERNEL); in panthor_heap_pool_create()
538 if (!pool) in panthor_heap_pool_create()
544 pool->vm = vm; in panthor_heap_pool_create()
545 pool->ptdev = ptdev; in panthor_heap_pool_create()
546 init_rwsem(&pool->lock); in panthor_heap_pool_create()
547 xa_init_flags(&pool->xa, XA_FLAGS_ALLOC); in panthor_heap_pool_create()
548 kref_init(&pool->refcount); in panthor_heap_pool_create()
550 pool->gpu_contexts = panthor_kernel_bo_create(ptdev, vm, bosize, in panthor_heap_pool_create()
554 if (IS_ERR(pool->gpu_contexts)) { in panthor_heap_pool_create()
555 ret = PTR_ERR(pool->gpu_contexts); in panthor_heap_pool_create()
559 ret = panthor_kernel_bo_vmap(pool->gpu_contexts); in panthor_heap_pool_create()
563 return pool; in panthor_heap_pool_create()
566 panthor_heap_pool_destroy(pool); in panthor_heap_pool_create()
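panthor_heap_pool_create() relies on a single unwind path: once the kref is initialized, any failure (context BO creation or vmap) simply calls panthor_heap_pool_destroy(pool), which is written to cope with a partially built pool (see the IS_ERR_OR_NULL() check at line 597). A reduced sketch of that pattern; the BO creation flags and the meaning of bosize are elided in the listing, so the allocation step is hidden behind a hypothetical helper, and the return convention is assumed:

    pool = kzalloc(sizeof(*pool), GFP_KERNEL);
    if (!pool)
            return ERR_PTR(-ENOMEM);                /* return style assumed */

    pool->vm = vm;
    pool->ptdev = ptdev;
    init_rwsem(&pool->lock);
    xa_init_flags(&pool->xa, XA_FLAGS_ALLOC);       /* ids handed out by xa_alloc() */
    kref_init(&pool->refcount);

    /* Hypothetical stand-in for panthor_kernel_bo_create(ptdev, vm, bosize, ...)
     * followed by panthor_kernel_bo_vmap(); the real arguments are not in the listing.
     */
    ret = create_and_map_gpu_contexts(pool, ptdev, vm, bosize);
    if (ret) {
            /* Destroy tolerates a half-built pool, so no piecemeal unwinding here. */
            panthor_heap_pool_destroy(pool);
            return ERR_PTR(ret);
    }

    return pool;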
585 void panthor_heap_pool_destroy(struct panthor_heap_pool *pool) in panthor_heap_pool_destroy() argument
590 if (!pool) in panthor_heap_pool_destroy()
593 down_write(&pool->lock); in panthor_heap_pool_destroy()
594 xa_for_each(&pool->xa, i, heap) in panthor_heap_pool_destroy()
595 drm_WARN_ON(&pool->ptdev->base, panthor_heap_destroy_locked(pool, i)); in panthor_heap_pool_destroy()
597 if (!IS_ERR_OR_NULL(pool->gpu_contexts)) in panthor_heap_pool_destroy()
598 panthor_kernel_bo_destroy(pool->gpu_contexts); in panthor_heap_pool_destroy()
601 pool->vm = NULL; in panthor_heap_pool_destroy()
602 up_write(&pool->lock); in panthor_heap_pool_destroy()
604 panthor_heap_pool_put(pool); in panthor_heap_pool_destroy()
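panthor_heap_pool_destroy() and panthor_heap_create() cooperate through pool->vm: destroy tears down every remaining heap and the context BO under the write lock and then clears pool->vm, so a concurrent create that re-takes the write lock after allocating its chunks can detect the dead pool (the !pool->vm check at line 323) and back out. The final panthor_heap_pool_put() drops the reference the pool was created with; anyone holding their own reference keeps the memory valid until they call put. A compressed sketch of the destroy side, with the loop variable declarations assumed:

    void panthor_heap_pool_destroy(struct panthor_heap_pool *pool)
    {
            struct panthor_heap *heap;
            unsigned long i;

            if (!pool)
                    return;

            down_write(&pool->lock);
            /* Destroy any heap that was never explicitly destroyed. */
            xa_for_each(&pool->xa, i, heap)
                    drm_WARN_ON(&pool->ptdev->base,
                                panthor_heap_destroy_locked(pool, i));

            /* The BO may be an ERR_PTR if pool creation failed half-way through. */
            if (!IS_ERR_OR_NULL(pool->gpu_contexts))
                    panthor_kernel_bo_destroy(pool->gpu_contexts);

            /* Mark the pool dead for racing panthor_heap_create() callers. */
            pool->vm = NULL;
            up_write(&pool->lock);

            /* Drop the creation reference; release frees the pool at zero. */
            panthor_heap_pool_put(pool);
    }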