Lines Matching full:chunk
24 * struct panthor_heap_chunk_header - Heap chunk header
28 * @next: Next heap chunk in the list.
42 /** @node: Used to insert the heap chunk in panthor_heap::chunks. */
45 /** @bo: Buffer object backing the heap chunk. */
59 /** @chunk_size: Size of each chunk. */
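Read together, the kernel-doc fragments above imply the following layout. This is a sketch reconstructed from the matched lines only; field types, ordering, and any members that did not match the search (such as the panthor_heap::chunks list head) are assumptions, not the verbatim source:

struct panthor_heap_chunk_header {
	/* Next heap chunk in the list; a GPU VA (see the
	 * GENMASK_ULL(63, 12) packing used by panthor_heap_grow() below).
	 */
	u64 next;
};

struct panthor_heap_chunk {
	/* Used to insert the heap chunk in panthor_heap::chunks. */
	struct list_head node;
	/* Buffer object backing the heap chunk. */
	struct panthor_kernel_bo *bo;
};

struct panthor_heap {
	/* Assumed list head anchoring panthor_heap_chunk::node. */
	struct list_head chunks;
	/* Size of each chunk. */
	u32 chunk_size;
};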
123 struct panthor_heap_chunk *chunk) in panthor_free_heap_chunk() argument
126 list_del(&chunk->node); in panthor_free_heap_chunk()
130 panthor_kernel_bo_destroy(chunk->bo); in panthor_free_heap_chunk()
131 kfree(chunk); in panthor_free_heap_chunk()
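The three matched lines fix the teardown order: unlink the chunk from the heap's list, destroy the backing buffer object, then free the host-side descriptor. A minimal sketch of that shape, assuming the elided source lines only serialize the list update (the heap->lock mutex is a guess, not shown in the matches):

static void panthor_free_heap_chunk(struct panthor_vm *vm,
				    struct panthor_heap *heap,
				    struct panthor_heap_chunk *chunk)
{
	/* Unlink under the (assumed) heap lock so concurrent walkers
	 * never see a half-removed entry.
	 */
	mutex_lock(&heap->lock);
	list_del(&chunk->node);
	mutex_unlock(&heap->lock);

	/* GPU memory goes first, then the CPU-side tracking struct. */
	panthor_kernel_bo_destroy(chunk->bo);
	kfree(chunk);
}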
139 struct panthor_heap_chunk *chunk; in panthor_alloc_heap_chunk() local
143 chunk = kmalloc(sizeof(*chunk), GFP_KERNEL); in panthor_alloc_heap_chunk()
144 if (!chunk) in panthor_alloc_heap_chunk()
147 chunk->bo = panthor_kernel_bo_create(ptdev, vm, heap->chunk_size, in panthor_alloc_heap_chunk()
151 if (IS_ERR(chunk->bo)) { in panthor_alloc_heap_chunk()
152 ret = PTR_ERR(chunk->bo); in panthor_alloc_heap_chunk()
156 ret = panthor_kernel_bo_vmap(chunk->bo); in panthor_alloc_heap_chunk()
160 hdr = chunk->bo->kmap; in panthor_alloc_heap_chunk()
176 panthor_kernel_bo_vunmap(chunk->bo); in panthor_alloc_heap_chunk()
179 list_add(&chunk->node, &heap->chunks); in panthor_alloc_heap_chunk()
186 panthor_kernel_bo_destroy(chunk->bo); in panthor_alloc_heap_chunk()
189 kfree(chunk); in panthor_alloc_heap_chunk()
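The matched lines of panthor_alloc_heap_chunk() outline a multi-step allocation with goto-based unwinding: kmalloc the descriptor, create and vmap the BO, write the chunk header through the mapping, vunmap, and only then publish the chunk on heap->chunks, with each failure point releasing exactly what was acquired before it. A hedged reconstruction of that control flow; the label names, the header setup, and the trailing panthor_kernel_bo_create() arguments (truncated out of the match on source line 147) are placeholders:

	chunk = kmalloc(sizeof(*chunk), GFP_KERNEL);
	if (!chunk)
		return -ENOMEM;

	chunk->bo = panthor_kernel_bo_create(ptdev, vm, heap->chunk_size,
					     bo_flags, vm_map_flags, va);
	if (IS_ERR(chunk->bo)) {
		ret = PTR_ERR(chunk->bo);
		goto err_free_chunk;
	}

	ret = panthor_kernel_bo_vmap(chunk->bo);
	if (ret)
		goto err_destroy_bo;

	/* CPU-write the in-chunk header, then drop the mapping: the
	 * GPU owns the chunk contents from here on.
	 */
	hdr = chunk->bo->kmap;
	/* header setup elided in the matches */
	panthor_kernel_bo_vunmap(chunk->bo);

	list_add(&chunk->node, &heap->chunks);
	return 0;

err_destroy_bo:
	panthor_kernel_bo_destroy(chunk->bo);
err_free_chunk:
	kfree(chunk);
	return ret;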
197 struct panthor_heap_chunk *chunk, *tmp; in panthor_free_heap_chunks() local
199 list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) in panthor_free_heap_chunks()
200 panthor_free_heap_chunk(vm, heap, chunk); in panthor_free_heap_chunks()
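Because panthor_free_heap_chunk() list_del()s and kfree()s the entry it is handed, the walk must cache the next pointer before each call, which is why the matched line uses the _safe iterator variant. A sketch consistent with the matches:

static void panthor_free_heap_chunks(struct panthor_vm *vm,
				     struct panthor_heap *heap)
{
	struct panthor_heap_chunk *chunk, *tmp;

	/* @tmp holds the next node so freeing @chunk cannot break
	 * the iteration.
	 */
	list_for_each_entry_safe(chunk, tmp, &heap->chunks, node)
		panthor_free_heap_chunk(vm, heap, chunk);
}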
254 * @initial_chunk_count: Number of chunks allocated at initialization time.
256 * @chunk_size: The size of each chunk. Must be page-aligned and lie in the
262 * @first_chunk_gpu_va: Pointer holding the GPU address of the first chunk
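The @chunk_size constraint above is cut off mid-sentence by the search, but it implies panthor_heap_create() validates the size before allocating the initial chunks. An illustrative check only, with hypothetical MIN_CHUNK_SIZE/MAX_CHUNK_SIZE bounds standing in for the range named on the truncated line:

	/* Hypothetical bounds; the real range is on the truncated
	 * doc line and is not reproduced here.
	 */
	if (!IS_ALIGNED(chunk_size, PAGE_SIZE) ||
	    chunk_size < MIN_CHUNK_SIZE || chunk_size > MAX_CHUNK_SIZE)
		return -EINVAL;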
355 * panthor_heap_return_chunk() - Return an unused heap chunk
358 * @chunk_gpu_va: The chunk VA to return.
360 * This function is used when a chunk allocated with panthor_heap_grow()
370 struct panthor_heap_chunk *chunk, *tmp, *removed = NULL; in panthor_heap_return_chunk() local
387 list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) { in panthor_heap_return_chunk()
388 if (panthor_kernel_bo_gpuva(chunk->bo) == chunk_gpu_va) { in panthor_heap_return_chunk()
389 removed = chunk; in panthor_heap_return_chunk()
390 list_del(&chunk->node); in panthor_heap_return_chunk()
398 panthor_kernel_bo_destroy(chunk->bo); in panthor_heap_return_chunk()
399 kfree(chunk); in panthor_heap_return_chunk()
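The matched lines show how a returned chunk is located and reclaimed: walk heap->chunks, match on the backing BO's GPU VA, unlink the hit, and destroy it after the walk; the separate @removed pointer lets the function report failure when no chunk matches. A sketch of that tail, assuming the walk is serialized by a per-heap lock that the matches do not show:

	mutex_lock(&heap->lock);	/* lock assumed */
	list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) {
		if (panthor_kernel_bo_gpuva(chunk->bo) == chunk_gpu_va) {
			removed = chunk;
			list_del(&chunk->node);
			break;
		}
	}
	mutex_unlock(&heap->lock);

	if (removed) {
		/* Tear the chunk down outside the list walk. */
		panthor_kernel_bo_destroy(removed->bo);
		kfree(removed);
		return 0;
	}

	return -EINVAL;	/* caller passed a VA we never handed out */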
416 * @new_chunk_gpu_va: Pointer used to return the chunk VA.
433 struct panthor_heap_chunk *chunk; in panthor_heap_grow() local
465 * faulty so the job that triggered this tiler chunk allocation and all in panthor_heap_grow()
473 chunk = list_first_entry(&heap->chunks, in panthor_heap_grow()
476 *new_chunk_gpu_va = (panthor_kernel_bo_gpuva(chunk->bo) & GENMASK_ULL(63, 12)) | in panthor_heap_grow()
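Source line 476 shows that the value handed back through @new_chunk_gpu_va is not a plain pointer: the chunk's GPU VA is masked to 4 KiB granularity with GENMASK_ULL(63, 12), and something is OR'ed into the freed-up low 12 bits (the second operand is truncated out of the match). A plausible reading, with the low-bits payload explicitly an assumption:

	/* bits [63:12]: 4KiB-aligned GPU VA of the recycled chunk.
	 * bits [11:0]:  assumed to encode the chunk size in 4KiB
	 *               units; the OR'ed operand is cut off in the
	 *               matched line, so this is a guess.
	 */
	*new_chunk_gpu_va = (panthor_kernel_bo_gpuva(chunk->bo) & GENMASK_ULL(63, 12)) |
			    (heap->chunk_size >> 12);

Such a packed format would be consistent with struct panthor_heap_chunk_header::next documented at the top of the listing, which likewise needs to name a chunk in a single u64.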
575 * preventing any use of the heap context or the chunks attached to them