Searched refs:bos (Results 1 – 25 of 63) sorted by relevance

/linux-6.12.1/drivers/gpu/drm/etnaviv/
etnaviv_gem_submit.c
35 size_t sz = size_vstruct(nr_bos, sizeof(submit->bos[0]), sizeof(*submit)); in submit_create()
74 submit->bos[i].flags = bo->flags; in submit_lookup_objects()
81 submit->bos[i].va = bo->presumed; in submit_lookup_objects()
101 submit->bos[i].obj = to_etnaviv_bo(obj); in submit_lookup_objects()
113 if (submit->bos[i].flags & BO_LOCKED) { in submit_unlock_object()
114 struct drm_gem_object *obj = &submit->bos[i].obj->base; in submit_unlock_object()
117 submit->bos[i].flags &= ~BO_LOCKED; in submit_unlock_object()
128 struct drm_gem_object *obj = &submit->bos[i].obj->base; in submit_lock_objects()
135 if (!(submit->bos[i].flags & BO_LOCKED)) { in submit_lock_objects()
142 submit->bos[i].flags |= BO_LOCKED; in submit_lock_objects()
[all …]
etnaviv_dump.c
146 obj = submit->bos[i].obj; in etnaviv_core_dump()
205 obj = submit->bos[i].obj; in etnaviv_core_dump()
206 vram = submit->bos[i].mapping; in etnaviv_core_dump()
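The etnaviv hits above show each submit entry carrying a flags word, with a BO_LOCKED bit recorded as the object's reservation lock is taken and cleared when it is dropped. A minimal illustrative sketch of that flag-tracking idea, using plain dma_resv calls and hypothetical names (my_submit_bo, MY_BO_LOCKED) rather than the driver's own helpers:

    #include <drm/drm_gem.h>
    #include <linux/dma-resv.h>

    #define MY_BO_LOCKED 0x1        /* hypothetical, stands in for BO_LOCKED */

    struct my_submit_bo {
            struct drm_gem_object *obj;
            u32 flags;
    };

    /* Drop the reservation lock only for entries that were actually locked. */
    static void my_submit_unlock(struct my_submit_bo *bos, unsigned int nr)
    {
            unsigned int i;

            for (i = 0; i < nr; i++) {
                    if (bos[i].flags & MY_BO_LOCKED) {
                            dma_resv_unlock(bos[i].obj->resv);
                            bos[i].flags &= ~MY_BO_LOCKED;
                    }
            }
    }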
/linux-6.12.1/drivers/gpu/drm/qxl/
qxl_release.c
100 INIT_LIST_HEAD(&release->bos); in qxl_release_alloc()
122 while (!list_empty(&release->bos)) { in qxl_release_free_list()
126 entry = container_of(release->bos.next, in qxl_release_free_list()
150 WARN_ON(list_empty(&release->bos)); in qxl_release_free()
175 list_for_each_entry(entry, &release->bos, tv.head) { in qxl_release_list_add()
187 list_add_tail(&entry->tv.head, &release->bos); in qxl_release_list_add()
221 if (list_is_singular(&release->bos)) in qxl_release_reserve_list()
224 ret = ttm_eu_reserve_buffers(&release->ticket, &release->bos, in qxl_release_reserve_list()
229 list_for_each_entry(entry, &release->bos, tv.head) { in qxl_release_reserve_list()
234 ttm_eu_backoff_reservation(&release->ticket, &release->bos); in qxl_release_reserve_list()
[all …]
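The qxl hits keep release BOs on an ordinary list_head (release->bos) walked with the standard list helpers. A generic sketch of that bookkeeping, with made-up my_release types; the real driver stores TTM validation entries rather than a bare pointer:

    #include <linux/list.h>
    #include <linux/slab.h>
    #include <linux/errno.h>

    struct my_release_entry {
            struct list_head head;
            void *bo;               /* stand-in for the tracked buffer object */
    };

    struct my_release {
            struct list_head bos;   /* initialized with INIT_LIST_HEAD() */
    };

    static int my_release_list_add(struct my_release *release, void *bo)
    {
            struct my_release_entry *entry;

            entry = kzalloc(sizeof(*entry), GFP_KERNEL);
            if (!entry)
                    return -ENOMEM;

            entry->bo = bo;
            list_add_tail(&entry->head, &release->bos);
            return 0;
    }

    static void my_release_free_list(struct my_release *release)
    {
            while (!list_empty(&release->bos)) {
                    struct my_release_entry *entry =
                            container_of(release->bos.next,
                                         struct my_release_entry, head);

                    list_del(&entry->head);
                    kfree(entry);
            }
    }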
/linux-6.12.1/drivers/gpu/drm/lima/
lima_gem.c
305 struct lima_bo **bos = submit->lbos; in lima_gem_submit() local
317 obj = drm_gem_object_lookup(file, submit->bos[i].handle); in lima_gem_submit()
334 bos[i] = bo; in lima_gem_submit()
337 err = drm_gem_lock_reservations((struct drm_gem_object **)bos, in lima_gem_submit()
344 bos, submit->nr_bos, vm); in lima_gem_submit()
354 submit->task, bos[i], in lima_gem_submit()
355 submit->bos[i].flags & LIMA_SUBMIT_BO_WRITE, in lima_gem_submit()
364 dma_resv_add_fence(lima_bo_resv(bos[i]), fence, in lima_gem_submit()
365 submit->bos[i].flags & LIMA_SUBMIT_BO_WRITE ? in lima_gem_submit()
369 drm_gem_unlock_reservations((struct drm_gem_object **)bos, in lima_gem_submit()
[all …]
lima_sched.c
115 struct lima_bo **bos, int num_bos, in lima_sched_task_init() argument
120 task->bos = kmemdup(bos, sizeof(*bos) * num_bos, GFP_KERNEL); in lima_sched_task_init()
121 if (!task->bos) in lima_sched_task_init()
125 drm_gem_object_get(&bos[i]->base.base); in lima_sched_task_init()
129 kfree(task->bos); in lima_sched_task_init()
147 if (task->bos) { in lima_sched_task_fini()
149 drm_gem_object_put(&task->bos[i]->base.base); in lima_sched_task_fini()
150 kfree(task->bos); in lima_sched_task_fini()
306 struct lima_bo *bo = task->bos[i]; in lima_sched_build_error_task_list()
351 struct lima_bo *bo = task->bos[i]; in lima_sched_build_error_task_list()
[all …]
lima_drv.c
107 struct drm_lima_gem_submit_bo *bos; in lima_ioctl_gem_submit() local
125 bos = kvcalloc(args->nr_bos, sizeof(*submit.bos) + sizeof(*submit.lbos), GFP_KERNEL); in lima_ioctl_gem_submit()
126 if (!bos) in lima_ioctl_gem_submit()
129 size = args->nr_bos * sizeof(*submit.bos); in lima_ioctl_gem_submit()
130 if (copy_from_user(bos, u64_to_user_ptr(args->bos), size)) { in lima_ioctl_gem_submit()
158 submit.bos = bos; in lima_ioctl_gem_submit()
159 submit.lbos = (void *)bos + size; in lima_ioctl_gem_submit()
175 kvfree(bos); in lima_ioctl_gem_submit()
lima_sched.h
26 struct lima_bo **bos; member
90 struct lima_bo **bos, int num_bos,
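In the lima_drv.c hits, one kvcalloc() covers both the user-visible BO descriptors and the driver's internal per-BO pointer array, and only the first part is copied in from userspace. A hedged sketch of that layout trick, with hypothetical parameter names:

    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <linux/uaccess.h>

    /*
     * Illustrative helper: allocate room for nr_bos user entries plus
     * nr_bos driver-private entries in one buffer, then fill the user
     * part from the u64 pointer passed through the ioctl args.
     */
    static int my_copy_submit_bos(u64 user_bos, u32 nr_bos,
                                  size_t user_sz, size_t priv_sz,
                                  void **out_bos, void **out_priv)
    {
            size_t size = (size_t)nr_bos * user_sz;
            void *bos;

            bos = kvcalloc(nr_bos, user_sz + priv_sz, GFP_KERNEL);
            if (!bos)
                    return -ENOMEM;

            if (copy_from_user(bos, u64_to_user_ptr(user_bos), size)) {
                    kvfree(bos);
                    return -EFAULT;
            }

            *out_bos = bos;
            *out_priv = bos + size;  /* private array lives right after the user part */
            return 0;
    }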
/linux-6.12.1/drivers/usb/core/
config.c
973 if (dev->bos) { in usb_release_bos_descriptor()
974 kfree(dev->bos->desc); in usb_release_bos_descriptor()
975 kfree(dev->bos); in usb_release_bos_descriptor()
976 dev->bos = NULL; in usb_release_bos_descriptor()
993 struct usb_bos_descriptor *bos; in usb_get_bos_descriptor() local
1001 bos = kzalloc(sizeof(*bos), GFP_KERNEL); in usb_get_bos_descriptor()
1002 if (!bos) in usb_get_bos_descriptor()
1006 ret = usb_get_descriptor(dev, USB_DT_BOS, 0, bos, USB_DT_BOS_SIZE); in usb_get_bos_descriptor()
1007 if (ret < USB_DT_BOS_SIZE || bos->bLength < USB_DT_BOS_SIZE) { in usb_get_bos_descriptor()
1011 kfree(bos); in usb_get_bos_descriptor()
[all …]
hub.h
161 hdev->bos && hdev->bos->ssp_cap); in hub_is_superspeedplus()
hub.c
167 if (!udev->bos) in usb_device_supports_lpm()
174 if (udev->bos->ext_cap && in usb_device_supports_lpm()
176 le32_to_cpu(udev->bos->ext_cap->bmAttributes))) in usb_device_supports_lpm()
186 if (!udev->bos->ss_cap) { in usb_device_supports_lpm()
191 if (udev->bos->ss_cap->bU1devExitLat == 0 && in usb_device_supports_lpm()
192 udev->bos->ss_cap->bU2DevExitLat == 0) { in usb_device_supports_lpm()
347 if (!udev->bos) in usb_set_lpm_parameters()
357 udev_u1_del = udev->bos->ss_cap->bU1devExitLat; in usb_set_lpm_parameters()
358 udev_u2_del = le16_to_cpu(udev->bos->ss_cap->bU2DevExitLat); in usb_set_lpm_parameters()
359 hub_u1_del = udev->parent->bos->ss_cap->bU1devExitLat; in usb_set_lpm_parameters()
[all …]
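The hub.c hits are all variations on one defensive pattern: udev->bos can be NULL (the device has no BOS descriptor) and each cached capability pointer under it can be NULL as well, so both are checked before any exit-latency field is read. A small sketch of that check, assuming only the usb_device / usb_host_bos fields visible in the hits:

    #include <linux/usb.h>

    /* Returns true if the device advertises SuperSpeed U1/U2 exit latencies. */
    static bool my_device_has_ss_exit_latencies(struct usb_device *udev)
    {
            if (!udev->bos || !udev->bos->ss_cap)
                    return false;   /* no BOS descriptor or no SuperSpeed capability */

            return udev->bos->ss_cap->bU1devExitLat != 0 ||
                   le16_to_cpu(udev->bos->ss_cap->bU2DevExitLat) != 0;
    }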
/linux-6.12.1/drivers/gpu/drm/msm/
msm_gem_submit.c
40 sz = struct_size(submit, bos, nr_bos) + in submit_create()
68 submit->cmd = (void *)&submit->bos[nr_bos]; in submit_create()
127 u64_to_user_ptr(args->bos + (i * sizeof(submit_bo))); in submit_lookup_objects()
132 submit->bos[i].flags = 0; in submit_lookup_objects()
151 submit->bos[i].handle = submit_bo.handle; in submit_lookup_objects()
152 submit->bos[i].flags = submit_bo.flags; in submit_lookup_objects()
163 obj = idr_find(&file->object_idr, submit->bos[i].handle); in submit_lookup_objects()
165 SUBMIT_ERROR(submit, "invalid handle %u at index %u\n", submit->bos[i].handle, i); in submit_lookup_objects()
172 submit->bos[i].obj = obj; in submit_lookup_objects()
260 struct drm_gem_object *obj = submit->bos[i].obj; in submit_lock_objects()
[all …]
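msm_gem_submit.c sizes a single allocation with struct_size() so the flexible bos[] array and the command array share one buffer, with cmd pointed just past bos[nr_bos]. A generic sketch of that allocation scheme with invented my_submit types:

    #include <drm/drm_gem.h>
    #include <linux/overflow.h>
    #include <linux/slab.h>

    struct my_cmd { u32 type; u32 size; };
    struct my_bo  { struct drm_gem_object *obj; u32 flags; };

    struct my_submit {
            unsigned int nr_bos;
            unsigned int nr_cmds;
            struct my_cmd *cmd;          /* points into the same allocation */
            struct my_bo bos[];          /* flexible array member */
    };

    static struct my_submit *my_submit_create(unsigned int nr_bos,
                                              unsigned int nr_cmds)
    {
            struct my_submit *submit;
            size_t sz = struct_size(submit, bos, nr_bos) +
                        (size_t)nr_cmds * sizeof(struct my_cmd);

            submit = kzalloc(sz, GFP_KERNEL);
            if (!submit)
                    return NULL;

            submit->nr_bos = nr_bos;
            submit->nr_cmds = nr_cmds;
            submit->cmd = (void *)&submit->bos[nr_bos];  /* cmd array follows bos[] */
            return submit;
    }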
msm_fb.c
33 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos);
141 struct drm_gem_object *bos[4] = {0}; in msm_framebuffer_create() local
146 bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]); in msm_framebuffer_create()
147 if (!bos[i]) { in msm_framebuffer_create()
153 fb = msm_framebuffer_init(dev, mode_cmd, bos); in msm_framebuffer_create()
163 drm_gem_object_put(bos[i]); in msm_framebuffer_create()
168 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos) in msm_framebuffer_init() argument
217 if (bos[i]->size < min_size) { in msm_framebuffer_init()
222 msm_fb->base.obj[i] = bos[i]; in msm_framebuffer_init()
msm_rd.c
315 struct drm_gem_object *obj = submit->bos[idx].obj; in snapshot_buf()
320 offset = iova - submit->bos[idx].iova; in snapshot_buf()
322 iova = submit->bos[idx].iova; in snapshot_buf()
337 if (!(submit->bos[idx].flags & MSM_SUBMIT_BO_READ)) in snapshot_buf()
/linux-6.12.1/drivers/gpu/drm/omapdrm/
omap_fb.c
343 struct drm_gem_object *bos[4]; in omap_framebuffer_create() local
348 bos[i] = drm_gem_object_lookup(file, mode_cmd->handles[i]); in omap_framebuffer_create()
349 if (!bos[i]) { in omap_framebuffer_create()
355 fb = omap_framebuffer_init(dev, mode_cmd, bos); in omap_framebuffer_create()
363 drm_gem_object_put(bos[i]); in omap_framebuffer_create()
369 const struct drm_mode_fb_cmd2 *mode_cmd, struct drm_gem_object **bos) in omap_framebuffer_init() argument
431 if (size > omap_gem_mmap_size(bos[i]) - mode_cmd->offsets[i]) { in omap_framebuffer_init()
434 bos[i]->size - mode_cmd->offsets[i], size); in omap_framebuffer_init()
439 fb->obj[i] = bos[i]; in omap_framebuffer_init()
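msm_fb.c and omap_fb.c both resolve each plane handle with drm_gem_object_lookup() and drop every reference taken so far if one lookup fails. A hedged sketch of that loop, taking a bare handle array instead of the drm_mode_fb_cmd2 the drivers use:

    #include <drm/drm_gem.h>
    #include <linux/errno.h>

    static int my_lookup_fb_bos(struct drm_file *file, const u32 *handles,
                                unsigned int num_planes,
                                struct drm_gem_object **bos)
    {
            unsigned int i, j;

            for (i = 0; i < num_planes; i++) {
                    bos[i] = drm_gem_object_lookup(file, handles[i]);
                    if (!bos[i]) {
                            /* unwind the references already taken */
                            for (j = 0; j < i; j++)
                                    drm_gem_object_put(bos[j]);
                            return -ENXIO;
                    }
            }
            return 0;
    }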
/linux-6.12.1/include/net/
mpls.h
33 bool bos) in mpls_entry_encode() argument
40 (bos ? (1 << MPLS_LS_S_SHIFT) : 0) | in mpls_entry_encode()
/linux-6.12.1/drivers/gpu/drm/panfrost/
panfrost_job.c
261 static int panfrost_acquire_object_fences(struct drm_gem_object **bos, in panfrost_acquire_object_fences() argument
268 ret = dma_resv_reserve_fences(bos[i]->resv, 1); in panfrost_acquire_object_fences()
273 ret = drm_sched_job_add_implicit_dependencies(job, bos[i], in panfrost_acquire_object_fences()
282 static void panfrost_attach_object_fences(struct drm_gem_object **bos, in panfrost_attach_object_fences() argument
289 dma_resv_add_fence(bos[i]->resv, fence, DMA_RESV_USAGE_WRITE); in panfrost_attach_object_fences()
298 ret = drm_gem_lock_reservations(job->bos, job->bo_count, in panfrost_job_push()
308 ret = panfrost_acquire_object_fences(job->bos, job->bo_count, in panfrost_job_push()
321 panfrost_attach_object_fences(job->bos, job->bo_count, in panfrost_job_push()
325 drm_gem_unlock_reservations(job->bos, job->bo_count, &acquire_ctx); in panfrost_job_push()
350 if (job->bos) { in panfrost_job_cleanup()
[all …]
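The panfrost_job.c hits outline the usual job-submission fence flow: lock all reservations, reserve a fence slot and collect implicit dependencies per BO, then attach the job's done fence as a writer and unlock. A simplified, illustrative sketch of that flow (error handling and the scheduler wiring are reduced to the essentials):

    #include <drm/drm_gem.h>
    #include <drm/gpu_scheduler.h>
    #include <linux/dma-resv.h>

    static int my_push_job(struct drm_sched_job *sched_job,
                           struct drm_gem_object **bos, int bo_count,
                           struct dma_fence *done_fence,
                           struct ww_acquire_ctx *acquire_ctx)
    {
            int i, ret;

            ret = drm_gem_lock_reservations(bos, bo_count, acquire_ctx);
            if (ret)
                    return ret;

            for (i = 0; i < bo_count; i++) {
                    /* make room for the fence we will add after submission */
                    ret = dma_resv_reserve_fences(bos[i]->resv, 1);
                    if (ret)
                            goto unlock;

                    /* wait on whatever already touches this BO */
                    ret = drm_sched_job_add_implicit_dependencies(sched_job,
                                                                  bos[i], true);
                    if (ret)
                            goto unlock;
            }

            for (i = 0; i < bo_count; i++)
                    dma_resv_add_fence(bos[i]->resv, done_fence,
                                       DMA_RESV_USAGE_WRITE);

    unlock:
            drm_gem_unlock_reservations(bos, bo_count, acquire_ctx);
            return ret;
    }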
/linux-6.12.1/drivers/gpu/drm/msm/disp/dpu1/
dpu_formats.c
431 struct drm_gem_object **bos) in dpu_format_check_modified_format() argument
438 if (!fmt || !cmd || !bos) { in dpu_format_check_modified_format()
453 if (!bos[i]) { in dpu_format_check_modified_format()
457 if ((i == 0) || (bos[i] != bos[0])) in dpu_format_check_modified_format()
458 bos_total_size += bos[i]->size; in dpu_format_check_modified_format()
/linux-6.12.1/drivers/accel/ivpu/
ivpu_job.c
431 if (job->bos[i]) in ivpu_job_destroy()
432 drm_gem_object_put(&job->bos[i]->base.base); in ivpu_job_destroy()
445 job = kzalloc(struct_size(job, bos, bo_count), GFP_KERNEL); in ivpu_job_create()
496 job->bos[CMD_BUF_IDX]->job_status = job_status; in ivpu_job_signal_and_destroy()
610 job->bos[i] = to_ivpu_bo(obj); in ivpu_job_prepare_bos_for_submit()
612 ret = ivpu_bo_pin(job->bos[i]); in ivpu_job_prepare_bos_for_submit()
617 bo = job->bos[CMD_BUF_IDX]; in ivpu_job_prepare_bos_for_submit()
630 ret = drm_gem_lock_reservations((struct drm_gem_object **)job->bos, buf_count, in ivpu_job_prepare_bos_for_submit()
638 ret = dma_resv_reserve_fences(job->bos[i]->base.base.resv, 1); in ivpu_job_prepare_bos_for_submit()
647 dma_resv_add_fence(job->bos[i]->base.base.resv, job->done_fence, usage); in ivpu_job_prepare_bos_for_submit()
[all …]
/linux-6.12.1/net/mpls/
mpls_iptunnel.c
50 bool bos; in mpls_xmit() local
124 bos = true; in mpls_xmit()
127 ttl, 0, bos); in mpls_xmit()
128 bos = false; in mpls_xmit()
af_mpls.c
194 if (!dec.bos) in mpls_multipath_hash()
435 if (unlikely(!new_header_size && dec.bos)) { in mpls_forward()
440 bool bos; in mpls_forward() local
446 bos = dec.bos; in mpls_forward()
449 dec.ttl, 0, bos); in mpls_forward()
450 bos = false; in mpls_forward()
1691 bool bos; in nla_put_labels() local
1698 bos = true; in nla_put_labels()
1700 nla_label[i] = mpls_entry_encode(label[i], 0, 0, bos); in nla_put_labels()
1701 bos = false; in nla_put_labels()
[all …]
internal.h
15 u8 bos; member
182 result.bos = (entry & MPLS_LS_S_MASK) >> MPLS_LS_S_SHIFT; in mpls_entry_decode()
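Across the MPLS hits, bos is the bottom-of-stack (S) bit: mpls_entry_encode() sets it for exactly one entry, and the push loops in mpls_iptunnel.c and af_mpls.c iterate from the innermost label outward so only the first entry written gets bos = true. A short sketch of that loop, wrapped in a hypothetical my_push_label_stack():

    #include <net/mpls.h>

    /*
     * Write num_labels shim headers into hdr[]; hdr[num_labels - 1] is the
     * innermost label and is the only one with the S (bottom-of-stack) bit.
     */
    static void my_push_label_stack(struct mpls_shim_hdr *hdr,
                                    const u32 *labels, unsigned int num_labels,
                                    u8 ttl)
    {
            bool bos = true;
            int i;

            for (i = num_labels - 1; i >= 0; i--) {
                    hdr[i] = mpls_entry_encode(labels[i], ttl, 0, bos);
                    bos = false;
            }
    }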
/linux-6.12.1/drivers/gpu/drm/msm/adreno/
adreno_gpu.c
740 for (i = 0; state->bos && i < state->nr_bos; i++) in adreno_gpu_state_destroy()
741 kvfree(state->bos[i].data); in adreno_gpu_state_destroy()
743 kfree(state->bos); in adreno_gpu_state_destroy()
890 if (state->bos) { in adreno_show()
895 state->bos[i].iova); in adreno_show()
896 drm_printf(p, " size: %zd\n", state->bos[i].size); in adreno_show()
897 drm_printf(p, " flags: 0x%x\n", state->bos[i].flags); in adreno_show()
898 drm_printf(p, " name: %-32s\n", state->bos[i].name); in adreno_show()
900 adreno_show_object(p, &state->bos[i].data, in adreno_show()
901 state->bos[i].size, &state->bos[i].encoded); in adreno_show()
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/en/tc/act/
mpls.c
32 mpls_info->bos = act->mpls_push.bos; in copy_mpls_info()
/linux-6.12.1/drivers/gpu/drm/ttm/tests/
ttm_bo_validate_test.c
820 struct ttm_buffer_object *bos, *bo_val; in ttm_bo_validate_happy_evict() local
832 bos = kunit_kmalloc_array(test, bo_no, sizeof(*bos), GFP_KERNEL); in ttm_bo_validate_happy_evict()
833 KUNIT_ASSERT_NOT_NULL(test, bos); in ttm_bo_validate_happy_evict()
835 memset(bos, 0, sizeof(*bos) * bo_no); in ttm_bo_validate_happy_evict()
837 drm_gem_private_object_init(priv->drm, &bos[i].base, bo_sizes[i]); in ttm_bo_validate_happy_evict()
838 err = ttm_bo_init_reserved(priv->ttm_dev, &bos[i], bo_type, placement, in ttm_bo_validate_happy_evict()
841 dma_resv_unlock(bos[i].base.resv); in ttm_bo_validate_happy_evict()
852 KUNIT_EXPECT_EQ(test, bos[0].resource->mem_type, mem_type_evict); in ttm_bo_validate_happy_evict()
853 KUNIT_EXPECT_TRUE(test, bos[0].ttm->page_flags & TTM_TT_FLAG_ZERO_ALLOC); in ttm_bo_validate_happy_evict()
854 KUNIT_EXPECT_TRUE(test, bos[0].ttm->page_flags & TTM_TT_FLAG_PRIV_POPULATED); in ttm_bo_validate_happy_evict()
[all …]
/linux-6.12.1/drivers/usb/gadget/
composite.c
729 struct usb_bos_descriptor *bos = cdev->req->buf; in bos_desc() local
732 bos->bLength = USB_DT_BOS_SIZE; in bos_desc()
733 bos->bDescriptorType = USB_DT_BOS; in bos_desc()
735 bos->wTotalLength = cpu_to_le16(USB_DT_BOS_SIZE); in bos_desc()
736 bos->bNumDeviceCaps = 0; in bos_desc()
766 usb_ext = cdev->req->buf + le16_to_cpu(bos->wTotalLength); in bos_desc()
767 bos->bNumDeviceCaps++; in bos_desc()
768 le16_add_cpu(&bos->wTotalLength, USB_DT_USB_EXT_CAP_SIZE); in bos_desc()
783 ss_cap = cdev->req->buf + le16_to_cpu(bos->wTotalLength); in bos_desc()
784 bos->bNumDeviceCaps++; in bos_desc()
[all …]
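The gadget-side composite.c hits build the BOS descriptor incrementally: write the fixed header, then append each device capability at the current wTotalLength offset, bumping bNumDeviceCaps and wTotalLength as it goes. A reduced sketch showing just the header plus the USB 2.0 extension capability (buffer sizing and the other capabilities are omitted):

    #include <linux/usb/ch9.h>
    #include <asm/byteorder.h>

    static void my_build_bos(void *buf)
    {
            struct usb_bos_descriptor *bos = buf;
            struct usb_ext_cap_descriptor *usb_ext;

            /* fixed BOS header */
            bos->bLength = USB_DT_BOS_SIZE;
            bos->bDescriptorType = USB_DT_BOS;
            bos->wTotalLength = cpu_to_le16(USB_DT_BOS_SIZE);
            bos->bNumDeviceCaps = 0;

            /* append the USB 2.0 extension capability right after the header */
            usb_ext = buf + le16_to_cpu(bos->wTotalLength);
            bos->bNumDeviceCaps++;
            le16_add_cpu(&bos->wTotalLength, USB_DT_USB_EXT_CAP_SIZE);

            usb_ext->bLength = USB_DT_USB_EXT_CAP_SIZE;
            usb_ext->bDescriptorType = USB_DT_DEVICE_CAPABILITY;
            usb_ext->bDevCapabilityType = USB_CAP_TYPE_EXT;
            usb_ext->bmAttributes = cpu_to_le32(USB_LPM_SUPPORT);
    }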
