Lines matching: +full:num +full:- +full:guest +full:- +full:ids (i.e. the tokens of "num-guest-ids")

1 // SPDX-License-Identifier: GPL-2.0 OR MIT
4 * Copyright 2009 - 2023 VMware, Inc., Palo Alto, CA., USA
20 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
47 __sw_context->dx_ctx_node ? __sw_context->dx_ctx_node : ({ \
49 __sw_context->dx_ctx_node; \
60 * struct vmw_relocation - Buffer object relocation
63 * @vbo: Non ref-counted pointer to buffer object
65 * @location: Pointer to location for guest pointer to be modified
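
Only the kerneldoc survives in this listing; a minimal sketch of what the relocation record could look like, pieced together from the @vbo/@location fields above and the reloc->mob_loc / reloc->location uses further down (the union layout and exact types are assumptions), is:

struct vmw_relocation {
	struct list_head head;          /* entry on sw_context->bo_relocations */
	struct vmw_bo *vbo;             /* non ref-counted buffer object */
	union {
		SVGAMobId *mob_loc;     /* word patched with the MOB id */
		SVGAGuestPtr *location; /* guest pointer to be patched */
	};
};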
77 * enum vmw_resource_relocation_type - Relocation type for resources
84 * validation is -1, the command is replaced with a NOP. Otherwise no action.
85 * @vmw_res_rel_max: Last value in the enum - used for error checking
95 * struct vmw_resource_relocation - Relocation info for resources
98 * @res: Non-ref-counted pointer to the resource.
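
The relocation-type enum and the per-relocation record are only described above; a hedged reconstruction, based on the kerneldoc fragments and on the switch in vmw_resource_relocations_apply() later in this listing (enumerator names other than vmw_res_rel_max are assumptions), might read:

enum vmw_resource_relocation_type {
	vmw_res_rel_normal,   /* patch in the resource id as-is */
	vmw_res_rel_nop,      /* always replace the command with a NOP */
	vmw_res_rel_cond_nop, /* NOP only if the id is still -1 at validation */
	vmw_res_rel_max       /* last value - used for error checking */
};

struct vmw_resource_relocation {
	struct list_head head;
	const struct vmw_resource *res;  /* non-ref-counted */
	unsigned long offset;            /* byte offset into the command buffer */
	enum vmw_resource_relocation_type rel_type;
};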
111 * struct vmw_ctx_validation_info - Extra validation metadata for contexts
126 * struct vmw_cmd_entry - Describe a command for the verifier
128 * @func: Call-back to handle the command.
130 * @gb_disable: Whether disabled if guest-backed objects are available.
131 * @gb_enable: Whether enabled iff guest-backed objects are available.
144 [(_cmd) - SVGA_3D_CMD_BASE] = {(_func), (_user_allow),\
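
Line 144 above is only the tail of the command-table initializer; a sketch of the entry structure and the full macro, inferred from the kerneldoc fields and from the entry->func / entry->user_allow / entry->cmd_name accesses in vmw_cmd_check() and vmw_cmd_describe() below (the stringified #_cmd as cmd_name is an assumption), could be:

struct vmw_cmd_entry {
	int (*func)(struct vmw_private *dev_priv,
		    struct vmw_sw_context *sw_context,
		    SVGA3dCmdHeader *header);
	bool user_allow;     /* may be submitted from user-space */
	bool gb_disable;     /* disabled if guest-backed objects are available */
	bool gb_enable;      /* enabled only if guest-backed objects are available */
	const char *cmd_name;
};

#define VMW_CMD_DEF(_cmd, _func, _user_allow, _gb_disable, _gb_enable)	\
	[(_cmd) - SVGA_3D_CMD_BASE] = {(_func), (_user_allow),		\
				       (_gb_disable), (_gb_enable), #_cmd}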
155 * vmw_ptr_diff - Compute the offset from a to b in bytes
164 return (unsigned long) b - (unsigned long) a; in vmw_ptr_diff()
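
Only the return statement of the helper made it into the listing; the complete function is a one-liner, and a typical call site (mirroring the relocation code further down) records the byte offset of a command word within the bounce buffer. A sketch, with the surrounding variable names assumed:

static size_t vmw_ptr_diff(const void *a, const void *b)
{
	return (unsigned long) b - (unsigned long) a;
}

/* e.g. remember where a command id sits inside the submitted batch */
offset = vmw_ptr_diff(sw_context->buf_start, &cmd->header.id);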
168 * vmw_execbuf_bindings_commit - Commit modified binding state
179 list_for_each_entry(entry, &sw_context->ctx_list, head) { in vmw_execbuf_bindings_commit()
181 vmw_binding_state_commit(entry->cur, entry->staged); in vmw_execbuf_bindings_commit()
183 if (entry->staged != sw_context->staged_bindings) in vmw_execbuf_bindings_commit()
184 vmw_binding_state_free(entry->staged); in vmw_execbuf_bindings_commit()
186 sw_context->staged_bindings_inuse = false; in vmw_execbuf_bindings_commit()
190 INIT_LIST_HEAD(&sw_context->ctx_list); in vmw_execbuf_bindings_commit()
194 * vmw_bind_dx_query_mob - Bind the DX query MOB if referenced
200 if (sw_context->dx_query_mob) in vmw_bind_dx_query_mob()
201 vmw_context_bind_dx_query(sw_context->dx_query_ctx, in vmw_bind_dx_query_mob()
202 sw_context->dx_query_mob); in vmw_bind_dx_query_mob()
206 * vmw_cmd_ctx_first_setup - Perform the setup needed when a context is added to
225 if (!sw_context->staged_bindings) { in vmw_cmd_ctx_first_setup()
226 sw_context->staged_bindings = vmw_binding_state_alloc(dev_priv); in vmw_cmd_ctx_first_setup()
227 if (IS_ERR(sw_context->staged_bindings)) { in vmw_cmd_ctx_first_setup()
228 ret = PTR_ERR(sw_context->staged_bindings); in vmw_cmd_ctx_first_setup()
229 sw_context->staged_bindings = NULL; in vmw_cmd_ctx_first_setup()
234 if (sw_context->staged_bindings_inuse) { in vmw_cmd_ctx_first_setup()
235 node->staged = vmw_binding_state_alloc(dev_priv); in vmw_cmd_ctx_first_setup()
236 if (IS_ERR(node->staged)) { in vmw_cmd_ctx_first_setup()
237 ret = PTR_ERR(node->staged); in vmw_cmd_ctx_first_setup()
238 node->staged = NULL; in vmw_cmd_ctx_first_setup()
242 node->staged = sw_context->staged_bindings; in vmw_cmd_ctx_first_setup()
243 sw_context->staged_bindings_inuse = true; in vmw_cmd_ctx_first_setup()
246 node->ctx = res; in vmw_cmd_ctx_first_setup()
247 node->cur = vmw_context_binding_state(res); in vmw_cmd_ctx_first_setup()
248 list_add_tail(&node->head, &sw_context->ctx_list); in vmw_cmd_ctx_first_setup()
257 * vmw_execbuf_res_size - calculate extra size for the resource validation node
262 * Guest-backed contexts and DX contexts require extra size to store execbuf
272 (res_type == vmw_res_context && dev_priv->has_mob)) ? in vmw_execbuf_res_size()
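
The ternary on line 272 is the core of the helper; a hedged sketch of the whole function, assuming the return type and that the extra space is sized for the vmw_ctx_validation_info node described at line 111:

static unsigned int vmw_execbuf_res_size(struct vmw_private *dev_priv,
					 enum vmw_res_type res_type)
{
	/* Only DX contexts, and guest-backed contexts on MOB-capable
	 * hardware, need the extra per-context validation metadata. */
	return (res_type == vmw_res_dx_context ||
		(res_type == vmw_res_context && dev_priv->has_mob)) ?
		sizeof(struct vmw_ctx_validation_info) : 0;
}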
277 * vmw_execbuf_rcache_update - Update a resource-node cache entry
281 * @private: Pointer to the execbuf-private space in the resource validation
288 rcache->res = res; in vmw_execbuf_rcache_update()
289 rcache->private = private; in vmw_execbuf_rcache_update()
290 rcache->valid = 1; in vmw_execbuf_rcache_update()
291 rcache->valid_handle = 0; in vmw_execbuf_rcache_update()
300 * vmw_execbuf_res_val_add - Add a resource to the validation list.
303 * @res: Unreferenced rcu-protected pointer to the resource.
308 * are %-EINVAL on inconsistency and %-ESRCH if the resource was doomed.
315 struct vmw_private *dev_priv = res->dev_priv; in vmw_execbuf_res_val_add()
323 rcache = &sw_context->res_cache[res_type]; in vmw_execbuf_res_val_add()
324 if (likely(rcache->valid && rcache->res == res)) { in vmw_execbuf_res_val_add()
326 vmw_validation_res_set_dirty(sw_context->ctx, in vmw_execbuf_res_val_add()
327 rcache->private, dirty); in vmw_execbuf_res_val_add()
332 ret = vmw_validation_add_resource(sw_context->ctx, res, 0, dirty, in vmw_execbuf_res_val_add()
339 ret = vmw_validation_add_resource(sw_context->ctx, res, priv_size, in vmw_execbuf_res_val_add()
360 * vmw_view_res_val_add - Add a view and the surface it's pointing to to the
387 * vmw_view_id_val_add - Look up a view and add it and the surface it's pointing
396 * return an -EINVAL error pointer.
405 struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; in vmw_view_id_val_add()
410 return ERR_PTR(-EINVAL); in vmw_view_id_val_add()
412 view = vmw_view_lookup(sw_context->man, view_type, id); in vmw_view_id_val_add()
424 * vmw_resource_context_res_add - Put resources previously bound to a context on
443 u32 cotable_max = has_sm5_context(ctx->dev_priv) ? in vmw_resource_context_res_add()
463 mutex_lock(&dev_priv->binding_mutex); in vmw_resource_context_res_add()
467 if (vmw_res_type(entry->res) == vmw_res_view) in vmw_resource_context_res_add()
468 ret = vmw_view_res_val_add(sw_context, entry->res); in vmw_resource_context_res_add()
470 ret = vmw_execbuf_res_val_add(sw_context, entry->res, in vmw_resource_context_res_add()
471 vmw_binding_dirtying(entry->bt), in vmw_resource_context_res_add()
486 ret = vmw_validation_add_bo(sw_context->ctx, in vmw_resource_context_res_add()
491 mutex_unlock(&dev_priv->binding_mutex); in vmw_resource_context_res_add()
496 * vmw_resource_relocation_add - Add a relocation to the relocation list
512 rel = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*rel)); in vmw_resource_relocation_add()
515 return -ENOMEM; in vmw_resource_relocation_add()
518 rel->res = res; in vmw_resource_relocation_add()
519 rel->offset = offset; in vmw_resource_relocation_add()
520 rel->rel_type = rel_type; in vmw_resource_relocation_add()
521 list_add_tail(&rel->head, &sw_context->res_relocations); in vmw_resource_relocation_add()
527 * vmw_resource_relocations_free - Free all relocations on a list
538 * vmw_resource_relocations_apply - Apply all relocations on a list
542 * but the contents must be the same modulo the resource ids.
555 u32 *addr = (u32 *)((unsigned long) cb + rel->offset); in vmw_resource_relocations_apply()
556 switch (rel->rel_type) { in vmw_resource_relocations_apply()
558 *addr = rel->res->id; in vmw_resource_relocations_apply()
564 if (rel->res->id == -1) in vmw_resource_relocations_apply()
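
Lines 555-564 give the skeleton of the patching loop; a hedged reconstruction of the full switch (the NOP opcode name and the exact case labels are assumptions, the assignments are taken from the listing):

list_for_each_entry(rel, list, head) {
	u32 *addr = (u32 *)((unsigned long) cb + rel->offset);

	switch (rel->rel_type) {
	case vmw_res_rel_normal:
		*addr = rel->res->id;           /* patch in the final id */
		break;
	case vmw_res_rel_nop:
		*addr = SVGA_3D_CMD_NOP;        /* unconditionally NOP the command */
		break;
	default:
		if (rel->res->id == -1)         /* resource never bound: NOP it */
			*addr = SVGA_3D_CMD_NOP;
		break;
	}
}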
575 return -EINVAL; in vmw_cmd_invalid()
586 * vmw_resources_reserve - Reserve all resources on the sw_context's resource
599 ret = vmw_validation_res_reserve(sw_context->ctx, true); in vmw_resources_reserve()
603 if (sw_context->dx_query_mob) { in vmw_resources_reserve()
607 vmw_context_get_dx_query_mob(sw_context->dx_query_ctx); in vmw_resources_reserve()
609 expected_dx_query_mob != sw_context->dx_query_mob) { in vmw_resources_reserve()
610 ret = -EINVAL; in vmw_resources_reserve()
618 * vmw_cmd_res_check - Check that a resource is present and if so, put it on the
625 * @converter: User-space visible type specific information.
627 * from where the user-space resource id handle is located.
640 struct vmw_res_cache_entry *rcache = &sw_context->res_cache[res_type]; in vmw_cmd_res_check()
651 return -EINVAL; in vmw_cmd_res_check()
656 if (likely(rcache->valid_handle && *id_loc == rcache->handle)) { in vmw_cmd_res_check()
657 res = rcache->res; in vmw_cmd_res_check()
659 vmw_validation_res_set_dirty(sw_context->ctx, in vmw_cmd_res_check()
660 rcache->private, dirty); in vmw_cmd_res_check()
664 ret = vmw_validation_preload_res(sw_context->ctx, size); in vmw_cmd_res_check()
669 (dev_priv, sw_context->fp->tfile, *id_loc, converter, &res); in vmw_cmd_res_check()
681 if (rcache->valid && rcache->res == res) { in vmw_cmd_res_check()
682 rcache->valid_handle = true; in vmw_cmd_res_check()
683 rcache->handle = *id_loc; in vmw_cmd_res_check()
688 vmw_ptr_diff(sw_context->buf_start, in vmw_cmd_res_check()
702 * vmw_rebind_all_dx_query - Rebind DX query associated with the context
710 struct vmw_private *dev_priv = ctx_res->dev_priv; in vmw_rebind_all_dx_query()
716 if (!dx_query_mob || dx_query_mob->dx_query_ctx) in vmw_rebind_all_dx_query()
719 cmd = VMW_CMD_CTX_RESERVE(dev_priv, sizeof(*cmd), ctx_res->id); in vmw_rebind_all_dx_query()
721 return -ENOMEM; in vmw_rebind_all_dx_query()
723 cmd->header.id = SVGA_3D_CMD_DX_BIND_ALL_QUERY; in vmw_rebind_all_dx_query()
724 cmd->header.size = sizeof(cmd->body); in vmw_rebind_all_dx_query()
725 cmd->body.cid = ctx_res->id; in vmw_rebind_all_dx_query()
726 cmd->body.mobid = dx_query_mob->tbo.resource->start; in vmw_rebind_all_dx_query()
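
The fragment stops just before the command is submitted; the usual pattern in this driver is reserve, fill, commit. A sketch of how the tail of vmw_rebind_all_dx_query() plausibly completes (the vmw_cmd_commit() name is assumed from the rest of the driver):

	cmd = VMW_CMD_CTX_RESERVE(dev_priv, sizeof(*cmd), ctx_res->id);
	if (cmd == NULL)
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_BIND_ALL_QUERY;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = ctx_res->id;
	cmd->body.mobid = dx_query_mob->tbo.resource->start;
	vmw_cmd_commit(dev_priv, sizeof(*cmd));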
735 * vmw_rebind_contexts - Rebind all resources previously bound to referenced
747 list_for_each_entry(val, &sw_context->ctx_list, head) { in vmw_rebind_contexts()
748 ret = vmw_binding_rebind_all(val->cur); in vmw_rebind_contexts()
750 if (ret != -ERESTARTSYS) in vmw_rebind_contexts()
755 ret = vmw_rebind_all_dx_query(val->ctx); in vmw_rebind_contexts()
766 * vmw_view_bindings_add - Add an array of view bindings to a context binding
773 * @view_ids: Array of view ids to be bound.
774 * @num_views: Number of view ids in @view_ids.
788 return -EINVAL; in vmw_view_bindings_add()
802 binding.bi.ctx = ctx_node->ctx; in vmw_view_bindings_add()
807 vmw_binding_add(ctx_node->staged, &binding.bi, in vmw_view_bindings_add()
815 * vmw_cmd_cid_check - Check a command header for valid context information.
819 * @header: A command header with an embedded user-space context handle.
821 * Convenience function: Call vmw_cmd_res_check with the user-space context
833 &cmd->body, NULL); in vmw_cmd_cid_check()
837 * vmw_execbuf_info_from_res - Get the private validation metadata for a
855 &sw_context->res_cache[vmw_res_type(res)]; in vmw_execbuf_info_from_res()
857 if (rcache->valid && rcache->res == res) in vmw_execbuf_info_from_res()
858 return rcache->private; in vmw_execbuf_info_from_res()
875 if (cmd->body.type >= SVGA3D_RT_MAX) { in vmw_cmd_set_render_target_check()
877 (unsigned int) cmd->body.type); in vmw_cmd_set_render_target_check()
878 return -EINVAL; in vmw_cmd_set_render_target_check()
883 &cmd->body.cid, &ctx); in vmw_cmd_set_render_target_check()
889 &cmd->body.target.sid, &res); in vmw_cmd_set_render_target_check()
893 if (dev_priv->has_mob) { in vmw_cmd_set_render_target_check()
899 return -EINVAL; in vmw_cmd_set_render_target_check()
904 binding.slot = cmd->body.type; in vmw_cmd_set_render_target_check()
905 vmw_binding_add(node->staged, &binding.bi, 0, binding.slot); in vmw_cmd_set_render_target_check()
922 &cmd->body.src.sid, NULL); in vmw_cmd_surface_copy_check()
928 &cmd->body.dest.sid, NULL); in vmw_cmd_surface_copy_check()
941 &cmd->body.src, NULL); in vmw_cmd_buffer_copy_check()
947 &cmd->body.dest, NULL); in vmw_cmd_buffer_copy_check()
960 &cmd->body.srcSid, NULL); in vmw_cmd_pred_copy_check()
966 &cmd->body.dstSid, NULL); in vmw_cmd_pred_copy_check()
979 &cmd->body.src.sid, NULL); in vmw_cmd_stretch_blt_check()
985 &cmd->body.dest.sid, NULL); in vmw_cmd_stretch_blt_check()
997 &cmd->body.srcImage.sid, NULL); in vmw_cmd_blt_surf_screen_check()
1009 &cmd->body.sid, NULL); in vmw_cmd_present_check()
1013 * vmw_query_bo_switch_prepare - Prepare to switch pinned buffer for queries.
1029 &sw_context->res_cache[vmw_res_context]; in vmw_query_bo_switch_prepare()
1032 BUG_ON(!ctx_entry->valid); in vmw_query_bo_switch_prepare()
1033 sw_context->last_query_ctx = ctx_entry->res; in vmw_query_bo_switch_prepare()
1035 if (unlikely(new_query_bo != sw_context->cur_query_bo)) { in vmw_query_bo_switch_prepare()
1037 if (unlikely(PFN_UP(new_query_bo->tbo.resource->size) > 4)) { in vmw_query_bo_switch_prepare()
1039 return -EINVAL; in vmw_query_bo_switch_prepare()
1042 if (unlikely(sw_context->cur_query_bo != NULL)) { in vmw_query_bo_switch_prepare()
1043 sw_context->needs_post_query_barrier = true; in vmw_query_bo_switch_prepare()
1044 vmw_bo_placement_set_default_accelerated(sw_context->cur_query_bo); in vmw_query_bo_switch_prepare()
1045 ret = vmw_validation_add_bo(sw_context->ctx, in vmw_query_bo_switch_prepare()
1046 sw_context->cur_query_bo); in vmw_query_bo_switch_prepare()
1050 sw_context->cur_query_bo = new_query_bo; in vmw_query_bo_switch_prepare()
1052 vmw_bo_placement_set_default_accelerated(dev_priv->dummy_query_bo); in vmw_query_bo_switch_prepare()
1053 ret = vmw_validation_add_bo(sw_context->ctx, in vmw_query_bo_switch_prepare()
1054 dev_priv->dummy_query_bo); in vmw_query_bo_switch_prepare()
1063 * vmw_query_bo_switch_commit - Finalize switching pinned query buffer
1076 * As mentioned above, both the new and the old query buffers need to be fenced
1086 if (sw_context->needs_post_query_barrier) { in vmw_query_bo_switch_commit()
1088 &sw_context->res_cache[vmw_res_context]; in vmw_query_bo_switch_commit()
1092 BUG_ON(!ctx_entry->valid); in vmw_query_bo_switch_commit()
1093 ctx = ctx_entry->res; in vmw_query_bo_switch_commit()
1095 ret = vmw_cmd_emit_dummy_query(dev_priv, ctx->id); in vmw_query_bo_switch_commit()
1101 if (dev_priv->pinned_bo != sw_context->cur_query_bo) { in vmw_query_bo_switch_commit()
1102 if (dev_priv->pinned_bo) { in vmw_query_bo_switch_commit()
1103 vmw_bo_pin_reserved(dev_priv->pinned_bo, false); in vmw_query_bo_switch_commit()
1104 vmw_bo_unreference(&dev_priv->pinned_bo); in vmw_query_bo_switch_commit()
1107 if (!sw_context->needs_post_query_barrier) { in vmw_query_bo_switch_commit()
1108 vmw_bo_pin_reserved(sw_context->cur_query_bo, true); in vmw_query_bo_switch_commit()
1115 if (!dev_priv->dummy_query_bo_pinned) { in vmw_query_bo_switch_commit()
1116 vmw_bo_pin_reserved(dev_priv->dummy_query_bo, in vmw_query_bo_switch_commit()
1118 dev_priv->dummy_query_bo_pinned = true; in vmw_query_bo_switch_commit()
1121 BUG_ON(sw_context->last_query_ctx == NULL); in vmw_query_bo_switch_commit()
1122 dev_priv->query_cid = sw_context->last_query_ctx->id; in vmw_query_bo_switch_commit()
1123 dev_priv->query_cid_valid = true; in vmw_query_bo_switch_commit()
1124 dev_priv->pinned_bo = in vmw_query_bo_switch_commit()
1125 vmw_bo_reference(sw_context->cur_query_bo); in vmw_query_bo_switch_commit()
1131 * vmw_translate_mob_ptr - Prepare to translate a user-space buffer handle
1136 * @id: Pointer to the user-space handle to be translated.
1138 * non-reference-counted pointer to the buffer object identified by the
1139 * user-space handle in @id.
1141 * This function saves information needed to translate a user-space buffer
1160 vmw_validation_preload_bo(sw_context->ctx); in vmw_translate_mob_ptr()
1161 ret = vmw_user_bo_lookup(sw_context->filp, handle, &vmw_bo); in vmw_translate_mob_ptr()
1163 drm_dbg(&dev_priv->drm, "Could not find or use MOB buffer.\n"); in vmw_translate_mob_ptr()
1167 ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo); in vmw_translate_mob_ptr()
1173 reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); in vmw_translate_mob_ptr()
1175 return -ENOMEM; in vmw_translate_mob_ptr()
1177 reloc->mob_loc = id; in vmw_translate_mob_ptr()
1178 reloc->vbo = vmw_bo; in vmw_translate_mob_ptr()
1181 list_add_tail(&reloc->head, &sw_context->bo_relocations); in vmw_translate_mob_ptr()
1187 * vmw_translate_guest_ptr - Prepare to translate a user-space buffer handle
1192 * @ptr: Pointer to the user-space handle to be translated.
1194 * non-reference-counted pointer to the DMA buffer identified by the user-space
1197 * This function saves information needed to translate a user-space buffer
1212 uint32_t handle = ptr->gmrId; in vmw_translate_guest_ptr()
1216 vmw_validation_preload_bo(sw_context->ctx); in vmw_translate_guest_ptr()
1217 ret = vmw_user_bo_lookup(sw_context->filp, handle, &vmw_bo); in vmw_translate_guest_ptr()
1219 drm_dbg(&dev_priv->drm, "Could not find or use GMR region.\n"); in vmw_translate_guest_ptr()
1224 ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo); in vmw_translate_guest_ptr()
1230 reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc)); in vmw_translate_guest_ptr()
1232 return -ENOMEM; in vmw_translate_guest_ptr()
1234 reloc->location = ptr; in vmw_translate_guest_ptr()
1235 reloc->vbo = vmw_bo; in vmw_translate_guest_ptr()
1237 list_add_tail(&reloc->head, &sw_context->bo_relocations); in vmw_translate_guest_ptr()
1243 * vmw_cmd_dx_define_query - validate SVGA_3D_CMD_DX_DEFINE_QUERY command.
1261 return -EINVAL; in vmw_cmd_dx_define_query()
1265 if (cmd->body.type < SVGA3D_QUERYTYPE_MIN || in vmw_cmd_dx_define_query()
1266 cmd->body.type >= SVGA3D_QUERYTYPE_MAX) in vmw_cmd_dx_define_query()
1267 return -EINVAL; in vmw_cmd_dx_define_query()
1269 cotable_res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_DXQUERY); in vmw_cmd_dx_define_query()
1271 return cotable_res ? PTR_ERR(cotable_res) : -EINVAL; in vmw_cmd_dx_define_query()
1272 ret = vmw_cotable_notify(cotable_res, cmd->body.queryId); in vmw_cmd_dx_define_query()
1278 * vmw_cmd_dx_bind_query - validate SVGA_3D_CMD_DX_BIND_QUERY command.
1302 ret = vmw_translate_mob_ptr(dev_priv, sw_context, &cmd->body.mobid, in vmw_cmd_dx_bind_query()
1308 sw_context->dx_query_mob = vmw_bo; in vmw_cmd_dx_bind_query()
1309 sw_context->dx_query_ctx = sw_context->dx_ctx_node->ctx; in vmw_cmd_dx_bind_query()
1314 * vmw_cmd_begin_gb_query - validate SVGA_3D_CMD_BEGIN_GB_QUERY command.
1329 &cmd->body.cid, NULL); in vmw_cmd_begin_gb_query()
1333 * vmw_cmd_begin_query - validate SVGA_3D_CMD_BEGIN_QUERY command.
1346 if (unlikely(dev_priv->has_mob)) { in vmw_cmd_begin_query()
1352 gb_cmd.header.size = cmd->header.size; in vmw_cmd_begin_query()
1353 gb_cmd.body.cid = cmd->body.cid; in vmw_cmd_begin_query()
1354 gb_cmd.body.type = cmd->body.type; in vmw_cmd_begin_query()
1362 &cmd->body.cid, NULL); in vmw_cmd_begin_query()
1366 * vmw_cmd_end_gb_query - validate SVGA_3D_CMD_END_GB_QUERY command.
1385 ret = vmw_translate_mob_ptr(dev_priv, sw_context, &cmd->body.mobid, in vmw_cmd_end_gb_query()
1396 * vmw_cmd_end_query - validate SVGA_3D_CMD_END_QUERY command.
1411 if (dev_priv->has_mob) { in vmw_cmd_end_query()
1417 gb_cmd.header.size = cmd->header.size; in vmw_cmd_end_query()
1418 gb_cmd.body.cid = cmd->body.cid; in vmw_cmd_end_query()
1419 gb_cmd.body.type = cmd->body.type; in vmw_cmd_end_query()
1420 gb_cmd.body.mobid = cmd->body.guestResult.gmrId; in vmw_cmd_end_query()
1421 gb_cmd.body.offset = cmd->body.guestResult.offset; in vmw_cmd_end_query()
1432 &cmd->body.guestResult, &vmw_bo); in vmw_cmd_end_query()
1442 * vmw_cmd_wait_gb_query - validate SVGA_3D_CMD_WAIT_GB_QUERY command.
1461 ret = vmw_translate_mob_ptr(dev_priv, sw_context, &cmd->body.mobid, in vmw_cmd_wait_gb_query()
1470 * vmw_cmd_wait_query - validate SVGA_3D_CMD_WAIT_QUERY command.
1485 if (dev_priv->has_mob) { in vmw_cmd_wait_query()
1491 gb_cmd.header.size = cmd->header.size; in vmw_cmd_wait_query()
1492 gb_cmd.body.cid = cmd->body.cid; in vmw_cmd_wait_query()
1493 gb_cmd.body.type = cmd->body.type; in vmw_cmd_wait_query()
1494 gb_cmd.body.mobid = cmd->body.guestResult.gmrId; in vmw_cmd_wait_query()
1495 gb_cmd.body.offset = cmd->body.guestResult.offset; in vmw_cmd_wait_query()
1506 &cmd->body.guestResult, &vmw_bo); in vmw_cmd_wait_query()
1526 suffix = (SVGA3dCmdSurfaceDMASuffix *)((unsigned long) &cmd->body + in vmw_cmd_dma()
1527 header->size - sizeof(*suffix)); in vmw_cmd_dma()
1530 if (unlikely(suffix->suffixSize != sizeof(*suffix))) { in vmw_cmd_dma()
1532 return -EINVAL; in vmw_cmd_dma()
1536 &cmd->body.guest.ptr, &vmw_bo); in vmw_cmd_dma()
1541 bo_size = vmw_bo->tbo.base.size; in vmw_cmd_dma()
1542 if (unlikely(cmd->body.guest.ptr.offset > bo_size)) { in vmw_cmd_dma()
1544 return -EINVAL; in vmw_cmd_dma()
1547 bo_size -= cmd->body.guest.ptr.offset; in vmw_cmd_dma()
1548 if (unlikely(suffix->maximumOffset > bo_size)) in vmw_cmd_dma()
1549 suffix->maximumOffset = bo_size; in vmw_cmd_dma()
1551 dirty = (cmd->body.transfer == SVGA3D_WRITE_HOST_VRAM) ? in vmw_cmd_dma()
1555 &cmd->body.host.sid, NULL); in vmw_cmd_dma()
1557 if (unlikely(ret != -ERESTARTSYS)) in vmw_cmd_dma()
1562 srf = vmw_res_to_srf(sw_context->res_cache[vmw_res_surface].res); in vmw_cmd_dma()
1564 vmw_kms_cursor_snoop(srf, sw_context->fp->tfile, &vmw_bo->tbo, header); in vmw_cmd_dma()
1586 maxnum = (header->size - sizeof(cmd->body)) / sizeof(*decl); in vmw_cmd_draw()
1588 if (unlikely(cmd->body.numVertexDecls > maxnum)) { in vmw_cmd_draw()
1590 return -EINVAL; in vmw_cmd_draw()
1593 for (i = 0; i < cmd->body.numVertexDecls; ++i, ++decl) { in vmw_cmd_draw()
1597 &decl->array.surfaceId, NULL); in vmw_cmd_draw()
1602 maxnum = (header->size - sizeof(cmd->body) - in vmw_cmd_draw()
1603 cmd->body.numVertexDecls * sizeof(*decl)) / sizeof(*range); in vmw_cmd_draw()
1604 if (unlikely(cmd->body.numRanges > maxnum)) { in vmw_cmd_draw()
1606 return -EINVAL; in vmw_cmd_draw()
1610 for (i = 0; i < cmd->body.numRanges; ++i, ++range) { in vmw_cmd_draw()
1614 &range->indexArray.surfaceId, NULL); in vmw_cmd_draw()
1627 ((unsigned long) header + header->size + sizeof(*header)); in vmw_cmd_tex_state()
1638 &cmd->body.cid, &ctx); in vmw_cmd_tex_state()
1643 if (likely(cur_state->name != SVGA3D_TS_BIND_TEXTURE)) in vmw_cmd_tex_state()
1646 if (cur_state->stage >= SVGA3D_NUM_TEXTURE_UNITS) { in vmw_cmd_tex_state()
1648 (unsigned int) cur_state->stage); in vmw_cmd_tex_state()
1649 return -EINVAL; in vmw_cmd_tex_state()
1655 &cur_state->value, &res); in vmw_cmd_tex_state()
1659 if (dev_priv->has_mob) { in vmw_cmd_tex_state()
1665 return -EINVAL; in vmw_cmd_tex_state()
1670 binding.texture_stage = cur_state->stage; in vmw_cmd_tex_state()
1671 vmw_binding_add(node->staged, &binding.bi, 0, in vmw_cmd_tex_state()
1690 return vmw_translate_guest_ptr(dev_priv, sw_context, &cmd->body.ptr, in vmw_cmd_check_define_gmrfb()
1695 * vmw_cmd_res_switch_backup - Utility function to handle backup buffer
1701 * @buf_id: Pointer to the user-space backup buffer handle in the command
1720 return -EINVAL; in vmw_cmd_res_switch_backup()
1726 vmw_validation_res_switch_backup(sw_context->ctx, info, vbo, in vmw_cmd_res_switch_backup()
1732 * vmw_cmd_switch_backup - Utility function to handle backup buffer switching
1737 * @converter: Information about user-space binding for this resource type.
1738 * @res_id: Pointer to the user-space resource handle in the command stream.
1739 * @buf_id: Pointer to the user-space backup buffer handle in the command
1767 * vmw_cmd_bind_gb_surface - Validate SVGA_3D_CMD_BIND_GB_SURFACE command
1781 user_surface_converter, &cmd->body.sid, in vmw_cmd_bind_gb_surface()
1782 &cmd->body.mobid, 0); in vmw_cmd_bind_gb_surface()
1786 * vmw_cmd_update_gb_image - Validate SVGA_3D_CMD_UPDATE_GB_IMAGE command
1801 &cmd->body.image.sid, NULL); in vmw_cmd_update_gb_image()
1805 * vmw_cmd_update_gb_surface - Validate SVGA_3D_CMD_UPDATE_GB_SURFACE command
1820 &cmd->body.sid, NULL); in vmw_cmd_update_gb_surface()
1824 * vmw_cmd_readback_gb_image - Validate SVGA_3D_CMD_READBACK_GB_IMAGE command
1839 &cmd->body.image.sid, NULL); in vmw_cmd_readback_gb_image()
1843 * vmw_cmd_readback_gb_surface - Validate SVGA_3D_CMD_READBACK_GB_SURFACE
1859 &cmd->body.sid, NULL); in vmw_cmd_readback_gb_surface()
1863 * vmw_cmd_invalidate_gb_image - Validate SVGA_3D_CMD_INVALIDATE_GB_IMAGE
1879 &cmd->body.image.sid, NULL); in vmw_cmd_invalidate_gb_image()
1883 * vmw_cmd_invalidate_gb_surface - Validate SVGA_3D_CMD_INVALIDATE_GB_SURFACE
1899 &cmd->body.sid, NULL); in vmw_cmd_invalidate_gb_surface()
1903 * vmw_cmd_shader_define - Validate SVGA_3D_CMD_SHADER_DEFINE command
1922 &cmd->body.cid, &ctx); in vmw_cmd_shader_define()
1926 if (unlikely(!dev_priv->has_mob)) in vmw_cmd_shader_define()
1929 size = cmd->header.size - sizeof(cmd->body); in vmw_cmd_shader_define()
1931 cmd->body.shid, cmd + 1, cmd->body.type, in vmw_cmd_shader_define()
1932 size, &sw_context->staged_cmd_res); in vmw_cmd_shader_define()
1937 vmw_ptr_diff(sw_context->buf_start, in vmw_cmd_shader_define()
1938 &cmd->header.id), in vmw_cmd_shader_define()
1943 * vmw_cmd_shader_destroy - Validate SVGA_3D_CMD_SHADER_DESTROY command
1961 &cmd->body.cid, &ctx); in vmw_cmd_shader_destroy()
1965 if (unlikely(!dev_priv->has_mob)) in vmw_cmd_shader_destroy()
1968 ret = vmw_shader_remove(vmw_context_res_man(ctx), cmd->body.shid, in vmw_cmd_shader_destroy()
1969 cmd->body.type, &sw_context->staged_cmd_res); in vmw_cmd_shader_destroy()
1974 vmw_ptr_diff(sw_context->buf_start, in vmw_cmd_shader_destroy()
1975 &cmd->header.id), in vmw_cmd_shader_destroy()
1980 * vmw_cmd_set_shader - Validate SVGA_3D_CMD_SET_SHADER command
1998 if (!vmw_shadertype_is_valid(VMW_SM_LEGACY, cmd->body.type)) { in vmw_cmd_set_shader()
2000 (unsigned int) cmd->body.type); in vmw_cmd_set_shader()
2001 return -EINVAL; in vmw_cmd_set_shader()
2006 &cmd->body.cid, &ctx); in vmw_cmd_set_shader()
2010 if (!dev_priv->has_mob) in vmw_cmd_set_shader()
2013 if (cmd->body.shid != SVGA3D_INVALID_ID) { in vmw_cmd_set_shader()
2015 * This is the compat shader path - Per device guest-backed in vmw_cmd_set_shader()
2016 * shaders, but user-space thinks it's per context host- in vmw_cmd_set_shader()
2020 cmd->body.shid, cmd->body.type); in vmw_cmd_set_shader()
2030 vmw_ptr_diff(sw_context->buf_start, in vmw_cmd_set_shader()
2031 &cmd->body.shid), in vmw_cmd_set_shader()
2041 user_shader_converter, &cmd->body.shid, in vmw_cmd_set_shader()
2049 return -EINVAL; in vmw_cmd_set_shader()
2054 binding.shader_slot = cmd->body.type - SVGA3D_SHADERTYPE_MIN; in vmw_cmd_set_shader()
2055 vmw_binding_add(ctx_info->staged, &binding.bi, binding.shader_slot, 0); in vmw_cmd_set_shader()
2061 * vmw_cmd_set_shader_const - Validate SVGA_3D_CMD_SET_SHADER_CONST command
2078 &cmd->body.cid, NULL); in vmw_cmd_set_shader_const()
2082 if (dev_priv->has_mob) in vmw_cmd_set_shader_const()
2083 header->id = SVGA_3D_CMD_SET_GB_SHADERCONSTS_INLINE; in vmw_cmd_set_shader_const()
2089 * vmw_cmd_bind_gb_shader - Validate SVGA_3D_CMD_BIND_GB_SHADER command
2103 user_shader_converter, &cmd->body.shid, in vmw_cmd_bind_gb_shader()
2104 &cmd->body.mobid, cmd->body.offsetInBytes); in vmw_cmd_bind_gb_shader()
2108 * vmw_cmd_dx_set_single_constant_buffer - Validate
2128 return -EINVAL; in vmw_cmd_dx_set_single_constant_buffer()
2133 &cmd->body.sid, &res); in vmw_cmd_dx_set_single_constant_buffer()
2137 if (!vmw_shadertype_is_valid(dev_priv->sm_type, cmd->body.type) || in vmw_cmd_dx_set_single_constant_buffer()
2138 cmd->body.slot >= SVGA3D_DX_MAX_CONSTBUFFERS) { in vmw_cmd_dx_set_single_constant_buffer()
2140 (unsigned int) cmd->body.type, in vmw_cmd_dx_set_single_constant_buffer()
2141 (unsigned int) cmd->body.slot); in vmw_cmd_dx_set_single_constant_buffer()
2142 return -EINVAL; in vmw_cmd_dx_set_single_constant_buffer()
2145 binding.bi.ctx = ctx_node->ctx; in vmw_cmd_dx_set_single_constant_buffer()
2148 binding.shader_slot = cmd->body.type - SVGA3D_SHADERTYPE_MIN; in vmw_cmd_dx_set_single_constant_buffer()
2149 binding.offset = cmd->body.offsetInBytes; in vmw_cmd_dx_set_single_constant_buffer()
2150 binding.size = cmd->body.sizeInBytes; in vmw_cmd_dx_set_single_constant_buffer()
2151 binding.slot = cmd->body.slot; in vmw_cmd_dx_set_single_constant_buffer()
2153 vmw_binding_add(ctx_node->staged, &binding.bi, binding.shader_slot, in vmw_cmd_dx_set_single_constant_buffer()
2160 * vmw_cmd_dx_set_constant_buffer_offset - Validate
2178 return -EINVAL; in vmw_cmd_dx_set_constant_buffer_offset()
2181 return -EINVAL; in vmw_cmd_dx_set_constant_buffer_offset()
2184 if (cmd->body.slot >= SVGA3D_DX_MAX_CONSTBUFFERS) { in vmw_cmd_dx_set_constant_buffer_offset()
2186 (unsigned int) cmd->body.slot); in vmw_cmd_dx_set_constant_buffer_offset()
2187 return -EINVAL; in vmw_cmd_dx_set_constant_buffer_offset()
2190 shader_slot = cmd->header.id - SVGA_3D_CMD_DX_SET_VS_CONSTANT_BUFFER_OFFSET; in vmw_cmd_dx_set_constant_buffer_offset()
2191 vmw_binding_cb_offset_update(ctx_node->staged, shader_slot, in vmw_cmd_dx_set_constant_buffer_offset()
2192 cmd->body.slot, cmd->body.offsetInBytes); in vmw_cmd_dx_set_constant_buffer_offset()
2198 * vmw_cmd_dx_set_shader_res - Validate SVGA_3D_CMD_DX_SET_SHADER_RESOURCES
2212 u32 num_sr_view = (cmd->header.size - sizeof(cmd->body)) / in vmw_cmd_dx_set_shader_res()
2215 if ((u64) cmd->body.startView + (u64) num_sr_view > in vmw_cmd_dx_set_shader_res()
2217 !vmw_shadertype_is_valid(dev_priv->sm_type, cmd->body.type)) { in vmw_cmd_dx_set_shader_res()
2219 return -EINVAL; in vmw_cmd_dx_set_shader_res()
2224 cmd->body.type - SVGA3D_SHADERTYPE_MIN, in vmw_cmd_dx_set_shader_res()
2226 cmd->body.startView); in vmw_cmd_dx_set_shader_res()
2230 * vmw_cmd_dx_set_shader - Validate SVGA_3D_CMD_DX_SET_SHADER command
2247 return -EINVAL; in vmw_cmd_dx_set_shader()
2251 if (!vmw_shadertype_is_valid(dev_priv->sm_type, cmd->body.type)) { in vmw_cmd_dx_set_shader()
2253 (unsigned int) cmd->body.type); in vmw_cmd_dx_set_shader()
2254 return -EINVAL; in vmw_cmd_dx_set_shader()
2257 if (cmd->body.shaderId != SVGA3D_INVALID_ID) { in vmw_cmd_dx_set_shader()
2258 res = vmw_shader_lookup(sw_context->man, cmd->body.shaderId, 0); in vmw_cmd_dx_set_shader()
2271 binding.bi.ctx = ctx_node->ctx; in vmw_cmd_dx_set_shader()
2274 binding.shader_slot = cmd->body.type - SVGA3D_SHADERTYPE_MIN; in vmw_cmd_dx_set_shader()
2276 vmw_binding_add(ctx_node->staged, &binding.bi, binding.shader_slot, 0); in vmw_cmd_dx_set_shader()
2282 * vmw_cmd_dx_set_vertex_buffers - Validates SVGA_3D_CMD_DX_SET_VERTEX_BUFFERS
2301 int i, ret, num; in vmw_cmd_dx_set_vertex_buffers() local
2304 return -EINVAL; in vmw_cmd_dx_set_vertex_buffers()
2307 num = (cmd->header.size - sizeof(cmd->body)) / in vmw_cmd_dx_set_vertex_buffers()
2309 if ((u64)num + (u64)cmd->body.startBuffer > in vmw_cmd_dx_set_vertex_buffers()
2312 return -EINVAL; in vmw_cmd_dx_set_vertex_buffers()
2315 for (i = 0; i < num; i++) { in vmw_cmd_dx_set_vertex_buffers()
2319 &cmd->buf[i].sid, &res); in vmw_cmd_dx_set_vertex_buffers()
2323 binding.bi.ctx = ctx_node->ctx; in vmw_cmd_dx_set_vertex_buffers()
2326 binding.offset = cmd->buf[i].offset; in vmw_cmd_dx_set_vertex_buffers()
2327 binding.stride = cmd->buf[i].stride; in vmw_cmd_dx_set_vertex_buffers()
2328 binding.slot = i + cmd->body.startBuffer; in vmw_cmd_dx_set_vertex_buffers()
2330 vmw_binding_add(ctx_node->staged, &binding.bi, 0, binding.slot); in vmw_cmd_dx_set_vertex_buffers()
2337 * vmw_cmd_dx_set_index_buffer - Validate
2355 return -EINVAL; in vmw_cmd_dx_set_index_buffer()
2360 &cmd->body.sid, &res); in vmw_cmd_dx_set_index_buffer()
2364 binding.bi.ctx = ctx_node->ctx; in vmw_cmd_dx_set_index_buffer()
2367 binding.offset = cmd->body.offset; in vmw_cmd_dx_set_index_buffer()
2368 binding.format = cmd->body.format; in vmw_cmd_dx_set_index_buffer()
2370 vmw_binding_add(ctx_node->staged, &binding.bi, 0, 0); in vmw_cmd_dx_set_index_buffer()
2376 * vmw_cmd_dx_set_rendertargets - Validate SVGA_3D_CMD_DX_SET_RENDERTARGETS
2389 u32 num_rt_view = (cmd->header.size - sizeof(cmd->body)) / in vmw_cmd_dx_set_rendertargets()
2395 return -EINVAL; in vmw_cmd_dx_set_rendertargets()
2399 0, &cmd->body.depthStencilViewId, 1, 0); in vmw_cmd_dx_set_rendertargets()
2409 * vmw_cmd_dx_clear_rendertarget_view - Validate
2425 cmd->body.renderTargetViewId); in vmw_cmd_dx_clear_rendertarget_view()
2431 * vmw_cmd_dx_clear_depthstencil_view - Validate
2447 cmd->body.depthStencilViewId); in vmw_cmd_dx_clear_depthstencil_view()
2472 return -EINVAL; in vmw_cmd_dx_view_define()
2474 view_type = vmw_view_cmd_to_type(header->id); in vmw_cmd_dx_view_define()
2476 return -EINVAL; in vmw_cmd_dx_view_define()
2479 if (unlikely(cmd->sid == SVGA3D_INVALID_ID)) { in vmw_cmd_dx_view_define()
2481 return -EINVAL; in vmw_cmd_dx_view_define()
2485 &cmd->sid, &srf); in vmw_cmd_dx_view_define()
2489 res = vmw_context_cotable(ctx_node->ctx, vmw_view_cotables[view_type]); in vmw_cmd_dx_view_define()
2491 return res ? PTR_ERR(res) : -EINVAL; in vmw_cmd_dx_view_define()
2492 ret = vmw_cotable_notify(res, cmd->defined_id); in vmw_cmd_dx_view_define()
2496 return vmw_view_add(sw_context->man, ctx_node->ctx, srf, view_type, in vmw_cmd_dx_view_define()
2497 cmd->defined_id, header, in vmw_cmd_dx_view_define()
2498 header->size + sizeof(*header), in vmw_cmd_dx_view_define()
2499 &sw_context->staged_cmd_res); in vmw_cmd_dx_view_define()
2503 * vmw_cmd_dx_set_so_targets - Validate SVGA_3D_CMD_DX_SET_SOTARGETS command.
2521 int i, ret, num; in vmw_cmd_dx_set_so_targets() local
2524 return -EINVAL; in vmw_cmd_dx_set_so_targets()
2527 num = (cmd->header.size - sizeof(cmd->body)) / sizeof(SVGA3dSoTarget); in vmw_cmd_dx_set_so_targets()
2529 if (num > SVGA3D_DX_MAX_SOTARGETS) { in vmw_cmd_dx_set_so_targets()
2531 return -EINVAL; in vmw_cmd_dx_set_so_targets()
2534 for (i = 0; i < num; i++) { in vmw_cmd_dx_set_so_targets()
2538 &cmd->targets[i].sid, &res); in vmw_cmd_dx_set_so_targets()
2542 binding.bi.ctx = ctx_node->ctx; in vmw_cmd_dx_set_so_targets()
2545 binding.offset = cmd->targets[i].offset; in vmw_cmd_dx_set_so_targets()
2546 binding.size = cmd->targets[i].sizeInBytes; in vmw_cmd_dx_set_so_targets()
2549 vmw_binding_add(ctx_node->staged, &binding.bi, 0, binding.slot); in vmw_cmd_dx_set_so_targets()
2573 return -EINVAL; in vmw_cmd_dx_so_define()
2575 so_type = vmw_so_cmd_to_type(header->id); in vmw_cmd_dx_so_define()
2576 res = vmw_context_cotable(ctx_node->ctx, vmw_so_cotables[so_type]); in vmw_cmd_dx_so_define()
2578 return res ? PTR_ERR(res) : -EINVAL; in vmw_cmd_dx_so_define()
2580 ret = vmw_cotable_notify(res, cmd->defined_id); in vmw_cmd_dx_so_define()
2586 * vmw_cmd_dx_check_subresource - Validate SVGA_3D_CMD_DX_[X]_SUBRESOURCE
2617 &cmd->sid, NULL); in vmw_cmd_dx_check_subresource()
2627 return -EINVAL; in vmw_cmd_dx_cid_check()
2633 * vmw_cmd_dx_view_remove - validate a view remove command and schedule the view
2652 enum vmw_view_type view_type = vmw_view_cmd_to_type(header->id); in vmw_cmd_dx_view_remove()
2657 return -EINVAL; in vmw_cmd_dx_view_remove()
2659 ret = vmw_view_remove(sw_context->man, cmd->body.view_id, view_type, in vmw_cmd_dx_view_remove()
2660 &sw_context->staged_cmd_res, &view); in vmw_cmd_dx_view_remove()
2671 vmw_ptr_diff(sw_context->buf_start, in vmw_cmd_dx_view_remove()
2672 &cmd->header.id), in vmw_cmd_dx_view_remove()
2677 * vmw_cmd_dx_define_shader - Validate SVGA_3D_CMD_DX_DEFINE_SHADER command
2694 return -EINVAL; in vmw_cmd_dx_define_shader()
2696 res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_DXSHADER); in vmw_cmd_dx_define_shader()
2698 return res ? PTR_ERR(res) : -EINVAL; in vmw_cmd_dx_define_shader()
2699 ret = vmw_cotable_notify(res, cmd->body.shaderId); in vmw_cmd_dx_define_shader()
2703 return vmw_dx_shader_add(sw_context->man, ctx_node->ctx, in vmw_cmd_dx_define_shader()
2704 cmd->body.shaderId, cmd->body.type, in vmw_cmd_dx_define_shader()
2705 &sw_context->staged_cmd_res); in vmw_cmd_dx_define_shader()
2709 * vmw_cmd_dx_destroy_shader - Validate SVGA_3D_CMD_DX_DESTROY_SHADER command
2725 return -EINVAL; in vmw_cmd_dx_destroy_shader()
2727 ret = vmw_shader_remove(sw_context->man, cmd->body.shaderId, 0, in vmw_cmd_dx_destroy_shader()
2728 &sw_context->staged_cmd_res); in vmw_cmd_dx_destroy_shader()
2734 * vmw_cmd_dx_bind_shader - Validate SVGA_3D_CMD_DX_BIND_SHADER command
2750 if (cmd->body.cid != SVGA3D_INVALID_ID) { in vmw_cmd_dx_bind_shader()
2753 user_context_converter, &cmd->body.cid, in vmw_cmd_dx_bind_shader()
2762 return -EINVAL; in vmw_cmd_dx_bind_shader()
2764 ctx = ctx_node->ctx; in vmw_cmd_dx_bind_shader()
2767 res = vmw_shader_lookup(vmw_context_res_man(ctx), cmd->body.shid, 0); in vmw_cmd_dx_bind_shader()
2781 &cmd->body.mobid, in vmw_cmd_dx_bind_shader()
2782 cmd->body.offsetInBytes); in vmw_cmd_dx_bind_shader()
2786 * vmw_cmd_dx_genmips - Validate SVGA_3D_CMD_DX_GENMIPS command
2802 cmd->body.shaderResourceViewId); in vmw_cmd_dx_genmips()
2807 * Normally the shader-resource view is not gpu-dirtying, but for in vmw_cmd_dx_genmips()
2809 * So mark the last looked-up surface, which is the surface in vmw_cmd_dx_genmips()
2810 * the view points to, gpu-dirty. in vmw_cmd_dx_genmips()
2812 rcache = &sw_context->res_cache[vmw_res_surface]; in vmw_cmd_dx_genmips()
2813 vmw_validation_res_set_dirty(sw_context->ctx, rcache->private, in vmw_cmd_dx_genmips()
2819 * vmw_cmd_dx_transfer_from_buffer - Validate
2836 &cmd->body.srcSid, NULL); in vmw_cmd_dx_transfer_from_buffer()
2842 &cmd->body.destSid, NULL); in vmw_cmd_dx_transfer_from_buffer()
2846 * vmw_cmd_intra_surface_copy - Validate SVGA_3D_CMD_INTRA_SURFACE_COPY command
2859 if (!(dev_priv->capabilities2 & SVGA_CAP2_INTRA_SURFACE_COPY)) in vmw_cmd_intra_surface_copy()
2860 return -EINVAL; in vmw_cmd_intra_surface_copy()
2864 &cmd->body.surface.sid, NULL); in vmw_cmd_intra_surface_copy()
2872 return -EINVAL; in vmw_cmd_sm5()
2882 return -EINVAL; in vmw_cmd_sm5_view_define()
2892 return -EINVAL; in vmw_cmd_sm5_view_remove()
2908 return -EINVAL; in vmw_cmd_clear_uav_uint()
2911 cmd->body.uaViewId); in vmw_cmd_clear_uav_uint()
2927 return -EINVAL; in vmw_cmd_clear_uav_float()
2930 cmd->body.uaViewId); in vmw_cmd_clear_uav_float()
2943 u32 num_uav = (cmd->header.size - sizeof(cmd->body)) / in vmw_cmd_set_uav()
2948 return -EINVAL; in vmw_cmd_set_uav()
2952 return -EINVAL; in vmw_cmd_set_uav()
2961 vmw_binding_add_uav_index(sw_context->dx_ctx_node->staged, 0, in vmw_cmd_set_uav()
2962 cmd->body.uavSpliceIndex); in vmw_cmd_set_uav()
2975 u32 num_uav = (cmd->header.size - sizeof(cmd->body)) / in vmw_cmd_set_cs_uav()
2980 return -EINVAL; in vmw_cmd_set_cs_uav()
2984 return -EINVAL; in vmw_cmd_set_cs_uav()
2993 vmw_binding_add_uav_index(sw_context->dx_ctx_node->staged, 1, in vmw_cmd_set_cs_uav()
2994 cmd->body.startIndex); in vmw_cmd_set_cs_uav()
3003 struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; in vmw_cmd_dx_define_streamoutput()
3012 return -EINVAL; in vmw_cmd_dx_define_streamoutput()
3016 return -EINVAL; in vmw_cmd_dx_define_streamoutput()
3019 res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_STREAMOUTPUT); in vmw_cmd_dx_define_streamoutput()
3021 return res ? PTR_ERR(res) : -EINVAL; in vmw_cmd_dx_define_streamoutput()
3022 ret = vmw_cotable_notify(res, cmd->body.soid); in vmw_cmd_dx_define_streamoutput()
3026 return vmw_dx_streamoutput_add(sw_context->man, ctx_node->ctx, in vmw_cmd_dx_define_streamoutput()
3027 cmd->body.soid, in vmw_cmd_dx_define_streamoutput()
3028 &sw_context->staged_cmd_res); in vmw_cmd_dx_define_streamoutput()
3035 struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; in vmw_cmd_dx_destroy_streamoutput()
3044 return -EINVAL; in vmw_cmd_dx_destroy_streamoutput()
3049 * not available to user-space. Simply return in this case. in vmw_cmd_dx_destroy_streamoutput()
3055 * With SM5 capable device if lookup fails then user-space probably used in vmw_cmd_dx_destroy_streamoutput()
3058 res = vmw_dx_streamoutput_lookup(vmw_context_res_man(ctx_node->ctx), in vmw_cmd_dx_destroy_streamoutput()
3059 cmd->body.soid); in vmw_cmd_dx_destroy_streamoutput()
3063 return vmw_dx_streamoutput_remove(sw_context->man, cmd->body.soid, in vmw_cmd_dx_destroy_streamoutput()
3064 &sw_context->staged_cmd_res); in vmw_cmd_dx_destroy_streamoutput()
3071 struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; in vmw_cmd_dx_bind_streamoutput()
3080 return -EINVAL; in vmw_cmd_dx_bind_streamoutput()
3084 return -EINVAL; in vmw_cmd_dx_bind_streamoutput()
3087 res = vmw_dx_streamoutput_lookup(vmw_context_res_man(ctx_node->ctx), in vmw_cmd_dx_bind_streamoutput()
3088 cmd->body.soid); in vmw_cmd_dx_bind_streamoutput()
3094 vmw_dx_streamoutput_set_size(res, cmd->body.sizeInBytes); in vmw_cmd_dx_bind_streamoutput()
3104 &cmd->body.mobid, in vmw_cmd_dx_bind_streamoutput()
3105 cmd->body.offsetInBytes); in vmw_cmd_dx_bind_streamoutput()
3112 struct vmw_ctx_validation_info *ctx_node = sw_context->dx_ctx_node; in vmw_cmd_dx_set_streamoutput()
3123 return -EINVAL; in vmw_cmd_dx_set_streamoutput()
3126 if (cmd->body.soid == SVGA3D_INVALID_ID) in vmw_cmd_dx_set_streamoutput()
3131 * not available to user-space. Simply return in this case. in vmw_cmd_dx_set_streamoutput()
3137 * With SM5 capable device if lookup fails then user-space probably used in vmw_cmd_dx_set_streamoutput()
3140 res = vmw_dx_streamoutput_lookup(vmw_context_res_man(ctx_node->ctx), in vmw_cmd_dx_set_streamoutput()
3141 cmd->body.soid); in vmw_cmd_dx_set_streamoutput()
3153 binding.bi.ctx = ctx_node->ctx; in vmw_cmd_dx_set_streamoutput()
3158 vmw_binding_add(sw_context->dx_ctx_node->staged, &binding.bi, 0, in vmw_cmd_dx_set_streamoutput()
3174 return -EINVAL; in vmw_cmd_indexed_instanced_indirect()
3178 &cmd->body.argsBufferSid, NULL); in vmw_cmd_indexed_instanced_indirect()
3191 return -EINVAL; in vmw_cmd_instanced_indirect()
3195 &cmd->body.argsBufferSid, NULL); in vmw_cmd_instanced_indirect()
3208 return -EINVAL; in vmw_cmd_dispatch_indirect()
3212 &cmd->body.argsBufferSid, NULL); in vmw_cmd_dispatch_indirect()
3238 return -EINVAL; in vmw_cmd_check_not_3d()
3244 return -EINVAL; in vmw_cmd_check_not_3d()
3247 if (unlikely(!sw_context->kernel)) { in vmw_cmd_check_not_3d()
3249 return -EPERM; in vmw_cmd_check_not_3d()
3627 *size = header->size + sizeof(SVGA3dCmdHeader); in vmw_cmd_describe()
3628 cmd_id = header->id; in vmw_cmd_describe()
3632 cmd_id -= SVGA_3D_CMD_BASE; in vmw_cmd_describe()
3634 *cmd = entry->cmd_name; in vmw_cmd_describe()
3673 bool gb = dev_priv->capabilities & SVGA_CAP_GBOBJECTS; in vmw_cmd_check()
3681 cmd_id = header->id; in vmw_cmd_check()
3682 *size = header->size + sizeof(SVGA3dCmdHeader); in vmw_cmd_check()
3684 cmd_id -= SVGA_3D_CMD_BASE; in vmw_cmd_check()
3688 if (unlikely(cmd_id >= SVGA_3D_CMD_MAX - SVGA_3D_CMD_BASE)) in vmw_cmd_check()
3692 if (unlikely(!entry->func)) in vmw_cmd_check()
3695 if (unlikely(!entry->user_allow && !sw_context->kernel)) in vmw_cmd_check()
3698 if (unlikely(entry->gb_disable && gb)) in vmw_cmd_check()
3701 if (unlikely(entry->gb_enable && !gb)) in vmw_cmd_check()
3704 ret = entry->func(dev_priv, sw_context, header); in vmw_cmd_check()
3715 return -EINVAL; in vmw_cmd_check()
3719 return -EPERM; in vmw_cmd_check()
3723 return -EINVAL; in vmw_cmd_check()
3727 return -EINVAL; in vmw_cmd_check()
3737 sw_context->buf_start = buf; in vmw_cmd_check_all()
3745 cur_size -= size; in vmw_cmd_check_all()
3750 return -EINVAL; in vmw_cmd_check_all()
3759 INIT_LIST_HEAD(&sw_context->bo_relocations); in vmw_free_relocations()
3767 list_for_each_entry(reloc, &sw_context->bo_relocations, head) { in vmw_apply_relocations()
3768 bo = &reloc->vbo->tbo; in vmw_apply_relocations()
3769 switch (bo->resource->mem_type) { in vmw_apply_relocations()
3771 reloc->location->offset += bo->resource->start << PAGE_SHIFT; in vmw_apply_relocations()
3772 reloc->location->gmrId = SVGA_GMR_FRAMEBUFFER; in vmw_apply_relocations()
3775 reloc->location->gmrId = bo->resource->start; in vmw_apply_relocations()
3778 *reloc->mob_loc = bo->resource->start; in vmw_apply_relocations()
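
The three patch statements above correspond to where the buffer object landed after validation; a hedged reconstruction of the complete switch (the placement case labels are assumptions, the assignments are verbatim from the listing):

	list_for_each_entry(reloc, &sw_context->bo_relocations, head) {
		bo = &reloc->vbo->tbo;
		switch (bo->resource->mem_type) {
		case TTM_PL_VRAM:
			reloc->location->offset +=
				bo->resource->start << PAGE_SHIFT;
			reloc->location->gmrId = SVGA_GMR_FRAMEBUFFER;
			break;
		case VMW_PL_GMR:
			reloc->location->gmrId = bo->resource->start;
			break;
		case VMW_PL_MOB:
			*reloc->mob_loc = bo->resource->start;
			break;
		default:
			break;	/* unexpected placement */
		}
	}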
3790 if (likely(sw_context->cmd_bounce_size >= size)) in vmw_resize_cmd_bounce()
3793 if (sw_context->cmd_bounce_size == 0) in vmw_resize_cmd_bounce()
3794 sw_context->cmd_bounce_size = VMWGFX_CMD_BOUNCE_INIT_SIZE; in vmw_resize_cmd_bounce()
3796 while (sw_context->cmd_bounce_size < size) { in vmw_resize_cmd_bounce()
3797 sw_context->cmd_bounce_size = in vmw_resize_cmd_bounce()
3798 PAGE_ALIGN(sw_context->cmd_bounce_size + in vmw_resize_cmd_bounce()
3799 (sw_context->cmd_bounce_size >> 1)); in vmw_resize_cmd_bounce()
3802 vfree(sw_context->cmd_bounce); in vmw_resize_cmd_bounce()
3803 sw_context->cmd_bounce = vmalloc(sw_context->cmd_bounce_size); in vmw_resize_cmd_bounce()
3805 if (sw_context->cmd_bounce == NULL) { in vmw_resize_cmd_bounce()
3807 sw_context->cmd_bounce_size = 0; in vmw_resize_cmd_bounce()
3808 return -ENOMEM; in vmw_resize_cmd_bounce()
3815 * vmw_execbuf_fence_commands - create and submit a command stream fence
3844 ret = vmw_user_fence_create(file_priv, dev_priv->fman, in vmw_execbuf_fence_commands()
3847 ret = vmw_fence_create(dev_priv->fman, sequence, p_fence); in vmw_execbuf_fence_commands()
3859 * vmw_execbuf_copy_fence_user - copy fence object information to user-space.
3867 * @fence_handle: User-space fence handle.
3868 * @out_fence_fd: exported file descriptor for the fence. -1 if not used
3870 * This function copies fence information to user-space. If copying fails, the
3871 * user-space struct drm_vmw_fence_rep::error member is hopefully left
3872 * untouched, and if it's preloaded with an -EFAULT by user-space, the error
3875 * Also if copying fails, user-space will be unable to signal the fence object
3876 * so we wait for it immediately, and then unreference the user-space reference.
3898 fence_rep.seqno = fence->base.seqno; in vmw_execbuf_copy_fence_user()
3900 fence_rep.passed_seqno = dev_priv->last_read_seqno; in vmw_execbuf_copy_fence_user()
3905 * fence_rep::error filled in. Typically user-space would have pre-set in vmw_execbuf_copy_fence_user()
3906 * that member to -EFAULT. in vmw_execbuf_copy_fence_user()
3912 * User-space lost the fence object. We need to sync and unreference the in vmw_execbuf_copy_fence_user()
3916 ttm_ref_object_base_unref(vmw_fp->tfile, fence_handle); in vmw_execbuf_copy_fence_user()
3922 return ret ? -EFAULT : 0; in vmw_execbuf_copy_fence_user()
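
Seen from user-space, the comment above implies the caller should pre-load the error member before submitting, so a failed copy-out is detectable. A rough illustrative sketch against the vmwgfx uapi (struct and field names from include/uapi/drm/vmwgfx_drm.h; fd, cmds and cmd_size are hypothetical caller state, and this is not the driver's own code):

#include <drm/drm.h>
#include <drm/vmwgfx_drm.h>
#include <xf86drm.h>

/* Driver-private ioctl number built locally for the sketch. */
#define MY_IOCTL_VMW_EXECBUF \
	DRM_IOWR(DRM_COMMAND_BASE + DRM_VMW_EXECBUF, struct drm_vmw_execbuf_arg)

struct drm_vmw_fence_rep fence_rep = { .error = -EFAULT };
struct drm_vmw_execbuf_arg arg = {
	.commands     = (unsigned long)cmds,
	.command_size = cmd_size,
	.fence_rep    = (unsigned long)&fence_rep,
	.version      = DRM_VMW_EXECBUF_VERSION,
};

if (drmIoctl(fd, MY_IOCTL_VMW_EXECBUF, &arg) == 0 && fence_rep.error == 0) {
	/* fence_rep.handle / fence_rep.seqno now describe a usable fence */
}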
3926 * vmw_execbuf_submit_fifo - Patch a command batch and submit it using the fifo.
3942 if (sw_context->dx_ctx_node) in vmw_execbuf_submit_fifo()
3944 sw_context->dx_ctx_node->ctx->id); in vmw_execbuf_submit_fifo()
3949 return -ENOMEM; in vmw_execbuf_submit_fifo()
3953 vmw_resource_relocations_apply(cmd, &sw_context->res_relocations); in vmw_execbuf_submit_fifo()
3954 vmw_resource_relocations_free(&sw_context->res_relocations); in vmw_execbuf_submit_fifo()
3961 * vmw_execbuf_submit_cmdbuf - Patch a command batch and submit it using the
3977 u32 id = ((sw_context->dx_ctx_node) ? sw_context->dx_ctx_node->ctx->id : in vmw_execbuf_submit_cmdbuf()
3979 void *cmd = vmw_cmdbuf_reserve(dev_priv->cman, command_size, id, false, in vmw_execbuf_submit_cmdbuf()
3983 vmw_resource_relocations_apply(cmd, &sw_context->res_relocations); in vmw_execbuf_submit_cmdbuf()
3984 vmw_resource_relocations_free(&sw_context->res_relocations); in vmw_execbuf_submit_cmdbuf()
3985 vmw_cmdbuf_commit(dev_priv->cman, command_size, header, false); in vmw_execbuf_submit_cmdbuf()
3991 * vmw_execbuf_cmdbuf - Prepare, if possible, a user-space command batch for
3995 * @user_commands: User-space pointer to the commands to be submitted.
4004 * command buffer and *@header is set to non-NULL.
4012 * -ERESTARTSYS casted to a pointer error value.
4025 return ERR_PTR(-EINVAL); in vmw_execbuf_cmdbuf()
4028 if (!dev_priv->cman || kernel_commands) in vmw_execbuf_cmdbuf()
4034 kernel_commands = vmw_cmdbuf_alloc(dev_priv->cman, cmdbuf_size, true, in vmw_execbuf_cmdbuf()
4044 return ERR_PTR(-EFAULT); in vmw_execbuf_cmdbuf()
4062 ret = vmw_validation_preload_res(sw_context->ctx, size); in vmw_execbuf_tie_context()
4067 (dev_priv, sw_context->fp->tfile, handle, in vmw_execbuf_tie_context()
4082 sw_context->dx_ctx_node = vmw_execbuf_info_from_res(sw_context, res); in vmw_execbuf_tie_context()
4083 sw_context->man = vmw_context_res_man(res); in vmw_execbuf_tie_context()
4097 struct vmw_sw_context *sw_context = &dev_priv->ctx; in vmw_execbuf_process()
4102 int32_t out_fence_fd = -1; in vmw_execbuf_process()
4126 ret = mutex_lock_interruptible(&dev_priv->cmdbuf_mutex); in vmw_execbuf_process()
4128 ret = -ERESTARTSYS; in vmw_execbuf_process()
4132 sw_context->kernel = false; in vmw_execbuf_process()
4138 ret = copy_from_user(sw_context->cmd_bounce, user_commands, in vmw_execbuf_process()
4141 ret = -EFAULT; in vmw_execbuf_process()
4146 kernel_commands = sw_context->cmd_bounce; in vmw_execbuf_process()
4148 sw_context->kernel = true; in vmw_execbuf_process()
4151 sw_context->filp = file_priv; in vmw_execbuf_process()
4152 sw_context->fp = vmw_fpriv(file_priv); in vmw_execbuf_process()
4153 INIT_LIST_HEAD(&sw_context->ctx_list); in vmw_execbuf_process()
4154 sw_context->cur_query_bo = dev_priv->pinned_bo; in vmw_execbuf_process()
4155 sw_context->last_query_ctx = NULL; in vmw_execbuf_process()
4156 sw_context->needs_post_query_barrier = false; in vmw_execbuf_process()
4157 sw_context->dx_ctx_node = NULL; in vmw_execbuf_process()
4158 sw_context->dx_query_mob = NULL; in vmw_execbuf_process()
4159 sw_context->dx_query_ctx = NULL; in vmw_execbuf_process()
4160 memset(sw_context->res_cache, 0, sizeof(sw_context->res_cache)); in vmw_execbuf_process()
4161 INIT_LIST_HEAD(&sw_context->res_relocations); in vmw_execbuf_process()
4162 INIT_LIST_HEAD(&sw_context->bo_relocations); in vmw_execbuf_process()
4164 if (sw_context->staged_bindings) in vmw_execbuf_process()
4165 vmw_binding_state_reset(sw_context->staged_bindings); in vmw_execbuf_process()
4167 INIT_LIST_HEAD(&sw_context->staged_cmd_res); in vmw_execbuf_process()
4168 sw_context->ctx = &val_ctx; in vmw_execbuf_process()
4196 ret = mutex_lock_interruptible(&dev_priv->binding_mutex); in vmw_execbuf_process()
4198 ret = -ERESTARTSYS; in vmw_execbuf_process()
4202 if (dev_priv->has_mob) { in vmw_execbuf_process()
4216 mutex_unlock(&dev_priv->binding_mutex); in vmw_execbuf_process()
4226 * user-space in @fence_rep in vmw_execbuf_process()
4235 vmw_validation_bo_fence(sw_context->ctx, fence); in vmw_execbuf_process()
4237 if (unlikely(dev_priv->pinned_bo != NULL && !dev_priv->query_cid_valid)) in vmw_execbuf_process()
4247 sync_file = sync_file_create(&fence->base); in vmw_execbuf_process()
4251 out_fence_fd = -1; in vmw_execbuf_process()
4264 fput(sync_file->file); in vmw_execbuf_process()
4268 fd_install(out_fence_fd, sync_file->file); in vmw_execbuf_process()
4280 vmw_cmdbuf_res_commit(&sw_context->staged_cmd_res); in vmw_execbuf_process()
4281 mutex_unlock(&dev_priv->cmdbuf_mutex); in vmw_execbuf_process()
4292 mutex_unlock(&dev_priv->binding_mutex); in vmw_execbuf_process()
4298 vmw_resource_relocations_free(&sw_context->res_relocations); in vmw_execbuf_process()
4300 if (unlikely(dev_priv->pinned_bo != NULL && !dev_priv->query_cid_valid)) in vmw_execbuf_process()
4303 vmw_cmdbuf_res_revert(&sw_context->staged_cmd_res); in vmw_execbuf_process()
4305 WARN_ON(!list_empty(&sw_context->ctx_list)); in vmw_execbuf_process()
4306 mutex_unlock(&dev_priv->cmdbuf_mutex); in vmw_execbuf_process()
4324 * vmw_execbuf_unpin_panic - Idle the fifo and unpin the query buffer.
4337 vmw_bo_pin_reserved(dev_priv->pinned_bo, false); in vmw_execbuf_unpin_panic()
4338 if (dev_priv->dummy_query_bo_pinned) { in vmw_execbuf_unpin_panic()
4339 vmw_bo_pin_reserved(dev_priv->dummy_query_bo, false); in vmw_execbuf_unpin_panic()
4340 dev_priv->dummy_query_bo_pinned = false; in vmw_execbuf_unpin_panic()
4346 * __vmw_execbuf_release_pinned_bo - Flush queries and unpin the pinned query
4350 * @fence: If non-NULL should point to a struct vmw_fence_obj issued _after_ a
4352 * by @dev_priv->pinned_bo
4365 * the @dev_priv->cmdbuf_mutex needs to be held by the current thread before
4375 if (dev_priv->pinned_bo == NULL) in __vmw_execbuf_release_pinned_bo()
4378 vmw_bo_placement_set(dev_priv->pinned_bo, in __vmw_execbuf_release_pinned_bo()
4381 ret = vmw_validation_add_bo(&val_ctx, dev_priv->pinned_bo); in __vmw_execbuf_release_pinned_bo()
4385 vmw_bo_placement_set(dev_priv->dummy_query_bo, in __vmw_execbuf_release_pinned_bo()
4388 ret = vmw_validation_add_bo(&val_ctx, dev_priv->dummy_query_bo); in __vmw_execbuf_release_pinned_bo()
4396 if (dev_priv->query_cid_valid) { in __vmw_execbuf_release_pinned_bo()
4398 ret = vmw_cmd_emit_dummy_query(dev_priv, dev_priv->query_cid); in __vmw_execbuf_release_pinned_bo()
4401 dev_priv->query_cid_valid = false; in __vmw_execbuf_release_pinned_bo()
4404 vmw_bo_pin_reserved(dev_priv->pinned_bo, false); in __vmw_execbuf_release_pinned_bo()
4405 if (dev_priv->dummy_query_bo_pinned) { in __vmw_execbuf_release_pinned_bo()
4406 vmw_bo_pin_reserved(dev_priv->dummy_query_bo, false); in __vmw_execbuf_release_pinned_bo()
4407 dev_priv->dummy_query_bo_pinned = false; in __vmw_execbuf_release_pinned_bo()
4419 vmw_bo_unreference(&dev_priv->pinned_bo); in __vmw_execbuf_release_pinned_bo()
4428 vmw_bo_unreference(&dev_priv->pinned_bo); in __vmw_execbuf_release_pinned_bo()
4432 * vmw_execbuf_release_pinned_bo - Flush queries and unpin the pinned query bo.
4449 mutex_lock(&dev_priv->cmdbuf_mutex); in vmw_execbuf_release_pinned_bo()
4450 if (dev_priv->query_cid_valid) in vmw_execbuf_release_pinned_bo()
4452 mutex_unlock(&dev_priv->cmdbuf_mutex); in vmw_execbuf_release_pinned_bo()
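
Lines 4449-4452 are essentially the whole public wrapper; reassembled into one piece for readability (a sketch, the query_cid_valid guard is taken straight from the listing):

void vmw_execbuf_release_pinned_bo(struct vmw_private *dev_priv)
{
	mutex_lock(&dev_priv->cmdbuf_mutex);
	if (dev_priv->query_cid_valid)
		__vmw_execbuf_release_pinned_bo(dev_priv, NULL);
	mutex_unlock(&dev_priv->cmdbuf_mutex);
}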
4468 * We take different code paths depending on the value of arg->version. in vmw_execbuf_ioctl()
4472 if (unlikely(arg->version > DRM_VMW_EXECBUF_VERSION || in vmw_execbuf_ioctl()
4473 arg->version == 0)) { in vmw_execbuf_ioctl()
4475 ret = -EINVAL; in vmw_execbuf_ioctl()
4479 switch (arg->version) { in vmw_execbuf_ioctl()
4482 arg->context_handle = (uint32_t) -1; in vmw_execbuf_ioctl()
4491 if (arg->flags & DRM_VMW_EXECBUF_FLAG_IMPORT_FENCE_FD) { in vmw_execbuf_ioctl()
4492 in_fence = sync_file_get_fence(arg->imported_fence_fd); in vmw_execbuf_ioctl()
4496 ret = -EINVAL; in vmw_execbuf_ioctl()
4506 (void __user *)(unsigned long)arg->commands, in vmw_execbuf_ioctl()
4507 NULL, arg->command_size, arg->throttle_us, in vmw_execbuf_ioctl()
4508 arg->context_handle, in vmw_execbuf_ioctl()
4509 (void __user *)(unsigned long)arg->fence_rep, in vmw_execbuf_ioctl()
4510 NULL, arg->flags); in vmw_execbuf_ioctl()