Lines Matching full:tunnel
301 const char *drm_dp_tunnel_name(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_name() argument
303 return tunnel->name; in drm_dp_tunnel_name()
320 * A tunnel group with 0 group ID shouldn't have more than one in lookup_or_alloc_group()
333 "DPTUN: Can't allocate more tunnel groups\n"); in lookup_or_alloc_group()
364 tunnel_get(struct drm_dp_tunnel *tunnel) in tunnel_get() argument
366 kref_get(&tunnel->kref); in tunnel_get()
368 return tunnel; in tunnel_get()
373 struct drm_dp_tunnel *tunnel = container_of(kref, typeof(*tunnel), kref); in free_tunnel() local
374 struct drm_dp_tunnel_group *group = tunnel->group; in free_tunnel()
376 list_del(&tunnel->node); in free_tunnel()
380 kfree(tunnel); in free_tunnel()
383 static void tunnel_put(struct drm_dp_tunnel *tunnel) in tunnel_put() argument
385 kref_put(&tunnel->kref, free_tunnel); in tunnel_put()
389 static void track_tunnel_ref(struct drm_dp_tunnel *tunnel, in track_tunnel_ref() argument
392 ref_tracker_alloc(&tunnel->group->mgr->ref_tracker, in track_tunnel_ref()
396 static void untrack_tunnel_ref(struct drm_dp_tunnel *tunnel, in untrack_tunnel_ref() argument
399 ref_tracker_free(&tunnel->group->mgr->ref_tracker, in untrack_tunnel_ref()
403 static void track_tunnel_ref(struct drm_dp_tunnel *tunnel, in track_tunnel_ref() argument
408 static void untrack_tunnel_ref(struct drm_dp_tunnel *tunnel, in untrack_tunnel_ref() argument
415 * drm_dp_tunnel_get - Get a reference for a DP tunnel
416 * @tunnel: Tunnel object
419 * Get a reference for @tunnel, along with a debug tracker to help locating
423 * passing @tunnel and *@tracker returned from here.
425 * Returns @tunnel - as a convenience - along with *@tracker.
428 drm_dp_tunnel_get(struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_get() argument
431 track_tunnel_ref(tunnel, tracker); in drm_dp_tunnel_get()
433 return tunnel_get(tunnel); in drm_dp_tunnel_get()
438 * drm_dp_tunnel_put - Put a reference for a DP tunnel
439 * @tunnel: Tunnel object
442 * Put a reference for @tunnel along with its debug *@tracker, which
445 void drm_dp_tunnel_put(struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_put() argument
448 untrack_tunnel_ref(tunnel, tracker); in drm_dp_tunnel_put()
450 tunnel_put(tunnel); in drm_dp_tunnel_put()
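Taken together, drm_dp_tunnel_get() and drm_dp_tunnel_put() pair each tunnel reference with a ref_tracker debug cookie. A minimal sketch of how a caller might hold and drop such a reference (the my_encoder structure and its fields are hypothetical; the API is declared in <drm/display/drm_dp_tunnel.h>):

        /* Hypothetical per-encoder state holding one tracked tunnel reference. */
        struct my_encoder {
                struct drm_dp_tunnel *tunnel;
                struct ref_tracker *tunnel_tracker;
        };

        static void my_encoder_hold_tunnel(struct my_encoder *enc,
                                           struct drm_dp_tunnel *tunnel)
        {
                /* Returns @tunnel as a convenience; keep the tracker for the put. */
                enc->tunnel = drm_dp_tunnel_get(tunnel, &enc->tunnel_tracker);
        }

        static void my_encoder_drop_tunnel(struct my_encoder *enc)
        {
                /* Drop the reference together with its debug tracker. */
                drm_dp_tunnel_put(enc->tunnel, &enc->tunnel_tracker);
                enc->tunnel = NULL;
        }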
456 struct drm_dp_tunnel *tunnel) in add_tunnel_to_group() argument
464 tunnel->group = group; in add_tunnel_to_group()
465 list_add(&tunnel->node, &group->tunnels); in add_tunnel_to_group()
476 struct drm_dp_tunnel *tunnel; in create_tunnel() local
478 tunnel = kzalloc(sizeof(*tunnel), GFP_KERNEL); in create_tunnel()
479 if (!tunnel) in create_tunnel()
482 INIT_LIST_HEAD(&tunnel->node); in create_tunnel()
484 kref_init(&tunnel->kref); in create_tunnel()
486 tunnel->aux = aux; in create_tunnel()
488 tunnel->adapter_id = tunnel_reg(regs, DP_IN_ADAPTER_INFO) & DP_IN_ADAPTER_NUMBER_MASK; in create_tunnel()
490 snprintf(tunnel->name, sizeof(tunnel->name), "%d:%d:%d", in create_tunnel()
493 tunnel->adapter_id & ((1 << DP_IN_ADAPTER_NUMBER_BITS) - 1)); in create_tunnel()
495 tunnel->bw_granularity = tunnel_reg_bw_granularity(regs); in create_tunnel()
496 tunnel->allocated_bw = tunnel_reg(regs, DP_ALLOCATED_BW) * in create_tunnel()
497 tunnel->bw_granularity; in create_tunnel()
505 if (!tunnel->allocated_bw) in create_tunnel()
506 tunnel->allocated_bw = -1; in create_tunnel()
508 tunnel->bw_alloc_supported = tunnel_reg_bw_alloc_supported(regs); in create_tunnel()
509 tunnel->bw_alloc_enabled = tunnel_reg_bw_alloc_enabled(regs); in create_tunnel()
511 if (!add_tunnel_to_group(mgr, drv_group_id, tunnel)) { in create_tunnel()
512 kfree(tunnel); in create_tunnel()
517 track_tunnel_ref(tunnel, &tunnel->tracker); in create_tunnel()
519 return tunnel; in create_tunnel()
522 static void destroy_tunnel(struct drm_dp_tunnel *tunnel) in destroy_tunnel() argument
524 untrack_tunnel_ref(tunnel, &tunnel->tracker); in destroy_tunnel()
525 tunnel_put(tunnel); in destroy_tunnel()
529 * drm_dp_tunnel_set_io_error - Set the IO error flag for a DP tunnel
530 * @tunnel: Tunnel object
532 * Set the IO error flag for @tunnel. Drivers can call this function upon
533 * detecting a failure that affects the tunnel functionality, for instance
534 * after a DP AUX transfer failure on the port @tunnel is connected to.
536 * This disables further management of @tunnel, including any related
538 * initiators of these. The driver is supposed to drop this tunnel and -
541 void drm_dp_tunnel_set_io_error(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_set_io_error() argument
543 tunnel->has_io_error = true; in drm_dp_tunnel_set_io_error()
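A sketch of the intended call site, flagging the tunnel when an AUX transfer on its port fails (my_connector and its aux/tunnel fields are hypothetical):

        static int my_read_sink_count(struct my_connector *conn, u8 *count)
        {
                if (drm_dp_dpcd_readb(conn->aux, DP_SINK_COUNT, count) < 0) {
                        /* Stop managing the tunnel; later calls will bail out. */
                        drm_dp_tunnel_set_io_error(conn->tunnel);
                        return -EIO;
                }

                return 0;
        }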
615 static int tunnel_allocated_bw(const struct drm_dp_tunnel *tunnel) in tunnel_allocated_bw() argument
617 return max(tunnel->allocated_bw, 0); in tunnel_allocated_bw()
620 static bool tunnel_info_changes_are_valid(struct drm_dp_tunnel *tunnel, in tunnel_info_changes_are_valid() argument
627 if (tunnel->bw_alloc_supported != tunnel_reg_bw_alloc_supported(regs)) { in tunnel_info_changes_are_valid()
628 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
630 str_yes_no(tunnel->bw_alloc_supported), in tunnel_info_changes_are_valid()
636 if (tunnel->group->drv_group_id != new_drv_group_id) { in tunnel_info_changes_are_valid()
637 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
639 tunnel_group_drv_id(tunnel->group->drv_group_id), in tunnel_info_changes_are_valid()
640 tunnel_group_id(tunnel->group->drv_group_id), in tunnel_info_changes_are_valid()
647 if (!tunnel->bw_alloc_supported) in tunnel_info_changes_are_valid()
650 if (tunnel->bw_granularity != tunnel_reg_bw_granularity(regs)) { in tunnel_info_changes_are_valid()
651 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
653 DPTUN_BW_ARG(tunnel->bw_granularity), in tunnel_info_changes_are_valid()
665 tunnel_allocated_bw(tunnel) != in tunnel_info_changes_are_valid()
666 tunnel_reg(regs, DP_ALLOCATED_BW) * tunnel->bw_granularity) { in tunnel_info_changes_are_valid()
667 tun_dbg(tunnel, in tunnel_info_changes_are_valid()
669 DPTUN_BW_ARG(tunnel->allocated_bw), in tunnel_info_changes_are_valid()
670 DPTUN_BW_ARG(tunnel_reg(regs, DP_ALLOCATED_BW) * tunnel->bw_granularity)); in tunnel_info_changes_are_valid()
679 read_and_verify_tunnel_regs(struct drm_dp_tunnel *tunnel, in read_and_verify_tunnel_regs() argument
685 err = read_tunnel_regs(tunnel->aux, regs); in read_and_verify_tunnel_regs()
687 drm_dp_tunnel_set_io_error(tunnel); in read_and_verify_tunnel_regs()
692 if (!tunnel_regs_are_valid(tunnel->group->mgr, regs, flags)) in read_and_verify_tunnel_regs()
695 if (!tunnel_info_changes_are_valid(tunnel, regs, flags)) in read_and_verify_tunnel_regs()
701 static bool update_dprx_caps(struct drm_dp_tunnel *tunnel, const struct drm_dp_tunnel_regs *regs) in update_dprx_caps() argument
705 if (tunnel_reg_max_dprx_rate(regs) != tunnel->max_dprx_rate) { in update_dprx_caps()
706 tunnel->max_dprx_rate = tunnel_reg_max_dprx_rate(regs); in update_dprx_caps()
710 if (tunnel_reg_max_dprx_lane_count(regs) != tunnel->max_dprx_lane_count) { in update_dprx_caps()
711 tunnel->max_dprx_lane_count = tunnel_reg_max_dprx_lane_count(regs); in update_dprx_caps()
726 static int get_max_dprx_bw(const struct drm_dp_tunnel *tunnel) in get_max_dprx_bw() argument
728 int max_dprx_bw = drm_dp_max_dprx_data_rate(tunnel->max_dprx_rate, in get_max_dprx_bw()
729 tunnel->max_dprx_lane_count); in get_max_dprx_bw()
732 * A BW request of roundup(max_dprx_bw, tunnel->bw_granularity) results in in get_max_dprx_bw()
736 return min(roundup(max_dprx_bw, tunnel->bw_granularity), in get_max_dprx_bw()
737 MAX_DP_REQUEST_BW * tunnel->bw_granularity); in get_max_dprx_bw()
740 static int get_max_tunnel_bw(const struct drm_dp_tunnel *tunnel) in get_max_tunnel_bw() argument
742 return min(get_max_dprx_bw(tunnel), tunnel->group->available_bw); in get_max_tunnel_bw()
746 * drm_dp_tunnel_detect - Detect DP tunnel on the link
747 * @mgr: Tunnel manager
748 * @aux: DP AUX on which the tunnel will be detected
750 * Detect if there is any DP tunnel on the link and add it to the tunnel
751 * group's tunnel list.
753 * Returns a pointer to a tunnel on success, or an ERR_PTR() error on
761 struct drm_dp_tunnel *tunnel; in drm_dp_tunnel_detect() local
776 tunnel = create_tunnel(mgr, aux, &regs); in drm_dp_tunnel_detect()
777 if (!tunnel) in drm_dp_tunnel_detect()
780 tun_dbg(tunnel, in drm_dp_tunnel_detect()
794 str_yes_no(tunnel->bw_alloc_supported), in drm_dp_tunnel_detect()
795 str_yes_no(tunnel->bw_alloc_enabled)); in drm_dp_tunnel_detect()
797 return tunnel; in drm_dp_tunnel_detect()
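A sketch of the detection step during sink probing; note the ERR_PTR() convention on failure (struct my_connector is hypothetical):

        static int my_connector_detect_tunnel(struct my_connector *conn)
        {
                struct drm_dp_tunnel *tunnel;

                tunnel = drm_dp_tunnel_detect(conn->tunnel_mgr, conn->aux);
                if (IS_ERR(tunnel))
                        return PTR_ERR(tunnel); /* no tunnel found, or an IO error */

                conn->tunnel = tunnel;

                return 0;
        }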
802 * drm_dp_tunnel_destroy - Destroy tunnel object
803 * @tunnel: Tunnel object
805 * Remove the tunnel from the tunnel topology and destroy it.
807 * Returns 0 on success, -ENODEV if the tunnel has been destroyed already.
809 int drm_dp_tunnel_destroy(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_destroy() argument
811 if (!tunnel) in drm_dp_tunnel_destroy()
814 if (drm_WARN_ON(tunnel->group->mgr->dev, tunnel->destroyed)) in drm_dp_tunnel_destroy()
817 tun_dbg(tunnel, "destroying\n"); in drm_dp_tunnel_destroy()
819 tunnel->destroyed = true; in drm_dp_tunnel_destroy()
820 destroy_tunnel(tunnel); in drm_dp_tunnel_destroy()
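The matching teardown in a driver could look like this sketch, dropping the tunnel once the sink disappears:

        static void my_connector_remove_tunnel(struct my_connector *conn)
        {
                drm_dp_tunnel_destroy(conn->tunnel);
                conn->tunnel = NULL;
        }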
826 static int check_tunnel(const struct drm_dp_tunnel *tunnel) in check_tunnel() argument
828 if (tunnel->destroyed) in check_tunnel()
831 if (tunnel->has_io_error) in check_tunnel()
839 struct drm_dp_tunnel *tunnel; in group_allocated_bw() local
842 for_each_tunnel_in_group(group, tunnel) { in group_allocated_bw()
843 if (check_tunnel(tunnel) == 0 && in group_allocated_bw()
844 tunnel->bw_alloc_enabled) in group_allocated_bw()
845 group_allocated_bw += tunnel_allocated_bw(tunnel); in group_allocated_bw()
852 * The estimated BW reported by the TBT Connection Manager for each tunnel in
853 * a group includes the BW already allocated for the given tunnel and the
854 * unallocated BW which is free to be used by any tunnel in the group.
856 static int group_free_bw(const struct drm_dp_tunnel *tunnel) in group_free_bw() argument
858 return tunnel->estimated_bw - tunnel_allocated_bw(tunnel); in group_free_bw()
861 static int calc_group_available_bw(const struct drm_dp_tunnel *tunnel) in calc_group_available_bw() argument
863 return group_allocated_bw(tunnel->group) + in calc_group_available_bw()
864 group_free_bw(tunnel); in calc_group_available_bw()
867 static int update_group_available_bw(struct drm_dp_tunnel *tunnel, in update_group_available_bw() argument
874 tunnel->estimated_bw = tunnel_reg(regs, DP_ESTIMATED_BW) * tunnel->bw_granularity; in update_group_available_bw()
876 if (calc_group_available_bw(tunnel) == tunnel->group->available_bw) in update_group_available_bw()
879 for_each_tunnel_in_group(tunnel->group, tunnel_iter) { in update_group_available_bw()
882 if (tunnel_iter == tunnel) in update_group_available_bw()
898 group_available_bw = calc_group_available_bw(tunnel); in update_group_available_bw()
900 tun_dbg(tunnel, "Updated group available BW: %d->%d\n", in update_group_available_bw()
901 DPTUN_BW_ARG(tunnel->group->available_bw), in update_group_available_bw()
904 changed = tunnel->group->available_bw != group_available_bw; in update_group_available_bw()
906 tunnel->group->available_bw = group_available_bw; in update_group_available_bw()
911 static int set_bw_alloc_mode(struct drm_dp_tunnel *tunnel, bool enable) in set_bw_alloc_mode() argument
916 if (drm_dp_dpcd_readb(tunnel->aux, DP_DPTX_BW_ALLOCATION_MODE_CONTROL, &val) < 0) in set_bw_alloc_mode()
924 if (drm_dp_dpcd_writeb(tunnel->aux, DP_DPTX_BW_ALLOCATION_MODE_CONTROL, val) < 0) in set_bw_alloc_mode()
927 tunnel->bw_alloc_enabled = enable; in set_bw_alloc_mode()
932 drm_dp_tunnel_set_io_error(tunnel); in set_bw_alloc_mode()
938 * drm_dp_tunnel_enable_bw_alloc - Enable DP tunnel BW allocation mode
939 * @tunnel: Tunnel object
941 * Enable the DP tunnel BW allocation mode on @tunnel if it supports it.
945 int drm_dp_tunnel_enable_bw_alloc(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_enable_bw_alloc() argument
950 err = check_tunnel(tunnel); in drm_dp_tunnel_enable_bw_alloc()
954 if (!tunnel->bw_alloc_supported) in drm_dp_tunnel_enable_bw_alloc()
957 if (!tunnel_group_id(tunnel->group->drv_group_id)) in drm_dp_tunnel_enable_bw_alloc()
960 err = set_bw_alloc_mode(tunnel, true); in drm_dp_tunnel_enable_bw_alloc()
971 err = read_and_verify_tunnel_regs(tunnel, &regs, in drm_dp_tunnel_enable_bw_alloc()
974 set_bw_alloc_mode(tunnel, false); in drm_dp_tunnel_enable_bw_alloc()
979 if (!tunnel->max_dprx_rate) in drm_dp_tunnel_enable_bw_alloc()
980 update_dprx_caps(tunnel, &regs); in drm_dp_tunnel_enable_bw_alloc()
982 if (tunnel->group->available_bw == -1) { in drm_dp_tunnel_enable_bw_alloc()
983 err = update_group_available_bw(tunnel, &regs); in drm_dp_tunnel_enable_bw_alloc()
988 tun_dbg_stat(tunnel, err, in drm_dp_tunnel_enable_bw_alloc()
990 tunnel->max_dprx_rate / 100, tunnel->max_dprx_lane_count, in drm_dp_tunnel_enable_bw_alloc()
991 DPTUN_BW_ARG(group_allocated_bw(tunnel->group)), in drm_dp_tunnel_enable_bw_alloc()
992 DPTUN_BW_ARG(tunnel->group->available_bw)); in drm_dp_tunnel_enable_bw_alloc()
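A sketch of opting in to the BW allocation mode after detection; enabling can fail if the mode is unsupported or the post-enable register check fails, in which case the tunnel keeps working with driver-unmanaged BW (conn->bw_managed is a hypothetical flag):

        static void my_connector_setup_bw_alloc(struct my_connector *conn)
        {
                if (drm_dp_tunnel_enable_bw_alloc(conn->tunnel) == 0)
                        conn->bw_managed = true;
        }

At teardown the mode is switched off again with drm_dp_tunnel_disable_bw_alloc(), which also resets the tunnel's allocated-BW bookkeeping.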
999 * drm_dp_tunnel_disable_bw_alloc - Disable DP tunnel BW allocation mode
1000 * @tunnel: Tunnel object
1002 * Disable the DP tunnel BW allocation mode on @tunnel.
1006 int drm_dp_tunnel_disable_bw_alloc(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_disable_bw_alloc() argument
1010 err = check_tunnel(tunnel); in drm_dp_tunnel_disable_bw_alloc()
1014 tunnel->allocated_bw = -1; in drm_dp_tunnel_disable_bw_alloc()
1016 err = set_bw_alloc_mode(tunnel, false); in drm_dp_tunnel_disable_bw_alloc()
1018 tun_dbg_stat(tunnel, err, "Disabling BW alloc mode"); in drm_dp_tunnel_disable_bw_alloc()
1026 * @tunnel: Tunnel object
1028 * Query if the BW allocation mode is enabled for @tunnel.
1030 * Returns %true if the BW allocation mode is enabled for @tunnel.
1032 bool drm_dp_tunnel_bw_alloc_is_enabled(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_bw_alloc_is_enabled() argument
1034 return tunnel && tunnel->bw_alloc_enabled; in drm_dp_tunnel_bw_alloc_is_enabled()
1072 static int allocate_tunnel_bw(struct drm_dp_tunnel *tunnel, int bw) in allocate_tunnel_bw() argument
1074 struct drm_dp_tunnel_mgr *mgr = tunnel->group->mgr; in allocate_tunnel_bw()
1075 int request_bw = DIV_ROUND_UP(bw, tunnel->bw_granularity); in allocate_tunnel_bw()
1085 if (request_bw * tunnel->bw_granularity == tunnel->allocated_bw) in allocate_tunnel_bw()
1094 err = clear_bw_req_state(tunnel->aux); in allocate_tunnel_bw()
1098 if (drm_dp_dpcd_writeb(tunnel->aux, DP_REQUEST_BW, request_bw) < 0) { in allocate_tunnel_bw()
1109 err = bw_req_complete(tunnel->aux, &status_changed); in allocate_tunnel_bw()
1116 err = read_and_verify_tunnel_regs(tunnel, &regs, in allocate_tunnel_bw()
1135 tunnel->allocated_bw = request_bw * tunnel->bw_granularity; in allocate_tunnel_bw()
1138 tun_dbg_stat(tunnel, err, "Allocating %d/%d Mb/s for tunnel: Group alloc:%d/%d Mb/s", in allocate_tunnel_bw()
1139 DPTUN_BW_ARG(request_bw * tunnel->bw_granularity), in allocate_tunnel_bw()
1140 DPTUN_BW_ARG(get_max_tunnel_bw(tunnel)), in allocate_tunnel_bw()
1141 DPTUN_BW_ARG(group_allocated_bw(tunnel->group)), in allocate_tunnel_bw()
1142 DPTUN_BW_ARG(tunnel->group->available_bw)); in allocate_tunnel_bw()
1145 drm_dp_tunnel_set_io_error(tunnel); in allocate_tunnel_bw()
1151 * drm_dp_tunnel_alloc_bw - Allocate BW for a DP tunnel
1152 * @tunnel: Tunnel object
1155 * Allocate @bw kB/s for @tunnel. The allocated BW must be freed after use by
1156 * calling this function again for the same tunnel with @bw set to 0. in drm_dp_tunnel_alloc_bw()
1160 int drm_dp_tunnel_alloc_bw(struct drm_dp_tunnel *tunnel, int bw) in drm_dp_tunnel_alloc_bw() argument
1164 err = check_tunnel(tunnel); in drm_dp_tunnel_alloc_bw()
1168 return allocate_tunnel_bw(tunnel, bw); in drm_dp_tunnel_alloc_bw()
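A sketch of the allocate/free pairing around enabling a stream; the BW is in kB/s, and freeing is simply an allocation of 0 for the same tunnel (my_stream_bw_kBps() is a hypothetical helper):

        static int my_stream_enable(struct my_connector *conn,
                                    const struct drm_display_mode *mode)
        {
                return drm_dp_tunnel_alloc_bw(conn->tunnel,
                                              my_stream_bw_kBps(mode));
        }

        static void my_stream_disable(struct my_connector *conn)
        {
                drm_dp_tunnel_alloc_bw(conn->tunnel, 0);
        }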
1173 * drm_dp_tunnel_get_allocated_bw - Get the BW allocated for a DP tunnel
1174 * @tunnel: Tunnel object
1176 * Get the current BW allocated for @tunnel. After the tunnel is created /
1181 * Return the BW allocated for the tunnel, or -1 if the allocation is
1184 int drm_dp_tunnel_get_allocated_bw(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_get_allocated_bw() argument
1186 return tunnel->allocated_bw; in drm_dp_tunnel_get_allocated_bw()
1194 static int check_and_clear_status_change(struct drm_dp_tunnel *tunnel) in check_and_clear_status_change() argument
1199 if (drm_dp_dpcd_readb(tunnel->aux, DP_TUNNELING_STATUS, &val) < 0) in check_and_clear_status_change()
1205 if (drm_dp_dpcd_writeb(tunnel->aux, DP_TUNNELING_STATUS, val) < 0) in check_and_clear_status_change()
1211 if (!drm_dp_tunnel_bw_alloc_is_enabled(tunnel)) in check_and_clear_status_change()
1218 if (drm_dp_dpcd_readb(tunnel->aux, DP_ESTIMATED_BW, &val) < 0) in check_and_clear_status_change()
1221 if (val * tunnel->bw_granularity != tunnel->estimated_bw) in check_and_clear_status_change()
1227 drm_dp_tunnel_set_io_error(tunnel); in check_and_clear_status_change()
1233 * drm_dp_tunnel_update_state - Update DP tunnel SW state with the HW state
1234 * @tunnel: Tunnel object
1236 * Update the SW state of @tunnel with the HW state.
1241 int drm_dp_tunnel_update_state(struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_update_state() argument
1247 ret = check_tunnel(tunnel); in drm_dp_tunnel_update_state()
1251 ret = check_and_clear_status_change(tunnel); in drm_dp_tunnel_update_state()
1258 ret = read_and_verify_tunnel_regs(tunnel, &regs, 0); in drm_dp_tunnel_update_state()
1262 if (update_dprx_caps(tunnel, ®s)) in drm_dp_tunnel_update_state()
1265 ret = update_group_available_bw(tunnel, &regs); in drm_dp_tunnel_update_state()
1270 tun_dbg_stat(tunnel, ret < 0 ? ret : 0, in drm_dp_tunnel_update_state()
1271 "State update: Changed:%s DPRX:%dx%d Tunnel alloc:%d/%d Group alloc:%d/%d Mb/s", in drm_dp_tunnel_update_state()
1273 tunnel->max_dprx_rate / 100, tunnel->max_dprx_lane_count, in drm_dp_tunnel_update_state()
1274 DPTUN_BW_ARG(tunnel->allocated_bw), in drm_dp_tunnel_update_state()
1275 DPTUN_BW_ARG(get_max_tunnel_bw(tunnel)), in drm_dp_tunnel_update_state()
1276 DPTUN_BW_ARG(group_allocated_bw(tunnel->group)), in drm_dp_tunnel_update_state()
1277 DPTUN_BW_ARG(tunnel->group->available_bw)); in drm_dp_tunnel_update_state()
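A sketch of a short-HPD IRQ path tying drm_dp_tunnel_handle_irq() and drm_dp_tunnel_update_state() together; per the kernel-doc above, a positive return indicates a tunnel state change the driver must react to (my_schedule_modeset_retry() is hypothetical):

        static void my_handle_tunnel_irq(struct my_connector *conn)
        {
                if (drm_dp_tunnel_handle_irq(conn->tunnel_mgr, conn->aux) <= 0)
                        return;

                if (drm_dp_tunnel_update_state(conn->tunnel) > 0)
                        my_schedule_modeset_retry(conn);
        }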
1290 * drm_dp_tunnel_handle_irq - Handle DP tunnel IRQs
1292 * Handle any pending DP tunnel IRQs, waking up waiters for a completion
1295 * Returns 1 if the state of the tunnel has changed which requires calling
1317 * drm_dp_tunnel_max_dprx_rate - Query the maximum rate of the tunnel's DPRX
1318 * @tunnel: Tunnel object
1321 * to @tunnel. Note that this rate will not be limited by the BW limit of the
1322 * tunnel, as opposed to the standard and extended DP_MAX_LINK_RATE DPCD
1327 int drm_dp_tunnel_max_dprx_rate(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_max_dprx_rate() argument
1329 return tunnel->max_dprx_rate; in drm_dp_tunnel_max_dprx_rate()
1334 * drm_dp_tunnel_max_dprx_lane_count - Query the maximum lane count of the tunnel's DPRX
1335 * @tunnel: Tunnel object
1338 * to @tunnel. Note that this lane count will not be limited by the BW limit of
1339 * the tunnel, as opposed to the standard and extended DP_MAX_LANE_COUNT DPCD
1344 int drm_dp_tunnel_max_dprx_lane_count(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_max_dprx_lane_count() argument
1346 return tunnel->max_dprx_lane_count; in drm_dp_tunnel_max_dprx_lane_count()
1351 * drm_dp_tunnel_available_bw - Query the estimated total available BW of the tunnel
1352 * @tunnel: Tunnel object
1355 * tunnel. This includes the currently allocated and free BW for all the
1356 * tunnels in @tunnel's group. The available BW is valid only after the BW
1357 * allocation mode has been enabled for the tunnel and its state got updated
1360 * Returns the @tunnel group's estimated total available bandwidth in kB/s
1362 * not enabled or the tunnel's state hasn't been updated).
1364 int drm_dp_tunnel_available_bw(const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_available_bw() argument
1366 return tunnel->group->available_bw; in drm_dp_tunnel_available_bw()
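A sketch of using the three queries above when validating a configuration: the DPRX rate and lane-count caps bound the link parameters, while the group's available BW (-1 until BW allocation is enabled and the state updated) bounds the payload:

        static bool my_config_fits_tunnel(const struct drm_dp_tunnel *tunnel,
                                          int link_rate, int lane_count,
                                          int required_bw_kBps)
        {
                if (link_rate > drm_dp_tunnel_max_dprx_rate(tunnel) ||
                    lane_count > drm_dp_tunnel_max_dprx_lane_count(tunnel))
                        return false;

                if (drm_dp_tunnel_available_bw(tunnel) < 0)
                        return true; /* BW not managed: no group limit known */

                return required_bw_kBps <= drm_dp_tunnel_available_bw(tunnel);
        }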
1372 const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_group_state() argument
1376 &tunnel->group->base); in drm_dp_tunnel_atomic_get_group_state()
1381 struct drm_dp_tunnel *tunnel) in add_tunnel_state() argument
1385 tun_dbg_atomic(tunnel, in add_tunnel_state()
1386 "Adding state for tunnel %p to group state %p\n", in add_tunnel_state()
1387 tunnel, group_state); in add_tunnel_state()
1395 drm_dp_tunnel_ref_get(tunnel, &tunnel_state->tunnel_ref); in add_tunnel_state()
1405 tun_dbg_atomic(tunnel_state->tunnel_ref.tunnel, in free_tunnel_state()
1406 "Freeing state for tunnel %p\n", in free_tunnel_state()
1407 tunnel_state->tunnel_ref.tunnel); in free_tunnel_state()
1430 const struct drm_dp_tunnel *tunnel) in get_tunnel_state() argument
1435 if (tunnel_state->tunnel_ref.tunnel == tunnel) in get_tunnel_state()
1443 struct drm_dp_tunnel *tunnel) in get_or_add_tunnel_state() argument
1447 tunnel_state = get_tunnel_state(group_state, tunnel); in get_or_add_tunnel_state()
1451 return add_tunnel_state(group_state, tunnel); in get_or_add_tunnel_state()
1472 tunnel_state->tunnel_ref.tunnel); in tunnel_group_duplicate_state()
1505 * drm_dp_tunnel_atomic_get_state - get/allocate the new atomic state for a tunnel
1507 * @tunnel: Tunnel to get the state for
1509 * Get the new atomic state for @tunnel, duplicating it from the old tunnel
1516 struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_state() argument
1521 group_state = drm_dp_tunnel_atomic_get_group_state(state, tunnel); in drm_dp_tunnel_atomic_get_state()
1525 tunnel_state = get_or_add_tunnel_state(group_state, tunnel); in drm_dp_tunnel_atomic_get_state()
1534 * drm_dp_tunnel_atomic_get_old_state - get the old atomic state for a tunnel
1536 * @tunnel: Tunnel to get the state for
1538 * Get the old atomic state for @tunnel.
1540 * Return the old state or NULL if the tunnel's atomic state is not in @state.
1544 const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_old_state() argument
1550 if (to_group(old_group_state->base.obj) == tunnel->group) in drm_dp_tunnel_atomic_get_old_state()
1551 return get_tunnel_state(old_group_state, tunnel); in drm_dp_tunnel_atomic_get_old_state()
1558 * drm_dp_tunnel_atomic_get_new_state - get the new atomic state for a tunnel
1560 * @tunnel: Tunnel to get the state for
1562 * Get the new atomic state for @tunnel.
1564 * Return the new state or NULL if the tunnel's atomic state is not in @state.
1568 const struct drm_dp_tunnel *tunnel) in drm_dp_tunnel_atomic_get_new_state() argument
1574 if (to_group(new_group_state->base.obj) == tunnel->group) in drm_dp_tunnel_atomic_get_new_state()
1575 return get_tunnel_state(new_group_state, tunnel); in drm_dp_tunnel_atomic_get_new_state()
1616 tunnel_state->tunnel_ref.tunnel->name, in check_unique_stream_ids()
1691 * drm_dp_tunnel_atomic_set_stream_bw - Set the BW for a DP tunnel stream
1693 * @tunnel: DP tunnel containing the stream
1697 * Set a DP tunnel stream's required BW in the atomic state.
1702 struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_atomic_set_stream_bw() argument
1709 if (drm_WARN_ON(tunnel->group->mgr->dev, in drm_dp_tunnel_atomic_set_stream_bw()
1713 tun_dbg(tunnel, in drm_dp_tunnel_atomic_set_stream_bw()
1717 new_group_state = drm_dp_tunnel_atomic_get_group_state(state, tunnel); in drm_dp_tunnel_atomic_set_stream_bw()
1722 tunnel_state = get_tunnel_state(new_group_state, tunnel); in drm_dp_tunnel_atomic_set_stream_bw()
1729 tunnel_state = get_or_add_tunnel_state(new_group_state, tunnel); in drm_dp_tunnel_atomic_set_stream_bw()
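A sketch of the intended call site in an encoder's ->atomic_check() hook; the stream ID and the kB/s BW value are driver specific (enc->stream_id is hypothetical):

        static int my_encoder_check_tunnel_bw(struct my_encoder *enc,
                                              struct drm_atomic_state *state,
                                              int required_bw_kBps)
        {
                if (!enc->tunnel)
                        return 0;

                return drm_dp_tunnel_atomic_set_stream_bw(state, enc->tunnel,
                                                          enc->stream_id,
                                                          required_bw_kBps);
        }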
1744 * drm_dp_tunnel_atomic_get_required_bw - Get the BW required by a DP tunnel
1745 * @tunnel_state: Atomic state of the queried tunnel
1747 * Calculate the BW required by a tunnel by adding up the required BW of all in drm_dp_tunnel_atomic_get_required_bw()
1748 * the streams in the tunnel.
1750 * Return the total BW required by the tunnel.
1770 * @tunnel: Tunnel object
1771 * @stream_mask: Mask of streams in @tunnel's group
1773 * Get the mask of all the stream IDs in the tunnel group of @tunnel.
1779 const struct drm_dp_tunnel *tunnel, in drm_dp_tunnel_atomic_get_group_streams_in_state() argument
1785 group_state = drm_dp_tunnel_atomic_get_group_state(state, tunnel); in drm_dp_tunnel_atomic_get_group_streams_in_state()
1807 struct drm_dp_tunnel *tunnel = new_tunnel_state->tunnel_ref.tunnel; in drm_dp_tunnel_atomic_check_group_bw() local
1808 int max_dprx_bw = get_max_dprx_bw(tunnel); in drm_dp_tunnel_atomic_check_group_bw()
1811 tun_dbg(tunnel, in drm_dp_tunnel_atomic_check_group_bw()
1812 "%sRequired %d/%d Mb/s total for tunnel.\n", in drm_dp_tunnel_atomic_check_group_bw()
1822 group_bw += min(roundup(tunnel_bw, tunnel->bw_granularity), in drm_dp_tunnel_atomic_check_group_bw()
1828 "%sRequired %d/%d Mb/s total for tunnel group.\n", in drm_dp_tunnel_atomic_check_group_bw()
1846 * Check the required BW of each DP tunnel in @state against both the DPRX BW
1847 * limit of the tunnel and the BW limit of the tunnel group. Return a mask of
1849 * either all the streams in a tunnel (in case a DPRX BW limit check failed) or
1850 * all the streams in a tunnel group (in case a group BW limit check failed).
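A sketch of the driver's global atomic-check step built on this check (drm_dp_tunnel_atomic_check_stream_bws()): on -ENOSPC the returned mask names the streams whose BW must be lowered before retrying (my_reduce_stream_bws() is hypothetical):

        static int my_atomic_check_tunnel_bws(struct drm_atomic_state *state)
        {
                u32 failed_stream_mask;
                int err;

                err = drm_dp_tunnel_atomic_check_stream_bws(state,
                                                            &failed_stream_mask);
                if (err != -ENOSPC)
                        return err;

                return my_reduce_stream_bws(state, failed_stream_mask);
        }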
1893 * drm_dp_tunnel_mgr_create - Create a DP tunnel manager
1895 * @max_group_count: Maximum number of tunnel groups
1897 * Creates a DP tunnel manager for @dev.
1899 * Returns a pointer to the tunnel manager if created successfully or NULL in
1941 * drm_dp_tunnel_mgr_destroy - Destroy DP tunnel manager
1942 * @mgr: Tunnel manager object
1944 * Destroy the tunnel manager.
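Finally, a sketch of the manager's lifetime at the driver level; the group count is a driver choice (roughly one group per tunnel-capable port), and drm_dp_tunnel_mgr_create() returns NULL on failure (struct my_device and its fields are hypothetical):

        static int my_driver_tunnel_init(struct my_device *mydev)
        {
                mydev->tunnel_mgr = drm_dp_tunnel_mgr_create(&mydev->drm,
                                                             mydev->num_dp_ports);

                return mydev->tunnel_mgr ? 0 : -ENOMEM;
        }

        static void my_driver_tunnel_fini(struct my_device *mydev)
        {
                drm_dp_tunnel_mgr_destroy(mydev->tunnel_mgr);
        }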