Lines matching the identifier "vc4" in drivers/gpu/drm/vc4/vc4_hvs.c (format: line number, matched code, enclosing function; a trailing "local" or "argument" marks how the identifier is used on that line):
7 * DOC: VC4 HVS module.
72 struct drm_device *drm = &hvs->vc4->base; in vc4_hvs_dump_state()
98 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_debugfs_underrun() local
101 drm_printf(&p, "%d\n", atomic_read(&vc4->underrun)); in vc4_hvs_debugfs_underrun()
110 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_debugfs_dlist() local
111 struct vc4_hvs *hvs = vc4->hvs; in vc4_hvs_debugfs_dlist()
129 dlist_word = readl((u32 __iomem *)vc4->hvs->dlist + j); in vc4_hvs_debugfs_dlist()
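The matches for vc4_hvs_debugfs_dlist() show the display list being read back word by word through an __iomem pointer. Below is a minimal sketch of that access pattern; the helper name dump_dlist_words and the dlist_words bound are illustrative assumptions, and the real function derives its range from the allocated display-list nodes rather than a caller-supplied count.

#include <linux/io.h>
#include <drm/drm_print.h>

#include "vc4_drv.h"

/* Sketch: dump the HVS display-list SRAM one 32-bit word at a time.
 * "dlist_words" is a hypothetical bound supplied by the caller.
 */
static void dump_dlist_words(struct vc4_hvs *hvs, struct drm_printer *p,
			     unsigned int dlist_words)
{
	unsigned int i;

	for (i = 0; i < dlist_words; i++) {
		/* hvs->dlist is ioremapped memory, so every word goes
		 * through readl(), as in the matched line above.
		 */
		u32 word = readl((u32 __iomem *)hvs->dlist + i);

		drm_printf(p, "dlist[%04u]: 0x%08x\n", i, word);
	}
}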
194 drm_err(&hvs->vc4->base, "Failed to allocate space for filter kernel: %d\n", in vc4_hvs_upload_linear_kernel()
216 struct drm_device *drm = &hvs->vc4->base; in vc4_hvs_lut_load()
262 struct drm_device *drm = &hvs->vc4->base; in vc4_hvs_get_fifo_frame_count()
290 struct vc4_dev *vc4 = hvs->vc4; in vc4_hvs_get_fifo_from_output() local
294 if (!vc4->is_vc5) in vc4_hvs_get_fifo_from_output()
350 struct vc4_dev *vc4 = hvs->vc4; in vc4_hvs_init_channel() local
351 struct drm_device *drm = &vc4->base; in vc4_hvs_init_channel()
375 if (!vc4->is_vc5) { in vc4_hvs_init_channel()
397 ((!vc4->is_vc5) ? SCALER_DISPBKGND_GAMMA : 0) | in vc4_hvs_init_channel()
412 struct drm_device *drm = &hvs->vc4->base; in vc4_hvs_stop_channel()
446 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_atomic_check() local
464 spin_lock_irqsave(&vc4->hvs->mm_lock, flags); in vc4_hvs_atomic_check()
465 ret = drm_mm_insert_node(&vc4->hvs->dlist_mm, &vc4_state->mm, in vc4_hvs_atomic_check()
467 spin_unlock_irqrestore(&vc4->hvs->mm_lock, flags); in vc4_hvs_atomic_check()
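vc4_hvs_atomic_check() reserves display-list space from a drm_mm allocator under a spinlock, per the matches above. A hedged sketch of that allocation step follows; dlist_count stands in for the number of display-list words computed from the CRTC's planes, which the excerpt does not show.

#include <linux/spinlock.h>
#include <drm/drm_mm.h>

#include "vc4_drv.h"

/* Sketch: reserve "dlist_count" words of display-list memory for a CRTC
 * state.  The allocator and its lock live in struct vc4_hvs, as the
 * matched lines show (dlist_mm, mm_lock).
 */
static int reserve_dlist_space(struct vc4_hvs *hvs, struct drm_mm_node *node,
			       unsigned long dlist_count)
{
	unsigned long flags;
	int ret;

	/* Take the allocator lock with IRQs disabled, matching the
	 * locking shown in the matched lines.
	 */
	spin_lock_irqsave(&hvs->mm_lock, flags);
	ret = drm_mm_insert_node(&hvs->dlist_mm, node, dlist_count);
	spin_unlock_irqrestore(&hvs->mm_lock, flags);

	return ret;
}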
477 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_install_dlist() local
478 struct vc4_hvs *hvs = vc4->hvs; in vc4_hvs_install_dlist()
534 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_atomic_enable() local
541 vc4_hvs_init_channel(vc4->hvs, crtc, mode, oneshot); in vc4_hvs_atomic_enable()
548 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_atomic_disable() local
553 vc4_hvs_stop_channel(vc4->hvs, chan); in vc4_hvs_atomic_disable()
562 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_atomic_flush() local
563 struct vc4_hvs *hvs = vc4->hvs; in vc4_hvs_atomic_flush()
571 u32 __iomem *dlist_start = vc4->hvs->dlist + vc4_state->mm.start; in vc4_hvs_atomic_flush()
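vc4_hvs_atomic_flush() points dlist_start at the reserved region inside the HVS display-list memory (vc4_state->mm.start). The sketch below shows how words would be committed into that region with writel(); the copy_dlist_words() helper and its arguments are illustrative only, since the driver actually builds the list plane by plane.

#include <linux/io.h>
#include <linux/types.h>

/* Sketch: copy an in-memory display list into the HVS dlist SRAM.
 * "dst" would be vc4->hvs->dlist + vc4_state->mm.start, as in the
 * matched line above; "src" and "count" are hypothetical.
 */
static void copy_dlist_words(u32 __iomem *dst, const u32 *src, size_t count)
{
	size_t i;

	for (i = 0; i < count; i++)
		writel(src[i], dst + i);
}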
671 struct drm_device *drm = &hvs->vc4->base; in vc4_hvs_mask_underrun()
679 dispctrl &= ~(hvs->vc4->is_vc5 ? SCALER5_DISPCTRL_DSPEISLUR(channel) : in vc4_hvs_mask_underrun()
689 struct drm_device *drm = &hvs->vc4->base; in vc4_hvs_unmask_underrun()
697 dispctrl |= (hvs->vc4->is_vc5 ? SCALER5_DISPCTRL_DSPEISLUR(channel) : in vc4_hvs_unmask_underrun()
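vc4_hvs_mask_underrun() and vc4_hvs_unmask_underrun() perform a read-modify-write of the SCALER_DISPCTRL register, clearing or setting the per-channel underrun interrupt enable bit, with the bit layout differing between the original VC4 and the VC5 generation (hence the is_vc5 test). A hedged sketch of the unmask side follows, assuming the MMIO mapping is reachable as hvs->regs; the driver itself goes through its own register accessor helpers rather than raw readl/writel.

#include <linux/io.h>

#include "vc4_drv.h"
#include "vc4_regs.h"

/* Sketch: re-enable the underrun interrupt for one FIFO/channel.
 * Register offset and bit macros are taken from the matched lines;
 * accessing hvs->regs directly is a simplification.
 */
static void unmask_underrun(struct vc4_hvs *hvs, int channel)
{
	u32 dispctrl = readl(hvs->regs + SCALER_DISPCTRL);

	dispctrl |= hvs->vc4->is_vc5 ? SCALER5_DISPCTRL_DSPEISLUR(channel) :
				       SCALER_DISPCTRL_DSPEISLUR(channel);

	writel(dispctrl, hvs->regs + SCALER_DISPCTRL);
}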
709 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_report_underrun() local
711 atomic_inc(&vc4->underrun); in vc4_hvs_report_underrun()
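The underrun accounting is a plain atomic counter on struct vc4_dev: vc4_hvs_report_underrun() increments it from interrupt context, and the debugfs entry near the top of the listing prints it with atomic_read(). A small sketch of that producer/consumer pair, with hypothetical function names:

#include <linux/atomic.h>
#include <drm/drm_print.h>

#include "vc4_drv.h"

/* Sketch: bump the underrun counter; safe from the HVS IRQ handler. */
static void note_underrun(struct vc4_dev *vc4)
{
	atomic_inc(&vc4->underrun);
}

/* Sketch: report the current count, as the debugfs entry does. */
static void print_underrun(struct vc4_dev *vc4, struct drm_printer *p)
{
	drm_printf(p, "%d\n", atomic_read(&vc4->underrun));
}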
718 struct vc4_dev *vc4 = to_vc4_dev(dev); in vc4_hvs_irq_handler() local
719 struct vc4_hvs *hvs = vc4->hvs; in vc4_hvs_irq_handler()
741 dspeislur = vc4->is_vc5 ? SCALER5_DISPCTRL_DSPEISLUR(channel) : in vc4_hvs_irq_handler()
764 struct vc4_dev *vc4 = to_vc4_dev(drm); in vc4_hvs_debugfs_init() local
765 struct vc4_hvs *hvs = vc4->hvs; in vc4_hvs_debugfs_init()
767 if (!vc4->hvs) in vc4_hvs_debugfs_init()
770 if (!vc4->is_vc5) in vc4_hvs_debugfs_init()
773 &vc4->load_tracker_enabled); in vc4_hvs_debugfs_init()
784 struct vc4_hvs *__vc4_hvs_alloc(struct vc4_dev *vc4, struct platform_device *pdev) in __vc4_hvs_alloc() argument
786 struct drm_device *drm = &vc4->base; in __vc4_hvs_alloc()
793 hvs->vc4 = vc4; in __vc4_hvs_alloc()
812 if (!vc4->is_vc5) in __vc4_hvs_alloc()
819 vc4->hvs = hvs; in __vc4_hvs_alloc()
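__vc4_hvs_alloc() creates the HVS object with a DRM-managed allocation and links it both ways (hvs->vc4 and vc4->hvs). A condensed sketch of that setup is below; the drm_mm ranges are placeholders, since the excerpt does not show the real display-list and line-buffer sizes, and the helper name is hypothetical.

#include <linux/err.h>
#include <linux/gfp.h>
#include <linux/spinlock.h>
#include <drm/drm_managed.h>
#include <drm/drm_mm.h>

#include "vc4_drv.h"

/* Sketch: allocate and wire up the HVS state.  Sizes are placeholders. */
static struct vc4_hvs *hvs_alloc_sketch(struct vc4_dev *vc4)
{
	struct drm_device *drm = &vc4->base;
	struct vc4_hvs *hvs;

	/* drmm_kzalloc() ties the object's lifetime to the DRM device. */
	hvs = drmm_kzalloc(drm, sizeof(*hvs), GFP_KERNEL);
	if (!hvs)
		return ERR_PTR(-ENOMEM);

	hvs->vc4 = vc4;

	spin_lock_init(&hvs->mm_lock);

	/* Placeholder ranges for the display-list and line-buffer allocators. */
	drm_mm_init(&hvs->dlist_mm, 0, 1024);
	drm_mm_init(&hvs->lbm_mm, 0, 1024);

	vc4->hvs = hvs;

	return hvs;
}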
828 struct vc4_dev *vc4 = to_vc4_dev(drm); in vc4_hvs_bind() local
834 hvs = __vc4_hvs_alloc(vc4, NULL); in vc4_hvs_bind()
846 if (vc4->is_vc5) { in vc4_hvs_bind()
884 if (!vc4->is_vc5) in vc4_hvs_bind()
925 if (!vc4->is_vc5) in vc4_hvs_bind()
955 * VC4 panics when < 2 lines in FIFO. in vc4_hvs_bind()
969 if (!vc4->is_vc5) { in vc4_hvs_bind()
1022 vc4_hvs_irq_handler, 0, "vc4 hvs", drm); in vc4_hvs_bind()
1033 struct vc4_dev *vc4 = to_vc4_dev(drm); in vc4_hvs_unbind() local
1034 struct vc4_hvs *hvs = vc4->hvs; in vc4_hvs_unbind()
1037 if (drm_mm_node_allocated(&vc4->hvs->mitchell_netravali_filter)) in vc4_hvs_unbind()
1038 drm_mm_remove_node(&vc4->hvs->mitchell_netravali_filter); in vc4_hvs_unbind()
1040 drm_mm_for_each_node_safe(node, next, &vc4->hvs->dlist_mm) in vc4_hvs_unbind()
1043 drm_mm_takedown(&vc4->hvs->dlist_mm); in vc4_hvs_unbind()
1045 drm_mm_for_each_node_safe(node, next, &vc4->hvs->lbm_mm) in vc4_hvs_unbind()
1047 drm_mm_takedown(&vc4->hvs->lbm_mm); in vc4_hvs_unbind()
1051 vc4->hvs = NULL; in vc4_hvs_unbind()
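Teardown in vc4_hvs_unbind() is the mirror image: any node still sitting in the allocators is removed, then the allocators themselves are torn down, and finally the back-pointer is cleared. A sketch of that drm_mm cleanup pattern, using the field names from the matched lines (the helper names are hypothetical):

#include <drm/drm_mm.h>

#include "vc4_drv.h"

/* Sketch: release every outstanding node, then retire the allocator. */
static void drain_and_takedown(struct drm_mm *mm)
{
	struct drm_mm_node *node, *next;

	drm_mm_for_each_node_safe(node, next, mm)
		drm_mm_remove_node(node);

	drm_mm_takedown(mm);
}

static void hvs_cleanup_sketch(struct vc4_hvs *hvs)
{
	/* The filter-kernel node is optional, so check before removing it. */
	if (drm_mm_node_allocated(&hvs->mitchell_netravali_filter))
		drm_mm_remove_node(&hvs->mitchell_netravali_filter);

	drain_and_takedown(&hvs->dlist_mm);
	drain_and_takedown(&hvs->lbm_mm);

	hvs->vc4->hvs = NULL;
}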