Lines Matching +full:comp +full:- +full:disable
1 // SPDX-License-Identifier: GPL-2.0-only
7 #include <linux/dma-mapping.h>
11 #include <linux/soc/mediatek/mtk-cmdq.h>
12 #include <linux/soc/mediatek/mtk-mmsys.h>
13 #include <linux/soc/mediatek/mtk-mutex.h>
29 * struct mtk_crtc - MediaTek specific crtc structure.
97 struct drm_crtc *crtc = &mtk_crtc->base; in mtk_crtc_finish_page_flip()
100 if (mtk_crtc->event) { in mtk_crtc_finish_page_flip()
101 spin_lock_irqsave(&crtc->dev->event_lock, flags); in mtk_crtc_finish_page_flip()
102 drm_crtc_send_vblank_event(crtc, mtk_crtc->event); in mtk_crtc_finish_page_flip()
104 mtk_crtc->event = NULL; in mtk_crtc_finish_page_flip()
105 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); in mtk_crtc_finish_page_flip()
113 drm_crtc_handle_vblank(&mtk_crtc->base); in mtk_drm_finish_page_flip()
115 spin_lock_irqsave(&mtk_crtc->config_lock, flags); in mtk_drm_finish_page_flip()
116 if (!mtk_crtc->config_updating && mtk_crtc->pending_needs_vblank) { in mtk_drm_finish_page_flip()
118 mtk_crtc->pending_needs_vblank = false; in mtk_drm_finish_page_flip()
120 spin_unlock_irqrestore(&mtk_crtc->config_lock, flags); in mtk_drm_finish_page_flip()
128 mtk_mutex_put(mtk_crtc->mutex); in mtk_crtc_destroy()
130 if (mtk_crtc->cmdq_client.chan) { in mtk_crtc_destroy()
131 cmdq_pkt_destroy(&mtk_crtc->cmdq_client, &mtk_crtc->cmdq_handle); in mtk_crtc_destroy()
132 mbox_free_channel(mtk_crtc->cmdq_client.chan); in mtk_crtc_destroy()
133 mtk_crtc->cmdq_client.chan = NULL; in mtk_crtc_destroy()
137 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_destroy()
138 struct mtk_ddp_comp *comp; in mtk_crtc_destroy() local
140 comp = mtk_crtc->ddp_comp[i]; in mtk_crtc_destroy()
141 mtk_ddp_comp_unregister_vblank_cb(comp); in mtk_crtc_destroy()
151 if (crtc->state) in mtk_crtc_reset()
152 __drm_atomic_helper_crtc_destroy_state(crtc->state); in mtk_crtc_reset()
154 kfree(to_mtk_crtc_state(crtc->state)); in mtk_crtc_reset()
155 crtc->state = NULL; in mtk_crtc_reset()
159 __drm_atomic_helper_crtc_reset(crtc, &state->base); in mtk_crtc_reset()
170 __drm_atomic_helper_crtc_duplicate_state(crtc, &state->base); in mtk_crtc_duplicate_state()
172 WARN_ON(state->base.crtc != crtc); in mtk_crtc_duplicate_state()
173 state->base.crtc = crtc; in mtk_crtc_duplicate_state()
174 state->pending_config = false; in mtk_crtc_duplicate_state()
176 return &state->base; in mtk_crtc_duplicate_state()
193 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_mode_valid()
194 status = mtk_ddp_comp_mode_valid(mtk_crtc->ddp_comp[i], mode); in mtk_crtc_mode_valid()
211 struct mtk_crtc_state *state = to_mtk_crtc_state(crtc->state); in mtk_crtc_mode_set_nofb()
213 state->pending_width = crtc->mode.hdisplay; in mtk_crtc_mode_set_nofb()
214 state->pending_height = crtc->mode.vdisplay; in mtk_crtc_mode_set_nofb()
215 state->pending_vrefresh = drm_mode_vrefresh(&crtc->mode); in mtk_crtc_mode_set_nofb()
217 state->pending_config = true; in mtk_crtc_mode_set_nofb()
225 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_ddp_clk_enable()
226 ret = mtk_ddp_comp_clk_enable(mtk_crtc->ddp_comp[i]); in mtk_crtc_ddp_clk_enable()
235 while (--i >= 0) in mtk_crtc_ddp_clk_enable()
236 mtk_ddp_comp_clk_disable(mtk_crtc->ddp_comp[i]); in mtk_crtc_ddp_clk_enable()
244 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) in mtk_crtc_ddp_clk_disable()
245 mtk_ddp_comp_clk_disable(mtk_crtc->ddp_comp[i]); in mtk_crtc_ddp_clk_disable()
254 struct mtk_ddp_comp *comp; in mtk_ddp_comp_for_plane() local
256 unsigned int local_index = plane - mtk_crtc->planes; in mtk_ddp_comp_for_plane()
258 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_ddp_comp_for_plane()
259 comp = mtk_crtc->ddp_comp[i]; in mtk_ddp_comp_for_plane()
260 if (local_index < (count + mtk_ddp_comp_layer_nr(comp))) { in mtk_ddp_comp_for_plane()
261 *local_layer = local_index - count; in mtk_ddp_comp_for_plane()
262 return comp; in mtk_ddp_comp_for_plane()
264 count += mtk_ddp_comp_layer_nr(comp); in mtk_ddp_comp_for_plane()
267 WARN(1, "Failed to find component for plane %d\n", plane->index); in mtk_ddp_comp_for_plane()
281 if (data->sta < 0) in ddp_cmdq_cb()
284 state = to_mtk_crtc_state(mtk_crtc->base.state); in ddp_cmdq_cb()
286 spin_lock_irqsave(&mtk_crtc->config_lock, flags); in ddp_cmdq_cb()
287 if (mtk_crtc->config_updating) { in ddp_cmdq_cb()
288 spin_unlock_irqrestore(&mtk_crtc->config_lock, flags); in ddp_cmdq_cb()
292 state->pending_config = false; in ddp_cmdq_cb()
294 if (mtk_crtc->pending_planes) { in ddp_cmdq_cb()
295 for (i = 0; i < mtk_crtc->layer_nr; i++) { in ddp_cmdq_cb()
296 struct drm_plane *plane = &mtk_crtc->planes[i]; in ddp_cmdq_cb()
299 plane_state = to_mtk_plane_state(plane->state); in ddp_cmdq_cb()
301 plane_state->pending.config = false; in ddp_cmdq_cb()
303 mtk_crtc->pending_planes = false; in ddp_cmdq_cb()
306 if (mtk_crtc->pending_async_planes) { in ddp_cmdq_cb()
307 for (i = 0; i < mtk_crtc->layer_nr; i++) { in ddp_cmdq_cb()
308 struct drm_plane *plane = &mtk_crtc->planes[i]; in ddp_cmdq_cb()
311 plane_state = to_mtk_plane_state(plane->state); in ddp_cmdq_cb()
313 plane_state->pending.async_config = false; in ddp_cmdq_cb()
315 mtk_crtc->pending_async_planes = false; in ddp_cmdq_cb()
318 spin_unlock_irqrestore(&mtk_crtc->config_lock, flags); in ddp_cmdq_cb()
322 mtk_crtc->cmdq_vblank_cnt = 0; in ddp_cmdq_cb()
323 wake_up(&mtk_crtc->cb_blocking_queue); in ddp_cmdq_cb()
329 struct drm_crtc *crtc = &mtk_crtc->base; in mtk_crtc_ddp_hw_init()
337 if (WARN_ON(!crtc->state)) in mtk_crtc_ddp_hw_init()
338 return -EINVAL; in mtk_crtc_ddp_hw_init()
340 width = crtc->state->adjusted_mode.hdisplay; in mtk_crtc_ddp_hw_init()
341 height = crtc->state->adjusted_mode.vdisplay; in mtk_crtc_ddp_hw_init()
342 vrefresh = drm_mode_vrefresh(&crtc->state->adjusted_mode); in mtk_crtc_ddp_hw_init()
344 drm_for_each_encoder(encoder, crtc->dev) { in mtk_crtc_ddp_hw_init()
345 if (encoder->crtc != crtc) in mtk_crtc_ddp_hw_init()
348 drm_connector_list_iter_begin(crtc->dev, &conn_iter); in mtk_crtc_ddp_hw_init()
350 if (connector->encoder != encoder) in mtk_crtc_ddp_hw_init()
352 if (connector->display_info.bpc != 0 && in mtk_crtc_ddp_hw_init()
353 bpc > connector->display_info.bpc) in mtk_crtc_ddp_hw_init()
354 bpc = connector->display_info.bpc; in mtk_crtc_ddp_hw_init()
359 ret = pm_runtime_resume_and_get(crtc->dev->dev); in mtk_crtc_ddp_hw_init()
365 ret = mtk_mutex_prepare(mtk_crtc->mutex); in mtk_crtc_ddp_hw_init()
377 for (i = 0; i < mtk_crtc->ddp_comp_nr - 1; i++) { in mtk_crtc_ddp_hw_init()
378 if (!mtk_ddp_comp_connect(mtk_crtc->ddp_comp[i], mtk_crtc->mmsys_dev, in mtk_crtc_ddp_hw_init()
379 mtk_crtc->ddp_comp[i + 1]->id)) in mtk_crtc_ddp_hw_init()
380 mtk_mmsys_ddp_connect(mtk_crtc->mmsys_dev, in mtk_crtc_ddp_hw_init()
381 mtk_crtc->ddp_comp[i]->id, in mtk_crtc_ddp_hw_init()
382 mtk_crtc->ddp_comp[i + 1]->id); in mtk_crtc_ddp_hw_init()
383 if (!mtk_ddp_comp_add(mtk_crtc->ddp_comp[i], mtk_crtc->mutex)) in mtk_crtc_ddp_hw_init()
384 mtk_mutex_add_comp(mtk_crtc->mutex, in mtk_crtc_ddp_hw_init()
385 mtk_crtc->ddp_comp[i]->id); in mtk_crtc_ddp_hw_init()
387 if (!mtk_ddp_comp_add(mtk_crtc->ddp_comp[i], mtk_crtc->mutex)) in mtk_crtc_ddp_hw_init()
388 mtk_mutex_add_comp(mtk_crtc->mutex, mtk_crtc->ddp_comp[i]->id); in mtk_crtc_ddp_hw_init()
389 mtk_mutex_enable(mtk_crtc->mutex); in mtk_crtc_ddp_hw_init()
391 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_ddp_hw_init()
392 struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[i]; in mtk_crtc_ddp_hw_init() local
395 mtk_ddp_comp_bgclr_in_on(comp); in mtk_crtc_ddp_hw_init()
397 mtk_ddp_comp_config(comp, width, height, vrefresh, bpc, NULL); in mtk_crtc_ddp_hw_init()
398 mtk_ddp_comp_start(comp); in mtk_crtc_ddp_hw_init()
402 for (i = 0; i < mtk_crtc->layer_nr; i++) { in mtk_crtc_ddp_hw_init()
403 struct drm_plane *plane = &mtk_crtc->planes[i]; in mtk_crtc_ddp_hw_init()
405 struct mtk_ddp_comp *comp; in mtk_crtc_ddp_hw_init() local
408 plane_state = to_mtk_plane_state(plane->state); in mtk_crtc_ddp_hw_init()
411 plane_state->pending.enable = false; in mtk_crtc_ddp_hw_init()
412 comp = mtk_ddp_comp_for_plane(crtc, plane, &local_layer); in mtk_crtc_ddp_hw_init()
413 if (comp) in mtk_crtc_ddp_hw_init()
414 mtk_ddp_comp_layer_config(comp, local_layer, in mtk_crtc_ddp_hw_init()
421 mtk_mutex_unprepare(mtk_crtc->mutex); in mtk_crtc_ddp_hw_init()
423 pm_runtime_put(crtc->dev->dev); in mtk_crtc_ddp_hw_init()
429 struct drm_device *drm = mtk_crtc->base.dev; in mtk_crtc_ddp_hw_fini()
430 struct drm_crtc *crtc = &mtk_crtc->base; in mtk_crtc_ddp_hw_fini()
434 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_ddp_hw_fini()
435 mtk_ddp_comp_stop(mtk_crtc->ddp_comp[i]); in mtk_crtc_ddp_hw_fini()
437 mtk_ddp_comp_bgclr_in_off(mtk_crtc->ddp_comp[i]); in mtk_crtc_ddp_hw_fini()
440 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) in mtk_crtc_ddp_hw_fini()
441 if (!mtk_ddp_comp_remove(mtk_crtc->ddp_comp[i], mtk_crtc->mutex)) in mtk_crtc_ddp_hw_fini()
442 mtk_mutex_remove_comp(mtk_crtc->mutex, in mtk_crtc_ddp_hw_fini()
443 mtk_crtc->ddp_comp[i]->id); in mtk_crtc_ddp_hw_fini()
444 mtk_mutex_disable(mtk_crtc->mutex); in mtk_crtc_ddp_hw_fini()
445 for (i = 0; i < mtk_crtc->ddp_comp_nr - 1; i++) { in mtk_crtc_ddp_hw_fini()
446 if (!mtk_ddp_comp_disconnect(mtk_crtc->ddp_comp[i], mtk_crtc->mmsys_dev, in mtk_crtc_ddp_hw_fini()
447 mtk_crtc->ddp_comp[i + 1]->id)) in mtk_crtc_ddp_hw_fini()
448 mtk_mmsys_ddp_disconnect(mtk_crtc->mmsys_dev, in mtk_crtc_ddp_hw_fini()
449 mtk_crtc->ddp_comp[i]->id, in mtk_crtc_ddp_hw_fini()
450 mtk_crtc->ddp_comp[i + 1]->id); in mtk_crtc_ddp_hw_fini()
451 if (!mtk_ddp_comp_remove(mtk_crtc->ddp_comp[i], mtk_crtc->mutex)) in mtk_crtc_ddp_hw_fini()
452 mtk_mutex_remove_comp(mtk_crtc->mutex, in mtk_crtc_ddp_hw_fini()
453 mtk_crtc->ddp_comp[i]->id); in mtk_crtc_ddp_hw_fini()
455 if (!mtk_ddp_comp_remove(mtk_crtc->ddp_comp[i], mtk_crtc->mutex)) in mtk_crtc_ddp_hw_fini()
456 mtk_mutex_remove_comp(mtk_crtc->mutex, mtk_crtc->ddp_comp[i]->id); in mtk_crtc_ddp_hw_fini()
458 mtk_mutex_unprepare(mtk_crtc->mutex); in mtk_crtc_ddp_hw_fini()
460 pm_runtime_put(drm->dev); in mtk_crtc_ddp_hw_fini()
462 if (crtc->state->event && !crtc->state->active) { in mtk_crtc_ddp_hw_fini()
463 spin_lock_irqsave(&crtc->dev->event_lock, flags); in mtk_crtc_ddp_hw_fini()
464 drm_crtc_send_vblank_event(crtc, crtc->state->event); in mtk_crtc_ddp_hw_fini()
465 crtc->state->event = NULL; in mtk_crtc_ddp_hw_fini()
466 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); in mtk_crtc_ddp_hw_fini()
474 struct mtk_crtc_state *state = to_mtk_crtc_state(mtk_crtc->base.state); in mtk_crtc_ddp_config()
475 struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0]; in mtk_crtc_ddp_config() local
484 if (state->pending_config) { in mtk_crtc_ddp_config()
485 mtk_ddp_comp_config(comp, state->pending_width, in mtk_crtc_ddp_config()
486 state->pending_height, in mtk_crtc_ddp_config()
487 state->pending_vrefresh, 0, in mtk_crtc_ddp_config()
491 state->pending_config = false; in mtk_crtc_ddp_config()
494 if (mtk_crtc->pending_planes) { in mtk_crtc_ddp_config()
495 for (i = 0; i < mtk_crtc->layer_nr; i++) { in mtk_crtc_ddp_config()
496 struct drm_plane *plane = &mtk_crtc->planes[i]; in mtk_crtc_ddp_config()
499 plane_state = to_mtk_plane_state(plane->state); in mtk_crtc_ddp_config()
501 if (!plane_state->pending.config) in mtk_crtc_ddp_config()
504 comp = mtk_ddp_comp_for_plane(crtc, plane, &local_layer); in mtk_crtc_ddp_config()
506 if (comp) in mtk_crtc_ddp_config()
507 mtk_ddp_comp_layer_config(comp, local_layer, in mtk_crtc_ddp_config()
511 plane_state->pending.config = false; in mtk_crtc_ddp_config()
515 mtk_crtc->pending_planes = false; in mtk_crtc_ddp_config()
518 if (mtk_crtc->pending_async_planes) { in mtk_crtc_ddp_config()
519 for (i = 0; i < mtk_crtc->layer_nr; i++) { in mtk_crtc_ddp_config()
520 struct drm_plane *plane = &mtk_crtc->planes[i]; in mtk_crtc_ddp_config()
523 plane_state = to_mtk_plane_state(plane->state); in mtk_crtc_ddp_config()
525 if (!plane_state->pending.async_config) in mtk_crtc_ddp_config()
528 comp = mtk_ddp_comp_for_plane(crtc, plane, &local_layer); in mtk_crtc_ddp_config()
530 if (comp) in mtk_crtc_ddp_config()
531 mtk_ddp_comp_layer_config(comp, local_layer, in mtk_crtc_ddp_config()
535 plane_state->pending.async_config = false; in mtk_crtc_ddp_config()
539 mtk_crtc->pending_async_planes = false; in mtk_crtc_ddp_config()
546 struct cmdq_pkt *cmdq_handle = &mtk_crtc->cmdq_handle; in mtk_crtc_update_config()
548 struct drm_crtc *crtc = &mtk_crtc->base; in mtk_crtc_update_config()
549 struct mtk_drm_private *priv = crtc->dev->dev_private; in mtk_crtc_update_config()
554 mutex_lock(&mtk_crtc->hw_lock); in mtk_crtc_update_config()
556 spin_lock_irqsave(&mtk_crtc->config_lock, flags); in mtk_crtc_update_config()
557 mtk_crtc->config_updating = true; in mtk_crtc_update_config()
558 spin_unlock_irqrestore(&mtk_crtc->config_lock, flags); in mtk_crtc_update_config()
561 mtk_crtc->pending_needs_vblank = true; in mtk_crtc_update_config()
563 for (i = 0; i < mtk_crtc->layer_nr; i++) { in mtk_crtc_update_config()
564 struct drm_plane *plane = &mtk_crtc->planes[i]; in mtk_crtc_update_config()
567 plane_state = to_mtk_plane_state(plane->state); in mtk_crtc_update_config()
568 if (plane_state->pending.dirty) { in mtk_crtc_update_config()
569 plane_state->pending.config = true; in mtk_crtc_update_config()
570 plane_state->pending.dirty = false; in mtk_crtc_update_config()
572 } else if (plane_state->pending.async_dirty) { in mtk_crtc_update_config()
573 plane_state->pending.async_config = true; in mtk_crtc_update_config()
574 plane_state->pending.async_dirty = false; in mtk_crtc_update_config()
579 mtk_crtc->pending_planes = true; in mtk_crtc_update_config()
581 mtk_crtc->pending_async_planes = true; in mtk_crtc_update_config()
583 if (priv->data->shadow_register) { in mtk_crtc_update_config()
584 mtk_mutex_acquire(mtk_crtc->mutex); in mtk_crtc_update_config()
586 mtk_mutex_release(mtk_crtc->mutex); in mtk_crtc_update_config()
589 if (mtk_crtc->cmdq_client.chan) { in mtk_crtc_update_config()
590 mbox_flush(mtk_crtc->cmdq_client.chan, 2000); in mtk_crtc_update_config()
591 cmdq_handle->cmd_buf_size = 0; in mtk_crtc_update_config()
592 cmdq_pkt_clear_event(cmdq_handle, mtk_crtc->cmdq_event); in mtk_crtc_update_config()
593 cmdq_pkt_wfe(cmdq_handle, mtk_crtc->cmdq_event, false); in mtk_crtc_update_config()
596 dma_sync_single_for_device(mtk_crtc->cmdq_client.chan->mbox->dev, in mtk_crtc_update_config()
597 cmdq_handle->pa_base, in mtk_crtc_update_config()
598 cmdq_handle->cmd_buf_size, in mtk_crtc_update_config()
607 mtk_crtc->cmdq_vblank_cnt = 3; in mtk_crtc_update_config()
609 mbox_send_message(mtk_crtc->cmdq_client.chan, cmdq_handle); in mtk_crtc_update_config()
610 mbox_client_txdone(mtk_crtc->cmdq_client.chan, 0); in mtk_crtc_update_config()
613 spin_lock_irqsave(&mtk_crtc->config_lock, flags); in mtk_crtc_update_config()
614 mtk_crtc->config_updating = false; in mtk_crtc_update_config()
615 spin_unlock_irqrestore(&mtk_crtc->config_lock, flags); in mtk_crtc_update_config()
617 mutex_unlock(&mtk_crtc->hw_lock); in mtk_crtc_update_config()
624 struct mtk_drm_private *priv = crtc->dev->dev_private; in mtk_crtc_ddp_irq()
627 if (!priv->data->shadow_register && !mtk_crtc->cmdq_client.chan) in mtk_crtc_ddp_irq()
629 else if (mtk_crtc->cmdq_vblank_cnt > 0 && --mtk_crtc->cmdq_vblank_cnt == 0) in mtk_crtc_ddp_irq()
631 drm_crtc_index(&mtk_crtc->base)); in mtk_crtc_ddp_irq()
633 if (!priv->data->shadow_register) in mtk_crtc_ddp_irq()
642 struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0]; in mtk_crtc_enable_vblank() local
644 mtk_ddp_comp_enable_vblank(comp); in mtk_crtc_enable_vblank()
652 struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0]; in mtk_crtc_disable_vblank() local
654 mtk_ddp_comp_disable_vblank(comp); in mtk_crtc_disable_vblank()
663 struct drm_crtc_state *crtc_state = state->crtcs[crtc_index].new_state; in mtk_crtc_update_output()
666 unsigned int encoder_mask = crtc_state->encoder_mask; in mtk_crtc_update_output()
668 if (!crtc_state->connectors_changed) in mtk_crtc_update_output()
671 if (!mtk_crtc->num_conn_routes) in mtk_crtc_update_output()
674 priv = ((struct mtk_drm_private *)crtc->dev->dev_private)->all_drm_private[crtc_index]; in mtk_crtc_update_output()
675 dev = priv->dev; in mtk_crtc_update_output()
678 crtc_state->connectors_changed, encoder_mask, crtc_index); in mtk_crtc_update_output()
680 for (i = 0; i < mtk_crtc->num_conn_routes; i++) { in mtk_crtc_update_output()
681 unsigned int comp_id = mtk_crtc->conn_routes[i].route_ddp; in mtk_crtc_update_output()
682 struct mtk_ddp_comp *comp = &priv->ddp_comp[comp_id]; in mtk_crtc_update_output() local
684 if (comp->encoder_index >= 0 && in mtk_crtc_update_output()
685 (encoder_mask & BIT(comp->encoder_index))) { in mtk_crtc_update_output()
686 mtk_crtc->ddp_comp[mtk_crtc->ddp_comp_nr - 1] = comp; in mtk_crtc_update_output()
688 comp->id, mtk_crtc->ddp_comp_nr - 1); in mtk_crtc_update_output()
698 struct mtk_ddp_comp *comp; in mtk_crtc_plane_check() local
700 comp = mtk_ddp_comp_for_plane(crtc, plane, &local_layer); in mtk_crtc_plane_check()
701 if (comp) in mtk_crtc_plane_check()
702 return mtk_ddp_comp_layer_check(comp, local_layer, state); in mtk_crtc_plane_check()
711 if (!mtk_crtc->enabled) in mtk_crtc_async_update()
721 struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0]; in mtk_crtc_atomic_enable() local
724 DRM_DEBUG_DRIVER("%s %d\n", __func__, crtc->base.id); in mtk_crtc_atomic_enable()
726 ret = mtk_ddp_comp_power_on(comp); in mtk_crtc_atomic_enable()
728 DRM_DEV_ERROR(comp->dev, "Failed to enable power domain: %d\n", ret); in mtk_crtc_atomic_enable()
736 mtk_ddp_comp_power_off(comp); in mtk_crtc_atomic_enable()
741 mtk_crtc->enabled = true; in mtk_crtc_atomic_enable()
748 struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[0]; in mtk_crtc_atomic_disable() local
751 DRM_DEBUG_DRIVER("%s %d\n", __func__, crtc->base.id); in mtk_crtc_atomic_disable()
752 if (!mtk_crtc->enabled) in mtk_crtc_atomic_disable()
756 for (i = 0; i < mtk_crtc->layer_nr; i++) { in mtk_crtc_atomic_disable()
757 struct drm_plane *plane = &mtk_crtc->planes[i]; in mtk_crtc_atomic_disable()
760 plane_state = to_mtk_plane_state(plane->state); in mtk_crtc_atomic_disable()
761 plane_state->pending.enable = false; in mtk_crtc_atomic_disable()
762 plane_state->pending.config = true; in mtk_crtc_atomic_disable()
764 mtk_crtc->pending_planes = true; in mtk_crtc_atomic_disable()
769 if (mtk_crtc->cmdq_client.chan) in mtk_crtc_atomic_disable()
770 wait_event_timeout(mtk_crtc->cb_blocking_queue, in mtk_crtc_atomic_disable()
771 mtk_crtc->cmdq_vblank_cnt == 0, in mtk_crtc_atomic_disable()
779 mtk_ddp_comp_power_off(comp); in mtk_crtc_atomic_disable()
781 mtk_crtc->enabled = false; in mtk_crtc_atomic_disable()
793 if (mtk_crtc->event && mtk_crtc_state->base.event) in mtk_crtc_atomic_begin()
796 if (mtk_crtc_state->base.event) { in mtk_crtc_atomic_begin()
797 mtk_crtc_state->base.event->pipe = drm_crtc_index(crtc); in mtk_crtc_atomic_begin()
800 spin_lock_irqsave(&crtc->dev->event_lock, flags); in mtk_crtc_atomic_begin()
801 mtk_crtc->event = mtk_crtc_state->base.event; in mtk_crtc_atomic_begin()
802 spin_unlock_irqrestore(&crtc->dev->event_lock, flags); in mtk_crtc_atomic_begin()
804 mtk_crtc_state->base.event = NULL; in mtk_crtc_atomic_begin()
814 if (crtc->state->color_mgmt_changed) in mtk_crtc_atomic_flush()
815 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_atomic_flush()
816 mtk_ddp_gamma_set(mtk_crtc->ddp_comp[i], crtc->state); in mtk_crtc_atomic_flush()
817 mtk_ddp_ctm_set(mtk_crtc->ddp_comp[i], crtc->state); in mtk_crtc_atomic_flush()
819 mtk_crtc_update_config(mtk_crtc, !!mtk_crtc->event); in mtk_crtc_atomic_flush()
850 for (i = 0; i < mtk_crtc->layer_nr; i++) { in mtk_crtc_init()
851 if (mtk_crtc->planes[i].type == DRM_PLANE_TYPE_PRIMARY) in mtk_crtc_init()
852 primary = &mtk_crtc->planes[i]; in mtk_crtc_init()
853 else if (mtk_crtc->planes[i].type == DRM_PLANE_TYPE_CURSOR) in mtk_crtc_init()
854 cursor = &mtk_crtc->planes[i]; in mtk_crtc_init()
857 ret = drm_crtc_init_with_planes(drm, &mtk_crtc->base, primary, cursor, in mtk_crtc_init()
862 drm_crtc_helper_add(&mtk_crtc->base, &mtk_crtc_helper_funcs); in mtk_crtc_init()
867 drm_crtc_cleanup(&mtk_crtc->base); in mtk_crtc_init()
873 struct mtk_ddp_comp *comp; in mtk_crtc_num_comp_planes() local
878 comp = mtk_crtc->ddp_comp[comp_idx]; in mtk_crtc_num_comp_planes()
879 if (!comp->funcs) in mtk_crtc_num_comp_planes()
882 if (comp_idx == 1 && !comp->funcs->bgclr_in_on) in mtk_crtc_num_comp_planes()
885 return mtk_ddp_comp_layer_nr(comp); in mtk_crtc_num_comp_planes()
894 else if (plane_idx == (num_planes - 1)) in mtk_crtc_plane_type()
906 struct mtk_ddp_comp *comp = mtk_crtc->ddp_comp[comp_idx]; in mtk_crtc_init_comp_planes() local
911 &mtk_crtc->planes[mtk_crtc->layer_nr], in mtk_crtc_init_comp_planes()
913 mtk_crtc_plane_type(mtk_crtc->layer_nr, num_planes), in mtk_crtc_init_comp_planes()
914 mtk_ddp_comp_supported_rotations(comp), in mtk_crtc_init_comp_planes()
915 mtk_ddp_comp_get_blend_modes(comp), in mtk_crtc_init_comp_planes()
916 mtk_ddp_comp_get_formats(comp), in mtk_crtc_init_comp_planes()
917 mtk_ddp_comp_get_num_formats(comp), i); in mtk_crtc_init_comp_planes()
921 mtk_crtc->layer_nr++; in mtk_crtc_init_comp_planes()
937 return mtk_crtc->dma_dev; in mtk_crtc_dma_dev_get()
945 struct mtk_drm_private *priv = drm_dev->dev_private; in mtk_crtc_create()
946 struct device *dev = drm_dev->dev; in mtk_crtc_create()
959 priv = priv->all_drm_private[priv_data_index]; in mtk_crtc_create()
967 struct mtk_ddp_comp *comp; in mtk_crtc_create() local
969 node = priv->comp_node[comp_id]; in mtk_crtc_create()
970 comp = &priv->ddp_comp[comp_id]; in mtk_crtc_create()
982 if (!comp->dev) { in mtk_crtc_create()
984 return -ENODEV; in mtk_crtc_create()
990 return -ENOMEM; in mtk_crtc_create()
992 mtk_crtc->mmsys_dev = priv->mmsys_dev; in mtk_crtc_create()
993 mtk_crtc->ddp_comp_nr = path_len; in mtk_crtc_create()
994 mtk_crtc->ddp_comp = devm_kcalloc(dev, in mtk_crtc_create()
995 mtk_crtc->ddp_comp_nr + (conn_routes ? 1 : 0), in mtk_crtc_create()
996 sizeof(*mtk_crtc->ddp_comp), in mtk_crtc_create()
998 if (!mtk_crtc->ddp_comp) in mtk_crtc_create()
999 return -ENOMEM; in mtk_crtc_create()
1001 mtk_crtc->mutex = mtk_mutex_get(priv->mutex_dev); in mtk_crtc_create()
1002 if (IS_ERR(mtk_crtc->mutex)) { in mtk_crtc_create()
1003 ret = PTR_ERR(mtk_crtc->mutex); in mtk_crtc_create()
1008 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_create()
1010 struct mtk_ddp_comp *comp; in mtk_crtc_create() local
1012 comp = &priv->ddp_comp[comp_id]; in mtk_crtc_create()
1013 mtk_crtc->ddp_comp[i] = comp; in mtk_crtc_create()
1015 if (comp->funcs) { in mtk_crtc_create()
1016 if (comp->funcs->gamma_set && comp->funcs->gamma_get_lut_size) { in mtk_crtc_create()
1017 unsigned int lut_sz = mtk_ddp_gamma_get_lut_size(comp); in mtk_crtc_create()
1023 if (comp->funcs->ctm_set) in mtk_crtc_create()
1027 mtk_ddp_comp_register_vblank_cb(comp, mtk_crtc_ddp_irq, in mtk_crtc_create()
1028 &mtk_crtc->base); in mtk_crtc_create()
1031 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) in mtk_crtc_create()
1034 mtk_crtc->planes = devm_kcalloc(dev, num_comp_planes, in mtk_crtc_create()
1036 if (!mtk_crtc->planes) in mtk_crtc_create()
1037 return -ENOMEM; in mtk_crtc_create()
1039 for (i = 0; i < mtk_crtc->ddp_comp_nr; i++) { in mtk_crtc_create()
1050 mtk_crtc->dma_dev = mtk_ddp_comp_dma_dev_get(&priv->ddp_comp[path[0]]); in mtk_crtc_create()
1057 drm_mode_crtc_set_gamma_size(&mtk_crtc->base, gamma_lut_size); in mtk_crtc_create()
1058 drm_crtc_enable_color_mgmt(&mtk_crtc->base, 0, has_ctm, gamma_lut_size); in mtk_crtc_create()
1059 mutex_init(&mtk_crtc->hw_lock); in mtk_crtc_create()
1060 spin_lock_init(&mtk_crtc->config_lock); in mtk_crtc_create()
1063 i = priv->mbox_index++; in mtk_crtc_create()
1064 mtk_crtc->cmdq_client.client.dev = mtk_crtc->mmsys_dev; in mtk_crtc_create()
1065 mtk_crtc->cmdq_client.client.tx_block = false; in mtk_crtc_create()
1066 mtk_crtc->cmdq_client.client.knows_txdone = true; in mtk_crtc_create()
1067 mtk_crtc->cmdq_client.client.rx_callback = ddp_cmdq_cb; in mtk_crtc_create()
1068 mtk_crtc->cmdq_client.chan = in mtk_crtc_create()
1069 mbox_request_channel(&mtk_crtc->cmdq_client.client, i); in mtk_crtc_create()
1070 if (IS_ERR(mtk_crtc->cmdq_client.chan)) { in mtk_crtc_create()
1072 drm_crtc_index(&mtk_crtc->base)); in mtk_crtc_create()
1073 mtk_crtc->cmdq_client.chan = NULL; in mtk_crtc_create()
1076 if (mtk_crtc->cmdq_client.chan) { in mtk_crtc_create()
1077 ret = of_property_read_u32_index(priv->mutex_node, in mtk_crtc_create()
1078 "mediatek,gce-events", in mtk_crtc_create()
1080 &mtk_crtc->cmdq_event); in mtk_crtc_create()
1082 dev_dbg(dev, "mtk_crtc %d failed to get mediatek,gce-events property\n", in mtk_crtc_create()
1083 drm_crtc_index(&mtk_crtc->base)); in mtk_crtc_create()
1084 mbox_free_channel(mtk_crtc->cmdq_client.chan); in mtk_crtc_create()
1085 mtk_crtc->cmdq_client.chan = NULL; in mtk_crtc_create()
1087 ret = cmdq_pkt_create(&mtk_crtc->cmdq_client, in mtk_crtc_create()
1088 &mtk_crtc->cmdq_handle, in mtk_crtc_create()
1092 drm_crtc_index(&mtk_crtc->base)); in mtk_crtc_create()
1093 mbox_free_channel(mtk_crtc->cmdq_client.chan); in mtk_crtc_create()
1094 mtk_crtc->cmdq_client.chan = NULL; in mtk_crtc_create()
1098 /* for sending blocking cmd in crtc disable */ in mtk_crtc_create()
1099 init_waitqueue_head(&mtk_crtc->cb_blocking_queue); in mtk_crtc_create()
1106 struct device_node *node = priv->comp_node[comp_id]; in mtk_crtc_create()
1107 struct mtk_ddp_comp *comp = &priv->ddp_comp[comp_id]; in mtk_crtc_create() local
1109 if (!comp->dev) { in mtk_crtc_create()
1112 /* mark encoder_index to -1, if route comp device is not enabled */ in mtk_crtc_create()
1113 comp->encoder_index = -1; in mtk_crtc_create()
1117 mtk_ddp_comp_encoder_index_set(&priv->ddp_comp[comp_id]); in mtk_crtc_create()
1120 mtk_crtc->num_conn_routes = num_conn_routes; in mtk_crtc_create()
1121 mtk_crtc->conn_routes = conn_routes; in mtk_crtc_create()
1124 mtk_crtc->ddp_comp_nr++; in mtk_crtc_create()
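The matched lines from mtk_crtc_ddp_clk_enable() and mtk_crtc_ddp_clk_disable() (kernel lines 225-245 above) show the usual enable-with-unwind idiom: if enabling the clock of component i fails, the components already enabled are disabled in reverse order before the error is returned, and the disable path simply walks the whole array. Below is a minimal sketch of that idiom, assuming only the mtk_ddp_comp_clk_enable()/mtk_ddp_comp_clk_disable() helpers visible in the listing; the function names and the header include are illustrative, not the driver's verbatim code.

/* Illustrative sketch, not verbatim driver code. */
#include "mtk_ddp_comp.h"	/* assumed driver-local header providing struct mtk_ddp_comp */

static int example_clk_enable_all(struct mtk_ddp_comp **comp, unsigned int nr)
{
	unsigned int i;
	int ret;

	for (i = 0; i < nr; i++) {
		ret = mtk_ddp_comp_clk_enable(comp[i]);
		if (ret) {
			/* unwind: disable the clocks already enabled, in reverse order */
			while (i--)
				mtk_ddp_comp_clk_disable(comp[i]);
			return ret;
		}
	}

	return 0;
}

static void example_clk_disable_all(struct mtk_ddp_comp **comp, unsigned int nr)
{
	unsigned int i;

	for (i = 0; i < nr; i++)
		mtk_ddp_comp_clk_disable(comp[i]);
}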