// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015-2018, 2020-2021 The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/delay.h>
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_hw_pingpong.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"
#include "disp/msm_disp_snapshot.h"

#include <drm/drm_managed.h>

#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

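/*
 * Number of consecutive pingpong-done timeouts after which the panel is
 * reported dead via DPU_ENCODER_FRAME_EVENT_PANEL_DEAD.
 */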
#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels to
 * override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc);

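/*
 * In split-display configurations only the master encoder reports vblank
 * and waits for CTL_START; slave encoders follow the master.
 */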
static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
{
	return (phys_enc->split_role != ENC_ROLE_SLAVE);
}

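/*
 * Program the CTL interface configuration for command mode, bind the
 * pingpong block to this interface on active-CTL hardware, and program
 * the command-mode config (DSC data compression, wide bus).
 */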
static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };
	struct dpu_hw_intf_cmd_mode_cfg cmd_mode_cfg = {};

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	intf_cfg.dsc = dpu_encoder_helper_get_dsc(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);

	/* set up which pingpong block will connect to this interface */
	if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) &&
	    phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				phys_enc->hw_pp->idx);

	if (intf_cfg.dsc != 0)
		cmd_mode_cfg.data_compress = true;

	cmd_mode_cfg.wide_bus_en = dpu_encoder_is_widebus_enabled(phys_enc->parent);

	if (phys_enc->hw_intf->ops.program_intf_cmd_cfg)
		phys_enc->hw_intf->ops.program_intf_cmd_cfg(phys_enc->hw_intf, &cmd_mode_cfg);
}

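/*
 * Pingpong transfer-done interrupt: signal frame done to the parent
 * encoder, retire one pending kickoff, and wake any waiters.
 */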
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

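/*
 * Tear-check read-pointer interrupt: forward vblank to the parent encoder
 * and retire one pending vblank wait.
 */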
static void dpu_encoder_phys_cmd_te_rd_ptr_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	dpu_encoder_vblank_callback(phys_enc->parent, phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

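/* CTL_START interrupt: retire one pending ctl-start count and wake waiters */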
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiting ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	dpu_encoder_underrun_callback(phys_enc->parent, phys_enc);
}

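/*
 * Cache the interrupt indices for this mode: CTL_START and pingpong-done
 * come from the CTL and pingpong capabilities, while the read-pointer
 * interrupt comes from the INTF when it owns tear check and from the
 * pingpong block otherwise.
 */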
static void dpu_encoder_phys_cmd_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	phys_enc->irq[INTR_IDX_CTL_START] = phys_enc->hw_ctl->caps->intr_start;

	phys_enc->irq[INTR_IDX_PINGPONG] = phys_enc->hw_pp->caps->intr_done;

	if (phys_enc->has_intf_te)
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_intf->cap->intr_tear_rd_ptr;
	else
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_pp->caps->intr_rdptr;

	phys_enc->irq[INTR_IDX_UNDERRUN] = phys_enc->hw_intf->cap->intr_underrun;
}

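/*
 * Handle a pingpong-done timeout: log it (rate-limited), capture a
 * display snapshot, mark the encoder as needing a CTL reset, and
 * propagate a frame-done error event.
 */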
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;
	struct drm_encoder *drm_enc;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	drm_enc = phys_enc->parent;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log the first time and at "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d kickoff timeout ctl %d cnt %d koff_cnt %d\n",
			  DRMID(drm_enc),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
		msm_disp_snapshot_state(drm_enc->dev);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

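/*
 * Wait for any outstanding kickoff to retire via the pingpong-done
 * interrupt; a timeout escalates to the ppdone timeout handler above.
 */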
static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_PINGPONG],
			dpu_encoder_phys_cmd_pp_tx_done_irq,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

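/*
 * Reference-counted enable/disable of the read-pointer (vblank) interrupt.
 * Only the master encoder registers the callback; slaves return early.
 */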
static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	mutex_lock(&phys_enc->vblank_ctl_lock);
	refcount = phys_enc->vblank_refcount;

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	if (enable) {
		if (phys_enc->vblank_refcount == 0)
			ret = dpu_core_irq_register_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_RDPTR],
					dpu_encoder_phys_cmd_te_rd_ptr_irq,
					phys_enc);
		if (!ret)
			phys_enc->vblank_refcount++;
	} else if (!enable) {
		if (phys_enc->vblank_refcount == 1)
			ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_RDPTR]);
		if (!ret)
			phys_enc->vblank_refcount--;
	}

end:
	mutex_unlock(&phys_enc->vblank_ctl_lock);
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_enable(struct dpu_encoder_phys *phys_enc)
{
	trace_dpu_enc_phys_cmd_irq_enable(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  phys_enc->vblank_refcount);

	dpu_core_irq_register_callback(phys_enc->dpu_kms,
				       phys_enc->irq[INTR_IDX_PINGPONG],
				       dpu_encoder_phys_cmd_pp_tx_done_irq,
				       phys_enc);
	dpu_core_irq_register_callback(phys_enc->dpu_kms,
				       phys_enc->irq[INTR_IDX_UNDERRUN],
				       dpu_encoder_phys_cmd_underrun_irq,
				       phys_enc);
	dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

	if (dpu_encoder_phys_cmd_is_master(phys_enc) && phys_enc->irq[INTR_IDX_CTL_START])
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
					       phys_enc->irq[INTR_IDX_CTL_START],
					       dpu_encoder_phys_cmd_ctl_start_irq,
					       phys_enc);
}

static void dpu_encoder_phys_cmd_irq_disable(struct dpu_encoder_phys *phys_enc)
{
	trace_dpu_enc_phys_cmd_irq_disable(DRMID(phys_enc->parent),
					   phys_enc->hw_pp->idx - PINGPONG_0,
					   phys_enc->vblank_refcount);

	if (dpu_encoder_phys_cmd_is_master(phys_enc) && phys_enc->irq[INTR_IDX_CTL_START])
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
						 phys_enc->irq[INTR_IDX_CTL_START]);

	dpu_core_irq_unregister_callback(phys_enc->dpu_kms, phys_enc->irq[INTR_IDX_UNDERRUN]);
	dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
	dpu_core_irq_unregister_callback(phys_enc->dpu_kms, phys_enc->irq[INTR_IDX_PINGPONG]);
}

static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	unsigned long vsync_hz;
	struct dpu_kms *dpu_kms;

	/*
	 * TODO: if/when resource allocation is refactored, move this to a
	 * place where the driver can actually return an error.
	 */
	if (!phys_enc->has_intf_te &&
	    (!phys_enc->hw_pp ||
	     !phys_enc->hw_pp->ops.enable_tearcheck)) {
		DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "intf %d pp %d\n",
			 phys_enc->hw_intf ? phys_enc->hw_intf->idx - INTF_0 : -1,
			 phys_enc->hw_pp ? phys_enc->hw_pp->idx - PINGPONG_0 : -1);

	mode = &phys_enc->cached_mode;

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: DSI byte clock calculated based on 70 fps;
	 * around 14 ms to complete a kickoff cycle if TE is disabled;
	 * vclk_line based on 60 fps; write is faster than read;
	 * init == start == rdptr.
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to the
	 * LCD panel frequency, divided by the number of rows (lines) in the
	 * LCD panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - no vsync clock\n");
		return;
	}

	tc_cfg.vsync_count = vsync_hz /
				(mode->vtotal * drm_mode_vrefresh(mode));
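
	/*
	 * Worked example with hypothetical numbers: a 19.2 MHz vsync clock
	 * and a 60 Hz mode with vtotal = 2500 give
	 * 19200000 / (2500 * 60) = 128 vsync clock ticks per panel line.
	 */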

	/*
	 * Set the sync_cfg_height to twice vtotal so that if we lose a
	 * TE event coming from the display TE pin we won't stall immediately
	 */
	tc_cfg.hw_vsync_mode = 1;
	tc_cfg.sync_cfg_height = mode->vtotal * 2;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;

	DPU_DEBUG_CMDENC(cmd_enc,
		"tc vsync_clk_speed_hz %lu vtotal %u vrefresh %u\n",
		vsync_hz, mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc enable %u start_pos %u rd_ptr_irq %u\n",
		tc_enable, tc_cfg.start_pos, tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		tc_cfg.hw_vsync_mode, tc_cfg.vsync_count,
		tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc cfgheight %u thresh_start %u thresh_cont %u\n",
		tc_cfg.sync_cfg_height, tc_cfg.sync_threshold_start,
		tc_cfg.sync_threshold_continue);

	if (phys_enc->has_intf_te)
		phys_enc->hw_intf->ops.enable_tearcheck(phys_enc->hw_intf, &tc_cfg);
	else
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, &tc_cfg);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * we do a separate flush for each CTL and let
	 * CTL_START synchronize them
	 */
	return false;
}

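/*
 * Common enable path, also used as the ->restore hook: program the split
 * config, interface/pingpong config and tear check, then request an
 * interface flush.
 */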
static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->hw_intf->idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	ctl = phys_enc->hw_ctl;
	ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

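/*
 * Connect or disconnect the external TE pin from the tear-check logic,
 * via the INTF block when it owns tear check and the pingpong block
 * otherwise.
 */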
static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf || !phys_enc->hw_intf->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_intf->ops.connect_external_te(phys_enc->hw_intf, enable);
	} else {
		if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
	}
}

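/*
 * Disconnect the external TE pin before idle power-collapse; it is
 * reconnected from ->handle_post_kickoff() on the next kickoff.
 */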
static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;
	struct dpu_hw_intf *hw_intf;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	if (phys_enc->has_intf_te) {
		hw_intf = phys_enc->hw_intf;
		if (!hw_intf || !hw_intf->ops.get_line_count)
			return -EINVAL;
		return hw_intf->ops.get_line_count(hw_intf);
	}

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp || !hw_pp->ops.get_line_count)
		return -EINVAL;
	return hw_pp->ops.get_line_count(hw_pp);
}

static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->has_intf_te) {
		DRM_DEBUG_KMS("id:%u intf:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_intf->idx - INTF_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_intf->ops.disable_tearcheck)
			phys_enc->hw_intf->ops.disable_tearcheck(phys_enc->hw_intf);
	} else {
		if (!phys_enc->hw_pp) {
			DPU_ERROR("invalid encoder\n");
			return;
		}

		DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_pp->idx - PINGPONG_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_pp->ops.disable_tearcheck)
			phys_enc->hw_pp->ops.disable_tearcheck(phys_enc->hw_pp);
	}

	if (phys_enc->hw_intf->ops.bind_pingpong_blk) {
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				PINGPONG_NONE);

		ctl = phys_enc->hw_ctl;
		ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark the kickoff request as outstanding. If there is more than one
	 * outstanding, we have to wait for the previous one to complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	dpu_encoder_phys_cmd_enable_te(phys_enc);

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}

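/*
 * Note: despite its name, this ensures TE-driven operation by disabling
 * hardware autorefresh on the master encoder, so that frame transfers
 * happen only on explicit kickoff.
 */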
static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf->ops.disable_autorefresh)
			return;

		phys_enc->hw_intf->ops.disable_autorefresh(
				phys_enc->hw_intf,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	} else {
		if (!phys_enc->hw_pp ||
		    !phys_enc->hw_pp->ops.disable_autorefresh)
			return;

		phys_enc->hw_pp->ops.disable_autorefresh(
				phys_enc->hw_pp,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	}
}

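/*
 * Wait for the hardware to latch the programmed configuration, as
 * signalled by the CTL_START interrupt.
 */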
static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_CTL_START],
			dpu_encoder_phys_cmd_ctl_start_irq,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	}

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->hw_intf->idx - INTF_0);
	}

	return rc;
}

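/*
 * On the master encoder: if the CTL has already been started, wait for
 * the frame transfer to complete; otherwise wait for the first CTL_START.
 */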
static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return 0;

	if (phys_enc->hw_ctl->ops.is_started(phys_enc->hw_ctl))
		return dpu_encoder_phys_cmd_wait_for_tx_complete(phys_enc);

	return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->atomic_mode_set = dpu_encoder_phys_cmd_atomic_mode_set;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_enable = dpu_encoder_phys_cmd_irq_enable;
	ops->irq_disable = dpu_encoder_phys_cmd_irq_disable;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

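/**
 * dpu_encoder_phys_cmd_init - construct a new command-mode physical encoder
 * @dev: corresponding device for devres management
 * @p: pointer to init params structure
 *
 * Return: error code or newly allocated encoder
 */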
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(struct drm_device *dev,
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;

	DPU_DEBUG("intf\n");

	cmd_enc = drmm_kzalloc(dev, sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(-ENOMEM);
	}
	phys_enc = &cmd_enc->base;

	dpu_encoder_phys_init(phys_enc, p);

	mutex_init(&phys_enc->vblank_ctl_lock);
	phys_enc->vblank_refcount = 0;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->intf_mode = INTF_MODE_CMD;
	cmd_enc->stream_sel = 0;

	if (!phys_enc->hw_intf) {
		DPU_ERROR_CMDENC(cmd_enc, "no INTF provided\n");
		return ERR_PTR(-EINVAL);
	}

	/* DPU versions before 5.0 use the PINGPONG block for TE handling */
	if (phys_enc->dpu_kms->catalog->mdss_ver->core_major_ver >= 5)
		phys_enc->has_intf_te = true;

	if (phys_enc->has_intf_te && !phys_enc->hw_intf->ops.enable_tearcheck) {
		DPU_ERROR_CMDENC(cmd_enc, "tearcheck not supported\n");
		return ERR_PTR(-EINVAL);
	}

	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}