// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015-2018, 2020-2021 The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__

#include <linux/delay.h>
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_hw_pingpong.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"
#include "disp/msm_disp_snapshot.h"

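/*
 * Logging helpers: prefix messages with the parent encoder id and the
 * hardware interface index, or -1 for either while they are not yet known.
 */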
#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.intf_idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

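/*
 * Number of consecutive pingpong-done timeouts after which the panel is
 * reported dead: _dpu_encoder_phys_cmd_handle_ppdone_timeout() sets
 * DPU_ENCODER_FRAME_EVENT_PANEL_DEAD once this many trials have failed.
 */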
#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels to
 * override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

#define DPU_ENC_WR_PTR_START_TIMEOUT_US 20000

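/*
 * Delay, in microseconds, of each poll iteration used while waiting for an
 * ongoing pingpong frame transfer to finish when disabling autorefresh
 * (see dpu_encoder_phys_cmd_enable_te()).
 */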
#define DPU_ENC_MAX_POLL_TIMEOUT_US	2000

static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc);

static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
{
	return (phys_enc->split_role != ENC_ROLE_SLAVE);
}

static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->intf_idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	intf_cfg.dsc = dpu_encoder_helper_get_dsc(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);

	/* setup which pp blk will connect to this intf */
	if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) && phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				true,
				phys_enc->hw_pp->idx);
}

static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

static void dpu_encoder_phys_cmd_pp_rd_ptr_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	dpu_encoder_vblank_callback(phys_enc->parent, phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiting ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg, int irq_idx)
{
	struct dpu_encoder_phys *phys_enc = arg;

	dpu_encoder_underrun_callback(phys_enc->parent, phys_enc);
}

static void dpu_encoder_phys_cmd_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	phys_enc->irq[INTR_IDX_CTL_START] = phys_enc->hw_ctl->caps->intr_start;

	phys_enc->irq[INTR_IDX_PINGPONG] = phys_enc->hw_pp->caps->intr_done;

	phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_pp->caps->intr_rdptr;

	phys_enc->irq[INTR_IDX_UNDERRUN] = phys_enc->hw_intf->cap->intr_underrun;
}

static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;
	struct drm_encoder *drm_enc;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	drm_enc = phys_enc->parent;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
			  DRMID(drm_enc),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
		msm_disp_snapshot_state(drm_enc->dev);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_PINGPONG],
			dpu_encoder_phys_cmd_pp_tx_done_irq,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	refcount = atomic_read(&phys_enc->vblank_refcount);

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

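	/*
	 * Register the rd_ptr interrupt only on the 0 -> 1 refcount
	 * transition, and unregister it again on the 1 -> 0 transition, so
	 * the shared interrupt stays registered while any consumer needs it.
	 */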
	if (enable && atomic_inc_return(&phys_enc->vblank_refcount) == 1)
		ret = dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR],
				dpu_encoder_phys_cmd_pp_rd_ptr_irq,
				phys_enc);
	else if (!enable && atomic_dec_return(&phys_enc->vblank_refcount) == 0)
		ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);

end:
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_control(struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	trace_dpu_enc_phys_cmd_irq_ctrl(DRMID(phys_enc->parent),
			phys_enc->hw_pp->idx - PINGPONG_0,
			enable, atomic_read(&phys_enc->vblank_refcount));

	if (enable) {
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_PINGPONG],
				dpu_encoder_phys_cmd_pp_tx_done_irq,
				phys_enc);
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_UNDERRUN],
				dpu_encoder_phys_cmd_underrun_irq,
				phys_enc);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_core_irq_register_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_CTL_START],
					dpu_encoder_phys_cmd_ctl_start_irq,
					phys_enc);
	} else {
		if (dpu_encoder_phys_cmd_is_master(phys_enc))
			dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_CTL_START]);

		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_UNDERRUN]);
		dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_PINGPONG]);
	}
}

static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	u32 vsync_hz;
	struct dpu_kms *dpu_kms;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	mode = &phys_enc->cached_mode;

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (!phys_enc->hw_pp->ops.setup_tearcheck ||
		!phys_enc->hw_pp->ops.enable_tearcheck) {
		DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
		return;
	}

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: dsi byte clock calculated based on 70 fps;
	 * around 14 ms to complete a kickoff cycle if te disabled;
	 * vclk_line based on 60 fps; write is faster than read;
	 * init == start == rdptr;
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to the
	 * LCD panel frequency, divided by the number of rows (lines) in the
	 * LCD panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (vsync_hz <= 0) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - vsync_hz %u\n",
				 vsync_hz);
		return;
	}

	tc_cfg.vsync_count = vsync_hz /
				(mode->vtotal * drm_mode_vrefresh(mode));

	/*
	 * Set the sync_cfg_height to twice vtotal so that if we lose a
	 * TE event coming from the display TE pin we won't stall immediately
	 */
	tc_cfg.hw_vsync_mode = 1;
	tc_cfg.sync_cfg_height = mode->vtotal * 2;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;

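	/*
	 * Worked example with hypothetical numbers: for a 19.2 MHz "vsync"
	 * clock and a 1080x2340 panel with vtotal = 2400 at 60 Hz,
	 * vsync_count = 19200000 / (2400 * 60) = 133 clock ticks per line;
	 * the counter rolls over at sync_cfg_height = 4800 lines, the write
	 * pointer starts at line 2340 and the rd_ptr interrupt fires at
	 * line 2341.
	 */
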
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d vsync_clk_speed_hz %u vtotal %u vrefresh %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, vsync_hz,
		mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d enable %u start_pos %u rd_ptr_irq %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_enable, tc_cfg.start_pos,
		tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.hw_vsync_mode,
		tc_cfg.vsync_count, tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
		"tc %d cfgheight %u thresh_start %u thresh_cont %u\n",
		phys_enc->hw_pp->idx - PINGPONG_0, tc_cfg.sync_cfg_height,
		tc_cfg.sync_threshold_start, tc_cfg.sync_threshold_continue);

	phys_enc->hw_pp->ops.setup_tearcheck(phys_enc->hw_pp, &tc_cfg);
	phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, tc_enable);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * we do separate flush for each CTL and let
	 * CTL_START synchronize them
	 */
	return false;
}

static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->intf_idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	ctl = phys_enc->hw_ctl;
	ctl->ops.update_pending_flush_intf(ctl, phys_enc->intf_idx);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
		return;

	trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
	phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
}

static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp->ops.get_line_count)
		return -EINVAL;

	return hw_pp->ops.get_line_count(hw_pp);
}

static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      phys_enc->enable_state);

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->hw_pp->ops.enable_tearcheck)
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, false);

	if (phys_enc->hw_intf->ops.bind_pingpong_blk) {
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				false,
				phys_enc->hw_pp->idx);

		ctl = phys_enc->hw_ctl;
		ctl->ops.update_pending_flush_intf(ctl, phys_enc->intf_idx);
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}

static void dpu_encoder_phys_cmd_destroy(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	kfree(cmd_enc);
}

static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark the kickoff request as outstanding. If there is more than one
	 * outstanding, we have to wait for the previous kickoff to complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	dpu_encoder_phys_cmd_enable_te(phys_enc);

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(&phys_enc->pending_kickoff_cnt));
}

static bool dpu_encoder_phys_cmd_is_ongoing_pptx(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pp_vsync_info info;

	if (!phys_enc)
		return false;

	phys_enc->hw_pp->ops.get_vsync_info(phys_enc->hw_pp, &info);
	if (info.wr_ptr_line_count > 0 &&
	    info.wr_ptr_line_count < phys_enc->cached_mode.vdisplay)
		return true;

	return false;
}

static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int trial = 0;

	if (!phys_enc)
		return;
	if (!phys_enc->hw_pp)
		return;
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	/* If autorefresh is already disabled, we have nothing to do */
	if (!phys_enc->hw_pp->ops.get_autorefresh(phys_enc->hw_pp, NULL))
		return;

	/*
	 * If autorefresh is enabled, disable it and make sure it is safe to
	 * proceed with the current frame commit/push. The sequence followed is:
	 * 1. Disable TE
	 * 2. Disable autorefresh config
	 * 3. Poll for frame transfer ongoing to be false
	 * 4. Enable TE back
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
	phys_enc->hw_pp->ops.setup_autorefresh(phys_enc->hw_pp, 0, false);

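	/*
	 * Poll budget: each iteration below sleeps DPU_ENC_MAX_POLL_TIMEOUT_US
	 * (2 ms), so with KICKOFF_TIMEOUT_MS at, for example, 84 ms the loop
	 * gives up after roughly 42 polls if the write pointer never leaves
	 * the active region.
	 */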
	do {
		udelay(DPU_ENC_MAX_POLL_TIMEOUT_US);
		if ((trial * DPU_ENC_MAX_POLL_TIMEOUT_US)
				> (KICKOFF_TIMEOUT_MS * USEC_PER_MSEC)) {
			DPU_ERROR_CMDENC(cmd_enc,
					"disable autorefresh failed\n");
			break;
		}

		trial++;
	} while (dpu_encoder_phys_cmd_is_ongoing_pptx(phys_enc));

	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);

	DPU_DEBUG_CMDENC(to_dpu_encoder_phys_cmd(phys_enc),
			 "disabled autorefresh\n");
}

static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_CTL_START],
			dpu_encoder_phys_cmd_ctl_start_irq,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	}

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->intf_idx - INTF_0);
	}

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return 0;

	if (phys_enc->hw_ctl->ops.is_started(phys_enc->hw_ctl))
		return dpu_encoder_phys_cmd_wait_for_tx_complete(phys_enc);

	return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
}

static int dpu_encoder_phys_cmd_wait_for_vblank(
		struct dpu_encoder_phys *phys_enc)
{
	int rc = 0;
	struct dpu_encoder_phys_cmd *cmd_enc;
	struct dpu_encoder_wait_info wait_info;

	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return rc;

	wait_info.wq = &cmd_enc->pending_vblank_wq;
	wait_info.atomic_cnt = &cmd_enc->pending_vblank_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	atomic_inc(&cmd_enc->pending_vblank_cnt);

	rc = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_RDPTR],
			dpu_encoder_phys_cmd_pp_rd_ptr_irq,
			&wait_info);

	return rc;
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->atomic_mode_set = dpu_encoder_phys_cmd_atomic_mode_set;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->destroy = dpu_encoder_phys_cmd_destroy;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->wait_for_vblank = dpu_encoder_phys_cmd_wait_for_vblank;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_control = dpu_encoder_phys_cmd_irq_control;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;
	int i;

	DPU_DEBUG("intf %d\n", p->intf_idx - INTF_0);

	cmd_enc = kzalloc(sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(-ENOMEM);
	}
	phys_enc = &cmd_enc->base;
	phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;
	phys_enc->intf_idx = p->intf_idx;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->parent = p->parent;
	phys_enc->dpu_kms = p->dpu_kms;
	phys_enc->split_role = p->split_role;
	phys_enc->intf_mode = INTF_MODE_CMD;
	phys_enc->enc_spinlock = p->enc_spinlock;
	cmd_enc->stream_sel = 0;
	phys_enc->enable_state = DPU_ENC_DISABLED;
	for (i = 0; i < ARRAY_SIZE(phys_enc->irq); i++)
		phys_enc->irq[i] = -EINVAL;

	atomic_set(&phys_enc->vblank_refcount, 0);
	atomic_set(&phys_enc->pending_kickoff_cnt, 0);
	atomic_set(&phys_enc->pending_ctlstart_cnt, 0);
	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&phys_enc->pending_kickoff_wq);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}