// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015-2018, 2020-2021 The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/delay.h>
#include "dpu_encoder_phys.h"
#include "dpu_hw_interrupts.h"
#include "dpu_hw_pingpong.h"
#include "dpu_core_irq.h"
#include "dpu_formats.h"
#include "dpu_trace.h"
#include "disp/msm_disp_snapshot.h"

#include <drm/drm_managed.h>

#define DPU_DEBUG_CMDENC(e, fmt, ...) DPU_DEBUG("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define DPU_ERROR_CMDENC(e, fmt, ...) DPU_ERROR("enc%d intf%d " fmt, \
		(e) && (e)->base.parent ? \
		(e)->base.parent->base.id : -1, \
		(e) ? (e)->base.hw_intf->idx - INTF_0 : -1, ##__VA_ARGS__)

#define to_dpu_encoder_phys_cmd(x) \
	container_of(x, struct dpu_encoder_phys_cmd, base)

#define PP_TIMEOUT_MAX_TRIALS	10

/*
 * Tearcheck sync start and continue thresholds are empirically found
 * based on common panels. In the future, we may want to allow panels to
 * override these default values.
 */
#define DEFAULT_TEARCHECK_SYNC_THRESH_START	4
#define DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE	4

static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc);

static bool dpu_encoder_phys_cmd_is_master(struct dpu_encoder_phys *phys_enc)
{
	return (phys_enc->split_role != ENC_ROLE_SLAVE);
}

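/*
 * Program the CTL and INTF blocks for command mode: select the command mode
 * engine, bind the pingpong block to this interface where the CTL supports
 * active configuration, and enable data compression when a DSC block is used.
 */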
static void _dpu_encoder_phys_cmd_update_intf_cfg(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;
	struct dpu_hw_intf_cfg intf_cfg = { 0 };
	struct dpu_hw_intf_cmd_mode_cfg cmd_mode_cfg = {};

	ctl = phys_enc->hw_ctl;
	if (!ctl->ops.setup_intf_cfg)
		return;

	intf_cfg.intf = phys_enc->hw_intf->idx;
	intf_cfg.intf_mode_sel = DPU_CTL_MODE_SEL_CMD;
	intf_cfg.stream_sel = cmd_enc->stream_sel;
	intf_cfg.mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);
	intf_cfg.dsc = dpu_encoder_helper_get_dsc(phys_enc);
	ctl->ops.setup_intf_cfg(ctl, &intf_cfg);

	/* setup which pp blk will connect to this intf */
	if (test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features) && phys_enc->hw_intf->ops.bind_pingpong_blk)
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				phys_enc->hw_pp->idx);

	if (intf_cfg.dsc != 0)
		cmd_mode_cfg.data_compress = true;

	cmd_mode_cfg.wide_bus_en = dpu_encoder_is_widebus_enabled(phys_enc->parent);

	if (phys_enc->hw_intf->ops.program_intf_cmd_cfg)
		phys_enc->hw_intf->ops.program_intf_cmd_cfg(phys_enc->hw_intf, &cmd_mode_cfg);
}

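/*
 * Pingpong "done" interrupt: a frame transfer to the panel has completed.
 * Signals frame-done to the parent encoder, drops one pending kickoff and
 * wakes any thread sleeping on the kickoff waitqueue.
 */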
static void dpu_encoder_phys_cmd_pp_tx_done_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	unsigned long lock_flags;
	int new_cnt;
	u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

	if (!phys_enc->hw_pp)
		return;

	DPU_ATRACE_BEGIN("pp_done_irq");
	/* notify all synchronous clients first, then asynchronous clients */
	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, event);

	spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
	new_cnt = atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
	spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

	trace_dpu_enc_phys_cmd_pp_tx_done(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  new_cnt, event);

	/* Signal any waiting atomic commit thread */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("pp_done_irq");
}

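/* Tear-check read pointer interrupt: used as the vblank event source. */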
static void dpu_encoder_phys_cmd_te_rd_ptr_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;
	struct dpu_encoder_phys_cmd *cmd_enc;

	DPU_ATRACE_BEGIN("rd_ptr_irq");
	cmd_enc = to_dpu_encoder_phys_cmd(phys_enc);

	dpu_encoder_vblank_callback(phys_enc->parent, phys_enc);

	atomic_add_unless(&cmd_enc->pending_vblank_cnt, -1, 0);
	wake_up_all(&cmd_enc->pending_vblank_wq);
	DPU_ATRACE_END("rd_ptr_irq");
}

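/* CTL_START interrupt: the CTL block has latched the new configuration. */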
static void dpu_encoder_phys_cmd_ctl_start_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	DPU_ATRACE_BEGIN("ctl_start_irq");

	atomic_add_unless(&phys_enc->pending_ctlstart_cnt, -1, 0);

	/* Signal any waiting ctl start interrupt */
	wake_up_all(&phys_enc->pending_kickoff_wq);
	DPU_ATRACE_END("ctl_start_irq");
}

static void dpu_encoder_phys_cmd_underrun_irq(void *arg)
{
	struct dpu_encoder_phys *phys_enc = arg;

	dpu_encoder_underrun_callback(phys_enc->parent, phys_enc);
}

static void dpu_encoder_phys_cmd_atomic_mode_set(
		struct dpu_encoder_phys *phys_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	phys_enc->irq[INTR_IDX_CTL_START] = phys_enc->hw_ctl->caps->intr_start;

	phys_enc->irq[INTR_IDX_PINGPONG] = phys_enc->hw_pp->caps->intr_done;

	if (phys_enc->has_intf_te)
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_intf->cap->intr_tear_rd_ptr;
	else
		phys_enc->irq[INTR_IDX_RDPTR] = phys_enc->hw_pp->caps->intr_rdptr;

	phys_enc->irq[INTR_IDX_UNDERRUN] = phys_enc->hw_intf->cap->intr_underrun;
}

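/*
 * Called when a pingpong done interrupt did not arrive in time: logs the
 * timeout (first occurrence and "panel dead" only, to avoid flooding),
 * captures a device snapshot and marks the encoder for a CTL reset.
 */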
static int _dpu_encoder_phys_cmd_handle_ppdone_timeout(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;
	bool do_log = false;
	struct drm_encoder *drm_enc;

	if (!phys_enc->hw_pp)
		return -EINVAL;

	drm_enc = phys_enc->parent;

	cmd_enc->pp_timeout_report_cnt++;
	if (cmd_enc->pp_timeout_report_cnt == PP_TIMEOUT_MAX_TRIALS) {
		frame_event |= DPU_ENCODER_FRAME_EVENT_PANEL_DEAD;
		do_log = true;
	} else if (cmd_enc->pp_timeout_report_cnt == 1) {
		do_log = true;
	}

	trace_dpu_enc_phys_cmd_pdone_timeout(DRMID(drm_enc),
		     phys_enc->hw_pp->idx - PINGPONG_0,
		     cmd_enc->pp_timeout_report_cnt,
		     atomic_read(&phys_enc->pending_kickoff_cnt),
		     frame_event);

	/* to avoid flooding, only log first time, and "dead" time */
	if (do_log) {
		DRM_ERROR("id:%d pp:%d kickoff timeout %d cnt %d koff_cnt %d\n",
			  DRMID(drm_enc),
			  phys_enc->hw_pp->idx - PINGPONG_0,
			  phys_enc->hw_ctl->idx - CTL_0,
			  cmd_enc->pp_timeout_report_cnt,
			  atomic_read(&phys_enc->pending_kickoff_cnt));
		msm_disp_snapshot_state(drm_enc->dev);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				phys_enc->irq[INTR_IDX_RDPTR]);
	}

	atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

	/* request a ctl reset before the next kickoff */
	phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

	dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, frame_event);

	return -ETIMEDOUT;
}

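/*
 * Wait for all pending kickoffs to retire (pending_kickoff_cnt dropping to
 * zero), handling a pingpong done timeout if the interrupt never fires.
 */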
static int _dpu_encoder_phys_cmd_wait_for_idle(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_PINGPONG],
			dpu_encoder_phys_cmd_pp_tx_done_irq,
			&wait_info);
	if (ret == -ETIMEDOUT)
		_dpu_encoder_phys_cmd_handle_ppdone_timeout(phys_enc);
	else if (!ret)
		cmd_enc->pp_timeout_report_cnt = 0;

	return ret;
}

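/*
 * Reference-counted enable/disable of the read pointer (vblank) interrupt:
 * the RDPTR callback is registered on the first enable and unregistered on
 * the last disable, all under vblank_ctl_lock.
 */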
static int dpu_encoder_phys_cmd_control_vblank_irq(
		struct dpu_encoder_phys *phys_enc,
		bool enable)
{
	int ret = 0;
	int refcount;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}

	mutex_lock(&phys_enc->vblank_ctl_lock);
	refcount = phys_enc->vblank_refcount;

	/* Slave encoders don't report vblank */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		goto end;

	/* protect against negative */
	if (!enable && refcount == 0) {
		ret = -EINVAL;
		goto end;
	}

	DRM_DEBUG_KMS("id:%u pp:%d enable=%s/%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      enable ? "true" : "false", refcount);

	if (enable) {
		if (phys_enc->vblank_refcount == 0)
			ret = dpu_core_irq_register_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_RDPTR],
					dpu_encoder_phys_cmd_te_rd_ptr_irq,
					phys_enc);
		if (!ret)
			phys_enc->vblank_refcount++;
	} else if (!enable) {
		if (phys_enc->vblank_refcount == 1)
			ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
					phys_enc->irq[INTR_IDX_RDPTR]);
		if (!ret)
			phys_enc->vblank_refcount--;
	}

end:
	mutex_unlock(&phys_enc->vblank_ctl_lock);
	if (ret) {
		DRM_ERROR("vblank irq err id:%u pp:%d ret:%d, enable %s/%d\n",
			  DRMID(phys_enc->parent),
			  phys_enc->hw_pp->idx - PINGPONG_0, ret,
			  enable ? "true" : "false", refcount);
	}

	return ret;
}

static void dpu_encoder_phys_cmd_irq_enable(struct dpu_encoder_phys *phys_enc)
{
	trace_dpu_enc_phys_cmd_irq_enable(DRMID(phys_enc->parent),
					  phys_enc->hw_pp->idx - PINGPONG_0,
					  phys_enc->vblank_refcount);

	dpu_core_irq_register_callback(phys_enc->dpu_kms,
				       phys_enc->irq[INTR_IDX_PINGPONG],
				       dpu_encoder_phys_cmd_pp_tx_done_irq,
				       phys_enc);
	dpu_core_irq_register_callback(phys_enc->dpu_kms,
				       phys_enc->irq[INTR_IDX_UNDERRUN],
				       dpu_encoder_phys_cmd_underrun_irq,
				       phys_enc);
	dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, true);

	if (dpu_encoder_phys_cmd_is_master(phys_enc) && phys_enc->irq[INTR_IDX_CTL_START])
		dpu_core_irq_register_callback(phys_enc->dpu_kms,
					       phys_enc->irq[INTR_IDX_CTL_START],
					       dpu_encoder_phys_cmd_ctl_start_irq,
					       phys_enc);
}

static void dpu_encoder_phys_cmd_irq_disable(struct dpu_encoder_phys *phys_enc)
{
	trace_dpu_enc_phys_cmd_irq_disable(DRMID(phys_enc->parent),
					   phys_enc->hw_pp->idx - PINGPONG_0,
					   phys_enc->vblank_refcount);

	if (dpu_encoder_phys_cmd_is_master(phys_enc) && phys_enc->irq[INTR_IDX_CTL_START])
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
						 phys_enc->irq[INTR_IDX_CTL_START]);

	dpu_core_irq_unregister_callback(phys_enc->dpu_kms, phys_enc->irq[INTR_IDX_UNDERRUN]);
	dpu_encoder_phys_cmd_control_vblank_irq(phys_enc, false);
	dpu_core_irq_unregister_callback(phys_enc->dpu_kms, phys_enc->irq[INTR_IDX_PINGPONG]);
}

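/*
 * Program the tear-check block (on the INTF on newer DPU revisions, on the
 * PINGPONG block otherwise) so panel writes are synchronized to the panel's
 * TE signal.
 */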
static void dpu_encoder_phys_cmd_tearcheck_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_tear_check tc_cfg = { 0 };
	struct drm_display_mode *mode;
	bool tc_enable = true;
	unsigned long vsync_hz;
	struct dpu_kms *dpu_kms;

	/*
	 * TODO: if/when resource allocation is refactored, move this to a
	 * place where the driver can actually return an error.
	 */
	if (!phys_enc->has_intf_te &&
	    (!phys_enc->hw_pp ||
	     !phys_enc->hw_pp->ops.enable_tearcheck)) {
		DPU_DEBUG_CMDENC(cmd_enc, "tearcheck not supported\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "intf %d pp %d\n",
			 phys_enc->hw_intf ? phys_enc->hw_intf->idx - INTF_0 : -1,
			 phys_enc->hw_pp ? phys_enc->hw_pp->idx - PINGPONG_0 : -1);

	mode = &phys_enc->cached_mode;

	dpu_kms = phys_enc->dpu_kms;

	/*
	 * TE default: dsi byte clock calculated based on 70 fps;
	 * around 14 ms to complete a kickoff cycle if te disabled;
	 * vclk_line based on 60 fps; write is faster than read;
	 * init == start == rdptr;
	 *
	 * vsync_count is the ratio of the MDP VSYNC clock frequency to the
	 * LCD panel frequency, divided by the number of rows (lines) in the
	 * LCD panel.
	 */
	vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");
	if (!vsync_hz) {
		DPU_DEBUG_CMDENC(cmd_enc, "invalid - no vsync clock\n");
		return;
	}

	tc_cfg.vsync_count = vsync_hz /
				(mode->vtotal * drm_mode_vrefresh(mode));
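
	/*
	 * Illustrative numbers only (not from the catalog): with a 19.2 MHz
	 * vsync clock and a 1080x2400@60 panel whose vtotal is 2500 lines,
	 * vsync_count = 19200000 / (2500 * 60) = 128 ticks per panel line.
	 */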

	/*
	 * Set the sync_cfg_height to twice vtotal so that if we lose a
	 * TE event coming from the display TE pin we won't stall immediately
	 */
	tc_cfg.hw_vsync_mode = 1;
	tc_cfg.sync_cfg_height = mode->vtotal * 2;
	tc_cfg.vsync_init_val = mode->vdisplay;
	tc_cfg.sync_threshold_start = DEFAULT_TEARCHECK_SYNC_THRESH_START;
	tc_cfg.sync_threshold_continue = DEFAULT_TEARCHECK_SYNC_THRESH_CONTINUE;
	tc_cfg.start_pos = mode->vdisplay;
	tc_cfg.rd_ptr_irq = mode->vdisplay + 1;

	DPU_DEBUG_CMDENC(cmd_enc,
			 "tc vsync_clk_speed_hz %lu vtotal %u vrefresh %u\n",
			 vsync_hz, mode->vtotal, drm_mode_vrefresh(mode));
	DPU_DEBUG_CMDENC(cmd_enc,
			 "tc enable %u start_pos %u rd_ptr_irq %u\n",
			 tc_enable, tc_cfg.start_pos, tc_cfg.rd_ptr_irq);
	DPU_DEBUG_CMDENC(cmd_enc,
			 "tc hw_vsync_mode %u vsync_count %u vsync_init_val %u\n",
			 tc_cfg.hw_vsync_mode, tc_cfg.vsync_count,
			 tc_cfg.vsync_init_val);
	DPU_DEBUG_CMDENC(cmd_enc,
			 "tc cfgheight %u thresh_start %u thresh_cont %u\n",
			 tc_cfg.sync_cfg_height, tc_cfg.sync_threshold_start,
			 tc_cfg.sync_threshold_continue);

	if (phys_enc->has_intf_te)
		phys_enc->hw_intf->ops.enable_tearcheck(phys_enc->hw_intf, &tc_cfg);
	else
		phys_enc->hw_pp->ops.enable_tearcheck(phys_enc->hw_pp, &tc_cfg);
}

static void _dpu_encoder_phys_cmd_pingpong_config(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp || !phys_enc->hw_ctl->ops.setup_intf_cfg) {
		DPU_ERROR("invalid arg(s), enc %d\n", phys_enc != NULL);
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d, enabling mode:\n",
			 phys_enc->hw_pp->idx - PINGPONG_0);
	drm_mode_debug_printmodeline(&phys_enc->cached_mode);

	_dpu_encoder_phys_cmd_update_intf_cfg(phys_enc);
	dpu_encoder_phys_cmd_tearcheck_config(phys_enc);
}

static bool dpu_encoder_phys_cmd_needs_single_flush(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * we do separate flush for each CTL and let
	 * CTL_START synchronize them
	 */
	return false;
}

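/*
 * Common enable path, also used as the ->restore callback after idle power
 * collapse: reapplies the split configuration, the interface/pingpong and
 * tearcheck programming, and flags an interface flush.
 */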
static void dpu_encoder_phys_cmd_enable_helper(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_encoder_helper_split_config(phys_enc, phys_enc->hw_intf->idx);

	_dpu_encoder_phys_cmd_pingpong_config(phys_enc);

	ctl = phys_enc->hw_ctl;
	ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
}

static void dpu_encoder_phys_cmd_enable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid phys encoder\n");
		return;
	}

	DPU_DEBUG_CMDENC(cmd_enc, "pp %d\n", phys_enc->hw_pp->idx - PINGPONG_0);

	if (phys_enc->enable_state == DPU_ENC_ENABLED) {
		DPU_ERROR("already enabled\n");
		return;
	}

	dpu_encoder_phys_cmd_enable_helper(phys_enc);
	phys_enc->enable_state = DPU_ENC_ENABLED;
}

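/*
 * Connect or disconnect the external TE pin from the tearcheck logic,
 * through the INTF block where supported and the PINGPONG block otherwise.
 */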
static void _dpu_encoder_phys_cmd_connect_te(
		struct dpu_encoder_phys *phys_enc, bool enable)
{
	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf || !phys_enc->hw_intf->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_intf->ops.connect_external_te(phys_enc->hw_intf, enable);
	} else {
		if (!phys_enc->hw_pp || !phys_enc->hw_pp->ops.connect_external_te)
			return;

		trace_dpu_enc_phys_cmd_connect_te(DRMID(phys_enc->parent), enable);
		phys_enc->hw_pp->ops.connect_external_te(phys_enc->hw_pp, enable);
	}
}

static void dpu_encoder_phys_cmd_prepare_idle_pc(
		struct dpu_encoder_phys *phys_enc)
{
	_dpu_encoder_phys_cmd_connect_te(phys_enc, false);
}

static int dpu_encoder_phys_cmd_get_line_count(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_pingpong *hw_pp;
	struct dpu_hw_intf *hw_intf;

	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return -EINVAL;

	if (phys_enc->has_intf_te) {
		hw_intf = phys_enc->hw_intf;
		if (!hw_intf || !hw_intf->ops.get_line_count)
			return -EINVAL;
		return hw_intf->ops.get_line_count(hw_intf);
	}

	hw_pp = phys_enc->hw_pp;
	if (!hw_pp || !hw_pp->ops.get_line_count)
		return -EINVAL;

	return hw_pp->ops.get_line_count(hw_pp);
}

static void dpu_encoder_phys_cmd_disable(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
		to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_hw_ctl *ctl;

	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DPU_ERROR_CMDENC(cmd_enc, "already disabled\n");
		return;
	}

	if (phys_enc->has_intf_te) {
		DRM_DEBUG_KMS("id:%u intf:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_intf->idx - INTF_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_intf->ops.disable_tearcheck)
			phys_enc->hw_intf->ops.disable_tearcheck(phys_enc->hw_intf);
	} else {
		if (!phys_enc->hw_pp) {
			DPU_ERROR("invalid encoder\n");
			return;
		}

		DRM_DEBUG_KMS("id:%u pp:%d state:%d\n", DRMID(phys_enc->parent),
			      phys_enc->hw_pp->idx - PINGPONG_0,
			      phys_enc->enable_state);

		if (phys_enc->hw_pp->ops.disable_tearcheck)
			phys_enc->hw_pp->ops.disable_tearcheck(phys_enc->hw_pp);
	}

	if (phys_enc->hw_intf->ops.bind_pingpong_blk) {
		phys_enc->hw_intf->ops.bind_pingpong_blk(
				phys_enc->hw_intf,
				PINGPONG_NONE);

		ctl = phys_enc->hw_ctl;
		ctl->ops.update_pending_flush_intf(ctl, phys_enc->hw_intf->idx);
	}

	phys_enc->enable_state = DPU_ENC_DISABLED;
}

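/*
 * Serialize kickoffs: any previously queued kickoff must retire before the
 * next frame is triggered; a failed wait discards the stale kickoff count.
 */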
static void dpu_encoder_phys_cmd_prepare_for_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	int ret;

	if (!phys_enc->hw_pp) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DRM_DEBUG_KMS("id:%u pp:%d pending_cnt:%d\n", DRMID(phys_enc->parent),
		      phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(&phys_enc->pending_kickoff_cnt));

	/*
	 * Mark the kickoff request as outstanding. If there is more than one
	 * outstanding, we have to wait for the previous one to complete.
	 */
	ret = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (ret) {
		/* force pending_kickoff_cnt 0 to discard failed kickoff */
		atomic_set(&phys_enc->pending_kickoff_cnt, 0);
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d pp:%d\n",
			  DRMID(phys_enc->parent), ret,
			  phys_enc->hw_pp->idx - PINGPONG_0);
	}

	dpu_encoder_phys_cmd_enable_te(phys_enc);

	DPU_DEBUG_CMDENC(cmd_enc, "pp:%d pending_cnt %d\n",
			 phys_enc->hw_pp->idx - PINGPONG_0,
			 atomic_read(&phys_enc->pending_kickoff_cnt));
}

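/*
 * Despite the name, this disables panel autorefresh (via the INTF or
 * PINGPONG helper) so that frame transfers are triggered by software
 * kickoffs rather than by the panel's autorefresh logic.
 */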
static void dpu_encoder_phys_cmd_enable_te(struct dpu_encoder_phys *phys_enc)
{
	if (!phys_enc)
		return;
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return;

	if (phys_enc->has_intf_te) {
		if (!phys_enc->hw_intf->ops.disable_autorefresh)
			return;

		phys_enc->hw_intf->ops.disable_autorefresh(
				phys_enc->hw_intf,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	} else {
		if (!phys_enc->hw_pp ||
		    !phys_enc->hw_pp->ops.disable_autorefresh)
			return;

		phys_enc->hw_pp->ops.disable_autorefresh(
				phys_enc->hw_pp,
				DRMID(phys_enc->parent),
				phys_enc->cached_mode.vdisplay);
	}
}

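/*
 * Block until the CTL_START interrupt confirms that the CTL block has
 * latched the configuration programmed for this frame.
 */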
static int _dpu_encoder_phys_cmd_wait_for_ctl_start(
		struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_phys_cmd *cmd_enc =
			to_dpu_encoder_phys_cmd(phys_enc);
	struct dpu_encoder_wait_info wait_info;
	int ret;

	wait_info.wq = &phys_enc->pending_kickoff_wq;
	wait_info.atomic_cnt = &phys_enc->pending_ctlstart_cnt;
	wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

	ret = dpu_encoder_helper_wait_for_irq(phys_enc,
			phys_enc->irq[INTR_IDX_CTL_START],
			dpu_encoder_phys_cmd_ctl_start_irq,
			&wait_info);
	if (ret == -ETIMEDOUT) {
		DPU_ERROR_CMDENC(cmd_enc, "ctl start interrupt wait failed\n");
		ret = -EINVAL;
	}

	return ret;
}

static int dpu_encoder_phys_cmd_wait_for_tx_complete(
		struct dpu_encoder_phys *phys_enc)
{
	int rc;

	rc = _dpu_encoder_phys_cmd_wait_for_idle(phys_enc);
	if (rc) {
		DRM_ERROR("failed wait_for_idle: id:%u ret:%d intf:%d\n",
			  DRMID(phys_enc->parent), rc,
			  phys_enc->hw_intf->idx - INTF_0);
	}

	return rc;
}

static int dpu_encoder_phys_cmd_wait_for_commit_done(
		struct dpu_encoder_phys *phys_enc)
{
	/* only required for master controller */
	if (!dpu_encoder_phys_cmd_is_master(phys_enc))
		return 0;

	if (phys_enc->hw_ctl->ops.is_started(phys_enc->hw_ctl))
		return dpu_encoder_phys_cmd_wait_for_tx_complete(phys_enc);

	return _dpu_encoder_phys_cmd_wait_for_ctl_start(phys_enc);
}

static void dpu_encoder_phys_cmd_handle_post_kickoff(
		struct dpu_encoder_phys *phys_enc)
{
	/*
	 * re-enable external TE, either for the first time after enabling
	 * or if disabled for Autorefresh
	 */
	_dpu_encoder_phys_cmd_connect_te(phys_enc, true);
}

static void dpu_encoder_phys_cmd_trigger_start(
		struct dpu_encoder_phys *phys_enc)
{
	dpu_encoder_helper_trigger_start(phys_enc);
}

static void dpu_encoder_phys_cmd_init_ops(
		struct dpu_encoder_phys_ops *ops)
{
	ops->is_master = dpu_encoder_phys_cmd_is_master;
	ops->atomic_mode_set = dpu_encoder_phys_cmd_atomic_mode_set;
	ops->enable = dpu_encoder_phys_cmd_enable;
	ops->disable = dpu_encoder_phys_cmd_disable;
	ops->control_vblank_irq = dpu_encoder_phys_cmd_control_vblank_irq;
	ops->wait_for_commit_done = dpu_encoder_phys_cmd_wait_for_commit_done;
	ops->prepare_for_kickoff = dpu_encoder_phys_cmd_prepare_for_kickoff;
	ops->wait_for_tx_complete = dpu_encoder_phys_cmd_wait_for_tx_complete;
	ops->trigger_start = dpu_encoder_phys_cmd_trigger_start;
	ops->needs_single_flush = dpu_encoder_phys_cmd_needs_single_flush;
	ops->irq_enable = dpu_encoder_phys_cmd_irq_enable;
	ops->irq_disable = dpu_encoder_phys_cmd_irq_disable;
	ops->restore = dpu_encoder_phys_cmd_enable_helper;
	ops->prepare_idle_pc = dpu_encoder_phys_cmd_prepare_idle_pc;
	ops->handle_post_kickoff = dpu_encoder_phys_cmd_handle_post_kickoff;
	ops->get_line_count = dpu_encoder_phys_cmd_get_line_count;
}

/**
 * dpu_encoder_phys_cmd_init - Construct a new command mode physical encoder
 * @dev: Corresponding device for devres management
 * @p: Pointer to init params structure
 * Return: Error code or newly allocated encoder
 */
struct dpu_encoder_phys *dpu_encoder_phys_cmd_init(struct drm_device *dev,
		struct dpu_enc_phys_init_params *p)
{
	struct dpu_encoder_phys *phys_enc = NULL;
	struct dpu_encoder_phys_cmd *cmd_enc = NULL;

	DPU_DEBUG("intf\n");

	cmd_enc = drmm_kzalloc(dev, sizeof(*cmd_enc), GFP_KERNEL);
	if (!cmd_enc) {
		DPU_ERROR("failed to allocate\n");
		return ERR_PTR(-ENOMEM);
	}
	phys_enc = &cmd_enc->base;

	dpu_encoder_phys_init(phys_enc, p);

	mutex_init(&phys_enc->vblank_ctl_lock);
	phys_enc->vblank_refcount = 0;

	dpu_encoder_phys_cmd_init_ops(&phys_enc->ops);
	phys_enc->intf_mode = INTF_MODE_CMD;
	cmd_enc->stream_sel = 0;

	if (!phys_enc->hw_intf) {
		DPU_ERROR_CMDENC(cmd_enc, "no INTF provided\n");
		return ERR_PTR(-EINVAL);
	}

	/* DPU before 5.0 uses PINGPONG for TE handling */
	if (phys_enc->dpu_kms->catalog->mdss_ver->core_major_ver >= 5)
		phys_enc->has_intf_te = true;

	if (phys_enc->has_intf_te && !phys_enc->hw_intf->ops.enable_tearcheck) {
		DPU_ERROR_CMDENC(cmd_enc, "tearcheck not supported\n");
		return ERR_PTR(-EINVAL);
	}

	atomic_set(&cmd_enc->pending_vblank_cnt, 0);
	init_waitqueue_head(&cmd_enc->pending_vblank_wq);

	DPU_DEBUG_CMDENC(cmd_enc, "created\n");

	return phys_enc;
}