/*
 * Copyright (c) 2014-2018, The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <[email protected]>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/kthread.h>
#include <linux/debugfs.h>
#include <linux/seq_file.h>

#include "msm_drv.h"
#include "dpu_kms.h"
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_crtc.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"

#define DPU_DEBUG_ENC(e, fmt, ...) DPU_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC(e, fmt, ...) DPU_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_DEBUG_PHYS(p, fmt, ...) DPU_DEBUG("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

#define DPU_ERROR_PHYS(p, fmt, ...) DPU_ERROR("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

/*
 * Two, to anticipate panels that can do cmd/vid dynamic switching.
 * The plan is to create all possible physical encoder types, and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

#define MISR_BUFF_SIZE			256

#define IDLE_SHORT_TIMEOUT	1

#define MAX_VDISPLAY_SPLIT 1080

/**
 * enum dpu_enc_rc_events - events for resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks. Regardless of the previous
 *	state, the resource should be in ON state at the end of this event.
 * @DPU_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @DPU_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, leaves the RC state
 *	in the PRE_OFF state. It should be followed by the STOP event as
 *	part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @DPU_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and change the resource state
 *	to IDLE.
 */
enum dpu_enc_rc_events {
	DPU_ENC_RC_EVENT_KICKOFF = 1,
	DPU_ENC_RC_EVENT_FRAME_DONE,
	DPU_ENC_RC_EVENT_PRE_STOP,
	DPU_ENC_RC_EVENT_STOP,
	DPU_ENC_RC_EVENT_ENTER_IDLE
};

/*
 * enum dpu_enc_rc_states - states that the resource control maintains
 * @DPU_ENC_RC_STATE_OFF: Resource is in OFF state
 * @DPU_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @DPU_ENC_RC_STATE_ON: Resource is in ON state
 * @DPU_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum dpu_enc_rc_states {
	DPU_ENC_RC_STATE_OFF,
	DPU_ENC_RC_STATE_PRE_OFF,
	DPU_ENC_RC_STATE_ON,
	DPU_ENC_RC_STATE_IDLE
};
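
/*
 * Summary of the transitions implemented by dpu_encoder_resource_control()
 * below (a sketch derived from the event and state documentation above):
 *
 *	KICKOFF:    OFF/IDLE -> ON	(clocks and IRQs enabled)
 *	FRAME_DONE: ON -> ON		(schedules delayed ENTER_IDLE work)
 *	ENTER_IDLE: ON -> IDLE		(resources or IRQs disabled)
 *	PRE_STOP:   ON (or IDLE, for video mode) -> PRE_OFF
 *	STOP:       PRE_OFF/IDLE -> OFF
 */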

/**
 * struct dpu_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: Number of horizontal tiles in case of split
 *			interface
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization:
 *			only valid after enable; cleared at disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. The
 *			number of pingpong blocks can differ from num_phys_encs.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data:	Data from upper layer for VBLANK notification
 * @debugfs_root:		Debug file system root file node
 * @enc_lock:			Lock around physical encoder create/destroy and
 *				access.
 * @frame_busy_mask:		Bitmask tracking which phys_encs are still busy
 *				processing the current command.
 *				Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @frame_done_timeout:		frame done timeout in Hz
 * @frame_done_timer:		watchdog timer for frame done event
 * @vsync_event_timer:		vsync timer
 * @disp_info:			local copy of msm_display_info struct
 * @misr_enable:		misr enable/disable status
 * @misr_frame_count:		misr frame count before start capturing the data
 * @idle_pc_supported:		indicate if idle power collapse is supported
 * @rc_lock:			resource control mutex lock to protect
 *				virt encoder over various state changes
 * @rc_state:			resource controller state
 * @delayed_off_work:		delayed worker to schedule disabling of
 *				clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:		worker to handle vsync event for autorefresh
 * @topology:			topology of the display
 * @mode_set_complete:		flag to indicate modeset completion
 * @idle_timeout:		idle timeout duration in milliseconds
 */
struct dpu_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct dpu_encoder_phys *cur_master;
	struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;
	struct timer_list vsync_event_timer;

	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum dpu_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;
	bool mode_set_complete;

	u32 idle_timeout;
};

#define to_dpu_encoder_virt(x) container_of(x, struct dpu_encoder_virt, base)
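
/*
 * Toggle the encoder's runtime PM reference on the DPU device: take a
 * pm_runtime reference when enabling and drop it when disabling, after
 * validating the encoder/kms pointers.
 */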
static inline int _dpu_encoder_power_enable(struct dpu_encoder_virt *dpu_enc,
								bool enable)
{
	struct drm_encoder *drm_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;

	if (!dpu_enc) {
		DPU_ERROR("invalid dpu enc\n");
		return -EINVAL;
	}

	drm_enc = &dpu_enc->base;
	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = drm_enc->dev->dev_private;
	if (!priv->kms) {
		DPU_ERROR("invalid kms\n");
		return -EINVAL;
	}

	dpu_kms = to_dpu_kms(priv->kms);

	if (enable)
		pm_runtime_get_sync(&dpu_kms->pdev->dev);
	else
		pm_runtime_put_sync(&dpu_kms->pdev->dev);

	return 0;
}

void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	DRM_ERROR("irq timeout id=%u, intf=%d, pp=%d, intr=%d\n",
		  DRMID(phys_enc->parent), phys_enc->intf_idx - INTF_0,
		  phys_enc->hw_pp->idx - PINGPONG_0, intr_idx);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc,
				DPU_ENCODER_FRAME_EVENT_ERROR);
}

static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
		int32_t hw_id, struct dpu_encoder_wait_info *info);

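/*
 * Wait for the interrupt described by @intr_idx to fire. If the wait times
 * out but the interrupt status register shows that the IRQ did fire, the
 * registered callback is invoked manually (with local IRQs disabled) and
 * the wait is treated as a success.
 */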
int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx,
		struct dpu_encoder_wait_info *wait_info)
{
	struct dpu_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DRM_ERROR("encoder is disabled id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		return -EWOULDBLOCK;
	}

	if (irq->irq_idx < 0) {
		DRM_DEBUG_KMS("skip irq wait id=%u, intr=%d, hw=%d, irq=%s",
			      DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			      irq->name);
		return 0;
	}

	DRM_DEBUG_KMS("id=%u, intr=%d, hw=%d, irq=%d, pp=%d, pending_cnt=%d",
		      DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		      irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(wait_info->atomic_cnt));

	ret = dpu_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		irq_status = dpu_core_irq_read(phys_enc->dpu_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			DRM_DEBUG_KMS("irq not triggered id=%u, intr=%d, "
				      "hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->hw_idx, irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			DRM_DEBUG_KMS("irq timeout id=%u, intr=%d, "
				      "hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->hw_idx, irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
		}
	} else {
		ret = 0;
		trace_dpu_enc_irq_wait_success(DRMID(phys_enc->parent),
			intr_idx, irq->hw_idx, irq->irq_idx,
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	return ret;
}

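/*
 * Map the encoder interrupt (@intr_idx) to a core IRQ index, register the
 * physical encoder's callback for it, and enable it. On failure,
 * irq->irq_idx is reset to -EINVAL.
 */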
int dpu_encoder_helper_register_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	if (irq->irq_idx >= 0) {
		DPU_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	irq->irq_idx = dpu_core_irq_idx_lookup(phys_enc->dpu_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		DPU_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		DPU_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = dpu_core_irq_enable(phys_enc->dpu_kms, &irq->irq_idx, 1);
	if (ret) {
		DRM_ERROR("enable failed id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				irq->irq_idx, &irq->cb);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	trace_dpu_enc_irq_register_success(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx);

	return ret;
}

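/*
 * Disable and unregister the core IRQ callback for @intr_idx. Failures to
 * disable or unregister are logged but not fatal; irq->irq_idx is always
 * reset to -EINVAL on the way out.
 */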
int dpu_encoder_helper_unregister_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* skip irqs that weren't registered, but flag the duplicate attempt */
	if (irq->irq_idx < 0) {
		DRM_ERROR("duplicate unregister id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		return 0;
	}

	ret = dpu_core_irq_disable(phys_enc->dpu_kms, &irq->irq_idx, 1);
	if (ret) {
		DRM_ERROR("disable failed id=%u, intr=%d, hw=%d, irq=%d ret=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx, ret);
	}

	ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		DRM_ERROR("unreg cb fail id=%u, intr=%d, hw=%d, irq=%d ret=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx, ret);
	}

	trace_dpu_enc_irq_unregister_success(DRMID(phys_enc->parent), intr_idx,
					     irq->hw_idx, irq->irq_idx);

	irq->irq_idx = -EINVAL;

	return 0;
}

void dpu_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct dpu_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		DPU_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != NULL, hw_res != NULL,
				conn_state != NULL);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = dpu_enc->display_num_of_h_tiles;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}
}

static void dpu_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--dpu_enc->num_phys_encs;
			dpu_enc->phys_encs[i] = NULL;
		}
	}

	if (dpu_enc->num_phys_encs)
		DPU_ERROR_ENC(dpu_enc, "expected 0 num_phys_encs not %d\n",
				dpu_enc->num_phys_encs);
	dpu_enc->num_phys_encs = 0;
	mutex_unlock(&dpu_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&dpu_enc->enc_lock);

	kfree(dpu_enc);
}

void dpu_encoder_helper_split_config(
		struct dpu_encoder_phys *phys_enc,
		enum dpu_intf interface)
{
	struct dpu_encoder_virt *dpu_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct dpu_hw_mdp *hw_mdptop;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &dpu_enc->disp_info;

	if (disp_info->intf_type != DRM_MODE_ENCODER_DSI)
		return;

	/*
	 * Disable split modes since the encoder will be operating as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		DPU_DEBUG_ENC(dpu_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	}
}

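/*
 * Copy the private fields (and OR in the private flags) from the matching
 * probed connector mode into @adj_mode, matching on resolution and refresh
 * rate. See the comment at the call site in atomic_check for why.
 */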
static void _dpu_encoder_adjust_mode(struct drm_connector *connector,
		struct drm_display_mode *adj_mode)
{
	struct drm_display_mode *cur_mode;

	if (!connector || !adj_mode)
		return;

	list_for_each_entry(cur_mode, &connector->modes, head) {
		if (cur_mode->vdisplay == adj_mode->vdisplay &&
			cur_mode->hdisplay == adj_mode->hdisplay &&
			cur_mode->vrefresh == adj_mode->vrefresh) {
			adj_mode->private = cur_mode->private;
			adj_mode->private_flags |= cur_mode->private_flags;
		}
	}
}

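/*
 * Derive the display topology (number of layer mixers and interfaces) for
 * @mode: one interface per populated physical encoder, and two layer
 * mixers when the mode is taller than MAX_VDISPLAY_SPLIT.
 */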
static struct msm_display_topology dpu_encoder_get_topology(
			struct dpu_encoder_virt *dpu_enc,
			struct dpu_kms *dpu_kms,
			struct drm_display_mode *mode)
{
	struct msm_display_topology topology;
	int i, intf_count = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

	/* Use split topology for vdisplay > MAX_VDISPLAY_SPLIT */
	topology.num_lm = (mode->vdisplay > MAX_VDISPLAY_SPLIT) ? 2 : 1;
	topology.num_enc = 0;
	topology.num_intf = intf_count;

	return topology;
}

static int dpu_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct msm_display_topology topology;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		DPU_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != NULL, crtc_state != NULL,
				conn_state != NULL);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	trace_dpu_enc_atomic_check(DRMID(drm_enc));

	/*
	 * display drivers may populate private fields of the drm display mode
	 * structure while registering possible modes of a connector with DRM.
	 * These private fields are not populated back while DRM invokes
	 * the mode_set callbacks. This module retrieves and populates the
	 * private fields of the given mode.
	 */
	_dpu_encoder_adjust_mode(conn_state->connector, adj_mode);

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			DPU_ERROR_ENC(dpu_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	/* Reserve dynamic resources now, indicating the atomic-test phase */
	if (!ret) {
		/*
		 * Avoid reserving resources when mode set is pending. Topology
		 * info may not be available to complete reservation.
		 */
		if (drm_atomic_crtc_needs_modeset(crtc_state)
				&& dpu_enc->mode_set_complete) {
			ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, crtc_state,
				conn_state, topology, true);
			dpu_enc->mode_set_complete = false;
		}
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	trace_dpu_enc_atomic_check_flags(DRMID(drm_enc), adj_mode->flags,
			adj_mode->private_flags);

	return ret;
}

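/*
 * Program the TE/vsync source for command-mode panels: collect the
 * pingpong blocks in use, then select either the panel TE GPIO or a
 * watchdog timer as the vsync source via the MDP top block.
 */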
static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
			struct msm_display_info *disp_info)
{
	struct dpu_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	int i;

	if (!dpu_enc || !disp_info) {
		DPU_ERROR("invalid param dpu_enc:%d or disp_info:%d\n",
					dpu_enc != NULL, disp_info != NULL);
		return;
	} else if (dpu_enc->num_phys_encs > ARRAY_SIZE(dpu_enc->hw_pp)) {
		DPU_ERROR("invalid num phys enc %d/%d\n",
				dpu_enc->num_phys_encs,
				(int) ARRAY_SIZE(dpu_enc->hw_pp));
		return;
	}

	drm_enc = &dpu_enc->base;
	/* these pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	dpu_kms = to_dpu_kms(priv->kms);
	if (!dpu_kms) {
		DPU_ERROR("invalid dpu_kms\n");
		return;
	}

	hw_mdptop = dpu_kms->hw_mdp;
	if (!hw_mdptop) {
		DPU_ERROR("invalid mdptop\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		for (i = 0; i < dpu_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = dpu_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = dpu_enc->num_phys_encs;
		if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = DPU_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = DPU_VSYNC0_SOURCE_GPIO;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}

static void _dpu_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	DPU_DEBUG_ENC(dpu_enc, "enable:%d\n", enable);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.irq_control)
			phys->ops.irq_control(phys, enable);
	}
}

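/*
 * Bring the encoder's resources up or down as one unit: take/drop the DPU
 * runtime PM reference (core clocks) and enable/disable all physical
 * encoder IRQs, in the order appropriate for @enable.
 */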
static void _dpu_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_encoder_virt *dpu_enc;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_rc_helper(DRMID(drm_enc), enable);

	if (!dpu_enc->cur_master) {
		DPU_ERROR("encoder master not set\n");
		return;
	}

	if (enable) {
		/* enable DPU core clks */
		pm_runtime_get_sync(&dpu_kms->pdev->dev);

		/* enable all the irq */
		_dpu_encoder_irq_control(drm_enc, true);

	} else {
		/* disable all the irq */
		_dpu_encoder_irq_control(drm_enc, false);

		/* disable DPU core clks */
		pm_runtime_put_sync(&dpu_kms->pdev->dev);
	}
}

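/*
 * Single entry point for the resource control state machine documented
 * above. All events except FRAME_DONE (which runs at interrupt context)
 * are serialized under rc_lock; the delayed ENTER_IDLE work is scheduled
 * on the CRTC's display thread.
 */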
static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = dpu_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
		DPU_ERROR("invalid crtc index\n");
		return -EINVAL;
	}
	disp_thread = &priv->disp_thread[drm_enc->crtc->index];

	/*
	 * when idle_pc is not supported, process only KICKOFF, PRE_STOP and
	 * STOP events and return early for other events (i.e. WB display).
	 */
	if (!dpu_enc->idle_pc_supported &&
			(sw_event != DPU_ENC_RC_EVENT_KICKOFF &&
			sw_event != DPU_ENC_RC_EVENT_STOP &&
			sw_event != DPU_ENC_RC_EVENT_PRE_STOP))
		return 0;

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event, dpu_enc->idle_pc_supported,
			 dpu_enc->rc_state, "begin");

	switch (sw_event) {
	case DPU_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in ON state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state != DPU_ENC_RC_STATE_OFF &&
				dpu_enc->rc_state != DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in state %d\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		if (is_vid_mode && dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE)
			_dpu_encoder_irq_control(drm_enc, true);
		else
			_dpu_encoder_resource_control_helper(drm_enc, true);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_ON;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "kickoff");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. Locking is not required as the other events,
		 * like KICKOFF and STOP, do a wait-for-idle before executing
		 * the resource_control
		 */
		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%d, sw_event:%d,rc:%d-unexpected\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (dpu_crtc_frame_pending(drm_enc->crtc) > 1) {
			DRM_DEBUG_KMS("id:%d skip schedule work\n",
				      DRMID(drm_enc));
			return 0;
		}

		kthread_queue_delayed_work(
			&disp_thread->worker,
			&dpu_enc->delayed_off_work,
			msecs_to_jiffies(dpu_enc->idle_timeout));

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "frame done");
		break;

	case DPU_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		if (is_vid_mode &&
				dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			_dpu_encoder_irq_control(drm_enc, true);
		}
		/* skip if it is already OFF or IDLE, resources are off already */
		else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF ||
				dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in %d state\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		dpu_enc->rc_state = DPU_ENC_RC_STATE_PRE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "pre stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_STOP:
		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in OFF state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF) {
			DRM_DEBUG_KMS("id: %u, sw_event:%d, rc in OFF state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc in state %d\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * expect to arrive here only if in either idle state or pre-off
		 * and in IDLE state the resources are already disabled
		 */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_PRE_OFF)
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc:%d !ON state\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (dpu_enc->frame_busy_mask[0]) {
			DRM_ERROR("id:%u, sw_event:%d, rc:%d frame pending\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		if (is_vid_mode)
			_dpu_encoder_irq_control(drm_enc, false);
		else
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_IDLE;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "idle");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	default:
		DRM_ERROR("id:%u, unexpected sw_event: %d\n", DRMID(drm_enc),
			  sw_event);
		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "error");
		break;
	}

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
			 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
			 "end");
	return 0;
}

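/*
 * Mode-set: find the attached connector, reserve hardware resources for
 * the new topology (non-test phase), pick up the allocated pingpong
 * blocks, and propagate the mode to every physical encoder.
 */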
static void dpu_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct dpu_rm_hw_iter pp_iter;
	struct msm_display_topology topology;
	enum dpu_rm_topology_name topology_name;
	int i = 0, ret;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	connector_list = &dpu_kms->dev->mode_config.connector_list;

	trace_dpu_enc_mode_set(DRMID(drm_enc));

	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		DPU_ERROR_ENC(dpu_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		DPU_ERROR_ENC(dpu_enc, "invalid connector state\n");
		return;
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	/* Reserve dynamic resources now, indicating the non-atomic-test phase */
	ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, topology, false);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	dpu_rm_init_hw_iter(&pp_iter, drm_enc->base.id, DPU_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		dpu_enc->hw_pp[i] = NULL;
		if (!dpu_rm_get_hw(&dpu_kms->rm, &pp_iter))
			break;
		dpu_enc->hw_pp[i] = (struct dpu_hw_pingpong *) pp_iter.hw;
	}

	topology_name = dpu_rm_get_topology_name(topology);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys) {
			if (!dpu_enc->hw_pp[i]) {
				DPU_ERROR_ENC(dpu_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = dpu_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			phys->topology_name = topology_name;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	dpu_enc->mode_set_complete = true;
}

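/*
 * Common enable/restore tail: reset UBWC settings on the MDP top block
 * (if the master provides the op) and reprogram the vsync source for the
 * current display info.
 */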
static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	if (!dpu_kms) {
		DPU_ERROR("invalid dpu_kms\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	if (!dpu_enc || !dpu_enc->cur_master) {
		DPU_ERROR("invalid dpu encoder/master\n");
		return;
	}

	if (dpu_enc->cur_master->hw_mdptop &&
			dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				dpu_enc->cur_master->hw_mdptop,
				dpu_kms->catalog);

	_dpu_encoder_update_vsync_source(dpu_enc, &dpu_enc->disp_info);
}

void dpu_encoder_virt_restore(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && (phys != dpu_enc->cur_master) && phys->ops.restore)
			phys->ops.restore(phys);
	}

	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.restore)
		dpu_enc->cur_master->ops.restore(dpu_enc->cur_master);

	_dpu_encoder_virt_enable_helper(drm_enc);
}

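/*
 * Encoder enable: elect the master physical encoder, power up resources
 * via the KICKOFF resource-control event, then enable the slaves before
 * the master.
 */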
static void dpu_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i, ret = 0;
	struct drm_display_mode *cur_mode = NULL;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;

	trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
			     cur_mode->vdisplay);

	dpu_enc->cur_master = NULL;
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			DPU_DEBUG_ENC(dpu_enc, "master is now idx %d\n", i);
			dpu_enc->cur_master = phys;
			break;
		}
	}

	if (!dpu_enc->cur_master) {
		DPU_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	ret = dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc, "dpu resource control failed: %d\n",
				ret);
		return;
	}

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys)
			continue;

		if (phys != dpu_enc->cur_master) {
			if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		if (dpu_enc->misr_enable && (dpu_enc->disp_info.capabilities &
		     MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
						dpu_enc->misr_frame_count);
	}

	if (dpu_enc->cur_master->ops.enable)
		dpu_enc->cur_master->ops.enable(dpu_enc->cur_master);

	_dpu_encoder_virt_enable_helper(drm_enc);
}

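/*
 * Encoder disable: wait for the last frame to land, step the resource
 * control through PRE_STOP and STOP around the physical-encoder disables,
 * and release the reserved hardware back to the resource manager.
 */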
static void dpu_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct drm_display_mode *mode;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		DPU_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		DPU_ERROR("invalid dev_private\n");
		return;
	}

	mode = &drm_enc->crtc->state->adjusted_mode;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_disable(DRMID(drm_enc));

	/* wait for idle */
	dpu_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.disable)
			phys->ops.disable(phys);
	}

	/* after phys waits for frame-done, should be no more frames pending */
	if (atomic_xchg(&dpu_enc->frame_done_timeout, 0)) {
		DPU_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&dpu_enc->frame_done_timer);
	}

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		if (dpu_enc->phys_encs[i])
			dpu_enc->phys_encs[i]->connector = NULL;
	}

	dpu_enc->cur_master = NULL;

	DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n");

	dpu_rm_release(&dpu_kms->rm, drm_enc);
}

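/*
 * Look up the INTF block id in the hardware catalog that matches the given
 * interface type and controller instance; returns INTF_MAX if no entry
 * matches.
 */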
static enum dpu_intf dpu_encoder_get_intf(struct dpu_mdss_cfg *catalog,
		enum dpu_intf_type type, u32 controller_id)
{
	int i = 0;

	for (i = 0; i < catalog->intf_count; i++) {
		if (catalog->intf[i].type == type
		    && catalog->intf[i].controller_id == controller_id) {
			return catalog->intf[i].id;
		}
	}

	return INTF_MAX;
}

static void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_vblank_callback");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	if (dpu_enc->crtc_vblank_cb)
		dpu_enc->crtc_vblank_cb(dpu_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
	DPU_ATRACE_END("encoder_vblank_callback");
}

static void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	trace_dpu_enc_underrun_cb(DRMID(drm_enc),
				  atomic_read(&phy_enc->underrun_cnt));
	DPU_ATRACE_END("encoder_underrun_callback");
}

void dpu_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
		void (*vbl_cb)(void *), void *vbl_data)
{
	struct dpu_encoder_virt *dpu_enc;
	unsigned long lock_flags;
	bool enable;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	enable = vbl_cb ? true : false;
	trace_dpu_enc_vblank_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	dpu_enc->crtc_vblank_cb = vbl_cb;
	dpu_enc->crtc_vblank_cb_data = vbl_data;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.control_vblank_irq)
			phys->ops.control_vblank_irq(phys, enable);
	}
}

void dpu_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
		void (*frame_event_cb)(void *, u32 event),
		void *frame_event_cb_data)
{
	struct dpu_encoder_virt *dpu_enc;
	unsigned long lock_flags;
	bool enable;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	enable = frame_event_cb ? true : false;
	trace_dpu_enc_frame_event_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	dpu_enc->crtc_frame_event_cb = frame_event_cb;
	dpu_enc->crtc_frame_event_cb_data = frame_event_cb_data;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

1370 static void dpu_encoder_frame_done_callback(
1371                 struct drm_encoder *drm_enc,
1372                 struct dpu_encoder_phys *ready_phys, u32 event)
1373 {
1374         struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
1375         unsigned int i;
1376
1377         if (event & (DPU_ENCODER_FRAME_EVENT_DONE
1378                         | DPU_ENCODER_FRAME_EVENT_ERROR
1379                         | DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
1380
1381                 if (!dpu_enc->frame_busy_mask[0]) {
1382                         /*
1383                          * suppress frame_done without waiter,
1384                          * likely autorefresh
1385                          */
1386                         trace_dpu_enc_frame_done_cb_not_busy(DRMID(drm_enc),
1387                                         event, ready_phys->intf_idx);
1388                         return;
1389                 }
1390
1391                 /* One of the physical encoders has become idle */
1392                 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1393                         if (dpu_enc->phys_encs[i] == ready_phys) {
1394                                 clear_bit(i, dpu_enc->frame_busy_mask);
1395                                 trace_dpu_enc_frame_done_cb(DRMID(drm_enc), i,
1396                                                 dpu_enc->frame_busy_mask[0]);
1397                         }
1398                 }
1399
1400                 if (!dpu_enc->frame_busy_mask[0]) {
1401                         atomic_set(&dpu_enc->frame_done_timeout, 0);
1402                         del_timer(&dpu_enc->frame_done_timer);
1403
1404                         dpu_encoder_resource_control(drm_enc,
1405                                         DPU_ENC_RC_EVENT_FRAME_DONE);
1406
1407                         if (dpu_enc->crtc_frame_event_cb)
1408                                 dpu_enc->crtc_frame_event_cb(
1409                                         dpu_enc->crtc_frame_event_cb_data,
1410                                         event);
1411                 }
1412         } else {
1413                 if (dpu_enc->crtc_frame_event_cb)
1414                         dpu_enc->crtc_frame_event_cb(
1415                                 dpu_enc->crtc_frame_event_cb_data, event);
1416         }
1417 }
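
/*
 * Editor's note: an illustrative model of the frame_busy_mask protocol used
 * above (helper names are hypothetical). In the real driver, non-slave
 * physical encoders set their bit at kickoff; each frame-done event clears
 * one bit, and the CRTC callback only fires once the mask drains to zero.
 */
static void example_mark_busy(unsigned long *busy_mask, unsigned int num_phys)
{
	unsigned int i;

	for (i = 0; i < num_phys; i++)
		set_bit(i, busy_mask);		/* one bit per phys encoder */
}

static bool example_phys_done(unsigned long *busy_mask, unsigned int idx)
{
	clear_bit(idx, busy_mask);		/* this phys encoder is now idle */
	return busy_mask[0] == 0;		/* true once the whole frame is done */
}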
1418
1419 static void dpu_encoder_off_work(struct kthread_work *work)
1420 {
1421         struct dpu_encoder_virt *dpu_enc = container_of(work,
1422                         struct dpu_encoder_virt, delayed_off_work.work);
1423
1424         if (!dpu_enc) {
1425                 DPU_ERROR("invalid dpu encoder\n");
1426                 return;
1427         }
1428
1429         dpu_encoder_resource_control(&dpu_enc->base,
1430                                                 DPU_ENC_RC_EVENT_ENTER_IDLE);
1431
1432         dpu_encoder_frame_done_callback(&dpu_enc->base, NULL,
1433                                 DPU_ENCODER_FRAME_EVENT_IDLE);
1434 }
1435
1436 /**
1437  * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
1438  * @drm_enc: Pointer to drm encoder structure
1439  * @phys: Pointer to physical encoder structure
1440  * @extra_flush_bits: Additional bit mask to include in flush trigger
1441  */
1442 static inline void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
1443                 struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
1444 {
1445         struct dpu_hw_ctl *ctl;
1446         int pending_kickoff_cnt;
1447         u32 ret = UINT_MAX;
1448
1449         if (!drm_enc || !phys) {
1450                 DPU_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
1451                                 drm_enc != NULL, phys != NULL);
1452                 return;
1453         }
1454
1455         if (!phys->hw_pp) {
1456                 DPU_ERROR("invalid pingpong hw\n");
1457                 return;
1458         }
1459
1460         ctl = phys->hw_ctl;
1461         if (!ctl || !ctl->ops.trigger_flush) {
1462                 DPU_ERROR("missing trigger cb\n");
1463                 return;
1464         }
1465
1466         pending_kickoff_cnt = dpu_encoder_phys_inc_pending(phys);
1467
1468         if (extra_flush_bits && ctl->ops.update_pending_flush)
1469                 ctl->ops.update_pending_flush(ctl, extra_flush_bits);
1470
1471         ctl->ops.trigger_flush(ctl);
1472
1473         if (ctl->ops.get_pending_flush)
1474                 ret = ctl->ops.get_pending_flush(ctl);
1475
1476         trace_dpu_enc_trigger_flush(DRMID(drm_enc), phys->intf_idx,
1477                                     pending_kickoff_cnt, ctl->idx, ret);
1478 }
1479
1480 /**
1481  * _dpu_encoder_trigger_start - trigger start for a physical encoder
1482  * @phys: Pointer to physical encoder structure
1483  */
1484 static inline void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
1485 {
1486         if (!phys) {
1487                 DPU_ERROR("invalid argument(s)\n");
1488                 return;
1489         }
1490
1491         if (!phys->hw_pp) {
1492                 DPU_ERROR("invalid pingpong hw\n");
1493                 return;
1494         }
1495
1496         if (phys->ops.trigger_start && phys->enable_state != DPU_ENC_DISABLED)
1497                 phys->ops.trigger_start(phys);
1498 }
1499
1500 void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
1501 {
1502         struct dpu_hw_ctl *ctl;
1503
1504         if (!phys_enc) {
1505                 DPU_ERROR("invalid encoder\n");
1506                 return;
1507         }
1508
1509         ctl = phys_enc->hw_ctl;
1510         if (ctl && ctl->ops.trigger_start) {
1511                 ctl->ops.trigger_start(ctl);
1512                 trace_dpu_enc_trigger_start(DRMID(phys_enc->parent), ctl->idx);
1513         }
1514 }
1515
1516 static int dpu_encoder_helper_wait_event_timeout(
1517                 int32_t drm_id,
1518                 int32_t hw_id,
1519                 struct dpu_encoder_wait_info *info)
1520 {
1521         int rc = 0;
1522         s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
1523         s64 wait_jiffies = msecs_to_jiffies(info->timeout_ms);
1524         s64 time;
1525
1526         do {
1527                 rc = wait_event_timeout(*(info->wq),
1528                                 atomic_read(info->atomic_cnt) == 0, wait_jiffies);
1529                 time = ktime_to_ms(ktime_get());
1530
1531                 trace_dpu_enc_wait_event_timeout(drm_id, hw_id, rc, time,
1532                                                  expected_time,
1533                                                  atomic_read(info->atomic_cnt));
1534         /* retry if the wait timed out (rc == 0) while work is still pending and the deadline has not passed */
1535         } while (atomic_read(info->atomic_cnt) && (rc == 0) &&
1536                         (time < expected_time));
1537
1538         return rc;
1539 }
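
/*
 * Editor's note: a sketch of how a physical encoder might drive this helper;
 * the wait queue, counter, and timeout budget are assumptions, not driver
 * code. The helper returns 0 on timeout, like wait_event_timeout(), and a
 * positive remaining-jiffies count on success.
 */
static int example_wait_for_pp_done(struct dpu_encoder_phys *phys,
		wait_queue_head_t *wq, atomic_t *pending_cnt)
{
	struct dpu_encoder_wait_info info = {
		.wq = wq,
		.atomic_cnt = pending_cnt,
		.timeout_ms = 84,	/* assumed budget: ~5 frames at 60 Hz */
	};

	return dpu_encoder_helper_wait_event_timeout(DRMID(phys->parent),
			phys->hw_pp->idx, &info);
}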
1540
1541 void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
1542 {
1543         struct dpu_encoder_virt *dpu_enc;
1544         struct dpu_hw_ctl *ctl;
1545         int rc;
1546
1547         if (!phys_enc) {
1548                 DPU_ERROR("invalid encoder\n");
1549                 return;
1550         }
1551         dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
1552         ctl = phys_enc->hw_ctl;
1553
1554         if (!ctl || !ctl->ops.reset)
1555                 return;
1556
1557         DRM_DEBUG_KMS("id:%u ctl %d reset\n", DRMID(phys_enc->parent),
1558                       ctl->idx);
1559
1560         rc = ctl->ops.reset(ctl);
1561         if (rc) {
1562                 DPU_ERROR_ENC(dpu_enc, "ctl %d reset failure\n",  ctl->idx);
1563                 dpu_dbg_dump(false, __func__, true, true);
1564         }
1565
1566         phys_enc->enable_state = DPU_ENC_ENABLED;
1567 }
1568
1569 /**
1570  * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
1571  *      Iterate through the physical encoders and perform consolidated flush
1572  *      and/or control start triggering as needed. This is done in the virtual
1573  *      encoder rather than the individual physical ones in order to handle
1574  *      use cases that require visibility into multiple physical encoders at
1575  *      a time.
1576  * @dpu_enc: Pointer to virtual encoder structure
1577  */
1578 static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
1579 {
1580         struct dpu_hw_ctl *ctl;
1581         uint32_t i, pending_flush;
1582         unsigned long lock_flags;
1583
1584         if (!dpu_enc) {
1585                 DPU_ERROR("invalid encoder\n");
1586                 return;
1587         }
1588
1589         pending_flush = 0x0;
1590
1591         /* update pending counts and trigger kickoff ctl flush atomically */
1592         spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
1593
1594         /* don't perform flush/start operations for slave encoders */
1595         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1596                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1597
1598                 if (!phys || phys->enable_state == DPU_ENC_DISABLED)
1599                         continue;
1600
1601                 ctl = phys->hw_ctl;
1602                 if (!ctl)
1603                         continue;
1604
1605                 if (phys->split_role != ENC_ROLE_SLAVE)
1606                         set_bit(i, dpu_enc->frame_busy_mask);
1607                 if (!phys->ops.needs_single_flush ||
1608                                 !phys->ops.needs_single_flush(phys))
1609                         _dpu_encoder_trigger_flush(&dpu_enc->base, phys, 0x0);
1610                 else if (ctl->ops.get_pending_flush)
1611                         pending_flush |= ctl->ops.get_pending_flush(ctl);
1612         }
1613
1614         /* for split flush, combine pending flush masks and send to master */
1615         if (pending_flush && dpu_enc->cur_master) {
1616                 _dpu_encoder_trigger_flush(
1617                                 &dpu_enc->base,
1618                                 dpu_enc->cur_master,
1619                                 pending_flush);
1620         }
1621
1622         _dpu_encoder_trigger_start(dpu_enc->cur_master);
1623
1624         spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
1625 }
1626
1627 void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
1628 {
1629         struct dpu_encoder_virt *dpu_enc;
1630         struct dpu_encoder_phys *phys;
1631         unsigned int i;
1632         struct dpu_hw_ctl *ctl;
1633         struct msm_display_info *disp_info;
1634
1635         if (!drm_enc) {
1636                 DPU_ERROR("invalid encoder\n");
1637                 return;
1638         }
1639         dpu_enc = to_dpu_encoder_virt(drm_enc);
1640         disp_info = &dpu_enc->disp_info;
1641
1642         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1643                 phys = dpu_enc->phys_encs[i];
1644
1645                 if (phys && phys->hw_ctl) {
1646                         ctl = phys->hw_ctl;
1647                         if (ctl->ops.clear_pending_flush)
1648                                 ctl->ops.clear_pending_flush(ctl);
1649
1650                         /* update only for command mode primary ctl */
1651                         if ((phys == dpu_enc->cur_master) &&
1652                            (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
1653                             && ctl->ops.trigger_pending)
1654                                 ctl->ops.trigger_pending(ctl);
1655                 }
1656         }
1657 }
1658
1659 static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
1660                 struct drm_display_mode *mode)
1661 {
1662         u64 pclk_rate;
1663         u32 pclk_period;
1664         u32 line_time;
1665
1666         /*
1667          * For linetime calculation, only operate on master encoder.
1668          */
1669         if (!dpu_enc->cur_master)
1670                 return 0;
1671
1672         if (!dpu_enc->cur_master->ops.get_line_count) {
1673                 DPU_ERROR("get_line_count function not defined\n");
1674                 return 0;
1675         }
1676
1677         pclk_rate = mode->clock; /* pixel clock in kHz */
1678         if (pclk_rate == 0) {
1679                 DPU_ERROR("pclk is 0, cannot calculate line time\n");
1680                 return 0;
1681         }
1682
1683         pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
1684         if (pclk_period == 0) {
1685                 DPU_ERROR("pclk period is 0\n");
1686                 return 0;
1687         }
1688
1689         /*
1690          * Line time calculation based on Pixel clock and HTOTAL.
1691          * Final unit is in ns.
1692          */
1693         line_time = (pclk_period * mode->htotal) / 1000;
1694         if (line_time == 0) {
1695                 DPU_ERROR("line time calculation is 0\n");
1696                 return 0;
1697         }
1698
1699         DPU_DEBUG_ENC(dpu_enc,
1700                         "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
1701                         pclk_rate, pclk_period, line_time);
1702
1703         return line_time;
1704 }
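
/*
 * Editor's note: a worked instance of the math above, assuming a 1080p
 * timing with htotal = 2200, vtotal = 1125 and a 148500 kHz pixel clock:
 *
 *   pclk_period = DIV_ROUND_UP_ULL(1000000000, 148500) = 6735 ps
 *   line_time   = (6735 * 2200) / 1000                 = 14817 ns
 *
 * 14817 ns * 1125 lines is ~16.67 ms per frame, i.e. a 60 Hz mode.
 */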
1705
1706 static int _dpu_encoder_wakeup_time(struct drm_encoder *drm_enc,
1707                 ktime_t *wakeup_time)
1708 {
1709         struct drm_display_mode *mode;
1710         struct dpu_encoder_virt *dpu_enc;
1711         u32 cur_line;
1712         u32 line_time;
1713         u32 vtotal, time_to_vsync;
1714         ktime_t cur_time;
1715
1716         dpu_enc = to_dpu_encoder_virt(drm_enc);
1717
1718         if (!drm_enc->crtc || !drm_enc->crtc->state) {
1719                 DPU_ERROR("crtc/crtc state object is NULL\n");
1720                 return -EINVAL;
1721         }
1722         mode = &drm_enc->crtc->state->adjusted_mode;
1723
1724         line_time = _dpu_encoder_calculate_linetime(dpu_enc, mode);
1725         if (!line_time)
1726                 return -EINVAL;
1727
1728         cur_line = dpu_enc->cur_master->ops.get_line_count(dpu_enc->cur_master);
1729
1730         vtotal = mode->vtotal;
1731         if (cur_line >= vtotal)
1732                 time_to_vsync = line_time * vtotal;
1733         else
1734                 time_to_vsync = line_time * (vtotal - cur_line);
1735
1736         if (time_to_vsync == 0) {
1737                 DPU_ERROR("time to vsync should not be zero, vtotal=%d\n",
1738                                 vtotal);
1739                 return -EINVAL;
1740         }
1741
1742         cur_time = ktime_get();
1743         *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
1744
1745         DPU_DEBUG_ENC(dpu_enc,
1746                         "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
1747                         cur_line, vtotal, time_to_vsync,
1748                         ktime_to_ms(cur_time),
1749                         ktime_to_ms(*wakeup_time));
1750         return 0;
1751 }
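
/*
 * Editor's note: continuing the worked example above, if scanout is at
 * cur_line = 500 of vtotal = 1125 with line_time = 14817 ns, then
 *
 *   time_to_vsync = 14817 * (1125 - 500) = 9260625 ns (~9.26 ms)
 *
 * so the wakeup time lands ~9.26 ms past the current ktime. Once cur_line
 * reaches or passes vtotal, a full frame's worth of lines is used instead.
 */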
1752
1753 static void dpu_encoder_vsync_event_handler(struct timer_list *t)
1754 {
1755         struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
1756                         vsync_event_timer);
1757         struct drm_encoder *drm_enc = &dpu_enc->base;
1758         struct msm_drm_private *priv;
1759         struct msm_drm_thread *event_thread;
1760
1761         if (!drm_enc->dev || !drm_enc->dev->dev_private ||
1762                         !drm_enc->crtc) {
1763                 DPU_ERROR("invalid parameters\n");
1764                 return;
1765         }
1766
1767         priv = drm_enc->dev->dev_private;
1768
1769         if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
1770                 DPU_ERROR("invalid crtc index\n");
1771                 return;
1772         }
1773         event_thread = &priv->event_thread[drm_enc->crtc->index];
1774         if (!event_thread) {
1775                 DPU_ERROR("event_thread not found for crtc:%d\n",
1776                                 drm_enc->crtc->index);
1777                 return;
1778         }
1779
1780         del_timer(&dpu_enc->vsync_event_timer);
             kthread_queue_work(&event_thread->worker, &dpu_enc->vsync_event_work);
1781 }
1782
1783 static void dpu_encoder_vsync_event_work_handler(struct kthread_work *work)
1784 {
1785         struct dpu_encoder_virt *dpu_enc = container_of(work,
1786                         struct dpu_encoder_virt, vsync_event_work);
1787         ktime_t wakeup_time;
1788
1789         if (!dpu_enc) {
1790                 DPU_ERROR("invalid dpu encoder\n");
1791                 return;
1792         }
1793
1794         if (_dpu_encoder_wakeup_time(&dpu_enc->base, &wakeup_time))
1795                 return;
1796
1797         trace_dpu_enc_vsync_event_work(DRMID(&dpu_enc->base), wakeup_time);
1798         mod_timer(&dpu_enc->vsync_event_timer,
1799                         nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
1800 }
1801
1802 void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
1803                 struct dpu_encoder_kickoff_params *params)
1804 {
1805         struct dpu_encoder_virt *dpu_enc;
1806         struct dpu_encoder_phys *phys;
1807         bool needs_hw_reset = false;
1808         unsigned int i;
1809
1810         if (!drm_enc || !params) {
1811                 DPU_ERROR("invalid args\n");
1812                 return;
1813         }
1814         dpu_enc = to_dpu_encoder_virt(drm_enc);
1815
1816         trace_dpu_enc_prepare_kickoff(DRMID(drm_enc));
1817
1818         /* prepare for next kickoff, may include waiting on previous kickoff */
1819         DPU_ATRACE_BEGIN("enc_prepare_for_kickoff");
1820         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1821                 phys = dpu_enc->phys_encs[i];
1822                 if (phys) {
1823                         if (phys->ops.prepare_for_kickoff)
1824                                 phys->ops.prepare_for_kickoff(phys, params);
1825                         if (phys->enable_state == DPU_ENC_ERR_NEEDS_HW_RESET)
1826                                 needs_hw_reset = true;
1827                 }
1828         }
1829         DPU_ATRACE_END("enc_prepare_for_kickoff");
1830
1831         dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
1832
1833         /* if any phys needs reset, reset all phys, in-order */
1834         if (needs_hw_reset) {
1835                 trace_dpu_enc_prepare_kickoff_reset(DRMID(drm_enc));
1836                 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1837                         phys = dpu_enc->phys_encs[i];
1838                         if (phys && phys->ops.hw_reset)
1839                                 phys->ops.hw_reset(phys);
1840                 }
1841         }
1842 }
1843
1844 void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
1845 {
1846         struct dpu_encoder_virt *dpu_enc;
1847         struct dpu_encoder_phys *phys;
1848         ktime_t wakeup_time;
1849         unsigned int i;
1850
1851         if (!drm_enc) {
1852                 DPU_ERROR("invalid encoder\n");
1853                 return;
1854         }
1855         DPU_ATRACE_BEGIN("encoder_kickoff");
1856         dpu_enc = to_dpu_encoder_virt(drm_enc);
1857
1858         trace_dpu_enc_kickoff(DRMID(drm_enc));
1859
1860         atomic_set(&dpu_enc->frame_done_timeout,
1861                         DPU_FRAME_DONE_TIMEOUT * 1000 /
1862                         drm_enc->crtc->state->adjusted_mode.vrefresh);
1863         mod_timer(&dpu_enc->frame_done_timer, jiffies +
1864                 ((atomic_read(&dpu_enc->frame_done_timeout) * HZ) / 1000));
1865
1866         /* All phys encs are ready to go, trigger the kickoff */
1867         _dpu_encoder_kickoff_phys(dpu_enc);
1868
1869         /* allow phys encs to handle any post-kickoff business */
1870         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1871                 phys = dpu_enc->phys_encs[i];
1872                 if (phys && phys->ops.handle_post_kickoff)
1873                         phys->ops.handle_post_kickoff(phys);
1874         }
1875
1876         if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI &&
1877                         !_dpu_encoder_wakeup_time(drm_enc, &wakeup_time)) {
1878                 trace_dpu_enc_early_kickoff(DRMID(drm_enc),
1879                                             ktime_to_ms(wakeup_time));
1880                 mod_timer(&dpu_enc->vsync_event_timer,
1881                                 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
1882         }
1883
1884         DPU_ATRACE_END("encoder_kickoff");
1885 }
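
/*
 * Editor's note: the watchdog arithmetic above, worked through assuming
 * DPU_FRAME_DONE_TIMEOUT is 60 (defined elsewhere in the driver; 60 is an
 * assumption here) and a 60 Hz mode:
 *
 *   frame_done_timeout = 60 * 1000 / 60            = 1000 ms
 *   timer expiry       = jiffies + 1000 * HZ / 1000 = jiffies + HZ
 *
 * i.e. the frame-done timer fires one second after kickoff unless a
 * FRAME_DONE event clears it first.
 */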
1886
1887 void dpu_encoder_prepare_commit(struct drm_encoder *drm_enc)
1888 {
1889         struct dpu_encoder_virt *dpu_enc;
1890         struct dpu_encoder_phys *phys;
1891         int i;
1892
1893         if (!drm_enc) {
1894                 DPU_ERROR("invalid encoder\n");
1895                 return;
1896         }
1897         dpu_enc = to_dpu_encoder_virt(drm_enc);
1898
1899         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1900                 phys = dpu_enc->phys_encs[i];
1901                 if (phys && phys->ops.prepare_commit)
1902                         phys->ops.prepare_commit(phys);
1903         }
1904 }
1905
1906 #ifdef CONFIG_DEBUG_FS
1907 static int _dpu_encoder_status_show(struct seq_file *s, void *data)
1908 {
1909         struct dpu_encoder_virt *dpu_enc;
1910         int i;
1911
1912         if (!s || !s->private)
1913                 return -EINVAL;
1914
1915         dpu_enc = s->private;
1916
1917         mutex_lock(&dpu_enc->enc_lock);
1918         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1919                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1920
1921                 if (!phys)
1922                         continue;
1923
1924                 seq_printf(s, "intf:%d    vsync:%8d     underrun:%8d    ",
1925                                 phys->intf_idx - INTF_0,
1926                                 atomic_read(&phys->vsync_cnt),
1927                                 atomic_read(&phys->underrun_cnt));
1928
1929                 switch (phys->intf_mode) {
1930                 case INTF_MODE_VIDEO:
1931                         seq_puts(s, "mode: video\n");
1932                         break;
1933                 case INTF_MODE_CMD:
1934                         seq_puts(s, "mode: command\n");
1935                         break;
1936                 default:
1937                         seq_puts(s, "mode: ???\n");
1938                         break;
1939                 }
1940         }
1941         mutex_unlock(&dpu_enc->enc_lock);
1942
1943         return 0;
1944 }
1945
1946 static int _dpu_encoder_debugfs_status_open(struct inode *inode,
1947                 struct file *file)
1948 {
1949         return single_open(file, _dpu_encoder_status_show, inode->i_private);
1950 }
1951
1952 static ssize_t _dpu_encoder_misr_setup(struct file *file,
1953                 const char __user *user_buf, size_t count, loff_t *ppos)
1954 {
1955         struct dpu_encoder_virt *dpu_enc;
1956         int i = 0, rc;
1957         char buf[MISR_BUFF_SIZE + 1];
1958         size_t buff_copy;
1959         u32 frame_count, enable;
1960
1961         if (!file || !file->private_data)
1962                 return -EINVAL;
1963
1964         dpu_enc = file->private_data;
1965
1966         buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
1967         if (copy_from_user(buf, user_buf, buff_copy))
1968                 return -EINVAL;
1969
1970         buf[buff_copy] = 0; /* end of string */
1971
1972         if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
1973                 return -EINVAL;
1974
1975         rc = _dpu_encoder_power_enable(dpu_enc, true);
1976         if (rc)
1977                 return rc;
1978
1979         mutex_lock(&dpu_enc->enc_lock);
1980         dpu_enc->misr_enable = enable;
1981         dpu_enc->misr_frame_count = frame_count;
1982         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1983                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1984
1985                 if (!phys || !phys->ops.setup_misr)
1986                         continue;
1987
1988                 phys->ops.setup_misr(phys, enable, frame_count);
1989         }
1990         mutex_unlock(&dpu_enc->enc_lock);
1991         _dpu_encoder_power_enable(dpu_enc, false);
1992
1993         return count;
1994 }
1995
1996 static ssize_t _dpu_encoder_misr_read(struct file *file,
1997                 char __user *user_buff, size_t count, loff_t *ppos)
1998 {
1999         struct dpu_encoder_virt *dpu_enc;
2000         int i = 0, len = 0;
2001         char buf[MISR_BUFF_SIZE + 1] = {'\0'};
2002         int rc;
2003
2004         if (*ppos)
2005                 return 0;
2006
2007         if (!file || !file->private_data)
2008                 return -EINVAL;
2009
2010         dpu_enc = file->private_data;
2011
2012         rc = _dpu_encoder_power_enable(dpu_enc, true);
2013         if (rc)
2014                 return rc;
2015
2016         mutex_lock(&dpu_enc->enc_lock);
2017         if (!dpu_enc->misr_enable) {
2018                 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
2019                         "disabled\n");
2020                 goto buff_check;
2021         } else if (dpu_enc->disp_info.capabilities &
2022                                                 ~MSM_DISPLAY_CAP_VID_MODE) {
2023                 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
2024                         "unsupported\n");
2025                 goto buff_check;
2026         }
2027
2028         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2029                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2030
2031                 if (!phys || !phys->ops.collect_misr)
2032                         continue;
2033
2034                 len += snprintf(buf + len, MISR_BUFF_SIZE - len,
2035                         "Intf idx:%d\n", phys->intf_idx - INTF_0);
2036                 len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
2037                                         phys->ops.collect_misr(phys));
2038         }
2039
2040 buff_check:
2041         if (count <= len) {
2042                 len = 0;
2043                 goto end;
2044         }
2045
2046         if (copy_to_user(user_buff, buf, len)) {
2047                 len = -EFAULT;
2048                 goto end;
2049         }
2050
2051         *ppos += len;   /* increase offset */
2052
2053 end:
2054         mutex_unlock(&dpu_enc->enc_lock);
2055         _dpu_encoder_power_enable(dpu_enc, false);
2056         return len;
2057 }
2058
2059 static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
2060 {
2061         struct dpu_encoder_virt *dpu_enc;
2062         struct msm_drm_private *priv;
2063         struct dpu_kms *dpu_kms;
2064         int i;
2065
2066         static const struct file_operations debugfs_status_fops = {
2067                 .open =         _dpu_encoder_debugfs_status_open,
2068                 .read =         seq_read,
2069                 .llseek =       seq_lseek,
2070                 .release =      single_release,
2071         };
2072
2073         static const struct file_operations debugfs_misr_fops = {
2074                 .open = simple_open,
2075                 .read = _dpu_encoder_misr_read,
2076                 .write = _dpu_encoder_misr_setup,
2077         };
2078
2079         char name[DPU_NAME_SIZE];
2080
2081         if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
2082                 DPU_ERROR("invalid encoder or kms\n");
2083                 return -EINVAL;
2084         }
2085
2086         dpu_enc = to_dpu_encoder_virt(drm_enc);
2087         priv = drm_enc->dev->dev_private;
2088         dpu_kms = to_dpu_kms(priv->kms);
2089
2090         snprintf(name, DPU_NAME_SIZE, "encoder%u", drm_enc->base.id);
2091
2092         /* create overall sub-directory for the encoder */
2093         dpu_enc->debugfs_root = debugfs_create_dir(name,
2094                         drm_enc->dev->primary->debugfs_root);
2095         if (!dpu_enc->debugfs_root)
2096                 return -ENOMEM;
2097
2098         /* don't error check these */
2099         debugfs_create_file("status", 0600,
2100                 dpu_enc->debugfs_root, dpu_enc, &debugfs_status_fops);
2101
2102         debugfs_create_file("misr_data", 0600,
2103                 dpu_enc->debugfs_root, dpu_enc, &debugfs_misr_fops);
2104
2105         for (i = 0; i < dpu_enc->num_phys_encs; i++)
2106                 if (dpu_enc->phys_encs[i] &&
2107                                 dpu_enc->phys_encs[i]->ops.late_register)
2108                         dpu_enc->phys_encs[i]->ops.late_register(
2109                                         dpu_enc->phys_encs[i],
2110                                         dpu_enc->debugfs_root);
2111
2112         return 0;
2113 }
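
/*
 * Editor's note: with the nodes registered above, MISR capture can be
 * exercised from userspace. Assuming debugfs is mounted in the usual place
 * and the encoder's object id is 31, the flow would look like:
 *
 *   # echo "1 5" > /sys/kernel/debug/dri/0/encoder31/misr_data
 *   # cat /sys/kernel/debug/dri/0/encoder31/misr_data
 *
 * The write enables MISR with a 5-frame capture window; the read prints one
 * signature per hardware interface.
 */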
2114
2115 static void _dpu_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
2116 {
2117         struct dpu_encoder_virt *dpu_enc;
2118
2119         if (!drm_enc)
2120                 return;
2121
2122         dpu_enc = to_dpu_encoder_virt(drm_enc);
2123         debugfs_remove_recursive(dpu_enc->debugfs_root);
2124 }
2125 #else
2126 static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
2127 {
2128         return 0;
2129 }
2130
2131 static void _dpu_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
2132 {
2133 }
2134 #endif
2135
2136 static int dpu_encoder_late_register(struct drm_encoder *encoder)
2137 {
2138         return _dpu_encoder_init_debugfs(encoder);
2139 }
2140
2141 static void dpu_encoder_early_unregister(struct drm_encoder *encoder)
2142 {
2143         _dpu_encoder_destroy_debugfs(encoder);
2144 }
2145
2146 static int dpu_encoder_virt_add_phys_encs(
2147                 u32 display_caps,
2148                 struct dpu_encoder_virt *dpu_enc,
2149                 struct dpu_enc_phys_init_params *params)
2150 {
2151         struct dpu_encoder_phys *enc = NULL;
2152
2153         DPU_DEBUG_ENC(dpu_enc, "\n");
2154
2155         /*
2156          * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
2157          * in this function, check up-front.
2158          */
2159         if (dpu_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
2160                         ARRAY_SIZE(dpu_enc->phys_encs)) {
2161                 DPU_ERROR_ENC(dpu_enc, "too many physical encoders %d\n",
2162                           dpu_enc->num_phys_encs);
2163                 return -EINVAL;
2164         }
2165
2166         if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
2167                 enc = dpu_encoder_phys_vid_init(params);
2168
2169                 if (IS_ERR_OR_NULL(enc)) {
2170                         DPU_ERROR_ENC(dpu_enc, "failed to init vid enc: %ld\n",
2171                                 PTR_ERR(enc));
2172                         return !enc ? -EINVAL : PTR_ERR(enc);
2173                 }
2174
2175                 dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
2176                 ++dpu_enc->num_phys_encs;
2177         }
2178
2179         if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
2180                 enc = dpu_encoder_phys_cmd_init(params);
2181
2182                 if (IS_ERR_OR_NULL(enc)) {
2183                         DPU_ERROR_ENC(dpu_enc, "failed to init cmd enc: %ld\n",
2184                                 PTR_ERR(enc));
2185                         return !enc ? -EINVAL : PTR_ERR(enc);
2186                 }
2187
2188                 dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
2189                 ++dpu_enc->num_phys_encs;
2190         }
2191
2192         return 0;
2193 }
2194
2195 static const struct dpu_encoder_virt_ops dpu_encoder_parent_ops = {
2196         .handle_vblank_virt = dpu_encoder_vblank_callback,
2197         .handle_underrun_virt = dpu_encoder_underrun_callback,
2198         .handle_frame_done = dpu_encoder_frame_done_callback,
2199 };
2200
2201 static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
2202                                  struct dpu_kms *dpu_kms,
2203                                  struct msm_display_info *disp_info)
2204 {
2205         int ret = 0;
2206         int i = 0;
2207         enum dpu_intf_type intf_type;
2208         struct dpu_enc_phys_init_params phys_params;
2209
2210         if (!dpu_enc || !dpu_kms) {
2211                 DPU_ERROR("invalid arg(s), enc %d kms %d\n",
2212                                 dpu_enc != 0, dpu_kms != 0);
2213                 return -EINVAL;
2214         }
2215
2216         memset(&phys_params, 0, sizeof(phys_params));
2217         phys_params.dpu_kms = dpu_kms;
2218         phys_params.parent = &dpu_enc->base;
2219         phys_params.parent_ops = &dpu_encoder_parent_ops;
2220         phys_params.enc_spinlock = &dpu_enc->enc_spinlock;
2221
2222         DPU_DEBUG("\n");
2223
2224         switch (disp_info->intf_type) {
2225         case DRM_MODE_ENCODER_DSI:
2226                 intf_type = INTF_DSI;
2227                 break;
2228         default:
2229                 DPU_ERROR_ENC(dpu_enc, "unsupported display interface type\n");
2230                 return -EINVAL;
2231         }
2232
2233         WARN_ON(disp_info->num_of_h_tiles < 1);
2234
2235         dpu_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;
2236
2237         DPU_DEBUG("dsi_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
2238
2239         if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
2240             (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
2241                 dpu_enc->idle_pc_supported =
2242                                 dpu_kms->catalog->caps->has_idle_pc;
2243
2244         mutex_lock(&dpu_enc->enc_lock);
2245         for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
2246                 /*
2247                  * Left-most tile is at index 0, content is controller id
2248                  * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
2249                  * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
2250                  */
2251                 u32 controller_id = disp_info->h_tile_instance[i];
2252
2253                 if (disp_info->num_of_h_tiles > 1) {
2254                         if (i == 0)
2255                                 phys_params.split_role = ENC_ROLE_MASTER;
2256                         else
2257                                 phys_params.split_role = ENC_ROLE_SLAVE;
2258                 } else {
2259                         phys_params.split_role = ENC_ROLE_SOLO;
2260                 }
2261
2262                 DPU_DEBUG("h_tile_instance %d = %d, split_role %d\n",
2263                                 i, controller_id, phys_params.split_role);
2264
2265                 phys_params.intf_idx = dpu_encoder_get_intf(dpu_kms->catalog,
2266                                 intf_type,
2267                                 controller_id);
2268                 if (phys_params.intf_idx == INTF_MAX) {
2269                         DPU_ERROR_ENC(dpu_enc, "could not get intf: type %d, id %d\n",
2270                                                   intf_type, controller_id);
2271                         ret = -EINVAL;
2272                 }
2273
2274                 if (!ret) {
2275                         ret = dpu_encoder_virt_add_phys_encs(disp_info->capabilities,
2276                                         dpu_enc,
2277                                         &phys_params);
2278                         if (ret)
2279                                 DPU_ERROR_ENC(dpu_enc, "failed to add phys encs\n");
2280                 }
2281         }
2282
2283         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2284                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2285
2286                 if (phys) {
2287                         atomic_set(&phys->vsync_cnt, 0);
2288                         atomic_set(&phys->underrun_cnt, 0);
2289                 }
2290         }
2291         mutex_unlock(&dpu_enc->enc_lock);
2292
2293         return ret;
2294 }
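
/*
 * Editor's note: an illustrative msm_display_info for a dual-DSI, two-tile
 * panel (all values hypothetical). With num_of_h_tiles = 2 the loop above
 * assigns ENC_ROLE_MASTER to tile 0 and ENC_ROLE_SLAVE to tile 1; a
 * single-tile panel would get ENC_ROLE_SOLO.
 */
static struct msm_display_info example_dual_dsi_info = {
	.intf_type = DRM_MODE_ENCODER_DSI,
	.capabilities = MSM_DISPLAY_CAP_VID_MODE,
	.num_of_h_tiles = 2,
	.h_tile_instance = { 0, 1 },	/* DSI0 = left tile, DSI1 = right tile */
};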
2295
2296 static void dpu_encoder_frame_done_timeout(struct timer_list *t)
2297 {
2298         struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
2299                         frame_done_timer);
2300         struct drm_encoder *drm_enc = &dpu_enc->base;
2301         struct msm_drm_private *priv;
2302         u32 event;
2303
2304         if (!drm_enc->dev || !drm_enc->dev->dev_private) {
2305                 DPU_ERROR("invalid parameters\n");
2306                 return;
2307         }
2308         priv = drm_enc->dev->dev_private;
2309
2310         if (!dpu_enc->frame_busy_mask[0] || !dpu_enc->crtc_frame_event_cb) {
2311                 DRM_DEBUG_KMS("id:%u invalid timeout frame_busy_mask=%lu\n",
2312                               DRMID(drm_enc), dpu_enc->frame_busy_mask[0]);
2313                 return;
2314         } else if (!atomic_xchg(&dpu_enc->frame_done_timeout, 0)) {
2315                 DRM_DEBUG_KMS("id:%u invalid timeout\n", DRMID(drm_enc));
2316                 return;
2317         }
2318
2319         DPU_ERROR_ENC(dpu_enc, "frame done timeout\n");
2320
2321         event = DPU_ENCODER_FRAME_EVENT_ERROR;
2322         trace_dpu_enc_frame_done_timeout(DRMID(drm_enc), event);
2323         dpu_enc->crtc_frame_event_cb(dpu_enc->crtc_frame_event_cb_data, event);
2324 }
2325
2326 static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs = {
2327         .mode_set = dpu_encoder_virt_mode_set,
2328         .disable = dpu_encoder_virt_disable,
2329         .enable = dpu_kms_encoder_enable,
2330         .atomic_check = dpu_encoder_virt_atomic_check,
2331
2332         /* This is called by dpu_kms_encoder_enable */
2333         .commit = dpu_encoder_virt_enable,
2334 };
2335
2336 static const struct drm_encoder_funcs dpu_encoder_funcs = {
2337                 .destroy = dpu_encoder_destroy,
2338                 .late_register = dpu_encoder_late_register,
2339                 .early_unregister = dpu_encoder_early_unregister,
2340 };
2341
2342 int dpu_encoder_setup(struct drm_device *dev, struct drm_encoder *enc,
2343                 struct msm_display_info *disp_info)
2344 {
2345         struct msm_drm_private *priv = dev->dev_private;
2346         struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
2347         struct drm_encoder *drm_enc = NULL;
2348         struct dpu_encoder_virt *dpu_enc = NULL;
2349         int ret = 0;
2350
2351         dpu_enc = to_dpu_encoder_virt(enc);
2352
2353         mutex_init(&dpu_enc->enc_lock);
2354         ret = dpu_encoder_setup_display(dpu_enc, dpu_kms, disp_info);
2355         if (ret)
2356                 goto fail;
2357
2358         dpu_enc->cur_master = NULL;
2359         spin_lock_init(&dpu_enc->enc_spinlock);
2360
2361         atomic_set(&dpu_enc->frame_done_timeout, 0);
2362         timer_setup(&dpu_enc->frame_done_timer,
2363                         dpu_encoder_frame_done_timeout, 0);
2364
2365         if (disp_info->intf_type == DRM_MODE_ENCODER_DSI)
2366                 timer_setup(&dpu_enc->vsync_event_timer,
2367                                 dpu_encoder_vsync_event_handler,
2368                                 0);
2369
2371         mutex_init(&dpu_enc->rc_lock);
2372         kthread_init_delayed_work(&dpu_enc->delayed_off_work,
2373                         dpu_encoder_off_work);
2374         dpu_enc->idle_timeout = IDLE_TIMEOUT;
2375
2376         kthread_init_work(&dpu_enc->vsync_event_work,
2377                         dpu_encoder_vsync_event_work_handler);
2378
2379         memcpy(&dpu_enc->disp_info, disp_info, sizeof(*disp_info));
2380
2381         DPU_DEBUG_ENC(dpu_enc, "created\n");
2382
2383         return ret;
2384
2385 fail:
2386         DPU_ERROR("failed to create encoder\n");
2387         if (drm_enc)
2388                 dpu_encoder_destroy(drm_enc);
2389
2390         return ret;
2393 }
2394
2395 struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
2396                 int drm_enc_mode)
2397 {
2398         struct dpu_encoder_virt *dpu_enc = NULL;
2399         int rc = 0;
2400
2401         dpu_enc = devm_kzalloc(dev->dev, sizeof(*dpu_enc), GFP_KERNEL);
2402         if (!dpu_enc)
2403                 return ERR_PTR(-ENOMEM);
2404
2405         rc = drm_encoder_init(dev, &dpu_enc->base, &dpu_encoder_funcs,
2406                         drm_enc_mode, NULL);
2407         if (rc) {
2408                 devm_kfree(dev->dev, dpu_enc);
2409                 return ERR_PTR(rc);
2410         }
2411
2412         drm_encoder_helper_add(&dpu_enc->base, &dpu_encoder_helper_funcs);
2413
2414         return &dpu_enc->base;
2415 }
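
/*
 * Editor's note: a sketch of the expected init/setup sequence, not driver
 * code. Errors from dpu_encoder_init() are pointer-encoded, which is why the
 * allocation failure above must return ERR_PTR(-ENOMEM), a negative errno,
 * rather than ERR_PTR(ENOMEM), which IS_ERR() would not treat as an error.
 */
static struct drm_encoder *example_create_encoder(struct drm_device *dev,
		struct msm_display_info *info)
{
	struct drm_encoder *enc;
	int ret;

	enc = dpu_encoder_init(dev, info->intf_type);
	if (IS_ERR(enc))
		return enc;		/* propagate the errno-pointer */

	ret = dpu_encoder_setup(dev, enc, info);
	if (ret)
		return ERR_PTR(ret);

	return enc;
}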
2416
2417 int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
2418         enum msm_event_wait event)
2419 {
2420         int (*fn_wait)(struct dpu_encoder_phys *phys_enc) = NULL;
2421         struct dpu_encoder_virt *dpu_enc = NULL;
2422         int i, ret = 0;
2423
2424         if (!drm_enc) {
2425                 DPU_ERROR("invalid encoder\n");
2426                 return -EINVAL;
2427         }
2428         dpu_enc = to_dpu_encoder_virt(drm_enc);
2429         DPU_DEBUG_ENC(dpu_enc, "\n");
2430
2431         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2432                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2433                 if (!phys)
2434                         continue;
2435
2436                 switch (event) {
2437                 case MSM_ENC_COMMIT_DONE:
2438                         fn_wait = phys->ops.wait_for_commit_done;
2439                         break;
2440                 case MSM_ENC_TX_COMPLETE:
2441                         fn_wait = phys->ops.wait_for_tx_complete;
2442                         break;
2443                 case MSM_ENC_VBLANK:
2444                         fn_wait = phys->ops.wait_for_vblank;
2445                         break;
2446                 default:
2447                         DPU_ERROR_ENC(dpu_enc, "unknown wait event %d\n",
2448                                         event);
2449                         return -EINVAL;
2450                 }
2451
2452                 if (fn_wait) {
2453                         DPU_ATRACE_BEGIN("wait_for_completion_event");
2454                         ret = fn_wait(phys);
2455                         DPU_ATRACE_END("wait_for_completion_event");
2456                         if (ret)
2457                                 return ret;
2458                 }
2459         }
2460
2461         return ret;
2462 }
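
/*
 * Editor's note: the typical commit flow as seen from the CRTC side,
 * condensed into one hypothetical helper: prepare (which may wait out the
 * previous frame), kick off, then block until the hardware reports
 * completion.
 */
static int example_commit(struct drm_encoder *drm_enc,
		struct dpu_encoder_kickoff_params *params)
{
	dpu_encoder_prepare_for_kickoff(drm_enc, params);
	dpu_encoder_kickoff(drm_enc);
	return dpu_encoder_wait_for_event(drm_enc, MSM_ENC_COMMIT_DONE);
}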
2463
2464 enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
2465 {
2466         struct dpu_encoder_virt *dpu_enc = NULL;
2467         int i;
2468
2469         if (!encoder) {
2470                 DPU_ERROR("invalid encoder\n");
2471                 return INTF_MODE_NONE;
2472         }
2473         dpu_enc = to_dpu_encoder_virt(encoder);
2474
2475         if (dpu_enc->cur_master)
2476                 return dpu_enc->cur_master->intf_mode;
2477
2478         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2479                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2480
2481                 if (phys)
2482                         return phys->intf_mode;
2483         }
2484
2485         return INTF_MODE_NONE;
2486 }