// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022 Qualcomm Innovation Center, Inc. All rights reserved.
 */

#define pr_fmt(fmt)     "[drm:%s:%d] " fmt, __func__, __LINE__

#include <linux/debugfs.h>

#include <drm/drm_framebuffer.h>

#include "dpu_encoder_phys.h"
#include "dpu_formats.h"
#include "dpu_hw_top.h"
#include "dpu_hw_wb.h"
#include "dpu_hw_lm.h"
#include "dpu_hw_merge3d.h"
#include "dpu_hw_interrupts.h"
#include "dpu_core_irq.h"
#include "dpu_vbif.h"
#include "dpu_crtc.h"
#include "disp/msm_disp_snapshot.h"

#define to_dpu_encoder_phys_wb(x) \
        container_of(x, struct dpu_encoder_phys_wb, base)

/**
 * dpu_encoder_phys_wb_is_master - report wb always as master encoder
 * @phys_enc:   Pointer to physical encoder
 */
static bool dpu_encoder_phys_wb_is_master(struct dpu_encoder_phys *phys_enc)
{
        /* there is only one physical enc for dpu_writeback */
        return true;
}

/**
 * dpu_encoder_phys_wb_set_ot_limit - set OT limit for writeback interface
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_set_ot_limit(
                struct dpu_encoder_phys *phys_enc)
{
        struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
        struct dpu_vbif_set_ot_params ot_params;

        memset(&ot_params, 0, sizeof(ot_params));
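        /*
         * Program the outstanding-transaction (OT) limit for the writeback
         * VBIF write client (xin). The client is flagged as a WFD-style
         * consumer and the limit is derived from the frame geometry and
         * refresh rate of the cached mode.
         */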
        ot_params.xin_id = hw_wb->caps->xin_id;
        ot_params.num = hw_wb->idx - WB_0;
        ot_params.width = phys_enc->cached_mode.hdisplay;
        ot_params.height = phys_enc->cached_mode.vdisplay;
        ot_params.is_wfd = true;
        ot_params.frame_rate = drm_mode_vrefresh(&phys_enc->cached_mode);
        ot_params.vbif_idx = hw_wb->caps->vbif_idx;
        ot_params.clk_ctrl = hw_wb->caps->clk_ctrl;
        ot_params.rd = false;

        dpu_vbif_set_ot_limit(phys_enc->dpu_kms, &ot_params);
}

/**
 * dpu_encoder_phys_wb_set_qos_remap - set QoS remapper for writeback
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_set_qos_remap(
                struct dpu_encoder_phys *phys_enc)
{
        struct dpu_hw_wb *hw_wb;
        struct dpu_vbif_set_qos_params qos_params;

        if (!phys_enc || !phys_enc->parent || !phys_enc->parent->crtc) {
                DPU_ERROR("invalid arguments\n");
                return;
        }

        if (!phys_enc->hw_wb || !phys_enc->hw_wb->caps) {
                DPU_ERROR("invalid writeback hardware\n");
                return;
        }

        hw_wb = phys_enc->hw_wb;

        memset(&qos_params, 0, sizeof(qos_params));
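        /*
         * Remap the writeback client in the VBIF QoS arbiter as a
         * non-real-time (is_rt = false) client, keyed by its xin id.
         */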
        qos_params.vbif_idx = hw_wb->caps->vbif_idx;
        qos_params.xin_id = hw_wb->caps->xin_id;
        qos_params.clk_ctrl = hw_wb->caps->clk_ctrl;
        qos_params.num = hw_wb->idx - WB_0;
        qos_params.is_rt = false;

        DPU_DEBUG("[qos_remap] wb:%d vbif:%d xin:%d is_rt:%d\n",
                        qos_params.num,
                        qos_params.vbif_idx,
                        qos_params.xin_id, qos_params.is_rt);

        dpu_vbif_set_qos_remap(phys_enc->dpu_kms, &qos_params);
}

/**
 * dpu_encoder_phys_wb_set_qos - set QoS/danger/safe LUTs for writeback
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_set_qos(struct dpu_encoder_phys *phys_enc)
{
        struct dpu_hw_wb *hw_wb;
        struct dpu_hw_wb_qos_cfg qos_cfg;
        const struct dpu_mdss_cfg *catalog;
        const struct dpu_qos_lut_tbl *qos_lut_tb;

        if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) {
                DPU_ERROR("invalid parameter(s)\n");
                return;
        }

        catalog = phys_enc->dpu_kms->catalog;

        hw_wb = phys_enc->hw_wb;

        memset(&qos_cfg, 0, sizeof(struct dpu_hw_wb_qos_cfg));
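        /*
         * Writeback uses the non-real-time (NRT) entries of the catalog's
         * danger/safe/CREQ LUT tables, with the danger/safe mechanism
         * enabled.
         */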
        qos_cfg.danger_safe_en = true;
        qos_cfg.danger_lut =
                catalog->perf->danger_lut_tbl[DPU_QOS_LUT_USAGE_NRT];

        qos_cfg.safe_lut = catalog->perf->safe_lut_tbl[DPU_QOS_LUT_USAGE_NRT];

        qos_lut_tb = &catalog->perf->qos_lut_tbl[DPU_QOS_LUT_USAGE_NRT];
        qos_cfg.creq_lut = _dpu_hw_get_qos_lut(qos_lut_tb, 0);

        if (hw_wb->ops.setup_qos_lut)
                hw_wb->ops.setup_qos_lut(hw_wb, &qos_cfg);
}

/**
 * dpu_encoder_phys_wb_setup_fb - setup output framebuffer
 * @phys_enc:   Pointer to physical encoder
 * @fb:         Pointer to output framebuffer
 */
static void dpu_encoder_phys_wb_setup_fb(struct dpu_encoder_phys *phys_enc,
                struct drm_framebuffer *fb)
{
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
        struct dpu_hw_wb *hw_wb;
        struct dpu_hw_wb_cfg *wb_cfg;
        struct dpu_hw_cdp_cfg cdp_cfg;

        if (!phys_enc || !phys_enc->dpu_kms || !phys_enc->dpu_kms->catalog) {
                DPU_ERROR("invalid encoder\n");
                return;
        }

        hw_wb = phys_enc->hw_wb;
        wb_cfg = &wb_enc->wb_cfg;

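        /*
         * The destination format and plane layout in wb_cfg were populated
         * in dpu_encoder_phys_wb_prepare_wb_job(); here only the ROI, output
         * format/address registers and the CDP prefetch are programmed (the
         * fb argument itself is not referenced in this function).
         */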
        wb_cfg->intf_mode = phys_enc->intf_mode;
        wb_cfg->roi.x1 = 0;
        wb_cfg->roi.x2 = phys_enc->cached_mode.hdisplay;
        wb_cfg->roi.y1 = 0;
        wb_cfg->roi.y2 = phys_enc->cached_mode.vdisplay;

        if (hw_wb->ops.setup_roi)
                hw_wb->ops.setup_roi(hw_wb, wb_cfg);

        if (hw_wb->ops.setup_outformat)
                hw_wb->ops.setup_outformat(hw_wb, wb_cfg);

        if (hw_wb->ops.setup_cdp) {
                memset(&cdp_cfg, 0, sizeof(struct dpu_hw_cdp_cfg));

                cdp_cfg.enable = phys_enc->dpu_kms->catalog->perf->cdp_cfg
                                [DPU_PERF_CDP_USAGE_NRT].wr_enable;
                cdp_cfg.ubwc_meta_enable =
                                DPU_FORMAT_IS_UBWC(wb_cfg->dest.format);
                cdp_cfg.tile_amortize_enable =
                                DPU_FORMAT_IS_UBWC(wb_cfg->dest.format) ||
                                DPU_FORMAT_IS_TILE(wb_cfg->dest.format);
                cdp_cfg.preload_ahead = DPU_WB_CDP_PRELOAD_AHEAD_64;

                hw_wb->ops.setup_cdp(hw_wb, &cdp_cfg);
        }

        if (hw_wb->ops.setup_outaddress)
                hw_wb->ops.setup_outaddress(hw_wb, wb_cfg);
}

/**
 * dpu_encoder_phys_wb_setup_cdp - setup CTL interface config, 3D merge and
 *                                 pingpong binding for the writeback path
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_setup_cdp(struct dpu_encoder_phys *phys_enc)
{
        struct dpu_hw_wb *hw_wb;
        struct dpu_hw_ctl *ctl;

        if (!phys_enc) {
                DPU_ERROR("invalid encoder\n");
                return;
        }

        hw_wb = phys_enc->hw_wb;
        ctl = phys_enc->hw_ctl;

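        /*
         * On targets with an active CTL (DPU_CTL_ACTIVE_CFG), the writeback,
         * 3D merge and pingpong blocks are bound through the CTL active
         * configuration; otherwise the 3D blend mode is passed in the legacy
         * interface configuration below.
         */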
        if (ctl && ctl->ops.setup_intf_cfg &&
            test_bit(DPU_CTL_ACTIVE_CFG, &ctl->caps->features)) {
                struct dpu_hw_intf_cfg intf_cfg = {0};
                struct dpu_hw_pingpong *hw_pp = phys_enc->hw_pp;
                enum dpu_3d_blend_mode mode_3d;

                mode_3d = dpu_encoder_helper_get_3d_blend_mode(phys_enc);

                intf_cfg.intf = DPU_NONE;
                intf_cfg.wb = hw_wb->idx;

                if (mode_3d && hw_pp && hw_pp->merge_3d)
                        intf_cfg.merge_3d = hw_pp->merge_3d->idx;

                if (hw_pp && hw_pp->merge_3d && hw_pp->merge_3d->ops.setup_3d_mode)
                        hw_pp->merge_3d->ops.setup_3d_mode(hw_pp->merge_3d,
                                        mode_3d);

                /* setup which pp blk will connect to this wb */
                if (hw_pp && hw_wb->ops.bind_pingpong_blk)
                        hw_wb->ops.bind_pingpong_blk(hw_wb, true,
                                        hw_pp->idx);

                ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
        } else if (ctl && ctl->ops.setup_intf_cfg) {
                struct dpu_hw_intf_cfg intf_cfg = {0};

                intf_cfg.intf = DPU_NONE;
                intf_cfg.wb = hw_wb->idx;
                intf_cfg.mode_3d =
                        dpu_encoder_helper_get_3d_blend_mode(phys_enc);
                ctl->ops.setup_intf_cfg(ctl, &intf_cfg);
        }
}

/**
 * dpu_encoder_phys_wb_atomic_check - verify and fixup given atomic states
 * @phys_enc:   Pointer to physical encoder
 * @crtc_state: Pointer to CRTC atomic state
 * @conn_state: Pointer to connector atomic state
 */
static int dpu_encoder_phys_wb_atomic_check(
                struct dpu_encoder_phys *phys_enc,
                struct drm_crtc_state *crtc_state,
                struct drm_connector_state *conn_state)
{
        struct drm_framebuffer *fb;
        const struct drm_display_mode *mode = &crtc_state->mode;

        DPU_DEBUG("[atomic_check:%d, \"%s\",%d,%d]\n",
                        phys_enc->wb_idx, mode->name, mode->hdisplay, mode->vdisplay);

        if (!conn_state || !conn_state->connector) {
                DPU_ERROR("invalid connector state\n");
                return -EINVAL;
        } else if (conn_state->connector->status !=
                        connector_status_connected) {
                DPU_ERROR("connector not connected %d\n",
                                conn_state->connector->status);
                return -EINVAL;
        }

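        /*
         * No writeback job or output fb means this commit does not use
         * writeback; nothing further to validate.
         */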
        if (!conn_state->writeback_job || !conn_state->writeback_job->fb)
                return 0;

        fb = conn_state->writeback_job->fb;

        DPU_DEBUG("[fb_id:%u][fb:%u,%u]\n", fb->base.id,
                        fb->width, fb->height);

        if (fb->width != mode->hdisplay) {
                DPU_ERROR("invalid fb w=%d, mode w=%d\n", fb->width,
                                mode->hdisplay);
                return -EINVAL;
        } else if (fb->height != mode->vdisplay) {
                DPU_ERROR("invalid fb h=%d, mode h=%d\n", fb->height,
                                  mode->vdisplay);
                return -EINVAL;
        } else if (fb->width > phys_enc->hw_wb->caps->maxlinewidth) {
                DPU_ERROR("invalid fb w=%d, maxlinewidth=%u\n",
                                  fb->width, phys_enc->hw_wb->caps->maxlinewidth);
                return -EINVAL;
        }

        return 0;
}

/**
 * _dpu_encoder_phys_wb_update_flush - flush hardware update
 * @phys_enc:   Pointer to physical encoder
 */
static void _dpu_encoder_phys_wb_update_flush(struct dpu_encoder_phys *phys_enc)
{
        struct dpu_hw_wb *hw_wb;
        struct dpu_hw_ctl *hw_ctl;
        struct dpu_hw_pingpong *hw_pp;
        u32 pending_flush = 0;

        if (!phys_enc)
                return;

        hw_wb = phys_enc->hw_wb;
        hw_pp = phys_enc->hw_pp;
        hw_ctl = phys_enc->hw_ctl;

        DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

        if (!hw_ctl) {
                DPU_DEBUG("[wb:%d] no ctl assigned\n", hw_wb->idx - WB_0);
                return;
        }

        if (hw_ctl->ops.update_pending_flush_wb)
                hw_ctl->ops.update_pending_flush_wb(hw_ctl, hw_wb->idx);

        if (hw_ctl->ops.update_pending_flush_merge_3d && hw_pp && hw_pp->merge_3d)
                hw_ctl->ops.update_pending_flush_merge_3d(hw_ctl,
                                hw_pp->merge_3d->idx);

        if (hw_ctl->ops.get_pending_flush)
                pending_flush = hw_ctl->ops.get_pending_flush(hw_ctl);

        DPU_DEBUG("Pending flush mask for CTL_%d is 0x%x, WB %d\n",
                        hw_ctl->idx - CTL_0, pending_flush,
                        hw_wb->idx - WB_0);
}

/**
 * dpu_encoder_phys_wb_setup - setup writeback encoder
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_setup(
                struct dpu_encoder_phys *phys_enc)
{
        struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
        struct drm_display_mode mode = phys_enc->cached_mode;
        struct drm_framebuffer *fb = NULL;

        DPU_DEBUG("[mode_set:%d, \"%s\",%d,%d]\n",
                        hw_wb->idx - WB_0, mode.name,
                        mode.hdisplay, mode.vdisplay);

        dpu_encoder_phys_wb_set_ot_limit(phys_enc);

        dpu_encoder_phys_wb_set_qos_remap(phys_enc);

        dpu_encoder_phys_wb_set_qos(phys_enc);

        dpu_encoder_phys_wb_setup_fb(phys_enc, fb);

        dpu_encoder_phys_wb_setup_cdp(phys_enc);
}

static void _dpu_encoder_phys_wb_frame_done_helper(void *arg)
{
        struct dpu_encoder_phys *phys_enc = arg;
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
        struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
        unsigned long lock_flags;
        u32 event = DPU_ENCODER_FRAME_EVENT_DONE;

        DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

        dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, event);

        dpu_encoder_vblank_callback(phys_enc->parent, phys_enc);

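        /*
         * Drop one pending kickoff under the encoder spinlock; waiters in
         * wait_for_commit_done() block on this count reaching zero and are
         * woken via pending_kickoff_wq below.
         */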
        spin_lock_irqsave(phys_enc->enc_spinlock, lock_flags);
        atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);
        spin_unlock_irqrestore(phys_enc->enc_spinlock, lock_flags);

        if (wb_enc->wb_conn)
                drm_writeback_signal_completion(wb_enc->wb_conn, 0);

        /* Signal any waiting atomic commit thread */
        wake_up_all(&phys_enc->pending_kickoff_wq);
}

/**
 * dpu_encoder_phys_wb_done_irq - writeback interrupt handler
 * @arg:        Pointer to writeback encoder
 * @irq_idx:    interrupt index
 */
static void dpu_encoder_phys_wb_done_irq(void *arg, int irq_idx)
{
        _dpu_encoder_phys_wb_frame_done_helper(arg);
}

/**
 * dpu_encoder_phys_wb_irq_ctrl - irq control of WB
 * @phys:       Pointer to physical encoder
 * @enable:     indicates enable or disable interrupts
 */
static void dpu_encoder_phys_wb_irq_ctrl(
                struct dpu_encoder_phys *phys, bool enable)
{
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys);

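        /*
         * Register/unregister the WB_DONE interrupt callback under a
         * refcount, so nested enable/disable requests stay balanced.
         */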
        if (enable && atomic_inc_return(&wb_enc->wbirq_refcount) == 1)
                dpu_core_irq_register_callback(phys->dpu_kms,
                                phys->irq[INTR_IDX_WB_DONE], dpu_encoder_phys_wb_done_irq, phys);
        else if (!enable &&
                        atomic_dec_return(&wb_enc->wbirq_refcount) == 0)
                dpu_core_irq_unregister_callback(phys->dpu_kms, phys->irq[INTR_IDX_WB_DONE]);
}

static void dpu_encoder_phys_wb_atomic_mode_set(
                struct dpu_encoder_phys *phys_enc,
                struct drm_crtc_state *crtc_state,
                struct drm_connector_state *conn_state)
{
        phys_enc->irq[INTR_IDX_WB_DONE] = phys_enc->hw_wb->caps->intr_wb_done;
}

static void _dpu_encoder_phys_wb_handle_wbdone_timeout(
                struct dpu_encoder_phys *phys_enc)
{
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
        u32 frame_event = DPU_ENCODER_FRAME_EVENT_ERROR;

        wb_enc->wb_done_timeout_cnt++;

        if (wb_enc->wb_done_timeout_cnt == 1)
                msm_disp_snapshot_state(phys_enc->parent->dev);

        atomic_add_unless(&phys_enc->pending_kickoff_cnt, -1, 0);

        /* request a ctl reset before the next kickoff */
        phys_enc->enable_state = DPU_ENC_ERR_NEEDS_HW_RESET;

        if (wb_enc->wb_conn)
                drm_writeback_signal_completion(wb_enc->wb_conn, 0);

        dpu_encoder_frame_done_callback(phys_enc->parent, phys_enc, frame_event);
}

/**
 * dpu_encoder_phys_wb_wait_for_commit_done - wait until request is committed
 * @phys_enc:   Pointer to physical encoder
 */
static int dpu_encoder_phys_wb_wait_for_commit_done(
                struct dpu_encoder_phys *phys_enc)
{
        int ret;
        struct dpu_encoder_wait_info wait_info;
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);

        wait_info.wq = &phys_enc->pending_kickoff_wq;
        wait_info.atomic_cnt = &phys_enc->pending_kickoff_cnt;
        wait_info.timeout_ms = KICKOFF_TIMEOUT_MS;

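        /*
         * Block until the WB_DONE interrupt drops pending_kickoff_cnt; on
         * timeout a device snapshot is captured (first occurrence only) and
         * a CTL reset is requested before the next kickoff.
         */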
        ret = dpu_encoder_helper_wait_for_irq(phys_enc, INTR_IDX_WB_DONE,
                        dpu_encoder_phys_wb_done_irq, &wait_info);
        if (ret == -ETIMEDOUT)
                _dpu_encoder_phys_wb_handle_wbdone_timeout(phys_enc);
        else if (!ret)
                wb_enc->wb_done_timeout_cnt = 0;

        return ret;
}

/**
 * dpu_encoder_phys_wb_prepare_for_kickoff - pre-kickoff processing
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_prepare_for_kickoff(
                struct dpu_encoder_phys *phys_enc)
{
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
        struct drm_connector *drm_conn;
        struct drm_connector_state *state;

        DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);

        if (!wb_enc->wb_conn || !wb_enc->wb_job) {
                DPU_ERROR("invalid wb_conn or wb_job\n");
                return;
        }

        drm_conn = &wb_enc->wb_conn->base;
        state = drm_conn->state;

        drm_writeback_queue_job(wb_enc->wb_conn, state);

        dpu_encoder_phys_wb_setup(phys_enc);

        _dpu_encoder_phys_wb_update_flush(phys_enc);
}

/**
 * dpu_encoder_phys_wb_needs_single_flush - trigger flush processing
 * @phys_enc:   Pointer to physical encoder
 */
static bool dpu_encoder_phys_wb_needs_single_flush(struct dpu_encoder_phys *phys_enc)
{
        DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);
        return false;
}

/**
 * dpu_encoder_phys_wb_handle_post_kickoff - post-kickoff processing
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_handle_post_kickoff(
                struct dpu_encoder_phys *phys_enc)
{
        DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);
}

/**
 * dpu_encoder_phys_wb_enable - enable writeback encoder
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_enable(struct dpu_encoder_phys *phys_enc)
{
        DPU_DEBUG("[wb:%d]\n", phys_enc->hw_wb->idx - WB_0);
        phys_enc->enable_state = DPU_ENC_ENABLED;
}

/**
 * dpu_encoder_phys_wb_disable - disable writeback encoder
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_disable(struct dpu_encoder_phys *phys_enc)
{
        struct dpu_hw_wb *hw_wb = phys_enc->hw_wb;
        struct dpu_hw_ctl *hw_ctl = phys_enc->hw_ctl;

        DPU_DEBUG("[wb:%d]\n", hw_wb->idx - WB_0);

        if (phys_enc->enable_state == DPU_ENC_DISABLED) {
                DPU_ERROR("encoder is already disabled\n");
                return;
        }

        /* reset h/w before final flush */
        if (hw_ctl->ops.clear_pending_flush)
                hw_ctl->ops.clear_pending_flush(hw_ctl);

        /*
         * New CTL reset sequence from 5.0 MDP onwards.
         * If DPU_CTL_ACTIVE_CFG is not set, the legacy reset
         * sequence must be used instead.
         *
         * The legacy reset sequence has not been implemented yet.
         * Targets earlier than SM8150 will need it, so the legacy
         * teardown sequence must be added when WB support is
         * extended to those targets.
         */
        if (hw_ctl->caps->features & BIT(DPU_CTL_ACTIVE_CFG))
                dpu_encoder_helper_phys_cleanup(phys_enc);

        phys_enc->enable_state = DPU_ENC_DISABLED;
}

/**
 * dpu_encoder_phys_wb_destroy - destroy writeback encoder
 * @phys_enc:   Pointer to physical encoder
 */
static void dpu_encoder_phys_wb_destroy(struct dpu_encoder_phys *phys_enc)
{
        if (!phys_enc)
                return;

        DPU_DEBUG("[wb:%d]\n", phys_enc->wb_idx - WB_0);

        kfree(phys_enc);
}

static void dpu_encoder_phys_wb_prepare_wb_job(struct dpu_encoder_phys *phys_enc,
                struct drm_writeback_job *job)
{
        const struct msm_format *format;
        struct msm_gem_address_space *aspace;
        struct dpu_hw_wb_cfg *wb_cfg;
        int ret;
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);

        if (!job->fb)
                return;

        wb_enc->wb_job = job;
        wb_enc->wb_conn = job->connector;
        aspace = phys_enc->dpu_kms->base.aspace;

        wb_cfg = &wb_enc->wb_cfg;

        memset(wb_cfg, 0, sizeof(struct dpu_hw_wb_cfg));

        ret = msm_framebuffer_prepare(job->fb, aspace, false);
        if (ret) {
                DPU_ERROR("prep fb failed, %d\n", ret);
                return;
        }

        format = msm_framebuffer_format(job->fb);

        wb_cfg->dest.format = dpu_get_dpu_format_ext(
                        format->pixel_format, job->fb->modifier);
        if (!wb_cfg->dest.format) {
                /* this error should be detected during atomic_check */
                DPU_ERROR("failed to get format %x\n", format->pixel_format);
                return;
        }

        ret = dpu_format_populate_layout(aspace, job->fb, &wb_cfg->dest);
        if (ret) {
                DPU_DEBUG("failed to populate layout %d\n", ret);
                return;
        }

        wb_cfg->dest.width = job->fb->width;
        wb_cfg->dest.height = job->fb->height;
        wb_cfg->dest.num_planes = wb_cfg->dest.format->num_planes;

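        /*
         * For planar YUV layouts whose first chroma component is Cb, the two
         * chroma plane addresses are swapped below; this appears to align
         * the programmed plane order with what the WB block expects.
         */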
        if ((wb_cfg->dest.format->fetch_planes == DPU_PLANE_PLANAR) &&
                        (wb_cfg->dest.format->element[0] == C1_B_Cb))
                swap(wb_cfg->dest.plane_addr[1], wb_cfg->dest.plane_addr[2]);

        DPU_DEBUG("[fb_offset:%8.8x,%8.8x,%8.8x,%8.8x]\n",
                        wb_cfg->dest.plane_addr[0], wb_cfg->dest.plane_addr[1],
                        wb_cfg->dest.plane_addr[2], wb_cfg->dest.plane_addr[3]);

        DPU_DEBUG("[fb_stride:%8.8x,%8.8x,%8.8x,%8.8x]\n",
                        wb_cfg->dest.plane_pitch[0], wb_cfg->dest.plane_pitch[1],
                        wb_cfg->dest.plane_pitch[2], wb_cfg->dest.plane_pitch[3]);
}

static void dpu_encoder_phys_wb_cleanup_wb_job(struct dpu_encoder_phys *phys_enc,
                struct drm_writeback_job *job)
{
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);
        struct msm_gem_address_space *aspace;

        if (!job->fb)
                return;

        aspace = phys_enc->dpu_kms->base.aspace;

        msm_framebuffer_cleanup(job->fb, aspace, false);
        wb_enc->wb_job = NULL;
        wb_enc->wb_conn = NULL;
}

static bool dpu_encoder_phys_wb_is_valid_for_commit(struct dpu_encoder_phys *phys_enc)
{
        struct dpu_encoder_phys_wb *wb_enc = to_dpu_encoder_phys_wb(phys_enc);

        return wb_enc->wb_job != NULL;
}

/**
 * dpu_encoder_phys_wb_init_ops - initialize writeback operations
 * @ops:        Pointer to encoder operation table
 */
static void dpu_encoder_phys_wb_init_ops(struct dpu_encoder_phys_ops *ops)
{
        ops->is_master = dpu_encoder_phys_wb_is_master;
        ops->atomic_mode_set = dpu_encoder_phys_wb_atomic_mode_set;
        ops->enable = dpu_encoder_phys_wb_enable;
        ops->disable = dpu_encoder_phys_wb_disable;
        ops->destroy = dpu_encoder_phys_wb_destroy;
        ops->atomic_check = dpu_encoder_phys_wb_atomic_check;
        ops->wait_for_commit_done = dpu_encoder_phys_wb_wait_for_commit_done;
        ops->prepare_for_kickoff = dpu_encoder_phys_wb_prepare_for_kickoff;
        ops->handle_post_kickoff = dpu_encoder_phys_wb_handle_post_kickoff;
        ops->needs_single_flush = dpu_encoder_phys_wb_needs_single_flush;
        ops->trigger_start = dpu_encoder_helper_trigger_start;
        ops->prepare_wb_job = dpu_encoder_phys_wb_prepare_wb_job;
        ops->cleanup_wb_job = dpu_encoder_phys_wb_cleanup_wb_job;
        ops->irq_control = dpu_encoder_phys_wb_irq_ctrl;
        ops->is_valid_for_commit = dpu_encoder_phys_wb_is_valid_for_commit;
}

/**
 * dpu_encoder_phys_wb_init - initialize writeback encoder
 * @p:  Pointer to init info structure with initialization params
 */
struct dpu_encoder_phys *dpu_encoder_phys_wb_init(
                struct dpu_enc_phys_init_params *p)
{
        struct dpu_encoder_phys *phys_enc = NULL;
        struct dpu_encoder_phys_wb *wb_enc = NULL;
        int ret = 0;
        int i;

        DPU_DEBUG("\n");

        if (!p || !p->parent) {
                DPU_ERROR("invalid params\n");
                ret = -EINVAL;
                goto fail_alloc;
        }

        wb_enc = kzalloc(sizeof(*wb_enc), GFP_KERNEL);
        if (!wb_enc) {
                DPU_ERROR("failed to allocate wb phys_enc\n");
                ret = -ENOMEM;
                goto fail_alloc;
        }

        phys_enc = &wb_enc->base;
        phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;

        dpu_encoder_phys_wb_init_ops(&phys_enc->ops);
        phys_enc->parent = p->parent;
        phys_enc->dpu_kms = p->dpu_kms;
        phys_enc->split_role = p->split_role;
        phys_enc->intf_mode = INTF_MODE_WB_LINE;
        phys_enc->wb_idx = p->wb_idx;
        phys_enc->enc_spinlock = p->enc_spinlock;

        atomic_set(&wb_enc->wbirq_refcount, 0);

        for (i = 0; i < ARRAY_SIZE(phys_enc->irq); i++)
                phys_enc->irq[i] = -EINVAL;

        atomic_set(&phys_enc->pending_kickoff_cnt, 0);
        atomic_set(&phys_enc->vblank_refcount, 0);
        wb_enc->wb_done_timeout_cnt = 0;

        init_waitqueue_head(&phys_enc->pending_kickoff_wq);
        phys_enc->enable_state = DPU_ENC_DISABLED;

        DPU_DEBUG("Created dpu_encoder_phys for wb %d\n",
                        phys_enc->wb_idx);

        return phys_enc;

fail_alloc:
        return ERR_PTR(ret);
}