1 /*
2  * Copyright © 2008 Intel Corporation
3  *             2014 Red Hat Inc.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the next
13  * paragraph) shall be included in all copies or substantial portions of the
14  * Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
22  * IN THE SOFTWARE.
23  *
24  */
25
26 #include <drm/drm_atomic.h>
27 #include <drm/drm_atomic_helper.h>
28 #include <drm/drm_edid.h>
29 #include <drm/drm_fixed.h>
30 #include <drm/drm_probe_helper.h>
31
32 #include "i915_drv.h"
33 #include "i915_reg.h"
34 #include "intel_atomic.h"
35 #include "intel_audio.h"
36 #include "intel_connector.h"
37 #include "intel_crtc.h"
38 #include "intel_ddi.h"
39 #include "intel_de.h"
40 #include "intel_display_driver.h"
41 #include "intel_display_types.h"
42 #include "intel_dp.h"
43 #include "intel_dp_hdcp.h"
44 #include "intel_dp_link_training.h"
45 #include "intel_dp_mst.h"
46 #include "intel_dp_test.h"
47 #include "intel_dp_tunnel.h"
48 #include "intel_dpio_phy.h"
49 #include "intel_hdcp.h"
50 #include "intel_hotplug.h"
51 #include "intel_link_bw.h"
52 #include "intel_psr.h"
53 #include "intel_vdsc.h"
54 #include "skl_scaler.h"
55
56 /*
57  * DP MST (DisplayPort Multi-Stream Transport)
58  *
59  * MST support on the source depends on the platform and port. DP initialization
60  * sets up MST for each MST capable encoder. This will become the primary
61  * encoder for the port.
62  *
63  * MST initialization of each primary encoder creates MST stream encoders, one
64  * per pipe, and initializes the MST topology manager. The MST stream encoders
65  * are sometimes called "fake encoders", because they're virtual, not
66  * physical. Thus there are (number of MST capable ports) x (number of pipes)
67  * MST stream encoders in total.
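 * For example (an illustrative count, not tied to any particular platform):
 * a device with 4 MST capable ports and 4 pipes would register 4 x 4 = 16
 * fake stream encoders on top of the 4 primary encoders.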
68  *
69  * The decision to use MST for a sink is made at detect time on the connector
70  * attached to the primary encoder, and it will not change while the sink stays
71  * connected. We always use MST when possible, including for SST sinks with
72  * sideband messaging support.
73  *
74  * The connectors for the MST streams are added and removed dynamically by the
75  * topology manager. Their connection status is also determined by the topology
76  * manager.
77  *
78  * On hardware, each transcoder may be associated with a single DDI
79  * port. Multiple transcoders may be associated with the same DDI port only if
80  * the port is in MST mode.
81  *
82  * On TGL+, all the transcoders streaming on the same DDI port will indicate a
83  * primary transcoder; the TGL_DP_TP_CTL and TGL_DP_TP_STATUS registers are
84  * relevant only on the primary transcoder. Prior to that, they are port
85  * registers.
86  */
87
88 /* From fake MST stream encoder to primary encoder */
89 static struct intel_encoder *to_primary_encoder(struct intel_encoder *encoder)
90 {
91         struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
92         struct intel_digital_port *dig_port = intel_mst->primary;
93
94         return &dig_port->base;
95 }
96
97 /* From fake MST stream encoder to primary DP */
98 static struct intel_dp *to_primary_dp(struct intel_encoder *encoder)
99 {
100         struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
101         struct intel_digital_port *dig_port = intel_mst->primary;
102
103         return &dig_port->dp;
104 }
105
106 static int intel_dp_mst_max_dpt_bpp(const struct intel_crtc_state *crtc_state,
107                                     bool dsc)
108 {
109         struct intel_display *display = to_intel_display(crtc_state);
110         const struct drm_display_mode *adjusted_mode =
111                 &crtc_state->hw.adjusted_mode;
112
113         if (!intel_dp_is_uhbr(crtc_state) || DISPLAY_VER(display) >= 20 || !dsc)
114                 return INT_MAX;
115
116         /*
117          * DSC->DPT interface width:
118          *   ICL-MTL: 72 bits (each branch has 72 bits, only left branch is used)
119          *   LNL+:    144 bits (not a bottleneck in any config)
120          *
121          * Bspec/49259 suggests that the FEC overhead needs to be
122          * applied here, though HW people claim that neither this FEC
123          * nor any other overhead is applicable here (that is, the actual
124          * available_bw is just symbol_clock * 72). However, based on
125          * testing on MTL-P with the
126          * - DELL U3224KBA display
127          * - Unigraf UCD-500 CTS test sink
128          * devices, for the
129          * - 5120x2880/995.59MHz
130          * - 6016x3384/1357.23MHz
131          * - 6144x3456/1413.39MHz
132          * modes (all the ones having a DPT limit on the above devices),
133          * both the channel coding efficiency and an additional 3%
134          * overhead need to be accounted for.
135          */
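        /*
         * A sketch of the calculation below (illustrative only, assuming
         * drm_dp_bw_channel_coding_efficiency() returns a ppm value):
         *
         *   max_dpt_bpp ~= symbol_clock * 72 * efficiency /
         *                  (pixel_clock * 1.03)
         *
         * with both clocks in kHz and the efficiency as a fraction; the
         * 1030000 divisor folds the ppm scaling of the efficiency together
         * with the extra 3% overhead mentioned above.
         */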
136         return div64_u64(mul_u32_u32(intel_dp_link_symbol_clock(crtc_state->port_clock) * 72,
137                                      drm_dp_bw_channel_coding_efficiency(true)),
138                          mul_u32_u32(adjusted_mode->crtc_clock, 1030000));
139 }
140
141 static int intel_dp_mst_bw_overhead(const struct intel_crtc_state *crtc_state,
142                                     const struct intel_connector *connector,
143                                     bool ssc, int dsc_slice_count, int bpp_x16)
144 {
145         const struct drm_display_mode *adjusted_mode =
146                 &crtc_state->hw.adjusted_mode;
147         unsigned long flags = DRM_DP_BW_OVERHEAD_MST;
148         int overhead;
149
150         flags |= intel_dp_is_uhbr(crtc_state) ? DRM_DP_BW_OVERHEAD_UHBR : 0;
151         flags |= ssc ? DRM_DP_BW_OVERHEAD_SSC_REF_CLK : 0;
152         flags |= crtc_state->fec_enable ? DRM_DP_BW_OVERHEAD_FEC : 0;
153
154         if (dsc_slice_count)
155                 flags |= DRM_DP_BW_OVERHEAD_DSC;
156
157         overhead = drm_dp_bw_overhead(crtc_state->lane_count,
158                                       adjusted_mode->hdisplay,
159                                       dsc_slice_count,
160                                       bpp_x16,
161                                       flags);
162
163         /*
164          * TODO: clarify whether the minimum imposed by the fixed FEC overhead
165          * in the bspec audio programming sequence needs to be enforced here.
166          */
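        /*
         * Unit note (an assumption based on how these values are consumed):
         * both drm_dp_bw_overhead() and intel_dp_bw_fec_overhead() return
         * ppm-scaled multipliers, with 1000000 meaning no overhead, so the
         * max() below simply picks the larger of the two overheads.
         */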
167         return max(overhead, intel_dp_bw_fec_overhead(crtc_state->fec_enable));
168 }
169
170 static void intel_dp_mst_compute_m_n(const struct intel_crtc_state *crtc_state,
171                                      const struct intel_connector *connector,
172                                      int overhead,
173                                      int bpp_x16,
174                                      struct intel_link_m_n *m_n)
175 {
176         const struct drm_display_mode *adjusted_mode =
177                 &crtc_state->hw.adjusted_mode;
178
179         /* TODO: Check WA 14013163432 to set data M/N for full BW utilization. */
180         intel_link_compute_m_n(bpp_x16, crtc_state->lane_count,
181                                adjusted_mode->crtc_clock,
182                                crtc_state->port_clock,
183                                overhead,
184                                m_n);
185
186         m_n->tu = DIV_ROUND_UP_ULL(mul_u32_u32(m_n->data_m, 64), m_n->data_n);
187 }
188
189 static int intel_dp_mst_calc_pbn(int pixel_clock, int bpp_x16, int bw_overhead)
190 {
191         int effective_data_rate =
192                 intel_dp_effective_data_rate(pixel_clock, bpp_x16, bw_overhead);
193
194         /*
195          * TODO: Use drm_dp_calc_pbn_mode() instead, once it's converted
196          * to calculate PBN with the BW overhead passed to it.
197          */
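        /*
         * Background, per the MST spec's definition of PBN as units of 54/64
         * MBytes/sec (and assuming effective_data_rate is in kBytes/sec):
         * the conversion below is rate / 1000 * 64 / 54, rounded up. E.g. a
         * 2,000,000 kBytes/sec stream maps to
         * DIV_ROUND_UP(2000000 * 64, 54 * 1000) = 2371 PBN.
         */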
198         return DIV_ROUND_UP(effective_data_rate * 64, 54 * 1000);
199 }
200
201 static int intel_dp_mst_dsc_get_slice_count(const struct intel_connector *connector,
202                                             const struct intel_crtc_state *crtc_state)
203 {
204         const struct drm_display_mode *adjusted_mode =
205                 &crtc_state->hw.adjusted_mode;
206         int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state);
207
208         return intel_dp_dsc_get_slice_count(connector,
209                                             adjusted_mode->clock,
210                                             adjusted_mode->hdisplay,
211                                             num_joined_pipes);
212 }
213
214 static int mst_stream_find_vcpi_slots_for_bpp(struct intel_dp *intel_dp,
215                                               struct intel_crtc_state *crtc_state,
216                                               int max_bpp, int min_bpp,
217                                               struct link_config_limits *limits,
218                                               struct drm_connector_state *conn_state,
219                                               int step, bool dsc)
220 {
221         struct intel_display *display = to_intel_display(intel_dp);
222         struct drm_atomic_state *state = crtc_state->uapi.state;
223         struct drm_dp_mst_topology_state *mst_state;
224         struct intel_connector *connector =
225                 to_intel_connector(conn_state->connector);
226         const struct drm_display_mode *adjusted_mode =
227                 &crtc_state->hw.adjusted_mode;
228         int bpp, slots = -EINVAL;
229         int dsc_slice_count = 0;
230         int max_dpt_bpp;
231         int ret = 0;
232
233         mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst_mgr);
234         if (IS_ERR(mst_state))
235                 return PTR_ERR(mst_state);
236
237         crtc_state->lane_count = limits->max_lane_count;
238         crtc_state->port_clock = limits->max_rate;
239
240         if (dsc) {
241                 if (!intel_dp_supports_fec(intel_dp, connector, crtc_state))
242                         return -EINVAL;
243
244                 crtc_state->fec_enable = !intel_dp_is_uhbr(crtc_state);
245         }
246
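        /*
         * pbn_div, as returned by drm_dp_get_vc_payload_bw(), is the payload
         * bandwidth (in PBN) of a single MTP time slot on this link; it's
         * used further below to convert a payload's PBN value into the number
         * of time slots it occupies.
         */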
247         mst_state->pbn_div = drm_dp_get_vc_payload_bw(&intel_dp->mst_mgr,
248                                                       crtc_state->port_clock,
249                                                       crtc_state->lane_count);
250
251         max_dpt_bpp = intel_dp_mst_max_dpt_bpp(crtc_state, dsc);
252         if (max_bpp > max_dpt_bpp) {
253                 drm_dbg_kms(display->drm, "Limiting bpp to max DPT bpp (%d -> %d)\n",
254                             max_bpp, max_dpt_bpp);
255                 max_bpp = max_dpt_bpp;
256         }
257
258         drm_dbg_kms(display->drm, "Looking for slots in range min bpp %d max bpp %d\n",
259                     min_bpp, max_bpp);
260
261         if (dsc) {
262                 dsc_slice_count = intel_dp_mst_dsc_get_slice_count(connector, crtc_state);
263                 if (!dsc_slice_count) {
264                         drm_dbg_kms(display->drm, "Can't get valid DSC slice count\n");
265
266                         return -ENOSPC;
267                 }
268         }
269
270         for (bpp = max_bpp; bpp >= min_bpp; bpp -= step) {
271                 int local_bw_overhead;
272                 int remote_bw_overhead;
273                 int link_bpp_x16;
274                 int remote_tu;
275                 fixed20_12 pbn;
276
277                 drm_dbg_kms(display->drm, "Trying bpp %d\n", bpp);
278
279                 link_bpp_x16 = fxp_q4_from_int(dsc ? bpp :
280                                                intel_dp_output_bpp(crtc_state->output_format, bpp));
281
282                 local_bw_overhead = intel_dp_mst_bw_overhead(crtc_state, connector,
283                                                              false, dsc_slice_count, link_bpp_x16);
284                 remote_bw_overhead = intel_dp_mst_bw_overhead(crtc_state, connector,
285                                                               true, dsc_slice_count, link_bpp_x16);
286
287                 intel_dp_mst_compute_m_n(crtc_state, connector,
288                                          local_bw_overhead,
289                                          link_bpp_x16,
290                                          &crtc_state->dp_m_n);
291
292                 /*
293                  * The TU size programmed to the HW determines which slots in
294                  * an MTP frame are used for this stream, which needs to match
295                  * the payload size programmed to the first downstream branch
296                  * device's payload table.
297                  *
298                  * Note that atm the payload's PBN value DRM core sends via
299                  * the ALLOCATE_PAYLOAD side-band message matches the payload
300                  * size (which it calculates from the PBN value) it programs
301                  * to the first branch device's payload table. The allocation
302                  * in the payload table could be reduced though (to
303                  * crtc_state->dp_m_n.tu), provided that the driver doesn't
304                  * enable SSC on the corresponding link.
305                  */
306                 pbn.full = dfixed_const(intel_dp_mst_calc_pbn(adjusted_mode->crtc_clock,
307                                                               link_bpp_x16,
308                                                               remote_bw_overhead));
309                 remote_tu = DIV_ROUND_UP(pbn.full, mst_state->pbn_div.full);
310
311                 /*
312                  * Aligning the TUs ensures that symbols consisting of multiple
313                  * (4) symbol cycles don't get split between two consecutive
314                  * MTPs, as required by Bspec.
315                  * TODO: remove the alignment restriction for 128b/132b links
316                  * on some platforms, where Bspec allows this.
317                  */
318                 remote_tu = ALIGN(remote_tu, 4 / crtc_state->lane_count);
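                /*
                 * E.g. with a single lane the TU is rounded up to a multiple
                 * of 4, with 2 lanes to a multiple of 2, and with 4 lanes the
                 * alignment above is a no-op (4 / 4 == 1).
                 */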
319
320                 /*
321                  * Also align PBNs accordingly, since MST core will derive its
322                  * own copy of TU from the PBN in drm_dp_atomic_find_time_slots().
323                  * The above comment about the difference between the PBN
324                  * allocated for the whole path and the TUs allocated for the
325                  * first branch device's link also applies here.
326                  */
327                 pbn.full = remote_tu * mst_state->pbn_div.full;
328                 crtc_state->pbn = dfixed_trunc(pbn);
329
330                 drm_WARN_ON(display->drm, remote_tu < crtc_state->dp_m_n.tu);
331                 crtc_state->dp_m_n.tu = remote_tu;
332
333                 slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst_mgr,
334                                                       connector->port,
335                                                       crtc_state->pbn);
336                 if (slots == -EDEADLK)
337                         return slots;
338
339                 if (slots >= 0) {
340                         drm_WARN_ON(display->drm, slots != crtc_state->dp_m_n.tu);
341
342                         break;
343                 }
344         }
345
346         /* If we failed to find a proper bpp/timeslot combination, return the error */
347         if (ret)
348                 slots = ret;
349
350         if (slots < 0) {
351                 drm_dbg_kms(display->drm, "failed finding vcpi slots:%d\n",
352                             slots);
353         } else {
354                 if (!dsc)
355                         crtc_state->pipe_bpp = bpp;
356                 else
357                         crtc_state->dsc.compressed_bpp_x16 = fxp_q4_from_int(bpp);
358                 drm_dbg_kms(display->drm, "Got %d slots for pipe bpp %d dsc %d\n",
359                             slots, bpp, dsc);
360         }
361
362         return slots;
363 }
364
365 static int mst_stream_compute_link_config(struct intel_dp *intel_dp,
366                                           struct intel_crtc_state *crtc_state,
367                                           struct drm_connector_state *conn_state,
368                                           struct link_config_limits *limits)
369 {
370         int slots = -EINVAL;
371
372         /*
373          * FIXME: allocate the BW according to link_bpp, which in the case of
374          * YUV420 is only half of the pipe bpp value.
375          */
376         slots = mst_stream_find_vcpi_slots_for_bpp(intel_dp, crtc_state,
377                                                    fxp_q4_to_int(limits->link.max_bpp_x16),
378                                                    fxp_q4_to_int(limits->link.min_bpp_x16),
379                                                    limits,
380                                                    conn_state, 2 * 3, false);
381
382         if (slots < 0)
383                 return slots;
384
385         return 0;
386 }
387
388 static int mst_stream_dsc_compute_link_config(struct intel_dp *intel_dp,
389                                               struct intel_crtc_state *crtc_state,
390                                               struct drm_connector_state *conn_state,
391                                               struct link_config_limits *limits)
392 {
393         struct intel_display *display = to_intel_display(intel_dp);
394         struct intel_connector *connector = to_intel_connector(conn_state->connector);
395         struct drm_i915_private *i915 = to_i915(connector->base.dev);
396         int slots = -EINVAL;
397         int i, num_bpc;
398         u8 dsc_bpc[3] = {};
399         int min_bpp, max_bpp, sink_min_bpp, sink_max_bpp;
400         u8 dsc_max_bpc;
401         int min_compressed_bpp, max_compressed_bpp;
402
403         /* Max DSC Input BPC for ICL is 10 and for TGL+ is 12 */
404         if (DISPLAY_VER(display) >= 12)
405                 dsc_max_bpc = min_t(u8, 12, conn_state->max_requested_bpc);
406         else
407                 dsc_max_bpc = min_t(u8, 10, conn_state->max_requested_bpc);
408
409         max_bpp = min_t(u8, dsc_max_bpc * 3, limits->pipe.max_bpp);
410         min_bpp = limits->pipe.min_bpp;
411
412         num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd,
413                                                        dsc_bpc);
414
415         drm_dbg_kms(display->drm, "DSC Source supported min bpp %d max bpp %d\n",
416                     min_bpp, max_bpp);
417
418         sink_max_bpp = dsc_bpc[0] * 3;
419         sink_min_bpp = sink_max_bpp;
420
421         for (i = 1; i < num_bpc; i++) {
422                 if (sink_min_bpp > dsc_bpc[i] * 3)
423                         sink_min_bpp = dsc_bpc[i] * 3;
424                 if (sink_max_bpp < dsc_bpc[i] * 3)
425                         sink_max_bpp = dsc_bpc[i] * 3;
426         }
427
428         drm_dbg_kms(display->drm, "DSC Sink supported min bpp %d max bpp %d\n",
429                     sink_min_bpp, sink_max_bpp);
430
431         if (min_bpp < sink_min_bpp)
432                 min_bpp = sink_min_bpp;
433
434         if (max_bpp > sink_max_bpp)
435                 max_bpp = sink_max_bpp;
436
437         crtc_state->pipe_bpp = max_bpp;
438
439         max_compressed_bpp = intel_dp_dsc_sink_max_compressed_bpp(connector,
440                                                                   crtc_state,
441                                                                   max_bpp / 3);
442         max_compressed_bpp = min(max_compressed_bpp,
443                                  fxp_q4_to_int(limits->link.max_bpp_x16));
444
445         min_compressed_bpp = intel_dp_dsc_sink_min_compressed_bpp(crtc_state);
446         min_compressed_bpp = max(min_compressed_bpp,
447                                  fxp_q4_to_int_roundup(limits->link.min_bpp_x16));
448
449         drm_dbg_kms(display->drm, "DSC Sink supported compressed min bpp %d compressed max bpp %d\n",
450                     min_compressed_bpp, max_compressed_bpp);
451
452         /* Align compressed bpps according to our own constraints */
453         max_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(i915, max_compressed_bpp,
454                                                             crtc_state->pipe_bpp);
455         min_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(i915, min_compressed_bpp,
456                                                             crtc_state->pipe_bpp);
457
458         slots = mst_stream_find_vcpi_slots_for_bpp(intel_dp, crtc_state, max_compressed_bpp,
459                                                    min_compressed_bpp, limits,
460                                                    conn_state, 1, true);
461
462         if (slots < 0)
463                 return slots;
464
465         return 0;
466 }
467
468 static int mst_stream_update_slots(struct intel_dp *intel_dp,
469                                    struct intel_crtc_state *crtc_state,
470                                    struct drm_connector_state *conn_state)
471 {
472         struct intel_display *display = to_intel_display(intel_dp);
473         struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
474         struct drm_dp_mst_topology_state *topology_state;
475         u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ?
476                 DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B;
477
478         topology_state = drm_atomic_get_mst_topology_state(conn_state->state, mgr);
479         if (IS_ERR(topology_state)) {
480                 drm_dbg_kms(display->drm, "slot update failed\n");
481                 return PTR_ERR(topology_state);
482         }
483
484         drm_dp_mst_update_slots(topology_state, link_coding_cap);
485
486         return 0;
487 }
488
489 static int mode_hblank_period_ns(const struct drm_display_mode *mode)
490 {
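        /*
         * With crtc_clock in kHz, scaling the hblank pixel count by
         * NSEC_PER_SEC / 1000 (i.e. 10^6) before the division yields the
         * hblank period in nanoseconds.
         */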
491         return DIV_ROUND_CLOSEST_ULL(mul_u32_u32(mode->htotal - mode->hdisplay,
492                                                  NSEC_PER_SEC / 1000),
493                                      mode->crtc_clock);
494 }
495
496 static bool
497 hblank_expansion_quirk_needs_dsc(const struct intel_connector *connector,
498                                  const struct intel_crtc_state *crtc_state,
499                                  const struct link_config_limits *limits)
500 {
501         const struct drm_display_mode *adjusted_mode =
502                 &crtc_state->hw.adjusted_mode;
503         bool is_uhbr_sink = connector->mst_port &&
504                             drm_dp_128b132b_supported(connector->mst_port->dpcd);
505         int hblank_limit = is_uhbr_sink ? 500 : 300;
506
507         if (!connector->dp.dsc_hblank_expansion_quirk)
508                 return false;
509
510         if (is_uhbr_sink && !drm_dp_is_uhbr_rate(limits->max_rate))
511                 return false;
512
513         if (mode_hblank_period_ns(adjusted_mode) > hblank_limit)
514                 return false;
515
516         if (!intel_dp_mst_dsc_get_slice_count(connector, crtc_state))
517                 return false;
518
519         return true;
520 }
521
522 static bool
523 adjust_limits_for_dsc_hblank_expansion_quirk(const struct intel_connector *connector,
524                                              const struct intel_crtc_state *crtc_state,
525                                              struct link_config_limits *limits,
526                                              bool dsc)
527 {
528         struct intel_display *display = to_intel_display(connector);
529         const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
530         int min_bpp_x16 = limits->link.min_bpp_x16;
531
532         if (!hblank_expansion_quirk_needs_dsc(connector, crtc_state, limits))
533                 return true;
534
535         if (!dsc) {
536                 if (intel_dp_supports_dsc(connector, crtc_state)) {
537                         drm_dbg_kms(display->drm,
538                                     "[CRTC:%d:%s][CONNECTOR:%d:%s] DSC needed by hblank expansion quirk\n",
539                                     crtc->base.base.id, crtc->base.name,
540                                     connector->base.base.id, connector->base.name);
541                         return false;
542                 }
543
544                 drm_dbg_kms(display->drm,
545                             "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to 24 due to hblank expansion quirk\n",
546                             crtc->base.base.id, crtc->base.name,
547                             connector->base.base.id, connector->base.name);
548
549                 if (limits->link.max_bpp_x16 < fxp_q4_from_int(24))
550                         return false;
551
552                 limits->link.min_bpp_x16 = fxp_q4_from_int(24);
553
554                 return true;
555         }
556
557         drm_WARN_ON(display->drm, limits->min_rate != limits->max_rate);
558
559         if (limits->max_rate < 540000)
560                 min_bpp_x16 = fxp_q4_from_int(13);
561         else if (limits->max_rate < 810000)
562                 min_bpp_x16 = fxp_q4_from_int(10);
563
564         if (limits->link.min_bpp_x16 >= min_bpp_x16)
565                 return true;
566
567         drm_dbg_kms(display->drm,
568                     "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to " FXP_Q4_FMT " in DSC mode due to hblank expansion quirk\n",
569                     crtc->base.base.id, crtc->base.name,
570                     connector->base.base.id, connector->base.name,
571                     FXP_Q4_ARGS(min_bpp_x16));
572
573         if (limits->link.max_bpp_x16 < min_bpp_x16)
574                 return false;
575
576         limits->link.min_bpp_x16 = min_bpp_x16;
577
578         return true;
579 }
580
581 static bool
582 mst_stream_compute_config_limits(struct intel_dp *intel_dp,
583                                  const struct intel_connector *connector,
584                                  struct intel_crtc_state *crtc_state,
585                                  bool dsc,
586                                  struct link_config_limits *limits)
587 {
588         /*
589          * For MST we always configure the max link bw - the spec doesn't
590          * seem to suggest we should do otherwise.
591          */
592         limits->min_rate = limits->max_rate =
593                 intel_dp_max_link_rate(intel_dp);
594
595         limits->min_lane_count = limits->max_lane_count =
596                 intel_dp_max_lane_count(intel_dp);
597
598         limits->pipe.min_bpp = intel_dp_min_bpp(crtc_state->output_format);
599         /*
600          * FIXME: If all the streams can't fit into the link with
601          * their current pipe_bpp we should reduce pipe_bpp across
602          * the board until things start to fit. Until then we
603          * limit to <= 8bpc since that's what was hardcoded for all
604          * MST streams previously. This hack should be removed once
605          * we have the proper retry logic in place.
606          */
607         limits->pipe.max_bpp = min(crtc_state->pipe_bpp, 24);
608
609         intel_dp_test_compute_config(intel_dp, crtc_state, limits);
610
611         if (!intel_dp_compute_config_link_bpp_limits(intel_dp,
612                                                      crtc_state,
613                                                      dsc,
614                                                      limits))
615                 return false;
616
617         return adjust_limits_for_dsc_hblank_expansion_quirk(connector,
618                                                             crtc_state,
619                                                             limits,
620                                                             dsc);
621 }
622
623 static int mst_stream_compute_config(struct intel_encoder *encoder,
624                                      struct intel_crtc_state *pipe_config,
625                                      struct drm_connector_state *conn_state)
626 {
627         struct intel_display *display = to_intel_display(encoder);
628         struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
629         struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
630         struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
631         struct intel_dp *intel_dp = to_primary_dp(encoder);
632         struct intel_connector *connector =
633                 to_intel_connector(conn_state->connector);
634         const struct drm_display_mode *adjusted_mode =
635                 &pipe_config->hw.adjusted_mode;
636         struct link_config_limits limits;
637         bool dsc_needed, joiner_needs_dsc;
638         int num_joined_pipes;
639         int ret = 0;
640
641         if (pipe_config->fec_enable &&
642             !intel_dp_supports_fec(intel_dp, connector, pipe_config))
643                 return -EINVAL;
644
645         if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
646                 return -EINVAL;
647
648         num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
649                                                      adjusted_mode->crtc_hdisplay,
650                                                      adjusted_mode->crtc_clock);
651         if (num_joined_pipes > 1)
652                 pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe);
653
654         pipe_config->sink_format = INTEL_OUTPUT_FORMAT_RGB;
655         pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
656         pipe_config->has_pch_encoder = false;
657
658         joiner_needs_dsc = intel_dp_joiner_needs_dsc(dev_priv, num_joined_pipes);
659
660         dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
661                 !mst_stream_compute_config_limits(intel_dp, connector,
662                                                   pipe_config, false, &limits);
663
664         if (!dsc_needed) {
665                 ret = mst_stream_compute_link_config(intel_dp, pipe_config,
666                                                      conn_state, &limits);
667
668                 if (ret == -EDEADLK)
669                         return ret;
670
671                 if (ret)
672                         dsc_needed = true;
673         }
674
675         /* enable compression if the mode doesn't fit available BW */
676         if (dsc_needed) {
677                 drm_dbg_kms(display->drm, "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
678                             str_yes_no(ret), str_yes_no(joiner_needs_dsc),
679                             str_yes_no(intel_dp->force_dsc_en));
680
681                 if (!intel_dp_supports_dsc(connector, pipe_config))
682                         return -EINVAL;
683
684                 if (!mst_stream_compute_config_limits(intel_dp, connector,
685                                                       pipe_config, true,
686                                                       &limits))
687                         return -EINVAL;
688
689                 /*
690                  * FIXME: As bpc is hardcoded to 8, as mentioned above,
691                  * WARN and ignore the debug flag force_dsc_bpc for now.
692                  */
693                 drm_WARN(display->drm, intel_dp->force_dsc_bpc,
694                          "Cannot Force BPC for MST\n");
695                 /*
696                  * Try to get at least some timeslots and then see, if
697                  * we can fit there with DSC.
698                  */
699                 drm_dbg_kms(display->drm, "Trying to find VCPI slots in DSC mode\n");
700
701                 ret = mst_stream_dsc_compute_link_config(intel_dp, pipe_config,
702                                                          conn_state, &limits);
703                 if (ret < 0)
704                         return ret;
705
706                 ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
707                                                   conn_state, &limits,
708                                                   pipe_config->dp_m_n.tu, false);
709         }
710
711         if (ret)
712                 return ret;
713
714         ret = mst_stream_update_slots(intel_dp, pipe_config, conn_state);
715         if (ret)
716                 return ret;
717
718         pipe_config->limited_color_range =
719                 intel_dp_limited_color_range(pipe_config, conn_state);
720
721         if (display->platform.geminilake || display->platform.broxton)
722                 pipe_config->lane_lat_optim_mask =
723                         bxt_dpio_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);
724
725         intel_dp_audio_compute_config(encoder, pipe_config, conn_state);
726
727         intel_ddi_compute_min_voltage_level(pipe_config);
728
729         intel_psr_compute_config(intel_dp, pipe_config, conn_state);
730
731         return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
732                                                         pipe_config);
733 }
734
735 /*
736  * Iterate over all connectors and return a mask of
737  * all CPU transcoders streaming over the same DP link.
738  */
739 static unsigned int
740 intel_dp_mst_transcoder_mask(struct intel_atomic_state *state,
741                              struct intel_dp *mst_port)
742 {
743         struct intel_display *display = to_intel_display(state);
744         const struct intel_digital_connector_state *conn_state;
745         struct intel_connector *connector;
746         u8 transcoders = 0;
747         int i;
748
749         if (DISPLAY_VER(display) < 12)
750                 return 0;
751
752         for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
753                 const struct intel_crtc_state *crtc_state;
754                 struct intel_crtc *crtc;
755
756                 if (connector->mst_port != mst_port || !conn_state->base.crtc)
757                         continue;
758
759                 crtc = to_intel_crtc(conn_state->base.crtc);
760                 crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
761
762                 if (!crtc_state->hw.active)
763                         continue;
764
765                 transcoders |= BIT(crtc_state->cpu_transcoder);
766         }
767
768         return transcoders;
769 }
770
771 static u8 get_pipes_downstream_of_mst_port(struct intel_atomic_state *state,
772                                            struct drm_dp_mst_topology_mgr *mst_mgr,
773                                            struct drm_dp_mst_port *parent_port)
774 {
775         const struct intel_digital_connector_state *conn_state;
776         struct intel_connector *connector;
777         u8 mask = 0;
778         int i;
779
780         for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
781                 if (!conn_state->base.crtc)
782                         continue;
783
784                 if (&connector->mst_port->mst_mgr != mst_mgr)
785                         continue;
786
787                 if (connector->port != parent_port &&
788                     !drm_dp_mst_port_downstream_of_parent(mst_mgr,
789                                                           connector->port,
790                                                           parent_port))
791                         continue;
792
793                 mask |= BIT(to_intel_crtc(conn_state->base.crtc)->pipe);
794         }
795
796         return mask;
797 }
798
799 static int intel_dp_mst_check_fec_change(struct intel_atomic_state *state,
800                                          struct drm_dp_mst_topology_mgr *mst_mgr,
801                                          struct intel_link_bw_limits *limits)
802 {
803         struct intel_display *display = to_intel_display(state);
804         struct intel_crtc *crtc;
805         u8 mst_pipe_mask;
806         u8 fec_pipe_mask = 0;
807         int ret;
808
809         mst_pipe_mask = get_pipes_downstream_of_mst_port(state, mst_mgr, NULL);
810
811         for_each_intel_crtc_in_pipe_mask(display->drm, crtc, mst_pipe_mask) {
812                 struct intel_crtc_state *crtc_state =
813                         intel_atomic_get_new_crtc_state(state, crtc);
814
815                 /* Atomic connector check should've added all the MST CRTCs. */
816                 if (drm_WARN_ON(display->drm, !crtc_state))
817                         return -EINVAL;
818
819                 if (crtc_state->fec_enable)
820                         fec_pipe_mask |= BIT(crtc->pipe);
821         }
822
823         if (!fec_pipe_mask || mst_pipe_mask == fec_pipe_mask)
824                 return 0;
825
826         limits->force_fec_pipes |= mst_pipe_mask;
827
828         ret = intel_modeset_pipes_in_mask_early(state, "MST FEC",
829                                                 mst_pipe_mask);
830
831         return ret ? : -EAGAIN;
832 }
833
834 static int intel_dp_mst_check_bw(struct intel_atomic_state *state,
835                                  struct drm_dp_mst_topology_mgr *mst_mgr,
836                                  struct drm_dp_mst_topology_state *mst_state,
837                                  struct intel_link_bw_limits *limits)
838 {
839         struct drm_dp_mst_port *mst_port;
840         u8 mst_port_pipes;
841         int ret;
842
843         ret = drm_dp_mst_atomic_check_mgr(&state->base, mst_mgr, mst_state, &mst_port);
844         if (ret != -ENOSPC)
845                 return ret;
846
847         mst_port_pipes = get_pipes_downstream_of_mst_port(state, mst_mgr, mst_port);
848
849         ret = intel_link_bw_reduce_bpp(state, limits,
850                                        mst_port_pipes, "MST link BW");
851
852         return ret ? : -EAGAIN;
853 }
854
855 /**
856  * intel_dp_mst_atomic_check_link - check all modeset MST link configuration
857  * @state: intel atomic state
858  * @limits: link BW limits
859  *
860  * Check the link configuration for all modeset MST outputs. If the
861  * configuration is invalid @limits will be updated if possible to
862  * reduce the total BW, after which the configuration for all CRTCs in
863  * @state must be recomputed with the updated @limits.
864  *
865  * Returns:
866  *   - 0 if the configuration is valid
867  *   - %-EAGAIN, if the configuration is invalid and @limits got updated
868  *     with fallback values with which the configuration of all CRTCs in
869  *     @state must be recomputed
870  *   - Other negative error, if the configuration is invalid without a
871  *     fallback possibility, or the check failed for another reason
872  */
873 int intel_dp_mst_atomic_check_link(struct intel_atomic_state *state,
874                                    struct intel_link_bw_limits *limits)
875 {
876         struct drm_dp_mst_topology_mgr *mgr;
877         struct drm_dp_mst_topology_state *mst_state;
878         int ret;
879         int i;
880
881         for_each_new_mst_mgr_in_state(&state->base, mgr, mst_state, i) {
882                 ret = intel_dp_mst_check_fec_change(state, mgr, limits);
883                 if (ret)
884                         return ret;
885
886                 ret = intel_dp_mst_check_bw(state, mgr, mst_state,
887                                             limits);
888                 if (ret)
889                         return ret;
890         }
891
892         return 0;
893 }
894
895 static int mst_stream_compute_config_late(struct intel_encoder *encoder,
896                                           struct intel_crtc_state *crtc_state,
897                                           struct drm_connector_state *conn_state)
898 {
899         struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
900         struct intel_dp *intel_dp = to_primary_dp(encoder);
901
902         /* lowest numbered transcoder will be designated master */
903         crtc_state->mst_master_transcoder =
904                 ffs(intel_dp_mst_transcoder_mask(state, intel_dp)) - 1;
905
906         return 0;
907 }
908
909 /*
910  * If one of the connectors in an MST stream needs a modeset, mark all CRTCs
911  * that share the same MST stream as mode changed;
912  * intel_modeset_pipe_config()+intel_crtc_check_fastset() will take care of
913  * doing a fastset when possible.
914  *
915  * On TGL+ this is required since each stream goes through a master transcoder,
916  * so if the master transcoder needs a modeset, all other streams in the
917  * topology need a modeset. All platforms need to add the atomic state
918  * for all streams in the topology, since a modeset on one may require
919  * changing the MST link BW usage of the others, which in turn needs a
920  * recomputation of the corresponding CRTC states.
921  */
922 static int
923 mst_connector_atomic_topology_check(struct intel_connector *connector,
924                                     struct intel_atomic_state *state)
925 {
926         struct intel_display *display = to_intel_display(connector);
927         struct drm_connector_list_iter connector_list_iter;
928         struct intel_connector *connector_iter;
929         int ret = 0;
930
931         if (!intel_connector_needs_modeset(state, &connector->base))
932                 return 0;
933
934         drm_connector_list_iter_begin(display->drm, &connector_list_iter);
935         for_each_intel_connector_iter(connector_iter, &connector_list_iter) {
936                 struct intel_digital_connector_state *conn_iter_state;
937                 struct intel_crtc_state *crtc_state;
938                 struct intel_crtc *crtc;
939
940                 if (connector_iter->mst_port != connector->mst_port ||
941                     connector_iter == connector)
942                         continue;
943
944                 conn_iter_state = intel_atomic_get_digital_connector_state(state,
945                                                                            connector_iter);
946                 if (IS_ERR(conn_iter_state)) {
947                         ret = PTR_ERR(conn_iter_state);
948                         break;
949                 }
950
951                 if (!conn_iter_state->base.crtc)
952                         continue;
953
954                 crtc = to_intel_crtc(conn_iter_state->base.crtc);
955                 crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
956                 if (IS_ERR(crtc_state)) {
957                         ret = PTR_ERR(crtc_state);
958                         break;
959                 }
960
961                 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
962                 if (ret)
963                         break;
964                 crtc_state->uapi.mode_changed = true;
965         }
966         drm_connector_list_iter_end(&connector_list_iter);
967
968         return ret;
969 }
970
971 static int
972 mst_connector_atomic_check(struct drm_connector *connector,
973                            struct drm_atomic_state *_state)
974 {
975         struct intel_atomic_state *state = to_intel_atomic_state(_state);
976         struct intel_connector *intel_connector =
977                 to_intel_connector(connector);
978         int ret;
979
980         ret = intel_digital_connector_atomic_check(connector, &state->base);
981         if (ret)
982                 return ret;
983
984         ret = mst_connector_atomic_topology_check(intel_connector, state);
985         if (ret)
986                 return ret;
987
988         if (intel_connector_needs_modeset(state, connector)) {
989                 ret = intel_dp_tunnel_atomic_check_state(state,
990                                                          intel_connector->mst_port,
991                                                          intel_connector);
992                 if (ret)
993                         return ret;
994         }
995
996         return drm_dp_atomic_release_time_slots(&state->base,
997                                                 &intel_connector->mst_port->mst_mgr,
998                                                 intel_connector->port);
999 }
1000
1001 static void mst_stream_disable(struct intel_atomic_state *state,
1002                                struct intel_encoder *encoder,
1003                                const struct intel_crtc_state *old_crtc_state,
1004                                const struct drm_connector_state *old_conn_state)
1005 {
1006         struct intel_display *display = to_intel_display(state);
1007         struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
1008         struct intel_dp *intel_dp = to_primary_dp(encoder);
1009         struct intel_connector *connector =
1010                 to_intel_connector(old_conn_state->connector);
1011
1012         drm_dbg_kms(display->drm, "active links %d\n",
1013                     intel_dp->active_mst_links);
1014
1015         if (intel_dp->active_mst_links == 1)
1016                 intel_dp->link_trained = false;
1017
1018         intel_hdcp_disable(intel_mst->connector);
1019
1020         intel_dp_sink_disable_decompression(state, connector, old_crtc_state);
1021 }
1022
1023 static void mst_stream_post_disable(struct intel_atomic_state *state,
1024                                     struct intel_encoder *encoder,
1025                                     const struct intel_crtc_state *old_crtc_state,
1026                                     const struct drm_connector_state *old_conn_state)
1027 {
1028         struct intel_display *display = to_intel_display(encoder);
1029         struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
1030         struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1031         struct intel_dp *intel_dp = to_primary_dp(encoder);
1032         struct intel_connector *connector =
1033                 to_intel_connector(old_conn_state->connector);
1034         struct drm_dp_mst_topology_state *old_mst_state =
1035                 drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst_mgr);
1036         struct drm_dp_mst_topology_state *new_mst_state =
1037                 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
1038         const struct drm_dp_mst_atomic_payload *old_payload =
1039                 drm_atomic_get_mst_payload_state(old_mst_state, connector->port);
1040         struct drm_dp_mst_atomic_payload *new_payload =
1041                 drm_atomic_get_mst_payload_state(new_mst_state, connector->port);
1042         struct intel_crtc *pipe_crtc;
1043         bool last_mst_stream;
1044         int i;
1045
1046         intel_dp->active_mst_links--;
1047         last_mst_stream = intel_dp->active_mst_links == 0;
1048         drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && last_mst_stream &&
1049                     !intel_dp_mst_is_master_trans(old_crtc_state));
1050
1051         for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
1052                 const struct intel_crtc_state *old_pipe_crtc_state =
1053                         intel_atomic_get_old_crtc_state(state, pipe_crtc);
1054
1055                 intel_crtc_vblank_off(old_pipe_crtc_state);
1056         }
1057
1058         intel_disable_transcoder(old_crtc_state);
1059
1060         drm_dp_remove_payload_part1(&intel_dp->mst_mgr, new_mst_state, new_payload);
1061
1062         intel_ddi_clear_act_sent(encoder, old_crtc_state);
1063
1064         intel_de_rmw(display,
1065                      TRANS_DDI_FUNC_CTL(display, old_crtc_state->cpu_transcoder),
1066                      TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0);
1067
1068         intel_ddi_wait_for_act_sent(encoder, old_crtc_state);
1069         drm_dp_check_act_status(&intel_dp->mst_mgr);
1070
1071         drm_dp_remove_payload_part2(&intel_dp->mst_mgr, new_mst_state,
1072                                     old_payload, new_payload);
1073
1074         intel_ddi_disable_transcoder_func(old_crtc_state);
1075
1076         for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
1077                 const struct intel_crtc_state *old_pipe_crtc_state =
1078                         intel_atomic_get_old_crtc_state(state, pipe_crtc);
1079
1080                 intel_dsc_disable(old_pipe_crtc_state);
1081
1082                 if (DISPLAY_VER(display) >= 9)
1083                         skl_scaler_disable(old_pipe_crtc_state);
1084                 else
1085                         ilk_pfit_disable(old_pipe_crtc_state);
1086         }
1087
1088         /*
1089          * Power down mst path before disabling the port, otherwise we end
1090          * up getting interrupts from the sink upon detecting link loss.
1091          */
1092         drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port,
1093                                      false);
1094
1095         /*
1096          * BSpec 4287: disable DIP after the transcoder is disabled and before
1097          * the transcoder clock select is set to none.
1098          */
1099         intel_dp_set_infoframes(primary_encoder, false, old_crtc_state, NULL);
1100         /*
1101          * From TGL spec: "If multi-stream slave transcoder: Configure
1102          * Transcoder Clock Select to direct no clock to the transcoder"
1103          *
1104          * From older GENs spec: "Configure Transcoder Clock Select to direct
1105          * no clock to the transcoder"
1106          */
1107         if (DISPLAY_VER(display) < 12 || !last_mst_stream)
1108                 intel_ddi_disable_transcoder_clock(old_crtc_state);
1109
1110
1111         intel_mst->connector = NULL;
1112         if (last_mst_stream)
1113                 primary_encoder->post_disable(state, primary_encoder,
1114                                               old_crtc_state, NULL);
1115
1116         drm_dbg_kms(display->drm, "active links %d\n",
1117                     intel_dp->active_mst_links);
1118 }
1119
1120 static void mst_stream_post_pll_disable(struct intel_atomic_state *state,
1121                                         struct intel_encoder *encoder,
1122                                         const struct intel_crtc_state *old_crtc_state,
1123                                         const struct drm_connector_state *old_conn_state)
1124 {
1125         struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1126         struct intel_dp *intel_dp = to_primary_dp(encoder);
1127
1128         if (intel_dp->active_mst_links == 0 &&
1129             primary_encoder->post_pll_disable)
1130                 primary_encoder->post_pll_disable(state, primary_encoder, old_crtc_state, old_conn_state);
1131 }
1132
1133 static void mst_stream_pre_pll_enable(struct intel_atomic_state *state,
1134                                       struct intel_encoder *encoder,
1135                                       const struct intel_crtc_state *pipe_config,
1136                                       const struct drm_connector_state *conn_state)
1137 {
1138         struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1139         struct intel_dp *intel_dp = to_primary_dp(encoder);
1140
1141         if (intel_dp->active_mst_links == 0)
1142                 primary_encoder->pre_pll_enable(state, primary_encoder,
1143                                                 pipe_config, NULL);
1144         else
1145                 /*
1146                  * The port PLL state needs to get updated for secondary
1147                  * streams as for the primary stream.
1148                  */
1149                 intel_ddi_update_active_dpll(state, primary_encoder,
1150                                              to_intel_crtc(pipe_config->uapi.crtc));
1151 }
1152
1153 static bool intel_mst_probed_link_params_valid(struct intel_dp *intel_dp,
1154                                                int link_rate, int lane_count)
1155 {
1156         return intel_dp->link.mst_probed_rate == link_rate &&
1157                 intel_dp->link.mst_probed_lane_count == lane_count;
1158 }
1159
1160 static void intel_mst_set_probed_link_params(struct intel_dp *intel_dp,
1161                                              int link_rate, int lane_count)
1162 {
1163         intel_dp->link.mst_probed_rate = link_rate;
1164         intel_dp->link.mst_probed_lane_count = lane_count;
1165 }
1166
1167 static void intel_mst_reprobe_topology(struct intel_dp *intel_dp,
1168                                        const struct intel_crtc_state *crtc_state)
1169 {
1170         if (intel_mst_probed_link_params_valid(intel_dp,
1171                                                crtc_state->port_clock, crtc_state->lane_count))
1172                 return;
1173
1174         drm_dp_mst_topology_queue_probe(&intel_dp->mst_mgr);
1175
1176         intel_mst_set_probed_link_params(intel_dp,
1177                                          crtc_state->port_clock, crtc_state->lane_count);
1178 }
1179
1180 static void mst_stream_pre_enable(struct intel_atomic_state *state,
1181                                   struct intel_encoder *encoder,
1182                                   const struct intel_crtc_state *pipe_config,
1183                                   const struct drm_connector_state *conn_state)
1184 {
1185         struct intel_display *display = to_intel_display(state);
1186         struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
1187         struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1188         struct intel_dp *intel_dp = to_primary_dp(encoder);
1189         struct intel_connector *connector =
1190                 to_intel_connector(conn_state->connector);
1191         struct drm_dp_mst_topology_state *mst_state =
1192                 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
1193         int ret;
1194         bool first_mst_stream;
1195
1196         /* MST encoders are bound to a crtc, not to a connector;
1197          * force the mapping here for get_hw_state.
1198          */
1199         connector->encoder = encoder;
1200         intel_mst->connector = connector;
1201         first_mst_stream = intel_dp->active_mst_links == 0;
1202         drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && first_mst_stream &&
1203                     !intel_dp_mst_is_master_trans(pipe_config));
1204
1205         drm_dbg_kms(display->drm, "active links %d\n",
1206                     intel_dp->active_mst_links);
1207
1208         if (first_mst_stream)
1209                 intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
1210
1211         drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port, true);
1212
1213         intel_dp_sink_enable_decompression(state, connector, pipe_config);
1214
1215         if (first_mst_stream) {
1216                 primary_encoder->pre_enable(state, primary_encoder,
1217                                             pipe_config, NULL);
1218
1219                 intel_mst_reprobe_topology(intel_dp, pipe_config);
1220         }
1221
1222         intel_dp->active_mst_links++;
1223
1224         ret = drm_dp_add_payload_part1(&intel_dp->mst_mgr, mst_state,
1225                                        drm_atomic_get_mst_payload_state(mst_state, connector->port));
1226         if (ret < 0)
1227                 intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);
1228
1229         /*
1230          * Before Gen 12 this is not done as part of
1231          * primary_encoder->pre_enable() and should be done here. For
1232          * Gen 12+ the step in which this should be done is different for the
1233          * first MST stream, so it's done on the DDI for the first stream and
1234          * here for the following ones.
1235          */
1236         if (DISPLAY_VER(display) < 12 || !first_mst_stream)
1237                 intel_ddi_enable_transcoder_clock(encoder, pipe_config);
1238
1239         if (DISPLAY_VER(display) >= 13 && !first_mst_stream)
1240                 intel_ddi_config_transcoder_func(encoder, pipe_config);
1241
1242         intel_dsc_dp_pps_write(primary_encoder, pipe_config);
1243         intel_ddi_set_dp_msa(pipe_config, conn_state);
1244 }
1245
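     /*
      * Wa_14013163432 / Wa_14014143976: adjust the BS jitter and short
      * HBLANK workaround chicken bits on ADL-P for FEC/UHBR streams.
      */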
1246 static void enable_bs_jitter_was(const struct intel_crtc_state *crtc_state)
1247 {
1248         struct intel_display *display = to_intel_display(crtc_state);
1249         struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
1250         u32 clear = 0;
1251         u32 set = 0;
1252
1253         if (!IS_ALDERLAKE_P(i915))
1254                 return;
1255
1256         if (!IS_DISPLAY_STEP(display, STEP_D0, STEP_FOREVER))
1257                 return;
1258
1259         /* Wa_14013163432:adlp */
1260         if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
1261                 set |= DP_MST_FEC_BS_JITTER_WA(crtc_state->cpu_transcoder);
1262
1263         /* Wa_14014143976:adlp */
1264         if (IS_DISPLAY_STEP(display, STEP_E0, STEP_FOREVER)) {
1265                 if (intel_dp_is_uhbr(crtc_state))
1266                         set |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);
1267                 else if (crtc_state->fec_enable)
1268                         clear |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);
1269
1270                 if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
1271                         set |= DP_MST_DPT_DPTP_ALIGN_WA(crtc_state->cpu_transcoder);
1272         }
1273
1274         if (!clear && !set)
1275                 return;
1276
1277         intel_de_rmw(display, CHICKEN_MISC_3, clear, set);
1278 }
1279
1280 static void mst_stream_enable(struct intel_atomic_state *state,
1281                               struct intel_encoder *encoder,
1282                               const struct intel_crtc_state *pipe_config,
1283                               const struct drm_connector_state *conn_state)
1284 {
1285         struct intel_display *display = to_intel_display(encoder);
1286         struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1287         struct intel_dp *intel_dp = to_primary_dp(encoder);
1288         struct intel_connector *connector = to_intel_connector(conn_state->connector);
1289         struct drm_dp_mst_topology_state *mst_state =
1290                 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
1291         enum transcoder trans = pipe_config->cpu_transcoder;
1292         bool first_mst_stream = intel_dp->active_mst_links == 1;
1293         struct intel_crtc *pipe_crtc;
1294         int ret, i;
1295
1296         drm_WARN_ON(display->drm, pipe_config->has_pch_encoder);
1297
1298         if (intel_dp_is_uhbr(pipe_config)) {
1299                 const struct drm_display_mode *adjusted_mode =
1300                         &pipe_config->hw.adjusted_mode;
1301                 u64 crtc_clock_hz = KHz(adjusted_mode->crtc_clock);
1302
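                     /*
                      * The pixel clock in Hz is split across the two VFREQ
                      * registers: VFREQHIGH takes the bits above bit 23,
                      * VFREQLOW the low 24 bits. E.g. a (hypothetical)
                      * 594000 kHz mode is 594000000 Hz = 0x2367B880, giving
                      * VFREQHIGH = 0x23 and VFREQLOW = 0x67B880.
                      */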
1303                 intel_de_write(display, TRANS_DP2_VFREQHIGH(pipe_config->cpu_transcoder),
1304                                TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz >> 24));
1305                 intel_de_write(display, TRANS_DP2_VFREQLOW(pipe_config->cpu_transcoder),
1306                                TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff));
1307         }
1308
1309         enable_bs_jitter_was(pipe_config);
1310
1311         intel_ddi_enable_transcoder_func(encoder, pipe_config);
1312
1313         intel_ddi_clear_act_sent(encoder, pipe_config);
1314
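             /*
              * Setting TRANS_DDI_DP_VC_PAYLOAD_ALLOC starts the VC payload
              * time slot allocation; the resulting ACT (Allocation Change
              * Trigger) sequence is waited for below.
              */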
1315         intel_de_rmw(display, TRANS_DDI_FUNC_CTL(display, trans), 0,
1316                      TRANS_DDI_DP_VC_PAYLOAD_ALLOC);
1317
1318         drm_dbg_kms(display->drm, "active links %d\n",
1319                     intel_dp->active_mst_links);
1320
1321         intel_ddi_wait_for_act_sent(encoder, pipe_config);
1322         drm_dp_check_act_status(&intel_dp->mst_mgr);
1323
1324         if (first_mst_stream)
1325                 intel_ddi_wait_for_fec_status(encoder, pipe_config, true);
1326
1327         ret = drm_dp_add_payload_part2(&intel_dp->mst_mgr,
1328                                        drm_atomic_get_mst_payload_state(mst_state,
1329                                                                         connector->port));
1330         if (ret < 0)
1331                 intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);
1332
1333         if (DISPLAY_VER(display) >= 12)
1334                 intel_de_rmw(display, CHICKEN_TRANS(display, trans),
1335                              FECSTALL_DIS_DPTSTREAM_DPTTG,
1336                              pipe_config->fec_enable ? FECSTALL_DIS_DPTSTREAM_DPTTG : 0);
1337
1338         intel_audio_sdp_split_update(pipe_config);
1339
1340         intel_enable_transcoder(pipe_config);
1341
1342         for_each_pipe_crtc_modeset_enable(display, pipe_crtc, pipe_config, i) {
1343                 const struct intel_crtc_state *pipe_crtc_state =
1344                         intel_atomic_get_new_crtc_state(state, pipe_crtc);
1345
1346                 intel_crtc_vblank_on(pipe_crtc_state);
1347         }
1348
1349         intel_hdcp_enable(state, encoder, pipe_config, conn_state);
1350 }
1351
1352 static bool mst_stream_get_hw_state(struct intel_encoder *encoder,
1353                                     enum pipe *pipe)
1354 {
1355         struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
1356         *pipe = intel_mst->pipe;
1357         if (intel_mst->connector)
1358                 return true;
1359         return false;
1360 }
1361
1362 static void mst_stream_get_config(struct intel_encoder *encoder,
1363                                   struct intel_crtc_state *pipe_config)
1364 {
1365         struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1366
1367         primary_encoder->get_config(primary_encoder, pipe_config);
1368 }
1369
1370 static bool mst_stream_initial_fastset_check(struct intel_encoder *encoder,
1371                                              struct intel_crtc_state *crtc_state)
1372 {
1373         struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1374
1375         return intel_dp_initial_fastset_check(primary_encoder, crtc_state);
1376 }
1377
1378 static int mst_connector_get_ddc_modes(struct drm_connector *connector)
1379 {
1380         struct intel_display *display = to_intel_display(connector->dev);
1381         struct intel_connector *intel_connector = to_intel_connector(connector);
1382         struct intel_dp *intel_dp = intel_connector->mst_port;
1383         const struct drm_edid *drm_edid;
1384         int ret;
1385
1386         if (drm_connector_is_unregistered(connector))
1387                 return intel_connector_update_modes(connector, NULL);
1388
1389         if (!intel_display_driver_check_access(display))
1390                 return drm_edid_connector_add_modes(connector);
1391
1392         drm_edid = drm_dp_mst_edid_read(connector, &intel_dp->mst_mgr, intel_connector->port);
1393
1394         ret = intel_connector_update_modes(connector, drm_edid);
1395
1396         drm_edid_free(drm_edid);
1397
1398         return ret;
1399 }
1400
1401 static int
1402 mst_connector_late_register(struct drm_connector *connector)
1403 {
1404         struct intel_connector *intel_connector = to_intel_connector(connector);
1405         int ret;
1406
1407         ret = drm_dp_mst_connector_late_register(connector,
1408                                                  intel_connector->port);
1409         if (ret < 0)
1410                 return ret;
1411
1412         ret = intel_connector_register(connector);
1413         if (ret < 0)
1414                 drm_dp_mst_connector_early_unregister(connector,
1415                                                       intel_connector->port);
1416
1417         return ret;
1418 }
1419
1420 static void
1421 mst_connector_early_unregister(struct drm_connector *connector)
1422 {
1423         struct intel_connector *intel_connector = to_intel_connector(connector);
1424
1425         intel_connector_unregister(connector);
1426         drm_dp_mst_connector_early_unregister(connector,
1427                                               intel_connector->port);
1428 }
1429
1430 static const struct drm_connector_funcs mst_connector_funcs = {
1431         .fill_modes = drm_helper_probe_single_connector_modes,
1432         .atomic_get_property = intel_digital_connector_atomic_get_property,
1433         .atomic_set_property = intel_digital_connector_atomic_set_property,
1434         .late_register = mst_connector_late_register,
1435         .early_unregister = mst_connector_early_unregister,
1436         .destroy = intel_connector_destroy,
1437         .atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
1438         .atomic_duplicate_state = intel_digital_connector_duplicate_state,
1439 };
1440
1441 static int mst_connector_get_modes(struct drm_connector *connector)
1442 {
1443         return mst_connector_get_ddc_modes(connector);
1444 }
1445
1446 static int
1447 mst_connector_mode_valid_ctx(struct drm_connector *connector,
1448                              struct drm_display_mode *mode,
1449                              struct drm_modeset_acquire_ctx *ctx,
1450                              enum drm_mode_status *status)
1451 {
1452         struct intel_display *display = to_intel_display(connector->dev);
1453         struct drm_i915_private *dev_priv = to_i915(connector->dev);
1454         struct intel_connector *intel_connector = to_intel_connector(connector);
1455         struct intel_dp *intel_dp = intel_connector->mst_port;
1456         struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
1457         struct drm_dp_mst_port *port = intel_connector->port;
1458         const int min_bpp = 18;
1459         int max_dotclk = display->cdclk.max_dotclk_freq;
1460         int max_rate, mode_rate, max_lanes, max_link_clock;
1461         int ret;
1462         bool dsc = false;
1463         u16 dsc_max_compressed_bpp = 0;
1464         u8 dsc_slice_count = 0;
1465         int target_clock = mode->clock;
1466         int num_joined_pipes;
1467
1468         if (drm_connector_is_unregistered(connector)) {
1469                 *status = MODE_ERROR;
1470                 return 0;
1471         }
1472
1473         *status = intel_cpu_transcoder_mode_valid(dev_priv, mode);
1474         if (*status != MODE_OK)
1475                 return 0;
1476
1477         if (mode->flags & DRM_MODE_FLAG_DBLCLK) {
1478                 *status = MODE_H_ILLEGAL;
1479                 return 0;
1480         }
1481
1482         if (mode->clock < 10000) {
1483                 *status = MODE_CLOCK_LOW;
1484                 return 0;
1485         }
1486
1487         max_link_clock = intel_dp_max_link_rate(intel_dp);
1488         max_lanes = intel_dp_max_lane_count(intel_dp);
1489
1490         max_rate = intel_dp_max_link_data_rate(intel_dp,
1491                                                max_link_clock, max_lanes);
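             /*
              * Link bandwidth required by the mode at the minimum of 6 bpc
              * (18 bpp), used for the bandwidth checks below.
              */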
1492         mode_rate = intel_dp_link_required(mode->clock, min_bpp);
1493
1494         /*
1495          * TODO:
1496          * - Also check if compression would allow for the mode
1497          * - Calculate the overhead using drm_dp_bw_overhead() /
1498          *   drm_dp_bw_channel_coding_efficiency(), similarly to the
1499          *   compute config code, as drm_dp_calc_pbn_mode() doesn't
1500          *   account for all the overheads.
1501          * - Check here and during compute config the BW reported by
1502          *   DFP_Link_Available_Payload_Bandwidth_Number (or the
1503          *   corresponding link capabilities of the sink) in case the
1504          *   stream is uncompressed for it by the last branch device.
1505          */
1506         num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, intel_connector,
1507                                                      mode->hdisplay, target_clock);
1508         max_dotclk *= num_joined_pipes;
1509
1510         ret = drm_modeset_lock(&mgr->base.lock, ctx);
1511         if (ret)
1512                 return ret;
1513
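             /*
              * drm_dp_calc_pbn_mode() takes the bpp in 1/16 bpp units, hence
              * min_bpp << 4; the result is checked against the PBN available
              * along the path to this port (full_pbn).
              */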
1514         if (mode_rate > max_rate || mode->clock > max_dotclk ||
1515             drm_dp_calc_pbn_mode(mode->clock, min_bpp << 4) > port->full_pbn) {
1516                 *status = MODE_CLOCK_HIGH;
1517                 return 0;
1518         }
1519
1520         if (intel_dp_has_dsc(intel_connector)) {
1521                 /*
1522                  * TBD: pass the connector BPC;
1523                  * for now use U8_MAX so that the max BPC supported on the platform is picked
1524                  */
1525                 int pipe_bpp = intel_dp_dsc_compute_max_bpp(intel_connector, U8_MAX);
1526
1527                 if (drm_dp_sink_supports_fec(intel_connector->dp.fec_capability)) {
1528                         dsc_max_compressed_bpp =
1529                                 intel_dp_dsc_get_max_compressed_bpp(dev_priv,
1530                                                                     max_link_clock,
1531                                                                     max_lanes,
1532                                                                     target_clock,
1533                                                                     mode->hdisplay,
1534                                                                     num_joined_pipes,
1535                                                                     INTEL_OUTPUT_FORMAT_RGB,
1536                                                                     pipe_bpp, 64);
1537                         dsc_slice_count =
1538                                 intel_dp_dsc_get_slice_count(intel_connector,
1539                                                              target_clock,
1540                                                              mode->hdisplay,
1541                                                              num_joined_pipes);
1542                 }
1543
1544                 dsc = dsc_max_compressed_bpp && dsc_slice_count;
1545         }
1546
1547         if (intel_dp_joiner_needs_dsc(dev_priv, num_joined_pipes) && !dsc) {
1548                 *status = MODE_CLOCK_HIGH;
1549                 return 0;
1550         }
1551
1552         if (mode_rate > max_rate && !dsc) {
1553                 *status = MODE_CLOCK_HIGH;
1554                 return 0;
1555         }
1556
1557         *status = intel_mode_valid_max_plane_size(dev_priv, mode, num_joined_pipes);
1558         return 0;
1559 }
1560
1561 static struct drm_encoder *
1562 mst_connector_atomic_best_encoder(struct drm_connector *connector,
1563                                   struct drm_atomic_state *state)
1564 {
1565         struct drm_connector_state *connector_state = drm_atomic_get_new_connector_state(state,
1566                                                                                          connector);
1567         struct intel_connector *intel_connector = to_intel_connector(connector);
1568         struct intel_dp *intel_dp = intel_connector->mst_port;
1569         struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc);
1570
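             /*
              * There is one fake MST stream encoder per pipe; return the one
              * matching the pipe of the CRTC in this connector state.
              */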
1571         return &intel_dp->mst_encoders[crtc->pipe]->base.base;
1572 }
1573
1574 static int
1575 mst_connector_detect_ctx(struct drm_connector *connector,
1576                          struct drm_modeset_acquire_ctx *ctx, bool force)
1577 {
1578         struct intel_display *display = to_intel_display(connector->dev);
1579         struct intel_connector *intel_connector = to_intel_connector(connector);
1580         struct intel_dp *intel_dp = intel_connector->mst_port;
1581
1582         if (!intel_display_device_enabled(display))
1583                 return connector_status_disconnected;
1584
1585         if (drm_connector_is_unregistered(connector))
1586                 return connector_status_disconnected;
1587
1588         if (!intel_display_driver_check_access(display))
1589                 return connector->status;
1590
1591         intel_dp_flush_connector_commits(intel_connector);
1592
1593         return drm_dp_mst_detect_port(connector, ctx, &intel_dp->mst_mgr,
1594                                       intel_connector->port);
1595 }
1596
1597 static const struct drm_connector_helper_funcs mst_connector_helper_funcs = {
1598         .get_modes = mst_connector_get_modes,
1599         .mode_valid_ctx = mst_connector_mode_valid_ctx,
1600         .atomic_best_encoder = mst_connector_atomic_best_encoder,
1601         .atomic_check = mst_connector_atomic_check,
1602         .detect_ctx = mst_connector_detect_ctx,
1603 };
1604
1605 static void mst_stream_encoder_destroy(struct drm_encoder *encoder)
1606 {
1607         struct intel_dp_mst_encoder *intel_mst = enc_to_mst(to_intel_encoder(encoder));
1608
1609         drm_encoder_cleanup(encoder);
1610         kfree(intel_mst);
1611 }
1612
1613 static const struct drm_encoder_funcs mst_stream_encoder_funcs = {
1614         .destroy = mst_stream_encoder_destroy,
1615 };
1616
1617 static bool mst_connector_get_hw_state(struct intel_connector *connector)
1618 {
1619         /* This is the MST stream encoder set in ->pre_enable, if any */
1620         struct intel_encoder *encoder = intel_attached_encoder(connector);
1621         enum pipe pipe;
1622
1623         if (!encoder || !connector->base.state->crtc)
1624                 return false;
1625
1626         return encoder->get_hw_state(encoder, &pipe);
1627 }
1628
1629 static int mst_topology_add_connector_properties(struct intel_dp *intel_dp,
1630                                                  struct drm_connector *connector,
1631                                                  const char *pathprop)
1632 {
1633         struct intel_display *display = to_intel_display(intel_dp);
1634
1635         drm_object_attach_property(&connector->base,
1636                                    display->drm->mode_config.path_property, 0);
1637         drm_object_attach_property(&connector->base,
1638                                    display->drm->mode_config.tile_property, 0);
1639
1640         intel_attach_force_audio_property(connector);
1641         intel_attach_broadcast_rgb_property(connector);
1642
1643         /*
1644          * Reuse the prop from the SST connector because we're
1645          * not allowed to create new props after device registration.
1646          */
1647         connector->max_bpc_property =
1648                 intel_dp->attached_connector->base.max_bpc_property;
1649         if (connector->max_bpc_property)
1650                 drm_connector_attach_max_bpc_property(connector, 6, 12);
1651
1652         return drm_connector_set_path_property(connector, pathprop);
1653 }
1654
1655 static void
1656 intel_dp_mst_read_decompression_port_dsc_caps(struct intel_dp *intel_dp,
1657                                               struct intel_connector *connector)
1658 {
1659         u8 dpcd_caps[DP_RECEIVER_CAP_SIZE];
1660
1661         if (!connector->dp.dsc_decompression_aux)
1662                 return;
1663
1664         if (drm_dp_read_dpcd_caps(connector->dp.dsc_decompression_aux, dpcd_caps) < 0)
1665                 return;
1666
1667         intel_dp_get_dsc_sink_cap(dpcd_caps[DP_DPCD_REV], connector);
1668 }
1669
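     /*
      * Detect branch devices that need DSC enabled to work around issues on
      * modes with a short HBLANK
      * (DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC), identified via the
      * branch device's OUI / device descriptor.
      */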
1670 static bool detect_dsc_hblank_expansion_quirk(const struct intel_connector *connector)
1671 {
1672         struct intel_display *display = to_intel_display(connector);
1673         struct drm_dp_aux *aux = connector->dp.dsc_decompression_aux;
1674         struct drm_dp_desc desc;
1675         u8 dpcd[DP_RECEIVER_CAP_SIZE];
1676
1677         if (!aux)
1678                 return false;
1679
1680         /*
1681          * A logical port's OUI (at least for affected sinks) is all 0, so
1682          * instead of that the parent port's OUI is used for identification.
1683          */
1684         if (drm_dp_mst_port_is_logical(connector->port)) {
1685                 aux = drm_dp_mst_aux_for_parent(connector->port);
1686                 if (!aux)
1687                         aux = &connector->mst_port->aux;
1688         }
1689
1690         if (drm_dp_read_dpcd_caps(aux, dpcd) < 0)
1691                 return false;
1692
1693         if (drm_dp_read_desc(aux, &desc, drm_dp_is_branch(dpcd)) < 0)
1694                 return false;
1695
1696         if (!drm_dp_has_quirk(&desc,
1697                               DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC))
1698                 return false;
1699
1700         /*
1701          * UHBR (MST sink) devices requiring this quirk don't advertise the
1702          * HBLANK expansion support. Presumably they either perform HBLANK
1703          * expansion internally, or are affected by this issue on modes with a
1704          * short HBLANK for other reasons.
1705          */
1706         if (!drm_dp_128b132b_supported(dpcd) &&
1707             !(dpcd[DP_RECEIVE_PORT_0_CAP_0] & DP_HBLANK_EXPANSION_CAPABLE))
1708                 return false;
1709
1710         drm_dbg_kms(display->drm,
1711                     "[CONNECTOR:%d:%s] DSC HBLANK expansion quirk detected\n",
1712                     connector->base.base.id, connector->base.name);
1713
1714         return true;
1715 }
1716
1717 static struct drm_connector *
1718 mst_topology_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1719                            struct drm_dp_mst_port *port,
1720                            const char *pathprop)
1721 {
1722         struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);
1723         struct intel_display *display = to_intel_display(intel_dp);
1724         struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1725         struct intel_connector *intel_connector;
1726         struct drm_connector *connector;
1727         enum pipe pipe;
1728         int ret;
1729
1730         intel_connector = intel_connector_alloc();
1731         if (!intel_connector)
1732                 return NULL;
1733
1734         intel_connector->get_hw_state = mst_connector_get_hw_state;
1735         intel_connector->sync_state = intel_dp_connector_sync_state;
1736         intel_connector->mst_port = intel_dp;
1737         intel_connector->port = port;
1738         drm_dp_mst_get_port_malloc(port);
1739
1740         intel_dp_init_modeset_retry_work(intel_connector);
1741
1742         /*
1743          * TODO: The following drm_connector specific initialization belongs
1744          * to DRM core; however, at the moment it happens too late in
1745          * drm_connector_init(). That function will also expose the connector
1746          * to in-kernel users, so it can't be called until the connector is
1747          * sufficiently initialized; init here the device pointer used by
1748          * the DSC setup below, until a fix moves this to DRM core.
1749          */
1750         intel_connector->base.dev = mgr->dev;
1751
1752         intel_connector->dp.dsc_decompression_aux = drm_dp_mst_dsc_aux_for_port(port);
1753         intel_dp_mst_read_decompression_port_dsc_caps(intel_dp, intel_connector);
1754         intel_connector->dp.dsc_hblank_expansion_quirk =
1755                 detect_dsc_hblank_expansion_quirk(intel_connector);
1756
1757         connector = &intel_connector->base;
1758         ret = drm_connector_init(display->drm, connector, &mst_connector_funcs,
1759                                  DRM_MODE_CONNECTOR_DisplayPort);
1760         if (ret) {
1761                 drm_dp_mst_put_port_malloc(port);
1762                 intel_connector_free(intel_connector);
1763                 return NULL;
1764         }
1765
1766         drm_connector_helper_add(connector, &mst_connector_helper_funcs);
1767
1768         for_each_pipe(display, pipe) {
1769                 struct drm_encoder *enc =
1770                         &intel_dp->mst_encoders[pipe]->base.base;
1771
1772                 ret = drm_connector_attach_encoder(&intel_connector->base, enc);
1773                 if (ret)
1774                         goto err;
1775         }
1776
1777         ret = mst_topology_add_connector_properties(intel_dp, connector, pathprop);
1778         if (ret)
1779                 goto err;
1780
1781         ret = intel_dp_hdcp_init(dig_port, intel_connector);
1782         if (ret)
1783                 drm_dbg_kms(display->drm, "[%s:%d] HDCP MST init failed, skipping.\n",
1784                             connector->name, connector->base.id);
1785
1786         return connector;
1787
1788 err:
1789         drm_connector_cleanup(connector);
1790         return NULL;
1791 }
1792
1793 static void
1794 mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr)
1795 {
1796         struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);
1797
1798         intel_hpd_trigger_irq(dp_to_dig_port(intel_dp));
1799 }
1800
1801 static const struct drm_dp_mst_topology_cbs mst_topology_cbs = {
1802         .add_connector = mst_topology_add_connector,
1803         .poll_hpd_irq = mst_topology_poll_hpd_irq,
1804 };
1805
1806 /* Create a fake encoder for an individual MST stream */
1807 static struct intel_dp_mst_encoder *
1808 mst_stream_encoder_create(struct intel_digital_port *dig_port, enum pipe pipe)
1809 {
1810         struct intel_display *display = to_intel_display(dig_port);
1811         struct intel_encoder *primary_encoder = &dig_port->base;
1812         struct intel_dp_mst_encoder *intel_mst;
1813         struct intel_encoder *encoder;
1814
1815         intel_mst = kzalloc(sizeof(*intel_mst), GFP_KERNEL);
1816
1817         if (!intel_mst)
1818                 return NULL;
1819
1820         intel_mst->pipe = pipe;
1821         encoder = &intel_mst->base;
1822         intel_mst->primary = dig_port;
1823
1824         drm_encoder_init(display->drm, &encoder->base, &mst_stream_encoder_funcs,
1825                          DRM_MODE_ENCODER_DPMST, "DP-MST %c", pipe_name(pipe));
1826
1827         encoder->type = INTEL_OUTPUT_DP_MST;
1828         encoder->power_domain = primary_encoder->power_domain;
1829         encoder->port = primary_encoder->port;
1830         encoder->cloneable = 0;
1831         /*
1832          * This is wrong, but broken userspace uses the intersection
1833          * of possible_crtcs of all the encoders of a given connector
1834          * to figure out which crtcs can drive said connector. What
1835          * should be used instead is the union of possible_crtcs.
1836          * To keep such userspace functioning we must misconfigure
1837          * this to make sure the intersection is not empty :(
1838          */
1839         encoder->pipe_mask = ~0;
1840
1841         encoder->compute_config = mst_stream_compute_config;
1842         encoder->compute_config_late = mst_stream_compute_config_late;
1843         encoder->disable = mst_stream_disable;
1844         encoder->post_disable = mst_stream_post_disable;
1845         encoder->post_pll_disable = mst_stream_post_pll_disable;
1846         encoder->update_pipe = intel_ddi_update_pipe;
1847         encoder->pre_pll_enable = mst_stream_pre_pll_enable;
1848         encoder->pre_enable = mst_stream_pre_enable;
1849         encoder->enable = mst_stream_enable;
1850         encoder->audio_enable = intel_audio_codec_enable;
1851         encoder->audio_disable = intel_audio_codec_disable;
1852         encoder->get_hw_state = mst_stream_get_hw_state;
1853         encoder->get_config = mst_stream_get_config;
1854         encoder->initial_fastset_check = mst_stream_initial_fastset_check;
1855
1856         return intel_mst;
1857
1858 }
1859
1860 /* Create the fake encoders for MST streams */
1861 static bool
1862 mst_stream_encoders_create(struct intel_digital_port *dig_port)
1863 {
1864         struct intel_display *display = to_intel_display(dig_port);
1865         struct intel_dp *intel_dp = &dig_port->dp;
1866         enum pipe pipe;
1867
1868         for_each_pipe(display, pipe)
1869                 intel_dp->mst_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe);
1870         return true;
1871 }
1872
1873 int
1874 intel_dp_mst_encoder_active_links(struct intel_digital_port *dig_port)
1875 {
1876         return dig_port->dp.active_mst_links;
1877 }
1878
1879 int
1880 intel_dp_mst_encoder_init(struct intel_digital_port *dig_port, int conn_base_id)
1881 {
1882         struct intel_display *display = to_intel_display(dig_port);
1883         struct intel_dp *intel_dp = &dig_port->dp;
1884         enum port port = dig_port->base.port;
1885         int ret;
1886
1887         if (!HAS_DP_MST(display) || intel_dp_is_edp(intel_dp))
1888                 return 0;
1889
1890         if (DISPLAY_VER(display) < 12 && port == PORT_A)
1891                 return 0;
1892
1893         if (DISPLAY_VER(display) < 11 && port == PORT_E)
1894                 return 0;
1895
1896         intel_dp->mst_mgr.cbs = &mst_topology_cbs;
1897
1898         /* create encoders */
1899         mst_stream_encoders_create(dig_port);
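             /*
              * Note: the 16 and 3 arguments below are the max DPCD transaction
              * size and the max payload count passed to the topology manager.
              */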
1900         ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst_mgr, display->drm,
1901                                            &intel_dp->aux, 16, 3, conn_base_id);
1902         if (ret) {
1903                 intel_dp->mst_mgr.cbs = NULL;
1904                 return ret;
1905         }
1906
1907         return 0;
1908 }
1909
1910 bool intel_dp_mst_source_support(struct intel_dp *intel_dp)
1911 {
1912         return intel_dp->mst_mgr.cbs;
1913 }
1914
1915 void
1916 intel_dp_mst_encoder_cleanup(struct intel_digital_port *dig_port)
1917 {
1918         struct intel_dp *intel_dp = &dig_port->dp;
1919
1920         if (!intel_dp_mst_source_support(intel_dp))
1921                 return;
1922
1923         drm_dp_mst_topology_mgr_destroy(&intel_dp->mst_mgr);
1924         /* encoders will get killed by normal cleanup */
1925
1926         intel_dp->mst_mgr.cbs = NULL;
1927 }
1928
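     /*
      * Example: with two MST streams on transcoders A and B where A is the MST
      * master transcoder, the crtc_state for A is the master trans
      * (mst_master_transcoder == cpu_transcoder) and B is a slave trans.
      */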
1929 bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state)
1930 {
1931         return crtc_state->mst_master_transcoder == crtc_state->cpu_transcoder;
1932 }
1933
1934 bool intel_dp_mst_is_slave_trans(const struct intel_crtc_state *crtc_state)
1935 {
1936         return crtc_state->mst_master_transcoder != INVALID_TRANSCODER &&
1937                crtc_state->mst_master_transcoder != crtc_state->cpu_transcoder;
1938 }
1939
1940 /**
1941  * intel_dp_mst_add_topology_state_for_connector - add MST topology state for a connector
1942  * @state: atomic state
1943  * @connector: connector to add the state for
1944  * @crtc: the CRTC @connector is attached to
1945  *
1946  * Add the MST topology state for @connector to @state.
1947  *
1948  * Returns 0 on success, negative error code on failure.
1949  */
1950 static int
1951 intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state *state,
1952                                               struct intel_connector *connector,
1953                                               struct intel_crtc *crtc)
1954 {
1955         struct drm_dp_mst_topology_state *mst_state;
1956
1957         if (!connector->mst_port)
1958                 return 0;
1959
1960         mst_state = drm_atomic_get_mst_topology_state(&state->base,
1961                                                       &connector->mst_port->mst_mgr);
1962         if (IS_ERR(mst_state))
1963                 return PTR_ERR(mst_state);
1964
1965         mst_state->pending_crtc_mask |= drm_crtc_mask(&crtc->base);
1966
1967         return 0;
1968 }
1969
1970 /**
1971  * intel_dp_mst_add_topology_state_for_crtc - add MST topology state for a CRTC
1972  * @state: atomic state
1973  * @crtc: CRTC to add the state for
1974  *
1975  * Add the MST topology state for @crtc to @state.
1976  *
1977  * Returns 0 on success, negative error code on failure.
1978  */
1979 int intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state *state,
1980                                              struct intel_crtc *crtc)
1981 {
1982         struct drm_connector *_connector;
1983         struct drm_connector_state *conn_state;
1984         int i;
1985
1986         for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
1987                 struct intel_connector *connector = to_intel_connector(_connector);
1988                 int ret;
1989
1990                 if (conn_state->crtc != &crtc->base)
1991                         continue;
1992
1993                 ret = intel_dp_mst_add_topology_state_for_connector(state, connector, crtc);
1994                 if (ret)
1995                         return ret;
1996         }
1997
1998         return 0;
1999 }
2000
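     /*
      * Return a connector in @state that is or was attached to @crtc; used to
      * identify the MST topology @crtc is part of.
      */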
2001 static struct intel_connector *
2002 get_connector_in_state_for_crtc(struct intel_atomic_state *state,
2003                                 const struct intel_crtc *crtc)
2004 {
2005         struct drm_connector_state *old_conn_state;
2006         struct drm_connector_state *new_conn_state;
2007         struct drm_connector *_connector;
2008         int i;
2009
2010         for_each_oldnew_connector_in_state(&state->base, _connector,
2011                                            old_conn_state, new_conn_state, i) {
2012                 struct intel_connector *connector =
2013                         to_intel_connector(_connector);
2014
2015                 if (old_conn_state->crtc == &crtc->base ||
2016                     new_conn_state->crtc == &crtc->base)
2017                         return connector;
2018         }
2019
2020         return NULL;
2021 }
2022
2023 /**
2024  * intel_dp_mst_crtc_needs_modeset - check if changes in topology need to modeset the given CRTC
2025  * @state: atomic state
2026  * @crtc: CRTC for which to check the modeset requirement
2027  *
2028  * Check if any change in a MST topology requires a forced modeset on @crtc in
2029  * this topology. One such change is enabling/disabling the DSC decompression
2030  * state in the first branch device's UFP DPCD as required by one CRTC, while
2031  * @crtc is still active in the same topology, which then requires a full
2032  * modeset on @crtc.
2033  */
2034 bool intel_dp_mst_crtc_needs_modeset(struct intel_atomic_state *state,
2035                                      struct intel_crtc *crtc)
2036 {
2037         const struct intel_connector *crtc_connector;
2038         const struct drm_connector_state *conn_state;
2039         const struct drm_connector *_connector;
2040         int i;
2041
2042         if (!intel_crtc_has_type(intel_atomic_get_new_crtc_state(state, crtc),
2043                                  INTEL_OUTPUT_DP_MST))
2044                 return false;
2045
2046         crtc_connector = get_connector_in_state_for_crtc(state, crtc);
2047
2048         if (!crtc_connector)
2049                 /* None of the connectors in the topology needs modeset */
2050                 return false;
2051
2052         for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
2053                 const struct intel_connector *connector =
2054                         to_intel_connector(_connector);
2055                 const struct intel_crtc_state *new_crtc_state;
2056                 const struct intel_crtc_state *old_crtc_state;
2057                 struct intel_crtc *crtc_iter;
2058
2059                 if (connector->mst_port != crtc_connector->mst_port ||
2060                     !conn_state->crtc)
2061                         continue;
2062
2063                 crtc_iter = to_intel_crtc(conn_state->crtc);
2064
2065                 new_crtc_state = intel_atomic_get_new_crtc_state(state, crtc_iter);
2066                 old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc_iter);
2067
2068                 if (!intel_crtc_needs_modeset(new_crtc_state))
2069                         continue;
2070
2071                 if (old_crtc_state->dsc.compression_enable ==
2072                     new_crtc_state->dsc.compression_enable)
2073                         continue;
2074                 /*
2075                  * Toggling the decompression flag because of this stream in
2076                  * the first downstream branch device's UFP DPCD may reset the
2077                  * whole branch device. To avoid the reset while other streams
2078                  * are also active modeset the whole MST topology in this
2079                  * case.
2080                  */
2081                 if (connector->dp.dsc_decompression_aux ==
2082                     &connector->mst_port->aux)
2083                         return true;
2084         }
2085
2086         return false;
2087 }
2088
2089 /**
2090  * intel_dp_mst_prepare_probe - Prepare an MST link for topology probing
2091  * @intel_dp: DP port object
2092  *
2093  * Prepare an MST link for topology probing, programming the target
2094  * link parameters to DPCD. This step is required for the enumeration of
2095  * path resources during probing.
2096  */
2097 void intel_dp_mst_prepare_probe(struct intel_dp *intel_dp)
2098 {
2099         int link_rate = intel_dp_max_link_rate(intel_dp);
2100         int lane_count = intel_dp_max_lane_count(intel_dp);
2101         u8 rate_select;
2102         u8 link_bw;
2103
2104         if (intel_dp->link_trained)
2105                 return;
2106
2107         if (intel_mst_probed_link_params_valid(intel_dp, link_rate, lane_count))
2108                 return;
2109
2110         intel_dp_compute_rate(intel_dp, link_rate, &link_bw, &rate_select);
2111
2112         intel_dp_link_training_set_mode(intel_dp, link_rate, false);
2113         intel_dp_link_training_set_bw(intel_dp, link_bw, rate_select, lane_count,
2114                                       drm_dp_enhanced_frame_cap(intel_dp->dpcd));
2115
2116         intel_mst_set_probed_link_params(intel_dp, link_rate, lane_count);
2117 }
2118
2119 /**
2120  * intel_dp_mst_verify_dpcd_state - verify the MST SW enabled state wrt. the DPCD
2121  * @intel_dp: DP port object
2122  *
2123  * Verify if @intel_dp's MST enabled SW state matches the corresponding DPCD
2124  * state. A long HPD pulse - not long enough to be detected as a disconnected
2125  * state - could've reset the DPCD state, which requires tearing
2126  * down/recreating the MST topology.
2127  *
2128  * Returns %true if the SW MST enabled and DPCD states match, %false
2129  * otherwise.
2130  */
2131 bool intel_dp_mst_verify_dpcd_state(struct intel_dp *intel_dp)
2132 {
2133         struct intel_display *display = to_intel_display(intel_dp);
2134         struct intel_connector *connector = intel_dp->attached_connector;
2135         struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
2136         struct intel_encoder *encoder = &dig_port->base;
2137         int ret;
2138         u8 val;
2139
2140         if (!intel_dp->is_mst)
2141                 return true;
2142
2143         ret = drm_dp_dpcd_readb(intel_dp->mst_mgr.aux, DP_MSTM_CTRL, &val);
2144
2145         /* TODO: adjust the expected register value for SST + SideBand. */
2146         if (ret < 0 || val != (DP_MST_EN | DP_UP_REQ_EN | DP_UPSTREAM_IS_SRC)) {
2147                 drm_dbg_kms(display->drm,
2148                             "[CONNECTOR:%d:%s][ENCODER:%d:%s] MST mode got reset, removing topology (ret=%d, ctrl=0x%02x)\n",
2149                             connector->base.base.id, connector->base.name,
2150                             encoder->base.base.id, encoder->base.name,
2151                             ret, val);
2152
2153                 return false;
2154         }
2155
2156         return true;
2157 }