2 * Copyright © 2008 Intel Corporation
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
26 #include <drm/drm_atomic.h>
27 #include <drm/drm_atomic_helper.h>
28 #include <drm/drm_edid.h>
29 #include <drm/drm_fixed.h>
30 #include <drm/drm_probe_helper.h>
34 #include "intel_atomic.h"
35 #include "intel_audio.h"
36 #include "intel_connector.h"
37 #include "intel_crtc.h"
38 #include "intel_ddi.h"
40 #include "intel_display_driver.h"
41 #include "intel_display_types.h"
43 #include "intel_dp_hdcp.h"
44 #include "intel_dp_link_training.h"
45 #include "intel_dp_mst.h"
46 #include "intel_dp_test.h"
47 #include "intel_dp_tunnel.h"
48 #include "intel_dpio_phy.h"
49 #include "intel_hdcp.h"
50 #include "intel_hotplug.h"
51 #include "intel_link_bw.h"
52 #include "intel_psr.h"
53 #include "intel_vdsc.h"
54 #include "skl_scaler.h"
57 * DP MST (DisplayPort Multi-Stream Transport)
59 * MST support on the source depends on the platform and port. DP initialization
60 * sets up MST for each MST capable encoder. This will become the primary
61 * encoder for the port.
63 * MST initialization of each primary encoder creates MST stream encoders, one
64 * per pipe, and initializes the MST topology manager. The MST stream encoders
65 * are sometimes called "fake encoders", because they're virtual, not
66 * physical. Thus there are (number of MST capable ports) x (number of pipes)
67 * MST stream encoders in total.
69 * Decision to use MST for a sink happens at detect on the connector attached to
70 * the primary encoder, and this will not change while the sink is connected. We
71 * always use MST when possible, including for SST sinks with sideband messaging
74 * The connectors for the MST streams are added and removed dynamically by the
75 * topology manager. Their connection status is also determined by the topology
78 * On hardware, each transcoder may be associated with a single DDI
79 * port. Multiple transcoders may be associated with the same DDI port only if
80 * the port is in MST mode.
82 * On TGL+, all the transcoders streaming on the same DDI port will indicate a
83 * primary transcoder; the TGL_DP_TP_CTL and TGL_DP_TP_STATUS registers are
84 * relevant only on the primary transcoder. Prior to that, they are port
/* From fake MST stream encoder to primary encoder */
static struct intel_encoder *to_primary_encoder(struct intel_encoder *encoder)
	/* Each fake (virtual) stream encoder keeps a pointer to its physical DDI port. */
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	return &dig_port->base;
/* From fake MST stream encoder to primary DP */
static struct intel_dp *to_primary_dp(struct intel_encoder *encoder)
	/* The primary digital port embeds the struct intel_dp for the link. */
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	return &dig_port->dp;
/*
 * Return the max link bpp sustainable by the DSC->DPT interface for the given
 * CRTC state; only UHBR + DSC on pre-LNL (display ver < 20) is DPT limited.
 */
static int intel_dp_mst_max_dpt_bpp(const struct intel_crtc_state *crtc_state,
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/* No DPT limit applies: non-UHBR link, LNL+ hardware, or no DSC. */
	if (!intel_dp_is_uhbr(crtc_state) || DISPLAY_VER(display) >= 20 || !dsc)

	/*
	 * DSC->DPT interface width:
	 * ICL-MTL: 72 bits (each branch has 72 bits, only left branch is used)
	 * LNL+: 144 bits (not a bottleneck in any config)
	 *
	 * Bspec/49259 suggests that the FEC overhead needs to be
	 * applied here, though HW people claim that neither this FEC
	 * or any other overhead is applicable here (that is the actual
	 * available_bw is just symbol_clock * 72). However based on
	 * testing on MTL-P the
	 * - DELL U3224KBA display
	 * - Unigraf UCD-500 CTS test sink
	 * don't link train correctly at the
	 * - 5120x2880/995.59Mhz
	 * - 6016x3384/1357.23Mhz
	 * - 6144x3456/1413.39Mhz
	 * modes (all the ones having a DPT limit on the above devices),
	 * both the channel coding efficiency and an additional 3%
	 * overhead needs to be accounted for.
	 */
	/* 1030000: 3% overhead (1.03) scaled by 10^6 for kHz->Hz units. */
	return div64_u64(mul_u32_u32(intel_dp_link_symbol_clock(crtc_state->port_clock) * 72,
				     drm_dp_bw_channel_coding_efficiency(true)),
			 mul_u32_u32(adjusted_mode->crtc_clock, 1030000));
/*
 * Compute the link BW overhead for the stream, passing to the DRM helper the
 * flags for MST framing plus UHBR / SSC reference clock / FEC / DSC as
 * applicable for this crtc_state.
 */
static int intel_dp_mst_bw_overhead(const struct intel_crtc_state *crtc_state,
				    const struct intel_connector *connector,
				    bool ssc, int dsc_slice_count, int bpp_x16)
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	unsigned long flags = DRM_DP_BW_OVERHEAD_MST;

	flags |= intel_dp_is_uhbr(crtc_state) ? DRM_DP_BW_OVERHEAD_UHBR : 0;
	flags |= ssc ? DRM_DP_BW_OVERHEAD_SSC_REF_CLK : 0;
	flags |= crtc_state->fec_enable ? DRM_DP_BW_OVERHEAD_FEC : 0;

		/* DSC in use (dsc_slice_count != 0): add the DSC overhead flag. */
		flags |= DRM_DP_BW_OVERHEAD_DSC;

	overhead = drm_dp_bw_overhead(crtc_state->lane_count,
				      adjusted_mode->hdisplay,
	/*
	 * TODO: clarify whether a minimum required by the fixed FEC overhead
	 * in the bspec audio programming sequence is required here.
	 */
	return max(overhead, intel_dp_bw_fec_overhead(crtc_state->fec_enable));
/* Compute the data/link M/N values and the TU size for an MST stream. */
static void intel_dp_mst_compute_m_n(const struct intel_crtc_state *crtc_state,
				     const struct intel_connector *connector,
				     struct intel_link_m_n *m_n)
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/* TODO: Check WA 14013163432 to set data M/N for full BW utilization. */
	intel_link_compute_m_n(bpp_x16, crtc_state->lane_count,
			       adjusted_mode->crtc_clock,
			       crtc_state->port_clock,

	/* TU = data M/N ratio scaled to the 64-slot MTP frame, rounded up. */
	m_n->tu = DIV_ROUND_UP_ULL(mul_u32_u32(m_n->data_m, 64), m_n->data_n);
/* Convert the effective stream data rate into PBN units (54/64 MBps granularity). */
static int intel_dp_mst_calc_pbn(int pixel_clock, int bpp_x16, int bw_overhead)
	int effective_data_rate =
		intel_dp_effective_data_rate(pixel_clock, bpp_x16, bw_overhead);

	/*
	 * TODO: Use drm_dp_calc_pbn_mode() instead, once it's converted
	 * to calculate PBN with the BW overhead passed to it.
	 */
	/* NOTE(review): assumes effective_data_rate is in kBps — confirm against helper. */
	return DIV_ROUND_UP(effective_data_rate * 64, 54 * 1000);
/* DSC slice count for the adjusted mode, accounting for joined pipes. */
static int intel_dp_mst_dsc_get_slice_count(const struct intel_connector *connector,
					    const struct intel_crtc_state *crtc_state)
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state);

	return intel_dp_dsc_get_slice_count(connector,
					    adjusted_mode->clock,
					    adjusted_mode->hdisplay,
/*
 * Walk bpp values from max_bpp down to min_bpp and pick the first one for
 * which MST time slots can be allocated on the link; on success the CRTC
 * state (lane count, port clock, FEC, M/N, TU, PBN, pipe_bpp) is updated
 * and the allocated slot count is returned, otherwise a negative error code.
 */
static int mst_stream_find_vcpi_slots_for_bpp(struct intel_dp *intel_dp,
					      struct intel_crtc_state *crtc_state,
					      int max_bpp, int min_bpp,
					      struct link_config_limits *limits,
					      struct drm_connector_state *conn_state,
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_atomic_state *state = crtc_state->uapi.state;
	struct drm_dp_mst_topology_state *mst_state;
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int bpp, slots = -EINVAL;
	int dsc_slice_count = 0;

	mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst_mgr);
	if (IS_ERR(mst_state))
		return PTR_ERR(mst_state);

	/* For MST the link is always configured at the max rate/lane count. */
	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

		if (!intel_dp_supports_fec(intel_dp, connector, crtc_state))

		/* DSC on 8b/10b links requires FEC; 128b/132b (UHBR) has it built in. */
		crtc_state->fec_enable = !intel_dp_is_uhbr(crtc_state);

	mst_state->pbn_div = drm_dp_get_vc_payload_bw(&intel_dp->mst_mgr,
						      crtc_state->port_clock,
						      crtc_state->lane_count);

	/* Clamp the search range to the DSC->DPT interface limit, if any. */
	max_dpt_bpp = intel_dp_mst_max_dpt_bpp(crtc_state, dsc);
	if (max_bpp > max_dpt_bpp) {
		drm_dbg_kms(display->drm, "Limiting bpp to max DPT bpp (%d -> %d)\n",
			    max_bpp, max_dpt_bpp);
		max_bpp = max_dpt_bpp;

	drm_dbg_kms(display->drm, "Looking for slots in range min bpp %d max bpp %d\n",

		dsc_slice_count = intel_dp_mst_dsc_get_slice_count(connector, crtc_state);
		if (!dsc_slice_count) {
			drm_dbg_kms(display->drm, "Can't get valid DSC slice count\n");

	/* Try progressively lower bpps until the stream fits the link. */
	for (bpp = max_bpp; bpp >= min_bpp; bpp -= step) {
		int local_bw_overhead;
		int remote_bw_overhead;

		drm_dbg_kms(display->drm, "Trying bpp %d\n", bpp);

		link_bpp_x16 = fxp_q4_from_int(dsc ? bpp :
			intel_dp_output_bpp(crtc_state->output_format, bpp));

		/* Local: no SSC overhead; remote (sideband) path: assume SSC. */
		local_bw_overhead = intel_dp_mst_bw_overhead(crtc_state, connector,
							     false, dsc_slice_count, link_bpp_x16);
		remote_bw_overhead = intel_dp_mst_bw_overhead(crtc_state, connector,
							     true, dsc_slice_count, link_bpp_x16);

		intel_dp_mst_compute_m_n(crtc_state, connector,
					 &crtc_state->dp_m_n);

		/*
		 * The TU size programmed to the HW determines which slots in
		 * an MTP frame are used for this stream, which needs to match
		 * the payload size programmed to the first downstream branch
		 * device's payload table.
		 *
		 * Note that atm the payload's PBN value DRM core sends via
		 * the ALLOCATE_PAYLOAD side-band message matches the payload
		 * size (which it calculates from the PBN value) it programs
		 * to the first branch device's payload table. The allocation
		 * in the payload table could be reduced though (to
		 * crtc_state->dp_m_n.tu), provided that the driver doesn't
		 * enable SSC on the corresponding link.
		 */
		pbn.full = dfixed_const(intel_dp_mst_calc_pbn(adjusted_mode->crtc_clock,
							      remote_bw_overhead));
		remote_tu = DIV_ROUND_UP(pbn.full, mst_state->pbn_div.full);

		/*
		 * Aligning the TUs ensures that symbols consisting of multiple
		 * (4) symbol cycles don't get split between two consecutive
		 * MTPs, as required by Bspec.
		 * TODO: remove the alignment restriction for 128b/132b links
		 * on some platforms, where Bspec allows this.
		 */
		remote_tu = ALIGN(remote_tu, 4 / crtc_state->lane_count);

		/*
		 * Also align PBNs accordingly, since MST core will derive its
		 * own copy of TU from the PBN in drm_dp_atomic_find_time_slots().
		 * The above comment about the difference between the PBN
		 * allocated for the whole path and the TUs allocated for the
		 * first branch device's link also applies here.
		 */
		pbn.full = remote_tu * mst_state->pbn_div.full;
		crtc_state->pbn = dfixed_trunc(pbn);

		/* Alignment may only have grown remote_tu; never shrunk it. */
		drm_WARN_ON(display->drm, remote_tu < crtc_state->dp_m_n.tu);
		crtc_state->dp_m_n.tu = remote_tu;

		slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst_mgr,
		if (slots == -EDEADLK)
			/* MST core must agree with our TU computation. */
			drm_WARN_ON(display->drm, slots != crtc_state->dp_m_n.tu);

	/* We failed to find a proper bpp/timeslots, return error */
		drm_dbg_kms(display->drm, "failed finding vcpi slots:%d\n",

	crtc_state->pipe_bpp = bpp;

		/* In DSC mode the chosen bpp is the compressed link bpp. */
		crtc_state->dsc.compressed_bpp_x16 = fxp_q4_from_int(bpp);
	drm_dbg_kms(display->drm, "Got %d slots for pipe bpp %d dsc %d\n",
/* Non-DSC link config: allocate VC slots within the uncompressed bpp limits. */
static int mst_stream_compute_link_config(struct intel_dp *intel_dp,
					  struct intel_crtc_state *crtc_state,
					  struct drm_connector_state *conn_state,
					  struct link_config_limits *limits)
	/*
	 * FIXME: allocate the BW according to link_bpp, which in the case of
	 * YUV420 is only half of the pipe bpp value.
	 */
	/* Step of 2 * 3 bpp (one bpc per component), dsc=false. */
	slots = mst_stream_find_vcpi_slots_for_bpp(intel_dp, crtc_state,
						   fxp_q4_to_int(limits->link.max_bpp_x16),
						   fxp_q4_to_int(limits->link.min_bpp_x16),
						   conn_state, 2 * 3, false);
/*
 * DSC link config: intersect source and sink input bpc limits, derive the
 * compressed bpp range and allocate VC slots within it (1 bpp step, dsc=true).
 */
static int mst_stream_dsc_compute_link_config(struct intel_dp *intel_dp,
					      struct intel_crtc_state *crtc_state,
					      struct drm_connector_state *conn_state,
					      struct link_config_limits *limits)
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	int min_bpp, max_bpp, sink_min_bpp, sink_max_bpp;
	int min_compressed_bpp, max_compressed_bpp;

	/* Max DSC Input BPC for ICL is 10 and for TGL+ is 12 */
	if (DISPLAY_VER(display) >= 12)
		dsc_max_bpc = min_t(u8, 12, conn_state->max_requested_bpc);
		dsc_max_bpc = min_t(u8, 10, conn_state->max_requested_bpc);

	max_bpp = min_t(u8, dsc_max_bpc * 3, limits->pipe.max_bpp);
	min_bpp = limits->pipe.min_bpp;

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd,

	drm_dbg_kms(display->drm, "DSC Source supported min bpp %d max bpp %d\n",

	/* Derive the sink's input bpp range from its supported bpc list. */
	sink_max_bpp = dsc_bpc[0] * 3;
	sink_min_bpp = sink_max_bpp;

	for (i = 1; i < num_bpc; i++) {
		if (sink_min_bpp > dsc_bpc[i] * 3)
			sink_min_bpp = dsc_bpc[i] * 3;
		if (sink_max_bpp < dsc_bpc[i] * 3)
			sink_max_bpp = dsc_bpc[i] * 3;

	drm_dbg_kms(display->drm, "DSC Sink supported min bpp %d max bpp %d\n",
		    sink_min_bpp, sink_max_bpp);

	/* Intersect the source range with the sink range. */
	if (min_bpp < sink_min_bpp)
		min_bpp = sink_min_bpp;

	if (max_bpp > sink_max_bpp)
		max_bpp = sink_max_bpp;

	crtc_state->pipe_bpp = max_bpp;

	max_compressed_bpp = intel_dp_dsc_sink_max_compressed_bpp(connector,
	max_compressed_bpp = min(max_compressed_bpp,
				 fxp_q4_to_int(limits->link.max_bpp_x16));

	min_compressed_bpp = intel_dp_dsc_sink_min_compressed_bpp(crtc_state);
	min_compressed_bpp = max(min_compressed_bpp,
				 fxp_q4_to_int_roundup(limits->link.min_bpp_x16));

	drm_dbg_kms(display->drm, "DSC Sink supported compressed min bpp %d compressed max bpp %d\n",
		    min_compressed_bpp, max_compressed_bpp);

	/* Align compressed bpps according to our own constraints */
	max_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(i915, max_compressed_bpp,
							    crtc_state->pipe_bpp);
	min_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(i915, min_compressed_bpp,
							    crtc_state->pipe_bpp);

	slots = mst_stream_find_vcpi_slots_for_bpp(intel_dp, crtc_state, max_compressed_bpp,
						   min_compressed_bpp, limits,
						   conn_state, 1, true);
/*
 * Update the topology state's slot bookkeeping for the link coding in use
 * (128b/132b on UHBR links, 8b/10b otherwise).
 */
static int mst_stream_update_slots(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state,
				   struct drm_connector_state *conn_state)
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
	struct drm_dp_mst_topology_state *topology_state;
	u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ?
		DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B;

	topology_state = drm_atomic_get_mst_topology_state(conn_state->state, mgr);
	if (IS_ERR(topology_state)) {
		drm_dbg_kms(display->drm, "slot update failed\n");
		return PTR_ERR(topology_state);

	drm_dp_mst_update_slots(topology_state, link_coding_cap);
/* Duration of the mode's horizontal blanking period in nanoseconds. */
static int mode_hblank_period_ns(const struct drm_display_mode *mode)
	return DIV_ROUND_CLOSEST_ULL(mul_u32_u32(mode->htotal - mode->hdisplay,
						 NSEC_PER_SEC / 1000),
/*
 * Whether the sink's hblank expansion quirk requires enabling DSC: only for
 * quirky sinks with a short hblank period (UHBR sinks get a higher threshold)
 * and a valid DSC slice count.
 */
hblank_expansion_quirk_needs_dsc(const struct intel_connector *connector,
				 const struct intel_crtc_state *crtc_state,
				 const struct link_config_limits *limits)
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	bool is_uhbr_sink = connector->mst_port &&
		drm_dp_128b132b_supported(connector->mst_port->dpcd);
	int hblank_limit = is_uhbr_sink ? 500 : 300;

	if (!connector->dp.dsc_hblank_expansion_quirk)

	if (is_uhbr_sink && !drm_dp_is_uhbr_rate(limits->max_rate))

	/* Long enough hblank period: quirk does not apply. */
	if (mode_hblank_period_ns(adjusted_mode) > hblank_limit)

	/* DSC can't be enabled without a valid slice count anyway. */
	if (!intel_dp_mst_dsc_get_slice_count(connector, crtc_state))
/*
 * For sinks with the DSC hblank expansion quirk, force DSC where supported
 * and raise the minimum link bpp so the stream stays above the quirk's
 * bandwidth threshold.
 */
adjust_limits_for_dsc_hblank_expansion_quirk(const struct intel_connector *connector,
					     const struct intel_crtc_state *crtc_state,
					     struct link_config_limits *limits,
	struct intel_display *display = to_intel_display(connector);
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	int min_bpp_x16 = limits->link.min_bpp_x16;

	if (!hblank_expansion_quirk_needs_dsc(connector, crtc_state, limits))

	if (intel_dp_supports_dsc(connector, crtc_state)) {
		drm_dbg_kms(display->drm,
			    "[CRTC:%d:%s][CONNECTOR:%d:%s] DSC needed by hblank expansion quirk\n",
			    crtc->base.base.id, crtc->base.name,
			    connector->base.base.id, connector->base.name);

	/* No DSC support: fall back to raising the uncompressed min bpp. */
	drm_dbg_kms(display->drm,
		    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to 24 due to hblank expansion quirk\n",
		    crtc->base.base.id, crtc->base.name,
		    connector->base.base.id, connector->base.name);

	if (limits->link.max_bpp_x16 < fxp_q4_from_int(24))

	limits->link.min_bpp_x16 = fxp_q4_from_int(24);

	/* MST always uses a fixed link rate, so min and max must agree. */
	drm_WARN_ON(display->drm, limits->min_rate != limits->max_rate);

	/* DSC path: minimum compressed bpp depends on the link rate. */
	if (limits->max_rate < 540000)
		min_bpp_x16 = fxp_q4_from_int(13);
	else if (limits->max_rate < 810000)
		min_bpp_x16 = fxp_q4_from_int(10);

	if (limits->link.min_bpp_x16 >= min_bpp_x16)

	drm_dbg_kms(display->drm,
		    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to " FXP_Q4_FMT " in DSC mode due to hblank expansion quirk\n",
		    crtc->base.base.id, crtc->base.name,
		    connector->base.base.id, connector->base.name,
		    FXP_Q4_ARGS(min_bpp_x16));

	if (limits->link.max_bpp_x16 < min_bpp_x16)

	limits->link.min_bpp_x16 = min_bpp_x16;
/*
 * Fill in the link/pipe bpp, rate and lane count limits for an MST stream;
 * MST always runs the link at its maximum rate and lane count.
 */
mst_stream_compute_config_limits(struct intel_dp *intel_dp,
				 const struct intel_connector *connector,
				 struct intel_crtc_state *crtc_state,
				 struct link_config_limits *limits)
	/*
	 * for MST we always configure max link bw - the spec doesn't
	 * seem to suggest we should do otherwise.
	 */
	limits->min_rate = limits->max_rate =
		intel_dp_max_link_rate(intel_dp);

	limits->min_lane_count = limits->max_lane_count =
		intel_dp_max_lane_count(intel_dp);

	limits->pipe.min_bpp = intel_dp_min_bpp(crtc_state->output_format);
	/*
	 * FIXME: If all the streams can't fit into the link with
	 * their current pipe_bpp we should reduce pipe_bpp across
	 * the board until things start to fit. Until then we
	 * limit to <= 8bpc since that's what was hardcoded for all
	 * MST streams previously. This hack should be removed once
	 * we have the proper retry logic in place.
	 */
	limits->pipe.max_bpp = min(crtc_state->pipe_bpp, 24);

	intel_dp_test_compute_config(intel_dp, crtc_state, limits);

	if (!intel_dp_compute_config_link_bpp_limits(intel_dp,

	return adjust_limits_for_dsc_hblank_expansion_quirk(connector,
/*
 * Compute the full atomic config for an MST stream: joiner setup, link
 * config (falling back to DSC when the mode doesn't fit), VC slot update
 * and the remaining output properties (color range, audio, PSR, tunnel BW).
 */
static int mst_stream_compute_config(struct intel_encoder *encoder,
				     struct intel_crtc_state *pipe_config,
				     struct drm_connector_state *conn_state)
	struct intel_display *display = to_intel_display(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct link_config_limits limits;
	bool dsc_needed, joiner_needs_dsc;
	int num_joined_pipes;

	/* FEC can only remain enabled if the sink still supports it. */
	if (pipe_config->fec_enable &&
	    !intel_dp_supports_fec(intel_dp, connector, pipe_config))

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)

	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     adjusted_mode->crtc_hdisplay,
						     adjusted_mode->crtc_clock);
	if (num_joined_pipes > 1)
		pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe);

	pipe_config->sink_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->has_pch_encoder = false;

	joiner_needs_dsc = intel_dp_joiner_needs_dsc(dev_priv, num_joined_pipes);

	/* DSC needed if forced, required by the joiner, or limits can't be met. */
	dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
		!mst_stream_compute_config_limits(intel_dp, connector,
						  pipe_config, false, &limits);

		ret = mst_stream_compute_link_config(intel_dp, pipe_config,
						     conn_state, &limits);

	/* enable compression if the mode doesn't fit available BW */
		drm_dbg_kms(display->drm, "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
			    str_yes_no(ret), str_yes_no(joiner_needs_dsc),
			    str_yes_no(intel_dp->force_dsc_en));

		if (!intel_dp_supports_dsc(connector, pipe_config))

		/* Recompute the limits in DSC mode. */
		if (!mst_stream_compute_config_limits(intel_dp, connector,

		/*
		 * FIXME: As bpc is hardcoded to 8, as mentioned above,
		 * WARN and ignore the debug flag force_dsc_bpc for now.
		 */
		drm_WARN(display->drm, intel_dp->force_dsc_bpc,
			 "Cannot Force BPC for MST\n");
		/*
		 * Try to get at least some timeslots and then see, if
		 * we can fit there with DSC.
		 */
		drm_dbg_kms(display->drm, "Trying to find VCPI slots in DSC mode\n");

		ret = mst_stream_dsc_compute_link_config(intel_dp, pipe_config,
							 conn_state, &limits);

		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  pipe_config->dp_m_n.tu, false);

	ret = mst_stream_update_slots(intel_dp, pipe_config, conn_state);

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	/* BXT/GLK DPIO PHYs need a per-lane latency-optimization mask. */
	if (display->platform.geminilake || display->platform.broxton)
		pipe_config->lane_lat_optim_mask =
			bxt_dpio_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	intel_dp_audio_compute_config(encoder, pipe_config, conn_state);

	intel_ddi_compute_min_voltage_level(pipe_config);

	intel_psr_compute_config(intel_dp, pipe_config, conn_state);

	return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
/*
 * Iterate over all connectors and return a mask of
 * all CPU transcoders streaming over the same DP link.
 */
intel_dp_mst_transcoder_mask(struct intel_atomic_state *state,
			     struct intel_dp *mst_port)
	struct intel_display *display = to_intel_display(state);
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;

	/* Only relevant on TGL+ where MST streams share a master transcoder. */
	if (DISPLAY_VER(display) < 12)

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		const struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		/* Skip connectors on other MST links or without a CRTC. */
		if (connector->mst_port != mst_port || !conn_state->base.crtc)

		crtc = to_intel_crtc(conn_state->base.crtc);
		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);

		if (!crtc_state->hw.active)

		transcoders |= BIT(crtc_state->cpu_transcoder);
/*
 * Return the mask of pipes whose connector is downstream of @parent_port on
 * the given MST topology (or of all pipes on the topology if @parent_port
 * is NULL).
 */
static u8 get_pipes_downstream_of_mst_port(struct intel_atomic_state *state,
					   struct drm_dp_mst_topology_mgr *mst_mgr,
					   struct drm_dp_mst_port *parent_port)
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		if (!conn_state->base.crtc)

		/* Skip connectors belonging to other MST topologies. */
		if (&connector->mst_port->mst_mgr != mst_mgr)

		if (connector->port != parent_port &&
		    !drm_dp_mst_port_downstream_of_parent(mst_mgr,

		mask |= BIT(to_intel_crtc(conn_state->base.crtc)->pipe);
/*
 * FEC must be enabled either on all or none of the pipes sharing an MST
 * link; if the pipes disagree, force a modeset on all of them and return
 * -EAGAIN so the configuration is recomputed.
 */
static int intel_dp_mst_check_fec_change(struct intel_atomic_state *state,
					 struct drm_dp_mst_topology_mgr *mst_mgr,
					 struct intel_link_bw_limits *limits)
	struct intel_display *display = to_intel_display(state);
	struct intel_crtc *crtc;
	u8 fec_pipe_mask = 0;

	mst_pipe_mask = get_pipes_downstream_of_mst_port(state, mst_mgr, NULL);

	for_each_intel_crtc_in_pipe_mask(display->drm, crtc, mst_pipe_mask) {
		struct intel_crtc_state *crtc_state =
			intel_atomic_get_new_crtc_state(state, crtc);

		/* Atomic connector check should've added all the MST CRTCs. */
		if (drm_WARN_ON(display->drm, !crtc_state))

		if (crtc_state->fec_enable)
			fec_pipe_mask |= BIT(crtc->pipe);

	/* Consistent already: either no pipe or every pipe has FEC enabled. */
	if (!fec_pipe_mask || mst_pipe_mask == fec_pipe_mask)

	limits->force_fec_pipes |= mst_pipe_mask;

	ret = intel_modeset_pipes_in_mask_early(state, "MST FEC",

	return ret ? : -EAGAIN;
/*
 * Check the BW limits on an MST link; on overflow reduce the bpp limits of
 * the affected pipes and return -EAGAIN to trigger a recompute.
 */
static int intel_dp_mst_check_bw(struct intel_atomic_state *state,
				 struct drm_dp_mst_topology_mgr *mst_mgr,
				 struct drm_dp_mst_topology_state *mst_state,
				 struct intel_link_bw_limits *limits)
	struct drm_dp_mst_port *mst_port;

	/* mst_port is set to the first port with a BW overallocation, if any. */
	ret = drm_dp_mst_atomic_check_mgr(&state->base, mst_mgr, mst_state, &mst_port);

	mst_port_pipes = get_pipes_downstream_of_mst_port(state, mst_mgr, mst_port);

	ret = intel_link_bw_reduce_bpp(state, limits,
				       mst_port_pipes, "MST link BW");

	return ret ? : -EAGAIN;
/**
 * intel_dp_mst_atomic_check_link - check all modeset MST link configuration
 * @state: intel atomic state
 * @limits: link BW limits
 *
 * Check the link configuration for all modeset MST outputs. If the
 * configuration is invalid @limits will be updated if possible to
 * reduce the total BW, after which the configuration for all CRTCs in
 * @state must be recomputed with the updated @limits.
 *
 * Returns:
 * - 0 if the configuration is valid
 * - %-EAGAIN, if the configuration is invalid and @limits got updated
 *   with fallback values with which the configuration of all CRTCs in
 *   @state must be recomputed
 * - Other negative error, if the configuration is invalid without a
 *   fallback possibility, or the check failed for another reason
 */
int intel_dp_mst_atomic_check_link(struct intel_atomic_state *state,
				   struct intel_link_bw_limits *limits)
	struct drm_dp_mst_topology_mgr *mgr;
	struct drm_dp_mst_topology_state *mst_state;

	for_each_new_mst_mgr_in_state(&state->base, mgr, mst_state, i) {
		ret = intel_dp_mst_check_fec_change(state, mgr, limits);

		ret = intel_dp_mst_check_bw(state, mgr, mst_state,
/*
 * Late compute step: designate the master transcoder for the MST link once
 * all streams' transcoders are known.
 */
static int mst_stream_compute_config_late(struct intel_encoder *encoder,
					  struct intel_crtc_state *crtc_state,
					  struct drm_connector_state *conn_state)
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_dp *intel_dp = to_primary_dp(encoder);

	/* lowest numbered transcoder will be designated master */
	crtc_state->mst_master_transcoder =
		ffs(intel_dp_mst_transcoder_mask(state, intel_dp)) - 1;
/*
 * If one of the connectors in a MST stream needs a modeset, mark all CRTCs
 * that share the same MST stream as mode changed,
 * intel_modeset_pipe_config()+intel_crtc_check_fastset() will take care to do
 * a fastset when possible.
 *
 * On TGL+ this is required since each stream goes through a master transcoder,
 * so if the master transcoder needs modeset, all other streams in the
 * topology need a modeset. All platforms need to add the atomic state
 * for all streams in the topology, since a modeset on one may require
 * changing the MST link BW usage of the others, which in turn needs a
 * recomputation of the corresponding CRTC states.
 */
mst_connector_atomic_topology_check(struct intel_connector *connector,
				    struct intel_atomic_state *state)
	struct intel_display *display = to_intel_display(connector);
	struct drm_connector_list_iter connector_list_iter;
	struct intel_connector *connector_iter;

	if (!intel_connector_needs_modeset(state, &connector->base))

	drm_connector_list_iter_begin(display->drm, &connector_list_iter);
	for_each_intel_connector_iter(connector_iter, &connector_list_iter) {
		struct intel_digital_connector_state *conn_iter_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		/* Only siblings on the same MST link, excluding ourselves. */
		if (connector_iter->mst_port != connector->mst_port ||
		    connector_iter == connector)

		conn_iter_state = intel_atomic_get_digital_connector_state(state,
		if (IS_ERR(conn_iter_state)) {
			ret = PTR_ERR(conn_iter_state);

		if (!conn_iter_state->base.crtc)

		crtc = to_intel_crtc(conn_iter_state->base.crtc);
		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state)) {
			ret = PTR_ERR(crtc_state);

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);

		crtc_state->uapi.mode_changed = true;

	drm_connector_list_iter_end(&connector_list_iter);
/*
 * Atomic check for an MST connector: digital connector check, topology-wide
 * modeset propagation, tunnel BW check on modeset, then release of this
 * connector's time slots.
 */
mst_connector_atomic_check(struct drm_connector *connector,
			   struct drm_atomic_state *_state)
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct intel_connector *intel_connector =
		to_intel_connector(connector);

	ret = intel_digital_connector_atomic_check(connector, &state->base);

	ret = mst_connector_atomic_topology_check(intel_connector, state);

	if (intel_connector_needs_modeset(state, connector)) {
		ret = intel_dp_tunnel_atomic_check_state(state,
							 intel_connector->mst_port,

	return drm_dp_atomic_release_time_slots(&state->base,
						&intel_connector->mst_port->mst_mgr,
						intel_connector->port);
/*
 * Stream-level disable: tear down HDCP and the sink's DSC decompression;
 * when the last active stream is going away, mark the link as untrained.
 */
static void mst_stream_disable(struct intel_atomic_state *state,
			       struct intel_encoder *encoder,
			       const struct intel_crtc_state *old_crtc_state,
			       const struct drm_connector_state *old_conn_state)
	struct intel_display *display = to_intel_display(state);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);

	drm_dbg_kms(display->drm, "active links %d\n",
		    intel_dp->active_mst_links);

	/* This is the last active stream; the link will need retraining. */
	if (intel_dp->active_mst_links == 1)
		intel_dp->link_trained = false;

	intel_hdcp_disable(intel_mst->connector);

	intel_dp_sink_disable_decompression(state, connector, old_crtc_state);
/*
 * Post-disable for an MST stream: removes the stream's MST payload, disables
 * the transcoder, scalers and PFIT on all joined pipes, powers down the sink
 * path and, for the last active stream, hands off to the primary encoder's
 * post_disable.
 */
static void mst_stream_post_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);
	struct drm_dp_mst_topology_state *old_mst_state =
		drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst_mgr);
	struct drm_dp_mst_topology_state *new_mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
	const struct drm_dp_mst_atomic_payload *old_payload =
		drm_atomic_get_mst_payload_state(old_mst_state, connector->port);
	struct drm_dp_mst_atomic_payload *new_payload =
		drm_atomic_get_mst_payload_state(new_mst_state, connector->port);
	struct intel_crtc *pipe_crtc;
	bool last_mst_stream;

	intel_dp->active_mst_links--;
	last_mst_stream = intel_dp->active_mst_links == 0;
	/* On TGL+ the last stream to go must be on the master transcoder. */
	drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && last_mst_stream &&
		    !intel_dp_mst_is_master_trans(old_crtc_state));

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_crtc_vblank_off(old_pipe_crtc_state);

	intel_disable_transcoder(old_crtc_state);

	drm_dp_remove_payload_part1(&intel_dp->mst_mgr, new_mst_state, new_payload);

	/* Clear any stale ACT-sent status before deallocating the VC payload. */
	intel_ddi_clear_act_sent(encoder, old_crtc_state);

	intel_de_rmw(display,
		     TRANS_DDI_FUNC_CTL(display, old_crtc_state->cpu_transcoder),
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0);

	intel_ddi_wait_for_act_sent(encoder, old_crtc_state);
	drm_dp_check_act_status(&intel_dp->mst_mgr);

	drm_dp_remove_payload_part2(&intel_dp->mst_mgr, new_mst_state,
				    old_payload, new_payload);

	intel_ddi_disable_transcoder_func(old_crtc_state);

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_dsc_disable(old_pipe_crtc_state);

		if (DISPLAY_VER(display) >= 9)
			skl_scaler_disable(old_pipe_crtc_state);
			ilk_pfit_disable(old_pipe_crtc_state);

	/*
	 * Power down mst path before disabling the port, otherwise we end
	 * up getting interrupts from the sink upon detecting link loss.
	 */
	drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port,

	/*
	 * BSpec 4287: disable DIP after the transcoder is disabled and before
	 * the transcoder clock select is set to none.
	 */
	intel_dp_set_infoframes(primary_encoder, false, old_crtc_state, NULL);
	/*
	 * From TGL spec: "If multi-stream slave transcoder: Configure
	 * Transcoder Clock Select to direct no clock to the transcoder"
	 *
	 * From older GENs spec: "Configure Transcoder Clock Select to direct
	 * no clock to the transcoder"
	 */
	if (DISPLAY_VER(display) < 12 || !last_mst_stream)
		intel_ddi_disable_transcoder_clock(old_crtc_state);

	intel_mst->connector = NULL;
	if (last_mst_stream)
		primary_encoder->post_disable(state, primary_encoder,
					      old_crtc_state, NULL);

	drm_dbg_kms(display->drm, "active links %d\n",
		    intel_dp->active_mst_links);
1120 static void mst_stream_post_pll_disable(struct intel_atomic_state *state,
1121 struct intel_encoder *encoder,
1122 const struct intel_crtc_state *old_crtc_state,
1123 const struct drm_connector_state *old_conn_state)
1125 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1126 struct intel_dp *intel_dp = to_primary_dp(encoder);
1128 if (intel_dp->active_mst_links == 0 &&
1129 primary_encoder->post_pll_disable)
1130 primary_encoder->post_pll_disable(state, primary_encoder, old_crtc_state, old_conn_state);
/*
 * ->pre_pll_enable hook for an MST stream encoder: program the port PLL
 * through the primary (DDI) encoder for the first active stream only,
 * then refresh the active-DPLL bookkeeping for this stream's CRTC.
 */
1133 static void mst_stream_pre_pll_enable(struct intel_atomic_state *state,
1134 struct intel_encoder *encoder,
1135 const struct intel_crtc_state *pipe_config,
1136 const struct drm_connector_state *conn_state)
1138 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1139 struct intel_dp *intel_dp = to_primary_dp(encoder);
/* Only the first stream brings up the shared port PLL. */
1141 if (intel_dp->active_mst_links == 0)
1142 primary_encoder->pre_pll_enable(state, primary_encoder,
1146 * The port PLL state needs to get updated for secondary
1147 * streams as for the primary stream.
1149 intel_ddi_update_active_dpll(state, primary_encoder,
1150 to_intel_crtc(pipe_config->uapi.crtc));
1153 static bool intel_mst_probed_link_params_valid(struct intel_dp *intel_dp,
1154 int link_rate, int lane_count)
1156 return intel_dp->link.mst_probed_rate == link_rate &&
1157 intel_dp->link.mst_probed_lane_count == lane_count;
1160 static void intel_mst_set_probed_link_params(struct intel_dp *intel_dp,
1161 int link_rate, int lane_count)
1163 intel_dp->link.mst_probed_rate = link_rate;
1164 intel_dp->link.mst_probed_lane_count = lane_count;
1167 static void intel_mst_reprobe_topology(struct intel_dp *intel_dp,
1168 const struct intel_crtc_state *crtc_state)
1170 if (intel_mst_probed_link_params_valid(intel_dp,
1171 crtc_state->port_clock, crtc_state->lane_count))
1174 drm_dp_mst_topology_queue_probe(&intel_dp->mst_mgr);
1176 intel_mst_set_probed_link_params(intel_dp,
1177 crtc_state->port_clock, crtc_state->lane_count);
/*
 * ->pre_enable hook for an MST stream encoder: bind the connector to the
 * fake stream encoder, power up the sink for the first stream, allocate
 * the stream's payload (part 1) and program the per-stream transcoder
 * clock/function state.
 */
1180 static void mst_stream_pre_enable(struct intel_atomic_state *state,
1181 struct intel_encoder *encoder,
1182 const struct intel_crtc_state *pipe_config,
1183 const struct drm_connector_state *conn_state)
1185 struct intel_display *display = to_intel_display(state);
1186 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
1187 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1188 struct intel_dp *intel_dp = to_primary_dp(encoder);
1189 struct intel_connector *connector =
1190 to_intel_connector(conn_state->connector);
1191 struct drm_dp_mst_topology_state *mst_state =
1192 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
1194 bool first_mst_stream;
1196 /* MST encoders are bound to a crtc, not to a connector,
1197 * force the mapping here for get_hw_state.
1199 connector->encoder = encoder;
1200 intel_mst->connector = connector;
1201 first_mst_stream = intel_dp->active_mst_links == 0;
/* On display ver 12+ the first stream must use the MST master transcoder. */
1202 drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && first_mst_stream &&
1203 !intel_dp_mst_is_master_trans(pipe_config));
1205 drm_dbg_kms(display->drm, "active links %d\n",
1206 intel_dp->active_mst_links);
/* Move the sink to power state D0 before enabling the first stream. */
1208 if (first_mst_stream)
1209 intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
1211 drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port, true);
1213 intel_dp_sink_enable_decompression(state, connector, pipe_config);
/* Only the first stream runs the primary encoder's ->pre_enable. */
1215 if (first_mst_stream) {
1216 primary_encoder->pre_enable(state, primary_encoder,
1219 intel_mst_reprobe_topology(intel_dp, pipe_config);
1222 intel_dp->active_mst_links++;
1224 ret = drm_dp_add_payload_part1(&intel_dp->mst_mgr, mst_state,
1225 drm_atomic_get_mst_payload_state(mst_state, connector->port));
/* NOTE(review): presumably run only when payload allocation failed — confirm the dropped ret check. */
1227 intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);
1230 * Before Gen 12 this is not done as part of
1231 * primary_encoder->pre_enable() and should be done here. For
1232 * Gen 12+ the step in which this should be done is different for the
1233 * first MST stream, so it's done on the DDI for the first stream and
1234 * here for the following ones.
1236 if (DISPLAY_VER(display) < 12 || !first_mst_stream)
1237 intel_ddi_enable_transcoder_clock(encoder, pipe_config);
1239 if (DISPLAY_VER(display) >= 13 && !first_mst_stream)
1240 intel_ddi_config_transcoder_func(encoder, pipe_config);
1242 intel_dsc_dp_pps_write(primary_encoder, pipe_config);
1243 intel_ddi_set_dp_msa(pipe_config, conn_state);
/*
 * Apply ADL-P-only chicken-bit workarounds (Wa_14013163432,
 * Wa_14014143976) in CHICKEN_MISC_3 for FEC/UHBR MST streams, gated on
 * the display stepping.
 */
1246 static void enable_bs_jitter_was(const struct intel_crtc_state *crtc_state)
1248 struct intel_display *display = to_intel_display(crtc_state);
1249 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
/* Workarounds apply to ADL-P stepping D0 and later only. */
1253 if (!IS_ALDERLAKE_P(i915))
1256 if (!IS_DISPLAY_STEP(display, STEP_D0, STEP_FOREVER))
1259 /* Wa_14013163432:adlp */
1260 if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
1261 set |= DP_MST_FEC_BS_JITTER_WA(crtc_state->cpu_transcoder)
1263 /* Wa_14014143976:adlp */
1264 if (IS_DISPLAY_STEP(display, STEP_E0, STEP_FOREVER)) {
1265 if (intel_dp_is_uhbr(crtc_state))
1266 set |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);
1267 else if (crtc_state->fec_enable)
1268 clear |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);
1270 if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
1271 set |= DP_MST_DPT_DPTP_ALIGN_WA(crtc_state->cpu_transcoder);
/* Single RMW applies all accumulated set/clear bits at once. */
1277 intel_de_rmw(display, CHICKEN_MISC_3, clear, set);
/*
 * ->enable hook for an MST stream encoder: program UHBR pixel clock
 * registers, trigger the VC payload allocation on the transcoder, wait
 * for the ACT handshake, finish the payload allocation (part 2) and
 * finally enable the transcoder, vblanks and HDCP.
 */
1280 static void mst_stream_enable(struct intel_atomic_state *state,
1281 struct intel_encoder *encoder,
1282 const struct intel_crtc_state *pipe_config,
1283 const struct drm_connector_state *conn_state)
1285 struct intel_display *display = to_intel_display(encoder);
1286 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1287 struct intel_dp *intel_dp = to_primary_dp(encoder);
1288 struct intel_connector *connector = to_intel_connector(conn_state->connector);
1289 struct drm_dp_mst_topology_state *mst_state =
1290 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr);
1291 enum transcoder trans = pipe_config->cpu_transcoder;
/* ->pre_enable already bumped the count, so the first stream sees 1 here. */
1292 bool first_mst_stream = intel_dp->active_mst_links == 1;
1293 struct intel_crtc *pipe_crtc;
1296 drm_WARN_ON(display->drm, pipe_config->has_pch_encoder);
/* 128b/132b (UHBR) links need the pixel clock programmed in Hz. */
1298 if (intel_dp_is_uhbr(pipe_config)) {
1299 const struct drm_display_mode *adjusted_mode =
1300 &pipe_config->hw.adjusted_mode;
1301 u64 crtc_clock_hz = KHz(adjusted_mode->crtc_clock);
1303 intel_de_write(display, TRANS_DP2_VFREQHIGH(pipe_config->cpu_transcoder),
1304 TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz >> 24));
1305 intel_de_write(display, TRANS_DP2_VFREQLOW(pipe_config->cpu_transcoder),
1306 TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff));
1309 enable_bs_jitter_was(pipe_config);
1311 intel_ddi_enable_transcoder_func(encoder, pipe_config);
/* Clear the ACT-sent flag, then request VC payload allocation. */
1313 intel_ddi_clear_act_sent(encoder, pipe_config);
1315 intel_de_rmw(display, TRANS_DDI_FUNC_CTL(display, trans), 0,
1316 TRANS_DDI_DP_VC_PAYLOAD_ALLOC);
1318 drm_dbg_kms(display->drm, "active links %d\n",
1319 intel_dp->active_mst_links);
1321 intel_ddi_wait_for_act_sent(encoder, pipe_config);
1322 drm_dp_check_act_status(&intel_dp->mst_mgr);
1324 if (first_mst_stream)
1325 intel_ddi_wait_for_fec_status(encoder, pipe_config, true);
1327 ret = drm_dp_add_payload_part2(&intel_dp->mst_mgr,
1328 drm_atomic_get_mst_payload_state(mst_state,
/* NOTE(review): presumably run only on payload failure — confirm the dropped ret check. */
1331 intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);
1333 if (DISPLAY_VER(display) >= 12)
1334 intel_de_rmw(display, CHICKEN_TRANS(display, trans),
1335 FECSTALL_DIS_DPTSTREAM_DPTTG,
1336 pipe_config->fec_enable ? FECSTALL_DIS_DPTSTREAM_DPTTG : 0);
1338 intel_audio_sdp_split_update(pipe_config);
1340 intel_enable_transcoder(pipe_config);
/* Turn vblank interrupts on for every pipe in this (joined) modeset. */
1342 for_each_pipe_crtc_modeset_enable(display, pipe_crtc, pipe_config, i) {
1343 const struct intel_crtc_state *pipe_crtc_state =
1344 intel_atomic_get_new_crtc_state(state, pipe_crtc);
1346 intel_crtc_vblank_on(pipe_crtc_state);
1349 intel_hdcp_enable(state, encoder, pipe_config, conn_state);
1352 static bool mst_stream_get_hw_state(struct intel_encoder *encoder,
1355 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
1356 *pipe = intel_mst->pipe;
1357 if (intel_mst->connector)
1362 static void mst_stream_get_config(struct intel_encoder *encoder,
1363 struct intel_crtc_state *pipe_config)
1365 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1367 primary_encoder->get_config(primary_encoder, pipe_config);
1370 static bool mst_stream_initial_fastset_check(struct intel_encoder *encoder,
1371 struct intel_crtc_state *crtc_state)
1373 struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
1375 return intel_dp_initial_fastset_check(primary_encoder, crtc_state);
1378 static int mst_connector_get_ddc_modes(struct drm_connector *connector)
1380 struct intel_display *display = to_intel_display(connector->dev);
1381 struct intel_connector *intel_connector = to_intel_connector(connector);
1382 struct intel_dp *intel_dp = intel_connector->mst_port;
1383 const struct drm_edid *drm_edid;
1386 if (drm_connector_is_unregistered(connector))
1387 return intel_connector_update_modes(connector, NULL);
1389 if (!intel_display_driver_check_access(display))
1390 return drm_edid_connector_add_modes(connector);
1392 drm_edid = drm_dp_mst_edid_read(connector, &intel_dp->mst_mgr, intel_connector->port);
1394 ret = intel_connector_update_modes(connector, drm_edid);
1396 drm_edid_free(drm_edid);
/*
 * ->late_register hook: register the connector with the DP MST core
 * first, then with the rest of the driver; the MST registration is
 * unwound if the driver-side registration fails.
 */
1402 mst_connector_late_register(struct drm_connector *connector)
1404 struct intel_connector *intel_connector = to_intel_connector(connector);
1407 ret = drm_dp_mst_connector_late_register(connector,
1408 intel_connector->port);
1412 ret = intel_connector_register(connector);
/* Roll back the MST late-register when driver registration failed. */
1414 drm_dp_mst_connector_early_unregister(connector,
1415 intel_connector->port);
1421 mst_connector_early_unregister(struct drm_connector *connector)
1423 struct intel_connector *intel_connector = to_intel_connector(connector);
1425 intel_connector_unregister(connector);
1426 drm_dp_mst_connector_early_unregister(connector,
1427 intel_connector->port);
/* Connector funcs shared by all MST connectors created for this device. */
1430 static const struct drm_connector_funcs mst_connector_funcs = {
1431 .fill_modes = drm_helper_probe_single_connector_modes,
1432 .atomic_get_property = intel_digital_connector_atomic_get_property,
1433 .atomic_set_property = intel_digital_connector_atomic_set_property,
1434 .late_register = mst_connector_late_register,
1435 .early_unregister = mst_connector_early_unregister,
1436 .destroy = intel_connector_destroy,
1437 .atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
1438 .atomic_duplicate_state = intel_digital_connector_duplicate_state,
/* ->get_modes hook: all modes come from the DDC/EDID read path. */
static int mst_connector_get_modes(struct drm_connector *connector)
{
	return mst_connector_get_ddc_modes(connector);
}
/*
 * ->mode_valid_ctx hook: filter out modes that can't be carried by the
 * MST link, the branch path (full_pbn), the platform transcoder or the
 * maximum dotclock — taking joined pipes and DSC into account.
 */
1447 mst_connector_mode_valid_ctx(struct drm_connector *connector,
1448 struct drm_display_mode *mode,
1449 struct drm_modeset_acquire_ctx *ctx,
1450 enum drm_mode_status *status)
1452 struct intel_display *display = to_intel_display(connector->dev);
1453 struct drm_i915_private *dev_priv = to_i915(connector->dev);
1454 struct intel_connector *intel_connector = to_intel_connector(connector);
1455 struct intel_dp *intel_dp = intel_connector->mst_port;
1456 struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr;
1457 struct drm_dp_mst_port *port = intel_connector->port;
/* 6 bpc RGB is the minimum bandwidth a mode can be carried with. */
1458 const int min_bpp = 18;
1459 int max_dotclk = display->cdclk.max_dotclk_freq;
1460 int max_rate, mode_rate, max_lanes, max_link_clock;
1463 u16 dsc_max_compressed_bpp = 0;
1464 u8 dsc_slice_count = 0;
1465 int target_clock = mode->clock;
1466 int num_joined_pipes;
1468 if (drm_connector_is_unregistered(connector)) {
1469 *status = MODE_ERROR;
1473 *status = intel_cpu_transcoder_mode_valid(dev_priv, mode);
1474 if (*status != MODE_OK)
/* Double-clocked modes are not supported on DP. */
1477 if (mode->flags & DRM_MODE_FLAG_DBLCLK) {
1478 *status = MODE_H_ILLEGAL;
1482 if (mode->clock < 10000) {
1483 *status = MODE_CLOCK_LOW;
1487 max_link_clock = intel_dp_max_link_rate(intel_dp);
1488 max_lanes = intel_dp_max_lane_count(intel_dp);
1490 max_rate = intel_dp_max_link_data_rate(intel_dp,
1491 max_link_clock, max_lanes);
1492 mode_rate = intel_dp_link_required(mode->clock, min_bpp);
1496 * - Also check if compression would allow for the mode
1497 * - Calculate the overhead using drm_dp_bw_overhead() /
1498 * drm_dp_bw_channel_coding_efficiency(), similarly to the
1499 * compute config code, as drm_dp_calc_pbn_mode() doesn't
1500 * account with all the overheads.
1501 * - Check here and during compute config the BW reported by
1502 * DFP_Link_Available_Payload_Bandwidth_Number (or the
1503 * corresponding link capabilities of the sink) in case the
1504 * stream is uncompressed for it by the last branch device.
1506 num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, intel_connector,
1507 mode->hdisplay, target_clock);
1508 max_dotclk *= num_joined_pipes;
/* full_pbn is protected by the topology manager's lock. */
1510 ret = drm_modeset_lock(&mgr->base.lock, ctx);
1514 if (mode_rate > max_rate || mode->clock > max_dotclk ||
1515 drm_dp_calc_pbn_mode(mode->clock, min_bpp << 4) > port->full_pbn) {
1516 *status = MODE_CLOCK_HIGH;
1520 if (intel_dp_has_dsc(intel_connector)) {
1522 * TBD pass the connector BPC,
1523 * for now U8_MAX so that max BPC on that platform would be picked
1525 int pipe_bpp = intel_dp_dsc_compute_max_bpp(intel_connector, U8_MAX);
1527 if (drm_dp_sink_supports_fec(intel_connector->dp.fec_capability)) {
1528 dsc_max_compressed_bpp =
1529 intel_dp_dsc_get_max_compressed_bpp(dev_priv,
1535 INTEL_OUTPUT_FORMAT_RGB,
1538 intel_dp_dsc_get_slice_count(intel_connector,
/* DSC is usable only with both a valid compressed bpp and slice count. */
1544 dsc = dsc_max_compressed_bpp && dsc_slice_count;
1547 if (intel_dp_joiner_needs_dsc(dev_priv, num_joined_pipes) && !dsc) {
1548 *status = MODE_CLOCK_HIGH;
1552 if (mode_rate > max_rate && !dsc) {
1553 *status = MODE_CLOCK_HIGH;
1557 *status = intel_mode_valid_max_plane_size(dev_priv, mode, num_joined_pipes);
1561 static struct drm_encoder *
1562 mst_connector_atomic_best_encoder(struct drm_connector *connector,
1563 struct drm_atomic_state *state)
1565 struct drm_connector_state *connector_state = drm_atomic_get_new_connector_state(state,
1567 struct intel_connector *intel_connector = to_intel_connector(connector);
1568 struct intel_dp *intel_dp = intel_connector->mst_port;
1569 struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc);
1571 return &intel_dp->mst_encoders[crtc->pipe]->base.base;
1575 mst_connector_detect_ctx(struct drm_connector *connector,
1576 struct drm_modeset_acquire_ctx *ctx, bool force)
1578 struct intel_display *display = to_intel_display(connector->dev);
1579 struct intel_connector *intel_connector = to_intel_connector(connector);
1580 struct intel_dp *intel_dp = intel_connector->mst_port;
1582 if (!intel_display_device_enabled(display))
1583 return connector_status_disconnected;
1585 if (drm_connector_is_unregistered(connector))
1586 return connector_status_disconnected;
1588 if (!intel_display_driver_check_access(display))
1589 return connector->status;
1591 intel_dp_flush_connector_commits(intel_connector);
1593 return drm_dp_mst_detect_port(connector, ctx, &intel_dp->mst_mgr,
1594 intel_connector->port);
/* Probe/atomic helper hooks for MST connectors. */
1597 static const struct drm_connector_helper_funcs mst_connector_helper_funcs = {
1598 .get_modes = mst_connector_get_modes,
1599 .mode_valid_ctx = mst_connector_mode_valid_ctx,
1600 .atomic_best_encoder = mst_connector_atomic_best_encoder,
1601 .atomic_check = mst_connector_atomic_check,
1602 .detect_ctx = mst_connector_detect_ctx,
/* Free the fake MST stream encoder wrapper after DRM core cleanup. */
static void mst_stream_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst =
		enc_to_mst(to_intel_encoder(encoder));

	drm_encoder_cleanup(encoder);

	kfree(intel_mst);
}
/* Encoder funcs for the fake per-pipe MST stream encoders. */
1613 static const struct drm_encoder_funcs mst_stream_encoder_funcs = {
1614 .destroy = mst_stream_encoder_destroy,
1617 static bool mst_connector_get_hw_state(struct intel_connector *connector)
1619 /* This is the MST stream encoder set in ->pre_enable, if any */
1620 struct intel_encoder *encoder = intel_attached_encoder(connector);
1623 if (!encoder || !connector->base.state->crtc)
1626 return encoder->get_hw_state(encoder, &pipe);
1629 static int mst_topology_add_connector_properties(struct intel_dp *intel_dp,
1630 struct drm_connector *connector,
1631 const char *pathprop)
1633 struct intel_display *display = to_intel_display(intel_dp);
1635 drm_object_attach_property(&connector->base,
1636 display->drm->mode_config.path_property, 0);
1637 drm_object_attach_property(&connector->base,
1638 display->drm->mode_config.tile_property, 0);
1640 intel_attach_force_audio_property(connector);
1641 intel_attach_broadcast_rgb_property(connector);
1644 * Reuse the prop from the SST connector because we're
1645 * not allowed to create new props after device registration.
1647 connector->max_bpc_property =
1648 intel_dp->attached_connector->base.max_bpc_property;
1649 if (connector->max_bpc_property)
1650 drm_connector_attach_max_bpc_property(connector, 6, 12);
1652 return drm_connector_set_path_property(connector, pathprop);
1656 intel_dp_mst_read_decompression_port_dsc_caps(struct intel_dp *intel_dp,
1657 struct intel_connector *connector)
1659 u8 dpcd_caps[DP_RECEIVER_CAP_SIZE];
1661 if (!connector->dp.dsc_decompression_aux)
1664 if (drm_dp_read_dpcd_caps(connector->dp.dsc_decompression_aux, dpcd_caps) < 0)
1667 intel_dp_get_dsc_sink_cap(dpcd_caps[DP_DPCD_REV], connector);
/*
 * Detect sinks needing the "HBLANK expansion requires DSC" quirk by
 * reading the DPCD/branch descriptor of the connector's (or its
 * parent's) AUX device.
 */
1670 static bool detect_dsc_hblank_expansion_quirk(const struct intel_connector *connector)
1672 struct intel_display *display = to_intel_display(connector);
1673 struct drm_dp_aux *aux = connector->dp.dsc_decompression_aux;
1674 struct drm_dp_desc desc;
1675 u8 dpcd[DP_RECEIVER_CAP_SIZE];
1681 * A logical port's OUI (at least for affected sinks) is all 0, so
1682 * instead of that the parent port's OUI is used for identification.
1684 if (drm_dp_mst_port_is_logical(connector->port)) {
1685 aux = drm_dp_mst_aux_for_parent(connector->port);
1687 aux = &connector->mst_port->aux;
1690 if (drm_dp_read_dpcd_caps(aux, dpcd) < 0)
1693 if (drm_dp_read_desc(aux, &desc, drm_dp_is_branch(dpcd)) < 0)
1696 if (!drm_dp_has_quirk(&desc,
1697 DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC))
1701 * UHBR (MST sink) devices requiring this quirk don't advertise the
1702 * HBLANK expansion support. Presuming that they perform HBLANK
1703 * expansion internally, or are affected by this issue on modes with a
1704 * short HBLANK for other reasons.
1706 if (!drm_dp_128b132b_supported(dpcd) &&
1707 !(dpcd[DP_RECEIVE_PORT_0_CAP_0] & DP_HBLANK_EXPANSION_CAPABLE))
1710 drm_dbg_kms(display->drm,
1711 "[CONNECTOR:%d:%s] DSC HBLANK expansion quirk detected\n",
1712 connector->base.base.id, connector->base.name);
/*
 * MST topology manager ->add_connector callback: allocate and initialize
 * an intel_connector for a newly discovered MST port, wire it up to the
 * fake stream encoders, attach properties and set up HDCP.
 */
1718 mst_topology_add_connector(struct drm_dp_mst_topology_mgr *mgr,
1719 struct drm_dp_mst_port *port,
1720 const char *pathprop)
1722 struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);
1723 struct intel_display *display = to_intel_display(intel_dp);
1724 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1725 struct intel_connector *intel_connector;
1726 struct drm_connector *connector;
1730 intel_connector = intel_connector_alloc();
1731 if (!intel_connector)
1734 connector = &intel_connector->base;
1736 intel_connector->get_hw_state = mst_connector_get_hw_state;
1737 intel_connector->sync_state = intel_dp_connector_sync_state;
1738 intel_connector->mst_port = intel_dp;
1739 intel_connector->port = port;
/* Take a malloc reference on the port for the connector's lifetime. */
1740 drm_dp_mst_get_port_malloc(port);
1742 intel_dp_init_modeset_retry_work(intel_connector);
1744 ret = drm_connector_dynamic_init(display->drm, connector, &mst_connector_funcs,
1745 DRM_MODE_CONNECTOR_DisplayPort, NULL);
/* Init failure: drop the port reference and free the connector. */
1747 drm_dp_mst_put_port_malloc(port);
1748 intel_connector_free(intel_connector);
1752 intel_connector->dp.dsc_decompression_aux = drm_dp_mst_dsc_aux_for_port(port);
1753 intel_dp_mst_read_decompression_port_dsc_caps(intel_dp, intel_connector);
1754 intel_connector->dp.dsc_hblank_expansion_quirk =
1755 detect_dsc_hblank_expansion_quirk(intel_connector);
1757 drm_connector_helper_add(connector, &mst_connector_helper_funcs);
/* Any pipe's stream encoder may drive this connector. */
1759 for_each_pipe(display, pipe) {
1760 struct drm_encoder *enc =
1761 &intel_dp->mst_encoders[pipe]->base.base;
1763 ret = drm_connector_attach_encoder(&intel_connector->base, enc);
1768 ret = mst_topology_add_connector_properties(intel_dp, connector, pathprop);
/* HDCP init failure is non-fatal; the connector works without it. */
1772 ret = intel_dp_hdcp_init(dig_port, intel_connector);
1774 drm_dbg_kms(display->drm, "[%s:%d] HDCP MST init failed, skipping.\n",
1775 connector->name, connector->base.id);
1780 drm_connector_cleanup(connector);
1785 mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr)
1787 struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr);
1789 intel_hpd_trigger_irq(dp_to_dig_port(intel_dp));
/* Callbacks handed to the DP MST topology manager. */
1792 static const struct drm_dp_mst_topology_cbs mst_topology_cbs = {
1793 .add_connector = mst_topology_add_connector,
1794 .poll_hpd_irq = mst_topology_poll_hpd_irq,
/* Create a fake encoder for an individual MST stream */
1798 static struct intel_dp_mst_encoder *
1799 mst_stream_encoder_create(struct intel_digital_port *dig_port, enum pipe pipe)
1801 struct intel_display *display = to_intel_display(dig_port);
1802 struct intel_encoder *primary_encoder = &dig_port->base;
1803 struct intel_dp_mst_encoder *intel_mst;
1804 struct intel_encoder *encoder;
1806 intel_mst = kzalloc(sizeof(*intel_mst), GFP_KERNEL);
1811 intel_mst->pipe = pipe;
1812 encoder = &intel_mst->base;
1813 intel_mst->primary = dig_port;
1815 drm_encoder_init(display->drm, &encoder->base, &mst_stream_encoder_funcs,
1816 DRM_MODE_ENCODER_DPMST, "DP-MST %c", pipe_name(pipe));
/* The fake encoder inherits power domain and port from the primary DDI. */
1818 encoder->type = INTEL_OUTPUT_DP_MST;
1819 encoder->power_domain = primary_encoder->power_domain;
1820 encoder->port = primary_encoder->port;
1821 encoder->cloneable = 0;
1823 * This is wrong, but broken userspace uses the intersection
1824 * of possible_crtcs of all the encoders of a given connector
1825 * to figure out which crtcs can drive said connector. What
1826 * should be used instead is the union of possible_crtcs.
1827 * To keep such userspace functioning we must misconfigure
1828 * this to make sure the intersection is not empty :(
1830 encoder->pipe_mask = ~0;
/* Wire up the modeset hooks implemented above. */
1832 encoder->compute_config = mst_stream_compute_config;
1833 encoder->compute_config_late = mst_stream_compute_config_late;
1834 encoder->disable = mst_stream_disable;
1835 encoder->post_disable = mst_stream_post_disable;
1836 encoder->post_pll_disable = mst_stream_post_pll_disable;
1837 encoder->update_pipe = intel_ddi_update_pipe;
1838 encoder->pre_pll_enable = mst_stream_pre_pll_enable;
1839 encoder->pre_enable = mst_stream_pre_enable;
1840 encoder->enable = mst_stream_enable;
1841 encoder->audio_enable = intel_audio_codec_enable;
1842 encoder->audio_disable = intel_audio_codec_disable;
1843 encoder->get_hw_state = mst_stream_get_hw_state;
1844 encoder->get_config = mst_stream_get_config;
1845 encoder->initial_fastset_check = mst_stream_initial_fastset_check;
/* Create the fake encoders for MST streams */
1853 mst_stream_encoders_create(struct intel_digital_port *dig_port)
1855 struct intel_display *display = to_intel_display(dig_port);
1856 struct intel_dp *intel_dp = &dig_port->dp;
/* One fake stream encoder per pipe, all backed by the same DDI port. */
1859 for_each_pipe(display, pipe)
1860 intel_dp->mst_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe);
1865 intel_dp_mst_encoder_active_links(struct intel_digital_port *dig_port)
1867 return dig_port->dp.active_mst_links;
/*
 * Set up MST support for a DP encoder: install the topology callbacks,
 * create the fake per-pipe stream encoders and initialize the DP MST
 * topology manager. Skipped on ports/platforms without MST support.
 */
1871 intel_dp_mst_encoder_init(struct intel_digital_port *dig_port, int conn_base_id)
1873 struct intel_display *display = to_intel_display(dig_port);
1874 struct intel_dp *intel_dp = &dig_port->dp;
1875 enum port port = dig_port->base.port;
1878 if (!HAS_DP_MST(display) || intel_dp_is_edp(intel_dp))
/* Port A (pre-TGL) and port E (pre-ICL) don't support MST. */
1881 if (DISPLAY_VER(display) < 12 && port == PORT_A)
1884 if (DISPLAY_VER(display) < 11 && port == PORT_E)
1887 intel_dp->mst_mgr.cbs = &mst_topology_cbs;
1889 /* create encoders */
1890 mst_stream_encoders_create(dig_port)
1891 ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst_mgr, display->drm,
1892 &intel_dp->aux, 16, 3, conn_base_id);
/* On init failure clear the callbacks, marking MST as unsupported. */
1894 intel_dp->mst_mgr.cbs = NULL;
1901 bool intel_dp_mst_source_support(struct intel_dp *intel_dp)
1903 return intel_dp->mst_mgr.cbs;
1907 intel_dp_mst_encoder_cleanup(struct intel_digital_port *dig_port)
1909 struct intel_dp *intel_dp = &dig_port->dp;
1911 if (!intel_dp_mst_source_support(intel_dp))
1914 drm_dp_mst_topology_mgr_destroy(&intel_dp->mst_mgr);
1915 /* encoders will get killed by normal cleanup */
1917 intel_dp->mst_mgr.cbs = NULL;
1920 bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state)
1922 return crtc_state->mst_master_transcoder == crtc_state->cpu_transcoder;
1925 bool intel_dp_mst_is_slave_trans(const struct intel_crtc_state *crtc_state)
1927 return crtc_state->mst_master_transcoder != INVALID_TRANSCODER &&
1928 crtc_state->mst_master_transcoder != crtc_state->cpu_transcoder;
1932 * intel_dp_mst_add_topology_state_for_connector - add MST topology state for a connector
1933 * @state: atomic state
1934 * @connector: connector to add the state for
1935 * @crtc: the CRTC @connector is attached to
1937 * Add the MST topology state for @connector to @state.
1939 * Returns 0 on success, negative error code on failure.
1942 intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state *state,
1943 struct intel_connector *connector,
1944 struct intel_crtc *crtc)
1946 struct drm_dp_mst_topology_state *mst_state;
1948 if (!connector->mst_port)
1951 mst_state = drm_atomic_get_mst_topology_state(&state->base,
1952 &connector->mst_port->mst_mgr);
1953 if (IS_ERR(mst_state))
1954 return PTR_ERR(mst_state);
1956 mst_state->pending_crtc_mask |= drm_crtc_mask(&crtc->base);
1962 * intel_dp_mst_add_topology_state_for_crtc - add MST topology state for a CRTC
1963 * @state: atomic state
1964 * @crtc: CRTC to add the state for
1966 * Add the MST topology state for @crtc to @state.
1968 * Returns 0 on success, negative error code on failure.
1970 int intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state *state,
1971 struct intel_crtc *crtc)
1973 struct drm_connector *_connector;
1974 struct drm_connector_state *conn_state;
1977 for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
1978 struct intel_connector *connector = to_intel_connector(_connector);
1981 if (conn_state->crtc != &crtc->base)
1984 ret = intel_dp_mst_add_topology_state_for_connector(state, connector, crtc);
1992 static struct intel_connector *
1993 get_connector_in_state_for_crtc(struct intel_atomic_state *state,
1994 const struct intel_crtc *crtc)
1996 struct drm_connector_state *old_conn_state;
1997 struct drm_connector_state *new_conn_state;
1998 struct drm_connector *_connector;
2001 for_each_oldnew_connector_in_state(&state->base, _connector,
2002 old_conn_state, new_conn_state, i) {
2003 struct intel_connector *connector =
2004 to_intel_connector(_connector);
2006 if (old_conn_state->crtc == &crtc->base ||
2007 new_conn_state->crtc == &crtc->base)
/**
 * intel_dp_mst_crtc_needs_modeset - check if changes in topology need to modeset the given CRTC
 * @state: atomic state
 * @crtc: CRTC for which to check the modeset requirement
 *
 * Check if any change in a MST topology requires a forced modeset on @crtc in
 * this topology. One such change is enabling/disabling the DSC decompression
 * state in the first branch device's UFP DPCD as required by one CRTC, while
 * the other @crtc in the same topology is still active, requiring a full modeset
 */
2025 bool intel_dp_mst_crtc_needs_modeset(struct intel_atomic_state *state,
2026 struct intel_crtc *crtc)
2028 const struct intel_connector *crtc_connector;
2029 const struct drm_connector_state *conn_state;
2030 const struct drm_connector *_connector;
/* Only MST outputs can be affected by topology-wide changes. */
2033 if (!intel_crtc_has_type(intel_atomic_get_new_crtc_state(state, crtc),
2034 INTEL_OUTPUT_DP_MST))
2037 crtc_connector = get_connector_in_state_for_crtc(state, crtc);
2039 if (!crtc_connector)
2040 /* None of the connectors in the topology needs modeset */
/* Scan the other connectors in the same topology for DSC toggles. */
2043 for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
2044 const struct intel_connector *connector =
2045 to_intel_connector(_connector);
2046 const struct intel_crtc_state *new_crtc_state;
2047 const struct intel_crtc_state *old_crtc_state;
2048 struct intel_crtc *crtc_iter;
2050 if (connector->mst_port != crtc_connector->mst_port ||
2054 crtc_iter = to_intel_crtc(conn_state->crtc);
2056 new_crtc_state = intel_atomic_get_new_crtc_state(state, crtc_iter);
2057 old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc_iter);
2059 if (!intel_crtc_needs_modeset(new_crtc_state))
2062 if (old_crtc_state->dsc.compression_enable ==
2063 new_crtc_state->dsc.compression_enable)
2066 * Toggling the decompression flag because of this stream in
2067 * the first downstream branch device's UFP DPCD may reset the
2068 * whole branch device. To avoid the reset while other streams
2069 * are also active modeset the whole MST topology in this
2072 if (connector->dp.dsc_decompression_aux ==
2073 &connector->mst_port->aux)
/**
 * intel_dp_mst_prepare_probe - Prepare an MST link for topology probing
 * @intel_dp: DP port object
 *
 * Prepare an MST link for topology probing, programming the target
 * link parameters to DPCD. This step is a requirement of the enumaration
 * of path resources during probing.
 */
2088 void intel_dp_mst_prepare_probe(struct intel_dp *intel_dp)
2090 int link_rate = intel_dp_max_link_rate(intel_dp);
2091 int lane_count = intel_dp_max_lane_count(intel_dp);
/* An already trained link keeps its current parameters. */
2095 if (intel_dp->link_trained)
/* Skip when the topology was already probed with these parameters. */
2098 if (intel_mst_probed_link_params_valid(intel_dp, link_rate, lane_count))
2101 intel_dp_compute_rate(intel_dp, link_rate, &link_bw, &rate_select);
2103 intel_dp_link_training_set_mode(intel_dp, link_rate, false);
2104 intel_dp_link_training_set_bw(intel_dp, link_bw, rate_select, lane_count,
2105 drm_dp_enhanced_frame_cap(intel_dp->dpcd));
2107 intel_mst_set_probed_link_params(intel_dp, link_rate, lane_count);
2111 * intel_dp_mst_verify_dpcd_state - verify the MST SW enabled state wrt. the DPCD
2112 * @intel_dp: DP port object
2114 * Verify if @intel_dp's MST enabled SW state matches the corresponding DPCD
2115 * state. A long HPD pulse - not long enough to be detected as a disconnected
2116 * state - could've reset the DPCD state, which requires tearing
2117 * down/recreating the MST topology.
2119 * Returns %true if the SW MST enabled and DPCD states match, %false
2122 bool intel_dp_mst_verify_dpcd_state(struct intel_dp *intel_dp)
2124 struct intel_display *display = to_intel_display(intel_dp);
2125 struct intel_connector *connector = intel_dp->attached_connector;
2126 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
2127 struct intel_encoder *encoder = &dig_port->base;
2131 if (!intel_dp->is_mst)
2134 ret = drm_dp_dpcd_readb(intel_dp->mst_mgr.aux, DP_MSTM_CTRL, &val);
2136 /* Adjust the expected register value for SST + SideBand. */
2137 if (ret < 0 || val != (DP_MST_EN | DP_UP_REQ_EN | DP_UPSTREAM_IS_SRC)) {
2138 drm_dbg_kms(display->drm,
2139 "[CONNECTOR:%d:%s][ENCODER:%d:%s] MST mode got reset, removing topology (ret=%d, ctrl=0x%02x)\n",
2140 connector->base.base.id, connector->base.name,
2141 encoder->base.base.id, encoder->base.name,