/*
 * Copyright 2009 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#include <drm/display/drm_dp_helper.h>

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"

#include <nvif/if0011.h>

MODULE_PARM_DESC(mst, "Enable DisplayPort multi-stream (default: enabled)");
static int nouveau_mst = 1;
module_param_named(mst, nouveau_mst, int, 0400);

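/* Whether the sink exposes a usable DP_SINK_COUNT field (branch devices and
 * newer dongles); decided by the shared DRM DP helpers from the cached DPCD
 * and sink descriptor.
 */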
static bool
nouveau_dp_has_sink_count(struct drm_connector *connector,
			  struct nouveau_encoder *outp)
{
	return drm_dp_read_sink_count_cap(connector, outp->dp.dpcd, &outp->dp.desc);
}

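/* Check for a Link-Training Tunable PHY Repeater (LTTPR) by reading its
 * capability revision over the NVKM-owned AUX channel; anything older than
 * rev 1.4 is treated as "no usable LTTPR".
 */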
static bool
nouveau_dp_probe_lttpr(struct nouveau_encoder *outp)
{
	u8 rev, size = sizeof(rev);
	int ret;

	ret = nvif_outp_dp_aux_xfer(&outp->outp, DP_AUX_NATIVE_READ, &size,
				    DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV,
				    &rev);
	if (ret || size < sizeof(rev) || rev < 0x14)
		return false;

	return true;
}

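/* Read everything we care about from the sink's DPCD: LTTPR configuration,
 * lane count, the table of supported link rates, branch/sink descriptors,
 * MST capability, sink count and downstream port info. Returns the resulting
 * connector status.
 */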
static enum drm_connector_status
nouveau_dp_probe_dpcd(struct nouveau_connector *nv_connector,
		      struct nouveau_encoder *outp)
{
	struct drm_connector *connector = &nv_connector->base;
	struct drm_dp_aux *aux = &nv_connector->aux;
	struct nv50_mstm *mstm = NULL;
	enum drm_connector_status status = connector_status_disconnected;
	int ret;
	u8 *dpcd = outp->dp.dpcd;

	outp->dp.lttpr.nr = 0;
	outp->dp.rate_nr = 0;
	outp->dp.link_nr = 0;
	outp->dp.link_bw = 0;

	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP &&
	    nouveau_dp_probe_lttpr(outp) &&
	    !drm_dp_read_dpcd_caps(aux, dpcd) &&
	    !drm_dp_read_lttpr_common_caps(aux, dpcd, outp->dp.lttpr.caps)) {
		int nr = drm_dp_lttpr_count(outp->dp.lttpr.caps);

		if (nr) {
			/* Default to transparent mode, then try non-transparent
			 * mode if we know how many repeaters are present.
			 */
			drm_dp_dpcd_writeb(aux, DP_PHY_REPEATER_MODE,
					   DP_PHY_REPEATER_MODE_TRANSPARENT);

			if (nr > 0) {
				ret = drm_dp_dpcd_writeb(aux, DP_PHY_REPEATER_MODE,
							 DP_PHY_REPEATER_MODE_NON_TRANSPARENT);
				if (ret != 1) {
					drm_dp_dpcd_writeb(aux, DP_PHY_REPEATER_MODE,
							   DP_PHY_REPEATER_MODE_TRANSPARENT);
				} else {
					outp->dp.lttpr.nr = nr;
				}
			}
		}
	}

	ret = drm_dp_read_dpcd_caps(aux, dpcd);
	if (ret < 0)
		goto out;

	outp->dp.link_nr = dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
	if (outp->dcb->dpconf.link_nr < outp->dp.link_nr)
		outp->dp.link_nr = outp->dcb->dpconf.link_nr;

	if (outp->dp.lttpr.nr) {
		int links = drm_dp_lttpr_max_lane_count(outp->dp.lttpr.caps);

		if (links && links < outp->dp.link_nr)
			outp->dp.link_nr = links;
	}

	/* eDP sinks with DPCD rev 1.3+ advertise their supported link rates as a table. */
	if (connector->connector_type == DRM_MODE_CONNECTOR_eDP && dpcd[DP_DPCD_REV] >= 0x13) {
		__le16 rates[DP_MAX_SUPPORTED_RATES];

		ret = drm_dp_dpcd_read(aux, DP_SUPPORTED_LINK_RATES, rates, sizeof(rates));
		if (ret == sizeof(rates)) {
			for (int i = 0; i < ARRAY_SIZE(rates); i++) {
				u32 rate = (le16_to_cpu(rates[i]) * 200) / 10;
				int j;

				if (!rate)
					break;

				/* Keep the rate table sorted from highest to lowest. */
				for (j = 0; j < outp->dp.rate_nr; j++) {
					if (rate > outp->dp.rate[j].rate) {
						for (int k = outp->dp.rate_nr; k > j; k--)
							outp->dp.rate[k] = outp->dp.rate[k - 1];
						break;
					}
				}

				outp->dp.rate[j].dpcd = i;
				outp->dp.rate[j].rate = rate;
				outp->dp.rate_nr++;
			}
		}
	}

	if (!outp->dp.rate_nr) {
		const u32 rates[] = { 810000, 540000, 270000, 162000 };
		u32 max_rate = dpcd[DP_MAX_LINK_RATE] * 27000;

		if (outp->dp.lttpr.nr) {
			int rate = drm_dp_lttpr_max_link_rate(outp->dp.lttpr.caps);

			if (rate && rate < max_rate)
				max_rate = rate;
		}

		max_rate = min_t(int, max_rate, outp->dcb->dpconf.link_bw);

		for (int i = 0; i < ARRAY_SIZE(rates); i++) {
			if (rates[i] <= max_rate) {
				outp->dp.rate[outp->dp.rate_nr].dpcd = -1;
				outp->dp.rate[outp->dp.rate_nr].rate = rates[i];
				outp->dp.rate_nr++;
			}
		}

		if (WARN_ON(!outp->dp.rate_nr))
			goto out;
	}

	ret = nvif_outp_dp_rates(&outp->outp, outp->dp.rate, outp->dp.rate_nr);
	if (ret)
		goto out;

	for (int i = 0; i < outp->dp.rate_nr; i++) {
		u32 link_bw = outp->dp.rate[i].rate;

		if (link_bw > outp->dp.link_bw)
			outp->dp.link_bw = link_bw;
	}

	ret = drm_dp_read_desc(aux, &outp->dp.desc, drm_dp_is_branch(dpcd));
	if (ret < 0)
		goto out;

	if (nouveau_mst) {
		mstm = outp->dp.mstm;
		if (mstm)
			mstm->can_mst = drm_dp_read_mst_cap(aux, dpcd) == DRM_DP_MST;
	}

	if (nouveau_dp_has_sink_count(connector, outp)) {
		ret = drm_dp_read_sink_count(aux);
		if (ret < 0)
			goto out;

		outp->dp.sink_count = ret;

		/*
		 * Dongle connected, but no display. Don't bother reading
		 * downstream port info
		 */
		if (!outp->dp.sink_count)
			return connector_status_disconnected;
	}

	ret = drm_dp_read_downstream_info(aux, dpcd,
					  outp->dp.downstream_ports);
	if (ret < 0)
		goto out;

	status = connector_status_connected;
out:
	if (status != connector_status_connected) {
		/* Clear any cached info */
		outp->dp.sink_count = 0;
	}

	return status;
}

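/* Probe a DP/eDP connector and decide how it should be driven. Returns
 * NOUVEAU_DP_MST, NOUVEAU_DP_SST or NOUVEAU_DP_NONE, and manages AUX power
 * around the probe.
 */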
int
nouveau_dp_detect(struct nouveau_connector *nv_connector,
		  struct nouveau_encoder *nv_encoder)
{
	struct drm_device *dev = nv_encoder->base.base.dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct drm_connector *connector = &nv_connector->base;
	struct nv50_mstm *mstm = nv_encoder->dp.mstm;
	enum drm_connector_status status;
	u8 *dpcd = nv_encoder->dp.dpcd;
	int ret = NOUVEAU_DP_NONE, hpd;

	/* eDP ports don't support hotplugging - so there's no point in probing an eDP
	 * port more than once.
	 */
	if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		if (connector->status == connector_status_connected)
			return NOUVEAU_DP_SST;
		else if (connector->status == connector_status_disconnected)
			return NOUVEAU_DP_NONE;
	}

	// Ensure that the aux bus is enabled for probing
	drm_dp_dpcd_set_powered(&nv_connector->aux, true);

	mutex_lock(&nv_encoder->dp.hpd_irq_lock);
	if (mstm) {
		/* If we're not ready to handle MST state changes yet, just
		 * report the last status of the connector. We'll reprobe it
		 * once we've resumed.
		 */
		if (mstm->suspended) {
			if (mstm->is_mst)
				ret = NOUVEAU_DP_MST;
			else if (connector->status ==
				 connector_status_connected)
				ret = NOUVEAU_DP_SST;

			goto out;
		}
	}

	hpd = nvif_outp_detect(&nv_encoder->outp);
	if (hpd == NOT_PRESENT) {
		nvif_outp_dp_aux_pwr(&nv_encoder->outp, false);
		goto out;
	}
	nvif_outp_dp_aux_pwr(&nv_encoder->outp, true);

	status = nouveau_dp_probe_dpcd(nv_connector, nv_encoder);
	if (status == connector_status_disconnected) {
		nvif_outp_dp_aux_pwr(&nv_encoder->outp, false);
		goto out;
	}

	/* If we're in MST mode, we're done here */
	if (mstm && mstm->can_mst && mstm->is_mst) {
		ret = NOUVEAU_DP_MST;
		goto out;
	}

277 NV_DEBUG(drm, "sink dpcd version: 0x%02x\n", dpcd[DP_DPCD_REV]);
278 for (int i = 0; i < nv_encoder->dp.rate_nr; i++)
279 NV_DEBUG(drm, "sink rate %d: %d\n", i, nv_encoder->dp.rate[i].rate);
281 NV_DEBUG(drm, "encoder: %dx%d\n", nv_encoder->dcb->dpconf.link_nr,
282 nv_encoder->dcb->dpconf.link_bw);
283 NV_DEBUG(drm, "maximum: %dx%d\n", nv_encoder->dp.link_nr,
284 nv_encoder->dp.link_bw);
	if (mstm && mstm->can_mst) {
		ret = nv50_mstm_detect(nv_encoder);
		if (ret == 1) {
			ret = NOUVEAU_DP_MST;
			goto out;
		} else if (ret != 0) {
			nvif_outp_dp_aux_pwr(&nv_encoder->outp, false);
			goto out;
		}
	}
	ret = NOUVEAU_DP_SST;

out:
	if (mstm && !mstm->suspended && ret != NOUVEAU_DP_MST)
		nv50_mstm_remove(mstm);

	/* GSP doesn't like it when we try to do aux transactions on a port it considers
	 * disconnected, and since we don't really have a use case for that anyway - just
	 * disable the aux bus here if we've decided the connector is disconnected.
	 */
	if (ret == NOUVEAU_DP_NONE)
		drm_dp_dpcd_set_powered(&nv_connector->aux, false);

	mutex_unlock(&nv_encoder->dp.hpd_irq_lock);
	return ret;
}

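/* Put the sink into the D3 (power down) state and forget the current
 * link-training configuration, so the next modeset retrains from scratch.
 */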
void
nouveau_dp_power_down(struct nouveau_encoder *outp)
{
	struct drm_dp_aux *aux = &outp->conn->aux;
	int ret;
	u8 pwr;

	mutex_lock(&outp->dp.hpd_irq_lock);

	ret = drm_dp_dpcd_readb(aux, DP_SET_POWER, &pwr);
	if (ret == 1) {
		pwr &= ~DP_SET_POWER_MASK;
		pwr |= DP_SET_POWER_D3;
		drm_dp_dpcd_writeb(aux, DP_SET_POWER, pwr);
	}

	outp->dp.lt.nr = 0;
	mutex_unlock(&outp->dp.hpd_irq_lock);
}

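/* Ask NVKM to train (or retrain) the link at the currently selected lane
 * count and bandwidth. If the sink requests post-link-training adjustment,
 * poll the DPRX link status and feed updated drive settings back to NVKM
 * until the sink is satisfied, retrying the whole sequence a few times if
 * channel equalization is lost.
 */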
static bool
nouveau_dp_train_link(struct nouveau_encoder *outp, bool retrain)
{
	struct drm_dp_aux *aux = &outp->conn->aux;
	bool post_lt = false;
	int ret, retries = 0;

	if ( (outp->dp.dpcd[DP_MAX_LANE_COUNT] & 0x20) &&
	    !(outp->dp.dpcd[DP_MAX_DOWNSPREAD] & DP_TPS4_SUPPORTED))
		post_lt = true;

retry:
	ret = nvif_outp_dp_train(&outp->outp, outp->dp.dpcd, outp->dp.lttpr.nr,
				 outp->dp.lt.nr, outp->dp.lt.bw, outp->dp.lt.mst,
				 post_lt, retrain);
	if (ret)
		return false;

	if (post_lt) {
		u8 stat[DP_LINK_STATUS_SIZE];
		u8 prev[2];
		u8 time = 0, adjusts = 0, tmp;

		ret = drm_dp_dpcd_read_phy_link_status(aux, DP_PHY_DPRX, stat);
		if (ret)
			return false;

		for (;;) {
			if (!drm_dp_channel_eq_ok(stat, outp->dp.lt.nr)) {
				ret = 1;
				break;
			}

			if (!(stat[2] & 0x02))
				break;

			msleep(5);
			time += 5;

			memcpy(prev, &stat[4], sizeof(prev));
			ret = drm_dp_dpcd_read_phy_link_status(aux, DP_PHY_DPRX, stat);
			if (ret)
				break;

			if (!memcmp(prev, &stat[4], sizeof(prev))) {
				if (time > 200)
					break;
			} else {
				u8 pe[4], vs[4];

				if (adjusts++ == 6)
					break;

				for (int i = 0; i < outp->dp.lt.nr; i++) {
					pe[i] = drm_dp_get_adjust_request_pre_emphasis(stat, i) >>
						DP_TRAIN_PRE_EMPHASIS_SHIFT;
					vs[i] = drm_dp_get_adjust_request_voltage(stat, i) >>
						DP_TRAIN_VOLTAGE_SWING_SHIFT;
				}

				ret = nvif_outp_dp_drive(&outp->outp, outp->dp.lt.nr, pe, vs);
				if (ret)
					break;

				time = 0;
			}
		}

		if (drm_dp_dpcd_readb(aux, DP_LANE_COUNT_SET, &tmp) == 1) {
			tmp &= ~0x20;
			drm_dp_dpcd_writeb(aux, DP_LANE_COUNT_SET, tmp);
		}
	}

	if (ret == 1 && retries++ < 3)
		goto retry;

	return ret == 0;
}

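/* Make sure the sink is in D0, then walk the supported lane counts and link
 * rates and train the first configuration that provides at least the
 * bandwidth required for the mode (MST always asks for the maximum).
 */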
bool
nouveau_dp_train(struct nouveau_encoder *outp, bool mst, u32 khz, u8 bpc)
{
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct drm_dp_aux *aux = &outp->conn->aux;
	u32 min_rate;
	u8 pwr;
	bool ret = true;

	if (mst)
		min_rate = outp->dp.link_nr * outp->dp.rate[0].rate;
	else
		min_rate = DIV_ROUND_UP(khz * bpc * 3, 8);

	NV_DEBUG(drm, "%s link training (mst:%d min_rate:%d)\n",
		 outp->base.base.name, mst, min_rate);

	mutex_lock(&outp->dp.hpd_irq_lock);

	if (drm_dp_dpcd_readb(aux, DP_SET_POWER, &pwr) == 1) {
		if ((pwr & DP_SET_POWER_MASK) != DP_SET_POWER_D0) {
			pwr &= ~DP_SET_POWER_MASK;
			pwr |= DP_SET_POWER_D0;
			drm_dp_dpcd_writeb(aux, DP_SET_POWER, pwr);
		}
	}

	for (int nr = outp->dp.link_nr; nr; nr >>= 1) {
		for (int rate = 0; rate < outp->dp.rate_nr; rate++) {
			if (outp->dp.rate[rate].rate * nr >= min_rate) {
				outp->dp.lt.nr = nr;
				outp->dp.lt.bw = outp->dp.rate[rate].rate;
				outp->dp.lt.mst = mst;
				if (nouveau_dp_train_link(outp, false))
					goto done;
			}
		}
	}

	ret = false;
done:
	mutex_unlock(&outp->dp.hpd_irq_lock);
	return ret;
}

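/* Verify, with the HPD IRQ lock held, that an already-trained link still has
 * channel equalization; if not, retrain it with the last-used parameters.
 */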
static bool
nouveau_dp_link_check_locked(struct nouveau_encoder *outp)
{
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (!outp || !outp->dp.lt.nr)
		return true;

	if (drm_dp_dpcd_read_phy_link_status(&outp->conn->aux, DP_PHY_DPRX, link_status) < 0)
		return false;

	if (drm_dp_channel_eq_ok(link_status, outp->dp.lt.nr))
		return true;

	return nouveau_dp_train_link(outp, true);
}

bool
nouveau_dp_link_check(struct nouveau_connector *nv_connector)
{
	struct nouveau_encoder *outp = nv_connector->dp_encoder;
	bool link_ok = true;

	if (outp) {
		mutex_lock(&outp->dp.hpd_irq_lock);
		if (outp->dp.lt.nr)
			link_ok = nouveau_dp_link_check_locked(outp);
		mutex_unlock(&outp->dp.hpd_irq_lock);
	}

	return link_ok;
}

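/* Worker for DisplayPort IRQ_HPD events: service MST interrupts, CEC IRQs
 * and sink count changes, then forward the result to the common connector
 * hotplug handler.
 */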
void
nouveau_dp_irq(struct work_struct *work)
{
	struct nouveau_connector *nv_connector =
		container_of(work, typeof(*nv_connector), irq_work);
	struct drm_connector *connector = &nv_connector->base;
	struct nouveau_encoder *outp = find_encoder(connector, DCB_OUTPUT_DP);
	struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
	struct nv50_mstm *mstm;
	u64 hpd = 0;
	int ret;

	if (!outp)
		return;

	mstm = outp->dp.mstm;
	NV_DEBUG(drm, "service %s\n", connector->name);

	mutex_lock(&outp->dp.hpd_irq_lock);

	if (mstm && mstm->is_mst) {
		if (!nv50_mstm_service(drm, nv_connector, mstm))
			hpd |= NVIF_CONN_EVENT_V0_UNPLUG;
	} else {
		drm_dp_cec_irq(&nv_connector->aux);

		if (nouveau_dp_has_sink_count(connector, outp)) {
			ret = drm_dp_read_sink_count(&nv_connector->aux);
			if (ret != outp->dp.sink_count)
				hpd |= NVIF_CONN_EVENT_V0_PLUG;
			if (ret >= 0)
				outp->dp.sink_count = ret;
		}
	}

	mutex_unlock(&outp->dp.hpd_irq_lock);

	nouveau_connector_hpd(nv_connector, NVIF_CONN_EVENT_V0_IRQ | hpd);
}

/* TODO:
 * - Validate against the DP caps advertised by the GPU (we don't check these
 *   yet)
 */
enum drm_mode_status
nv50_dp_mode_valid(struct nouveau_encoder *outp,
		   const struct drm_display_mode *mode,
		   unsigned *out_clock)
{
	const unsigned int min_clock = 25000;
	unsigned int max_rate, mode_rate, ds_max_dotclock, clock = mode->clock;
	/* Check with the minimum bpc always, so we can advertise better modes.
	 * In particular not doing this causes modes to be dropped on HDR
	 * displays as we might check with a bpc of 16 even.
	 */
	const u8 bpp = 6 * 3;

	if (mode->flags & DRM_MODE_FLAG_INTERLACE && !outp->caps.dp_interlace)
		return MODE_NO_INTERLACE;

	if ((mode->flags & DRM_MODE_FLAG_3D_MASK) == DRM_MODE_FLAG_3D_FRAME_PACKING)
		clock *= 2;

	max_rate = outp->dp.link_nr * outp->dp.link_bw;
	mode_rate = DIV_ROUND_UP(clock * bpp, 8);
	if (mode_rate > max_rate)
		return MODE_CLOCK_HIGH;

	ds_max_dotclock = drm_dp_downstream_max_dotclock(outp->dp.dpcd, outp->dp.downstream_ports);
	if (ds_max_dotclock && clock > ds_max_dotclock)
		return MODE_CLOCK_HIGH;

	if (clock < min_clock)
		return MODE_CLOCK_LOW;