1 // SPDX-License-Identifier: GPL-2.0-only
3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
6 #define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__
8 #include <linux/rational.h>
9 #include <linux/delay.h>
10 #include <linux/iopoll.h>
11 #include <linux/phy/phy.h>
12 #include <linux/phy/phy-dp.h>
13 #include <linux/rational.h>
14 #include <drm/drm_dp_helper.h>
15 #include <drm/drm_print.h>
17 #include "dp_catalog.h"
20 #define POLLING_SLEEP_US 1000
21 #define POLLING_TIMEOUT_US 10000
23 #define SCRAMBLER_RESET_COUNT_VALUE 0xFC
25 #define DP_INTERRUPT_STATUS_ACK_SHIFT 1
26 #define DP_INTERRUPT_STATUS_MASK_SHIFT 2
28 #define MSM_DP_CONTROLLER_AHB_OFFSET 0x0000
29 #define MSM_DP_CONTROLLER_AHB_SIZE 0x0200
30 #define MSM_DP_CONTROLLER_AUX_OFFSET 0x0200
31 #define MSM_DP_CONTROLLER_AUX_SIZE 0x0200
32 #define MSM_DP_CONTROLLER_LINK_OFFSET 0x0400
33 #define MSM_DP_CONTROLLER_LINK_SIZE 0x0C00
34 #define MSM_DP_CONTROLLER_P0_OFFSET 0x1000
35 #define MSM_DP_CONTROLLER_P0_SIZE 0x0400
37 #define DP_INTERRUPT_STATUS1 \
38 (DP_INTR_AUX_I2C_DONE| \
39 DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
40 DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
41 DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
42 DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
44 #define DP_INTERRUPT_STATUS1_ACK \
45 (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
46 #define DP_INTERRUPT_STATUS1_MASK \
47 (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
49 #define DP_INTERRUPT_STATUS2 \
50 (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
51 DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
53 #define DP_INTERRUPT_STATUS2_ACK \
54 (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
55 #define DP_INTERRUPT_STATUS2_MASK \
56 (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
/*
 * Private wrapper around the public struct dp_catalog.
 * Recovered via container_of(dp_catalog, struct dp_catalog_private, dp_catalog)
 * throughout this file.
 *
 * NOTE(review): this extraction drops lines (gaps in the embedded numbering);
 * additional members — at least the "io" pointer dereferenced as catalog->io
 * elsewhere in this file — are not visible here.
 */
58 struct dp_catalog_private {
/* Per-SDP-type table of audio SDP header register offsets; installed by
 * dp_catalog_audio_init() and indexed in the audio get/set header helpers. */
61 u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
/* Embedded public catalog handle returned to callers by dp_catalog_get(). */
62 struct dp_catalog dp_catalog;
/* Per-config index into the PHY AUX tuning LUT; usage not visible in this chunk. */
63 u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
/*
 * MMIO accessor helpers. Each adds the fixed sub-block offset
 * (AHB / AUX / LINK / P0) to the register offset and performs the access on
 * catalog->io->dp_controller.base. Reads use readl_relaxed(); writes use
 * writel() so the write is ordered before subsequent device operations.
 *
 * NOTE(review): the extraction drops lines (braces, some parameter lists,
 * comment delimiters) — gaps are visible in the embedded numbering.
 */
/* Read a register in the AUX sub-block. */
66 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
68 offset += MSM_DP_CONTROLLER_AUX_OFFSET;
69 return readl_relaxed(catalog->io->dp_controller.base + offset);
/* Write a register in the AUX sub-block (ordered via writel()). */
72 static inline void dp_write_aux(struct dp_catalog_private *catalog,
75 offset += MSM_DP_CONTROLLER_AUX_OFFSET;
77 * To make sure aux reg writes happens before any other operation,
78 * this function uses writel() instread of writel_relaxed()
80 writel(data, catalog->io->dp_controller.base + offset);
/* Read a register in the AHB sub-block. */
83 static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
85 offset += MSM_DP_CONTROLLER_AHB_OFFSET;
86 return readl_relaxed(catalog->io->dp_controller.base + offset);
/* Write a register in the AHB sub-block (ordered via writel()). */
89 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
92 offset += MSM_DP_CONTROLLER_AHB_OFFSET;
94 * To make sure phy reg writes happens before any other operation,
95 * this function uses writel() instread of writel_relaxed()
97 writel(data, catalog->io->dp_controller.base + offset);
/* Write a register in the P0 (pixel stream interface) sub-block. */
100 static inline void dp_write_p0(struct dp_catalog_private *catalog,
101 u32 offset, u32 data)
103 offset += MSM_DP_CONTROLLER_P0_OFFSET;
105 * To make sure interface reg writes happens before any other operation,
106 * this function uses writel() instread of writel_relaxed()
108 writel(data, catalog->io->dp_controller.base + offset);
/* Read a register in the P0 sub-block. */
111 static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
114 offset += MSM_DP_CONTROLLER_P0_OFFSET;
116 * To make sure interface reg writes happens before any other operation,
117 * this function uses writel() instread of writel_relaxed()
119 return readl_relaxed(catalog->io->dp_controller.base + offset);
/* Read a register in the LINK (mainlink) sub-block. */
122 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
124 offset += MSM_DP_CONTROLLER_LINK_OFFSET;
125 return readl_relaxed(catalog->io->dp_controller.base + offset);
/* Write a register in the LINK sub-block (ordered via writel()). */
128 static inline void dp_write_link(struct dp_catalog_private *catalog,
129 u32 offset, u32 data)
131 offset += MSM_DP_CONTROLLER_LINK_OFFSET;
133 * To make sure link reg writes happens before any other operation,
134 * this function uses writel() instread of writel_relaxed()
136 writel(data, catalog->io->dp_controller.base + offset);
139 /* aux related catalog functions */
/*
 * AUX channel data-path helpers used by the dp_aux layer.
 * NOTE(review): return statements / braces are missing from this extraction
 * (gaps in embedded numbering); code text left untouched.
 */
/* Return the next word from the AUX read FIFO (REG_DP_AUX_DATA). */
140 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
142 struct dp_catalog_private *catalog = container_of(dp_catalog,
143 struct dp_catalog_private, dp_catalog);
145 return dp_read_aux(catalog, REG_DP_AUX_DATA);
/* Push dp_catalog->aux_data into the AUX write FIFO (REG_DP_AUX_DATA). */
148 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
150 struct dp_catalog_private *catalog = container_of(dp_catalog,
151 struct dp_catalog_private, dp_catalog);
153 dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
/* Write dp_catalog->aux_data into REG_DP_AUX_TRANS_CTRL to kick a transaction. */
157 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
159 struct dp_catalog_private *catalog = container_of(dp_catalog,
160 struct dp_catalog_private, dp_catalog);
162 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
/*
 * Clear the in-flight AUX transaction. For the "read" path only the GO bit is
 * cleared (read-modify-write); otherwise the whole control register is zeroed.
 * NOTE(review): the if/else framing around the two paths is not visible here.
 */
166 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
169 struct dp_catalog_private *catalog = container_of(dp_catalog,
170 struct dp_catalog_private, dp_catalog);
173 data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
174 data &= ~DP_AUX_TRANS_CTRL_GO;
175 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
177 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
/*
 * Acknowledge PHY-level AUX errors: read the status register, then pulse the
 * clear register (0x1f -> 0x9f -> 0) — presumably a h/w-required sequence;
 * not documented in this chunk.
 */
182 int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
184 struct dp_catalog_private *catalog = container_of(dp_catalog,
185 struct dp_catalog_private, dp_catalog);
187 dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
188 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
189 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
190 dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
/*
 * Reset the AUX controller: assert DP_AUX_CTRL_RESET, hold for the h/w
 * recommended ~1ms, then deassert.
 */
194 void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
197 struct dp_catalog_private *catalog = container_of(dp_catalog,
198 struct dp_catalog_private, dp_catalog);
200 aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
202 aux_ctrl |= DP_AUX_CTRL_RESET;
203 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
204 usleep_range(1000, 1100); /* h/w recommended delay */
206 aux_ctrl &= ~DP_AUX_CTRL_RESET;
207 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
/*
 * Enable/disable the AUX controller. On enable, timeout count and AUX limits
 * are programmed to 0xffff before setting DP_AUX_CTRL_ENABLE.
 * NOTE(review): the if (enable)/else framing is not visible in this extraction.
 */
210 void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
213 struct dp_catalog_private *catalog = container_of(dp_catalog,
214 struct dp_catalog_private, dp_catalog);
216 aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
219 dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
220 dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
221 aux_ctrl |= DP_AUX_CTRL_ENABLE;
223 aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
226 dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
/*
 * Push updated AUX tuning configuration to the DP PHY.
 * NOTE(review): the actual phy_calibrate()/phy call is among the dropped
 * lines — only the phy handle lookup is visible here.
 */
229 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
231 struct dp_catalog_private *catalog = container_of(dp_catalog,
232 struct dp_catalog_private, dp_catalog);
233 struct dp_io *dp_io = catalog->io;
234 struct phy *phy = dp_io->phy;
/*
 * Hex-dump "len" bytes of registers from "base", 16 bytes (4 words) per line.
 * NOTE(review): the declarations of i/x0/x4/x8/xc/addr_off and the addr_off
 * increment are among the lines dropped from this extraction.
 */
239 static void dump_regs(void __iomem *base, int len)
245 len = DIV_ROUND_UP(len, 16);
246 for (i = 0; i < len; i++) {
247 x0 = readl_relaxed(base + addr_off);
248 x4 = readl_relaxed(base + addr_off + 0x04);
249 x8 = readl_relaxed(base + addr_off + 0x08);
250 xc = readl_relaxed(base + addr_off + 0x0c);
252 pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
/*
 * Debug aid: dump all four DP controller register sub-blocks
 * (AHB, AUX, LINK, P0) via dump_regs().
 */
257 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
260 struct dp_catalog_private *catalog = container_of(dp_catalog,
261 struct dp_catalog_private, dp_catalog);
263 pr_info("AHB regs\n");
264 offset = MSM_DP_CONTROLLER_AHB_OFFSET;
265 len = MSM_DP_CONTROLLER_AHB_SIZE;
266 dump_regs(catalog->io->dp_controller.base + offset, len);
268 pr_info("AUXCLK regs\n");
269 offset = MSM_DP_CONTROLLER_AUX_OFFSET;
270 len = MSM_DP_CONTROLLER_AUX_SIZE;
271 dump_regs(catalog->io->dp_controller.base + offset, len);
273 pr_info("LCLK regs\n");
274 offset = MSM_DP_CONTROLLER_LINK_OFFSET;
275 len = MSM_DP_CONTROLLER_LINK_SIZE;
276 dump_regs(catalog->io->dp_controller.base + offset, len);
278 pr_info("P0CLK regs\n");
279 offset = MSM_DP_CONTROLLER_P0_OFFSET;
280 len = MSM_DP_CONTROLLER_P0_SIZE;
281 dump_regs(catalog->io->dp_controller.base + offset, len);
/*
 * Read and acknowledge pending AUX (STATUS1 group) interrupts.
 * Masks off the mask bits, shifts the pending bits into ACK position, and
 * writes them back together with the mask so the sources stay enabled.
 * NOTE(review): the return of the pending bits is among the dropped lines.
 */
284 int dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
286 struct dp_catalog_private *catalog = container_of(dp_catalog,
287 struct dp_catalog_private, dp_catalog);
290 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
291 intr &= ~DP_INTERRUPT_STATUS1_MASK;
292 intr_ack = (intr & DP_INTERRUPT_STATUS1)
293 << DP_INTERRUPT_STATUS_ACK_SHIFT;
294 dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
295 DP_INTERRUPT_STATUS1_MASK);
301 /* controller related catalog functions */
/*
 * Program the link transfer-unit parameters: valid boundary words and TU size.
 */
302 void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
303 u32 dp_tu, u32 valid_boundary,
306 struct dp_catalog_private *catalog = container_of(dp_catalog,
307 struct dp_catalog_private, dp_catalog);
309 dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
310 dp_write_link(catalog, REG_DP_TU, dp_tu);
311 dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
/* Write the mainlink state machine control word (training pattern, etc.). */
314 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
316 struct dp_catalog_private *catalog = container_of(dp_catalog,
317 struct dp_catalog_private, dp_catalog);
319 dp_write_link(catalog, REG_DP_STATE_CTRL, state);
/* Write the caller-assembled configuration word to DP_CONFIGURATION_CTRL. */
322 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
324 struct dp_catalog_private *catalog = container_of(dp_catalog,
325 struct dp_catalog_private, dp_catalog);
327 DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);
329 dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
/*
 * Program an identity logical-to-physical lane mapping (lane N -> position N).
 */
332 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
334 struct dp_catalog_private *catalog = container_of(dp_catalog,
335 struct dp_catalog_private, dp_catalog);
336 u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
339 ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
340 ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
341 ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
342 ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
344 dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
/*
 * Enable or disable the mainlink. The enable path pulses RESET
 * (clear -> set -> clear) before setting ENABLE | FB_BOUNDARY_SEL;
 * the disable path does a read-modify-write clearing ENABLE only.
 * NOTE(review): the if (enable)/else framing around the two paths is among
 * the lines dropped from this extraction.
 */
348 void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
352 struct dp_catalog_private *catalog = container_of(dp_catalog,
353 struct dp_catalog_private, dp_catalog);
357 * To make sure link reg writes happens before other operation,
358 * dp_write_link() function uses writel()
360 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
362 mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
363 DP_MAINLINK_CTRL_ENABLE);
364 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
366 mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
367 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
369 mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
370 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
372 mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
373 DP_MAINLINK_FB_BOUNDARY_SEL);
374 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
376 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
377 mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
378 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
/*
 * Program MISC0/MISC1: clear the old test-bits-depth field, then OR in the
 * caller's colorimetry and bit-depth values, and force synchronous clock mode.
 * NOTE(review): the parameter list (colorimetry_cfg, test_bits_depth) spans
 * lines dropped from this extraction.
 */
382 void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
387 struct dp_catalog_private *catalog = container_of(dp_catalog,
388 struct dp_catalog_private, dp_catalog);
390 misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);
/* Only the 3-bit depth field is cleared; colorimetry bits are OR-merged into
 * whatever was there — presumably callers rely on the reset value. */
393 misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
394 misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
395 misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
396 /* Configure clock to synchronous mode */
397 misc_val |= DP_MISC0_SYNCHRONOUS_CLK;
399 DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
400 dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
/*
 * Compute and program the software MVID/NVID (MSA timing) values for the
 * given link rate and stream pixel rate.
 *
 * NOTE(review): several lines are missing from this extraction — the
 * pixel_div assignments in the rate ladder, the derivation of pixel_m/pixel_n
 * from num/den after rational_best_approximation(), the nvid assignment in
 * the nvid < nvid_fixed branch, and the nvid scaling for HBR2/HBR3. The
 * visible code cannot be fully reasoned about without them.
 */
403 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
404 u32 rate, u32 stream_rate_khz,
407 u32 pixel_m, pixel_n;
408 u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
409 u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
410 u32 const link_rate_hbr2 = 540000;
411 u32 const link_rate_hbr3 = 810000;
412 unsigned long den, num;
414 struct dp_catalog_private *catalog = container_of(dp_catalog,
415 struct dp_catalog_private, dp_catalog);
/* Select the pixel mux divider from the link rate (values dropped here). */
417 if (rate == link_rate_hbr3)
419 else if (rate == 1620000 || rate == 270000)
421 else if (rate == link_rate_hbr2)
424 DRM_ERROR("Invalid pixel mux divider\n");
426 dispcc_input_rate = (rate * 10) / pixel_div;
/* Best rational approximation of dispcc_input_rate / stream_rate_khz with
 * 16-bit numerator and denominator limits. */
428 rational_best_approximation(dispcc_input_rate, stream_rate_khz,
429 (unsigned long)(1 << 16) - 1,
430 (unsigned long)(1 << 16) - 1, &den, &num);
437 mvid = (pixel_m & 0xFFFF) * 5;
438 nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
/* Scale mvid up when nvid is below the DP constant-N value. */
440 if (nvid < nvid_fixed) {
443 temp = (nvid_fixed / nvid) * nvid;
444 mvid = (nvid_fixed / nvid) * mvid;
448 if (link_rate_hbr2 == rate)
451 if (link_rate_hbr3 == rate)
454 DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
455 dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
456 dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
/* Disable the DSC DTO in the pixel interface block. */
457 dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
/*
 * Start link-training pattern "pattern" (1-based) via the state-ctrl
 * register, then poll REG_DP_MAINLINK_READY for the corresponding ready bit.
 * Returns an error when the poll times out.
 * NOTE(review): the readx_poll_timeout() value/condition arguments and the
 * final return are among the lines dropped from this extraction.
 */
460 int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
465 struct dp_catalog_private *catalog = container_of(dp_catalog,
466 struct dp_catalog_private, dp_catalog);
468 bit = BIT(pattern - 1);
469 DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
470 dp_catalog_ctrl_state_ctrl(dp_catalog, bit);
472 bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
474 /* Poll for mainlink ready status */
475 ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
476 MSM_DP_CONTROLLER_LINK_OFFSET +
477 REG_DP_MAINLINK_READY,
479 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
481 DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
/*
 * Soft-reset the DP controller: assert DP_SW_RESET, hold ~1ms
 * (h/w recommended), then deassert.
 */
487 void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
490 struct dp_catalog_private *catalog = container_of(dp_catalog,
491 struct dp_catalog_private, dp_catalog);
493 sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
495 sw_reset |= DP_SW_RESET;
496 dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
497 usleep_range(1000, 1100); /* h/w recommended delay */
499 sw_reset &= ~DP_SW_RESET;
500 dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
/*
 * Poll REG_DP_MAINLINK_READY for DP_MAINLINK_READY_FOR_VIDEO.
 * Returns whether the mainlink became ready within the poll timeout.
 * NOTE(review): the final return is among the dropped lines.
 */
503 bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
507 struct dp_catalog_private *catalog = container_of(dp_catalog,
508 struct dp_catalog_private, dp_catalog);
510 /* Poll for mainlink ready status */
511 ret = readl_poll_timeout(catalog->io->dp_controller.base +
512 MSM_DP_CONTROLLER_LINK_OFFSET +
513 REG_DP_MAINLINK_READY,
514 data, data & DP_MAINLINK_READY_FOR_VIDEO,
515 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
517 DRM_ERROR("mainlink not ready\n");
/*
 * Globally enable or disable both DP interrupt groups by writing the mask
 * bits (or zero) to REG_DP_INTR_STATUS / REG_DP_INTR_STATUS2.
 * NOTE(review): the if (enable)/else framing is among the dropped lines.
 */
524 void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
527 struct dp_catalog_private *catalog = container_of(dp_catalog,
528 struct dp_catalog_private, dp_catalog);
531 dp_write_ahb(catalog, REG_DP_INTR_STATUS,
532 DP_INTERRUPT_STATUS1_MASK);
533 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
534 DP_INTERRUPT_STATUS2_MASK);
536 dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
537 dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
/*
 * Set or clear intr_mask bits in the HPD interrupt mask register; the final
 * write is clamped to the valid DP_DP_HPD_INT_MASK bits.
 */
541 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
542 u32 intr_mask, bool en)
544 struct dp_catalog_private *catalog = container_of(dp_catalog,
545 struct dp_catalog_private, dp_catalog);
547 u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
549 config = (en ? config | intr_mask : config & ~intr_mask);
551 dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
552 config & DP_DP_HPD_INT_MASK);
/*
 * One-time HPD setup: unmask plug/unplug/replug/irq-HPD interrupts, enable
 * the HPD reference timer, then enable the HPD block itself.
 */
555 void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
557 struct dp_catalog_private *catalog = container_of(dp_catalog,
558 struct dp_catalog_private, dp_catalog);
560 u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
562 /* enable HPD interrupts */
563 dp_catalog_hpd_config_intr(dp_catalog,
564 DP_DP_HPD_PLUG_INT_MASK | DP_DP_IRQ_HPD_INT_MASK
565 | DP_DP_HPD_UNPLUG_INT_MASK | DP_DP_HPD_REPLUG_INT_MASK, true);
567 /* Configure REFTIMER and enable it */
568 reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
569 dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
572 dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
/*
 * Read pending HPD interrupts and acknowledge the valid ones.
 * NOTE(review): the return of "isr" is among the dropped lines.
 */
575 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
577 struct dp_catalog_private *catalog = container_of(dp_catalog,
578 struct dp_catalog_private, dp_catalog);
581 isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
582 dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
583 (isr & DP_DP_HPD_INT_MASK));
/*
 * Read and acknowledge pending STATUS2-group (video/idle/frame/CRC)
 * interrupts — same mask/ack dance as dp_catalog_aux_get_irq() but on
 * REG_DP_INTR_STATUS2. NOTE(review): the return is among the dropped lines.
 */
588 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
590 struct dp_catalog_private *catalog = container_of(dp_catalog,
591 struct dp_catalog_private, dp_catalog);
594 intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
595 intr &= ~DP_INTERRUPT_STATUS2_MASK;
596 intr_ack = (intr & DP_INTERRUPT_STATUS2)
597 << DP_INTERRUPT_STATUS_ACK_SHIFT;
598 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
599 intr_ack | DP_INTERRUPT_STATUS2_MASK);
/*
 * Pulse the PHY and PLL software resets for ~1ms (h/w recommended delay).
 */
604 void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
606 struct dp_catalog_private *catalog = container_of(dp_catalog,
607 struct dp_catalog_private, dp_catalog);
609 dp_write_ahb(catalog, REG_DP_PHY_CTRL,
610 DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
611 usleep_range(1000, 1100); /* h/w recommended delay */
612 dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
/*
 * Push new voltage-swing / pre-emphasis levels to the DP PHY via
 * phy_configure(), using the set_voltages one-shot flag.
 * NOTE(review): the final return is among the lines dropped from this
 * extraction; phy_configure()'s result is not checked in the visible code.
 */
615 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
616 u8 v_level, u8 p_level)
618 struct dp_catalog_private *catalog = container_of(dp_catalog,
619 struct dp_catalog_private, dp_catalog);
620 struct dp_io *dp_io = catalog->io;
621 struct phy *phy = dp_io->phy;
622 struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;
624 /* TODO: Update for all lanes instead of just first one */
625 opts_dp->voltage[0] = v_level;
626 opts_dp->pre[0] = p_level;
627 opts_dp->set_voltages = 1;
628 phy_configure(phy, &dp_io->phy_opts);
629 opts_dp->set_voltages = 0;
/*
 * Transmit a DP PHY compliance test pattern (DP_PHY_TEST_PATTERN_*).
 * Clears the current pattern, then programs the state-ctrl / scrambler-reset /
 * mainlink registers required by the requested pattern.
 * NOTE(review): this extraction drops the switch framing, "break" statements,
 * some value setup lines and the custom-pattern data words — gaps are visible
 * in the embedded numbering. Code text left untouched.
 */
634 void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
637 struct dp_catalog_private *catalog = container_of(dp_catalog,
638 struct dp_catalog_private, dp_catalog);
641 /* Make sure to clear the current pattern before starting a new one */
642 dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);
/* D10.2 uses training pattern 1 symbols. */
645 case DP_PHY_TEST_PATTERN_D10_2:
646 dp_write_link(catalog, REG_DP_STATE_CTRL,
647 DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
/* Symbol error rate measurement: program scrambler reset count, relax the
 * safe-to-exit level, then start the error-measure pattern. */
649 case DP_PHY_TEST_PATTERN_ERROR_COUNT:
651 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
653 value |= SCRAMBLER_RESET_COUNT_VALUE;
654 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
656 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
657 DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
658 dp_write_link(catalog, REG_DP_STATE_CTRL,
659 DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
661 case DP_PHY_TEST_PATTERN_PRBS7:
662 dp_write_link(catalog, REG_DP_STATE_CTRL,
663 DP_STATE_CTRL_LINK_PRBS7);
/* 80-bit custom pattern: the three pattern words are written to the
 * CUSTOM_PATTERN registers (word values dropped from this extraction). */
665 case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
666 dp_write_link(catalog, REG_DP_STATE_CTRL,
667 DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
668 /* 00111110000011111000001111100000 */
669 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
671 /* 00001111100000111110000011111000 */
672 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
674 /* 1111100000111110 */
675 dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
/* CP2520: disable scrambler bypass, program the HBR2 ERM pattern and
 * scrambler reset count, start error measure, then re-enable the mainlink. */
678 case DP_PHY_TEST_PATTERN_CP2520:
679 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
680 value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
681 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
683 value = DP_HBR2_ERM_PATTERN;
684 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
686 value |= SCRAMBLER_RESET_COUNT_VALUE;
687 dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
689 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
690 DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
691 dp_write_link(catalog, REG_DP_STATE_CTRL,
692 DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
693 value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
694 value |= DP_MAINLINK_CTRL_ENABLE;
695 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
697 case DP_PHY_TEST_PATTERN_SEL_MASK:
698 dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
699 DP_MAINLINK_CTRL_ENABLE);
700 dp_write_link(catalog, REG_DP_STATE_CTRL,
701 DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
/* Unknown pattern: log and do nothing (default arm). */
704 DRM_DEBUG_DP("No valid test pattern requested:0x%x\n", pattern);
/* Return the raw REG_DP_MAINLINK_READY value (used to read back the active
 * PHY test pattern state). */
709 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
711 struct dp_catalog_private *catalog = container_of(dp_catalog,
712 struct dp_catalog_private, dp_catalog);
714 return dp_read_link(catalog, REG_DP_MAINLINK_READY);
717 /* panel related catalog functions */
/*
 * Program the panel timing registers from values precomputed by the caller
 * into the public dp_catalog fields (total, sync_start, width_blanking,
 * dp_active). NOTE(review): the REG_DP_TOTAL_HOR_VER value argument and the
 * return are among the lines dropped from this extraction.
 */
718 int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
720 struct dp_catalog_private *catalog = container_of(dp_catalog,
721 struct dp_catalog_private, dp_catalog);
723 dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
725 dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
726 dp_catalog->sync_start);
727 dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
728 dp_catalog->width_blanking);
729 dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
/*
 * Enable the internal test pattern generator (TPG) for the given mode:
 * derive interface timing from drm_mode, program the P0 INTF timing
 * registers, then enable a checkered-rect pattern, BIST, and the timing
 * engine. NOTE(review): several multiplier/declaration lines are dropped
 * from this extraction (gaps in the embedded numbering).
 */
733 void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
734 struct drm_display_mode *drm_mode)
736 struct dp_catalog_private *catalog = container_of(dp_catalog,
737 struct dp_catalog_private, dp_catalog);
738 u32 hsync_period, vsync_period;
739 u32 display_v_start, display_v_end;
740 u32 hsync_start_x, hsync_end_x;
745 /* TPG config parameters*/
746 hsync_period = drm_mode->htotal;
747 vsync_period = drm_mode->vtotal;
/* Vertical active window in "pixel" units; the hsync_period multipliers are
 * among the lines dropped from this extraction. */
749 display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
751 display_v_end = ((vsync_period - (drm_mode->vsync_start -
/* Shift start/end by the horizontal back/front porch. */
755 display_v_start += drm_mode->htotal - drm_mode->hsync_start;
756 display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);
758 hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
759 hsync_end_x = hsync_period - (drm_mode->hsync_start -
760 drm_mode->hdisplay) - 1;
762 v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;
/* HSYNC_CTL packs period in the high half, pulse width in the low half. */
764 hsync_ctl = (hsync_period << 16) |
765 (drm_mode->hsync_end - drm_mode->hsync_start);
766 display_hctl = (hsync_end_x << 16) | hsync_start_x;
769 dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
770 dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
771 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
773 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
775 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
776 dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
777 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
778 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
779 dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
780 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
/* F1 (second field) and ACTIVE_V windows unused — zeroed. */
781 dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
782 dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
783 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
784 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
785 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
786 dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
787 dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);
/* Select pattern, pixel format, then turn on BIST and the timing engine. */
789 dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
790 DP_TPG_CHECKERED_RECT_PATTERN);
791 dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
792 DP_TPG_VIDEO_CONFIG_BPP_8BIT |
793 DP_TPG_VIDEO_CONFIG_RGB);
794 dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
795 DP_BIST_ENABLE_DPBIST_EN);
796 dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
797 DP_TIMING_ENGINE_EN_EN);
798 DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
/* Disable the test pattern generator: clear pattern select, BIST and the
 * timing engine. */
801 void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
803 struct dp_catalog_private *catalog = container_of(dp_catalog,
804 struct dp_catalog_private, dp_catalog);
806 dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
807 dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
808 dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
/*
 * Allocate (devm-managed) the catalog and return the public handle.
 * Returns ERR_PTR(-EINVAL) on bad input, ERR_PTR(-ENOMEM) on allocation
 * failure. NOTE(review): the input-validation condition and the assignment of
 * "io" into the catalog are among the lines dropped from this extraction.
 */
811 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
813 struct dp_catalog_private *catalog;
816 DRM_ERROR("invalid input\n");
817 return ERR_PTR(-EINVAL);
820 catalog = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
822 return ERR_PTR(-ENOMEM);
827 return &catalog->dp_catalog;
/*
 * Read one audio SDP header word: sdp_type/sdp_header select the register
 * via the audio_map table and the value is returned through
 * dp_catalog->audio_data. NOTE(review): the NULL-check guard on dp_catalog is
 * among the lines dropped from this extraction.
 */
830 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
832 struct dp_catalog_private *catalog;
833 u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
834 enum dp_catalog_audio_sdp_type sdp;
835 enum dp_catalog_audio_header_type header;
840 catalog = container_of(dp_catalog,
841 struct dp_catalog_private, dp_catalog);
843 sdp_map = catalog->audio_map;
844 sdp = dp_catalog->sdp_type;
845 header = dp_catalog->sdp_header;
847 dp_catalog->audio_data = dp_read_link(catalog,
848 sdp_map[sdp][header]);
/*
 * Write one audio SDP header word: the inverse of the getter above, taking
 * the value from dp_catalog->audio_data.
 */
851 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
853 struct dp_catalog_private *catalog;
854 u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
855 enum dp_catalog_audio_sdp_type sdp;
856 enum dp_catalog_audio_header_type header;
862 catalog = container_of(dp_catalog,
863 struct dp_catalog_private, dp_catalog);
865 sdp_map = catalog->audio_map;
866 sdp = dp_catalog->sdp_type;
867 header = dp_catalog->sdp_header;
868 data = dp_catalog->audio_data;
870 dp_write_link(catalog, sdp_map[sdp][header], data);
/*
 * Program the audio clock-recovery control register: the select value from
 * dp_catalog->audio_data goes into bits [..:4], combined with fixed control
 * bits 8, 14 and 31 (bit meanings not documented in this chunk).
 */
873 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
875 struct dp_catalog_private *catalog;
876 u32 acr_ctrl, select;
881 catalog = container_of(dp_catalog,
882 struct dp_catalog_private, dp_catalog);
884 select = dp_catalog->audio_data;
885 acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
887 DRM_DEBUG_DP("select = 0x%x, acr_ctrl = 0x%x\n", select, acr_ctrl);
889 dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
/*
 * Enable/disable the audio engine: dp_catalog->audio_data doubles as the
 * boolean, toggling BIT(0) of MMSS_DP_AUDIO_CFG.
 * NOTE(review): the if/else framing and the barrier implied by the trailing
 * comment are among the lines dropped from this extraction.
 */
892 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
894 struct dp_catalog_private *catalog;
901 catalog = container_of(dp_catalog,
902 struct dp_catalog_private, dp_catalog);
904 enable = !!dp_catalog->audio_data;
905 audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
908 audio_ctrl |= BIT(0);
910 audio_ctrl &= ~BIT(0);
912 DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);
914 dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
915 /* make sure audio engine is disabled */
/*
 * Enable the audio secondary data packets (timestamp, stream, copy
 * management, ISRC, infoframe) in SDP_CFG and set the register-source
 * behavior in SDP_CFG2. NOTE(review): the actual bit-set statements between
 * the per-field comments are among the lines dropped from this extraction.
 */
919 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
921 struct dp_catalog_private *catalog;
928 catalog = container_of(dp_catalog,
929 struct dp_catalog_private, dp_catalog);
931 sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
932 /* AUDIO_TIMESTAMP_SDP_EN */
934 /* AUDIO_STREAM_SDP_EN */
936 /* AUDIO_COPY_MANAGEMENT_SDP_EN */
938 /* AUDIO_ISRC_SDP_EN */
940 /* AUDIO_INFOFRAME_SDP_EN */
943 DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);
945 dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
947 sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
948 /* IFRM_REGSRC -> Do not use reg values */
950 /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
953 DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);
955 dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
/*
 * Install the static audio-SDP header-register lookup table. Each row holds
 * the register offsets for one SDP type; rows are indexed by
 * enum dp_catalog_audio_sdp_type and columns by header index.
 * Each row's last offset repeats the "_1" register — presumably the third
 * header word shares it; confirm against the register map.
 * NOTE(review): the per-row brace/label lines of the initializer are among
 * the lines dropped from this extraction.
 */
958 void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
960 struct dp_catalog_private *catalog;
962 static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
964 MMSS_DP_AUDIO_STREAM_0,
965 MMSS_DP_AUDIO_STREAM_1,
966 MMSS_DP_AUDIO_STREAM_1,
969 MMSS_DP_AUDIO_TIMESTAMP_0,
970 MMSS_DP_AUDIO_TIMESTAMP_1,
971 MMSS_DP_AUDIO_TIMESTAMP_1,
974 MMSS_DP_AUDIO_INFOFRAME_0,
975 MMSS_DP_AUDIO_INFOFRAME_1,
976 MMSS_DP_AUDIO_INFOFRAME_1,
979 MMSS_DP_AUDIO_COPYMANAGEMENT_0,
980 MMSS_DP_AUDIO_COPYMANAGEMENT_1,
981 MMSS_DP_AUDIO_COPYMANAGEMENT_1,
984 MMSS_DP_AUDIO_ISRC_0,
985 MMSS_DP_AUDIO_ISRC_1,
986 MMSS_DP_AUDIO_ISRC_1,
993 catalog = container_of(dp_catalog,
994 struct dp_catalog_private, dp_catalog);
996 catalog->audio_map = sdp_map;
/*
 * Program the "safe to exit" level into the low bits of
 * REG_DP_MAINLINK_LEVELS, taking the value from dp_catalog->audio_data and
 * preserving the remaining bits (0xFE0 mask).
 */
999 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1001 struct dp_catalog_private *catalog;
1002 u32 mainlink_levels, safe_to_exit_level;
1007 catalog = container_of(dp_catalog,
1008 struct dp_catalog_private, dp_catalog);
1010 safe_to_exit_level = dp_catalog->audio_data;
1011 mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1012 mainlink_levels &= 0xFE0;
1013 mainlink_levels |= safe_to_exit_level;
1015 DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1016 mainlink_levels, safe_to_exit_level);
1018 dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);