// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
 */

#define pr_fmt(fmt) "[drm-dp] %s: " fmt, __func__

#include <linux/delay.h>
#include <linux/iopoll.h>
#include <linux/platform_device.h>
#include <linux/rational.h>
#include <drm/display/drm_dp_helper.h>
#include <drm/drm_print.h>

#include "dp_catalog.h"
#include "dp_reg.h"

#define POLLING_SLEEP_US			1000
#define POLLING_TIMEOUT_US			10000

#define SCRAMBLER_RESET_COUNT_VALUE		0xFC

#define DP_INTERRUPT_STATUS_ACK_SHIFT	1
#define DP_INTERRUPT_STATUS_MASK_SHIFT	2

#define DP_INTF_CONFIG_DATABUS_WIDEN	BIT(4)
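/*
 * Each interrupt source in the DP_INTR_STATUS registers owns a group of bits:
 * the raw status bit, an "ack" bit one position above it
 * (DP_INTERRUPT_STATUS_ACK_SHIFT) and a mask/enable bit two positions above it
 * (DP_INTERRUPT_STATUS_MASK_SHIFT). The *_ACK and *_MASK macros below are
 * derived from the status bits using these shifts.
 */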
#define DP_INTERRUPT_STATUS1 \
	(DP_INTR_AUX_XFER_DONE | \
	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)

#define DP_INTERRUPT_STATUS1_ACK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS1_MASK \
	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)

#define DP_INTERRUPT_STATUS2 \
	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)

#define DP_INTERRUPT_STATUS2_ACK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
#define DP_INTERRUPT_STATUS2_MASK \
	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)

#define DP_INTERRUPT_STATUS4 \
	(PSR_UPDATE_INT | PSR_CAPTURE_INT | PSR_EXIT_INT | \
	PSR_UPDATE_ERROR_INT | PSR_WAKE_ERROR_INT)

#define DP_INTERRUPT_MASK4 \
	(PSR_UPDATE_MASK | PSR_CAPTURE_MASK | PSR_EXIT_MASK | \
	PSR_UPDATE_ERROR_MASK | PSR_WAKE_ERROR_MASK)
#define DP_DEFAULT_AHB_OFFSET	0x0000
#define DP_DEFAULT_AHB_SIZE	0x0200
#define DP_DEFAULT_AUX_OFFSET	0x0200
#define DP_DEFAULT_AUX_SIZE	0x0200
#define DP_DEFAULT_LINK_OFFSET	0x0400
#define DP_DEFAULT_LINK_SIZE	0x0C00
#define DP_DEFAULT_P0_OFFSET	0x1000
#define DP_DEFAULT_P0_SIZE	0x0400
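/*
 * Default sub-region layout used with the legacy single-reg DT binding:
 * msm_dp_catalog_get_io() falls back to carving the AHB, AUX, link and P0
 * blocks out of one mapping at the offsets and sizes above.
 */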
struct dss_io_region {
	size_t len;
	void __iomem *base;
};

struct dss_io_data {
	struct dss_io_region ahb;
	struct dss_io_region aux;
	struct dss_io_region link;
	struct dss_io_region p0;
};

struct msm_dp_catalog_private {
	struct device *dev;
	struct drm_device *drm_dev;
	struct dss_io_data io;
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct msm_dp_catalog msm_dp_catalog;
};
void msm_dp_catalog_snapshot(struct msm_dp_catalog *msm_dp_catalog, struct msm_disp_state *disp_state)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
			struct msm_dp_catalog_private, msm_dp_catalog);
	struct dss_io_data *dss = &catalog->io;

	msm_disp_snapshot_add_block(disp_state, dss->ahb.len, dss->ahb.base, "dp_ahb");
	msm_disp_snapshot_add_block(disp_state, dss->aux.len, dss->aux.base, "dp_aux");
	msm_disp_snapshot_add_block(disp_state, dss->link.len, dss->link.base, "dp_link");
	msm_disp_snapshot_add_block(disp_state, dss->p0.len, dss->p0.base, "dp_p0");
}
static inline u32 msm_dp_read_aux(struct msm_dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.aux.base + offset);
}

static inline void msm_dp_write_aux(struct msm_dp_catalog_private *catalog,
				    u32 offset, u32 data)
{
	/*
	 * To make sure aux reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.aux.base + offset);
}

static inline u32 msm_dp_read_ahb(const struct msm_dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.ahb.base + offset);
}

static inline void msm_dp_write_ahb(struct msm_dp_catalog_private *catalog,
				    u32 offset, u32 data)
{
	/*
	 * To make sure phy reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.ahb.base + offset);
}

static inline void msm_dp_write_p0(struct msm_dp_catalog_private *catalog,
				   u32 offset, u32 data)
{
	/*
	 * To make sure interface reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.p0.base + offset);
}

static inline u32 msm_dp_read_p0(struct msm_dp_catalog_private *catalog,
				 u32 offset)
{
	/*
	 * Unlike the write helpers above, reads need no ordering barrier,
	 * so readl_relaxed() is used here.
	 */
	return readl_relaxed(catalog->io.p0.base + offset);
}

static inline u32 msm_dp_read_link(struct msm_dp_catalog_private *catalog, u32 offset)
{
	return readl_relaxed(catalog->io.link.base + offset);
}

static inline void msm_dp_write_link(struct msm_dp_catalog_private *catalog,
				     u32 offset, u32 data)
{
	/*
	 * To make sure link reg writes happen before any other operation,
	 * this function uses writel() instead of writel_relaxed()
	 */
	writel(data, catalog->io.link.base + offset);
}
/* aux related catalog functions */
u32 msm_dp_catalog_aux_read_data(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	return msm_dp_read_aux(catalog, REG_DP_AUX_DATA);
}

int msm_dp_catalog_aux_write_data(struct msm_dp_catalog *msm_dp_catalog, u32 data)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_write_aux(catalog, REG_DP_AUX_DATA, data);

	return 0;
}

int msm_dp_catalog_aux_write_trans(struct msm_dp_catalog *msm_dp_catalog, u32 data)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);

	return 0;
}

int msm_dp_catalog_aux_clear_trans(struct msm_dp_catalog *msm_dp_catalog, bool read)
{
	u32 data = 0;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	if (read) {
		data = msm_dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
		data &= ~DP_AUX_TRANS_CTRL_GO;
		msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
	} else {
		msm_dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
	}

	return 0;
}

int msm_dp_catalog_aux_clear_hw_interrupts(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	msm_dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);

	return 0;
}
/**
 * msm_dp_catalog_aux_reset() - reset AUX controller
 *
 * @msm_dp_catalog: DP catalog structure
 *
 * This function resets the AUX controller
 *
 * NOTE: resetting the AUX controller will also clear any pending HPD related interrupts
 *
 */
void msm_dp_catalog_aux_reset(struct msm_dp_catalog *msm_dp_catalog)
{
	u32 aux_ctrl;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	aux_ctrl = msm_dp_read_aux(catalog, REG_DP_AUX_CTRL);

	aux_ctrl |= DP_AUX_CTRL_RESET;
	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
	usleep_range(1000, 1100); /* h/w recommended delay */

	aux_ctrl &= ~DP_AUX_CTRL_RESET;
	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
void msm_dp_catalog_aux_enable(struct msm_dp_catalog *msm_dp_catalog, bool enable)
{
	u32 aux_ctrl;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	aux_ctrl = msm_dp_read_aux(catalog, REG_DP_AUX_CTRL);

	if (enable) {
		msm_dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
		msm_dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
		aux_ctrl |= DP_AUX_CTRL_ENABLE;
	} else {
		aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
	}

	msm_dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
}
int msm_dp_catalog_aux_wait_for_hpd_connect_state(struct msm_dp_catalog *msm_dp_catalog,
						  unsigned long wait_us)
{
	u32 state;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	/* poll for hpd connected status every 2ms and timeout after wait_us */
	return readl_poll_timeout(catalog->io.aux.base +
				REG_DP_DP_HPD_INT_STATUS,
				state, state & DP_DP_HPD_STATE_STATUS_CONNECTED,
				min(wait_us, 2000), wait_us);
}
static void dump_regs(void __iomem *base, int len)
{
	int i;
	u32 x0, x4, x8, xc;
	u32 addr_off = 0;

	len = DIV_ROUND_UP(len, 16);
	for (i = 0; i < len; i++) {
		x0 = readl_relaxed(base + addr_off);
		x4 = readl_relaxed(base + addr_off + 0x04);
		x8 = readl_relaxed(base + addr_off + 0x08);
		xc = readl_relaxed(base + addr_off + 0x0c);

		pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);

		addr_off += 16;
	}
}
void msm_dp_catalog_dump_regs(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);
	struct dss_io_data *io = &catalog->io;

	pr_info("AHB regs\n");
	dump_regs(io->ahb.base, io->ahb.len);

	pr_info("AUXCLK regs\n");
	dump_regs(io->aux.base, io->aux.len);

	pr_info("LCLK regs\n");
	dump_regs(io->link.base, io->link.len);

	pr_info("P0CLK regs\n");
	dump_regs(io->p0.base, io->p0.len);
}
u32 msm_dp_catalog_aux_get_irq(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	u32 intr, intr_ack;

	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS);
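	/*
	 * Drop the mask bits from the raw status, then write back the ack bits
	 * (status << DP_INTERRUPT_STATUS_ACK_SHIFT) together with the mask bits
	 * so the pending interrupts are cleared while staying enabled.
	 */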
	intr &= ~DP_INTERRUPT_STATUS1_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS1)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
			DP_INTERRUPT_STATUS1_MASK);

	return intr;
}
/* controller related catalog functions */
void msm_dp_catalog_ctrl_update_transfer_unit(struct msm_dp_catalog *msm_dp_catalog,
				u32 msm_dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	msm_dp_write_link(catalog, REG_DP_TU, msm_dp_tu);
	msm_dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
void msm_dp_catalog_ctrl_state_ctrl(struct msm_dp_catalog *msm_dp_catalog, u32 state)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_write_link(catalog, REG_DP_STATE_CTRL, state);
}

void msm_dp_catalog_ctrl_config_ctrl(struct msm_dp_catalog *msm_dp_catalog, u32 cfg)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	msm_dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
void msm_dp_catalog_ctrl_lane_mapping(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
	u32 ln_mapping;

	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;

	msm_dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
			ln_mapping);
}
void msm_dp_catalog_ctrl_psr_mainlink_enable(struct msm_dp_catalog *msm_dp_catalog,
						bool enable)
{
	u32 val;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	val = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

	if (enable)
		val |= DP_MAINLINK_CTRL_ENABLE;
	else
		val &= ~DP_MAINLINK_CTRL_ENABLE;

	msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, val);
}
void msm_dp_catalog_ctrl_mainlink_ctrl(struct msm_dp_catalog *msm_dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	drm_dbg_dp(catalog->drm_dev, "enable=%d\n", enable);
	if (enable) {
		/*
		 * To make sure link reg writes happen before other operations,
		 * msm_dp_write_link() uses writel()
		 */
		mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
void msm_dp_catalog_ctrl_config_misc(struct msm_dp_catalog *msm_dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	misc_val = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits from existing value */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	drm_dbg_dp(catalog->drm_dev, "misc settings = 0x%x\n", misc_val);
	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
void msm_dp_catalog_setup_peripheral_flush(struct msm_dp_catalog *msm_dp_catalog)
{
	u32 mainlink_ctrl, hw_revision;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	mainlink_ctrl = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

	hw_revision = msm_dp_catalog_hw_revision(msm_dp_catalog);
	if (hw_revision >= DP_HW_VERSION_1_2)
		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_SDE_PERIPH_UPDATE;
	else
		mainlink_ctrl |= DP_MAINLINK_FLUSH_MODE_UPDATE_SDP;

	msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
}
void msm_dp_catalog_ctrl_config_msa(struct msm_dp_catalog *msm_dp_catalog,
					u32 rate, u32 stream_rate_khz,
					bool is_ycbcr_420)
{
	u32 pixel_m, pixel_n;
	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
	u32 const link_rate_hbr2 = 540000;
	u32 const link_rate_hbr3 = 810000;
	unsigned long den, num;

	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	if (rate == link_rate_hbr3)
		pixel_div = 6;
	else if (rate == 162000 || rate == 270000)
		pixel_div = 2;
	else if (rate == link_rate_hbr2)
		pixel_div = 4;
	else
		DRM_ERROR("Invalid pixel mux divider\n");

	dispcc_input_rate = (rate * 10) / pixel_div;

	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
			(unsigned long)(1 << 16) - 1,
			(unsigned long)(1 << 16) - 1, &den, &num);

	den = ~(den - num);
	den = den & 0xFFFF;
	pixel_m = num;
	pixel_n = den;
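	/*
	 * Derive the MVID/NVID pair programmed below from the rational
	 * approximation: pixel_n is kept in inverted form and folded back here,
	 * MVID is scaled by 5, and NVID is bumped to at least
	 * DP_LINK_CONSTANT_N_VALUE and scaled up again for HBR2/HBR3 rates.
	 */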
	mvid = (pixel_m & 0xFFFF) * 5;
	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);

	if (nvid < nvid_fixed) {
		u32 temp;

		temp = (nvid_fixed / nvid) * nvid;
		mvid = (nvid_fixed / nvid) * mvid;
		nvid = temp;
	}

	if (is_ycbcr_420)
		mvid /= 2;

	if (link_rate_hbr2 == rate)
		nvid *= 2;

	if (link_rate_hbr3 == rate)
		nvid *= 3;

	drm_dbg_dp(catalog->drm_dev, "mvid=0x%x, nvid=0x%x\n", mvid, nvid);
	msm_dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
	msm_dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
	msm_dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
}
int msm_dp_catalog_ctrl_set_pattern_state_bit(struct msm_dp_catalog *msm_dp_catalog,
					u32 state_bit)
{
	int bit, ret;
	u32 data;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	bit = BIT(state_bit - 1);
	drm_dbg_dp(catalog->drm_dev, "hw: bit=%d train=%d\n", bit, state_bit);
	msm_dp_catalog_ctrl_state_ctrl(msm_dp_catalog, bit);

	bit = BIT(state_bit - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io.link.base +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set state_bit for link_train=%d failed\n", state_bit);
		return ret;
	}

	return 0;
}
/**
 * msm_dp_catalog_hw_revision() - retrieve DP hw revision
 *
 * @msm_dp_catalog: DP catalog structure
 *
 * Return: DP controller hw revision
 *
 */
u32 msm_dp_catalog_hw_revision(const struct msm_dp_catalog *msm_dp_catalog)
{
	const struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
			struct msm_dp_catalog_private, msm_dp_catalog);

	return msm_dp_read_ahb(catalog, REG_DP_HW_VERSION);
}
/**
 * msm_dp_catalog_ctrl_reset() - reset DP controller
 *
 * @msm_dp_catalog: DP catalog structure
 *
 * This function resets the DP controller
 *
 * NOTE: resetting the DP controller will also clear any pending HPD related interrupts
 *
 */
void msm_dp_catalog_ctrl_reset(struct msm_dp_catalog *msm_dp_catalog)
{
	u32 sw_reset;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
			struct msm_dp_catalog_private, msm_dp_catalog);

	sw_reset = msm_dp_read_ahb(catalog, REG_DP_SW_RESET);

	sw_reset |= DP_SW_RESET;
	msm_dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
	usleep_range(1000, 1100); /* h/w recommended delay */

	sw_reset &= ~DP_SW_RESET;
	msm_dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
}
bool msm_dp_catalog_ctrl_mainlink_ready(struct msm_dp_catalog *msm_dp_catalog)
{
	u32 data;
	int ret;
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io.link.base +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
void msm_dp_catalog_ctrl_enable_irq(struct msm_dp_catalog *msm_dp_catalog,
						bool enable)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	if (enable) {
		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS,
				DP_INTERRUPT_STATUS1_MASK);
		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
				DP_INTERRUPT_STATUS2_MASK);
	} else {
		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
		msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
	}
}
void msm_dp_catalog_hpd_config_intr(struct msm_dp_catalog *msm_dp_catalog,
			u32 intr_mask, bool en)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	u32 config = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);

	config = (en ? config | intr_mask : config & ~intr_mask);

	drm_dbg_dp(catalog->drm_dev, "intr_mask=%#x config=%#x\n",
					intr_mask, config);
	msm_dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
				config & DP_DP_HPD_INT_MASK);
}
void msm_dp_catalog_ctrl_hpd_enable(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	u32 reftimer = msm_dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	msm_dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	msm_dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}

void msm_dp_catalog_ctrl_hpd_disable(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	u32 reftimer = msm_dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	reftimer &= ~DP_DP_HPD_REFTIMER_ENABLE;
	msm_dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	msm_dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, 0);
}
static void msm_dp_catalog_enable_sdp(struct msm_dp_catalog_private *catalog)
{
	/* trigger sdp */
	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, UPDATE_SDP);
	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x0);
}
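/*
 * PSR support: PSR1 is enabled through REG_PSR_CONFIG, entry/exit is requested
 * through REG_PSR_CMD, and the resulting events are delivered via the STATUS4
 * interrupt group unmasked in msm_dp_catalog_ctrl_config_psr().
 */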
void msm_dp_catalog_ctrl_config_psr(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
			struct msm_dp_catalog_private, msm_dp_catalog);
	u32 config;

	/* enable PSR1 function */
	config = msm_dp_read_link(catalog, REG_PSR_CONFIG);
	config |= PSR1_SUPPORTED;
	msm_dp_write_link(catalog, REG_PSR_CONFIG, config);

	msm_dp_write_ahb(catalog, REG_DP_INTR_MASK4, DP_INTERRUPT_MASK4);
	msm_dp_catalog_enable_sdp(catalog);
}
void msm_dp_catalog_ctrl_set_psr(struct msm_dp_catalog *msm_dp_catalog, bool enter)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
			struct msm_dp_catalog_private, msm_dp_catalog);
	u32 cmd;

	cmd = msm_dp_read_link(catalog, REG_PSR_CMD);

	cmd &= ~(PSR_ENTER | PSR_EXIT);

	if (enter)
		cmd |= PSR_ENTER;
	else
		cmd |= PSR_EXIT;

	msm_dp_catalog_enable_sdp(catalog);
	msm_dp_write_link(catalog, REG_PSR_CMD, cmd);
}
u32 msm_dp_catalog_link_is_connected(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	u32 status;

	status = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	drm_dbg_dp(catalog->drm_dev, "aux status: %#x\n", status);
	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;

	return status;
}
u32 msm_dp_catalog_hpd_get_intr_status(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	int isr, mask;

	isr = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
	msm_dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
				 (isr & DP_DP_HPD_INT_MASK));
	mask = msm_dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);

	/*
	 * We only want to return interrupts that are unmasked to the caller.
	 * However, the interrupt status field also contains other
	 * informational bits about the HPD state status, so we only mask
	 * out the part of the register that tells us about which interrupts
	 * are pending.
	 */
	return isr & (mask | ~DP_DP_HPD_INT_MASK);
}
u32 msm_dp_catalog_ctrl_read_psr_interrupt_status(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
			struct msm_dp_catalog_private, msm_dp_catalog);
	u32 intr, intr_ack;

	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS4);
	intr_ack = (intr & DP_INTERRUPT_STATUS4)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS4, intr_ack);

	return intr;
}
int msm_dp_catalog_ctrl_get_interrupt(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	u32 intr, intr_ack;

	intr = msm_dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
	intr &= ~DP_INTERRUPT_STATUS2_MASK;
	intr_ack = (intr & DP_INTERRUPT_STATUS2)
			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
	msm_dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
			intr_ack | DP_INTERRUPT_STATUS2_MASK);

	return intr;
}
void msm_dp_catalog_ctrl_phy_reset(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
			struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	msm_dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
void msm_dp_catalog_ctrl_send_phy_pattern(struct msm_dp_catalog *msm_dp_catalog,
			u32 pattern)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	msm_dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	drm_dbg_dp(catalog->drm_dev, "pattern: %#x\n", pattern);
	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		value &= ~(1 << 16);
		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		msm_dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		value = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		msm_dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		value = msm_dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		msm_dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		msm_dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		drm_dbg_dp(catalog->drm_dev,
			   "No valid test pattern requested: %#x\n", pattern);
		break;
	}
}
u32 msm_dp_catalog_ctrl_read_phy_pattern(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	return msm_dp_read_link(catalog, REG_DP_MAINLINK_READY);
}
/* panel related catalog functions */
int msm_dp_catalog_panel_timing_cfg(struct msm_dp_catalog *msm_dp_catalog, u32 total,
				u32 sync_start, u32 width_blanking, u32 msm_dp_active)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	u32 reg;

	msm_dp_write_link(catalog, REG_DP_TOTAL_HOR_VER, total);
	msm_dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC, sync_start);
	msm_dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY, width_blanking);
	msm_dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, msm_dp_active);

	reg = msm_dp_read_p0(catalog, MMSS_DP_INTF_CONFIG);

	if (msm_dp_catalog->wide_bus_en)
		reg |= DP_INTF_CONFIG_DATABUS_WIDEN;
	else
		reg &= ~DP_INTF_CONFIG_DATABUS_WIDEN;

	DRM_DEBUG_DP("wide_bus_en=%d reg=%#x\n", msm_dp_catalog->wide_bus_en, reg);

	msm_dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, reg);

	return 0;
}
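/*
 * The VSC SDP is transmitted through the GENERIC0 packet registers: the four
 * header bytes are packed into two 32-bit words and the payload (vsc_sdp->db)
 * is copied in four bytes at a time.
 */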
static void msm_dp_catalog_panel_send_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog, struct dp_sdp *vsc_sdp)
{
	struct msm_dp_catalog_private *catalog;
	u32 header[2];
	u32 val;
	int i;

	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_utils_pack_sdp_header(&vsc_sdp->sdp_header, header);

	msm_dp_write_link(catalog, MMSS_DP_GENERIC0_0, header[0]);
	msm_dp_write_link(catalog, MMSS_DP_GENERIC0_1, header[1]);

	for (i = 0; i < sizeof(vsc_sdp->db); i += 4) {
		val = ((vsc_sdp->db[i]) | (vsc_sdp->db[i + 1] << 8) | (vsc_sdp->db[i + 2] << 16) |
		       (vsc_sdp->db[i + 3] << 24));
		msm_dp_write_link(catalog, MMSS_DP_GENERIC0_2 + i, val);
	}
}
static void msm_dp_catalog_panel_update_sdp(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog;
	u32 hw_revision;

	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);

	hw_revision = msm_dp_catalog_hw_revision(msm_dp_catalog);
	if (hw_revision < DP_HW_VERSION_1_2 && hw_revision >= DP_HW_VERSION_1_0) {
		msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x01);
		msm_dp_write_link(catalog, MMSS_DP_SDP_CFG3, 0x00);
	}
}
void msm_dp_catalog_panel_enable_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog, struct dp_sdp *vsc_sdp)
{
	struct msm_dp_catalog_private *catalog;
	u32 cfg, cfg2, misc;

	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);

	cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
	cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
	misc = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);

	cfg |= GEN0_SDP_EN;
	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);

	cfg2 |= GENERIC0_SDPSIZE_VALID;
	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);

	msm_dp_catalog_panel_send_vsc_sdp(msm_dp_catalog, vsc_sdp);

	/* indicates presence of VSC (BIT(6) of MISC1) */
	misc |= DP_MISC1_VSC_SDP;

	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=1\n");

	pr_debug("misc settings = 0x%x\n", misc);
	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);

	msm_dp_catalog_panel_update_sdp(msm_dp_catalog);
}
void msm_dp_catalog_panel_disable_vsc_sdp(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog;
	u32 cfg, cfg2, misc;

	catalog = container_of(msm_dp_catalog, struct msm_dp_catalog_private, msm_dp_catalog);

	cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
	cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
	misc = msm_dp_read_link(catalog, REG_DP_MISC1_MISC0);

	cfg &= ~GEN0_SDP_EN;
	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, cfg);

	cfg2 &= ~GENERIC0_SDPSIZE_VALID;
	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, cfg2);

	/* switch back to MSA */
	misc &= ~DP_MISC1_VSC_SDP;

	drm_dbg_dp(catalog->drm_dev, "vsc sdp enable=0\n");

	pr_debug("misc settings = 0x%x\n", misc);
	msm_dp_write_link(catalog, REG_DP_MISC1_MISC0, misc);

	msm_dp_catalog_panel_update_sdp(msm_dp_catalog);
}
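/*
 * Test pattern generator (TPG): timing for the P0 interface is derived from
 * the drm_display_mode below and the built-in BIST block is enabled so the
 * link can be exercised without real pixel data from the display pipeline.
 */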
void msm_dp_catalog_panel_tpg_enable(struct msm_dp_catalog *msm_dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters */
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;

	msm_dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	msm_dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	msm_dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	msm_dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	msm_dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	msm_dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	msm_dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	msm_dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	drm_dbg_dp(catalog->drm_dev, "%s: enabled tpg\n", __func__);
}
void msm_dp_catalog_panel_tpg_disable(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog = container_of(msm_dp_catalog,
				struct msm_dp_catalog_private, msm_dp_catalog);

	msm_dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
	msm_dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
	msm_dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
}
static void __iomem *msm_dp_ioremap(struct platform_device *pdev, int idx, size_t *len)
{
	struct resource *res;
	void __iomem *base;

	base = devm_platform_get_and_ioremap_resource(pdev, idx, &res);
	if (!IS_ERR(base))
		*len = resource_size(res);

	return base;
}
static int msm_dp_catalog_get_io(struct msm_dp_catalog_private *catalog)
{
	struct platform_device *pdev = to_platform_device(catalog->dev);
	struct dss_io_data *dss = &catalog->io;

	dss->ahb.base = msm_dp_ioremap(pdev, 0, &dss->ahb.len);
	if (IS_ERR(dss->ahb.base))
		return PTR_ERR(dss->ahb.base);

	dss->aux.base = msm_dp_ioremap(pdev, 1, &dss->aux.len);
	if (IS_ERR(dss->aux.base)) {
		/*
		 * The initial binding had a single reg, but in order to
		 * support variation in the sub-region sizes this was split.
		 * msm_dp_ioremap() will fail with -EINVAL here if only a single
		 * reg is specified, so fill in the sub-region offsets and
		 * lengths based on this single region.
		 */
		if (PTR_ERR(dss->aux.base) == -EINVAL) {
			if (dss->ahb.len < DP_DEFAULT_P0_OFFSET + DP_DEFAULT_P0_SIZE) {
				DRM_ERROR("legacy memory region not large enough\n");
				return -EINVAL;
			}

			dss->ahb.len = DP_DEFAULT_AHB_SIZE;
			dss->aux.base = dss->ahb.base + DP_DEFAULT_AUX_OFFSET;
			dss->aux.len = DP_DEFAULT_AUX_SIZE;
			dss->link.base = dss->ahb.base + DP_DEFAULT_LINK_OFFSET;
			dss->link.len = DP_DEFAULT_LINK_SIZE;
			dss->p0.base = dss->ahb.base + DP_DEFAULT_P0_OFFSET;
			dss->p0.len = DP_DEFAULT_P0_SIZE;
		} else {
			DRM_ERROR("unable to remap aux region: %pe\n", dss->aux.base);
			return PTR_ERR(dss->aux.base);
		}
	} else {
		dss->link.base = msm_dp_ioremap(pdev, 2, &dss->link.len);
		if (IS_ERR(dss->link.base)) {
			DRM_ERROR("unable to remap link region: %pe\n", dss->link.base);
			return PTR_ERR(dss->link.base);
		}

		dss->p0.base = msm_dp_ioremap(pdev, 3, &dss->p0.len);
		if (IS_ERR(dss->p0.base)) {
			DRM_ERROR("unable to remap p0 region: %pe\n", dss->p0.base);
			return PTR_ERR(dss->p0.base);
		}
	}

	return 0;
}
struct msm_dp_catalog *msm_dp_catalog_get(struct device *dev)
{
	struct msm_dp_catalog_private *catalog;
	int ret;

	catalog = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
	if (!catalog)
		return ERR_PTR(-ENOMEM);

	catalog->dev = dev;

	ret = msm_dp_catalog_get_io(catalog);
	if (ret)
		return ERR_PTR(ret);

	return &catalog->msm_dp_catalog;
}
u32 msm_dp_catalog_audio_get_header(struct msm_dp_catalog *msm_dp_catalog,
		enum msm_dp_catalog_audio_sdp_type sdp,
		enum msm_dp_catalog_audio_header_type header)
{
	struct msm_dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];

	catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);

	sdp_map = catalog->audio_map;

	return msm_dp_read_link(catalog, sdp_map[sdp][header]);
}
void msm_dp_catalog_audio_set_header(struct msm_dp_catalog *msm_dp_catalog,
		enum msm_dp_catalog_audio_sdp_type sdp,
		enum msm_dp_catalog_audio_header_type header,
		u32 data)
{
	struct msm_dp_catalog_private *catalog;
	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];

	if (!msm_dp_catalog)
		return;

	catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);

	sdp_map = catalog->audio_map;

	msm_dp_write_link(catalog, sdp_map[sdp][header], data);
}
void msm_dp_catalog_audio_config_acr(struct msm_dp_catalog *msm_dp_catalog, u32 select)
{
	struct msm_dp_catalog_private *catalog;
	u32 acr_ctrl;

	if (!msm_dp_catalog)
		return;

	catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);

	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);

	drm_dbg_dp(catalog->drm_dev, "select: %#x, acr_ctrl: %#x\n",
					select, acr_ctrl);

	msm_dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
}
void msm_dp_catalog_audio_enable(struct msm_dp_catalog *msm_dp_catalog, bool enable)
{
	struct msm_dp_catalog_private *catalog;
	u32 audio_ctrl;

	if (!msm_dp_catalog)
		return;

	catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);

	audio_ctrl = msm_dp_read_link(catalog, MMSS_DP_AUDIO_CFG);

	if (enable)
		audio_ctrl |= BIT(0);
	else
		audio_ctrl &= ~BIT(0);

	drm_dbg_dp(catalog->drm_dev, "dp_audio_cfg = 0x%x\n", audio_ctrl);

	msm_dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
	/* make sure audio engine is disabled */
	wmb();
}
void msm_dp_catalog_audio_config_sdp(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog;
	u32 sdp_cfg = 0;
	u32 sdp_cfg2 = 0;

	if (!msm_dp_catalog)
		return;

	catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);

	sdp_cfg = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG);
	/* AUDIO_TIMESTAMP_SDP_EN */
	sdp_cfg |= BIT(1);
	/* AUDIO_STREAM_SDP_EN */
	sdp_cfg |= BIT(2);
	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
	sdp_cfg |= BIT(5);
	/* AUDIO_ISRC_SDP_EN */
	sdp_cfg |= BIT(6);
	/* AUDIO_INFOFRAME_SDP_EN */
	sdp_cfg |= BIT(20);

	drm_dbg_dp(catalog->drm_dev, "sdp_cfg = 0x%x\n", sdp_cfg);

	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);

	sdp_cfg2 = msm_dp_read_link(catalog, MMSS_DP_SDP_CFG2);
	/* IFRM_REGSRC -> Do not use reg values */
	sdp_cfg2 &= ~BIT(0);
	/* AUDIO_STREAM_HB3_REGSRC -> Do not use reg values */
	sdp_cfg2 &= ~BIT(1);

	drm_dbg_dp(catalog->drm_dev, "sdp_cfg2 = 0x%x\n", sdp_cfg2);

	msm_dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
}
void msm_dp_catalog_audio_init(struct msm_dp_catalog *msm_dp_catalog)
{
	struct msm_dp_catalog_private *catalog;
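	/*
	 * One row per audio SDP type, indexed by the header-byte enum; the
	 * second and third header bytes live in the same *_1 register, hence
	 * the repeated entry at the end of each row.
	 */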
	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!msm_dp_catalog)
		return;

	catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);

	catalog->audio_map = sdp_map;
}
void msm_dp_catalog_audio_sfe_level(struct msm_dp_catalog *msm_dp_catalog, u32 safe_to_exit_level)
{
	struct msm_dp_catalog_private *catalog;
	u32 mainlink_levels;

	if (!msm_dp_catalog)
		return;

	catalog = container_of(msm_dp_catalog,
		struct msm_dp_catalog_private, msm_dp_catalog);

	mainlink_levels = msm_dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
	mainlink_levels &= 0xFE0;
	mainlink_levels |= safe_to_exit_level;

	drm_dbg_dp(catalog->drm_dev,
			"mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
			mainlink_levels, safe_to_exit_level);

	msm_dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
}