]> Git Repo - linux.git/blob - drivers/gpu/drm/msm/dp/dp_catalog.c
Merge tag 'locking-urgent-2020-11-01' of git://git.kernel.org/pub/scm/linux/kernel...
[linux.git] / drivers / gpu / drm / msm / dp / dp_catalog.c
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5
6 #define pr_fmt(fmt)     "[drm-dp] %s: " fmt, __func__
7
8 #include <linux/rational.h>
9 #include <linux/delay.h>
10 #include <linux/iopoll.h>
11 #include <linux/phy/phy.h>
12 #include <linux/phy/phy-dp.h>
13 #include <linux/rational.h>
14 #include <drm/drm_dp_helper.h>
15 #include <drm/drm_print.h>
16
17 #include "dp_catalog.h"
18 #include "dp_reg.h"
19
20 #define POLLING_SLEEP_US                        1000
21 #define POLLING_TIMEOUT_US                      10000
22
23 #define SCRAMBLER_RESET_COUNT_VALUE             0xFC
24
25 #define DP_INTERRUPT_STATUS_ACK_SHIFT   1
26 #define DP_INTERRUPT_STATUS_MASK_SHIFT  2
27
28 #define MSM_DP_CONTROLLER_AHB_OFFSET    0x0000
29 #define MSM_DP_CONTROLLER_AHB_SIZE      0x0200
30 #define MSM_DP_CONTROLLER_AUX_OFFSET    0x0200
31 #define MSM_DP_CONTROLLER_AUX_SIZE      0x0200
32 #define MSM_DP_CONTROLLER_LINK_OFFSET   0x0400
33 #define MSM_DP_CONTROLLER_LINK_SIZE     0x0C00
34 #define MSM_DP_CONTROLLER_P0_OFFSET     0x1000
35 #define MSM_DP_CONTROLLER_P0_SIZE       0x0400
36
37 #define DP_INTERRUPT_STATUS1 \
38         (DP_INTR_AUX_I2C_DONE| \
39         DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
40         DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
41         DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
42         DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
43
44 #define DP_INTERRUPT_STATUS1_ACK \
45         (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
46 #define DP_INTERRUPT_STATUS1_MASK \
47         (DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
48
49 #define DP_INTERRUPT_STATUS2 \
50         (DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
51         DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
52
53 #define DP_INTERRUPT_STATUS2_ACK \
54         (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
55 #define DP_INTERRUPT_STATUS2_MASK \
56         (DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
57
/*
 * Private catalog state; the public struct dp_catalog is embedded so that
 * callers holding a dp_catalog pointer can be mapped back via container_of().
 */
struct dp_catalog_private {
	struct device *dev;
	struct dp_io *io;		/* register/phy I/O resources */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX]; /* audio SDP header lookup table */
	struct dp_catalog dp_catalog;	/* public handle (must stay embedded) */
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX]; /* per-config AUX LUT indices */
};
65
66 static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
67 {
68         offset += MSM_DP_CONTROLLER_AUX_OFFSET;
69         return readl_relaxed(catalog->io->dp_controller.base + offset);
70 }
71
72 static inline void dp_write_aux(struct dp_catalog_private *catalog,
73                                u32 offset, u32 data)
74 {
75         offset += MSM_DP_CONTROLLER_AUX_OFFSET;
76         /*
77          * To make sure aux reg writes happens before any other operation,
78          * this function uses writel() instread of writel_relaxed()
79          */
80         writel(data, catalog->io->dp_controller.base + offset);
81 }
82
83 static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
84 {
85         offset += MSM_DP_CONTROLLER_AHB_OFFSET;
86         return readl_relaxed(catalog->io->dp_controller.base + offset);
87 }
88
89 static inline void dp_write_ahb(struct dp_catalog_private *catalog,
90                                u32 offset, u32 data)
91 {
92         offset += MSM_DP_CONTROLLER_AHB_OFFSET;
93         /*
94          * To make sure phy reg writes happens before any other operation,
95          * this function uses writel() instread of writel_relaxed()
96          */
97         writel(data, catalog->io->dp_controller.base + offset);
98 }
99
100 static inline void dp_write_p0(struct dp_catalog_private *catalog,
101                                u32 offset, u32 data)
102 {
103         offset += MSM_DP_CONTROLLER_P0_OFFSET;
104         /*
105          * To make sure interface reg writes happens before any other operation,
106          * this function uses writel() instread of writel_relaxed()
107          */
108         writel(data, catalog->io->dp_controller.base + offset);
109 }
110
/* Read a register from the P0 (stream interface) block of the controller. */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	offset += MSM_DP_CONTROLLER_P0_OFFSET;
	/* Relaxed read: no ordering guarantee is needed for a plain read. */
	return readl_relaxed(catalog->io->dp_controller.base + offset);
}
121
122 static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
123 {
124         offset += MSM_DP_CONTROLLER_LINK_OFFSET;
125         return readl_relaxed(catalog->io->dp_controller.base + offset);
126 }
127
128 static inline void dp_write_link(struct dp_catalog_private *catalog,
129                                u32 offset, u32 data)
130 {
131         offset += MSM_DP_CONTROLLER_LINK_OFFSET;
132         /*
133          * To make sure link reg writes happens before any other operation,
134          * this function uses writel() instread of writel_relaxed()
135          */
136         writel(data, catalog->io->dp_controller.base + offset);
137 }
138
139 /* aux related catalog functions */
140 u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
141 {
142         struct dp_catalog_private *catalog = container_of(dp_catalog,
143                                 struct dp_catalog_private, dp_catalog);
144
145         return dp_read_aux(catalog, REG_DP_AUX_DATA);
146 }
147
148 int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
149 {
150         struct dp_catalog_private *catalog = container_of(dp_catalog,
151                                 struct dp_catalog_private, dp_catalog);
152
153         dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
154         return 0;
155 }
156
157 int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
158 {
159         struct dp_catalog_private *catalog = container_of(dp_catalog,
160                                 struct dp_catalog_private, dp_catalog);
161
162         dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
163         return 0;
164 }
165
166 int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
167 {
168         u32 data;
169         struct dp_catalog_private *catalog = container_of(dp_catalog,
170                                 struct dp_catalog_private, dp_catalog);
171
172         if (read) {
173                 data = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
174                 data &= ~DP_AUX_TRANS_CTRL_GO;
175                 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, data);
176         } else {
177                 dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, 0);
178         }
179         return 0;
180 }
181
182 int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
183 {
184         struct dp_catalog_private *catalog = container_of(dp_catalog,
185                                 struct dp_catalog_private, dp_catalog);
186
187         dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
188         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
189         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
190         dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
191         return 0;
192 }
193
194 void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
195 {
196         u32 aux_ctrl;
197         struct dp_catalog_private *catalog = container_of(dp_catalog,
198                                 struct dp_catalog_private, dp_catalog);
199
200         aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
201
202         aux_ctrl |= DP_AUX_CTRL_RESET;
203         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
204         usleep_range(1000, 1100); /* h/w recommended delay */
205
206         aux_ctrl &= ~DP_AUX_CTRL_RESET;
207         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
208 }
209
210 void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
211 {
212         u32 aux_ctrl;
213         struct dp_catalog_private *catalog = container_of(dp_catalog,
214                                 struct dp_catalog_private, dp_catalog);
215
216         aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
217
218         if (enable) {
219                 dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
220                 dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
221                 aux_ctrl |= DP_AUX_CTRL_ENABLE;
222         } else {
223                 aux_ctrl &= ~DP_AUX_CTRL_ENABLE;
224         }
225
226         dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
227 }
228
229 void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
230 {
231         struct dp_catalog_private *catalog = container_of(dp_catalog,
232                                 struct dp_catalog_private, dp_catalog);
233         struct dp_io *dp_io = catalog->io;
234         struct phy *phy = dp_io->phy;
235
236         phy_calibrate(phy);
237 }
238
239 static void dump_regs(void __iomem *base, int len)
240 {
241         int i;
242         u32 x0, x4, x8, xc;
243         u32 addr_off = 0;
244
245         len = DIV_ROUND_UP(len, 16);
246         for (i = 0; i < len; i++) {
247                 x0 = readl_relaxed(base + addr_off);
248                 x4 = readl_relaxed(base + addr_off + 0x04);
249                 x8 = readl_relaxed(base + addr_off + 0x08);
250                 xc = readl_relaxed(base + addr_off + 0x0c);
251
252                 pr_info("%08x: %08x %08x %08x %08x", addr_off, x0, x4, x8, xc);
253                 addr_off += 16;
254         }
255 }
256
257 void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
258 {
259         u32 offset, len;
260         struct dp_catalog_private *catalog = container_of(dp_catalog,
261                 struct dp_catalog_private, dp_catalog);
262
263         pr_info("AHB regs\n");
264         offset = MSM_DP_CONTROLLER_AHB_OFFSET;
265         len = MSM_DP_CONTROLLER_AHB_SIZE;
266         dump_regs(catalog->io->dp_controller.base + offset, len);
267
268         pr_info("AUXCLK regs\n");
269         offset = MSM_DP_CONTROLLER_AUX_OFFSET;
270         len = MSM_DP_CONTROLLER_AUX_SIZE;
271         dump_regs(catalog->io->dp_controller.base + offset, len);
272
273         pr_info("LCLK regs\n");
274         offset = MSM_DP_CONTROLLER_LINK_OFFSET;
275         len = MSM_DP_CONTROLLER_LINK_SIZE;
276         dump_regs(catalog->io->dp_controller.base + offset, len);
277
278         pr_info("P0CLK regs\n");
279         offset = MSM_DP_CONTROLLER_P0_OFFSET;
280         len = MSM_DP_CONTROLLER_P0_SIZE;
281         dump_regs(catalog->io->dp_controller.base + offset, len);
282 }
283
284 int dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
285 {
286         struct dp_catalog_private *catalog = container_of(dp_catalog,
287                                 struct dp_catalog_private, dp_catalog);
288         u32 intr, intr_ack;
289
290         intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
291         intr &= ~DP_INTERRUPT_STATUS1_MASK;
292         intr_ack = (intr & DP_INTERRUPT_STATUS1)
293                         << DP_INTERRUPT_STATUS_ACK_SHIFT;
294         dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
295                         DP_INTERRUPT_STATUS1_MASK);
296
297         return intr;
298
299 }
300
301 /* controller related catalog functions */
302 void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
303                                 u32 dp_tu, u32 valid_boundary,
304                                 u32 valid_boundary2)
305 {
306         struct dp_catalog_private *catalog = container_of(dp_catalog,
307                                 struct dp_catalog_private, dp_catalog);
308
309         dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
310         dp_write_link(catalog, REG_DP_TU, dp_tu);
311         dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
312 }
313
314 void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
315 {
316         struct dp_catalog_private *catalog = container_of(dp_catalog,
317                                 struct dp_catalog_private, dp_catalog);
318
319         dp_write_link(catalog, REG_DP_STATE_CTRL, state);
320 }
321
322 void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
323 {
324         struct dp_catalog_private *catalog = container_of(dp_catalog,
325                                 struct dp_catalog_private, dp_catalog);
326
327         DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);
328
329         dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
330 }
331
332 void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
333 {
334         struct dp_catalog_private *catalog = container_of(dp_catalog,
335                                 struct dp_catalog_private, dp_catalog);
336         u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
337         u32 ln_mapping;
338
339         ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
340         ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
341         ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
342         ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
343
344         dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
345                         ln_mapping);
346 }
347
348 void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
349                                                 bool enable)
350 {
351         u32 mainlink_ctrl;
352         struct dp_catalog_private *catalog = container_of(dp_catalog,
353                                 struct dp_catalog_private, dp_catalog);
354
355         if (enable) {
356                 /*
357                  * To make sure link reg writes happens before other operation,
358                  * dp_write_link() function uses writel()
359                  */
360                 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
361
362                 mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
363                                                 DP_MAINLINK_CTRL_ENABLE);
364                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
365
366                 mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
367                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
368
369                 mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
370                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
371
372                 mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
373                                         DP_MAINLINK_FB_BOUNDARY_SEL);
374                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
375         } else {
376                 mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
377                 mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
378                 dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
379         }
380 }
381
/*
 * Update the MISC0/MISC1 stream attributes: colorimetry, test bit depth,
 * and synchronous-clock mode.
 *
 * NOTE(review): only the bit-depth field is cleared before OR-ing; the
 * previously programmed colorimetry bits are left set, so stale colorimetry
 * bits could accumulate across reconfigurations — confirm whether the
 * colorimetry field should be cleared here as well.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
402
403 void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
404                                         u32 rate, u32 stream_rate_khz,
405                                         bool fixed_nvid)
406 {
407         u32 pixel_m, pixel_n;
408         u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
409         u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
410         u32 const link_rate_hbr2 = 540000;
411         u32 const link_rate_hbr3 = 810000;
412         unsigned long den, num;
413
414         struct dp_catalog_private *catalog = container_of(dp_catalog,
415                                 struct dp_catalog_private, dp_catalog);
416
417         if (rate == link_rate_hbr3)
418                 pixel_div = 6;
419         else if (rate == 1620000 || rate == 270000)
420                 pixel_div = 2;
421         else if (rate == link_rate_hbr2)
422                 pixel_div = 4;
423         else
424                 DRM_ERROR("Invalid pixel mux divider\n");
425
426         dispcc_input_rate = (rate * 10) / pixel_div;
427
428         rational_best_approximation(dispcc_input_rate, stream_rate_khz,
429                         (unsigned long)(1 << 16) - 1,
430                         (unsigned long)(1 << 16) - 1, &den, &num);
431
432         den = ~(den - num);
433         den = den & 0xFFFF;
434         pixel_m = num;
435         pixel_n = den;
436
437         mvid = (pixel_m & 0xFFFF) * 5;
438         nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);
439
440         if (nvid < nvid_fixed) {
441                 u32 temp;
442
443                 temp = (nvid_fixed / nvid) * nvid;
444                 mvid = (nvid_fixed / nvid) * mvid;
445                 nvid = temp;
446         }
447
448         if (link_rate_hbr2 == rate)
449                 nvid *= 2;
450
451         if (link_rate_hbr3 == rate)
452                 nvid *= 3;
453
454         DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
455         dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
456         dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
457         dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
458 }
459
460 int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
461                                         u32 pattern)
462 {
463         int bit, ret;
464         u32 data;
465         struct dp_catalog_private *catalog = container_of(dp_catalog,
466                                 struct dp_catalog_private, dp_catalog);
467
468         bit = BIT(pattern - 1);
469         DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
470         dp_catalog_ctrl_state_ctrl(dp_catalog, bit);
471
472         bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;
473
474         /* Poll for mainlink ready status */
475         ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
476                                         MSM_DP_CONTROLLER_LINK_OFFSET +
477                                         REG_DP_MAINLINK_READY,
478                                         data, data & bit,
479                                         POLLING_SLEEP_US, POLLING_TIMEOUT_US);
480         if (ret < 0) {
481                 DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
482                 return ret;
483         }
484         return 0;
485 }
486
487 void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
488 {
489         u32 sw_reset;
490         struct dp_catalog_private *catalog = container_of(dp_catalog,
491                                 struct dp_catalog_private, dp_catalog);
492
493         sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
494
495         sw_reset |= DP_SW_RESET;
496         dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
497         usleep_range(1000, 1100); /* h/w recommended delay */
498
499         sw_reset &= ~DP_SW_RESET;
500         dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
501 }
502
503 bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
504 {
505         u32 data;
506         int ret;
507         struct dp_catalog_private *catalog = container_of(dp_catalog,
508                                 struct dp_catalog_private, dp_catalog);
509
510         /* Poll for mainlink ready status */
511         ret = readl_poll_timeout(catalog->io->dp_controller.base +
512                                 MSM_DP_CONTROLLER_LINK_OFFSET +
513                                 REG_DP_MAINLINK_READY,
514                                 data, data & DP_MAINLINK_READY_FOR_VIDEO,
515                                 POLLING_SLEEP_US, POLLING_TIMEOUT_US);
516         if (ret < 0) {
517                 DRM_ERROR("mainlink not ready\n");
518                 return false;
519         }
520
521         return true;
522 }
523
524 void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
525                                                 bool enable)
526 {
527         struct dp_catalog_private *catalog = container_of(dp_catalog,
528                                 struct dp_catalog_private, dp_catalog);
529
530         if (enable) {
531                 dp_write_ahb(catalog, REG_DP_INTR_STATUS,
532                                 DP_INTERRUPT_STATUS1_MASK);
533                 dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
534                                 DP_INTERRUPT_STATUS2_MASK);
535         } else {
536                 dp_write_ahb(catalog, REG_DP_INTR_STATUS, 0x00);
537                 dp_write_ahb(catalog, REG_DP_INTR_STATUS2, 0x00);
538         }
539 }
540
541 void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
542                         u32 intr_mask, bool en)
543 {
544         struct dp_catalog_private *catalog = container_of(dp_catalog,
545                                 struct dp_catalog_private, dp_catalog);
546
547         u32 config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
548
549         config = (en ? config | intr_mask : config & ~intr_mask);
550
551         dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
552                                 config & DP_DP_HPD_INT_MASK);
553 }
554
555 void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
556 {
557         struct dp_catalog_private *catalog = container_of(dp_catalog,
558                                 struct dp_catalog_private, dp_catalog);
559
560         u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);
561
562         /* enable HPD interrupts */
563         dp_catalog_hpd_config_intr(dp_catalog,
564                 DP_DP_HPD_PLUG_INT_MASK | DP_DP_IRQ_HPD_INT_MASK
565                 | DP_DP_HPD_UNPLUG_INT_MASK | DP_DP_HPD_REPLUG_INT_MASK, true);
566
567         /* Configure REFTIMER and enable it */
568         reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
569         dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);
570
571         /* Enable HPD */
572         dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
573 }
574
575 u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
576 {
577         struct dp_catalog_private *catalog = container_of(dp_catalog,
578                                 struct dp_catalog_private, dp_catalog);
579         int isr = 0;
580
581         isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
582         dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
583                                  (isr & DP_DP_HPD_INT_MASK));
584
585         return isr;
586 }
587
588 int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
589 {
590         struct dp_catalog_private *catalog = container_of(dp_catalog,
591                                 struct dp_catalog_private, dp_catalog);
592         u32 intr, intr_ack;
593
594         intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
595         intr &= ~DP_INTERRUPT_STATUS2_MASK;
596         intr_ack = (intr & DP_INTERRUPT_STATUS2)
597                         << DP_INTERRUPT_STATUS_ACK_SHIFT;
598         dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
599                         intr_ack | DP_INTERRUPT_STATUS2_MASK);
600
601         return intr;
602 }
603
604 void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
605 {
606         struct dp_catalog_private *catalog = container_of(dp_catalog,
607                                 struct dp_catalog_private, dp_catalog);
608
609         dp_write_ahb(catalog, REG_DP_PHY_CTRL,
610                         DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
611         usleep_range(1000, 1100); /* h/w recommended delay */
612         dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
613 }
614
615 int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
616                 u8 v_level, u8 p_level)
617 {
618         struct dp_catalog_private *catalog = container_of(dp_catalog,
619                                 struct dp_catalog_private, dp_catalog);
620         struct dp_io *dp_io = catalog->io;
621         struct phy *phy = dp_io->phy;
622         struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;
623
624         /* TODO: Update for all lanes instead of just first one */
625         opts_dp->voltage[0] = v_level;
626         opts_dp->pre[0] = p_level;
627         opts_dp->set_voltages = 1;
628         phy_configure(phy, &dp_io->phy_opts);
629         opts_dp->set_voltages = 0;
630
631         return 0;
632 }
633
/*
 * Start transmitting the requested PHY compliance test pattern.
 *
 * Each case programs an order-sensitive register sequence taken from the
 * hardware programming guide; unknown patterns are logged and ignored.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		/* D10.2 is generated by the training pattern 1 sequence */
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/* bit 16 clear first, then program the scrambler reset count */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* disable the scrambler software bypass before the pattern */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		/* re-enable the mainlink with the pattern active */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		DRM_DEBUG_DP("No valid test pattern requested:0x%x\n", pattern);
		break;
	}
}
708
709 u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
710 {
711         struct dp_catalog_private *catalog = container_of(dp_catalog,
712                                 struct dp_catalog_private, dp_catalog);
713
714         return dp_read_link(catalog, REG_DP_MAINLINK_READY);
715 }
716
717 /* panel related catalog functions */
718 int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
719 {
720         struct dp_catalog_private *catalog = container_of(dp_catalog,
721                                 struct dp_catalog_private, dp_catalog);
722
723         dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
724                                 dp_catalog->total);
725         dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
726                                 dp_catalog->sync_start);
727         dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
728                                 dp_catalog->width_blanking);
729         dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
730         return 0;
731 }
732
/*
 * Enable the test pattern generator (TPG) with timings derived from
 * @drm_mode, then start the timing engine.  The pattern programmed is a
 * checkered rectangle in 8-bpp RGB.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* vertical active window, expressed in pixel-clock ticks
	 * (lines * hsync_period)
	 */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	/* shift start/end by the horizontal back/front porch */
	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	/* horizontal active window within a line */
	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* hsync period in the high half-word, pulse width in the low */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	/* program timing engine; F1 (second field) registers are zeroed
	 * since only progressive single-field output is configured here
	 */
	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	/* select pattern, pixel format, then enable BIST and the engine */
	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
}
800
801 void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
802 {
803         struct dp_catalog_private *catalog = container_of(dp_catalog,
804                                 struct dp_catalog_private, dp_catalog);
805
806         dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
807         dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
808         dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
809 }
810
811 struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
812 {
813         struct dp_catalog_private *catalog;
814
815         if (!io) {
816                 DRM_ERROR("invalid input\n");
817                 return ERR_PTR(-EINVAL);
818         }
819
820         catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
821         if (!catalog)
822                 return ERR_PTR(-ENOMEM);
823
824         catalog->dev = dev;
825         catalog->io = io;
826
827         return &catalog->dp_catalog;
828 }
829
830 void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
831 {
832         struct dp_catalog_private *catalog;
833         u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
834         enum dp_catalog_audio_sdp_type sdp;
835         enum dp_catalog_audio_header_type header;
836
837         if (!dp_catalog)
838                 return;
839
840         catalog = container_of(dp_catalog,
841                 struct dp_catalog_private, dp_catalog);
842
843         sdp_map = catalog->audio_map;
844         sdp     = dp_catalog->sdp_type;
845         header  = dp_catalog->sdp_header;
846
847         dp_catalog->audio_data = dp_read_link(catalog,
848                         sdp_map[sdp][header]);
849 }
850
851 void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
852 {
853         struct dp_catalog_private *catalog;
854         u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
855         enum dp_catalog_audio_sdp_type sdp;
856         enum dp_catalog_audio_header_type header;
857         u32 data;
858
859         if (!dp_catalog)
860                 return;
861
862         catalog = container_of(dp_catalog,
863                 struct dp_catalog_private, dp_catalog);
864
865         sdp_map = catalog->audio_map;
866         sdp     = dp_catalog->sdp_type;
867         header  = dp_catalog->sdp_header;
868         data    = dp_catalog->audio_data;
869
870         dp_write_link(catalog, sdp_map[sdp][header], data);
871 }
872
873 void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
874 {
875         struct dp_catalog_private *catalog;
876         u32 acr_ctrl, select;
877
878         if (!dp_catalog)
879                 return;
880
881         catalog = container_of(dp_catalog,
882                 struct dp_catalog_private, dp_catalog);
883
884         select = dp_catalog->audio_data;
885         acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
886
887         DRM_DEBUG_DP("select = 0x%x, acr_ctrl = 0x%x\n", select, acr_ctrl);
888
889         dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
890 }
891
892 void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
893 {
894         struct dp_catalog_private *catalog;
895         bool enable;
896         u32 audio_ctrl;
897
898         if (!dp_catalog)
899                 return;
900
901         catalog = container_of(dp_catalog,
902                 struct dp_catalog_private, dp_catalog);
903
904         enable = !!dp_catalog->audio_data;
905         audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
906
907         if (enable)
908                 audio_ctrl |= BIT(0);
909         else
910                 audio_ctrl &= ~BIT(0);
911
912         DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);
913
914         dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
915         /* make sure audio engine is disabled */
916         wmb();
917 }
918
919 void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
920 {
921         struct dp_catalog_private *catalog;
922         u32 sdp_cfg = 0;
923         u32 sdp_cfg2 = 0;
924
925         if (!dp_catalog)
926                 return;
927
928         catalog = container_of(dp_catalog,
929                 struct dp_catalog_private, dp_catalog);
930
931         sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
932         /* AUDIO_TIMESTAMP_SDP_EN */
933         sdp_cfg |= BIT(1);
934         /* AUDIO_STREAM_SDP_EN */
935         sdp_cfg |= BIT(2);
936         /* AUDIO_COPY_MANAGEMENT_SDP_EN */
937         sdp_cfg |= BIT(5);
938         /* AUDIO_ISRC_SDP_EN  */
939         sdp_cfg |= BIT(6);
940         /* AUDIO_INFOFRAME_SDP_EN  */
941         sdp_cfg |= BIT(20);
942
943         DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);
944
945         dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
946
947         sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
948         /* IFRM_REGSRC -> Do not use reg values */
949         sdp_cfg2 &= ~BIT(0);
950         /* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
951         sdp_cfg2 &= ~BIT(1);
952
953         DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);
954
955         dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
956 }
957
/**
 * dp_catalog_audio_init() - install the audio SDP register lookup table
 * @dp_catalog: public DP catalog handle; NULL is ignored
 *
 * Points catalog->audio_map at a static table indexed by
 * [sdp_type][header index]; dp_catalog_audio_get_header() and
 * dp_catalog_audio_set_header() use it to resolve the link register
 * holding a given SDP header word.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	/*
	 * One row per SDP type (stream, timestamp, infoframe, copy
	 * management, ISRC), one column per header index.
	 * NOTE(review): each *_1 register appears twice — presumably two
	 * header words share one register; confirm against the register
	 * map before changing.
	 */
	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
998
999 void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1000 {
1001         struct dp_catalog_private *catalog;
1002         u32 mainlink_levels, safe_to_exit_level;
1003
1004         if (!dp_catalog)
1005                 return;
1006
1007         catalog = container_of(dp_catalog,
1008                 struct dp_catalog_private, dp_catalog);
1009
1010         safe_to_exit_level = dp_catalog->audio_data;
1011         mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1012         mainlink_levels &= 0xFE0;
1013         mainlink_levels |= safe_to_exit_level;
1014
1015         DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1016                          mainlink_levels, safe_to_exit_level);
1017
1018         dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1019 }
This page took 0.101094 seconds and 4 git commands to generate.