// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2015, The Linux Foundation. All rights reserved.
 */

#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>

#include "dsi_phy.h"
#include "dsi_phy_28nm.xml.h"

/*
 * DSI PLL 28nm - clock diagram (eg: DSI0):
 *
 *                        dsi0analog_postdiv_clk
 *                             |         dsi0indirect_path_div2_clk
 *                             |          |
 *                   +------+  |  +----+  |  |\   dsi0byte_mux
 *  dsi0vco_clk --o--| DIV1 |--o--| /2  |--o--| \   |
 *                |  +------+     +----+     | m|  |  +----+
 *                |                          | u|--o--| /4 |-- dsi0pllbyte
 *                |                          | x|     +----+
 *                o--------------------------| /
 *                |                          |/
 *                |          +------+
 *                o----------| DIV3 |------------------------- dsi0pll
 *                           +------+
 */
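
/*
 * Illustrative rate relations implied by the diagram above (a sketch, not
 * taken from the hardware documentation): with the byte mux selecting the
 * indirect /2 path, dsi0pllbyte = dsi0vco_clk / (postdiv1 * 2 * 4) and
 * dsi0pll = dsi0vco_clk / postdiv3. For example, a 600 MHz VCO with
 * postdiv1 = 1 gives a 75 MHz byte clock, i.e. one eighth of the bit rate.
 */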

#define POLL_MAX_READS		10
#define POLL_TIMEOUT_US		50

#define VCO_REF_CLK_RATE	19200000
#define VCO_MIN_RATE		350000000
#define VCO_MAX_RATE		750000000

/* v2.0.0 28nm LP implementation */
#define DSI_PHY_28NM_QUIRK_PHY_LP	BIT(0)
#define DSI_PHY_28NM_QUIRK_PHY_8226	BIT(1)

#define LPFR_LUT_SIZE			10
struct lpfr_cfg {
	unsigned long vco_rate;
	u32 resistance;
};

/* Loop filter resistance: */
static const struct lpfr_cfg lpfr_lut[LPFR_LUT_SIZE] = {
	{ 479500000,  8 },
	{ 480000000, 11 },
	{ 575500000,  8 },
	{ 576000000, 12 },
	{ 610500000,  8 },
	{ 659500000,  9 },
	{ 671500000, 10 },
	{ 672000000, 14 },
	{ 708500000, 10 },
	{ 750000000, 11 },
};

struct pll_28nm_cached_state {
	unsigned long vco_rate;
	u8 postdiv3;
	u8 postdiv1;
	u8 byte_mux;
};

struct dsi_pll_28nm {
	struct clk_hw clk_hw;

	struct msm_dsi_phy *phy;

	struct pll_28nm_cached_state cached_state;
};

#define to_pll_28nm(x) container_of(x, struct dsi_pll_28nm, clk_hw)

static bool pll_28nm_poll_for_ready(struct dsi_pll_28nm *pll_28nm,
				    u32 nb_tries, u32 timeout_us)
{
	bool pll_locked = false;
	u32 val;

	while (nb_tries--) {
		val = readl(pll_28nm->phy->pll_base + REG_DSI_28nm_PHY_PLL_STATUS);
		pll_locked = !!(val & DSI_28nm_PHY_PLL_STATUS_PLL_RDY);

		if (pll_locked)
			break;

		udelay(timeout_us);
	}
	DBG("DSI PLL is %slocked", pll_locked ? "" : "*not* ");

	return pll_locked;
}

static void pll_28nm_software_reset(struct dsi_pll_28nm *pll_28nm)
{
	void __iomem *base = pll_28nm->phy->pll_base;

	/*
	 * Add HW recommended delays after toggling the software
	 * reset bit off and back on.
	 */
	writel(DSI_28nm_PHY_PLL_TEST_CFG_PLL_SW_RESET, base + REG_DSI_28nm_PHY_PLL_TEST_CFG);
	udelay(1);
	writel(0, base + REG_DSI_28nm_PHY_PLL_TEST_CFG);
	udelay(1);
}

/*
 * Clock Callbacks
 */
static int dsi_pll_28nm_clk_set_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long parent_rate)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	struct device *dev = &pll_28nm->phy->pdev->dev;
	void __iomem *base = pll_28nm->phy->pll_base;
	unsigned long div_fbx1000, gen_vco_clk;
	u32 refclk_cfg, frac_n_mode, frac_n_value;
	u32 sdm_cfg0, sdm_cfg1, sdm_cfg2, sdm_cfg3;
	u32 cal_cfg10, cal_cfg11;
	u32 rem;
	int i;

	VERB("rate=%lu, parent's=%lu", rate, parent_rate);

	/* Force postdiv2 to be div-4 */
	writel(3, base + REG_DSI_28nm_PHY_PLL_POSTDIV2_CFG);

	/* Configure the Loop filter resistance */
	for (i = 0; i < LPFR_LUT_SIZE; i++)
		if (rate <= lpfr_lut[i].vco_rate)
			break;
	if (i == LPFR_LUT_SIZE) {
		DRM_DEV_ERROR(dev, "unable to get loop filter resistance. vco=%lu\n",
			      rate);
		return -EINVAL;
	}
	writel(lpfr_lut[i].resistance, base + REG_DSI_28nm_PHY_PLL_LPFR_CFG);

	/* Loop filter capacitance values : c1 and c2 */
	writel(0x70, base + REG_DSI_28nm_PHY_PLL_LPFC1_CFG);
	writel(0x15, base + REG_DSI_28nm_PHY_PLL_LPFC2_CFG);

	rem = rate % VCO_REF_CLK_RATE;
	if (rem) {
		refclk_cfg = DSI_28nm_PHY_PLL_REFCLK_CFG_DBLR;
		frac_n_mode = 1;
		div_fbx1000 = rate / (VCO_REF_CLK_RATE / 500);
		gen_vco_clk = div_fbx1000 * (VCO_REF_CLK_RATE / 500);
	} else {
		refclk_cfg = 0x0;
		frac_n_mode = 0;
		div_fbx1000 = rate / (VCO_REF_CLK_RATE / 1000);
		gen_vco_clk = div_fbx1000 * (VCO_REF_CLK_RATE / 1000);
	}

	DBG("refclk_cfg = %d", refclk_cfg);

	rem = div_fbx1000 % 1000;
	frac_n_value = (rem << 16) / 1000;

	DBG("div_fb = %lu", div_fbx1000);
	DBG("frac_n_value = %d", frac_n_value);

	DBG("Generated VCO Clock: %lu", gen_vco_clk);
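
	/*
	 * Worked example (illustrative, assuming the 19.2 MHz reference):
	 * for rate = 600000000, rate % 19200000 != 0, so fractional-N mode
	 * is used with the reference doubler:
	 *   div_fbx1000  = 600000000 / 38400              = 15625
	 *   frac_n_value = ((15625 % 1000) << 16) / 1000  = 40960
	 * i.e. an effective feedback divider of 15 + 40960/65536 = 15.625
	 * on the doubled 38.4 MHz reference, which gives back the 600 MHz VCO.
	 */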

	sdm_cfg1 = readl(base + REG_DSI_28nm_PHY_PLL_SDM_CFG1);
	sdm_cfg1 &= ~DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET__MASK;
	if (frac_n_mode) {
		sdm_cfg0 = 0x0;
		sdm_cfg0 |= DSI_28nm_PHY_PLL_SDM_CFG0_BYP_DIV(0);
		sdm_cfg1 |= DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET(
				(u32)(((div_fbx1000 / 1000) & 0x3f) - 1));
		sdm_cfg3 = frac_n_value >> 8;
		sdm_cfg2 = frac_n_value & 0xff;
	} else {
		sdm_cfg0 = DSI_28nm_PHY_PLL_SDM_CFG0_BYP;
		sdm_cfg0 |= DSI_28nm_PHY_PLL_SDM_CFG0_BYP_DIV(
				(u32)(((div_fbx1000 / 1000) & 0x3f) - 1));
		sdm_cfg1 |= DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET(0);
		sdm_cfg2 = 0x0;
		sdm_cfg3 = 0x0;
	}

	DBG("sdm_cfg0=%d", sdm_cfg0);
	DBG("sdm_cfg1=%d", sdm_cfg1);
	DBG("sdm_cfg2=%d", sdm_cfg2);
	DBG("sdm_cfg3=%d", sdm_cfg3);

	cal_cfg11 = (u32)(gen_vco_clk / (256 * 1000000));
	cal_cfg10 = (u32)((gen_vco_clk % (256 * 1000000)) / 1000000);
	DBG("cal_cfg10=%d, cal_cfg11=%d", cal_cfg10, cal_cfg11);

	writel(0x02, base + REG_DSI_28nm_PHY_PLL_CHGPUMP_CFG);
	writel(0x2b, base + REG_DSI_28nm_PHY_PLL_CAL_CFG3);
	writel(0x06, base + REG_DSI_28nm_PHY_PLL_CAL_CFG4);
	writel(0x0d, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);

	writel(sdm_cfg1, base + REG_DSI_28nm_PHY_PLL_SDM_CFG1);
	writel(DSI_28nm_PHY_PLL_SDM_CFG2_FREQ_SEED_7_0(sdm_cfg2),
	       base + REG_DSI_28nm_PHY_PLL_SDM_CFG2);
	writel(DSI_28nm_PHY_PLL_SDM_CFG3_FREQ_SEED_15_8(sdm_cfg3),
	       base + REG_DSI_28nm_PHY_PLL_SDM_CFG3);
	writel(0, base + REG_DSI_28nm_PHY_PLL_SDM_CFG4);

	/* Add hardware recommended delay for correct PLL configuration */
	if (pll_28nm->phy->cfg->quirks & DSI_PHY_28NM_QUIRK_PHY_LP)
		udelay(1000);
	else
		udelay(1);

	writel(refclk_cfg, base + REG_DSI_28nm_PHY_PLL_REFCLK_CFG);
	writel(0x00, base + REG_DSI_28nm_PHY_PLL_PWRGEN_CFG);
	writel(0x31, base + REG_DSI_28nm_PHY_PLL_VCOLPF_CFG);
	writel(sdm_cfg0, base + REG_DSI_28nm_PHY_PLL_SDM_CFG0);
	writel(0x12, base + REG_DSI_28nm_PHY_PLL_CAL_CFG0);
	writel(0x30, base + REG_DSI_28nm_PHY_PLL_CAL_CFG6);
	writel(0x00, base + REG_DSI_28nm_PHY_PLL_CAL_CFG7);
	writel(0x60, base + REG_DSI_28nm_PHY_PLL_CAL_CFG8);
	writel(0x00, base + REG_DSI_28nm_PHY_PLL_CAL_CFG9);
	writel(cal_cfg10 & 0xff, base + REG_DSI_28nm_PHY_PLL_CAL_CFG10);
	writel(cal_cfg11 & 0xff, base + REG_DSI_28nm_PHY_PLL_CAL_CFG11);
	writel(0x20, base + REG_DSI_28nm_PHY_PLL_EFUSE_CFG);

	return 0;
}

static int dsi_pll_28nm_clk_is_enabled(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

	return pll_28nm_poll_for_ready(pll_28nm, POLL_MAX_READS,
				       POLL_TIMEOUT_US);
}

static unsigned long dsi_pll_28nm_clk_recalc_rate(struct clk_hw *hw,
						  unsigned long parent_rate)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	void __iomem *base = pll_28nm->phy->pll_base;
	u32 sdm0, doubler, sdm_byp_div;
	u32 sdm_dc_off, sdm_freq_seed, sdm2, sdm3;
	u32 ref_clk = VCO_REF_CLK_RATE;
	unsigned long vco_rate;

	VERB("parent_rate=%lu", parent_rate);

	/* Check to see if the ref clk doubler is enabled */
	doubler = readl(base + REG_DSI_28nm_PHY_PLL_REFCLK_CFG) &
			DSI_28nm_PHY_PLL_REFCLK_CFG_DBLR;
	ref_clk += (doubler * VCO_REF_CLK_RATE);

	/* see if it is integer mode or sdm mode */
	sdm0 = readl(base + REG_DSI_28nm_PHY_PLL_SDM_CFG0);
	if (sdm0 & DSI_28nm_PHY_PLL_SDM_CFG0_BYP) {
		/* integer mode */
		sdm_byp_div = FIELD(
				readl(base + REG_DSI_28nm_PHY_PLL_SDM_CFG0),
				DSI_28nm_PHY_PLL_SDM_CFG0_BYP_DIV) + 1;
		vco_rate = ref_clk * sdm_byp_div;
	} else {
		/* sdm mode */
		sdm_dc_off = FIELD(
				readl(base + REG_DSI_28nm_PHY_PLL_SDM_CFG1),
				DSI_28nm_PHY_PLL_SDM_CFG1_DC_OFFSET);
		DBG("sdm_dc_off = %d", sdm_dc_off);
		sdm2 = FIELD(readl(base + REG_DSI_28nm_PHY_PLL_SDM_CFG2),
				DSI_28nm_PHY_PLL_SDM_CFG2_FREQ_SEED_7_0);
		sdm3 = FIELD(readl(base + REG_DSI_28nm_PHY_PLL_SDM_CFG3),
				DSI_28nm_PHY_PLL_SDM_CFG3_FREQ_SEED_15_8);
		sdm_freq_seed = (sdm3 << 8) | sdm2;
		DBG("sdm_freq_seed = %d", sdm_freq_seed);

		vco_rate = (ref_clk * (sdm_dc_off + 1)) +
			mult_frac(ref_clk, sdm_freq_seed, BIT(16));
		DBG("vco rate = %lu", vco_rate);
	}
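
	/*
	 * Sanity-check sketch (illustrative): with the example programmed by
	 * dsi_pll_28nm_clk_set_rate() above, sdm_dc_off = 14 and
	 * sdm_freq_seed = 40960, so with the doubler enabled (ref_clk of
	 * 38.4 MHz) this computes 38400000 * 15 + 38400000 * 40960 / 65536
	 * = 576 MHz + 24 MHz = 600 MHz, matching the requested rate.
	 */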
284 DBG("returning vco rate = %lu", vco_rate);

static int _dsi_pll_28nm_vco_prepare_hpm(struct dsi_pll_28nm *pll_28nm)
{
	struct device *dev = &pll_28nm->phy->pdev->dev;
	void __iomem *base = pll_28nm->phy->pll_base;
	u32 max_reads = 5, timeout_us = 100;
	bool locked;
	u32 val;
	int i;

	DBG("id=%d", pll_28nm->phy->id);

	pll_28nm_software_reset(pll_28nm);

	/*
	 * PLL power up sequence.
	 * Add necessary delays recommended by hardware.
	 */
	val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	udelay(1);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	udelay(200);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	udelay(500);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	udelay(600);
322 for (i = 0; i < 2; i++) {
323 /* DSI Uniphy lock detect setting */
324 writel(0x0c, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);
326 writel(0x0d, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);
328 /* poll for PLL ready status */
329 locked = pll_28nm_poll_for_ready(pll_28nm, max_reads,
334 pll_28nm_software_reset(pll_28nm);
337 * PLL power up sequence.
338 * Add necessary delays recommended by hardware.
340 val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
341 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
344 val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
345 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
348 val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
349 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
352 val &= ~DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
353 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
356 val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
357 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
360 val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
361 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);

	if (unlikely(!locked))
		DRM_DEV_ERROR(dev, "DSI PLL lock failed\n");
	else
		DBG("DSI PLL Lock success");

	return locked ? 0 : -EINVAL;
}

static int dsi_pll_28nm_vco_prepare_hpm(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	int i, ret;

	if (unlikely(pll_28nm->phy->pll_on))
		return 0;

	for (i = 0; i < 3; i++) {
		ret = _dsi_pll_28nm_vco_prepare_hpm(pll_28nm);
		if (!ret) {
			pll_28nm->phy->pll_on = true;
			return 0;
		}
	}

	return ret;
}

static int dsi_pll_28nm_vco_prepare_8226(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	struct device *dev = &pll_28nm->phy->pdev->dev;
	void __iomem *base = pll_28nm->phy->pll_base;
	u32 max_reads = 5, timeout_us = 100;
	bool locked;
	u32 val;
	int i;

	DBG("id=%d", pll_28nm->phy->id);

	pll_28nm_software_reset(pll_28nm);

	/*
	 * PLL power up sequence.
	 * Add necessary delays recommended by hardware.
	 */
	writel(0x34, base + REG_DSI_28nm_PHY_PLL_CAL_CFG1);

	val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	udelay(200);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	udelay(200);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	udelay(600);
425 for (i = 0; i < 7; i++) {
426 /* DSI Uniphy lock detect setting */
427 writel(0x0d, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);
428 writel(0x0c, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);
430 writel(0x0d, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);
432 /* poll for PLL ready status */
433 locked = pll_28nm_poll_for_ready(pll_28nm,
434 max_reads, timeout_us);
438 pll_28nm_software_reset(pll_28nm);
441 * PLL power up sequence.
442 * Add necessary delays recommended by hardware.
444 writel(0x00, base + REG_DSI_28nm_PHY_PLL_PWRGEN_CFG);
447 val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
448 val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
449 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
452 val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B;
453 val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
454 writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);

	if (unlikely(!locked))
		DRM_DEV_ERROR(dev, "DSI PLL lock failed\n");
	else
		DBG("DSI PLL Lock success");

	return locked ? 0 : -EINVAL;
}

static int dsi_pll_28nm_vco_prepare_lp(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);
	struct device *dev = &pll_28nm->phy->pdev->dev;
	void __iomem *base = pll_28nm->phy->pll_base;
	bool locked;
	u32 max_reads = 10, timeout_us = 50;
	u32 val;

	DBG("id=%d", pll_28nm->phy->id);

	if (unlikely(pll_28nm->phy->pll_on))
		return 0;

	pll_28nm_software_reset(pll_28nm);

	/*
	 * PLL power up sequence.
	 * Add necessary delays recommended by hardware.
	 */
	writel(0x34, base + REG_DSI_28nm_PHY_PLL_CAL_CFG1);
	ndelay(500);

	val = DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRDN_B;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	ndelay(500);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_PWRGEN_PWRDN_B;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	ndelay(500);

	val |= DSI_28nm_PHY_PLL_GLB_CFG_PLL_LDO_PWRDN_B |
	       DSI_28nm_PHY_PLL_GLB_CFG_PLL_ENABLE;
	writel(val, base + REG_DSI_28nm_PHY_PLL_GLB_CFG);
	ndelay(500);

	/* DSI PLL toggle lock detect setting */
	writel(0x04, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);
	ndelay(500);
	writel(0x05, base + REG_DSI_28nm_PHY_PLL_LKDET_CFG2);
	udelay(512);

	locked = pll_28nm_poll_for_ready(pll_28nm, max_reads, timeout_us);

	if (unlikely(!locked)) {
		DRM_DEV_ERROR(dev, "DSI PLL lock failed\n");
		return -EINVAL;
	}

	DBG("DSI PLL lock success");
	pll_28nm->phy->pll_on = true;

	return 0;
}

static void dsi_pll_28nm_vco_unprepare(struct clk_hw *hw)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

	DBG("id=%d", pll_28nm->phy->id);

	if (unlikely(!pll_28nm->phy->pll_on))
		return;

	writel(0, pll_28nm->phy->pll_base + REG_DSI_28nm_PHY_PLL_GLB_CFG);

	pll_28nm->phy->pll_on = false;
}

static long dsi_pll_28nm_clk_round_rate(struct clk_hw *hw,
		unsigned long rate, unsigned long *parent_rate)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(hw);

	if (rate < pll_28nm->phy->cfg->min_pll_rate)
		return pll_28nm->phy->cfg->min_pll_rate;
	else if (rate > pll_28nm->phy->cfg->max_pll_rate)
		return pll_28nm->phy->cfg->max_pll_rate;
	else
		return rate;
}

static const struct clk_ops clk_ops_dsi_pll_28nm_vco_hpm = {
	.round_rate = dsi_pll_28nm_clk_round_rate,
	.set_rate = dsi_pll_28nm_clk_set_rate,
	.recalc_rate = dsi_pll_28nm_clk_recalc_rate,
	.prepare = dsi_pll_28nm_vco_prepare_hpm,
	.unprepare = dsi_pll_28nm_vco_unprepare,
	.is_enabled = dsi_pll_28nm_clk_is_enabled,
};

static const struct clk_ops clk_ops_dsi_pll_28nm_vco_lp = {
	.round_rate = dsi_pll_28nm_clk_round_rate,
	.set_rate = dsi_pll_28nm_clk_set_rate,
	.recalc_rate = dsi_pll_28nm_clk_recalc_rate,
	.prepare = dsi_pll_28nm_vco_prepare_lp,
	.unprepare = dsi_pll_28nm_vco_unprepare,
	.is_enabled = dsi_pll_28nm_clk_is_enabled,
};

static const struct clk_ops clk_ops_dsi_pll_28nm_vco_8226 = {
	.round_rate = dsi_pll_28nm_clk_round_rate,
	.set_rate = dsi_pll_28nm_clk_set_rate,
	.recalc_rate = dsi_pll_28nm_clk_recalc_rate,
	.prepare = dsi_pll_28nm_vco_prepare_8226,
	.unprepare = dsi_pll_28nm_vco_unprepare,
	.is_enabled = dsi_pll_28nm_clk_is_enabled,
};

/*
 * PLL Callbacks
 */
static void dsi_28nm_pll_save_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(phy->vco_hw);
	struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;
	void __iomem *base = pll_28nm->phy->pll_base;

	cached_state->postdiv3 =
			readl(base + REG_DSI_28nm_PHY_PLL_POSTDIV3_CFG);
	cached_state->postdiv1 =
			readl(base + REG_DSI_28nm_PHY_PLL_POSTDIV1_CFG);
	cached_state->byte_mux = readl(base + REG_DSI_28nm_PHY_PLL_VREG_CFG);
	if (dsi_pll_28nm_clk_is_enabled(phy->vco_hw))
		cached_state->vco_rate = clk_hw_get_rate(phy->vco_hw);
	else
		cached_state->vco_rate = 0;
}

static int dsi_28nm_pll_restore_state(struct msm_dsi_phy *phy)
{
	struct dsi_pll_28nm *pll_28nm = to_pll_28nm(phy->vco_hw);
	struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;
	void __iomem *base = pll_28nm->phy->pll_base;
	int ret;

	ret = dsi_pll_28nm_clk_set_rate(phy->vco_hw,
					cached_state->vco_rate, 0);
	if (ret) {
		DRM_DEV_ERROR(&pll_28nm->phy->pdev->dev,
			      "restore vco rate failed. ret=%d\n", ret);
		return ret;
	}

	writel(cached_state->postdiv3, base + REG_DSI_28nm_PHY_PLL_POSTDIV3_CFG);
	writel(cached_state->postdiv1, base + REG_DSI_28nm_PHY_PLL_POSTDIV1_CFG);
	writel(cached_state->byte_mux, base + REG_DSI_28nm_PHY_PLL_VREG_CFG);

	return 0;
}

static int pll_28nm_register(struct dsi_pll_28nm *pll_28nm, struct clk_hw **provided_clocks)
{
	char clk_name[32];
	struct clk_init_data vco_init = {
		.parent_data = &(const struct clk_parent_data) {
			.fw_name = "ref", .name = "xo",
		},
		.num_parents = 1,
		.name = clk_name,
		.flags = CLK_IGNORE_UNUSED,
	};
	struct device *dev = &pll_28nm->phy->pdev->dev;
	struct clk_hw *hw, *analog_postdiv, *indirect_path_div2, *byte_mux;
	int ret;

	DBG("%d", pll_28nm->phy->id);

	if (pll_28nm->phy->cfg->quirks & DSI_PHY_28NM_QUIRK_PHY_LP)
		vco_init.ops = &clk_ops_dsi_pll_28nm_vco_lp;
	else if (pll_28nm->phy->cfg->quirks & DSI_PHY_28NM_QUIRK_PHY_8226)
		vco_init.ops = &clk_ops_dsi_pll_28nm_vco_8226;
	else
		vco_init.ops = &clk_ops_dsi_pll_28nm_vco_hpm;

	snprintf(clk_name, sizeof(clk_name), "dsi%dvco_clk", pll_28nm->phy->id);
	pll_28nm->clk_hw.init = &vco_init;
	ret = devm_clk_hw_register(dev, &pll_28nm->clk_hw);
	if (ret)
		return ret;

	snprintf(clk_name, sizeof(clk_name), "dsi%danalog_postdiv_clk", pll_28nm->phy->id);
	analog_postdiv = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			&pll_28nm->clk_hw, CLK_SET_RATE_PARENT,
			pll_28nm->phy->pll_base +
				REG_DSI_28nm_PHY_PLL_POSTDIV1_CFG,
			0, 4, 0, NULL);
	if (IS_ERR(analog_postdiv))
		return PTR_ERR(analog_postdiv);

	snprintf(clk_name, sizeof(clk_name), "dsi%dindirect_path_div2_clk", pll_28nm->phy->id);
	indirect_path_div2 = devm_clk_hw_register_fixed_factor_parent_hw(dev,
			clk_name, analog_postdiv, CLK_SET_RATE_PARENT, 1, 2);
	if (IS_ERR(indirect_path_div2))
		return PTR_ERR(indirect_path_div2);

	snprintf(clk_name, sizeof(clk_name), "dsi%dpll", pll_28nm->phy->id);
	hw = devm_clk_hw_register_divider_parent_hw(dev, clk_name,
			&pll_28nm->clk_hw, 0, pll_28nm->phy->pll_base +
				REG_DSI_28nm_PHY_PLL_POSTDIV3_CFG,
			0, 8, 0, NULL);
	if (IS_ERR(hw))
		return PTR_ERR(hw);
	provided_clocks[DSI_PIXEL_PLL_CLK] = hw;

	snprintf(clk_name, sizeof(clk_name), "dsi%dbyte_mux", pll_28nm->phy->id);
	byte_mux = devm_clk_hw_register_mux_parent_hws(dev, clk_name,
			((const struct clk_hw *[]){
				&pll_28nm->clk_hw,
				indirect_path_div2,
			}), 2, CLK_SET_RATE_PARENT, pll_28nm->phy->pll_base +
				REG_DSI_28nm_PHY_PLL_VREG_CFG, 1, 1, 0, NULL);
	if (IS_ERR(byte_mux))
		return PTR_ERR(byte_mux);

	snprintf(clk_name, sizeof(clk_name), "dsi%dpllbyte", pll_28nm->phy->id);
	hw = devm_clk_hw_register_fixed_factor_parent_hw(dev, clk_name,
			byte_mux, CLK_SET_RATE_PARENT, 1, 4);
	if (IS_ERR(hw))
		return PTR_ERR(hw);
	provided_clocks[DSI_BYTE_PLL_CLK] = hw;

	return 0;
}

static int dsi_pll_28nm_init(struct msm_dsi_phy *phy)
{
	struct platform_device *pdev = phy->pdev;
	struct dsi_pll_28nm *pll_28nm;
	int ret;

	if (!pdev)
		return -ENODEV;

	pll_28nm = devm_kzalloc(&pdev->dev, sizeof(*pll_28nm), GFP_KERNEL);
	if (!pll_28nm)
		return -ENOMEM;

	pll_28nm->phy = phy;

	ret = pll_28nm_register(pll_28nm, phy->provided_clocks->hws);
	if (ret) {
		DRM_DEV_ERROR(&pdev->dev, "failed to register PLL: %d\n", ret);
		return ret;
	}

	phy->vco_hw = &pll_28nm->clk_hw;

	return 0;
}

static void dsi_28nm_dphy_set_timing(struct msm_dsi_phy *phy,
		struct msm_dsi_dphy_timing *timing)
{
	void __iomem *base = phy->base;

	writel(DSI_28nm_PHY_TIMING_CTRL_0_CLK_ZERO(timing->clk_zero),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_0);
	writel(DSI_28nm_PHY_TIMING_CTRL_1_CLK_TRAIL(timing->clk_trail),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_1);
	writel(DSI_28nm_PHY_TIMING_CTRL_2_CLK_PREPARE(timing->clk_prepare),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_2);
	if (timing->clk_zero & BIT(8))
		writel(DSI_28nm_PHY_TIMING_CTRL_3_CLK_ZERO_8,
		       base + REG_DSI_28nm_PHY_TIMING_CTRL_3);
	writel(DSI_28nm_PHY_TIMING_CTRL_4_HS_EXIT(timing->hs_exit),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_4);
	writel(DSI_28nm_PHY_TIMING_CTRL_5_HS_ZERO(timing->hs_zero),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_5);
	writel(DSI_28nm_PHY_TIMING_CTRL_6_HS_PREPARE(timing->hs_prepare),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_6);
	writel(DSI_28nm_PHY_TIMING_CTRL_7_HS_TRAIL(timing->hs_trail),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_7);
	writel(DSI_28nm_PHY_TIMING_CTRL_8_HS_RQST(timing->hs_rqst),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_8);
	writel(DSI_28nm_PHY_TIMING_CTRL_9_TA_GO(timing->ta_go) |
	       DSI_28nm_PHY_TIMING_CTRL_9_TA_SURE(timing->ta_sure),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_9);
	writel(DSI_28nm_PHY_TIMING_CTRL_10_TA_GET(timing->ta_get),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_10);
	writel(DSI_28nm_PHY_TIMING_CTRL_11_TRIG3_CMD(0),
	       base + REG_DSI_28nm_PHY_TIMING_CTRL_11);
}

static void dsi_28nm_phy_regulator_enable_dcdc(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->reg_base;

	writel(0x0, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_0);
	writel(1, base + REG_DSI_28nm_PHY_REGULATOR_CAL_PWR_CFG);
	writel(0, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_5);
	writel(0, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_3);
	writel(0x3, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_2);
	writel(0x9, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_1);
	writel(0x7, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_0);
	writel(0x20, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_4);
	writel(0x00, phy->base + REG_DSI_28nm_PHY_LDO_CNTRL);
}

static void dsi_28nm_phy_regulator_enable_ldo(struct msm_dsi_phy *phy)
{
	void __iomem *base = phy->reg_base;

	writel(0x0, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_0);
	writel(0, base + REG_DSI_28nm_PHY_REGULATOR_CAL_PWR_CFG);
	writel(0x7, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_5);
	writel(0, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_3);
	writel(0x1, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_2);
	writel(0x1, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_1);
	writel(0x20, base + REG_DSI_28nm_PHY_REGULATOR_CTRL_4);

	if (phy->cfg->quirks & DSI_PHY_28NM_QUIRK_PHY_LP)
		writel(0x05, phy->base + REG_DSI_28nm_PHY_LDO_CNTRL);
	else
		writel(0x0d, phy->base + REG_DSI_28nm_PHY_LDO_CNTRL);
}

static void dsi_28nm_phy_regulator_ctrl(struct msm_dsi_phy *phy, bool enable)
{
	if (!enable) {
		writel(0, phy->reg_base + REG_DSI_28nm_PHY_REGULATOR_CAL_PWR_CFG);
		return;
	}

	if (phy->regulator_ldo_mode)
		dsi_28nm_phy_regulator_enable_ldo(phy);
	else
		dsi_28nm_phy_regulator_enable_dcdc(phy);
}

static int dsi_28nm_phy_enable(struct msm_dsi_phy *phy,
			       struct msm_dsi_phy_clk_request *clk_req)
{
	struct msm_dsi_dphy_timing *timing = &phy->timing;
	int i;
	void __iomem *base = phy->base;
	u32 val;

	DBG("");

	if (msm_dsi_dphy_timing_calc(timing, clk_req)) {
		DRM_DEV_ERROR(&phy->pdev->dev,
			      "%s: D-PHY timing calculation failed\n",
			      __func__);
		return -EINVAL;
	}

	writel(0xff, base + REG_DSI_28nm_PHY_STRENGTH_0);

	dsi_28nm_phy_regulator_ctrl(phy, true);

	dsi_28nm_dphy_set_timing(phy, timing);

	writel(0x00, base + REG_DSI_28nm_PHY_CTRL_1);
	writel(0x5f, base + REG_DSI_28nm_PHY_CTRL_0);

	writel(0x6, base + REG_DSI_28nm_PHY_STRENGTH_1);
825 for (i = 0; i < 4; i++) {
826 writel(0, base + REG_DSI_28nm_PHY_LN_CFG_0(i));
827 writel(0, base + REG_DSI_28nm_PHY_LN_CFG_1(i));
828 writel(0, base + REG_DSI_28nm_PHY_LN_CFG_2(i));
829 writel(0, base + REG_DSI_28nm_PHY_LN_CFG_3(i));
830 writel(0, base + REG_DSI_28nm_PHY_LN_CFG_4(i));
831 writel(0, base + REG_DSI_28nm_PHY_LN_TEST_DATAPATH(i));
832 writel(0, base + REG_DSI_28nm_PHY_LN_DEBUG_SEL(i));
833 writel(0x1, base + REG_DSI_28nm_PHY_LN_TEST_STR_0(i));
834 writel(0x97, base + REG_DSI_28nm_PHY_LN_TEST_STR_1(i));
837 writel(0, base + REG_DSI_28nm_PHY_LNCK_CFG_4);
838 writel(0xc0, base + REG_DSI_28nm_PHY_LNCK_CFG_1);
839 writel(0x1, base + REG_DSI_28nm_PHY_LNCK_TEST_STR0);
840 writel(0xbb, base + REG_DSI_28nm_PHY_LNCK_TEST_STR1);

	writel(0x5f, base + REG_DSI_28nm_PHY_CTRL_0);

	val = readl(base + REG_DSI_28nm_PHY_GLBL_TEST_CTRL);
	if (phy->id == DSI_1 && phy->usecase == MSM_DSI_PHY_SLAVE)
		val &= ~DSI_28nm_PHY_GLBL_TEST_CTRL_BITCLK_HS_SEL;
	else
		val |= DSI_28nm_PHY_GLBL_TEST_CTRL_BITCLK_HS_SEL;
	writel(val, base + REG_DSI_28nm_PHY_GLBL_TEST_CTRL);

	return 0;
}

static void dsi_28nm_phy_disable(struct msm_dsi_phy *phy)
{
	writel(0, phy->base + REG_DSI_28nm_PHY_CTRL_0);
	dsi_28nm_phy_regulator_ctrl(phy, false);

	/*
	 * Wait for the registers writes to complete in order to
	 * ensure that the phy is completely disabled
	 */
	wmb();
}

static const struct regulator_bulk_data dsi_phy_28nm_regulators[] = {
	{ .supply = "vddio", .init_load_uA = 100000 },
};

const struct msm_dsi_phy_cfg dsi_phy_28nm_hpm_cfgs = {
	.has_phy_regulator = true,
	.regulator_data = dsi_phy_28nm_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_28nm_regulators),
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0xfd922b00, 0xfd923100 },
	.num_dsi_phy = 2,
};

const struct msm_dsi_phy_cfg dsi_phy_28nm_hpm_famb_cfgs = {
	.has_phy_regulator = true,
	.regulator_data = dsi_phy_28nm_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_28nm_regulators),
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0x1a94400, 0x1a96400 },
	.num_dsi_phy = 2,
};

const struct msm_dsi_phy_cfg dsi_phy_28nm_lp_cfgs = {
	.has_phy_regulator = true,
	.regulator_data = dsi_phy_28nm_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_28nm_regulators),
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0x1a98500 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_28NM_QUIRK_PHY_LP,
};

const struct msm_dsi_phy_cfg dsi_phy_28nm_8226_cfgs = {
	.has_phy_regulator = true,
	.regulator_data = dsi_phy_28nm_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_28nm_regulators),
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0xfd922b00 },
	.num_dsi_phy = 1,
	.quirks = DSI_PHY_28NM_QUIRK_PHY_8226,
};

const struct msm_dsi_phy_cfg dsi_phy_28nm_8937_cfgs = {
	.has_phy_regulator = true,
	.regulator_data = dsi_phy_28nm_regulators,
	.num_regulators = ARRAY_SIZE(dsi_phy_28nm_regulators),
	.ops = {
		.enable = dsi_28nm_phy_enable,
		.disable = dsi_28nm_phy_disable,
		.pll_init = dsi_pll_28nm_init,
		.save_pll_state = dsi_28nm_pll_save_state,
		.restore_pll_state = dsi_28nm_pll_restore_state,
	},
	.min_pll_rate = VCO_MIN_RATE,
	.max_pll_rate = VCO_MAX_RATE,
	.io_start = { 0x1a94400, 0x1a96400 },
	.num_dsi_phy = 2,
	.quirks = DSI_PHY_28NM_QUIRK_PHY_LP,
};
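
/*
 * Note (assumption, not defined in this file): each cfg above is selected
 * through the of_match table in dsi_phy.c, e.g. dsi_phy_28nm_hpm_cfgs for
 * "qcom,dsi-phy-28nm-hpm" and dsi_phy_28nm_lp_cfgs for "qcom,dsi-phy-28nm-lp".
 */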