// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright 2018-2019 NXP
 */

#include <common.h>
#include <asm/arch/clock.h>
#include <asm/arch/imx-regs.h>
#include <asm/arch/sys_proto.h>
#include <asm/io.h>
#include <div64.h>
#include <errno.h>
#include <linux/bitops.h>
#include <linux/delay.h>

DECLARE_GLOBAL_DATA_PTR;

static struct anamix_pll *ana_pll = (struct anamix_pll *)ANATOP_BASE_ADDR;

static u32 get_root_clk(enum clk_root_index clock_id);

void enable_ocotp_clk(unsigned char enable)
{
	clock_enable(CCGR_OCOTP, !!enable);
}

int enable_i2c_clk(unsigned char enable, unsigned i2c_num)
{
	/* Only I2C1 - I2C4 (index 0 - 3) are valid */
	if (i2c_num > 3)
		return -EINVAL;

	clock_enable(CCGR_I2C1 + i2c_num, !!enable);

	return 0;
}

#ifdef CONFIG_SPL_BUILD
static struct imx_int_pll_rate_table imx8mm_fracpll_tbl[] = {
	PLL_1443X_RATE(1000000000U, 250, 3, 1, 0),
	PLL_1443X_RATE(800000000U, 300, 9, 0, 0),
	PLL_1443X_RATE(750000000U, 250, 8, 0, 0),
	PLL_1443X_RATE(650000000U, 325, 3, 2, 0),
	PLL_1443X_RATE(600000000U, 300, 3, 2, 0),
	PLL_1443X_RATE(594000000U, 99, 1, 2, 0),
	PLL_1443X_RATE(400000000U, 300, 9, 1, 0),
	PLL_1443X_RATE(266666667U, 400, 9, 2, 0),
	PLL_1443X_RATE(167000000U, 334, 3, 4, 0),
	PLL_1443X_RATE(100000000U, 300, 9, 3, 0),
};
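
/*
 * Each PLL_1443X_RATE() entry above lists the target rate followed by
 * mdiv, pdiv, sdiv and kdiv.  With kdiv == 0 the output follows the
 * fractional-PLL relation used by decode_fracpll() below:
 *   Fout = 24 MHz * mdiv / pdiv / 2^sdiv
 * e.g. the 1 GHz entry gives 24 MHz * 250 / 3 / 2^1 = 1000 MHz.
 */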

static int fracpll_configure(enum pll_clocks pll, u32 freq)
{
	int i;
	u32 tmp, div_val;
	void __iomem *pll_base;
	struct imx_int_pll_rate_table *rate;

	for (i = 0; i < ARRAY_SIZE(imx8mm_fracpll_tbl); i++) {
		if (freq == imx8mm_fracpll_tbl[i].rate)
			break;
	}

	if (i == ARRAY_SIZE(imx8mm_fracpll_tbl)) {
		printf("No matched freq table %u\n", freq);
		return -EINVAL;
	}

	rate = &imx8mm_fracpll_tbl[i];

	switch (pll) {
	case ANATOP_DRAM_PLL:
		setbits_le32(GPC_BASE_ADDR + 0xEC, 1 << 7);
		setbits_le32(GPC_BASE_ADDR + 0xF8, 1 << 5);
		writel(SRC_DDR1_ENABLE_MASK, SRC_BASE_ADDR + 0x1004);

		pll_base = &ana_pll->dram_pll_gnrl_ctl;
		break;
	case ANATOP_VIDEO_PLL:
		pll_base = &ana_pll->video_pll1_gnrl_ctl;
		break;
	default:
		return 0;
	}

	/* Bypass clock and set lock to pll output lock */
	tmp = readl(pll_base);
	tmp |= BYPASS_MASK;
	writel(tmp, pll_base);

	/* Assert reset (RESETB low) while reprogramming the dividers */
	tmp &= ~RST_MASK;
	writel(tmp, pll_base);

	div_val = (rate->mdiv << MDIV_SHIFT) | (rate->pdiv << PDIV_SHIFT) |
		(rate->sdiv << SDIV_SHIFT);
	writel(div_val, pll_base + 4);
	writel(rate->kdiv << KDIV_SHIFT, pll_base + 8);

	__udelay(100);

	/* Release reset */
	tmp |= RST_MASK;
	writel(tmp, pll_base);

	/* Wait for the PLL to lock */
	while (!(readl(pll_base) & LOCK_STATUS))
		;

	/* Remove the bypass */
	tmp &= ~BYPASS_MASK;
	writel(tmp, pll_base);

	return 0;
}

void dram_pll_init(ulong pll_val)
{
	fracpll_configure(ANATOP_DRAM_PLL, pll_val);
}

static struct dram_bypass_clk_setting imx8mm_dram_bypass_tbl[] = {
	DRAM_BYPASS_ROOT_CONFIG(MHZ(100), 2, CLK_ROOT_PRE_DIV1, 2,
				CLK_ROOT_PRE_DIV2),
	DRAM_BYPASS_ROOT_CONFIG(MHZ(250), 3, CLK_ROOT_PRE_DIV2, 2,
				CLK_ROOT_PRE_DIV2),
	DRAM_BYPASS_ROOT_CONFIG(MHZ(400), 1, CLK_ROOT_PRE_DIV2, 3,
				CLK_ROOT_PRE_DIV2),
};
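
/*
 * Each entry pairs a bypass frequency with the DRAM_ALT and DRAM_APB
 * root mux selects and pre-dividers that dram_enable_bypass() programs
 * below (alt_root_sel/alt_pre_div, then apb_root_sel/apb_pre_div).
 */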

void dram_enable_bypass(ulong clk_val)
{
	int i;
	struct dram_bypass_clk_setting *config;

	for (i = 0; i < ARRAY_SIZE(imx8mm_dram_bypass_tbl); i++) {
		if (clk_val == imx8mm_dram_bypass_tbl[i].clk)
			break;
	}

	if (i == ARRAY_SIZE(imx8mm_dram_bypass_tbl)) {
		printf("No matched freq table %lu\n", clk_val);
		return;
	}

	config = &imx8mm_dram_bypass_tbl[i];

	clock_set_target_val(DRAM_ALT_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(config->alt_root_sel) |
			     CLK_ROOT_PRE_DIV(config->alt_pre_div));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(config->apb_root_sel) |
			     CLK_ROOT_PRE_DIV(config->apb_pre_div));
	clock_set_target_val(DRAM_SEL_CFG, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
}

void dram_disable_bypass(void)
{
	clock_set_target_val(DRAM_SEL_CFG, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(0));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(4) |
			     CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV5));
}
#endif

int intpll_configure(enum pll_clocks pll, ulong freq)
{
	void __iomem *pll_gnrl_ctl, *pll_div_ctl;
	u32 pll_div_ctl_val, pll_clke_masks;

	switch (pll) {
	case ANATOP_SYSTEM_PLL1:
		pll_gnrl_ctl = &ana_pll->sys_pll1_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll1_div_ctl;
		pll_clke_masks = INTPLL_DIV20_CLKE_MASK |
			INTPLL_DIV10_CLKE_MASK | INTPLL_DIV8_CLKE_MASK |
			INTPLL_DIV6_CLKE_MASK | INTPLL_DIV5_CLKE_MASK |
			INTPLL_DIV4_CLKE_MASK | INTPLL_DIV3_CLKE_MASK |
			INTPLL_DIV2_CLKE_MASK | INTPLL_CLKE_MASK;
		break;
	case ANATOP_SYSTEM_PLL2:
		pll_gnrl_ctl = &ana_pll->sys_pll2_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll2_div_ctl;
		pll_clke_masks = INTPLL_DIV20_CLKE_MASK |
			INTPLL_DIV10_CLKE_MASK | INTPLL_DIV8_CLKE_MASK |
			INTPLL_DIV6_CLKE_MASK | INTPLL_DIV5_CLKE_MASK |
			INTPLL_DIV4_CLKE_MASK | INTPLL_DIV3_CLKE_MASK |
			INTPLL_DIV2_CLKE_MASK | INTPLL_CLKE_MASK;
		break;
	case ANATOP_SYSTEM_PLL3:
		pll_gnrl_ctl = &ana_pll->sys_pll3_gnrl_ctl;
		pll_div_ctl = &ana_pll->sys_pll3_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_ARM_PLL:
		pll_gnrl_ctl = &ana_pll->arm_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->arm_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_GPU_PLL:
		pll_gnrl_ctl = &ana_pll->gpu_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->gpu_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	case ANATOP_VPU_PLL:
		pll_gnrl_ctl = &ana_pll->vpu_pll_gnrl_ctl;
		pll_div_ctl = &ana_pll->vpu_pll_div_ctl;
		pll_clke_masks = INTPLL_CLKE_MASK;
		break;
	default:
		return -EINVAL;
	}

	switch (freq) {
	case MHZ(600):
		/* 24 * 0x12c / 3 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0x12c) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(750):
		/* 24 * 0xfa / 2 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(2) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(800):
		/* 24 * 0x190 / 3 / 2 ^ 2 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0x190) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(2);
		break;
	case MHZ(1000):
		/* 24 * 0xfa / 3 / 2 ^ 1 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(1);
		break;
	case MHZ(1200):
		/* 24 * 0xc8 / 2 / 2 ^ 1 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xc8) |
			INTPLL_PRE_DIV_VAL(2) | INTPLL_POST_DIV_VAL(1);
		break;
	case MHZ(2000):
		/* 24 * 0xfa / 3 / 2 ^ 0 */
		pll_div_ctl_val = INTPLL_MAIN_DIV_VAL(0xfa) |
			INTPLL_PRE_DIV_VAL(3) | INTPLL_POST_DIV_VAL(0);
		break;
	default:
		return -EINVAL;
	}

	/* Bypass clock and set lock to pll output lock */
	setbits_le32(pll_gnrl_ctl, INTPLL_BYPASS_MASK | INTPLL_LOCK_SEL_MASK);

	/* Assert reset (RESETB low) while reprogramming the dividers */
	clrbits_le32(pll_gnrl_ctl, INTPLL_RST_MASK);

	writel(pll_div_ctl_val, pll_div_ctl);

	__udelay(100);

	/* Release reset and wait for the PLL to lock */
	setbits_le32(pll_gnrl_ctl, INTPLL_RST_MASK);

	while (!(readl(pll_gnrl_ctl) & INTPLL_LOCK_MASK))
		;

	/* Remove the bypass and enable the selected output gates */
	clrbits_le32(pll_gnrl_ctl, INTPLL_BYPASS_MASK);
	setbits_le32(pll_gnrl_ctl, pll_clke_masks);

	return 0;
}
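
/*
 * Example: clock_init() below calls intpll_configure(ANATOP_ARM_PLL,
 * MHZ(1200)), which selects the 24 MHz * 0xc8 / 2 / 2^1 = 1200 MHz
 * divider set above and re-enables the PLL output gate once the lock
 * bit is observed.
 */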

void init_uart_clk(u32 index)
{
	/*
	 * Set the UART clock root to the 24 MHz OSC.
	 */
	switch (index) {
	case 0:
		clock_enable(CCGR_UART1, 0);
		clock_set_target_val(UART1_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART1, 1);
		break;
	case 1:
		clock_enable(CCGR_UART2, 0);
		clock_set_target_val(UART2_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART2, 1);
		break;
	case 2:
		clock_enable(CCGR_UART3, 0);
		clock_set_target_val(UART3_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART3, 1);
		break;
	case 3:
		clock_enable(CCGR_UART4, 0);
		clock_set_target_val(UART4_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_UART4, 1);
		break;
	default:
		printf("Invalid uart index\n");
		break;
	}
}

void init_wdog_clk(void)
{
	clock_enable(CCGR_WDOG1, 0);
	clock_enable(CCGR_WDOG2, 0);
	clock_enable(CCGR_WDOG3, 0);
	clock_set_target_val(WDOG_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(0));
	clock_enable(CCGR_WDOG1, 1);
	clock_enable(CCGR_WDOG2, 1);
	clock_enable(CCGR_WDOG3, 1);
}

void init_clk_usdhc(u32 index)
{
	/*
	 * Set the uSDHC clock root (mux select 1, sys pll1 400M).
	 */
	switch (index) {
	case 0:
		clock_enable(CCGR_USDHC1, 0);
		clock_set_target_val(USDHC1_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC1, 1);
		break;
	case 1:
		clock_enable(CCGR_USDHC2, 0);
		clock_set_target_val(USDHC2_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC2, 1);
		break;
	case 2:
		clock_enable(CCGR_USDHC3, 0);
		clock_set_target_val(USDHC3_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(1));
		clock_enable(CCGR_USDHC3, 1);
		break;
	default:
		printf("Invalid usdhc index\n");
		break;
	}
}

void init_clk_ecspi(u32 index)
{
	switch (index) {
	case 0:
		clock_enable(CCGR_ECSPI1, 0);
		clock_set_target_val(ECSPI1_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI1, 1);
		break;
	case 1:
		clock_enable(CCGR_ECSPI2, 0);
		clock_set_target_val(ECSPI2_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI2, 1);
		break;
	case 2:
		clock_enable(CCGR_ECSPI3, 0);
		clock_set_target_val(ECSPI3_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(0));
		clock_enable(CCGR_ECSPI3, 1);
		break;
	default:
		printf("Invalid ecspi index\n");
		break;
	}
}

void init_nand_clk(void)
{
	/*
	 * Set the NAND clock root to sys pll1 400M divided by 4 (100 MHz).
	 */
	clock_enable(CCGR_RAWNAND, 0);
	clock_set_target_val(NAND_CLK_ROOT, CLK_ROOT_ON |
		CLK_ROOT_SOURCE_SEL(3) | CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4)); /* 100M */
	clock_enable(CCGR_RAWNAND, 1);
}

int clock_init(void)
{
	u32 val_cfg0;

	/*
	 * The gates are not exported to the clk tree, so configure them here.
	 * According to the ANAMIX spec:
	 * sys pll1 is fixed at 800 MHz
	 * sys pll2 is fixed at 1 GHz
	 * Here we only enable the outputs.
	 */
	val_cfg0 = readl(&ana_pll->sys_pll1_gnrl_ctl);
	val_cfg0 |= INTPLL_CLKE_MASK | INTPLL_DIV2_CLKE_MASK |
		INTPLL_DIV3_CLKE_MASK | INTPLL_DIV4_CLKE_MASK |
		INTPLL_DIV5_CLKE_MASK | INTPLL_DIV6_CLKE_MASK |
		INTPLL_DIV8_CLKE_MASK | INTPLL_DIV10_CLKE_MASK |
		INTPLL_DIV20_CLKE_MASK;
	writel(val_cfg0, &ana_pll->sys_pll1_gnrl_ctl);

	val_cfg0 = readl(&ana_pll->sys_pll2_gnrl_ctl);
	val_cfg0 |= INTPLL_CLKE_MASK | INTPLL_DIV2_CLKE_MASK |
		INTPLL_DIV3_CLKE_MASK | INTPLL_DIV4_CLKE_MASK |
		INTPLL_DIV5_CLKE_MASK | INTPLL_DIV6_CLKE_MASK |
		INTPLL_DIV8_CLKE_MASK | INTPLL_DIV10_CLKE_MASK |
		INTPLL_DIV20_CLKE_MASK;
	writel(val_cfg0, &ana_pll->sys_pll2_gnrl_ctl);

	/* Configure ARM at 1.2 GHz */
	clock_set_target_val(ARM_A53_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(2));

	intpll_configure(ANATOP_ARM_PLL, MHZ(1200));

	/* Bypass the CCM A53 root and switch to ARM PLL -> MUX -> CPU */
	clock_set_target_val(CORE_SEL_CFG, CLK_ROOT_SOURCE_SEL(1));

	if (is_imx8mn() || is_imx8mp())
		intpll_configure(ANATOP_SYSTEM_PLL3, MHZ(600));
	else
		intpll_configure(ANATOP_SYSTEM_PLL3, MHZ(750));

	if (is_imx8mp()) {
		/* The 8MP ROM already set NOC to 800 MHz; only configure the NOC_IO clk to 600 MHz */
		/* The 8MP ROM already set GIC to 400 MHz (system_pll1_800m with div = 2) */
		clock_set_target_val(NOC_IO_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(2));
	} else {
		clock_set_target_val(NOC_CLK_ROOT, CLK_ROOT_ON | CLK_ROOT_SOURCE_SEL(2));

		/* Configure GIC to sys_pll2_100m */
		clock_enable(CCGR_GIC, 0);
		clock_set_target_val(GIC_CLK_ROOT, CLK_ROOT_ON |
				     CLK_ROOT_SOURCE_SEL(3));
		clock_enable(CCGR_GIC, 1);
	}

	clock_set_target_val(NAND_USDHC_BUS_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));

	clock_enable(CCGR_DDR1, 0);
	clock_set_target_val(DRAM_ALT_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
	clock_set_target_val(DRAM_APB_CLK_ROOT, CLK_ROOT_ON |
			     CLK_ROOT_SOURCE_SEL(1));
	clock_enable(CCGR_DDR1, 1);

	clock_enable(CCGR_TEMP_SENSOR, 1);

	clock_enable(CCGR_SEC_DEBUG, 1);

	return 0;
}

u32 imx_get_uartclk(void)
{
	return get_root_clk(UART1_CLK_ROOT);
}

static u32 decode_intpll(enum clk_root_src intpll)
{
	u32 pll_gnrl_ctl, pll_div_ctl, pll_clke_mask;
	u32 main_div, pre_div, post_div, div;
	u64 freq;

	switch (intpll) {
	case ARM_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->arm_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->arm_pll_div_ctl);
		break;
	case GPU_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->gpu_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->gpu_pll_div_ctl);
		break;
	case VPU_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->vpu_pll_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->vpu_pll_div_ctl);
		break;
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL1_40M_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll1_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll1_div_ctl);
		break;
	case SYSTEM_PLL2_1000M_CLK:
	case SYSTEM_PLL2_500M_CLK:
	case SYSTEM_PLL2_333M_CLK:
	case SYSTEM_PLL2_250M_CLK:
	case SYSTEM_PLL2_200M_CLK:
	case SYSTEM_PLL2_166M_CLK:
	case SYSTEM_PLL2_125M_CLK:
	case SYSTEM_PLL2_100M_CLK:
	case SYSTEM_PLL2_50M_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll2_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll2_div_ctl);
		break;
	case SYSTEM_PLL3_CLK:
		pll_gnrl_ctl = readl(&ana_pll->sys_pll3_gnrl_ctl);
		pll_div_ctl = readl(&ana_pll->sys_pll3_div_ctl);
		break;
	default:
		return -EINVAL;
	}

	/* Only the 24 MHz SYS_XTAL reference is supported; PAD_CLK is not handled */
	if ((pll_gnrl_ctl & INTPLL_REF_CLK_SEL_MASK) != 0)
		return 0;

	if ((pll_gnrl_ctl & INTPLL_RST_MASK) == 0)
		return 0;
	/*
	 * When BYPASS is equal to 1, the PLL enters bypass mode
	 * regardless of the value of RESETB
	 */
	if (pll_gnrl_ctl & INTPLL_BYPASS_MASK)
		return 24000000u;

	if (!(pll_gnrl_ctl & INTPLL_LOCK_MASK)) {
		puts("pll not locked\n");
		return 0;
	}

	switch (intpll) {
	case ARM_PLL_CLK:
	case GPU_PLL_CLK:
	case VPU_PLL_CLK:
	case SYSTEM_PLL3_CLK:
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL2_1000M_CLK:
		pll_clke_mask = INTPLL_CLKE_MASK;
		div = 1;
		break;
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL2_500M_CLK:
		pll_clke_mask = INTPLL_DIV2_CLKE_MASK;
		div = 2;
		break;
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL2_333M_CLK:
		pll_clke_mask = INTPLL_DIV3_CLKE_MASK;
		div = 3;
		break;
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL2_250M_CLK:
		pll_clke_mask = INTPLL_DIV4_CLKE_MASK;
		div = 4;
		break;
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL2_200M_CLK:
		pll_clke_mask = INTPLL_DIV5_CLKE_MASK;
		div = 5;
		break;
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL2_166M_CLK:
		pll_clke_mask = INTPLL_DIV6_CLKE_MASK;
		div = 6;
		break;
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL2_125M_CLK:
		pll_clke_mask = INTPLL_DIV8_CLKE_MASK;
		div = 8;
		break;
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL2_100M_CLK:
		pll_clke_mask = INTPLL_DIV10_CLKE_MASK;
		div = 10;
		break;
	case SYSTEM_PLL1_40M_CLK:
	case SYSTEM_PLL2_50M_CLK:
		pll_clke_mask = INTPLL_DIV20_CLKE_MASK;
		div = 20;
		break;
	default:
		return -EINVAL;
	}

	if ((pll_gnrl_ctl & pll_clke_mask) == 0)
		return 0;

	main_div = (pll_div_ctl & INTPLL_MAIN_DIV_MASK) >>
		INTPLL_MAIN_DIV_SHIFT;
	pre_div = (pll_div_ctl & INTPLL_PRE_DIV_MASK) >>
		INTPLL_PRE_DIV_SHIFT;
	post_div = (pll_div_ctl & INTPLL_POST_DIV_MASK) >>
		INTPLL_POST_DIV_SHIFT;

	/* FVCO = (m * FIN) / p, FOUT = (m * FIN) / (p * 2^s) */
	freq = 24000000ULL * main_div;
	return lldiv(freq, pre_div * (1 << post_div) * div);
}
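
/*
 * Example: with the ARM PLL programmed for 1.2 GHz by intpll_configure()
 * above (m = 0xc8, p = 2, s = 1, div = 1), this evaluates to
 * 24 MHz * 200 / (2 * 2^1 * 1) = 1200 MHz.
 */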

static u32 decode_fracpll(enum clk_root_src frac_pll)
{
	u32 pll_gnrl_ctl, pll_fdiv_ctl0, pll_fdiv_ctl1;
	u32 main_div, pre_div, post_div, k;

	switch (frac_pll) {
	case DRAM_PLL1_CLK:
		pll_gnrl_ctl = readl(&ana_pll->dram_pll_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->dram_pll_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->dram_pll_fdiv_ctl1);
		break;
	case AUDIO_PLL1_CLK:
		pll_gnrl_ctl = readl(&ana_pll->audio_pll1_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->audio_pll1_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->audio_pll1_fdiv_ctl1);
		break;
	case AUDIO_PLL2_CLK:
		pll_gnrl_ctl = readl(&ana_pll->audio_pll2_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->audio_pll2_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->audio_pll2_fdiv_ctl1);
		break;
	case VIDEO_PLL_CLK:
		pll_gnrl_ctl = readl(&ana_pll->video_pll1_gnrl_ctl);
		pll_fdiv_ctl0 = readl(&ana_pll->video_pll1_fdiv_ctl0);
		pll_fdiv_ctl1 = readl(&ana_pll->video_pll1_fdiv_ctl1);
		break;
	default:
		printf("Not supported\n");
		return 0;
	}

	/* Only the 24 MHz SYS_XTAL reference is supported; PAD_CLK is not handled */
	if ((pll_gnrl_ctl & GENMASK(1, 0)) != 0)
		return 0;

	if ((pll_gnrl_ctl & RST_MASK) == 0)
		return 0;
	/*
	 * When BYPASS is equal to 1, the PLL enters bypass mode
	 * regardless of the value of RESETB
	 */
	if (pll_gnrl_ctl & BYPASS_MASK)
		return 24000000u;

	if (!(pll_gnrl_ctl & LOCK_STATUS)) {
		puts("pll not locked\n");
		return 0;
	}

	if (!(pll_gnrl_ctl & CLKE_MASK))
		return 0;

	main_div = (pll_fdiv_ctl0 & MDIV_MASK) >> MDIV_SHIFT;
	pre_div = (pll_fdiv_ctl0 & PDIV_MASK) >> PDIV_SHIFT;
	post_div = (pll_fdiv_ctl0 & SDIV_MASK) >> SDIV_SHIFT;

	k = pll_fdiv_ctl1 & KDIV_MASK;

	return lldiv((main_div * 65536 + k) * 24000000ULL,
		     65536 * pre_div * (1 << post_div));
}
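
/*
 * Example: the 1 GHz DRAM PLL setting from imx8mm_fracpll_tbl[]
 * (mdiv = 250, pdiv = 3, sdiv = 1, kdiv = 0) decodes as
 * (250 * 65536 + 0) * 24 MHz / (65536 * 3 * 2^1) = 1000 MHz.
 */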

static u32 get_root_src_clk(enum clk_root_src root_src)
{
	switch (root_src) {
	case OSC_24M_CLK:
		return 24000000u;
	case ARM_PLL_CLK:
	case GPU_PLL_CLK:
	case VPU_PLL_CLK:
	case SYSTEM_PLL1_800M_CLK:
	case SYSTEM_PLL1_400M_CLK:
	case SYSTEM_PLL1_266M_CLK:
	case SYSTEM_PLL1_200M_CLK:
	case SYSTEM_PLL1_160M_CLK:
	case SYSTEM_PLL1_133M_CLK:
	case SYSTEM_PLL1_100M_CLK:
	case SYSTEM_PLL1_80M_CLK:
	case SYSTEM_PLL1_40M_CLK:
	case SYSTEM_PLL2_1000M_CLK:
	case SYSTEM_PLL2_500M_CLK:
	case SYSTEM_PLL2_333M_CLK:
	case SYSTEM_PLL2_250M_CLK:
	case SYSTEM_PLL2_200M_CLK:
	case SYSTEM_PLL2_166M_CLK:
	case SYSTEM_PLL2_125M_CLK:
	case SYSTEM_PLL2_100M_CLK:
	case SYSTEM_PLL2_50M_CLK:
	case SYSTEM_PLL3_CLK:
		return decode_intpll(root_src);
	case DRAM_PLL1_CLK:
	case AUDIO_PLL1_CLK:
	case AUDIO_PLL2_CLK:
	case VIDEO_PLL_CLK:
		return decode_fracpll(root_src);
	case ARM_A53_ALT_CLK:
		return get_root_clk(ARM_A53_CLK_ROOT);
	default:
		return 0;
	}
}

static u32 get_root_clk(enum clk_root_index clock_id)
{
	enum clk_root_src root_src;
	u32 post_podf, pre_podf, root_src_clk;

	if (clock_root_enabled(clock_id) <= 0)
		return 0;

	if (clock_get_prediv(clock_id, &pre_podf) < 0)
		return 0;

	if (clock_get_postdiv(clock_id, &post_podf) < 0)
		return 0;

	if (clock_get_src(clock_id, &root_src) < 0)
		return 0;

	root_src_clk = get_root_src_clk(root_src);

	return root_src_clk / (post_podf + 1) / (pre_podf + 1);
}
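
/*
 * Example: a root that is gated on, sourced from SYSTEM_PLL1_800M_CLK
 * with pre_podf = 0 (/1) and post_podf = 2 (/3), yields
 * 800000000 / 3 / 1 = 266666666 Hz.
 */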

u32 get_arm_core_clk(void)
{
	enum clk_root_src root_src;
	u32 root_src_clk;

	if (clock_get_src(CORE_SEL_CFG, &root_src) < 0)
		return 0;

	root_src_clk = get_root_src_clk(root_src);

	return root_src_clk;
}

u32 mxc_get_clock(enum mxc_clock clk)
{
	u32 val;

	switch (clk) {
	case MXC_ARM_CLK:
		return get_arm_core_clk();
	case MXC_IPG_CLK:
		clock_get_target_val(IPG_CLK_ROOT, &val);
		val = val & 0x3;
		return get_root_clk(AHB_CLK_ROOT) / 2 / (val + 1);
	case MXC_CSPI_CLK:
		return get_root_clk(ECSPI1_CLK_ROOT);
	case MXC_ESDHC_CLK:
		return get_root_clk(USDHC1_CLK_ROOT);
	case MXC_ESDHC2_CLK:
		return get_root_clk(USDHC2_CLK_ROOT);
	case MXC_ESDHC3_CLK:
		return get_root_clk(USDHC3_CLK_ROOT);
	case MXC_I2C_CLK:
		return get_root_clk(I2C1_CLK_ROOT);
	case MXC_UART_CLK:
		return get_root_clk(UART1_CLK_ROOT);
	case MXC_QSPI_CLK:
		return get_root_clk(QSPI_CLK_ROOT);
	default:
		printf("Unsupported mxc_clock %d\n", clk);
		break;
	}

	return 0;
}

#ifdef CONFIG_DWC_ETH_QOS
int set_clk_eqos(enum enet_freq type)
{
	u32 target;
	u32 enet1_ref;

	switch (type) {
	case ENET_125MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_125M_CLK;
		break;
	case ENET_50MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_50M_CLK;
		break;
	case ENET_25MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_25M_CLK;
		break;
	default:
		return -EINVAL;
	}

	/* disable the clock first */
	clock_enable(CCGR_QOS_ETHENET, 0);
	clock_enable(CCGR_SDMA2, 0);

	/* set the ENET AXI clock to 266 MHz */
	target = CLK_ROOT_ON | ENET_AXI_CLK_ROOT_FROM_SYS1_PLL_266M |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_AXI_CLK_ROOT, target);

	target = CLK_ROOT_ON | enet1_ref |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_QOS_CLK_ROOT, target);

	target = CLK_ROOT_ON |
		 ENET1_TIME_CLK_ROOT_FROM_PLL_ENET_MAIN_100M_CLK |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4);
	clock_set_target_val(ENET_QOS_TIMER_CLK_ROOT, target);

	/* enable the clocks again */
	clock_enable(CCGR_QOS_ETHENET, 1);
	clock_enable(CCGR_SDMA2, 1);

	return 0;
}

int imx_eqos_txclk_set_rate(u32 rate)
{
	u32 val;
	u32 eqos_post_div;

	/* disable the clock first */
	clock_enable(CCGR_QOS_ETHENET, 0);
	clock_enable(CCGR_SDMA2, 0);

	switch (rate) {
	case 125000000:
		eqos_post_div = 1;
		break;
	case 25000000:
		eqos_post_div = 125000000 / 25000000;
		break;
	case 2500000:
		eqos_post_div = 125000000 / 2500000;
		break;
	default:
		return -EINVAL;
	}

	clock_get_target_val(ENET_QOS_CLK_ROOT, &val);
	val &= ~(CLK_ROOT_PRE_DIV_MASK | CLK_ROOT_POST_DIV_MASK);
	val |= CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
	       CLK_ROOT_POST_DIV(eqos_post_div - 1);
	clock_set_target_val(ENET_QOS_CLK_ROOT, val);

	/* enable the clocks again */
	clock_enable(CCGR_QOS_ETHENET, 1);
	clock_enable(CCGR_SDMA2, 1);

	return 0;
}
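
/*
 * The post divider above assumes the ENET_QOS root runs at 125 MHz
 * (see set_clk_eqos()): 125 MHz / 5 = 25 MHz for 100 Mbit links and
 * 125 MHz / 50 = 2.5 MHz for 10 Mbit links.
 */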

u32 imx_get_eqos_csr_clk(void)
{
	return get_root_clk(ENET_AXI_CLK_ROOT);
}
#endif

#ifdef CONFIG_FEC_MXC
int set_clk_enet(enum enet_freq type)
{
	u32 target;
	u32 enet1_ref;

	switch (type) {
	case ENET_125MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_125M_CLK;
		break;
	case ENET_50MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_50M_CLK;
		break;
	case ENET_25MHZ:
		enet1_ref = ENET1_REF_CLK_ROOT_FROM_PLL_ENET_MAIN_25M_CLK;
		break;
	default:
		return -EINVAL;
	}

	/* disable the clock first */
	clock_enable(CCGR_ENET1, 0);
	clock_enable(CCGR_SIM_ENET, 0);

	/* set the ENET AXI clock to 266 MHz */
	target = CLK_ROOT_ON | ENET_AXI_CLK_ROOT_FROM_SYS1_PLL_266M |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_AXI_CLK_ROOT, target);

	target = CLK_ROOT_ON | enet1_ref |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV1);
	clock_set_target_val(ENET_REF_CLK_ROOT, target);

	target = CLK_ROOT_ON |
		 ENET1_TIME_CLK_ROOT_FROM_PLL_ENET_MAIN_100M_CLK |
		 CLK_ROOT_PRE_DIV(CLK_ROOT_PRE_DIV1) |
		 CLK_ROOT_POST_DIV(CLK_ROOT_POST_DIV4);
	clock_set_target_val(ENET_TIMER_CLK_ROOT, target);

	/* enable the clocks again */
	clock_enable(CCGR_SIM_ENET, 1);
	clock_enable(CCGR_ENET1, 1);