1 // SPDX-License-Identifier: GPL-2.0
3 /*
 * MediaTek common clock driver
 *
5 * Copyright (C) 2018 MediaTek Inc.
 */
9 #include <clk-uclass.h>
13 #include <linux/bitops.h>
14 #include <linux/delay.h>
/* PLL CON0 enable bit (written at pll->reg + REG_CON0) */
21 #define CON0_BASE_EN BIT(0)
/* PLL power/isolation bits (written at pll->pwr_reg) */
22 #define CON0_PWR_ON BIT(0)
23 #define CON0_ISO_EN BIT(1)
/* NOTE(review): presumably latches a new PCW value into the PLL — confirm. */
24 #define CON1_PCW_CHG BIT(31)
/* 3-bit power-of-two post-divider field in pll->pd_reg */
26 #define POSTDIV_MASK 0x7
/* default number of integer bits in the PCW when pll->pcwibits is 0 */
27 #define INTEGER_BITS 7
29 /* scpsys clock off control */
30 #define CLK_SCP_CFG0 0x200
31 #define CLK_SCP_CFG1 0x204
32 #define SCP_ARMCK_OFF_EN GENMASK(9, 0)
33 #define SCP_AXICK_DCM_DIS_EN BIT(0)
34 #define SCP_AXICK_26M_SEL_EN BIT(4)
36 /* shared functions */
39 * In case the rate change propagation to parent clocks is undesirable,
40 * this function is recursively called to find the parent to calculate
41 * the accurate frequency.
/*
 * Build a temporary struct clk for parent clock @id and ask the clock
 * framework for its rate; clk_get_rate() may re-enter this driver's
 * get_rate ops until a fixed-rate ancestor is reached.
 */
43 static ulong mtk_clk_find_parent_rate(struct clk *clk, int id,
/* Temporary handle: clk_get_rate() only needs .id and .dev. */
46 struct clk parent = { .id = id, };
/* NOTE(review): defaults to the same clock device; the (not visible)
 * third parameter appears to select a different parent device — confirm
 * against the full source. */
51 parent.dev = clk->dev;
53 return clk_get_rate(&parent);
/*
 * mtk_clk_mux_set_parent() - reparent a composite mux clock
 * @base: MMIO base of the clock unit
 * @parent: parent clock ID to select
 * @mux: mux descriptor (register offsets, shift/mask, flags)
 *
 * Translates @parent into the mux selector index, then programs the
 * selector either through dedicated set/clr/update registers or by a
 * plain read-modify-write of the mux register.
 */
56 static int mtk_clk_mux_set_parent(void __iomem *base, u32 parent,
57 const struct mtk_composite *mux)
/* Linear search: map the parent clock ID to its selector index. */
61 while (mux->parent[index] != parent)
62 if (++index == mux->num_parents)
/* Set/clr/update scheme: clear the whole selector field, write the
 * new index, then kick the update bit (if the mux has one) so the
 * new selection takes effect. */
65 if (mux->flags & CLK_MUX_SETCLR_UPD) {
66 val = (mux->mux_mask << mux->mux_shift);
67 writel(val, base + mux->mux_clr_reg);
69 val = (index << mux->mux_shift);
70 writel(val, base + mux->mux_set_reg);
72 if (mux->upd_shift >= 0)
73 writel(BIT(mux->upd_shift), base + mux->upd_reg);
75 /* switch mux to a select parent */
76 val = readl(base + mux->mux_reg);
77 val &= ~(mux->mux_mask << mux->mux_shift);
79 val |= index << mux->mux_shift;
80 writel(val, base + mux->mux_reg);
86 /* apmixedsys functions */
/*
 * __mtk_pll_recalc_rate() - compute a PLL's output rate
 * @pll: PLL descriptor (pcwbits/pcwibits layout)
 * @fin: input (reference) frequency
 * @pcw: programmed PCW divider value
 * @postdiv: post divider (actual divider value, a power of two)
 *
 * vco = fin * pcw, scaled down by the fractional PCW bits; the result
 * is divided by @postdiv with round-up.
 */
88 static unsigned long __mtk_pll_recalc_rate(const struct mtk_pll_data *pll,
89 u32 fin, u32 pcw, int postdiv)
91 int pcwbits = pll->pcwbits;
97 /* The fractional part of the PLL divider. */
98 ibits = pll->pcwibits ? pll->pcwibits : INTEGER_BITS;
99 pcwfbits = pcwbits > ibits ? pcwbits - ibits : 0;
101 vco = (u64)fin * pcw;
/* NOTE(review): vco is presumably shifted right by pcwfbits, rounding
 * up when any fractional bit is set — confirm against the full source. */
103 if (pcwfbits && (vco & GENMASK(pcwfbits - 1, 0)))
/* Divide by the post divider, rounding up. */
111 return ((unsigned long)vco + postdiv - 1) / postdiv;
115 * MediaTek PLLs are configured through their pcw value. The pcw value
116 * describes a divider in the PLL feedback loop which consists of 7 bits
117 * for the integer part and the remaining bits (if present) for the
118 * fractional part. Also they have a 3 bit power-of-two post divider.
/*
 * mtk_pll_set_rate_regs() - program pcw and postdiv into the PLL registers
 * @clk: PLL clock handle (clk->id indexes priv->tree->plls)
 * @pcw: new PCW divider value
 * @postdiv: new post divider (a power of two; stored as its log2)
 */
120 static void mtk_pll_set_rate_regs(struct clk *clk, u32 pcw, int postdiv)
122 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
123 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* Encode postdiv as its exponent in the 3-bit pd field. */
127 val = readl(priv->base + pll->pd_reg);
128 val &= ~(POSTDIV_MASK << pll->pd_shift);
129 val |= (ffs(postdiv) - 1) << pll->pd_shift;
131 /* postdiv and pcw need to set at the same time if on same register */
132 if (pll->pd_reg != pll->pcw_reg) {
133 writel(val, priv->base + pll->pd_reg);
134 val = readl(priv->base + pll->pcw_reg);
/* Replace the PCW field with the new value. */
138 val &= ~GENMASK(pll->pcw_shift + pll->pcwbits - 1, pll->pcw_shift);
139 val |= pcw << pll->pcw_shift;
/* Some PLLs latch a new PCW only after a write to a dedicated
 * "pcw change" register: write pcw first, then trigger the change.
 * NOTE(review): chg is presumably OR-ed with a change bit between
 * the read and write — confirm against the full source. */
141 if (pll->pcw_chg_reg) {
142 chg = readl(priv->base + pll->pcw_chg_reg);
144 writel(val, priv->base + pll->pcw_reg);
145 writel(chg, priv->base + pll->pcw_chg_reg);
148 writel(val, priv->base + pll->pcw_reg);
155 * mtk_pll_calc_values - calculate good values for a given input frequency.
157 * @pcw: The pcw value (output)
158 * @postdiv: The post divider (output)
159 * @freq: The desired target frequency
161 static void mtk_pll_calc_values(struct clk *clk, u32 *pcw, u32 *postdiv,
164 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
165 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* Minimum VCO frequency; 1 GHz if the descriptor does not set one. */
166 unsigned long fmin = pll->fmin ? pll->fmin : 1000 * MHZ;
/* Clamp the request to the PLL's maximum frequency. */
171 if (freq > pll->fmax)
/* Pick the smallest power-of-two postdiv (up to 2^4) that keeps the
 * VCO (freq * postdiv) at or above fmin. */
174 for (val = 0; val < 5; val++) {
176 if ((u64)freq * *postdiv >= fmin)
180 /* _pcw = freq * postdiv / xtal_rate * 2^pcwfbits */
181 ibits = pll->pcwibits ? pll->pcwibits : INTEGER_BITS;
182 _pcw = ((u64)freq << val) << (pll->pcwbits - ibits);
183 do_div(_pcw, priv->tree->xtal2_rate);
/*
 * mtk_apmixedsys_set_rate() - set_rate op for apmixedsys PLL clocks
 * @clk: PLL clock handle
 * @rate: requested rate in Hz
 *
 * Derives pcw/postdiv for @rate and programs them into the hardware.
 */
188 static ulong mtk_apmixedsys_set_rate(struct clk *clk, ulong rate)
193 mtk_pll_calc_values(clk, &pcw, &postdiv, rate);
194 mtk_pll_set_rate_regs(clk, pcw, postdiv);
/*
 * mtk_apmixedsys_get_rate() - read back a PLL's current output rate
 * @clk: PLL clock handle
 *
 * Decodes postdiv (stored as a log2 exponent) and pcw from the
 * registers and recalculates the rate from the xtal2 reference.
 */
199 static ulong mtk_apmixedsys_get_rate(struct clk *clk)
201 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
202 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* The pd field holds the exponent; convert to the actual divider. */
206 postdiv = (readl(priv->base + pll->pd_reg) >> pll->pd_shift) &
208 postdiv = 1 << postdiv;
210 pcw = readl(priv->base + pll->pcw_reg) >> pll->pcw_shift;
211 pcw &= GENMASK(pll->pcwbits - 1, 0);
213 return __mtk_pll_recalc_rate(pll, priv->tree->xtal2_rate,
/*
 * mtk_apmixedsys_enable() - power up and enable a PLL
 *
 * Sequence: assert power-on, release output isolation, set the enable
 * bit in CON0, then release the reset bar for PLLs that have one.
 */
217 static int mtk_apmixedsys_enable(struct clk *clk)
219 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
220 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* Power on the PLL. */
223 r = readl(priv->base + pll->pwr_reg) | CON0_PWR_ON;
224 writel(r, priv->base + pll->pwr_reg);
/* Release output isolation. */
227 r = readl(priv->base + pll->pwr_reg) & ~CON0_ISO_EN;
228 writel(r, priv->base + pll->pwr_reg);
/* Enable the PLL proper via CON0.
 * NOTE(review): the modification of r between this read and write is
 * not visible here — presumably r |= CON0_BASE_EN; confirm. */
231 r = readl(priv->base + pll->reg + REG_CON0);
233 writel(r, priv->base + pll->reg + REG_CON0);
/* Release the reset bar once the PLL is up. */
237 if (pll->flags & HAVE_RST_BAR) {
238 r = readl(priv->base + pll->reg + REG_CON0);
239 r |= pll->rst_bar_mask;
240 writel(r, priv->base + pll->reg + REG_CON0);
/*
 * mtk_apmixedsys_disable() - disable and power down a PLL
 *
 * Reverse of mtk_apmixedsys_enable(): assert the reset bar, clear the
 * CON0 enable, assert output isolation, then remove power.
 */
246 static int mtk_apmixedsys_disable(struct clk *clk)
248 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
249 const struct mtk_pll_data *pll = &priv->tree->plls[clk->id];
/* Assert the reset bar first on PLLs that have one. */
252 if (pll->flags & HAVE_RST_BAR) {
253 r = readl(priv->base + pll->reg + REG_CON0);
254 r &= ~pll->rst_bar_mask;
255 writel(r, priv->base + pll->reg + REG_CON0);
/* Clear the PLL enable bit in CON0.
 * NOTE(review): the clearing of the enable bit between this read and
 * write is not visible here — confirm against the full source. */
258 r = readl(priv->base + pll->reg + REG_CON0);
260 writel(r, priv->base + pll->reg + REG_CON0);
/* Assert isolation before removing power. */
262 r = readl(priv->base + pll->pwr_reg) | CON0_ISO_EN;
263 writel(r, priv->base + pll->pwr_reg);
265 r = readl(priv->base + pll->pwr_reg) & ~CON0_PWR_ON;
266 writel(r, priv->base + pll->pwr_reg);
271 /* topckgen functions */
/*
 * mtk_factor_recalc_rate() - parent_rate * mult / div for a fixed factor
 *
 * 64-bit intermediate avoids overflow for large parent rates.
 */
273 static ulong mtk_factor_recalc_rate(const struct mtk_fixed_factor *fdiv,
276 u64 rate = parent_rate * fdiv->mult;
278 do_div(rate, fdiv->div);
/*
 * mtk_topckgen_get_factor_rate() - rate of a topckgen fixed-factor clock
 * @off: index into priv->tree->fdivs
 *
 * Resolves the parent rate according to the factor's CLK_PARENT_* flag
 * (apmixedsys PLL, another topckgen clock, or the crystal) and applies
 * the mult/div factor.
 */
283 static ulong mtk_topckgen_get_factor_rate(struct clk *clk, u32 off)
285 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
286 const struct mtk_fixed_factor *fdiv = &priv->tree->fdivs[off];
289 switch (fdiv->flags & CLK_PARENT_MASK) {
290 case CLK_PARENT_APMIXED:
291 rate = mtk_clk_find_parent_rate(clk, fdiv->parent,
/* Parent lives in this same (topckgen) device. */
294 case CLK_PARENT_TOPCKGEN:
295 rate = mtk_clk_find_parent_rate(clk, fdiv->parent, NULL);
298 case CLK_PARENT_XTAL:
300 rate = priv->tree->xtal_rate;
303 return mtk_factor_recalc_rate(fdiv, rate);
/*
 * mtk_infrasys_get_factor_rate() - rate of an infracfg fixed-factor clock
 * @off: index into priv->tree->fdivs
 *
 * Like mtk_topckgen_get_factor_rate(), but parents default to being
 * resolved within this clock device.
 */
306 static ulong mtk_infrasys_get_factor_rate(struct clk *clk, u32 off)
308 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
309 const struct mtk_fixed_factor *fdiv = &priv->tree->fdivs[off];
312 switch (fdiv->flags & CLK_PARENT_MASK) {
313 case CLK_PARENT_TOPCKGEN:
314 rate = mtk_clk_find_parent_rate(clk, fdiv->parent,
317 case CLK_PARENT_XTAL:
318 rate = priv->tree->xtal_rate;
/* default: resolve the parent within this clock device */
321 rate = mtk_clk_find_parent_rate(clk, fdiv->parent, NULL);
324 return mtk_factor_recalc_rate(fdiv, rate);
/*
 * mtk_topckgen_get_mux_rate() - rate of a topckgen mux clock
 * @off: index into priv->tree->muxes
 *
 * Reads the currently selected parent index from the mux register and
 * returns that parent's rate; otherwise falls back to the xtal rate.
 */
327 static ulong mtk_topckgen_get_mux_rate(struct clk *clk, u32 off)
329 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
330 const struct mtk_composite *mux = &priv->tree->muxes[off];
/* Extract the selector field. */
333 index = readl(priv->base + mux->mux_reg);
334 index &= mux->mux_mask << mux->mux_shift;
335 index = index >> mux->mux_shift;
/* A real parent id (or CLK_XTAL when CLK_BYPASS_XTAL is set) is
 * resolved recursively; anything else reports the xtal rate. */
337 if (mux->parent[index] > 0 ||
338 (mux->parent[index] == CLK_XTAL &&
339 priv->tree->flags & CLK_BYPASS_XTAL)) {
340 switch (mux->flags & CLK_PARENT_MASK) {
341 case CLK_PARENT_APMIXED:
342 return mtk_clk_find_parent_rate(clk, mux->parent[index],
346 return mtk_clk_find_parent_rate(clk, mux->parent[index],
352 return priv->tree->xtal_rate;
/*
 * mtk_infrasys_get_mux_rate() - rate of an infracfg mux clock
 * @off: index into priv->tree->muxes
 *
 * Same selector decoding as mtk_topckgen_get_mux_rate(), but parents
 * are resolved against topckgen (or this device) instead of apmixedsys.
 */
355 static ulong mtk_infrasys_get_mux_rate(struct clk *clk, u32 off)
357 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
358 const struct mtk_composite *mux = &priv->tree->muxes[off];
/* Extract the selector field. */
361 index = readl(priv->base + mux->mux_reg);
362 index &= mux->mux_mask << mux->mux_shift;
363 index = index >> mux->mux_shift;
365 if (mux->parent[index] > 0 ||
366 (mux->parent[index] == CLK_XTAL &&
367 priv->tree->flags & CLK_BYPASS_XTAL)) {
368 switch (mux->flags & CLK_PARENT_MASK) {
369 case CLK_PARENT_TOPCKGEN:
370 return mtk_clk_find_parent_rate(clk, mux->parent[index],
/* default: resolve the parent within this clock device */
374 return mtk_clk_find_parent_rate(clk, mux->parent[index],
/*
 * mtk_topckgen_get_rate() - get_rate op for topckgen clocks
 *
 * Clock IDs are laid out as [fixed-rate | fixed-factor | mux]:
 * ids below fdivs_offs are fixed-rate clocks, ids below muxes_offs
 * are fixed factors, everything above is a mux.
 */
382 static ulong mtk_topckgen_get_rate(struct clk *clk)
384 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
386 if (clk->id < priv->tree->fdivs_offs)
387 return priv->tree->fclks[clk->id].rate;
388 else if (clk->id < priv->tree->muxes_offs)
389 return mtk_topckgen_get_factor_rate(clk, clk->id -
390 priv->tree->fdivs_offs);
392 return mtk_topckgen_get_mux_rate(clk, clk->id -
393 priv->tree->muxes_offs);
/*
 * mtk_infrasys_get_rate() - get_rate op for infracfg clocks
 *
 * Same ID layout as mtk_topckgen_get_rate(): fixed-rate clocks first,
 * then fixed factors, then muxes.
 */
396 static ulong mtk_infrasys_get_rate(struct clk *clk)
398 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
402 if (clk->id < priv->tree->fdivs_offs) {
403 rate = priv->tree->fclks[clk->id].rate;
404 } else if (clk->id < priv->tree->muxes_offs) {
405 rate = mtk_infrasys_get_factor_rate(clk, clk->id -
406 priv->tree->fdivs_offs);
408 rate = mtk_infrasys_get_mux_rate(clk, clk->id -
409 priv->tree->muxes_offs);
/*
 * mtk_clk_mux_enable() - enable op for composite (mux + gate) clocks
 *
 * Only mux clocks (id >= muxes_offs) can be gated; a negative
 * gate_shift means the mux has no gate. The gate bit is active-low
 * here: enabling clears it, via the clr register on SETCLR_UPD
 * hardware or by read-modify-write otherwise. CLK_DOMAIN_SCPSYS muxes
 * additionally program the scpsys clock-off control registers.
 */
415 static int mtk_clk_mux_enable(struct clk *clk)
417 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
418 const struct mtk_composite *mux;
/* IDs below muxes_offs are not muxes and have nothing to enable. */
421 if (clk->id < priv->tree->muxes_offs)
424 mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
425 if (mux->gate_shift < 0)
428 /* enable clock gate */
429 if (mux->flags & CLK_MUX_SETCLR_UPD) {
430 val = BIT(mux->gate_shift);
431 writel(val, priv->base + mux->mux_clr_reg);
433 val = readl(priv->base + mux->gate_reg);
434 val &= ~BIT(mux->gate_shift);
435 writel(val, priv->base + mux->gate_reg);
438 if (mux->flags & CLK_DOMAIN_SCPSYS) {
439 /* enable scpsys clock off control */
440 writel(SCP_ARMCK_OFF_EN, priv->base + CLK_SCP_CFG0);
441 writel(SCP_AXICK_DCM_DIS_EN | SCP_AXICK_26M_SEL_EN,
442 priv->base + CLK_SCP_CFG1);
/*
 * mtk_clk_mux_disable() - disable op for composite (mux + gate) clocks
 *
 * Mirror of mtk_clk_mux_enable(): sets the active-low gate bit via
 * the set register on SETCLR_UPD hardware, or by read-modify-write.
 */
448 static int mtk_clk_mux_disable(struct clk *clk)
450 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
451 const struct mtk_composite *mux;
/* IDs below muxes_offs are not muxes and have nothing to disable. */
454 if (clk->id < priv->tree->muxes_offs)
457 mux = &priv->tree->muxes[clk->id - priv->tree->muxes_offs];
458 if (mux->gate_shift < 0)
461 /* disable clock gate */
462 if (mux->flags & CLK_MUX_SETCLR_UPD) {
463 val = BIT(mux->gate_shift);
464 writel(val, priv->base + mux->mux_set_reg);
466 val = readl(priv->base + mux->gate_reg);
467 val |= BIT(mux->gate_shift);
468 writel(val, priv->base + mux->gate_reg);
/*
 * mtk_common_clk_set_parent() - set_parent op; only mux clocks qualify
 *
 * Delegates to mtk_clk_mux_set_parent() with the mux descriptor for
 * this clock ID.
 */
474 static int mtk_common_clk_set_parent(struct clk *clk, struct clk *parent)
476 struct mtk_clk_priv *priv = dev_get_priv(clk->dev);
/* IDs below muxes_offs are not muxes and cannot be reparented. */
478 if (clk->id < priv->tree->muxes_offs)
481 return mtk_clk_mux_set_parent(priv->base, parent->id,
482 &priv->tree->muxes[clk->id - priv->tree->muxes_offs]);
/*
 * mtk_clk_gate_enable() - ungate a clock
 *
 * Four register schemes: dedicated set/clr registers (normal or
 * inverted polarity), or a single status register modified in place.
 * In the normal scheme the gate bit is active-low (set = gated); in
 * the _INV schemes it is active-high (set = running).
 */
487 static int mtk_clk_gate_enable(struct clk *clk)
489 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
490 const struct mtk_gate *gate = &priv->gates[clk->id];
491 u32 bit = BIT(gate->shift);
493 switch (gate->flags & CLK_GATE_MASK) {
494 case CLK_GATE_SETCLR:
495 writel(bit, priv->base + gate->regs->clr_ofs);
497 case CLK_GATE_SETCLR_INV:
498 writel(bit, priv->base + gate->regs->set_ofs);
500 case CLK_GATE_NO_SETCLR:
501 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, 0);
503 case CLK_GATE_NO_SETCLR_INV:
504 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, bit);
/*
 * mtk_clk_gate_disable() - gate a clock off
 *
 * Mirror of mtk_clk_gate_enable() for the same four register schemes.
 */
514 static int mtk_clk_gate_disable(struct clk *clk)
516 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
517 const struct mtk_gate *gate = &priv->gates[clk->id];
518 u32 bit = BIT(gate->shift);
520 switch (gate->flags & CLK_GATE_MASK) {
521 case CLK_GATE_SETCLR:
522 writel(bit, priv->base + gate->regs->set_ofs);
524 case CLK_GATE_SETCLR_INV:
525 writel(bit, priv->base + gate->regs->clr_ofs);
527 case CLK_GATE_NO_SETCLR:
528 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, bit);
530 case CLK_GATE_NO_SETCLR_INV:
531 clrsetbits_le32(priv->base + gate->regs->sta_ofs, bit, 0);
/*
 * mtk_clk_gate_get_rate() - a gate runs at its parent's rate
 *
 * The parent is resolved on priv->parent, the clock device this gate
 * unit was wired to at probe time.
 */
541 static ulong mtk_clk_gate_get_rate(struct clk *clk)
543 struct mtk_cg_priv *priv = dev_get_priv(clk->dev);
544 const struct mtk_gate *gate = &priv->gates[clk->id];
546 return mtk_clk_find_parent_rate(clk, gate->parent, priv->parent);
/* clk_ops tables exported to the per-SoC MediaTek clock drivers */
549 const struct clk_ops mtk_clk_apmixedsys_ops = {
550 .enable = mtk_apmixedsys_enable,
551 .disable = mtk_apmixedsys_disable,
552 .set_rate = mtk_apmixedsys_set_rate,
553 .get_rate = mtk_apmixedsys_get_rate,
556 const struct clk_ops mtk_clk_topckgen_ops = {
557 .enable = mtk_clk_mux_enable,
558 .disable = mtk_clk_mux_disable,
559 .get_rate = mtk_topckgen_get_rate,
560 .set_parent = mtk_common_clk_set_parent,
563 const struct clk_ops mtk_clk_infrasys_ops = {
564 .enable = mtk_clk_mux_enable,
565 .disable = mtk_clk_mux_disable,
566 .get_rate = mtk_infrasys_get_rate,
567 .set_parent = mtk_common_clk_set_parent,
570 const struct clk_ops mtk_clk_gate_ops = {
571 .enable = mtk_clk_gate_enable,
572 .disable = mtk_clk_gate_disable,
573 .get_rate = mtk_clk_gate_get_rate,
/*
 * mtk_common_clk_init() - common probe helper for mux/factor clock units
 * @dev: the clock device being probed
 * @tree: SoC-specific clock tree description
 *
 * Maps the register base and resolves the parent clock device: an
 * optional "clock-parent" phandle wins; otherwise fall back to the
 * apmixedsys driver instance.
 */
576 int mtk_common_clk_init(struct udevice *dev,
577 const struct mtk_clk_tree *tree)
579 struct mtk_clk_priv *priv = dev_get_priv(dev);
580 struct udevice *parent;
583 priv->base = dev_read_addr_ptr(dev);
/* Prefer an explicit "clock-parent" phandle from the device tree. */
587 ret = uclass_get_device_by_phandle(UCLASS_CLK, dev, "clock-parent", &parent);
588 if (ret || !parent) {
/* No phandle: default to the apmixedsys clock device. */
589 ret = uclass_get_device_by_driver(UCLASS_CLK,
590 DM_DRIVER_GET(mtk_clk_apmixedsys), &parent);
595 priv->parent = parent;
601 int mtk_common_clk_gate_init(struct udevice *dev,
602 const struct mtk_clk_tree *tree,
603 const struct mtk_gate *gates)
605 struct mtk_cg_priv *priv = dev_get_priv(dev);
606 struct udevice *parent;
609 priv->base = dev_read_addr_ptr(dev);
613 ret = uclass_get_device_by_phandle(UCLASS_CLK, dev, "clock-parent", &parent);
614 if (ret || !parent) {
615 ret = uclass_get_device_by_driver(UCLASS_CLK,
616 DM_DRIVER_GET(mtk_clk_topckgen), &parent);
621 priv->parent = parent;