1 // SPDX-License-Identifier: GPL-2.0+
5 * Driver for SD/MMC controller present on Actions Semi S700/S900 SoC, based
6 * on Linux Driver "drivers/mmc/host/owl-mmc.c".
8 * Though, there is a bit (BSEL, BUS or DMA Special Channel Selection) that
9 * controls the data transfer from SDx_DAT register either using CPU AHB Bus
10 * or DMA channel, but seems like, it only works correctly using external DMA
11 channel, and those special bits used in this driver are picked from vendor
12 * source exclusively for MMC/SD.
21 #include <linux/bitops.h>
22 #include <linux/delay.h>
23 #include <linux/err.h>
24 #include <linux/iopoll.h>
/* SD/MMC controller register offsets (relative to priv->reg_base) */
29 #define OWL_REG_SD_EN 0x0000
30 #define OWL_REG_SD_CTL 0x0004
31 #define OWL_REG_SD_STATE 0x0008
32 #define OWL_REG_SD_CMD 0x000c
33 #define OWL_REG_SD_ARG 0x0010
34 #define OWL_REG_SD_RSPBUF0 0x0014
35 #define OWL_REG_SD_RSPBUF1 0x0018
36 #define OWL_REG_SD_RSPBUF2 0x001c
37 #define OWL_REG_SD_RSPBUF3 0x0020
38 #define OWL_REG_SD_RSPBUF4 0x0024
39 #define OWL_REG_SD_DAT 0x0028
40 #define OWL_REG_SD_BLK_SIZE 0x002c
41 #define OWL_REG_SD_BLK_NUM 0x0030
42 #define OWL_REG_SD_BUF_SIZE 0x0034
/* SD_EN register bits */
45 #define OWL_SD_EN_RANE BIT(31)
46 #define OWL_SD_EN_RESE BIT(10)
47 #define OWL_SD_ENABLE BIT(7)
48 #define OWL_SD_EN_BSEL BIT(6)
49 #define OWL_SD_EN_DATAWID(x) (((x) & 0x3) << 0)
50 #define OWL_SD_EN_DATAWID_MASK 0x03
/* SD_CTL register bits */
53 #define OWL_SD_CTL_TOUTEN BIT(31)
54 #define OWL_SD_CTL_DELAY_MSK GENMASK(23, 16)
55 #define OWL_SD_CTL_RDELAY(x) (((x) & 0xf) << 20)
56 #define OWL_SD_CTL_WDELAY(x) (((x) & 0xf) << 16)
57 #define OWL_SD_CTL_TS BIT(7)
58 #define OWL_SD_CTL_LBE BIT(6)
59 #define OWL_SD_CTL_TM(x) (((x) & 0xf) << 0)
/* Read/write delay values programmed into SD_CTL per clock-rate band */
61 #define OWL_SD_DELAY_LOW_CLK 0x0f
62 #define OWL_SD_DELAY_MID_CLK 0x0a
63 #define OWL_SD_RDELAY_HIGH 0x08
64 #define OWL_SD_WDELAY_HIGH 0x09
/* SD_STATE register bits */
67 #define OWL_SD_STATE_DAT0S BIT(7)
68 #define OWL_SD_STATE_CLNR BIT(4)
69 #define OWL_SD_STATE_CRC7ER BIT(0)
/* Supported OCR voltage window */
71 #define OWL_MMC_OCR (MMC_VDD_32_33 | MMC_VDD_33_34 | \
/* Poll timeouts in microseconds (used with readl_poll_timeout) */
74 #define DATA_TRANSFER_TIMEOUT 3000000
75 #define DMA_TRANSFER_TIMEOUT 5000000
78 * Simple DMA transfer operation definitions for MMC/SD card
/* Per-channel DMA register window: base + 0x100 + 0x100 * channel */
80 #define SD_DMA_CHANNEL(base, channel) ((base) + 0x100 + 0x100 * (channel))
/* DMA channel register offsets */
82 #define DMA_MODE 0x0000
83 #define DMA_SOURCE 0x0004
84 #define DMA_DESTINATION 0x0008
85 #define DMA_FRAME_LEN 0x000C
86 #define DMA_FRAME_CNT 0x0010
87 #define DMA_START 0x0024
/* DMA_MODE fields: source/destination type and address-mode selection */
90 #define DMA_MODE_ST(x) (((x) & 0x3) << 8)
91 #define DMA_MODE_ST_DEV DMA_MODE_ST(0)
92 #define DMA_MODE_DT(x) (((x) & 0x3) << 10)
93 #define DMA_MODE_DT_DCU DMA_MODE_DT(2)
94 #define DMA_MODE_SAM(x) (((x) & 0x3) << 16)
95 #define DMA_MODE_SAM_CONST DMA_MODE_SAM(0)
96 #define DMA_MODE_DAM(x) (((x) & 0x3) << 18)
97 #define DMA_MODE_DAM_INC DMA_MODE_DAM(1)
/* Bit in DMA_START that indicates/starts a transfer */
99 #define DMA_ENABLE 0x1
/* Per-device platform data: mmc_config consumed by the MMC uclass */
101 struct owl_mmc_plat {
102 	struct mmc_config cfg;
/* Driver private state (reg_base, dma_channel and clk members assigned in probe) */
106 struct owl_mmc_priv {
110 	unsigned int clock;      /* Current clock */
111 	unsigned int dma_drq;    /* Trigger Source */
/*
 * Program one DMA descriptor on channel 0 of the dedicated SD DMA block:
 * mode (trigger source + address modes), source, destination, frame length,
 * and a single frame.  The transfer is started later via DMA_START.
 */
114 static void owl_dma_config(struct owl_mmc_priv *priv, unsigned int src,
115 			   unsigned int dst, unsigned int len)
117 	unsigned int mode = priv->dma_drq;
119 	/* Set Source and Destination address mode */
120 	mode |= (DMA_MODE_ST_DEV | DMA_MODE_SAM_CONST | DMA_MODE_DT_DCU |
123 	writel(mode, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_MODE);
124 	writel(src, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_SOURCE);
125 	writel(dst, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_DESTINATION);
126 	writel(len, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_FRAME_LEN);
127 	writel(0x1, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_FRAME_CNT);
/*
 * Set up the controller block counters and the external DMA channel for a
 * data transfer, then kick the DMA engine.  Cache maintenance is performed
 * on the CPU-side buffer: invalidate before a read, flush before a write.
 */
130 static void owl_mmc_prepare_data(struct owl_mmc_priv *priv,
131 				 struct mmc_data *data)
/* Route SDx_DAT through the DMA special channel (BSEL), per file header note */
136 	setbits_le32(priv->reg_base + OWL_REG_SD_EN, OWL_SD_EN_BSEL);
138 	writel(data->blocks, priv->reg_base + OWL_REG_SD_BLK_NUM);
139 	writel(data->blocksize, priv->reg_base + OWL_REG_SD_BLK_SIZE);
140 	total = data->blocksize * data->blocks;
143 		writel(total, priv->reg_base + OWL_REG_SD_BUF_SIZE);
/* Fallback buffer size of one 512-byte block */
145 		writel(512, priv->reg_base + OWL_REG_SD_BUF_SIZE);
/* Make sure the DMA channel is stopped before reprogramming it */
148 	writel(0x0, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);
151 	if (data->flags == MMC_DATA_READ) {
152 		buf = (ulong) (data->dest);
153 		owl_dma_config(priv, (ulong) priv->reg_base +
154 			       OWL_REG_SD_DAT, buf, total);
155 		invalidate_dcache_range(buf, buf + total);
157 		buf = (ulong) (data->src);
158 		owl_dma_config(priv, buf, (ulong) priv->reg_base +
159 			       OWL_REG_SD_DAT, total);
160 		flush_dcache_range(buf, buf + total);
/* Start the DMA transfer */
163 	writel(0x1, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);
/*
 * dm_mmc_ops.send_cmd implementation: issue one command (optionally with a
 * data phase), poll for command completion, decode errors from SD_STATE,
 * read back the response, and wait for the DMA transfer to finish.
 *
 * Returns 0 on success or a negative error code (timeout / response error).
 */
167 static int owl_mmc_send_cmd(struct udevice *dev, struct mmc_cmd *cmd,
168 			    struct mmc_data *data)
170 	struct owl_mmc_priv *priv = dev_get_priv(dev);
171 	unsigned int cmd_rsp_mask, mode, reg;
174 	setbits_le32(priv->reg_base + OWL_REG_SD_EN, OWL_SD_ENABLE);
/*
 * Select the state-error bits to check and the controller transfer mode
 * (TM) according to the expected response type.
 * NOTE(review): cmd_rsp_mask looks uninitialized on the MMC_RSP_NONE path
 * before its use below — confirm against the full file (an initialization
 * may exist on a line not shown here).
 */
178 	if (cmd->resp_type != MMC_RSP_NONE)
179 		cmd_rsp_mask = OWL_SD_STATE_CLNR | OWL_SD_STATE_CRC7ER;
180 	if (cmd->resp_type == MMC_RSP_R1) {
182 			if (data->flags == MMC_DATA_READ)
183 				mode |= OWL_SD_CTL_TM(4);
185 				mode |= OWL_SD_CTL_TM(5);
187 			mode |= OWL_SD_CTL_TM(1);
188 	} else if (cmd->resp_type == MMC_RSP_R2) {
189 		mode = OWL_SD_CTL_TM(2);
190 	} else if (cmd->resp_type == MMC_RSP_R1b) {
191 		mode = OWL_SD_CTL_TM(3);
192 	} else if (cmd->resp_type == MMC_RSP_R3) {
/* R3 (OCR) responses carry no CRC, so only check "command line no response" */
193 		cmd_rsp_mask = OWL_SD_STATE_CLNR;
194 		mode = OWL_SD_CTL_TM(1);
/* Preserve the RDELAY/WDELAY bits already programmed by owl_mmc_clk_set() */
197 	mode |= (readl(priv->reg_base + OWL_REG_SD_CTL) & (0xff << 16));
200 	writel(cmd->cmdidx, priv->reg_base + OWL_REG_SD_CMD);
201 	writel(cmd->cmdarg, priv->reg_base + OWL_REG_SD_ARG);
203 	/* Set LBE to send clk at the end of last read block */
205 		mode |= (OWL_SD_CTL_TS | OWL_SD_CTL_LBE | 0xE4000000);
207 		mode |= OWL_SD_CTL_TS;
210 		owl_mmc_prepare_data(priv, data);
/* Writing TS in SD_CTL starts the transfer; TS self-clears on completion */
213 	writel(mode, priv->reg_base + OWL_REG_SD_CTL);
215 	ret = readl_poll_timeout(priv->reg_base + OWL_REG_SD_CTL, reg,
216 				 !(reg & OWL_SD_CTL_TS), DATA_TRANSFER_TIMEOUT);
218 	if (ret == -ETIMEDOUT) {
219 		debug("error: transferred data timeout\n");
/* Check command-phase errors reported in SD_STATE */
223 	reg = readl(priv->reg_base + OWL_REG_SD_STATE) & cmd_rsp_mask;
224 	if (cmd->resp_type & MMC_RSP_PRESENT) {
225 		if (reg & OWL_SD_STATE_CLNR) {
226 			printf("Error CMD_NO_RSP\n");
230 		if (reg & OWL_SD_STATE_CRC7ER) {
231 			printf("Error CMD_RSP_CRC\n");
/* 136-bit (R2) response: four response words, most-significant first */
235 		if (cmd->resp_type & MMC_RSP_136) {
236 			cmd->response[3] = readl(priv->reg_base + OWL_REG_SD_RSPBUF0);
237 			cmd->response[2] = readl(priv->reg_base + OWL_REG_SD_RSPBUF1);
238 			cmd->response[1] = readl(priv->reg_base + OWL_REG_SD_RSPBUF2);
239 			cmd->response[0] = readl(priv->reg_base + OWL_REG_SD_RSPBUF3);
/* 48-bit response: reassemble from the two buffer words (8-bit shift) */
243 			rsp[0] = readl(priv->reg_base + OWL_REG_SD_RSPBUF0);
244 			rsp[1] = readl(priv->reg_base + OWL_REG_SD_RSPBUF1);
245 			cmd->response[0] = rsp[1] << 24 | rsp[0] >> 8;
246 			cmd->response[1] = rsp[1] >> 8;
/* Wait for the DMA engine to clear its enable/busy bit */
251 		ret = readl_poll_timeout(SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START,
252 					 reg, !(reg & DMA_ENABLE), DMA_TRANSFER_TIMEOUT);
254 		if (ret == -ETIMEDOUT) {
255 			debug("error: DMA transfer timeout\n");
/* Stop the DMA channel */
260 		writel(0x0, SD_DMA_CHANNEL(priv->dma_channel, 0) + DMA_START);
261 	/* Transmission STOP */
262 	while (readl(priv->reg_base + OWL_REG_SD_CTL) & OWL_SD_CTL_TS)
263 		clrbits_le32(priv->reg_base + OWL_REG_SD_CTL,
/*
 * Program the SD_CTL read/write delay fields for the requested clock rate.
 * Three bands are handled: low (<= 1 MHz range — condition line not visible
 * in this view), 1..26 MHz, and 26..52 MHz; other rates are rejected.
 */
270 static int owl_mmc_clk_set(struct owl_mmc_priv *priv, int rate)
272 	u32 reg, wdelay, rdelay;
274 	reg = readl(priv->reg_base + OWL_REG_SD_CTL);
275 	reg &= ~OWL_SD_CTL_DELAY_MSK;
277 	/* Set RDELAY and WDELAY based on the clock */
279 		rdelay = wdelay = OWL_SD_DELAY_LOW_CLK;
280 	else if ((rate > 1000000) && (rate <= 26000000))
281 		rdelay = wdelay = OWL_SD_DELAY_MID_CLK;
282 	else if ((rate > 26000000) && (rate <= 52000000)) {
283 		rdelay = OWL_SD_RDELAY_HIGH;
284 		wdelay = OWL_SD_WDELAY_HIGH;
286 		debug("SD clock rate not supported\n");
/* Write back SD_CTL with the new delay fields merged in */
290 	writel(reg | OWL_SD_CTL_RDELAY(rdelay) | OWL_SD_CTL_WDELAY(wdelay),
291 	       priv->reg_base + OWL_REG_SD_CTL);
/*
 * dm_mmc_ops.set_ios implementation: apply clock rate changes (controller
 * delay fields plus the module clock via the clk framework), gate/ungate
 * the clock, and program the bus width into SD_EN.
 */
296 static int owl_mmc_set_ios(struct udevice *dev)
298 	struct owl_mmc_priv *priv = dev_get_priv(dev);
299 	struct owl_mmc_plat *plat = dev_get_plat(dev);
300 	struct mmc *mmc = &plat->mmc;
/* Only reprogram when the requested clock differs from the cached one */
303 	if (mmc->clock != priv->clock) {
304 		priv->clock = mmc->clock;
305 		ret = owl_mmc_clk_set(priv, mmc->clock);
306 		if (IS_ERR_VALUE(ret))
309 		ret = clk_set_rate(&priv->clk, mmc->clock);
310 		if (IS_ERR_VALUE(ret))
314 	if (mmc->clk_disable)
315 		ret = clk_disable(&priv->clk);
317 		ret = clk_enable(&priv->clk);
321 	/* Set the Bus width */
322 	reg = readl(priv->reg_base + OWL_REG_SD_EN);
323 	reg &= ~OWL_SD_EN_DATAWID_MASK;
324 	if (mmc->bus_width == 8)
325 		reg |= OWL_SD_EN_DATAWID(2);
326 	else if (mmc->bus_width == 4)
327 		reg |= OWL_SD_EN_DATAWID(1);
329 	writel(reg, priv->reg_base + OWL_REG_SD_EN);
/* Driver-model MMC operations exposed to the MMC uclass */
334 static const struct dm_mmc_ops owl_mmc_ops = {
335 	.send_cmd	= owl_mmc_send_cmd,
336 	.set_ios	= owl_mmc_set_ios,
/*
 * Probe: fill in mmc_config (OCR window, max clock, HS capabilities, plus
 * DT-provided settings via mmc_of_parse), map the controller registers,
 * resolve the DMA channel base and trigger source from the "dmas" phandle,
 * and obtain the module clock.
 */
339 static int owl_mmc_probe(struct udevice *dev)
341 	struct mmc_uclass_priv *upriv = dev_get_uclass_priv(dev);
342 	struct owl_mmc_plat *plat = dev_get_plat(dev);
343 	struct owl_mmc_priv *priv = dev_get_priv(dev);
344 	struct mmc_config *cfg = &plat->cfg;
345 	struct ofnode_phandle_args args;
349 	cfg->name = dev->name;
350 	cfg->voltages = OWL_MMC_OCR;
352 	cfg->f_max = 52000000;
354 	cfg->host_caps = MMC_MODE_HS | MMC_MODE_HS_52MHz;
/* Merge capabilities/settings from the device tree node */
356 	ret = mmc_of_parse(dev, cfg);
360 	addr = dev_read_addr(dev);
361 	if (addr == FDT_ADDR_T_NONE)
364 	priv->reg_base = (void *)addr;
/* Resolve DMA controller node and channel arguments from "dmas" */
366 	ret = dev_read_phandle_with_args(dev, "dmas", "#dma-cells", 0, 0,
/* Channel register base comes from the DMA node; first arg is the DRQ */
371 	priv->dma_channel = (void *)ofnode_get_addr(args.node);
372 	priv->dma_drq = args.args[0];
374 	ret = clk_get_by_index(dev, 0, &priv->clk);
376 		debug("clk_get_by_index() failed: %d\n", ret);
/* Publish the mmc instance to the uclass */
380 	upriv->mmc = &plat->mmc;
/* Bind: register the mmc device with the uclass using the platform config */
385 static int owl_mmc_bind(struct udevice *dev)
387 	struct owl_mmc_plat *plat = dev_get_plat(dev);
389 	return mmc_bind(dev, &plat->mmc, &plat->cfg);
/* Device-tree compatible strings matched by this driver */
392 static const struct udevice_id owl_mmc_ids[] = {
393 	{ .compatible = "actions,s700-mmc" },
394 	{ .compatible = "actions,owl-mmc" },
/* U-Boot driver registration for the OWL SD/MMC controller */
398 U_BOOT_DRIVER(owl_mmc_drv) = {
401 	.of_match	= owl_mmc_ids,
402 	.bind		= owl_mmc_bind,
403 	.probe		= owl_mmc_probe,
405 	.plat_auto	= sizeof(struct owl_mmc_plat),
406 	.priv_auto	= sizeof(struct owl_mmc_priv),