2 * SPI bus driver for CSR SiRFprimaII
4 * Copyright (c) 2011 Cambridge Silicon Radio Limited, a CSR plc group company.
6 * Licensed under GPLv2 or later.
9 #include <linux/module.h>
10 #include <linux/kernel.h>
11 #include <linux/slab.h>
12 #include <linux/clk.h>
13 #include <linux/completion.h>
14 #include <linux/interrupt.h>
17 #include <linux/bitops.h>
18 #include <linux/err.h>
19 #include <linux/platform_device.h>
20 #include <linux/of_gpio.h>
21 #include <linux/spi/spi.h>
22 #include <linux/spi/spi_bitbang.h>
23 #include <linux/dmaengine.h>
24 #include <linux/dma-direction.h>
25 #include <linux/dma-mapping.h>
27 #define DRIVER_NAME "sirfsoc_spi"
/* Controller register offsets, relative to sspi->base */
29 #define SIRFSOC_SPI_CTRL 0x0000
30 #define SIRFSOC_SPI_CMD 0x0004
31 #define SIRFSOC_SPI_TX_RX_EN 0x0008
32 #define SIRFSOC_SPI_INT_EN 0x000C
33 #define SIRFSOC_SPI_INT_STATUS 0x0010
34 #define SIRFSOC_SPI_TX_DMA_IO_CTRL 0x0100
35 #define SIRFSOC_SPI_TX_DMA_IO_LEN 0x0104
36 #define SIRFSOC_SPI_TXFIFO_CTRL 0x0108
37 #define SIRFSOC_SPI_TXFIFO_LEVEL_CHK 0x010C
38 #define SIRFSOC_SPI_TXFIFO_OP 0x0110
39 #define SIRFSOC_SPI_TXFIFO_STATUS 0x0114
40 #define SIRFSOC_SPI_TXFIFO_DATA 0x0118
41 #define SIRFSOC_SPI_RX_DMA_IO_CTRL 0x0120
42 #define SIRFSOC_SPI_RX_DMA_IO_LEN 0x0124
43 #define SIRFSOC_SPI_RXFIFO_CTRL 0x0128
44 #define SIRFSOC_SPI_RXFIFO_LEVEL_CHK 0x012C
45 #define SIRFSOC_SPI_RXFIFO_OP 0x0130
46 #define SIRFSOC_SPI_RXFIFO_STATUS 0x0134
47 #define SIRFSOC_SPI_RXFIFO_DATA 0x0138
48 #define SIRFSOC_SPI_DUMMY_DELAY_CTL 0x0144
50 /* SPI CTRL register defines */
51 #define SIRFSOC_SPI_SLV_MODE BIT(16)
52 #define SIRFSOC_SPI_CMD_MODE BIT(17)
53 #define SIRFSOC_SPI_CS_IO_OUT BIT(18)
54 #define SIRFSOC_SPI_CS_IO_MODE BIT(19)
55 #define SIRFSOC_SPI_CLK_IDLE_STAT BIT(20)
56 #define SIRFSOC_SPI_CS_IDLE_STAT BIT(21)
57 #define SIRFSOC_SPI_TRAN_MSB BIT(22)
58 #define SIRFSOC_SPI_DRV_POS_EDGE BIT(23)
59 #define SIRFSOC_SPI_CS_HOLD_TIME BIT(24)
60 #define SIRFSOC_SPI_CLK_SAMPLE_MODE BIT(25)
61 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_8 (0 << 26)
62 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_12 (1 << 26)
63 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_16 (2 << 26)
64 #define SIRFSOC_SPI_TRAN_DAT_FORMAT_32 (3 << 26)
65 #define SIRFSOC_SPI_CMD_BYTE_NUM(x) ((x & 3) << 28)
66 #define SIRFSOC_SPI_ENA_AUTO_CLR BIT(30)
67 #define SIRFSOC_SPI_MUL_DAT_MODE BIT(31)
69 /* Interrupt Enable */
70 #define SIRFSOC_SPI_RX_DONE_INT_EN BIT(0)
71 #define SIRFSOC_SPI_TX_DONE_INT_EN BIT(1)
72 #define SIRFSOC_SPI_RX_OFLOW_INT_EN BIT(2)
73 #define SIRFSOC_SPI_TX_UFLOW_INT_EN BIT(3)
74 #define SIRFSOC_SPI_RX_IO_DMA_INT_EN BIT(4)
75 #define SIRFSOC_SPI_TX_IO_DMA_INT_EN BIT(5)
76 #define SIRFSOC_SPI_RXFIFO_FULL_INT_EN BIT(6)
77 #define SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN BIT(7)
78 #define SIRFSOC_SPI_RXFIFO_THD_INT_EN BIT(8)
79 #define SIRFSOC_SPI_TXFIFO_THD_INT_EN BIT(9)
80 #define SIRFSOC_SPI_FRM_END_INT_EN BIT(10)
82 #define SIRFSOC_SPI_INT_MASK_ALL 0x1FFF
84 /* Interrupt status */
85 #define SIRFSOC_SPI_RX_DONE BIT(0)
86 #define SIRFSOC_SPI_TX_DONE BIT(1)
87 #define SIRFSOC_SPI_RX_OFLOW BIT(2)
88 #define SIRFSOC_SPI_TX_UFLOW BIT(3)
89 #define SIRFSOC_SPI_RX_IO_DMA BIT(4)
90 #define SIRFSOC_SPI_RX_FIFO_FULL BIT(6)
91 #define SIRFSOC_SPI_TXFIFO_EMPTY BIT(7)
92 #define SIRFSOC_SPI_RXFIFO_THD_REACH BIT(8)
93 #define SIRFSOC_SPI_TXFIFO_THD_REACH BIT(9)
94 #define SIRFSOC_SPI_FRM_END BIT(10)
/* TX_RX_EN register bits (written to SIRFSOC_SPI_TX_RX_EN) */
97 #define SIRFSOC_SPI_RX_EN BIT(0)
98 #define SIRFSOC_SPI_TX_EN BIT(1)
99 #define SIRFSOC_SPI_CMD_TX_EN BIT(2)
/* TX/RX_DMA_IO_CTRL register bits: select IO (PIO) mode, flush rx DMA */
101 #define SIRFSOC_SPI_IO_MODE_SEL BIT(0)
102 #define SIRFSOC_SPI_RX_DMA_FLUSH BIT(2)
/* TXFIFO_OP / RXFIFO_OP register bits */
105 #define SIRFSOC_SPI_FIFO_RESET BIT(0)
106 #define SIRFSOC_SPI_FIFO_START BIT(1)
/* FIFO access width — presumably part of the FIFO_CTRL registers; confirm */
109 #define SIRFSOC_SPI_FIFO_WIDTH_BYTE (0 << 0)
110 #define SIRFSOC_SPI_FIFO_WIDTH_WORD (1 << 0)
111 #define SIRFSOC_SPI_FIFO_WIDTH_DWORD (2 << 0)
/* TXFIFO_STATUS / RXFIFO_STATUS register fields */
114 #define SIRFSOC_SPI_FIFO_LEVEL_MASK 0xFF
115 #define SIRFSOC_SPI_FIFO_FULL BIT(8)
116 #define SIRFSOC_SPI_FIFO_EMPTY BIT(9)
118 /* 256 bytes rx/tx FIFO */
119 #define SIRFSOC_SPI_FIFO_SIZE 256
120 #define SIRFSOC_SPI_DAT_FRM_LEN_MAX (64 * 1024)
/* FIFO level-check field encodings (written to the *_LEVEL_CHK registers) */
122 #define SIRFSOC_SPI_FIFO_SC(x) ((x) & 0x3F)
123 #define SIRFSOC_SPI_FIFO_LC(x) (((x) & 0x3F) << 10)
124 #define SIRFSOC_SPI_FIFO_HC(x) (((x) & 0x3F) << 20)
125 #define SIRFSOC_SPI_FIFO_THD(x) (((x) & 0xFF) << 2)
128 * only if the rx/tx buffer and transfer size are 4-bytes aligned, we use dma
129 * due to the limitation of dma controller
132 #define ALIGNED(x) (!((u32)x & 0x3))
133 #define IS_DMA_VALID(x) (x && ALIGNED(x->tx_buf) && ALIGNED(x->rx_buf) && \
134 ALIGNED(x->len) && (x->len < 2 * PAGE_SIZE))
136 #define SIRFSOC_MAX_CMD_BYTES 4
139 struct spi_bitbang bitbang;
/* signalled by the IRQ handler / DMA callbacks when rx/tx finish */
140 struct completion rx_done;
141 struct completion tx_done;
144 u32 ctrl_freq; /* SPI controller clock speed */
147 /* rx & tx bufs from the spi_transfer */
151 /* place received word into rx buffer */
152 void (*rx_word) (struct sirfsoc_spi *);
153 /* get word from tx buffer for sending */
154 void (*tx_word) (struct sirfsoc_spi *);
156 /* number of words left to be transmitted/received */
157 unsigned int left_tx_word;
158 unsigned int left_rx_word;
160 /* rx & tx DMA channels */
161 struct dma_chan *rx_chan;
162 struct dma_chan *tx_chan;
/* DMA bus addresses of the mapped tx (src) and rx (dst) buffers */
163 dma_addr_t src_start;
164 dma_addr_t dst_start;
166 int word_width; /* in bytes */
169 * if tx size is not more than 4 and rx size is NULL, use
/* PIO helper: pop one 8-bit word from the RX FIFO into the rx buffer. */
176 static void spi_sirfsoc_rx_word_u8(struct sirfsoc_spi *sspi)
181 data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);
188 sspi->left_rx_word--;
/* PIO helper: push one 8-bit word from the tx buffer into the TX FIFO. */
191 static void spi_sirfsoc_tx_word_u8(struct sirfsoc_spi *sspi)
194 const u8 *tx = sspi->tx;
201 writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
202 sspi->left_tx_word--;
/* PIO helper: pop one 16-bit word from the RX FIFO into the rx buffer. */
205 static void spi_sirfsoc_rx_word_u16(struct sirfsoc_spi *sspi)
210 data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);
217 sspi->left_rx_word--;
/* PIO helper: push one 16-bit word from the tx buffer into the TX FIFO. */
220 static void spi_sirfsoc_tx_word_u16(struct sirfsoc_spi *sspi)
223 const u16 *tx = sspi->tx;
230 writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
231 sspi->left_tx_word--;
/* PIO helper: pop one 32-bit word from the RX FIFO into the rx buffer. */
234 static void spi_sirfsoc_rx_word_u32(struct sirfsoc_spi *sspi)
239 data = readl(sspi->base + SIRFSOC_SPI_RXFIFO_DATA);
246 sspi->left_rx_word--;
/* PIO helper: push one 32-bit word from the tx buffer into the TX FIFO. */
250 static void spi_sirfsoc_tx_word_u32(struct sirfsoc_spi *sspi)
253 const u32 *tx = sspi->tx;
260 writel(data, sspi->base + SIRFSOC_SPI_TXFIFO_DATA);
261 sspi->left_tx_word--;
/*
 * Interrupt handler.  Completes the waiter(s) matching the latched status
 * bits, then masks (INT_EN = 0) and acks (write INT_MASK_ALL to INT_STATUS)
 * all controller interrupts before returning.
 */
264 static irqreturn_t spi_sirfsoc_irq(int irq, void *dev_id)
266 struct sirfsoc_spi *sspi = dev_id;
267 u32 spi_stat = readl(sspi->base + SIRFSOC_SPI_INT_STATUS);
/* command-register transfers finish on the frame-end interrupt */
268 if (sspi->tx_by_cmd && (spi_stat & SIRFSOC_SPI_FRM_END)) {
269 complete(&sspi->tx_done);
270 writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
271 writel(SIRFSOC_SPI_INT_MASK_ALL,
272 sspi->base + SIRFSOC_SPI_INT_STATUS);
276 /* Error Conditions */
/* overflow/underflow: wake both waiters so the transfer paths can abort */
277 if (spi_stat & SIRFSOC_SPI_RX_OFLOW ||
278 spi_stat & SIRFSOC_SPI_TX_UFLOW) {
279 complete(&sspi->tx_done);
280 complete(&sspi->rx_done);
281 writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
282 writel(SIRFSOC_SPI_INT_MASK_ALL,
283 sspi->base + SIRFSOC_SPI_INT_STATUS);
286 if (spi_stat & SIRFSOC_SPI_TXFIFO_EMPTY)
287 complete(&sspi->tx_done);
/* busy-wait for the rx IO-DMA event to latch before completing rx */
288 while (!(readl(sspi->base + SIRFSOC_SPI_INT_STATUS) &
289 SIRFSOC_SPI_RX_IO_DMA))
291 complete(&sspi->rx_done);
292 writel(0x0, sspi->base + SIRFSOC_SPI_INT_EN);
293 writel(SIRFSOC_SPI_INT_MASK_ALL,
294 sspi->base + SIRFSOC_SPI_INT_STATUS);
/*
 * dmaengine completion callback: wakes the completion passed as
 * callback_param (sspi->rx_done or sspi->tx_done).
 */
299 static void spi_sirfsoc_dma_fini_callback(void *data)
301 struct completion *dma_complete = data;
303 complete(dma_complete);
/*
 * Transmit a short tx-only transfer (t->len <= SIRFSOC_MAX_CMD_BYTES)
 * through the command register instead of the TX FIFO, then wait for the
 * FRM_END interrupt (completed by spi_sirfsoc_irq via sspi->tx_done).
 */
306 static void spi_sirfsoc_cmd_transfer(struct spi_device *spi,
307 struct spi_transfer *t)
309 struct sirfsoc_spi *sspi;
/* NOTE(review): wait_for_completion_timeout() takes jiffies; t->len * 10
 * looks HZ-dependent — confirm the intended unit. */
310 int timeout = t->len * 10;
313 sspi = spi_master_get_devdata(spi->master);
314 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
315 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
316 memcpy(&cmd, sspi->tx, t->len);
/* left-justify / halfword-swap the payload for MSB-first wire order */
317 if (sspi->word_width == 1 && !(spi->mode & SPI_LSB_FIRST))
318 cmd = cpu_to_be32(cmd) >>
319 ((SIRFSOC_MAX_CMD_BYTES - t->len) * 8);
320 if (sspi->word_width == 2 && t->len == 4 &&
321 (!(spi->mode & SPI_LSB_FIRST)))
322 cmd = ((cmd & 0xffff) << 16) | (cmd >> 16);
323 writel(cmd, sspi->base + SIRFSOC_SPI_CMD);
324 writel(SIRFSOC_SPI_FRM_END_INT_EN,
325 sspi->base + SIRFSOC_SPI_INT_EN);
326 writel(SIRFSOC_SPI_CMD_TX_EN,
327 sspi->base + SIRFSOC_SPI_TX_RX_EN);
328 if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
329 dev_err(&spi->dev, "cmd transfer timeout\n");
/* NOTE(review): t->len is in bytes but left_rx_word counts words —
 * verify this residue accounting when word_width != 1. */
332 sspi->left_rx_word -= t->len;
/*
 * Full-duplex transfer via the rx/tx dmaengine channels: reset/start both
 * FIFOs, program the frame length, map both buffers, submit one slave
 * descriptor per direction, then wait for the rx and tx completions
 * (signalled by spi_sirfsoc_dma_fini_callback).
 */
335 static void spi_sirfsoc_dma_transfer(struct spi_device *spi,
336 struct spi_transfer *t)
338 struct sirfsoc_spi *sspi;
339 struct dma_async_tx_descriptor *rx_desc, *tx_desc;
/* NOTE(review): wait_for_completion_timeout() takes jiffies; t->len * 10
 * looks HZ-dependent — confirm the intended unit. */
340 int timeout = t->len * 10;
342 sspi = spi_master_get_devdata(spi->master);
343 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
344 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
345 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
346 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
347 writel(0, sspi->base + SIRFSOC_SPI_INT_EN);
348 writel(SIRFSOC_SPI_INT_MASK_ALL, sspi->base + SIRFSOC_SPI_INT_STATUS);
/* frames shorter than 64K words use multi-data auto-clear mode with the
 * word count programmed into the DMA_IO_LEN registers */
349 if (sspi->left_tx_word < SIRFSOC_SPI_DAT_FRM_LEN_MAX) {
350 writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
351 SIRFSOC_SPI_ENA_AUTO_CLR | SIRFSOC_SPI_MUL_DAT_MODE,
352 sspi->base + SIRFSOC_SPI_CTRL);
353 writel(sspi->left_tx_word - 1,
354 sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
355 writel(sspi->left_tx_word - 1,
356 sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
358 writel(readl(sspi->base + SIRFSOC_SPI_CTRL),
359 sspi->base + SIRFSOC_SPI_CTRL);
360 writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
361 writel(0, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
/* identical tx/rx buffers (loopback) are mapped once, bidirectionally */
363 sspi->dst_start = dma_map_single(&spi->dev, sspi->rx, t->len,
364 (t->tx_buf != t->rx_buf) ?
365 DMA_FROM_DEVICE : DMA_BIDIRECTIONAL);
/* NOTE(review): dmaengine_prep_slave_single() can return NULL; both
 * descriptors are used unchecked below. */
366 rx_desc = dmaengine_prep_slave_single(sspi->rx_chan,
367 sspi->dst_start, t->len, DMA_DEV_TO_MEM,
368 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
369 rx_desc->callback = spi_sirfsoc_dma_fini_callback;
370 rx_desc->callback_param = &sspi->rx_done;
372 sspi->src_start = dma_map_single(&spi->dev, (void *)sspi->tx, t->len,
373 (t->tx_buf != t->rx_buf) ?
374 DMA_TO_DEVICE : DMA_BIDIRECTIONAL);
375 tx_desc = dmaengine_prep_slave_single(sspi->tx_chan,
376 sspi->src_start, t->len, DMA_MEM_TO_DEV,
377 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
378 tx_desc->callback = spi_sirfsoc_dma_fini_callback;
379 tx_desc->callback_param = &sspi->tx_done;
381 dmaengine_submit(tx_desc);
382 dmaengine_submit(rx_desc);
383 dma_async_issue_pending(sspi->tx_chan);
384 dma_async_issue_pending(sspi->rx_chan);
385 writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
386 sspi->base + SIRFSOC_SPI_TX_RX_EN);
387 if (wait_for_completion_timeout(&sspi->rx_done, timeout) == 0) {
388 dev_err(&spi->dev, "transfer timeout\n");
389 dmaengine_terminate_all(sspi->rx_chan);
391 sspi->left_rx_word = 0;
393 * we only wait tx-done event if transferring by DMA. for PIO,
394 * we get rx data by writing tx data, so if rx is done, tx has
397 if (wait_for_completion_timeout(&sspi->tx_done, timeout) == 0) {
398 dev_err(&spi->dev, "transfer timeout\n");
399 dmaengine_terminate_all(sspi->tx_chan);
/* NOTE(review): buffers mapped DMA_BIDIRECTIONAL (tx_buf == rx_buf case)
 * are unmapped here with DMA_TO_DEVICE/DMA_FROM_DEVICE — the unmap
 * direction should match the mapping direction. */
401 dma_unmap_single(&spi->dev, sspi->src_start, t->len, DMA_TO_DEVICE);
402 dma_unmap_single(&spi->dev, sspi->dst_start, t->len, DMA_FROM_DEVICE);
403 /* TX, RX FIFO stop */
404 writel(0, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
405 writel(0, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
406 if (sspi->left_tx_word >= SIRFSOC_SPI_DAT_FRM_LEN_MAX)
407 writel(0, sspi->base + SIRFSOC_SPI_TX_RX_EN);
/*
 * PIO transfer: loop until all words are moved, each iteration refilling
 * the TX FIFO (up to one FIFO's worth, 256 bytes) and draining the RX
 * FIFO, paced by the TXFIFO-empty and RX-IO-DMA interrupts which complete
 * tx_done/rx_done from spi_sirfsoc_irq().
 */
410 static void spi_sirfsoc_pio_transfer(struct spi_device *spi,
411 struct spi_transfer *t)
413 struct sirfsoc_spi *sspi;
/* NOTE(review): wait_for_completion_timeout() takes jiffies; t->len * 10
 * looks HZ-dependent — confirm the intended unit. */
414 int timeout = t->len * 10;
416 sspi = spi_master_get_devdata(spi->master);
418 writel(SIRFSOC_SPI_FIFO_RESET,
419 sspi->base + SIRFSOC_SPI_RXFIFO_OP);
420 writel(SIRFSOC_SPI_FIFO_RESET,
421 sspi->base + SIRFSOC_SPI_TXFIFO_OP);
422 writel(SIRFSOC_SPI_FIFO_START,
423 sspi->base + SIRFSOC_SPI_RXFIFO_OP);
424 writel(SIRFSOC_SPI_FIFO_START,
425 sspi->base + SIRFSOC_SPI_TXFIFO_OP);
426 writel(0, sspi->base + SIRFSOC_SPI_INT_EN);
427 writel(SIRFSOC_SPI_INT_MASK_ALL,
428 sspi->base + SIRFSOC_SPI_INT_STATUS);
429 writel(readl(sspi->base + SIRFSOC_SPI_CTRL) |
430 SIRFSOC_SPI_MUL_DAT_MODE | SIRFSOC_SPI_ENA_AUTO_CLR,
431 sspi->base + SIRFSOC_SPI_CTRL);
/* chunk size: whole residue or one FIFO (256 bytes) of words, whichever
 * is smaller; hardware wants count-minus-one */
432 writel(min(sspi->left_tx_word, (u32)(256 / sspi->word_width))
433 - 1, sspi->base + SIRFSOC_SPI_TX_DMA_IO_LEN);
434 writel(min(sspi->left_rx_word, (u32)(256 / sspi->word_width))
435 - 1, sspi->base + SIRFSOC_SPI_RX_DMA_IO_LEN);
/* fill the TX FIFO until it is full or we run out of words */
436 while (!((readl(sspi->base + SIRFSOC_SPI_TXFIFO_STATUS)
437 & SIRFSOC_SPI_FIFO_FULL)) && sspi->left_tx_word)
439 writel(SIRFSOC_SPI_TXFIFO_EMPTY_INT_EN |
440 SIRFSOC_SPI_TX_UFLOW_INT_EN |
441 SIRFSOC_SPI_RX_OFLOW_INT_EN |
442 SIRFSOC_SPI_RX_IO_DMA_INT_EN,
443 sspi->base + SIRFSOC_SPI_INT_EN);
444 writel(SIRFSOC_SPI_RX_EN | SIRFSOC_SPI_TX_EN,
445 sspi->base + SIRFSOC_SPI_TX_RX_EN);
446 if (!wait_for_completion_timeout(&sspi->tx_done, timeout) ||
447 !wait_for_completion_timeout(&sspi->rx_done, timeout)) {
448 dev_err(&spi->dev, "transfer timeout\n");
/* drain whatever arrived in the RX FIFO for this chunk */
451 while (!((readl(sspi->base + SIRFSOC_SPI_RXFIFO_STATUS)
452 & SIRFSOC_SPI_FIFO_EMPTY)) && sspi->left_rx_word)
454 writel(0, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
455 writel(0, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
456 } while (sspi->left_tx_word != 0 || sspi->left_rx_word != 0);
/*
 * spi_bitbang txrx_bufs hook: point sspi->tx/rx at the transfer buffers
 * (falling back to the dummy page for half-duplex), derive the word
 * counts, then dispatch to the command-register, DMA or PIO path.
 * Returns the number of bytes actually transferred (len minus residue).
 */
459 static int spi_sirfsoc_transfer(struct spi_device *spi, struct spi_transfer *t)
461 struct sirfsoc_spi *sspi;
462 sspi = spi_master_get_devdata(spi->master);
464 sspi->tx = t->tx_buf ? t->tx_buf : sspi->dummypage;
465 sspi->rx = t->rx_buf ? t->rx_buf : sspi->dummypage;
466 sspi->left_tx_word = sspi->left_rx_word = t->len / sspi->word_width;
467 reinit_completion(&sspi->rx_done);
468 reinit_completion(&sspi->tx_done);
470 * in the transfer, if transfer data using command register with rx_buf
471 * null, just fill command data into command register and wait for its
475 spi_sirfsoc_cmd_transfer(spi, t);
476 else if (IS_DMA_VALID(t))
477 spi_sirfsoc_dma_transfer(spi, t);
479 spi_sirfsoc_pio_transfer(spi, t);
481 return t->len - sspi->left_rx_word * sspi->word_width;
/*
 * spi_bitbang chipselect hook.  Two paths are visible: driving CS through
 * the controller's CTRL register (CS_IO_OUT bit), and driving a GPIO chip
 * select with gpio_direction_output(); both honour SPI_CS_HIGH polarity.
 */
484 static void spi_sirfsoc_chipselect(struct spi_device *spi, int value)
486 struct sirfsoc_spi *sspi = spi_master_get_devdata(spi->master);
489 u32 regval = readl(sspi->base + SIRFSOC_SPI_CTRL);
491 case BITBANG_CS_ACTIVE:
492 if (spi->mode & SPI_CS_HIGH)
493 regval |= SIRFSOC_SPI_CS_IO_OUT;
495 regval &= ~SIRFSOC_SPI_CS_IO_OUT;
497 case BITBANG_CS_INACTIVE:
498 if (spi->mode & SPI_CS_HIGH)
499 regval &= ~SIRFSOC_SPI_CS_IO_OUT;
501 regval |= SIRFSOC_SPI_CS_IO_OUT;
504 writel(regval, sspi->base + SIRFSOC_SPI_CTRL);
/* GPIO chip-select path */
507 case BITBANG_CS_ACTIVE:
508 gpio_direction_output(spi->cs_gpio,
509 spi->mode & SPI_CS_HIGH ? 1 : 0);
511 case BITBANG_CS_INACTIVE:
512 gpio_direction_output(spi->cs_gpio,
513 spi->mode & SPI_CS_HIGH ? 0 : 1);
/*
 * spi_bitbang setup_transfer hook: program the clock divider, word
 * format, SPI mode bits (polarity, phase, bit order, CS idle level),
 * FIFO thresholds/watermarks, and choose between command mode (short
 * tx-only transfers), DMA mode and IO (PIO) mode for the coming transfer.
 * A NULL @t configures defaults from the spi_device itself.
 */
520 spi_sirfsoc_setup_transfer(struct spi_device *spi, struct spi_transfer *t)
522 struct sirfsoc_spi *sspi;
523 u8 bits_per_word = 0;
526 u32 txfifo_ctrl, rxfifo_ctrl;
527 u32 fifo_size = SIRFSOC_SPI_FIFO_SIZE / 4;
529 sspi = spi_master_get_devdata(spi->master);
531 bits_per_word = (t) ? t->bits_per_word : spi->bits_per_word;
532 hz = t && t->speed_hz ? t->speed_hz : spi->max_speed_hz;
/* SCK = ctrl_freq / (2 * (div + 1)); divider field is 16 bits wide */
534 regval = (sspi->ctrl_freq / (2 * hz)) - 1;
/* NOTE(review): if regval is unsigned the "< 0" test is dead — check
 * its declaration (not visible here). */
535 if (regval > 0xFFFF || regval < 0) {
536 dev_err(&spi->dev, "Speed %d not supported\n", hz);
/* pick the data format and the matching PIO word helpers */
540 switch (bits_per_word) {
542 regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_8;
543 sspi->rx_word = spi_sirfsoc_rx_word_u8;
544 sspi->tx_word = spi_sirfsoc_tx_word_u8;
548 regval |= (bits_per_word == 12) ?
549 SIRFSOC_SPI_TRAN_DAT_FORMAT_12 :
550 SIRFSOC_SPI_TRAN_DAT_FORMAT_16;
551 sspi->rx_word = spi_sirfsoc_rx_word_u16;
552 sspi->tx_word = spi_sirfsoc_tx_word_u16;
555 regval |= SIRFSOC_SPI_TRAN_DAT_FORMAT_32;
556 sspi->rx_word = spi_sirfsoc_rx_word_u32;
557 sspi->tx_word = spi_sirfsoc_tx_word_u32;
563 sspi->word_width = DIV_ROUND_UP(bits_per_word, 8);
564 txfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
566 rxfifo_ctrl = SIRFSOC_SPI_FIFO_THD(SIRFSOC_SPI_FIFO_SIZE / 2) |
/* SPI mode bits: CS idle level, bit order, clock idle level */
569 if (!(spi->mode & SPI_CS_HIGH))
570 regval |= SIRFSOC_SPI_CS_IDLE_STAT;
571 if (!(spi->mode & SPI_LSB_FIRST))
572 regval |= SIRFSOC_SPI_TRAN_MSB;
573 if (spi->mode & SPI_CPOL)
574 regval |= SIRFSOC_SPI_CLK_IDLE_STAT;
577 * Data should be driven at least 1/2 cycle before the fetch edge
578 * to make sure that data gets stable at the fetch edge.
580 if (((spi->mode & SPI_CPOL) && (spi->mode & SPI_CPHA)) ||
581 (!(spi->mode & SPI_CPOL) && !(spi->mode & SPI_CPHA)))
582 regval &= ~SIRFSOC_SPI_DRV_POS_EDGE;
584 regval |= SIRFSOC_SPI_DRV_POS_EDGE;
/* FIFO start/low/high watermarks for the level-check interrupts */
586 writel(SIRFSOC_SPI_FIFO_SC(fifo_size - 2) |
587 SIRFSOC_SPI_FIFO_LC(fifo_size / 2) |
588 SIRFSOC_SPI_FIFO_HC(2),
589 sspi->base + SIRFSOC_SPI_TXFIFO_LEVEL_CHK);
590 writel(SIRFSOC_SPI_FIFO_SC(2) |
591 SIRFSOC_SPI_FIFO_LC(fifo_size / 2) |
592 SIRFSOC_SPI_FIFO_HC(fifo_size - 2),
593 sspi->base + SIRFSOC_SPI_RXFIFO_LEVEL_CHK);
594 writel(txfifo_ctrl, sspi->base + SIRFSOC_SPI_TXFIFO_CTRL);
595 writel(rxfifo_ctrl, sspi->base + SIRFSOC_SPI_RXFIFO_CTRL);
/* short tx-only transfers can go through the command register */
597 if (t && t->tx_buf && !t->rx_buf && (t->len <= SIRFSOC_MAX_CMD_BYTES)) {
598 regval |= (SIRFSOC_SPI_CMD_BYTE_NUM((t->len - 1)) |
599 SIRFSOC_SPI_CMD_MODE);
600 sspi->tx_by_cmd = true;
602 regval &= ~SIRFSOC_SPI_CMD_MODE;
603 sspi->tx_by_cmd = false;
606 * it should never set to hardware cs mode because in hardware cs mode,
607 * cs signal can't controlled by driver.
609 regval |= SIRFSOC_SPI_CS_IO_MODE;
610 writel(regval, sspi->base + SIRFSOC_SPI_CTRL);
612 if (IS_DMA_VALID(t)) {
613 /* Enable DMA mode for RX, TX */
614 writel(0, sspi->base + SIRFSOC_SPI_TX_DMA_IO_CTRL);
615 writel(SIRFSOC_SPI_RX_DMA_FLUSH,
616 sspi->base + SIRFSOC_SPI_RX_DMA_IO_CTRL);
618 /* Enable IO mode for RX, TX */
619 writel(SIRFSOC_SPI_IO_MODE_SEL,
620 sspi->base + SIRFSOC_SPI_TX_DMA_IO_CTRL);
621 writel(SIRFSOC_SPI_IO_MODE_SEL,
622 sspi->base + SIRFSOC_SPI_RX_DMA_IO_CTRL);
/*
 * spi_device setup hook: validates that a max speed is set, skips GPIO
 * handling for devices without a CS GPIO (-ENOENT), then applies the
 * default configuration via spi_sirfsoc_setup_transfer(spi, NULL).
 */
628 static int spi_sirfsoc_setup(struct spi_device *spi)
630 struct sirfsoc_spi *sspi;
632 if (!spi->max_speed_hz)
635 sspi = spi_master_get_devdata(spi->master);
637 if (spi->cs_gpio == -ENOENT)
641 return spi_sirfsoc_setup_transfer(spi, NULL);
/*
 * Platform probe: allocate the spi_master, map the register block, install
 * the IRQ handler, wire up the spi_bitbang callbacks, acquire rx/tx DMA
 * channels and the controller clock, reset/start both FIFOs, allocate the
 * dummy bounce page, start the bitbang framework and request any per-CS
 * GPIOs.  Error paths (labels not visible here) unwind in reverse order.
 */
644 static int spi_sirfsoc_probe(struct platform_device *pdev)
646 struct sirfsoc_spi *sspi;
647 struct spi_master *master;
648 struct resource *mem_res;
652 master = spi_alloc_master(&pdev->dev, sizeof(*sspi));
654 dev_err(&pdev->dev, "Unable to allocate SPI master\n");
657 platform_set_drvdata(pdev, master);
658 sspi = spi_master_get_devdata(master);
660 mem_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
661 sspi->base = devm_ioremap_resource(&pdev->dev, mem_res);
662 if (IS_ERR(sspi->base)) {
663 ret = PTR_ERR(sspi->base);
667 irq = platform_get_irq(pdev, 0);
672 ret = devm_request_irq(&pdev->dev, irq, spi_sirfsoc_irq, 0,
/* hook this driver into the spi_bitbang framework */
677 sspi->bitbang.master = master;
678 sspi->bitbang.chipselect = spi_sirfsoc_chipselect;
679 sspi->bitbang.setup_transfer = spi_sirfsoc_setup_transfer;
680 sspi->bitbang.txrx_bufs = spi_sirfsoc_transfer;
681 sspi->bitbang.master->setup = spi_sirfsoc_setup;
682 master->bus_num = pdev->id;
683 master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_LSB_FIRST | SPI_CS_HIGH;
684 master->bits_per_word_mask = SPI_BPW_MASK(8) | SPI_BPW_MASK(12) |
685 SPI_BPW_MASK(16) | SPI_BPW_MASK(32);
686 sspi->bitbang.master->dev.of_node = pdev->dev.of_node;
688 /* request DMA channels */
689 sspi->rx_chan = dma_request_slave_channel(&pdev->dev, "rx");
690 if (!sspi->rx_chan) {
691 dev_err(&pdev->dev, "can not allocate rx dma channel\n");
695 sspi->tx_chan = dma_request_slave_channel(&pdev->dev, "tx");
696 if (!sspi->tx_chan) {
697 dev_err(&pdev->dev, "can not allocate tx dma channel\n");
702 sspi->clk = clk_get(&pdev->dev, NULL);
703 if (IS_ERR(sspi->clk)) {
704 ret = PTR_ERR(sspi->clk);
707 clk_prepare_enable(sspi->clk);
708 sspi->ctrl_freq = clk_get_rate(sspi->clk);
710 init_completion(&sspi->rx_done);
711 init_completion(&sspi->tx_done);
713 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
714 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
715 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
716 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
717 /* We are not using dummy delay between command and data */
718 writel(0, sspi->base + SIRFSOC_SPI_DUMMY_DELAY_CTL);
/* 2 * PAGE_SIZE matches the IS_DMA_VALID() length cap, so the dummy
 * page can stand in for either buffer on any DMA-eligible transfer */
720 sspi->dummypage = kmalloc(2 * PAGE_SIZE, GFP_KERNEL);
721 if (!sspi->dummypage) {
726 ret = spi_bitbang_start(&sspi->bitbang);
/* claim every valid chip-select GPIO declared for this master */
729 for (i = 0; master->cs_gpios && i < master->num_chipselect; i++) {
730 if (master->cs_gpios[i] == -ENOENT)
732 if (!gpio_is_valid(master->cs_gpios[i])) {
733 dev_err(&pdev->dev, "no valid gpio\n");
737 ret = devm_gpio_request(&pdev->dev,
738 master->cs_gpios[i], DRIVER_NAME);
740 dev_err(&pdev->dev, "failed to request gpio\n");
744 dev_info(&pdev->dev, "registerred, bus number = %d\n", master->bus_num);
/* error unwinding: free in reverse acquisition order */
748 kfree(sspi->dummypage);
750 clk_disable_unprepare(sspi->clk);
753 dma_release_channel(sspi->tx_chan);
755 dma_release_channel(sspi->rx_chan);
757 spi_master_put(master);
/*
 * Platform remove: stop the bitbang framework, then release the dummy
 * page, clock, DMA channels and the master reference acquired in probe.
 */
762 static int spi_sirfsoc_remove(struct platform_device *pdev)
764 struct spi_master *master;
765 struct sirfsoc_spi *sspi;
767 master = platform_get_drvdata(pdev);
768 sspi = spi_master_get_devdata(master);
770 spi_bitbang_stop(&sspi->bitbang);
771 kfree(sspi->dummypage);
772 clk_disable_unprepare(sspi->clk);
774 dma_release_channel(sspi->rx_chan);
775 dma_release_channel(sspi->tx_chan);
776 spi_master_put(master);
780 #ifdef CONFIG_PM_SLEEP
/* System suspend: quiesce the SPI master, then gate the controller clock. */
781 static int spi_sirfsoc_suspend(struct device *dev)
783 struct spi_master *master = dev_get_drvdata(dev);
784 struct sirfsoc_spi *sspi = spi_master_get_devdata(master);
787 ret = spi_master_suspend(master);
791 clk_disable(sspi->clk);
/*
 * System resume: re-enable the clock, reset and restart both FIFOs
 * (register state may have been lost), then resume the SPI master.
 */
795 static int spi_sirfsoc_resume(struct device *dev)
797 struct spi_master *master = dev_get_drvdata(dev);
798 struct sirfsoc_spi *sspi = spi_master_get_devdata(master);
800 clk_enable(sspi->clk);
801 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
802 writel(SIRFSOC_SPI_FIFO_RESET, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
803 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_RXFIFO_OP);
804 writel(SIRFSOC_SPI_FIFO_START, sspi->base + SIRFSOC_SPI_TXFIFO_OP);
806 return spi_master_resume(master);
/* dev_pm_ops wrapping the CONFIG_PM_SLEEP suspend/resume handlers above */
810 static SIMPLE_DEV_PM_OPS(spi_sirfsoc_pm_ops, spi_sirfsoc_suspend,
/* device-tree match table: prima2 and marco SoC variants */
813 static const struct of_device_id spi_sirfsoc_of_match[] = {
814 { .compatible = "sirf,prima2-spi", },
815 { .compatible = "sirf,marco-spi", },
818 MODULE_DEVICE_TABLE(of, spi_sirfsoc_of_match);
820 static struct platform_driver spi_sirfsoc_driver = {
823 .owner = THIS_MODULE,
824 .pm = &spi_sirfsoc_pm_ops,
825 .of_match_table = spi_sirfsoc_of_match,
827 .probe = spi_sirfsoc_probe,
828 .remove = spi_sirfsoc_remove,
830 module_platform_driver(spi_sirfsoc_driver);
831 MODULE_DESCRIPTION("SiRF SoC SPI master driver");
834 MODULE_LICENSE("GPL v2");