/*
 * Copyright (C) 2017 Spreadtrum Communications Inc.
 *
 * SPDX-License-Identifier: GPL-2.0
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dma/sprd-dma.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define SPRD_DMA_CHN_REG_OFFSET		0x1000
#define SPRD_DMA_CHN_REG_LENGTH		0x40
#define SPRD_DMA_MEMCPY_MIN_SIZE	64

/* DMA global registers definition */
#define SPRD_DMA_GLB_PAUSE		0x0
#define SPRD_DMA_GLB_FRAG_WAIT		0x4
#define SPRD_DMA_GLB_REQ_PEND0_EN	0x8
#define SPRD_DMA_GLB_REQ_PEND1_EN	0xc
#define SPRD_DMA_GLB_INT_RAW_STS	0x10
#define SPRD_DMA_GLB_INT_MSK_STS	0x14
#define SPRD_DMA_GLB_REQ_STS		0x18
#define SPRD_DMA_GLB_CHN_EN_STS		0x1c
#define SPRD_DMA_GLB_DEBUG_STS		0x20
#define SPRD_DMA_GLB_ARB_SEL_STS	0x24
#define SPRD_DMA_GLB_2STAGE_GRP1	0x28
#define SPRD_DMA_GLB_2STAGE_GRP2	0x2c
#define SPRD_DMA_GLB_REQ_UID(uid)	(0x4 * ((uid) - 1))
#define SPRD_DMA_GLB_REQ_UID_OFFSET	0x2000

/* DMA channel registers definition */
#define SPRD_DMA_CHN_PAUSE		0x0
#define SPRD_DMA_CHN_REQ		0x4
#define SPRD_DMA_CHN_CFG		0x8
#define SPRD_DMA_CHN_INTC		0xc
#define SPRD_DMA_CHN_SRC_ADDR		0x10
#define SPRD_DMA_CHN_DES_ADDR		0x14
#define SPRD_DMA_CHN_FRG_LEN		0x18
#define SPRD_DMA_CHN_BLK_LEN		0x1c
#define SPRD_DMA_CHN_TRSC_LEN		0x20
#define SPRD_DMA_CHN_TRSF_STEP		0x24
#define SPRD_DMA_CHN_WARP_PTR		0x28
#define SPRD_DMA_CHN_WARP_TO		0x2c
#define SPRD_DMA_CHN_LLIST_PTR		0x30
#define SPRD_DMA_CHN_FRAG_STEP		0x34
#define SPRD_DMA_CHN_SRC_BLK_STEP	0x38
#define SPRD_DMA_CHN_DES_BLK_STEP	0x3c

/* SPRD_DMA_GLB_2STAGE_GRP register definition */
#define SPRD_DMA_GLB_2STAGE_EN		BIT(24)
#define SPRD_DMA_GLB_CHN_INT_MASK	GENMASK(23, 20)
#define SPRD_DMA_GLB_DEST_INT		BIT(22)
#define SPRD_DMA_GLB_SRC_INT		BIT(20)
#define SPRD_DMA_GLB_LIST_DONE_TRG	BIT(19)
#define SPRD_DMA_GLB_TRANS_DONE_TRG	BIT(18)
#define SPRD_DMA_GLB_BLOCK_DONE_TRG	BIT(17)
#define SPRD_DMA_GLB_FRAG_DONE_TRG	BIT(16)
#define SPRD_DMA_GLB_TRG_OFFSET		16
#define SPRD_DMA_GLB_DEST_CHN_MASK	GENMASK(13, 8)
#define SPRD_DMA_GLB_DEST_CHN_OFFSET	8
#define SPRD_DMA_GLB_SRC_CHN_MASK	GENMASK(5, 0)

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_INT_MASK		GENMASK(4, 0)
#define SPRD_DMA_INT_CLR_OFFSET		24
#define SPRD_DMA_FRAG_INT_EN		BIT(0)
#define SPRD_DMA_BLK_INT_EN		BIT(1)
#define SPRD_DMA_TRANS_INT_EN		BIT(2)
#define SPRD_DMA_LIST_INT_EN		BIT(3)
#define SPRD_DMA_CFG_ERR_INT_EN		BIT(4)

/* SPRD_DMA_CHN_CFG register definition */
#define SPRD_DMA_CHN_EN			BIT(0)
#define SPRD_DMA_LINKLIST_EN		BIT(4)
#define SPRD_DMA_WAIT_BDONE_OFFSET	24
#define SPRD_DMA_DONOT_WAIT_BDONE	1

/* SPRD_DMA_CHN_REQ register definition */
#define SPRD_DMA_REQ_EN			BIT(0)

/* SPRD_DMA_CHN_PAUSE register definition */
#define SPRD_DMA_PAUSE_EN		BIT(0)
#define SPRD_DMA_PAUSE_STS		BIT(2)
#define SPRD_DMA_PAUSE_CNT		0x2000

/* DMA_CHN_WARP_* register definition */
#define SPRD_DMA_HIGH_ADDR_MASK		GENMASK(31, 28)
#define SPRD_DMA_LOW_ADDR_MASK		GENMASK(31, 0)
#define SPRD_DMA_WRAP_ADDR_MASK		GENMASK(27, 0)
#define SPRD_DMA_HIGH_ADDR_OFFSET	4

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_FRAG_INT_STS		BIT(16)
#define SPRD_DMA_BLK_INT_STS		BIT(17)
#define SPRD_DMA_TRSC_INT_STS		BIT(18)
#define SPRD_DMA_LIST_INT_STS		BIT(19)
#define SPRD_DMA_CFGERR_INT_STS		BIT(20)
#define SPRD_DMA_CHN_INT_STS					\
	(SPRD_DMA_FRAG_INT_STS | SPRD_DMA_BLK_INT_STS |		\
	 SPRD_DMA_TRSC_INT_STS | SPRD_DMA_LIST_INT_STS |	\
	 SPRD_DMA_CFGERR_INT_STS)

/* SPRD_DMA_CHN_FRG_LEN register definition */
#define SPRD_DMA_SRC_DATAWIDTH_OFFSET	30
#define SPRD_DMA_DES_DATAWIDTH_OFFSET	28
#define SPRD_DMA_SWT_MODE_OFFSET	26
#define SPRD_DMA_REQ_MODE_OFFSET	24
#define SPRD_DMA_REQ_MODE_MASK		GENMASK(1, 0)
#define SPRD_DMA_WRAP_SEL_DEST		BIT(23)
#define SPRD_DMA_WRAP_EN		BIT(22)
#define SPRD_DMA_FIX_SEL_OFFSET		21
#define SPRD_DMA_FIX_EN_OFFSET		20
#define SPRD_DMA_LLIST_END		BIT(19)
#define SPRD_DMA_FRG_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_BLK_LEN register definition */
#define SPRD_DMA_BLK_LEN_MASK		GENMASK(16, 0)

/* SPRD_DMA_CHN_TRSC_LEN register definition */
#define SPRD_DMA_TRSC_LEN_MASK		GENMASK(27, 0)

/* SPRD_DMA_CHN_TRSF_STEP register definition */
#define SPRD_DMA_DEST_TRSF_STEP_OFFSET	16
#define SPRD_DMA_SRC_TRSF_STEP_OFFSET	0
#define SPRD_DMA_TRSF_STEP_MASK		GENMASK(15, 0)

/* SPRD DMA_SRC_BLK_STEP register definition */
#define SPRD_DMA_LLIST_HIGH_MASK	GENMASK(31, 28)
#define SPRD_DMA_LLIST_HIGH_SHIFT	28

/* define DMA channel mode & trigger mode mask */
#define SPRD_DMA_CHN_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_TRG_MODE_MASK		GENMASK(7, 0)
#define SPRD_DMA_INT_TYPE_MASK		GENMASK(7, 0)

/* define the DMA transfer step type */
#define SPRD_DMA_NONE_STEP		0
#define SPRD_DMA_BYTE_STEP		1
#define SPRD_DMA_SHORT_STEP		2
#define SPRD_DMA_WORD_STEP		4
#define SPRD_DMA_DWORD_STEP		8

#define SPRD_DMA_SOFTWARE_UID		0

/* dma data width values */
enum sprd_dma_datawidth {
	SPRD_DMA_DATAWIDTH_1_BYTE,
	SPRD_DMA_DATAWIDTH_2_BYTES,
	SPRD_DMA_DATAWIDTH_4_BYTES,
	SPRD_DMA_DATAWIDTH_8_BYTES,
};

/* dma channel hardware configuration */
struct sprd_dma_chn_hw {
	u32 pause;
	u32 req;
	u32 cfg;
	u32 intc;
	u32 src_addr;
	u32 des_addr;
	u32 frg_len;
	u32 blk_len;
	u32 trsc_len;
	u32 trsf_step;
	u32 wrap_ptr;
	u32 wrap_to;
	u32 llist_ptr;
	u32 frg_step;
	u32 src_blk_step;
	u32 des_blk_step;
};

/* dma request description */
struct sprd_dma_desc {
	struct virt_dma_desc vd;
	struct sprd_dma_chn_hw chn_hw;
	enum dma_transfer_direction dir;
};

/* dma channel description */
struct sprd_dma_chn {
	struct virt_dma_chan vc;
	void __iomem *chn_base;
	struct sprd_dma_linklist linklist;
	struct dma_slave_config slave_cfg;
	u32 chn_num;
	u32 dev_id;
	enum sprd_dma_chn_mode chn_mode;
	enum sprd_dma_trg_mode trg_mode;
	enum sprd_dma_int_type int_type;
	struct sprd_dma_desc *cur_desc;
};

/* SPRD dma device */
struct sprd_dma_dev {
	struct dma_device dma_dev;
	void __iomem *glb_base;
	struct clk *clk;
	struct clk *ashb_clk;
	int irq;
	u32 total_chns;
	struct sprd_dma_chn channels[];
};

static void sprd_dma_free_desc(struct virt_dma_desc *vd);
static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param);
static struct of_dma_filter_info sprd_dma_info = {
	.filter_fn = sprd_dma_filter_fn,
};

static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct sprd_dma_chn, vc.chan);
}

static inline struct sprd_dma_dev *to_sprd_dma_dev(struct dma_chan *c)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(c);

	return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]);
}

static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
{
	return container_of(vd, struct sprd_dma_desc, vd);
}

static void sprd_dma_glb_update(struct sprd_dma_dev *sdev, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(sdev->glb_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, sdev->glb_base + reg);
}

static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
				u32 mask, u32 val)
{
	u32 orig = readl(schan->chn_base + reg);
	u32 tmp;

	tmp = (orig & ~mask) | val;
	writel(tmp, schan->chn_base + reg);
}

static int sprd_dma_enable(struct sprd_dma_dev *sdev)
{
	int ret;

	ret = clk_prepare_enable(sdev->clk);
	if (ret)
		return ret;

	/*
	 * The ashb_clk is optional and only used by the AGCP DMA controller,
	 * so check whether it needs to be enabled.
	 */
	if (!IS_ERR(sdev->ashb_clk))
		ret = clk_prepare_enable(sdev->ashb_clk);

	return ret;
}

static void sprd_dma_disable(struct sprd_dma_dev *sdev)
{
	clk_disable_unprepare(sdev->clk);

	/* Also disable the optional ashb_clk used by the AGCP DMA. */
	if (!IS_ERR(sdev->ashb_clk))
		clk_disable_unprepare(sdev->ashb_clk);
}

static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_unset_uid(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 dev_id = schan->dev_id;

	if (dev_id != SPRD_DMA_SOFTWARE_UID) {
		u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
				 SPRD_DMA_GLB_REQ_UID(dev_id);

		writel(0, sdev->glb_base + uid_offset);
	}
}

static void sprd_dma_clear_int(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_INTC,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET,
			    SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET);
}

static void sprd_dma_enable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN,
			    SPRD_DMA_CHN_EN);
}

static void sprd_dma_disable_chn(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN, 0);
}

static void sprd_dma_soft_request(struct sprd_dma_chn *schan)
{
	sprd_dma_chn_update(schan, SPRD_DMA_CHN_REQ, SPRD_DMA_REQ_EN,
			    SPRD_DMA_REQ_EN);
}

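/*
 * Pausing sets SPRD_DMA_PAUSE_EN and then polls SPRD_DMA_PAUSE_STS (for up
 * to SPRD_DMA_PAUSE_CNT iterations) until the controller acknowledges the
 * pause; resuming simply clears the enable bit again.
 */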
static void sprd_dma_pause_resume(struct sprd_dma_chn *schan, bool enable)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 pause, timeout = SPRD_DMA_PAUSE_CNT;

	if (enable) {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);

		do {
			pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
			if (pause & SPRD_DMA_PAUSE_STS)
				break;

			cpu_relax();
		} while (--timeout > 0);

		if (!timeout)
			dev_warn(sdev->dma_dev.dev,
				 "pause dma controller timeout\n");
	} else {
		sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
				    SPRD_DMA_PAUSE_EN, 0);
	}
}

static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
{
	u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG);

	if (!(cfg & SPRD_DMA_CHN_EN))
		return;

	sprd_dma_pause_resume(schan, true);
	sprd_dma_disable_chn(schan);
}

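/*
 * The channel SRC/DES address registers are 32 bits wide; the upper 4 bits
 * of a transfer address are kept in the top nibble of the WARP_PTR (source)
 * and WARP_TO (destination) registers, so the two helpers below stitch the
 * full address back together.
 */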
static unsigned long sprd_dma_get_src_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_PTR) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
{
	unsigned long addr, addr_high;

	addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) &
		    SPRD_DMA_HIGH_ADDR_MASK;

	return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

static enum sprd_dma_int_type sprd_dma_get_int_type(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) &
		       SPRD_DMA_CHN_INT_STS;

	switch (intc_sts) {
	case SPRD_DMA_CFGERR_INT_STS:
		return SPRD_DMA_CFGERR_INT;

	case SPRD_DMA_LIST_INT_STS:
		return SPRD_DMA_LIST_INT;

	case SPRD_DMA_TRSC_INT_STS:
		return SPRD_DMA_TRANS_INT;

	case SPRD_DMA_BLK_INT_STS:
		return SPRD_DMA_BLK_INT;

	case SPRD_DMA_FRAG_INT_STS:
		return SPRD_DMA_FRAG_INT;

	default:
		dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n");
		return SPRD_DMA_NO_INT;
	}
}

static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
{
	u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN);

	return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
}

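/*
 * As the register layout below suggests, a 2-stage transfer pairs a source
 * channel and a destination channel through one of the two global group
 * registers (GRP1/GRP2): the source channel number and its trigger mode go
 * into the SRC fields, the destination channel number into the DEST field,
 * and a source- or destination-side interrupt is only enabled when the
 * client requested an interrupt type in the descriptor flags.
 */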
static int sprd_dma_set_2stage_config(struct sprd_dma_chn *schan)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 val, chn = schan->chn_num + 1;

	switch (schan->chn_mode) {
	case SPRD_DMA_SRC_CHN0:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_SRC_CHN1:
		val = chn & SPRD_DMA_GLB_SRC_CHN_MASK;
		val |= BIT(schan->trg_mode - 1) << SPRD_DMA_GLB_TRG_OFFSET;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_SRC_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	case SPRD_DMA_DST_CHN0:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP1, val, val);
		break;

	case SPRD_DMA_DST_CHN1:
		val = (chn << SPRD_DMA_GLB_DEST_CHN_OFFSET) &
			SPRD_DMA_GLB_DEST_CHN_MASK;
		val |= SPRD_DMA_GLB_2STAGE_EN;
		if (schan->int_type != SPRD_DMA_NO_INT)
			val |= SPRD_DMA_GLB_DEST_INT;

		sprd_dma_glb_update(sdev, SPRD_DMA_GLB_2STAGE_GRP2, val, val);
		break;

	default:
		dev_err(sdev->dma_dev.dev, "invalid channel mode setting %d\n",
			schan->chn_mode);
		return -EINVAL;
	}

	return 0;
}

static void sprd_dma_set_pending(struct sprd_dma_chn *schan, bool enable)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
	u32 reg, val, req_id;

	if (schan->dev_id == SPRD_DMA_SOFTWARE_UID)
		return;

	/* The DMA request id always starts from 0. */
	req_id = schan->dev_id - 1;

	if (req_id < 32) {
		reg = SPRD_DMA_GLB_REQ_PEND0_EN;
		val = BIT(req_id);
	} else {
		reg = SPRD_DMA_GLB_REQ_PEND1_EN;
		val = BIT(req_id - 32);
	}

	sprd_dma_glb_update(sdev, reg, val, enable ? val : 0);
}

static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
				    struct sprd_dma_desc *sdesc)
{
	struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;

	writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE);
	writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG);
	writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC);
	writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
	writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
	writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
	writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN);
	writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN);
	writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP);
	writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR);
	writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO);
	writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR);
	writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP);
	writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP);
	writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP);
	writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ);
}

static void sprd_dma_start(struct sprd_dma_chn *schan)
{
	struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);

	if (!vd)
		return;

	list_del(&vd->node);
	schan->cur_desc = to_sprd_dma_desc(vd);

	/*
	 * Set the 2-stage configuration if the channel starts a 2-stage
	 * transfer.
	 */
	if (schan->chn_mode && sprd_dma_set_2stage_config(schan))
		return;

	/*
	 * Copy the DMA configuration from the DMA descriptor to this hardware
	 * channel.
	 */
	sprd_dma_set_chn_config(schan, schan->cur_desc);
	sprd_dma_set_uid(schan);
	sprd_dma_set_pending(schan, true);
	sprd_dma_enable_chn(schan);

	if (schan->dev_id == SPRD_DMA_SOFTWARE_UID &&
	    schan->chn_mode != SPRD_DMA_DST_CHN0 &&
	    schan->chn_mode != SPRD_DMA_DST_CHN1)
		sprd_dma_soft_request(schan);
}

static void sprd_dma_stop(struct sprd_dma_chn *schan)
{
	sprd_dma_stop_and_disable(schan);
	sprd_dma_set_pending(schan, false);
	sprd_dma_unset_uid(schan);
	sprd_dma_clear_int(schan);
	schan->cur_desc = NULL;
}

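/*
 * This check relies on the ordering of the sprd_dma_int_type and
 * sprd_dma_req_mode enums (defined in include/linux/dma/sprd-dma.h): an
 * interrupt type that is numerically larger than the programmed request
 * mode is treated as completion of the whole descriptor.
 */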
static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
				      enum sprd_dma_int_type int_type,
				      enum sprd_dma_req_mode req_mode)
{
	if (int_type == SPRD_DMA_NO_INT)
		return false;

	if (int_type >= req_mode + 1)
		return true;
	else
		return false;
}

static irqreturn_t dma_irq_handle(int irq, void *dev_id)
{
	struct sprd_dma_dev *sdev = (struct sprd_dma_dev *)dev_id;
	u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS);
	struct sprd_dma_chn *schan;
	struct sprd_dma_desc *sdesc;
	enum sprd_dma_req_mode req_type;
	enum sprd_dma_int_type int_type;
	bool trans_done = false, cyclic = false;
	u32 i;

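	/*
	 * Walk the masked interrupt status word one set bit at a time; each
	 * bit index is a channel number, so __ffs() picks the lowest pending
	 * channel and "irq_status &= irq_status - 1" clears that bit.
	 */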
	while (irq_status) {
		i = __ffs(irq_status);
		irq_status &= (irq_status - 1);
		schan = &sdev->channels[i];

		spin_lock(&schan->vc.lock);

		sdesc = schan->cur_desc;
		if (!sdesc) {
			spin_unlock(&schan->vc.lock);
			return IRQ_HANDLED;
		}

		int_type = sprd_dma_get_int_type(schan);
		req_type = sprd_dma_get_req_type(schan);
		sprd_dma_clear_int(schan);

		/* cyclic mode schedule callback */
		cyclic = schan->linklist.phy_addr ? true : false;
		if (cyclic == true) {
			vchan_cyclic_callback(&sdesc->vd);
		} else {
			/* Check if the dma request descriptor is done. */
			trans_done = sprd_dma_check_trans_done(sdesc, int_type,
							       req_type);
			if (trans_done == true) {
				vchan_cookie_complete(&sdesc->vd);
				schan->cur_desc = NULL;
				sprd_dma_start(schan);
			}
		}
		spin_unlock(&schan->vc.lock);
	}

	return IRQ_HANDLED;
}

static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
{
	return pm_runtime_get_sync(chan->device->dev);
}

static void sprd_dma_free_chan_resources(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *cur_vd = NULL;
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (schan->cur_desc)
		cur_vd = &schan->cur_desc->vd;

	sprd_dma_stop(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	if (cur_vd)
		sprd_dma_free_desc(cur_vd);

	vchan_free_chan_resources(&schan->vc);
	pm_runtime_put(chan->device->dev);
}

static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
					  dma_cookie_t cookie,
					  struct dma_tx_state *txstate)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *vd;
	unsigned long flags;
	enum dma_status ret;
	u32 pos;

	ret = dma_cookie_status(chan, cookie, txstate);
	if (ret == DMA_COMPLETE || !txstate)
		return ret;

	spin_lock_irqsave(&schan->vc.lock, flags);
	vd = vchan_find_desc(&schan->vc, cookie);
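	/*
	 * A descriptor still sitting in the queue has not moved any data yet,
	 * so its residue is the full programmed length; for the descriptor
	 * currently in flight the residue is derived from the hardware's
	 * current source/destination address instead.
	 */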
	if (vd) {
		struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
		struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;

		if (hw->trsc_len > 0)
			pos = hw->trsc_len;
		else if (hw->blk_len > 0)
			pos = hw->blk_len;
		else if (hw->frg_len > 0)
			pos = hw->frg_len;
		else
			pos = 0;
	} else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
		struct sprd_dma_desc *sdesc = schan->cur_desc;

		if (sdesc->dir == DMA_DEV_TO_MEM)
			pos = sprd_dma_get_dst_addr(schan);
		else
			pos = sprd_dma_get_src_addr(schan);
	} else {
		pos = 0;
	}
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	dma_set_residue(txstate, pos);
	return ret;
}

static void sprd_dma_issue_pending(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
		sprd_dma_start(schan);
	spin_unlock_irqrestore(&schan->vc.lock, flags);
}

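/*
 * The two helpers below translate a dmaengine bus width into the
 * controller's encoding: the data width field takes log2 of the byte count
 * (1/2/4/8 bytes -> 0/1/2/3, matching enum sprd_dma_datawidth), while the
 * address step is simply the byte count itself.
 */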
static int sprd_dma_get_datawidth(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(buswidth) - 1;

	default:
		return -EINVAL;
	}
}

static int sprd_dma_get_step(enum dma_slave_buswidth buswidth)
{
	switch (buswidth) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return buswidth;

	default:
		return -EINVAL;
	}
}

static int sprd_dma_fill_desc(struct dma_chan *chan,
			      struct sprd_dma_chn_hw *hw,
			      unsigned int sglen, int sg_index,
			      dma_addr_t src, dma_addr_t dst, u32 len,
			      enum dma_transfer_direction dir,
			      unsigned long flags,
			      struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	enum sprd_dma_chn_mode chn_mode = schan->chn_mode;
	u32 req_mode = (flags >> SPRD_DMA_REQ_SHIFT) & SPRD_DMA_REQ_MODE_MASK;
	u32 int_mode = flags & SPRD_DMA_INT_MASK;
	int src_datawidth, dst_datawidth, src_step, dst_step;
	u32 temp, fix_mode = 0, fix_en = 0;
	phys_addr_t llist_ptr;

	if (dir == DMA_MEM_TO_DEV) {
		src_step = sprd_dma_get_step(slave_cfg->src_addr_width);
		if (src_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid source step\n");
			return src_step;
		}

		/*
		 * For a 2-stage transfer, the destination channel step cannot
		 * be 0, since the destination device is the AON IRAM.
		 */
		if (chn_mode == SPRD_DMA_DST_CHN0 ||
		    chn_mode == SPRD_DMA_DST_CHN1)
			dst_step = src_step;
		else
			dst_step = SPRD_DMA_NONE_STEP;
	} else {
		dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width);
		if (dst_step < 0) {
			dev_err(sdev->dma_dev.dev, "invalid destination step\n");
			return dst_step;
		}
		src_step = SPRD_DMA_NONE_STEP;
	}

	src_datawidth = sprd_dma_get_datawidth(slave_cfg->src_addr_width);
	if (src_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid source datawidth\n");
		return src_datawidth;
	}

	dst_datawidth = sprd_dma_get_datawidth(slave_cfg->dst_addr_width);
	if (dst_datawidth < 0) {
		dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n");
		return dst_datawidth;
	}

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;

	/*
	 * wrap_ptr and wrap_to hold the high 4 bits of the source and
	 * destination addresses.
	 */
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK;

	/*
	 * Fix mode can only be enabled when exactly one of the source and
	 * destination steps is 0; if both are 0 or both are non-zero, it
	 * stays disabled.
	 */
	if ((src_step != 0 && dst_step != 0) || (src_step | dst_step) == 0) {
		fix_en = 0;
	} else {
		fix_en = 1;
		if (src_step)
			fix_mode = 1;
		else
			fix_mode = 0;
	}

	hw->intc = int_mode | SPRD_DMA_CFG_ERR_INT_EN;

	temp = src_datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= dst_datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= req_mode << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= fix_mode << SPRD_DMA_FIX_SEL_OFFSET;
	temp |= fix_en << SPRD_DMA_FIX_EN_OFFSET;
	temp |= schan->linklist.wrap_addr ?
		SPRD_DMA_WRAP_EN | SPRD_DMA_WRAP_SEL_DEST : 0;
	temp |= slave_cfg->src_maxburst & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = slave_cfg->src_maxburst & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (dst_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (src_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	/* link-list configuration */
	if (schan->linklist.phy_addr) {
		hw->cfg |= SPRD_DMA_LINKLIST_EN;

		/* link-list index */
		temp = sglen ? (sg_index + 1) % sglen : 0;

		/* Next link-list configuration's physical address offset */
		temp = temp * sizeof(*hw) + SPRD_DMA_CHN_SRC_ADDR;
		/*
		 * Point the link-list pointer at the next link-list
		 * configuration's physical address.
		 */
		llist_ptr = schan->linklist.phy_addr + temp;
		hw->llist_ptr = lower_32_bits(llist_ptr);
		hw->src_blk_step = (upper_32_bits(llist_ptr) << SPRD_DMA_LLIST_HIGH_SHIFT) &
			SPRD_DMA_LLIST_HIGH_MASK;

		if (schan->linklist.wrap_addr) {
			hw->wrap_ptr |= schan->linklist.wrap_addr &
				SPRD_DMA_WRAP_ADDR_MASK;
			hw->wrap_to |= dst & SPRD_DMA_WRAP_ADDR_MASK;
		}
	} else {
		hw->llist_ptr = 0;
		hw->src_blk_step = 0;
	}

	hw->frg_step = 0;
	hw->des_blk_step = 0;
	return 0;
}

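/*
 * For scatter-gather and cyclic transfers the client pre-allocates memory
 * (typically DMA-coherent) for the link list and passes it in via the
 * context parameter of prep_slave_sg. Each scatterlist entry gets one
 * struct sprd_dma_chn_hw slot in that memory, filled in here, and the
 * controller follows the llist_ptr chain from one slot to the next without
 * CPU intervention.
 */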
static int sprd_dma_fill_linklist_desc(struct dma_chan *chan,
				       unsigned int sglen, int sg_index,
				       dma_addr_t src, dma_addr_t dst, u32 len,
				       enum dma_transfer_direction dir,
				       unsigned long flags,
				       struct dma_slave_config *slave_cfg)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_chn_hw *hw;

	if (!schan->linklist.virt_addr)
		return -EINVAL;

	hw = (struct sprd_dma_chn_hw *)(schan->linklist.virt_addr +
					sg_index * sizeof(*hw));

	return sprd_dma_fill_desc(chan, hw, sglen, sg_index, src, dst, len,
				  dir, flags, slave_cfg);
}

static struct dma_async_tx_descriptor *
sprd_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
			 size_t len, unsigned long flags)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct sprd_dma_desc *sdesc;
	struct sprd_dma_chn_hw *hw;
	enum sprd_dma_datawidth datawidth;
	u32 step, temp;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	hw = &sdesc->chn_hw;

	hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
	hw->intc = SPRD_DMA_TRANS_INT | SPRD_DMA_CFG_ERR_INT_EN;
	hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
	hw->des_addr = dest & SPRD_DMA_LOW_ADDR_MASK;
	hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		       SPRD_DMA_HIGH_ADDR_MASK;
	hw->wrap_to = (dest >> SPRD_DMA_HIGH_ADDR_OFFSET) &
		      SPRD_DMA_HIGH_ADDR_MASK;

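	/*
	 * Pick the widest data width and address step the copy length is
	 * aligned to, so a well-aligned memcpy moves 8 bytes per beat while
	 * an odd-sized one falls back to byte accesses.
	 */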
	if (IS_ALIGNED(len, 8)) {
		datawidth = SPRD_DMA_DATAWIDTH_8_BYTES;
		step = SPRD_DMA_DWORD_STEP;
	} else if (IS_ALIGNED(len, 4)) {
		datawidth = SPRD_DMA_DATAWIDTH_4_BYTES;
		step = SPRD_DMA_WORD_STEP;
	} else if (IS_ALIGNED(len, 2)) {
		datawidth = SPRD_DMA_DATAWIDTH_2_BYTES;
		step = SPRD_DMA_SHORT_STEP;
	} else {
		datawidth = SPRD_DMA_DATAWIDTH_1_BYTE;
		step = SPRD_DMA_BYTE_STEP;
	}

	temp = datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
	temp |= datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
	temp |= SPRD_DMA_TRANS_REQ << SPRD_DMA_REQ_MODE_OFFSET;
	temp |= len & SPRD_DMA_FRG_LEN_MASK;
	hw->frg_len = temp;

	hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK;
	hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

	temp = (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
	temp |= (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
	hw->trsf_step = temp;

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

static struct dma_async_tx_descriptor *
sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
		       unsigned int sglen, enum dma_transfer_direction dir,
		       unsigned long flags, void *context)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;
	dma_addr_t src = 0, dst = 0;
	dma_addr_t start_src = 0, start_dst = 0;
	struct sprd_dma_desc *sdesc;
	struct scatterlist *sg;
	u32 len = 0;
	int ret, i;

	if (!is_slave_direction(dir))
		return NULL;

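	/*
	 * The optional context argument carries a client-provided
	 * struct sprd_dma_linklist describing pre-allocated link-list memory;
	 * without it the transfer runs in plain (non-link-list) mode.
	 */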
	if (context) {
		struct sprd_dma_linklist *ll_cfg =
			(struct sprd_dma_linklist *)context;

		schan->linklist.phy_addr = ll_cfg->phy_addr;
		schan->linklist.virt_addr = ll_cfg->virt_addr;
		schan->linklist.wrap_addr = ll_cfg->wrap_addr;
	} else {
		schan->linklist.phy_addr = 0;
		schan->linklist.virt_addr = 0;
		schan->linklist.wrap_addr = 0;
	}

	/*
	 * Set channel mode, interrupt mode and trigger mode for 2-stage
	 * transfer.
	 */
	schan->chn_mode =
		(flags >> SPRD_DMA_CHN_MODE_SHIFT) & SPRD_DMA_CHN_MODE_MASK;
	schan->trg_mode =
		(flags >> SPRD_DMA_TRG_MODE_SHIFT) & SPRD_DMA_TRG_MODE_MASK;
	schan->int_type = flags & SPRD_DMA_INT_TYPE_MASK;

	sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
	if (!sdesc)
		return NULL;

	sdesc->dir = dir;

	for_each_sg(sgl, sg, sglen, i) {
		len = sg_dma_len(sg);

		if (dir == DMA_MEM_TO_DEV) {
			src = sg_dma_address(sg);
			dst = slave_cfg->dst_addr;
		} else {
			src = slave_cfg->src_addr;
			dst = sg_dma_address(sg);
		}

		if (!i) {
			start_src = src;
			start_dst = dst;
		}

		/*
		 * Link-list mode needs at least 2 link-list configurations.
		 * If there is only one sg, there is no need to fill the
		 * link-list configuration.
		 */
		if (sglen < 2)
			break;

		ret = sprd_dma_fill_linklist_desc(chan, sglen, i, src, dst, len,
						  dir, flags, slave_cfg);
		if (ret) {
			kfree(sdesc);
			return NULL;
		}
	}

	ret = sprd_dma_fill_desc(chan, &sdesc->chn_hw, 0, 0, start_src,
				 start_dst, len, dir, flags, slave_cfg);
	if (ret) {
		kfree(sdesc);
		return NULL;
	}

	return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

static int sprd_dma_slave_config(struct dma_chan *chan,
				 struct dma_slave_config *config)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct dma_slave_config *slave_cfg = &schan->slave_cfg;

	memcpy(slave_cfg, config, sizeof(*config));
	return 0;
}

static int sprd_dma_pause(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, true);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_resume(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	unsigned long flags;

	spin_lock_irqsave(&schan->vc.lock, flags);
	sprd_dma_pause_resume(schan, false);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	return 0;
}

static int sprd_dma_terminate_all(struct dma_chan *chan)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	struct virt_dma_desc *cur_vd = NULL;
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&schan->vc.lock, flags);
	if (schan->cur_desc)
		cur_vd = &schan->cur_desc->vd;

	sprd_dma_stop(schan);

	vchan_get_all_descriptors(&schan->vc, &head);
	spin_unlock_irqrestore(&schan->vc.lock, flags);

	if (cur_vd)
		sprd_dma_free_desc(cur_vd);

	vchan_dma_desc_free_list(&schan->vc, &head);
	return 0;
}

static void sprd_dma_free_desc(struct virt_dma_desc *vd)
{
	struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);

	kfree(sdesc);
}

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
{
	struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
	u32 slave_id = *(u32 *)param;

	schan->dev_id = slave_id;
	return true;
}

static int sprd_dma_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct sprd_dma_dev *sdev;
	struct sprd_dma_chn *dma_chn;
	u32 chn_count;
	int ret, i;

	/* Parse new and deprecated dma-channels properties */
	ret = device_property_read_u32(&pdev->dev, "dma-channels", &chn_count);
	if (ret)
		ret = device_property_read_u32(&pdev->dev, "#dma-channels",
					       &chn_count);
	if (ret) {
		dev_err(&pdev->dev, "get dma channels count failed\n");
		return ret;
	}

	sdev = devm_kzalloc(&pdev->dev,
			    struct_size(sdev, channels, chn_count),
			    GFP_KERNEL);
	if (!sdev)
		return -ENOMEM;

	sdev->clk = devm_clk_get(&pdev->dev, "enable");
	if (IS_ERR(sdev->clk)) {
		dev_err(&pdev->dev, "get enable clock failed\n");
		return PTR_ERR(sdev->clk);
	}

	/* ashb clock is optional for AGCP DMA */
	sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb");
	if (IS_ERR(sdev->ashb_clk))
		dev_warn(&pdev->dev, "no optional ashb eb clock\n");

	/*
	 * We have three DMA controllers: AP DMA, AON DMA and AGCP DMA. The
	 * AGCP DMA controller may omit its interrupt line, which saves system
	 * power by not resuming the system on DMA interrupts, so the
	 * interrupts property is optional.
	 */
	sdev->irq = platform_get_irq(pdev, 0);
	if (sdev->irq > 0) {
		ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle,
				       0, "sprd_dma", (void *)sdev);
		if (ret < 0) {
			dev_err(&pdev->dev, "request dma irq failed\n");
			return ret;
		}
	} else {
		dev_warn(&pdev->dev, "no interrupts for the dma controller\n");
	}

	sdev->glb_base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(sdev->glb_base))
		return PTR_ERR(sdev->glb_base);

	dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask);
	sdev->total_chns = chn_count;
	sdev->dma_dev.chancnt = chn_count;
	INIT_LIST_HEAD(&sdev->dma_dev.channels);
	INIT_LIST_HEAD(&sdev->dma_dev.global_node);
	sdev->dma_dev.dev = &pdev->dev;
	sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources;
	sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources;
	sdev->dma_dev.device_tx_status = sprd_dma_tx_status;
	sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending;
	sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy;
	sdev->dma_dev.device_prep_slave_sg = sprd_dma_prep_slave_sg;
	sdev->dma_dev.device_config = sprd_dma_slave_config;
	sdev->dma_dev.device_pause = sprd_dma_pause;
	sdev->dma_dev.device_resume = sprd_dma_resume;
	sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all;

	for (i = 0; i < chn_count; i++) {
		dma_chn = &sdev->channels[i];
		dma_chn->chn_num = i;
		dma_chn->cur_desc = NULL;
		/* get each channel's registers base address. */
		dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET +
				    SPRD_DMA_CHN_REG_LENGTH * i;

		dma_chn->vc.desc_free = sprd_dma_free_desc;
		vchan_init(&dma_chn->vc, &sdev->dma_dev);
	}

	platform_set_drvdata(pdev, sdev);
	ret = sprd_dma_enable(sdev);
	if (ret)
		return ret;

	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0)
		goto err_rpm;

	ret = dma_async_device_register(&sdev->dma_dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "register dma device failed:%d\n", ret);
		goto err_register;
	}

	sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask;
	ret = of_dma_controller_register(np, of_dma_simple_xlate,
					 &sprd_dma_info);
	if (ret)
		goto err_of_register;

	pm_runtime_put(&pdev->dev);
	return 0;

err_of_register:
	dma_async_device_unregister(&sdev->dma_dev);
err_register:
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
err_rpm:
	sprd_dma_disable(sdev);
	return ret;
}

static int sprd_dma_remove(struct platform_device *pdev)
{
	struct sprd_dma_dev *sdev = platform_get_drvdata(pdev);
	struct sprd_dma_chn *c, *cn;

	pm_runtime_get_sync(&pdev->dev);

	/* explicitly free the irq */
	if (sdev->irq > 0)
		devm_free_irq(&pdev->dev, sdev->irq, sdev);

	list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels,
				 vc.chan.device_node) {
		list_del(&c->vc.chan.device_node);
		tasklet_kill(&c->vc.task);
	}

	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&sdev->dma_dev);
	sprd_dma_disable(sdev);

	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
	return 0;
}

static const struct of_device_id sprd_dma_match[] = {
	{ .compatible = "sprd,sc9860-dma", },
	{},
};
MODULE_DEVICE_TABLE(of, sprd_dma_match);

static int __maybe_unused sprd_dma_runtime_suspend(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);

	sprd_dma_disable(sdev);
	return 0;
}

static int __maybe_unused sprd_dma_runtime_resume(struct device *dev)
{
	struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
	int ret;

	ret = sprd_dma_enable(sdev);
	if (ret)
		dev_err(sdev->dma_dev.dev, "enable dma failed\n");

	return ret;
}

static const struct dev_pm_ops sprd_dma_pm_ops = {
	SET_RUNTIME_PM_OPS(sprd_dma_runtime_suspend,
			   sprd_dma_runtime_resume,
			   NULL)
};

static struct platform_driver sprd_dma_driver = {
	.probe = sprd_dma_probe,
	.remove = sprd_dma_remove,
	.driver = {
		.name = "sprd-dma",
		.of_match_table = sprd_dma_match,
		.pm = &sprd_dma_pm_ops,
	},
};
module_platform_driver(sprd_dma_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("DMA driver for Spreadtrum");
MODULE_AUTHOR("Baolin Wang <[email protected]>");
MODULE_AUTHOR("Eric Long <[email protected]>");
MODULE_ALIAS("platform:sprd-dma");