// SPDX-License-Identifier: GPL-2.0-only
/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 */

#include "i2c-stm32.h"

/* Functions for DMA support */
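/**
 * stm32_i2c_dma_request() - request and configure the I2C TX/RX DMA channels
 * @dev: device requesting the channels
 * @phy_addr: physical base address of the I2C controller
 * @txdr_offset: offset of the transmit data register from @phy_addr
 * @rxdr_offset: offset of the receive data register from @phy_addr
 *
 * Return: the allocated DMA context on success, or an ERR_PTR() on failure.
 * -ENODEV is passed through without logging so the caller can fall back to
 * non-DMA transfers.
 */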
struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev,
					    dma_addr_t phy_addr,
					    u32 txdr_offset,
					    u32 rxdr_offset)
{
	struct stm32_i2c_dma *dma;
	struct dma_slave_config dma_sconfig;
	int ret;

	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
	if (!dma)
		return ERR_PTR(-ENOMEM);

	/* Request and configure I2C TX dma channel */
	dma->chan_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(dma->chan_tx)) {
		ret = PTR_ERR(dma->chan_tx);
		/* -ENODEV means no DMA channel is described; don't log it */
		if (ret != -ENODEV)
			ret = dev_err_probe(dev, ret,
					    "can't request DMA tx channel\n");
		goto fail_al;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.dst_addr = phy_addr + txdr_offset;
	dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.dst_maxburst = 1;
	dma_sconfig.direction = DMA_MEM_TO_DEV;
	ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure tx channel\n");
		goto fail_tx;
	}

	/* Request and configure I2C RX dma channel */
	dma->chan_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(dma->chan_rx)) {
		ret = PTR_ERR(dma->chan_rx);
		if (ret != -ENODEV)
			ret = dev_err_probe(dev, ret,
					    "can't request DMA rx channel\n");

		goto fail_tx;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.src_addr = phy_addr + rxdr_offset;
	dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.src_maxburst = 1;
	dma_sconfig.direction = DMA_DEV_TO_MEM;
	ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure rx channel\n");
		goto fail_rx;
	}

	init_completion(&dma->dma_complete);

	dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n",
		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));

	return dma;

fail_rx:
	dma_release_channel(dma->chan_rx);
fail_tx:
	dma_release_channel(dma->chan_tx);
fail_al:
	devm_kfree(dev, dma);

	return ERR_PTR(ret);
}

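/**
 * stm32_i2c_dma_free() - release both DMA channels and clear the context
 * @dma: DMA context returned by stm32_i2c_dma_request()
 */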
void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
{
	dma->dma_buf = 0;
	dma->dma_len = 0;

	dma_release_channel(dma->chan_tx);
	dma->chan_tx = NULL;

	dma_release_channel(dma->chan_rx);
	dma->chan_rx = NULL;

	dma->chan_using = NULL;
}

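/**
 * stm32_i2c_prep_dma_xfer() - map @buf and submit a slave DMA transfer
 * @dev: device used for error logging
 * @dma: DMA context returned by stm32_i2c_dma_request()
 * @rd_wr: true for a read (RX channel), false for a write (TX channel)
 * @len: number of bytes to transfer
 * @buf: buffer to map with dma_map_single()
 * @callback: dmaengine completion callback
 * @dma_async_param: parameter passed to @callback
 *
 * Return: 0 once the descriptor has been submitted and issued, or a
 * negative errno; @buf is unmapped again on any failure after mapping.
 */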
int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
			    bool rd_wr, u32 len, u8 *buf,
			    dma_async_tx_callback callback,
			    void *dma_async_param)
{
	struct dma_async_tx_descriptor *txdesc;
	struct device *chan_dev;
	int ret;

	if (rd_wr) {
		dma->chan_using = dma->chan_rx;
		dma->dma_transfer_dir = DMA_DEV_TO_MEM;
		dma->dma_data_dir = DMA_FROM_DEVICE;
	} else {
		dma->chan_using = dma->chan_tx;
		dma->dma_transfer_dir = DMA_MEM_TO_DEV;
		dma->dma_data_dir = DMA_TO_DEVICE;
	}

	dma->dma_len = len;
	chan_dev = dma->chan_using->device->dev;

	/* Map the buffer against the DMA channel's device, not @dev */
	dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,
				      dma->dma_data_dir);
	if (dma_mapping_error(chan_dev, dma->dma_buf)) {
		dev_err(dev, "DMA mapping failed\n");
		return -EINVAL;
	}

	txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
					     dma->dma_len,
					     dma->dma_transfer_dir,
					     DMA_PREP_INTERRUPT);
	if (!txdesc) {
		dev_err(dev, "Not able to get desc for DMA xfer\n");
		ret = -EINVAL;
		goto err;
	}

	reinit_completion(&dma->dma_complete);

	txdesc->callback = callback;
	txdesc->callback_param = dma_async_param;
	ret = dma_submit_error(dmaengine_submit(txdesc));
	if (ret < 0) {
		dev_err(dev, "DMA submit failed\n");
		goto err;
	}

	dma_async_issue_pending(dma->chan_using);

	return 0;

err:
	dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,
			 dma->dma_data_dir);
	return ret;
}