// SPDX-License-Identifier: GPL-2.0-only
/*
 * Driver for the Diolan DLN-2 USB-ADC adapter
 *
 * Copyright (c) 2017 Jack Andersen
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/platform_device.h>
#include <linux/mfd/dln2.h>

#include <linux/iio/iio.h>
#include <linux/iio/sysfs.h>
#include <linux/iio/trigger.h>
#include <linux/iio/trigger_consumer.h>
#include <linux/iio/triggered_buffer.h>
#include <linux/iio/buffer.h>
#include <linux/iio/kfifo_buf.h>
#define DLN2_ADC_MOD_NAME "dln2-adc"

#define DLN2_ADC_ID 0x06

#define DLN2_ADC_GET_CHANNEL_COUNT	DLN2_CMD(0x01, DLN2_ADC_ID)
#define DLN2_ADC_ENABLE			DLN2_CMD(0x02, DLN2_ADC_ID)
#define DLN2_ADC_DISABLE		DLN2_CMD(0x03, DLN2_ADC_ID)
#define DLN2_ADC_CHANNEL_ENABLE		DLN2_CMD(0x05, DLN2_ADC_ID)
#define DLN2_ADC_CHANNEL_DISABLE	DLN2_CMD(0x06, DLN2_ADC_ID)
#define DLN2_ADC_SET_RESOLUTION		DLN2_CMD(0x08, DLN2_ADC_ID)
#define DLN2_ADC_CHANNEL_GET_VAL	DLN2_CMD(0x0A, DLN2_ADC_ID)
#define DLN2_ADC_CHANNEL_GET_ALL_VAL	DLN2_CMD(0x0B, DLN2_ADC_ID)
#define DLN2_ADC_CHANNEL_SET_CFG	DLN2_CMD(0x0C, DLN2_ADC_ID)
#define DLN2_ADC_CHANNEL_GET_CFG	DLN2_CMD(0x0D, DLN2_ADC_ID)
#define DLN2_ADC_CONDITION_MET_EV	DLN2_CMD(0x10, DLN2_ADC_ID)

#define DLN2_ADC_EVENT_NONE		0
#define DLN2_ADC_EVENT_BELOW		1
#define DLN2_ADC_EVENT_LEVEL_ABOVE	2
#define DLN2_ADC_EVENT_OUTSIDE		3
#define DLN2_ADC_EVENT_INSIDE		4
#define DLN2_ADC_EVENT_ALWAYS		5

#define DLN2_ADC_MAX_CHANNELS 8
#define DLN2_ADC_DATA_BITS 10
/*
 * Plays similar role to iio_demux_table in subsystem core; except allocated
 * in a fixed 8-element array.
 */
struct dln2_adc_demux_table {
	unsigned int from;
	unsigned int to;
	unsigned int length;
};
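/*
 * Illustrative example (not from the original source): with channels 0, 2
 * and 3 active, dln2_adc_update_demux() below produces two entries,
 * { .from = 0, .to = 0, .length = 2 } and { .from = 4, .to = 2, .length = 4 },
 * so each fixed 16-byte hardware sample is repacked into 6 bytes of scan data.
 */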
struct dln2_adc {
	struct platform_device *pdev;
	struct iio_chan_spec iio_channels[DLN2_ADC_MAX_CHANNELS + 1];
	int port, trigger_chan;
	struct iio_trigger *trig;
	struct mutex mutex;
	/* Cached sample period in milliseconds */
	unsigned int sample_period;
	/* Demux table */
	unsigned int demux_count;
	struct dln2_adc_demux_table demux[DLN2_ADC_MAX_CHANNELS];
	/* Precomputed timestamp padding offset and length */
	unsigned int ts_pad_offset, ts_pad_length;
};
struct dln2_adc_port_chan {
	u8 port;
	u8 chan;
};

struct dln2_adc_get_all_vals {
	__le16 channel_mask;
	__le16 values[DLN2_ADC_MAX_CHANNELS];
};
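/*
 * Append a copy range to the demux table, coalescing it with the previous
 * entry when the source and destination ranges are contiguous.
 */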
static void dln2_adc_add_demux(struct dln2_adc *dln2,
	unsigned int in_loc, unsigned int out_loc, unsigned int length)
{
	struct dln2_adc_demux_table *p = dln2->demux_count ?
		&dln2->demux[dln2->demux_count - 1] : NULL;

	if (p && p->from + p->length == in_loc &&
		p->to + p->length == out_loc) {
		p->length += length;
	} else if (dln2->demux_count < DLN2_ADC_MAX_CHANNELS) {
		p = &dln2->demux[dln2->demux_count++];
		p->from = in_loc;
		p->to = out_loc;
		p->length = length;
	}
}
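/*
 * Rebuild the demux table for the current active_scan_mask and precompute
 * how much zero padding is needed between the last copied sample and the
 * aligned timestamp slot of the scan buffer.
 */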
static void dln2_adc_update_demux(struct dln2_adc *dln2)
{
	int in_ind = -1, out_ind;
	unsigned int in_loc = 0, out_loc = 0;
	struct iio_dev *indio_dev = platform_get_drvdata(dln2->pdev);

	/* Clear out any old demux */
	dln2->demux_count = 0;

	/* Optimize all 8-channels case */
	if (iio_get_masklength(indio_dev) &&
	    (*indio_dev->active_scan_mask & 0xff) == 0xff) {
		dln2_adc_add_demux(dln2, 0, 0, 16);
		dln2->ts_pad_offset = 0;
		dln2->ts_pad_length = 0;
		return;
	}

	/* Build demux table from fixed 8-channels to active_scan_mask */
	iio_for_each_active_channel(indio_dev, out_ind) {
		/* Handle timestamp separately */
		if (out_ind == DLN2_ADC_MAX_CHANNELS)
			break;
		for (++in_ind; in_ind != out_ind; ++in_ind)
			in_loc += 2;
		dln2_adc_add_demux(dln2, in_loc, out_loc, 2);
		out_loc += 2;
		in_loc += 2;
	}

	if (indio_dev->scan_timestamp) {
		size_t ts_offset = indio_dev->scan_bytes / sizeof(int64_t) - 1;

		dln2->ts_pad_offset = out_loc;
		dln2->ts_pad_length = ts_offset * sizeof(int64_t) - out_loc;
	} else {
		dln2->ts_pad_offset = 0;
		dln2->ts_pad_length = 0;
	}
}
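/*
 * DLN2 transfers report the actual number of response bytes back through
 * the olen argument; a short response is treated as a protocol error below.
 */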
static int dln2_adc_get_chan_count(struct dln2_adc *dln2)
{
	int ret;
	u8 port = dln2->port;
	u8 count;
	int olen = sizeof(count);

	ret = dln2_transfer(dln2->pdev, DLN2_ADC_GET_CHANNEL_COUNT,
			    &port, sizeof(port), &count, &olen);
	if (ret < 0) {
		dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);
		return ret;
	}
	if (olen < sizeof(count))
		return -EPROTO;

	return count;
}
static int dln2_adc_set_port_resolution(struct dln2_adc *dln2)
{
	int ret;
	struct dln2_adc_port_chan port_chan = {
		.port = dln2->port,
		.chan = DLN2_ADC_DATA_BITS,
	};

	ret = dln2_transfer_tx(dln2->pdev, DLN2_ADC_SET_RESOLUTION,
			       &port_chan, sizeof(port_chan));
	if (ret < 0)
		dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);

	return ret;
}
static int dln2_adc_set_chan_enabled(struct dln2_adc *dln2,
				     int channel, bool enable)
{
	int ret;
	struct dln2_adc_port_chan port_chan = {
		.port = dln2->port,
		.chan = channel,
	};
	u16 cmd = enable ? DLN2_ADC_CHANNEL_ENABLE : DLN2_ADC_CHANNEL_DISABLE;

	ret = dln2_transfer_tx(dln2->pdev, cmd, &port_chan, sizeof(port_chan));
	if (ret < 0)
		dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);

	return ret;
}
static int dln2_adc_set_port_enabled(struct dln2_adc *dln2, bool enable,
				     u16 *conflict_out)
{
	int ret;
	u8 port = dln2->port;
	__le16 conflict;
	int olen = sizeof(conflict);
	u16 cmd = enable ? DLN2_ADC_ENABLE : DLN2_ADC_DISABLE;

	if (conflict_out)
		*conflict_out = 0;

	ret = dln2_transfer(dln2->pdev, cmd, &port, sizeof(port),
			    &conflict, &olen);
	if (ret < 0) {
		dev_dbg(&dln2->pdev->dev, "Problem in %s(%d)\n",
			__func__, (int)enable);
		if (conflict_out && enable && olen >= sizeof(conflict))
			*conflict_out = le16_to_cpu(conflict);
		return ret;
	}
	if (enable && olen < sizeof(conflict))
		return -EPROTO;

	return ret;
}
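/*
 * Program the per-channel event configuration: a non-zero period arms a
 * DLN2_ADC_EVENT_ALWAYS condition that raises DLN2_ADC_CONDITION_MET_EV
 * every 'period' milliseconds; a zero period disables the event.
 */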
static int dln2_adc_set_chan_period(struct dln2_adc *dln2,
	unsigned int channel, unsigned int period)
{
	int ret;
	struct {
		struct dln2_adc_port_chan port_chan;
		__u8 type;
		__le16 period;
		__le16 low;
		__le16 high;
	} __packed set_cfg = {
		.port_chan.port = dln2->port,
		.port_chan.chan = channel,
		.type = period ? DLN2_ADC_EVENT_ALWAYS : DLN2_ADC_EVENT_NONE,
		.period = cpu_to_le16(period)
	};

	ret = dln2_transfer_tx(dln2->pdev, DLN2_ADC_CHANNEL_SET_CFG,
			       &set_cfg, sizeof(set_cfg));
	if (ret < 0)
		dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);

	return ret;
}
static int dln2_adc_read(struct dln2_adc *dln2, unsigned int channel)
{
	int ret, i;
	u16 conflict;
	__le16 value;
	int olen = sizeof(value);
	struct dln2_adc_port_chan port_chan = {
		.port = dln2->port,
		.chan = channel,
	};

	ret = dln2_adc_set_chan_enabled(dln2, channel, true);
	if (ret < 0)
		return ret;

	ret = dln2_adc_set_port_enabled(dln2, true, &conflict);
	if (ret < 0) {
		if (conflict) {
			dev_err(&dln2->pdev->dev,
				"ADC pins conflict with mask %04X\n",
				(int)conflict);
			ret = -EBUSY;
		}
		goto disable_chan;
	}

	/*
	 * Call GET_VAL twice due to initial zero-return immediately after
	 * enabling the channel.
	 */
	for (i = 0; i < 2; ++i) {
		ret = dln2_transfer(dln2->pdev, DLN2_ADC_CHANNEL_GET_VAL,
				    &port_chan, sizeof(port_chan),
				    &value, &olen);
		if (ret < 0) {
			dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);
			goto disable_port;
		}
		if (olen < sizeof(value)) {
			ret = -EPROTO;
			goto disable_port;
		}
	}

	ret = le16_to_cpu(value);

disable_port:
	dln2_adc_set_port_enabled(dln2, false, NULL);
disable_chan:
	dln2_adc_set_chan_enabled(dln2, channel, false);

	return ret;
}
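/*
 * Fetch all channel values in a single DLN2 transaction; used by the
 * triggered-buffer path so the samples in one scan are taken together.
 */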
static int dln2_adc_read_all(struct dln2_adc *dln2,
			     struct dln2_adc_get_all_vals *get_all_vals)
{
	int ret;
	__u8 port = dln2->port;
	int olen = sizeof(*get_all_vals);

	ret = dln2_transfer(dln2->pdev, DLN2_ADC_CHANNEL_GET_ALL_VAL,
			    &port, sizeof(port), get_all_vals, &olen);
	if (ret < 0) {
		dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);
		return ret;
	}
	if (olen < sizeof(*get_all_vals))
		return -EPROTO;

	return ret;
}
static int dln2_adc_read_raw(struct iio_dev *indio_dev,
			     struct iio_chan_spec const *chan,
			     int *val, int *val2, long mask)
{
	int ret;
	unsigned int microhertz;
	struct dln2_adc *dln2 = iio_priv(indio_dev);

	switch (mask) {
	case IIO_CHAN_INFO_RAW:
		ret = iio_device_claim_direct_mode(indio_dev);
		if (ret < 0)
			return ret;

		mutex_lock(&dln2->mutex);
		ret = dln2_adc_read(dln2, chan->channel);
		mutex_unlock(&dln2->mutex);

		iio_device_release_direct_mode(indio_dev);

		if (ret < 0)
			return ret;

		*val = ret;
		return IIO_VAL_INT;

	case IIO_CHAN_INFO_SCALE:
		/*
		 * Voltage reference is fixed at 3.3v
		 *  3.3 / (1 << 10) * 1000000000
		 */
		*val = 0;
		*val2 = 3222656;
		return IIO_VAL_INT_PLUS_NANO;

	case IIO_CHAN_INFO_SAMP_FREQ:
		if (dln2->sample_period) {
			microhertz = 1000000000 / dln2->sample_period;
			*val = microhertz / 1000000;
			*val2 = microhertz % 1000000;
		} else {
			*val = 0;
			*val2 = 0;
		}

		return IIO_VAL_INT_PLUS_MICRO;

	default:
		return -EINVAL;
	}
}
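/*
 * Worked example (illustrative): writing 2.5 Hz to the sampling_frequency
 * attribute gives val = 2, val2 = 500000, so microhertz = 2500000 and the
 * cached period becomes 1000000000 / 2500000 = 400 ms, which is then
 * programmed into the trigger channel below.
 */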
static int dln2_adc_write_raw(struct iio_dev *indio_dev,
			      struct iio_chan_spec const *chan,
			      int val, int val2, long mask)
{
	int ret;
	unsigned int microhertz;
	struct dln2_adc *dln2 = iio_priv(indio_dev);

	switch (mask) {
	case IIO_CHAN_INFO_SAMP_FREQ:
		microhertz = 1000000 * val + val2;

		mutex_lock(&dln2->mutex);

		dln2->sample_period =
			microhertz ? 1000000000 / microhertz : UINT_MAX;
		if (dln2->sample_period > 65535) {
			dln2->sample_period = 65535;
			dev_warn(&dln2->pdev->dev,
				 "clamping period to 65535ms\n");
		}

		/*
		 * The first requested channel is arbitrated as a shared
		 * trigger source, so only one event is registered with the
		 * DLN. The event handler will then read all enabled channel
		 * values using DLN2_ADC_CHANNEL_GET_ALL_VAL to maintain
		 * synchronization between ADC readings.
		 */
		if (dln2->trigger_chan != -1)
			ret = dln2_adc_set_chan_period(dln2,
				dln2->trigger_chan, dln2->sample_period);
		else
			ret = 0;

		mutex_unlock(&dln2->mutex);

		return ret;

	default:
		return -EINVAL;
	}
}
static int dln2_update_scan_mode(struct iio_dev *indio_dev,
				 const unsigned long *scan_mask)
{
	struct dln2_adc *dln2 = iio_priv(indio_dev);
	int chan_count = indio_dev->num_channels - 1;
	int ret, i, j;

	mutex_lock(&dln2->mutex);

	for (i = 0; i < chan_count; ++i) {
		ret = dln2_adc_set_chan_enabled(dln2, i,
						test_bit(i, scan_mask));
		if (ret < 0) {
			for (j = 0; j < i; ++j)
				dln2_adc_set_chan_enabled(dln2, j, false);
			mutex_unlock(&dln2->mutex);
			dev_err(&dln2->pdev->dev,
				"Unable to enable ADC channel %d\n", i);
			return -EBUSY;
		}
	}

	dln2_adc_update_demux(dln2);

	mutex_unlock(&dln2->mutex);

	return 0;
}
#define DLN2_ADC_CHAN(lval, idx) {					\
	lval.type = IIO_VOLTAGE;					\
	lval.channel = idx;						\
	lval.indexed = 1;						\
	lval.info_mask_separate = BIT(IIO_CHAN_INFO_RAW);		\
	lval.info_mask_shared_by_all = BIT(IIO_CHAN_INFO_SCALE) |	\
				       BIT(IIO_CHAN_INFO_SAMP_FREQ);	\
	lval.scan_index = idx;						\
	lval.scan_type.sign = 'u';					\
	lval.scan_type.realbits = DLN2_ADC_DATA_BITS;			\
	lval.scan_type.storagebits = 16;				\
	lval.scan_type.endianness = IIO_LE;				\
}
/* Assignment version of IIO_CHAN_SOFT_TIMESTAMP */
#define IIO_CHAN_SOFT_TIMESTAMP_ASSIGN(lval, _si) {	\
	lval.type = IIO_TIMESTAMP;			\
	lval.channel = -1;				\
	lval.scan_index = _si;				\
	lval.scan_type.sign = 's';			\
	lval.scan_type.realbits = 64;			\
	lval.scan_type.storagebits = 64;		\
}
static const struct iio_info dln2_adc_info = {
	.read_raw = dln2_adc_read_raw,
	.write_raw = dln2_adc_write_raw,
	.update_scan_mode = dln2_update_scan_mode,
};
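/*
 * Pollfunc bottom half: runs in the triggered-buffer thread, reads all
 * channel values over USB, demuxes them into the scan layout and pushes
 * the scan to the buffer with a timestamp.
 */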
static irqreturn_t dln2_adc_trigger_h(int irq, void *p)
{
	struct iio_poll_func *pf = p;
	struct iio_dev *indio_dev = pf->indio_dev;
	struct {
		__le16 values[DLN2_ADC_MAX_CHANNELS];
		int64_t timestamp_space;
	} data;
	struct dln2_adc_get_all_vals dev_data;
	struct dln2_adc *dln2 = iio_priv(indio_dev);
	const struct dln2_adc_demux_table *t;
	int ret, i;

	mutex_lock(&dln2->mutex);
	ret = dln2_adc_read_all(dln2, &dev_data);
	mutex_unlock(&dln2->mutex);
	if (ret < 0)
		goto done;

	/* Demux operation */
	for (i = 0; i < dln2->demux_count; ++i) {
		t = &dln2->demux[i];
		memcpy((void *)data.values + t->to,
		       (void *)dev_data.values + t->from, t->length);
	}

	/* Zero padding space between values and timestamp */
	if (dln2->ts_pad_length)
		memset((void *)data.values + dln2->ts_pad_offset,
		       0, dln2->ts_pad_length);

	iio_push_to_buffers_with_timestamp(indio_dev, &data,
					   iio_get_time_ns(indio_dev));

done:
	iio_trigger_notify_done(indio_dev->trig);

	return IRQ_HANDLED;
}
static int dln2_adc_triggered_buffer_postenable(struct iio_dev *indio_dev)
{
	int ret;
	struct dln2_adc *dln2 = iio_priv(indio_dev);
	u16 conflict;
	unsigned int trigger_chan;

	mutex_lock(&dln2->mutex);

	/* Enable ADC */
	ret = dln2_adc_set_port_enabled(dln2, true, &conflict);
	if (ret < 0) {
		mutex_unlock(&dln2->mutex);
		dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);
		if (conflict) {
			dev_err(&dln2->pdev->dev,
				"ADC pins conflict with mask %04X\n",
				(int)conflict);
			ret = -EBUSY;
		}
		return ret;
	}

	/* Assign trigger channel based on first enabled channel */
	trigger_chan = find_first_bit(indio_dev->active_scan_mask,
				      iio_get_masklength(indio_dev));
	if (trigger_chan < DLN2_ADC_MAX_CHANNELS) {
		dln2->trigger_chan = trigger_chan;
		ret = dln2_adc_set_chan_period(dln2, dln2->trigger_chan,
					       dln2->sample_period);
		mutex_unlock(&dln2->mutex);
		if (ret < 0) {
			dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);
			return ret;
		}
	} else {
		dln2->trigger_chan = -1;
		mutex_unlock(&dln2->mutex);
	}

	return 0;
}
static int dln2_adc_triggered_buffer_predisable(struct iio_dev *indio_dev)
{
	int ret;
	struct dln2_adc *dln2 = iio_priv(indio_dev);

	mutex_lock(&dln2->mutex);

	/* Disable trigger channel */
	if (dln2->trigger_chan != -1) {
		dln2_adc_set_chan_period(dln2, dln2->trigger_chan, 0);
		dln2->trigger_chan = -1;
	}

	/* Disable ADC */
	ret = dln2_adc_set_port_enabled(dln2, false, NULL);

	mutex_unlock(&dln2->mutex);
	if (ret < 0)
		dev_dbg(&dln2->pdev->dev, "Problem in %s\n", __func__);

	return ret;
}
static const struct iio_buffer_setup_ops dln2_adc_buffer_setup_ops = {
	.postenable = dln2_adc_triggered_buffer_postenable,
	.predisable = dln2_adc_triggered_buffer_predisable,
};
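/*
 * DLN2_ADC_CONDITION_MET_EV callback, invoked by the dln2 MFD core from URB
 * completion (atomic) context; it only kicks the trigger, the USB read itself
 * happens later in dln2_adc_trigger_h() where sleeping is allowed.
 */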
static void dln2_adc_event(struct platform_device *pdev, u16 echo,
			   const void *data, int len)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);
	struct dln2_adc *dln2 = iio_priv(indio_dev);

	/* Called via URB completion handler */
	iio_trigger_poll(dln2->trig);
}
static int dln2_adc_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct dln2_adc *dln2;
	struct dln2_platform_data *pdata = dev_get_platdata(&pdev->dev);
	struct iio_dev *indio_dev;
	int i, ret, chans;

	indio_dev = devm_iio_device_alloc(dev, sizeof(*dln2));
	if (!indio_dev) {
		dev_err(dev, "failed allocating iio device\n");
		return -ENOMEM;
	}

	dln2 = iio_priv(indio_dev);
	dln2->pdev = pdev;
	dln2->port = pdata->port;
	dln2->trigger_chan = -1;
	mutex_init(&dln2->mutex);

	platform_set_drvdata(pdev, indio_dev);

	ret = dln2_adc_set_port_resolution(dln2);
	if (ret < 0) {
		dev_err(dev, "failed to set ADC resolution to 10 bits\n");
		return ret;
	}

	chans = dln2_adc_get_chan_count(dln2);
	if (chans < 0) {
		dev_err(dev, "failed to get channel count: %d\n", chans);
		return chans;
	}
	if (chans > DLN2_ADC_MAX_CHANNELS) {
		chans = DLN2_ADC_MAX_CHANNELS;
		dev_warn(dev, "clamping channels to %d\n",
			 DLN2_ADC_MAX_CHANNELS);
	}

	for (i = 0; i < chans; ++i)
		DLN2_ADC_CHAN(dln2->iio_channels[i], i)
	IIO_CHAN_SOFT_TIMESTAMP_ASSIGN(dln2->iio_channels[i], i);

	indio_dev->name = DLN2_ADC_MOD_NAME;
	indio_dev->info = &dln2_adc_info;
	indio_dev->modes = INDIO_DIRECT_MODE;
	indio_dev->channels = dln2->iio_channels;
	indio_dev->num_channels = chans + 1;
	indio_dev->setup_ops = &dln2_adc_buffer_setup_ops;

	dln2->trig = devm_iio_trigger_alloc(dev, "%s-dev%d",
					    indio_dev->name,
					    iio_device_id(indio_dev));
	if (!dln2->trig) {
		dev_err(dev, "failed to allocate trigger\n");
		return -ENOMEM;
	}
	iio_trigger_set_drvdata(dln2->trig, dln2);
	ret = devm_iio_trigger_register(dev, dln2->trig);
	if (ret) {
		dev_err(dev, "failed to register trigger: %d\n", ret);
		return ret;
	}
	iio_trigger_set_immutable(indio_dev, dln2->trig);

	ret = devm_iio_triggered_buffer_setup(dev, indio_dev, NULL,
					      dln2_adc_trigger_h,
					      &dln2_adc_buffer_setup_ops);
	if (ret) {
		dev_err(dev, "failed to allocate triggered buffer: %d\n", ret);
		return ret;
	}

	ret = dln2_register_event_cb(pdev, DLN2_ADC_CONDITION_MET_EV,
				     dln2_adc_event);
	if (ret) {
		dev_err(dev, "failed to setup DLN2 periodic event: %d\n", ret);
		return ret;
	}

	ret = iio_device_register(indio_dev);
	if (ret) {
		dev_err(dev, "failed to register iio device: %d\n", ret);
		goto unregister_event;
	}

	return ret;

unregister_event:
	dln2_unregister_event_cb(pdev, DLN2_ADC_CONDITION_MET_EV);

	return ret;
}
static void dln2_adc_remove(struct platform_device *pdev)
{
	struct iio_dev *indio_dev = platform_get_drvdata(pdev);

	iio_device_unregister(indio_dev);
	dln2_unregister_event_cb(pdev, DLN2_ADC_CONDITION_MET_EV);
}
static struct platform_driver dln2_adc_driver = {
	.driver.name	= DLN2_ADC_MOD_NAME,
	.probe		= dln2_adc_probe,
	.remove		= dln2_adc_remove,
};

module_platform_driver(dln2_adc_driver);
MODULE_DESCRIPTION("Driver for the Diolan DLN2 ADC interface");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:dln2-adc");