// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2020 Unisoc Inc.
 */

#include <linux/component.h>
#include <linux/module.h>
#include <linux/of_address.h>
#include <linux/of_device.h>
#include <linux/of_irq.h>
#include <linux/of_graph.h>
#include <video/mipi_display.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_bridge.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_of.h>
#include <drm/drm_probe_helper.h>

#define SOFT_RESET 0x04
#define MASK_PROTOCOL_INT 0x0C
#define MASK_INTERNAL_INT 0x14
#define DSI_MODE_CFG 0x18

#define VIRTUAL_CHANNEL_ID 0x1C
#define GEN_RX_VCID GENMASK(1, 0)
#define VIDEO_PKT_VCID GENMASK(3, 2)

#define DPI_VIDEO_FORMAT 0x20
#define DPI_VIDEO_MODE_FORMAT GENMASK(5, 0)
#define LOOSELY18_EN BIT(6)

#define VIDEO_PKT_CONFIG 0x24
#define VIDEO_PKT_SIZE GENMASK(15, 0)
#define VIDEO_LINE_CHUNK_NUM GENMASK(31, 16)

#define VIDEO_LINE_HBLK_TIME 0x28
#define VIDEO_LINE_HBP_TIME GENMASK(15, 0)
#define VIDEO_LINE_HSA_TIME GENMASK(31, 16)

#define VIDEO_LINE_TIME 0x2C

#define VIDEO_VBLK_LINES 0x30
#define VFP_LINES GENMASK(9, 0)
#define VBP_LINES GENMASK(19, 10)
#define VSA_LINES GENMASK(29, 20)

#define VIDEO_VACTIVE_LINES 0x34

#define VID_MODE_CFG 0x38
#define VID_MODE_TYPE GENMASK(1, 0)
#define LP_VSA_EN BIT(8)
#define LP_VBP_EN BIT(9)
#define LP_VFP_EN BIT(10)
#define LP_VACT_EN BIT(11)
#define LP_HBP_EN BIT(12)
#define LP_HFP_EN BIT(13)
#define FRAME_BTA_ACK_EN BIT(14)

#define TIMEOUT_CNT_CLK_CONFIG 0x40
#define HTX_TO_CONFIG 0x44
#define LRX_H_TO_CONFIG 0x48

#define TX_ESC_CLK_CONFIG 0x5C

#define CMD_MODE_CFG 0x68
#define TEAR_FX_EN BIT(0)

#define GEN_DT GENMASK(5, 0)
#define GEN_VC GENMASK(7, 6)

#define GEN_PLD_DATA 0x70

#define PHY_CLK_LANE_LP_CTRL 0x74
#define PHY_CLKLANE_TX_REQ_HS BIT(0)
#define AUTO_CLKLANE_CTRL_EN BIT(1)

#define PHY_INTERFACE_CTRL 0x78
#define RF_PHY_SHUTDOWN BIT(0)
#define RF_PHY_RESET_N BIT(1)
#define RF_PHY_CLK_EN BIT(2)

#define CMD_MODE_STATUS 0x98
#define GEN_CMD_RDATA_FIFO_EMPTY BIT(1)
#define GEN_CMD_WDATA_FIFO_EMPTY BIT(3)
#define GEN_CMD_CMD_FIFO_EMPTY BIT(5)
#define GEN_CMD_RDCMD_DONE BIT(7)

#define PHY_STATUS 0x9C
#define PHY_LOCK BIT(1)

#define PHY_MIN_STOP_TIME 0xA0
#define PHY_LANE_NUM_CONFIG 0xA4

#define PHY_CLKLANE_TIME_CONFIG 0xA8
#define PHY_CLKLANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_CLKLANE_HS_TO_LP_TIME GENMASK(31, 16)

#define PHY_DATALANE_TIME_CONFIG 0xAC
#define PHY_DATALANE_LP_TO_HS_TIME GENMASK(15, 0)
#define PHY_DATALANE_HS_TO_LP_TIME GENMASK(31, 16)

#define MAX_READ_TIME 0xB0

#define RX_PKT_CHECK_CONFIG 0xB4
#define RX_PKT_ECC_EN BIT(0)
#define RX_PKT_CRC_EN BIT(1)

#define TX_EOTP_EN BIT(0)
#define RX_EOTP_EN BIT(1)

#define VIDEO_NULLPKT_SIZE 0xC0
#define DCS_WM_PKT_SIZE 0xC4

#define VIDEO_SIG_DELAY_CONFIG 0xD0
#define VIDEO_SIG_DELAY GENMASK(23, 0)

#define PHY_TST_CTRL0 0xF0
#define PHY_TESTCLR BIT(0)
#define PHY_TESTCLK BIT(1)

#define PHY_TST_CTRL1 0xF4
#define PHY_TESTDIN GENMASK(7, 0)
#define PHY_TESTDOUT GENMASK(15, 8)
#define PHY_TESTEN BIT(16)

#define host_to_dsi(host) \
	container_of(host, struct sprd_dsi, host)

static u32
dsi_reg_rd(struct dsi_context *ctx, u32 offset, u32 mask,
	   u8 shift)
{
	return (readl(ctx->base + offset) & mask) >> shift;
}

static void
dsi_reg_wr(struct dsi_context *ctx, u32 offset, u32 mask,
	   u8 shift, u32 val)
{
	u32 ret;

	ret = readl(ctx->base + offset);
	ret &= ~mask;
	ret |= (val << shift) & mask;
	writel(ret, ctx->base + offset);
}

static void
dsi_reg_up(struct dsi_context *ctx, u32 offset, u32 mask,
	   u32 val)
{
	u32 ret = readl(ctx->base + offset);

	writel((ret & ~mask) | (val & mask), ctx->base + offset);
}

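/*
 * Illustrative usage of the helpers above (not taken from the original
 * file): dsi_reg_wr() shifts @val into the field selected by @mask, while
 * dsi_reg_up() expects @val already aligned to @mask, so it pairs
 * naturally with the BIT()/GENMASK() defines, e.g.:
 *
 *	dsi_reg_wr(ctx, VIRTUAL_CHANNEL_ID, VIDEO_PKT_VCID, 2, 1);
 *	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, PHY_TESTCLR);
 *	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);
 */
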
static int regmap_tst_io_write(void *context, u32 reg, u32 val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;

	if (val > 0xff || reg > 0xff)
		return -EINVAL;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, val);

	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, val);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);

	return 0;
}

static int regmap_tst_io_read(void *context, u32 reg, u32 *val)
{
	struct sprd_dsi *dsi = context;
	struct dsi_context *ctx = &dsi->ctx;
	u32 ret;

	if (reg > 0xff)
		return -EINVAL;

	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, PHY_TESTEN);
	dsi_reg_wr(ctx, PHY_TST_CTRL1, PHY_TESTDIN, 0, reg);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, PHY_TESTCLK);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLK, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL1, PHY_TESTEN, 0);

	ret = dsi_reg_rd(ctx, PHY_TST_CTRL1, PHY_TESTDOUT, 8);
	*val = ret;

	drm_dbg(dsi->drm, "reg = 0x%02x, val = 0x%02x\n", reg, *val);

	return 0;
}

static struct regmap_bus regmap_tst_io = {
	.reg_write = regmap_tst_io_write,
	.reg_read = regmap_tst_io_read,
};

static const struct regmap_config byte_config = {
	.reg_bits = 8,
	.val_bits = 8,
};

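/*
 * Note (added for clarity): the two callbacks above bit-bang the D-PHY
 * test interface, latching an 8-bit test address and then an 8-bit test
 * value on PHY_TESTCLK edges. The regmap created over them in
 * sprd_dsi_context_init() is presumably what the PLL/timing setup code
 * (dphy_pll_config()/dphy_timing_config() below) programs the analog PHY
 * through, e.g. a hypothetical regmap_write(ctx->regmap, 0x03, 0x44).
 */
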
static int dphy_wait_pll_locked(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int i;

	for (i = 0; i < 50000; i++) {
		if (dsi_reg_rd(ctx, PHY_STATUS, PHY_LOCK, 1))
			return 0;
		udelay(3);
	}

	drm_err(dsi->drm, "dphy pll can not be locked\n");
	return -ETIMEDOUT;
}

static int dsi_wait_tx_payload_fifo_empty(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 5000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_WDATA_FIFO_EMPTY, 3))
			return 0;
		udelay(1);
	}

	return -ETIMEDOUT;
}

static int dsi_wait_tx_cmd_fifo_empty(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 5000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5))
			return 0;
		udelay(1);
	}

	return -ETIMEDOUT;
}

static int dsi_wait_rd_resp_completed(struct dsi_context *ctx)
{
	int i;

	for (i = 0; i < 10000; i++) {
		if (dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDCMD_DONE, 7))
			return 0;
		udelay(1);
	}

	return -ETIMEDOUT;
}

static u16 calc_bytes_per_pixel_x100(int coding)
{
	switch (coding) {
	case COLOR_CODE_16BIT_CONFIG1:
	case COLOR_CODE_16BIT_CONFIG2:
	case COLOR_CODE_16BIT_CONFIG3:
		return 200;
	case COLOR_CODE_18BIT_CONFIG1:
	case COLOR_CODE_18BIT_CONFIG2:
		return 225;
	case COLOR_CODE_24BIT:
		return 300;
	case COLOR_CODE_COMPRESSTION:
		return 100;
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
		return 250;
	case COLOR_CODE_24BIT_YCC422:
		return 300;
	case COLOR_CODE_16BIT_YCC422:
		return 200;
	case COLOR_CODE_30BIT:
		return 375;
	case COLOR_CODE_36BIT:
		return 450;
	case COLOR_CODE_12BIT_YCC420:
		return 150;
	default:
		DRM_ERROR("invalid color coding");
		return 0;
	}
}

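/*
 * Example (added for illustration): the x100 fixed point avoids floating
 * point in the kernel, so RGB888 (COLOR_CODE_24BIT) yields 300, i.e.
 * 3.00 bytes per pixel, RGB565 (COLOR_CODE_16BIT_CONFIG1) yields 200, and
 * the 18-bit modes yield 225 (2.25 bytes per pixel).
 */
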
static u8 calc_video_size_step(int coding)
{
	switch (coding) {
	case COLOR_CODE_16BIT_CONFIG1:
	case COLOR_CODE_16BIT_CONFIG2:
	case COLOR_CODE_16BIT_CONFIG3:
	case COLOR_CODE_18BIT_CONFIG1:
	case COLOR_CODE_18BIT_CONFIG2:
	case COLOR_CODE_24BIT:
	case COLOR_CODE_COMPRESSTION:
		return 1;
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
	case COLOR_CODE_24BIT_YCC422:
	case COLOR_CODE_16BIT_YCC422:
	case COLOR_CODE_30BIT:
	case COLOR_CODE_36BIT:
	case COLOR_CODE_12BIT_YCC420:
		return 2;
	default:
		DRM_ERROR("invalid color coding");
		return 0;
	}
}

static u16 round_video_size(int coding, u16 video_size)
{
	switch (coding) {
	case COLOR_CODE_16BIT_YCC422:
	case COLOR_CODE_24BIT_YCC422:
	case COLOR_CODE_20BIT_YCC422_LOOSELY:
	case COLOR_CODE_12BIT_YCC420:
		/* round up active H pixels to a multiple of 2 */
		if ((video_size % 2) != 0)
			video_size += 1;
		break;
	default:
		break;
	}

	return video_size;
}

#define SPRD_MIPI_DSI_FMT_DSC 0xff
static u32 fmt_to_coding(u32 fmt)
{
	switch (fmt) {
	case MIPI_DSI_FMT_RGB565:
		return COLOR_CODE_16BIT_CONFIG1;
	case MIPI_DSI_FMT_RGB666:
	case MIPI_DSI_FMT_RGB666_PACKED:
		return COLOR_CODE_18BIT_CONFIG1;
	case MIPI_DSI_FMT_RGB888:
		return COLOR_CODE_24BIT;
	case SPRD_MIPI_DSI_FMT_DSC:
		return COLOR_CODE_COMPRESSTION;
	default:
		DRM_ERROR("Unsupported format (%d)\n", fmt);
		return COLOR_CODE_24BIT;
	}
}

#define ns_to_cycle(ns, byte_clk) \
	DIV_ROUND_UP((ns) * (byte_clk), 1000000)

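/*
 * Worked example (illustrative, assuming byte_clk is expressed in kHz, as
 * the division by 1000000 implies): for a 500 Mbps per-lane HS rate,
 * byte_clk = 500000 / 8 = 62500, so a 120 ns data HS-to-LP time becomes
 * ns_to_cycle(120, 62500) = DIV_ROUND_UP(7500000, 1000000) = 8 byte cycles.
 */
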
static void sprd_dsi_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	u32 byte_clk = dsi->slave->hs_rate / 8;
	u16 data_hs2lp, data_lp2hs, clk_hs2lp, clk_lp2hs;
	u16 max_rd_time;
	int div;

	writel(0, ctx->base + SOFT_RESET);
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + DSI_MODE_CFG);
	dsi_reg_up(ctx, EOTP_EN, RX_EOTP_EN, 0);
	dsi_reg_up(ctx, EOTP_EN, TX_EOTP_EN, 0);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_ECC_EN, RX_PKT_ECC_EN);
	dsi_reg_up(ctx, RX_PKT_CHECK_CONFIG, RX_PKT_CRC_EN, RX_PKT_CRC_EN);
	writel(1, ctx->base + TA_EN);
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, VIDEO_PKT_VCID, 0);
	dsi_reg_up(ctx, VIRTUAL_CHANNEL_ID, GEN_RX_VCID, 0);

	div = DIV_ROUND_UP(byte_clk, dsi->slave->lp_rate);
	writel(div, ctx->base + TX_ESC_CLK_CONFIG);

	max_rd_time = ns_to_cycle(ctx->max_rd_time, byte_clk);
	writel(max_rd_time, ctx->base + MAX_READ_TIME);

	data_hs2lp = ns_to_cycle(ctx->data_hs2lp, byte_clk);
	data_lp2hs = ns_to_cycle(ctx->data_lp2hs, byte_clk);
	clk_hs2lp = ns_to_cycle(ctx->clk_hs2lp, byte_clk);
	clk_lp2hs = ns_to_cycle(ctx->clk_lp2hs, byte_clk);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_HS_TO_LP_TIME, 16, data_hs2lp);
	dsi_reg_wr(ctx, PHY_DATALANE_TIME_CONFIG,
		   PHY_DATALANE_LP_TO_HS_TIME, 0, data_lp2hs);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_HS_TO_LP_TIME, 16, clk_hs2lp);
	dsi_reg_wr(ctx, PHY_CLKLANE_TIME_CONFIG,
		   PHY_CLKLANE_LP_TO_HS_TIME, 0, clk_lp2hs);

	writel(1, ctx->base + SOFT_RESET);
}

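/*
 * Example for the LP escape clock divider above (illustrative numbers,
 * assuming hs_rate and lp_rate are both in kHz): byte_clk = 62500 for a
 * 500 Mbps lane and lp_rate = 20000 (20 MHz maximum escape clock) give
 * div = DIV_ROUND_UP(62500, 20000) = 4, i.e. a ~15.6 MHz TX escape clock.
 */
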
/*
 * Free up resources and shut down the host controller and PHY.
 */
static void sprd_dsi_fini(struct dsi_context *ctx)
{
	writel(0xffffffff, ctx->base + MASK_PROTOCOL_INT);
	writel(0xffffffff, ctx->base + MASK_INTERNAL_INT);
	writel(0, ctx->base + SOFT_RESET);
}

/*
 * If not in burst mode, compute the video and null packet sizes as needed,
 * and configure the timers that let the data lanes and/or the clock lane
 * return to LP when the bandwidth is not filled by pixel data.
 */
static int sprd_dsi_dpi_video(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	struct videomode *vm = &ctx->vm;
	u32 byte_clk = dsi->slave->hs_rate / 8;
	u16 bpp_x100;
	u16 video_size;
	u32 ratio_x1000;
	u16 null_pkt_size = 0;
	u8 video_size_step;
	u32 hs_to;
	u32 total_bytes;
	u32 bytes_per_chunk;
	u32 chunks = 0;
	u32 bytes_left = 0;
	u32 chunk_overhead;
	const u8 pkt_header = 6;
	u8 coding;
	int div;
	u16 hline;
	u32 byte_cycle;

	coding = fmt_to_coding(dsi->slave->format);
	video_size = round_video_size(coding, vm->hactive);
	bpp_x100 = calc_bytes_per_pixel_x100(coding);
	video_size_step = calc_video_size_step(coding);
	ratio_x1000 = byte_clk * 1000 / (vm->pixelclock / 1000);
	hline = vm->hactive + vm->hsync_len + vm->hfront_porch +
		vm->hback_porch;

	writel(0, ctx->base + SOFT_RESET);
	dsi_reg_wr(ctx, VID_MODE_CFG, FRAME_BTA_ACK_EN, 14, ctx->frame_ack_en);
	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
	dsi_reg_wr(ctx, VID_MODE_CFG, VID_MODE_TYPE, 0, ctx->burst_mode);
	byte_cycle = 95 * hline * ratio_x1000 / 100000;
	dsi_reg_wr(ctx, VIDEO_SIG_DELAY_CONFIG, VIDEO_SIG_DELAY, 0, byte_cycle);
	byte_cycle = hline * ratio_x1000 / 1000;
	writel(byte_cycle, ctx->base + VIDEO_LINE_TIME);
	byte_cycle = vm->hsync_len * ratio_x1000 / 1000;
	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HSA_TIME, 16, byte_cycle);
	byte_cycle = vm->hback_porch * ratio_x1000 / 1000;
	dsi_reg_wr(ctx, VIDEO_LINE_HBLK_TIME, VIDEO_LINE_HBP_TIME, 0, byte_cycle);
	writel(vm->vactive, ctx->base + VIDEO_VACTIVE_LINES);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VFP_LINES, 0, vm->vfront_porch);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VBP_LINES, 10, vm->vback_porch);
	dsi_reg_wr(ctx, VIDEO_VBLK_LINES, VSA_LINES, 20, vm->vsync_len);
	dsi_reg_up(ctx, VID_MODE_CFG, LP_HBP_EN | LP_HFP_EN | LP_VACT_EN |
		   LP_VFP_EN | LP_VBP_EN | LP_VSA_EN, LP_HBP_EN | LP_HFP_EN |
		   LP_VACT_EN | LP_VFP_EN | LP_VBP_EN | LP_VSA_EN);

	hs_to = (hline * vm->vactive) + (2 * bpp_x100) / 100;
	for (div = 0x80; (div < hs_to) && (div > 2); div--) {
		if ((hs_to % div) == 0) {
			writel(div, ctx->base + TIMEOUT_CNT_CLK_CONFIG);
			writel(hs_to / div, ctx->base + LRX_H_TO_CONFIG);
			writel(hs_to / div, ctx->base + HTX_TO_CONFIG);
			break;
		}
	}

	if (ctx->burst_mode == VIDEO_BURST_WITH_SYNC_PULSES) {
		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
		writel(0, ctx->base + VIDEO_NULLPKT_SIZE);
		dsi_reg_up(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 0);
	} else {
		/* non burst transmission */

		/* bytes to be sent - first as one chunk */
		bytes_per_chunk = vm->hactive * bpp_x100 / 100 + pkt_header;

		/* hline total bytes from the DPI interface */
		total_bytes = (vm->hactive + vm->hfront_porch) *
			      ratio_x1000 / dsi->slave->lanes / 1000;

		/* check if the pixels actually fit on the DSI link */
		if (total_bytes < bytes_per_chunk) {
			drm_err(dsi->drm, "current resolution can not be set\n");
			return -EINVAL;
		}

		chunk_overhead = total_bytes - bytes_per_chunk;

		/* overhead higher than 1 -> enable multi packets */
		if (chunk_overhead > 1) {
			for (video_size = video_size_step;
			     video_size < vm->hactive;
			     video_size += video_size_step) {
				if (vm->hactive * 1000 / video_size % 1000)
					continue;

				chunks = vm->hactive / video_size;
				bytes_per_chunk = bpp_x100 * video_size / 100
						  + pkt_header;
				if (total_bytes >= (bytes_per_chunk * chunks)) {
					bytes_left = total_bytes -
						     bytes_per_chunk * chunks;
					break;
				}
			}

			/* prevent overflow (unsigned - unsigned) */
			if (bytes_left > (pkt_header * chunks)) {
				null_pkt_size = (bytes_left -
						pkt_header * chunks) / chunks;
				/* avoid register overflow */
				if (null_pkt_size > 1023)
					null_pkt_size = 1023;
			}
		} else {
			/* single packet per line */
			chunks = 1;

			/* must be a multiple of 4 except 18 loosely */
			for (video_size = vm->hactive;
			     (video_size % video_size_step) != 0;
			     video_size++)
				;
		}

		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_PKT_SIZE, 0, video_size);
		writel(null_pkt_size, ctx->base + VIDEO_NULLPKT_SIZE);
		dsi_reg_wr(ctx, VIDEO_PKT_CONFIG, VIDEO_LINE_CHUNK_NUM, 16, chunks);
	}

	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + SOFT_RESET);

	return 0;
}

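/*
 * Worked example for the horizontal timing conversion above (illustrative
 * numbers): with ratio_x1000 = 3000, i.e. a byte clock three times the
 * pixel clock, a 4-pixel HSA becomes 4 * 3000 / 1000 = 12 byte cycles, and
 * the video signal delay is programmed to 95% of the full line time in
 * byte cycles.
 */
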
static void sprd_dsi_edpi_video(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	const u32 fifo_depth = 1096;
	const u32 word_length = 4;
	u32 hactive = ctx->vm.hactive;
	u32 max_fifo_len;
	u16 bpp_x100;
	u8 coding;

	coding = fmt_to_coding(dsi->slave->format);
	bpp_x100 = calc_bytes_per_pixel_x100(coding);
	max_fifo_len = word_length * fifo_depth * 100 / bpp_x100;

	writel(0, ctx->base + SOFT_RESET);
	dsi_reg_wr(ctx, DPI_VIDEO_FORMAT, DPI_VIDEO_MODE_FORMAT, 0, coding);
	dsi_reg_wr(ctx, CMD_MODE_CFG, TEAR_FX_EN, 0, ctx->te_ack_en);

	if (max_fifo_len > hactive)
		writel(hactive, ctx->base + DCS_WM_PKT_SIZE);
	else
		writel(max_fifo_len, ctx->base + DCS_WM_PKT_SIZE);

	writel(ctx->int0_mask, ctx->base + MASK_PROTOCOL_INT);
	writel(ctx->int1_mask, ctx->base + MASK_INTERNAL_INT);
	writel(1, ctx->base + SOFT_RESET);
}

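/*
 * Example for the write-memory packet size above (illustrative): with
 * RGB888, bpp_x100 = 300, so max_fifo_len = 4 * 1096 * 100 / 300 = 1461
 * pixels; a 1080-pixel-wide panel gets DCS_WM_PKT_SIZE = 1080, while a
 * 1600-pixel-wide panel is capped at 1461.
 */
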
/*
 * Send a packet on the generic interface.
 * This function busy-waits for the TX FIFOs to drain, so the delay is
 * bounded by (param_length / 4) x DSIH_FIFO_ACTIVE_WAIT register accesses.
 *
 * Note that null and blanking packets cannot be sent through this
 * interface due to a controller restriction.
 */
static int sprd_dsi_wr_pkt(struct dsi_context *ctx, u8 vc, u8 type,
			   const u8 *param, u16 len)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	u8 wc_lsbyte, wc_msbyte;
	u32 payload;
	int i, j, ret;

	/* 1st: for long packet, must config payload first */
	ret = dsi_wait_tx_payload_fifo_empty(ctx);
	if (ret) {
		drm_err(dsi->drm, "tx payload fifo is not empty\n");
		return ret;
	}

	if (len > 2) {
		for (i = 0, j = 0; i < len; i += j) {
			payload = 0;
			for (j = 0; (j < 4) && ((j + i) < (len)); j++)
				payload |= param[i + j] << (j * 8);

			writel(payload, ctx->base + GEN_PLD_DATA);
		}
		wc_lsbyte = len & 0xff;
		wc_msbyte = len >> 8;
	} else {
		wc_lsbyte = (len > 0) ? param[0] : 0;
		wc_msbyte = (len > 1) ? param[1] : 0;
	}

	/* 2nd: then set packet header */
	ret = dsi_wait_tx_cmd_fifo_empty(ctx);
	if (ret) {
		drm_err(dsi->drm, "tx cmd fifo is not empty\n");
		return ret;
	}

	writel(type | (vc << 6) | (wc_lsbyte << 8) | (wc_msbyte << 16),
	       ctx->base + GEN_HDR);

	return 0;
}

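/*
 * Illustrative walk-through (not from the original file): a 5-byte DCS
 * long write such as set_column_address, param = {0x2a, 0x00, 0x00, 0x01,
 * 0xdf} with type = MIPI_DSI_DCS_LONG_WRITE (0x39) and vc = 0, is pushed
 * as two payload words, 0x0100002a then 0x000000df, followed by the
 * header 0x39 | (5 << 8) = 0x0539 carrying the word count.
 */
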
/*
 * Send a READ packet to the peripheral over the generic interface; this
 * forces command mode and stops video mode (because of the BTA).
 *
 * The function busy-waits for the buffers to clear, bounded by
 * 2 x DSIH_FIFO_ACTIVE_WAIT (waiting for the command buffer and then for
 * the receive data).
 * @note this function enables BTA
 */
static int sprd_dsi_rd_pkt(struct dsi_context *ctx, u8 vc, u8 type,
			   u8 msb_byte, u8 lsb_byte,
			   u8 *buffer, u8 bytes_to_read)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int count = 0;
	u32 temp;
	int i, ret;

	/* 1st: send read command to peripheral */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_CMD_FIFO_EMPTY, 5);
	if (!ret)
		return -EIO;

	writel(type | (vc << 6) | (lsb_byte << 8) | (msb_byte << 16),
	       ctx->base + GEN_HDR);

	/* 2nd: wait peripheral response completed */
	ret = dsi_wait_rd_resp_completed(ctx);
	if (ret) {
		drm_err(dsi->drm, "wait read response time out\n");
		return ret;
	}

	/* 3rd: get data from rx payload fifo */
	ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
	if (ret) {
		drm_err(dsi->drm, "rx payload fifo empty\n");
		return -EIO;
	}

	for (i = 0; i < 100; i++) {
		temp = readl(ctx->base + GEN_PLD_DATA);

		if (count < bytes_to_read)
			buffer[count++] = temp & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 8) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 16) & 0xff;
		if (count < bytes_to_read)
			buffer[count++] = (temp >> 24) & 0xff;

		ret = dsi_reg_rd(ctx, CMD_MODE_STATUS, GEN_CMD_RDATA_FIFO_EMPTY, 1);
		if (ret)
			break;
	}

	return count;
}

static void sprd_dsi_set_work_mode(struct dsi_context *ctx, u8 mode)
{
	if (mode == DSI_MODE_CMD)
		writel(1, ctx->base + DSI_MODE_CFG);
	else
		writel(0, ctx->base + DSI_MODE_CFG);
}

static void sprd_dsi_state_reset(struct dsi_context *ctx)
{
	writel(0, ctx->base + SOFT_RESET);
	writel(1, ctx->base + SOFT_RESET);
}

static int sprd_dphy_init(struct dsi_context *ctx)
{
	struct sprd_dsi *dsi = container_of(ctx, struct sprd_dsi, ctx);
	int ret;

	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, 0);

	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, PHY_TESTCLR);
	dsi_reg_up(ctx, PHY_TST_CTRL0, PHY_TESTCLR, 0);

	dphy_pll_config(ctx);
	dphy_timing_config(ctx);

	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, RF_PHY_SHUTDOWN);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
	writel(0x1C, ctx->base + PHY_MIN_STOP_TIME);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
	writel(dsi->slave->lanes - 1, ctx->base + PHY_LANE_NUM_CONFIG);

	ret = dphy_wait_pll_locked(ctx);
	if (ret) {
		drm_err(dsi->drm, "dphy init failed\n");
		return ret;
	}

	return 0;
}

static void sprd_dphy_fini(struct dsi_context *ctx)
{
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_SHUTDOWN, 0);
	dsi_reg_up(ctx, PHY_INTERFACE_CTRL, RF_PHY_RESET_N, RF_PHY_RESET_N);
}

static void sprd_dsi_encoder_mode_set(struct drm_encoder *encoder,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);

	drm_display_mode_to_videomode(adj_mode, &dsi->ctx.vm);
}

static void sprd_dsi_encoder_enable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	if (ctx->enabled) {
		drm_warn(dsi->drm, "dsi is already initialized\n");
		return;
	}

	sprd_dsi_init(ctx);

	if (ctx->work_mode == DSI_MODE_VIDEO)
		sprd_dsi_dpi_video(ctx);
	else
		sprd_dsi_edpi_video(ctx);

	sprd_dphy_init(ctx);

	sprd_dsi_set_work_mode(ctx, ctx->work_mode);
	sprd_dsi_state_reset(ctx);

	if (dsi->slave->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS) {
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, AUTO_CLKLANE_CTRL_EN,
			   AUTO_CLKLANE_CTRL_EN);
	} else {
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, RF_PHY_CLK_EN, RF_PHY_CLK_EN);
		dsi_reg_up(ctx, PHY_CLK_LANE_LP_CTRL, PHY_CLKLANE_TX_REQ_HS,
			   PHY_CLKLANE_TX_REQ_HS);
		dphy_wait_pll_locked(ctx);
	}

	sprd_dpu_run(dpu);

	ctx->enabled = true;
}

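/*
 * Note (added for clarity): panels that set MIPI_DSI_CLOCK_NON_CONTINUOUS
 * let the controller drop the clock lane back to LP between transmissions
 * via AUTO_CLKLANE_CTRL_EN; otherwise the clock lane is requested into HS
 * above and stays there while the link is up.
 */
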
static void sprd_dsi_encoder_disable(struct drm_encoder *encoder)
{
	struct sprd_dsi *dsi = encoder_to_dsi(encoder);
	struct sprd_dpu *dpu = to_sprd_crtc(encoder->crtc);
	struct dsi_context *ctx = &dsi->ctx;

	if (!ctx->enabled) {
		drm_warn(dsi->drm, "dsi isn't initialized\n");
		return;
	}

	sprd_dpu_stop(dpu);
	sprd_dphy_fini(ctx);
	sprd_dsi_fini(ctx);

	ctx->enabled = false;
}

static const struct drm_encoder_helper_funcs sprd_encoder_helper_funcs = {
	.mode_set = sprd_dsi_encoder_mode_set,
	.enable = sprd_dsi_encoder_enable,
	.disable = sprd_dsi_encoder_disable
};

static const struct drm_encoder_funcs sprd_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};

static int sprd_dsi_encoder_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct drm_encoder *encoder = &dsi->encoder;
	u32 crtc_mask;
	int ret;

	crtc_mask = drm_of_find_possible_crtcs(dsi->drm, dev->of_node);
	if (!crtc_mask) {
		drm_err(dsi->drm, "failed to find crtc mask\n");
		return -EINVAL;
	}

	drm_dbg(dsi->drm, "found possible crtcs: 0x%08x\n", crtc_mask);

	encoder->possible_crtcs = crtc_mask;
	ret = drm_encoder_init(dsi->drm, encoder, &sprd_encoder_funcs,
			       DRM_MODE_ENCODER_DSI, NULL);
	if (ret) {
		drm_err(dsi->drm, "failed to init dsi encoder\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &sprd_encoder_helper_funcs);

	return 0;
}

static int sprd_dsi_bridge_init(struct sprd_dsi *dsi,
				struct device *dev)
{
	int ret;

	dsi->panel_bridge = devm_drm_of_get_bridge(dev, dev->of_node, 1, 0);
	if (IS_ERR(dsi->panel_bridge))
		return PTR_ERR(dsi->panel_bridge);

	ret = drm_bridge_attach(&dsi->encoder, dsi->panel_bridge, NULL, 0);
	if (ret)
		return ret;

	return 0;
}

static int sprd_dsi_context_init(struct sprd_dsi *dsi,
				 struct device *dev)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct dsi_context *ctx = &dsi->ctx;
	struct resource *res;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ctx->base = devm_ioremap(dev, res->start, resource_size(res));
	if (!ctx->base) {
		drm_err(dsi->drm, "failed to map dsi host registers\n");
		return -ENOMEM;
	}

	ctx->regmap = devm_regmap_init(dev, &regmap_tst_io, dsi, &byte_config);
	if (IS_ERR(ctx->regmap)) {
		drm_err(dsi->drm, "dphy regmap init failed\n");
		return PTR_ERR(ctx->regmap);
	}

	ctx->data_hs2lp = 120;
	ctx->data_lp2hs = 500;
	ctx->max_rd_time = 6000;
	ctx->int0_mask = 0xffffffff;
	ctx->int1_mask = 0xffffffff;

	return 0;
}

static int sprd_dsi_bind(struct device *dev, struct device *master, void *data)
{
	struct drm_device *drm = data;
	struct sprd_dsi *dsi = dev_get_drvdata(dev);
	int ret;

	dsi->drm = drm;

	ret = sprd_dsi_encoder_init(dsi, dev);
	if (ret)
		return ret;

	ret = sprd_dsi_bridge_init(dsi, dev);
	if (ret)
		return ret;

	ret = sprd_dsi_context_init(dsi, dev);
	if (ret)
		return ret;

	return 0;
}

static void sprd_dsi_unbind(struct device *dev,
			    struct device *master, void *data)
{
	struct sprd_dsi *dsi = dev_get_drvdata(dev);

	drm_of_panel_bridge_remove(dev->of_node, 1, 0);

	drm_encoder_cleanup(&dsi->encoder);
}

static const struct component_ops dsi_component_ops = {
	.bind = sprd_dsi_bind,
	.unbind = sprd_dsi_unbind,
};

static int sprd_dsi_host_attach(struct mipi_dsi_host *host,
				struct mipi_dsi_device *slave)
{
	struct sprd_dsi *dsi = host_to_dsi(host);
	struct dsi_context *ctx = &dsi->ctx;

	dsi->slave = slave;

	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO)
		ctx->work_mode = DSI_MODE_VIDEO;
	else
		ctx->work_mode = DSI_MODE_CMD;

	if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
		ctx->burst_mode = VIDEO_BURST_WITH_SYNC_PULSES;
	else if (slave->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_PULSES;
	else
		ctx->burst_mode = VIDEO_NON_BURST_WITH_SYNC_EVENTS;

	return component_add(host->dev, &dsi_component_ops);
}

static int sprd_dsi_host_detach(struct mipi_dsi_host *host,
				struct mipi_dsi_device *slave)
{
	component_del(host->dev, &dsi_component_ops);

	return 0;
}

static ssize_t sprd_dsi_host_transfer(struct mipi_dsi_host *host,
				      const struct mipi_dsi_msg *msg)
{
	struct sprd_dsi *dsi = host_to_dsi(host);
	const u8 *tx_buf = msg->tx_buf;

	if (msg->rx_buf && msg->rx_len) {
		u8 lsb = (msg->tx_len > 0) ? tx_buf[0] : 0;
		u8 msb = (msg->tx_len > 1) ? tx_buf[1] : 0;

		return sprd_dsi_rd_pkt(&dsi->ctx, msg->channel, msg->type,
				       msb, lsb, msg->rx_buf, msg->rx_len);
	}

	if (msg->tx_buf && msg->tx_len)
		return sprd_dsi_wr_pkt(&dsi->ctx, msg->channel, msg->type,
				       tx_buf, msg->tx_len);

	return 0;
}

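/*
 * Note (added for clarity): the core mipi_dsi helpers funnel through this
 * ->transfer hook, so a panel driver call such as
 * mipi_dsi_dcs_set_display_on(dsi->slave) ends up in sprd_dsi_wr_pkt() as
 * a one-byte DCS short write, while mipi_dsi_dcs_read() lands in
 * sprd_dsi_rd_pkt().
 */
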
static const struct mipi_dsi_host_ops sprd_dsi_host_ops = {
	.attach = sprd_dsi_host_attach,
	.detach = sprd_dsi_host_detach,
	.transfer = sprd_dsi_host_transfer,
};

static const struct of_device_id dsi_match_table[] = {
	{ .compatible = "sprd,sharkl3-dsi-host" },
	{ /* sentinel */ },
};

static int sprd_dsi_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct sprd_dsi *dsi;

	dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
	if (!dsi)
		return -ENOMEM;

	dev_set_drvdata(dev, dsi);

	dsi->host.ops = &sprd_dsi_host_ops;
	dsi->host.dev = dev;

	return mipi_dsi_host_register(&dsi->host);
}

static int sprd_dsi_remove(struct platform_device *pdev)
{
	struct sprd_dsi *dsi = dev_get_drvdata(&pdev->dev);

	mipi_dsi_host_unregister(&dsi->host);

	return 0;
}

struct platform_driver sprd_dsi_driver = {
	.probe = sprd_dsi_probe,
	.remove = sprd_dsi_remove,
	.driver = {
		.name = "sprd-dsi-drv",
		.of_match_table = dsi_match_table,
	},
};

MODULE_DESCRIPTION("Unisoc MIPI DSI HOST Controller Driver");
MODULE_LICENSE("GPL v2");