From 0d6e214f5a257f9b53619ef8aa3b6e767189bdcf Mon Sep 17 00:00:00 2001
From: Fugang Duan <fugang.duan@nxp.com>
Date: Wed, 11 Sep 2019 16:21:06 +0800
Subject: [PATCH] tty: serial: fsl_lpuart: enable dma mode for imx8qxp

The imx8qxp lpuart supports eDMA for DMA mode, including the EOP
(end-of-packet) feature. However, the eDMA cannot detect the correct DADDR
for the current major loop in cyclic mode, so cyclic mode is not supported.

This patch enables the lpuart prep slave sg DMA mode for imx8qxp.

Signed-off-by: Fugang Duan <fugang.duan@nxp.com>
---
 drivers/tty/serial/fsl_lpuart.c | 280 +++++++++++++++++++++++++++++++---------
 1 file changed, 219 insertions(+), 61 deletions(-)
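
Note for reviewers: the core of this change is switching the imx8qxp Rx path
from a cyclic DMA descriptor to a one-shot slave_sg descriptor that is
re-armed after each completion/idle event. The sketch below only illustrates
the two dmaengine preparation calls involved; the helper names and buffer
arguments are placeholders rather than code from this driver, and it is not
part of the patch itself.

	#include <linux/dmaengine.h>
	#include <linux/scatterlist.h>

	/*
	 * Cyclic mode: the engine keeps wrapping around the ring buffer and
	 * fires a callback every period. This is the mode the imx8qxp eDMA
	 * cannot use, because it reports the wrong DADDR for the current
	 * major loop.
	 */
	static struct dma_async_tx_descriptor *
	rx_prep_cyclic(struct dma_chan *chan, dma_addr_t buf, size_t len)
	{
		return dmaengine_prep_dma_cyclic(chan, buf, len, len / 2,
						 DMA_DEV_TO_MEM,
						 DMA_PREP_INTERRUPT);
	}

	/*
	 * slave_sg mode: a single transfer that completes on EOP (idle line)
	 * and must be prepared and submitted again for the next packet.
	 */
	static struct dma_async_tx_descriptor *
	rx_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl)
	{
		return dmaengine_prep_slave_sg(chan, sgl, 1, DMA_DEV_TO_MEM,
					       DMA_PREP_INTERRUPT);
	}
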
--- a/drivers/tty/serial/fsl_lpuart.c
+++ b/drivers/tty/serial/fsl_lpuart.c
#define UARTBAUD_M10 0x20000000
#define UARTBAUD_TDMAE 0x00800000
#define UARTBAUD_RDMAE 0x00200000
+#define UARTBAUD_RIDMAE 0x00100000
#define UARTBAUD_MATCFG 0x00400000
#define UARTBAUD_BOTHEDGE 0x00020000
#define UARTBAUD_RESYNCDIS 0x00010000
#define UARTCTRL_SBK 0x00010000
#define UARTCTRL_MA1IE 0x00008000
#define UARTCTRL_MA2IE 0x00004000
-#define UARTCTRL_IDLECFG 0x00000100
+#define UARTCTRL_IDLECFG_OFF 0x8
#define UARTCTRL_LOOPS 0x00000080
#define UARTCTRL_DOZEEN 0x00000040
#define UARTCTRL_RSRC 0x00000020
#define UARTDATA_MASK 0x3ff
#define UARTMODIR_IREN 0x00020000
+#define UARTMODIR_RTSWATER_S 0x8
#define UARTMODIR_TXCTSSRC 0x00000020
#define UARTMODIR_TXCTSC 0x00000010
#define UARTMODIR_RXRTSE 0x00000008
#define UARTFIFO_RXUF 0x00010000
#define UARTFIFO_TXFLUSH 0x00008000
#define UARTFIFO_RXFLUSH 0x00004000
+#define UARTFIFO_RXIDEN_MASK 0x7
+#define UARTFIFO_RXIDEN_OFF 10
#define UARTFIFO_TXOFE 0x00000200
#define UARTFIFO_RXUFE 0x00000100
#define UARTFIFO_TXFE 0x00000080
#define UARTWATER_TXWATER_OFF 0
#define UARTWATER_RXWATER_OFF 16
+#define UARTFIFO_RXIDEN_RDRF 0x3
+#define UARTCTRL_IDLECFG 0x7
/* Rx DMA timeout in ms, which is used to calculate Rx ring buffer size */
#define DMA_RX_TIMEOUT (10)
@@ -252,6 +259,9 @@ struct lpuart_port {
unsigned int txfifo_size;
unsigned int rxfifo_size;
bool lpuart_dma_tx_use;
bool lpuart_dma_rx_use;
struct dma_chan *dma_tx_chan;
@@ -276,33 +286,45 @@ struct lpuart_soc_data {
enum lpuart_type devtype;
static const struct lpuart_soc_data vf_data = {
.devtype = VF610_LPUART,
+ .rx_dma_cyclic = true,
static const struct lpuart_soc_data ls1021a_data = {
.devtype = LS1021A_LPUART,
.iotype = UPIO_MEM32BE,
+ .rx_dma_cyclic = true,
static const struct lpuart_soc_data ls1028a_data = {
.devtype = LS1028A_LPUART,
+ .rx_dma_cyclic = true,
static struct lpuart_soc_data imx7ulp_data = {
.devtype = IMX7ULP_LPUART,
.iotype = UPIO_MEM32,
.reg_off = IMX_REG_OFF,
+ .rx_dma_cyclic = true,
static struct lpuart_soc_data imx8qxp_data = {
.devtype = IMX8QXP_LPUART,
.iotype = UPIO_MEM32,
.reg_off = IMX_REG_OFF,
+ .rx_watermark = 31,
+ .rx_dma_cyclic = false,
static const struct of_device_id lpuart_dt_ids[] = {
@@ -317,6 +339,7 @@ MODULE_DEVICE_TABLE(of, lpuart_dt_ids);
/* Forward declare this for the dma callbacks*/
static void lpuart_dma_tx_complete(void *arg);
+static int lpuart_sched_rx_dma(struct lpuart_port *sport);
static inline bool is_layerscape_lpuart(struct lpuart_port *sport)
@@ -1008,19 +1031,15 @@ static irqreturn_t lpuart32_int(int irq,
if ((sts & UARTSTAT_TDRE) && !sport->lpuart_dma_tx_use)
lpuart32_txint(sport);
+ if (sport->lpuart_dma_rx_use && sport->dma_eeop)
+ sts &= ~UARTSTAT_IDLE;
lpuart32_write(&sport->port, sts, UARTSTAT);
-static void lpuart_copy_rx_to_tty(struct lpuart_port *sport)
+static void lpuart_rx_error_stat(struct lpuart_port *sport)
- struct tty_port *port = &sport->port.state->port;
- struct dma_tx_state state;
- enum dma_status dmastat;
- struct circ_buf *ring = &sport->rx_ring;
- unsigned long flags;
if (lpuart_is_32(sport)) {
unsigned long sr = lpuart32_read(&sport->port, UARTSTAT);
@@ -1072,8 +1091,21 @@ static void lpuart_copy_rx_to_tty(struct
writeb(cr2, sport->port.membase + UARTCR2);
+static void lpuart_copy_rx_to_tty(struct lpuart_port *sport)
+ struct tty_port *port = &sport->port.state->port;
+ struct dma_tx_state state;
+ enum dma_status dmastat;
+ struct circ_buf *ring = &sport->rx_ring;
+ unsigned long flags;
- async_tx_ack(sport->dma_rx_desc);
+ if (!is_imx8qxp_lpuart(sport)) {
+ lpuart_rx_error_stat(sport);
+ async_tx_ack(sport->dma_rx_desc);
spin_lock_irqsave(&sport->port.lock, flags);
@@ -1136,7 +1168,33 @@ static void lpuart_copy_rx_to_tty(struct
spin_unlock_irqrestore(&sport->port.lock, flags);
tty_flip_buffer_push(port);
- mod_timer(&sport->lpuart_timer, jiffies + sport->dma_rx_timeout);
+ if (!sport->dma_eeop)
+ mod_timer(&sport->lpuart_timer,
+ jiffies + sport->dma_rx_timeout);
+static void lpuart_dma_rx_post_handler(struct lpuart_port *sport)
+ unsigned long flags;
+ unsigned long rxcount;
+ spin_lock_irqsave(&sport->port.lock, flags);
+ /* For end of packet, clear the idle flag to avoid triggering
+ * the next transfer. Only the i.MX8x lpuart supports EEOP.
+ */
+ if (sport->dma_eeop && lpuart_is_32(sport)) {
+ rxcount = lpuart32_read(&sport->port, UARTWATER);
+ rxcount = rxcount >> UARTWATER_RXCNT_OFF;
+ lpuart32_write(&sport->port, UARTSTAT_IDLE, UARTSTAT);
+ lpuart_sched_rx_dma(sport);
+ spin_unlock_irqrestore(&sport->port.lock, flags);
static void lpuart_dma_rx_complete(void *arg)
@@ -1144,6 +1202,8 @@ static void lpuart_dma_rx_complete(void
struct lpuart_port *sport = arg;
lpuart_copy_rx_to_tty(sport);
+ if (!sport->rx_dma_cyclic)
+ lpuart_dma_rx_post_handler(sport);
static void lpuart_timer_func(struct timer_list *t)
@@ -1151,13 +1211,78 @@ static void lpuart_timer_func(struct tim
struct lpuart_port *sport = from_timer(sport, t, lpuart_timer);
lpuart_copy_rx_to_tty(sport);
+ if (!sport->rx_dma_cyclic) {
+ dmaengine_terminate_async(sport->dma_rx_chan);
+ lpuart_dma_rx_post_handler(sport);
-static inline int lpuart_start_rx_dma(struct lpuart_port *sport)
+static int lpuart_sched_rxdma_cyclic(struct lpuart_port *sport)
+ sport->dma_rx_desc = dmaengine_prep_dma_cyclic(sport->dma_rx_chan,
+ sg_dma_address(&sport->rx_sgl),
+ sport->rx_sgl.length,
+ sport->rx_sgl.length / 2,
+ DMA_PREP_INTERRUPT);
+ if (!sport->dma_rx_desc) {
+ dev_err(sport->port.dev, "Cannot prepare cyclic DMA\n");
+static int lpuart_sched_rxdma_slave_sg(struct lpuart_port *sport)
+ dma_sync_sg_for_device(sport->port.dev, &sport->rx_sgl, 1,
+ sport->dma_rx_desc = dmaengine_prep_slave_sg(sport->dma_rx_chan,
+ DMA_PREP_INTERRUPT);
+ if (!sport->dma_rx_desc) {
+ dev_err(sport->port.dev, "Cannot prepare slave_sg DMA\n");
+ sport->rx_ring.tail = 0;
+ sport->rx_ring.head = 0;
+static int lpuart_sched_rx_dma(struct lpuart_port *sport)
+ unsigned long temp;
+ if (sport->rx_dma_cyclic)
+ ret = lpuart_sched_rxdma_cyclic(sport);
+ else
+ ret = lpuart_sched_rxdma_slave_sg(sport);
+ sport->dma_rx_desc->callback = lpuart_dma_rx_complete;
+ sport->dma_rx_desc->callback_param = sport;
+ sport->dma_rx_cookie = dmaengine_submit(sport->dma_rx_desc);
+ dma_async_issue_pending(sport->dma_rx_chan);
+ if (lpuart_is_32(sport)) {
+ temp = lpuart32_read(&sport->port, UARTBAUD);
+ if (sport->dma_eeop)
+ temp |= UARTBAUD_RIDMAE;
+ temp |= UARTBAUD_RDMAE;
+ lpuart32_write(&sport->port, temp, UARTBAUD);
+ writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_RDMAS,
+ sport->port.membase + UARTCR5);
+static void lpuart_get_rx_dma_rng_len(struct lpuart_port *sport)
- struct dma_slave_config dma_rx_sconfig = {};
- struct circ_buf *ring = &sport->rx_ring;
struct tty_port *port = &sport->port.state->port;
struct tty_struct *tty = port->tty;
@@ -1177,6 +1302,18 @@ static inline int lpuart_start_rx_dma(st
sport->rx_dma_rng_buf_len = (1 << (fls(sport->rx_dma_rng_buf_len) - 1));
if (sport->rx_dma_rng_buf_len < 16)
sport->rx_dma_rng_buf_len = 16;
+static inline int lpuart_start_rx_dma(struct lpuart_port *sport)
+ struct dma_slave_config dma_rx_sconfig = {};
+ struct circ_buf *ring = &sport->rx_ring;
+ if (!sport->dma_eeop)
+ lpuart_get_rx_dma_rng_len(sport);
+ else
+ sport->rx_dma_rng_buf_len = PAGE_SIZE;
ring->buf = kzalloc(sport->rx_dma_rng_buf_len, GFP_ATOMIC);
@@ -1202,32 +1339,7 @@ static inline int lpuart_start_rx_dma(st
- sport->dma_rx_desc = dmaengine_prep_dma_cyclic(sport->dma_rx_chan,
- sg_dma_address(&sport->rx_sgl),
- sport->rx_sgl.length,
- sport->rx_sgl.length / 2,
- DMA_PREP_INTERRUPT);
- if (!sport->dma_rx_desc) {
- dev_err(sport->port.dev, "Cannot prepare cyclic DMA\n");
- sport->dma_rx_desc->callback = lpuart_dma_rx_complete;
- sport->dma_rx_desc->callback_param = sport;
- sport->dma_rx_cookie = dmaengine_submit(sport->dma_rx_desc);
- dma_async_issue_pending(sport->dma_rx_chan);
- if (lpuart_is_32(sport)) {
- unsigned long temp = lpuart32_read(&sport->port, UARTBAUD);
- lpuart32_write(&sport->port, temp | UARTBAUD_RDMAE, UARTBAUD);
- writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_RDMAS,
- sport->port.membase + UARTCR5);
+ return lpuart_sched_rx_dma(sport);
static void lpuart_dma_rx_free(struct uart_port *port)
@@ -1413,8 +1525,10 @@ static void lpuart_setup_watermark(struc
writeb(UARTSFIFO_RXUF, sport->port.membase + UARTSFIFO);
+ if (uart_console(&sport->port))
+ sport->rx_watermark = 1;
writeb(0, sport->port.membase + UARTTWFIFO);
- writeb(1, sport->port.membase + UARTRWFIFO);
+ writeb(sport->rx_watermark, sport->port.membase + UARTRWFIFO);
writeb(cr2_saved, sport->port.membase + UARTCR2);
@@ -1435,6 +1549,7 @@ static void lpuart32_setup_watermark(str
unsigned long val, ctrl;
unsigned long ctrl_saved;
+ unsigned long rxiden_cnt;
ctrl = lpuart32_read(&sport->port, UARTCTRL);
@@ -1446,12 +1561,26 @@ static void lpuart32_setup_watermark(str
val = lpuart32_read(&sport->port, UARTFIFO);
val |= UARTFIFO_TXFE | UARTFIFO_RXFE;
val |= UARTFIFO_TXFLUSH | UARTFIFO_RXFLUSH;
+ val &= ~(UARTFIFO_RXIDEN_MASK << UARTFIFO_RXIDEN_OFF);
+ rxiden_cnt = sport->dma_eeop ? 0 : UARTFIFO_RXIDEN_RDRF;
+ val |= ((rxiden_cnt & UARTFIFO_RXIDEN_MASK) <<
+ UARTFIFO_RXIDEN_OFF);
lpuart32_write(&sport->port, val, UARTFIFO);
/* set the watermark */
- val = (0x1 << UARTWATER_RXWATER_OFF) | (0x0 << UARTWATER_TXWATER_OFF);
+ if (uart_console(&sport->port))
+ sport->rx_watermark = 1;
+ val = (sport->rx_watermark << UARTWATER_RXWATER_OFF) |
+ (0x0 << UARTWATER_TXWATER_OFF);
lpuart32_write(&sport->port, val, UARTWATER);
+ /* set RTS watermark */
+ if (!uart_console(&sport->port)) {
+ val = lpuart32_read(&sport->port, UARTMODIR);
+ val |= (sport->rxfifo_size >> 1) << UARTMODIR_RTSWATER_S;
+ lpuart32_write(&sport->port, val, UARTMODIR);
lpuart32_write(&sport->port, ctrl_saved, UARTCTRL);
@@ -1463,17 +1592,29 @@ static void lpuart32_setup_watermark_ena
lpuart32_setup_watermark(sport);
temp = lpuart32_read(&sport->port, UARTCTRL);
- temp |= UARTCTRL_RE | UARTCTRL_TE | UARTCTRL_ILIE;
+ temp |= UARTCTRL_RE | UARTCTRL_TE;
+ temp |= UARTCTRL_IDLECFG << UARTCTRL_IDLECFG_OFF;
lpuart32_write(&sport->port, temp, UARTCTRL);
static void rx_dma_timer_init(struct lpuart_port *sport)
+ if (sport->dma_eeop)
+ return;
+
timer_setup(&sport->lpuart_timer, lpuart_timer_func, 0);
sport->lpuart_timer.expires = jiffies + sport->dma_rx_timeout;
add_timer(&sport->lpuart_timer);
+static void lpuart_del_timer_sync(struct lpuart_port *sport)
+ if (sport->dma_eeop)
+ return;
+
+ del_timer_sync(&sport->lpuart_timer);
static void lpuart_tx_dma_startup(struct lpuart_port *sport)
@@ -1537,19 +1678,23 @@ static int lpuart_startup(struct uart_po
+static void lpuart32_hw_disable(struct lpuart_port *sport)
+ unsigned long temp;
+ temp = lpuart32_read(&sport->port, UARTCTRL);
+ temp &= ~(UARTCTRL_RIE | UARTCTRL_ILIE | UARTCTRL_RE |
+ UARTCTRL_TIE | UARTCTRL_TE);
+ lpuart32_write(&sport->port, temp, UARTCTRL);
static void lpuart32_configure(struct lpuart_port *sport)
- if (sport->lpuart_dma_rx_use) {
- /* RXWATER must be 0 */
- temp = lpuart32_read(&sport->port, UARTWATER);
- temp &= ~(UARTWATER_WATER_MASK << UARTWATER_RXWATER_OFF);
- lpuart32_write(&sport->port, temp, UARTWATER);
temp = lpuart32_read(&sport->port, UARTCTRL);
if (!sport->lpuart_dma_rx_use)
- temp |= UARTCTRL_RIE;
+ temp |= UARTCTRL_RIE | UARTCTRL_ILIE;
if (!sport->lpuart_dma_tx_use)
temp |= UARTCTRL_TIE;
lpuart32_write(&sport->port, temp, UARTCTRL);
@@ -1593,12 +1738,12 @@ static int lpuart32_startup(struct uart_
spin_lock_irqsave(&sport->port.lock, flags);
- lpuart32_setup_watermark_enable(sport);
+ lpuart32_hw_disable(sport);
lpuart_rx_dma_startup(sport);
lpuart_tx_dma_startup(sport);
+ lpuart32_setup_watermark_enable(sport);
lpuart32_configure(sport);
spin_unlock_irqrestore(&sport->port.lock, flags);
@@ -1608,7 +1753,7 @@ static int lpuart32_startup(struct uart_
static void lpuart_dma_shutdown(struct lpuart_port *sport)
if (sport->lpuart_dma_rx_use) {
- del_timer_sync(&sport->lpuart_timer);
+ lpuart_del_timer_sync(sport);
lpuart_dma_rx_free(&sport->port);
@@ -1649,11 +1794,22 @@ static void lpuart32_shutdown(struct uar
spin_lock_irqsave(&port->lock, flags);
+ temp = lpuart32_read(&sport->port, UARTSTAT);
+ lpuart32_write(&sport->port, temp, UARTSTAT);
+ /* disable Rx/Tx DMA */
+ temp = lpuart32_read(port, UARTBAUD);
+ temp &= ~(UARTBAUD_TDMAE | UARTBAUD_RDMAE | UARTBAUD_RIDMAE);
+ lpuart32_write(port, temp, UARTBAUD);
/* disable Rx/Tx and interrupts */
temp = lpuart32_read(port, UARTCTRL);
- temp &= ~(UARTCTRL_TE | UARTCTRL_RE |
- UARTCTRL_TIE | UARTCTRL_TCIE | UARTCTRL_RIE);
+ temp &= ~(UARTCTRL_TE | UARTCTRL_RE | UARTCTRL_TIE |
+ UARTCTRL_TCIE | UARTCTRL_RIE | UARTCTRL_ILIE |
lpuart32_write(port, temp, UARTCTRL);
+ lpuart32_write(port, 0, UARTMODIR);
spin_unlock_irqrestore(&port->lock, flags);
@@ -1750,10 +1906,10 @@ lpuart_set_termios(struct uart_port *por
* baud rate and restart Rx DMA path.
* Since timer function acqures sport->port.lock, need to stop before
- * acquring same lock because otherwise del_timer_sync() can deadlock.
+ * acquring same lock because otherwise lpuart_del_timer_sync() can deadlock.
if (old && sport->lpuart_dma_rx_use) {
- del_timer_sync(&sport->lpuart_timer);
+ lpuart_del_timer_sync(sport);
lpuart_dma_rx_free(&sport->port);
@@ -1965,10 +2121,10 @@ lpuart32_set_termios(struct uart_port *p
* baud rate and restart Rx DMA path.
* Since timer function acqures sport->port.lock, need to stop before
- * acquring same lock because otherwise del_timer_sync() can deadlock.
+ * acquring same lock because otherwise lpuart_del_timer_sync() can deadlock.
if (old && sport->lpuart_dma_rx_use) {
- del_timer_sync(&sport->lpuart_timer);
+ lpuart_del_timer_sync(sport);
lpuart_dma_rx_free(&sport->port);
@@ -2481,6 +2637,10 @@ static int lpuart_probe(struct platform_
sport->port.dev = &pdev->dev;
sport->port.type = PORT_LPUART;
sport->devtype = sdata->devtype;
+ sport->rx_dma_cyclic = sdata->rx_dma_cyclic;
+ sport->rx_watermark = sdata->rx_watermark;
+ sport->dma_eeop = is_imx8qxp_lpuart(sport);
ret = platform_get_irq(pdev, 0);
@@ -2631,7 +2791,7 @@ static int lpuart_suspend(struct device
* Rx DMA path before suspend and start Rx DMA path on resume.
- del_timer_sync(&sport->lpuart_timer);
+ lpuart_del_timer_sync(sport);
lpuart_dma_rx_free(&sport->port);