1 From 0d6e214f5a257f9b53619ef8aa3b6e767189bdcf Mon Sep 17 00:00:00 2001
2 From: Fugang Duan <fugang.duan@nxp.com>
3 Date: Wed, 11 Sep 2019 16:21:06 +0800
4 Subject: [PATCH] tty: serial: fsl_lpuart: enable dma mode for imx8qxp
6 The imx8qxp lpuart supports eDMA for DMA mode, including the EOP
7 (end-of-packet) feature. However, eDMA cannot detect the correct DADDR
8 for the current major loop in cyclic mode, so cyclic mode is not supported.
10 This patch enables the lpuart prep slave sg DMA mode for imx8qxp.
12 Signed-off-by: Fugang Duan <fugang.duan@nxp.com>
14 drivers/tty/serial/fsl_lpuart.c | 280 +++++++++++++++++++++++++++++++---------
15 1 file changed, 219 insertions(+), 61 deletions(-)
17 --- a/drivers/tty/serial/fsl_lpuart.c
18 +++ b/drivers/tty/serial/fsl_lpuart.c
20 #define UARTBAUD_M10 0x20000000
21 #define UARTBAUD_TDMAE 0x00800000
22 #define UARTBAUD_RDMAE 0x00200000
23 +#define UARTBAUD_RIDMAE 0x00100000
24 #define UARTBAUD_MATCFG 0x00400000
25 #define UARTBAUD_BOTHEDGE 0x00020000
26 #define UARTBAUD_RESYNCDIS 0x00010000
28 #define UARTCTRL_SBK 0x00010000
29 #define UARTCTRL_MA1IE 0x00008000
30 #define UARTCTRL_MA2IE 0x00004000
31 -#define UARTCTRL_IDLECFG 0x00000100
32 +#define UARTCTRL_IDLECFG_OFF 0x8
33 #define UARTCTRL_LOOPS 0x00000080
34 #define UARTCTRL_DOZEEN 0x00000040
35 #define UARTCTRL_RSRC 0x00000020
37 #define UARTDATA_MASK 0x3ff
39 #define UARTMODIR_IREN 0x00020000
40 +#define UARTMODIR_RTSWATER_S 0x8
41 #define UARTMODIR_TXCTSSRC 0x00000020
42 #define UARTMODIR_TXCTSC 0x00000010
43 #define UARTMODIR_RXRTSE 0x00000008
45 #define UARTFIFO_RXUF 0x00010000
46 #define UARTFIFO_TXFLUSH 0x00008000
47 #define UARTFIFO_RXFLUSH 0x00004000
48 +#define UARTFIFO_RXIDEN_MASK 0x7
49 +#define UARTFIFO_RXIDEN_OFF 10
50 #define UARTFIFO_TXOFE 0x00000200
51 #define UARTFIFO_RXUFE 0x00000100
52 #define UARTFIFO_TXFE 0x00000080
54 #define UARTWATER_TXWATER_OFF 0
55 #define UARTWATER_RXWATER_OFF 16
57 +#define UARTFIFO_RXIDEN_RDRF 0x3
58 +#define UARTCTRL_IDLECFG 0x7
60 /* Rx DMA timeout in ms, which is used to calculate Rx ring buffer size */
61 #define DMA_RX_TIMEOUT (10)
63 @@ -253,6 +260,9 @@ struct lpuart_port {
64 unsigned int txfifo_size;
65 unsigned int rxfifo_size;
70 bool lpuart_dma_tx_use;
71 bool lpuart_dma_rx_use;
72 struct dma_chan *dma_tx_chan;
73 @@ -278,28 +288,38 @@ struct lpuart_soc_data {
74 enum lpuart_type devtype;
81 static const struct lpuart_soc_data vf_data = {
82 .devtype = VF610_LPUART,
85 + .rx_dma_cyclic = true,
88 static const struct lpuart_soc_data ls_data = {
89 .devtype = LS1021A_LPUART,
90 .iotype = UPIO_MEM32BE,
92 + .rx_dma_cyclic = true,
95 static struct lpuart_soc_data imx7ulp_data = {
96 .devtype = IMX7ULP_LPUART,
98 .reg_off = IMX_REG_OFF,
100 + .rx_dma_cyclic = true,
103 static struct lpuart_soc_data imx8qxp_data = {
104 .devtype = IMX8QXP_LPUART,
105 .iotype = UPIO_MEM32,
106 .reg_off = IMX_REG_OFF,
107 + .rx_watermark = 31,
108 + .rx_dma_cyclic = false,
111 static const struct of_device_id lpuart_dt_ids[] = {
112 @@ -313,6 +333,7 @@ MODULE_DEVICE_TABLE(of, lpuart_dt_ids);
114 /* Forward declare this for the dma callbacks*/
115 static void lpuart_dma_tx_complete(void *arg);
116 +static int lpuart_sched_rx_dma(struct lpuart_port *sport);
118 static inline bool is_imx8qxp_lpuart(struct lpuart_port *sport)
120 @@ -1000,19 +1021,15 @@ static irqreturn_t lpuart32_int(int irq,
121 if ((sts & UARTSTAT_TDRE) && !sport->lpuart_dma_tx_use)
122 lpuart32_txint(sport);
124 + if (sport->lpuart_dma_rx_use && sport->dma_eeop)
125 + sts &= ~UARTSTAT_IDLE;
127 lpuart32_write(&sport->port, sts, UARTSTAT);
131 -static void lpuart_copy_rx_to_tty(struct lpuart_port *sport)
132 +static void lpuart_rx_error_stat(struct lpuart_port *sport)
134 - struct tty_port *port = &sport->port.state->port;
135 - struct dma_tx_state state;
136 - enum dma_status dmastat;
137 - struct circ_buf *ring = &sport->rx_ring;
138 - unsigned long flags;
141 if (lpuart_is_32(sport)) {
142 unsigned long sr = lpuart32_read(&sport->port, UARTSTAT);
144 @@ -1064,8 +1081,21 @@ static void lpuart_copy_rx_to_tty(struct
145 writeb(cr2, sport->port.membase + UARTCR2);
150 +static void lpuart_copy_rx_to_tty(struct lpuart_port *sport)
152 + struct tty_port *port = &sport->port.state->port;
153 + struct dma_tx_state state;
154 + enum dma_status dmastat;
155 + struct circ_buf *ring = &sport->rx_ring;
156 + unsigned long flags;
159 - async_tx_ack(sport->dma_rx_desc);
160 + if (!is_imx8qxp_lpuart(sport)) {
161 + lpuart_rx_error_stat(sport);
162 + async_tx_ack(sport->dma_rx_desc);
165 spin_lock_irqsave(&sport->port.lock, flags);
167 @@ -1128,7 +1158,33 @@ static void lpuart_copy_rx_to_tty(struct
168 spin_unlock_irqrestore(&sport->port.lock, flags);
170 tty_flip_buffer_push(port);
171 - mod_timer(&sport->lpuart_timer, jiffies + sport->dma_rx_timeout);
173 + if (!sport->dma_eeop)
174 + mod_timer(&sport->lpuart_timer,
175 + jiffies + sport->dma_rx_timeout);
178 +static void lpuart_dma_rx_post_handler(struct lpuart_port *sport)
180 + unsigned long flags;
181 + unsigned long rxcount;
183 + spin_lock_irqsave(&sport->port.lock, flags);
185 + /* For end of packet, clear the idle flag to avoid to trigger
186 + * the next transfer. Only i.MX8x lpuart support EEOP.
188 + if (sport->dma_eeop && lpuart_is_32(sport)) {
189 + rxcount = lpuart32_read(&sport->port, UARTWATER);
190 + rxcount = rxcount >> UARTWATER_RXCNT_OFF;
192 + lpuart32_write(&sport->port, UARTSTAT_IDLE, UARTSTAT);
195 + lpuart_sched_rx_dma(sport);
197 + spin_unlock_irqrestore(&sport->port.lock, flags);
201 static void lpuart_dma_rx_complete(void *arg)
202 @@ -1136,6 +1192,8 @@ static void lpuart_dma_rx_complete(void
203 struct lpuart_port *sport = arg;
205 lpuart_copy_rx_to_tty(sport);
206 + if (!sport->rx_dma_cyclic)
207 + lpuart_dma_rx_post_handler(sport);
210 static void lpuart_timer_func(struct timer_list *t)
211 @@ -1143,13 +1201,78 @@ static void lpuart_timer_func(struct tim
212 struct lpuart_port *sport = from_timer(sport, t, lpuart_timer);
214 lpuart_copy_rx_to_tty(sport);
215 + if (!sport->rx_dma_cyclic) {
216 + dmaengine_terminate_async(sport->dma_rx_chan);
217 + lpuart_dma_rx_post_handler(sport);
221 -static inline int lpuart_start_rx_dma(struct lpuart_port *sport)
222 +static int lpuart_sched_rxdma_cyclic(struct lpuart_port *sport)
224 + sport->dma_rx_desc = dmaengine_prep_dma_cyclic(sport->dma_rx_chan,
225 + sg_dma_address(&sport->rx_sgl),
226 + sport->rx_sgl.length,
227 + sport->rx_sgl.length / 2,
229 + DMA_PREP_INTERRUPT);
230 + if (!sport->dma_rx_desc) {
231 + dev_err(sport->port.dev, "Cannot prepare cyclic DMA\n");
238 +static int lpuart_sched_rxdma_slave_sg(struct lpuart_port *sport)
240 + dma_sync_sg_for_device(sport->port.dev, &sport->rx_sgl, 1,
242 + sport->dma_rx_desc = dmaengine_prep_slave_sg(sport->dma_rx_chan,
246 + DMA_PREP_INTERRUPT);
247 + if (!sport->dma_rx_desc) {
248 + dev_err(sport->port.dev, "Cannot prepare slave_sg DMA\n");
251 + sport->rx_ring.tail = 0;
252 + sport->rx_ring.head = 0;
257 +static int lpuart_sched_rx_dma(struct lpuart_port *sport)
259 + unsigned long temp;
262 + if (sport->rx_dma_cyclic)
263 + ret = lpuart_sched_rxdma_cyclic(sport);
265 + ret = lpuart_sched_rxdma_slave_sg(sport);
267 + sport->dma_rx_desc->callback = lpuart_dma_rx_complete;
268 + sport->dma_rx_desc->callback_param = sport;
269 + sport->dma_rx_cookie = dmaengine_submit(sport->dma_rx_desc);
270 + dma_async_issue_pending(sport->dma_rx_chan);
272 + if (lpuart_is_32(sport)) {
273 + temp = lpuart32_read(&sport->port, UARTBAUD);
274 + if (sport->dma_eeop)
275 + temp |= UARTBAUD_RIDMAE;
276 + temp |= UARTBAUD_RDMAE;
277 + lpuart32_write(&sport->port, temp, UARTBAUD);
279 + writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_RDMAS,
280 + sport->port.membase + UARTCR5);
286 +static void lpuart_get_rx_dma_rng_len(struct lpuart_port *sport)
288 - struct dma_slave_config dma_rx_sconfig = {};
289 - struct circ_buf *ring = &sport->rx_ring;
292 struct tty_port *port = &sport->port.state->port;
293 struct tty_struct *tty = port->tty;
294 @@ -1169,6 +1292,18 @@ static inline int lpuart_start_rx_dma(st
295 sport->rx_dma_rng_buf_len = (1 << (fls(sport->rx_dma_rng_buf_len) - 1));
296 if (sport->rx_dma_rng_buf_len < 16)
297 sport->rx_dma_rng_buf_len = 16;
300 +static inline int lpuart_start_rx_dma(struct lpuart_port *sport)
302 + struct dma_slave_config dma_rx_sconfig = {};
303 + struct circ_buf *ring = &sport->rx_ring;
306 + if (!sport->dma_eeop)
307 + lpuart_get_rx_dma_rng_len(sport);
309 + sport->rx_dma_rng_buf_len = PAGE_SIZE;
311 ring->buf = kzalloc(sport->rx_dma_rng_buf_len, GFP_ATOMIC);
313 @@ -1194,32 +1329,7 @@ static inline int lpuart_start_rx_dma(st
317 - sport->dma_rx_desc = dmaengine_prep_dma_cyclic(sport->dma_rx_chan,
318 - sg_dma_address(&sport->rx_sgl),
319 - sport->rx_sgl.length,
320 - sport->rx_sgl.length / 2,
322 - DMA_PREP_INTERRUPT);
323 - if (!sport->dma_rx_desc) {
324 - dev_err(sport->port.dev, "Cannot prepare cyclic DMA\n");
328 - sport->dma_rx_desc->callback = lpuart_dma_rx_complete;
329 - sport->dma_rx_desc->callback_param = sport;
330 - sport->dma_rx_cookie = dmaengine_submit(sport->dma_rx_desc);
331 - dma_async_issue_pending(sport->dma_rx_chan);
333 - if (lpuart_is_32(sport)) {
334 - unsigned long temp = lpuart32_read(&sport->port, UARTBAUD);
336 - lpuart32_write(&sport->port, temp | UARTBAUD_RDMAE, UARTBAUD);
338 - writeb(readb(sport->port.membase + UARTCR5) | UARTCR5_RDMAS,
339 - sport->port.membase + UARTCR5);
343 + return lpuart_sched_rx_dma(sport);
346 static void lpuart_dma_rx_free(struct uart_port *port)
347 @@ -1405,8 +1515,10 @@ static void lpuart_setup_watermark(struc
348 writeb(UARTSFIFO_RXUF, sport->port.membase + UARTSFIFO);
351 + if (uart_console(&sport->port))
352 + sport->rx_watermark = 1;
353 writeb(0, sport->port.membase + UARTTWFIFO);
354 - writeb(1, sport->port.membase + UARTRWFIFO);
355 + writeb(sport->rx_watermark, sport->port.membase + UARTRWFIFO);
358 writeb(cr2_saved, sport->port.membase + UARTCR2);
359 @@ -1427,6 +1539,7 @@ static void lpuart32_setup_watermark(str
361 unsigned long val, ctrl;
362 unsigned long ctrl_saved;
363 + unsigned long rxiden_cnt;
365 ctrl = lpuart32_read(&sport->port, UARTCTRL);
367 @@ -1438,12 +1551,26 @@ static void lpuart32_setup_watermark(str
368 val = lpuart32_read(&sport->port, UARTFIFO);
369 val |= UARTFIFO_TXFE | UARTFIFO_RXFE;
370 val |= UARTFIFO_TXFLUSH | UARTFIFO_RXFLUSH;
371 + val &= ~(UARTFIFO_RXIDEN_MASK << UARTFIFO_RXIDEN_OFF);
372 + rxiden_cnt = sport->dma_eeop ? 0 : UARTFIFO_RXIDEN_RDRF;
373 + val |= ((rxiden_cnt & UARTFIFO_RXIDEN_MASK) <<
374 + UARTFIFO_RXIDEN_OFF);
375 lpuart32_write(&sport->port, val, UARTFIFO);
377 /* set the watermark */
378 - val = (0x1 << UARTWATER_RXWATER_OFF) | (0x0 << UARTWATER_TXWATER_OFF);
379 + if (uart_console(&sport->port))
380 + sport->rx_watermark = 1;
381 + val = (sport->rx_watermark << UARTWATER_RXWATER_OFF) |
382 + (0x0 << UARTWATER_TXWATER_OFF);
383 lpuart32_write(&sport->port, val, UARTWATER);
385 + /* set RTS watermark */
386 + if (!uart_console(&sport->port)) {
387 + val = lpuart32_read(&sport->port, UARTMODIR);
388 + val = (sport->rxfifo_size >> 1) << UARTMODIR_RTSWATER_S;
389 + lpuart32_write(&sport->port, val, UARTMODIR);
393 lpuart32_write(&sport->port, ctrl_saved, UARTCTRL);
395 @@ -1455,17 +1582,29 @@ static void lpuart32_setup_watermark_ena
396 lpuart32_setup_watermark(sport);
398 temp = lpuart32_read(&sport->port, UARTCTRL);
399 - temp |= UARTCTRL_RE | UARTCTRL_TE | UARTCTRL_ILIE;
400 + temp |= UARTCTRL_RE | UARTCTRL_TE;
401 + temp |= UARTCTRL_IDLECFG << UARTCTRL_IDLECFG_OFF;
402 lpuart32_write(&sport->port, temp, UARTCTRL);
405 static void rx_dma_timer_init(struct lpuart_port *sport)
407 + if (sport->dma_eeop)
410 timer_setup(&sport->lpuart_timer, lpuart_timer_func, 0);
411 sport->lpuart_timer.expires = jiffies + sport->dma_rx_timeout;
412 add_timer(&sport->lpuart_timer);
415 +static void lpuart_del_timer_sync(struct lpuart_port *sport)
417 + if (sport->dma_eeop)
420 + del_timer_sync(&sport->lpuart_timer);
423 static void lpuart_tx_dma_startup(struct lpuart_port *sport)
426 @@ -1529,19 +1668,23 @@ static int lpuart_startup(struct uart_po
430 +static void lpuart32_hw_disable(struct lpuart_port *sport)
432 + unsigned long temp;
434 + temp = lpuart32_read(&sport->port, UARTCTRL);
435 + temp &= ~(UARTCTRL_RIE | UARTCTRL_ILIE | UARTCTRL_RE |
436 + UARTCTRL_TIE | UARTCTRL_TE);
437 + lpuart32_write(&sport->port, temp, UARTCTRL);
440 static void lpuart32_configure(struct lpuart_port *sport)
444 - if (sport->lpuart_dma_rx_use) {
445 - /* RXWATER must be 0 */
446 - temp = lpuart32_read(&sport->port, UARTWATER);
447 - temp &= ~(UARTWATER_WATER_MASK << UARTWATER_RXWATER_OFF);
448 - lpuart32_write(&sport->port, temp, UARTWATER);
450 temp = lpuart32_read(&sport->port, UARTCTRL);
451 if (!sport->lpuart_dma_rx_use)
452 - temp |= UARTCTRL_RIE;
453 + temp |= UARTCTRL_RIE | UARTCTRL_ILIE;
454 if (!sport->lpuart_dma_tx_use)
455 temp |= UARTCTRL_TIE;
456 lpuart32_write(&sport->port, temp, UARTCTRL);
457 @@ -1574,12 +1717,12 @@ static int lpuart32_startup(struct uart_
459 spin_lock_irqsave(&sport->port.lock, flags);
461 - lpuart32_setup_watermark_enable(sport);
463 + lpuart32_hw_disable(sport);
465 lpuart_rx_dma_startup(sport);
466 lpuart_tx_dma_startup(sport);
468 + lpuart32_setup_watermark_enable(sport);
469 lpuart32_configure(sport);
471 spin_unlock_irqrestore(&sport->port.lock, flags);
472 @@ -1589,7 +1732,7 @@ static int lpuart32_startup(struct uart_
473 static void lpuart_dma_shutdown(struct lpuart_port *sport)
475 if (sport->lpuart_dma_rx_use) {
476 - del_timer_sync(&sport->lpuart_timer);
477 + lpuart_del_timer_sync(sport);
478 lpuart_dma_rx_free(&sport->port);
481 @@ -1630,11 +1773,22 @@ static void lpuart32_shutdown(struct uar
483 spin_lock_irqsave(&port->lock, flags);
486 + temp = lpuart32_read(&sport->port, UARTSTAT);
487 + lpuart32_write(&sport->port, temp, UARTSTAT);
489 + /* disable Rx/Tx DMA */
490 + temp = lpuart32_read(port, UARTBAUD);
491 + temp &= ~(UARTBAUD_TDMAE | UARTBAUD_RDMAE | UARTBAUD_RIDMAE);
492 + lpuart32_write(port, temp, UARTBAUD);
494 /* disable Rx/Tx and interrupts */
495 temp = lpuart32_read(port, UARTCTRL);
496 - temp &= ~(UARTCTRL_TE | UARTCTRL_RE |
497 - UARTCTRL_TIE | UARTCTRL_TCIE | UARTCTRL_RIE);
498 + temp &= ~(UARTCTRL_TE | UARTCTRL_RE | UARTCTRL_TIE |
499 + UARTCTRL_TCIE | UARTCTRL_RIE | UARTCTRL_ILIE |
501 lpuart32_write(port, temp, UARTCTRL);
502 + lpuart32_write(port, 0, UARTMODIR);
504 spin_unlock_irqrestore(&port->lock, flags);
506 @@ -1731,10 +1885,10 @@ lpuart_set_termios(struct uart_port *por
507 * baud rate and restart Rx DMA path.
509 * Since timer function acqures sport->port.lock, need to stop before
510 - * acquring same lock because otherwise del_timer_sync() can deadlock.
511 + * acquring same lock because otherwise lpuart_del_timer_sync() can deadlock.
513 if (old && sport->lpuart_dma_rx_use) {
514 - del_timer_sync(&sport->lpuart_timer);
515 + lpuart_del_timer_sync(sport);
516 lpuart_dma_rx_free(&sport->port);
519 @@ -1946,10 +2100,10 @@ lpuart32_set_termios(struct uart_port *p
520 * baud rate and restart Rx DMA path.
522 * Since timer function acqures sport->port.lock, need to stop before
523 - * acquring same lock because otherwise del_timer_sync() can deadlock.
524 + * acquring same lock because otherwise lpuart_del_timer_sync() can deadlock.
526 if (old && sport->lpuart_dma_rx_use) {
527 - del_timer_sync(&sport->lpuart_timer);
528 + lpuart_del_timer_sync(sport);
529 lpuart_dma_rx_free(&sport->port);
532 @@ -2458,6 +2612,10 @@ static int lpuart_probe(struct platform_
533 sport->port.dev = &pdev->dev;
534 sport->port.type = PORT_LPUART;
535 sport->devtype = sdata->devtype;
536 + sport->rx_dma_cyclic = sdata->rx_dma_cyclic;
537 + sport->rx_watermark = sdata->rx_watermark;
538 + sport->dma_eeop = is_imx8qxp_lpuart(sport);
540 ret = platform_get_irq(pdev, 0);
543 @@ -2620,7 +2778,7 @@ static int lpuart_suspend(struct device
544 * Rx DMA path before suspend and start Rx DMA path on resume.
547 - del_timer_sync(&sport->lpuart_timer);
548 + lpuart_del_timer_sync(sport);
549 lpuart_dma_rx_free(&sport->port);