Lines matching refs: uap (drivers/tty/serial/amba-pl011.c, ARM AMBA PL011 serial driver)
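
As a reading aid, here is a sketch of the uart_amba_port fields the matches below touch; it is reconstructed from the references themselves rather than copied from the driver, so the real declaration may order or type things differently.

    /* Sketch only: field set inferred from the references below. */
    struct uart_amba_port {
            struct uart_port        port;           /* uap->port.* throughout */
            struct clk              *clk;
            const struct vendor_data *vendor;       /* ifls, oversampling, dma_threshold, ... */
            unsigned int            dmacr;          /* shadow of UART011_DMACR */
            unsigned int            im;             /* shadow of UART011_IMSC */
            unsigned int            old_status;     /* last modem-status snapshot */
            unsigned int            fifosize;       /* vendor-specific FIFO depth */
            unsigned int            lcrh_tx, lcrh_rx;
            unsigned int            old_cr;         /* CR saved across shutdown */
            struct delayed_work     tx_softirq_work;
            bool                    autorts;
            unsigned int            tx_irq_seen;
            char                    type[12];
            bool                    using_tx_dma, using_rx_dma, dma_probed;
            struct pl011_dmarx_data dmarx;          /* chan, sgbuf_a/b, poll_*, timer, ... */
            struct pl011_dmatx_data dmatx;          /* chan, sg, buf, queued */
    };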

179 static int pl011_fifo_to_tty(struct uart_amba_port *uap)  in pl011_fifo_to_tty()  argument
186 status = readw(uap->port.membase + UART01x_FR); in pl011_fifo_to_tty()
191 ch = readw(uap->port.membase + UART01x_DR) | in pl011_fifo_to_tty()
194 uap->port.icount.rx++; in pl011_fifo_to_tty()
200 uap->port.icount.brk++; in pl011_fifo_to_tty()
201 if (uart_handle_break(&uap->port)) in pl011_fifo_to_tty()
204 uap->port.icount.parity++; in pl011_fifo_to_tty()
206 uap->port.icount.frame++; in pl011_fifo_to_tty()
208 uap->port.icount.overrun++; in pl011_fifo_to_tty()
210 ch &= uap->port.read_status_mask; in pl011_fifo_to_tty()
220 if (uart_handle_sysrq_char(&uap->port, ch & 255)) in pl011_fifo_to_tty()
223 uart_insert_char(&uap->port, ch, UART011_DR_OE, ch, flag); in pl011_fifo_to_tty()
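
Read top to bottom, the pl011_fifo_to_tty() matches describe the PIO receive drain; a condensed sketch (not the verbatim function, the error-to-flag mapping is abbreviated):

    /* Sketch of the receive drain implied by the matches above. */
    static int pl011_fifo_to_tty(struct uart_amba_port *uap)
    {
            unsigned int ch, flag;
            int fifotaken = 0, max_count = 256;     /* bounded so the ISR cannot spin */

            while (max_count--) {
                    u16 status = readw(uap->port.membase + UART01x_FR);

                    if (status & UART01x_FR_RXFE)
                            break;

                    /* Data byte plus receiver-status bits in a single read;
                     * UART_DUMMY_DR_RX is the driver's "valid entry" marker. */
                    ch = readw(uap->port.membase + UART01x_DR) | UART_DUMMY_DR_RX;
                    flag = TTY_NORMAL;
                    uap->port.icount.rx++;
                    fifotaken++;

                    if (unlikely(ch & UART_DR_ERROR)) {
                            if (ch & UART011_DR_BE) {
                                    uap->port.icount.brk++;
                                    if (uart_handle_break(&uap->port))
                                            continue;
                            } else if (ch & UART011_DR_PE)
                                    uap->port.icount.parity++;
                            else if (ch & UART011_DR_FE)
                                    uap->port.icount.frame++;
                            if (ch & UART011_DR_OE)
                                    uap->port.icount.overrun++;

                            ch &= uap->port.read_status_mask;
                            /* map the surviving BE/PE/FE bit to TTY_BREAK,
                             * TTY_PARITY or TTY_FRAME in 'flag' here */
                    }

                    if (uart_handle_sysrq_char(&uap->port, ch & 255))
                            continue;

                    uart_insert_char(&uap->port, ch, UART011_DR_OE, ch, flag);
            }

            return fifotaken;
    }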
268 static void pl011_dma_probe(struct uart_amba_port *uap) in pl011_dma_probe() argument
271 struct amba_pl011_data *plat = dev_get_platdata(uap->port.dev); in pl011_dma_probe()
272 struct device *dev = uap->port.dev; in pl011_dma_probe()
274 .dst_addr = uap->port.mapbase + UART01x_DR, in pl011_dma_probe()
277 .dst_maxburst = uap->fifosize >> 1, in pl011_dma_probe()
283 uap->dma_probed = true; in pl011_dma_probe()
287 uap->dma_probed = false; in pl011_dma_probe()
293 dev_info(uap->port.dev, "no DMA platform data\n"); in pl011_dma_probe()
304 dev_err(uap->port.dev, "no TX DMA channel!\n"); in pl011_dma_probe()
310 uap->dmatx.chan = chan; in pl011_dma_probe()
312 dev_info(uap->port.dev, "DMA channel TX %s\n", in pl011_dma_probe()
313 dma_chan_name(uap->dmatx.chan)); in pl011_dma_probe()
322 dev_err(uap->port.dev, "no RX DMA channel!\n"); in pl011_dma_probe()
329 .src_addr = uap->port.mapbase + UART01x_DR, in pl011_dma_probe()
332 .src_maxburst = uap->fifosize >> 2, in pl011_dma_probe()
346 dev_info(uap->port.dev, in pl011_dma_probe()
352 uap->dmarx.chan = chan; in pl011_dma_probe()
354 uap->dmarx.auto_poll_rate = false; in pl011_dma_probe()
358 uap->dmarx.auto_poll_rate = false; in pl011_dma_probe()
359 uap->dmarx.poll_rate = plat->dma_rx_poll_rate; in pl011_dma_probe()
366 uap->dmarx.auto_poll_rate = true; in pl011_dma_probe()
367 uap->dmarx.poll_rate = 100; in pl011_dma_probe()
371 uap->dmarx.poll_timeout = in pl011_dma_probe()
374 uap->dmarx.poll_timeout = 3000; in pl011_dma_probe()
376 uap->dmarx.auto_poll_rate = of_property_read_bool( in pl011_dma_probe()
378 if (uap->dmarx.auto_poll_rate) { in pl011_dma_probe()
383 uap->dmarx.poll_rate = x; in pl011_dma_probe()
385 uap->dmarx.poll_rate = 100; in pl011_dma_probe()
388 uap->dmarx.poll_timeout = x; in pl011_dma_probe()
390 uap->dmarx.poll_timeout = 3000; in pl011_dma_probe()
393 dev_info(uap->port.dev, "DMA channel RX %s\n", in pl011_dma_probe()
394 dma_chan_name(uap->dmarx.chan)); in pl011_dma_probe()
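
The .dst_addr/.src_addr, buswidth and maxburst references above amount to the usual dmaengine slave configuration for a byte-wide FIFO; roughly (a sketch, not the driver's verbatim probe code):

    struct device *dev = uap->port.dev;
    struct dma_chan *chan;

    struct dma_slave_config tx_conf = {
            .dst_addr       = uap->port.mapbase + UART01x_DR,
            .dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
            .direction      = DMA_MEM_TO_DEV,
            .dst_maxburst   = uap->fifosize >> 1,   /* burst when the FIFO is half empty */
            .device_fc      = false,
    };
    struct dma_slave_config rx_conf = {
            .src_addr       = uap->port.mapbase + UART01x_DR,
            .src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
            .direction      = DMA_DEV_TO_MEM,
            .src_maxburst   = uap->fifosize >> 2,   /* shallower burst on the RX side */
            .device_fc      = false,
    };

    chan = dma_request_slave_channel(dev, "tx");    /* or via plat->dma_filter */
    if (chan) {
            dmaengine_slave_config(chan, &tx_conf);
            uap->dmatx.chan = chan;
    }
    /* the RX channel is requested and configured the same way with rx_conf */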
398 static void pl011_dma_remove(struct uart_amba_port *uap) in pl011_dma_remove() argument
400 if (uap->dmatx.chan) in pl011_dma_remove()
401 dma_release_channel(uap->dmatx.chan); in pl011_dma_remove()
402 if (uap->dmarx.chan) in pl011_dma_remove()
403 dma_release_channel(uap->dmarx.chan); in pl011_dma_remove()
407 static int pl011_dma_tx_refill(struct uart_amba_port *uap);
408 static void pl011_start_tx_pio(struct uart_amba_port *uap);
416 struct uart_amba_port *uap = data; in pl011_dma_tx_callback() local
417 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_callback()
421 spin_lock_irqsave(&uap->port.lock, flags); in pl011_dma_tx_callback()
422 if (uap->dmatx.queued) in pl011_dma_tx_callback()
426 dmacr = uap->dmacr; in pl011_dma_tx_callback()
427 uap->dmacr = dmacr & ~UART011_TXDMAE; in pl011_dma_tx_callback()
428 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_tx_callback()
439 if (!(dmacr & UART011_TXDMAE) || uart_tx_stopped(&uap->port) || in pl011_dma_tx_callback()
440 uart_circ_empty(&uap->port.state->xmit)) { in pl011_dma_tx_callback()
441 uap->dmatx.queued = false; in pl011_dma_tx_callback()
442 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_tx_callback()
446 if (pl011_dma_tx_refill(uap) <= 0) in pl011_dma_tx_callback()
451 pl011_start_tx_pio(uap); in pl011_dma_tx_callback()
453 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_tx_callback()
464 static int pl011_dma_tx_refill(struct uart_amba_port *uap) in pl011_dma_tx_refill() argument
466 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_refill()
470 struct circ_buf *xmit = &uap->port.state->xmit; in pl011_dma_tx_refill()
480 if (count < (uap->fifosize >> 1)) { in pl011_dma_tx_refill()
481 uap->dmatx.queued = false; in pl011_dma_tx_refill()
513 uap->dmatx.queued = false; in pl011_dma_tx_refill()
514 dev_dbg(uap->port.dev, "unable to map TX DMA\n"); in pl011_dma_tx_refill()
522 uap->dmatx.queued = false; in pl011_dma_tx_refill()
527 dev_dbg(uap->port.dev, "TX DMA busy\n"); in pl011_dma_tx_refill()
533 desc->callback_param = uap; in pl011_dma_tx_refill()
541 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_refill()
542 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_tx_refill()
543 uap->dmatx.queued = true; in pl011_dma_tx_refill()
550 uap->port.icount.tx += count; in pl011_dma_tx_refill()
553 uart_write_wakeup(&uap->port); in pl011_dma_tx_refill()
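
pl011_dma_tx_refill() is the standard dmaengine transmit pattern: bounce the circ buffer into a DMA-safe buffer, map it, queue a slave_sg descriptor with a completion callback, then let the UART pull data by setting TXDMAE. A condensed sketch (the circular-buffer copy and some error paths are elided):

    static int pl011_dma_tx_refill(struct uart_amba_port *uap)
    {
            struct pl011_dmatx_data *dmatx = &uap->dmatx;
            struct dma_chan *chan = dmatx->chan;
            struct circ_buf *xmit = &uap->port.state->xmit;
            struct dma_async_tx_descriptor *desc;
            unsigned int count;

            count = uart_circ_chars_pending(xmit);
            if (count < (uap->fifosize >> 1)) {
                    uap->dmatx.queued = false;      /* too little data: use PIO instead */
                    return 0;
            }
            if (count > PL011_DMA_BUFFER_SIZE)
                    count = PL011_DMA_BUFFER_SIZE;

            /* copy 'count' bytes from xmit->buf into dmatx->buf and set
             * dmatx->sg.length accordingly (two memcpys if the data wraps) */

            if (dma_map_sg(chan->device->dev, &dmatx->sg, 1, DMA_TO_DEVICE) != 1) {
                    uap->dmatx.queued = false;
                    dev_dbg(uap->port.dev, "unable to map TX DMA\n");
                    return -EBUSY;
            }

            desc = dmaengine_prep_slave_sg(chan, &dmatx->sg, 1, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!desc) {
                    dma_unmap_sg(chan->device->dev, &dmatx->sg, 1, DMA_TO_DEVICE);
                    uap->dmatx.queued = false;
                    dev_dbg(uap->port.dev, "TX DMA busy\n");
                    return -EBUSY;
            }

            desc->callback = pl011_dma_tx_callback;
            desc->callback_param = uap;

            dmaengine_submit(desc);
            dma_async_issue_pending(chan);

            uap->dmacr |= UART011_TXDMAE;
            writew(uap->dmacr, uap->port.membase + UART011_DMACR);
            uap->dmatx.queued = true;

            xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1);
            uap->port.icount.tx += count;

            if (uart_circ_chars_pending(xmit) < WAKEUP_CHARS)
                    uart_write_wakeup(&uap->port);

            return 1;
    }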
566 static bool pl011_dma_tx_irq(struct uart_amba_port *uap) in pl011_dma_tx_irq() argument
568 if (!uap->using_tx_dma) in pl011_dma_tx_irq()
576 if (uap->dmatx.queued) { in pl011_dma_tx_irq()
577 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_irq()
578 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_tx_irq()
579 uap->im &= ~UART011_TXIM; in pl011_dma_tx_irq()
580 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_dma_tx_irq()
588 if (pl011_dma_tx_refill(uap) > 0) { in pl011_dma_tx_irq()
589 uap->im &= ~UART011_TXIM; in pl011_dma_tx_irq()
590 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_dma_tx_irq()
600 static inline void pl011_dma_tx_stop(struct uart_amba_port *uap) in pl011_dma_tx_stop() argument
602 if (uap->dmatx.queued) { in pl011_dma_tx_stop()
603 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_tx_stop()
604 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_tx_stop()
616 static inline bool pl011_dma_tx_start(struct uart_amba_port *uap) in pl011_dma_tx_start() argument
620 if (!uap->using_tx_dma) in pl011_dma_tx_start()
623 if (!uap->port.x_char) { in pl011_dma_tx_start()
627 if (!uap->dmatx.queued) { in pl011_dma_tx_start()
628 if (pl011_dma_tx_refill(uap) > 0) { in pl011_dma_tx_start()
629 uap->im &= ~UART011_TXIM; in pl011_dma_tx_start()
630 writew(uap->im, uap->port.membase + in pl011_dma_tx_start()
634 } else if (!(uap->dmacr & UART011_TXDMAE)) { in pl011_dma_tx_start()
635 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_start()
636 writew(uap->dmacr, in pl011_dma_tx_start()
637 uap->port.membase + UART011_DMACR); in pl011_dma_tx_start()
646 dmacr = uap->dmacr; in pl011_dma_tx_start()
647 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_tx_start()
648 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_tx_start()
650 if (readw(uap->port.membase + UART01x_FR) & UART01x_FR_TXFF) { in pl011_dma_tx_start()
659 writew(uap->port.x_char, uap->port.membase + UART01x_DR); in pl011_dma_tx_start()
660 uap->port.icount.tx++; in pl011_dma_tx_start()
661 uap->port.x_char = 0; in pl011_dma_tx_start()
664 uap->dmacr = dmacr; in pl011_dma_tx_start()
665 writew(dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_tx_start()
675 __releases(&uap->port.lock) in pl011_dma_flush_buffer()
676 __acquires(&uap->port.lock) in pl011_dma_flush_buffer()
678 struct uart_amba_port *uap = in pl011_dma_flush_buffer() local
681 if (!uap->using_tx_dma) in pl011_dma_flush_buffer()
685 spin_unlock(&uap->port.lock); in pl011_dma_flush_buffer()
686 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_flush_buffer()
687 spin_lock(&uap->port.lock); in pl011_dma_flush_buffer()
688 if (uap->dmatx.queued) { in pl011_dma_flush_buffer()
689 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_flush_buffer()
691 uap->dmatx.queued = false; in pl011_dma_flush_buffer()
692 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_flush_buffer()
693 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_flush_buffer()
699 static int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap) in pl011_dma_rx_trigger_dma() argument
701 struct dma_chan *rxchan = uap->dmarx.chan; in pl011_dma_rx_trigger_dma()
702 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_trigger_dma()
710 sgbuf = uap->dmarx.use_buf_b ? in pl011_dma_rx_trigger_dma()
711 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_trigger_dma()
721 uap->dmarx.running = false; in pl011_dma_rx_trigger_dma()
728 desc->callback_param = uap; in pl011_dma_rx_trigger_dma()
732 uap->dmacr |= UART011_RXDMAE; in pl011_dma_rx_trigger_dma()
733 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_rx_trigger_dma()
734 uap->dmarx.running = true; in pl011_dma_rx_trigger_dma()
736 uap->im &= ~UART011_RXIM; in pl011_dma_rx_trigger_dma()
737 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_dma_rx_trigger_dma()
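
The receive side mirrors that pattern: pl011_dma_rx_trigger_dma() queues a transfer into one of two ping-pong buffers, enables RXDMAE and masks the per-character RX interrupt. A sketch built from the matches above:

    static int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap)
    {
            struct pl011_dmarx_data *dmarx = &uap->dmarx;
            struct dma_chan *rxchan = dmarx->chan;
            struct dma_async_tx_descriptor *desc;
            struct pl011_sgbuf *sgbuf;

            if (!rxchan)
                    return -EIO;

            /* Alternate between the two ping-pong buffers */
            sgbuf = dmarx->use_buf_b ? &dmarx->sgbuf_b : &dmarx->sgbuf_a;

            desc = dmaengine_prep_slave_sg(rxchan, &sgbuf->sg, 1, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!desc) {
                    dmarx->running = false;
                    dmaengine_terminate_all(rxchan);
                    return -EBUSY;
            }

            desc->callback = pl011_dma_rx_callback;
            desc->callback_param = uap;
            dmaengine_submit(desc);
            dma_async_issue_pending(rxchan);

            uap->dmacr |= UART011_RXDMAE;
            writew(uap->dmacr, uap->port.membase + UART011_DMACR);
            dmarx->running = true;

            /* Incoming data now arrives by DMA; mask the RX char interrupt */
            uap->im &= ~UART011_RXIM;
            writew(uap->im, uap->port.membase + UART011_IMSC);

            return 0;
    }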
747 static void pl011_dma_rx_chars(struct uart_amba_port *uap, in pl011_dma_rx_chars() argument
751 struct tty_port *port = &uap->port.state->port; in pl011_dma_rx_chars()
753 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_chars()
757 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_chars()
760 if (uap->dmarx.poll_rate) { in pl011_dma_rx_chars()
779 uap->port.icount.rx += dma_count; in pl011_dma_rx_chars()
781 dev_warn(uap->port.dev, in pl011_dma_rx_chars()
786 if (uap->dmarx.poll_rate) in pl011_dma_rx_chars()
796 uap->port.membase + UART011_ICR); in pl011_dma_rx_chars()
809 fifotaken = pl011_fifo_to_tty(uap); in pl011_dma_rx_chars()
812 spin_unlock(&uap->port.lock); in pl011_dma_rx_chars()
813 dev_vdbg(uap->port.dev, in pl011_dma_rx_chars()
817 spin_lock(&uap->port.lock); in pl011_dma_rx_chars()
820 static void pl011_dma_rx_irq(struct uart_amba_port *uap) in pl011_dma_rx_irq() argument
822 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_irq()
836 dev_err(uap->port.dev, "unable to pause DMA transfer\n"); in pl011_dma_rx_irq()
840 dev_err(uap->port.dev, "unable to pause DMA transfer\n"); in pl011_dma_rx_irq()
843 uap->dmacr &= ~UART011_RXDMAE; in pl011_dma_rx_irq()
844 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_rx_irq()
845 uap->dmarx.running = false; in pl011_dma_rx_irq()
856 pl011_dma_rx_chars(uap, pending, dmarx->use_buf_b, true); in pl011_dma_rx_irq()
860 if (pl011_dma_rx_trigger_dma(uap)) { in pl011_dma_rx_irq()
861 dev_dbg(uap->port.dev, "could not retrigger RX DMA job " in pl011_dma_rx_irq()
863 uap->im |= UART011_RXIM; in pl011_dma_rx_irq()
864 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_dma_rx_irq()
870 struct uart_amba_port *uap = data; in pl011_dma_rx_callback() local
871 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_callback()
887 spin_lock_irq(&uap->port.lock); in pl011_dma_rx_callback()
898 uap->dmarx.running = false; in pl011_dma_rx_callback()
900 ret = pl011_dma_rx_trigger_dma(uap); in pl011_dma_rx_callback()
902 pl011_dma_rx_chars(uap, pending, lastbuf, false); in pl011_dma_rx_callback()
903 spin_unlock_irq(&uap->port.lock); in pl011_dma_rx_callback()
909 dev_dbg(uap->port.dev, "could not retrigger RX DMA job " in pl011_dma_rx_callback()
911 uap->im |= UART011_RXIM; in pl011_dma_rx_callback()
912 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_dma_rx_callback()
921 static inline void pl011_dma_rx_stop(struct uart_amba_port *uap) in pl011_dma_rx_stop() argument
924 uap->dmacr &= ~UART011_RXDMAE; in pl011_dma_rx_stop()
925 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_rx_stop()
935 struct uart_amba_port *uap = (struct uart_amba_port *)args; in pl011_dma_rx_poll() local
936 struct tty_port *port = &uap->port.state->port; in pl011_dma_rx_poll()
937 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_poll()
938 struct dma_chan *rxchan = uap->dmarx.chan; in pl011_dma_rx_poll()
946 sgbuf = dmarx->use_buf_b ? &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_poll()
964 > uap->dmarx.poll_timeout) { in pl011_dma_rx_poll()
966 spin_lock_irqsave(&uap->port.lock, flags); in pl011_dma_rx_poll()
967 pl011_dma_rx_stop(uap); in pl011_dma_rx_poll()
968 uap->im |= UART011_RXIM; in pl011_dma_rx_poll()
969 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_dma_rx_poll()
970 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_rx_poll()
972 uap->dmarx.running = false; in pl011_dma_rx_poll()
974 del_timer(&uap->dmarx.timer); in pl011_dma_rx_poll()
976 mod_timer(&uap->dmarx.timer, in pl011_dma_rx_poll()
977 jiffies + msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_dma_rx_poll()
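
When the DMA controller gives no timely residue updates, the driver polls instead: pl011_dma_rx_poll() runs off a timer, pushes whatever has landed in the buffer, and either re-arms itself or gives up after poll_timeout of silence and falls back to the RX interrupt. A sketch of that decision:

    static void pl011_dma_rx_poll(unsigned long args)
    {
            struct uart_amba_port *uap = (struct uart_amba_port *)args;
            struct pl011_dmarx_data *dmarx = &uap->dmarx;
            unsigned long flags;

            /* ... query the channel residue and push any new bytes to the tty,
             * updating dmarx->last_residue / dmarx->last_jiffies on progress ... */

            if (jiffies_to_msecs(jiffies - dmarx->last_jiffies)
                            > uap->dmarx.poll_timeout) {
                    /* Silent for too long: stop RX DMA and re-enable the RX IRQ */
                    spin_lock_irqsave(&uap->port.lock, flags);
                    pl011_dma_rx_stop(uap);
                    uap->im |= UART011_RXIM;
                    writew(uap->im, uap->port.membase + UART011_IMSC);
                    spin_unlock_irqrestore(&uap->port.lock, flags);

                    uap->dmarx.running = false;
                    del_timer(&uap->dmarx.timer);
            } else {
                    mod_timer(&uap->dmarx.timer,
                              jiffies + msecs_to_jiffies(uap->dmarx.poll_rate));
            }
    }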
981 static void pl011_dma_startup(struct uart_amba_port *uap) in pl011_dma_startup() argument
985 if (!uap->dma_probed) in pl011_dma_startup()
986 pl011_dma_probe(uap); in pl011_dma_startup()
988 if (!uap->dmatx.chan) in pl011_dma_startup()
991 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA); in pl011_dma_startup()
992 if (!uap->dmatx.buf) { in pl011_dma_startup()
993 dev_err(uap->port.dev, "no memory for DMA TX buffer\n"); in pl011_dma_startup()
994 uap->port.fifosize = uap->fifosize; in pl011_dma_startup()
998 sg_init_one(&uap->dmatx.sg, uap->dmatx.buf, PL011_DMA_BUFFER_SIZE); in pl011_dma_startup()
1001 uap->port.fifosize = PL011_DMA_BUFFER_SIZE; in pl011_dma_startup()
1002 uap->using_tx_dma = true; in pl011_dma_startup()
1004 if (!uap->dmarx.chan) in pl011_dma_startup()
1008 ret = pl011_sgbuf_init(uap->dmarx.chan, &uap->dmarx.sgbuf_a, in pl011_dma_startup()
1011 dev_err(uap->port.dev, "failed to init DMA %s: %d\n", in pl011_dma_startup()
1016 ret = pl011_sgbuf_init(uap->dmarx.chan, &uap->dmarx.sgbuf_b, in pl011_dma_startup()
1019 dev_err(uap->port.dev, "failed to init DMA %s: %d\n", in pl011_dma_startup()
1021 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_a, in pl011_dma_startup()
1026 uap->using_rx_dma = true; in pl011_dma_startup()
1030 uap->dmacr |= UART011_DMAONERR; in pl011_dma_startup()
1031 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_startup()
1038 if (uap->vendor->dma_threshold) in pl011_dma_startup()
1040 uap->port.membase + ST_UART011_DMAWM); in pl011_dma_startup()
1042 if (uap->using_rx_dma) { in pl011_dma_startup()
1043 if (pl011_dma_rx_trigger_dma(uap)) in pl011_dma_startup()
1044 dev_dbg(uap->port.dev, "could not trigger initial " in pl011_dma_startup()
1046 if (uap->dmarx.poll_rate) { in pl011_dma_startup()
1047 init_timer(&(uap->dmarx.timer)); in pl011_dma_startup()
1048 uap->dmarx.timer.function = pl011_dma_rx_poll; in pl011_dma_startup()
1049 uap->dmarx.timer.data = (unsigned long)uap; in pl011_dma_startup()
1050 mod_timer(&uap->dmarx.timer, in pl011_dma_startup()
1052 msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_dma_startup()
1053 uap->dmarx.last_residue = PL011_DMA_BUFFER_SIZE; in pl011_dma_startup()
1054 uap->dmarx.last_jiffies = jiffies; in pl011_dma_startup()
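
pl011_dma_startup() arms that poll timer with the timer API of this kernel generation (init_timer plus explicit .function/.data, predating timer_setup()); the relevant fragment, reconstructed from the matches above:

    if (uap->dmarx.poll_rate) {
            init_timer(&uap->dmarx.timer);
            uap->dmarx.timer.function = pl011_dma_rx_poll;
            uap->dmarx.timer.data = (unsigned long)uap;
            mod_timer(&uap->dmarx.timer,
                      jiffies + msecs_to_jiffies(uap->dmarx.poll_rate));
            uap->dmarx.last_residue = PL011_DMA_BUFFER_SIZE;
            uap->dmarx.last_jiffies = jiffies;
    }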
1059 static void pl011_dma_shutdown(struct uart_amba_port *uap) in pl011_dma_shutdown() argument
1061 if (!(uap->using_tx_dma || uap->using_rx_dma)) in pl011_dma_shutdown()
1065 while (readw(uap->port.membase + UART01x_FR) & UART01x_FR_BUSY) in pl011_dma_shutdown()
1068 spin_lock_irq(&uap->port.lock); in pl011_dma_shutdown()
1069 uap->dmacr &= ~(UART011_DMAONERR | UART011_RXDMAE | UART011_TXDMAE); in pl011_dma_shutdown()
1070 writew(uap->dmacr, uap->port.membase + UART011_DMACR); in pl011_dma_shutdown()
1071 spin_unlock_irq(&uap->port.lock); in pl011_dma_shutdown()
1073 if (uap->using_tx_dma) { in pl011_dma_shutdown()
1075 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_shutdown()
1076 if (uap->dmatx.queued) { in pl011_dma_shutdown()
1077 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_shutdown()
1079 uap->dmatx.queued = false; in pl011_dma_shutdown()
1082 kfree(uap->dmatx.buf); in pl011_dma_shutdown()
1083 uap->using_tx_dma = false; in pl011_dma_shutdown()
1086 if (uap->using_rx_dma) { in pl011_dma_shutdown()
1087 dmaengine_terminate_all(uap->dmarx.chan); in pl011_dma_shutdown()
1089 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_a, DMA_FROM_DEVICE); in pl011_dma_shutdown()
1090 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_b, DMA_FROM_DEVICE); in pl011_dma_shutdown()
1091 if (uap->dmarx.poll_rate) in pl011_dma_shutdown()
1092 del_timer_sync(&uap->dmarx.timer); in pl011_dma_shutdown()
1093 uap->using_rx_dma = false; in pl011_dma_shutdown()
1097 static inline bool pl011_dma_rx_available(struct uart_amba_port *uap) in pl011_dma_rx_available() argument
1099 return uap->using_rx_dma; in pl011_dma_rx_available()
1102 static inline bool pl011_dma_rx_running(struct uart_amba_port *uap) in pl011_dma_rx_running() argument
1104 return uap->using_rx_dma && uap->dmarx.running; in pl011_dma_rx_running()
1109 static inline void pl011_dma_probe(struct uart_amba_port *uap) in pl011_dma_probe() argument
1113 static inline void pl011_dma_remove(struct uart_amba_port *uap) in pl011_dma_remove() argument
1117 static inline void pl011_dma_startup(struct uart_amba_port *uap) in pl011_dma_startup() argument
1121 static inline void pl011_dma_shutdown(struct uart_amba_port *uap) in pl011_dma_shutdown() argument
1125 static inline bool pl011_dma_tx_irq(struct uart_amba_port *uap) in pl011_dma_tx_irq() argument
1130 static inline void pl011_dma_tx_stop(struct uart_amba_port *uap) in pl011_dma_tx_stop() argument
1134 static inline bool pl011_dma_tx_start(struct uart_amba_port *uap) in pl011_dma_tx_start() argument
1139 static inline void pl011_dma_rx_irq(struct uart_amba_port *uap) in pl011_dma_rx_irq() argument
1143 static inline void pl011_dma_rx_stop(struct uart_amba_port *uap) in pl011_dma_rx_stop() argument
1147 static inline int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap) in pl011_dma_rx_trigger_dma() argument
1152 static inline bool pl011_dma_rx_available(struct uart_amba_port *uap) in pl011_dma_rx_available() argument
1157 static inline bool pl011_dma_rx_running(struct uart_amba_port *uap) in pl011_dma_rx_running() argument
1167 struct uart_amba_port *uap = in pl011_stop_tx() local
1170 uap->im &= ~UART011_TXIM; in pl011_stop_tx()
1171 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_stop_tx()
1172 pl011_dma_tx_stop(uap); in pl011_stop_tx()
1175 static bool pl011_tx_chars(struct uart_amba_port *uap);
1178 static void pl011_start_tx_pio(struct uart_amba_port *uap) in pl011_start_tx_pio() argument
1180 uap->im |= UART011_TXIM; in pl011_start_tx_pio()
1181 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_start_tx_pio()
1182 if (!uap->tx_irq_seen) in pl011_start_tx_pio()
1183 pl011_tx_chars(uap); in pl011_start_tx_pio()
1188 struct uart_amba_port *uap = in pl011_start_tx() local
1191 if (!pl011_dma_tx_start(uap)) in pl011_start_tx()
1192 pl011_start_tx_pio(uap); in pl011_start_tx()
1197 struct uart_amba_port *uap = in pl011_stop_rx() local
1200 uap->im &= ~(UART011_RXIM|UART011_RTIM|UART011_FEIM| in pl011_stop_rx()
1202 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_stop_rx()
1204 pl011_dma_rx_stop(uap); in pl011_stop_rx()
1209 struct uart_amba_port *uap = in pl011_enable_ms() local
1212 uap->im |= UART011_RIMIM|UART011_CTSMIM|UART011_DCDMIM|UART011_DSRMIM; in pl011_enable_ms()
1213 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_enable_ms()
1216 static void pl011_rx_chars(struct uart_amba_port *uap) in pl011_rx_chars() argument
1217 __releases(&uap->port.lock) in pl011_rx_chars()
1218 __acquires(&uap->port.lock) in pl011_rx_chars()
1220 pl011_fifo_to_tty(uap); in pl011_rx_chars()
1222 spin_unlock(&uap->port.lock); in pl011_rx_chars()
1223 tty_flip_buffer_push(&uap->port.state->port); in pl011_rx_chars()
1228 if (pl011_dma_rx_available(uap)) { in pl011_rx_chars()
1229 if (pl011_dma_rx_trigger_dma(uap)) { in pl011_rx_chars()
1230 dev_dbg(uap->port.dev, "could not trigger RX DMA job " in pl011_rx_chars()
1232 uap->im |= UART011_RXIM; in pl011_rx_chars()
1233 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_rx_chars()
1237 if (uap->dmarx.poll_rate) { in pl011_rx_chars()
1238 uap->dmarx.last_jiffies = jiffies; in pl011_rx_chars()
1239 uap->dmarx.last_residue = PL011_DMA_BUFFER_SIZE; in pl011_rx_chars()
1240 mod_timer(&uap->dmarx.timer, in pl011_rx_chars()
1242 msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_rx_chars()
1247 spin_lock(&uap->port.lock); in pl011_rx_chars()
1256 static bool pl011_tx_char(struct uart_amba_port *uap, unsigned char c) in pl011_tx_char() argument
1258 if (readw(uap->port.membase + UART01x_FR) & UART01x_FR_TXFF) in pl011_tx_char()
1261 writew(c, uap->port.membase + UART01x_DR); in pl011_tx_char()
1262 uap->port.icount.tx++; in pl011_tx_char()
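
pl011_tx_char() is the one-byte PIO primitive the transmit paths build on; reconstructed from the matches above:

    /* Queue a single byte if the TX FIFO has room; false means "try later". */
    static bool pl011_tx_char(struct uart_amba_port *uap, unsigned char c)
    {
            if (readw(uap->port.membase + UART01x_FR) & UART01x_FR_TXFF)
                    return false;   /* FIFO full */

            writew(c, uap->port.membase + UART01x_DR);
            uap->port.icount.tx++;
            return true;
    }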
1267 static bool pl011_tx_chars(struct uart_amba_port *uap) in pl011_tx_chars() argument
1269 struct circ_buf *xmit = &uap->port.state->xmit; in pl011_tx_chars()
1272 if (unlikely(uap->tx_irq_seen < 2)) in pl011_tx_chars()
1277 count = uap->fifosize; in pl011_tx_chars()
1287 count = uap->fifosize >> 1; in pl011_tx_chars()
1293 if (unlikely(uap->tx_irq_seen < 2 && in pl011_tx_chars()
1294 readw(uap->port.membase + UART01x_FR) & UART01x_FR_TXFF)) in pl011_tx_chars()
1297 if (uap->port.x_char) { in pl011_tx_chars()
1298 if (!pl011_tx_char(uap, uap->port.x_char)) in pl011_tx_chars()
1300 uap->port.x_char = 0; in pl011_tx_chars()
1303 if (uart_circ_empty(xmit) || uart_tx_stopped(&uap->port)) { in pl011_tx_chars()
1304 pl011_stop_tx(&uap->port); in pl011_tx_chars()
1309 if (pl011_dma_tx_irq(uap)) in pl011_tx_chars()
1312 while (count-- > 0 && pl011_tx_char(uap, xmit->buf[xmit->tail])) { in pl011_tx_chars()
1319 uart_write_wakeup(&uap->port); in pl011_tx_chars()
1322 pl011_stop_tx(&uap->port); in pl011_tx_chars()
1326 if (unlikely(!uap->tx_irq_seen)) in pl011_tx_chars()
1327 schedule_delayed_work(&uap->tx_softirq_work, uap->port.timeout); in pl011_tx_chars()
1333 static void pl011_modem_status(struct uart_amba_port *uap) in pl011_modem_status() argument
1337 status = readw(uap->port.membase + UART01x_FR) & UART01x_FR_MODEM_ANY; in pl011_modem_status()
1339 delta = status ^ uap->old_status; in pl011_modem_status()
1340 uap->old_status = status; in pl011_modem_status()
1346 uart_handle_dcd_change(&uap->port, status & UART01x_FR_DCD); in pl011_modem_status()
1349 uap->port.icount.dsr++; in pl011_modem_status()
1352 uart_handle_cts_change(&uap->port, status & UART01x_FR_CTS); in pl011_modem_status()
1354 wake_up_interruptible(&uap->port.state->port.delta_msr_wait); in pl011_modem_status()
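
pl011_modem_status() is edge detection against the cached old_status snapshot; a sketch built from the matches above:

    static void pl011_modem_status(struct uart_amba_port *uap)
    {
            unsigned int status, delta;

            status = readw(uap->port.membase + UART01x_FR) & UART01x_FR_MODEM_ANY;

            delta = status ^ uap->old_status;
            uap->old_status = status;
            if (!delta)
                    return;

            if (delta & UART01x_FR_DCD)
                    uart_handle_dcd_change(&uap->port, status & UART01x_FR_DCD);

            if (delta & UART01x_FR_DSR)
                    uap->port.icount.dsr++;

            if (delta & UART01x_FR_CTS)
                    uart_handle_cts_change(&uap->port, status & UART01x_FR_CTS);

            wake_up_interruptible(&uap->port.state->port.delta_msr_wait);
    }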
1360 struct uart_amba_port *uap = in pl011_tx_softirq() local
1363 spin_lock_irq(&uap->port.lock); in pl011_tx_softirq()
1364 while (pl011_tx_chars(uap)) ; in pl011_tx_softirq()
1365 spin_unlock_irq(&uap->port.lock); in pl011_tx_softirq()
1368 static void pl011_tx_irq_seen(struct uart_amba_port *uap) in pl011_tx_irq_seen() argument
1370 if (likely(uap->tx_irq_seen > 1)) in pl011_tx_irq_seen()
1373 uap->tx_irq_seen++; in pl011_tx_irq_seen()
1374 if (uap->tx_irq_seen < 2) in pl011_tx_irq_seen()
1376 cancel_delayed_work(&uap->tx_softirq_work); in pl011_tx_irq_seen()
1381 struct uart_amba_port *uap = dev_id; in pl011_int() local
1387 spin_lock_irqsave(&uap->port.lock, flags); in pl011_int()
1388 status = readw(uap->port.membase + UART011_MIS); in pl011_int()
1391 if (uap->vendor->cts_event_workaround) { in pl011_int()
1393 writew(0x00, uap->port.membase + UART011_ICR); in pl011_int()
1400 dummy_read = readw(uap->port.membase + UART011_ICR); in pl011_int()
1401 dummy_read = readw(uap->port.membase + UART011_ICR); in pl011_int()
1406 uap->port.membase + UART011_ICR); in pl011_int()
1409 if (pl011_dma_rx_running(uap)) in pl011_int()
1410 pl011_dma_rx_irq(uap); in pl011_int()
1412 pl011_rx_chars(uap); in pl011_int()
1416 pl011_modem_status(uap); in pl011_int()
1418 pl011_tx_irq_seen(uap); in pl011_int()
1419 pl011_tx_chars(uap); in pl011_int()
1425 status = readw(uap->port.membase + UART011_MIS); in pl011_int()
1430 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_int()
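
pl011_int() ties the pieces together: under the port lock it keeps reading the masked interrupt status, acks what it is about to handle, and dispatches to the RX, modem-status and TX paths until the status is clear or a pass limit is hit. A condensed sketch, with the vendor CTS workaround omitted:

    static irqreturn_t pl011_int(int irq, void *dev_id)
    {
            struct uart_amba_port *uap = dev_id;
            unsigned long flags;
            unsigned int status;
            int pass_counter = 256;         /* bounded, like the driver's ISR pass limit */
            int handled = 0;

            spin_lock_irqsave(&uap->port.lock, flags);
            status = readw(uap->port.membase + UART011_MIS);
            while (status) {
                    /* ack everything except TX/RX/RT, which their handlers clear */
                    writew(status & ~(UART011_TXIS | UART011_RTIS | UART011_RXIS),
                           uap->port.membase + UART011_ICR);

                    if (status & (UART011_RTIS | UART011_RXIS)) {
                            if (pl011_dma_rx_running(uap))
                                    pl011_dma_rx_irq(uap);
                            else
                                    pl011_rx_chars(uap);
                    }
                    if (status & (UART011_DSRMIS | UART011_DCDMIS |
                                  UART011_CTSMIS | UART011_RIMIS))
                            pl011_modem_status(uap);
                    if (status & UART011_TXIS) {
                            pl011_tx_irq_seen(uap);
                            pl011_tx_chars(uap);
                    }

                    handled = 1;
                    if (--pass_counter == 0)
                            break;
                    status = readw(uap->port.membase + UART011_MIS);
            }
            spin_unlock_irqrestore(&uap->port.lock, flags);

            return IRQ_RETVAL(handled);
    }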
1437 struct uart_amba_port *uap = in pl011_tx_empty() local
1439 unsigned int status = readw(uap->port.membase + UART01x_FR); in pl011_tx_empty()
1445 struct uart_amba_port *uap = in pl011_get_mctrl() local
1448 unsigned int status = readw(uap->port.membase + UART01x_FR); in pl011_get_mctrl()
1464 struct uart_amba_port *uap = in pl011_set_mctrl() local
1468 cr = readw(uap->port.membase + UART011_CR); in pl011_set_mctrl()
1482 if (uap->autorts) { in pl011_set_mctrl()
1488 writew(cr, uap->port.membase + UART011_CR); in pl011_set_mctrl()
1493 struct uart_amba_port *uap = in pl011_break_ctl() local
1498 spin_lock_irqsave(&uap->port.lock, flags); in pl011_break_ctl()
1499 lcr_h = readw(uap->port.membase + uap->lcrh_tx); in pl011_break_ctl()
1504 writew(lcr_h, uap->port.membase + uap->lcrh_tx); in pl011_break_ctl()
1505 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_break_ctl()
1512 struct uart_amba_port *uap = in pl011_quiesce_irqs() local
1514 unsigned char __iomem *regs = uap->port.membase; in pl011_quiesce_irqs()
1535 struct uart_amba_port *uap = in pl011_get_poll_char() local
1545 status = readw(uap->port.membase + UART01x_FR); in pl011_get_poll_char()
1549 return readw(uap->port.membase + UART01x_DR); in pl011_get_poll_char()
1555 struct uart_amba_port *uap = in pl011_put_poll_char() local
1558 while (readw(uap->port.membase + UART01x_FR) & UART01x_FR_TXFF) in pl011_put_poll_char()
1561 writew(ch, uap->port.membase + UART01x_DR); in pl011_put_poll_char()
1568 struct uart_amba_port *uap = in pl011_hwinit() local
1578 retval = clk_prepare_enable(uap->clk); in pl011_hwinit()
1582 uap->port.uartclk = clk_get_rate(uap->clk); in pl011_hwinit()
1586 UART011_RTIS | UART011_RXIS, uap->port.membase + UART011_ICR); in pl011_hwinit()
1592 uap->im = readw(uap->port.membase + UART011_IMSC); in pl011_hwinit()
1593 writew(UART011_RTIM | UART011_RXIM, uap->port.membase + UART011_IMSC); in pl011_hwinit()
1595 if (dev_get_platdata(uap->port.dev)) { in pl011_hwinit()
1598 plat = dev_get_platdata(uap->port.dev); in pl011_hwinit()
1605 static void pl011_write_lcr_h(struct uart_amba_port *uap, unsigned int lcr_h) in pl011_write_lcr_h() argument
1607 writew(lcr_h, uap->port.membase + uap->lcrh_rx); in pl011_write_lcr_h()
1608 if (uap->lcrh_rx != uap->lcrh_tx) { in pl011_write_lcr_h()
1615 writew(0xff, uap->port.membase + UART011_MIS); in pl011_write_lcr_h()
1616 writew(lcr_h, uap->port.membase + uap->lcrh_tx); in pl011_write_lcr_h()
1622 struct uart_amba_port *uap = in pl011_startup() local
1631 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_startup()
1636 retval = request_irq(uap->port.irq, pl011_int, 0, "uart-pl011", uap); in pl011_startup()
1640 writew(uap->vendor->ifls, uap->port.membase + UART011_IFLS); in pl011_startup()
1643 uap->tx_irq_seen = 0; in pl011_startup()
1645 spin_lock_irq(&uap->port.lock); in pl011_startup()
1648 cr = uap->old_cr & (UART011_CR_RTS | UART011_CR_DTR); in pl011_startup()
1650 writew(cr, uap->port.membase + UART011_CR); in pl011_startup()
1652 spin_unlock_irq(&uap->port.lock); in pl011_startup()
1657 uap->old_status = readw(uap->port.membase + UART01x_FR) & UART01x_FR_MODEM_ANY; in pl011_startup()
1660 pl011_dma_startup(uap); in pl011_startup()
1667 spin_lock_irq(&uap->port.lock); in pl011_startup()
1670 uap->port.membase + UART011_ICR); in pl011_startup()
1671 uap->im = UART011_RTIM; in pl011_startup()
1672 if (!pl011_dma_rx_running(uap)) in pl011_startup()
1673 uap->im |= UART011_RXIM; in pl011_startup()
1674 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_startup()
1675 spin_unlock_irq(&uap->port.lock); in pl011_startup()
1680 clk_disable_unprepare(uap->clk); in pl011_startup()
1684 static void pl011_shutdown_channel(struct uart_amba_port *uap, in pl011_shutdown_channel() argument
1689 val = readw(uap->port.membase + lcrh); in pl011_shutdown_channel()
1691 writew(val, uap->port.membase + lcrh); in pl011_shutdown_channel()
1696 struct uart_amba_port *uap = in pl011_shutdown() local
1700 cancel_delayed_work_sync(&uap->tx_softirq_work); in pl011_shutdown()
1705 spin_lock_irq(&uap->port.lock); in pl011_shutdown()
1706 uap->im = 0; in pl011_shutdown()
1707 writew(uap->im, uap->port.membase + UART011_IMSC); in pl011_shutdown()
1708 writew(0xffff, uap->port.membase + UART011_ICR); in pl011_shutdown()
1709 spin_unlock_irq(&uap->port.lock); in pl011_shutdown()
1711 pl011_dma_shutdown(uap); in pl011_shutdown()
1716 free_irq(uap->port.irq, uap); in pl011_shutdown()
1724 uap->autorts = false; in pl011_shutdown()
1725 spin_lock_irq(&uap->port.lock); in pl011_shutdown()
1726 cr = readw(uap->port.membase + UART011_CR); in pl011_shutdown()
1727 uap->old_cr = cr; in pl011_shutdown()
1730 writew(cr, uap->port.membase + UART011_CR); in pl011_shutdown()
1731 spin_unlock_irq(&uap->port.lock); in pl011_shutdown()
1736 pl011_shutdown_channel(uap, uap->lcrh_rx); in pl011_shutdown()
1737 if (uap->lcrh_rx != uap->lcrh_tx) in pl011_shutdown()
1738 pl011_shutdown_channel(uap, uap->lcrh_tx); in pl011_shutdown()
1743 clk_disable_unprepare(uap->clk); in pl011_shutdown()
1747 if (dev_get_platdata(uap->port.dev)) { in pl011_shutdown()
1750 plat = dev_get_platdata(uap->port.dev); in pl011_shutdown()
1755 if (uap->port.ops->flush_buffer) in pl011_shutdown()
1756 uap->port.ops->flush_buffer(port); in pl011_shutdown()
1763 struct uart_amba_port *uap = in pl011_set_termios() local
1769 if (uap->vendor->oversampling) in pl011_set_termios()
1783 if (uap->dmarx.auto_poll_rate) in pl011_set_termios()
1784 uap->dmarx.poll_rate = DIV_ROUND_UP(10000000, baud); in pl011_set_termios()
1813 if (uap->fifosize > 1) in pl011_set_termios()
1863 uap->autorts = true; in pl011_set_termios()
1866 uap->autorts = false; in pl011_set_termios()
1869 if (uap->vendor->oversampling) { in pl011_set_termios()
1882 if (uap->vendor->oversampling) { in pl011_set_termios()
1898 pl011_write_lcr_h(uap, lcr_h); in pl011_set_termios()
1906 struct uart_amba_port *uap = in pl011_type() local
1908 return uap->port.type == PORT_AMBA ? uap->type : NULL; in pl011_type()
1985 struct uart_amba_port *uap = in pl011_console_putchar() local
1988 while (readw(uap->port.membase + UART01x_FR) & UART01x_FR_TXFF) in pl011_console_putchar()
1990 writew(ch, uap->port.membase + UART01x_DR); in pl011_console_putchar()
1996 struct uart_amba_port *uap = amba_ports[co->index]; in pl011_console_write() local
2001 clk_enable(uap->clk); in pl011_console_write()
2004 if (uap->port.sysrq) in pl011_console_write()
2007 locked = spin_trylock(&uap->port.lock); in pl011_console_write()
2009 spin_lock(&uap->port.lock); in pl011_console_write()
2014 old_cr = readw(uap->port.membase + UART011_CR); in pl011_console_write()
2017 writew(new_cr, uap->port.membase + UART011_CR); in pl011_console_write()
2019 uart_console_write(&uap->port, s, count, pl011_console_putchar); in pl011_console_write()
2026 status = readw(uap->port.membase + UART01x_FR); in pl011_console_write()
2028 writew(old_cr, uap->port.membase + UART011_CR); in pl011_console_write()
2031 spin_unlock(&uap->port.lock); in pl011_console_write()
2034 clk_disable(uap->clk); in pl011_console_write()
2038 pl011_console_get_options(struct uart_amba_port *uap, int *baud, in pl011_console_get_options() argument
2041 if (readw(uap->port.membase + UART011_CR) & UART01x_CR_UARTEN) { in pl011_console_get_options()
2044 lcr_h = readw(uap->port.membase + uap->lcrh_tx); in pl011_console_get_options()
2059 ibrd = readw(uap->port.membase + UART011_IBRD); in pl011_console_get_options()
2060 fbrd = readw(uap->port.membase + UART011_FBRD); in pl011_console_get_options()
2062 *baud = uap->port.uartclk * 4 / (64 * ibrd + fbrd); in pl011_console_get_options()
2064 if (uap->vendor->oversampling) { in pl011_console_get_options()
2065 if (readw(uap->port.membase + UART011_CR) in pl011_console_get_options()
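
pl011_console_get_options() recovers the console baud rate from the hardware divisors; the arithmetic is just the PL011 divisor formula inverted (the fractional register counts in 1/64ths, and the baud-rate generator divides uartclk by 16 times the combined divisor):

    /* baud = uartclk / (16 * (IBRD + FBRD/64)) = uartclk * 4 / (64*IBRD + FBRD) */
    unsigned int ibrd = readw(uap->port.membase + UART011_IBRD);
    unsigned int fbrd = readw(uap->port.membase + UART011_FBRD);
    unsigned int baud = uap->port.uartclk * 4 / (64 * ibrd + fbrd);

    /* ST variants can run 8x oversampling instead of 16x, which doubles the
     * effective baud rate for the same divisors (the oversampling check above). */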
2074 struct uart_amba_port *uap; in pl011_console_setup() local
2088 uap = amba_ports[co->index]; in pl011_console_setup()
2089 if (!uap) in pl011_console_setup()
2093 pinctrl_pm_select_default_state(uap->port.dev); in pl011_console_setup()
2095 ret = clk_prepare(uap->clk); in pl011_console_setup()
2099 if (dev_get_platdata(uap->port.dev)) { in pl011_console_setup()
2102 plat = dev_get_platdata(uap->port.dev); in pl011_console_setup()
2107 uap->port.uartclk = clk_get_rate(uap->clk); in pl011_console_setup()
2112 pl011_console_get_options(uap, &baud, &parity, &bits); in pl011_console_setup()
2114 return uart_set_options(&uap->port, co, baud, parity, bits, flow); in pl011_console_setup()
2206 struct uart_amba_port *uap; in pl011_probe() local
2218 uap = devm_kzalloc(&dev->dev, sizeof(struct uart_amba_port), in pl011_probe()
2220 if (uap == NULL) in pl011_probe()
2230 uap->clk = devm_clk_get(&dev->dev, NULL); in pl011_probe()
2231 if (IS_ERR(uap->clk)) in pl011_probe()
2232 return PTR_ERR(uap->clk); in pl011_probe()
2234 uap->vendor = vendor; in pl011_probe()
2235 uap->lcrh_rx = vendor->lcrh_rx; in pl011_probe()
2236 uap->lcrh_tx = vendor->lcrh_tx; in pl011_probe()
2237 uap->old_cr = 0; in pl011_probe()
2238 uap->fifosize = vendor->get_fifosize(dev); in pl011_probe()
2239 uap->port.dev = &dev->dev; in pl011_probe()
2240 uap->port.mapbase = dev->res.start; in pl011_probe()
2241 uap->port.membase = base; in pl011_probe()
2242 uap->port.iotype = UPIO_MEM; in pl011_probe()
2243 uap->port.irq = dev->irq[0]; in pl011_probe()
2244 uap->port.fifosize = uap->fifosize; in pl011_probe()
2245 uap->port.ops = &amba_pl011_pops; in pl011_probe()
2246 uap->port.flags = UPF_BOOT_AUTOCONF; in pl011_probe()
2247 uap->port.line = i; in pl011_probe()
2248 INIT_DELAYED_WORK(&uap->tx_softirq_work, pl011_tx_softirq); in pl011_probe()
2251 writew(0, uap->port.membase + UART011_IMSC); in pl011_probe()
2252 writew(0xffff, uap->port.membase + UART011_ICR); in pl011_probe()
2254 snprintf(uap->type, sizeof(uap->type), "PL011 rev%u", amba_rev(dev)); in pl011_probe()
2256 amba_ports[i] = uap; in pl011_probe()
2258 amba_set_drvdata(dev, uap); in pl011_probe()
2269 ret = uart_add_one_port(&amba_reg, &uap->port); in pl011_probe()
2280 struct uart_amba_port *uap = amba_get_drvdata(dev); in pl011_remove() local
2284 uart_remove_one_port(&amba_reg, &uap->port); in pl011_remove()
2287 if (amba_ports[i] == uap) in pl011_remove()
2292 pl011_dma_remove(uap); in pl011_remove()
2301 struct uart_amba_port *uap = dev_get_drvdata(dev); in pl011_suspend() local
2303 if (!uap) in pl011_suspend()
2306 return uart_suspend_port(&amba_reg, &uap->port); in pl011_suspend()
2311 struct uart_amba_port *uap = dev_get_drvdata(dev); in pl011_resume() local
2313 if (!uap) in pl011_resume()
2316 return uart_resume_port(&amba_reg, &uap->port); in pl011_resume()