// SPDX-License-Identifier: GPL-2.0+
/*
 * 8250_dma.c - DMA Engine API support for 8250.c
 *
 * Copyright (C) 2013 Intel Corporation
 */
#include <linux/tty.h>
#include <linux/tty_flip.h>
#include <linux/serial_reg.h>
#include <linux/dma-mapping.h>

#include "8250.h"
static void __dma_tx_complete ( void * param )
{
struct uart_8250_port * p = param ;
struct uart_8250_dma * dma = p - > dma ;
struct circ_buf * xmit = & p - > port . state - > xmit ;
2014-04-24 11:34:48 +02:00
unsigned long flags ;
2014-11-06 13:28:16 +02:00
int ret ;
2013-01-10 11:25:11 +02:00
dma_sync_single_for_cpu ( dma - > txchan - > device - > dev , dma - > tx_addr ,
UART_XMIT_SIZE , DMA_TO_DEVICE ) ;
2014-04-24 11:34:48 +02:00
spin_lock_irqsave ( & p - > port . lock , flags ) ;
dma - > tx_running = 0 ;
2013-01-10 11:25:11 +02:00
xmit - > tail + = dma - > tx_size ;
xmit - > tail & = UART_XMIT_SIZE - 1 ;
p - > port . icount . tx + = dma - > tx_size ;
if ( uart_circ_chars_pending ( xmit ) < WAKEUP_CHARS )
uart_write_wakeup ( & p - > port ) ;
2014-11-06 13:28:16 +02:00
ret = serial8250_tx_dma ( p ) ;
2019-06-17 16:53:20 +03:00
if ( ret )
serial8250_set_THRI ( p ) ;
2014-04-24 11:34:48 +02:00
spin_unlock_irqrestore ( & p - > port . lock , flags ) ;
2013-01-10 11:25:11 +02:00
}
static void __dma_rx_complete ( void * param )
{
struct uart_8250_port * p = param ;
struct uart_8250_dma * dma = p - > dma ;
2013-01-16 14:08:13 +02:00
struct tty_port * tty_port = & p - > port . state - > port ;
2013-01-10 11:25:11 +02:00
struct dma_tx_state state ;
2013-01-16 14:08:13 +02:00
int count ;
2013-01-10 11:25:11 +02:00
2014-09-29 20:06:41 +02:00
dma - > rx_running = 0 ;
2013-01-10 11:25:11 +02:00
dmaengine_tx_status ( dma - > rxchan , dma - > rx_cookie , & state ) ;
2013-01-16 14:08:13 +02:00
count = dma - > rx_size - state . residue ;
2013-01-10 11:25:11 +02:00
2013-01-16 14:08:13 +02:00
tty_insert_flip_string ( tty_port , dma - > rx_buf , count ) ;
p - > port . icount . rx + = count ;
tty_flip_buffer_push ( tty_port ) ;
2013-01-10 11:25:11 +02:00
}
int serial8250_tx_dma ( struct uart_8250_port * p )
{
struct uart_8250_dma * dma = p - > dma ;
struct circ_buf * xmit = & p - > port . state - > xmit ;
struct dma_async_tx_descriptor * desc ;
2014-09-29 20:06:40 +02:00
int ret ;
2013-01-10 11:25:11 +02:00
2016-11-14 12:26:52 +02:00
if ( dma - > tx_running )
2013-04-10 16:58:24 +03:00
return 0 ;
2013-01-10 11:25:11 +02:00
2016-11-14 12:26:52 +02:00
if ( uart_tx_stopped ( & p - > port ) | | uart_circ_empty ( xmit ) ) {
/* We have been called from __dma_tx_complete() */
serial8250_rpm_put_tx ( p ) ;
return 0 ;
}
2013-01-10 11:25:11 +02:00
dma - > tx_size = CIRC_CNT_TO_END ( xmit - > head , xmit - > tail , UART_XMIT_SIZE ) ;
desc = dmaengine_prep_slave_single ( dma - > txchan ,
dma - > tx_addr + xmit - > tail ,
dma - > tx_size , DMA_MEM_TO_DEV ,
DMA_PREP_INTERRUPT | DMA_CTRL_ACK ) ;
2014-09-29 20:06:40 +02:00
if ( ! desc ) {
ret = - EBUSY ;
goto err ;
}
2013-01-10 11:25:11 +02:00
dma - > tx_running = 1 ;
desc - > callback = __dma_tx_complete ;
desc - > callback_param = p ;
dma - > tx_cookie = dmaengine_submit ( desc ) ;
dma_sync_single_for_device ( dma - > txchan - > device - > dev , dma - > tx_addr ,
UART_XMIT_SIZE , DMA_TO_DEVICE ) ;
dma_async_issue_pending ( dma - > txchan ) ;
2014-09-29 20:06:40 +02:00
if ( dma - > tx_err ) {
dma - > tx_err = 0 ;
2019-06-17 16:53:20 +03:00
serial8250_clear_THRI ( p ) ;
2014-09-29 20:06:40 +02:00
}
2013-01-10 11:25:11 +02:00
return 0 ;
2014-09-29 20:06:40 +02:00
err :
dma - > tx_err = 1 ;
return ret ;
2013-01-10 11:25:11 +02:00
}
2016-04-09 22:14:36 -07:00
int serial8250_rx_dma ( struct uart_8250_port * p )
2013-01-10 11:25:11 +02:00
{
struct uart_8250_dma * dma = p - > dma ;
struct dma_async_tx_descriptor * desc ;
2013-04-10 16:58:25 +03:00
2014-09-29 20:06:41 +02:00
if ( dma - > rx_running )
2013-04-10 16:58:25 +03:00
return 0 ;
2013-01-10 11:25:11 +02:00
desc = dmaengine_prep_slave_single ( dma - > rxchan , dma - > rx_addr ,
dma - > rx_size , DMA_DEV_TO_MEM ,
DMA_PREP_INTERRUPT | DMA_CTRL_ACK ) ;
if ( ! desc )
return - EBUSY ;
2014-09-29 20:06:41 +02:00
dma - > rx_running = 1 ;
2013-01-10 11:25:11 +02:00
desc - > callback = __dma_rx_complete ;
desc - > callback_param = p ;
dma - > rx_cookie = dmaengine_submit ( desc ) ;
dma_async_issue_pending ( dma - > rxchan ) ;
return 0 ;
}
2016-04-09 22:14:36 -07:00
void serial8250_rx_dma_flush ( struct uart_8250_port * p )
{
struct uart_8250_dma * dma = p - > dma ;
if ( dma - > rx_running ) {
dmaengine_pause ( dma - > rxchan ) ;
__dma_rx_complete ( p ) ;
2016-08-17 19:20:24 +03:00
dmaengine_terminate_async ( dma - > rxchan ) ;
2016-04-09 22:14:36 -07:00
}
}
2016-06-15 13:44:12 +08:00
EXPORT_SYMBOL_GPL ( serial8250_rx_dma_flush ) ;
2016-04-09 22:14:36 -07:00
2013-01-10 11:25:11 +02:00
int serial8250_request_dma ( struct uart_8250_port * p )
{
struct uart_8250_dma * dma = p - > dma ;
2016-08-17 19:20:25 +03:00
phys_addr_t rx_dma_addr = dma - > rx_dma_addr ?
dma - > rx_dma_addr : p - > port . mapbase ;
phys_addr_t tx_dma_addr = dma - > tx_dma_addr ?
dma - > tx_dma_addr : p - > port . mapbase ;
2013-01-10 11:25:11 +02:00
dma_cap_mask_t mask ;
2016-04-09 22:14:32 -07:00
struct dma_slave_caps caps ;
int ret ;
2013-01-10 11:25:11 +02:00
2013-04-10 16:58:27 +03:00
/* Default slave configuration parameters */
dma - > rxconf . direction = DMA_DEV_TO_MEM ;
dma - > rxconf . src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE ;
2016-08-17 19:20:25 +03:00
dma - > rxconf . src_addr = rx_dma_addr + UART_RX ;
2013-04-10 16:58:27 +03:00
dma - > txconf . direction = DMA_MEM_TO_DEV ;
dma - > txconf . dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE ;
2016-08-17 19:20:25 +03:00
dma - > txconf . dst_addr = tx_dma_addr + UART_TX ;
2013-01-10 11:25:11 +02:00
dma_cap_zero ( mask ) ;
dma_cap_set ( DMA_SLAVE , mask ) ;
/* Get a channel for RX */
2013-04-10 16:58:26 +03:00
dma - > rxchan = dma_request_slave_channel_compat ( mask ,
dma - > fn , dma - > rx_param ,
p - > port . dev , " rx " ) ;
2013-01-10 11:25:11 +02:00
if ( ! dma - > rxchan )
return - ENODEV ;
2016-04-09 22:14:32 -07:00
/* 8250 rx dma requires dmaengine driver to support pause/terminate */
ret = dma_get_slave_caps ( dma - > rxchan , & caps ) ;
if ( ret )
goto release_rx ;
if ( ! caps . cmd_pause | | ! caps . cmd_terminate | |
caps . residue_granularity = = DMA_RESIDUE_GRANULARITY_DESCRIPTOR ) {
ret = - EINVAL ;
goto release_rx ;
}
2013-01-10 11:25:11 +02:00
dmaengine_slave_config ( dma - > rxchan , & dma - > rxconf ) ;
/* Get a channel for TX */
2013-04-10 16:58:26 +03:00
dma - > txchan = dma_request_slave_channel_compat ( mask ,
dma - > fn , dma - > tx_param ,
p - > port . dev , " tx " ) ;
2013-01-10 11:25:11 +02:00
if ( ! dma - > txchan ) {
2016-04-09 22:14:32 -07:00
ret = - ENODEV ;
goto release_rx ;
2013-01-10 11:25:11 +02:00
}
2016-04-09 22:14:33 -07:00
/* 8250 tx dma requires dmaengine driver to support terminate */
ret = dma_get_slave_caps ( dma - > txchan , & caps ) ;
if ( ret )
goto err ;
if ( ! caps . cmd_terminate ) {
ret = - EINVAL ;
goto err ;
}
2013-01-10 11:25:11 +02:00
dmaengine_slave_config ( dma - > txchan , & dma - > txconf ) ;
/* RX buffer */
if ( ! dma - > rx_size )
dma - > rx_size = PAGE_SIZE ;
dma - > rx_buf = dma_alloc_coherent ( dma - > rxchan - > device - > dev , dma - > rx_size ,
& dma - > rx_addr , GFP_KERNEL ) ;
2016-04-09 22:14:32 -07:00
if ( ! dma - > rx_buf ) {
ret = - ENOMEM ;
2014-04-28 15:59:56 +03:00
goto err ;
2016-04-09 22:14:32 -07:00
}
2013-01-10 11:25:11 +02:00
/* TX buffer */
dma - > tx_addr = dma_map_single ( dma - > txchan - > device - > dev ,
p - > port . state - > xmit . buf ,
UART_XMIT_SIZE ,
DMA_TO_DEVICE ) ;
2014-04-28 15:59:56 +03:00
if ( dma_mapping_error ( dma - > txchan - > device - > dev , dma - > tx_addr ) ) {
dma_free_coherent ( dma - > rxchan - > device - > dev , dma - > rx_size ,
dma - > rx_buf , dma - > rx_addr ) ;
2016-04-09 22:14:32 -07:00
ret = - ENOMEM ;
2014-04-28 15:59:56 +03:00
goto err ;
}
2013-01-10 11:25:11 +02:00
dev_dbg_ratelimited ( p - > port . dev , " got both dma channels \n " ) ;
return 0 ;
2014-04-28 15:59:56 +03:00
err :
dma_release_channel ( dma - > txchan ) ;
2016-04-09 22:14:32 -07:00
release_rx :
dma_release_channel ( dma - > rxchan ) ;
return ret ;
2013-01-10 11:25:11 +02:00
}
EXPORT_SYMBOL_GPL ( serial8250_request_dma ) ;
void serial8250_release_dma ( struct uart_8250_port * p )
{
struct uart_8250_dma * dma = p - > dma ;
if ( ! dma )
return ;
/* Release RX resources */
2016-08-17 19:20:24 +03:00
dmaengine_terminate_sync ( dma - > rxchan ) ;
2013-01-10 11:25:11 +02:00
dma_free_coherent ( dma - > rxchan - > device - > dev , dma - > rx_size , dma - > rx_buf ,
dma - > rx_addr ) ;
dma_release_channel ( dma - > rxchan ) ;
dma - > rxchan = NULL ;
/* Release TX resources */
2016-08-17 19:20:24 +03:00
dmaengine_terminate_sync ( dma - > txchan ) ;
2013-01-10 11:25:11 +02:00
dma_unmap_single ( dma - > txchan - > device - > dev , dma - > tx_addr ,
UART_XMIT_SIZE , DMA_TO_DEVICE ) ;
dma_release_channel ( dma - > txchan ) ;
dma - > txchan = NULL ;
dma - > tx_running = 0 ;
dev_dbg_ratelimited ( p - > port . dev , " dma channels released \n " ) ;
}
EXPORT_SYMBOL_GPL ( serial8250_release_dma ) ;