/*
 * Special handling for DW core on Intel MID platform
 *
 * Copyright (c) 2009, 2014 Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 */
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/slab.h>
#include <linux/spi/spi.h>
#include <linux/types.h>

#include "spi-dw.h"

#ifdef CONFIG_SPI_DW_MID_DMA
#include <linux/pci.h>
#include <linux/platform_data/dma-dw.h>

#define RX_BUSY		0
#define TX_BUSY		1
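
/*
 * Default DMA request lines on the Medfield DMA controller: the SPI TX
 * FIFO drains through destination request 1, the RX FIFO fills from
 * source request 0.
 */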
static struct dw_dma_slave mid_dma_tx = { .dst_id = 1 };
static struct dw_dma_slave mid_dma_rx = { .src_id = 0 };

static bool mid_spi_dma_chan_filter(struct dma_chan *chan, void *param)
{
	struct dw_dma_slave *s = param;

	if (s->dma_dev != chan->device->dev)
		return false;

	chan->private = s;
	return true;
}
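
/*
 * Look up the Medfield DMA controller on the PCI bus and request one RX
 * and one TX channel from it. The spi_controller's dma_rx/dma_tx hooks
 * are filled in so the SPI core can map transfer buffers for us.
 */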
static int mid_spi_dma_init(struct dw_spi *dws)
{
	struct pci_dev *dma_dev;
	struct dw_dma_slave *tx = dws->dma_tx;
	struct dw_dma_slave *rx = dws->dma_rx;
	dma_cap_mask_t mask;

	/*
	 * Get pci device for DMA controller, currently it could only
	 * be the DMA controller of Medfield
	 */
	dma_dev = pci_get_device(PCI_VENDOR_ID_INTEL, 0x0827, NULL);
	if (!dma_dev)
		return -ENODEV;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* 1. Init rx channel */
	rx->dma_dev = &dma_dev->dev;
	dws->rxchan = dma_request_channel(mask, mid_spi_dma_chan_filter, rx);
	if (!dws->rxchan)
		goto err_exit;
	dws->master->dma_rx = dws->rxchan;

	/* 2. Init tx channel */
	tx->dma_dev = &dma_dev->dev;
	dws->txchan = dma_request_channel(mask, mid_spi_dma_chan_filter, tx);
	if (!dws->txchan)
		goto free_rxchan;
	dws->master->dma_tx = dws->txchan;

	dws->dma_inited = 1;
	return 0;

free_rxchan:
	dma_release_channel(dws->rxchan);
err_exit:
	return -EBUSY;
}
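
/*
 * Tear down in the reverse order of mid_spi_dma_init(); terminating a
 * channel before releasing it ensures no descriptor is still in flight
 * when the channel is handed back.
 */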
static void mid_spi_dma_exit(struct dw_spi *dws)
{
	if (!dws->dma_inited)
		return;

	dmaengine_terminate_sync(dws->txchan);
	dma_release_channel(dws->txchan);

	dmaengine_terminate_sync(dws->rxchan);
	dma_release_channel(dws->rxchan);
}
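
/*
 * IRQ handler used while a DMA transfer is in flight. Only the FIFO
 * error interrupts are unmasked in this mode (see mid_spi_dma_setup()),
 * so any pending status means an overrun/underrun: reset the chip and
 * fail the current message.
 */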
static irqreturn_t dma_transfer(struct dw_spi *dws)
{
	u16 irq_status = dw_readl(dws, DW_SPI_ISR);

	if (!irq_status)
		return IRQ_NONE;

	dw_readl(dws, DW_SPI_ICR);
	spi_reset_chip(dws);

	dev_err(&dws->master->dev, "%s: FIFO overrun/underrun\n", __func__);
	dws->master->cur_msg->status = -EIO;
	spi_finalize_current_transfer(dws->master);
	return IRQ_HANDLED;
}
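
/*
 * DMA is only worth the setup cost when the transfer does not fit into
 * the controller's FIFO; anything smaller is handled faster via PIO.
 */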
static bool mid_spi_can_dma(struct spi_controller *master,
		struct spi_device *spi, struct spi_transfer *xfer)
{
	struct dw_spi *dws = spi_controller_get_devdata(master);

	if (!dws->dma_inited)
		return false;

	return xfer->len > dws->fifo_len;
}
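
/*
 * Map the SPI frame size in bytes to a dmaengine bus width. The DW core
 * transfers at most 16-bit frames here, so only 1- and 2-byte widths
 * are meaningful.
 */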
static enum dma_slave_buswidth convert_dma_width(u32 dma_width)
{
	if (dma_width == 1)
		return DMA_SLAVE_BUSWIDTH_1_BYTE;
	else if (dma_width == 2)
		return DMA_SLAVE_BUSWIDTH_2_BYTES;

	return DMA_SLAVE_BUSWIDTH_UNDEFINED;
}

/*
 * dws->dma_chan_busy is set before the DMA transfer starts; the callback
 * for the TX channel will clear the corresponding bit. The transfer is
 * finalized only once both channels are idle.
 */
static void dw_spi_dma_tx_done(void *arg)
{
	struct dw_spi *dws = arg;

	clear_bit(TX_BUSY, &dws->dma_chan_busy);
	if (test_bit(RX_BUSY, &dws->dma_chan_busy))
		return;
	spi_finalize_current_transfer(dws->master);
}

static struct dma_async_tx_descriptor *dw_spi_dma_prepare_tx(struct dw_spi *dws,
		struct spi_transfer *xfer)
{
	struct dma_slave_config txconf;
	struct dma_async_tx_descriptor *txdesc;

	if (!xfer->tx_buf)
		return NULL;

	txconf.direction = DMA_MEM_TO_DEV;
	txconf.dst_addr = dws->dma_addr;
	txconf.dst_maxburst = 16;
	txconf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	txconf.dst_addr_width = convert_dma_width(dws->dma_width);
	txconf.device_fc = false;

	dmaengine_slave_config(dws->txchan, &txconf);

	txdesc = dmaengine_prep_slave_sg(dws->txchan,
				xfer->tx_sg.sgl,
				xfer->tx_sg.nents,
				DMA_MEM_TO_DEV,
				DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc)
		return NULL;

	txdesc->callback = dw_spi_dma_tx_done;
	txdesc->callback_param = dws;

	return txdesc;
}

/*
 * dws->dma_chan_busy is set before the DMA transfer starts; the callback
 * for the RX channel will clear the corresponding bit.
 */
static void dw_spi_dma_rx_done(void *arg)
{
	struct dw_spi *dws = arg;

	clear_bit(RX_BUSY, &dws->dma_chan_busy);
	if (test_bit(TX_BUSY, &dws->dma_chan_busy))
		return;
	spi_finalize_current_transfer(dws->master);
}

static struct dma_async_tx_descriptor *dw_spi_dma_prepare_rx(struct dw_spi *dws,
		struct spi_transfer *xfer)
{
	struct dma_slave_config rxconf;
	struct dma_async_tx_descriptor *rxdesc;

	if (!xfer->rx_buf)
		return NULL;

	rxconf.direction = DMA_DEV_TO_MEM;
	rxconf.src_addr = dws->dma_addr;
	rxconf.src_maxburst = 16;
	rxconf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	rxconf.src_addr_width = convert_dma_width(dws->dma_width);
	rxconf.device_fc = false;

	dmaengine_slave_config(dws->rxchan, &rxconf);

	rxdesc = dmaengine_prep_slave_sg(dws->rxchan,
				xfer->rx_sg.sgl,
				xfer->rx_sg.nents,
				DMA_DEV_TO_MEM,
				DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!rxdesc)
		return NULL;

	rxdesc->callback = dw_spi_dma_rx_done;
	rxdesc->callback_param = dws;

	return rxdesc;
}
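
/*
 * Program the FIFO watermarks to match the channels' maxburst of 16:
 * with DMARDLR = 0xf the RX DMA request is raised once 16 entries sit
 * in the RX FIFO, and with DMATDLR = 0x10 the TX request is raised
 * while at least 16 slots are free (per the DW_apb_ssi threshold
 * conventions).
 */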
static int mid_spi_dma_setup(struct dw_spi *dws, struct spi_transfer *xfer)
{
	u16 dma_ctrl = 0;

	dw_writel(dws, DW_SPI_DMARDLR, 0xf);
	dw_writel(dws, DW_SPI_DMATDLR, 0x10);

	if (xfer->tx_buf)
		dma_ctrl |= SPI_DMA_TDMAE;
	if (xfer->rx_buf)
		dma_ctrl |= SPI_DMA_RDMAE;
	dw_writel(dws, DW_SPI_DMACR, dma_ctrl);

	/* Set the interrupt mask */
	spi_umask_intr(dws, SPI_INT_TXOI | SPI_INT_RXUI | SPI_INT_RXOI);

	dws->transfer_handler = dma_transfer;

	return 0;
}
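
/*
 * Kick off a transfer: both descriptors are prepared up front, then the
 * channels are started. Completion is signalled from whichever of the
 * done callbacks runs last.
 */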
static int mid_spi_dma_transfer(struct dw_spi *dws, struct spi_transfer *xfer)
{
	struct dma_async_tx_descriptor *txdesc, *rxdesc;

	/* Prepare the TX dma transfer */
	txdesc = dw_spi_dma_prepare_tx(dws, xfer);

	/* Prepare the RX dma transfer */
	rxdesc = dw_spi_dma_prepare_rx(dws, xfer);

	/*
	 * RX must be started before TX: the SPI master drives the clock, so
	 * as soon as TX data goes out, RX data starts coming back and the
	 * RX channel has to be ready to drain the FIFO.
	 */
	if (rxdesc) {
		set_bit(RX_BUSY, &dws->dma_chan_busy);
		dmaengine_submit(rxdesc);
		dma_async_issue_pending(dws->rxchan);
	}

	if (txdesc) {
		set_bit(TX_BUSY, &dws->dma_chan_busy);
		dmaengine_submit(txdesc);
		dma_async_issue_pending(dws->txchan);
	}

	return 0;
}
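
/*
 * Abort path (e.g. transfer timeout): terminate only the channels that
 * were actually started, as tracked by the dma_chan_busy bits.
 */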
static void mid_spi_dma_stop(struct dw_spi *dws)
{
	if (test_bit(TX_BUSY, &dws->dma_chan_busy)) {
		dmaengine_terminate_sync(dws->txchan);
		clear_bit(TX_BUSY, &dws->dma_chan_busy);
	}

	if (test_bit(RX_BUSY, &dws->dma_chan_busy)) {
		dmaengine_terminate_sync(dws->rxchan);
		clear_bit(RX_BUSY, &dws->dma_chan_busy);
	}
}

static const struct dw_spi_dma_ops mid_dma_ops = {
	.dma_init	= mid_spi_dma_init,
	.dma_exit	= mid_spi_dma_exit,
	.dma_setup	= mid_spi_dma_setup,
	.can_dma	= mid_spi_can_dma,
	.dma_transfer	= mid_spi_dma_transfer,
	.dma_stop	= mid_spi_dma_stop,
};
#endif

/* Some specific info for SPI0 controller on Intel MID */

/* HW info for MRST Clk Control Unit, one 32-bit reg per SPI controller */
#define MRST_SPI_CLK_BASE	100000000	/* 100 MHz */
#define MRST_CLK_SPI_REG	0xff11d86c
#define CLK_SPI_BDIV_OFFSET	0
#define CLK_SPI_BDIV_MASK	0x00000007
#define CLK_SPI_CDIV_OFFSET	9
#define CLK_SPI_CDIV_MASK	0x00000e00
#define CLK_SPI_DISABLE_OFFSET	8
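
/*
 * The controller's base rate is MRST_SPI_CLK_BASE divided by the CDIV
 * field of its clock-control register plus one; e.g. a CDIV of 2 gives
 * 100 MHz / 3, roughly 33.3 MHz.
 */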
int dw_spi_mid_init(struct dw_spi *dws)
{
	void __iomem *clk_reg;
	u32 clk_cdiv;

	clk_reg = ioremap_nocache(MRST_CLK_SPI_REG, 16);
	if (!clk_reg)
		return -ENOMEM;

	/* Get SPI controller operating freq info */
	clk_cdiv = readl(clk_reg + dws->bus_num * sizeof(u32));
	clk_cdiv &= CLK_SPI_CDIV_MASK;
	clk_cdiv >>= CLK_SPI_CDIV_OFFSET;
	dws->max_freq = MRST_SPI_CLK_BASE / (clk_cdiv + 1);

	iounmap(clk_reg);

#ifdef CONFIG_SPI_DW_MID_DMA
	dws->dma_tx = &mid_dma_tx;
	dws->dma_rx = &mid_dma_rx;
	dws->dma_ops = &mid_dma_ops;
#endif
	return 0;
}