#ifndef __ASM_SH_DMA_MAPPING_H
#define __ASM_SH_DMA_MAPPING_H

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <asm/cacheflush.h>
#include <asm/io.h>
#include <asm-generic/dma-coherent.h>

extern struct bus_type pci_bus_type;
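
/*
 * dma_supported() unconditionally reports success on this platform, so
 * dma_set_mask() only has to check that the device provides a dma_mask
 * pointer to store the new mask in.
 */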
#define dma_supported(dev, mask)	(1)

static inline int dma_set_mask(struct device *dev, u64 mask)
{
	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;

	*dev->dma_mask = mask;

	return 0;
}

void *dma_alloc_coherent(struct device *dev, size_t size,
			 dma_addr_t *dma_handle, gfp_t flag);

void dma_free_coherent(struct device *dev, size_t size,
		       void *vaddr, dma_addr_t dma_handle);

void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
		    enum dma_data_direction dir);

#define dma_alloc_noncoherent(d, s, h, f)	dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h)	dma_free_coherent(d, s, v, h)
#define dma_is_consistent(d, h)			(1)
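
/*
 * Streaming mappings: when PCI is coherent (CONFIG_PCI set and
 * CONFIG_SH_PCIDMA_NONCOHERENT not set) a PCI device's buffer needs no
 * cache maintenance and the handle is a plain virt_to_phys() translation;
 * otherwise the buffer is flushed with dma_cache_sync() first so the
 * device observes up-to-date data.
 */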
static inline dma_addr_t dma_map_single(struct device *dev,
					void *ptr, size_t size,
					enum dma_data_direction dir)
{
#if defined(CONFIG_PCI) && !defined(CONFIG_SH_PCIDMA_NONCOHERENT)
	if (dev->bus == &pci_bus_type)
		return virt_to_phys(ptr);
#endif
	dma_cache_sync(dev, ptr, size, dir);

	return virt_to_phys(ptr);
}

#define dma_unmap_single(dev, addr, size, dir)	do { } while (0)
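
/*
 * A minimal usage sketch for the single-buffer API above; "mydev", "buf"
 * and "len" are hypothetical driver-side names:
 *
 *	dma_addr_t handle;
 *
 *	handle = dma_map_single(mydev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(mydev, handle))
 *		return -EIO;
 *	...run the transfer...
 *	dma_unmap_single(mydev, handle, len, DMA_TO_DEVICE);
 */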

static inline int dma_map_sg(struct device *dev, struct scatterlist *sg,
			     int nents, enum dma_data_direction dir)
{
	int i;

	for (i = 0; i < nents; i++) {
#if !defined(CONFIG_PCI) || defined(CONFIG_SH_PCIDMA_NONCOHERENT)
		dma_cache_sync(dev, sg_virt(&sg[i]), sg[i].length, dir);
#endif
		sg[i].dma_address = sg_phys(&sg[i]);
	}

	return nents;
}

#define dma_unmap_sg(dev, sg, nents, dir)	do { } while (0)
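
/*
 * Page mappings piggyback on dma_map_single() via the page's kernel
 * virtual address, which assumes a directly mapped (lowmem) page.
 */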
static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
				      unsigned long offset, size_t size,
				      enum dma_data_direction dir)
{
	return dma_map_single(dev, page_address(page) + offset, size, dir);
}

static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
				  size_t size, enum dma_data_direction dir)
{
	dma_unmap_single(dev, dma_address, size, dir);
}
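
/*
 * Syncing is a no-op for coherent PCI devices; in every other case the
 * cache lines covering the buffer are flushed via dma_cache_sync().
 */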
static inline void dma_sync_single(struct device *dev, dma_addr_t dma_handle,
				   size_t size, enum dma_data_direction dir)
{
#if defined(CONFIG_PCI) && !defined(CONFIG_SH_PCIDMA_NONCOHERENT)
	if (dev->bus == &pci_bus_type)
		return;
#endif
	dma_cache_sync(dev, phys_to_virt(dma_handle), size, dir);
}

static inline void dma_sync_single_range(struct device *dev,
					 dma_addr_t dma_handle,
					 unsigned long offset, size_t size,
					 enum dma_data_direction dir)
{
#if defined(CONFIG_PCI) && !defined(CONFIG_SH_PCIDMA_NONCOHERENT)
	if (dev->bus == &pci_bus_type)
		return;
#endif
	dma_cache_sync(dev, phys_to_virt(dma_handle) + offset, size, dir);
}

static inline void dma_sync_sg(struct device *dev, struct scatterlist *sg,
			       int nelems, enum dma_data_direction dir)
{
	int i;

	for (i = 0; i < nelems; i++) {
#if !defined(CONFIG_PCI) || defined(CONFIG_SH_PCIDMA_NONCOHERENT)
		dma_cache_sync(dev, sg_virt(&sg[i]), sg[i].length, dir);
#endif
		sg[i].dma_address = sg_phys(&sg[i]);
	}
}
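
/*
 * The *_for_cpu() and *_for_device() variants below do not differentiate
 * the direction of the ownership transfer; both simply forward to
 * dma_sync_single() or dma_sync_sg().
 */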
static inline void dma_sync_single_for_cpu(struct device *dev,
					   dma_addr_t dma_handle, size_t size,
					   enum dma_data_direction dir)
{
	dma_sync_single(dev, dma_handle, size, dir);
}

static inline void dma_sync_single_for_device(struct device *dev,
					      dma_addr_t dma_handle,
					      size_t size,
					      enum dma_data_direction dir)
{
	dma_sync_single(dev, dma_handle, size, dir);
}

static inline void dma_sync_single_range_for_cpu(struct device *dev,
						 dma_addr_t dma_handle,
						 unsigned long offset,
						 size_t size,
						 enum dma_data_direction direction)
{
	dma_sync_single_for_cpu(dev, dma_handle + offset, size, direction);
}

static inline void dma_sync_single_range_for_device(struct device *dev,
						    dma_addr_t dma_handle,
						    unsigned long offset,
						    size_t size,
						    enum dma_data_direction direction)
{
	dma_sync_single_for_device(dev, dma_handle + offset, size, direction);
}

static inline void dma_sync_sg_for_cpu(struct device *dev,
				       struct scatterlist *sg, int nelems,
				       enum dma_data_direction dir)
{
	dma_sync_sg(dev, sg, nelems, dir);
}

static inline void dma_sync_sg_for_device(struct device *dev,
					  struct scatterlist *sg, int nelems,
					  enum dma_data_direction dir)
{
	dma_sync_sg(dev, sg, nelems, dir);
}

static inline int dma_get_cache_alignment(void)
{
	/*
	 * Each processor family will define its own L1_CACHE_SHIFT,
	 * L1_CACHE_BYTES wraps to this, so this is always safe.
	 */
	return L1_CACHE_BYTES;
}

static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	return dma_addr == 0;
}
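
/*
 * Per-device coherent memory pools (see <asm-generic/dma-coherent.h>):
 * a driver may declare a bus-addressable region from which subsequent
 * dma_alloc_coherent() calls for that device are satisfied.
 */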
#define ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
extern int
dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
			    dma_addr_t device_addr, size_t size, int flags);
extern void
dma_release_declared_memory(struct device *dev);
extern void *
dma_mark_declared_memory_occupied(struct device *dev,
				  dma_addr_t device_addr, size_t size);

#endif /* __ASM_SH_DMA_MAPPING_H */