2008-07-17 21:55:51 -07:00
# ifndef ___ASM_SPARC_DMA_MAPPING_H
# define ___ASM_SPARC_DMA_MAPPING_H
2009-05-14 16:23:11 +00:00
# include <linux/scatterlist.h>
# include <linux/mm.h>
2009-08-10 11:53:13 +09:00
# include <linux/dma-debug.h>
2009-05-14 16:23:08 +00:00
# define DMA_ERROR_CODE (~(dma_addr_t)0x0)
extern int dma_supported ( struct device * dev , u64 mask ) ;
2009-05-14 16:23:11 +00:00
# define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
# define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
# define dma_is_consistent(d, h) (1)
2009-08-10 11:53:16 +09:00
extern struct dma_map_ops * dma_ops , pci32_dma_ops ;
extern struct bus_type pci_bus_type ;
2009-05-14 16:23:11 +00:00
2009-08-10 11:53:13 +09:00
static inline struct dma_map_ops * get_dma_ops ( struct device * dev )
2009-05-14 16:23:11 +00:00
{
2009-08-10 11:53:16 +09:00
# if defined(CONFIG_SPARC32) && defined(CONFIG_PCI)
if ( dev - > bus = = & pci_bus_type )
return & pci32_dma_ops ;
# endif
2009-08-10 11:53:13 +09:00
return dma_ops ;
2009-05-14 16:23:11 +00:00
}
2009-08-10 11:53:13 +09:00
# include <asm-generic/dma-mapping-common.h>
2009-05-14 16:23:11 +00:00
2009-08-10 11:53:13 +09:00
/*
 * Allocate a coherent DMA buffer via the device's DMA ops and record
 * the allocation with dma-debug.  Returns the CPU virtual address;
 * the bus address is stored through @dma_handle.
 */
static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t flag)
{
	void *vaddr;

	vaddr = get_dma_ops(dev)->alloc_coherent(dev, size, dma_handle, flag);
	debug_dma_alloc_coherent(dev, size, *dma_handle, vaddr);
	return vaddr;
}
2009-05-14 16:23:08 +00:00
2009-08-10 11:53:13 +09:00
/*
 * Release a buffer obtained from dma_alloc_coherent(): unregister it
 * with dma-debug first, then hand it back through the device's DMA ops.
 */
static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *cpu_addr, dma_addr_t dma_handle)
{
	debug_dma_free_coherent(dev, size, cpu_addr, dma_handle);
	get_dma_ops(dev)->free_coherent(dev, size, cpu_addr, dma_handle);
}
2009-05-14 16:23:11 +00:00
/* Nonzero iff @dma_addr is the all-ones error sentinel (DMA_ERROR_CODE). */
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	return dma_addr == DMA_ERROR_CODE;
}
static inline int dma_get_cache_alignment ( void )
{
/*
* no easy way to get cache size on all processors , so return
* the maximum possible , to be safe
*/
return ( 1 < < INTERNODE_CACHE_SHIFT ) ;
}
2010-03-10 15:23:35 -08:00
/*
 * Set the DMA mask for @dev.  Only PCI devices are supported here: the
 * mask is stored when the device has a dma_mask and the mask is usable
 * per dma_supported().  Returns 0 on success, -EINVAL otherwise
 * (including for all non-PCI buses).
 */
static inline int dma_set_mask(struct device *dev, u64 mask)
{
#ifdef CONFIG_PCI
	if (dev->bus == &pci_bus_type) {
		if (dev->dma_mask && dma_supported(dev, mask)) {
			*dev->dma_mask = mask;
			return 0;
		}
	}
#endif
	return -EINVAL;
}
2008-07-17 21:55:51 -07:00
# endif