/* 2005-04-16 15:20:36 -07:00 */
# ifndef _ASM_DMA_MAPPING_H
# define _ASM_DMA_MAPPING_H
# include <asm/scatterlist.h>
# include <asm/cache.h>
/* 2009-06-04 00:16:04 +09:00 */
# include <asm-generic/dma-coherent.h>
/* 2005-04-16 15:20:36 -07:00 */
/* 2011-05-18 13:14:36 +01:00 */
# ifndef CONFIG_SGI_IP27 /* Kludge to fix 2.6.39 build for IP27 */
/* 2010-10-01 13:27:32 -07:00 */
# include <dma-coherence.h>
/* 2011-05-18 13:14:36 +01:00 */
# endif
/* 2005-04-16 15:20:36 -07:00 */
/* 2010-10-01 13:27:32 -07:00 */
extern struct dma_map_ops * mips_dma_map_ops ;
/* 2005-04-16 15:20:36 -07:00 */
/* 2010-10-01 13:27:32 -07:00 */
static inline struct dma_map_ops * get_dma_ops ( struct device * dev )
{
if ( dev & & dev - > archdata . dma_ops )
return dev - > archdata . dma_ops ;
else
return mips_dma_map_ops ;
}
/* 2005-04-16 15:20:36 -07:00 */
/* 2010-10-01 13:27:32 -07:00 */
/*
 * Test whether the range @addr .. @addr + @size - 1 lies entirely within
 * @dev's DMA addressing capability.  A device with no dma_mask set cannot
 * perform DMA at all.
 *
 * Fix: stray git-blame date lines embedded in the body (which broke
 * compilation) removed; return boolean literal from a bool function.
 */
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
	if (!dev->dma_mask)
		return false;

	return addr + size <= *dev->dma_mask;
}
/* No-op on MIPS: nothing needs doing when a buffer becomes CPU-clean. */
static inline void dma_mark_clean(void *addr, size_t size)
{
}
# include <asm-generic/dma-mapping-common.h>
/*
 * Ask @dev's DMA ops whether @mask is a usable DMA addressing mask.
 * Returns non-zero if the mask is supported.
 *
 * Fix: stray git-blame date lines interleaved with the body (which broke
 * compilation) removed.
 */
static inline int dma_supported(struct device *dev, u64 mask)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	return ops->dma_supported(dev, mask);
}
/* 2010-10-01 13:27:32 -07:00 */
/*
 * Check whether @dma_addr, as returned by a dma_map_single()/dma_map_page()
 * style call, encodes a mapping failure.  Returns non-zero on error.
 *
 * Fix: the parameter was misleadingly named "mask"; the value handed to
 * ops->mapping_error() is a DMA address, not an addressing mask.  Renamed
 * for clarity (type and ABI unchanged).
 */
static inline int dma_mapping_error(struct device *dev, u64 dma_addr)
{
	struct dma_map_ops *ops = get_dma_ops(dev);

	return ops->mapping_error(dev, dma_addr);
}
/* 2005-04-16 15:20:36 -07:00 */
/*
 * Install @mask as @dev's DMA addressing mask, after verifying the device
 * has a mask pointer and its DMA ops accept the mask.
 * Returns 0 on success, -EIO if the mask cannot be used.
 */
static inline int dma_set_mask(struct device *dev, u64 mask)
{
	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;

	*dev->dma_mask = mask;

	return 0;
}
/* 2006-12-06 20:38:56 -08:00 */
extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	       enum dma_data_direction direction);
/* 2005-04-16 15:20:36 -07:00 */
/* 2010-10-01 13:27:32 -07:00 */
/*
 * Allocate @size bytes of consistent (coherent) DMA memory for @dev via
 * its DMA ops, storing the bus address in *@dma_handle, and report the
 * allocation to the DMA debugging infrastructure.  Returns the CPU
 * virtual address of the buffer.
 */
static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t gfp)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *cpu_addr = ops->alloc_coherent(dev, size, dma_handle, gfp);

	debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr);

	return cpu_addr;
}
/*
 * Release consistent DMA memory previously obtained from
 * dma_alloc_coherent(), then notify the DMA debugging infrastructure.
 */
static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *vaddr, dma_addr_t dma_handle)
{
	get_dma_ops(dev)->free_coherent(dev, size, vaddr, dma_handle);
	debug_dma_free_coherent(dev, size, vaddr, dma_handle);
}
void * dma_alloc_noncoherent ( struct device * dev , size_t size ,
dma_addr_t * dma_handle , gfp_t flag ) ;
void dma_free_noncoherent ( struct device * dev , size_t size ,
void * vaddr , dma_addr_t dma_handle ) ;
/* 2005-04-16 15:20:36 -07:00 */
# endif /* _ASM_DMA_MAPPING_H */