#ifndef _ASM_SPARC64_DMA_MAPPING_H
#define _ASM_SPARC64_DMA_MAPPING_H
#include <linux/scatterlist.h>
#include <linux/mm.h>
#define DMA_ERROR_CODE	(~(dma_addr_t)0x0)

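/*
 * Dispatch table for the platform DMA implementation; on sparc64 the
 * concrete ops are provided elsewhere (historically by the PCI and SBUS
 * IOMMU code).  Every inline below simply forwards through dma_ops.
 */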
struct dma_ops {
	void *(*alloc_coherent)(struct device *dev, size_t size,
				dma_addr_t *dma_handle, gfp_t flag);
	void (*free_coherent)(struct device *dev, size_t size,
			      void *cpu_addr, dma_addr_t dma_handle);
	dma_addr_t (*map_single)(struct device *dev, void *cpu_addr,
				 size_t size,
				 enum dma_data_direction direction);
	void (*unmap_single)(struct device *dev, dma_addr_t dma_addr,
			     size_t size,
			     enum dma_data_direction direction);
	int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents,
		      enum dma_data_direction direction);
	void (*unmap_sg)(struct device *dev, struct scatterlist *sg,
			 int nhwentries,
			 enum dma_data_direction direction);
	void (*sync_single_for_cpu)(struct device *dev,
				    dma_addr_t dma_handle, size_t size,
				    enum dma_data_direction direction);
	void (*sync_single_for_device)(struct device *dev,
				       dma_addr_t dma_handle, size_t size,
				       enum dma_data_direction direction);
	void (*sync_sg_for_cpu)(struct device *dev, struct scatterlist *sg,
				int nelems,
				enum dma_data_direction direction);
	void (*sync_sg_for_device)(struct device *dev, struct scatterlist *sg,
				   int nelems,
				   enum dma_data_direction direction);
};
extern const struct dma_ops *dma_ops;

extern int dma_supported(struct device *dev, u64 mask);
extern int dma_set_mask(struct device *dev, u64 dma_mask);
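/*
 * Illustrative streaming-DMA usage (buf, len and dev are hypothetical):
 *
 *	dma_addr_t busaddr;
 *
 *	busaddr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(busaddr))
 *		return -ENOMEM;
 *	... hand busaddr to the device and wait for completion ...
 *	dma_unmap_single(dev, busaddr, len, DMA_TO_DEVICE);
 */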
static inline void *dma_alloc_coherent(struct device *dev, size_t size,
				       dma_addr_t *dma_handle, gfp_t flag)
{
	return dma_ops->alloc_coherent(dev, size, dma_handle, flag);
}

static inline void dma_free_coherent(struct device *dev, size_t size,
				     void *cpu_addr, dma_addr_t dma_handle)
{
	dma_ops->free_coherent(dev, size, cpu_addr, dma_handle);
}
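/*
 * Illustrative coherent-DMA usage (names and sizes are hypothetical):
 *
 *	dma_addr_t ring_dma;
 *	void *ring;
 *
 *	ring = dma_alloc_coherent(dev, PAGE_SIZE, &ring_dma, GFP_KERNEL);
 *	if (!ring)
 *		return -ENOMEM;
 *	... CPU stores and device accesses need no explicit syncs ...
 *	dma_free_coherent(dev, PAGE_SIZE, ring, ring_dma);
 */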

static inline dma_addr_t dma_map_single(struct device *dev, void *cpu_addr,
					size_t size,
					enum dma_data_direction direction)
{
	return dma_ops->map_single(dev, cpu_addr, size, direction);
}

static inline void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
				    size_t size,
				    enum dma_data_direction direction)
{
	dma_ops->unmap_single(dev, dma_addr, size, direction);
}

static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
				      unsigned long offset, size_t size,
				      enum dma_data_direction direction)
{
	return dma_ops->map_single(dev, page_address(page) + offset,
				   size, direction);
}

static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
				  size_t size,
				  enum dma_data_direction direction)
{
	dma_ops->unmap_single(dev, dma_address, size, direction);
}
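/*
 * Note: dma_map_page()/dma_unmap_page() above forward to the
 * map_single()/unmap_single() ops via page_address(), which assumes
 * every page has a kernel virtual address; sparc64 has no highmem, so
 * that always holds.
 */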

static inline int dma_map_sg(struct device *dev, struct scatterlist *sg,
			     int nents, enum dma_data_direction direction)
{
	return dma_ops->map_sg(dev, sg, nents, direction);
}

static inline void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
				int nents, enum dma_data_direction direction)
{
	dma_ops->unmap_sg(dev, sg, nents, direction);
}
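/*
 * Illustrative scatter/gather usage (sglist and nents are hypothetical):
 *
 *	int count = dma_map_sg(dev, sglist, nents, DMA_FROM_DEVICE);
 *	if (count == 0)
 *		return -ENOMEM;
 *	... program the hardware with the 'count' mapped entries ...
 *	dma_unmap_sg(dev, sglist, nents, DMA_FROM_DEVICE);
 *
 * Note that dma_unmap_sg() takes the original nents, not the count
 * returned by dma_map_sg().
 */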

static inline void dma_sync_single_for_cpu(struct device *dev,
					   dma_addr_t dma_handle, size_t size,
					   enum dma_data_direction direction)
{
	dma_ops->sync_single_for_cpu(dev, dma_handle, size, direction);
}

static inline void dma_sync_single_for_device(struct device *dev,
					      dma_addr_t dma_handle,
					      size_t size,
					      enum dma_data_direction direction)
{
	dma_ops->sync_single_for_device(dev, dma_handle, size, direction);
}
static inline void dma_sync_single_range_for_cpu(struct device *dev,
						 dma_addr_t dma_handle,
						 unsigned long offset,
						 size_t size,
						 enum dma_data_direction direction)
{
	dma_sync_single_for_cpu(dev, dma_handle + offset, size, direction);
}

static inline void dma_sync_single_range_for_device(struct device *dev,
						    dma_addr_t dma_handle,
						    unsigned long offset,
						    size_t size,
						    enum dma_data_direction direction)
{
	dma_sync_single_for_device(dev, dma_handle + offset, size, direction);
}
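/*
 * The range variants above just offset the handle before delegating to
 * the full-buffer sync; e.g. to let the CPU read bytes 64..127 of a
 * streaming mapping (busaddr is hypothetical):
 *
 *	dma_sync_single_range_for_cpu(dev, busaddr, 64, 64, DMA_FROM_DEVICE);
 */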

static inline void dma_sync_sg_for_cpu(struct device *dev,
				       struct scatterlist *sg, int nelems,
				       enum dma_data_direction direction)
{
	dma_ops->sync_sg_for_cpu(dev, sg, nelems, direction);
}

static inline void dma_sync_sg_for_device(struct device *dev,
					  struct scatterlist *sg, int nelems,
					  enum dma_data_direction direction)
{
	dma_ops->sync_sg_for_device(dev, sg, nelems, direction);
}

static inline int dma_mapping_error(dma_addr_t dma_addr)
{
	return (dma_addr == DMA_ERROR_CODE);
}
static inline int dma_get_cache_alignment(void)
{
	/* no easy way to get cache size on all processors, so return
	 * the maximum possible, to be safe */
	return (1 << INTERNODE_CACHE_SHIFT);
}
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
#define dma_is_consistent(d, h)	(1)
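/* DMA on sparc64 is cache-coherent, so the noncoherent allocators can
 * alias the coherent ones and dma_is_consistent() is always true. */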
#endif /* _ASM_SPARC64_DMA_MAPPING_H */