#ifndef _ASM_X86_DMA_MAPPING_H
#define _ASM_X86_DMA_MAPPING_H
/*
 * IOMMU interface. See Documentation/DMA-API-HOWTO.txt and
 * Documentation/DMA-API.txt for documentation.
 */
#include <linux/kmemcheck.h>
#include <linux/scatterlist.h>
#include <linux/dma-debug.h>
#include <linux/dma-attrs.h>
#include <asm/io.h>
#include <asm/swiotlb.h>
#include <linux/dma-contiguous.h>
/* ISA devices can only address the low 24 bits when ISA support is built. */
#ifdef CONFIG_ISA
# define ISA_DMA_BIT_MASK DMA_BIT_MASK(24)
#else
# define ISA_DMA_BIT_MASK DMA_BIT_MASK(32)
#endif

/* Sentinel returned by the x86 mapping ops to signal a failed mapping. */
#define DMA_ERROR_CODE	0

extern int iommu_merge;
extern struct device x86_dma_fallback_dev;
extern int panic_on_overflow;
extern struct dma_map_ops *dma_ops;

/*
 * get_dma_ops - return the dma_map_ops to use for @dev.
 *
 * With CONFIG_X86_DEV_DMA_OPS, a device may carry its own ops in
 * dev->archdata.dma_ops; a NULL device or a device without per-device
 * ops falls back to the global dma_ops.  Without that config option
 * every device shares the global dma_ops.
 */
static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
#ifndef CONFIG_X86_DEV_DMA_OPS
	return dma_ops;
#else
	if (unlikely(!dev) || !dev->archdata.dma_ops)
		return dma_ops;
	else
		return dev->archdata.dma_ops;
#endif
}
2015-09-09 15:39:39 -07:00
bool arch_dma_alloc_attrs ( struct device * * dev , gfp_t * gfp ) ;
# define arch_dma_alloc_attrs arch_dma_alloc_attrs
2015-09-09 15:39:49 -07:00
# define HAVE_ARCH_DMA_SUPPORTED 1
extern int dma_supported ( struct device * hwdev , u64 mask ) ;
2008-09-24 20:48:35 +09:00
extern void * dma_generic_alloc_coherent ( struct device * dev , size_t size ,
2012-03-27 14:28:18 +02:00
dma_addr_t * dma_addr , gfp_t flag ,
struct dma_attrs * attrs ) ;
2008-09-24 20:48:35 +09:00
2011-12-29 13:09:51 +01:00
extern void dma_generic_free_coherent ( struct device * dev , size_t size ,
void * vaddr , dma_addr_t dma_addr ,
struct dma_attrs * attrs ) ;
2012-04-04 19:40:10 +02:00
# ifdef CONFIG_X86_DMA_REMAP /* Platform code defines bridge-specific code */
extern bool dma_capable ( struct device * dev , dma_addr_t addr , size_t size ) ;
extern dma_addr_t phys_to_dma ( struct device * dev , phys_addr_t paddr ) ;
extern phys_addr_t dma_to_phys ( struct device * dev , dma_addr_t daddr ) ;
# else
2009-07-10 10:04:54 +09:00
static inline bool dma_capable ( struct device * dev , dma_addr_t addr , size_t size )
{
if ( ! dev - > dma_mask )
return 0 ;
2009-12-15 16:47:43 -08:00
return addr + size - 1 < = * dev - > dma_mask ;
2009-07-10 10:04:54 +09:00
}
2009-07-10 10:05:01 +09:00
static inline dma_addr_t phys_to_dma ( struct device * dev , phys_addr_t paddr )
{
return paddr ;
}
static inline phys_addr_t dma_to_phys ( struct device * dev , dma_addr_t daddr )
{
return daddr ;
}
2012-04-04 19:40:10 +02:00
# endif /* CONFIG_X86_DMA_REMAP */
2009-07-10 10:05:01 +09:00
2008-03-25 18:36:33 -03:00
static inline void
dma_cache_sync ( struct device * dev , void * vaddr , size_t size ,
enum dma_data_direction dir )
{
flush_write_buffers ( ) ;
}
2008-03-25 18:36:38 -03:00
2008-09-08 18:10:13 +09:00
static inline unsigned long dma_alloc_coherent_mask ( struct device * dev ,
gfp_t gfp )
{
unsigned long dma_mask = 0 ;
2008-03-25 18:36:39 -03:00
2008-09-08 18:10:13 +09:00
dma_mask = dev - > coherent_dma_mask ;
if ( ! dma_mask )
2009-04-06 19:01:18 -07:00
dma_mask = ( gfp & GFP_DMA ) ? DMA_BIT_MASK ( 24 ) : DMA_BIT_MASK ( 32 ) ;
2008-09-08 18:10:13 +09:00
return dma_mask ;
}
/*
 * Add the GFP zone flags needed so a coherent allocation for @dev
 * lands below its DMA mask: GFP_DMA for <=24-bit masks, and (on
 * 64-bit, where ZONE_DMA32 exists) GFP_DMA32 for <=32-bit masks.
 */
static inline gfp_t dma_alloc_coherent_gfp_flags(struct device *dev, gfp_t gfp)
{
	unsigned long dma_mask = dma_alloc_coherent_mask(dev, gfp);

	if (dma_mask <= DMA_BIT_MASK(24))
		gfp |= GFP_DMA;
#ifdef CONFIG_X86_64
	if (dma_mask <= DMA_BIT_MASK(32) && !(gfp & GFP_DMA))
		gfp |= GFP_DMA32;
#endif
	return gfp;
}

#endif /* _ASM_X86_DMA_MAPPING_H */