2005-04-16 15:20:36 -07:00
# ifndef __ASM_SH_CACHEFLUSH_H
# define __ASM_SH_CACHEFLUSH_H
2007-07-31 17:07:28 +09:00
2005-04-16 15:20:36 -07:00
# ifdef __KERNEL__
2009-08-15 12:29:49 +09:00
# include <linux/mm.h>
2008-07-29 08:09:44 +09:00
# include <cpu/cacheflush.h>
2005-04-16 15:20:36 -07:00
2009-08-04 16:02:43 +09:00
# define ARCH_HAS_FLUSH_ANON_PAGE
extern void __flush_anon_page ( struct page * page , unsigned long ) ;
/*
 * flush_anon_page() - flush an anonymous page's user mapping.
 * @vma:    VMA the page belongs to (unused here; kept for the generic
 *          flush_anon_page() interface).
 * @page:   page to flush.
 * @vmaddr: user-space virtual address the page is mapped at.
 *
 * Only do the (expensive) flush when the D-cache actually has aliases
 * (boot_cpu_data.dcache.n_aliases != 0) and the page is anonymous;
 * otherwise no aliasing is possible and the call is a no-op.
 * NOTE: fixes the corrupted "& &" in the original, which read as a
 * bitwise-AND of an address-of and does not compile; logical && is
 * the intended operator.
 */
static inline void flush_anon_page(struct vm_area_struct *vma,
				   struct page *page, unsigned long vmaddr)
{
	if (boot_cpu_data.dcache.n_aliases && PageAnon(page))
		__flush_anon_page(page, vmaddr);
}
2008-01-07 13:50:18 +09:00
# define ARCH_HAS_FLUSH_KERNEL_DCACHE_PAGE
/*
 * flush_kernel_dcache_page() - flush a page modified through its
 * kernel mapping.
 * @page: page whose contents were written via the kernel address.
 *
 * On this architecture there is no distinction between a kernel-side
 * D-cache flush and the ordinary one, so simply delegate to
 * flush_dcache_page().
 */
static inline void flush_kernel_dcache_page(struct page *page)
{
	flush_dcache_page(page);
}
2007-11-05 16:18:16 +09:00
extern void copy_to_user_page ( struct vm_area_struct * vma ,
struct page * page , unsigned long vaddr , void * dst , const void * src ,
unsigned long len ) ;
2005-04-16 15:20:36 -07:00
2007-11-05 16:18:16 +09:00
extern void copy_from_user_page ( struct vm_area_struct * vma ,
struct page * page , unsigned long vaddr , void * dst , const void * src ,
unsigned long len ) ;
# define flush_cache_vmap(start, end) flush_cache_all()
# define flush_cache_vunmap(start, end) flush_cache_all()
2005-04-16 15:20:36 -07:00
2009-08-15 11:25:32 +09:00
# define flush_dcache_mmap_lock(mapping) do { } while (0)
# define flush_dcache_mmap_unlock(mapping) do { } while (0)
2009-08-15 09:19:19 +09:00
void kmap_coherent_init ( void ) ;
void * kmap_coherent ( struct page * page , unsigned long addr ) ;
void kunmap_coherent ( void ) ;
2009-08-15 09:49:32 +09:00
# define PG_dcache_dirty PG_arch_1
2009-08-15 11:05:42 +09:00
void cpu_cache_init ( void ) ;
2005-04-16 15:20:36 -07:00
# endif /* __KERNEL__ */
# endif /* __ASM_SH_CACHEFLUSH_H */