/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * (C) 2001 - 2013 Tensilica Inc.
 */
# ifndef _XTENSA_CACHEFLUSH_H
# define _XTENSA_CACHEFLUSH_H
# include <linux/mm.h>
# include <asm/processor.h>
# include <asm/page.h>
/*
 * Low-level routines for cache flushing.
 *
 * invalidate data or instruction cache:
 *
 *	__invalidate_icache_all()
 *	__invalidate_icache_page(adr)
 *	__invalidate_dcache_page(adr)
 *	__invalidate_icache_range(from,size)
 *	__invalidate_dcache_range(from,size)
 *
 * flush data cache:
 *
 *	__flush_dcache_page(adr)
 *
 * flush and invalidate data cache:
 *
 *	__flush_invalidate_dcache_all()
 *	__flush_invalidate_dcache_page(adr)
 *	__flush_invalidate_dcache_range(from,size)
 *
 * specials for cache aliasing:
 *
 *	__flush_invalidate_dcache_page_alias(vaddr,paddr)
 *	__invalidate_dcache_page_alias(vaddr,paddr)
 *	__invalidate_icache_page_alias(vaddr,paddr)
 */
/* Invalidate (discard without writeback) cache contents; see catalog above. */
extern void __invalidate_dcache_all(void);
extern void __invalidate_icache_all(void);
extern void __invalidate_dcache_page(unsigned long);
extern void __invalidate_icache_page(unsigned long);
extern void __invalidate_icache_range(unsigned long, unsigned long);
extern void __invalidate_dcache_range(unsigned long, unsigned long);
#if XCHAL_DCACHE_IS_WRITEBACK

/* Writeback dcache: flushing (writing back dirty lines) is a real operation. */
extern void __flush_invalidate_dcache_all(void);
extern void __flush_dcache_page(unsigned long);
extern void __flush_dcache_range(unsigned long, unsigned long);
extern void __flush_invalidate_dcache_page(unsigned long);
extern void __flush_invalidate_dcache_range(unsigned long, unsigned long);

#else

/*
 * Non-writeback dcache: there is never dirty data to write back, so plain
 * flushes are no-ops and flush+invalidate reduces to a plain invalidate.
 */
static inline void __flush_dcache_page(unsigned long va)
{
}
static inline void __flush_dcache_range(unsigned long va, unsigned long sz)
{
}
#define __flush_invalidate_dcache_all()		__invalidate_dcache_all()
#define __flush_invalidate_dcache_page(p)	__invalidate_dcache_page(p)
#define __flush_invalidate_dcache_range(p,s)	__invalidate_dcache_range(p,s)

#endif
#if defined(CONFIG_MMU) && (DCACHE_WAY_SIZE > PAGE_SIZE)

/* Dcache way exceeds page size: aliases exist and need real maintenance. */
extern void __flush_invalidate_dcache_page_alias(unsigned long, unsigned long);
extern void __invalidate_dcache_page_alias(unsigned long, unsigned long);

#else

/* No dcache aliasing possible: alias maintenance is a no-op. */
static inline void __flush_invalidate_dcache_page_alias(unsigned long virt,
							unsigned long phys) { }
static inline void __invalidate_dcache_page_alias(unsigned long virt,
						  unsigned long phys) { }
#endif
#if defined(CONFIG_MMU) && (ICACHE_WAY_SIZE > PAGE_SIZE)

/* Icache way exceeds page size: aliases exist and need real maintenance. */
extern void __invalidate_icache_page_alias(unsigned long, unsigned long);

#else

/* No icache aliasing possible: alias maintenance is a no-op. */
static inline void __invalidate_icache_page_alias(unsigned long virt,
						  unsigned long phys) { }
#endif
/*
 * We have physically tagged caches - nothing to do here -
 * unless we have cache aliasing.
 *
 * Pages can get remapped.  Because this might change the 'color' of that page,
 * we have to flush the cache before the PTE is changed.
 * (see also Documentation/core-api/cachetlb.rst)
 */
/*
 * Real cache maintenance is only needed when dcache aliasing is possible
 * or when running SMP; otherwise all of these operations are no-ops.
 */
#if defined(CONFIG_MMU) && \
	((DCACHE_WAY_SIZE > PAGE_SIZE) || defined(CONFIG_SMP))

#ifdef CONFIG_SMP
/* SMP: cross-call variants defined out of line. */
void flush_cache_all(void);
void flush_cache_range(struct vm_area_struct *, ulong, ulong);
void flush_icache_range(unsigned long start, unsigned long end);
void flush_cache_page(struct vm_area_struct *,
		      unsigned long, unsigned long);
#else
/* UP: the generic entry points are just the local operations. */
#define flush_cache_all local_flush_cache_all
#define flush_cache_range local_flush_cache_range
#define flush_icache_range local_flush_icache_range
#define flush_cache_page  local_flush_cache_page
#endif

#define local_flush_cache_all()						\
	do {								\
		__flush_invalidate_dcache_all();			\
		__invalidate_icache_all();				\
	} while (0)

#define flush_cache_mm(mm)		flush_cache_all()
#define flush_cache_dup_mm(mm)		flush_cache_mm(mm)

#define flush_cache_vmap(start,end)	flush_cache_all()
#define flush_cache_vunmap(start,end)	flush_cache_all()

#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 1
extern void flush_dcache_page(struct page *);

void local_flush_cache_range(struct vm_area_struct *vma,
		unsigned long start, unsigned long end);
void local_flush_cache_page(struct vm_area_struct *vma,
		unsigned long address, unsigned long pfn);

#else

/* Physically tagged, no aliasing, UP: nothing to maintain. */
#define flush_cache_all()				do { } while (0)
#define flush_cache_mm(mm)				do { } while (0)
#define flush_cache_dup_mm(mm)				do { } while (0)

#define flush_cache_vmap(start,end)			do { } while (0)
#define flush_cache_vunmap(start,end)			do { } while (0)

#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 0
#define flush_dcache_page(page)				do { } while (0)

#define flush_icache_range local_flush_icache_range
#define flush_cache_page(vma, addr, pfn)		do { } while (0)
#define flush_cache_range(vma, start, end)		do { } while (0)

#endif
/*
 * Ensure consistency between data and instruction cache: write back any
 * dirty data in the range so it is visible to instruction fetch, then
 * discard the (possibly stale) icache contents for the same range.
 */
#define local_flush_icache_range(start, end)				\
	do {								\
		__flush_dcache_range(start, (end) - (start));		\
		__invalidate_icache_range(start, (end) - (start));	\
	} while (0)
/* This is not required, see Documentation/core-api/cachetlb.rst */
#define flush_icache_page(vma,page)		do { } while (0)

#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)
#if defined(CONFIG_MMU) && (DCACHE_WAY_SIZE > PAGE_SIZE)

/*
 * Dcache aliasing: the copy must go through the correct alias, handled by
 * the out-of-line implementations.
 */
extern void copy_to_user_page(struct vm_area_struct *, struct page *,
		unsigned long, void *, const void *, unsigned long);
extern void copy_from_user_page(struct vm_area_struct *, struct page *,
		unsigned long, void *, const void *, unsigned long);

#else

/*
 * No aliasing: a plain copy suffices.  A copy INTO a user page may deposit
 * instructions, so write back the destination range from the dcache and
 * invalidate the icache for it to keep the two caches consistent.
 */
#define copy_to_user_page(vma, page, vaddr, dst, src, len)		\
	do {								\
		memcpy(dst, src, len);					\
		__flush_dcache_range((unsigned long) dst, len);		\
		__invalidate_icache_range((unsigned long) dst, len);	\
	} while (0)

#define copy_from_user_page(vma, page, vaddr, dst, src, len)		\
	memcpy(dst, src, len)

#endif

#endif	/* _XTENSA_CACHEFLUSH_H */