// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/spinlock.h>
#include <linux/smp.h>
#include <linux/mm.h>
#include <asm/cache.h>
#include <asm/barrier.h>

/* for L1-cache */
#define INS_CACHE	(1 << 0)
#define DATA_CACHE	(1 << 1)
#define CACHE_INV	(1 << 4)
#define CACHE_CLR	(1 << 5)
#define CACHE_OMS	(1 << 6)
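
/*
 * Invalidate the whole local instruction cache: write the
 * INS_CACHE | CACHE_INV command to the cr17 cache-control register and
 * synchronize with sync_is().
 */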
void local_icache_inv_all(void *priv)
{
	mtcr("cr17", INS_CACHE | CACHE_INV);
	sync_is();
}
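
/*
 * With CONFIG_CPU_HAS_ICACHE_INS the icache.iva instruction invalidates
 * the icache line by line; otherwise the fallback below drives the cache
 * through the cr22/cr17 control registers, which only act on the local
 * CPU and therefore must be cross-called to every CPU.
 */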
#ifdef CONFIG_CPU_HAS_ICACHE_INS
void icache_inv_range(unsigned long start, unsigned long end)
{
	unsigned long i = start & ~(L1_CACHE_BYTES - 1);

	for (; i < end; i += L1_CACHE_BYTES)
		asm volatile("icache.iva %0\n" : : "r"(i) : "memory");
	sync_is();
}
#else
struct cache_range {
	unsigned long start;
	unsigned long end;
};

static DEFINE_SPINLOCK(cache_lock);
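
/*
 * Issue one line-sized cache operation through the control registers:
 * cr22 holds the virtual address of the line and cr17 takes the command
 * bits (CACHE_OMS selects the by-address form of the operation).
 */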
static inline void cache_op_line(unsigned long i, unsigned int val)
{
	mtcr("cr22", i);
	mtcr("cr17", val);
}
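
/*
 * Invalidate the icache lines covering [start, end) on the local CPU.
 * The two register writes in cache_op_line() are not atomic, so they are
 * done under cache_lock with interrupts disabled.
 */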
void local_icache_inv_range(void *priv)
{
	struct cache_range *param = priv;
	unsigned long i = param->start & ~(L1_CACHE_BYTES - 1);
	unsigned long flags;

	spin_lock_irqsave(&cache_lock, flags);

	for (; i < param->end; i += L1_CACHE_BYTES)
		cache_op_line(i, INS_CACHE | CACHE_INV | CACHE_OMS);

	spin_unlock_irqrestore(&cache_lock, flags);

	sync_is();
}
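
/*
 * Run the invalidation on every CPU. If interrupts are already disabled
 * an IPI cannot be used, so only the local icache is invalidated.
 */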
void icache_inv_range(unsigned long start, unsigned long end)
{
	struct cache_range param = { start, end };

	if (irqs_disabled())
		local_icache_inv_range(&param);
	else
		on_each_cpu(local_icache_inv_range, &param, 1);
}
#endif
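
/* Write back (clean) the single data-cache line containing 'start'. */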
inline void dcache_wb_line(unsigned long start)
{
	asm volatile("dcache.cval1 %0\n" : : "r"(start) : "memory");
	sync_is();
}
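
/* Write back (clean) the data-cache lines covering [start, end). */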
void dcache_wb_range(unsigned long start, unsigned long end)
{
	unsigned long i = start & ~(L1_CACHE_BYTES - 1);

	for (; i < end; i += L1_CACHE_BYTES)
		asm volatile("dcache.cval1 %0\n" : : "r"(i) : "memory");
	sync_is();
}
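
/*
 * Write the range back from the dcache and invalidate it from the
 * icache, e.g. after modifying code in memory.
 */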
void cache_wbinv_range(unsigned long start, unsigned long end)
{
	dcache_wb_range(start, end);
	icache_inv_range(start, end);
}
EXPORT_SYMBOL(cache_wbinv_range);
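
/* Clean and invalidate the data-cache lines covering [start, end). */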
void dma_wbinv_range(unsigned long start, unsigned long end)
{
	unsigned long i = start & ~(L1_CACHE_BYTES - 1);

	for (; i < end; i += L1_CACHE_BYTES)
		asm volatile("dcache.civa %0\n" : : "r"(i) : "memory");
	sync_is();
}
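
/* Discard the data-cache lines covering [start, end) without writing them back. */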
void dma_inv_range(unsigned long start, unsigned long end)
{
	unsigned long i = start & ~(L1_CACHE_BYTES - 1);

	for (; i < end; i += L1_CACHE_BYTES)
		asm volatile("dcache.iva %0\n" : : "r"(i) : "memory");
	sync_is();
}
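
/* Write back the data-cache lines covering [start, end) without invalidating them. */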
void dma_wb_range(unsigned long start, unsigned long end)
{
	unsigned long i = start & ~(L1_CACHE_BYTES - 1);

	for (; i < end; i += L1_CACHE_BYTES)
		asm volatile("dcache.cva %0\n" : : "r"(i) : "memory");
	sync_is();
}