/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * arch/arm/include/asm/arch_gicv3.h
 *
 * Copyright (C) 2015 ARM Ltd.
 */
#ifndef __ASM_ARCH_GICV3_H
#define __ASM_ARCH_GICV3_H

#ifndef __ASSEMBLY__

#include <linux/io.h>
#include <asm/barrier.h>
#include <asm/cacheflush.h>
#include <asm/cp15.h>

#define ICC_EOIR1			__ACCESS_CP15(c12, 0, c12, 1)
#define ICC_DIR				__ACCESS_CP15(c12, 0, c11, 1)
#define ICC_IAR1			__ACCESS_CP15(c12, 0, c12, 0)
#define ICC_SGI1R			__ACCESS_CP15_64(0, c12)
#define ICC_PMR				__ACCESS_CP15(c4, 0, c6, 0)
#define ICC_CTLR			__ACCESS_CP15(c12, 0, c12, 4)
#define ICC_SRE				__ACCESS_CP15(c12, 0, c12, 5)
#define ICC_IGRPEN1			__ACCESS_CP15(c12, 0, c12, 7)
#define ICC_BPR1			__ACCESS_CP15(c12, 0, c12, 3)
#define ICC_RPR				__ACCESS_CP15(c12, 0, c11, 3)

#define __ICC_AP0Rx(x)			__ACCESS_CP15(c12, 0, c8, 4 | x)
#define ICC_AP0R0			__ICC_AP0Rx(0)
#define ICC_AP0R1			__ICC_AP0Rx(1)
#define ICC_AP0R2			__ICC_AP0Rx(2)
#define ICC_AP0R3			__ICC_AP0Rx(3)

#define __ICC_AP1Rx(x)			__ACCESS_CP15(c12, 0, c9, x)
#define ICC_AP1R0			__ICC_AP1Rx(0)
#define ICC_AP1R1			__ICC_AP1Rx(1)
#define ICC_AP1R2			__ICC_AP1Rx(2)
#define ICC_AP1R3			__ICC_AP1Rx(3)

#define ICC_HSRE			__ACCESS_CP15(c12, 4, c9, 5)

#define ICH_VSEIR			__ACCESS_CP15(c12, 4, c9, 4)
#define ICH_HCR				__ACCESS_CP15(c12, 4, c11, 0)
#define ICH_VTR				__ACCESS_CP15(c12, 4, c11, 1)
#define ICH_MISR			__ACCESS_CP15(c12, 4, c11, 2)
#define ICH_EISR			__ACCESS_CP15(c12, 4, c11, 3)
#define ICH_ELRSR			__ACCESS_CP15(c12, 4, c11, 5)
#define ICH_VMCR			__ACCESS_CP15(c12, 4, c11, 7)

#define __LR0(x)			__ACCESS_CP15(c12, 4, c12, x)
#define __LR8(x)			__ACCESS_CP15(c12, 4, c13, x)

#define ICH_LR0				__LR0(0)
#define ICH_LR1				__LR0(1)
#define ICH_LR2				__LR0(2)
#define ICH_LR3				__LR0(3)
#define ICH_LR4				__LR0(4)
#define ICH_LR5				__LR0(5)
#define ICH_LR6				__LR0(6)
#define ICH_LR7				__LR0(7)
#define ICH_LR8				__LR8(0)
#define ICH_LR9				__LR8(1)
#define ICH_LR10			__LR8(2)
#define ICH_LR11			__LR8(3)
#define ICH_LR12			__LR8(4)
#define ICH_LR13			__LR8(5)
#define ICH_LR14			__LR8(6)
#define ICH_LR15			__LR8(7)

/* LR top half */
#define __LRC0(x)			__ACCESS_CP15(c12, 4, c14, x)
#define __LRC8(x)			__ACCESS_CP15(c12, 4, c15, x)
#define ICH_LRC0			__LRC0(0)
#define ICH_LRC1			__LRC0(1)
#define ICH_LRC2			__LRC0(2)
#define ICH_LRC3			__LRC0(3)
#define ICH_LRC4			__LRC0(4)
#define ICH_LRC5			__LRC0(5)
#define ICH_LRC6			__LRC0(6)
#define ICH_LRC7			__LRC0(7)
#define ICH_LRC8			__LRC8(0)
#define ICH_LRC9			__LRC8(1)
#define ICH_LRC10			__LRC8(2)
#define ICH_LRC11			__LRC8(3)
#define ICH_LRC12			__LRC8(4)
#define ICH_LRC13			__LRC8(5)
#define ICH_LRC14			__LRC8(6)
#define ICH_LRC15			__LRC8(7)

#define __ICH_AP0Rx(x)			__ACCESS_CP15(c12, 4, c8, x)
#define ICH_AP0R0			__ICH_AP0Rx(0)
#define ICH_AP0R1			__ICH_AP0Rx(1)
#define ICH_AP0R2			__ICH_AP0Rx(2)
#define ICH_AP0R3			__ICH_AP0Rx(3)

#define __ICH_AP1Rx(x)			__ACCESS_CP15(c12, 4, c9, x)
#define ICH_AP1R0			__ICH_AP1Rx(0)
#define ICH_AP1R1			__ICH_AP1Rx(1)
#define ICH_AP1R2			__ICH_AP1Rx(2)
#define ICH_AP1R3			__ICH_AP1Rx(3)

/* A32-to-A64 mappings used by VGIC save/restore */
#define CPUIF_MAP(a32, a64)			\
static inline void write_ ## a64(u32 val)	\
{						\
	write_sysreg(val, a32);			\
}						\
static inline u32 read_ ## a64(void)		\
{						\
	return read_sysreg(a32);		\
}						\

#define CPUIF_MAP_LO_HI(a32lo, a32hi, a64)	\
static inline void write_ ## a64(u64 val)	\
{						\
	write_sysreg(lower_32_bits(val), a32lo);\
	write_sysreg(upper_32_bits(val), a32hi);\
}						\
static inline u64 read_ ## a64(void)		\
{						\
	u64 val = read_sysreg(a32lo);		\
						\
	val |= (u64)read_sysreg(a32hi) << 32;	\
						\
	return val;				\
}
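
/*
 * Illustrative note (not generated code): CPUIF_MAP(ICC_PMR, ICC_PMR_EL1)
 * below expands to write_ICC_PMR_EL1()/read_ICC_PMR_EL1() wrappers around
 * the 32-bit ICC_PMR CP15 accessors, while
 * CPUIF_MAP_LO_HI(ICH_LR0, ICH_LRC0, ICH_LR0_EL2) expands to 64-bit
 * write_ICH_LR0_EL2()/read_ICH_LR0_EL2() wrappers that split the value
 * across the ICH_LR0/ICH_LRC0 register pair.
 */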
CPUIF_MAP(ICC_PMR, ICC_PMR_EL1)
CPUIF_MAP(ICC_AP0R0, ICC_AP0R0_EL1)
CPUIF_MAP(ICC_AP0R1, ICC_AP0R1_EL1)
CPUIF_MAP(ICC_AP0R2, ICC_AP0R2_EL1)
CPUIF_MAP(ICC_AP0R3, ICC_AP0R3_EL1)
CPUIF_MAP(ICC_AP1R0, ICC_AP1R0_EL1)
CPUIF_MAP(ICC_AP1R1, ICC_AP1R1_EL1)
CPUIF_MAP(ICC_AP1R2, ICC_AP1R2_EL1)
CPUIF_MAP(ICC_AP1R3, ICC_AP1R3_EL1)

CPUIF_MAP(ICH_HCR, ICH_HCR_EL2)
CPUIF_MAP(ICH_VTR, ICH_VTR_EL2)
CPUIF_MAP(ICH_MISR, ICH_MISR_EL2)
CPUIF_MAP(ICH_EISR, ICH_EISR_EL2)
CPUIF_MAP(ICH_ELRSR, ICH_ELRSR_EL2)
CPUIF_MAP(ICH_VMCR, ICH_VMCR_EL2)
CPUIF_MAP(ICH_AP0R3, ICH_AP0R3_EL2)
CPUIF_MAP(ICH_AP0R2, ICH_AP0R2_EL2)
CPUIF_MAP(ICH_AP0R1, ICH_AP0R1_EL2)
CPUIF_MAP(ICH_AP0R0, ICH_AP0R0_EL2)
CPUIF_MAP(ICH_AP1R3, ICH_AP1R3_EL2)
CPUIF_MAP(ICH_AP1R2, ICH_AP1R2_EL2)
CPUIF_MAP(ICH_AP1R1, ICH_AP1R1_EL2)
CPUIF_MAP(ICH_AP1R0, ICH_AP1R0_EL2)
CPUIF_MAP(ICC_HSRE, ICC_SRE_EL2)
CPUIF_MAP(ICC_SRE, ICC_SRE_EL1)

CPUIF_MAP_LO_HI(ICH_LR15, ICH_LRC15, ICH_LR15_EL2)
CPUIF_MAP_LO_HI(ICH_LR14, ICH_LRC14, ICH_LR14_EL2)
CPUIF_MAP_LO_HI(ICH_LR13, ICH_LRC13, ICH_LR13_EL2)
CPUIF_MAP_LO_HI(ICH_LR12, ICH_LRC12, ICH_LR12_EL2)
CPUIF_MAP_LO_HI(ICH_LR11, ICH_LRC11, ICH_LR11_EL2)
CPUIF_MAP_LO_HI(ICH_LR10, ICH_LRC10, ICH_LR10_EL2)
CPUIF_MAP_LO_HI(ICH_LR9, ICH_LRC9, ICH_LR9_EL2)
CPUIF_MAP_LO_HI(ICH_LR8, ICH_LRC8, ICH_LR8_EL2)
CPUIF_MAP_LO_HI(ICH_LR7, ICH_LRC7, ICH_LR7_EL2)
CPUIF_MAP_LO_HI(ICH_LR6, ICH_LRC6, ICH_LR6_EL2)
CPUIF_MAP_LO_HI(ICH_LR5, ICH_LRC5, ICH_LR5_EL2)
CPUIF_MAP_LO_HI(ICH_LR4, ICH_LRC4, ICH_LR4_EL2)
CPUIF_MAP_LO_HI(ICH_LR3, ICH_LRC3, ICH_LR3_EL2)
CPUIF_MAP_LO_HI(ICH_LR2, ICH_LRC2, ICH_LR2_EL2)
CPUIF_MAP_LO_HI(ICH_LR1, ICH_LRC1, ICH_LR1_EL2)
CPUIF_MAP_LO_HI(ICH_LR0, ICH_LRC0, ICH_LR0_EL2)

#define read_gicreg(r)			read_##r()
#define write_gicreg(v, r)		write_##r(v)
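
/*
 * For illustration: read_gicreg(ICH_VTR_EL2) token-pastes to
 * read_ICH_VTR_EL2(), one of the CPUIF_MAP-generated accessors above, so
 * common VGIC save/restore code can use AArch64 register names on AArch32.
 */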
/* Low-level accessors */

static inline void gic_write_eoir(u32 irq)
{
	write_sysreg(irq, ICC_EOIR1);
	isb();
}

static inline void gic_write_dir(u32 val)
{
	write_sysreg(val, ICC_DIR);
	isb();
}

static inline u32 gic_read_iar(void)
{
	u32 irqstat = read_sysreg(ICC_IAR1);

	dsb(sy);

	return irqstat;
}

static inline void gic_write_ctlr(u32 val)
{
	write_sysreg(val, ICC_CTLR);
	isb();
}

static inline u32 gic_read_ctlr(void)
{
	return read_sysreg(ICC_CTLR);
}

static inline void gic_write_grpen1(u32 val)
{
	write_sysreg(val, ICC_IGRPEN1);
	isb();
}

static inline void gic_write_sgi1r(u64 val)
{
	write_sysreg(val, ICC_SGI1R);
}

static inline u32 gic_read_sre(void)
{
	return read_sysreg(ICC_SRE);
}

static inline void gic_write_sre(u32 val)
{
	write_sysreg(val, ICC_SRE);
	isb();
}

static inline void gic_write_bpr1(u32 val)
{
	write_sysreg(val, ICC_BPR1);
}

static inline u32 gic_read_pmr(void)
{
	return read_sysreg(ICC_PMR);
}

static inline void gic_write_pmr(u32 val)
{
	write_sysreg(val, ICC_PMR);
}

static inline u32 gic_read_rpr(void)
{
	return read_sysreg(ICC_RPR);
}

/*
 * Even in 32-bit systems that use LPAE, there is no guarantee that the I/O
 * interface provides true 64-bit atomic accesses, so using strd/ldrd doesn't
 * make much sense.
 * Moreover, 64-bit I/O emulation is extremely difficult to implement on
 * AArch32, since the syndrome register doesn't provide any information for
 * them.
 * Consequently, the following IO helpers use 32-bit accesses.
 */
static inline void __gic_writeq_nonatomic(u64 val, volatile void __iomem *addr)
{
	writel_relaxed((u32)val, addr);
	writel_relaxed((u32)(val >> 32), addr + 4);
}

static inline u64 __gic_readq_nonatomic(const volatile void __iomem *addr)
{
	u64 val;

	val = readl_relaxed(addr);
	val |= (u64)readl_relaxed(addr + 4) << 32;
	return val;
}
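
/*
 * Note: __gic_writeq_nonatomic() writes the low word first and the high
 * word second; gits_write_vpendbaser() below relies on this ordering when
 * clearing the Valid bit.
 */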

#define gic_flush_dcache_to_poc(a,l)	__cpuc_flush_dcache_area((a), (l))

/*
 * GICD_IROUTERn registers contain the affinity values associated with each
 * interrupt. The upper word (aff3) will always be 0, so there is no need
 * for a lock.
 */
#define gic_write_irouter(v, c)		__gic_writeq_nonatomic(v, c)

/*
 * GICR_TYPER is an ID register and doesn't need atomicity.
 */
#define gic_read_typer(c)		__gic_readq_nonatomic(c)

/*
 * GITS_BASER - hi and lo bits may be accessed independently.
 */
#define gits_read_baser(c)		__gic_readq_nonatomic(c)
#define gits_write_baser(v, c)		__gic_writeq_nonatomic(v, c)

/*
 * GICR_PENDBASER and GICR_PROPBASER are changed with LPIs disabled, so they
 * won't be in use during any updates and can be changed non-atomically.
 */
#define gicr_read_propbaser(c)		__gic_readq_nonatomic(c)
#define gicr_write_propbaser(v, c)	__gic_writeq_nonatomic(v, c)
#define gicr_read_pendbaser(c)		__gic_readq_nonatomic(c)
#define gicr_write_pendbaser(v, c)	__gic_writeq_nonatomic(v, c)

/*
 * GICR_xLPIR - only the lower bits are significant.
 */
#define gic_read_lpir(c)		readl_relaxed(c)
#define gic_write_lpir(v, c)		writel_relaxed(lower_32_bits(v), c)

/*
 * GITS_TYPER is an ID register and doesn't need atomicity.
 */
#define gits_read_typer(c)		__gic_readq_nonatomic(c)

/*
 * GITS_CBASER - hi and lo bits may be accessed independently.
 */
#define gits_read_cbaser(c)		__gic_readq_nonatomic(c)
#define gits_write_cbaser(v, c)		__gic_writeq_nonatomic(v, c)

/*
 * GITS_CWRITER - hi and lo bits may be accessed independently.
 */
#define gits_write_cwriter(v, c)	__gic_writeq_nonatomic(v, c)

/*
 * GITS_VPROPBASER - hi and lo bits may be accessed independently.
 */
#define gits_write_vpropbaser(v, c)	__gic_writeq_nonatomic(v, c)

/*
 * GITS_VPENDBASER - the Valid bit must be cleared before changing
 * anything else.
 */
static inline void gits_write_vpendbaser(u64 val, void __iomem *addr)
{
	u32 tmp;

	tmp = readl_relaxed(addr + 4);
	if (tmp & (GICR_VPENDBASER_Valid >> 32)) {
		tmp &= ~(GICR_VPENDBASER_Valid >> 32);
		writel_relaxed(tmp, addr + 4);
	}

	/*
	 * Use the fact that __gic_writeq_nonatomic() writes the second
	 * half of the 64-bit quantity after the first.
	 */
	__gic_writeq_nonatomic(val, addr);
}

#define gits_read_vpendbaser(c)		__gic_readq_nonatomic(c)
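
/*
 * GICv3 priority masking (pseudo-NMI) is not used on 32-bit ARM; the stubs
 * below only exist so the common GICv3 driver code builds, and are never
 * expected to run (hence the WARN_ON_ONCE).
 */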
static inline bool gic_prio_masking_enabled(void)
{
	return false;
}

static inline void gic_pmr_mask_irqs(void)
{
	/* Should not get called. */
	WARN_ON_ONCE(true);
}

static inline void gic_arch_enable_irqs(void)
{
	/* Should not get called. */
	WARN_ON_ONCE(true);
}

#endif /* !__ASSEMBLY__ */
#endif /* !__ASM_ARCH_GICV3_H */