/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__
/* In assembly source, ASMMACRO simply expands to a gas macro definition. */
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>

/*
 * In C, ASMMACRO emits the same gas macro (so inline asm elsewhere may use
 * it by name) and additionally wraps it in a same-named inline function so
 * C code can invoke the barrier directly.
 */
#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

/*
 * MIPS R2 instruction hazard barrier.  Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif
/*
 * Architectural no-ops encoded as shifts of $0: "sll $0, $0, 1" is the
 * superscalar no-op (ssnop), "sll $0, $0, 3" is the R2 execution hazard
 * barrier (ehb); both execute as plain nops on pre-R2 cores.
 */
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	_ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
#elif defined(CONFIG_CPU_MIPSR1)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

/* Emit the jr.hb barrier only when actually running on an R2 core. */
#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)
# elif defined(CONFIG_CPU_R10000)
2005-04-17 02:20:36 +04:00
/*
2006-09-08 06:13:49 +04:00
* R10000 rocks - all hazards handled in hardware , so this becomes a nobrainer .
2005-04-17 02:20:36 +04:00
*/
2006-09-08 06:13:49 +04:00
ASMMACRO ( mtc0_tlbw_hazard ,
)
ASMMACRO ( tlbw_use_hazard ,
)
ASMMACRO ( tlb_probe_hazard ,
)
ASMMACRO ( irq_enable_hazard ,
)
ASMMACRO ( irq_disable_hazard ,
)
ASMMACRO ( back_to_back_c0_hazard ,
)
# define instruction_hazard() do { } while (0)
2005-04-17 02:20:36 +04:00
2006-09-08 06:13:49 +04:00
# elif defined(CONFIG_CPU_RM9000)
2005-03-02 22:18:46 +03:00
2005-04-17 02:20:36 +04:00
/*
* RM9000 hazards . When the JTLB is updated by tlbwi or tlbwr , a subsequent
* use of the JTLB for instructions should not occur for 4 cpu cycles and use
* for data translations should not occur for 3 cpu cycles .
*/
2006-09-08 06:13:49 +04:00
ASMMACRO ( mtc0_tlbw_hazard ,
_ssnop ; _ssnop ; _ssnop ; _ssnop
)
ASMMACRO ( tlbw_use_hazard ,
_ssnop ; _ssnop ; _ssnop ; _ssnop
)
ASMMACRO ( tlb_probe_hazard ,
_ssnop ; _ssnop ; _ssnop ; _ssnop
)
ASMMACRO ( irq_enable_hazard ,
)
ASMMACRO ( irq_disable_hazard ,
)
ASMMACRO ( back_to_back_c0_hazard ,
)
# define instruction_hazard() do { } while (0)
2005-04-17 02:20:36 +04:00
2006-09-08 06:13:49 +04:00
# elif defined(CONFIG_CPU_SB1)
2005-04-17 02:20:36 +04:00
/*
2006-09-08 06:13:49 +04:00
* Mostly like R4000 for historic reasons
2005-04-17 02:20:36 +04:00
*/
2006-09-08 06:13:49 +04:00
ASMMACRO ( mtc0_tlbw_hazard ,
)
ASMMACRO ( tlbw_use_hazard ,
)
ASMMACRO ( tlb_probe_hazard ,
)
ASMMACRO ( irq_enable_hazard ,
)
ASMMACRO ( irq_disable_hazard ,
_ssnop ; _ssnop ; _ssnop
)
ASMMACRO ( back_to_back_c0_hazard ,
)
# define instruction_hazard() do { } while (0)
2005-03-01 21:12:06 +03:00
2005-04-17 02:20:36 +04:00
# else
/*
2006-09-08 06:13:49 +04:00
* Finally the catchall case for all other processors including R4000 , R4400 ,
* R4600 , R4700 , R5000 , RM7000 , NEC VR41xx etc .
2006-03-13 19:16:29 +03:00
*
2006-09-08 06:13:49 +04:00
* The taken branch will result in a two cycle penalty for the two killed
* instructions on R4000 / R4400 . Other processors only have a single cycle
* hazard so this is nice trick to have an optimal code for a range of
* processors .
2005-12-22 15:41:29 +03:00
*/
2006-09-08 06:13:49 +04:00
ASMMACRO ( mtc0_tlbw_hazard ,
2007-01-24 16:22:06 +03:00
nop ; nop
2006-09-08 06:13:49 +04:00
)
ASMMACRO ( tlbw_use_hazard ,
nop ; nop ; nop
)
ASMMACRO ( tlb_probe_hazard ,
nop ; nop ; nop
)
ASMMACRO ( irq_enable_hazard ,
2007-09-03 18:22:26 +04:00
_ssnop ; _ssnop ; _ssnop ;
2006-09-08 06:13:49 +04:00
)
ASMMACRO ( irq_disable_hazard ,
nop ; nop ; nop
)
ASMMACRO ( back_to_back_c0_hazard ,
_ssnop ; _ssnop ; _ssnop ;
)
2005-07-12 22:35:38 +04:00
# define instruction_hazard() do { } while (0)
2006-04-05 12:45:45 +04:00
2006-09-08 06:13:49 +04:00
# endif
2005-04-17 02:20:36 +04:00
2007-05-08 19:09:13 +04:00
/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;	/* likely-taken-never branch kills the slot */
	 _ssnop;
	 .set	pop
)
ASMMACRO(disable_fpu_hazard,
)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
)
ASMMACRO(disable_fpu_hazard,
	 _ehb
)
#endif

#endif /* _ASM_HAZARDS_H */