2005-04-17 02:20:36 +04:00
# ifndef __ASM_MSR_H
# define __ASM_MSR_H
2007-05-02 21:27:12 +04:00
# include <asm/msr-index.h>
# ifdef __KERNEL__
# ifndef __ASSEMBLY__
2007-05-02 21:27:10 +04:00
# include <asm/errno.h>
/*
 * Read the MSR with index @msr via the RDMSR instruction.
 *
 * RDMSR requires the MSR index in ECX ("c" constraint) and returns the
 * 64-bit value in EDX:EAX, which the i386 "=A" constraint maps to a
 * single unsigned long long.
 *
 * No exception fixup: reading a non-present MSR will fault (see
 * native_read_msr_safe() below for the trap-tolerant variant).
 */
static inline unsigned long long native_read_msr(unsigned int msr)
{
	unsigned long long val;

	asm volatile("rdmsr" : "=A" (val) : "c" (msr));
	return val;
}
/*
 * Exception-tolerant MSR read.
 *
 * Like native_read_msr(), but survives a fault on the RDMSR itself:
 * an __ex_table entry maps the instruction at local label 2 to the
 * fixup code at label 3, which stores -EFAULT in *err and resumes at
 * label 1.  On the non-faulting path the xorl clears *err to 0.
 *
 * Returns the MSR value (undefined when *err is set); *err is 0 on
 * success, -EFAULT if the read trapped.
 */
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	unsigned long long val;

	asm volatile("2: rdmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     " .align 4\n\t"
		     " .long 2b,3b\n\t"
		     ".previous"
		     : "=r" (*err), "=A" (val)
		     : "c" (msr), "i" (-EFAULT));
	return val;
}
/*
 * Write @val to the MSR with index @msr via WRMSR.
 *
 * WRMSR takes the index in ECX ("c") and the 64-bit value in EDX:EAX
 * ("A").  No exception fixup — faults on a bad MSR are not caught
 * (see native_write_msr_safe() below).
 */
static inline void native_write_msr(unsigned int msr, unsigned long long val)
{
	asm volatile("wrmsr" : : "c" (msr), "A" (val));
}
/*
 * Exception-tolerant MSR write.
 *
 * Same fixup scheme as native_read_msr_safe(): if the WRMSR at label 2
 * faults, the __ex_table entry redirects to label 3, which loads
 * -EFAULT into the result register and jumps back to label 1.  On
 * success the xorl clears the result to 0.
 *
 * The value is split by the constraints: "0" ((u32)val) places the low
 * half in EAX (tied to output %0), "d" places the high half in EDX.
 *
 * Returns 0 on success, -EFAULT if the write trapped.
 */
static inline int native_write_msr_safe(unsigned int msr,
					unsigned long long val)
{
	int err;

	asm volatile("2: wrmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     " .align 4\n\t"
		     " .long 2b,3b\n\t"
		     ".previous"
		     : "=a" (err)
		     : "c" (msr), "0" ((u32)val), "d" ((u32)(val >> 32)),
		       "i" (-EFAULT));
	return err;
}
/*
 * Read the 64-bit time stamp counter via RDTSC (result in EDX:EAX,
 * captured as one 64-bit value through the "=A" constraint).
 */
static inline unsigned long long native_read_tsc(void)
{
	unsigned long long val;

	asm volatile("rdtsc" : "=A" (val));
	return val;
}
/*
 * Read a performance-monitoring counter via RDPMC.
 *
 * NOTE(review): RDPMC selects the counter by ECX, but no "c" input is
 * supplied here — the counter index is whatever the caller has arranged
 * to be in ECX (used via the rdpmc() macro below, which likewise takes
 * a counter argument it does not pass down).  Confirm against callers.
 */
static inline unsigned long long native_read_pmc(void)
{
	unsigned long long val;

	asm volatile("rdpmc" : "=A" (val));
	return val;
}
2006-12-07 04:14:07 +03:00
# ifdef CONFIG_PARAVIRT
# include <asm/paravirt.h>
# else
2007-05-08 19:22:01 +04:00
# include <linux/errno.h>
2005-04-17 02:20:36 +04:00
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */
2007-05-02 21:27:10 +04:00
/*
 * rdmsr(msr, val1, val2) — read an MSR, splitting the 64-bit result:
 * val1 receives the low 32 bits, val2 the high 32 bits.  The outputs
 * are assigned directly (lvalues, not pointers).
 */
#define rdmsr(msr,val1,val2)					\
	do {							\
		u64 __val = native_read_msr(msr);		\
		(val1) = (u32)__val;				\
		(val2) = (u32)(__val >> 32);			\
	} while(0)
2005-04-17 02:20:36 +04:00
2007-05-09 11:02:11 +04:00
/*
 * wrmsr() — write an MSR from two 32-bit halves: @__low goes into the
 * low word, @__high into the high word of the 64-bit value.
 */
static inline void wrmsr(u32 __msr, u32 __low, u32 __high)
{
	u64 __val = ((u64)__high << 32) | __low;

	native_write_msr(__msr, __val);
}
2007-05-09 11:02:11 +04:00
# define rdmsrl(msr,val) \
( ( val ) = native_read_msr ( msr ) )
# define wrmsrl(msr,val) native_write_msr(msr, val)
2005-04-17 02:20:36 +04:00
/* wrmsr with exception handling */
2007-05-09 11:02:11 +04:00
/*
 * wrmsr_safe() — exception-tolerant MSR write from two 32-bit halves.
 * Returns 0 on success, -EFAULT if the WRMSR trapped.
 */
static inline int wrmsr_safe(u32 __msr, u32 __low, u32 __high)
{
	u64 __val = __low | ((u64)__high << 32);

	return native_write_msr_safe(__msr, __val);
}
2005-04-17 02:20:36 +04:00
2005-09-04 02:56:42 +04:00
/* rdmsr with exception handling */
2007-05-02 21:27:10 +04:00
/*
 * rdmsr_safe(msr, p1, p2) — exception-tolerant MSR read.  p1 and p2
 * are POINTERS (unlike rdmsr()'s lvalues); *p1 gets the low 32 bits,
 * *p2 the high 32 bits.  Evaluates to 0 on success, -EFAULT on fault.
 */
#define rdmsr_safe(msr,p1,p2)					\
({								\
	int __err;						\
	u64 __val = native_read_msr_safe(msr, &__err);		\
	(*p1) = (u32)__val;					\
	(*p2) = (u32)(__val >> 32);				\
	__err;							\
})
/* rdtscl(low) — only the low 32 bits of the TSC. */
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

/* rdtscll(val) — full 64-bit TSC value. */
#define rdtscll(val)						\
	((val) = native_read_tsc())

/* Write the TSC; 0x10 is the time-stamp-counter MSR index. */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)
2007-05-02 21:27:10 +04:00
/*
 * rdpmc(counter, low, high) — read a performance counter, split into
 * low/high 32-bit lvalues.
 *
 * NOTE(review): the counter argument is not passed to
 * native_read_pmc(), which issues RDPMC with whatever is in ECX —
 * verify intended usage against callers.
 */
#define rdpmc(counter,low,high)					\
	do {							\
		u64 _l = native_read_pmc();			\
		(low) = (u32)_l;				\
		(high) = (u32)(_l >> 32);			\
	} while(0)
2006-12-07 04:14:07 +03:00
# endif /* !CONFIG_PARAVIRT */
2005-04-17 02:20:36 +04:00
2007-02-20 03:07:13 +03:00
#ifdef CONFIG_SMP
/*
 * Cross-CPU MSR accessors: perform the rdmsr/wrmsr on a specific CPU.
 * Declarations only — implemented out of line (elsewhere in the tree).
 * The *_safe variants return 0 on success or a negative error.
 */
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else /* CONFIG_SMP */
/*
 * Uniprocessor fallbacks: there is only one CPU, so the "on_cpu"
 * variants degenerate to plain local accesses and the cpu argument
 * is ignored.
 */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}
static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif /* CONFIG_SMP */
2007-05-02 21:27:12 +04:00
# endif
# endif
2005-04-17 02:20:36 +04:00
# endif /* __ASM_MSR_H */