2007-10-23 22:37:24 +02:00
# ifndef __ASM_X86_MSR_H_
# define __ASM_X86_MSR_H_
# include <asm/msr-index.h>
2008-01-15 16:44:38 +01:00
# ifndef __ASSEMBLY__
# include <linux / types.h>
# endif
2008-01-30 13:31:06 +01:00
# ifdef __KERNEL__
# ifndef __ASSEMBLY__
2008-01-30 13:31:07 +01:00
# include <asm/asm.h>
# include <asm/errno.h>
2008-01-30 13:31:17 +01:00
static inline unsigned long long native_read_tscp ( unsigned int * aux )
2008-01-30 13:31:06 +01:00
{
unsigned long low , high ;
2008-03-23 01:02:51 -07:00
asm volatile ( " .byte 0x0f,0x01,0xf9 "
: " =a " ( low ) , " =d " ( high ) , " =c " ( * aux ) ) ;
2008-01-30 13:31:06 +01:00
return low | ( ( u64 ) high > > 32 ) ;
}
2008-01-30 13:31:07 +01:00
/*
 * The i386 calling convention returns a 64-bit value in edx:eax, while
 * x86_64 returns it in rax.  The "A" constraint does not mean rdx:rax
 * on x86_64 either, so each architecture needs its own glue for moving
 * an edx:eax pair in and out of inline assembly.
 */
#ifdef CONFIG_X86_64
/* Two separate 32-bit halves, combined/split explicitly. */
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	((low) | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
/* A single 64-bit value via the "A" (edx:eax) constraint. */
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif
2007-10-23 22:37:24 +02:00
static inline unsigned long long native_read_msr ( unsigned int msr )
{
2008-01-30 13:31:07 +01:00
DECLARE_ARGS ( val , low , high ) ;
2007-10-23 22:37:24 +02:00
2008-01-30 13:31:07 +01:00
asm volatile ( " rdmsr " : EAX_EDX_RET ( val , low , high ) : " c " ( msr ) ) ;
return EAX_EDX_VAL ( val , low , high ) ;
2007-10-23 22:37:24 +02:00
}
static inline unsigned long long native_read_msr_safe ( unsigned int msr ,
int * err )
{
2008-01-30 13:31:07 +01:00
DECLARE_ARGS ( val , low , high ) ;
2007-10-23 22:37:24 +02:00
2008-01-30 13:31:07 +01:00
asm volatile ( " 2: rdmsr ; xor %0,%0 \n "
2007-10-23 22:37:24 +02:00
" 1: \n \t "
" .section .fixup, \" ax \" \n \t "
2008-01-30 13:31:07 +01:00
" 3: mov %3,%0 ; jmp 1b \n \t "
2007-10-23 22:37:24 +02:00
" .previous \n \t "
2008-03-23 01:02:51 -07:00
_ASM_EXTABLE ( 2 b , 3 b )
2008-01-30 13:31:07 +01:00
: " =r " ( * err ) , EAX_EDX_RET ( val , low , high )
2007-10-23 22:37:24 +02:00
: " c " ( msr ) , " i " ( - EFAULT ) ) ;
2008-01-30 13:31:07 +01:00
return EAX_EDX_VAL ( val , low , high ) ;
2007-10-23 22:37:24 +02:00
}
2008-01-30 13:31:07 +01:00
static inline void native_write_msr ( unsigned int msr ,
unsigned low , unsigned high )
2007-10-23 22:37:24 +02:00
{
2008-01-30 13:31:07 +01:00
asm volatile ( " wrmsr " : : " c " ( msr ) , " a " ( low ) , " d " ( high ) ) ;
2007-10-23 22:37:24 +02:00
}
static inline int native_write_msr_safe ( unsigned int msr ,
2008-01-30 13:31:07 +01:00
unsigned low , unsigned high )
2007-10-23 22:37:24 +02:00
{
int err ;
2008-01-30 13:31:07 +01:00
asm volatile ( " 2: wrmsr ; xor %0,%0 \n "
2007-10-23 22:37:24 +02:00
" 1: \n \t "
" .section .fixup, \" ax \" \n \t "
2008-01-30 13:31:07 +01:00
" 3: mov %4,%0 ; jmp 1b \n \t "
2007-10-23 22:37:24 +02:00
" .previous \n \t "
2008-03-23 01:02:51 -07:00
_ASM_EXTABLE ( 2 b , 3 b )
2007-10-23 22:37:24 +02:00
: " =a " ( err )
2008-01-30 13:31:07 +01:00
: " c " ( msr ) , " 0 " ( low ) , " d " ( high ) ,
2008-03-23 01:02:51 -07:00
" i " ( - EFAULT ) ) ;
2007-10-23 22:37:24 +02:00
return err ;
}
2008-01-30 13:32:39 +01:00
extern unsigned long long native_read_tsc ( void ) ;
2007-10-23 22:37:24 +02:00
2008-01-30 13:32:40 +01:00
/*
 * Read the time-stamp counter, with a barrier on each side so RDTSC
 * cannot be speculatively hoisted/sunk past the surrounding code.
 */
static __always_inline unsigned long long __native_read_tsc(void)
{
	DECLARE_ARGS(val, low, high);

	rdtsc_barrier();
	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));
	rdtsc_barrier();

	return EAX_EDX_VAL(val, low, high);
}
2008-01-30 13:31:07 +01:00
static inline unsigned long long native_read_pmc ( int counter )
2007-10-23 22:37:24 +02:00
{
2008-01-30 13:31:07 +01:00
DECLARE_ARGS ( val , low , high ) ;
asm volatile ( " rdpmc " : EAX_EDX_RET ( val , low , high ) : " c " ( counter ) ) ;
return EAX_EDX_VAL ( val , low , high ) ;
2007-10-23 22:37:24 +02:00
}
# ifdef CONFIG_PARAVIRT
# include <asm/paravirt.h>
2007-10-11 11:20:03 +02:00
# else
2007-10-23 22:37:24 +02:00
# include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better
 * only).  Note: the rd* operations modify the parameters directly
 * (without using pointer indirection), which lets gcc optimize better.
 */
#define rdmsr(msr, val1, val2)					\
do {								\
	u64 __val = native_read_msr((msr));			\
	(val1) = (u32)__val;					\
	(val2) = (u32)(__val >> 32);				\
} while (0)
2007-10-23 22:37:24 +02:00
2008-01-30 13:31:07 +01:00
/* Unchecked MSR write: thin wrapper around native_write_msr(). */
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
2008-03-23 01:02:51 -07:00
/* Read MSR `msr` into a single 64-bit lvalue. */
#define rdmsrl(msr, val)					\
	((val) = native_read_msr((msr)))

/* Write a 64-bit value, split into the low/high halves WRMSR expects. */
#define wrmsrl(msr, val)					\
	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
2007-10-23 22:37:24 +02:00
/* wrmsr with exception handling: 0 on success, -EFAULT on #GP fault. */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}
/*
 * rdmsr with exception handling: evaluates to 0 on success or -EFAULT
 * on #GP fault, storing the low/high halves through *p1 / *p2.
 *
 * p1/p2 are now fully parenthesized before dereferencing: the old
 * (*p1) form would mis-expand an argument like `base + i` as
 * `*base + i` instead of `*(base + i)`.
 */
#define rdmsr_safe(msr, p1, p2)					\
({								\
	int __err;						\
	u64 __val = native_read_msr_safe((msr), &__err);	\
	(*(p1)) = (u32)__val;					\
	(*(p2)) = (u32)(__val >> 32);				\
	__err;							\
})
2007-10-23 22:37:24 +02:00
2008-03-22 10:59:28 +01:00
/* Read MSR `msr` into *p; returns 0 on success, -EFAULT on #GP fault. */
static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
{
	int ret;

	*p = native_read_msr_safe(msr, &ret);
	return ret;
}
2007-10-23 22:37:24 +02:00
/* Low 32 bits of the time-stamp counter. */
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

/* Full 64-bit time-stamp counter. */
#define rdtscll(val)						\
	((val) = native_read_tsc())

/* Read performance counter `counter`, split into 32-bit halves. */
#define rdpmc(counter, low, high)				\
do {								\
	u64 _l = native_read_pmc((counter));			\
	(low)  = (u32)_l;					\
	(high) = (u32)(_l >> 32);				\
} while (0)
2007-10-23 22:37:24 +02:00
2008-03-23 01:02:51 -07:00
/* RDTSCP: TSC split into low/high halves, TSC_AUX stored into aux. */
#define rdtscp(low, high, aux)					\
do {								\
	unsigned long long _val = native_read_tscp(&(aux));	\
	(low)  = (u32)_val;					\
	(high) = (u32)(_val >> 32);				\
} while (0)

/* RDTSCP: full 64-bit TSC into val, TSC_AUX into aux. */
#define rdtscpll(val, aux)	(val) = native_read_tscp(&(aux))
2007-10-23 22:37:24 +02:00
2008-01-30 13:31:07 +01:00
# endif /* !CONFIG_PARAVIRT */
2007-10-23 22:37:24 +02:00
2008-03-23 01:02:51 -07:00
/* wrmsrl with exception handling: 0 on success, -EFAULT on fault. */
#define checking_wrmsrl(msr, val)				\
	wrmsr_safe((msr), (u32)(val), (u32)((val) >> 32))

/* 0x10 is the TSC MSR (MSR_IA32_TSC). */
#define write_tsc(val1, val2)	wrmsr(0x10, (val1), (val2))

/* 0xc0000103 is TSC_AUX (MSR_TSC_AUX) — the value RDTSCP returns in ecx. */
#define write_rdtscp_aux(val)	wrmsr(0xc0000103, (val), 0)
2007-10-23 22:37:24 +02:00
#ifdef CONFIG_SMP
/* SMP: perform the MSR access on the given CPU (implemented elsewhere). */
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /* CONFIG_SMP */
/* UP: only one CPU exists, so just do the access locally. */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}

static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}

static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
				    u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}

static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif  /* CONFIG_SMP */
2008-01-30 13:31:03 +01:00
# endif /* __ASSEMBLY__ */
2008-01-30 13:31:07 +01:00
# endif /* __KERNEL__ */
2007-10-23 22:37:24 +02:00
2007-10-11 11:20:03 +02:00
# endif