/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECCTRL_H_
#define _ASM_X86_SPECCTRL_H_

#include <linux/thread_info.h>
#include <asm/nospec-branch.h>
/*
 * On VMENTER we must preserve whatever view of the SPEC_CTRL MSR
 * the guest has, while on VMEXIT we restore the host view. This
 * would be easier if SPEC_CTRL were architecturally maskable or
 * shadowable for guests but this is not (currently) the case.
 * Takes the guest view of SPEC_CTRL MSR as a parameter and also
 * the guest's version of VIRT_SPEC_CTRL, if emulated.
 */
extern void x86_virt_spec_ctrl(u64 guest_spec_ctrl, u64 guest_virt_spec_ctrl, bool guest);
/**
 * x86_spec_ctrl_set_guest - Set speculation control registers for the guest
 * @guest_spec_ctrl:		The guest content of MSR_SPEC_CTRL
 * @guest_virt_spec_ctrl:	The guest controlled bits of MSR_VIRT_SPEC_CTRL
 *				(may get translated to MSR_AMD64_LS_CFG bits)
 *
 * Avoids writing to the MSR if the content/bits are the same
 */
static inline
void x86_spec_ctrl_set_guest(u64 guest_spec_ctrl, u64 guest_virt_spec_ctrl)
{
	/* guest == true: load the guest's view on VMENTER */
	x86_virt_spec_ctrl(guest_spec_ctrl, guest_virt_spec_ctrl, true);
}
/**
 * x86_spec_ctrl_restore_host - Restore host speculation control registers
 * @guest_spec_ctrl:		The guest content of MSR_SPEC_CTRL
 * @guest_virt_spec_ctrl:	The guest controlled bits of MSR_VIRT_SPEC_CTRL
 *				(may get translated to MSR_AMD64_LS_CFG bits)
 *
 * Avoids writing to the MSR if the content/bits are the same
 */
static inline
void x86_spec_ctrl_restore_host(u64 guest_spec_ctrl, u64 guest_virt_spec_ctrl)
{
	/* guest == false: restore the host's view on VMEXIT */
	x86_virt_spec_ctrl(guest_spec_ctrl, guest_virt_spec_ctrl, false);
}
/* AMD specific Speculative Store Bypass MSR data */
extern u64 x86_amd_ls_cfg_base;
extern u64 x86_amd_ls_cfg_ssbd_mask;
/*
 * Translate the TIF_SSBD thread flag into its SPEC_CTRL MSR bit.
 *
 * The shift only works when TIF_SSBD sits at a higher (or equal) bit
 * position than SPEC_CTRL_SSBD_SHIFT, which the BUILD_BUG_ON enforces
 * at compile time.
 */
static inline u64 ssbd_tif_to_spec_ctrl(u64 tifn)
{
	BUILD_BUG_ON(TIF_SSBD < SPEC_CTRL_SSBD_SHIFT);
	/* Fixed: ">>" was split into "> >", which does not compile. */
	return (tifn & _TIF_SSBD) >> (TIF_SSBD - SPEC_CTRL_SSBD_SHIFT);
}
/*
 * Inverse of ssbd_tif_to_spec_ctrl(): translate the SPEC_CTRL_SSBD MSR
 * bit into the TIF_SSBD thread-flag bit. Same compile-time constraint
 * on the relative bit positions as the forward conversion.
 */
static inline unsigned long ssbd_spec_ctrl_to_tif(u64 spec_ctrl)
{
	BUILD_BUG_ON(TIF_SSBD < SPEC_CTRL_SSBD_SHIFT);
	/* Fixed: "<<" was split into "< <", which does not compile. */
	return (spec_ctrl & SPEC_CTRL_SSBD) << (TIF_SSBD - SPEC_CTRL_SSBD_SHIFT);
}
/*
 * Translate the TIF_SSBD thread flag into the AMD LS_CFG mask used to
 * enable Speculative Store Bypass Disable on CPUs without SPEC_CTRL:
 * the precomputed x86_amd_ls_cfg_ssbd_mask when the flag is set, 0
 * otherwise. (Removed stray VCS timestamp lines that broke the build.)
 */
static inline u64 ssbd_tif_to_amd_ls_cfg(u64 tifn)
{
	return (tifn & _TIF_SSBD) ? x86_amd_ls_cfg_ssbd_mask : 0ULL;
}
/*
 * HT sibling setup for Speculative Store Bypass: SMP builds provide a
 * real implementation elsewhere; UP builds get an empty inline stub.
 */
# ifdef CONFIG_SMP
extern void speculative_store_bypass_ht_init ( void ) ;
# else
static inline void speculative_store_bypass_ht_init ( void ) { }
# endif
extern void speculative_store_bypass_update ( unsigned long tif ) ;
static inline void speculative_store_bypass_update_current ( void )
{
speculative_store_bypass_update ( current_thread_info ( ) - > flags ) ;
}
# endif