/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_SPECCTRL_H_
#define _ASM_X86_SPECCTRL_H_

#include <linux/thread_info.h>
#include <asm/nospec-branch.h>
#include <asm/msr.h>

/*
 * On VMENTER we must preserve whatever view of the SPEC_CTRL MSR
 * the guest has, while on VMEXIT we restore the host view. This
 * would be easier if SPEC_CTRL were architecturally maskable or
 * shadowable for guests but this is not (currently) the case.
 * Takes the guest's version of VIRT_SPEC_CTRL, if emulated, and
 * whether the guest (true) or the host (false) view should become
 * active.
 */
extern void x86_virt_spec_ctrl(u64 guest_virt_spec_ctrl, bool guest);

/**
 * x86_spec_ctrl_set_guest - Set speculation control registers for the guest
 * @guest_virt_spec_ctrl:	The guest controlled bits of MSR_VIRT_SPEC_CTRL
 *				(may get translated to MSR_AMD64_LS_CFG bits)
 *
 * Avoids writing to the MSR if the content/bits are the same
 */
static inline
void x86_spec_ctrl_set_guest(u64 guest_virt_spec_ctrl)
{
	x86_virt_spec_ctrl(guest_virt_spec_ctrl, true);
}

/**
 * x86_spec_ctrl_restore_host - Restore host speculation control registers
 * @guest_virt_spec_ctrl:	The guest controlled bits of MSR_VIRT_SPEC_CTRL
 *				(may get translated to MSR_AMD64_LS_CFG bits)
 *
 * Avoids writing to the MSR if the content/bits are the same
 */
static inline
void x86_spec_ctrl_restore_host(u64 guest_virt_spec_ctrl)
{
	x86_virt_spec_ctrl(guest_virt_spec_ctrl, false);
}

/* AMD specific Speculative Store Bypass MSR data */
extern u64 x86_amd_ls_cfg_base;
extern u64 x86_amd_ls_cfg_ssbd_mask;

/*
 * Extract the TIF_SSBD bit from @tifn and move it down to the
 * SPEC_CTRL_SSBD bit position. The BUILD_BUG_ON guards against the
 * shift count going negative should the TIF bit ever sit below the
 * SPEC_CTRL bit.
 */
static inline u64 ssbd_tif_to_spec_ctrl(u64 tifn)
{
	BUILD_BUG_ON(TIF_SSBD < SPEC_CTRL_SSBD_SHIFT);
	return (tifn & _TIF_SSBD) >> (TIF_SSBD - SPEC_CTRL_SSBD_SHIFT);
}

/*
 * Extract the TIF_SPEC_IB bit from @tifn and move it down to the
 * SPEC_CTRL_STIBP bit position.
 */
static inline u64 stibp_tif_to_spec_ctrl(u64 tifn)
{
	BUILD_BUG_ON(TIF_SPEC_IB < SPEC_CTRL_STIBP_SHIFT);
	return (tifn & _TIF_SPEC_IB) >> (TIF_SPEC_IB - SPEC_CTRL_STIBP_SHIFT);
}

/*
 * Inverse of ssbd_tif_to_spec_ctrl(): move the SPEC_CTRL_SSBD bit of
 * @spec_ctrl up to the _TIF_SSBD bit position.
 */
static inline unsigned long ssbd_spec_ctrl_to_tif(u64 spec_ctrl)
{
	BUILD_BUG_ON(TIF_SSBD < SPEC_CTRL_SSBD_SHIFT);
	return (spec_ctrl & SPEC_CTRL_SSBD) << (TIF_SSBD - SPEC_CTRL_SSBD_SHIFT);
}

/*
 * Inverse of stibp_tif_to_spec_ctrl(): move the SPEC_CTRL_STIBP bit of
 * @spec_ctrl up to the _TIF_SPEC_IB bit position.
 */
static inline unsigned long stibp_spec_ctrl_to_tif(u64 spec_ctrl)
{
	BUILD_BUG_ON(TIF_SPEC_IB < SPEC_CTRL_STIBP_SHIFT);
	return (spec_ctrl & SPEC_CTRL_STIBP) << (TIF_SPEC_IB - SPEC_CTRL_STIBP_SHIFT);
}

/*
 * Return the AMD LS_CFG SSBD mask when TIF_SSBD is set in @tifn,
 * 0 otherwise.
 */
static inline u64 ssbd_tif_to_amd_ls_cfg(u64 tifn)
{
	return (tifn & _TIF_SSBD) ? x86_amd_ls_cfg_ssbd_mask : 0ULL;
}

/*
 * Update the per-CPU cached SPEC_CTRL value and write it out to the
 * hardware MSR.
 *
 * This can be used in noinstr functions & should only be called in bare
 * metal context.
 */
static __always_inline void __update_spec_ctrl(u64 val)
{
	__this_cpu_write(x86_spec_ctrl_current, val);
	native_wrmsrl(MSR_IA32_SPEC_CTRL, val);
}

#ifdef CONFIG_SMP
extern void speculative_store_bypass_ht_init(void);
#else
static inline void speculative_store_bypass_ht_init(void) { }
#endif

extern void speculation_ctrl_update(unsigned long tif);
extern void speculation_ctrl_update_current(void);

extern bool itlb_multihit_kvm_mitigation;

#endif