/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_INVPCID
#define _ASM_X86_INVPCID

#include <linux/types.h>	/* u64 */

static inline void __invpcid(unsigned long pcid, unsigned long addr,
			     unsigned long type)
{
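	/*
	 * INVPCID takes its invalidation type in a register and a 16-byte
	 * descriptor as a memory operand: the PCID goes in bits 11:0 of
	 * the low quadword (remaining bits reserved, must be zero) and
	 * the linear address in the high quadword.
	 */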
	struct { u64 d[2]; } desc = { { pcid, addr } };

	/*
	 * The memory clobber is because the whole point is to invalidate
	 * stale TLB entries and, especially if we're flushing global
	 * mappings, we don't want the compiler to reorder any subsequent
	 * memory accesses before the TLB flush.
	 */
	asm volatile("invpcid %[desc], %[type]"
		     :: [desc] "m" (desc), [type] "r" (type) : "memory");
}

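/*
 * Invalidation types accepted by INVPCID; the type is passed as the
 * instruction's register operand (see __invpcid() above).
 */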
#define INVPCID_TYPE_INDIV_ADDR		0
#define INVPCID_TYPE_SINGLE_CTXT	1
#define INVPCID_TYPE_ALL_INCL_GLOBAL	2
#define INVPCID_TYPE_ALL_NON_GLOBAL	3

/* Flush the mapping for a single address in the given PCID, not including globals. */
static inline void invpcid_flush_one(unsigned long pcid,
				     unsigned long addr)
{
	__invpcid(pcid, addr, INVPCID_TYPE_INDIV_ADDR);
}

/* Flush all mappings for a given PCID, not including globals. */
static inline void invpcid_flush_single_context(unsigned long pcid)
{
	__invpcid(pcid, 0, INVPCID_TYPE_SINGLE_CTXT);
}

/* Flush all mappings, including globals, for all PCIDs. */
static inline void invpcid_flush_all(void)
{
	__invpcid(0, 0, INVPCID_TYPE_ALL_INCL_GLOBAL);
}

/* Flush all mappings for all PCIDs except globals. */
static inline void invpcid_flush_all_nonglobals(void)
{
	__invpcid(0, 0, INVPCID_TYPE_ALL_NON_GLOBAL);
}
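
/*
 * Illustrative usage sketch: INVPCID raises #UD on CPUs that do not
 * support it, so callers are expected to gate these helpers on the
 * feature flag and fall back to CR3/CR4-based flushing otherwise, e.g.
 *
 *	if (cpu_feature_enabled(X86_FEATURE_INVPCID))
 *		invpcid_flush_single_context(pcid);
 *
 * where pcid stands in for whatever PCID/ASID value the caller manages.
 */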

#endif /* _ASM_X86_INVPCID */