1#ifndef _X86_64_ALTERNATIVE_H
2#define _X86_64_ALTERNATIVE_H
3
4#ifdef __KERNEL__
5
6#include <linux/types.h>
7#include <linux/stddef.h>
8#include <asm/cpufeature.h>
9
10struct alt_instr {
11	u8 *instr; 		/* original instruction */
12	u8 *replacement;
13	u8  cpuid;		/* cpuid bit set for replacement */
14	u8  instrlen;		/* length of original instruction */
15	u8  replacementlen; 	/* length of new instruction, <= instrlen */
16	u8  pad[5];
17};
18
19extern void alternative_instructions(void);
20extern void apply_alternatives(struct alt_instr *start, struct alt_instr *end);
21
22struct module;
23
24#ifdef CONFIG_SMP
25extern void alternatives_smp_module_add(struct module *mod, char *name,
26					void *locks, void *locks_end,
27					void *text, void *text_end);
28extern void alternatives_smp_module_del(struct module *mod);
29extern void alternatives_smp_switch(int smp);
30#else
31static inline void alternatives_smp_module_add(struct module *mod, char *name,
32					void *locks, void *locks_end,
33					void *text, void *text_end) {}
34static inline void alternatives_smp_module_del(struct module *mod) {}
35static inline void alternatives_smp_switch(int smp) {}
36#endif
37
38#endif
39
40/*
41 * Alternative instructions for different CPU types or capabilities.
42 *
43 * This allows to use optimized instructions even on generic binary
44 * kernels.
45 *
46 * length of oldinstr must be longer or equal the length of newinstr
47 * It can be padded with nops as needed.
48 *
49 * For non barrier like inlines please define new variants
50 * without volatile and memory clobber.
51 */
52#define alternative(oldinstr, newinstr, feature) 	\
53	asm volatile ("661:\n\t" oldinstr "\n662:\n" 		     \
54		      ".section .altinstructions,\"a\"\n"     	     \
55		      "  .align 8\n"				       \
56		      "  .quad 661b\n"            /* label */          \
57		      "  .quad 663f\n"		  /* new instruction */ \
58		      "  .byte %c0\n"             /* feature bit */    \
59		      "  .byte 662b-661b\n"       /* sourcelen */      \
60		      "  .byte 664f-663f\n"       /* replacementlen */ \
61		      ".previous\n"					\
62		      ".section .altinstr_replacement,\"ax\"\n"		\
63		      "663:\n\t" newinstr "\n664:\n"   /* replacement */ \
64		      ".previous" :: "i" (feature) : "memory")
65
66/*
67 * Alternative inline assembly with input.
68 *
69 * Pecularities:
70 * No memory clobber here.
71 * Argument numbers start with 1.
72 * Best is to use constraints that are fixed size (like (%1) ... "r")
73 * If you use variable sized constraints like "m" or "g" in the
74 * replacement make sure to pad to the worst case length.
75 */
76#define alternative_input(oldinstr, newinstr, feature, input...)	\
77	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
78		      ".section .altinstructions,\"a\"\n"		\
79		      "  .align 8\n"					\
80		      "  .quad 661b\n"            /* label */		\
81		      "  .quad 663f\n"		  /* new instruction */	\
82		      "  .byte %c0\n"             /* feature bit */	\
83		      "  .byte 662b-661b\n"       /* sourcelen */	\
84		      "  .byte 664f-663f\n"       /* replacementlen */	\
85		      ".previous\n"					\
86		      ".section .altinstr_replacement,\"ax\"\n"		\
87		      "663:\n\t" newinstr "\n664:\n"   /* replacement */ \
88		      ".previous" :: "i" (feature), ##input)
89
90/* Like alternative_input, but with a single output argument */
91#define alternative_io(oldinstr, newinstr, feature, output, input...) \
92	asm volatile ("661:\n\t" oldinstr "\n662:\n"			\
93		      ".section .altinstructions,\"a\"\n"		\
94		      "  .align 8\n"					\
95		      "  .quad 661b\n"            /* label */		\
96		      "  .quad 663f\n"		  /* new instruction */	\
97		      "  .byte %c[feat]\n"        /* feature bit */	\
98		      "  .byte 662b-661b\n"       /* sourcelen */	\
99		      "  .byte 664f-663f\n"       /* replacementlen */	\
100		      ".previous\n"					\
101		      ".section .altinstr_replacement,\"ax\"\n"		\
102		      "663:\n\t" newinstr "\n664:\n"   /* replacement */ \
103		      ".previous" : output : [feat] "i" (feature), ##input)
104
105/*
106 * use this macro(s) if you need more than one output parameter
107 * in alternative_io
108 */
109#define ASM_OUTPUT2(a, b) a, b
110
111/*
112 * Alternative inline assembly for SMP.
113 *
114 * The LOCK_PREFIX macro defined here replaces the LOCK and
115 * LOCK_PREFIX macros used everywhere in the source tree.
116 *
117 * SMP alternatives use the same data structures as the other
118 * alternatives and the X86_FEATURE_UP flag to indicate the case of a
119 * UP system running a SMP kernel.  The existing apply_alternatives()
120 * works fine for patching a SMP kernel for UP.
121 *
122 * The SMP alternative tables can be kept after boot and contain both
123 * UP and SMP versions of the instructions to allow switching back to
124 * SMP at runtime, when hotplugging in a new CPU, which is especially
125 * useful in virtualized environments.
126 *
127 * The very common lock prefix is handled as special case in a
128 * separate table which is a pure address list without replacement ptr
129 * and size information.  That keeps the table sizes small.
130 */
131
132#ifdef CONFIG_SMP
133#define LOCK_PREFIX \
134		".section .smp_locks,\"a\"\n"	\
135		"  .align 8\n"			\
136		"  .quad 661f\n" /* address */	\
137		".previous\n"			\
138	       	"661:\n\tlock; "
139
140#else /* ! CONFIG_SMP */
141#define LOCK_PREFIX ""
142#endif
143
144struct paravirt_patch;
145#ifdef CONFIG_PARAVIRT
146void apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end);
147#else
148static inline void
149apply_paravirt(struct paravirt_patch *start, struct paravirt_patch *end)
150{}
151#define __parainstructions NULL
152#define __parainstructions_end NULL
153#endif
154
155#endif /* _X86_64_ALTERNATIVE_H */
156