arch/x86/kernel/static_call.c
// SPDX-License-Identifier: GPL-2.0
#include <linux/static_call.h>
#include <linux/memory.h>
#include <linux/bug.h>
#include <asm/text-patching.h>

enum insn_type {
        CALL = 0, /* site call */
        NOP = 1,  /* site cond-call */
        JMP = 2,  /* tramp / site tail-call */
        RET = 3,  /* tramp / site cond-tail-call */
};

/*
 * ud1 %esp, %ecx - a 3 byte #UD that is unique to trampolines, chosen such
 * that there is no false-positive trampoline identification while also being a
 * speculation stop.
 */
static const u8 tramp_ud[] = { 0x0f, 0xb9, 0xcc };
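
/*
 * Illustrative layout (the trampoline itself is emitted by
 * ARCH_DEFINE_STATIC_CALL_TRAMP() in <asm/static_call.h>, not here): a
 * trampoline is a 5 byte instruction, e.g. a jmp.d32 to the target,
 * followed by this signature:
 *
 *      tramp:  e9 xx xx xx xx          jmp.d32 <func>
 *              0f b9 cc                ud1 %esp, %ecx
 *
 * which is why __static_call_validate() below checks insn+5 for these
 * 3 bytes.
 */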

/*
 * cs cs cs xorl %eax, %eax - a single 5 byte instruction that clears %[er]ax
 */
static const u8 xor5rax[] = { 0x2e, 0x2e, 0x2e, 0x31, 0xc0 };

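/* A bare "ret", padded with int3 to the CALL_INSN_SIZE (5) bytes poked below. */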
static const u8 retinsn[] = { RET_INSN_OPCODE, 0xcc, 0xcc, 0xcc, 0xcc };

static void __ref __static_call_transform(void *insn, enum insn_type type,
                                          void *func, bool modinit)
{
        const void *emulate = NULL;
        int size = CALL_INSN_SIZE;
        const void *code;

        switch (type) {
        case CALL:
                func = callthunks_translate_call_dest(func);
                code = text_gen_insn(CALL_INSN_OPCODE, insn, func);
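                /*
                 * A call to __static_call_return0 is written out as the
                 * 5 byte "xor %[er]ax, %eax" above instead of an actual
                 * call; while the site is being patched, text_poke_bp()
                 * emulates the original call, which likewise leaves 0 in
                 * %[er]ax.
                 */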
                if (func == &__static_call_return0) {
                        emulate = code;
                        code = &xor5rax;
                }

                break;

        case NOP:
                code = x86_nops[5];
                break;

        case JMP:
                code = text_gen_insn(JMP32_INSN_OPCODE, insn, func);
                break;

        case RET:
                if (cpu_feature_enabled(X86_FEATURE_RETHUNK))
                        code = text_gen_insn(JMP32_INSN_OPCODE, insn, x86_return_thunk);
                else
                        code = &retinsn;
                break;
        }

        if (memcmp(insn, code, size) == 0)
                return;

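        /*
         * While the kernel is still booting, or while a module being
         * initialized is patched, no other CPU can be executing this text
         * yet, so it is safe to patch it directly; otherwise go through the
         * INT3-based text_poke_bp() machinery.
         */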
        if (system_state == SYSTEM_BOOTING || modinit)
                return text_poke_early(insn, code, size);

        text_poke_bp(insn, code, size, emulate);
}

static void __static_call_validate(void *insn, bool tail, bool tramp)
{
        u8 opcode = *(u8 *)insn;

        if (tramp && memcmp(insn+5, tramp_ud, 3)) {
                pr_err("trampoline signature fail");
                BUG();
        }

        if (tail) {
                if (opcode == JMP32_INSN_OPCODE ||
                    opcode == RET_INSN_OPCODE)
                        return;
        } else {
                if (opcode == CALL_INSN_OPCODE ||
                    !memcmp(insn, x86_nops[5], 5) ||
                    !memcmp(insn, xor5rax, 5))
                        return;
        }

        /*
         * If we ever trigger this, our text is corrupt, we'll probably not live long.
         */
        pr_err("unexpected static_call insn opcode 0x%x at %pS\n", opcode, insn);
        BUG();
}

static inline enum insn_type __sc_insn(bool null, bool tail)
{
        /*
         * Encode the following table without branches:
         *
         *      tail    null    insn
         *      -----+-------+------
         *        0  |   0   |  CALL
         *        0  |   1   |  NOP
         *        1  |   0   |  JMP
         *        1  |   1   |  RET
         */
        return 2*tail + null;
}

void arch_static_call_transform(void *site, void *tramp, void *func, bool tail)
{
        mutex_lock(&text_mutex);

        if (tramp) {
                __static_call_validate(tramp, true, true);
                __static_call_transform(tramp, __sc_insn(!func, true), func, false);
        }

        if (IS_ENABLED(CONFIG_HAVE_STATIC_CALL_INLINE) && site) {
                __static_call_validate(site, tail, false);
                __static_call_transform(site, __sc_insn(!func, tail), func, false);
        }

        mutex_unlock(&text_mutex);
}
EXPORT_SYMBOL_GPL(arch_static_call_transform);
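
/*
 * Illustrative sketch: arch_static_call_transform() is normally reached via
 * the generic static call API rather than called directly, e.g. (key and
 * function names here are hypothetical):
 *
 *      DEFINE_STATIC_CALL(my_call, my_default_func);
 *      ...
 *      static_call_update(my_call, my_other_func);
 *
 * static_call_update() locates the trampoline (and, with
 * CONFIG_HAVE_STATIC_CALL_INLINE=y, every inline call site recorded for the
 * key) and ends up here to patch the text.
 */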

#ifdef CONFIG_RETHUNK
/*
 * This is called by apply_returns() to fix up static call trampolines,
 * specifically ARCH_DEFINE_STATIC_CALL_NULL_TRAMP which is recorded as
 * having a return trampoline.
 *
 * The problem is that static_call() is available before determining
 * X86_FEATURE_RETHUNK and, by implication, running alternatives.
 *
 * This means that __static_call_transform() above can have overwritten the
 * return trampoline and we now need to fix things up to be consistent.
 */
bool __static_call_fixup(void *tramp, u8 op, void *dest)
{
        if (memcmp(tramp+5, tramp_ud, 3)) {
                /* Not a trampoline site, not our problem. */
                return false;
        }

        mutex_lock(&text_mutex);
        if (op == RET_INSN_OPCODE || dest == &__x86_return_thunk)
                __static_call_transform(tramp, RET, NULL, true);
        mutex_unlock(&text_mutex);

        return true;
}
#endif