|
42 | 42 | #define RSB_FILL_LOOPS 16 /* To avoid underflow */ |
43 | 43 |
|
44 | 44 | /* |
| 45 | + * Common helper for __FILL_RETURN_BUFFER and __FILL_ONE_RETURN. |
| 46 | + */ |
| 47 | +#define __FILL_RETURN_SLOT \ |
| 48 | + ANNOTATE_INTRA_FUNCTION_CALL; \ |
| 49 | + call 772f; \ |
| 50 | + int3; \ |
| 51 | +772: |
| 52 | + |
| 53 | +/* |
| 54 | + * Stuff the entire RSB. |
| 55 | + * |
45 | 56 | * Google experimented with loop-unrolling and this turned out to be |
46 | 57 | * the optimal version - two calls, each with their own speculation |
47 | 58 | * trap should their return address end up getting used, in a loop. |
48 | 59 | */ |
49 | | -#define __FILL_RETURN_BUFFER(reg, nr, sp) \ |
50 | | - mov $(nr/2), reg; \ |
51 | | -771: \ |
52 | | - ANNOTATE_INTRA_FUNCTION_CALL; \ |
53 | | - call 772f; \ |
54 | | -773: /* speculation trap */ \ |
55 | | - UNWIND_HINT_EMPTY; \ |
56 | | - pause; \ |
57 | | - lfence; \ |
58 | | - jmp 773b; \ |
59 | | -772: \ |
60 | | - ANNOTATE_INTRA_FUNCTION_CALL; \ |
61 | | - call 774f; \ |
62 | | -775: /* speculation trap */ \ |
63 | | - UNWIND_HINT_EMPTY; \ |
64 | | - pause; \ |
65 | | - lfence; \ |
66 | | - jmp 775b; \ |
67 | | -774: \ |
68 | | - add $(BITS_PER_LONG/8) * 2, sp; \ |
69 | | - dec reg; \ |
70 | | - jnz 771b; \ |
71 | | - /* barrier for jnz misprediction */ \ |
| 60 | +#define __FILL_RETURN_BUFFER(reg, nr) \ |
| 61 | + mov $(nr/2), reg; \ |
| 62 | +771: \ |
| 63 | + __FILL_RETURN_SLOT \ |
| 64 | + __FILL_RETURN_SLOT \ |
| 65 | + add $(BITS_PER_LONG/8) * 2, %_ASM_SP; \ |
| 66 | + dec reg; \ |
| 67 | + jnz 771b; \ |
| 68 | + /* barrier for jnz misprediction */ \ |
| 69 | + lfence; |
| 70 | + |
| 71 | +/* |
| 72 | + * Stuff a single RSB slot. |
| 73 | + * |
| 74 | + * To mitigate Post-Barrier RSB speculation, one CALL instruction must be |
| 75 | + * forced to retire before letting a RET instruction execute. |
| 76 | + * |
| 77 | + * On PBRSB-vulnerable CPUs, it is not safe for a RET to be executed |
| 78 | + * before this point. |
| 79 | + */ |
| 80 | +#define __FILL_ONE_RETURN \ |
| 81 | + __FILL_RETURN_SLOT \ |
| 82 | + add $(BITS_PER_LONG/8), %_ASM_SP; \ |
72 | 83 | lfence; |
73 | 84 |
|
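For reference, once __FILL_RETURN_SLOT is substituted twice, the rewritten __FILL_RETURN_BUFFER(reg, nr) expands to roughly the following. The register and loop count are whatever the caller passes as reg and nr (%_ASM_CX and RSB_FILL_LOOPS are used here purely as an illustration), and the repeated 772 labels are fine because GNU as resolves 772f to the nearest following definition:

	mov	$(RSB_FILL_LOOPS/2), %_ASM_CX		/* illustrative reg/nr choice */
771:
	ANNOTATE_INTRA_FUNCTION_CALL
	call	772f		/* plants a benign return address in the RSB */
	int3			/* speculation trap if that RSB entry is consumed */
772:
	ANNOTATE_INTRA_FUNCTION_CALL
	call	772f
	int3
772:
	add	$(BITS_PER_LONG/8) * 2, %_ASM_SP	/* discard the two architectural return addresses */
	dec	%_ASM_CX
	jnz	771b
	/* barrier for jnz misprediction */
	lfence

__FILL_ONE_RETURN is the same building block used once: a single call/int3 slot followed by add $(BITS_PER_LONG/8), %_ASM_SP and lfence, so exactly one CALL retires before any later RET can execute.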
74 | 85 | #ifdef __ASSEMBLY__ |
|
146 | 157 | #endif |
147 | 158 | .endm |
148 | 159 |
|
149 | | -.macro ISSUE_UNBALANCED_RET_GUARD |
150 | | - ANNOTATE_INTRA_FUNCTION_CALL |
151 | | - call .Lunbalanced_ret_guard_\@ |
152 | | - int3 |
153 | | -.Lunbalanced_ret_guard_\@: |
154 | | - add $(BITS_PER_LONG/8), %_ASM_SP |
155 | | - lfence |
156 | | -.endm |
157 | | - |
158 | 160 | /* |
159 | 161 | * A simpler FILL_RETURN_BUFFER macro. Don't make people use the CPP |
160 | 162 | * monstrosity above, manually. |
161 | 163 | */ |
162 | | -.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2 |
163 | | -.ifb \ftr2 |
164 | | - ALTERNATIVE "jmp .Lskip_rsb_\@", "", \ftr |
165 | | -.else |
166 | | - ALTERNATIVE_2 "jmp .Lskip_rsb_\@", "", \ftr, "jmp .Lunbalanced_\@", \ftr2 |
167 | | -.endif |
168 | | - __FILL_RETURN_BUFFER(\reg,\nr,%_ASM_SP) |
169 | | -.Lunbalanced_\@: |
170 | | - ISSUE_UNBALANCED_RET_GUARD |
| 164 | +.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2=ALT_NOT(X86_FEATURE_ALWAYS) |
| 165 | + ALTERNATIVE_2 "jmp .Lskip_rsb_\@", \ |
| 166 | + __stringify(__FILL_RETURN_BUFFER(\reg,\nr)), \ftr, \ |
| 167 | + __stringify(__FILL_ONE_RETURN), \ftr2 |
| 168 | + |
171 | 169 | .Lskip_rsb_\@: |
172 | 170 | .endm |
173 | 171 |
|
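As a usage sketch, call sites invoke the asm macro and let alternatives patching pick between the two bodies; the registers, loop count and feature bits below are illustrative (modelled on typical RSB-stuffing call sites, not copied from this commit):

	/* Full stuff when the first feature is set, one-slot PBRSB guard when
	 * the second one is set, nothing when neither applies. */
	FILL_RETURN_BUFFER %_ASM_CX, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_VMEXIT, X86_FEATURE_RSB_VMEXIT_LITE

	/* Single-feature form: ftr2 defaults to ALT_NOT(X86_FEATURE_ALWAYS),
	 * so the one-slot alternative never patches in. */
	FILL_RETURN_BUFFER %_ASM_AX, RSB_CLEAR_LOOPS, X86_FEATURE_RSB_CTXSW

In either form the named register is clobbered, so callers pass a scratch register that is dead at that point.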