@@ -195,123 +195,6 @@ static inline void __deactivate_cptr_traps(struct kvm_vcpu *vcpu)
 	__deactivate_cptr_traps_nvhe(vcpu);
 }
 
-#define reg_to_fgt_masks(reg)					\
-	({							\
-		struct fgt_masks *m;				\
-		switch(reg) {					\
-		case HFGRTR_EL2:				\
-			m = &hfgrtr_masks;			\
-			break;					\
-		case HFGWTR_EL2:				\
-			m = &hfgwtr_masks;			\
-			break;					\
-		case HFGITR_EL2:				\
-			m = &hfgitr_masks;			\
-			break;					\
-		case HDFGRTR_EL2:				\
-			m = &hdfgrtr_masks;			\
-			break;					\
-		case HDFGWTR_EL2:				\
-			m = &hdfgwtr_masks;			\
-			break;					\
-		case HAFGRTR_EL2:				\
-			m = &hafgrtr_masks;			\
-			break;					\
-		case HFGRTR2_EL2:				\
-			m = &hfgrtr2_masks;			\
-			break;					\
-		case HFGWTR2_EL2:				\
-			m = &hfgwtr2_masks;			\
-			break;					\
-		case HFGITR2_EL2:				\
-			m = &hfgitr2_masks;			\
-			break;					\
-		case HDFGRTR2_EL2:				\
-			m = &hdfgrtr2_masks;			\
-			break;					\
-		case HDFGWTR2_EL2:				\
-			m = &hdfgwtr2_masks;			\
-			break;					\
-		default:					\
-			BUILD_BUG_ON(1);			\
-		}						\
-								\
-		m;						\
-	})
-
-#define compute_clr_set(vcpu, reg, clr, set)			\
-	do {							\
-		u64 hfg = __vcpu_sys_reg(vcpu, reg);		\
-		struct fgt_masks *m = reg_to_fgt_masks(reg);	\
-		set |= hfg & m->mask;				\
-		clr |= ~hfg & m->nmask;				\
-	} while(0)
-
-#define reg_to_fgt_group_id(reg)				\
-	({							\
-		enum fgt_group_id id;				\
-		switch(reg) {					\
-		case HFGRTR_EL2:				\
-		case HFGWTR_EL2:				\
-			id = HFGRTR_GROUP;			\
-			break;					\
-		case HFGITR_EL2:				\
-			id = HFGITR_GROUP;			\
-			break;					\
-		case HDFGRTR_EL2:				\
-		case HDFGWTR_EL2:				\
-			id = HDFGRTR_GROUP;			\
-			break;					\
-		case HAFGRTR_EL2:				\
-			id = HAFGRTR_GROUP;			\
-			break;					\
-		case HFGRTR2_EL2:				\
-		case HFGWTR2_EL2:				\
-			id = HFGRTR2_GROUP;			\
-			break;					\
-		case HFGITR2_EL2:				\
-			id = HFGITR2_GROUP;			\
-			break;					\
-		case HDFGRTR2_EL2:				\
-		case HDFGWTR2_EL2:				\
-			id = HDFGRTR2_GROUP;			\
-			break;					\
-		default:					\
-			BUILD_BUG_ON(1);			\
-		}						\
-								\
-		id;						\
-	})
-
-#define compute_undef_clr_set(vcpu, kvm, reg, clr, set)		\
-	do {								\
-		u64 hfg = kvm->arch.fgu[reg_to_fgt_group_id(reg)];	\
-		struct fgt_masks *m = reg_to_fgt_masks(reg);		\
-		set |= hfg & m->mask;					\
-		clr |= hfg & m->nmask;					\
-	} while(0)
-
-#define update_fgt_traps_cs(hctxt, vcpu, kvm, reg, clr, set)		\
-	do {								\
-		struct fgt_masks *m = reg_to_fgt_masks(reg);		\
-		u64 c = clr, s = set;					\
-		u64 val;						\
-									\
-		ctxt_sys_reg(hctxt, reg) = read_sysreg_s(SYS_ ## reg);	\
-		if (is_nested_ctxt(vcpu))				\
-			compute_clr_set(vcpu, reg, c, s);		\
-									\
-		compute_undef_clr_set(vcpu, kvm, reg, c, s);		\
-									\
-		val = m->nmask;						\
-		val |= s;						\
-		val &= ~c;						\
-		write_sysreg_s(val, SYS_ ## reg);			\
-	} while(0)
-
-#define update_fgt_traps(hctxt, vcpu, kvm, reg)			\
-	update_fgt_traps_cs(hctxt, vcpu, kvm, reg, 0, 0)
-
 static inline bool cpu_has_amu(void)
 {
 	u64 pfr0 = read_sysreg_s(SYS_ID_AA64PFR0_EL1);
@@ -320,33 +203,36 @@ static inline bool cpu_has_amu(void)
 						  ID_AA64PFR0_EL1_AMU_SHIFT);
 }
 
+#define __activate_fgt(hctxt, vcpu, reg)				\
+	do {								\
+		ctxt_sys_reg(hctxt, reg) = read_sysreg_s(SYS_ ## reg);	\
+		write_sysreg_s(*vcpu_fgt(vcpu, reg), SYS_ ## reg);	\
+	} while (0)
+
 static inline void __activate_traps_hfgxtr(struct kvm_vcpu *vcpu)
 {
 	struct kvm_cpu_context *hctxt = host_data_ptr(host_ctxt);
-	struct kvm *kvm = kern_hyp_va(vcpu->kvm);
 
 	if (!cpus_have_final_cap(ARM64_HAS_FGT))
 		return;
 
-	update_fgt_traps(hctxt, vcpu, kvm, HFGRTR_EL2);
-	update_fgt_traps_cs(hctxt, vcpu, kvm, HFGWTR_EL2, 0,
-			    cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38) ?
-			    HFGWTR_EL2_TCR_EL1_MASK : 0);
-	update_fgt_traps(hctxt, vcpu, kvm, HFGITR_EL2);
-	update_fgt_traps(hctxt, vcpu, kvm, HDFGRTR_EL2);
-	update_fgt_traps(hctxt, vcpu, kvm, HDFGWTR_EL2);
+	__activate_fgt(hctxt, vcpu, HFGRTR_EL2);
+	__activate_fgt(hctxt, vcpu, HFGWTR_EL2);
+	__activate_fgt(hctxt, vcpu, HFGITR_EL2);
+	__activate_fgt(hctxt, vcpu, HDFGRTR_EL2);
+	__activate_fgt(hctxt, vcpu, HDFGWTR_EL2);
 
 	if (cpu_has_amu())
-		update_fgt_traps(hctxt, vcpu, kvm, HAFGRTR_EL2);
+		__activate_fgt(hctxt, vcpu, HAFGRTR_EL2);
 
 	if (!cpus_have_final_cap(ARM64_HAS_FGT2))
 		return;
 
-	update_fgt_traps(hctxt, vcpu, kvm, HFGRTR2_EL2);
-	update_fgt_traps(hctxt, vcpu, kvm, HFGWTR2_EL2);
-	update_fgt_traps(hctxt, vcpu, kvm, HFGITR2_EL2);
-	update_fgt_traps(hctxt, vcpu, kvm, HDFGRTR2_EL2);
-	update_fgt_traps(hctxt, vcpu, kvm, HDFGWTR2_EL2);
+	__activate_fgt(hctxt, vcpu, HFGRTR2_EL2);
+	__activate_fgt(hctxt, vcpu, HFGWTR2_EL2);
+	__activate_fgt(hctxt, vcpu, HFGITR2_EL2);
+	__activate_fgt(hctxt, vcpu, HDFGRTR2_EL2);
+	__activate_fgt(hctxt, vcpu, HDFGWTR2_EL2);
 }
 
 #define __deactivate_fgt(htcxt, vcpu, reg)				\
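
Note: the new __activate_fgt path relies on vcpu_fgt() handing back a pointer to a trap value that was computed ahead of time, outside the world-switch path, which is why the per-register clr/set machinery deleted above is no longer needed here. As a rough illustrative sketch only (the per-vCPU fgt[] array and the fgt_index() helper are assumptions for illustration and are not part of this hunk), such an accessor could look like:

/*
 * Illustrative sketch, not taken from this patch: one precomputed u64 per
 * fine-grained trap register, stored in a hypothetical vcpu->arch.fgt[]
 * array that is filled once when the vcpu is loaded.
 */
static inline int fgt_index(enum vcpu_sysreg reg)
{
	switch (reg) {
	case HFGRTR_EL2:	return 0;
	case HFGWTR_EL2:	return 1;
	case HFGITR_EL2:	return 2;
	case HDFGRTR_EL2:	return 3;
	case HDFGWTR_EL2:	return 4;
	case HAFGRTR_EL2:	return 5;
	case HFGRTR2_EL2:	return 6;
	case HFGWTR2_EL2:	return 7;
	case HFGITR2_EL2:	return 8;
	case HDFGRTR2_EL2:	return 9;
	case HDFGWTR2_EL2:	return 10;
	default:		BUG();
	}
}

static inline u64 *vcpu_fgt(struct kvm_vcpu *vcpu, enum vcpu_sysreg reg)
{
	/* Dereferenced by __activate_fgt() above on every world switch. */
	return &vcpu->arch.fgt[fgt_index(reg)];
}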