
Commit 2f0a750

Merge tag 'x86_cleanups_for_v6.18_rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 cleanups from Borislav Petkov:

 - Simplify inline asm flag output operands now that the minimum
   compiler version supports the =@ccCOND syntax

 - Remove a bunch of AS_* Kconfig symbols which detect assembler support
   for various instruction mnemonics now that the minimum assembler
   version supports them all

 - The usual cleanups all over the place

* tag 'x86_cleanups_for_v6.18_rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/asm: Remove code depending on __GCC_ASM_FLAG_OUTPUTS__
  x86/sgx: Use ENCLS mnemonic in <kernel/cpu/sgx/encls.h>
  x86/mtrr: Remove license boilerplate text with bad FSF address
  x86/asm: Use RDPKRU and WRPKRU mnemonics in <asm/special_insns.h>
  x86/idle: Use MONITORX and MWAITX mnemonics in <asm/mwait.h>
  x86/entry/fred: Push __KERNEL_CS directly
  x86/kconfig: Remove CONFIG_AS_AVX512
  crypto: x86 - Remove CONFIG_AS_VPCLMULQDQ
  crypto: x86 - Remove CONFIG_AS_VAES
  crypto: x86 - Remove CONFIG_AS_GFNI
  x86/kconfig: Drop unused and needless config X86_64_SMP
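As background on the first item: the =@ccCOND output constraint lets the compiler read a CPU condition code directly as an asm output, where the old CC_SET()/CC_OUT() macro pair had to fall back to an explicit SETcc instruction on pre-flag-output compilers. A minimal sketch of the syntax, assuming x86-64 and a compiler with asm flag outputs (GCC >= 6 or a recent Clang); the helper below is hypothetical, not kernel code:

#include <stdbool.h>

/* "=@ccc" binds the output to the carry flag (CF) left behind by the
 * ADD, so the compiler can branch on the flag directly instead of
 * materializing it with a separate SETC instruction. */
static inline bool add_overflows(unsigned long a, unsigned long b,
				 unsigned long *sum)
{
	bool carry;

	asm("addq %2, %0"
	    : "+r" (a), "=@ccc" (carry)
	    : "r" (b));
	*sum = a;
	return carry;
}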
2 parents: 6bb71f0 + c6c973d

34 files changed: +55 −239 lines

arch/x86/Kconfig

Lines changed: 0 additions & 4 deletions
@@ -412,10 +412,6 @@ config HAVE_INTEL_TXT
 	def_bool y
 	depends on INTEL_IOMMU && ACPI
 
-config X86_64_SMP
-	def_bool y
-	depends on X86_64 && SMP
-
 config ARCH_SUPPORTS_UPROBES
 	def_bool y
 

arch/x86/Kconfig.assembler

Lines changed: 0 additions & 20 deletions
@@ -1,26 +1,6 @@
 # SPDX-License-Identifier: GPL-2.0
 # Copyright (C) 2020 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 
-config AS_AVX512
-	def_bool $(as-instr,vpmovm2b %k1$(comma)%zmm5)
-	help
-	  Supported by binutils >= 2.25 and LLVM integrated assembler
-
-config AS_GFNI
-	def_bool $(as-instr,vgf2p8mulb %xmm0$(comma)%xmm1$(comma)%xmm2)
-	help
-	  Supported by binutils >= 2.30 and LLVM integrated assembler
-
-config AS_VAES
-	def_bool $(as-instr,vaesenc %ymm0$(comma)%ymm1$(comma)%ymm2)
-	help
-	  Supported by binutils >= 2.30 and LLVM integrated assembler
-
-config AS_VPCLMULQDQ
-	def_bool $(as-instr,vpclmulqdq \$0x10$(comma)%ymm0$(comma)%ymm1$(comma)%ymm2)
-	help
-	  Supported by binutils >= 2.30 and LLVM integrated assembler
-
 config AS_WRUSS
 	def_bool $(as-instr64,wrussq %rax$(comma)(%rbx))
 	help

arch/x86/boot/bitops.h

Lines changed: 1 addition & 1 deletion
@@ -27,7 +27,7 @@ static inline bool variable_test_bit(int nr, const void *addr)
 	bool v;
 	const u32 *p = addr;
 
-	asm("btl %2,%1" CC_SET(c) : CC_OUT(c) (v) : "m" (*p), "Ir" (nr));
+	asm("btl %2,%1" : "=@ccc" (v) : "m" (*p), "Ir" (nr));
 	return v;
 }
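For context, the CC_SET()/CC_OUT() pair being removed expanded roughly as follows (paraphrased from arch/x86/include/asm/asm.h; treat as illustrative rather than verbatim). On compilers with flag outputs, CC_OUT(c) became the "=@ccc" constraint now written directly; otherwise CC_SET(c) appended a SETcc instruction to the asm template and CC_OUT(c) fell back to a plain register/memory output:

#ifdef __GCC_ASM_FLAG_OUTPUTS__
# define CC_SET(c)	"\n\t/* output condition code " #c " */\n"
# define CC_OUT(c)	"=@cc" #c
#else
# define CC_SET(c)	"\n\tset" #c " %[_cc_" #c "]\n"
# define CC_OUT(c)	[_cc_ ## c] "=qm"
#endif

With the minimum compiler raised past GCC 6, only the first branch could ever be taken, which is why the macros can be dropped in favor of spelling =@ccCOND out.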

arch/x86/boot/boot.h

Lines changed: 4 additions & 4 deletions
@@ -155,15 +155,15 @@ static inline void wrgs32(u32 v, addr_t addr)
 static inline bool memcmp_fs(const void *s1, addr_t s2, size_t len)
 {
 	bool diff;
-	asm volatile("fs repe cmpsb" CC_SET(nz)
-		     : CC_OUT(nz) (diff), "+D" (s1), "+S" (s2), "+c" (len));
+	asm volatile("fs repe cmpsb"
+		     : "=@ccnz" (diff), "+D" (s1), "+S" (s2), "+c" (len));
 	return diff;
 }
 static inline bool memcmp_gs(const void *s1, addr_t s2, size_t len)
 {
 	bool diff;
-	asm volatile("gs repe cmpsb" CC_SET(nz)
-		     : CC_OUT(nz) (diff), "+D" (s1), "+S" (s2), "+c" (len));
+	asm volatile("gs repe cmpsb"
+		     : "=@ccnz" (diff), "+D" (s1), "+S" (s2), "+c" (len));
 	return diff;
 }

arch/x86/boot/string.c

Lines changed: 2 additions & 2 deletions
@@ -32,8 +32,8 @@
 int memcmp(const void *s1, const void *s2, size_t len)
 {
 	bool diff;
-	asm("repe cmpsb" CC_SET(nz)
-	    : CC_OUT(nz) (diff), "+D" (s1), "+S" (s2), "+c" (len));
+	asm("repe cmpsb"
+	    : "=@ccnz" (diff), "+D" (s1), "+S" (s2), "+c" (len));
 	return diff;
 }
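The same "=@ccnz" pattern can be exercised in a standalone userspace test, which is handy for verifying the semantics: ZF is set by the last byte comparison of REPE CMPSB, so the NZ condition reads true exactly when the buffers differ. A hedged sketch (hypothetical test program, x86-64 GCC/Clang assumed; note the added "memory" clobber, which the boot code omits):

#include <assert.h>
#include <stdbool.h>
#include <stddef.h>

static int memcmp_differs(const void *s1, const void *s2, size_t len)
{
	bool diff;

	/* "+D"/"+S"/"+c" pin the operands to RDI/RSI/RCX as REPE CMPSB
	 * requires; "=@ccnz" turns ZF==0 into a bool. */
	asm("repe cmpsb"
	    : "=@ccnz" (diff), "+D" (s1), "+S" (s2), "+c" (len)
	    : : "memory");
	return diff;
}

int main(void)
{
	assert(!memcmp_differs("abcd", "abcd", 4));
	assert(memcmp_differs("abcd", "abcx", 4));
	return 0;
}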

arch/x86/crypto/Kconfig

Lines changed: 1 addition & 1 deletion
@@ -306,7 +306,7 @@ config CRYPTO_ARIA_AESNI_AVX2_X86_64
 
 config CRYPTO_ARIA_GFNI_AVX512_X86_64
 	tristate "Ciphers: ARIA with modes: ECB, CTR (AVX512/GFNI)"
-	depends on 64BIT && AS_GFNI
+	depends on 64BIT
 	select CRYPTO_SKCIPHER
 	select CRYPTO_ALGAPI
 	select CRYPTO_ARIA

arch/x86/crypto/Makefile

Lines changed: 2 additions & 4 deletions
@@ -46,10 +46,8 @@ obj-$(CONFIG_CRYPTO_AES_NI_INTEL) += aesni-intel.o
 aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o
 aesni-intel-$(CONFIG_64BIT) += aes-ctr-avx-x86_64.o \
 			       aes-gcm-aesni-x86_64.o \
-			       aes-xts-avx-x86_64.o
-ifeq ($(CONFIG_AS_VAES)$(CONFIG_AS_VPCLMULQDQ),yy)
-aesni-intel-$(CONFIG_64BIT) += aes-gcm-avx10-x86_64.o
-endif
+			       aes-xts-avx-x86_64.o \
+			       aes-gcm-avx10-x86_64.o
 
 obj-$(CONFIG_CRYPTO_GHASH_CLMUL_NI_INTEL) += ghash-clmulni-intel.o
 ghash-clmulni-intel-y := ghash-clmulni-intel_asm.o ghash-clmulni-intel_glue.o

arch/x86/crypto/aes-ctr-avx-x86_64.S

Lines changed: 0 additions & 2 deletions
@@ -552,7 +552,6 @@ SYM_TYPED_FUNC_START(aes_xctr_crypt_aesni_avx)
 	_aes_ctr_crypt	1
 SYM_FUNC_END(aes_xctr_crypt_aesni_avx)
 
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
 .set	VL, 32
 .set	USE_AVX512, 0
 SYM_TYPED_FUNC_START(aes_ctr64_crypt_vaes_avx2)
@@ -570,4 +569,3 @@ SYM_FUNC_END(aes_ctr64_crypt_vaes_avx512)
 SYM_TYPED_FUNC_START(aes_xctr_crypt_vaes_avx512)
 	_aes_ctr_crypt	1
 SYM_FUNC_END(aes_xctr_crypt_vaes_avx512)
-#endif // CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ

arch/x86/crypto/aes-xts-avx-x86_64.S

Lines changed: 0 additions & 2 deletions
@@ -886,7 +886,6 @@ SYM_TYPED_FUNC_START(aes_xts_decrypt_aesni_avx)
 	_aes_xts_crypt	0
 SYM_FUNC_END(aes_xts_decrypt_aesni_avx)
 
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
 .set	VL, 32
 .set	USE_AVX512, 0
 SYM_TYPED_FUNC_START(aes_xts_encrypt_vaes_avx2)
@@ -904,4 +903,3 @@ SYM_FUNC_END(aes_xts_encrypt_vaes_avx512)
 SYM_TYPED_FUNC_START(aes_xts_decrypt_vaes_avx512)
 	_aes_xts_crypt	0
 SYM_FUNC_END(aes_xts_decrypt_vaes_avx512)
-#endif /* CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ */

arch/x86/crypto/aesni-intel_glue.c

Lines changed: 3 additions & 19 deletions
@@ -828,10 +828,8 @@ static struct skcipher_alg skcipher_algs_##suffix[] = {{ \
 }}
 
 DEFINE_AVX_SKCIPHER_ALGS(aesni_avx, "aesni-avx", 500);
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
 DEFINE_AVX_SKCIPHER_ALGS(vaes_avx2, "vaes-avx2", 600);
 DEFINE_AVX_SKCIPHER_ALGS(vaes_avx512, "vaes-avx512", 800);
-#endif
 
 /* The common part of the x86_64 AES-GCM key struct */
 struct aes_gcm_key {
@@ -912,17 +910,8 @@ struct aes_gcm_key_avx10 {
 #define FLAG_RFC4106	BIT(0)
 #define FLAG_ENC	BIT(1)
 #define FLAG_AVX	BIT(2)
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
-# define FLAG_AVX10_256	BIT(3)
-# define FLAG_AVX10_512	BIT(4)
-#else
-/*
- * This should cause all calls to the AVX10 assembly functions to be
- * optimized out, avoiding the need to ifdef each call individually.
- */
-# define FLAG_AVX10_256	0
-# define FLAG_AVX10_512	0
-#endif
+#define FLAG_AVX10_256	BIT(3)
+#define FLAG_AVX10_512	BIT(4)
 
 static inline struct aes_gcm_key *
 aes_gcm_key_get(struct crypto_aead *tfm, int flags)
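The comment deleted in this hunk documented a trick worth spelling out: when a flag macro expands to the constant 0, any `flags & FLAG_...` test is provably false, so the compiler drops the guarded call and no undefined reference to the never-assembled function survives to link time. A self-contained sketch of that pattern (names hypothetical, not the kernel's):

#define FLAG_AVX10	0	/* as in the removed #else branch */

void avx10_encrypt(void);	/* no definition exists in this build */

void do_encrypt(int flags)
{
	/* Constant-folded to false, so the call below is eliminated at
	 * the kernel's normal optimization levels and never reaches the
	 * linker. */
	if (flags & FLAG_AVX10)
		avx10_encrypt();
}

With the AS_* symbols gone, the AVX10 code is always assembled, so the flags can be real bits unconditionally, as the + lines above show.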
@@ -1519,7 +1508,6 @@ DEFINE_GCM_ALGS(aesni_avx, FLAG_AVX,
 		"generic-gcm-aesni-avx", "rfc4106-gcm-aesni-avx",
 		AES_GCM_KEY_AESNI_SIZE, 500);
 
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
 /* aes_gcm_algs_vaes_avx10_256 */
 DEFINE_GCM_ALGS(vaes_avx10_256, FLAG_AVX10_256,
 		"generic-gcm-vaes-avx10_256", "rfc4106-gcm-vaes-avx10_256",
@@ -1529,7 +1517,6 @@ DEFINE_GCM_ALGS(vaes_avx10_256, FLAG_AVX10_256,
 DEFINE_GCM_ALGS(vaes_avx10_512, FLAG_AVX10_512,
 		"generic-gcm-vaes-avx10_512", "rfc4106-gcm-vaes-avx10_512",
 		AES_GCM_KEY_AVX10_SIZE, 800);
-#endif /* CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ */
 
 static int __init register_avx_algs(void)
 {
@@ -1551,7 +1538,6 @@ static int __init register_avx_algs(void)
 	 * Similarly, the assembler support was added at about the same time.
 	 * For simplicity, just always check for VAES and VPCLMULQDQ together.
 	 */
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
 	if (!boot_cpu_has(X86_FEATURE_AVX2) ||
 	    !boot_cpu_has(X86_FEATURE_VAES) ||
 	    !boot_cpu_has(X86_FEATURE_VPCLMULQDQ) ||
@@ -1592,7 +1578,7 @@ static int __init register_avx_algs(void)
 			ARRAY_SIZE(aes_gcm_algs_vaes_avx10_512));
 	if (err)
 		return err;
-#endif /* CONFIG_AS_VAES && CONFIG_AS_VPCLMULQDQ */
+
 	return 0;
 }

@@ -1607,12 +1593,10 @@ static void unregister_avx_algs(void)
 {
 	unregister_skciphers(skcipher_algs_aesni_avx);
 	unregister_aeads(aes_gcm_algs_aesni_avx);
-#if defined(CONFIG_AS_VAES) && defined(CONFIG_AS_VPCLMULQDQ)
 	unregister_skciphers(skcipher_algs_vaes_avx2);
 	unregister_skciphers(skcipher_algs_vaes_avx512);
 	unregister_aeads(aes_gcm_algs_vaes_avx10_256);
 	unregister_aeads(aes_gcm_algs_vaes_avx10_512);
-#endif
 }
 #else /* CONFIG_X86_64 */
 static struct aead_alg aes_gcm_algs_aesni[0];
