
Commit e084e9f

ubizjak authored and bp3tk0v committed
crypto: x86 - Remove CONFIG_AS_VPCLMULQDQ
The current minimum required version of binutils is 2.30, which supports the VPCLMULQDQ instruction mnemonics.

Remove the check for assembler support of VPCLMULQDQ instructions and all relevant macros for conditional compilation.

No functional change intended.

Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Acked-by: Herbert Xu <herbert@gondor.apana.org.au>
Link: https://lore.kernel.org/20250819085855.333380-3-ubizjak@gmail.com
1 parent 4593311 commit e084e9f
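The removed Kconfig symbol only gated whether the assembler could emit VPCLMULQDQ; whether the running CPU actually supports the instruction is still decided at runtime by the boot_cpu_has() checks in aesni-intel_glue.c, which this patch leaves in place. As a rough userspace analogue of that runtime check (a minimal standalone sketch, not kernel code and not part of this patch; the file name is invented), CPUID leaf 7, subleaf 0 reports VAES in ECX bit 9 and VPCLMULQDQ in ECX bit 10:

/* vpclmul_check.c - standalone illustration, not part of this patch.
 * Compile with: gcc -o vpclmul_check vpclmul_check.c
 */
#include <cpuid.h>
#include <stdio.h>

int main(void)
{
        unsigned int eax, ebx, ecx, edx;

        /* CPUID leaf 7, subleaf 0: ECX bit 9 = VAES, bit 10 = VPCLMULQDQ. */
        if (!__get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx)) {
                puts("CPUID leaf 7 not supported");
                return 1;
        }
        printf("VAES:       %s\n", (ecx & (1u << 9))  ? "yes" : "no");
        printf("VPCLMULQDQ: %s\n", (ecx & (1u << 10)) ? "yes" : "no");
        return 0;
}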

File tree: 5 files changed, +5 -32 lines

arch/x86/Kconfig.assembler

Lines changed: 0 additions & 5 deletions

@@ -6,11 +6,6 @@ config AS_AVX512
 	help
 	  Supported by binutils >= 2.25 and LLVM integrated assembler
 
-config AS_VPCLMULQDQ
-	def_bool $(as-instr,vpclmulqdq \$0x10$(comma)%ymm0$(comma)%ymm1$(comma)%ymm2)
-	help
-	  Supported by binutils >= 2.30 and LLVM integrated assembler
-
 config AS_WRUSS
 	def_bool $(as-instr64,wrussq %rax$(comma)(%rbx))
 	help
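The dropped def_bool probed the assembler with a single VPCLMULQDQ instruction and nothing else. Roughly the same toolchain check can be reproduced outside kbuild with a compile-only C snippet (an illustrative sketch, not part of the kernel build; any binutils >= 2.30 or the LLVM integrated assembler should accept it):

/* as_vpclmulqdq_probe.c - compile-only probe, analogous to the removed
 * $(as-instr,...) test. Build with: gcc -c as_vpclmulqdq_probe.c
 * Do not execute the result on CPUs without VPCLMULQDQ support.
 */
void probe(void)
{
        /* The same instruction the Kconfig test fed to the assembler. */
        asm volatile("vpclmulqdq $0x10, %ymm0, %ymm1, %ymm2");
}

Because the instruction lives in the asm string, only the assembler needs to know the mnemonic; no compiler ISA flag is involved, which is exactly what the $(as-instr,...) probe tested.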

arch/x86/crypto/Makefile

Lines changed: 2 additions & 4 deletions

@@ -46,10 +46,8 @@ obj-$(CONFIG_CRYPTO_AES_NI_INTEL) += aesni-intel.o
 aesni-intel-y := aesni-intel_asm.o aesni-intel_glue.o
 aesni-intel-$(CONFIG_64BIT) += aes-ctr-avx-x86_64.o \
                                aes-gcm-aesni-x86_64.o \
-                               aes-xts-avx-x86_64.o
-ifeq ($(CONFIG_AS_VPCLMULQDQ),y)
-aesni-intel-$(CONFIG_64BIT) += aes-gcm-avx10-x86_64.o
-endif
+                               aes-xts-avx-x86_64.o \
+                               aes-gcm-avx10-x86_64.o
 
 obj-$(CONFIG_CRYPTO_GHASH_CLMUL_NI_INTEL) += ghash-clmulni-intel.o
 ghash-clmulni-intel-y := ghash-clmulni-intel_asm.o ghash-clmulni-intel_glue.o

arch/x86/crypto/aes-ctr-avx-x86_64.S

Lines changed: 0 additions & 2 deletions

@@ -552,7 +552,6 @@ SYM_TYPED_FUNC_START(aes_xctr_crypt_aesni_avx)
 	_aes_ctr_crypt	1
 SYM_FUNC_END(aes_xctr_crypt_aesni_avx)
 
-#if defined(CONFIG_AS_VPCLMULQDQ)
 .set	VL, 32
 .set	USE_AVX512, 0
 SYM_TYPED_FUNC_START(aes_ctr64_crypt_vaes_avx2)
@@ -570,4 +569,3 @@ SYM_FUNC_END(aes_ctr64_crypt_vaes_avx512)
 SYM_TYPED_FUNC_START(aes_xctr_crypt_vaes_avx512)
 	_aes_ctr_crypt	1
 SYM_FUNC_END(aes_xctr_crypt_vaes_avx512)
-#endif // CONFIG_AS_VPCLMULQDQ

arch/x86/crypto/aes-xts-avx-x86_64.S

Lines changed: 0 additions & 2 deletions

@@ -886,7 +886,6 @@ SYM_TYPED_FUNC_START(aes_xts_decrypt_aesni_avx)
 	_aes_xts_crypt	0
 SYM_FUNC_END(aes_xts_decrypt_aesni_avx)
 
-#if defined(CONFIG_AS_VPCLMULQDQ)
 .set	VL, 32
 .set	USE_AVX512, 0
 SYM_TYPED_FUNC_START(aes_xts_encrypt_vaes_avx2)
@@ -904,4 +903,3 @@ SYM_FUNC_END(aes_xts_encrypt_vaes_avx512)
 SYM_TYPED_FUNC_START(aes_xts_decrypt_vaes_avx512)
 	_aes_xts_crypt	0
 SYM_FUNC_END(aes_xts_decrypt_vaes_avx512)
-#endif /* CONFIG_AS_VPCLMULQDQ */

arch/x86/crypto/aesni-intel_glue.c

Lines changed: 3 additions & 19 deletions

@@ -828,10 +828,8 @@ static struct skcipher_alg skcipher_algs_##suffix[] = {{ \
 }}
 
 DEFINE_AVX_SKCIPHER_ALGS(aesni_avx, "aesni-avx", 500);
-#if defined(CONFIG_AS_VPCLMULQDQ)
 DEFINE_AVX_SKCIPHER_ALGS(vaes_avx2, "vaes-avx2", 600);
 DEFINE_AVX_SKCIPHER_ALGS(vaes_avx512, "vaes-avx512", 800);
-#endif
 
 /* The common part of the x86_64 AES-GCM key struct */
 struct aes_gcm_key {
@@ -912,17 +910,8 @@ struct aes_gcm_key_avx10 {
 #define FLAG_RFC4106	BIT(0)
 #define FLAG_ENC	BIT(1)
 #define FLAG_AVX	BIT(2)
-#if defined(CONFIG_AS_VPCLMULQDQ)
-# define FLAG_AVX10_256	BIT(3)
-# define FLAG_AVX10_512	BIT(4)
-#else
-   /*
-    * This should cause all calls to the AVX10 assembly functions to be
-    * optimized out, avoiding the need to ifdef each call individually.
-    */
-# define FLAG_AVX10_256	0
-# define FLAG_AVX10_512	0
-#endif
+#define FLAG_AVX10_256	BIT(3)
+#define FLAG_AVX10_512	BIT(4)
 
 static inline struct aes_gcm_key *
 aes_gcm_key_get(struct crypto_aead *tfm, int flags)
@@ -1519,7 +1508,6 @@ DEFINE_GCM_ALGS(aesni_avx, FLAG_AVX,
 		"generic-gcm-aesni-avx", "rfc4106-gcm-aesni-avx",
 		AES_GCM_KEY_AESNI_SIZE, 500);
 
-#if defined(CONFIG_AS_VPCLMULQDQ)
 /* aes_gcm_algs_vaes_avx10_256 */
 DEFINE_GCM_ALGS(vaes_avx10_256, FLAG_AVX10_256,
 		"generic-gcm-vaes-avx10_256", "rfc4106-gcm-vaes-avx10_256",
@@ -1529,7 +1517,6 @@ DEFINE_GCM_ALGS(vaes_avx10_256, FLAG_AVX10_256,
 DEFINE_GCM_ALGS(vaes_avx10_512, FLAG_AVX10_512,
 		"generic-gcm-vaes-avx10_512", "rfc4106-gcm-vaes-avx10_512",
 		AES_GCM_KEY_AVX10_SIZE, 800);
-#endif /* CONFIG_AS_VPCLMULQDQ */
 
 static int __init register_avx_algs(void)
 {
@@ -1551,7 +1538,6 @@ static int __init register_avx_algs(void)
 	 * Similarly, the assembler support was added at about the same time.
 	 * For simplicity, just always check for VAES and VPCLMULQDQ together.
 	 */
-#if defined(CONFIG_AS_VPCLMULQDQ)
 	if (!boot_cpu_has(X86_FEATURE_AVX2) ||
 	    !boot_cpu_has(X86_FEATURE_VAES) ||
 	    !boot_cpu_has(X86_FEATURE_VPCLMULQDQ) ||
@@ -1592,7 +1578,7 @@ static int __init register_avx_algs(void)
 				    ARRAY_SIZE(aes_gcm_algs_vaes_avx10_512));
 	if (err)
 		return err;
-#endif /* CONFIG_AS_VPCLMULQDQ */
+
 	return 0;
 }
 
@@ -1607,12 +1593,10 @@ static void unregister_avx_algs(void)
 {
 	unregister_skciphers(skcipher_algs_aesni_avx);
 	unregister_aeads(aes_gcm_algs_aesni_avx);
-#if defined(CONFIG_AS_VPCLMULQDQ)
 	unregister_skciphers(skcipher_algs_vaes_avx2);
 	unregister_skciphers(skcipher_algs_vaes_avx512);
 	unregister_aeads(aes_gcm_algs_vaes_avx10_256);
 	unregister_aeads(aes_gcm_algs_vaes_avx10_512);
-#endif
 }
 #else /* CONFIG_X86_64 */
 static struct aead_alg aes_gcm_algs_aesni[0];
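With CONFIG_AS_VPCLMULQDQ gone, the #else fallback that defined FLAG_AVX10_256 and FLAG_AVX10_512 as 0 goes away as well. The removed comment describes a common trick: making a flag a compile-time constant 0 lets the optimizer fold every "flags & FLAG_..." test to false and discard the guarded calls, so no per-call-site #ifdef is needed. A minimal userspace sketch of that pattern (names are invented, not taken from the kernel):

/* flag_fold.c - illustration of the removed FLAG_AVX10_* = 0 trick: a
 * constant-zero flag makes every branch guarded by it dead code.
 * Build the "no support" variant with:   gcc -O2 -c flag_fold.c
 * Build the "with support" variant with: gcc -O2 -DHAVE_WIDE_IMPL -c flag_fold.c
 */
#include <stdio.h>

#ifdef HAVE_WIDE_IMPL
# define FLAG_WIDE 0x8          /* real flag bit */
#else
# define FLAG_WIDE 0            /* constant 0: guarded branches fold away */
#endif

static void wide_impl(void)   { puts("wide (VPCLMULQDQ-style) path"); }
static void narrow_impl(void) { puts("narrow fallback path"); }

void do_crypt(int flags)
{
        if (flags & FLAG_WIDE)  /* becomes "if (0)" when FLAG_WIDE == 0 */
                wide_impl();    /* call is optimized out; no #ifdef needed here */
        else
                narrow_impl();
}

In the kernel case the payoff was that calls to the AVX10 assembly routines, which were themselves only assembled under CONFIG_AS_VPCLMULQDQ, were optimized out instead of producing unresolved references; after this commit the flags are defined unconditionally and the runtime CPU-feature checks alone select the code paths.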

0 commit comments