@@ -51,9 +51,16 @@ extern "C" {
 #[cfg(test)]
 use stdarch_test::assert_instr;
 
+// Rust compilers without 8a57820bca64a252489790a57cb5ea23db6f9198 need crypto (hence the bootstrap check)
+// LLVM builds without b8baa2a9132498ea286dbb0d03f005760ecc6fdb need crypto for arm (hence the target_arch check)
+
 /// AES single round encryption.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "aes")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(aese))]
 pub unsafe fn vaeseq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
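(Reviewer note, not part of the patch: with the split feature names, AArch64 callers gate these intrinsics on `aes`/`sha2` instead of the old `crypto`. Below is a minimal sketch of what that looks like with runtime detection, assuming a std new enough to export `std::arch::is_aarch64_feature_detected!`; the `encrypt_round` wrapper is hypothetical.)

```rust
// Illustrative sketch only; `encrypt_round` is a hypothetical caller-side wrapper,
// not part of stdarch or of this patch.
#[cfg(target_arch = "aarch64")]
fn encrypt_round(
    data: core::arch::aarch64::uint8x16_t,
    key: core::arch::aarch64::uint8x16_t,
) -> Option<core::arch::aarch64::uint8x16_t> {
    // After this change the intrinsic is gated on `aes` (not `crypto`) on AArch64,
    // so detect that feature at runtime before calling it.
    if std::arch::is_aarch64_feature_detected!("aes") {
        // SAFETY: the `aes` target feature was detected above.
        Some(unsafe { core::arch::aarch64::vaeseq_u8(data, key) })
    } else {
        None
    }
}
```

Whether `aes` alone suffices depends on the rustc/LLVM pairing described in the comment above, hence the `bootstrap`/`target_arch` fallbacks in the patch.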
@@ -62,7 +69,11 @@ pub unsafe fn vaeseq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
 
 /// AES single round decryption.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "aes")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(aesd))]
 pub unsafe fn vaesdq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
@@ -71,7 +82,11 @@ pub unsafe fn vaesdq_u8(data: uint8x16_t, key: uint8x16_t) -> uint8x16_t {
 
 /// AES mix columns.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "aes")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(aesmc))]
 pub unsafe fn vaesmcq_u8(data: uint8x16_t) -> uint8x16_t {
@@ -80,7 +95,11 @@ pub unsafe fn vaesmcq_u8(data: uint8x16_t) -> uint8x16_t {
 
 /// AES inverse mix columns.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "aes")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(aesimc))]
 pub unsafe fn vaesimcq_u8(data: uint8x16_t) -> uint8x16_t {
@@ -89,7 +108,11 @@ pub unsafe fn vaesimcq_u8(data: uint8x16_t) -> uint8x16_t {
 
 /// SHA1 fixed rotate.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha1h))]
 pub unsafe fn vsha1h_u32(hash_e: u32) -> u32 {
@@ -98,7 +121,11 @@ pub unsafe fn vsha1h_u32(hash_e: u32) -> u32 {
 
 /// SHA1 hash update accelerator, choose.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha1c))]
 pub unsafe fn vsha1cq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
@@ -107,7 +134,11 @@ pub unsafe fn vsha1cq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) ->
 
 /// SHA1 hash update accelerator, majority.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha1m))]
 pub unsafe fn vsha1mq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
@@ -116,7 +147,11 @@ pub unsafe fn vsha1mq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) ->
 
 /// SHA1 hash update accelerator, parity.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha1p))]
 pub unsafe fn vsha1pq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) -> uint32x4_t {
@@ -125,7 +160,11 @@ pub unsafe fn vsha1pq_u32(hash_abcd: uint32x4_t, hash_e: u32, wk: uint32x4_t) ->
 
 /// SHA1 schedule update accelerator, first part.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha1su0))]
 pub unsafe fn vsha1su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t, w8_11: uint32x4_t) -> uint32x4_t {
@@ -134,7 +173,11 @@ pub unsafe fn vsha1su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t, w8_11: uint32x4_
 
 /// SHA1 schedule update accelerator, second part.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha1su1))]
 pub unsafe fn vsha1su1q_u32(tw0_3: uint32x4_t, w12_15: uint32x4_t) -> uint32x4_t {
@@ -143,7 +186,11 @@ pub unsafe fn vsha1su1q_u32(tw0_3: uint32x4_t, w12_15: uint32x4_t) -> uint32x4_t
 
 /// SHA256 hash update accelerator.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha256h))]
 pub unsafe fn vsha256hq_u32(
@@ -156,7 +203,11 @@ pub unsafe fn vsha256hq_u32(
 
 /// SHA256 hash update accelerator, upper part.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha256h2))]
 pub unsafe fn vsha256h2q_u32(
@@ -169,7 +220,11 @@ pub unsafe fn vsha256h2q_u32(
 
 /// SHA256 schedule update accelerator, first part.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha256su0))]
 pub unsafe fn vsha256su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t) -> uint32x4_t {
@@ -178,7 +233,11 @@ pub unsafe fn vsha256su0q_u32(w0_3: uint32x4_t, w4_7: uint32x4_t) -> uint32x4_t
 
 /// SHA256 schedule update accelerator, second part.
 #[inline]
-#[target_feature(enable = "crypto")]
+#[cfg_attr(any(bootstrap, target_arch = "arm"), target_feature(enable = "crypto"))]
+#[cfg_attr(
+    not(any(bootstrap, target_arch = "arm")),
+    target_feature(enable = "sha2")
+)]
 #[cfg_attr(target_arch = "arm", target_feature(enable = "v8"))]
 #[cfg_attr(test, assert_instr(sha256su1))]
 pub unsafe fn vsha256su1q_u32(
@@ -196,7 +255,11 @@ mod tests {
     use std::mem;
     use stdarch_test::simd_test;
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(
+        all(not(bootstrap), target_arch = "aarch64"),
+        simd_test(enable = "aes")
+    )]
     unsafe fn test_vaeseq_u8() {
         let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
         let key = mem::transmute(u8x16::new(0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7));
@@ -209,7 +272,11 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(
+        all(not(bootstrap), target_arch = "aarch64"),
+        simd_test(enable = "aes")
+    )]
     unsafe fn test_vaesdq_u8() {
         let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
         let key = mem::transmute(u8x16::new(0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7));
@@ -220,7 +287,11 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(
+        all(not(bootstrap), target_arch = "aarch64"),
+        simd_test(enable = "aes")
+    )]
     unsafe fn test_vaesmcq_u8() {
         let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
         let r: u8x16 = mem::transmute(vaesmcq_u8(data));
@@ -230,7 +301,11 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(
+        all(not(bootstrap), target_arch = "aarch64"),
+        simd_test(enable = "aes")
+    )]
     unsafe fn test_vaesimcq_u8() {
         let data = mem::transmute(u8x16::new(1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8));
         let r: u8x16 = mem::transmute(vaesimcq_u8(data));
@@ -240,13 +315,15 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha1h_u32() {
         assert_eq!(vsha1h_u32(0x1234), 0x048d);
         assert_eq!(vsha1h_u32(0x5678), 0x159e);
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha1su0q_u32() {
         let r: u32x4 = mem::transmute(vsha1su0q_u32(
            mem::transmute(u32x4::new(0x1234_u32, 0x5678_u32, 0x9abc_u32, 0xdef0_u32)),
@@ -256,7 +333,8 @@ mod tests {
         assert_eq!(r, u32x4::new(0x9abc, 0xdef0, 0x1234, 0x5678));
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha1su1q_u32() {
         let r: u32x4 = mem::transmute(vsha1su1q_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
@@ -268,7 +346,8 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha1cq_u32() {
         let r: u32x4 = mem::transmute(vsha1cq_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
@@ -281,7 +360,8 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha1pq_u32() {
         let r: u32x4 = mem::transmute(vsha1pq_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
@@ -294,7 +374,8 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha1mq_u32() {
         let r: u32x4 = mem::transmute(vsha1mq_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
@@ -307,7 +388,8 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha256hq_u32() {
         let r: u32x4 = mem::transmute(vsha256hq_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
@@ -320,7 +402,8 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha256h2q_u32() {
         let r: u32x4 = mem::transmute(vsha256h2q_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
@@ -333,7 +416,8 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha256su0q_u32() {
         let r: u32x4 = mem::transmute(vsha256su0q_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),
@@ -345,7 +429,8 @@ mod tests {
         );
     }
 
-    #[simd_test(enable = "crypto")]
+    #[cfg_attr(any(bootstrap, target_arch = "arm"), simd_test(enable = "crypto"))]
+    #[cfg_attr(not(any(bootstrap, target_arch = "arm")), simd_test(enable = "sha2"))]
     unsafe fn test_vsha256su1q_u32() {
         let r: u32x4 = mem::transmute(vsha256su1q_u32(
             mem::transmute(u32x4::new(0x1234, 0x5678, 0x9abc, 0xdef0)),