@@ -1231,21 +1231,45 @@ impl ThreadId {
                     }
                 }
             } else {
-                use crate::sync::{Mutex, PoisonError};
-
-                static COUNTER: Mutex<u64> = Mutex::new(0);
+                use crate::cell::SyncUnsafeCell;
+                use crate::hint::spin_loop;
+                use crate::sync::atomic::{Atomic, AtomicBool};
+                use crate::thread::yield_now;
+
+                // If we don't have a 64-bit atomic we use a small spinlock. We don't use Mutex
+                // here as we might be trying to get the current thread id in the global allocator,
+                // and on some platforms Mutex requires allocation.
+                static COUNTER_LOCKED: Atomic<bool> = AtomicBool::new(false);
+                static COUNTER: SyncUnsafeCell<u64> = SyncUnsafeCell::new(0);
+
+                // Acquire lock.
+                let mut spin = 0;
+                while COUNTER_LOCKED.compare_exchange_weak(false, true, Ordering::Acquire, Ordering::Relaxed).is_err() {
+                    if spin <= 3 {
+                        for _ in 0..(1 << spin) {
+                            spin_loop();
+                        }
+                    } else {
+                        yield_now();
+                    }
+                    spin += 1;
+                }
 
-                let mut counter = COUNTER.lock().unwrap_or_else(PoisonError::into_inner);
-                let Some(id) = counter.checked_add(1) else {
-                    // in case the panic handler ends up calling `ThreadId::new()`,
-                    // avoid reentrant lock acquire.
-                    drop(counter);
-                    exhausted();
+                let id;
+                // SAFETY: we have an exclusive lock on the counter.
+                unsafe {
+                    id = (*COUNTER.get()).saturating_add(1);
+                    (*COUNTER.get()) = id;
                 };
 
-                *counter = id;
-                drop(counter);
-                ThreadId(NonZero::new(id).unwrap())
+                // Release the lock.
+                COUNTER_LOCKED.store(false, Ordering::Release);
+
+                if id == u64::MAX {
+                    exhausted()
+                } else {
+                    ThreadId(NonZero::new(id).unwrap())
+                }
             }
         }
     }
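
For context outside of std internals, here is a minimal standalone sketch of the same spin-then-yield locking pattern on stable Rust. It uses `AtomicBool` plus `UnsafeCell` directly, since `SyncUnsafeCell` and the `Atomic<T>` alias used above are unstable, std-internal items; the `SpinCounter` and `next_id` names are illustrative only and not part of the patch.

```rust
use std::cell::UnsafeCell;
use std::hint::spin_loop;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread::yield_now;

// Illustrative stand-in for the std internals: a counter guarded by a tiny spinlock.
struct SpinCounter {
    locked: AtomicBool,
    value: UnsafeCell<u64>,
}

// SAFETY: all access to `value` is serialized through `locked`.
unsafe impl Sync for SpinCounter {}

static COUNTER: SpinCounter =
    SpinCounter { locked: AtomicBool::new(false), value: UnsafeCell::new(0) };

fn next_id() -> u64 {
    // Acquire the lock: spin with exponential backoff a few times, then yield to the OS.
    let mut spin = 0;
    while COUNTER
        .locked
        .compare_exchange_weak(false, true, Ordering::Acquire, Ordering::Relaxed)
        .is_err()
    {
        if spin <= 3 {
            for _ in 0..(1 << spin) {
                spin_loop();
            }
        } else {
            yield_now();
        }
        spin += 1;
    }

    // SAFETY: we hold the lock, so we have exclusive access to the counter.
    let id = unsafe {
        let id = (*COUNTER.value.get()).saturating_add(1);
        *COUNTER.value.get() = id;
        id
    };

    // Release the lock.
    COUNTER.locked.store(false, Ordering::Release);
    id
}

fn main() {
    assert_eq!(next_id(), 1);
    assert_eq!(next_id(), 2);
}
```

The backoff mirrors the patch: a handful of exponentially growing `spin_loop` bursts before falling back to `yield_now`, which keeps the lock allocation-free and cheap in the uncontended case.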