@@ -2,10 +2,10 @@
 use std::collections::BTreeMap;
 use std::collections::btree_map::Entry;
-use std::collections::HashSet;
 
 use log::trace;
 
+use rustc_data_structures::fx::FxHashMap;
 use rustc_middle::ty;
 use rustc_target::abi::{Size, HasDataLayout};
 
@@ -24,6 +24,12 @@ pub struct TlsEntry<'tcx> {
     dtor: Option<ty::Instance<'tcx>>,
 }
 
+#[derive(Clone, Debug)]
+struct RunningDtorsState {
+    /// The last TlsKey used to retrieve a TLS destructor.
+    last_dtor_key: Option<TlsKey>,
+}
+
 #[derive(Debug)]
 pub struct TlsData<'tcx> {
     /// The Key to use for the next thread-local allocation.
@@ -36,11 +42,10 @@ pub struct TlsData<'tcx> {
     /// things work on macOS) with a data argument.
     thread_dtors: BTreeMap<ThreadId, (ty::Instance<'tcx>, Scalar<Tag>)>,
 
-    /// Whether we are in the "destruct" phase, during which some operations are UB.
-    dtors_running: HashSet<ThreadId>,
-
-    /// The last TlsKey used to retrieve a TLS destructor.
-    last_dtor_key: BTreeMap<ThreadId, TlsKey>,
+    /// State for currently running TLS dtors. If this map contains a key for a
+    /// specific thread, it means that we are in the "destruct" phase, during
+    /// which some operations are UB.
+    dtors_running: FxHashMap<ThreadId, RunningDtorsState>,
 }
 
 impl<'tcx> Default for TlsData<'tcx> {
@@ -50,7 +55,6 @@ impl<'tcx> Default for TlsData<'tcx> {
             keys: Default::default(),
             thread_dtors: Default::default(),
             dtors_running: Default::default(),
-            last_dtor_key: Default::default(),
         }
     }
 }
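
A note on the data-structure change above: two parallel collections (a `HashSet<ThreadId>` "destruct phase" flag and a separate `last_dtor_key: BTreeMap<ThreadId, TlsKey>` cursor) are merged into one `FxHashMap<ThreadId, RunningDtorsState>`, so the flag and the cursor can no longer drift out of sync. Below is a minimal, self-contained sketch of the pattern; it uses std's `HashMap` (`FxHashMap` is a `HashMap` type alias with a faster hasher) and hypothetical `ThreadId`/`TlsKey` stand-ins rather than Miri's real types.

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for Miri's ThreadId/TlsKey, just so the sketch compiles.
type ThreadId = u32;
type TlsKey = u128;

#[derive(Clone, Debug)]
struct RunningDtorsState {
    last_dtor_key: Option<TlsKey>,
}

fn main() {
    // One map replaces a HashSet<ThreadId> plus a BTreeMap<ThreadId, TlsKey>:
    // key present => this thread is in the "destruct" phase,
    // key absent  => it is not, and there is no stale cursor to forget to clean up.
    let mut dtors_running: HashMap<ThreadId, RunningDtorsState> = HashMap::new();
    dtors_running.insert(0, RunningDtorsState { last_dtor_key: None });
    assert!(dtors_running.contains_key(&0));
    // Removing the entry drops both the flag and the cursor together.
    dtors_running.remove(&0);
    assert!(!dtors_running.contains_key(&0));
}
```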
@@ -135,7 +139,7 @@ impl<'tcx> TlsData<'tcx> {
         dtor: ty::Instance<'tcx>,
         data: Scalar<Tag>
     ) -> InterpResult<'tcx> {
-        if self.dtors_running.contains(&thread) {
+        if self.dtors_running.contains_key(&thread) {
             // UB, according to libstd docs.
             throw_ub_format!("setting thread's local storage destructor while destructors are already running");
         }
@@ -192,6 +196,21 @@ impl<'tcx> TlsData<'tcx> {
         }
         None
     }
+
+    /// Set that dtors are running for `thread`. It is guaranteed not to change
+    /// the existing values stored in `dtors_running` for this thread. Returns
+    /// `true` if dtors for `thread` are already running.
+    fn set_dtors_running_for_thread(&mut self, thread: ThreadId) -> bool {
+        if self.dtors_running.contains_key(&thread) {
+            true
+        } else {
+            self.dtors_running.insert(
+                thread,
+                RunningDtorsState { last_dtor_key: None }
+            );
+            false
+        }
+    }
 }
 
 impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
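
The `contains_key` + `insert` pair in `set_dtors_running_for_thread` performs two hash lookups; the same behavior can also be expressed with the map's `Entry` API in a single lookup. A sketch of that equivalent formulation (not the committed code), again against std's `HashMap` with hypothetical stand-in types:

```rust
use std::collections::hash_map::{Entry, HashMap};

// Hypothetical stand-ins for Miri's types.
type ThreadId = u32;
type TlsKey = u128;

struct RunningDtorsState {
    last_dtor_key: Option<TlsKey>,
}

/// Returns `true` if dtors for `thread` were already running; otherwise
/// records fresh state and returns `false`, with a single hash lookup.
fn set_dtors_running_for_thread(
    dtors_running: &mut HashMap<ThreadId, RunningDtorsState>,
    thread: ThreadId,
) -> bool {
    match dtors_running.entry(thread) {
        Entry::Occupied(_) => true,
        Entry::Vacant(slot) => {
            slot.insert(RunningDtorsState { last_dtor_key: None });
            false
        }
    }
}

fn main() {
    let mut map = HashMap::new();
    assert!(!set_dtors_running_for_thread(&mut map, 7)); // first call: fresh state
    assert!(set_dtors_running_for_thread(&mut map, 7)); // second call: already running
}
```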
@@ -203,7 +222,6 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         let this = self.eval_context_mut();
         let active_thread = this.get_active_thread()?;
         assert_eq!(this.get_total_thread_count()?, 1, "concurrency on Windows not supported");
-        this.machine.tls.dtors_running.insert(active_thread);
         // Windows has a special magic linker section that is run on certain events.
         // Instead of searching for that section and supporting arbitrary hooks in there
         // (that would be basically https://github.com/rust-lang/miri/issues/450),
@@ -260,7 +278,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
 
         assert!(this.has_terminated(active_thread)?, "running TLS dtors for non-terminated thread");
         // Fetch next dtor after `key`.
-        let last_key = this.machine.tls.last_dtor_key.get(&active_thread).cloned();
+        let last_key = this.machine.tls.dtors_running[&active_thread].last_dtor_key.clone();
         let dtor = match this.machine.tls.fetch_tls_dtor(last_key, active_thread) {
             dtor @ Some(_) => dtor,
             // We ran each dtor once, start over from the beginning.
@@ -269,7 +287,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
             }
         };
         if let Some((instance, ptr, key)) = dtor {
-            this.machine.tls.last_dtor_key.insert(active_thread, key);
+            this.machine.tls.dtors_running.get_mut(&active_thread).unwrap().last_dtor_key = Some(key);
             trace!("Running TLS dtor {:?} on {:?} at {:?}", instance, ptr, active_thread);
             assert!(!this.is_null(ptr).unwrap(), "data can't be NULL when dtor is called!");
 
@@ -284,7 +302,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
             this.enable_thread(active_thread)?;
             return Ok(());
         }
-        this.machine.tls.last_dtor_key.remove(&active_thread);
+        this.machine.tls.dtors_running.get_mut(&active_thread).unwrap().last_dtor_key = None;
 
         Ok(())
     }
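
`last_dtor_key` acts as a resumption cursor: the dtor machinery yields back to the scheduler after each destructor call, so on re-entry it fetches the first key strictly after the cursor, or starts from the beginning when the cursor is `None`. A rough sketch of that "fetch next after" pattern over a `BTreeMap` (mirroring the shape of `fetch_tls_dtor`, not its exact code):

```rust
use std::collections::BTreeMap;
use std::ops::Bound;

// Hypothetical stand-in for Miri's TlsKey.
type TlsKey = u128;

/// Return the first key strictly after `cursor`, or the smallest key when
/// there is no cursor yet. `None` means every entry has been visited.
fn fetch_next_after(keys: &BTreeMap<TlsKey, &str>, cursor: Option<TlsKey>) -> Option<TlsKey> {
    let lower = match cursor {
        Some(k) => Bound::Excluded(k),
        None => Bound::Unbounded,
    };
    keys.range((lower, Bound::Unbounded)).next().map(|(k, _)| *k)
}

fn main() {
    let keys: BTreeMap<TlsKey, &str> = [(1, "a"), (5, "b")].into_iter().collect();
    assert_eq!(fetch_next_after(&keys, None), Some(1)); // start of a pass
    assert_eq!(fetch_next_after(&keys, Some(1)), Some(5)); // resume after key 1
    assert_eq!(fetch_next_after(&keys, Some(5)), None); // pass complete
}
```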
@@ -305,12 +323,11 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         let active_thread = this.get_active_thread()?;
 
         if this.tcx.sess.target.target.target_os == "windows" {
-            if !this.machine.tls.dtors_running.contains(&active_thread) {
-                this.machine.tls.dtors_running.insert(active_thread);
+            if !this.machine.tls.set_dtors_running_for_thread(active_thread) {
                 this.schedule_windows_tls_dtors()?;
             }
         } else {
-            this.machine.tls.dtors_running.insert(active_thread);
+            this.machine.tls.set_dtors_running_for_thread(active_thread);
             // The macOS thread wide destructor runs "before any TLS slots get
             // freed", so do that first.
             this.schedule_macos_tls_dtor()?;