@@ -89,88 +89,13 @@ use rustc::ty::Ty;
 use rustc::hir;
 use rustc::mir::*;
 use syntax_pos::{DUMMY_SP, Span};
-use rustc_data_structures::fx::FxHashMap;
 use std::collections::hash_map::Entry;
 use std::mem;

-#[derive(Debug)]
-struct Scope {
-    /// The source scope this scope was created in.
-    source_scope: SourceScope,
-
-    /// The region span of this scope within source code.
-    region_scope: region::Scope,
-
-    /// The span of that `region_scope`.
-    region_scope_span: Span,
-
-    /// Whether there's anything to do for the cleanup path, that is,
-    /// when unwinding through this scope. This includes destructors,
-    /// but not StorageDead statements, which don't get emitted at all
-    /// for unwinding, for several reasons:
-    ///  * clang doesn't emit llvm.lifetime.end for C++ unwinding
-    ///  * LLVM's memory dependency analysis can't handle it atm
-    ///  * polluting the cleanup MIR with StorageDead creates
-    ///    landing pads even though there are no actual destructors
-    ///  * freeing up stack space has no effect during unwinding
-    /// Note that for generators we do emit StorageDeads, for the
-    /// use of optimizations in the MIR generator transform.
-    needs_cleanup: bool,
-
-    /// The set of places to drop when exiting this scope. This starts
-    /// out empty but grows as variables are declared during the
-    /// building process. This is a stack, so we always drop from the
-    /// end of the vector (top of the stack) first.
-    drops: Vec<DropData>,
-
-    /// The cache for the drop chain on “normal” exit into a particular BasicBlock.
-    cached_exits: FxHashMap<(BasicBlock, region::Scope), BasicBlock>,
-
-    /// The cache for the drop chain on "generator drop" exit.
-    cached_generator_drop: Option<BasicBlock>,
-
-    /// The cache for the drop chain on "unwind" exit.
-    cached_unwind: CachedBlock,
-}
+crate use stack::{Scope, Scopes};
+use stack::{CachedBlock, DropData};

-#[derive(Debug, Default)]
-pub struct Scopes<'tcx> {
-    scopes: Vec<Scope>,
-    /// The current set of breakable scopes. See module comment for more details.
-    breakable_scopes: Vec<BreakableScope<'tcx>>,
-}
-
-#[derive(Debug)]
-struct DropData {
-    /// The span where the drop obligation was incurred (typically where the place was declared).
-    span: Span,
-
-    /// The local to drop.
-    local: Local,
-
-    /// Whether this is a value Drop or a StorageDead.
-    kind: DropKind,
-
-    /// The cached blocks for unwinds.
-    cached_block: CachedBlock,
-}
-
-#[derive(Debug, Default, Clone, Copy)]
-struct CachedBlock {
-    /// The cached block for the cleanups-on-diverge path. This block
-    /// contains code to run the current drop and all the preceding
-    /// drops (i.e., those having a lower index in the scope's `drops`
-    /// array).
-    unwind: Option<BasicBlock>,
-
-    /// The cached block for unwinds during the cleanups-on-generator-drop path.
-    ///
-    /// This is split from the standard unwind path here to prevent drop
-    /// elaboration from creating drop flags that would have to be captured
-    /// by the generator. I'm not sure how important this optimization is,
-    /// but it is here.
-    generator_drop: Option<BasicBlock>,
-}
+mod stack;

 #[derive(Debug)]
 pub(crate) enum DropKind {
@@ -200,169 +125,6 @@ pub enum BreakableTarget {
     Return,
 }

-impl CachedBlock {
-    fn invalidate(&mut self) {
-        self.generator_drop = None;
-        self.unwind = None;
-    }
-
-    fn get(&self, generator_drop: bool) -> Option<BasicBlock> {
-        if generator_drop {
-            self.generator_drop
-        } else {
-            self.unwind
-        }
-    }
-
-    fn ref_mut(&mut self, generator_drop: bool) -> &mut Option<BasicBlock> {
-        if generator_drop {
-            &mut self.generator_drop
-        } else {
-            &mut self.unwind
-        }
-    }
-}
-
-impl Scope {
-    /// Invalidates all the cached blocks in the scope.
-    ///
-    /// Should always be run for all inner scopes when a drop is pushed into some scope enclosing a
-    /// larger extent of code.
-    ///
-    /// `storage_only` controls whether to invalidate only drop paths that run `StorageDead`.
-    /// `this_scope_only` controls whether to invalidate only drop paths that refer to the current
-    /// top-of-scope (as opposed to dependent scopes).
-    fn invalidate_cache(&mut self, storage_only: bool, is_generator: bool, this_scope_only: bool) {
-        // FIXME: maybe do shared caching of `cached_exits` etc. to handle functions
-        // with lots of `try!`?
-
-        // cached exits drop storage and refer to the top-of-scope
-        self.cached_exits.clear();
-
-        // the current generator drop and unwind refer to top-of-scope
-        self.cached_generator_drop = None;
-
-        let ignore_unwinds = storage_only && !is_generator;
-        if !ignore_unwinds {
-            self.cached_unwind.invalidate();
-        }
-
-        if !ignore_unwinds && !this_scope_only {
-            for drop_data in &mut self.drops {
-                drop_data.cached_block.invalidate();
-            }
-        }
-    }
-
-    /// Given a span and this scope's source scope, make a SourceInfo.
-    fn source_info(&self, span: Span) -> SourceInfo {
-        SourceInfo {
-            span,
-            scope: self.source_scope
-        }
-    }
-}
-
-impl<'tcx> Scopes<'tcx> {
-    fn len(&self) -> usize {
-        self.scopes.len()
-    }
-
-    fn push_scope(&mut self, region_scope: (region::Scope, SourceInfo), vis_scope: SourceScope) {
-        debug!("push_scope({:?})", region_scope);
-        self.scopes.push(Scope {
-            source_scope: vis_scope,
-            region_scope: region_scope.0,
-            region_scope_span: region_scope.1.span,
-            needs_cleanup: false,
-            drops: vec![],
-            cached_generator_drop: None,
-            cached_exits: Default::default(),
-            cached_unwind: CachedBlock::default(),
-        });
-    }
-
-    fn pop_scope(
-        &mut self,
-        region_scope: (region::Scope, SourceInfo),
-    ) -> (Scope, Option<BasicBlock>) {
-        let scope = self.scopes.pop().unwrap();
-        assert_eq!(scope.region_scope, region_scope.0);
-        let unwind_to = self.scopes.last()
-            .and_then(|next_scope| next_scope.cached_unwind.get(false));
-        (scope, unwind_to)
-    }
-
-    fn may_panic(&self, scope_count: usize) -> bool {
-        let len = self.len();
-        self.scopes[(len - scope_count)..].iter().any(|s| s.needs_cleanup)
-    }
-
-    /// Finds the breakable scope for a given label. This is used for
-    /// resolving `return`, `break` and `continue`.
-    fn find_breakable_scope(
-        &self,
-        span: Span,
-        target: BreakableTarget,
-    ) -> (BasicBlock, region::Scope, Option<Place<'tcx>>) {
-        let get_scope = |scope: region::Scope| {
-            // find the loop-scope by its `region::Scope`.
-            self.breakable_scopes.iter()
-                .rfind(|breakable_scope| breakable_scope.region_scope == scope)
-                .unwrap_or_else(|| span_bug!(span, "no enclosing breakable scope found"))
-        };
-        match target {
-            BreakableTarget::Return => {
-                let scope = &self.breakable_scopes[0];
-                if scope.break_destination != Place::return_place() {
-                    span_bug!(span, "`return` in item with no return scope");
-                }
-                (scope.break_block, scope.region_scope, Some(scope.break_destination.clone()))
-            }
-            BreakableTarget::Break(scope) => {
-                let scope = get_scope(scope);
-                (scope.break_block, scope.region_scope, Some(scope.break_destination.clone()))
-            }
-            BreakableTarget::Continue(scope) => {
-                let scope = get_scope(scope);
-                let continue_block = scope.continue_block
-                    .unwrap_or_else(|| span_bug!(span, "missing `continue` block"));
-                (continue_block, scope.region_scope, None)
-            }
-        }
-    }
-
-    fn num_scopes_above(&self, region_scope: region::Scope, span: Span) -> usize {
-        let scope_count = self.scopes.iter().rev()
-            .position(|scope| scope.region_scope == region_scope)
-            .unwrap_or_else(|| {
-                span_bug!(span, "region_scope {:?} does not enclose", region_scope)
-            });
-        let len = self.len();
-        assert!(scope_count < len, "should not use `exit_scope` to pop ALL scopes");
-        scope_count
-    }
-
-    fn iter_mut(&mut self) -> impl DoubleEndedIterator<Item = &mut Scope> + '_ {
-        self.scopes.iter_mut().rev()
-    }
-
-    fn top_scopes(&mut self, count: usize) -> impl DoubleEndedIterator<Item = &mut Scope> + '_ {
-        let len = self.len();
-        self.scopes[len - count..].iter_mut()
-    }
-
-    /// Returns the topmost active scope, which is known to be alive until
-    /// the next scope expression.
-    pub(super) fn topmost(&self) -> region::Scope {
-        self.scopes.last().expect("topmost_scope: no scopes present").region_scope
-    }
-
-    fn source_info(&self, index: usize, span: Span) -> SourceInfo {
-        self.scopes[self.len() - index].source_info(span)
-    }
-}
-
 impl<'a, 'tcx> Builder<'a, 'tcx> {
     // Adding and removing scopes
     // ==========================
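
The new `stack` module itself is not part of this diff; only the removals and the `mod stack;` declaration are shown. Reconstructed purely from the items removed above and the two new imports (`crate use stack::{Scope, Scopes};` versus the private `use stack::{CachedBlock, DropData};`), a plausible skeleton for the new file might look as follows. The file path, import list, and visibilities are inferred, not confirmed by the commit.

// Hypothetical sketch of build/scope/stack.rs; the bodies are the items
// removed above, and the visibilities are guesses based on how the parent
// module imports them.

use rustc::middle::region;
use rustc::mir::*;
use rustc_data_structures::fx::FxHashMap;
use syntax_pos::Span;

use super::{BreakableScope, DropKind};

#[derive(Debug)]
crate struct Scope {
    // ... fields exactly as in the removed `Scope` above ...
}

#[derive(Debug, Default)]
crate struct Scopes<'tcx> {
    scopes: Vec<Scope>,
    breakable_scopes: Vec<BreakableScope<'tcx>>,
}

// Re-imported by the parent with a plain `use`, so crate visibility suffices.
#[derive(Debug)]
crate struct DropData {
    // ... fields as in the removed `DropData` above ...
}

#[derive(Debug, Default, Clone, Copy)]
crate struct CachedBlock {
    // ... fields as in the removed `CachedBlock` above ...
}

// The `impl CachedBlock`, `impl Scope`, and `impl<'tcx> Scopes<'tcx>` blocks
// removed in the second hunk would move here unchanged.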
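Separately from where the code now lives, the invariant enforced by the removed `invalidate_cache` is worth making concrete: once a drop is pushed into a scope, every cached drop-chain block in the scopes nested inside it is stale and must be cleared. Below is a minimal, self-contained toy model of just that rule. It compiles on its own, with a plain `usize` standing in for `BasicBlock` and the storage/generator distinctions of the real code omitted; every name in it is illustrative, not rustc's.

// Toy model of the drop-cache invalidation rule; not rustc code.
type Block = usize; // stand-in for rustc's BasicBlock

#[derive(Debug, Default, Clone, Copy)]
struct CachedBlock {
    unwind: Option<Block>,
    generator_drop: Option<Block>,
}

impl CachedBlock {
    // Same shape as the removed `CachedBlock::invalidate`.
    fn invalidate(&mut self) {
        self.generator_drop = None;
        self.unwind = None;
    }
}

#[derive(Debug, Default)]
struct Scope {
    // One cache per pending drop, newest last (a stack, like `drops` above).
    drop_caches: Vec<CachedBlock>,
    cached_unwind: CachedBlock,
}

impl Scope {
    // Simplified `invalidate_cache`: the storage_only / is_generator
    // refinements of the real code are left out.
    fn invalidate_cache(&mut self, this_scope_only: bool) {
        self.cached_unwind.invalidate();
        if !this_scope_only {
            for cache in &mut self.drop_caches {
                cache.invalidate();
            }
        }
    }
}

#[derive(Debug, Default)]
struct Scopes {
    scopes: Vec<Scope>, // innermost scope last
}

impl Scopes {
    // A new drop in scope `depth` changes what unwinding out of any inner
    // scope has to run, so every inner scope's cached blocks go stale.
    fn push_drop(&mut self, depth: usize) {
        self.scopes[depth].drop_caches.push(CachedBlock::default());
        for inner in &mut self.scopes[depth + 1..] {
            inner.invalidate_cache(false);
        }
    }
}

fn main() {
    let mut stack = Scopes::default();
    stack.scopes.push(Scope::default()); // outer scope
    stack.scopes.push(Scope::default()); // inner scope
    stack.scopes[1].cached_unwind.unwind = Some(3); // pretend block 3 was built

    stack.push_drop(0); // a drop lands in the *outer* scope...
    assert_eq!(stack.scopes[1].cached_unwind.unwind, None); // ...inner cache cleared
    assert_eq!(stack.scopes[1].cached_unwind.generator_drop, None);
    println!("inner unwind cache invalidated: ok");
}

The per-drop caches mirror the real structure: each `DropData` carries its own `CachedBlock`, so a partially built unwind chain can be reused until anything beneath it changes.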