@@ -352,8 +352,10 @@ impl<'a> CoverageSpansGenerator<'a> {
 
         let prev = self.take_prev();
         debug!("  AT END, adding last prev={prev:?}");
-        let pending_dups = self.pending_dups.split_off(0);
-        for dup in pending_dups {
+
+        // Take `pending_dups` so that we can drain it while calling self methods.
+        // It is never used as a field after this point.
+        for dup in std::mem::take(&mut self.pending_dups) {
             debug!("    ...adding at least one pending dup={:?}", dup);
             self.push_refined_span(dup);
         }
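
The change in this hunk is the usual borrow-checker workaround: move the whole `pending_dups` vector out of `self` before the loop, so the loop body is free to call methods that take `&mut self`. Below is a minimal standalone sketch of that move; the `Collector` type and method names are invented for illustration and are not rustc code.

```rust
// Minimal sketch of the `std::mem::take` move (hypothetical `Collector` type).
type Span = (u32, u32);

#[derive(Default)]
struct Collector {
    pending_dups: Vec<Span>,
    refined: Vec<Span>,
}

impl Collector {
    // Takes `&mut self`, so it could not be called while iterating through a
    // borrow of `self.pending_dups` itself.
    fn push_refined_span(&mut self, span: Span) {
        self.refined.push(span);
    }

    fn flush_pending_dups(&mut self) {
        // `std::mem::take` swaps the field with an empty Vec and returns the
        // old contents by value, so the borrow of `self.pending_dups` is
        // already over by the time the loop body runs.
        for dup in std::mem::take(&mut self.pending_dups) {
            self.push_refined_span(dup);
        }
    }
}

fn main() {
    let mut c = Collector::default();
    c.pending_dups = vec![(1, 4), (4, 9)];
    c.flush_pending_dups();
    assert_eq!(c.refined, vec![(1, 4), (4, 9)]);
    assert!(c.pending_dups.is_empty());
}
```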
@@ -470,11 +472,16 @@ impl<'a> CoverageSpansGenerator<'a> {
                 previous iteration, or prev started a new disjoint span"
             );
             if dup.span.hi() <= self.curr().span.lo() {
-                let pending_dups = self.pending_dups.split_off(0);
-                for dup in pending_dups.into_iter() {
+                // Temporarily steal `pending_dups` into a local, so that we can
+                // drain it while calling other self methods.
+                let mut pending_dups = std::mem::take(&mut self.pending_dups);
+                for dup in pending_dups.drain(..) {
                     debug!("    ...adding at least one pending={:?}", dup);
                     self.push_refined_span(dup);
                 }
+                // The list of dups is now empty, but we can recycle its capacity.
+                assert!(pending_dups.is_empty() && self.pending_dups.is_empty());
+                self.pending_dups = pending_dups;
             } else {
                 self.pending_dups.clear();
             }
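
This hunk drains the stolen vector instead of consuming it, then puts the (now empty) vector back so its heap allocation can be reused on later iterations. A rough sketch of that steal/drain/restore idiom, again with made-up names rather than the real `CoverageSpansGenerator` internals:

```rust
// Sketch of the steal/drain/restore idiom (hypothetical `Gen` type).
#[derive(Default)]
struct Gen {
    pending_dups: Vec<u32>,
    refined: Vec<u32>,
}

impl Gen {
    fn push_refined_span(&mut self, v: u32) {
        self.refined.push(v);
    }

    fn flush_pending_dups(&mut self) {
        // Move the Vec out of `self` so the borrow checker allows `&mut self`
        // calls inside the loop.
        let mut pending_dups = std::mem::take(&mut self.pending_dups);
        for dup in pending_dups.drain(..) {
            self.push_refined_span(dup);
        }
        // `drain(..)` emptied the local Vec but kept its heap allocation;
        // putting it back lets later calls reuse that capacity.
        assert!(pending_dups.is_empty() && self.pending_dups.is_empty());
        self.pending_dups = pending_dups;
    }
}

fn main() {
    let mut g = Gen::default();
    g.pending_dups = vec![1, 2, 3];
    let cap = g.pending_dups.capacity();
    g.flush_pending_dups();
    assert_eq!(g.refined, vec![1, 2, 3]);
    assert!(g.pending_dups.capacity() >= cap);
}
```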
@@ -523,7 +530,10 @@ impl<'a> CoverageSpansGenerator<'a> {
         let has_pre_closure_span = prev.span.lo() < right_cutoff;
         let has_post_closure_span = prev.span.hi() > right_cutoff;
 
-        let mut pending_dups = self.pending_dups.split_off(0);
+        // Temporarily steal `pending_dups` into a local, so that we can
+        // mutate and/or drain it while calling other self methods.
+        let mut pending_dups = std::mem::take(&mut self.pending_dups);
+
         if has_pre_closure_span {
             let mut pre_closure = self.prev().clone();
             pre_closure.span = pre_closure.span.with_hi(left_cutoff);
@@ -537,6 +547,7 @@ impl<'a> CoverageSpansGenerator<'a> {
             }
             self.push_refined_span(pre_closure);
         }
+
         if has_post_closure_span {
             // Mutate `prev.span()` to start after the closure (and discard curr).
             // (**NEVER** update `prev_original_span` because it affects the assumptions
@@ -547,12 +558,15 @@ impl<'a> CoverageSpansGenerator<'a> {
                 debug!("    ...and at least one overlapping dup={:?}", dup);
                 dup.span = dup.span.with_lo(right_cutoff);
             }
-            self.pending_dups.append(&mut pending_dups);
             let closure_covspan = self.take_curr(); // Prevent this curr from becoming prev.
             self.push_refined_span(closure_covspan); // since self.prev() was already updated
         } else {
             pending_dups.clear();
         }
+
+        // Restore the modified post-closure spans, or the empty vector's capacity.
+        assert!(self.pending_dups.is_empty());
+        self.pending_dups = pending_dups;
     }
 
     /// Called if `curr.span` equals `prev_original_span` (and potentially equal to all
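
The closure-handling hunks combine both moves: the stolen vector may be mutated in place and then stored back as the new `pending_dups`, or cleared and restored only for its capacity. A compact hypothetical sketch of that shape (invented names and simplified span handling, not the actual rustc method):

```rust
// Steal the Vec, either mutate its elements or clear it, then store it back.
#[derive(Default)]
struct Gen {
    pending_dups: Vec<(u32, u32)>,
    refined: Vec<(u32, u32)>,
}

impl Gen {
    fn push_refined_span(&mut self, span: (u32, u32)) {
        self.refined.push(span);
    }

    fn carve_out(&mut self, right_cutoff: u32, has_post_closure_span: bool) {
        // Steal the field so both branches can call `&mut self` methods freely.
        let mut pending_dups = std::mem::take(&mut self.pending_dups);
        if has_post_closure_span {
            // Trim each pending span to start after the cutoff...
            for dup in pending_dups.iter_mut() {
                dup.0 = right_cutoff;
            }
            // ...and emit a span for the closure itself via a `&mut self` call.
            self.push_refined_span((0, right_cutoff));
        } else {
            pending_dups.clear();
        }
        // Restore either the modified spans or the empty Vec's capacity.
        assert!(self.pending_dups.is_empty());
        self.pending_dups = pending_dups;
    }
}

fn main() {
    let mut g = Gen::default();
    g.pending_dups = vec![(1, 9), (2, 9)];
    g.carve_out(5, true);
    assert_eq!(g.pending_dups, vec![(5, 9), (5, 9)]);
}
```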