@@ -1,15 +1,16 @@
+use rustc_data_structures::graph::iterate::{
+    ControlFlow, NodeStatus, TriColorDepthFirstSearch, TriColorVisitor,
+};
 use rustc_hir::def_id::DefId;
 use rustc_hir::intravisit::FnKind;
-use rustc_index::bit_set::BitSet;
-use rustc_index::vec::IndexVec;
 use rustc_middle::hir::map::blocks::FnLikeNode;
-use rustc_middle::mir::{BasicBlock, Body, ReadOnlyBodyAndCache, TerminatorKind, START_BLOCK};
-use rustc_middle::ty::subst::InternalSubsts;
+use rustc_middle::mir::{BasicBlock, Body, Operand, TerminatorKind};
+use rustc_middle::ty::subst::{GenericArg, InternalSubsts};
 use rustc_middle::ty::{self, AssocItem, AssocItemContainer, Instance, TyCtxt};
 use rustc_session::lint::builtin::UNCONDITIONAL_RECURSION;
-use std::collections::VecDeque;
+use rustc_span::Span;
 
-crate fn check<'tcx>(tcx: TyCtxt<'tcx>, body: &ReadOnlyBodyAndCache<'_, 'tcx>, def_id: DefId) {
+crate fn check<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, def_id: DefId) {
     let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
 
     if let Some(fn_like_node) = FnLikeNode::from_node(tcx.hir().get(hir_id)) {
@@ -18,158 +19,133 @@ crate fn check<'tcx>(tcx: TyCtxt<'tcx>, body: &ReadOnlyBodyAndCache<'_, 'tcx>, def_id: DefId) {
             return;
         }
 
-        check_fn_for_unconditional_recursion(tcx, body, def_id);
-    }
-}
-
-fn check_fn_for_unconditional_recursion<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    body: &ReadOnlyBodyAndCache<'_, 'tcx>,
-    def_id: DefId,
-) {
-    let self_calls = find_blocks_calling_self(tcx, &body, def_id);
-
-    // Stores a list of `Span`s for every basic block. Those are the spans of self-calls where we
-    // know that one of them will definitely be reached. If the list is empty, the block either
-    // wasn't processed yet or will not always go to a self-call.
-    let mut results = IndexVec::from_elem_n(vec![], body.basic_blocks().len());
-
-    // We start the analysis at the self calls and work backwards.
-    let mut queue: VecDeque<_> = self_calls.iter().collect();
-
-    while let Some(bb) = queue.pop_front() {
-        if !results[bb].is_empty() {
-            // Already propagated.
-            continue;
-        }
-
-        let locations = if self_calls.contains(bb) {
-            // `bb` *is* a self-call.
-            // We don't look at successors here because they are irrelevant here and we don't want
-            // to lint them (eg. `f(); f()` should only lint the first call).
-            vec![bb]
-        } else {
-            // If *all* successors of `bb` lead to a self-call, emit notes at all of their
-            // locations.
-
-            // Determine all "relevant" successors. We ignore successors only reached via unwinding.
-            let terminator = body[bb].terminator();
-            let relevant_successors = match &terminator.kind {
-                TerminatorKind::Call { destination: None, .. }
-                | TerminatorKind::Yield { .. }
-                | TerminatorKind::GeneratorDrop => None.into_iter().chain(&[]),
-                TerminatorKind::SwitchInt { targets, .. } => None.into_iter().chain(targets),
-                TerminatorKind::Goto { target }
-                | TerminatorKind::Drop { target, .. }
-                | TerminatorKind::DropAndReplace { target, .. }
-                | TerminatorKind::Assert { target, .. }
-                | TerminatorKind::FalseEdges { real_target: target, .. }
-                | TerminatorKind::FalseUnwind { real_target: target, .. }
-                | TerminatorKind::Call { destination: Some((_, target)), .. } => {
-                    Some(target).into_iter().chain(&[])
-                }
-                TerminatorKind::Resume
-                | TerminatorKind::Abort
-                | TerminatorKind::Return
-                | TerminatorKind::Unreachable => {
-                    // We propagate backwards, so these should never be encountered here.
-                    unreachable!("unexpected terminator {:?}", terminator.kind)
-                }
-            };
-
-            // If all our successors are known to lead to self-calls, then we do too.
-            let all_are_self_calls =
-                relevant_successors.clone().all(|&succ| !results[succ].is_empty());
-
-            if all_are_self_calls {
-                // We'll definitely lead to a self-call. Merge all call locations of the successors
-                // for linting them later.
-                relevant_successors.flat_map(|&succ| results[succ].iter().copied()).collect()
-            } else {
-                // At least 1 successor does not always lead to a self-call, so we also don't.
-                vec![]
+        // If this is trait/impl method, extract the trait's substs.
+        let trait_substs = match tcx.opt_associated_item(def_id) {
+            Some(AssocItem {
+                container: AssocItemContainer::TraitContainer(trait_def_id), ..
+            }) => {
+                let trait_substs_count = tcx.generics_of(trait_def_id).count();
+                &InternalSubsts::identity_for_item(tcx, def_id)[..trait_substs_count]
             }
+            _ => &[],
         };
 
-        if !locations.is_empty() {
-            // This is a newly confirmed-to-always-reach-self-call block.
-            results[bb] = locations;
-
-            // Propagate backwards through the CFG.
-            debug!("propagate loc={:?} in {:?} -> {:?}", results[bb], bb, body.predecessors()[bb]);
-            queue.extend(body.predecessors()[bb].iter().copied());
+        let mut vis = Search { tcx, body, def_id, reachable_recursive_calls: vec![], trait_substs };
+        if let Some(NonRecursive) = TriColorDepthFirstSearch::new(&body).run_from_start(&mut vis) {
+            return;
         }
-    }
-
-    debug!("unconditional recursion results: {:?}", results);
 
-    let self_call_locations = &mut results[START_BLOCK];
-    self_call_locations.sort();
-    self_call_locations.dedup();
+        vis.reachable_recursive_calls.sort();
 
-    if !self_call_locations.is_empty() {
         let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
         let sp = tcx.sess.source_map().guess_head_span(tcx.hir().span(hir_id));
         tcx.struct_span_lint_hir(UNCONDITIONAL_RECURSION, hir_id, sp, |lint| {
             let mut db = lint.build("function cannot return without recursing");
             db.span_label(sp, "cannot return without recursing");
             // offer some help to the programmer.
-            for bb in self_call_locations {
-                let span = body.basic_blocks()[*bb].terminator().source_info.span;
-                db.span_label(span, "recursive call site");
+            for call_span in vis.reachable_recursive_calls {
+                db.span_label(call_span, "recursive call site");
             }
             db.help("a `loop` may express intention better if this is on purpose");
             db.emit();
         });
     }
 }
 
-/// Finds blocks with `Call` terminators that would end up calling back into the same method.
-fn find_blocks_calling_self<'tcx>(
+struct NonRecursive;
+
+struct Search<'mir, 'tcx> {
     tcx: TyCtxt<'tcx>,
-    body: &Body<'tcx>,
+    body: &'mir Body<'tcx>,
     def_id: DefId,
-) -> BitSet<BasicBlock> {
-    let param_env = tcx.param_env(def_id);
+    trait_substs: &'tcx [GenericArg<'tcx>],
 
-    // If this is trait/impl method, extract the trait's substs.
-    let trait_substs_count = match tcx.opt_associated_item(def_id) {
-        Some(AssocItem { container: AssocItemContainer::TraitContainer(trait_def_id), .. }) => {
-            tcx.generics_of(trait_def_id).count()
+    reachable_recursive_calls: Vec<Span>,
+}
+
+impl<'mir, 'tcx> Search<'mir, 'tcx> {
+    /// Returns `true` if `func` refers to the function we are searching in.
+    fn is_recursive_call(&self, func: &Operand<'tcx>) -> bool {
+        let Search { tcx, body, def_id, trait_substs, .. } = *self;
+        let param_env = tcx.param_env(def_id);
+
+        let func_ty = func.ty(body, tcx);
+        if let ty::FnDef(fn_def_id, substs) = func_ty.kind {
+            let (call_fn_id, call_substs) =
+                if let Some(instance) = Instance::resolve(tcx, param_env, fn_def_id, substs) {
+                    (instance.def_id(), instance.substs)
                } else {
                    (fn_def_id, substs)
                };

+            // FIXME(#57965): Make this work across function boundaries
+
+            // If this is a trait fn, the substs on the trait have to match, or we might be
+            // calling into an entirely different method (for example, a call from the default
+            // method in the trait to `<A as Trait<B>>::method`, where `A` and/or `B` are
+            // specific types).
+            return call_fn_id == def_id && &call_substs[..trait_substs.len()] == trait_substs;
+        }
+
+        false
+    }
+}
+
+impl<'mir, 'tcx> TriColorVisitor<&'mir Body<'tcx>> for Search<'mir, 'tcx> {
+    type BreakVal = NonRecursive;
+
+    fn node_examined(
+        &mut self,
+        bb: BasicBlock,
+        prior_status: Option<NodeStatus>,
+    ) -> ControlFlow<Self::BreakVal> {
+        // Back-edge in the CFG (loop).
+        if let Some(NodeStatus::Visited) = prior_status {
+            return ControlFlow::Break(NonRecursive);
+        }
+
+        match self.body[bb].terminator().kind {
+            // These terminators return control flow to the caller.
+            TerminatorKind::Abort
+            | TerminatorKind::GeneratorDrop
+            | TerminatorKind::Resume
+            | TerminatorKind::Return
+            | TerminatorKind::Unreachable
+            | TerminatorKind::Yield { .. } => ControlFlow::Break(NonRecursive),
+
+            // These do not.
+            TerminatorKind::Assert { .. }
+            | TerminatorKind::Call { .. }
+            | TerminatorKind::Drop { .. }
+            | TerminatorKind::DropAndReplace { .. }
+            | TerminatorKind::FalseEdges { .. }
+            | TerminatorKind::FalseUnwind { .. }
+            | TerminatorKind::Goto { .. }
+            | TerminatorKind::SwitchInt { .. } => ControlFlow::Continue,
         }
-        _ => 0,
-    };
-    let trait_substs = &InternalSubsts::identity_for_item(tcx, def_id)[..trait_substs_count];
-
-    let mut self_calls = BitSet::new_empty(body.basic_blocks().len());
-
-    for (bb, data) in body.basic_blocks().iter_enumerated() {
-        if let TerminatorKind::Call { func, .. } = &data.terminator().kind {
-            let func_ty = func.ty(body, tcx);
-
-            if let ty::FnDef(fn_def_id, substs) = func_ty.kind {
-                let (call_fn_id, call_substs) =
-                    if let Some(instance) = Instance::resolve(tcx, param_env, fn_def_id, substs) {
-                        (instance.def_id(), instance.substs)
-                    } else {
-                        (fn_def_id, substs)
-                    };
-
-                // FIXME(#57965): Make this work across function boundaries
-
-                // If this is a trait fn, the substs on the trait have to match, or we might be
-                // calling into an entirely different method (for example, a call from the default
-                // method in the trait to `<A as Trait<B>>::method`, where `A` and/or `B` are
-                // specific types).
-                let is_self_call =
-                    call_fn_id == def_id && &call_substs[..trait_substs.len()] == trait_substs;
-
-                if is_self_call {
-                    self_calls.insert(bb);
-                }
+    }
+
+    fn node_settled(&mut self, bb: BasicBlock) -> ControlFlow<Self::BreakVal> {
+        // When we examine a node for the last time, remember it if it is a recursive call.
+        let terminator = self.body[bb].terminator();
+        if let TerminatorKind::Call { func, .. } = &terminator.kind {
+            if self.is_recursive_call(func) {
+                self.reachable_recursive_calls.push(terminator.source_info.span);
             }
         }
+
+        ControlFlow::Continue
    }

-    self_calls
+    fn ignore_edge(&mut self, bb: BasicBlock, target: BasicBlock) -> bool {
+        // Don't traverse successors of recursive calls or false CFG edges.
+        match self.body[bb].terminator().kind {
+            TerminatorKind::Call { ref func, .. } => self.is_recursive_call(func),
+
+            TerminatorKind::FalseUnwind { unwind: Some(imaginary_target), .. }
+            | TerminatorKind::FalseEdges { imaginary_target, .. } => imaginary_target == target,
+
+            _ => false,
+        }
+    }
 }
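
For context, a rough sketch (not part of this commit) of what the rewritten lint reports. The tri-color depth-first search starts at the MIR entry block and breaks out with `NonRecursive` as soon as it reaches a terminator that returns control to the caller, or a CFG back-edge, without first passing through a self-call (edges out of recursive calls and imaginary edges are ignored). If no such block is reachable, every path recurses and the lint fires. The names `foo`, `bar`, and `Tick` below are made up for illustration.

    // Flagged: every path from the entry block reaches the recursive call,
    // so the search never settles on a `Return` terminator first.
    fn foo() -> u32 {
        foo() // labelled "recursive call site"
    }

    // Not flagged: the `n == 0` arm returns without recursing, so
    // `node_examined` breaks with `NonRecursive` on that path.
    fn bar(n: u32) -> u32 {
        if n == 0 { 0 } else { bar(n - 1) }
    }

    trait Tick {
        // Flagged: the default body calls the same method with the trait's
        // identity substs, which is exactly what `is_recursive_call` checks.
        fn tick(&self) -> u32 {
            self.tick()
        }
    }

    fn main() {}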