|
21 | 21 | #include "swift/SILOptimizer/Utils/CanonicalizeInstruction.h" |
22 | 22 | #include "swift/SIL/DebugUtils.h" |
23 | 23 | #include "swift/SIL/InstructionUtils.h" |
24 | | -#include "swift/SIL/MemAccessUtils.h" |
25 | 24 | #include "swift/SIL/Projection.h" |
26 | 25 | #include "swift/SIL/SILBuilder.h" |
27 | 26 | #include "swift/SIL/SILFunction.h" |
@@ -138,77 +137,6 @@ static void replaceUsesOfExtract(SingleValueInstruction *extract, |
138 | 137 | extract->replaceAllUsesWith(loadedVal); |
139 | 138 | } |
140 | 139 |
|
141 | | -// If \p loadInst has any debug uses, then move it into a separate unsafe access |
142 | | -// scope. This hides it from the exclusivity checker. |
143 | | -// |
144 | | -// If \p loadInst was successfully hidden, then this returns the next |
145 | | -// instruction following \p loadInst and following any newly inserted |
146 | | -// instructions. Otherwise this returns nullptr. Returning nullptr is a signal |
147 | | -// to delete \p loadInst. |
148 | | -// |
149 | | -// Before: |
150 | | -// |
151 | | -// %a = begin_access %0 [read] [unknown] |
152 | | -// %proj = some_projections %a |
153 | | -// %whole = load %proj // <-- loadInst |
154 | | -// %field = struct_element_addr %proj, #field |
155 | | -// %part = load %field |
156 | | -// |
157 | | -// After: |
158 | | -// |
159 | | -// %a = begin_access %0 [read] [unknown] |
160 | | -// %proj = some_projections %a |
161 | | -// %a2 = begin_access %0 [read] [unsafe] // NEW |
162 | | -// %proj2 = some_projections %a // CLONED |
163 | | -// %whole = load %proj2 // <-- loadInst |
164 | | -// end_access %a2 // NEW |
165 | | -// %field = struct_element_addr %proj, #field |
166 | | -// %part = load %field |
167 | | -// |
168 | | -static SILInstruction * |
169 | | -moveLoadToUnsafeAccessScope(LoadInst *loadInst, |
170 | | - CanonicalizeInstruction &pass) { |
171 | | - if (llvm::none_of(loadInst->getUses(), [](Operand *use) { |
172 | | - return use->getUser()->isDebugInstruction(); |
173 | | - })) { |
174 | | - return nullptr; |
175 | | - } |
176 | | - SILValue accessScope = getAccessScope(loadInst->getOperand()); |
177 | | - auto *access = dyn_cast<BeginAccessInst>(accessScope); |
178 | | - if (access && access->getEnforcement() == SILAccessEnforcement::Unsafe) |
179 | | - return nullptr; |
180 | | - |
181 | | - auto checkBaseAddress = [=](SILValue addr) { |
182 | | - if (addr != accessScope) |
183 | | - return SILValue(); |
184 | | - |
185 | | - // the base of the new unsafe scope |
186 | | - if (access) |
187 | | - return access->getOperand(); |
188 | | - |
189 | | - return accessScope; |
190 | | - }; |
191 | | - |
192 | | - if (!canCloneUseDefChain(loadInst->getOperand(), checkBaseAddress)) |
193 | | - return nullptr; |
194 | | - |
195 | | - SILValue newBase = |
196 | | - cloneUseDefChain(loadInst->getOperand(), loadInst, checkBaseAddress); |
197 | | - |
198 | | - auto *beginUnsafe = SILBuilderWithScope(loadInst).createBeginAccess( |
199 | | - loadInst->getLoc(), newBase, SILAccessKind::Read, |
200 | | - SILAccessEnforcement::Unsafe, true, false); |
201 | | - loadInst->setOperand(beginUnsafe); |
202 | | - auto nextInst = loadInst->getNextInstruction(); |
203 | | - auto *endUnsafe = SILBuilderWithScope(nextInst).createEndAccess( |
204 | | - loadInst->getLoc(), beginUnsafe, false); |
205 | | - |
206 | | - pass.notifyNewInstruction(beginUnsafe); |
207 | | - pass.notifyNewInstruction(endUnsafe); |
208 | | - |
209 | | - return nextInst; |
210 | | -} |
211 | | - |
212 | 140 | // Given a load with multiple struct_extracts/tuple_extracts and no other uses, |
213 | 141 | // canonicalize the load into several (struct_element_addr (load)) pairs. |
214 | 142 | // |
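
To make the canonicalization concrete, here is a minimal sketch (not the pass's actual code) of the rewrite described above. It assumes the simple case where every user of the wide load is a struct_extract and the function is not in OSSA; the helper name `splitLoadSketch` is illustrative only.

```cpp
// Minimal sketch of the (struct_element_addr (load)) rewrite, assuming every
// user of the wide load is a struct_extract and the function is not in OSSA
// (hence the Unqualified load ownership). Not the pass's actual code.
static void splitLoadSketch(LoadInst *loadInst) {
  SmallVector<StructExtractInst *, 4> extracts;
  for (Operand *use : loadInst->getUses())
    extracts.push_back(cast<StructExtractInst>(use->getUser()));

  for (auto *extract : extracts) {
    SILBuilderWithScope builder(extract);
    // Project the field's address directly from the load's source address.
    auto *fieldAddr = builder.createStructElementAddr(
        extract->getLoc(), loadInst->getOperand(), extract->getField());
    // Load just that field and forward it to the extract's users.
    auto *narrowLoad = builder.createLoad(extract->getLoc(), fieldAddr,
                                          LoadOwnershipQualifier::Unqualified);
    extract->replaceAllUsesWith(narrowLoad);
    extract->eraseFromParent();
  }
  // The wide load is now dead; the real pass erases it and, as the FIXME
  // below explains, must decide what to do with any remaining debug uses.
}
```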
@@ -373,9 +301,16 @@ splitAggregateLoad(LoadOperation loadInst, CanonicalizeInstruction &pass) { |
373 | 301 | } |
374 | 302 | pass.notifyNewInstruction(**lastNewLoad); |
375 | 303 |
|
376 | | - // FIXME: At -O, create "debug fragments" to recover as much debug info as |
377 | | - // possible by creating debug_value fragments for each new partial |
378 | | - // load. Currently disabled because it caused an LLVM back-end crash. |
| 304 | + // FIXME: This drops debug info at -Onone, but load-splitting is required at |
| 305 | + // -Onone for exclusivity diagnostics. Fix this by: |
| 306 | + // |
| 307 | + // 1. At -Onone, preserve the original load when pass.preserveDebugInfo is |
| 308 | + // true, by moving it out of its current access scope and into an "unsafe" |
| 309 | + // access scope, which won't be enforced as an exclusivity violation. |
| 310 | + // |
| 311 | + // 2. At -O, create "debug fragments" to recover as much debug info as |
| 312 | + // possible by creating debug_value fragments for each new partial load. |
| 313 | + // Currently disabled because of LLVM back-end crashes. |
379 | 314 | if (!pass.preserveDebugInfo && EnableLoadSplittingDebugInfo) { |
380 | 315 | createDebugFragments(*loadInst, proj, lastNewLoad->getLoadInst()); |
381 | 316 | } |
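
For reference, the core of approach (1) as it appeared in the helper removed above (moveLoadToUnsafeAccessScope) looked roughly like the sketch below. The createBeginAccess/createEndAccess calls mirror that helper; `newBase` stands in for the load's address re-derived outside the original access scope, and the cloning of the projection chain is omitted.

```cpp
// Sketch based on the removed moveLoadToUnsafeAccessScope helper: wrap the
// wide load in its own [unsafe] access scope so the exclusivity checker
// ignores it. `newBase` is assumed to be the load's address re-derived from
// outside the original begin_access.
static void hideLoadInUnsafeScope(LoadInst *loadInst, SILValue newBase,
                                  CanonicalizeInstruction &pass) {
  auto *beginUnsafe = SILBuilderWithScope(loadInst).createBeginAccess(
      loadInst->getLoc(), newBase, SILAccessKind::Read,
      SILAccessEnforcement::Unsafe, /*noNestedConflict=*/true,
      /*fromBuiltin=*/false);
  loadInst->setOperand(beginUnsafe);
  auto *endUnsafe = SILBuilderWithScope(loadInst->getNextInstruction())
                        .createEndAccess(loadInst->getLoc(), beginUnsafe,
                                         /*aborting=*/false);
  pass.notifyNewInstruction(beginUnsafe);
  pass.notifyNewInstruction(endUnsafe);
}
```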
@@ -405,23 +340,13 @@ splitAggregateLoad(LoadOperation loadInst, CanonicalizeInstruction &pass) { |
405 | 340 | for (auto *borrow : borrows) |
406 | 341 | nextII = killInstAndIncidentalUses(borrow, nextII, pass); |
407 | 342 |
|
408 | | - // When pass.preserveDebugInfo is true, keep the original load so that debug |
409 | | - // info refers to the loaded value, rather than a memory location which may |
410 | | - // not be reused. Move the wide load out of its current access scope and into |
411 | | - // an "unknown" access scope, which won't be enforced as an exclusivity |
412 | | - // violation. |
413 | | - if (pass.preserveDebugInfo) { |
414 | | - if (auto *regularLoad = dyn_cast<LoadInst>(loadInst.getLoadInst())) { |
415 | | - if (auto *nextInst = moveLoadToUnsafeAccessScope(regularLoad, pass)) |
416 | | - return nextInst->getIterator(); |
417 | | - } |
418 | | - } |
419 | 343 | // Erase the old load. |
420 | 344 | for (auto *destroy : lifetimeEndingInsts) |
421 | 345 | nextII = killInstruction(destroy, nextII, pass); |
422 | 346 |
|
423 | 347 | // FIXME: remove this temporary hack to advance the iterator beyond |
424 | | - // debug_value. |
| 348 | + // debug_value. A soon-to-be-merged commit migrates CanonicalizeInstruction to |
| 349 | + // use InstructionDeleter. |
425 | 350 | while (nextII != loadInst->getParent()->end() |
426 | 351 | && nextII->isDebugInstruction()) { |
427 | 352 | ++nextII; |
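
Once that migration lands, the iterator bookkeeping above could presumably be replaced by queueing dead instructions on the deleter. A hypothetical sketch, with method names assumed from the InstructionDeleter utility mentioned in the FIXME rather than taken from this commit:

```cpp
// Hypothetical sketch only: let InstructionDeleter handle the dead wide load
// and its incidental debug uses instead of manually stepping nextII past
// debug_value instructions. trackIfDead/cleanupDeadInstructions are assumed
// names from the InstructionDeleter utility, not part of this commit.
InstructionDeleter deleter;
deleter.trackIfDead(loadInst.getLoadInst()); // queue the now-unused wide load
deleter.cleanupDeadInstructions();           // batch-delete it and its debug uses
```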
|