@@ -141,6 +141,7 @@
 #include "swift/SIL/BasicBlockUtils.h"
 #include "swift/SIL/DebugUtils.h"
 #include "swift/SIL/DynamicCasts.h"
+#include "swift/SIL/MemAccessUtils.h"
 #include "swift/SIL/OwnershipUtils.h"
 #include "swift/SIL/PrettyStackTrace.h"
 #include "swift/SIL/PrunedLiveness.h"
@@ -157,6 +158,7 @@
 #include "swift/SILOptimizer/Utils/InstructionDeleter.h"
 #include "swift/SILOptimizer/Utils/StackNesting.h"
 #include "llvm/ADT/DenseMap.h"
+#include "llvm/ADT/STLExtras.h"
 #include "llvm/ADT/SetVector.h"
 #include "llvm/Support/CommandLine.h"
 #include "llvm/Support/Debug.h"
@@ -345,6 +347,22 @@ static bool isStoreCopy(SILValue value) {
   if (!copyInst->hasOneUse())
     return false;
 
+  auto source = copyInst->getOperand();
+  if (source->getOwnershipKind() == OwnershipKind::Guaranteed) {
+    // [in_guaranteed_begin_apply_results] If any root of the source is a
+    // begin_apply, we can't rely on projecting from the (rewritten) source:
+    // the store may lie outside the coroutine's range, and projecting there
+    // would access invalid storage.
+    SmallVector<SILValue, 4> roots;
+    findGuaranteedReferenceRoots(source, /*lookThroughNestedBorrows=*/true,
+                                 roots);
+    if (llvm::any_of(roots, [](SILValue root) {
+          return isa_and_nonnull<BeginApplyInst>(root->getDefiningInstruction());
+        })) {
+      return false;
+    }
+  }
+
   auto *user = value->getSingleUse()->getUser();
   return isa<StoreInst>(user);
 }
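
For illustration, a minimal opaque-values SIL sketch of the pattern this check rejects (not part of the patch; the coroutine @yield_x, the type X, and the destination %dest are hypothetical, and the syntax is abbreviated):

    (%v, %token) = begin_apply %yield_x() : $@yield_once @convention(thin) () -> @yields @in_guaranteed X
    %copy = copy_value %v : $X          // guaranteed source rooted in a begin_apply
    end_apply %token
    store %copy to [init] %dest : $*X   // the copy's single use: a store outside the coroutine's range

Treating this copy as a store copy would rewrite the store to read out of the source's storage, i.e. the storage yielded by the coroutine; that storage is only valid between begin_apply and end_apply, so isStoreCopy conservatively returns false when any guaranteed root of the source is a begin_apply.
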
@@ -933,8 +951,19 @@ static bool doesNotNeedStackAllocation(SILValue value) {
   auto *defInst = value->getDefiningInstruction();
   if (!defInst)
     return false;
-
-  if (isa<LoadBorrowInst>(defInst) || isa<BeginApplyInst>(defInst))
+  // [in_guaranteed_begin_apply_results] OSSA ensures that every use of a
+  // guaranteed value resulting from a begin_apply occurs within the
+  // coroutine's range (i.e. "before" the end_apply/abort_apply).
+  // AddressLowering takes advantage of this absence of uses outside the
+  // coroutine's range to use the storage yielded by the coroutine directly
+  // rather than moving the value into local storage.
+  //
+  // It is, however, valid in OSSA to have uses of an owned value produced by
+  // a begin_apply outside of the coroutine's range. In that case, new
+  // storage must be introduced and the value moved into it.
+  if (isa<LoadBorrowInst>(defInst) ||
+      (isa<BeginApplyInst>(defInst) &&
+       value->getOwnershipKind() == OwnershipKind::Guaranteed))
     return true;
 
   return false;
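
A hedged sketch of the distinction the new condition draws (not part of the patch; @yield_guaranteed, @yield_owned, and X are hypothetical, syntax abbreviated):

    // Guaranteed yield: OSSA confines every use to the coroutine's range, so
    // the value can live in the coroutine's yielded storage and needs no
    // separate stack allocation.
    (%g, %gtok) = begin_apply %yield_guaranteed() : $@yield_once @convention(thin) () -> @yields @in_guaranteed X
    %c = copy_value %g : $X
    end_apply %gtok
    destroy_value %c : $X

    // Owned yield: uses may legally follow end_apply, so the value must be
    // moved into its own (stack) storage.
    (%o, %otok) = begin_apply %yield_owned() : $@yield_once @convention(thin) () -> @yields @in X
    end_apply %otok
    destroy_value %o : $X               // use after the coroutine's range
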
@@ -2230,9 +2259,32 @@ void ApplyRewriter::convertBeginApplyWithOpaqueYield() {
       continue;
     }
     if (oldResult.getType().isAddressOnly(*pass.function)) {
-      // Remap storage when an address-only type is yielded as an opaque value.
-      pass.valueStorageMap.setStorageAddress(&oldResult, &newResult);
-      pass.valueStorageMap.getStorage(&oldResult).markRewritten();
+      auto info = newCall->getSubstCalleeConv().getYieldInfoForOperandIndex(i);
+      assert(info.isFormalIndirect());
+      if (info.isConsumed()) {
+        // Because it is legal to have uses of an owned value produced by a
+        // begin_apply after the coroutine's range, AddressLowering must move
+        // the value into local storage so that such out-of-coroutine-range
+        // uses can be rewritten in terms of that address (instead of in terms
+        // of the yielded owned storage, which is no longer valid beyond the
+        // coroutine's range).
+        auto &storage = pass.valueStorageMap.getStorage(&oldResult);
+        auto destAddr = addrMat.materializeAddress(&oldResult);
+        storage.storageAddress = destAddr;
+        storage.markRewritten();
+        resultBuilder.createCopyAddr(callLoc, &newResult, destAddr,
                                     info.isConsumed() ? IsTake : IsNotTake,
                                     IsInitialization);
+      } else {
+        // [in_guaranteed_begin_apply_results] Because OSSA ensures that all
+        // uses of a guaranteed value produced by a begin_apply occur within
+        // the coroutine's range, AddressLowering will not introduce uses of
+        // invalid memory by rewriting the uses of a yielded guaranteed opaque
+        // value as uses of the yielded guaranteed storage. However, it must
+        // still allocate storage for copies of [projections of] such values.
+        pass.valueStorageMap.setStorageAddress(&oldResult, &newResult);
+        pass.valueStorageMap.getStorage(&oldResult).markRewritten();
+      }
       continue;
     }
     assert(oldResult.getType().isObject());
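
After rewriting, the two branches lower roughly as follows (a hedged sketch, not the pass's verbatim output; @yield_owned, @yield_guaranteed, and X are hypothetical):

    // Consumed (@in) yield: copied with a take into caller-local storage so
    // that uses after the coroutine's range remain valid.
    %local = alloc_stack $X
    (%addr, %token) = begin_apply %yield_owned() : $@yield_once @convention(thin) () -> @yields @in X
    copy_addr [take] %addr to [initialization] %local : $*X
    end_apply %token
    // ... later uses are rewritten in terms of %local ...
    destroy_addr %local : $*X
    dealloc_stack %local : $*X

    // Guaranteed (@in_guaranteed) yield: uses are rewritten directly in terms
    // of the yielded address, which stays valid throughout the coroutine's range.
    (%gaddr, %gtok) = begin_apply %yield_guaranteed() : $@yield_once @convention(thin) () -> @yields @in_guaranteed X
    // ... uses of %gaddr here ...
    end_apply %gtok
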
@@ -3271,7 +3323,7 @@ static void emitEndBorrows(SILValue value, AddressLoweringState &pass) {
   SSAPrunedLiveness liveness(&discoveredBlocks);
   liveness.initializeDef(value);
   for (auto *use : usePoints) {
-    assert(!use->isLifetimeEnding());
+    assert(!use->isLifetimeEnding() || isa<EndBorrowInst>(use->getUser()));
     liveness.updateForUse(use->getUser(), /*lifetimeEnding*/ false);
   }
   PrunedLivenessBoundary guaranteedBoundary;