
Commit 5b8a1a7

LoopInvariantCodeMotion: don't extend "read" access scopes over memory writes to the same address.
We must not do this even if the write is _not_ in an access scope, because this would confuse alias analysis. Fixes a mis-compile.
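To make the pattern concrete, here is a rough source-level sketch (illustrative only: the type, global, and function names are invented; the real reproducer is the SIL test added in this commit). The read of the global inside the loop is conceptually guarded by a dynamic "read" access, while the occasional store is not inside any access scope. If LICM hoisted the begin_access/end_access pair out of the loop, the store would end up covered by the extended read-only scope even though it is not itself in an access scope, which, per the commit message above, confuses alias analysis and led to a miscompile.

// Illustrative sketch only; not the committed test case.
struct X {
  var i: Int
}

var globalX = X(i: 1)

func sumWithOccasionalReset(iterations: Int, replacement: X) -> Int {
  var sum = 0
  for n in 0..<iterations {
    // Read of the global; in SIL this read is typically guarded by a
    // dynamic exclusivity check (begin_access [read] ... end_access).
    sum += globalX.i
    if n % 10 == 9 {
      // Write to the same memory. Extending the loop's "read" access
      // scope over this store is exactly what the commit forbids.
      globalX = replacement
    }
  }
  return sum
}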

File tree

2 files changed: +71, -40 lines

SwiftCompilerSources/Sources/Optimizer/FunctionPasses/LoopInvariantCodeMotion.swift

Lines changed: 40 additions & 40 deletions
@@ -540,7 +540,35 @@ private extension AnalyzedInstructions {
       sideEffect.mayWrite(toAddress: address, aliasAnalysis)
     }
   }
-
+
+  func sideEffectsMayWrite(to address: Value,
+                           outsideOf scope: InstructionRange,
+                           _ context: FunctionPassContext
+  ) -> Bool {
+    for sideEffectInst in loopSideEffects {
+      if sideEffectInst.mayWrite(toAddress: address, context.aliasAnalysis),
+         !scope.inclusiveRangeContains(sideEffectInst)
+      {
+        return true
+      }
+    }
+    return false
+  }
+
+  func sideEffectsMayReadOrWrite(to address: Value,
+                                 outsideOf scope: InstructionRange,
+                                 _ context: FunctionPassContext
+  ) -> Bool {
+    for sideEffectInst in loopSideEffects {
+      if sideEffectInst.mayReadOrWrite(address: address, context.aliasAnalysis),
+         !scope.inclusiveRangeContains(sideEffectInst)
+      {
+        return true
+      }
+    }
+    return false
+  }
+
   /// Find all loads that contain `accessPath`. Split them into a load with
   /// identical `accessPath` and a set of non-overlapping loads. Add the new
   /// non-overlapping loads to `loads`.
@@ -1208,47 +1236,19 @@ private extension ScopedInstruction {
     case is BeginApplyInst:
       return true // Has already been checked with other full applies.
     case let loadBorrowInst as LoadBorrowInst:
-      for sideEffectInst in analyzedInstructions.loopSideEffects {
-        if let endBorrow = sideEffectInst as? EndBorrowInst,
-           let begin = endBorrow.borrow as? LoadBorrowInst,
-           begin == self
-        {
-          continue
-        }
-        if sideEffectInst.mayWrite(toAddress: loadBorrowInst.address, context.aliasAnalysis),
-           !scope.contains(sideEffectInst)
-        {
-          return false
-        }
-      }
-      return true
+      return !analyzedInstructions.sideEffectsMayWrite(to: loadBorrowInst.address, outsideOf: scope, context)

     case let beginAccess as BeginAccessInst:
-      for fullApplyInst in analyzedInstructions.fullApplies {
-        guard mayWriteToMemory && fullApplyInst.mayReadOrWrite(address: beginAccess.address, context.aliasAnalysis) ||
-              !mayWriteToMemory && fullApplyInst.mayWrite(toAddress: beginAccess.address, context.aliasAnalysis) else {
-          continue
-        }
-
-        // After hoisting the begin/end_access the apply will be within the scope, so it must not have a conflicting access.
-        if !scope.contains(fullApplyInst) {
-          return false
-        }
-      }
-
-      switch beginAccess.address.accessPath.base {
-      case .class, .global:
-        for sideEffect in analyzedInstructions.loopSideEffects where sideEffect.mayRelease {
-          // Since a class might have a deinitializer, hoisting begin/end_access pair could violate
-          // exclusive access if the deinitializer accesses address used by begin_access.
-          if !scope.contains(sideEffect) {
-            return false
-          }
-        }
-
-        return true
-      default:
-        return true
+      if beginAccess.accessKind == .read {
+        // Check that we don't generate nested accesses when extending the access scope. Also, we must not
+        // extend a "read" access scope over a memory write (to the same address) even if the write is _not_
+        // in an access scope, because this would confuse alias analysis.
+        return !analyzedInstructions.sideEffectsMayWrite(to: beginAccess.address, outsideOf: scope, context)
+      } else {
+        // This does not include memory-reading instructions which are not in `loopSideEffects`, like a
+        // plain `load`. This is fine because we can extend a "modify" access scope over memory reads
+        // (of the same address) as long as we are not generating nested accesses.
+        return !analyzedInstructions.sideEffectsMayReadOrWrite(to: beginAccess.address, outsideOf: scope, context)
       }
     case is BeginBorrowInst, is StoreBorrowInst:
       // Ensure the value is produced outside the loop.
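The decision logic in the BeginAccessInst case above can be summarized outside of the compiler. The following is a simplified standalone sketch, not the SwiftCompilerSources API: AccessKind, SideEffect, and canExtendAccessScope are hypothetical stand-ins for the pass's internal types. A "read" scope may only be extended if no loop side effect outside the current scope may write to the accessed address, while any other ("modify") scope must additionally not be extended over side effects that may read the address, so that no nested access is created.

// Simplified, self-contained model of the scope-extension check.
// `AccessKind`, `SideEffect`, and `canExtendAccessScope` are hypothetical
// stand-ins for the compiler-internal types used by the pass.

enum AccessKind {
  case read
  case modify
}

struct SideEffect {
  let name: String
  let position: Int            // position of the instruction in the loop body
  let mayWriteToAddress: Bool  // may write to the accessed address
  let mayReadAddress: Bool     // may read the accessed address
}

/// Returns true if an access scope of the given kind may be extended over the
/// whole loop, given the loop's side effects and the range of positions the
/// scope already covers.
func canExtendAccessScope(kind: AccessKind,
                          loopSideEffects: [SideEffect],
                          scope: ClosedRange<Int>) -> Bool {
  for effect in loopSideEffects where !scope.contains(effect.position) {
    switch kind {
    case .read:
      // A "read" scope must not be extended over any write to the address,
      // even a write that is not itself inside an access scope.
      if effect.mayWriteToAddress { return false }
    case .modify:
      // A "modify" scope must not be extended over reads or writes of the
      // address, since that could create a nested (conflicting) access.
      if effect.mayWriteToAddress || effect.mayReadAddress { return false }
    }
  }
  return true
}

// Example: a store to the address outside the read scope blocks the extension,
// mirroring the new SIL test below.
let effects = [SideEffect(name: "store to globalX", position: 5,
                          mayWriteToAddress: true, mayReadAddress: false)]
print(canExtendAccessScope(kind: .read, loopSideEffects: effects, scope: 1...3))  // false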

test/SILOptimizer/licm_exclusivity.sil

Lines changed: 31 additions & 0 deletions
@@ -252,3 +252,34 @@ bb2:
   %10 = tuple ()
   return %10 : $()
 }
+
+// CHECK-LABEL: sil [ossa] @dont_hoist_access_with_conflict_with_store :
+// CHECK: bb1:
+// CHECK-NEXT: begin_access
+// CHECK-LABEL: } // end sil function 'dont_hoist_access_with_conflict_with_store'
+sil [ossa] @dont_hoist_access_with_conflict_with_store : $@convention(thin) (X) -> () {
+bb0(%0 : $X):
+  %1 = global_addr @globalX: $*X
+  br bb1
+
+bb1:
+  %3 = begin_access [read] [dynamic] %1 : $*X
+  %4 = load [trivial] %3 : $*X
+  fix_lifetime %4 : $X
+  end_access %3 : $*X
+  cond_br undef, bb2, bb3
+bb2:
+  store %0 to [trivial] %1 : $*X
+  br bb4
+bb3:
+  br bb4
+bb4:
+  cond_br undef, bb5, bb6
+
+bb5:
+  br bb1
+
+bb6:
+  %10 = tuple ()
+  return %10 : $()
+}
