@@ -191,207 +191,8 @@ class GenericSpecializer : public SILFunctionTransform {
   }
 };
 
-/// The mandatory specializer, which runs in the mandatory pipeline.
-///
-/// It specializes functions called from performance-annotated functions
-/// (@_noLocks, @_noAllocation).
-class MandatoryGenericSpecializer : public SILModuleTransform {
-
-  void run() override;
-
-  bool optimize(SILFunction *func, ClassHierarchyAnalysis *cha,
-                bool &invalidatedStackNesting);
-
-  bool optimizeInst(SILInstruction *inst, SILOptFunctionBuilder &funcBuilder,
-                    InstructionDeleter &deleter, ClassHierarchyAnalysis *cha,
-                    bool &invalidatedStackNesting);
-};
-
-
-void MandatoryGenericSpecializer::run() {
-  SILModule *module = getModule();
-
-  ClassHierarchyAnalysis *cha = getAnalysis<ClassHierarchyAnalysis>();
-
-  llvm::SmallVector<SILFunction *, 8> workList;
-  llvm::SmallPtrSet<SILFunction *, 16> visited;
-
-  // Look for performance-annotated functions.
-  for (SILFunction &function : *module) {
-    if (function.getPerfConstraints() != PerformanceConstraints::None) {
-      workList.push_back(&function);
-      visited.insert(&function);
-    }
-  }
-
-  while (!workList.empty()) {
-    SILFunction *func = workList.pop_back_val();
-    module->linkFunction(func, SILModule::LinkingMode::LinkAll);
-    if (!func->isDefinition())
-      continue;
-
-    // Perform generic specialization and other related optimizations.
-
-    bool invalidatedStackNesting = false;
-
-    // To avoid phase-ordering problems among the involved optimizations,
-    // iterate until we reach a fixed point.
-    // This should always happen, but to be on the safe side, limit the number
-    // of iterations to 10 (which is more than enough - usually the loop runs
-    // 1 to 3 times).
-    for (int i = 0; i < 10; i++) {
-      bool changed = optimize(func, cha, invalidatedStackNesting);
-      if (changed) {
-        invalidateAnalysis(func, SILAnalysis::InvalidationKind::FunctionBody);
-      } else {
-        break;
-      }
-    }
-
-    if (invalidatedStackNesting) {
-      StackNesting::fixNesting(func);
-    }
-
-    // Continue specializing called functions.
-    for (SILBasicBlock &block : *func) {
-      for (SILInstruction &inst : block) {
-        if (auto as = ApplySite::isa(&inst)) {
-          if (SILFunction *callee = as.getReferencedFunctionOrNull()) {
-            if (visited.insert(callee).second)
-              workList.push_back(callee);
-          }
-        }
-      }
-    }
-  }
-}
-
-/// Specialize generic calls in \p func and do some other related optimizations:
-/// devirtualization and constant-folding of Builtin.canBeClass.
-bool MandatoryGenericSpecializer::optimize(SILFunction *func,
-                                           ClassHierarchyAnalysis *cha,
-                                           bool &invalidatedStackNesting) {
-  bool changed = false;
-  SILOptFunctionBuilder funcBuilder(*this);
-  InstructionDeleter deleter;
-  ReachingReturnBlocks rrBlocks(func);
-  NonErrorHandlingBlocks neBlocks(func);
-
-  // If this is a just-specialized function, try to optimize copy_addr, etc.
-  // instructions.
-  if (optimizeMemoryAccesses(func)) {
-    eliminateDeadAllocations(func);
-    changed = true;
-  }
-
-  // Visiting blocks in reverse order avoids revisiting instructions after
-  // block splitting, which would be quadratic.
-  for (SILBasicBlock &block : llvm::reverse(*func)) {
-    // Only consider blocks which are not on a "throw" path.
-    if (!rrBlocks.reachesReturn(&block) || !neBlocks.isNonErrorHandling(&block))
-      continue;
-
-    for (SILInstruction &inst : block.reverseDeletableInstructions()) {
-      changed |= optimizeInst(&inst, funcBuilder, deleter, cha, invalidatedStackNesting);
-    }
-  }
-  deleter.cleanupDeadInstructions();
-
-  if (specializeAppliesInFunction(*func, this, /*isMandatory*/ true))
-    changed = true;
-
-  return changed;
-}
-
-bool MandatoryGenericSpecializer::
-optimizeInst(SILInstruction *inst, SILOptFunctionBuilder &funcBuilder,
-             InstructionDeleter &deleter, ClassHierarchyAnalysis *cha,
-             bool &invalidatedStackNesting) {
-  if (auto as = ApplySite::isa(inst)) {
-
-    bool changed = false;
-
-    // Specialization opens opportunities to devirtualize method calls.
-    if (ApplySite newAS = tryDevirtualizeApply(as, cha).first) {
-      deleter.forceDelete(as.getInstruction());
-      changed = true;
-      as = newAS;
-    }
-
-    if (auto *pai = dyn_cast<PartialApplyInst>(as)) {
-      SILBuilderContext builderCtxt(funcBuilder.getModule());
-      if (tryOptimizeApplyOfPartialApply(pai, builderCtxt, deleter.getCallbacks())) {
-        // Try to delete the partial_apply.
-        // We don't need to copy all arguments again (to extend their lifetimes),
-        // because that was already done in tryOptimizeApplyOfPartialApply.
-        tryDeleteDeadClosure(pai, deleter.getCallbacks(), /*needKeepArgsAlive=*/ false);
-        invalidatedStackNesting = true;
-        return true;
-      }
-      return changed;
-    }
-
-    auto fas = FullApplySite::isa(as.getInstruction());
-    assert(fas);
-
-    SILFunction *callee = fas.getReferencedFunctionOrNull();
-    if (!callee)
-      return changed;
-
-    if (callee->isTransparent() == IsNotTransparent &&
-        // Force inlining of coroutines, because coroutines may allocate
-        // memory.
-        !isa<BeginApplyInst>(fas.getInstruction()))
-      return changed;
-
-    if (callee->isExternalDeclaration())
-      getModule()->loadFunction(callee, SILModule::LinkingMode::LinkAll);
-
-    if (callee->isExternalDeclaration())
-      return changed;
-
-    // If the devirtualized callee is a transparent function, inline it.
-    SILInliner::inlineFullApply(fas, SILInliner::InlineKind::MandatoryInline,
-                                funcBuilder, deleter);
-    if (callee->hasOwnership() && !inst->getFunction()->hasOwnership())
-      invalidatedStackNesting = true;
-    return true;
-  }
-  if (auto *bi = dyn_cast<BuiltinInst>(inst)) {
-    // Constant-fold Builtin.canBeClass. This is essential for Array code.
-    if (bi->getBuiltinInfo().ID != BuiltinValueKind::CanBeObjCClass)
-      return false;
-
-    SILBuilderWithScope builder(bi);
-    IntegerLiteralInst *lit = optimizeBuiltinCanBeObjCClass(bi, builder);
-    if (!lit)
-      return false;
-
-    bi->replaceAllUsesWith(lit);
-    ConstantFolder constFolder(funcBuilder, getOptions().AssertConfig,
-                               /*EnableDiagnostics*/ false);
-    constFolder.addToWorklist(lit);
-    constFolder.processWorkList();
-    deleter.forceDelete(bi);
-    return true;
-  }
-  if (auto *mti = dyn_cast<MetatypeInst>(inst)) {
-    // Remove dead `metatype` instructions which only have `debug_value` uses.
-    // We lose debug info for such type variables, but this is a compromise we
-    // need to accept to get allocation/lock-free code.
-    if (onlyHaveDebugUses(mti)) {
-      deleter.forceDeleteWithUsers(mti);
-    }
-  }
-  return false;
-}
-
 } // end anonymous namespace
 
 SILTransform *swift::createGenericSpecializer() {
   return new GenericSpecializer();
 }
-
-SILTransform *swift::createMandatoryGenericSpecializer() {
-  return new MandatoryGenericSpecializer();
-}
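
For readers skimming the removed code: the driver in run() is a classic worklist pattern, seeded with the performance-annotated roots and walking callees transitively, with a bounded fixed-point loop per function. Below is a minimal, self-contained C++ sketch of that shape only; Function, optimizeOnce, and runDriver are hypothetical stand-ins, not SIL or compiler APIs.

#include <iostream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

struct Function {
  std::string name;
  bool annotated = false;          // stands in for PerformanceConstraints != None
  std::vector<Function *> callees; // stands in for the function's apply sites
};

// Stands in for one round of MandatoryGenericSpecializer::optimize(); returns
// true if anything changed. Here each function "stabilizes" after two rounds.
static bool optimizeOnce(Function &f,
                         std::unordered_map<std::string, int> &rounds) {
  return ++rounds[f.name] <= 2;
}

static void runDriver(std::vector<Function> &module) {
  std::vector<Function *> workList;
  std::unordered_set<Function *> visited;

  // Seed the worklist with the annotated roots, as the removed run() did.
  for (Function &f : module)
    if (f.annotated && visited.insert(&f).second)
      workList.push_back(&f);

  std::unordered_map<std::string, int> rounds;
  while (!workList.empty()) {
    Function *f = workList.back();
    workList.pop_back();

    // Bounded fixed-point loop, mirroring the "limit to 10 iterations"
    // safety cap in the removed code.
    for (int i = 0; i < 10; ++i)
      if (!optimizeOnce(*f, rounds))
        break;

    // Push callees so the whole call tree below a root gets processed; the
    // visited set ensures each function is handled exactly once, even with
    // diamonds or cycles in the call graph.
    for (Function *callee : f->callees)
      if (visited.insert(callee).second)
        workList.push_back(callee);
  }
  std::cout << "processed " << rounds.size() << " functions\n";
}

int main() {
  std::vector<Function> module(3);
  module[0].name = "entry";
  module[0].annotated = true;
  module[1].name = "helper";
  module[2].name = "leaf";
  module[0].callees = {&module[1]};
  module[1].callees = {&module[2]};
  runDriver(module);
}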
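The per-instruction handler optimizeInst() is a prioritized cascade: partial applies are folded, coroutines (begin_apply) are always inlined because they may allocate, plain calls are inlined only when the callee is transparent, and two non-apply cases (Builtin.canBeClass, dead metatypes) are folded away. The sketch below summarizes that ordering under hypothetical names (InstKind, Callee, decide); it omits the devirtualization step that precedes these checks and is not compiler code.

#include <iostream>
#include <optional>
#include <string>

// Hypothetical stand-ins for SIL instruction kinds and callee properties.
enum class InstKind { PartialApply, BeginApply, Apply, BuiltinCanBeClass,
                      Metatype, Other };

struct Callee { bool transparent = false; bool externalOnly = false; };

// Mirrors the order of checks in the removed optimizeInst().
static std::optional<std::string> decide(InstKind kind, const Callee &callee,
                                         bool onlyDebugUses) {
  switch (kind) {
  case InstKind::PartialApply:
    return "fold apply-of-partial_apply, then delete the dead closure";
  case InstKind::BeginApply:
    return "mandatory-inline the coroutine (it may allocate)";
  case InstKind::Apply:
    if (!callee.transparent || callee.externalOnly)
      return std::nullopt; // left for specializeAppliesInFunction()
    return "mandatory-inline the transparent callee";
  case InstKind::BuiltinCanBeClass:
    return "constant-fold Builtin.canBeClass and propagate the literal";
  case InstKind::Metatype:
    if (onlyDebugUses)
      return "delete the metatype together with its debug_value uses";
    return std::nullopt;
  default:
    return std::nullopt;
  }
}

int main() {
  if (auto action = decide(InstKind::BeginApply, {}, false))
    std::cout << *action << "\n";
}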