Skip to content

Commit 9a63c50

Browse files
committed
[CIR][CIRGen][NFCI] Take a step toward getting scope information to match OG
We are missing cleanups all around; this is more incremental progress towards fixing that. It is intended to be NFC, but we have to start changing some bits in order to properly match the cleanup handling in OG. Start tagging places with more MissingFeatures entries to allow us to incrementally improve the situation.
1 parent 8c82ee2 commit 9a63c50

File tree

8 files changed

+321
-69
lines changed

8 files changed

+321
-69
lines changed

clang/include/clang/CIR/MissingFeatures.h

+11-3
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,6 @@ struct MissingFeatures {
5959
static bool emitTypeCheck() { return false; }
6060
static bool tbaa() { return false; }
6161
static bool tbaa_struct() { return false; }
62-
static bool cleanups() { return false; }
6362
static bool emitNullabilityCheck() { return false; }
6463
static bool ptrAuth() { return false; }
6564

@@ -160,12 +159,22 @@ struct MissingFeatures {
160159
static bool fastMathFlags() { return false; }
161160
static bool fastMathFuncAttributes() { return false; }
162161

162+
// Cleanup
163+
static bool cleanups() { return false; }
164+
static bool simplifyCleanupEntry() { return false; }
165+
static bool requiresCleanups() { return false; }
166+
static bool cleanupBranchAfterSwitch() { return false; }
167+
static bool cleanupAlwaysBranchThrough() { return false; }
168+
static bool cleanupDestinationIndex() { return false; }
169+
static bool cleanupDestroyNRVOVariable() { return false; }
170+
static bool cleanupAppendInsts() { return false; }
171+
static bool cleanupIndexAndBIAdjustment() { return false; }
172+
163173
// Exception handling
164174
static bool isSEHTryScope() { return false; }
165175
static bool ehStack() { return false; }
166176
static bool emitStartEHSpec() { return false; }
167177
static bool emitEndEHSpec() { return false; }
168-
static bool simplifyCleanupEntry() { return false; }
169178

170179
// Type qualifiers.
171180
static bool atomicTypes() { return false; }
@@ -208,7 +217,6 @@ struct MissingFeatures {
208217
static bool addAutoInitAnnotation() { return false; }
209218
static bool addHeapAllocSiteMetadata() { return false; }
210219
static bool loopInfoStack() { return false; }
211-
static bool requiresCleanups() { return false; }
212220
static bool constantFoldsToSimpleInteger() { return false; }
213221
static bool checkFunctionCallABI() { return false; }
214222
static bool zeroInitializer() { return false; }

clang/lib/CIR/CodeGen/CIRGenCleanup.cpp

+168-6
Original file line numberDiff line numberDiff line change
@@ -37,13 +37,12 @@ cir::BrOp CIRGenFunction::emitBranchThroughCleanup(mlir::Location Loc,
3737
JumpDest Dest) {
3838
// Remove this once we go for making sure unreachable code is
3939
// well modeled (or not).
40-
assert(builder.getInsertionBlock() && "not yet implemented");
4140
assert(!cir::MissingFeatures::ehStack());
4241

4342
// Insert a branch: to the cleanup block (unsolved) or to the already
4443
// materialized label. Keep track of unsolved goto's.
45-
auto brOp = builder.create<BrOp>(
46-
Loc, Dest.isValid() ? Dest.getBlock() : ReturnBlock().getBlock());
44+
assert(Dest.getBlock() && "assumes incoming valid dest");
45+
auto brOp = builder.create<BrOp>(Loc, Dest.getBlock());
4746

4847
// Calculate the innermost active normal cleanup.
4948
EHScopeStack::stable_iterator TopCleanup =
@@ -70,7 +69,33 @@ cir::BrOp CIRGenFunction::emitBranchThroughCleanup(mlir::Location Loc,
7069
return brOp;
7170
}
7271

73-
// FIXME(cir): otherwise, thread through all the normal cleanups in scope.
72+
// Otherwise, thread through all the normal cleanups in scope.
73+
auto index = builder.getUInt32(Dest.getDestIndex(), Loc);
74+
assert(!cir::MissingFeatures::cleanupIndexAndBIAdjustment());
75+
76+
// Add this destination to all the scopes involved.
77+
EHScopeStack::stable_iterator I = TopCleanup;
78+
EHScopeStack::stable_iterator E = Dest.getScopeDepth();
79+
if (E.strictlyEncloses(I)) {
80+
while (true) {
81+
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
82+
assert(Scope.isNormalCleanup());
83+
I = Scope.getEnclosingNormalCleanup();
84+
85+
// If this is the last cleanup we're propagating through, tell it
86+
// that there's a resolved jump moving through it.
87+
if (!E.strictlyEncloses(I)) {
88+
Scope.addBranchAfter(index, Dest.getBlock());
89+
break;
90+
}
91+
92+
// Otherwise, tell the scope that there's a jump propagating
93+
// through it. If this isn't new information, all the rest of
94+
// the work has been done before.
95+
if (!Scope.addBranchThrough(Dest.getBlock()))
96+
break;
97+
}
98+
}
7499
return brOp;
75100
}
76101

@@ -305,6 +330,18 @@ static void emitCleanup(CIRGenFunction &CGF, EHScopeStack::Cleanup *Fn,
305330
// No need to emit continuation block because CIR uses a cir.if.
306331
}
307332

333+
static mlir::Block *createNormalEntry(CIRGenFunction &cgf,
334+
EHCleanupScope &scope) {
335+
assert(scope.isNormalCleanup());
336+
mlir::Block *entry = scope.getNormalBlock();
337+
if (!entry) {
338+
mlir::OpBuilder::InsertionGuard guard(cgf.getBuilder());
339+
entry = cgf.currLexScope->getOrCreateCleanupBlock(cgf.getBuilder());
340+
scope.setNormalBlock(entry);
341+
}
342+
return entry;
343+
}
344+
308345
/// Pops a cleanup block. If the block includes a normal cleanup, the
309346
/// current insertion point is threaded through the cleanup, as are
310347
/// any branch fixups on the cleanup.
@@ -341,7 +378,8 @@ void CIRGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
341378

342379
// - whether there's a fallthrough
343380
auto *FallthroughSource = builder.getInsertionBlock();
344-
bool HasFallthrough = (FallthroughSource != nullptr && IsActive);
381+
bool HasFallthrough =
382+
(FallthroughSource != nullptr && (IsActive || HasExistingBranches));
345383

346384
// Branch-through fall-throughs leave the insertion point set to the
347385
// end of the last cleanup, which points to the current scope. The
@@ -442,7 +480,131 @@ void CIRGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
442480
// Otherwise, the best approach is to thread everything through
443481
// the cleanup block and then try to clean up after ourselves.
444482
} else {
445-
llvm_unreachable("NYI");
483+
// Force the entry block to exist.
484+
mlir::Block *normalEntry = createNormalEntry(*this, Scope);
485+
486+
// I. Set up the fallthrough edge in.
487+
mlir::OpBuilder::InsertPoint savedInactiveFallthroughIP;
488+
489+
// If there's a fallthrough, we need to store the cleanup
490+
// destination index. For fall-throughs this is always zero.
491+
if (HasFallthrough) {
492+
if (!HasPrebranchedFallthrough) {
493+
assert(!cir::MissingFeatures::cleanupDestinationIndex());
494+
}
495+
496+
// Otherwise, save and clear the IP if we don't have fallthrough
497+
// because the cleanup is inactive.
498+
} else if (FallthroughSource) {
499+
assert(!IsActive && "source without fallthrough for active cleanup");
500+
savedInactiveFallthroughIP = getBuilder().saveInsertionPoint();
501+
}
502+
503+
// II. Emit the entry block. This implicitly branches to it if
504+
// we have fallthrough. All the fixups and existing branches
505+
// should already be branched to it.
506+
builder.setInsertionPointToEnd(normalEntry);
507+
508+
// intercept normal cleanup to mark SEH scope end
509+
if (IsEHa) {
510+
llvm_unreachable("NYI");
511+
}
512+
513+
// III. Figure out where we're going and build the cleanup
514+
// epilogue.
515+
bool HasEnclosingCleanups =
516+
(Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
517+
518+
// Compute the branch-through dest if we need it:
519+
// - if there are branch-throughs threaded through the scope
520+
// - if fall-through is a branch-through
521+
// - if there are fixups that will be optimistically forwarded
522+
// to the enclosing cleanup
523+
mlir::Block *branchThroughDest = nullptr;
524+
if (Scope.hasBranchThroughs() ||
525+
(FallthroughSource && FallthroughIsBranchThrough) ||
526+
(HasFixups && HasEnclosingCleanups)) {
527+
llvm_unreachable("NYI");
528+
}
529+
530+
mlir::Block *fallthroughDest = nullptr;
531+
532+
// If there's exactly one branch-after and no other threads,
533+
// we can route it without a switch.
534+
// Skip for SEH, since ExitSwitch is used to generate code to indicate
535+
// abnormal termination. (SEH: Except _leave and fall-through at
536+
// the end, all other exits in a _try (return/goto/continue/break)
537+
// are considered as abnormal terminations, using NormalCleanupDestSlot
538+
// to indicate abnormal termination)
539+
if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
540+
!currentFunctionUsesSEHTry() && Scope.getNumBranchAfters() == 1) {
541+
llvm_unreachable("NYI");
542+
// Build a switch-out if we need it:
543+
// - if there are branch-afters threaded through the scope
544+
// - if fall-through is a branch-after
545+
// - if there are fixups that have nowhere left to go and
546+
// so must be immediately resolved
547+
} else if (Scope.getNumBranchAfters() ||
548+
(HasFallthrough && !FallthroughIsBranchThrough) ||
549+
(HasFixups && !HasEnclosingCleanups)) {
550+
assert(!cir::MissingFeatures::cleanupBranchAfterSwitch());
551+
} else {
552+
// We should always have a branch-through destination in this case.
553+
assert(branchThroughDest);
554+
assert(!cir::MissingFeatures::cleanupAlwaysBranchThrough());
555+
}
556+
557+
// IV. Pop the cleanup and emit it.
558+
Scope.markEmitted();
559+
EHStack.popCleanup();
560+
assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
561+
562+
emitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
563+
564+
// Append the prepared cleanup prologue from above.
565+
assert(!cir::MissingFeatures::cleanupAppendInsts());
566+
567+
// Optimistically hope that any fixups will continue falling through.
568+
for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups(); I < E;
569+
++I) {
570+
llvm_unreachable("NYI");
571+
}
572+
573+
// V. Set up the fallthrough edge out.
574+
575+
// Case 1: a fallthrough source exists but doesn't branch to the
576+
// cleanup because the cleanup is inactive.
577+
if (!HasFallthrough && FallthroughSource) {
578+
// Prebranched fallthrough was forwarded earlier.
579+
// Non-prebranched fallthrough doesn't need to be forwarded.
580+
// Either way, all we need to do is restore the IP we cleared before.
581+
assert(!IsActive);
582+
llvm_unreachable("NYI");
583+
584+
// Case 2: a fallthrough source exists and should branch to the
585+
// cleanup, but we're not supposed to branch through to the next
586+
// cleanup.
587+
} else if (HasFallthrough && fallthroughDest) {
588+
llvm_unreachable("NYI");
589+
590+
// Case 3: a fallthrough source exists and should branch to the
591+
// cleanup and then through to the next.
592+
} else if (HasFallthrough) {
593+
// Everything is already set up for this.
594+
595+
// Case 4: no fallthrough source exists.
596+
} else {
597+
// FIXME(cir): should we clear insertion point here?
598+
}
599+
600+
// VI. Assorted cleaning.
601+
602+
// Check whether we can merge NormalEntry into a single predecessor.
603+
// This might invalidate (non-IR) pointers to NormalEntry.
604+
//
605+
// If it did invalidate those pointers, and NormalEntry was the same
606+
// as NormalExit, go back and patch up the fixups.
607+
assert(!cir::MissingFeatures::simplifyCleanupEntry());
446608
}
447609
}
448610

clang/lib/CIR/CodeGen/CIRGenDecl.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -916,7 +916,7 @@ template <class Derived> struct DestroyNRVOVariable : EHScopeStack::Cleanup {
916916
QualType Ty;
917917

918918
void Emit(CIRGenFunction &CGF, Flags flags) override {
919-
llvm_unreachable("NYI");
919+
assert(!cir::MissingFeatures::cleanupDestroyNRVOVariable());
920920
}
921921

922922
virtual ~DestroyNRVOVariable() = default;

clang/lib/CIR/CodeGen/CIRGenFunction.cpp

+57-15
Original file line numberDiff line numberDiff line change
@@ -357,15 +357,23 @@ void CIRGenFunction::LexicalScope::cleanup() {
357357

358358
// Cleanup are done right before codegen resume a scope. This is where
359359
// objects are destroyed.
360-
unsigned curLoc = 0;
360+
SmallVector<mlir::Block *> retBlocks;
361361
for (auto *retBlock : localScope->getRetBlocks()) {
362362
mlir::OpBuilder::InsertionGuard guard(builder);
363363
builder.setInsertionPointToEnd(retBlock);
364-
mlir::Location retLoc = *localScope->getRetLocs()[curLoc];
365-
curLoc++;
364+
retBlocks.push_back(retBlock);
365+
mlir::Location retLoc = localScope->getRetLoc(retBlock);
366366
(void)emitReturn(retLoc);
367367
}
368368

369+
auto removeUnusedRetBlocks = [&]() {
370+
for (mlir::Block *retBlock : retBlocks) {
371+
if (!retBlock->getUses().empty())
372+
continue;
373+
retBlock->erase();
374+
}
375+
};
376+
369377
auto insertCleanupAndLeave = [&](mlir::Block *InsPt) {
370378
mlir::OpBuilder::InsertionGuard guard(builder);
371379
builder.setInsertionPointToEnd(InsPt);
@@ -381,9 +389,34 @@ void CIRGenFunction::LexicalScope::cleanup() {
381389
if (!cleanupBlock && localScope->getCleanupBlock(builder)) {
382390
cleanupBlock = localScope->getCleanupBlock(builder);
383391
builder.create<BrOp>(InsPt->back().getLoc(), cleanupBlock);
392+
if (!cleanupBlock->mightHaveTerminator()) {
393+
mlir::OpBuilder::InsertionGuard guard(builder);
394+
builder.setInsertionPointToEnd(cleanupBlock);
395+
builder.create<YieldOp>(localScope->EndLoc);
396+
}
384397
}
385398

386399
if (localScope->Depth == 0) {
400+
// TODO(cir): get rid of all this special cases once cleanups are properly
401+
// implemented.
402+
// TODO(cir): most of this code should move into emitBranchThroughCleanup
403+
if (localScope->getRetBlocks().size() == 1) {
404+
mlir::Block *retBlock = localScope->getRetBlocks()[0];
405+
mlir::Location loc = localScope->getRetLoc(retBlock);
406+
if (retBlock->getUses().empty())
407+
retBlock->erase();
408+
else {
409+
// Thread return block via cleanup block.
410+
if (cleanupBlock) {
411+
for (auto &blockUse : retBlock->getUses()) {
412+
auto brOp = dyn_cast<cir::BrOp>(blockUse.getOwner());
413+
brOp.setSuccessor(cleanupBlock);
414+
}
415+
}
416+
builder.create<BrOp>(loc, retBlock);
417+
return;
418+
}
419+
}
387420
emitImplicitReturn();
388421
return;
389422
}
@@ -428,6 +461,7 @@ void CIRGenFunction::LexicalScope::cleanup() {
428461
// get into this condition and emit the proper cleanup. This is
429462
// needed to get nrvo to interop with dtor logic.
430463
PerformCleanup = false;
464+
removeUnusedRetBlocks();
431465
return;
432466
}
433467

@@ -537,7 +571,7 @@ void CIRGenFunction::finishFunction(SourceLocation EndLoc) {
537571
// the ret after it's been at EndLoc.
538572
if (auto *DI = getDebugInfo())
539573
assert(!cir::MissingFeatures::generateDebugInfo() && "NYI");
540-
builder.clearInsertionPoint();
574+
// FIXME(cir): should we clearInsertionPoint? breaks many testcases
541575
PopCleanupBlocks(PrologueCleanupDepth);
542576
}
543577

@@ -686,7 +720,7 @@ cir::FuncOp CIRGenFunction::generateCode(clang::GlobalDecl GD, cir::FuncOp Fn,
686720
assert(Fn.isDeclaration() && "Function already has body?");
687721
mlir::Block *EntryBB = Fn.addEntryBlock();
688722
builder.setInsertionPointToStart(EntryBB);
689-
723+
mlir::Block *maybeEmptyLastBlock = nullptr;
690724
{
691725
// Initialize lexical scope information.
692726
LexicalScope lexScope{*this, fusedLoc, EntryBB};
@@ -736,18 +770,22 @@ cir::FuncOp CIRGenFunction::generateCode(clang::GlobalDecl GD, cir::FuncOp Fn,
736770
llvm_unreachable("no definition for emitted function");
737771

738772
assert(builder.getInsertionBlock() && "Should be valid");
739-
}
773+
maybeEmptyLastBlock = builder.getInsertionBlock();
740774

741-
if (mlir::failed(Fn.verifyBody()))
742-
return nullptr;
775+
if (mlir::failed(Fn.verifyBody()))
776+
return nullptr;
743777

744-
// Emit the standard function epilogue.
745-
finishFunction(BodyRange.getEnd());
778+
// Emit the standard function epilogue.
779+
finishFunction(BodyRange.getEnd());
746780

747-
// If we haven't marked the function nothrow through other means, do a quick
748-
// pass now to see if we can.
749-
assert(!cir::MissingFeatures::tryMarkNoThrow());
781+
// If we haven't marked the function nothrow through other means, do a quick
782+
// pass now to see if we can.
783+
assert(!cir::MissingFeatures::tryMarkNoThrow());
784+
}
750785

786+
if (maybeEmptyLastBlock && maybeEmptyLastBlock->getUses().empty() &&
787+
maybeEmptyLastBlock->empty())
788+
maybeEmptyLastBlock->erase();
751789
return Fn;
752790
}
753791

@@ -1171,9 +1209,13 @@ void CIRGenFunction::StartFunction(GlobalDecl GD, QualType RetTy,
11711209
if (FD && FD->isMain() && cir::MissingFeatures::zerocallusedregs())
11721210
llvm_unreachable("NYI");
11731211

1174-
mlir::Block *EntryBB = &Fn.getBlocks().front();
1212+
// CIRGen has its own logic for entry blocks, usually per operation region.
1213+
mlir::Block *retBlock = currLexScope->getOrCreateRetBlock(*this, getLoc(Loc));
1214+
// returnBlock handles per region getJumpDestInCurrentScope LLVM traditional
1215+
// codegen logic.
1216+
(void)returnBlock(retBlock);
11751217

1176-
// TODO: allocapt insertion? probably don't need for CIR
1218+
mlir::Block *EntryBB = &Fn.getBlocks().front();
11771219

11781220
if (cir::MissingFeatures::requiresReturnValueCheck())
11791221
llvm_unreachable("NYI");

0 commit comments

Comments
 (0)