Skip to content

Commit f1a29c6

Browse files
committed
Revert "[CIR][CIRGen][NFCI] Take a step into getting scope information to match OG"
It seems the Windows bots are now broken. This reverts commit 9a63c50.
1 parent 9a63c50 commit f1a29c6

File tree

8 files changed

+69
-321
lines changed

8 files changed

+69
-321
lines changed

clang/include/clang/CIR/MissingFeatures.h

+3-11
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ struct MissingFeatures {
5959
static bool emitTypeCheck() { return false; }
6060
static bool tbaa() { return false; }
6161
static bool tbaa_struct() { return false; }
62+
static bool cleanups() { return false; }
6263
static bool emitNullabilityCheck() { return false; }
6364
static bool ptrAuth() { return false; }
6465

@@ -159,22 +160,12 @@ struct MissingFeatures {
159160
static bool fastMathFlags() { return false; }
160161
static bool fastMathFuncAttributes() { return false; }
161162

162-
// Cleanup
163-
static bool cleanups() { return false; }
164-
static bool simplifyCleanupEntry() { return false; }
165-
static bool requiresCleanups() { return false; }
166-
static bool cleanupBranchAfterSwitch() { return false; }
167-
static bool cleanupAlwaysBranchThrough() { return false; }
168-
static bool cleanupDestinationIndex() { return false; }
169-
static bool cleanupDestroyNRVOVariable() { return false; }
170-
static bool cleanupAppendInsts() { return false; }
171-
static bool cleanupIndexAndBIAdjustment() { return false; }
172-
173163
// Exception handling
174164
static bool isSEHTryScope() { return false; }
175165
static bool ehStack() { return false; }
176166
static bool emitStartEHSpec() { return false; }
177167
static bool emitEndEHSpec() { return false; }
168+
static bool simplifyCleanupEntry() { return false; }
178169

179170
// Type qualifiers.
180171
static bool atomicTypes() { return false; }
@@ -217,6 +208,7 @@ struct MissingFeatures {
217208
static bool addAutoInitAnnotation() { return false; }
218209
static bool addHeapAllocSiteMetadata() { return false; }
219210
static bool loopInfoStack() { return false; }
211+
static bool requiresCleanups() { return false; }
220212
static bool constantFoldsToSimpleInteger() { return false; }
221213
static bool checkFunctionCallABI() { return false; }
222214
static bool zeroInitializer() { return false; }

clang/lib/CIR/CodeGen/CIRGenCleanup.cpp

+6-168
Original file line numberDiff line numberDiff line change
@@ -37,12 +37,13 @@ cir::BrOp CIRGenFunction::emitBranchThroughCleanup(mlir::Location Loc,
3737
JumpDest Dest) {
3838
// Remove this once we go for making sure unreachable code is
3939
// well modeled (or not).
40+
assert(builder.getInsertionBlock() && "not yet implemented");
4041
assert(!cir::MissingFeatures::ehStack());
4142

4243
// Insert a branch: to the cleanup block (unsolved) or to the already
4344
// materialized label. Keep track of unsolved goto's.
44-
assert(Dest.getBlock() && "assumes incoming valid dest");
45-
auto brOp = builder.create<BrOp>(Loc, Dest.getBlock());
45+
auto brOp = builder.create<BrOp>(
46+
Loc, Dest.isValid() ? Dest.getBlock() : ReturnBlock().getBlock());
4647

4748
// Calculate the innermost active normal cleanup.
4849
EHScopeStack::stable_iterator TopCleanup =
@@ -69,33 +70,7 @@ cir::BrOp CIRGenFunction::emitBranchThroughCleanup(mlir::Location Loc,
6970
return brOp;
7071
}
7172

72-
// Otherwise, thread through all the normal cleanups in scope.
73-
auto index = builder.getUInt32(Dest.getDestIndex(), Loc);
74-
assert(!cir::MissingFeatures::cleanupIndexAndBIAdjustment());
75-
76-
// Add this destination to all the scopes involved.
77-
EHScopeStack::stable_iterator I = TopCleanup;
78-
EHScopeStack::stable_iterator E = Dest.getScopeDepth();
79-
if (E.strictlyEncloses(I)) {
80-
while (true) {
81-
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
82-
assert(Scope.isNormalCleanup());
83-
I = Scope.getEnclosingNormalCleanup();
84-
85-
// If this is the last cleanup we're propagating through, tell it
86-
// that there's a resolved jump moving through it.
87-
if (!E.strictlyEncloses(I)) {
88-
Scope.addBranchAfter(index, Dest.getBlock());
89-
break;
90-
}
91-
92-
// Otherwise, tell the scope that there's a jump propagating
93-
// through it. If this isn't new information, all the rest of
94-
// the work has been done before.
95-
if (!Scope.addBranchThrough(Dest.getBlock()))
96-
break;
97-
}
98-
}
73+
// FIXME(cir): otherwise, thread through all the normal cleanups in scope.
9974
return brOp;
10075
}
10176

@@ -330,18 +305,6 @@ static void emitCleanup(CIRGenFunction &CGF, EHScopeStack::Cleanup *Fn,
330305
// No need to emit continuation block because CIR uses a cir.if.
331306
}
332307

333-
static mlir::Block *createNormalEntry(CIRGenFunction &cgf,
334-
EHCleanupScope &scope) {
335-
assert(scope.isNormalCleanup());
336-
mlir::Block *entry = scope.getNormalBlock();
337-
if (!entry) {
338-
mlir::OpBuilder::InsertionGuard guard(cgf.getBuilder());
339-
entry = cgf.currLexScope->getOrCreateCleanupBlock(cgf.getBuilder());
340-
scope.setNormalBlock(entry);
341-
}
342-
return entry;
343-
}
344-
345308
/// Pops a cleanup block. If the block includes a normal cleanup, the
346309
/// current insertion point is threaded through the cleanup, as are
347310
/// any branch fixups on the cleanup.
@@ -378,8 +341,7 @@ void CIRGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
378341

379342
// - whether there's a fallthrough
380343
auto *FallthroughSource = builder.getInsertionBlock();
381-
bool HasFallthrough =
382-
(FallthroughSource != nullptr && (IsActive || HasExistingBranches));
344+
bool HasFallthrough = (FallthroughSource != nullptr && IsActive);
383345

384346
// Branch-through fall-throughs leave the insertion point set to the
385347
// end of the last cleanup, which points to the current scope. The
@@ -480,131 +442,7 @@ void CIRGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
480442
// Otherwise, the best approach is to thread everything through
481443
// the cleanup block and then try to clean up after ourselves.
482444
} else {
483-
// Force the entry block to exist.
484-
mlir::Block *normalEntry = createNormalEntry(*this, Scope);
485-
486-
// I. Set up the fallthrough edge in.
487-
mlir::OpBuilder::InsertPoint savedInactiveFallthroughIP;
488-
489-
// If there's a fallthrough, we need to store the cleanup
490-
// destination index. For fall-throughs this is always zero.
491-
if (HasFallthrough) {
492-
if (!HasPrebranchedFallthrough) {
493-
assert(!cir::MissingFeatures::cleanupDestinationIndex());
494-
}
495-
496-
// Otherwise, save and clear the IP if we don't have fallthrough
497-
// because the cleanup is inactive.
498-
} else if (FallthroughSource) {
499-
assert(!IsActive && "source without fallthrough for active cleanup");
500-
savedInactiveFallthroughIP = getBuilder().saveInsertionPoint();
501-
}
502-
503-
// II. Emit the entry block. This implicitly branches to it if
504-
// we have fallthrough. All the fixups and existing branches
505-
// should already be branched to it.
506-
builder.setInsertionPointToEnd(normalEntry);
507-
508-
// intercept normal cleanup to mark SEH scope end
509-
if (IsEHa) {
510-
llvm_unreachable("NYI");
511-
}
512-
513-
// III. Figure out where we're going and build the cleanup
514-
// epilogue.
515-
bool HasEnclosingCleanups =
516-
(Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
517-
518-
// Compute the branch-through dest if we need it:
519-
// - if there are branch-throughs threaded through the scope
520-
// - if fall-through is a branch-through
521-
// - if there are fixups that will be optimistically forwarded
522-
// to the enclosing cleanup
523-
mlir::Block *branchThroughDest = nullptr;
524-
if (Scope.hasBranchThroughs() ||
525-
(FallthroughSource && FallthroughIsBranchThrough) ||
526-
(HasFixups && HasEnclosingCleanups)) {
527-
llvm_unreachable("NYI");
528-
}
529-
530-
mlir::Block *fallthroughDest = nullptr;
531-
532-
// If there's exactly one branch-after and no other threads,
533-
// we can route it without a switch.
534-
// Skip for SEH, since ExitSwitch is used to generate code to indicate
535-
// abnormal termination. (SEH: Except _leave and fall-through at
536-
// the end, all other exits in a _try (return/goto/continue/break)
537-
// are considered as abnormal terminations, using NormalCleanupDestSlot
538-
// to indicate abnormal termination)
539-
if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
540-
!currentFunctionUsesSEHTry() && Scope.getNumBranchAfters() == 1) {
541-
llvm_unreachable("NYI");
542-
// Build a switch-out if we need it:
543-
// - if there are branch-afters threaded through the scope
544-
// - if fall-through is a branch-after
545-
// - if there are fixups that have nowhere left to go and
546-
// so must be immediately resolved
547-
} else if (Scope.getNumBranchAfters() ||
548-
(HasFallthrough && !FallthroughIsBranchThrough) ||
549-
(HasFixups && !HasEnclosingCleanups)) {
550-
assert(!cir::MissingFeatures::cleanupBranchAfterSwitch());
551-
} else {
552-
// We should always have a branch-through destination in this case.
553-
assert(branchThroughDest);
554-
assert(!cir::MissingFeatures::cleanupAlwaysBranchThrough());
555-
}
556-
557-
// IV. Pop the cleanup and emit it.
558-
Scope.markEmitted();
559-
EHStack.popCleanup();
560-
assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
561-
562-
emitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
563-
564-
// Append the prepared cleanup prologue from above.
565-
assert(!cir::MissingFeatures::cleanupAppendInsts());
566-
567-
// Optimistically hope that any fixups will continue falling through.
568-
for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups(); I < E;
569-
++I) {
570-
llvm_unreachable("NYI");
571-
}
572-
573-
// V. Set up the fallthrough edge out.
574-
575-
// Case 1: a fallthrough source exists but doesn't branch to the
576-
// cleanup because the cleanup is inactive.
577-
if (!HasFallthrough && FallthroughSource) {
578-
// Prebranched fallthrough was forwarded earlier.
579-
// Non-prebranched fallthrough doesn't need to be forwarded.
580-
// Either way, all we need to do is restore the IP we cleared before.
581-
assert(!IsActive);
582-
llvm_unreachable("NYI");
583-
584-
// Case 2: a fallthrough source exists and should branch to the
585-
// cleanup, but we're not supposed to branch through to the next
586-
// cleanup.
587-
} else if (HasFallthrough && fallthroughDest) {
588-
llvm_unreachable("NYI");
589-
590-
// Case 3: a fallthrough source exists and should branch to the
591-
// cleanup and then through to the next.
592-
} else if (HasFallthrough) {
593-
// Everything is already set up for this.
594-
595-
// Case 4: no fallthrough source exists.
596-
} else {
597-
// FIXME(cir): should we clear insertion point here?
598-
}
599-
600-
// VI. Assorted cleaning.
601-
602-
// Check whether we can merge NormalEntry into a single predecessor.
603-
// This might invalidate (non-IR) pointers to NormalEntry.
604-
//
605-
// If it did invalidate those pointers, and NormalEntry was the same
606-
// as NormalExit, go back and patch up the fixups.
607-
assert(!cir::MissingFeatures::simplifyCleanupEntry());
445+
llvm_unreachable("NYI");
608446
}
609447
}
610448

clang/lib/CIR/CodeGen/CIRGenDecl.cpp

+1-1
Original file line numberDiff line numberDiff line change
@@ -916,7 +916,7 @@ template <class Derived> struct DestroyNRVOVariable : EHScopeStack::Cleanup {
916916
QualType Ty;
917917

918918
void Emit(CIRGenFunction &CGF, Flags flags) override {
919-
assert(!cir::MissingFeatures::cleanupDestroyNRVOVariable());
919+
llvm_unreachable("NYI");
920920
}
921921

922922
virtual ~DestroyNRVOVariable() = default;

clang/lib/CIR/CodeGen/CIRGenFunction.cpp

+15-57
Original file line numberDiff line numberDiff line change
@@ -357,23 +357,15 @@ void CIRGenFunction::LexicalScope::cleanup() {
357357

358358
// Cleanup are done right before codegen resume a scope. This is where
359359
// objects are destroyed.
360-
SmallVector<mlir::Block *> retBlocks;
360+
unsigned curLoc = 0;
361361
for (auto *retBlock : localScope->getRetBlocks()) {
362362
mlir::OpBuilder::InsertionGuard guard(builder);
363363
builder.setInsertionPointToEnd(retBlock);
364-
retBlocks.push_back(retBlock);
365-
mlir::Location retLoc = localScope->getRetLoc(retBlock);
364+
mlir::Location retLoc = *localScope->getRetLocs()[curLoc];
365+
curLoc++;
366366
(void)emitReturn(retLoc);
367367
}
368368

369-
auto removeUnusedRetBlocks = [&]() {
370-
for (mlir::Block *retBlock : retBlocks) {
371-
if (!retBlock->getUses().empty())
372-
continue;
373-
retBlock->erase();
374-
}
375-
};
376-
377369
auto insertCleanupAndLeave = [&](mlir::Block *InsPt) {
378370
mlir::OpBuilder::InsertionGuard guard(builder);
379371
builder.setInsertionPointToEnd(InsPt);
@@ -389,34 +381,9 @@ void CIRGenFunction::LexicalScope::cleanup() {
389381
if (!cleanupBlock && localScope->getCleanupBlock(builder)) {
390382
cleanupBlock = localScope->getCleanupBlock(builder);
391383
builder.create<BrOp>(InsPt->back().getLoc(), cleanupBlock);
392-
if (!cleanupBlock->mightHaveTerminator()) {
393-
mlir::OpBuilder::InsertionGuard guard(builder);
394-
builder.setInsertionPointToEnd(cleanupBlock);
395-
builder.create<YieldOp>(localScope->EndLoc);
396-
}
397384
}
398385

399386
if (localScope->Depth == 0) {
400-
// TODO(cir): get rid of all this special cases once cleanups are properly
401-
// implemented.
402-
// TODO(cir): most of this code should move into emitBranchThroughCleanup
403-
if (localScope->getRetBlocks().size() == 1) {
404-
mlir::Block *retBlock = localScope->getRetBlocks()[0];
405-
mlir::Location loc = localScope->getRetLoc(retBlock);
406-
if (retBlock->getUses().empty())
407-
retBlock->erase();
408-
else {
409-
// Thread return block via cleanup block.
410-
if (cleanupBlock) {
411-
for (auto &blockUse : retBlock->getUses()) {
412-
auto brOp = dyn_cast<cir::BrOp>(blockUse.getOwner());
413-
brOp.setSuccessor(cleanupBlock);
414-
}
415-
}
416-
builder.create<BrOp>(loc, retBlock);
417-
return;
418-
}
419-
}
420387
emitImplicitReturn();
421388
return;
422389
}
@@ -461,7 +428,6 @@ void CIRGenFunction::LexicalScope::cleanup() {
461428
// get into this condition and emit the proper cleanup. This is
462429
// needed to get nrvo to interop with dtor logic.
463430
PerformCleanup = false;
464-
removeUnusedRetBlocks();
465431
return;
466432
}
467433

@@ -571,7 +537,7 @@ void CIRGenFunction::finishFunction(SourceLocation EndLoc) {
571537
// the ret after it's been at EndLoc.
572538
if (auto *DI = getDebugInfo())
573539
assert(!cir::MissingFeatures::generateDebugInfo() && "NYI");
574-
// FIXME(cir): should we clearInsertionPoint? breaks many testcases
540+
builder.clearInsertionPoint();
575541
PopCleanupBlocks(PrologueCleanupDepth);
576542
}
577543

@@ -720,7 +686,7 @@ cir::FuncOp CIRGenFunction::generateCode(clang::GlobalDecl GD, cir::FuncOp Fn,
720686
assert(Fn.isDeclaration() && "Function already has body?");
721687
mlir::Block *EntryBB = Fn.addEntryBlock();
722688
builder.setInsertionPointToStart(EntryBB);
723-
mlir::Block *maybeEmptyLastBlock = nullptr;
689+
724690
{
725691
// Initialize lexical scope information.
726692
LexicalScope lexScope{*this, fusedLoc, EntryBB};
@@ -770,22 +736,18 @@ cir::FuncOp CIRGenFunction::generateCode(clang::GlobalDecl GD, cir::FuncOp Fn,
770736
llvm_unreachable("no definition for emitted function");
771737

772738
assert(builder.getInsertionBlock() && "Should be valid");
773-
maybeEmptyLastBlock = builder.getInsertionBlock();
739+
}
774740

775-
if (mlir::failed(Fn.verifyBody()))
776-
return nullptr;
741+
if (mlir::failed(Fn.verifyBody()))
742+
return nullptr;
777743

778-
// Emit the standard function epilogue.
779-
finishFunction(BodyRange.getEnd());
744+
// Emit the standard function epilogue.
745+
finishFunction(BodyRange.getEnd());
780746

781-
// If we haven't marked the function nothrow through other means, do a quick
782-
// pass now to see if we can.
783-
assert(!cir::MissingFeatures::tryMarkNoThrow());
784-
}
747+
// If we haven't marked the function nothrow through other means, do a quick
748+
// pass now to see if we can.
749+
assert(!cir::MissingFeatures::tryMarkNoThrow());
785750

786-
if (maybeEmptyLastBlock && maybeEmptyLastBlock->getUses().empty() &&
787-
maybeEmptyLastBlock->empty())
788-
maybeEmptyLastBlock->erase();
789751
return Fn;
790752
}
791753

@@ -1209,14 +1171,10 @@ void CIRGenFunction::StartFunction(GlobalDecl GD, QualType RetTy,
12091171
if (FD && FD->isMain() && cir::MissingFeatures::zerocallusedregs())
12101172
llvm_unreachable("NYI");
12111173

1212-
// CIRGen has its own logic for entry blocks, usually per operation region.
1213-
mlir::Block *retBlock = currLexScope->getOrCreateRetBlock(*this, getLoc(Loc));
1214-
// returnBlock handles per region getJumpDestInCurrentScope LLVM traditional
1215-
// codegen logic.
1216-
(void)returnBlock(retBlock);
1217-
12181174
mlir::Block *EntryBB = &Fn.getBlocks().front();
12191175

1176+
// TODO: allocapt insertion? probably don't need for CIR
1177+
12201178
if (cir::MissingFeatures::requiresReturnValueCheck())
12211179
llvm_unreachable("NYI");
12221180

0 commit comments

Comments
 (0)