Source: https://clang.llvm.org/doxygen/CGCleanup_8cpp_source.html

clang: lib/CodeGen/CGCleanup.cpp Source File

  21 #include "llvm/Support/SaveAndRestore.h"
  23 using namespace clang;
  24 using namespace CodeGen;

  65   case AggregateLiteral:
  66   case AggregateAddress:
  69   case ComplexAddress: {

  76   llvm_unreachable("bad saved r-value kind");

  80 char *EHScopeStack::allocate(size_t Size) {
  83     unsigned Capacity = 1024;
  84     while (Capacity < Size) Capacity *= 2;
  85     StartOfBuffer = new char[Capacity];
  86     StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  87   } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
  88     unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
  89     unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
  91     unsigned NewCapacity = CurrentCapacity;
  94     } while (NewCapacity < UsedCapacity + Size);
  96     char *NewStartOfBuffer = new char[NewCapacity];
  97     char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
  98     char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
  99     memcpy(NewStartOfData, StartOfData, UsedCapacity);
 100     delete[] StartOfBuffer;
 101     StartOfBuffer = NewStartOfBuffer;
 102     EndOfBuffer = NewEndOfBuffer;
 103     StartOfData = NewStartOfData;
 106   assert(StartOfBuffer + Size <= StartOfData);
 107   StartOfData -= Size;
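EHScopeStack::allocate grows its buffer by doubling and hands out space downward from the end, so StartOfData always marks the innermost scope and deallocate is a simple pointer bump. The following standalone sketch restates that strategy outside of clang; the class name DownwardArena and its members are illustrative only and are not part of the clang API.

// Minimal sketch of a downward-growing, capacity-doubling arena in the
// spirit of EHScopeStack::allocate/deallocate. Illustrative only.
#include <cassert>
#include <cstddef>
#include <cstring>

class DownwardArena {
  char *StartOfBuffer = nullptr; // lowest address of the buffer
  char *EndOfBuffer = nullptr;   // one past the highest address
  char *StartOfData = nullptr;   // current top of the downward stack

public:
  ~DownwardArena() { delete[] StartOfBuffer; }

  char *allocate(std::size_t Size) {
    if (!StartOfBuffer) {
      // First allocation: choose a capacity large enough for Size.
      std::size_t Capacity = 1024;
      while (Capacity < Size) Capacity *= 2;
      StartOfBuffer = new char[Capacity];
      StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
    } else if (static_cast<std::size_t>(StartOfData - StartOfBuffer) < Size) {
      // Not enough headroom: double until the live data plus Size fits,
      // then move the live bytes to the end of the new buffer.
      std::size_t CurrentCapacity = EndOfBuffer - StartOfBuffer;
      std::size_t UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
      std::size_t NewCapacity = CurrentCapacity;
      do {
        NewCapacity *= 2;
      } while (NewCapacity < UsedCapacity + Size);
      char *NewStartOfBuffer = new char[NewCapacity];
      char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
      char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
      std::memcpy(NewStartOfData, StartOfData, UsedCapacity);
      delete[] StartOfBuffer;
      StartOfBuffer = NewStartOfBuffer;
      EndOfBuffer = NewEndOfBuffer;
      StartOfData = NewStartOfData;
    }
    assert(StartOfBuffer + Size <= StartOfData);
    StartOfData -= Size; // carve the allocation downward
    return StartOfData;
  }

  void deallocate(std::size_t Size) { StartOfData += Size; } // pop the top
};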

 111 void EHScopeStack::deallocate(size_t Size) {

 123   if (!cleanup->isLifetimeMarker() && !cleanup->isFakeUse())

 133   if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
 134     if (cleanup->isLifetimeMarker()) {
 135       si = cleanup->getEnclosingEHScope();

 149     if (cleanup.isActive()) return si;
 150     si = cleanup.getEnclosingNormalCleanup();

 156 void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {

 161   bool IsFakeUse = Kind & FakeUse;

 169   IsEHCleanup = false;

 176   InnermostNormalCleanup,

 182   if (IsLifetimeMarker)
 183     Scope->setLifetimeMarker();

 185     Scope->setFakeUse();

 193   if (CGF->getLangOpts().EHAsynch && IsEHCleanup && !IsLifetimeMarker &&

 197   return Scope->getCleanupBuffer();
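This raw pushCleanup(Kind, Size) only reserves suitably sized storage on the scope stack; clang's CodeGen callers normally go through the typed EHScopeStack::pushCleanup<T>(Kind, args...) template, which placement-constructs a Cleanup subclass in that buffer so its Emit method can later be run by PopCleanupBlock. A rough sketch of that pattern, with a made-up CallFooCleanup, is shown below; it is illustrative and not a verbatim excerpt from clang.

// Hypothetical cleanup that would emit a call to some foo(Ptr) on scope
// exit. The struct name, its field, and the value pushed are invented for
// illustration; the Cleanup interface and pushCleanup<T> are real.
namespace {
struct CallFooCleanup final : EHScopeStack::Cleanup {
  llvm::Value *Ptr;
  CallFooCleanup(llvm::Value *Ptr) : Ptr(Ptr) {}

  // Invoked while popping the scope, on the normal and/or EH paths
  // depending on the CleanupKind used when the cleanup was pushed.
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    // ... emit the call that releases Ptr here ...
  }
};
} // namespace

// At the point where the resource becomes live:
//   CGF.EHStack.pushCleanup<CallFooCleanup>(NormalAndEHCleanup, Ptr);
// The template computes sizeof(CallFooCleanup), calls the raw
// pushCleanup(Kind, Size) shown above, and constructs the object in place.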

 201   assert(!empty() && "popping exception stack when not empty");
 203   assert(isa<EHCleanupScope>(*begin()));

 205   InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
 206   InnermostEHScope = Cleanup.getEnclosingEHScope();
 207   deallocate(Cleanup.getAllocatedSize());

 213   if (!BranchFixups.empty()) {
 217     BranchFixups.clear();

 234   assert(!empty() && "popping exception stack when not empty");

 245   new (buffer) EHCatchScope(numHandlers, InnermostEHScope);

 267   unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
 268   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

 270   while (BranchFixups.size() > MinSize &&
 271          BranchFixups.back().Destination == nullptr)
 272     BranchFixups.pop_back();

 293   assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
 294   cleanup.setActiveFlag(ActiveFlag);

 296   if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
 297   if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();

 300 void EHScopeStack::Cleanup::anchor() {}
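The active-flag and test-flag machinery above exists for cleanups that may or may not need to run depending on control flow within a single full-expression. A small, self-contained source example of the kind of code that requires it (illustrative input, not clang code):

// Whether a given Buf temporary is created depends on which arm of the
// conditional operator is evaluated, so its destructor cleanup is pushed
// with an i1 "cleanup.isactive" flag; the evaluated branch stores true
// into the flag, and the cleanup tests it before running ~Buf().
struct Buf {
  Buf(int) {}
  ~Buf() {}
};

inline int size(const Buf &) { return 0; }

int pick(bool small) {
  return size(small ? Buf(16) : Buf(4096));
}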

 303                                    llvm::BasicBlock::iterator beforeInst,
 305   auto store = new llvm::StoreInst(value, addr.emitRawPointer(CGF), beforeInst);

 309 static llvm::LoadInst *
 311                                    llvm::BasicBlock::iterator beforeInst,

 328                                    llvm::SwitchInst *Switch,
 329                                    llvm::BasicBlock *CleanupEntry) {

 353     if (!CasesAdded.insert(Fixup.Destination).second)

 366                                                       llvm::BasicBlock *Block) {
 369   llvm::Instruction *Term = Block->getTerminator();
 370   assert(Term && "can't transition block without terminator");

 372   if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
 373     assert(Br->isUnconditional());
 375                                 "cleanup.dest", Term->getIterator(), CGF);
 376     llvm::SwitchInst *Switch =
 377         llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
 378     Br->eraseFromParent();

 381   return cast<llvm::SwitchInst>(Term);

 386   assert(Block && "resolving a null target block");
 387   if (!EHStack.getNumBranchFixups()) return;

 389   assert(EHStack.hasNormalCleanups() &&
 390          "branch fixups exist with no normal cleanups on stack");

 393   bool ResolvedAny = false;

 395   for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {

 410     if (!ModifiedOptimisticBlocks.insert(BranchBB).second)

 420     EHStack.popNullFixups();

 426                        std::initializer_list<llvm::Value **> ValuesToReload) {

 429   bool HadBranches = false;
 430   while (EHStack.stable_begin() != Old) {
 432     HadBranches |= Scope.hasBranches();

 437     bool FallThroughIsBranchThrough =
 440     PopCleanupBlock(FallThroughIsBranchThrough);

 451   for (llvm::Value **ReloadedValue : ValuesToReload) {
 452     auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);

 458     auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
 459     if (AI && AI->isStaticAlloca())

 463         CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

 466     llvm::BasicBlock::iterator InsertBefore;
 467     if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
 468       InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
 470       InsertBefore = std::next(Inst->getIterator());

 474     *ReloadedValue = Builder.CreateLoad(Tmp);

 482                        std::initializer_list<llvm::Value **> ValuesToReload) {
 483   PopCleanupBlocks(Old, ValuesToReload);

 486   for (size_t I = OldLifetimeExtendedSize,
 487               E = LifetimeExtendedCleanupStack.size(); I != E; ) {
 489     assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
 490            "misaligned cleanup stack entry");

 492     LifetimeExtendedCleanupHeader &Header =
 493         reinterpret_cast<LifetimeExtendedCleanupHeader&>(
 494             LifetimeExtendedCleanupStack[I]);
 495     I += sizeof(Header);

 497     EHStack.pushCopyOfCleanup(Header.getKind(),
 498                               &LifetimeExtendedCleanupStack[I],
 500     I += Header.getSize();

 502     if (Header.isConditional()) {
 504           reinterpret_cast<RawAddress &>(LifetimeExtendedCleanupStack[I]);
 505       initFullExprCleanupWithFlag(ActiveFlag);
 506       I += sizeof(ActiveFlag);

 509   LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);

 514   assert(Scope.isNormalCleanup());
 515   llvm::BasicBlock *Entry = Scope.getNormalBlock();
 518   Scope.setNormalBlock(Entry);

 529                                                llvm::BasicBlock *Entry) {
 530   llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
 531   if (!Pred) return Entry;

 533   llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
 534   if (!Br || Br->isConditional()) return Entry;
 535   assert(Br->getSuccessor(0) == Entry);

 540   bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
 541   assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

 544   Br->eraseFromParent();

 548   Entry->replaceAllUsesWith(Pred);

 551   Pred->splice(Pred->end(), Entry);

 554   Entry->eraseFromParent();

 557   CGF.Builder.SetInsertPoint(Pred);

 568     llvm::BasicBlock *ContBB = nullptr;

 572     llvm::Value *IsActive
 574     CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);

 579   Fn->Emit(CGF, flags);
 580   assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

 588                                            llvm::BasicBlock *From,
 589                                            llvm::BasicBlock *To) {
 592   llvm::Instruction *Term = Exit->getTerminator();

 594   if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
 595     assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
 596     Br->setSuccessor(0, To);

 598     llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
 599     for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
 600       if (Switch->getSuccessor(I) == From)
 601         Switch->setSuccessor(I, To);

 618   for (llvm::BasicBlock::use_iterator
 619          i = entry->use_begin(), e = entry->use_end(); i != e; ) {

 623     use.set(unreachableBB);

 626     llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
 627     if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {

 629       llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(),

 633       llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

 636       si->eraseFromParent();

 640       assert(condition->use_empty());
 641       condition->eraseFromParent();

 645   assert(entry->use_empty());

 654   assert(!EHStack.empty() && "cleanup stack is empty!");
 655   assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
 657   assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

 661   CGBuilderTy::InsertPoint NormalDeactivateOrigIP;
 663     NormalDeactivateOrigIP = Builder.saveAndClearIP();

 666   bool IsActive = Scope.isActive();
 668       Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
 671       Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()

 676   llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
 677   assert(Scope.hasEHBranches() == (EHEntry != nullptr));
 678   bool RequiresEHCleanup = (EHEntry != nullptr);

 684   unsigned FixupDepth = Scope.getFixupDepth();
 685   bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

 688   bool HasExistingBranches = Scope.hasBranches();

 691   llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
 692   bool HasFallthrough =
 693       FallthroughSource != nullptr && (IsActive || HasExistingBranches);

 699   bool HasPrebranchedFallthrough =
 700       (FallthroughSource && FallthroughSource->getTerminator());

 705   assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
 706          (Scope.getNormalBlock() &&
 707           FallthroughSource->getTerminator()->getSuccessor(0)
 708             == Scope.getNormalBlock()));

 710   bool RequiresNormalCleanup = false;
 711   if (Scope.isNormalCleanup() &&
 712       (HasFixups || HasExistingBranches || HasFallthrough)) {
 713     RequiresNormalCleanup = true;

 718   if (Scope.isNormalCleanup() && HasPrebranchedFallthrough &&
 719       !RequiresNormalCleanup) {

 723     llvm::BasicBlock *prebranchDest;

 728     if (FallthroughIsBranchThrough) {
 729       EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());

 737       prebranchDest = createBasicBlock("forwarded-prebranch");
 738       EmitBlock(prebranchDest);

 741     llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
 742     assert(normalEntry && !normalEntry->use_empty());

 745                                    normalEntry, prebranchDest);

 749   if (!RequiresNormalCleanup && !RequiresEHCleanup) {
 751     EHStack.popCleanup();
 752     assert(EHStack.getNumBranchFixups() == 0 ||
 753            EHStack.hasNormalCleanups());
 754     if (NormalDeactivateOrigIP.isSet())
 755       Builder.restoreIP(NormalDeactivateOrigIP);

 764   auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
 766       CleanupBufferStack[8 * sizeof(void *)];
 767   std::unique_ptr<char[]> CleanupBufferHeap;
 768   size_t CleanupSize = Scope.getCleanupSize();

 771   if (CleanupSize <= sizeof(CleanupBufferStack)) {
 772     memcpy(CleanupBufferStack, CleanupSource, CleanupSize);

 775     CleanupBufferHeap.reset(new char[CleanupSize]);
 776     memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);

 781   if (Scope.isNormalCleanup())
 783   if (Scope.isEHCleanup())

 787   bool IsEHa = getLangOpts().EHAsynch && !Scope.isLifetimeMarker();

 789   if (!RequiresNormalCleanup) {
 792     if (IsEHa && getInvokeDest()) {
 796       if (NormalDeactivateOrigIP.isSet())
 797         Builder.restoreIP(NormalDeactivateOrigIP);
 799       EmitSehCppScopeEnd();
 800       if (NormalDeactivateOrigIP.isSet())
 801         NormalDeactivateOrigIP = Builder.saveAndClearIP();

 804     Scope.MarkEmitted();
 805     EHStack.popCleanup();

 809   if (HasFallthrough && !HasPrebranchedFallthrough && !HasFixups &&
 810       !HasExistingBranches) {

 813     if (IsEHa && getInvokeDest()) {
 815       EmitSehCppScopeEnd();
 817       EmitSehTryScopeEnd();

 821     Scope.MarkEmitted();
 822     EHStack.popCleanup();

 824     EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

 834     CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

 838     if (HasFallthrough) {
 839       if (!HasPrebranchedFallthrough)
 840         Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

 844     } else if (FallthroughSource) {
 845       assert(!IsActive && "source without fallthrough for active cleanup");
 846       savedInactiveFallthroughIP = Builder.saveAndClearIP();

 852     EmitBlock(NormalEntry);

 855     if (IsEHa && getInvokeDest()) {
 857       EmitSehCppScopeEnd();
 859       EmitSehTryScopeEnd();

 865     bool HasEnclosingCleanups =
 866         (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

 873     llvm::BasicBlock *BranchThroughDest = nullptr;
 874     if (Scope.hasBranchThroughs() ||
 875         (FallthroughSource && FallthroughIsBranchThrough) ||
 876         (HasFixups && HasEnclosingCleanups)) {
 877       assert(HasEnclosingCleanups);
 878       EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());

 882     llvm::BasicBlock *FallthroughDest = nullptr;

 892     if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
 893         !currentFunctionUsesSEHTry() && Scope.getNumBranchAfters() == 1) {
 894       assert(!BranchThroughDest || !IsActive);

 897       llvm::Instruction *NormalCleanupDestSlot =
 898           cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
 899       if (NormalCleanupDestSlot->hasOneUse()) {
 900         NormalCleanupDestSlot->user_back()->eraseFromParent();
 901         NormalCleanupDestSlot->eraseFromParent();

 905       llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
 906       InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

 913     } else if (Scope.getNumBranchAfters() ||
 914                (HasFallthrough && !FallthroughIsBranchThrough) ||
 915                (HasFixups && !HasEnclosingCleanups)) {

 918           (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

 921       const unsigned SwitchCapacity = 10;

 927                                 "cleanup.dest", *this);
 928       llvm::SwitchInst *Switch =
 929           llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

 931       InstsToAppend.push_back(Load);
 932       InstsToAppend.push_back(Switch);

 935       if (FallthroughSource && !FallthroughIsBranchThrough) {
 936         FallthroughDest = createBasicBlock("cleanup.cont");
 938         Switch->addCase(Builder.getInt32(0), FallthroughDest);

 941       for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
 943                         Scope.getBranchAfterBlock(I));

 948       if (HasFixups && !HasEnclosingCleanups)

 952       assert(BranchThroughDest);
 953       InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));

 957     Scope.MarkEmitted();
 958     EHStack.popCleanup();
 959     assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

 961     EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

 964     llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
 965     for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
 966       InstsToAppend[I]->insertInto(NormalExit, NormalExit->end());

 969     for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
 976                               getNormalCleanupDestSlot(),

 987     if (!HasFallthrough && FallthroughSource) {
 992       Builder.restoreIP(savedInactiveFallthroughIP);

 997     } else if (HasFallthrough && FallthroughDest) {
 998       assert(!FallthroughIsBranchThrough);
 999       EmitBlock(FallthroughDest);

1003     } else if (HasFallthrough) {

1008       Builder.ClearInsertionPoint();

1015     llvm::BasicBlock *NewNormalEntry =

1020     if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
1021       for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
1023         EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;

1027   if (NormalDeactivateOrigIP.isSet())
1028     Builder.restoreIP(NormalDeactivateOrigIP);
1029   assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

1032   if (RequiresEHCleanup) {
1033     CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

1037     llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

1042     bool PushedTerminate = false;

1044     llvm::CleanupPadInst *CPI = nullptr;

1048       llvm::Value *ParentPad = CurrentFuncletPad;
1050         ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
1051       CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);

1056       EHStack.pushTerminate();
1057       PushedTerminate = true;
1058     } else if (IsEHa && getInvokeDest()) {
1059       EmitSehCppScopeEnd();

1064     if (EHActiveFlag.isValid() || IsActive) {
1066       EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);

1070       Builder.CreateCleanupRet(CPI, NextAction);
1072       Builder.CreateBr(NextAction);

1075     if (PushedTerminate)
1076       EHStack.popTerminate();

1078     Builder.restoreIP(SavedIP);

1088   assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
1089          && "stale jump destination");

1093       EHStack.getInnermostActiveNormalCleanup();

1098   if (TopCleanup == EHStack.stable_end() ||
1099       TopCleanup.encloses(Dest.getScopeDepth()))

1113   assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
1114          && "stale jump destination");

1116   if (!HaveInsertPoint())

1120   llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

1124       TopCleanup = EHStack.getInnermostActiveNormalCleanup();

1129   if (TopCleanup == EHStack.stable_end() ||
1130       TopCleanup.encloses(Dest.getScopeDepth())) {
1131     Builder.ClearInsertionPoint();

1137   if (!Dest.getScopeDepth().isValid()) {
1144     Builder.ClearInsertionPoint();

1151   llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());

1158       cast<EHCleanupScope>(*EHStack.find(TopCleanup));

1165     if (E.strictlyEncloses(I)) {
1168       assert(Scope.isNormalCleanup());
1169       I = Scope.getEnclosingNormalCleanup();

1173     if (!E.strictlyEncloses(I)) {
1174       Scope.addBranchAfter(Index, Dest.getBlock());

1181     if (!Scope.addBranchThrough(Dest.getBlock()))

1186   Builder.ClearInsertionPoint();

1198     assert(cleanup.strictlyEncloses(i));
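EmitBranchThroughCleanup and the branch-after / branch-through bookkeeping above handle ordinary control flow (return, break, goto) that has to leave scopes whose cleanups still need to run. A self-contained source example of the kind of code that exercises this path (illustrative input, not clang code):

// The early return below cannot simply jump to the function exit: it must
// first branch through the cleanup that runs ~Lock(), which is what the
// cleanup.dest index and the branch-after/branch-through edges model.
struct Lock {
  Lock() {}
  ~Lock() {}   // user-provided destructor => a normal cleanup is pushed
};

int find(const int *data, int n, int key) {
  Lock lock;                 // the ~Lock() cleanup becomes active here
  for (int i = 0; i < n; ++i) {
    if (data[i] == key)
      return i;              // leaves both the loop and the function scope;
                             // the branch is routed through ~Lock()
  }
  return -1;                 // the normal fallthrough also runs ~Lock()
}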

1223                                           llvm::Instruction *dominatingIP) {

1229   bool isActivatedInConditional =

1232   bool needFlag = false;

1237   if (Scope.isNormalCleanup()) {
1238     Scope.setTestFlagInNormalCleanup();

1243   if (Scope.isEHCleanup() &&
1245     Scope.setTestFlagInEHCleanup();

1254     if (!var.isValid()) {
1255       CodeGenFunction::AllocaTrackerRAII AllocaTracker(CGF);
1257                                             "cleanup.isactive");
1258       Scope.setActiveFlag(var);
1259       Scope.AddAuxAllocas(AllocaTracker.Take());

1261       assert(dominatingIP && "no existing variable and no dominating IP!");

1281                                            llvm::Instruction *dominatingIP) {
1282   assert(C != EHStack.stable_end() && "activating bottom of stack?");
1284   assert(!Scope.isActive() && "double activation");

1288   Scope.setActive(true);

1293                                              llvm::Instruction *dominatingIP) {
1294   assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
1296   assert(Scope.isActive() && "double deactivation");

1300   if (C == EHStack.stable_begin() &&
1301       CurrentCleanupScopeDepth.strictlyEncloses(C)) {
1302     PopCleanupBlock(false,

1310   Scope.setActive(false);

1314   if (!NormalCleanupDest.isValid())
1316         CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
1317   return NormalCleanupDest;

1331                          llvm::FunctionCallee &SehCppScope) {
1333   assert(CGF.Builder.GetInsertBlock() && InvokeDest);
1339   CGF.Builder.CreateInvoke(SehCppScope, Cont, InvokeDest, {}, BundleList);

1345   assert(getLangOpts().EHAsynch);
1346   llvm::FunctionType *FTy =
1347       llvm::FunctionType::get(CGM.VoidTy, false);
1348   llvm::FunctionCallee SehCppScope =
1349       CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.begin");

1356   assert(getLangOpts().EHAsynch);
1357   llvm::FunctionType *FTy =
1358       llvm::FunctionType::get(CGM.VoidTy, false);
1359   llvm::FunctionCallee SehCppScope =
1360       CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.end");

1366   assert(getLangOpts().EHAsynch);
1367   llvm::FunctionType *FTy =
1368       llvm::FunctionType::get(CGM.VoidTy, false);
1369   llvm::FunctionCallee SehCppScope =
1370       CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.begin");

1376   assert(getLangOpts().EHAsynch);
1377   llvm::FunctionType *FTy =
1378       llvm::FunctionType::get(CGM.VoidTy, false);
1379   llvm::FunctionCallee SehCppScope =
1380       CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.end");
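The four helpers above declare and invoke the llvm.seh.scope.begin/end and llvm.seh.try.begin/end markers used when MSVC-style asynchronous exception handling (-fasync-exceptions, cl.exe /EHa) is enabled, so that hardware faults can unwind through C++ cleanups. A sketch of the kind of source that leads to these markers (illustrative; the exact IR depends on target and options):

// Compiled with clang-cl /EHa (or -fasync-exceptions), the lifetime of `g`
// is bracketed with llvm.seh scope markers so that an asynchronous SEH
// exception raised by the faulting store still runs ~Guard().
struct Guard {
  Guard() {}
  ~Guard() {}   // must run even if unwinding is triggered by a hardware fault
};

void touch(volatile int *p) {
  Guard g;      // scope marker emitted once construction is complete
  *p = 42;      // may fault; /EHa lets the fault unwind through the cleanup
}               // scope end marker emitted before the normal destruction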

static llvm::LoadInst * createLoadInstBefore(Address addr, const Twine &name, llvm::BasicBlock::iterator beforeInst, CodeGenFunction &CGF)

static void EmitSehScope(CodeGenFunction &CGF, llvm::FunctionCallee &SehCppScope)

static llvm::BasicBlock * CreateNormalEntry(CodeGenFunction &CGF, EHCleanupScope &Scope)

static void EmitCleanup(CodeGenFunction &CGF, EHScopeStack::Cleanup *Fn, EHScopeStack::Cleanup::Flags flags, Address ActiveFlag)

static void destroyOptimisticNormalEntry(CodeGenFunction &CGF, EHCleanupScope &scope)

We don't need a normal entry block for the given cleanup.

static void SetupCleanupBlockActivation(CodeGenFunction &CGF, EHScopeStack::stable_iterator C, ForActivation_t kind, llvm::Instruction *dominatingIP)

The given cleanup block is changing activation state.

static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit, llvm::BasicBlock *From, llvm::BasicBlock *To)

static void createStoreInstBefore(llvm::Value *value, Address addr, llvm::BasicBlock::iterator beforeInst, CodeGenFunction &CGF)

static void ResolveAllBranchFixups(CodeGenFunction &CGF, llvm::SwitchInst *Switch, llvm::BasicBlock *CleanupEntry)

All the branch fixups on the EH stack have propagated out past the outermost normal cleanup; resolve ...

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack, EHScopeStack::stable_iterator cleanup)

static llvm::BasicBlock * SimplifyCleanupEntry(CodeGenFunction &CGF, llvm::BasicBlock *Entry)

Attempts to reduce a cleanup's entry block to a fallthrough.

static llvm::SwitchInst * TransitionToCleanupSwitch(CodeGenFunction &CGF, llvm::BasicBlock *Block)

Transitions the terminator of the given exit-block of a cleanup to be a cleanup switch.

static Decl::Kind getKind(const Decl *D)

__DEVICE__ void * memcpy(void *__a, const void *__b, size_t __c)

Represents a C++ temporary.

llvm::Align getAsAlign() const

getAsAlign - Returns Quantity as a valid llvm::Align, Beware llvm::Align assumes power of two 8-bit b...

static CharUnits One()

One - Construct a CharUnits quantity of one.

Like RawAddress, an abstract representation of an aligned address, but the pointer contained in this ...

llvm::Value * emitRawPointer(CodeGenFunction &CGF) const

Return the pointer contained in this class after authenticating it and adding offset to it if necessa...

CharUnits getAlignment() const

llvm::Type * getElementType() const

Return the type of the values stored in this address.

llvm::StoreInst * CreateStore(llvm::Value *Val, Address Addr, bool IsVolatile=false)

llvm::LoadInst * CreateLoad(Address Addr, const llvm::Twine &Name="")

CodeGenFunction - This class organizes the per-function state that is used while generating LLVM code...

void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup, llvm::Instruction *DominatingIP)

DeactivateCleanupBlock - Deactivates the given cleanup block.

void EmitSehCppScopeBegin()

RawAddress createCleanupActiveFlag()

RawAddress CreateTempAllocaWithoutCast(llvm::Type *Ty, CharUnits align, const Twine &Name="tmp", llvm::Value *ArraySize=nullptr)

llvm::BasicBlock * createBasicBlock(const Twine &name="", llvm::Function *parent=nullptr, llvm::BasicBlock *before=nullptr)

createBasicBlock - Create an LLVM basic block.

const LangOptions & getLangOpts() const

void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false)

EmitBlock - Emit the given block.

llvm::AllocaInst * CreateTempAlloca(llvm::Type *Ty, const Twine &Name="tmp", llvm::Value *ArraySize=nullptr)

CreateTempAlloca - This creates an alloca and inserts it into the entry block if ArraySize is nullptr...

SmallVector< llvm::OperandBundleDef, 1 > getBundlesForFunclet(llvm::Value *Callee)

RawAddress getNormalCleanupDestSlot()

void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize, std::initializer_list< llvm::Value ** > ValuesToReload={})

Takes the old cleanup stack size and emits the cleanup blocks that have been added.

llvm::BasicBlock * getUnreachableBlock()

const TargetInfo & getTarget() const

bool isInConditionalBranch() const

isInConditionalBranch - Return true if we're currently emitting one branch or the other of a conditio...

void EmitSehCppScopeEnd()

void initFullExprCleanupWithFlag(RawAddress ActiveFlag)

llvm::BasicBlock * getInvokeDest()

void EmitSehTryScopeBegin()

void EmitSehTryScopeEnd()

bool HaveInsertPoint() const

HaveInsertPoint - True if an insertion point is defined.

void setBeforeOutermostConditional(llvm::Value *value, Address addr, CodeGenFunction &CGF)

void EmitCXXTemporary(const CXXTemporary *Temporary, QualType TempType, Address Ptr)

void EmitBranchThroughCleanup(JumpDest Dest)

EmitBranchThroughCleanup - Emit a branch from the current insert block through the normal cleanup han...

void PopCleanupBlock(bool FallThroughIsBranchThrough=false, bool ForDeactivation=false)

PopCleanupBlock - Will pop the cleanup entry on the stack and process all branch fixups.

void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup, llvm::Instruction *DominatingIP)

ActivateCleanupBlock - Activates an initially-inactive cleanup.

RawAddress NormalCleanupDest

i32s containing the indexes of the cleanup destinations.

bool isObviouslyBranchWithoutCleanups(JumpDest Dest) const

isObviouslyBranchWithoutCleanups - Return true if a branch to the specified destination obviously has...

llvm::Instruction * CurrentFuncletPad

void ResolveBranchFixups(llvm::BasicBlock *Target)

A scope which attempts to handle some, possibly all, types of exceptions.

static size_t getSizeForNumHandlers(unsigned N)

A cleanup scope which generates the cleanup blocks lazily.

static size_t getSizeForCleanupSize(size_t Size)

Gets the size required for a lazy cleanup scope with the given cleanup-data requirements.

llvm::BasicBlock * getNormalBlock() const

An exceptions scope which filters exceptions thrown through it.

static size_t getSizeForNumFilters(unsigned numFilters)

unsigned getNumFilters() const

void setIsNormalCleanupKind()

void setIsEHCleanupKind()

Information for lazily generating a cleanup.

A non-stable pointer into the scope stack.

A saved depth on the scope stack.

bool encloses(stable_iterator I) const

Returns true if this scope encloses I.

bool strictlyEncloses(stable_iterator I) const

Returns true if this scope strictly encloses I: that is, if it encloses I and is not I.

A stack of scopes which respond to exceptions, including cleanups and catch blocks.

class EHFilterScope * pushFilter(unsigned NumFilters)

Push an exceptions filter on the stack.

BranchFixup & getBranchFixup(unsigned I)

stable_iterator getInnermostNormalCleanup() const

Returns the innermost normal cleanup on the stack, or stable_end() if there are no normal cleanups.

stable_iterator stable_begin() const

Create a stable reference to the top of the EH stack.

unsigned getNumBranchFixups() const

void popCleanup()

Pops a cleanup scope off the stack. This is private to CGCleanup.cpp.

stable_iterator getInnermostEHScope() const

bool requiresLandingPad() const

bool empty() const

Determines whether the exception-scopes stack is empty.

void popFilter()

Pops an exceptions filter off the stack.

iterator begin() const

Returns an iterator pointing to the innermost EH scope.

bool containsOnlyNoopCleanups(stable_iterator Old) const

class EHCatchScope * pushCatch(unsigned NumHandlers)

Push a set of catch handlers on the stack.

iterator find(stable_iterator save) const

Turn a stable reference to a scope depth into an unstable pointer to the EH stack.

void popNullFixups()

Pops lazily-removed fixups from the end of the list.

bool hasNormalCleanups() const

Determines whether there are any normal cleanups on the stack.

stable_iterator getInnermostActiveNormalCleanup() const

stable_iterator stabilize(iterator it) const

Translates an iterator into a stable_iterator.

static stable_iterator stable_end()

Create a stable reference to the bottom of the EH stack.

void clearFixups()

Clears the branch-fixups list.

void pushTerminate()

Push a terminate handler on the stack.

A protected scope for zero-cost EH handling.

EHScopeStack::stable_iterator getEnclosingEHScope() const

bool hasEHBranches() const

An exceptions scope which calls std::terminate if any exception reaches it.

RValue - This trivial value class is used to represent the result of an expression that is evaluated.

static RValue get(llvm::Value *V)

static RValue getAggregate(Address addr, bool isVolatile=false)

Convert an Address to an RValue.

static RValue getComplex(llvm::Value *V1, llvm::Value *V2)

Address getAggregateAddress() const

getAggregateAddr() - Return the Value* of the address of the aggregate.

llvm::Value * getScalarVal() const

getScalarVal() - Return the Value* of this scalar value.

std::pair< llvm::Value *, llvm::Value * > getComplexVal() const

getComplexVal - Return the real/imag components of this complex value.

An abstract representation of an aligned address.

llvm::Value * getPointer() const

static RawAddress invalid()

A (possibly-)qualified type.

Scope - A scope is a transient data structure that is used while parsing the program.

bool isMicrosoft() const

Is this ABI an MSVC-compatible ABI?

TargetCXXABI getCXXABI() const

Get the C++ ABI currently in use.

@ NormalCleanup

Denotes a cleanup that should run when a scope is exited using normal control flow (falling off the e...

@ EHCleanup

Denotes a cleanup that should run when a scope is exited using exceptional control flow (a throw stat...

bool Load(InterpState &S, CodePtr OpPC)

The JSON file list parser is used to communicate input to InstallAPI.

unsigned DestinationIndex

The destination index value.

llvm::BasicBlock * Destination

The ultimate destination of the branch.

llvm::BasicBlock * OptimisticBranchBlock

The block containing the terminator which needs to be modified into a switch if this fixup is resolve...

llvm::BranchInst * InitialBranch

The initial branch of the fixup.

static llvm::Value * restore(CodeGenFunction &CGF, saved_type value)

static saved_type save(CodeGenFunction &CGF, llvm::Value *value)

static bool needsSaving(llvm::Value *value)

Answer whether the given value needs extra work to be saved.

A metaprogramming class for ensuring that a value will dominate an arbitrary position in a function.

The exceptions personality for a function.

bool isMSVCXXPersonality() const

static const EHPersonality & get(CodeGenModule &CGM, const FunctionDecl *FD)

bool usesFuncletPads() const

Does this personality use landingpads or the family of pad instructions designed to form funclets?

bool isMSVCPersonality() const

