//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope. This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
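//
// For example, in
//
//   { std::string s = f(); g(); }
//
// the destructor of 's' is a cleanup: once 's' has been constructed,
// it must run on normal exit from the block, on any goto, return, or
// break that leaves the scope, and on the exceptional path if g()
// throws.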
//
//===----------------------------------------------------------------------===//
#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"
using namespace clang;
using namespace CodeGen;
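/// Decide whether the given r-value must be spilled to a temporary so
/// that it can be reloaded at a point (such as a cleanup) which its
/// definition does not dominate; values that are not instructions
/// dominate everything and never need saving.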
bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
if (rv.isScalar())
return DominatingLLVMValue::needsSaving(rv.getScalarVal());
if (rv.isAggregate())
return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
return true;
}
DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
if (rv.isScalar()) {
llvm::Value *V = rv.getScalarVal();
// These automatically dominate and don't need to be saved.
if (!DominatingLLVMValue::needsSaving(V))
return saved_type(V, ScalarLiteral);
// Everything else needs an alloca.
Address addr =
CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
CGF.Builder.CreateStore(V, addr);
return saved_type(addr.getPointer(), ScalarAddress);
}
if (rv.isComplex()) {
CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
llvm::Type *ComplexTy =
llvm::StructType::get(V.first->getType(), V.second->getType());
Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
CGF.Builder.CreateStore(V.first, CGF.Builder.CreateStructGEP(addr, 0));
CGF.Builder.CreateStore(V.second, CGF.Builder.CreateStructGEP(addr, 1));
return saved_type(addr.getPointer(), ComplexAddress);
}
assert(rv.isAggregate());
Address V = rv.getAggregateAddress(); // TODO: volatile?
if (!DominatingLLVMValue::needsSaving(V.getPointer()))
return saved_type(V.getPointer(), AggregateLiteral,
V.getAlignment().getQuantity());
Address addr =
CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
CGF.Builder.CreateStore(V.getPointer(), addr);
return saved_type(addr.getPointer(), AggregateAddress,
V.getAlignment().getQuantity());
}
/// Given a saved r-value produced by saved_type::save, perform the code
/// necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
auto getSavingAddress = [&](llvm::Value *value) {
auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
return Address(value, CharUnits::fromQuantity(alignment));
};
switch (K) {
case ScalarLiteral:
return RValue::get(Value);
case ScalarAddress:
return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
case AggregateLiteral:
return RValue::getAggregate(Address(Value, CharUnits::fromQuantity(Align)));
case AggregateAddress: {
auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
return RValue::getAggregate(Address(addr, CharUnits::fromQuantity(Align)));
}
case ComplexAddress: {
Address address = getSavingAddress(Value);
llvm::Value *real =
CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 0));
llvm::Value *imag =
CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 1));
return RValue::getComplex(real, imag);
}
}
llvm_unreachable("bad saved r-value kind");
}
/// Push an entry of the given size onto this protected-scope stack.
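/// Entries are carved from the high end of a single heap-allocated
/// buffer, so the innermost scope always starts at StartOfData; if a
/// push would run past StartOfBuffer, the capacity is doubled and the
/// live data is copied to the high end of the new buffer.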
char *EHScopeStack::allocate(size_t Size) {
Size = llvm::alignTo(Size, ScopeStackAlignment);
if (!StartOfBuffer) {
unsigned Capacity = 1024;
while (Capacity < Size) Capacity *= 2;
StartOfBuffer = new char[Capacity];
StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
} else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
unsigned NewCapacity = CurrentCapacity;
do {
NewCapacity *= 2;
} while (NewCapacity < UsedCapacity + Size);
char *NewStartOfBuffer = new char[NewCapacity];
char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
memcpy(NewStartOfData, StartOfData, UsedCapacity);
delete [] StartOfBuffer;
StartOfBuffer = NewStartOfBuffer;
EndOfBuffer = NewEndOfBuffer;
StartOfData = NewStartOfData;
}
assert(StartOfBuffer + Size <= StartOfData);
StartOfData -= Size;
return StartOfData;
}
void EHScopeStack::deallocate(size_t Size) {
StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}
bool EHScopeStack::containsOnlyLifetimeMarkers(
EHScopeStack::stable_iterator Old) const {
for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; ++it) {
EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
if (!cleanup || !cleanup->isLifetimeMarker())
return false;
}
return true;
}
bool EHScopeStack::requiresLandingPad() const {
for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
// Skip lifetime markers.
if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
if (cleanup->isLifetimeMarker()) {
si = cleanup->getEnclosingEHScope();
continue;
}
return true;
}
return false;
}
EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
si != se; ) {
EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
if (cleanup.isActive()) return si;
si = cleanup.getEnclosingNormalCleanup();
}
return stable_end();
}
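/// Push a cleanup scope of the given kind onto the stack and return a
/// pointer to its uninitialized cleanup buffer; the caller constructs
/// the cleanup object in place there.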
void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
bool IsNormalCleanup = Kind & NormalCleanup;
bool IsEHCleanup = Kind & EHCleanup;
bool IsActive = !(Kind & InactiveCleanup);
bool IsLifetimeMarker = Kind & LifetimeMarker;
EHCleanupScope *Scope =
new (Buffer) EHCleanupScope(IsNormalCleanup,
IsEHCleanup,
IsActive,
Size,
BranchFixups.size(),
InnermostNormalCleanup,
InnermostEHScope);
if (IsNormalCleanup)
InnermostNormalCleanup = stable_begin();
if (IsEHCleanup)
InnermostEHScope = stable_begin();
if (IsLifetimeMarker)
Scope->setLifetimeMarker();
return Scope->getCleanupBuffer();
}
void EHScopeStack::popCleanup() {
assert(!empty() && "popping exception stack when empty");
assert(isa<EHCleanupScope>(*begin()));
EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
InnermostEHScope = Cleanup.getEnclosingEHScope();
deallocate(Cleanup.getAllocatedSize());
// Destroy the cleanup.
Cleanup.Destroy();
// Check whether we can shrink the branch-fixups stack.
if (!BranchFixups.empty()) {
// If we no longer have any normal cleanups, all the fixups are
// complete.
if (!hasNormalCleanups())
BranchFixups.clear();
// Otherwise we can still trim out unnecessary nulls.
else
popNullFixups();
}
}
EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
assert(getInnermostEHScope() == stable_end());
char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
InnermostEHScope = stable_begin();
return filter;
}
void EHScopeStack::popFilter() {
assert(!empty() && "popping exception stack when empty");
EHFilterScope &filter = cast<EHFilterScope>(*begin());
deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));
InnermostEHScope = filter.getEnclosingEHScope();
}
EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
EHCatchScope *scope =
new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
InnermostEHScope = stable_begin();
return scope;
}
void EHScopeStack::pushTerminate() {
char *Buffer = allocate(EHTerminateScope::getSize());
new (Buffer) EHTerminateScope(InnermostEHScope);
InnermostEHScope = stable_begin();
}
/// Remove any 'null' fixups on the stack. However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place. We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
// We expect this to only be called when there's still an innermost
// normal cleanup; otherwise there really shouldn't be any fixups.
assert(hasNormalCleanups());
EHScopeStack::iterator it = find(InnermostNormalCleanup);
unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
while (BranchFixups.size() > MinSize &&
BranchFixups.back().Destination == nullptr)
BranchFixups.pop_back();
}
Address CodeGenFunction::createCleanupActiveFlag() {
// Create a variable to decide whether the cleanup needs to be run.
Address active = CreateTempAllocaWithoutCast(
Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");
// Initialize it to false at a site that's guaranteed to be run
// before each evaluation.
setBeforeOutermostConditional(Builder.getFalse(), active);
// Initialize it to true at the current location.
Builder.CreateStore(Builder.getTrue(), active);
return active;
}
void CodeGenFunction::initFullExprCleanupWithFlag(Address ActiveFlag) {
// Set that as the active flag in the cleanup.
EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
cleanup.setActiveFlag(ActiveFlag);
if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}
void EHScopeStack::Cleanup::anchor() {}
static void createStoreInstBefore(llvm::Value *value, Address addr,
llvm::Instruction *beforeInst) {
auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
store->setAlignment(addr.getAlignment().getAsAlign());
}
static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
llvm::Instruction *beforeInst) {
auto load = new llvm::LoadInst(addr.getPointer(), name, beforeInst);
load->setAlignment(addr.getAlignment().getAsAlign());
return load;
}
/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
llvm::SwitchInst *Switch,
llvm::BasicBlock *CleanupEntry) {
llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;
for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
// Skip this fixup if its destination isn't set.
BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
if (Fixup.Destination == nullptr) continue;
// If there isn't an OptimisticBranchBlock, then InitialBranch is
// still pointing directly to its destination; forward it to the
// appropriate cleanup entry. This is required in the specific
// case of
// { std::string s; goto lbl; }
// lbl:
// i.e. where there's an unresolved fixup inside a single cleanup
// entry which we're currently popping.
if (Fixup.OptimisticBranchBlock == nullptr) {
createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
CGF.getNormalCleanupDestSlot(),
Fixup.InitialBranch);
Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
}
// Don't add this case to the switch statement twice.
if (!CasesAdded.insert(Fixup.Destination).second)
continue;
Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
Fixup.Destination);
}
CGF.EHStack.clearFixups();
}
/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
llvm::BasicBlock *Block) {
// If it's a branch, turn it into a switch whose default
// destination is its original target.
llvm::Instruction *Term = Block->getTerminator();
assert(Term && "can't transition block without terminator");
if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
assert(Br->isUnconditional());
auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
"cleanup.dest", Term);
llvm::SwitchInst *Switch =
llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
Br->eraseFromParent();
return Switch;
} else {
return cast<llvm::SwitchInst>(Term);
}
}
void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
assert(Block && "resolving a null target block");
if (!EHStack.getNumBranchFixups()) return;
assert(EHStack.hasNormalCleanups() &&
"branch fixups exist with no normal cleanups on stack");
llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
bool ResolvedAny = false;
for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
// Skip this fixup if its destination doesn't match.
BranchFixup &Fixup = EHStack.getBranchFixup(I);
if (Fixup.Destination != Block) continue;
Fixup.Destination = nullptr;
ResolvedAny = true;
// If it doesn't have an optimistic branch block, LatestBranch is
// already pointing to the right place.
llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
if (!BranchBB)
continue;
// Don't process the same optimistic branch block twice.
if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
continue;
llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);
// Add a case to the switch.
Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
}
if (ResolvedAny)
EHStack.popNullFixups();
}
/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(
EHScopeStack::stable_iterator Old,
std::initializer_list<llvm::Value **> ValuesToReload) {
assert(Old.isValid());
bool HadBranches = false;
while (EHStack.stable_begin() != Old) {
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
HadBranches |= Scope.hasBranches();
// As long as Old strictly encloses the scope's enclosing normal
// cleanup, we're going to emit another normal cleanup which
// fallthrough can propagate through.
bool FallThroughIsBranchThrough =
Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());
PopCleanupBlock(FallThroughIsBranchThrough);
}
// If we didn't have any branches, the insertion point before cleanups must
// dominate the current insertion point and we don't need to reload any
// values.
if (!HadBranches)
return;
// Spill and reload all values that the caller wants to be live at the current
// insertion point.
for (llvm::Value **ReloadedValue : ValuesToReload) {
auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
if (!Inst)
continue;
// Don't spill static allocas, they dominate all cleanups. These are created
// by binding a reference to a local variable or temporary.
auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
if (AI && AI->isStaticAlloca())
continue;
Address Tmp =
CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");
// Find an insertion point after Inst and spill it to the temporary.
llvm::BasicBlock::iterator InsertBefore;
if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
else
InsertBefore = std::next(Inst->getIterator());
CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);
// Reload the value at the current insertion point.
*ReloadedValue = Builder.CreateLoad(Tmp);
}
}
/// Pops cleanup blocks until the given savepoint is reached, then add the
/// cleanups from the given savepoint in the lifetime-extended cleanups stack.
void CodeGenFunction::PopCleanupBlocks(
EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
std::initializer_list<llvm::Value **> ValuesToReload) {
PopCleanupBlocks(Old, ValuesToReload);
// Move our deferred cleanups onto the EH stack.
for (size_t I = OldLifetimeExtendedSize,
E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
// Alignment should be guaranteed by the vptrs in the individual cleanups.
assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
"misaligned cleanup stack entry");
LifetimeExtendedCleanupHeader &Header =
reinterpret_cast<LifetimeExtendedCleanupHeader&>(
LifetimeExtendedCleanupStack[I]);
I += sizeof(Header);
EHStack.pushCopyOfCleanup(Header.getKind(),
&LifetimeExtendedCleanupStack[I],
Header.getSize());
I += Header.getSize();
if (Header.isConditional()) {
Address ActiveFlag =
reinterpret_cast<Address &>(LifetimeExtendedCleanupStack[I]);
initFullExprCleanupWithFlag(ActiveFlag);
I += sizeof(ActiveFlag);
}
}
LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}
static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
EHCleanupScope &Scope) {
assert(Scope.isNormalCleanup());
llvm::BasicBlock *Entry = Scope.getNormalBlock();
if (!Entry) {
Entry = CGF.createBasicBlock("cleanup");
Scope.setNormalBlock(Entry);
}
return Entry;
}
/// Attempts to reduce a cleanup's entry block to a fallthrough. This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
llvm::BasicBlock *Entry) {
llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
if (!Pred) return Entry;
llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
if (!Br || Br->isConditional()) return Entry;
assert(Br->getSuccessor(0) == Entry);
// If we were previously inserting at the end of the cleanup entry
// block, we'll need to continue inserting at the end of the
// predecessor.
bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());
// Kill the branch.
Br->eraseFromParent();
// Replace all uses of the entry with the predecessor, in case there
// are phis in the cleanup.
Entry->replaceAllUsesWith(Pred);
// Merge the blocks.
Pred->getInstList().splice(Pred->end(), Entry->getInstList());
// Kill the entry block.
Entry->eraseFromParent();
if (WasInsertBlock)
CGF.Builder.SetInsertPoint(Pred);
return Pred;
}
static void EmitCleanup(CodeGenFunction &CGF,
EHScopeStack::Cleanup *Fn,
EHScopeStack::Cleanup::Flags flags,
Address ActiveFlag) {
// If there's an active flag, load it and skip the cleanup if it's
// false.
llvm::BasicBlock *ContBB = nullptr;
if (ActiveFlag.isValid()) {
ContBB = CGF.createBasicBlock("cleanup.done");
llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
llvm::Value *IsActive
= CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
CGF.EmitBlock(CleanupBB);
}
// Ask the cleanup to emit itself.
Fn->Emit(CGF, flags);
assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
// Emit the continuation block if there was an active flag.
if (ActiveFlag.isValid())
CGF.EmitBlock(ContBB);
}
static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
llvm::BasicBlock *From,
llvm::BasicBlock *To) {
// Exit is the exit block of a cleanup, so it always terminates in
// an unconditional branch or a switch.
llvm::Instruction *Term = Exit->getTerminator();
if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
Br->setSuccessor(0, To);
} else {
llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
if (Switch->getSuccessor(I) == From)
Switch->setSuccessor(I, To);
}
}
/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause these blocks to come into
/// existence anyway; if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
EHCleanupScope &scope) {
llvm::BasicBlock *entry = scope.getNormalBlock();
if (!entry) return;
// Replace all the uses with unreachable.
llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
for (llvm::BasicBlock::use_iterator
i = entry->use_begin(), e = entry->use_end(); i != e; ) {
llvm::Use &use = *i;
++i;
use.set(unreachableBB);
// The only uses should be fixup switches.
llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
// Replace the switch with a branch.
llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);
// The switch operand is a load from the cleanup-dest alloca.
llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());
// Destroy the switch.
si->eraseFromParent();
// Destroy the load.
assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
assert(condition->use_empty());
condition->eraseFromParent();
}
}
assert(entry->use_empty());
delete entry;
}
/// Pops a cleanup block. If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
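///
/// For a normal cleanup that cannot simply be emitted at the
/// fallthrough point, emission proceeds in the numbered stages below:
/// wire the fallthrough edge in, emit the entry block, build the exit
/// (a direct branch or a switch over the cleanup destination slot),
/// pop and emit the cleanup itself, wire the fallthrough edge out, and
/// finally try to merge the entry block into its single predecessor.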
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
assert(!EHStack.empty() && "cleanup stack is empty!");
assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
// Remember activation information.
bool IsActive = Scope.isActive();
Address NormalActiveFlag =
Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
: Address::invalid();
Address EHActiveFlag =
Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
: Address::invalid();
// Check whether we need an EH cleanup. This is only true if we've
// generated a lazy EH cleanup block.
llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
assert(Scope.hasEHBranches() == (EHEntry != nullptr));
bool RequiresEHCleanup = (EHEntry != nullptr);
EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();
// Check the three conditions which might require a normal cleanup:
// - whether there are branch fix-ups through this cleanup
unsigned FixupDepth = Scope.getFixupDepth();
bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
// - whether there are branch-throughs or branch-afters
bool HasExistingBranches = Scope.hasBranches();
// - whether there's a fallthrough
llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
bool HasFallthrough = (FallthroughSource != nullptr && IsActive);
// Branch-through fall-throughs leave the insertion point set to the
// end of the last cleanup, which points to the current scope. The
// rest of IR gen doesn't need to worry about this; it only happens
// during the execution of PopCleanupBlocks().
bool HasPrebranchedFallthrough =
(FallthroughSource && FallthroughSource->getTerminator());
// If this is a normal cleanup, then having a prebranched
// fallthrough implies that the fallthrough source unconditionally
// jumps here.
assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
(Scope.getNormalBlock() &&
FallthroughSource->getTerminator()->getSuccessor(0)
== Scope.getNormalBlock()));
bool RequiresNormalCleanup = false;
if (Scope.isNormalCleanup() &&
(HasFixups || HasExistingBranches || HasFallthrough)) {
RequiresNormalCleanup = true;
}
// If we have a prebranched fallthrough into an inactive normal
// cleanup, rewrite it so that it leads to the appropriate place.
if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
llvm::BasicBlock *prebranchDest;
// If the prebranch is semantically branching through the next
// cleanup, just forward it to the next block, leaving the
// insertion point in the prebranched block.
if (FallthroughIsBranchThrough) {
EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));
// Otherwise, we need to make a new block. If the normal cleanup
// isn't being used at all, we could actually reuse the normal
// entry block, but this is simpler, and it avoids conflicts with
// dead optimistic fixup branches.
} else {
prebranchDest = createBasicBlock("forwarded-prebranch");
EmitBlock(prebranchDest);
}
llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
assert(normalEntry && !normalEntry->use_empty());
ForwardPrebranchedFallthrough(FallthroughSource,
normalEntry, prebranchDest);
}
// If we don't need the cleanup at all, we're done.
if (!RequiresNormalCleanup && !RequiresEHCleanup) {
destroyOptimisticNormalEntry(*this, Scope);
EHStack.popCleanup(); // safe because there are no fixups
assert(EHStack.getNumBranchFixups() == 0 ||
EHStack.hasNormalCleanups());
return;
}
// Copy the cleanup emission data out. This uses either a stack
// array or malloc'd memory, depending on the size, which is
// behavior that SmallVector would provide, if we could use it
// here. Unfortunately, if you ask for a SmallVector<char>, the
// alignment isn't sufficient.
auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
alignas(EHScopeStack::ScopeStackAlignment) char
CleanupBufferStack[8 * sizeof(void *)];
std::unique_ptr<char[]> CleanupBufferHeap;
size_t CleanupSize = Scope.getCleanupSize();
EHScopeStack::Cleanup *Fn;
if (CleanupSize <= sizeof(CleanupBufferStack)) {
memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
} else {
CleanupBufferHeap.reset(new char[CleanupSize]);
memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
}
EHScopeStack::Cleanup::Flags cleanupFlags;
if (Scope.isNormalCleanup())
cleanupFlags.setIsNormalCleanupKind();
if (Scope.isEHCleanup())
cleanupFlags.setIsEHCleanupKind();
if (!RequiresNormalCleanup) {
destroyOptimisticNormalEntry(*this, Scope);
EHStack.popCleanup();
} else {
// If we have a fallthrough and no other need for the cleanup,
// emit it directly.
if (HasFallthrough && !HasPrebranchedFallthrough &&
!HasFixups && !HasExistingBranches) {
destroyOptimisticNormalEntry(*this, Scope);
EHStack.popCleanup();
EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
// Otherwise, the best approach is to thread everything through
// the cleanup block and then try to clean up after ourselves.
} else {
// Force the entry block to exist.
llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);
// I. Set up the fallthrough edge in.
CGBuilderTy::InsertPoint savedInactiveFallthroughIP;
// If there's a fallthrough, we need to store the cleanup
// destination index. For fall-throughs this is always zero.
if (HasFallthrough) {
if (!HasPrebranchedFallthrough)
Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());
// Otherwise, save and clear the IP if we don't have fallthrough
// because the cleanup is inactive.
} else if (FallthroughSource) {
assert(!IsActive && "source without fallthrough for active cleanup");
savedInactiveFallthroughIP = Builder.saveAndClearIP();
}
// II. Emit the entry block. This implicitly branches to it if
// we have fallthrough. All the fixups and existing branches
// should already be branched to it.
EmitBlock(NormalEntry);
// III. Figure out where we're going and build the cleanup
// epilogue.
bool HasEnclosingCleanups =
(Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
// Compute the branch-through dest if we need it:
// - if there are branch-throughs threaded through the scope
// - if fall-through is a branch-through
// - if there are fixups that will be optimistically forwarded
// to the enclosing cleanup
llvm::BasicBlock *BranchThroughDest = nullptr;
if (Scope.hasBranchThroughs() ||
(FallthroughSource && FallthroughIsBranchThrough) ||
(HasFixups && HasEnclosingCleanups)) {
assert(HasEnclosingCleanups);
EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
}
llvm::BasicBlock *FallthroughDest = nullptr;
SmallVector<llvm::Instruction*, 2> InstsToAppend;
// If there's exactly one branch-after and no other threads,
// we can route it without a switch.
if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
Scope.getNumBranchAfters() == 1) {
assert(!BranchThroughDest || !IsActive);
// Clean up the possibly dead store to the cleanup dest slot.
llvm::Instruction *NormalCleanupDestSlot =
cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
if (NormalCleanupDestSlot->hasOneUse()) {
NormalCleanupDestSlot->user_back()->eraseFromParent();
NormalCleanupDestSlot->eraseFromParent();
NormalCleanupDest = Address::invalid();
}
llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));
// Build a switch-out if we need it:
// - if there are branch-afters threaded through the scope
// - if fall-through is a branch-after
// - if there are fixups that have nowhere left to go and
// so must be immediately resolved
} else if (Scope.getNumBranchAfters() ||
(HasFallthrough && !FallthroughIsBranchThrough) ||
(HasFixups && !HasEnclosingCleanups)) {
llvm::BasicBlock *Default =
(BranchThroughDest ? BranchThroughDest : getUnreachableBlock());
// TODO: base this on the number of branch-afters and fixups
const unsigned SwitchCapacity = 10;
llvm::LoadInst *Load =
createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
nullptr);
llvm::SwitchInst *Switch =
llvm::SwitchInst::Create(Load, Default, SwitchCapacity);
InstsToAppend.push_back(Load);
InstsToAppend.push_back(Switch);
// Branch-after fallthrough.
if (FallthroughSource && !FallthroughIsBranchThrough) {
FallthroughDest = createBasicBlock("cleanup.cont");
if (HasFallthrough)
Switch->addCase(Builder.getInt32(0), FallthroughDest);
}
for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
Switch->addCase(Scope.getBranchAfterIndex(I),
Scope.getBranchAfterBlock(I));
}
// If there aren't any enclosing cleanups, we can resolve all
// the fixups now.
if (HasFixups && !HasEnclosingCleanups)
ResolveAllBranchFixups(*this, Switch, NormalEntry);
} else {
// We should always have a branch-through destination in this case.
assert(BranchThroughDest);
InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
}
// IV. Pop the cleanup and emit it.
EHStack.popCleanup();
assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
// Append the prepared cleanup prologue from above.
llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
NormalExit->getInstList().push_back(InstsToAppend[I]);
// Optimistically hope that any fixups will continue falling through.
for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
I < E; ++I) {
BranchFixup &Fixup = EHStack.getBranchFixup(I);
if (!Fixup.Destination) continue;
if (!Fixup.OptimisticBranchBlock) {
createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
getNormalCleanupDestSlot(),
Fixup.InitialBranch);
Fixup.InitialBranch->setSuccessor(0, NormalEntry);
}
Fixup.OptimisticBranchBlock = NormalExit;
}
// V. Set up the fallthrough edge out.
// Case 1: a fallthrough source exists but doesn't branch to the
// cleanup because the cleanup is inactive.
if (!HasFallthrough && FallthroughSource) {
// Prebranched fallthrough was forwarded earlier.
// Non-prebranched fallthrough doesn't need to be forwarded.
// Either way, all we need to do is restore the IP we cleared before.
assert(!IsActive);
Builder.restoreIP(savedInactiveFallthroughIP);
// Case 2: a fallthrough source exists and should branch to the
// cleanup, but we're not supposed to branch through to the next
// cleanup.
} else if (HasFallthrough && FallthroughDest) {
assert(!FallthroughIsBranchThrough);
EmitBlock(FallthroughDest);
// Case 3: a fallthrough source exists and should branch to the
// cleanup and then through to the next.
} else if (HasFallthrough) {
// Everything is already set up for this.
// Case 4: no fallthrough source exists.
} else {
Builder.ClearInsertionPoint();
}
// VI. Assorted cleaning.
// Check whether we can merge NormalEntry into a single predecessor.
// This might invalidate (non-IR) pointers to NormalEntry.
llvm::BasicBlock *NewNormalEntry =
SimplifyCleanupEntry(*this, NormalEntry);
// If it did invalidate those pointers, and NormalEntry was the same
// as NormalExit, go back and patch up the fixups.
if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
I < E; ++I)
EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
}
}
assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
// Emit the EH cleanup if required.
if (RequiresEHCleanup) {
CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
EmitBlock(EHEntry);
llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);
// Push a terminate scope or cleanupendpad scope around the potentially
// throwing cleanups. For funclet EH personalities, the cleanupendpad models
// program termination when cleanups throw.
bool PushedTerminate = false;
SaveAndRestore<llvm::Instruction *> RestoreCurrentFuncletPad(
CurrentFuncletPad);
llvm::CleanupPadInst *CPI = nullptr;
const EHPersonality &Personality = EHPersonality::get(*this);
if (Personality.usesFuncletPads()) {
llvm::Value *ParentPad = CurrentFuncletPad;
if (!ParentPad)
ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
}
// Non-MSVC personalities need to terminate when an EH cleanup throws.
if (!Personality.isMSVCPersonality()) {
EHStack.pushTerminate();
PushedTerminate = true;
}
// We only actually emit the cleanup code if the cleanup is either
// active or was used before it was deactivated.
if (EHActiveFlag.isValid() || IsActive) {
cleanupFlags.setIsForEHCleanup();
EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
}
if (CPI)
Builder.CreateCleanupRet(CPI, NextAction);
else
Builder.CreateBr(NextAction);
// Leave the terminate scope.
if (PushedTerminate)
EHStack.popTerminate();
Builder.restoreIP(SavedIP);
SimplifyCleanupEntry(*this, EHEntry);
}
}
/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run. 'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
&& "stale jump destination");
// Calculate the innermost active normal cleanup.
EHScopeStack::stable_iterator TopCleanup =
EHStack.getInnermostActiveNormalCleanup();
// If we're not in an active normal cleanup scope, or if the
// destination scope is within the innermost active normal cleanup
// scope, we don't need to worry about fixups.
if (TopCleanup == EHStack.stable_end() ||
TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
return true;
// Otherwise, we might need some cleanups.
return false;
}
/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope. The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
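///
/// For example, a 'goto' past locals with destructors is redirected to
/// the innermost cleanup's entry block; a destination index stored in
/// the normal cleanup destination slot then selects, at each threaded
/// cleanup's exit, where control ultimately resumes.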
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
&& "stale jump destination");
if (!HaveInsertPoint())
return;
// Create the branch.
llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());
// Calculate the innermost active normal cleanup.
EHScopeStack::stable_iterator
TopCleanup = EHStack.getInnermostActiveNormalCleanup();
// If we're not in an active normal cleanup scope, or if the
// destination scope is within the innermost active normal cleanup
// scope, we don't need to worry about fixups.
if (TopCleanup == EHStack.stable_end() ||
TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
Builder.ClearInsertionPoint();
return;
}
// If we can't resolve the destination cleanup scope, just add this
// to the current cleanup scope as a branch fixup.
if (!Dest.getScopeDepth().isValid()) {
BranchFixup &Fixup = EHStack.addBranchFixup();
Fixup.Destination = Dest.getBlock();
Fixup.DestinationIndex = Dest.getDestIndex();
Fixup.InitialBranch = BI;
Fixup.OptimisticBranchBlock = nullptr;
Builder.ClearInsertionPoint();
return;
}
// Otherwise, thread through all the normal cleanups in scope.
// Store the index at the start.
llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);
// Adjust BI to point to the first cleanup block.
{
EHCleanupScope &Scope =
cast<EHCleanupScope>(*EHStack.find(TopCleanup));
BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
}
// Add this destination to all the scopes involved.
EHScopeStack::stable_iterator I = TopCleanup;
EHScopeStack::stable_iterator E = Dest.getScopeDepth();
if (E.strictlyEncloses(I)) {
while (true) {
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
assert(Scope.isNormalCleanup());
I = Scope.getEnclosingNormalCleanup();
// If this is the last cleanup we're propagating through, tell it
// that there's a resolved jump moving through it.
if (!E.strictlyEncloses(I)) {
Scope.addBranchAfter(Index, Dest.getBlock());
break;
}
// Otherwise, tell the scope that there's a jump propagating
// through it. If this isn't new information, all the rest of
// the work has been done before.
if (!Scope.addBranchThrough(Dest.getBlock()))
break;
}
}
Builder.ClearInsertionPoint();
}
static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
EHScopeStack::stable_iterator C) {
// If we needed a normal block for any reason, that counts.
if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
return true;
// Check whether any enclosed cleanups were needed.
for (EHScopeStack::stable_iterator
I = EHStack.getInnermostNormalCleanup();
I != C; ) {
assert(C.strictlyEncloses(I));
EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
if (S.getNormalBlock()) return true;
I = S.getEnclosingNormalCleanup();
}
return false;
}
static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
EHScopeStack::stable_iterator cleanup) {
// If we needed an EH block for any reason, that counts.
if (EHStack.find(cleanup)->hasEHBranches())
return true;
// Check whether any enclosed cleanups were needed.
for (EHScopeStack::stable_iterator
i = EHStack.getInnermostEHScope(); i != cleanup; ) {
assert(cleanup.strictlyEncloses(i));
EHScope &scope = *EHStack.find(i);
if (scope.hasEHBranches())
return true;
i = scope.getEnclosingEHScope();
}
return false;
}
enum ForActivation_t {
ForActivation,
ForDeactivation
};
/// The given cleanup block is changing activation state. Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
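///
/// The variable is an i1 flag ("cleanup.isactive") recording whether
/// the cleanup was active when control passed it; when the flag is in
/// use, the emitted cleanup loads it and skips its body if false.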
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
EHScopeStack::stable_iterator C,
ForActivation_t kind,
llvm::Instruction *dominatingIP) {
EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));
// We always need the flag if we're activating the cleanup in a
// conditional context, because we have to assume that the current
// location doesn't necessarily dominate the cleanup's code.
bool isActivatedInConditional =
(kind == ForActivation && CGF.isInConditionalBranch());
bool needFlag = false;
// Calculate whether the cleanup was used:
// - as a normal cleanup
if (Scope.isNormalCleanup() &&
(isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
Scope.setTestFlagInNormalCleanup();
needFlag = true;
}
// - as an EH cleanup
if (Scope.isEHCleanup() &&
(isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
Scope.setTestFlagInEHCleanup();
needFlag = true;
}
// If it hasn't yet been used as either, we're done.
if (!needFlag) return;
Address var = Scope.getActiveFlag();
if (!var.isValid()) {
var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
"cleanup.isactive");
Scope.setActiveFlag(var);
assert(dominatingIP && "no existing variable and no dominating IP!");
// Initialize to true or false depending on whether it was
// active up to this point.
llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);
// If we're in a conditional block, ignore the dominating IP and
// use the outermost conditional branch.
if (CGF.isInConditionalBranch()) {
CGF.setBeforeOutermostConditional(value, var);
} else {
createStoreInstBefore(value, var, dominatingIP);
}
}
CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}
/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
llvm::Instruction *dominatingIP) {
assert(C != EHStack.stable_end() && "activating bottom of stack?");
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
assert(!Scope.isActive() && "double activation");
SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);
Scope.setActive(true);
}
/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
llvm::Instruction *dominatingIP) {
assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
assert(Scope.isActive() && "double deactivation");
// If it's the top of the stack, just pop it, but do so only if it belongs
// to the current RunCleanupsScope.
if (C == EHStack.stable_begin() &&
CurrentCleanupScopeDepth.strictlyEncloses(C)) {
// If it's a normal cleanup, we need to pretend that the
// fallthrough is unreachable.
CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
PopCleanupBlock();
Builder.restoreIP(SavedIP);
return;
}
// Otherwise, follow the general case.
SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);
Scope.setActive(false);
}
Address CodeGenFunction::getNormalCleanupDestSlot() {
if (!NormalCleanupDest.isValid())
NormalCleanupDest =
CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
return NormalCleanupDest;
}
/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
QualType TempType,
Address Ptr) {
pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
/*useEHCleanup*/ true);
}