path: root/contrib/llvm/lib/CodeGen/SjLjEHPrepare.cpp
//===- SjLjEHPrepare.cpp - Eliminate Invoke & Unwind instructions ---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This transformation is designed for use by code generators which use SjLj
// based exception handling.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "sjljehprepare"
#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include <set>
using namespace llvm;

STATISTIC(NumInvokes, "Number of invokes replaced");
STATISTIC(NumSpilled, "Number of registers live across unwind edges");

namespace {
  class SjLjEHPrepare : public FunctionPass {
    const TargetLoweringBase *TLI;
    Type *FunctionContextTy;
    Constant *RegisterFn;
    Constant *UnregisterFn;
    Constant *BuiltinSetjmpFn;
    Constant *FrameAddrFn;
    Constant *StackAddrFn;
    Constant *StackRestoreFn;
    Constant *LSDAAddrFn;
    Value *PersonalityFn;
    Constant *CallSiteFn;
    Constant *FuncCtxFn;
    AllocaInst *FuncCtx;
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit SjLjEHPrepare(const TargetLoweringBase *tli = NULL)
      : FunctionPass(ID), TLI(tli) { }
    bool doInitialization(Module &M);
    bool runOnFunction(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {}
    const char *getPassName() const {
      return "SJLJ Exception Handling preparation";
    }

  private:
    bool setupEntryBlockAndCallSites(Function &F);
    void substituteLPadValues(LandingPadInst *LPI, Value *ExnVal,
                              Value *SelVal);
    Value *setupFunctionContext(Function &F, ArrayRef<LandingPadInst*> LPads);
    void lowerIncomingArguments(Function &F);
    void lowerAcrossUnwindEdges(Function &F, ArrayRef<InvokeInst*> Invokes);
    void insertCallSiteStore(Instruction *I, int Number);
  };
} // end anonymous namespace

char SjLjEHPrepare::ID = 0;

// Public Interface to the SjLjEHPrepare pass.
FunctionPass *llvm::createSjLjEHPreparePass(const TargetLoweringBase *TLI) {
  return new SjLjEHPrepare(TLI);
}

// doInitialization - Set up declarations and types needed to process
// exceptions.
bool SjLjEHPrepare::doInitialization(Module &M) {
  // Build the function context structure.
  // builtin_setjmp uses a five-word jbuf.
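  // This layout is shared with the SjLj unwinding runtime: a pointer to it is
  // passed to the _Unwind_SjLj_Register/_Unwind_SjLj_Unregister calls declared
  // below.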
  Type *VoidPtrTy = Type::getInt8PtrTy(M.getContext());
  Type *Int32Ty = Type::getInt32Ty(M.getContext());
  FunctionContextTy =
    StructType::get(VoidPtrTy,                        // __prev
                    Int32Ty,                          // call_site
                    ArrayType::get(Int32Ty, 4),       // __data
                    VoidPtrTy,                        // __personality
                    VoidPtrTy,                        // __lsda
                    ArrayType::get(VoidPtrTy, 5),     // __jbuf
                    NULL);
  RegisterFn = M.getOrInsertFunction("_Unwind_SjLj_Register",
                                     Type::getVoidTy(M.getContext()),
                                     PointerType::getUnqual(FunctionContextTy),
                                     (Type *)0);
  UnregisterFn =
    M.getOrInsertFunction("_Unwind_SjLj_Unregister",
                          Type::getVoidTy(M.getContext()),
                          PointerType::getUnqual(FunctionContextTy),
                          (Type *)0);
  FrameAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::frameaddress);
  StackAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::stacksave);
  StackRestoreFn = Intrinsic::getDeclaration(&M, Intrinsic::stackrestore);
  BuiltinSetjmpFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_setjmp);
  LSDAAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_lsda);
  CallSiteFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_callsite);
  FuncCtxFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_functioncontext);
  PersonalityFn = 0;

  return true;
}

/// insertCallSiteStore - Insert a store of the call-site value to the
/// function context
void SjLjEHPrepare::insertCallSiteStore(Instruction *I, int Number) {
  IRBuilder<> Builder(I);

  // Get a reference to the call_site field.
  Type *Int32Ty = Type::getInt32Ty(I->getContext());
  Value *Zero = ConstantInt::get(Int32Ty, 0);
  Value *One = ConstantInt::get(Int32Ty, 1);
  Value *Idxs[2] = { Zero, One };
  Value *CallSite = Builder.CreateGEP(FuncCtx, Idxs, "call_site");

  // Insert a store of the call-site number
  ConstantInt *CallSiteNoC = ConstantInt::get(Type::getInt32Ty(I->getContext()),
                                              Number);
  Builder.CreateStore(CallSiteNoC, CallSite, /*isVolatile=*/true);
}

/// MarkBlocksLiveIn - Insert BB and all of its predecessors into LiveBBs until
/// we reach blocks we've already seen.
static void MarkBlocksLiveIn(BasicBlock *BB,
                             SmallPtrSet<BasicBlock*, 64> &LiveBBs) {
  if (!LiveBBs.insert(BB)) return; // already been here.

  for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
    MarkBlocksLiveIn(*PI, LiveBBs);
}

/// substituteLPadValues - Substitute the values returned by the landingpad
/// instruction with those returned by the personality function.
void SjLjEHPrepare::substituteLPadValues(LandingPadInst *LPI, Value *ExnVal,
                                         Value *SelVal) {
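  // Copy the uses into a worklist up front so that erasing dead extractvalue
  // users below does not invalidate the use iterators.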
  SmallVector<Value*, 8> UseWorkList(LPI->use_begin(), LPI->use_end());
  while (!UseWorkList.empty()) {
    Value *Val = UseWorkList.pop_back_val();
    ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(Val);
    if (!EVI) continue;
    if (EVI->getNumIndices() != 1) continue;
    if (*EVI->idx_begin() == 0)
      EVI->replaceAllUsesWith(ExnVal);
    else if (*EVI->idx_begin() == 1)
      EVI->replaceAllUsesWith(SelVal);
    if (EVI->getNumUses() == 0)
      EVI->eraseFromParent();
  }

  if (LPI->getNumUses() == 0)  return;

  // There are still some uses of LPI. Construct an aggregate with the exception
  // values and replace the LPI with that aggregate.
  Type *LPadType = LPI->getType();
  Value *LPadVal = UndefValue::get(LPadType);
  IRBuilder<>
    Builder(llvm::next(BasicBlock::iterator(cast<Instruction>(SelVal))));
  LPadVal = Builder.CreateInsertValue(LPadVal, ExnVal, 0, "lpad.val");
  LPadVal = Builder.CreateInsertValue(LPadVal, SelVal, 1, "lpad.val");

  LPI->replaceAllUsesWith(LPadVal);
}

/// setupFunctionContext - Allocate the function context on the stack and fill
/// it with all of the data that we know at this point.
Value *SjLjEHPrepare::
setupFunctionContext(Function &F, ArrayRef<LandingPadInst*> LPads) {
  BasicBlock *EntryBB = F.begin();

  // Create an alloca for the incoming jump buffer ptr and the new jump buffer
  // that needs to be restored on all exits from the function. This is an alloca
  // because the value needs to be added to the global context list.
  unsigned Align =
    TLI->getDataLayout()->getPrefTypeAlignment(FunctionContextTy);
  FuncCtx =
    new AllocaInst(FunctionContextTy, 0, Align, "fn_context", EntryBB->begin());

  // Fill in the function context structure.
  for (unsigned I = 0, E = LPads.size(); I != E; ++I) {
    LandingPadInst *LPI = LPads[I];
    IRBuilder<> Builder(LPI->getParent()->getFirstInsertionPt());

    // Reference the __data field.
    Value *FCData = Builder.CreateConstGEP2_32(FuncCtx, 0, 2, "__data");

    // The exception values come back in context->__data[0].
    Value *ExceptionAddr = Builder.CreateConstGEP2_32(FCData, 0, 0,
                                                      "exception_gep");
    Value *ExnVal = Builder.CreateLoad(ExceptionAddr, true, "exn_val");
    ExnVal = Builder.CreateIntToPtr(ExnVal, Builder.getInt8PtrTy());

    Value *SelectorAddr = Builder.CreateConstGEP2_32(FCData, 0, 1,
                                                     "exn_selector_gep");
    Value *SelVal = Builder.CreateLoad(SelectorAddr, true, "exn_selector_val");

    substituteLPadValues(LPI, ExnVal, SelVal);
  }

  // Personality function
  IRBuilder<> Builder(EntryBB->getTerminator());
  if (!PersonalityFn)
    PersonalityFn = LPads[0]->getPersonalityFn();
  Value *PersonalityFieldPtr = Builder.CreateConstGEP2_32(FuncCtx, 0, 3,
                                                          "pers_fn_gep");
  Builder.CreateStore(PersonalityFn, PersonalityFieldPtr, /*isVolatile=*/true);

  // LSDA address
  Value *LSDA = Builder.CreateCall(LSDAAddrFn, "lsda_addr");
  Value *LSDAFieldPtr = Builder.CreateConstGEP2_32(FuncCtx, 0, 4, "lsda_gep");
  Builder.CreateStore(LSDA, LSDAFieldPtr, /*isVolatile=*/true);

  return FuncCtx;
}

/// lowerIncomingArguments - To avoid having to handle incoming arguments
/// specially, we lower each arg to a copy instruction in the entry block. This
/// ensures that the argument value itself cannot be live out of the entry
/// block.
void SjLjEHPrepare::lowerIncomingArguments(Function &F) {
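  // Skip past the static allocas at the top of the entry block so the
  // argument copies are inserted after them and the allocas stay grouped at
  // the start of the block.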
  BasicBlock::iterator AfterAllocaInsPt = F.begin()->begin();
  while (isa<AllocaInst>(AfterAllocaInsPt) &&
         isa<ConstantInt>(cast<AllocaInst>(AfterAllocaInsPt)->getArraySize()))
    ++AfterAllocaInsPt;

  for (Function::arg_iterator
         AI = F.arg_begin(), AE = F.arg_end(); AI != AE; ++AI) {
    Type *Ty = AI->getType();

    // Aggregate types can't be cast, but are legal argument types, so we have
    // to handle them differently. We use an extract/insert pair as a
    // lightweight method to achieve the same goal.
    if (isa<StructType>(Ty) || isa<ArrayType>(Ty) || isa<VectorType>(Ty)) {
      Instruction *EI = ExtractValueInst::Create(AI, 0, "", AfterAllocaInsPt);
      Instruction *NI = InsertValueInst::Create(AI, EI, 0);
      NI->insertAfter(EI);
      AI->replaceAllUsesWith(NI);

      // Set the operand of the instructions back to the AllocaInst.
      EI->setOperand(0, AI);
      NI->setOperand(0, AI);
    } else {
      // This is always a no-op cast because we're casting AI to AI->getType()
      // so src and destination types are identical. BitCast is the only
      // possibility.
      CastInst *NC =
        new BitCastInst(AI, AI->getType(), AI->getName() + ".tmp",
                        AfterAllocaInsPt);
      AI->replaceAllUsesWith(NC);

      // Set the operand of the cast instruction back to the AllocaInst.
      // Normally it's forbidden to replace a CastInst's operand because it
      // could cause the opcode to reflect an illegal conversion. However, we're
      // replacing it here with the same value it was constructed with.  We do
      // this because the above replaceAllUsesWith() clobbered the operand, but
      // we want this one to remain.
      NC->setOperand(0, AI);
    }
  }
}

/// lowerAcrossUnwindEdges - Find all variables which are alive across an unwind
/// edge and spill them.
void SjLjEHPrepare::lowerAcrossUnwindEdges(Function &F,
                                           ArrayRef<InvokeInst*> Invokes) {
  // Scan the code looking for instructions with bad live ranges.
  for (Function::iterator
         BB = F.begin(), BBE = F.end(); BB != BBE; ++BB) {
    for (BasicBlock::iterator
           II = BB->begin(), IIE = BB->end(); II != IIE; ++II) {
      // Ignore obvious cases we don't have to handle. In particular, most
      // instructions either have no uses or only have a single use inside the
      // current block. Ignore them quickly.
      Instruction *Inst = II;
      if (Inst->use_empty()) continue;
      if (Inst->hasOneUse() &&
          cast<Instruction>(Inst->use_back())->getParent() == BB &&
          !isa<PHINode>(Inst->use_back())) continue;

      // If this is an alloca in the entry block, it's not a real register
      // value.
      if (AllocaInst *AI = dyn_cast<AllocaInst>(Inst))
        if (isa<ConstantInt>(AI->getArraySize()) && BB == F.begin())
          continue;

      // Avoid iterator invalidation by copying users to a temporary vector.
      SmallVector<Instruction*, 16> Users;
      for (Value::use_iterator
             UI = Inst->use_begin(), E = Inst->use_end(); UI != E; ++UI) {
        Instruction *User = cast<Instruction>(*UI);
        if (User->getParent() != BB || isa<PHINode>(User))
          Users.push_back(User);
      }

      // Find all of the blocks that this value is live in.
      SmallPtrSet<BasicBlock*, 64> LiveBBs;
      LiveBBs.insert(Inst->getParent());
      while (!Users.empty()) {
        Instruction *U = Users.back();
        Users.pop_back();

        if (!isa<PHINode>(U)) {
          MarkBlocksLiveIn(U->getParent(), LiveBBs);
        } else {
          // Uses in a PHI node occur in the corresponding predecessor block.
          PHINode *PN = cast<PHINode>(U);
          for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
            if (PN->getIncomingValue(i) == Inst)
              MarkBlocksLiveIn(PN->getIncomingBlock(i), LiveBBs);
        }
      }

      // Now that we know all of the blocks that this thing is live in, see if
      // it includes any of the unwind locations.
      bool NeedsSpill = false;
      for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
        BasicBlock *UnwindBlock = Invokes[i]->getUnwindDest();
        if (UnwindBlock != BB && LiveBBs.count(UnwindBlock)) {
          DEBUG(dbgs() << "SJLJ Spill: " << *Inst << " around "
                << UnwindBlock->getName() << "\n");
          NeedsSpill = true;
          break;
        }
      }

      // If we decided we need a spill, do it.
      // FIXME: Spilling this way is overkill, as it forces all uses of
      // the value to be reloaded from the stack slot, even those that aren't
      // in the unwind blocks. We should be more selective.
      if (NeedsSpill) {
        DemoteRegToStack(*Inst, true);
        ++NumSpilled;
      }
    }
  }

  // Go through the landing pads and remove any PHIs there.
  for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
    BasicBlock *UnwindBlock = Invokes[i]->getUnwindDest();
    LandingPadInst *LPI = UnwindBlock->getLandingPadInst();

    // Place PHIs into a set to avoid invalidating the iterator.
    SmallPtrSet<PHINode*, 8> PHIsToDemote;
    for (BasicBlock::iterator
           PN = UnwindBlock->begin(); isa<PHINode>(PN); ++PN)
      PHIsToDemote.insert(cast<PHINode>(PN));
    if (PHIsToDemote.empty()) continue;

    // Demote the PHIs to the stack.
    for (SmallPtrSet<PHINode*, 8>::iterator
           I = PHIsToDemote.begin(), E = PHIsToDemote.end(); I != E; ++I)
      DemotePHIToStack(*I);

    // Move the landingpad instruction back to the top of the landing pad block.
    LPI->moveBefore(UnwindBlock->begin());
  }
}

/// setupEntryBlockAndCallSites - Set up the entry block by creating and filling
/// the function context and marking the call sites with the appropriate
/// values. These values are used by the DWARF EH emitter.
bool SjLjEHPrepare::setupEntryBlockAndCallSites(Function &F) {
  SmallVector<ReturnInst*, 16> Returns;
  SmallVector<InvokeInst*, 16> Invokes;
  SmallSetVector<LandingPadInst*, 16> LPads;

  // Look through the terminators of the basic blocks to find invokes.
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      if (Function *Callee = II->getCalledFunction())
        if (Callee->isIntrinsic() &&
            Callee->getIntrinsicID() == Intrinsic::donothing) {
          // Remove the NOP invoke.
          BranchInst::Create(II->getNormalDest(), II);
          II->eraseFromParent();
          continue;
        }

      Invokes.push_back(II);
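      // LPads is a SetVector, so a landing pad shared by several invokes is
      // only recorded once.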
      LPads.insert(II->getUnwindDest()->getLandingPadInst());
    } else if (ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator())) {
      Returns.push_back(RI);
    }

  if (Invokes.empty()) return false;

  NumInvokes += Invokes.size();

  lowerIncomingArguments(F);
  lowerAcrossUnwindEdges(F, Invokes);

  Value *FuncCtx =
    setupFunctionContext(F, makeArrayRef(LPads.begin(), LPads.end()));
  BasicBlock *EntryBB = F.begin();
  IRBuilder<> Builder(EntryBB->getTerminator());

  // Get a reference to the jump buffer.
  Value *JBufPtr = Builder.CreateConstGEP2_32(FuncCtx, 0, 5, "jbuf_gep");

  // Save the frame pointer.
  Value *FramePtr = Builder.CreateConstGEP2_32(JBufPtr, 0, 0, "jbuf_fp_gep");

  Value *Val = Builder.CreateCall(FrameAddrFn, Builder.getInt32(0), "fp");
  Builder.CreateStore(Val, FramePtr, /*isVolatile=*/true);

  // Save the stack pointer.
  Value *StackPtr = Builder.CreateConstGEP2_32(JBufPtr, 0, 2, "jbuf_sp_gep");

  Val = Builder.CreateCall(StackAddrFn, "sp");
  Builder.CreateStore(Val, StackPtr, /*isVolatile=*/true);

  // Call the setjmp intrinsic. It fills in the rest of the jmpbuf.
  Value *SetjmpArg = Builder.CreateBitCast(JBufPtr, Builder.getInt8PtrTy());
  Builder.CreateCall(BuiltinSetjmpFn, SetjmpArg);

  // Store a pointer to the function context so that the back-end will know
  // where to look for it.
  Value *FuncCtxArg = Builder.CreateBitCast(FuncCtx, Builder.getInt8PtrTy());
  Builder.CreateCall(FuncCtxFn, FuncCtxArg);

  // At this point we are all set up; update the invoke instructions to mark
  // their call_site values.
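  // Invokes are numbered starting at 1; calls that may throw but have no
  // landing pad are given -1 further below.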
  for (unsigned I = 0, E = Invokes.size(); I != E; ++I) {
    insertCallSiteStore(Invokes[I], I + 1);

    ConstantInt *CallSiteNum =
      ConstantInt::get(Type::getInt32Ty(F.getContext()), I + 1);

    // Record the call site value for the back end so it stays associated with
    // the invoke.
    CallInst::Create(CallSiteFn, CallSiteNum, "", Invokes[I]);
  }

  // Mark call instructions that aren't nounwind as no-action (call_site ==
  // -1). Skip the entry block: until the entry block registers the function
  // context, any unexpected exception unwinds directly to the caller's
  // context, which is what we want anyway, so there is nothing to do there.
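  // (The pre-increment in the loop condition below is what skips the entry
  //  block.)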
  for (Function::iterator BB = F.begin(), E = F.end(); ++BB != E;)
    for (BasicBlock::iterator I = BB->begin(), end = BB->end(); I != end; ++I)
      if (CallInst *CI = dyn_cast<CallInst>(I)) {
        if (!CI->doesNotThrow())
          insertCallSiteStore(CI, -1);
      } else if (ResumeInst *RI = dyn_cast<ResumeInst>(I)) {
        insertCallSiteStore(RI, -1);
      }

  // Register the function context and make sure it's known to not throw
  CallInst *Register = CallInst::Create(RegisterFn, FuncCtx, "",
                                        EntryBB->getTerminator());
  Register->setDoesNotThrow();

  // Following any allocas not in the entry block, update the saved SP in the
  // jmpbuf to the new value.
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    if (BB == F.begin())
      continue;
    for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
      if (CallInst *CI = dyn_cast<CallInst>(I)) {
        if (CI->getCalledFunction() != StackRestoreFn)
          continue;
      } else if (!isa<AllocaInst>(I)) {
        continue;
      }
      Instruction *StackAddr = CallInst::Create(StackAddrFn, "sp");
      StackAddr->insertAfter(I);
      Instruction *StoreStackAddr = new StoreInst(StackAddr, StackPtr, true);
      StoreStackAddr->insertAfter(StackAddr);
    }
  }

  // Finally, for any returns from this function, if this function contains an
  // invoke, add a call to unregister the function context.
  for (unsigned I = 0, E = Returns.size(); I != E; ++I)
    CallInst::Create(UnregisterFn, FuncCtx, "", Returns[I]);

  return true;
}

bool SjLjEHPrepare::runOnFunction(Function &F) {
  bool Res = setupEntryBlockAndCallSites(F);
  return Res;
}