Diffstat (limited to 'lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp')
-rw-r--r--  lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp | 71 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++-
1 file changed, 70 insertions(+), 1 deletion(-)
diff --git a/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp b/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
index 7446a51..b2f2e24 100644
--- a/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
+++ b/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
@@ -22,6 +22,72 @@ using namespace llvm;
 
 STATISTIC(NumDeadStore, "Number of dead stores eliminated");
 
+// Try to kill dead allocas by walking through their uses until we see some
+// use that could escape. This is a conservative analysis which tries to
+// handle GEPs, bitcasts, stores, and no-op intrinsics. These tend to be the
+// things left after inlining and SROA finish chewing on an alloca.
+static Instruction *removeDeadAlloca(InstCombiner &IC, AllocaInst &AI) {
+  SmallVector<Instruction *, 4> Worklist, DeadStores;
+  Worklist.push_back(&AI);
+  do {
+    Instruction *PI = Worklist.pop_back_val();
+    for (Value::use_iterator UI = PI->use_begin(), UE = PI->use_end();
+         UI != UE; ++UI) {
+      Instruction *I = cast<Instruction>(*UI);
+      switch (I->getOpcode()) {
+      default:
+        // Give up the moment we see something we can't handle.
+        return 0;
+
+      case Instruction::GetElementPtr:
+      case Instruction::BitCast:
+        Worklist.push_back(I);
+        continue;
+
+      case Instruction::Call:
+        // We can handle a limited subset of calls to no-op intrinsics.
+        if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
+          switch (II->getIntrinsicID()) {
+          case Intrinsic::dbg_declare:
+          case Intrinsic::dbg_value:
+          case Intrinsic::invariant_start:
+          case Intrinsic::invariant_end:
+          case Intrinsic::lifetime_start:
+          case Intrinsic::lifetime_end:
+            continue;
+          default:
+            return 0;
+          }
+        }
+        // Reject everything else.
+        return 0;
+
+      case Instruction::Store: {
+        // Stores into the alloca are only live if the alloca is live.
+        StoreInst *SI = cast<StoreInst>(I);
+        // We can eliminate atomic stores, but not volatile ones.
+        if (SI->isVolatile())
+          return 0;
+        // The store is only trivially safe if the pointer is the destination
+        // as opposed to the value. We're conservative here and don't check
+        // for the case where we store the address of a dead alloca into a
+        // dead alloca.
+        if (SI->getPointerOperand() != PI)
+          return 0;
+        DeadStores.push_back(I);
+        continue;
+      }
+      }
+    }
+  } while (!Worklist.empty());
+
+  // The alloca is dead. Kill off all the stores to it, and then replace it
+  // with undef.
+  while (!DeadStores.empty())
+    IC.EraseInstFromFunction(*DeadStores.pop_back_val());
+  return IC.ReplaceInstUsesWith(AI, UndefValue::get(AI.getType()));
+}
+
 Instruction *InstCombiner::visitAllocaInst(AllocaInst &AI) {
   // Ensure that the alloca array size argument has type intptr_t, so that
   // any casting is exposed early.
@@ -81,7 +147,10 @@ Instruction *InstCombiner::visitAllocaInst(AllocaInst &AI) {
       AI.setAlignment(TD->getPrefTypeAlignment(AI.getAllocatedType()));
   }
 
-  return 0;
+  // Try to aggressively remove allocas which are only used for GEPs, lifetime
+  // markers, and stores. This happens when SROA iteratively promotes stores
+  // out of the alloca, and we need to clean up after it.
+  return removeDeadAlloca(*this, AI);
 }
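
For illustration only (not part of the commit): a minimal sketch of the kind of IR the new removeDeadAlloca combine is aimed at, written in the typed-pointer IR syntax of this era. The function name @f, the array type, and the constants are made up for the example; the point is that the alloca is reached only through a bitcast, a GEP, lifetime markers, and a store whose pointer operand is derived from it, so the conservative walk never sees an escaping use.

define void @f() {
entry:
  %buf = alloca [4 x i32], align 4                              ; dead alloca
  %raw = bitcast [4 x i32]* %buf to i8*                         ; handled: bitcast, pushed on the worklist
  call void @llvm.lifetime.start(i64 16, i8* %raw)              ; handled: no-op intrinsic
  %elt = getelementptr inbounds [4 x i32]* %buf, i64 0, i64 1   ; handled: GEP, pushed on the worklist
  store i32 7, i32* %elt                                        ; dead store; pointer operand derives from %buf
  call void @llvm.lifetime.end(i64 16, i8* %raw)                ; handled: no-op intrinsic
  ret void
}

declare void @llvm.lifetime.start(i64, i8* nocapture) nounwind
declare void @llvm.lifetime.end(i64, i8* nocapture) nounwind

On input like this the combine would erase the dead store and replace the alloca's uses with undef, leaving the leftover cast, GEP, and lifetime markers for later InstCombine iterations to clean up.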