author    Bill Wendling <isanbard@gmail.com>    2012-10-09 07:45:08 +0000
committer Bill Wendling <isanbard@gmail.com>    2012-10-09 07:45:08 +0000
commit    6765834754cbb3cb0f15b4b15e98c5e73fa50066 (patch)
tree      d2d827f8d5acfde264a1b8f883884e8297879c5f /lib/Analysis
parent    50cf36351cefbc64bca9b82b4a3897394feecac7 (diff)
Create enums for the different attributes.
We use the enums to query whether an Attributes object has that attribute. The opaque layer is responsible for knowing where that specific attribute is stored.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@165488 91177308-0d34-0410-b5e6-96231b3b80d8
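A minimal sketch of the query pattern this patch moves callers to. The enum names mirror the ones used in the diff below, but the bitmask storage shown here is an illustrative assumption, not the actual LLVM Attributes implementation (which keeps the storage opaque):

  #include <cstdint>

  class Attributes {
  public:
    // Enum values for the individual attributes (names taken from the diff below).
    enum AttrVal { AlwaysInline, NoInline, ReturnsTwice, AddressSafety };

    // Callers only ask "is this attribute present?"; how the bit is stored is
    // an implementation detail of the opaque layer (a plain bitmask here,
    // purely for illustration).
    bool hasAttribute(AttrVal A) const { return Bits & (1ULL << A); }
    void addAttribute(AttrVal A) { Bits |= 1ULL << A; }

  private:
    uint64_t Bits = 0;
  };

  // Before this patch:  F->getFnAttributes().hasReturnsTwiceAttr()
  // After this patch:   F->getFnAttributes().hasAttribute(Attributes::ReturnsTwice)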
Diffstat (limited to 'lib/Analysis')
 lib/Analysis/CodeMetrics.cpp              | 2 +-
 lib/Analysis/InlineCost.cpp               | 7 ++++---
 lib/Analysis/MemoryDependenceAnalysis.cpp | 4 ++--
 3 files changed, 7 insertions(+), 6 deletions(-)
diff --git a/lib/Analysis/CodeMetrics.cpp b/lib/Analysis/CodeMetrics.cpp
index 6f9e1cf6d9..651a54be1b 100644
--- a/lib/Analysis/CodeMetrics.cpp
+++ b/lib/Analysis/CodeMetrics.cpp
@@ -196,7 +196,7 @@ void CodeMetrics::analyzeFunction(Function *F, const DataLayout *TD) {
// as volatile if they are live across a setjmp call, and they probably
// won't do this in callers.
exposesReturnsTwice = F->callsFunctionThatReturnsTwice() &&
- !F->getFnAttributes().hasReturnsTwiceAttr();
+ !F->getFnAttributes().hasAttribute(Attributes::ReturnsTwice);
// Look at the size of the callee.
for (Function::const_iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
diff --git a/lib/Analysis/InlineCost.cpp b/lib/Analysis/InlineCost.cpp
index c8700a8798..5f51f775f1 100644
--- a/lib/Analysis/InlineCost.cpp
+++ b/lib/Analysis/InlineCost.cpp
@@ -128,7 +128,7 @@ class CallAnalyzer : public InstVisitor<CallAnalyzer, bool> {
public:
CallAnalyzer(const DataLayout *TD, Function &Callee, int Threshold)
: TD(TD), F(Callee), Threshold(Threshold), Cost(0),
- AlwaysInline(F.getFnAttributes().hasAlwaysInlineAttr()),
+ AlwaysInline(F.getFnAttributes().hasAttribute(Attributes::AlwaysInline)),
IsCallerRecursive(false), IsRecursiveCall(false),
ExposesReturnsTwice(false), HasDynamicAlloca(false), AllocatedSize(0),
NumInstructions(0), NumVectorInstructions(0),
@@ -614,7 +614,7 @@ bool CallAnalyzer::visitStore(StoreInst &I) {
bool CallAnalyzer::visitCallSite(CallSite CS) {
if (CS.isCall() && cast<CallInst>(CS.getInstruction())->canReturnTwice() &&
- !F.getFnAttributes().hasReturnsTwiceAttr()) {
+ !F.getFnAttributes().hasAttribute(Attributes::ReturnsTwice)) {
// This aborts the entire analysis.
ExposesReturnsTwice = true;
return false;
@@ -1044,7 +1044,8 @@ InlineCost InlineCostAnalyzer::getInlineCost(CallSite CS, Function *Callee,
// something else. Don't inline functions marked noinline or call sites
// marked noinline.
if (!Callee || Callee->mayBeOverridden() ||
- Callee->getFnAttributes().hasNoInlineAttr() || CS.isNoInline())
+ Callee->getFnAttributes().hasAttribute(Attributes::NoInline) ||
+ CS.isNoInline())
return llvm::InlineCost::getNever();
DEBUG(llvm::dbgs() << " Analyzing call of " << Callee->getName()
diff --git a/lib/Analysis/MemoryDependenceAnalysis.cpp b/lib/Analysis/MemoryDependenceAnalysis.cpp
index 55e4b2690d..9316df6fbf 100644
--- a/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -327,12 +327,12 @@ getLoadLoadClobberFullWidthSize(const Value *MemLocBase, int64_t MemLocOffs,
return 0;
if (LIOffs+NewLoadByteSize > MemLocEnd &&
- LI->getParent()->getParent()->getFnAttributes().hasAddressSafetyAttr()){
+ LI->getParent()->getParent()->getFnAttributes().
+ hasAttribute(Attributes::AddressSafety))
// We will be reading past the location accessed by the original program.
// While this is safe in a regular build, Address Safety analysis tools
// may start reporting false warnings. So, don't do widening.
return 0;
- }
// If a load of this width would include all of MemLoc, then we succeed.
if (LIOffs+NewLoadByteSize >= MemLocEnd)