Diffstat (limited to 'include/llvm/IR')
-rw-r--r--  include/llvm/IR/Attributes.h              4
-rw-r--r--  include/llvm/IR/CallingConv.h             7
-rw-r--r--  include/llvm/IR/Constants.h              15
-rw-r--r--  include/llvm/IR/DataLayout.h             15
-rw-r--r--  include/llvm/IR/Function.h               13
-rw-r--r--  include/llvm/IR/GlobalAlias.h            15
-rw-r--r--  include/llvm/IR/GlobalValue.h            12
-rw-r--r--  include/llvm/IR/GlobalVariable.h          4
-rw-r--r--  include/llvm/IR/IRBuilder.h             114
-rw-r--r--  include/llvm/IR/InlineAsm.h               2
-rw-r--r--  include/llvm/IR/Instruction.def          35
-rw-r--r--  include/llvm/IR/Instructions.h          445
-rw-r--r--  include/llvm/IR/Intrinsics.h              2
-rw-r--r--  include/llvm/IR/Intrinsics.td            26
-rw-r--r--  include/llvm/IR/IntrinsicsAArch64.td    363
-rw-r--r--  include/llvm/IR/IntrinsicsARM.td         48
-rw-r--r--  include/llvm/IR/IntrinsicsMips.td      1368
-rw-r--r--  include/llvm/IR/IntrinsicsX86.td        516
-rw-r--r--  include/llvm/IR/LegacyPassManager.h     111
-rw-r--r--  include/llvm/IR/LegacyPassManagers.h    470
-rw-r--r--  include/llvm/IR/Metadata.h                7
-rw-r--r--  include/llvm/IR/Module.h                  2
-rw-r--r--  include/llvm/IR/Operator.h                4
-rw-r--r--  include/llvm/IR/PassManager.h           383
-rw-r--r--  include/llvm/IR/Type.h                    8
-rw-r--r--  include/llvm/IR/Value.h                  75
26 files changed, 3670 insertions, 394 deletions
diff --git a/include/llvm/IR/Attributes.h b/include/llvm/IR/Attributes.h
index 2183758..c23ba0f 100644
--- a/include/llvm/IR/Attributes.h
+++ b/include/llvm/IR/Attributes.h
@@ -18,6 +18,7 @@
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/FoldingSet.h"
+#include "llvm/Support/Compiler.h"
#include "llvm/Support/PointerLikeTypeTraits.h"
#include <bitset>
#include <cassert>
@@ -88,6 +89,7 @@ public:
NoReturn, ///< Mark the function as not returning
NoUnwind, ///< Function doesn't unwind stack
OptimizeForSize, ///< opt_size
+ OptimizeNone, ///< Function must not be optimized.
ReadNone, ///< Function does not access memory
ReadOnly, ///< Function only reads from memory
Returned, ///< Return value is always equal to this argument
@@ -199,7 +201,7 @@ public:
/// index `1'.
class AttributeSet {
public:
- enum AttrIndex {
+ enum AttrIndex LLVM_ENUM_INT_TYPE(unsigned) {
ReturnIndex = 0U,
FunctionIndex = ~0U
};
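
The new OptimizeNone attribute marks a function whose body the pass pipeline must leave untouched. A minimal sketch of how a front end might apply it, assuming an existing llvm::Function *F; pairing it with NoInline is an assumption about typical usage, not something this hunk requires.

#include "llvm/IR/Attributes.h"
#include "llvm/IR/Function.h"

static void markUnoptimizable(llvm::Function *F) {
  F->addFnAttr(llvm::Attribute::OptimizeNone); // leave the body untouched
  F->addFnAttr(llvm::Attribute::NoInline);     // keep it out of optimized callers
}
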
diff --git a/include/llvm/IR/CallingConv.h b/include/llvm/IR/CallingConv.h
index 585fc5e..4437af2 100644
--- a/include/llvm/IR/CallingConv.h
+++ b/include/llvm/IR/CallingConv.h
@@ -51,6 +51,13 @@ namespace CallingConv {
// (HiPE).
HiPE = 11,
+ // WebKit JS - Calling convention for stack based JavaScript calls
+ WebKit_JS = 12,
+
+ // AnyReg - Calling convention for dynamic register based calls (e.g.
+ // stackmap and patchpoint intrinsics).
+ AnyReg = 13,
+
// Target - This is the start of the target-specific calling conventions,
// e.g. fastcall and thiscall on X86.
FirstTargetCC = 64,
diff --git a/include/llvm/IR/Constants.h b/include/llvm/IR/Constants.h
index 99aed0d..dac20c9 100644
--- a/include/llvm/IR/Constants.h
+++ b/include/llvm/IR/Constants.h
@@ -862,6 +862,7 @@ public:
static Constant *getPtrToInt(Constant *C, Type *Ty);
static Constant *getIntToPtr(Constant *C, Type *Ty);
static Constant *getBitCast (Constant *C, Type *Ty);
+ static Constant *getAddrSpaceCast(Constant *C, Type *Ty);
static Constant *getNSWNeg(Constant *C) { return getNeg(C, false, true); }
static Constant *getNUWNeg(Constant *C) { return getNeg(C, true, false); }
@@ -942,12 +943,20 @@ public:
Type *Ty ///< The type to trunc or bitcast C to
);
- /// @brief Create a BitCast or a PtrToInt cast constant expression
+ /// @brief Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant
+ /// expression.
static Constant *getPointerCast(
Constant *C, ///< The pointer value to be casted (operand 0)
Type *Ty ///< The type to which cast should be made
);
+ /// @brief Create a BitCast or AddrSpaceCast for a pointer type depending on
+ /// the address space.
+ static Constant *getPointerBitCastOrAddrSpaceCast(
+ Constant *C, ///< The constant to addrspacecast or bitcast
+ Type *Ty ///< The type to bitcast or addrspacecast C to
+ );
+
/// @brief Create a ZExt, Bitcast or Trunc for integer -> integer casts
static Constant *getIntegerCast(
Constant *C, ///< The integer constant to be casted
@@ -1079,8 +1088,8 @@ public:
/// as this ConstantExpr. The instruction is not linked to any basic block.
///
/// A better approach to this could be to have a constructor for Instruction
- /// which would take a ConstantExpr parameter, but that would have spread
- /// implementation details of ConstantExpr outside of Constants.cpp, which
+ /// which would take a ConstantExpr parameter, but that would have spread
+ /// implementation details of ConstantExpr outside of Constants.cpp, which
/// would make it harder to remove ConstantExprs altogether.
Instruction *getAsInstruction();
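
getPointerBitCastOrAddrSpaceCast chooses between a plain bitcast and the new addrspacecast depending on whether the source and destination address spaces match. A small sketch with illustrative names, assuming C is a constant pointer:

#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"

// Picks BitCast when the address spaces already match, AddrSpaceCast otherwise.
static llvm::Constant *castPtrConstant(llvm::Constant *C,
                                       llvm::PointerType *DestTy) {
  return llvm::ConstantExpr::getPointerBitCastOrAddrSpaceCast(C, DestTy);
}
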
diff --git a/include/llvm/IR/DataLayout.h b/include/llvm/IR/DataLayout.h
index 269edeb..10630a2 100644
--- a/include/llvm/IR/DataLayout.h
+++ b/include/llvm/IR/DataLayout.h
@@ -369,6 +369,17 @@ public:
/// least as big as Width bits.
Type *getSmallestLegalIntType(LLVMContext &C, unsigned Width = 0) const;
+ /// getLargestLegalIntType - Return the largest legal integer type, or null if
+ /// none are set.
+ Type *getLargestLegalIntType(LLVMContext &C) const {
+ unsigned LargestSize = getLargestLegalIntTypeSize();
+ return (LargestSize == 0) ? 0 : Type::getIntNTy(C, LargestSize);
+ }
+
+ /// getLargestLegalIntTypeSize - Return the size of the largest legal integer
+ /// type, or 0 if none are set.
+ unsigned getLargestLegalIntTypeSize() const;
+
/// getIndexedOffset - return the offset from the beginning of the type for
/// the specified indices. This is used to implement getelementptr.
uint64_t getIndexedOffset(Type *Ty, ArrayRef<Value *> Indices) const;
@@ -451,7 +462,7 @@ inline uint64_t DataLayout::getTypeSizeInBits(Type *Ty) const {
case Type::LabelTyID:
return getPointerSizeInBits(0);
case Type::PointerTyID:
- return getPointerSizeInBits(cast<PointerType>(Ty)->getAddressSpace());
+ return getPointerSizeInBits(Ty->getPointerAddressSpace());
case Type::ArrayTyID: {
ArrayType *ATy = cast<ArrayType>(Ty);
return ATy->getNumElements() *
@@ -461,7 +472,7 @@ inline uint64_t DataLayout::getTypeSizeInBits(Type *Ty) const {
// Get the layout annotation... which is lazily created on demand.
return getStructLayout(cast<StructType>(Ty))->getSizeInBits();
case Type::IntegerTyID:
- return cast<IntegerType>(Ty)->getBitWidth();
+ return Ty->getIntegerBitWidth();
case Type::HalfTyID:
return 16;
case Type::FloatTyID:
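
A brief sketch of the new largest-legal-integer query; the data layout string below is an illustrative target description, not taken from this patch.

#include "llvm/IR/DataLayout.h"
#include "llvm/IR/LLVMContext.h"

// Returns i64 for the illustrative "n8:16:32:64" spec; returns null when the
// target declares no native integer widths at all.
static llvm::Type *largestNativeInt(llvm::LLVMContext &Ctx) {
  llvm::DataLayout DL("e-n8:16:32:64");
  return DL.getLargestLegalIntType(Ctx);
}
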
diff --git a/include/llvm/IR/Function.h b/include/llvm/IR/Function.h
index 0e51c6f..bba7ecd 100644
--- a/include/llvm/IR/Function.h
+++ b/include/llvm/IR/Function.h
@@ -159,11 +159,11 @@ public:
/// calling convention of this function. The enum values for the known
/// calling conventions are defined in CallingConv.h.
CallingConv::ID getCallingConv() const {
- return static_cast<CallingConv::ID>(getSubclassDataFromValue() >> 1);
+ return static_cast<CallingConv::ID>(getSubclassDataFromValue() >> 2);
}
void setCallingConv(CallingConv::ID CC) {
- setValueSubclassData((getSubclassDataFromValue() & 1) |
- (static_cast<unsigned>(CC) << 1));
+ setValueSubclassData((getSubclassDataFromValue() & 3) |
+ (static_cast<unsigned>(CC) << 2));
}
/// @brief Return the attribute list for this Function.
@@ -427,6 +427,13 @@ public:
size_t arg_size() const;
bool arg_empty() const;
+ bool hasPrefixData() const {
+ return getSubclassDataFromValue() & 2;
+ }
+
+ Constant *getPrefixData() const;
+ void setPrefixData(Constant *PrefixData);
+
/// viewCFG - This function is meant for use from the debugger. You can just
/// say 'call F->viewCFG()' and a ghostview window should pop up from the
/// program, displaying the CFG of the current function with the code for each
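
The calling convention now occupies bits 2 and up of the value subclass data, freeing bit 1 to record whether prefix data is attached. A hedged sketch of the resulting API, assuming an existing function F and constant Prefix:

#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Function.h"

static void attachPrefix(llvm::Function *F, llvm::Constant *Prefix) {
  F->setCallingConv(llvm::CallingConv::Fast); // now stored in bits 2 and up
  F->setPrefixData(Prefix);                   // sets the "has prefix data" bit
  bool HasPrefix = F->hasPrefixData();        // reads bit 1 of the subclass data
  (void)HasPrefix;
}
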
diff --git a/include/llvm/IR/GlobalAlias.h b/include/llvm/IR/GlobalAlias.h
index 883814a..fec61a7 100644
--- a/include/llvm/IR/GlobalAlias.h
+++ b/include/llvm/IR/GlobalAlias.h
@@ -66,14 +66,25 @@ public:
}
/// getAliasedGlobal() - Aliasee can be either global or bitcast of
/// global. This method retrieves the global for both aliasee flavours.
- const GlobalValue *getAliasedGlobal() const;
+ GlobalValue *getAliasedGlobal();
+ const GlobalValue *getAliasedGlobal() const {
+ return const_cast<GlobalAlias *>(this)->getAliasedGlobal();
+ }
/// resolveAliasedGlobal() - This method tries to ultimately resolve the alias
/// by going through the aliasing chain and trying to find the very last
/// global. Returns NULL if a cycle was found. If stopOnWeak is false, then
/// the whole aliasing chain is traversed, otherwise only strong
/// aliases are followed.
- const GlobalValue *resolveAliasedGlobal(bool stopOnWeak = true) const;
+ GlobalValue *resolveAliasedGlobal(bool stopOnWeak = true);
+ const GlobalValue *resolveAliasedGlobal(bool stopOnWeak = true) const {
+ return const_cast<GlobalAlias *>(this)->resolveAliasedGlobal(stopOnWeak);
+ }
+
+ static bool isValidLinkage(LinkageTypes L) {
+ return isExternalLinkage(L) || isLocalLinkage(L) ||
+ isWeakLinkage(L) || isLinkOnceLinkage(L);
+ }
// Methods for support type inquiry through isa, cast, and dyn_cast:
static inline bool classof(const Value *V) {
diff --git a/include/llvm/IR/GlobalValue.h b/include/llvm/IR/GlobalValue.h
index 1dc99cf..4f20a31 100644
--- a/include/llvm/IR/GlobalValue.h
+++ b/include/llvm/IR/GlobalValue.h
@@ -35,7 +35,6 @@ public:
AvailableExternallyLinkage, ///< Available for inspection, not emission.
LinkOnceAnyLinkage, ///< Keep one copy of function when linking (inline)
LinkOnceODRLinkage, ///< Same, but only replaced by something equivalent.
- LinkOnceODRAutoHideLinkage, ///< Like LinkOnceODRLinkage but addr not taken.
WeakAnyLinkage, ///< Keep one copy of named function when linking (weak)
WeakODRLinkage, ///< Same, but only replaced by something equivalent.
AppendingLinkage, ///< Special purpose, only applies to global arrays
@@ -123,12 +122,7 @@ public:
return Linkage == AvailableExternallyLinkage;
}
static bool isLinkOnceLinkage(LinkageTypes Linkage) {
- return Linkage == LinkOnceAnyLinkage ||
- Linkage == LinkOnceODRLinkage ||
- Linkage == LinkOnceODRAutoHideLinkage;
- }
- static bool isLinkOnceODRAutoHideLinkage(LinkageTypes Linkage) {
- return Linkage == LinkOnceODRAutoHideLinkage;
+ return Linkage == LinkOnceAnyLinkage || Linkage == LinkOnceODRLinkage;
}
static bool isWeakLinkage(LinkageTypes Linkage) {
return Linkage == WeakAnyLinkage || Linkage == WeakODRLinkage;
@@ -192,7 +186,6 @@ public:
Linkage == WeakODRLinkage ||
Linkage == LinkOnceAnyLinkage ||
Linkage == LinkOnceODRLinkage ||
- Linkage == LinkOnceODRAutoHideLinkage ||
Linkage == CommonLinkage ||
Linkage == ExternalWeakLinkage ||
Linkage == LinkerPrivateWeakLinkage;
@@ -205,9 +198,6 @@ public:
bool hasLinkOnceLinkage() const {
return isLinkOnceLinkage(Linkage);
}
- bool hasLinkOnceODRAutoHideLinkage() const {
- return isLinkOnceODRAutoHideLinkage(Linkage);
- }
bool hasWeakLinkage() const {
return isWeakLinkage(Linkage);
}
diff --git a/include/llvm/IR/GlobalVariable.h b/include/llvm/IR/GlobalVariable.h
index bfed507..660092d 100644
--- a/include/llvm/IR/GlobalVariable.h
+++ b/include/llvm/IR/GlobalVariable.h
@@ -84,9 +84,7 @@ public:
/// Provide fast operand accessors
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
- /// hasInitializer - Unless a global variable isExternal(), it has an
- /// initializer. The initializer for the global variable/constant is held by
- /// Initializer if an initializer is specified.
+ /// Definitions have initializers, declarations don't.
///
inline bool hasInitializer() const { return !isDeclaration(); }
diff --git a/include/llvm/IR/IRBuilder.h b/include/llvm/IR/IRBuilder.h
index e396e71..8d1432d 100644
--- a/include/llvm/IR/IRBuilder.h
+++ b/include/llvm/IR/IRBuilder.h
@@ -25,6 +25,7 @@
#include "llvm/IR/Operator.h"
#include "llvm/Support/CBindingWrapping.h"
#include "llvm/Support/ConstantFolder.h"
+#include "llvm/Support/ValueHandle.h"
namespace llvm {
class MDNode;
@@ -52,10 +53,13 @@ protected:
BasicBlock *BB;
BasicBlock::iterator InsertPt;
LLVMContext &Context;
+
+ MDNode *DefaultFPMathTag;
+ FastMathFlags FMF;
public:
- IRBuilderBase(LLVMContext &context)
- : Context(context) {
+ IRBuilderBase(LLVMContext &context, MDNode *FPMathTag = 0)
+ : Context(context), DefaultFPMathTag(FPMathTag), FMF() {
ClearInsertionPoint();
}
@@ -169,6 +173,68 @@ public:
ClearInsertionPoint();
}
+ /// \brief Get the floating point math metadata being used.
+ MDNode *getDefaultFPMathTag() const { return DefaultFPMathTag; }
+
+ /// \brief Get the flags to be applied to created floating point ops
+ FastMathFlags getFastMathFlags() const { return FMF; }
+
+ /// \brief Clear the fast-math flags.
+ void clearFastMathFlags() { FMF.clear(); }
+
+ /// \brief Set the floating point math metadata to be used.
+ void SetDefaultFPMathTag(MDNode *FPMathTag) { DefaultFPMathTag = FPMathTag; }
+
+ /// \brief Set the fast-math flags to be used with generated fp-math operators
+ void SetFastMathFlags(FastMathFlags NewFMF) { FMF = NewFMF; }
+
+ //===--------------------------------------------------------------------===//
+ // RAII helpers.
+ //===--------------------------------------------------------------------===//
+
+ // \brief RAII object that stores the current insertion point and restores it
+ // when the object is destroyed. This includes the debug location.
+ class InsertPointGuard {
+ IRBuilderBase &Builder;
+ AssertingVH<BasicBlock> Block;
+ BasicBlock::iterator Point;
+ DebugLoc DbgLoc;
+
+ InsertPointGuard(const InsertPointGuard &) LLVM_DELETED_FUNCTION;
+ InsertPointGuard &operator=(const InsertPointGuard &) LLVM_DELETED_FUNCTION;
+
+ public:
+ InsertPointGuard(IRBuilderBase &B)
+ : Builder(B), Block(B.GetInsertBlock()), Point(B.GetInsertPoint()),
+ DbgLoc(B.getCurrentDebugLocation()) {}
+
+ ~InsertPointGuard() {
+ Builder.restoreIP(InsertPoint(Block, Point));
+ Builder.SetCurrentDebugLocation(DbgLoc);
+ }
+ };
+
+ // \brief RAII object that stores the current fast math settings and restores
+ // them when the object is destroyed.
+ class FastMathFlagGuard {
+ IRBuilderBase &Builder;
+ FastMathFlags FMF;
+ MDNode *FPMathTag;
+
+ FastMathFlagGuard(const FastMathFlagGuard &) LLVM_DELETED_FUNCTION;
+ FastMathFlagGuard &operator=(
+ const FastMathFlagGuard &) LLVM_DELETED_FUNCTION;
+
+ public:
+ FastMathFlagGuard(IRBuilderBase &B)
+ : Builder(B), FMF(B.FMF), FPMathTag(B.DefaultFPMathTag) {}
+
+ ~FastMathFlagGuard() {
+ Builder.FMF = FMF;
+ Builder.DefaultFPMathTag = FPMathTag;
+ }
+ };
+
//===--------------------------------------------------------------------===//
// Miscellaneous creation methods.
//===--------------------------------------------------------------------===//
@@ -354,76 +420,52 @@ template<bool preserveNames = true, typename T = ConstantFolder,
typename Inserter = IRBuilderDefaultInserter<preserveNames> >
class IRBuilder : public IRBuilderBase, public Inserter {
T Folder;
- MDNode *DefaultFPMathTag;
- FastMathFlags FMF;
public:
IRBuilder(LLVMContext &C, const T &F, const Inserter &I = Inserter(),
MDNode *FPMathTag = 0)
- : IRBuilderBase(C), Inserter(I), Folder(F), DefaultFPMathTag(FPMathTag),
- FMF() {
+ : IRBuilderBase(C, FPMathTag), Inserter(I), Folder(F) {
}
explicit IRBuilder(LLVMContext &C, MDNode *FPMathTag = 0)
- : IRBuilderBase(C), Folder(), DefaultFPMathTag(FPMathTag), FMF() {
+ : IRBuilderBase(C, FPMathTag), Folder() {
}
explicit IRBuilder(BasicBlock *TheBB, const T &F, MDNode *FPMathTag = 0)
- : IRBuilderBase(TheBB->getContext()), Folder(F),
- DefaultFPMathTag(FPMathTag), FMF() {
+ : IRBuilderBase(TheBB->getContext(), FPMathTag), Folder(F) {
SetInsertPoint(TheBB);
}
explicit IRBuilder(BasicBlock *TheBB, MDNode *FPMathTag = 0)
- : IRBuilderBase(TheBB->getContext()), Folder(),
- DefaultFPMathTag(FPMathTag), FMF() {
+ : IRBuilderBase(TheBB->getContext(), FPMathTag), Folder() {
SetInsertPoint(TheBB);
}
explicit IRBuilder(Instruction *IP, MDNode *FPMathTag = 0)
- : IRBuilderBase(IP->getContext()), Folder(), DefaultFPMathTag(FPMathTag),
- FMF() {
+ : IRBuilderBase(IP->getContext(), FPMathTag), Folder() {
SetInsertPoint(IP);
SetCurrentDebugLocation(IP->getDebugLoc());
}
explicit IRBuilder(Use &U, MDNode *FPMathTag = 0)
- : IRBuilderBase(U->getContext()), Folder(), DefaultFPMathTag(FPMathTag),
- FMF() {
+ : IRBuilderBase(U->getContext(), FPMathTag), Folder() {
SetInsertPoint(U);
SetCurrentDebugLocation(cast<Instruction>(U.getUser())->getDebugLoc());
}
IRBuilder(BasicBlock *TheBB, BasicBlock::iterator IP, const T& F,
MDNode *FPMathTag = 0)
- : IRBuilderBase(TheBB->getContext()), Folder(F),
- DefaultFPMathTag(FPMathTag), FMF() {
+ : IRBuilderBase(TheBB->getContext(), FPMathTag), Folder(F) {
SetInsertPoint(TheBB, IP);
}
IRBuilder(BasicBlock *TheBB, BasicBlock::iterator IP, MDNode *FPMathTag = 0)
- : IRBuilderBase(TheBB->getContext()), Folder(),
- DefaultFPMathTag(FPMathTag), FMF() {
+ : IRBuilderBase(TheBB->getContext(), FPMathTag), Folder() {
SetInsertPoint(TheBB, IP);
}
/// \brief Get the constant folder being used.
const T &getFolder() { return Folder; }
- /// \brief Get the floating point math metadata being used.
- MDNode *getDefaultFPMathTag() const { return DefaultFPMathTag; }
-
- /// \brief Get the flags to be applied to created floating point ops
- FastMathFlags getFastMathFlags() const { return FMF; }
-
- /// \brief Clear the fast-math flags.
- void clearFastMathFlags() { FMF.clear(); }
-
- /// \brief SetDefaultFPMathTag - Set the floating point math metadata to be used.
- void SetDefaultFPMathTag(MDNode *FPMathTag) { DefaultFPMathTag = FPMathTag; }
-
- /// \brief Set the fast-math flags to be used with generated fp-math operators
- void SetFastMathFlags(FastMathFlags NewFMF) { FMF = NewFMF; }
-
/// \brief Return true if this builder is configured to actually add the
/// requested names to IR created through it.
bool isNamePreserving() const { return preserveNames; }
@@ -1091,6 +1133,10 @@ public:
const Twine &Name = "") {
return CreateCast(Instruction::BitCast, V, DestTy, Name);
}
+ Value *CreateAddrSpaceCast(Value *V, Type *DestTy,
+ const Twine &Name = "") {
+ return CreateCast(Instruction::AddrSpaceCast, V, DestTy, Name);
+ }
Value *CreateZExtOrBitCast(Value *V, Type *DestTy,
const Twine &Name = "") {
if (V->getType() == DestTy)
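
With the RAII guards hosted on IRBuilderBase, any code holding a builder reference can temporarily redirect it. A sketch under assumed names (Builder, Preheader, A, B); the guards restore the insertion point, debug location, and fast-math state on scope exit.

#include "llvm/IR/IRBuilder.h"

static void emitIntoPreheader(llvm::IRBuilder<> &Builder,
                              llvm::BasicBlock *Preheader,
                              llvm::Value *A, llvm::Value *B) {
  llvm::IRBuilderBase::InsertPointGuard IPG(Builder);
  llvm::IRBuilderBase::FastMathFlagGuard FMG(Builder);

  Builder.SetInsertPoint(Preheader->getTerminator());
  llvm::FastMathFlags Relaxed;
  Relaxed.setUnsafeAlgebra();               // illustrative fast-math setting
  Builder.SetFastMathFlags(Relaxed);
  Builder.CreateFAdd(A, B, "preheader.sum");
} // both guards restore the previous builder state here
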
diff --git a/include/llvm/IR/InlineAsm.h b/include/llvm/IR/InlineAsm.h
index 33e4ab8..3398a83 100644
--- a/include/llvm/IR/InlineAsm.h
+++ b/include/llvm/IR/InlineAsm.h
@@ -197,7 +197,7 @@ public:
// These are helper methods for dealing with flags in the INLINEASM SDNode
// in the backend.
- enum {
+ enum LLVM_ENUM_INT_TYPE(uint32_t) {
// Fixed operands on an INLINEASM SDNode.
Op_InputChain = 0,
Op_AsmString = 1,
diff --git a/include/llvm/IR/Instruction.def b/include/llvm/IR/Instruction.def
index e59a052..d46314c 100644
--- a/include/llvm/IR/Instruction.def
+++ b/include/llvm/IR/Instruction.def
@@ -154,25 +154,26 @@ HANDLE_CAST_INST(41, FPExt , FPExtInst ) // Extend floating point
HANDLE_CAST_INST(42, PtrToInt, PtrToIntInst) // Pointer -> Integer
HANDLE_CAST_INST(43, IntToPtr, IntToPtrInst) // Integer -> Pointer
HANDLE_CAST_INST(44, BitCast , BitCastInst ) // Type cast
- LAST_CAST_INST(44)
+HANDLE_CAST_INST(45, AddrSpaceCast, AddrSpaceCastInst) // addrspace cast
+ LAST_CAST_INST(45)
// Other operators...
- FIRST_OTHER_INST(45)
-HANDLE_OTHER_INST(45, ICmp , ICmpInst ) // Integer comparison instruction
-HANDLE_OTHER_INST(46, FCmp , FCmpInst ) // Floating point comparison instr.
-HANDLE_OTHER_INST(47, PHI , PHINode ) // PHI node instruction
-HANDLE_OTHER_INST(48, Call , CallInst ) // Call a function
-HANDLE_OTHER_INST(49, Select , SelectInst ) // select instruction
-HANDLE_OTHER_INST(50, UserOp1, Instruction) // May be used internally in a pass
-HANDLE_OTHER_INST(51, UserOp2, Instruction) // Internal to passes only
-HANDLE_OTHER_INST(52, VAArg , VAArgInst ) // vaarg instruction
-HANDLE_OTHER_INST(53, ExtractElement, ExtractElementInst)// extract from vector
-HANDLE_OTHER_INST(54, InsertElement, InsertElementInst) // insert into vector
-HANDLE_OTHER_INST(55, ShuffleVector, ShuffleVectorInst) // shuffle two vectors.
-HANDLE_OTHER_INST(56, ExtractValue, ExtractValueInst)// extract from aggregate
-HANDLE_OTHER_INST(57, InsertValue, InsertValueInst) // insert into aggregate
-HANDLE_OTHER_INST(58, LandingPad, LandingPadInst) // Landing pad instruction.
- LAST_OTHER_INST(58)
+ FIRST_OTHER_INST(46)
+HANDLE_OTHER_INST(46, ICmp , ICmpInst ) // Integer comparison instruction
+HANDLE_OTHER_INST(47, FCmp , FCmpInst ) // Floating point comparison instr.
+HANDLE_OTHER_INST(48, PHI , PHINode ) // PHI node instruction
+HANDLE_OTHER_INST(49, Call , CallInst ) // Call a function
+HANDLE_OTHER_INST(50, Select , SelectInst ) // select instruction
+HANDLE_OTHER_INST(51, UserOp1, Instruction) // May be used internally in a pass
+HANDLE_OTHER_INST(52, UserOp2, Instruction) // Internal to passes only
+HANDLE_OTHER_INST(53, VAArg , VAArgInst ) // vaarg instruction
+HANDLE_OTHER_INST(54, ExtractElement, ExtractElementInst)// extract from vector
+HANDLE_OTHER_INST(55, InsertElement, InsertElementInst) // insert into vector
+HANDLE_OTHER_INST(56, ShuffleVector, ShuffleVectorInst) // shuffle two vectors.
+HANDLE_OTHER_INST(57, ExtractValue, ExtractValueInst)// extract from aggregate
+HANDLE_OTHER_INST(58, InsertValue, InsertValueInst) // insert into aggregate
+HANDLE_OTHER_INST(59, LandingPad, LandingPadInst) // Landing pad instruction.
+ LAST_OTHER_INST(59)
#undef FIRST_TERM_INST
#undef HANDLE_TERM_INST
diff --git a/include/llvm/IR/Instructions.h b/include/llvm/IR/Instructions.h
index e05c3a8..0843d8f 100644
--- a/include/llvm/IR/Instructions.h
+++ b/include/llvm/IR/Instructions.h
@@ -23,8 +23,6 @@
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/Support/ErrorHandling.h"
-#include "llvm/Support/IntegersSubset.h"
-#include "llvm/Support/IntegersSubsetMapping.h"
#include <iterator>
namespace llvm {
@@ -911,6 +909,18 @@ DEFINE_TRANSPARENT_OPERAND_ACCESSORS(GetElementPtrInst, Value)
/// must be identical types.
/// \brief Represent an integer comparison operator.
class ICmpInst: public CmpInst {
+ void AssertOK() {
+ assert(getPredicate() >= CmpInst::FIRST_ICMP_PREDICATE &&
+ getPredicate() <= CmpInst::LAST_ICMP_PREDICATE &&
+ "Invalid ICmp predicate value");
+ assert(getOperand(0)->getType() == getOperand(1)->getType() &&
+ "Both operands to ICmp instruction are not of the same type!");
+ // Check that the operands are the right type
+ assert((getOperand(0)->getType()->isIntOrIntVectorTy() ||
+ getOperand(0)->getType()->isPtrOrPtrVectorTy()) &&
+ "Invalid operand types for ICmp instruction");
+ }
+
protected:
/// \brief Clone an identical ICmpInst
virtual ICmpInst *clone_impl() const;
@@ -925,15 +935,9 @@ public:
) : CmpInst(makeCmpResultType(LHS->getType()),
Instruction::ICmp, pred, LHS, RHS, NameStr,
InsertBefore) {
- assert(pred >= CmpInst::FIRST_ICMP_PREDICATE &&
- pred <= CmpInst::LAST_ICMP_PREDICATE &&
- "Invalid ICmp predicate value");
- assert(getOperand(0)->getType() == getOperand(1)->getType() &&
- "Both operands to ICmp instruction are not of the same type!");
- // Check that the operands are the right type
- assert((getOperand(0)->getType()->isIntOrIntVectorTy() ||
- getOperand(0)->getType()->getScalarType()->isPointerTy()) &&
- "Invalid operand types for ICmp instruction");
+#ifndef NDEBUG
+ AssertOK();
+#endif
}
/// \brief Constructor with insert-at-end semantics.
@@ -946,15 +950,9 @@ public:
) : CmpInst(makeCmpResultType(LHS->getType()),
Instruction::ICmp, pred, LHS, RHS, NameStr,
&InsertAtEnd) {
- assert(pred >= CmpInst::FIRST_ICMP_PREDICATE &&
- pred <= CmpInst::LAST_ICMP_PREDICATE &&
- "Invalid ICmp predicate value");
- assert(getOperand(0)->getType() == getOperand(1)->getType() &&
- "Both operands to ICmp instruction are not of the same type!");
- // Check that the operands are the right type
- assert((getOperand(0)->getType()->isIntOrIntVectorTy() ||
- getOperand(0)->getType()->getScalarType()->isPointerTy()) &&
- "Invalid operand types for ICmp instruction");
+#ifndef NDEBUG
+ AssertOK();
+#endif
}
/// \brief Constructor with no-insertion semantics
@@ -965,15 +963,9 @@ public:
const Twine &NameStr = "" ///< Name of the instruction
) : CmpInst(makeCmpResultType(LHS->getType()),
Instruction::ICmp, pred, LHS, RHS, NameStr) {
- assert(pred >= CmpInst::FIRST_ICMP_PREDICATE &&
- pred <= CmpInst::LAST_ICMP_PREDICATE &&
- "Invalid ICmp predicate value");
- assert(getOperand(0)->getType() == getOperand(1)->getType() &&
- "Both operands to ICmp instruction are not of the same type!");
- // Check that the operands are the right type
- assert((getOperand(0)->getType()->isIntOrIntVectorTy() ||
- getOperand(0)->getType()->getScalarType()->isPointerTy()) &&
- "Invalid operand types for ICmp instruction");
+#ifndef NDEBUG
+ AssertOK();
+#endif
}
/// For example, EQ->EQ, SLE->SLE, UGT->SGT, etc.
@@ -2457,31 +2449,10 @@ DEFINE_TRANSPARENT_OPERAND_ACCESSORS(BranchInst, Value)
class SwitchInst : public TerminatorInst {
void *operator new(size_t, unsigned) LLVM_DELETED_FUNCTION;
unsigned ReservedSpace;
- // Operands format:
// Operand[0] = Value to switch on
// Operand[1] = Default basic block destination
// Operand[2n ] = Value to match
// Operand[2n+1] = BasicBlock to go to on match
-
- // Store case values separately from operands list. We needn't User-Use
- // concept here, since it is just a case value, it will always constant,
- // and case value couldn't reused with another instructions/values.
- // Additionally:
- // It allows us to use custom type for case values that is not inherited
- // from Value. Since case value is a complex type that implements
- // the subset of integers, we needn't extract sub-constants within
- // slow getAggregateElement method.
- // For case values we will use std::list to by two reasons:
- // 1. It allows to add/remove cases without whole collection reallocation.
- // 2. In most of cases we needn't random access.
- // Currently case values are also stored in Operands List, but it will moved
- // out in future commits.
- typedef std::list<IntegersSubset> Subsets;
- typedef Subsets::iterator SubsetsIt;
- typedef Subsets::const_iterator SubsetsConstIt;
-
- Subsets TheSubsets;
-
SwitchInst(const SwitchInst &SI);
void init(Value *Value, BasicBlock *Default, unsigned NumReserved);
void growOperands();
@@ -2506,25 +2477,121 @@ protected:
virtual SwitchInst *clone_impl() const;
public:
- // FIXME: Currently there are a lot of unclean template parameters,
- // we need to make refactoring in future.
- // All these parameters are used to implement both iterator and const_iterator
- // without code duplication.
- // SwitchInstTy may be "const SwitchInst" or "SwitchInst"
- // ConstantIntTy may be "const ConstantInt" or "ConstantInt"
- // SubsetsItTy may be SubsetsConstIt or SubsetsIt
- // BasicBlockTy may be "const BasicBlock" or "BasicBlock"
- template <class SwitchInstTy, class ConstantIntTy,
- class SubsetsItTy, class BasicBlockTy>
- class CaseIteratorT;
-
- typedef CaseIteratorT<const SwitchInst, const ConstantInt,
- SubsetsConstIt, const BasicBlock> ConstCaseIt;
- class CaseIt;
-
// -2
static const unsigned DefaultPseudoIndex = static_cast<unsigned>(~0L-1);
+ template <class SwitchInstTy, class ConstantIntTy, class BasicBlockTy>
+ class CaseIteratorT {
+ protected:
+
+ SwitchInstTy *SI;
+ unsigned Index;
+
+ public:
+
+ typedef CaseIteratorT<SwitchInstTy, ConstantIntTy, BasicBlockTy> Self;
+
+ /// Initializes case iterator for given SwitchInst and for given
+ /// case number.
+ CaseIteratorT(SwitchInstTy *SI, unsigned CaseNum) {
+ this->SI = SI;
+ Index = CaseNum;
+ }
+
+ /// Initializes case iterator for given SwitchInst and for given
+ /// TerminatorInst's successor index.
+ static Self fromSuccessorIndex(SwitchInstTy *SI, unsigned SuccessorIndex) {
+ assert(SuccessorIndex < SI->getNumSuccessors() &&
+ "Successor index # out of range!");
+ return SuccessorIndex != 0 ?
+ Self(SI, SuccessorIndex - 1) :
+ Self(SI, DefaultPseudoIndex);
+ }
+
+ /// Resolves case value for current case.
+ ConstantIntTy *getCaseValue() {
+ assert(Index < SI->getNumCases() && "Index out the number of cases.");
+ return reinterpret_cast<ConstantIntTy*>(SI->getOperand(2 + Index*2));
+ }
+
+ /// Resolves successor for current case.
+ BasicBlockTy *getCaseSuccessor() {
+ assert((Index < SI->getNumCases() ||
+ Index == DefaultPseudoIndex) &&
+ "Index out the number of cases.");
+ return SI->getSuccessor(getSuccessorIndex());
+ }
+
+ /// Returns number of current case.
+ unsigned getCaseIndex() const { return Index; }
+
+ /// Returns TerminatorInst's successor index for current case successor.
+ unsigned getSuccessorIndex() const {
+ assert((Index == DefaultPseudoIndex || Index < SI->getNumCases()) &&
+ "Index out the number of cases.");
+ return Index != DefaultPseudoIndex ? Index + 1 : 0;
+ }
+
+ Self operator++() {
+ // Check index correctness after increment.
+ // Note: Index == getNumCases() means end().
+ assert(Index+1 <= SI->getNumCases() && "Index out the number of cases.");
+ ++Index;
+ return *this;
+ }
+ Self operator++(int) {
+ Self tmp = *this;
+ ++(*this);
+ return tmp;
+ }
+ Self operator--() {
+ // Check index correctness after decrement.
+ // Note: Index == getNumCases() means end().
+ // Also allow "-1" iterator here. That will become valid after ++.
+ assert((Index == 0 || Index-1 <= SI->getNumCases()) &&
+ "Index out the number of cases.");
+ --Index;
+ return *this;
+ }
+ Self operator--(int) {
+ Self tmp = *this;
+ --(*this);
+ return tmp;
+ }
+ bool operator==(const Self& RHS) const {
+ assert(RHS.SI == SI && "Incompatible operators.");
+ return RHS.Index == Index;
+ }
+ bool operator!=(const Self& RHS) const {
+ assert(RHS.SI == SI && "Incompatible operators.");
+ return RHS.Index != Index;
+ }
+ };
+
+ typedef CaseIteratorT<const SwitchInst, const ConstantInt, const BasicBlock>
+ ConstCaseIt;
+
+ class CaseIt : public CaseIteratorT<SwitchInst, ConstantInt, BasicBlock> {
+
+ typedef CaseIteratorT<SwitchInst, ConstantInt, BasicBlock> ParentTy;
+
+ public:
+
+ CaseIt(const ParentTy& Src) : ParentTy(Src) {}
+ CaseIt(SwitchInst *SI, unsigned CaseNum) : ParentTy(SI, CaseNum) {}
+
+ /// Sets the new value for current case.
+ void setValue(ConstantInt *V) {
+ assert(Index < SI->getNumCases() && "Index out the number of cases.");
+ SI->setOperand(2 + Index*2, reinterpret_cast<Value*>(V));
+ }
+
+ /// Sets the new successor for current case.
+ void setSuccessor(BasicBlock *S) {
+ SI->setSuccessor(getSuccessorIndex(), S);
+ }
+ };
+
static SwitchInst *Create(Value *Value, BasicBlock *Default,
unsigned NumCases, Instruction *InsertBefore = 0) {
return new SwitchInst(Value, Default, NumCases, InsertBefore);
@@ -2560,23 +2627,23 @@ public:
/// Returns a read/write iterator that points to the first
/// case in SwitchInst.
CaseIt case_begin() {
- return CaseIt(this, 0, TheSubsets.begin());
+ return CaseIt(this, 0);
}
/// Returns a read-only iterator that points to the first
/// case in the SwitchInst.
ConstCaseIt case_begin() const {
- return ConstCaseIt(this, 0, TheSubsets.begin());
+ return ConstCaseIt(this, 0);
}
/// Returns a read/write iterator that points one past the last
/// in the SwitchInst.
CaseIt case_end() {
- return CaseIt(this, getNumCases(), TheSubsets.end());
+ return CaseIt(this, getNumCases());
}
/// Returns a read-only iterator that points one past the last
/// in the SwitchInst.
ConstCaseIt case_end() const {
- return ConstCaseIt(this, getNumCases(), TheSubsets.end());
+ return ConstCaseIt(this, getNumCases());
}
/// Returns an iterator that points to the default case.
/// Note: this iterator allows to resolve successor only. Attempt
@@ -2584,10 +2651,10 @@ public:
/// Also note, that increment and decrement also causes an assertion and
/// makes iterator invalid.
CaseIt case_default() {
- return CaseIt(this, DefaultPseudoIndex, TheSubsets.end());
+ return CaseIt(this, DefaultPseudoIndex);
}
ConstCaseIt case_default() const {
- return ConstCaseIt(this, DefaultPseudoIndex, TheSubsets.end());
+ return ConstCaseIt(this, DefaultPseudoIndex);
}
/// findCaseValue - Search all of the case values for the specified constant.
@@ -2596,13 +2663,13 @@ public:
/// that it is handled by the default handler.
CaseIt findCaseValue(const ConstantInt *C) {
for (CaseIt i = case_begin(), e = case_end(); i != e; ++i)
- if (i.getCaseValueEx().isSatisfies(IntItem::fromConstantInt(C)))
+ if (i.getCaseValue() == C)
return i;
return case_default();
}
ConstCaseIt findCaseValue(const ConstantInt *C) const {
for (ConstCaseIt i = case_begin(), e = case_end(); i != e; ++i)
- if (i.getCaseValueEx().isSatisfies(IntItem::fromConstantInt(C)))
+ if (i.getCaseValue() == C)
return i;
return case_default();
}
@@ -2628,19 +2695,13 @@ public:
/// point to the added case.
void addCase(ConstantInt *OnVal, BasicBlock *Dest);
- /// addCase - Add an entry to the switch instruction.
- /// Note:
- /// This action invalidates case_end(). Old case_end() iterator will
- /// point to the added case.
- void addCase(IntegersSubset& OnVal, BasicBlock *Dest);
-
/// removeCase - This method removes the specified case and its successor
/// from the switch instruction. Note that this operation may reorder the
/// remaining cases at index idx and above.
/// Note:
/// This action invalidates iterators for all cases following the one removed,
/// including the case_end() iterator.
- void removeCase(CaseIt& i);
+ void removeCase(CaseIt i);
unsigned getNumSuccessors() const { return getNumOperands()/2; }
BasicBlock *getSuccessor(unsigned idx) const {
@@ -2652,190 +2713,7 @@ public:
setOperand(idx*2+1, (Value*)NewSucc);
}
- uint16_t hash() const {
- uint32_t NumberOfCases = (uint32_t)getNumCases();
- uint16_t Hash = (0xFFFF & NumberOfCases) ^ (NumberOfCases >> 16);
- for (ConstCaseIt i = case_begin(), e = case_end();
- i != e; ++i) {
- uint32_t NumItems = (uint32_t)i.getCaseValueEx().getNumItems();
- Hash = (Hash << 1) ^ (0xFFFF & NumItems) ^ (NumItems >> 16);
- }
- return Hash;
- }
-
- // Case iterators definition.
-
- template <class SwitchInstTy, class ConstantIntTy,
- class SubsetsItTy, class BasicBlockTy>
- class CaseIteratorT {
- protected:
-
- SwitchInstTy *SI;
- unsigned Index;
- SubsetsItTy SubsetIt;
-
- /// Initializes case iterator for given SwitchInst and for given
- /// case number.
- friend class SwitchInst;
- CaseIteratorT(SwitchInstTy *SI, unsigned SuccessorIndex,
- SubsetsItTy CaseValueIt) {
- this->SI = SI;
- Index = SuccessorIndex;
- this->SubsetIt = CaseValueIt;
- }
-
- public:
- typedef typename SubsetsItTy::reference IntegersSubsetRef;
- typedef CaseIteratorT<SwitchInstTy, ConstantIntTy,
- SubsetsItTy, BasicBlockTy> Self;
-
- CaseIteratorT(SwitchInstTy *SI, unsigned CaseNum) {
- this->SI = SI;
- Index = CaseNum;
- SubsetIt = SI->TheSubsets.begin();
- std::advance(SubsetIt, CaseNum);
- }
-
-
- /// Initializes case iterator for given SwitchInst and for given
- /// TerminatorInst's successor index.
- static Self fromSuccessorIndex(SwitchInstTy *SI, unsigned SuccessorIndex) {
- assert(SuccessorIndex < SI->getNumSuccessors() &&
- "Successor index # out of range!");
- return SuccessorIndex != 0 ?
- Self(SI, SuccessorIndex - 1) :
- Self(SI, DefaultPseudoIndex);
- }
-
- /// Resolves case value for current case.
- ConstantIntTy *getCaseValue() {
- assert(Index < SI->getNumCases() && "Index out the number of cases.");
- IntegersSubsetRef CaseRanges = *SubsetIt;
-
- // FIXME: Currently we work with ConstantInt based cases.
- // So return CaseValue as ConstantInt.
- return CaseRanges.getSingleNumber(0).toConstantInt();
- }
-
- /// Resolves case value for current case.
- IntegersSubsetRef getCaseValueEx() {
- assert(Index < SI->getNumCases() && "Index out the number of cases.");
- return *SubsetIt;
- }
-
- /// Resolves successor for current case.
- BasicBlockTy *getCaseSuccessor() {
- assert((Index < SI->getNumCases() ||
- Index == DefaultPseudoIndex) &&
- "Index out the number of cases.");
- return SI->getSuccessor(getSuccessorIndex());
- }
-
- /// Returns number of current case.
- unsigned getCaseIndex() const { return Index; }
-
- /// Returns TerminatorInst's successor index for current case successor.
- unsigned getSuccessorIndex() const {
- assert((Index == DefaultPseudoIndex || Index < SI->getNumCases()) &&
- "Index out the number of cases.");
- return Index != DefaultPseudoIndex ? Index + 1 : 0;
- }
-
- Self operator++() {
- // Check index correctness after increment.
- // Note: Index == getNumCases() means end().
- assert(Index+1 <= SI->getNumCases() && "Index out the number of cases.");
- ++Index;
- if (Index == 0)
- SubsetIt = SI->TheSubsets.begin();
- else
- ++SubsetIt;
- return *this;
- }
- Self operator++(int) {
- Self tmp = *this;
- ++(*this);
- return tmp;
- }
- Self operator--() {
- // Check index correctness after decrement.
- // Note: Index == getNumCases() means end().
- // Also allow "-1" iterator here. That will became valid after ++.
- unsigned NumCases = SI->getNumCases();
- assert((Index == 0 || Index-1 <= NumCases) &&
- "Index out the number of cases.");
- --Index;
- if (Index == NumCases) {
- SubsetIt = SI->TheSubsets.end();
- return *this;
- }
-
- if (Index != -1U)
- --SubsetIt;
-
- return *this;
- }
- Self operator--(int) {
- Self tmp = *this;
- --(*this);
- return tmp;
- }
- bool operator==(const Self& RHS) const {
- assert(RHS.SI == SI && "Incompatible operators.");
- return RHS.Index == Index;
- }
- bool operator!=(const Self& RHS) const {
- assert(RHS.SI == SI && "Incompatible operators.");
- return RHS.Index != Index;
- }
- };
-
- class CaseIt : public CaseIteratorT<SwitchInst, ConstantInt,
- SubsetsIt, BasicBlock> {
- typedef CaseIteratorT<SwitchInst, ConstantInt, SubsetsIt, BasicBlock>
- ParentTy;
-
- protected:
- friend class SwitchInst;
- CaseIt(SwitchInst *SI, unsigned CaseNum, SubsetsIt SubsetIt) :
- ParentTy(SI, CaseNum, SubsetIt) {}
-
- void updateCaseValueOperand(IntegersSubset& V) {
- SI->setOperand(2 + Index*2, reinterpret_cast<Value*>((Constant*)V));
- }
-
- public:
-
- CaseIt(SwitchInst *SI, unsigned CaseNum) : ParentTy(SI, CaseNum) {}
-
- CaseIt(const ParentTy& Src) : ParentTy(Src) {}
-
- /// Sets the new value for current case.
- void setValue(ConstantInt *V) {
- assert(Index < SI->getNumCases() && "Index out the number of cases.");
- IntegersSubsetToBB Mapping;
- // FIXME: Currently we work with ConstantInt based cases.
- // So inititalize IntItem container directly from ConstantInt.
- Mapping.add(IntItem::fromConstantInt(V));
- *SubsetIt = Mapping.getCase();
- updateCaseValueOperand(*SubsetIt);
- }
-
- /// Sets the new value for current case.
- void setValueEx(IntegersSubset& V) {
- assert(Index < SI->getNumCases() && "Index out the number of cases.");
- *SubsetIt = V;
- updateCaseValueOperand(*SubsetIt);
- }
-
- /// Sets the new successor for current case.
- void setSuccessor(BasicBlock *S) {
- SI->setSuccessor(getSuccessorIndex(), S);
- }
- };
-
// Methods for support type inquiry through isa, cast, and dyn_cast:
-
static inline bool classof(const Instruction *I) {
return I->getOpcode() == Instruction::Switch;
}
@@ -3034,7 +2912,7 @@ public:
/// removeAttribute - removes the attribute from the list of attributes.
void removeAttribute(unsigned i, Attribute attr);
- /// \brief Determine whether this call has the NoAlias attribute.
+ /// \brief Determine whether this call has the given attribute.
bool hasFnAttr(Attribute::AttrKind A) const {
assert(A != Attribute::NoBuiltin &&
"Use CallInst::isNoBuiltin() to check for Attribute::NoBuiltin");
@@ -3737,6 +3615,43 @@ public:
}
};
+//===----------------------------------------------------------------------===//
+// AddrSpaceCastInst Class
+//===----------------------------------------------------------------------===//
+
+/// \brief This class represents a conversion between pointers from
+/// one address space to another.
+class AddrSpaceCastInst : public CastInst {
+protected:
+ /// \brief Clone an identical AddrSpaceCastInst
+ virtual AddrSpaceCastInst *clone_impl() const;
+
+public:
+ /// \brief Constructor with insert-before-instruction semantics
+ AddrSpaceCastInst(
+ Value *S, ///< The value to be casted
+ Type *Ty, ///< The type to cast to
+ const Twine &NameStr = "", ///< A name for the new instruction
+ Instruction *InsertBefore = 0 ///< Where to insert the new instruction
+ );
+
+ /// \brief Constructor with insert-at-end-of-block semantics
+ AddrSpaceCastInst(
+ Value *S, ///< The value to be casted
+ Type *Ty, ///< The type to cast to
+ const Twine &NameStr, ///< A name for the new instruction
+ BasicBlock *InsertAtEnd ///< The block to insert the instruction into
+ );
+
+ // Methods for support type inquiry through isa, cast, and dyn_cast:
+ static inline bool classof(const Instruction *I) {
+ return I->getOpcode() == AddrSpaceCast;
+ }
+ static inline bool classof(const Value *V) {
+ return isa<Instruction>(V) && classof(cast<Instruction>(V));
+ }
+};
+
} // End llvm namespace
#endif
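
With the IntegersSubset machinery gone, switch cases are plain ConstantInt operands again and CaseIt is a thin index-based iterator. A sketch of rewriting case successors, assuming an existing SwitchInst *SI and blocks OldDest/NewDest:

#include "llvm/IR/Instructions.h"

// Redirects every case that currently targets OldDest to NewDest; the default
// destination is deliberately left alone.
static void retargetCases(llvm::SwitchInst *SI, llvm::BasicBlock *OldDest,
                          llvm::BasicBlock *NewDest) {
  for (llvm::SwitchInst::CaseIt I = SI->case_begin(), E = SI->case_end();
       I != E; ++I)
    if (I.getCaseSuccessor() == OldDest)
      I.setSuccessor(NewDest);
}
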
diff --git a/include/llvm/IR/Intrinsics.h b/include/llvm/IR/Intrinsics.h
index c81d110..473e525 100644
--- a/include/llvm/IR/Intrinsics.h
+++ b/include/llvm/IR/Intrinsics.h
@@ -77,7 +77,7 @@ namespace Intrinsic {
/// getIntrinsicInfoTableEntries.
struct IITDescriptor {
enum IITDescriptorKind {
- Void, MMX, Metadata, Half, Float, Double,
+ Void, VarArg, MMX, Metadata, Half, Float, Double,
Integer, Vector, Pointer, Struct,
Argument, ExtendVecArgument, TruncVecArgument
} Kind;
diff --git a/include/llvm/IR/Intrinsics.td b/include/llvm/IR/Intrinsics.td
index 1a849c4..ded6cc1 100644
--- a/include/llvm/IR/Intrinsics.td
+++ b/include/llvm/IR/Intrinsics.td
@@ -140,6 +140,7 @@ def llvm_v8i1_ty : LLVMType<v8i1>; // 8 x i1
def llvm_v16i1_ty : LLVMType<v16i1>; // 16 x i1
def llvm_v32i1_ty : LLVMType<v32i1>; // 32 x i1
def llvm_v64i1_ty : LLVMType<v64i1>; // 64 x i1
+def llvm_v1i8_ty : LLVMType<v1i8>; // 1 x i8
def llvm_v2i8_ty : LLVMType<v2i8>; // 2 x i8
def llvm_v4i8_ty : LLVMType<v4i8>; // 4 x i8
def llvm_v8i8_ty : LLVMType<v8i8>; // 8 x i8
@@ -165,10 +166,15 @@ def llvm_v4i64_ty : LLVMType<v4i64>; // 4 x i64
def llvm_v8i64_ty : LLVMType<v8i64>; // 8 x i64
def llvm_v16i64_ty : LLVMType<v16i64>; // 16 x i64
+def llvm_v2f16_ty : LLVMType<v2f16>; // 2 x half (__fp16)
+def llvm_v4f16_ty : LLVMType<v4f16>; // 4 x half (__fp16)
+def llvm_v8f16_ty : LLVMType<v8f16>; // 8 x half (__fp16)
+def llvm_v1f32_ty : LLVMType<v1f32>; // 1 x float
def llvm_v2f32_ty : LLVMType<v2f32>; // 2 x float
def llvm_v4f32_ty : LLVMType<v4f32>; // 4 x float
def llvm_v8f32_ty : LLVMType<v8f32>; // 8 x float
def llvm_v16f32_ty : LLVMType<v16f32>; // 16 x float
+def llvm_v1f64_ty : LLVMType<v1f64>; // 1 x double
def llvm_v2f64_ty : LLVMType<v2f64>; // 2 x double
def llvm_v4f64_ty : LLVMType<v4f64>; // 4 x double
def llvm_v8f64_ty : LLVMType<v8f64>; // 8 x double
@@ -258,6 +264,8 @@ def int_readcyclecounter : Intrinsic<[llvm_i64_ty]>;
// Stack Protector Intrinsic - The stackprotector intrinsic writes the stack
// guard to the correct place on the stack frame.
def int_stackprotector : Intrinsic<[], [llvm_ptr_ty, llvm_ptrptr_ty], []>;
+def int_stackprotectorcheck : Intrinsic<[], [llvm_ptrptr_ty],
+ [IntrReadWriteArgMem]>;
//===------------------- Standard C Library Intrinsics --------------------===//
//
@@ -293,11 +301,14 @@ let Properties = [IntrReadMem] in {
def int_exp : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_exp2 : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_fabs : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_copysign : Intrinsic<[llvm_anyfloat_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>]>;
def int_floor : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_ceil : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_trunc : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_rint : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_nearbyint : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
+ def int_round : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
}
let Properties = [IntrNoMem] in {
@@ -317,7 +328,7 @@ def int_sigsetjmp : Intrinsic<[llvm_i32_ty] , [llvm_ptr_ty, llvm_i32_ty]>;
def int_siglongjmp : Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty], [IntrNoReturn]>;
// Internal interface for object size checking
-def int_objectsize : Intrinsic<[llvm_anyint_ty], [llvm_ptr_ty, llvm_i1_ty],
+def int_objectsize : Intrinsic<[llvm_anyint_ty], [llvm_anyptr_ty, llvm_i1_ty],
[IntrNoMem]>,
GCCBuiltin<"__builtin_object_size">;
@@ -444,6 +455,19 @@ def int_invariant_end : Intrinsic<[],
llvm_ptr_ty],
[IntrReadWriteArgMem, NoCapture<2>]>;
+//===------------------------ Stackmap Intrinsics -------------------------===//
+//
+def int_experimental_stackmap : Intrinsic<[],
+ [llvm_i32_ty, llvm_i32_ty, llvm_vararg_ty]>;
+def int_experimental_patchpoint_void : Intrinsic<[],
+ [llvm_i32_ty, llvm_i32_ty,
+ llvm_ptr_ty, llvm_i32_ty,
+ llvm_vararg_ty]>;
+def int_experimental_patchpoint_i64 : Intrinsic<[llvm_i64_ty],
+ [llvm_i32_ty, llvm_i32_ty,
+ llvm_ptr_ty, llvm_i32_ty,
+ llvm_vararg_ty]>;
+
//===-------------------------- Other Intrinsics --------------------------===//
//
def int_flt_rounds : Intrinsic<[llvm_i32_ty]>,
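
The experimental stackmap/patchpoint intrinsics pair naturally with the AnyReg convention added in CallingConv.h. A hedged sketch of emitting one such call through IRBuilder; the ID, shadow-byte count, the generated enum name Intrinsic::experimental_stackmap, and the use of AnyReg here are illustrative assumptions.

#include "llvm/IR/CallingConv.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"

static llvm::CallInst *emitStackmap(llvm::IRBuilder<> &B, llvm::Module *M,
                                    llvm::Value *LiveValue) {
  llvm::Value *Fn = llvm::Intrinsic::getDeclaration(
      M, llvm::Intrinsic::experimental_stackmap);
  llvm::Value *Args[] = {
    B.getInt32(42),  // stackmap ID (arbitrary example value)
    B.getInt32(0),   // number of shadow bytes
    LiveValue        // a value to record as live at this point
  };
  llvm::CallInst *CI = B.CreateCall(Fn, Args);
  CI->setCallingConv(llvm::CallingConv::AnyReg); // assumed typical pairing
  return CI;
}
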
diff --git a/include/llvm/IR/IntrinsicsAArch64.td b/include/llvm/IR/IntrinsicsAArch64.td
index d7b1947..68af8c1 100644
--- a/include/llvm/IR/IntrinsicsAArch64.td
+++ b/include/llvm/IR/IntrinsicsAArch64.td
@@ -17,12 +17,47 @@
let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
// Vector Absolute Compare (Floating Point)
-def int_aarch64_neon_vacgeq : Intrinsic<[llvm_v2i64_ty],
- [llvm_v2f64_ty, llvm_v2f64_ty],
- [IntrNoMem]>;
-def int_aarch64_neon_vacgtq : Intrinsic<[llvm_v2i64_ty],
- [llvm_v2f64_ty, llvm_v2f64_ty],
- [IntrNoMem]>;
+def int_aarch64_neon_vacgeq :
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+def int_aarch64_neon_vacgtq :
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+// Vector saturating accumulate
+def int_aarch64_neon_suqadd : Neon_2Arg_Intrinsic;
+def int_aarch64_neon_usqadd : Neon_2Arg_Intrinsic;
+
+// Vector Bitwise reverse
+def int_aarch64_neon_rbit : Neon_1Arg_Intrinsic;
+
+// Vector extract and narrow
+def int_aarch64_neon_xtn :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+
+// Vector floating-point convert
+def int_aarch64_neon_frintn : Neon_1Arg_Intrinsic;
+def int_aarch64_neon_fsqrt : Neon_1Arg_Intrinsic;
+def int_aarch64_neon_fcvtxn :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtns :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtnu :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtps :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtpu :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtms :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtmu :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtas :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtau :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtzs :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+def int_aarch64_neon_fcvtzu :
+ Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
// Vector maxNum (Floating Point)
def int_aarch64_neon_vmaxnm : Neon_2Arg_Intrinsic;
@@ -36,6 +71,318 @@ def int_aarch64_neon_vpmaxnm : Neon_2Arg_Intrinsic;
// Vector Pairwise minNum (Floating Point)
def int_aarch64_neon_vpminnm : Neon_2Arg_Intrinsic;
-// Vector Multiply Extended (Floating Point)
-def int_aarch64_neon_vmulx : Neon_2Arg_Intrinsic;
+// Vector Multiply Extended and Scalar Multiply Extended (Floating Point)
+def int_aarch64_neon_vmulx :
+ Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, LLVMMatchType<0>]>;
+
+class Neon_N2V_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [LLVMMatchType<0>, llvm_i32_ty],
+ [IntrNoMem]>;
+class Neon_N3V_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, llvm_i32_ty],
+ [IntrNoMem]>;
+class Neon_N2V_Narrow_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMExtendedElementVectorType<0>, llvm_i32_ty],
+ [IntrNoMem]>;
+
+// Vector rounding shift right by immediate (Signed)
+def int_aarch64_neon_vsrshr : Neon_N2V_Intrinsic;
+def int_aarch64_neon_vurshr : Neon_N2V_Intrinsic;
+def int_aarch64_neon_vsqshlu : Neon_N2V_Intrinsic;
+
+def int_aarch64_neon_vsri : Neon_N3V_Intrinsic;
+def int_aarch64_neon_vsli : Neon_N3V_Intrinsic;
+
+def int_aarch64_neon_vsqshrun : Neon_N2V_Narrow_Intrinsic;
+def int_aarch64_neon_vrshrn : Neon_N2V_Narrow_Intrinsic;
+def int_aarch64_neon_vsqrshrun : Neon_N2V_Narrow_Intrinsic;
+def int_aarch64_neon_vsqshrn : Neon_N2V_Narrow_Intrinsic;
+def int_aarch64_neon_vuqshrn : Neon_N2V_Narrow_Intrinsic;
+def int_aarch64_neon_vsqrshrn : Neon_N2V_Narrow_Intrinsic;
+def int_aarch64_neon_vuqrshrn : Neon_N2V_Narrow_Intrinsic;
+
+// Vector across
+class Neon_Across_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
+
+class Neon_2Arg_Across_Float_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+def int_aarch64_neon_saddlv : Neon_Across_Intrinsic;
+def int_aarch64_neon_uaddlv : Neon_Across_Intrinsic;
+def int_aarch64_neon_smaxv : Neon_Across_Intrinsic;
+def int_aarch64_neon_umaxv : Neon_Across_Intrinsic;
+def int_aarch64_neon_sminv : Neon_Across_Intrinsic;
+def int_aarch64_neon_uminv : Neon_Across_Intrinsic;
+def int_aarch64_neon_vaddv : Neon_Across_Intrinsic;
+def int_aarch64_neon_vmaxv : Neon_Across_Intrinsic;
+def int_aarch64_neon_vminv : Neon_Across_Intrinsic;
+def int_aarch64_neon_vmaxnmv : Neon_Across_Intrinsic;
+def int_aarch64_neon_vminnmv : Neon_Across_Intrinsic;
+
+// Vector Table Lookup.
+def int_aarch64_neon_vtbl1 :
+ Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyvector_ty, LLVMMatchType<0>], [IntrNoMem]>;
+
+def int_aarch64_neon_vtbl2 :
+ Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+
+def int_aarch64_neon_vtbl3 :
+ Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>,
+ LLVMMatchType<0>], [IntrNoMem]>;
+
+def int_aarch64_neon_vtbl4 :
+ Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>,
+ LLVMMatchType<1>, LLVMMatchType<0>], [IntrNoMem]>;
+
+// Vector Table Extension.
+// Some elements of the destination vector may not be updated, so the original
+// value of that vector is passed as the first argument. The next 1-4
+// arguments after that are the table.
+def int_aarch64_neon_vtbx1 :
+ Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<0>],
+ [IntrNoMem]>;
+
+def int_aarch64_neon_vtbx2 :
+ Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>,
+ LLVMMatchType<0>], [IntrNoMem]>;
+
+def int_aarch64_neon_vtbx3 :
+ Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>,
+ LLVMMatchType<1>, LLVMMatchType<0>], [IntrNoMem]>;
+
+def int_aarch64_neon_vtbx4 :
+ Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty, LLVMMatchType<1>,
+ LLVMMatchType<1>, LLVMMatchType<1>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+
+// Vector Load/store
+def int_aarch64_neon_vld1x2 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>],
+ [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+def int_aarch64_neon_vld1x3 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>],
+ [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+def int_aarch64_neon_vld1x4 : Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>,
+ LLVMMatchType<0>, LLVMMatchType<0>],
+ [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+
+def int_aarch64_neon_vst1x2 : Intrinsic<[],
+ [llvm_ptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<0>, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+def int_aarch64_neon_vst1x3 : Intrinsic<[],
+ [llvm_ptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ llvm_i32_ty], [IntrReadWriteArgMem]>;
+def int_aarch64_neon_vst1x4 : Intrinsic<[],
+ [llvm_ptr_ty, llvm_anyvector_ty,
+ LLVMMatchType<0>, LLVMMatchType<0>,
+ LLVMMatchType<0>, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+
+// Scalar Add
+def int_aarch64_neon_vaddds :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+def int_aarch64_neon_vadddu :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+
+
+// Scalar Sub
+def int_aarch64_neon_vsubds :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+def int_aarch64_neon_vsubdu :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+
+
+// Scalar Shift
+// Scalar Shift Left
+def int_aarch64_neon_vshlds :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+def int_aarch64_neon_vshldu :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+
+// Scalar Saturating Shift Left
+def int_aarch64_neon_vqshls : Neon_2Arg_Intrinsic;
+def int_aarch64_neon_vqshlu : Neon_2Arg_Intrinsic;
+
+// Scalar Shift Rounding Left
+def int_aarch64_neon_vrshlds :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+def int_aarch64_neon_vrshldu :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty], [IntrNoMem]>;
+
+// Scalar Saturating Rounding Shift Left
+def int_aarch64_neon_vqrshls : Neon_2Arg_Intrinsic;
+def int_aarch64_neon_vqrshlu : Neon_2Arg_Intrinsic;
+
+// Scalar Reduce Pairwise Add.
+def int_aarch64_neon_vpadd :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v2i64_ty],[IntrNoMem]>;
+def int_aarch64_neon_vpfadd :
+ Intrinsic<[llvm_v1f32_ty], [llvm_v2f32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vpfaddq :
+ Intrinsic<[llvm_v1f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+// Scalar Reduce Pairwise Floating Point Max/Min.
+def int_aarch64_neon_vpmax :
+ Intrinsic<[llvm_v1f32_ty], [llvm_v2f32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vpmaxq :
+ Intrinsic<[llvm_v1f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+def int_aarch64_neon_vpmin :
+ Intrinsic<[llvm_v1f32_ty], [llvm_v2f32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vpminq :
+ Intrinsic<[llvm_v1f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+// Scalar Reduce Pairwise Floating Point Maxnm/Minnm.
+def int_aarch64_neon_vpfmaxnm :
+ Intrinsic<[llvm_v1f32_ty], [llvm_v2f32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vpfmaxnmq :
+ Intrinsic<[llvm_v1f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+def int_aarch64_neon_vpfminnm :
+ Intrinsic<[llvm_v1f32_ty], [llvm_v2f32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vpfminnmq :
+ Intrinsic<[llvm_v1f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+// Scalar Signed Integer Convert To Floating-point
+def int_aarch64_neon_vcvtf32_s32 :
+ Intrinsic<[llvm_float_ty], [llvm_v1i32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vcvtf64_s64 :
+ Intrinsic<[llvm_double_ty], [llvm_v1i64_ty], [IntrNoMem]>;
+
+// Scalar Unsigned Integer Convert To Floating-point
+def int_aarch64_neon_vcvtf32_u32 :
+ Intrinsic<[llvm_float_ty], [llvm_v1i32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vcvtf64_u64 :
+ Intrinsic<[llvm_double_ty], [llvm_v1i64_ty], [IntrNoMem]>;
+
+// Scalar Floating-point Reciprocal Exponent
+def int_aarch64_neon_vrecpx : Neon_1Arg_Intrinsic;
+
+class Neon_Cmp_Intrinsic
+ : Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty, llvm_anyvector_ty],
+ [IntrNoMem]>;
+
+// Scalar Compare Equal
+def int_aarch64_neon_vceq : Neon_Cmp_Intrinsic;
+
+// Scalar Compare Greater-Than or Equal
+def int_aarch64_neon_vcge : Neon_Cmp_Intrinsic;
+def int_aarch64_neon_vchs : Neon_Cmp_Intrinsic;
+
+// Scalar Compare Less-Than or Equal
+def int_aarch64_neon_vclez : Neon_Cmp_Intrinsic;
+
+// Scalar Compare Less-Than
+def int_aarch64_neon_vcltz : Neon_Cmp_Intrinsic;
+
+// Scalar Compare Greater-Than
+def int_aarch64_neon_vcgt : Neon_Cmp_Intrinsic;
+def int_aarch64_neon_vchi : Neon_Cmp_Intrinsic;
+
+// Scalar Compare Bitwise Test Bits
+def int_aarch64_neon_vtstd : Neon_Cmp_Intrinsic;
+
+// Scalar Floating-point Absolute Compare Greater Than Or Equal
+def int_aarch64_neon_vcage : Neon_Cmp_Intrinsic;
+
+// Scalar Floating-point Absolute Compare Greater Than
+def int_aarch64_neon_vcagt : Neon_Cmp_Intrinsic;
+
+// Scalar Signed Saturating Accumulated of Unsigned Value
+def int_aarch64_neon_vuqadd : Neon_2Arg_Intrinsic;
+
+// Scalar Unsigned Saturating Accumulated of Signed Value
+def int_aarch64_neon_vsqadd : Neon_2Arg_Intrinsic;
+
+// Scalar Absolute Value
+def int_aarch64_neon_vabs :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty], [IntrNoMem]>;
+
+// Scalar Absolute Difference
+def int_aarch64_neon_vabd : Neon_2Arg_Intrinsic;
+
+// Scalar Negate Value
+def int_aarch64_neon_vneg :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty], [IntrNoMem]>;
+
+// Signed Saturating Doubling Multiply-Add Long
+def int_aarch64_neon_vqdmlal : Neon_3Arg_Long_Intrinsic;
+
+// Signed Saturating Doubling Multiply-Subtract Long
+def int_aarch64_neon_vqdmlsl : Neon_3Arg_Long_Intrinsic;
+
+class Neon_2Arg_ShiftImm_Intrinsic
+ : Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+class Neon_3Arg_ShiftImm_Intrinsic
+ : Intrinsic<[llvm_v1i64_ty], [llvm_v1i64_ty, llvm_v1i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+// Scalar Shift Right (Immediate)
+def int_aarch64_neon_vshrds_n : Neon_2Arg_ShiftImm_Intrinsic;
+def int_aarch64_neon_vshrdu_n : Neon_2Arg_ShiftImm_Intrinsic;
+
+// Scalar Shift Right and Accumulate (Immediate)
+def int_aarch64_neon_vsrads_n : Neon_3Arg_ShiftImm_Intrinsic;
+def int_aarch64_neon_vsradu_n : Neon_3Arg_ShiftImm_Intrinsic;
+
+// Scalar Rounding Shift Right and Accumulate (Immediate)
+def int_aarch64_neon_vrsrads_n : Neon_3Arg_ShiftImm_Intrinsic;
+def int_aarch64_neon_vrsradu_n : Neon_3Arg_ShiftImm_Intrinsic;
+
+// Scalar Shift Left (Immediate)
+def int_aarch64_neon_vshld_n : Neon_2Arg_ShiftImm_Intrinsic;
+
+// Scalar Saturating Shift Left (Immediate)
+def int_aarch64_neon_vqshls_n : Neon_N2V_Intrinsic;
+def int_aarch64_neon_vqshlu_n : Neon_N2V_Intrinsic;
+
+// Scalar Signed Saturating Shift Left Unsigned (Immediate)
+def int_aarch64_neon_vqshlus_n : Neon_N2V_Intrinsic;
+
+// Scalar Signed Fixed-point Convert To Floating-Point (Immediate)
+def int_aarch64_neon_vcvtf32_n_s32 :
+ Intrinsic<[llvm_float_ty], [llvm_v1i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vcvtf64_n_s64 :
+ Intrinsic<[llvm_double_ty], [llvm_v1i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+// Scalar Unsigned Fixed-point Convert To Floating-Point (Immediate)
+def int_aarch64_neon_vcvtf32_n_u32 :
+ Intrinsic<[llvm_float_ty], [llvm_v1i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vcvtf64_n_u64 :
+ Intrinsic<[llvm_double_ty], [llvm_v1i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+// Scalar Floating-point Convert To Signed Fixed-point (Immediate)
+def int_aarch64_neon_vcvts_n_s32_f32 :
+ Intrinsic<[llvm_v1i32_ty], [llvm_v1f32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vcvtd_n_s64_f64 :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1f64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+// Scalar Floating-point Convert To Unsigned Fixed-point (Immediate)
+def int_aarch64_neon_vcvts_n_u32_f32 :
+ Intrinsic<[llvm_v1i32_ty], [llvm_v1f32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_aarch64_neon_vcvtd_n_u64_f64 :
+ Intrinsic<[llvm_v1i64_ty], [llvm_v1f64_ty, llvm_i32_ty], [IntrNoMem]>;
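
The trailing llvm_i32_ty operand of these fixed-point conversion intrinsics is the fraction-bits immediate implied by the "_n" suffix. As a rough hand-written sketch (not part of the patch itself, and assuming the usual mangling of dropping the "int_" prefix and turning underscores into dots), a call to one of the non-overloaded variants would look like this at the IR level:

    declare float @llvm.aarch64.neon.vcvtf32.n.s32(<1 x i32>, i32)

    define float @fixed_to_float(<1 x i32> %v) {
      ; convert a fixed-point value with 24 fractional bits to float;
      ; the fraction-bits operand is expected to be a constant immediate
      %f = call float @llvm.aarch64.neon.vcvtf32.n.s32(<1 x i32> %v, i32 24)
      ret float %f
    }
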
+
+class Neon_SHA_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v1i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_aarch64_neon_sha1c : Neon_SHA_Intrinsic;
+def int_aarch64_neon_sha1m : Neon_SHA_Intrinsic;
+def int_aarch64_neon_sha1p : Neon_SHA_Intrinsic;
}
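
The three-operand SHA-1 intrinsics defined just above take roughly the current 128-bit hash state, the E value as a one-element vector, and a message-schedule/constant word. A minimal IR-level usage sketch (illustrative only, with hypothetical value names and the standard int_-to-llvm. name mangling assumed):

    declare <4 x i32> @llvm.aarch64.neon.sha1c(<4 x i32>, <1 x i32>, <4 x i32>)

    define <4 x i32> @sha1_round(<4 x i32> %abcd, <1 x i32> %e, <4 x i32> %wk) {
      ; one SHA1C hash-update step on the 128-bit state
      %r = call <4 x i32> @llvm.aarch64.neon.sha1c(<4 x i32> %abcd, <1 x i32> %e,
                                                   <4 x i32> %wk)
      ret <4 x i32> %r
    }
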
diff --git a/include/llvm/IR/IntrinsicsARM.td b/include/llvm/IR/IntrinsicsARM.td
index 3c5d5ff..0b50d64 100644
--- a/include/llvm/IR/IntrinsicsARM.td
+++ b/include/llvm/IR/IntrinsicsARM.td
@@ -45,6 +45,11 @@ def int_arm_strexd : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty,
def int_arm_ldrexd : Intrinsic<[llvm_i32_ty, llvm_i32_ty], [llvm_ptr_ty]>;
//===----------------------------------------------------------------------===//
+// Data barrier instructions
+def int_arm_dmb : GCCBuiltin<"__builtin_arm_dmb">, Intrinsic<[], [llvm_i32_ty]>;
+def int_arm_dsb : GCCBuiltin<"__builtin_arm_dsb">, Intrinsic<[], [llvm_i32_ty]>;
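
Both barrier intrinsics take the standard ARM barrier-option immediate as their only operand. A hand-written usage sketch (option value 15 corresponds to the full-system SY barrier):

    declare void @llvm.arm.dmb(i32)
    declare void @llvm.arm.dsb(i32)

    define void @full_barrier() {
      ; issue a full-system data memory barrier (DMB SY)
      call void @llvm.arm.dmb(i32 15)
      ret void
    }
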
+
+//===----------------------------------------------------------------------===//
// VFP
def int_arm_get_fpscr : GCCBuiltin<"__builtin_arm_get_fpscr">,
@@ -92,6 +97,26 @@ def int_arm_mcrr2 : GCCBuiltin<"__builtin_arm_mcrr2">,
llvm_i32_ty, llvm_i32_ty], []>;
//===----------------------------------------------------------------------===//
+// CRC32
+
+def int_arm_crc32b : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32cb : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32h : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32ch : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32w : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_arm_crc32cw : Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
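
Each CRC32 intrinsic folds one unit of data (byte, halfword or word) into a running i32 checksum, with the 'c' variants using the Castagnoli polynomial. An illustrative IR sketch, not taken from the patch; the operand order shown as (accumulator, data) follows the usual CRC32 instruction form:

    declare i32 @llvm.arm.crc32b(i32, i32)

    define i32 @crc_step(i32 %crc, i32 %byte) {
      ; fold one byte of input into the running CRC-32 accumulator
      %r = call i32 @llvm.arm.crc32b(i32 %crc, i32 %byte)
      ret i32 %r
    }
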
+
+//===----------------------------------------------------------------------===//
+// HINT
+def int_arm_sevl : Intrinsic<[], []>;
+
+//===----------------------------------------------------------------------===//
// Advanced SIMD (NEON)
// The following classes do not correspond directly to GCC builtins.
@@ -163,7 +188,6 @@ let Properties = [IntrNoMem, Commutative] in {
def int_arm_neon_vrhaddu : Neon_2Arg_Intrinsic;
def int_arm_neon_vqadds : Neon_2Arg_Intrinsic;
def int_arm_neon_vqaddu : Neon_2Arg_Intrinsic;
- def int_arm_neon_vaddhn : Neon_2Arg_Narrow_Intrinsic;
def int_arm_neon_vraddhn : Neon_2Arg_Narrow_Intrinsic;
// Vector Multiply.
@@ -175,10 +199,6 @@ let Properties = [IntrNoMem, Commutative] in {
def int_arm_neon_vmullp : Neon_2Arg_Long_Intrinsic;
def int_arm_neon_vqdmull : Neon_2Arg_Long_Intrinsic;
- // Vector Multiply and Accumulate/Subtract.
- def int_arm_neon_vqdmlal : Neon_3Arg_Long_Intrinsic;
- def int_arm_neon_vqdmlsl : Neon_3Arg_Long_Intrinsic;
-
// Vector Maximum.
def int_arm_neon_vmaxs : Neon_2Arg_Intrinsic;
def int_arm_neon_vmaxu : Neon_2Arg_Intrinsic;
@@ -201,7 +221,6 @@ def int_arm_neon_vhsubs : Neon_2Arg_Intrinsic;
def int_arm_neon_vhsubu : Neon_2Arg_Intrinsic;
def int_arm_neon_vqsubs : Neon_2Arg_Intrinsic;
def int_arm_neon_vqsubu : Neon_2Arg_Intrinsic;
-def int_arm_neon_vsubhn : Neon_2Arg_Narrow_Intrinsic;
def int_arm_neon_vrsubhn : Neon_2Arg_Narrow_Intrinsic;
// Vector Absolute Compare.
@@ -451,4 +470,21 @@ def int_arm_neon_vbsl : Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
[IntrNoMem]>;
+
+// Crypto instructions
+def int_arm_neon_aesd : Neon_2Arg_Intrinsic;
+def int_arm_neon_aese : Neon_2Arg_Intrinsic;
+def int_arm_neon_aesimc : Neon_1Arg_Intrinsic;
+def int_arm_neon_aesmc : Neon_1Arg_Intrinsic;
+def int_arm_neon_sha1h : Neon_1Arg_Intrinsic;
+def int_arm_neon_sha1su1 : Neon_2Arg_Intrinsic;
+def int_arm_neon_sha256su0 : Neon_2Arg_Intrinsic;
+def int_arm_neon_sha1c : Neon_3Arg_Intrinsic;
+def int_arm_neon_sha1m : Neon_3Arg_Intrinsic;
+def int_arm_neon_sha1p : Neon_3Arg_Intrinsic;
+def int_arm_neon_sha1su0 : Neon_3Arg_Intrinsic;
+def int_arm_neon_sha256h : Neon_3Arg_Intrinsic;
+def int_arm_neon_sha256h2 : Neon_3Arg_Intrinsic;
+def int_arm_neon_sha256su1 : Neon_3Arg_Intrinsic;
+
} // end TargetPrefix
diff --git a/include/llvm/IR/IntrinsicsMips.td b/include/llvm/IR/IntrinsicsMips.td
index a0987c8..42c5821 100644
--- a/include/llvm/IR/IntrinsicsMips.td
+++ b/include/llvm/IR/IntrinsicsMips.td
@@ -386,4 +386,1372 @@ def int_mips_subuh_qb: GCCBuiltin<"__builtin_mips_subuh_qb">,
Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [IntrNoMem]>;
def int_mips_subuh_r_qb: GCCBuiltin<"__builtin_mips_subuh_r_qb">,
Intrinsic<[llvm_v4i8_ty], [llvm_v4i8_ty, llvm_v4i8_ty], [IntrNoMem]>;
+
+//===----------------------------------------------------------------------===//
+// MIPS MSA
+
+//===----------------------------------------------------------------------===//
+// Addition/subtraction
+
+def int_mips_add_a_b : GCCBuiltin<"__builtin_msa_add_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_add_a_h : GCCBuiltin<"__builtin_msa_add_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_add_a_w : GCCBuiltin<"__builtin_msa_add_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_add_a_d : GCCBuiltin<"__builtin_msa_add_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_adds_a_b : GCCBuiltin<"__builtin_msa_adds_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_a_h : GCCBuiltin<"__builtin_msa_adds_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_a_w : GCCBuiltin<"__builtin_msa_adds_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_a_d : GCCBuiltin<"__builtin_msa_adds_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_adds_s_b : GCCBuiltin<"__builtin_msa_adds_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_s_h : GCCBuiltin<"__builtin_msa_adds_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_s_w : GCCBuiltin<"__builtin_msa_adds_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_s_d : GCCBuiltin<"__builtin_msa_adds_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_adds_u_b : GCCBuiltin<"__builtin_msa_adds_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_u_h : GCCBuiltin<"__builtin_msa_adds_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_u_w : GCCBuiltin<"__builtin_msa_adds_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_adds_u_d : GCCBuiltin<"__builtin_msa_adds_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_addv_b : GCCBuiltin<"__builtin_msa_addv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addv_h : GCCBuiltin<"__builtin_msa_addv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addv_w : GCCBuiltin<"__builtin_msa_addv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addv_d : GCCBuiltin<"__builtin_msa_addv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_addvi_b : GCCBuiltin<"__builtin_msa_addvi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addvi_h : GCCBuiltin<"__builtin_msa_addvi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addvi_w : GCCBuiltin<"__builtin_msa_addvi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_addvi_d : GCCBuiltin<"__builtin_msa_addvi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_and_v : GCCBuiltin<"__builtin_msa_and_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_andi_b : GCCBuiltin<"__builtin_msa_andi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_asub_s_b : GCCBuiltin<"__builtin_msa_asub_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_asub_s_h : GCCBuiltin<"__builtin_msa_asub_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_asub_s_w : GCCBuiltin<"__builtin_msa_asub_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_asub_s_d : GCCBuiltin<"__builtin_msa_asub_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_asub_u_b : GCCBuiltin<"__builtin_msa_asub_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_asub_u_h : GCCBuiltin<"__builtin_msa_asub_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_asub_u_w : GCCBuiltin<"__builtin_msa_asub_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_asub_u_d : GCCBuiltin<"__builtin_msa_asub_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ave_s_b : GCCBuiltin<"__builtin_msa_ave_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_s_h : GCCBuiltin<"__builtin_msa_ave_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_s_w : GCCBuiltin<"__builtin_msa_ave_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_s_d : GCCBuiltin<"__builtin_msa_ave_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_ave_u_b : GCCBuiltin<"__builtin_msa_ave_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_u_h : GCCBuiltin<"__builtin_msa_ave_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_u_w : GCCBuiltin<"__builtin_msa_ave_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_ave_u_d : GCCBuiltin<"__builtin_msa_ave_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_aver_s_b : GCCBuiltin<"__builtin_msa_aver_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_s_h : GCCBuiltin<"__builtin_msa_aver_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_s_w : GCCBuiltin<"__builtin_msa_aver_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_s_d : GCCBuiltin<"__builtin_msa_aver_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_aver_u_b : GCCBuiltin<"__builtin_msa_aver_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_u_h : GCCBuiltin<"__builtin_msa_aver_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_u_w : GCCBuiltin<"__builtin_msa_aver_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty],
+ [Commutative, IntrNoMem]>;
+def int_mips_aver_u_d : GCCBuiltin<"__builtin_msa_aver_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [Commutative, IntrNoMem]>;
+
+def int_mips_bclr_b : GCCBuiltin<"__builtin_msa_bclr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bclr_h : GCCBuiltin<"__builtin_msa_bclr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bclr_w : GCCBuiltin<"__builtin_msa_bclr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bclr_d : GCCBuiltin<"__builtin_msa_bclr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bclri_b : GCCBuiltin<"__builtin_msa_bclri_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bclri_h : GCCBuiltin<"__builtin_msa_bclri_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bclri_w : GCCBuiltin<"__builtin_msa_bclri_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bclri_d : GCCBuiltin<"__builtin_msa_bclri_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_binsl_b : GCCBuiltin<"__builtin_msa_binsl_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_binsl_h : GCCBuiltin<"__builtin_msa_binsl_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_binsl_w : GCCBuiltin<"__builtin_msa_binsl_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsl_d : GCCBuiltin<"__builtin_msa_binsl_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_binsli_b : GCCBuiltin<"__builtin_msa_binsli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsli_h : GCCBuiltin<"__builtin_msa_binsli_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsli_w : GCCBuiltin<"__builtin_msa_binsli_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsli_d : GCCBuiltin<"__builtin_msa_binsli_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_binsr_b : GCCBuiltin<"__builtin_msa_binsr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_binsr_h : GCCBuiltin<"__builtin_msa_binsr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_binsr_w : GCCBuiltin<"__builtin_msa_binsr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsr_d : GCCBuiltin<"__builtin_msa_binsr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_binsri_b : GCCBuiltin<"__builtin_msa_binsri_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsri_h : GCCBuiltin<"__builtin_msa_binsri_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsri_w : GCCBuiltin<"__builtin_msa_binsri_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_binsri_d : GCCBuiltin<"__builtin_msa_binsri_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmnz_v : GCCBuiltin<"__builtin_msa_bmnz_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmnzi_b : GCCBuiltin<"__builtin_msa_bmnzi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmz_v : GCCBuiltin<"__builtin_msa_bmz_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+def int_mips_bmzi_b : GCCBuiltin<"__builtin_msa_bmzi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bneg_b : GCCBuiltin<"__builtin_msa_bneg_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bneg_h : GCCBuiltin<"__builtin_msa_bneg_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bneg_w : GCCBuiltin<"__builtin_msa_bneg_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bneg_d : GCCBuiltin<"__builtin_msa_bneg_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bnegi_b : GCCBuiltin<"__builtin_msa_bnegi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bnegi_h : GCCBuiltin<"__builtin_msa_bnegi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bnegi_w : GCCBuiltin<"__builtin_msa_bnegi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bnegi_d : GCCBuiltin<"__builtin_msa_bnegi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_bnz_b : GCCBuiltin<"__builtin_msa_bnz_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bnz_h : GCCBuiltin<"__builtin_msa_bnz_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bnz_w : GCCBuiltin<"__builtin_msa_bnz_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bnz_d : GCCBuiltin<"__builtin_msa_bnz_d">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bnz_v : GCCBuiltin<"__builtin_msa_bnz_v">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_bsel_v : GCCBuiltin<"__builtin_msa_bsel_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+
+def int_mips_bseli_b : GCCBuiltin<"__builtin_msa_bseli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_bset_b : GCCBuiltin<"__builtin_msa_bset_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bset_h : GCCBuiltin<"__builtin_msa_bset_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bset_w : GCCBuiltin<"__builtin_msa_bset_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bset_d : GCCBuiltin<"__builtin_msa_bset_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bseti_b : GCCBuiltin<"__builtin_msa_bseti_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bseti_h : GCCBuiltin<"__builtin_msa_bseti_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bseti_w : GCCBuiltin<"__builtin_msa_bseti_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_bseti_d : GCCBuiltin<"__builtin_msa_bseti_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_bz_b : GCCBuiltin<"__builtin_msa_bz_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_bz_h : GCCBuiltin<"__builtin_msa_bz_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_bz_w : GCCBuiltin<"__builtin_msa_bz_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_bz_d : GCCBuiltin<"__builtin_msa_bz_d">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_bz_v : GCCBuiltin<"__builtin_msa_bz_v">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_ceq_b : GCCBuiltin<"__builtin_msa_ceq_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ceq_h : GCCBuiltin<"__builtin_msa_ceq_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ceq_w : GCCBuiltin<"__builtin_msa_ceq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ceq_d : GCCBuiltin<"__builtin_msa_ceq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ceqi_b : GCCBuiltin<"__builtin_msa_ceqi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ceqi_h : GCCBuiltin<"__builtin_msa_ceqi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ceqi_w : GCCBuiltin<"__builtin_msa_ceqi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ceqi_d : GCCBuiltin<"__builtin_msa_ceqi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_cfcmsa : GCCBuiltin<"__builtin_msa_cfcmsa">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty], []>;
+
+def int_mips_cle_s_b : GCCBuiltin<"__builtin_msa_cle_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_cle_s_h : GCCBuiltin<"__builtin_msa_cle_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_cle_s_w : GCCBuiltin<"__builtin_msa_cle_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_cle_s_d : GCCBuiltin<"__builtin_msa_cle_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_cle_u_b : GCCBuiltin<"__builtin_msa_cle_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_cle_u_h : GCCBuiltin<"__builtin_msa_cle_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_cle_u_w : GCCBuiltin<"__builtin_msa_cle_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_cle_u_d : GCCBuiltin<"__builtin_msa_cle_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_clei_s_b : GCCBuiltin<"__builtin_msa_clei_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_s_h : GCCBuiltin<"__builtin_msa_clei_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_s_w : GCCBuiltin<"__builtin_msa_clei_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_s_d : GCCBuiltin<"__builtin_msa_clei_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_clei_u_b : GCCBuiltin<"__builtin_msa_clei_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_u_h : GCCBuiltin<"__builtin_msa_clei_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_u_w : GCCBuiltin<"__builtin_msa_clei_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clei_u_d : GCCBuiltin<"__builtin_msa_clei_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_clt_s_b : GCCBuiltin<"__builtin_msa_clt_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_clt_s_h : GCCBuiltin<"__builtin_msa_clt_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_clt_s_w : GCCBuiltin<"__builtin_msa_clt_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_clt_s_d : GCCBuiltin<"__builtin_msa_clt_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_clt_u_b : GCCBuiltin<"__builtin_msa_clt_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_clt_u_h : GCCBuiltin<"__builtin_msa_clt_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_clt_u_w : GCCBuiltin<"__builtin_msa_clt_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_clt_u_d : GCCBuiltin<"__builtin_msa_clt_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_clti_s_b : GCCBuiltin<"__builtin_msa_clti_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_s_h : GCCBuiltin<"__builtin_msa_clti_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_s_w : GCCBuiltin<"__builtin_msa_clti_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_s_d : GCCBuiltin<"__builtin_msa_clti_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_clti_u_b : GCCBuiltin<"__builtin_msa_clti_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_u_h : GCCBuiltin<"__builtin_msa_clti_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_u_w : GCCBuiltin<"__builtin_msa_clti_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_clti_u_d : GCCBuiltin<"__builtin_msa_clti_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_copy_s_b : GCCBuiltin<"__builtin_msa_copy_s_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_s_h : GCCBuiltin<"__builtin_msa_copy_s_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_s_w : GCCBuiltin<"__builtin_msa_copy_s_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_s_d : GCCBuiltin<"__builtin_msa_copy_s_d">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_copy_u_b : GCCBuiltin<"__builtin_msa_copy_u_b">,
+ Intrinsic<[llvm_i32_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_u_h : GCCBuiltin<"__builtin_msa_copy_u_h">,
+ Intrinsic<[llvm_i32_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_u_w : GCCBuiltin<"__builtin_msa_copy_u_w">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_copy_u_d : GCCBuiltin<"__builtin_msa_copy_u_d">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_ctcmsa : GCCBuiltin<"__builtin_msa_ctcmsa">,
+ Intrinsic<[], [llvm_i32_ty, llvm_i32_ty], []>;
+
+def int_mips_div_s_b : GCCBuiltin<"__builtin_msa_div_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_div_s_h : GCCBuiltin<"__builtin_msa_div_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_div_s_w : GCCBuiltin<"__builtin_msa_div_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_div_s_d : GCCBuiltin<"__builtin_msa_div_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_div_u_b : GCCBuiltin<"__builtin_msa_div_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_div_u_h : GCCBuiltin<"__builtin_msa_div_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_div_u_w : GCCBuiltin<"__builtin_msa_div_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_div_u_d : GCCBuiltin<"__builtin_msa_div_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_dotp_s_h : GCCBuiltin<"__builtin_msa_dotp_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_dotp_s_w : GCCBuiltin<"__builtin_msa_dotp_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_dotp_s_d : GCCBuiltin<"__builtin_msa_dotp_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_dotp_u_h : GCCBuiltin<"__builtin_msa_dotp_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_dotp_u_w : GCCBuiltin<"__builtin_msa_dotp_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_dotp_u_d : GCCBuiltin<"__builtin_msa_dotp_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_dpadd_s_h : GCCBuiltin<"__builtin_msa_dpadd_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_s_w : GCCBuiltin<"__builtin_msa_dpadd_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_s_d : GCCBuiltin<"__builtin_msa_dpadd_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_dpadd_u_h : GCCBuiltin<"__builtin_msa_dpadd_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_u_w : GCCBuiltin<"__builtin_msa_dpadd_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpadd_u_d : GCCBuiltin<"__builtin_msa_dpadd_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_dpsub_s_h : GCCBuiltin<"__builtin_msa_dpsub_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_s_w : GCCBuiltin<"__builtin_msa_dpsub_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_s_d : GCCBuiltin<"__builtin_msa_dpsub_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_dpsub_u_h : GCCBuiltin<"__builtin_msa_dpsub_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_u_w : GCCBuiltin<"__builtin_msa_dpsub_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_dpsub_u_d : GCCBuiltin<"__builtin_msa_dpsub_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_fadd_w : GCCBuiltin<"__builtin_msa_fadd_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fadd_d : GCCBuiltin<"__builtin_msa_fadd_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcaf_w : GCCBuiltin<"__builtin_msa_fcaf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcaf_d : GCCBuiltin<"__builtin_msa_fcaf_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fceq_w : GCCBuiltin<"__builtin_msa_fceq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fceq_d : GCCBuiltin<"__builtin_msa_fceq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcle_w : GCCBuiltin<"__builtin_msa_fcle_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcle_d : GCCBuiltin<"__builtin_msa_fcle_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fclt_w : GCCBuiltin<"__builtin_msa_fclt_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fclt_d : GCCBuiltin<"__builtin_msa_fclt_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fclass_w : GCCBuiltin<"__builtin_msa_fclass_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fclass_d : GCCBuiltin<"__builtin_msa_fclass_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcne_w : GCCBuiltin<"__builtin_msa_fcne_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcne_d : GCCBuiltin<"__builtin_msa_fcne_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcor_w : GCCBuiltin<"__builtin_msa_fcor_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcor_d : GCCBuiltin<"__builtin_msa_fcor_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcueq_w : GCCBuiltin<"__builtin_msa_fcueq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcueq_d : GCCBuiltin<"__builtin_msa_fcueq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcule_w : GCCBuiltin<"__builtin_msa_fcule_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcule_d : GCCBuiltin<"__builtin_msa_fcule_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcult_w : GCCBuiltin<"__builtin_msa_fcult_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcult_d : GCCBuiltin<"__builtin_msa_fcult_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcun_w : GCCBuiltin<"__builtin_msa_fcun_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcun_d : GCCBuiltin<"__builtin_msa_fcun_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fcune_w : GCCBuiltin<"__builtin_msa_fcune_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fcune_d : GCCBuiltin<"__builtin_msa_fcune_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fdiv_w : GCCBuiltin<"__builtin_msa_fdiv_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fdiv_d : GCCBuiltin<"__builtin_msa_fdiv_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fexdo_h : GCCBuiltin<"__builtin_msa_fexdo_h">,
+ Intrinsic<[llvm_v8f16_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fexdo_w : GCCBuiltin<"__builtin_msa_fexdo_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fexp2_w : GCCBuiltin<"__builtin_msa_fexp2_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_fexp2_d : GCCBuiltin<"__builtin_msa_fexp2_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_fexupl_w : GCCBuiltin<"__builtin_msa_fexupl_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8f16_ty], [IntrNoMem]>;
+def int_mips_fexupl_d : GCCBuiltin<"__builtin_msa_fexupl_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+def int_mips_fexupr_w : GCCBuiltin<"__builtin_msa_fexupr_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8f16_ty], [IntrNoMem]>;
+def int_mips_fexupr_d : GCCBuiltin<"__builtin_msa_fexupr_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+def int_mips_ffint_s_w : GCCBuiltin<"__builtin_msa_ffint_s_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ffint_s_d : GCCBuiltin<"__builtin_msa_ffint_s_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ffint_u_w : GCCBuiltin<"__builtin_msa_ffint_u_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ffint_u_d : GCCBuiltin<"__builtin_msa_ffint_u_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ffql_w : GCCBuiltin<"__builtin_msa_ffql_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ffql_d : GCCBuiltin<"__builtin_msa_ffql_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_ffqr_w : GCCBuiltin<"__builtin_msa_ffqr_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ffqr_d : GCCBuiltin<"__builtin_msa_ffqr_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_fill_b : GCCBuiltin<"__builtin_msa_fill_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_fill_h : GCCBuiltin<"__builtin_msa_fill_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_fill_w : GCCBuiltin<"__builtin_msa_fill_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_fill_d : GCCBuiltin<"__builtin_msa_fill_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_i64_ty], [IntrNoMem]>;
+
+def int_mips_flog2_w : GCCBuiltin<"__builtin_msa_flog2_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_flog2_d : GCCBuiltin<"__builtin_msa_flog2_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmadd_w : GCCBuiltin<"__builtin_msa_fmadd_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+def int_mips_fmadd_d : GCCBuiltin<"__builtin_msa_fmadd_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty],
+ [IntrNoMem]>;
+
+def int_mips_fmax_w : GCCBuiltin<"__builtin_msa_fmax_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmax_d : GCCBuiltin<"__builtin_msa_fmax_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmax_a_w : GCCBuiltin<"__builtin_msa_fmax_a_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmax_a_d : GCCBuiltin<"__builtin_msa_fmax_a_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmin_w : GCCBuiltin<"__builtin_msa_fmin_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmin_d : GCCBuiltin<"__builtin_msa_fmin_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmin_a_w : GCCBuiltin<"__builtin_msa_fmin_a_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmin_a_d : GCCBuiltin<"__builtin_msa_fmin_a_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fmsub_w : GCCBuiltin<"__builtin_msa_fmsub_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+def int_mips_fmsub_d : GCCBuiltin<"__builtin_msa_fmsub_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty, llvm_v2f64_ty],
+ [IntrNoMem]>;
+
+def int_mips_fmul_w : GCCBuiltin<"__builtin_msa_fmul_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fmul_d : GCCBuiltin<"__builtin_msa_fmul_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_frint_w : GCCBuiltin<"__builtin_msa_frint_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_frint_d : GCCBuiltin<"__builtin_msa_frint_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_frcp_w : GCCBuiltin<"__builtin_msa_frcp_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_frcp_d : GCCBuiltin<"__builtin_msa_frcp_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_frsqrt_w : GCCBuiltin<"__builtin_msa_frsqrt_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_frsqrt_d : GCCBuiltin<"__builtin_msa_frsqrt_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsaf_w : GCCBuiltin<"__builtin_msa_fsaf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsaf_d : GCCBuiltin<"__builtin_msa_fsaf_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fseq_w : GCCBuiltin<"__builtin_msa_fseq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fseq_d : GCCBuiltin<"__builtin_msa_fseq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsle_w : GCCBuiltin<"__builtin_msa_fsle_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsle_d : GCCBuiltin<"__builtin_msa_fsle_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fslt_w : GCCBuiltin<"__builtin_msa_fslt_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fslt_d : GCCBuiltin<"__builtin_msa_fslt_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsne_w : GCCBuiltin<"__builtin_msa_fsne_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsne_d : GCCBuiltin<"__builtin_msa_fsne_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsor_w : GCCBuiltin<"__builtin_msa_fsor_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsor_d : GCCBuiltin<"__builtin_msa_fsor_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsqrt_w : GCCBuiltin<"__builtin_msa_fsqrt_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsqrt_d : GCCBuiltin<"__builtin_msa_fsqrt_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsub_w : GCCBuiltin<"__builtin_msa_fsub_w">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsub_d : GCCBuiltin<"__builtin_msa_fsub_d">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsueq_w : GCCBuiltin<"__builtin_msa_fsueq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsueq_d : GCCBuiltin<"__builtin_msa_fsueq_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsule_w : GCCBuiltin<"__builtin_msa_fsule_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsule_d : GCCBuiltin<"__builtin_msa_fsule_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsult_w : GCCBuiltin<"__builtin_msa_fsult_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsult_d : GCCBuiltin<"__builtin_msa_fsult_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsun_w : GCCBuiltin<"__builtin_msa_fsun_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsun_d : GCCBuiltin<"__builtin_msa_fsun_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_fsune_w : GCCBuiltin<"__builtin_msa_fsune_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_fsune_d : GCCBuiltin<"__builtin_msa_fsune_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftint_s_w : GCCBuiltin<"__builtin_msa_ftint_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftint_s_d : GCCBuiltin<"__builtin_msa_ftint_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftint_u_w : GCCBuiltin<"__builtin_msa_ftint_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftint_u_d : GCCBuiltin<"__builtin_msa_ftint_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftq_h : GCCBuiltin<"__builtin_msa_ftq_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v4f32_ty, llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftq_w : GCCBuiltin<"__builtin_msa_ftq_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v2f64_ty, llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftrunc_s_w : GCCBuiltin<"__builtin_msa_ftrunc_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftrunc_s_d : GCCBuiltin<"__builtin_msa_ftrunc_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_ftrunc_u_w : GCCBuiltin<"__builtin_msa_ftrunc_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+def int_mips_ftrunc_u_d : GCCBuiltin<"__builtin_msa_ftrunc_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+def int_mips_hadd_s_h : GCCBuiltin<"__builtin_msa_hadd_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hadd_s_w : GCCBuiltin<"__builtin_msa_hadd_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hadd_s_d : GCCBuiltin<"__builtin_msa_hadd_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_hadd_u_h : GCCBuiltin<"__builtin_msa_hadd_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hadd_u_w : GCCBuiltin<"__builtin_msa_hadd_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hadd_u_d : GCCBuiltin<"__builtin_msa_hadd_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_hsub_s_h : GCCBuiltin<"__builtin_msa_hsub_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hsub_s_w : GCCBuiltin<"__builtin_msa_hsub_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hsub_s_d : GCCBuiltin<"__builtin_msa_hsub_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_hsub_u_h : GCCBuiltin<"__builtin_msa_hsub_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_hsub_u_w : GCCBuiltin<"__builtin_msa_hsub_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_hsub_u_d : GCCBuiltin<"__builtin_msa_hsub_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_ilvev_b : GCCBuiltin<"__builtin_msa_ilvev_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvev_h : GCCBuiltin<"__builtin_msa_ilvev_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvev_w : GCCBuiltin<"__builtin_msa_ilvev_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvev_d : GCCBuiltin<"__builtin_msa_ilvev_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ilvl_b : GCCBuiltin<"__builtin_msa_ilvl_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvl_h : GCCBuiltin<"__builtin_msa_ilvl_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvl_w : GCCBuiltin<"__builtin_msa_ilvl_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvl_d : GCCBuiltin<"__builtin_msa_ilvl_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ilvod_b : GCCBuiltin<"__builtin_msa_ilvod_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvod_h : GCCBuiltin<"__builtin_msa_ilvod_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvod_w : GCCBuiltin<"__builtin_msa_ilvod_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvod_d : GCCBuiltin<"__builtin_msa_ilvod_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_ilvr_b : GCCBuiltin<"__builtin_msa_ilvr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_ilvr_h : GCCBuiltin<"__builtin_msa_ilvr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_ilvr_w : GCCBuiltin<"__builtin_msa_ilvr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_ilvr_d : GCCBuiltin<"__builtin_msa_ilvr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_insert_b : GCCBuiltin<"__builtin_msa_insert_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_insert_h : GCCBuiltin<"__builtin_msa_insert_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_insert_w : GCCBuiltin<"__builtin_msa_insert_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+def int_mips_insert_d : GCCBuiltin<"__builtin_msa_insert_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_insve_b : GCCBuiltin<"__builtin_msa_insve_b">,
+ Intrinsic<[llvm_v16i8_ty],
+ [llvm_v16i8_ty, llvm_i32_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_insve_h : GCCBuiltin<"__builtin_msa_insve_h">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_i32_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_insve_w : GCCBuiltin<"__builtin_msa_insve_w">,
+ Intrinsic<[llvm_v4i32_ty],
+ [llvm_v4i32_ty, llvm_i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_insve_d : GCCBuiltin<"__builtin_msa_insve_d">,
+ Intrinsic<[llvm_v2i64_ty],
+ [llvm_v2i64_ty, llvm_i32_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_ld_b : GCCBuiltin<"__builtin_msa_ld_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+def int_mips_ld_h : GCCBuiltin<"__builtin_msa_ld_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+def int_mips_ld_w : GCCBuiltin<"__builtin_msa_ld_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+def int_mips_ld_d : GCCBuiltin<"__builtin_msa_ld_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
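
The MSA load intrinsics take a base pointer plus an i32 byte offset and are marked IntrReadArgMem, i.e. they only read memory through their pointer argument. A rough IR-level sketch (the function name and the constant offset are illustrative; in the underlying ld.* instructions the offset is a small immediate):

    declare <16 x i8> @llvm.mips.ld.b(i8*, i32)

    define <16 x i8> @load_row(i8* %base) {
      ; load 16 bytes starting 32 bytes past %base
      %v = call <16 x i8> @llvm.mips.ld.b(i8* %base, i32 32)
      ret <16 x i8> %v
    }
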
+
+def int_mips_ldi_b : GCCBuiltin<"__builtin_msa_ldi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ldi_h : GCCBuiltin<"__builtin_msa_ldi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ldi_w : GCCBuiltin<"__builtin_msa_ldi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+def int_mips_ldi_d : GCCBuiltin<"__builtin_msa_ldi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+// This instruction is part of the MSA spec but it does not share the
+// __builtin_msa prefix because it operates on the GPR registers.
+def int_mips_lsa : GCCBuiltin<"__builtin_mips_lsa">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
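
Because lsa is a GPR-only operation, its signature is three i32 operands producing an i32, unlike the vector intrinsics around it. A hedged sketch of a call in IR (the exact operand order and shift encoding follow the MSA/LSA spec; this only illustrates the all-scalar signature):

    declare i32 @llvm.mips.lsa(i32, i32, i32)

    define i32 @scaled_index(i32 %base, i32 %index) {
      ; roughly base + (index << shift), computed in a single instruction
      %addr = call i32 @llvm.mips.lsa(i32 %base, i32 %index, i32 2)
      ret i32 %addr
    }
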
+
+def int_mips_madd_q_h : GCCBuiltin<"__builtin_msa_madd_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_madd_q_w : GCCBuiltin<"__builtin_msa_madd_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_maddr_q_h : GCCBuiltin<"__builtin_msa_maddr_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_maddr_q_w : GCCBuiltin<"__builtin_msa_maddr_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_maddv_b : GCCBuiltin<"__builtin_msa_maddv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_maddv_h : GCCBuiltin<"__builtin_msa_maddv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_maddv_w : GCCBuiltin<"__builtin_msa_maddv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_maddv_d : GCCBuiltin<"__builtin_msa_maddv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_max_a_b : GCCBuiltin<"__builtin_msa_max_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_max_a_h : GCCBuiltin<"__builtin_msa_max_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_max_a_w : GCCBuiltin<"__builtin_msa_max_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_max_a_d : GCCBuiltin<"__builtin_msa_max_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_max_s_b : GCCBuiltin<"__builtin_msa_max_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_max_s_h : GCCBuiltin<"__builtin_msa_max_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_max_s_w : GCCBuiltin<"__builtin_msa_max_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_max_s_d : GCCBuiltin<"__builtin_msa_max_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_max_u_b : GCCBuiltin<"__builtin_msa_max_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_max_u_h : GCCBuiltin<"__builtin_msa_max_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_max_u_w : GCCBuiltin<"__builtin_msa_max_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_max_u_d : GCCBuiltin<"__builtin_msa_max_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_maxi_s_b : GCCBuiltin<"__builtin_msa_maxi_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_s_h : GCCBuiltin<"__builtin_msa_maxi_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_s_w : GCCBuiltin<"__builtin_msa_maxi_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_s_d : GCCBuiltin<"__builtin_msa_maxi_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_maxi_u_b : GCCBuiltin<"__builtin_msa_maxi_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_u_h : GCCBuiltin<"__builtin_msa_maxi_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_u_w : GCCBuiltin<"__builtin_msa_maxi_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_maxi_u_d : GCCBuiltin<"__builtin_msa_maxi_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_min_a_b : GCCBuiltin<"__builtin_msa_min_a_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_min_a_h : GCCBuiltin<"__builtin_msa_min_a_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_min_a_w : GCCBuiltin<"__builtin_msa_min_a_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_min_a_d : GCCBuiltin<"__builtin_msa_min_a_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_min_s_b : GCCBuiltin<"__builtin_msa_min_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_min_s_h : GCCBuiltin<"__builtin_msa_min_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_min_s_w : GCCBuiltin<"__builtin_msa_min_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_min_s_d : GCCBuiltin<"__builtin_msa_min_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_min_u_b : GCCBuiltin<"__builtin_msa_min_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_min_u_h : GCCBuiltin<"__builtin_msa_min_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_min_u_w : GCCBuiltin<"__builtin_msa_min_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_min_u_d : GCCBuiltin<"__builtin_msa_min_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_mini_s_b : GCCBuiltin<"__builtin_msa_mini_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_s_h : GCCBuiltin<"__builtin_msa_mini_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_s_w : GCCBuiltin<"__builtin_msa_mini_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_s_d : GCCBuiltin<"__builtin_msa_mini_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_mini_u_b : GCCBuiltin<"__builtin_msa_mini_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_u_h : GCCBuiltin<"__builtin_msa_mini_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_u_w : GCCBuiltin<"__builtin_msa_mini_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_mini_u_d : GCCBuiltin<"__builtin_msa_mini_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_mod_s_b : GCCBuiltin<"__builtin_msa_mod_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_mod_s_h : GCCBuiltin<"__builtin_msa_mod_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mod_s_w : GCCBuiltin<"__builtin_msa_mod_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_mod_s_d : GCCBuiltin<"__builtin_msa_mod_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_mod_u_b : GCCBuiltin<"__builtin_msa_mod_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_mod_u_h : GCCBuiltin<"__builtin_msa_mod_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mod_u_w : GCCBuiltin<"__builtin_msa_mod_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_mod_u_d : GCCBuiltin<"__builtin_msa_mod_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_move_v : GCCBuiltin<"__builtin_msa_move_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_msub_q_h : GCCBuiltin<"__builtin_msa_msub_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_msub_q_w : GCCBuiltin<"__builtin_msa_msub_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_msubr_q_h : GCCBuiltin<"__builtin_msa_msubr_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_msubr_q_w : GCCBuiltin<"__builtin_msa_msubr_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+def int_mips_msubv_b : GCCBuiltin<"__builtin_msa_msubv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_msubv_h : GCCBuiltin<"__builtin_msa_msubv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_msubv_w : GCCBuiltin<"__builtin_msa_msubv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_msubv_d : GCCBuiltin<"__builtin_msa_msubv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_mul_q_h : GCCBuiltin<"__builtin_msa_mul_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mul_q_w : GCCBuiltin<"__builtin_msa_mul_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_mulr_q_h : GCCBuiltin<"__builtin_msa_mulr_q_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mulr_q_w : GCCBuiltin<"__builtin_msa_mulr_q_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+
+def int_mips_mulv_b : GCCBuiltin<"__builtin_msa_mulv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_mulv_h : GCCBuiltin<"__builtin_msa_mulv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_mulv_w : GCCBuiltin<"__builtin_msa_mulv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_mulv_d : GCCBuiltin<"__builtin_msa_mulv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_nloc_b : GCCBuiltin<"__builtin_msa_nloc_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_nloc_h : GCCBuiltin<"__builtin_msa_nloc_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_nloc_w : GCCBuiltin<"__builtin_msa_nloc_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_nloc_d : GCCBuiltin<"__builtin_msa_nloc_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_nlzc_b : GCCBuiltin<"__builtin_msa_nlzc_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_nlzc_h : GCCBuiltin<"__builtin_msa_nlzc_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_nlzc_w : GCCBuiltin<"__builtin_msa_nlzc_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_nlzc_d : GCCBuiltin<"__builtin_msa_nlzc_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_nor_v : GCCBuiltin<"__builtin_msa_nor_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_nori_b : GCCBuiltin<"__builtin_msa_nori_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_or_v : GCCBuiltin<"__builtin_msa_or_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_ori_b : GCCBuiltin<"__builtin_msa_ori_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_pckev_b : GCCBuiltin<"__builtin_msa_pckev_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_pckev_h : GCCBuiltin<"__builtin_msa_pckev_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_pckev_w : GCCBuiltin<"__builtin_msa_pckev_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_pckev_d : GCCBuiltin<"__builtin_msa_pckev_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_pckod_b : GCCBuiltin<"__builtin_msa_pckod_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_pckod_h : GCCBuiltin<"__builtin_msa_pckod_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_pckod_w : GCCBuiltin<"__builtin_msa_pckod_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_pckod_d : GCCBuiltin<"__builtin_msa_pckod_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_pcnt_b : GCCBuiltin<"__builtin_msa_pcnt_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_pcnt_h : GCCBuiltin<"__builtin_msa_pcnt_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_pcnt_w : GCCBuiltin<"__builtin_msa_pcnt_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_pcnt_d : GCCBuiltin<"__builtin_msa_pcnt_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_sat_s_b : GCCBuiltin<"__builtin_msa_sat_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_s_h : GCCBuiltin<"__builtin_msa_sat_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_s_w : GCCBuiltin<"__builtin_msa_sat_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_s_d : GCCBuiltin<"__builtin_msa_sat_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sat_u_b : GCCBuiltin<"__builtin_msa_sat_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_u_h : GCCBuiltin<"__builtin_msa_sat_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_u_w : GCCBuiltin<"__builtin_msa_sat_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sat_u_d : GCCBuiltin<"__builtin_msa_sat_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_shf_b : GCCBuiltin<"__builtin_msa_shf_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shf_h : GCCBuiltin<"__builtin_msa_shf_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_shf_w : GCCBuiltin<"__builtin_msa_shf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sld_b : GCCBuiltin<"__builtin_msa_sld_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sld_h : GCCBuiltin<"__builtin_msa_sld_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sld_w : GCCBuiltin<"__builtin_msa_sld_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sld_d : GCCBuiltin<"__builtin_msa_sld_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sldi_b : GCCBuiltin<"__builtin_msa_sldi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sldi_h : GCCBuiltin<"__builtin_msa_sldi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sldi_w : GCCBuiltin<"__builtin_msa_sldi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_sldi_d : GCCBuiltin<"__builtin_msa_sldi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sll_b : GCCBuiltin<"__builtin_msa_sll_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_sll_h : GCCBuiltin<"__builtin_msa_sll_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_sll_w : GCCBuiltin<"__builtin_msa_sll_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_sll_d : GCCBuiltin<"__builtin_msa_sll_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_slli_b : GCCBuiltin<"__builtin_msa_slli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_slli_h : GCCBuiltin<"__builtin_msa_slli_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_slli_w : GCCBuiltin<"__builtin_msa_slli_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_slli_d : GCCBuiltin<"__builtin_msa_slli_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_splat_b : GCCBuiltin<"__builtin_msa_splat_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splat_h : GCCBuiltin<"__builtin_msa_splat_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splat_w : GCCBuiltin<"__builtin_msa_splat_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splat_d : GCCBuiltin<"__builtin_msa_splat_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_splati_b : GCCBuiltin<"__builtin_msa_splati_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splati_h : GCCBuiltin<"__builtin_msa_splati_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splati_w : GCCBuiltin<"__builtin_msa_splati_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_splati_d : GCCBuiltin<"__builtin_msa_splati_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_sra_b : GCCBuiltin<"__builtin_msa_sra_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_sra_h : GCCBuiltin<"__builtin_msa_sra_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_sra_w : GCCBuiltin<"__builtin_msa_sra_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_sra_d : GCCBuiltin<"__builtin_msa_sra_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srai_b : GCCBuiltin<"__builtin_msa_srai_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srai_h : GCCBuiltin<"__builtin_msa_srai_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srai_w : GCCBuiltin<"__builtin_msa_srai_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srai_d : GCCBuiltin<"__builtin_msa_srai_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_srar_b : GCCBuiltin<"__builtin_msa_srar_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_srar_h : GCCBuiltin<"__builtin_msa_srar_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_srar_w : GCCBuiltin<"__builtin_msa_srar_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_srar_d : GCCBuiltin<"__builtin_msa_srar_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srari_b : GCCBuiltin<"__builtin_msa_srari_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srari_h : GCCBuiltin<"__builtin_msa_srari_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srari_w : GCCBuiltin<"__builtin_msa_srari_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srari_d : GCCBuiltin<"__builtin_msa_srari_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_srl_b : GCCBuiltin<"__builtin_msa_srl_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_srl_h : GCCBuiltin<"__builtin_msa_srl_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_srl_w : GCCBuiltin<"__builtin_msa_srl_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_srl_d : GCCBuiltin<"__builtin_msa_srl_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srli_b : GCCBuiltin<"__builtin_msa_srli_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srli_h : GCCBuiltin<"__builtin_msa_srli_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srli_w : GCCBuiltin<"__builtin_msa_srli_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srli_d : GCCBuiltin<"__builtin_msa_srli_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_srlr_b : GCCBuiltin<"__builtin_msa_srlr_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_srlr_h : GCCBuiltin<"__builtin_msa_srlr_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_srlr_w : GCCBuiltin<"__builtin_msa_srlr_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_srlr_d : GCCBuiltin<"__builtin_msa_srlr_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_srlri_b : GCCBuiltin<"__builtin_msa_srlri_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srlri_h : GCCBuiltin<"__builtin_msa_srlri_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srlri_w : GCCBuiltin<"__builtin_msa_srlri_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_srlri_d : GCCBuiltin<"__builtin_msa_srlri_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_st_b : GCCBuiltin<"__builtin_msa_st_b">,
+ Intrinsic<[], [llvm_v16i8_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+def int_mips_st_h : GCCBuiltin<"__builtin_msa_st_h">,
+ Intrinsic<[], [llvm_v8i16_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+def int_mips_st_w : GCCBuiltin<"__builtin_msa_st_w">,
+ Intrinsic<[], [llvm_v4i32_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+def int_mips_st_d : GCCBuiltin<"__builtin_msa_st_d">,
+ Intrinsic<[], [llvm_v2i64_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+
+def int_mips_subs_s_b : GCCBuiltin<"__builtin_msa_subs_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subs_s_h : GCCBuiltin<"__builtin_msa_subs_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subs_s_w : GCCBuiltin<"__builtin_msa_subs_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subs_s_d : GCCBuiltin<"__builtin_msa_subs_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subs_u_b : GCCBuiltin<"__builtin_msa_subs_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subs_u_h : GCCBuiltin<"__builtin_msa_subs_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subs_u_w : GCCBuiltin<"__builtin_msa_subs_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subs_u_d : GCCBuiltin<"__builtin_msa_subs_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subsus_u_b : GCCBuiltin<"__builtin_msa_subsus_u_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subsus_u_h : GCCBuiltin<"__builtin_msa_subsus_u_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subsus_u_w : GCCBuiltin<"__builtin_msa_subsus_u_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subsus_u_d : GCCBuiltin<"__builtin_msa_subsus_u_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subsuu_s_b : GCCBuiltin<"__builtin_msa_subsuu_s_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subsuu_s_h : GCCBuiltin<"__builtin_msa_subsuu_s_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subsuu_s_w : GCCBuiltin<"__builtin_msa_subsuu_s_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subsuu_s_d : GCCBuiltin<"__builtin_msa_subsuu_s_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subv_b : GCCBuiltin<"__builtin_msa_subv_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+def int_mips_subv_h : GCCBuiltin<"__builtin_msa_subv_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty], [IntrNoMem]>;
+def int_mips_subv_w : GCCBuiltin<"__builtin_msa_subv_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+def int_mips_subv_d : GCCBuiltin<"__builtin_msa_subv_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+def int_mips_subvi_b : GCCBuiltin<"__builtin_msa_subvi_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_subvi_h : GCCBuiltin<"__builtin_msa_subvi_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_subvi_w : GCCBuiltin<"__builtin_msa_subvi_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_i32_ty], [IntrNoMem]>;
+def int_mips_subvi_d : GCCBuiltin<"__builtin_msa_subvi_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_i32_ty], [IntrNoMem]>;
+
+def int_mips_vshf_b : GCCBuiltin<"__builtin_msa_vshf_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty, llvm_v16i8_ty],
+ [IntrNoMem]>;
+def int_mips_vshf_h : GCCBuiltin<"__builtin_msa_vshf_h">,
+ Intrinsic<[llvm_v8i16_ty], [llvm_v8i16_ty, llvm_v8i16_ty, llvm_v8i16_ty],
+ [IntrNoMem]>;
+def int_mips_vshf_w : GCCBuiltin<"__builtin_msa_vshf_w">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+def int_mips_vshf_d : GCCBuiltin<"__builtin_msa_vshf_d">,
+ Intrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+def int_mips_xor_v : GCCBuiltin<"__builtin_msa_xor_v">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_v16i8_ty], [IntrNoMem]>;
+
+def int_mips_xori_b : GCCBuiltin<"__builtin_msa_xori_b">,
+ Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, llvm_i32_ty], [IntrNoMem]>;
}
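Each def above yields a TableGen-generated intrinsic ID on rebuild (int_mips_maddv_w becomes Intrinsic::mips_maddv_w). Below is a minimal, illustrative C++ sketch of emitting one of these MSA intrinsics from a front end; the helper name and the assumption that the operands are already <4 x i32> values are mine, not part of this patch.

#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"

// Illustrative sketch: emit a call to llvm.mips.maddv.w (defined above).
// All three operands must be <4 x i32>, matching the intrinsic signature.
llvm::Value *emitMsaMaddvW(llvm::IRBuilder<> &Builder, llvm::Module &M,
                           llvm::Value *Acc, llvm::Value *A, llvm::Value *B) {
  llvm::Function *Maddv =
      llvm::Intrinsic::getDeclaration(&M, llvm::Intrinsic::mips_maddv_w);
  llvm::Value *Args[] = {Acc, A, B};
  return Builder.CreateCall(Maddv, Args);
}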
diff --git a/include/llvm/IR/IntrinsicsX86.td b/include/llvm/IR/IntrinsicsX86.td
index c7675c2..4c5718f 100644
--- a/include/llvm/IR/IntrinsicsX86.td
+++ b/include/llvm/IR/IntrinsicsX86.td
@@ -206,6 +206,7 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
def int_x86_sse_cvtsi642ss : GCCBuiltin<"__builtin_ia32_cvtsi642ss">,
Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
llvm_i64_ty], [IntrNoMem]>;
+
def int_x86_sse_cvtps2pi : GCCBuiltin<"__builtin_ia32_cvtps2pi">,
Intrinsic<[llvm_x86mmx_ty], [llvm_v4f32_ty], [IntrNoMem]>;
def int_x86_sse_cvttps2pi: GCCBuiltin<"__builtin_ia32_cvttps2pi">,
@@ -936,9 +937,6 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
def int_x86_sse42_crc32_32_32 : GCCBuiltin<"__builtin_ia32_crc32si">,
Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
[IntrNoMem]>;
- def int_x86_sse42_crc32_64_8 :
- Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i8_ty],
- [IntrNoMem]>;
def int_x86_sse42_crc32_64_64 : GCCBuiltin<"__builtin_ia32_crc32di">,
Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty],
[IntrNoMem]>;
@@ -1635,7 +1633,6 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
GCCBuiltin<"__builtin_ia32_vbroadcastss_ps256">,
Intrinsic<[llvm_v8f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
def int_x86_avx2_vbroadcasti128 :
- GCCBuiltin<"__builtin_ia32_vbroadcastsi256">,
Intrinsic<[llvm_v4i64_ty], [llvm_ptr_ty], [IntrReadArgMem]>;
def int_x86_avx2_pbroadcastb_128 :
GCCBuiltin<"__builtin_ia32_pbroadcastb128">,
@@ -1867,6 +1864,14 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Intrinsic<[llvm_v4f64_ty],
[llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4f64_ty],
[IntrNoMem]>;
+ def int_x86_fma_vfmadd_ps_512 : GCCBuiltin<"__builtin_ia32_vfmaddps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_fma_vfmadd_pd_512 : GCCBuiltin<"__builtin_ia32_vfmaddpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty],
+ [IntrNoMem]>;
def int_x86_fma_vfmsub_ss : GCCBuiltin<"__builtin_ia32_vfmsubss">,
Intrinsic<[llvm_v4f32_ty],
[llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
@@ -1891,6 +1896,14 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Intrinsic<[llvm_v4f64_ty],
[llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4f64_ty],
[IntrNoMem]>;
+ def int_x86_fma_vfmsub_ps_512 : GCCBuiltin<"__builtin_ia32_vfmsubps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_fma_vfmsub_pd_512 : GCCBuiltin<"__builtin_ia32_vfmsubpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty],
+ [IntrNoMem]>;
def int_x86_fma_vfnmadd_ss : GCCBuiltin<"__builtin_ia32_vfnmaddss">,
Intrinsic<[llvm_v4f32_ty],
[llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
@@ -1915,6 +1928,14 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Intrinsic<[llvm_v4f64_ty],
[llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4f64_ty],
[IntrNoMem]>;
+ def int_x86_fma_vfnmadd_ps_512 : GCCBuiltin<"__builtin_ia32_vfnmaddps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_fma_vfnmadd_pd_512 : GCCBuiltin<"__builtin_ia32_vfnmaddpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty],
+ [IntrNoMem]>;
def int_x86_fma_vfnmsub_ss : GCCBuiltin<"__builtin_ia32_vfnmsubss">,
Intrinsic<[llvm_v4f32_ty],
[llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
@@ -1939,6 +1960,14 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Intrinsic<[llvm_v4f64_ty],
[llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4f64_ty],
[IntrNoMem]>;
+ def int_x86_fma_vfnmsub_ps_512 : GCCBuiltin<"__builtin_ia32_vfnmsubps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_fma_vfnmsub_pd_512 : GCCBuiltin<"__builtin_ia32_vfnmsubpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty],
+ [IntrNoMem]>;
def int_x86_fma_vfmaddsub_ps : GCCBuiltin<"__builtin_ia32_vfmaddsubps">,
Intrinsic<[llvm_v4f32_ty],
[llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
@@ -1957,6 +1986,14 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Intrinsic<[llvm_v4f64_ty],
[llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4f64_ty],
[IntrNoMem]>;
+ def int_x86_fma_vfmaddsub_ps_512 : GCCBuiltin<"__builtin_ia32_vfmaddsubps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_fma_vfmaddsub_pd_512 : GCCBuiltin<"__builtin_ia32_vfmaddsubpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty],
+ [IntrNoMem]>;
def int_x86_fma_vfmsubadd_ps : GCCBuiltin<"__builtin_ia32_vfmsubaddps">,
Intrinsic<[llvm_v4f32_ty],
[llvm_v4f32_ty, llvm_v4f32_ty, llvm_v4f32_ty],
@@ -1975,6 +2012,14 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Intrinsic<[llvm_v4f64_ty],
[llvm_v4f64_ty, llvm_v4f64_ty, llvm_v4f64_ty],
[IntrNoMem]>;
+ def int_x86_fma_vfmsubadd_ps_512 : GCCBuiltin<"__builtin_ia32_vfmsubaddps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16f32_ty, llvm_v16f32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_fma_vfmsubadd_pd_512 : GCCBuiltin<"__builtin_ia32_vfmsubaddpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8f64_ty, llvm_v8f64_ty, llvm_v8f64_ty],
+ [IntrNoMem]>;
}
//===----------------------------------------------------------------------===//
@@ -2550,6 +2595,16 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
}
//===----------------------------------------------------------------------===//
+// TBM
+
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_tbm_bextri_u32 : GCCBuiltin<"__builtin_ia32_bextri_u32">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_tbm_bextri_u64 : GCCBuiltin<"__builtin_ia32_bextri_u64">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
// RDRAND intrinsics - Return a random value and whether it is valid.
// RDSEED intrinsics - Return a NIST SP800-90B & C compliant random value and
// whether it is valid.
@@ -2578,8 +2633,11 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
def int_x86_xtest : GCCBuiltin<"__builtin_ia32_xtest">,
Intrinsic<[llvm_i32_ty], [], []>;
}
-// AVX-512
+//===----------------------------------------------------------------------===//
+// AVX512
+
+// Mask ops
let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
// Mask instructions
// 16-bit mask
@@ -2617,3 +2675,451 @@ let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
Intrinsic<[llvm_i32_ty], [llvm_i16_ty, llvm_i16_ty],
[IntrNoMem]>;
}
+
+// Conversion ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_cvtss2usi : GCCBuiltin<"__builtin_ia32_cvtss2usi">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtss2usi64 : GCCBuiltin<"__builtin_ia32_cvtss2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttss2usi : GCCBuiltin<"__builtin_ia32_cvttss2usi">,
+ Intrinsic<[llvm_i32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttss2usi64 : GCCBuiltin<"__builtin_ia32_cvttss2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtusi2ss : GCCBuiltin<"__builtin_ia32_cvtusi2ss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtusi642ss : GCCBuiltin<"__builtin_ia32_cvtusi642ss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty,
+ llvm_i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_cvtsd2usi : GCCBuiltin<"__builtin_ia32_cvtsd2usi">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtsd2usi64 : GCCBuiltin<"__builtin_ia32_cvtsd2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttsd2usi : GCCBuiltin<"__builtin_ia32_cvttsd2usi">,
+ Intrinsic<[llvm_i32_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvttsd2usi64 : GCCBuiltin<"__builtin_ia32_cvttsd2usi64">,
+ Intrinsic<[llvm_i64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtusi2sd : GCCBuiltin<"__builtin_ia32_cvtusi2sd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtusi642sd : GCCBuiltin<"__builtin_ia32_cvtusi642sd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty,
+ llvm_i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vcvtph2ps_512 : GCCBuiltin<"__builtin_ia32_vcvtph2ps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16i16_ty], [IntrNoMem]>;
+ def int_x86_avx512_vcvtps2ph_512 : GCCBuiltin<"__builtin_ia32_vcvtps2ph512">,
+ Intrinsic<[llvm_v16i16_ty], [llvm_v16f32_ty, llvm_i32_ty],
+ [IntrNoMem]>;
+}
+
+// Vector convert
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_cvt_ps2dq_512 : GCCBuiltin<"__builtin_ia32_cvtps2dq512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16f32_ty], [IntrNoMem]>;
+ def int_x86_avx512_cvtdq2_ps_512 : GCCBuiltin<"__builtin_ia32_cvtdq2ps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16i32_ty], [IntrNoMem]>;
+}
+
+// Vector load with broadcast
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_vbroadcast_ss_512 :
+ GCCBuiltin<"__builtin_ia32_vbroadcastss512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_ptr_ty], [IntrReadArgMem]>;
+ def int_x86_avx512_vbroadcast_ss_ps_512 :
+ GCCBuiltin<"__builtin_ia32_vbroadcastss_ps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v4f32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_vbroadcast_sd_512 :
+ GCCBuiltin<"__builtin_ia32_vbroadcastsd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_ptr_ty], [IntrReadArgMem]>;
+ def int_x86_avx512_vbroadcast_sd_pd_512 :
+ GCCBuiltin<"__builtin_ia32_vbroadcastsd_pd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v2f64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_pbroadcastd_512 :
+ GCCBuiltin<"__builtin_ia32_pbroadcastd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pbroadcastd_i32_512 :
+ Intrinsic<[llvm_v16i32_ty], [llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_pbroadcastq_512 :
+ GCCBuiltin<"__builtin_ia32_pbroadcastq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v2i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_pbroadcastq_i64_512 :
+ Intrinsic<[llvm_v8i64_ty], [llvm_i64_ty], [IntrNoMem]>;
+}
+
+// Vector sign and zero extend
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_pmovzxbq : GCCBuiltin<"__builtin_ia32_pmovzxbq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_pmovzxwd : GCCBuiltin<"__builtin_ia32_pmovzxwd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_pmovzxbd : GCCBuiltin<"__builtin_ia32_pmovzxbd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i8_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_pmovzxwq : GCCBuiltin<"__builtin_ia32_pmovzxwq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i16_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_pmovzxdq : GCCBuiltin<"__builtin_ia32_pmovzxdq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i32_ty],
+ [IntrNoMem]>;
+}
+
+// Arithmetic ops
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_min_ps_512 : GCCBuiltin<"__builtin_ia32_minps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty,
+ llvm_v16f32_ty], [IntrNoMem]>;
+ def int_x86_avx512_min_pd_512 : GCCBuiltin<"__builtin_ia32_minpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty,
+ llvm_v8f64_ty], [IntrNoMem]>;
+ def int_x86_avx512_max_ps_512 : GCCBuiltin<"__builtin_ia32_maxps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty,
+ llvm_v16f32_ty], [IntrNoMem]>;
+ def int_x86_avx512_max_pd_512 : GCCBuiltin<"__builtin_ia32_maxpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty,
+ llvm_v8f64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_pmaxu_d : GCCBuiltin<"__builtin_ia32_pmaxud512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pmaxu_q : GCCBuiltin<"__builtin_ia32_pmaxuq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_pmaxs_d : GCCBuiltin<"__builtin_ia32_pmaxsd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pmaxs_q : GCCBuiltin<"__builtin_ia32_pmaxsq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_pminu_d : GCCBuiltin<"__builtin_ia32_pminud512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pminu_q : GCCBuiltin<"__builtin_ia32_pminuq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+ def int_x86_avx512_pmins_d : GCCBuiltin<"__builtin_ia32_pminsd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_pmins_q : GCCBuiltin<"__builtin_ia32_pminsq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i64_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_rndscale_ss : GCCBuiltin<"__builtin_ia32_rndscaless">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_rndscale_sd : GCCBuiltin<"__builtin_ia32_rndscalesd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_sqrt_ss : GCCBuiltin<"__builtin_ia32_sqrtrndss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty, llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_sqrt_sd : GCCBuiltin<"__builtin_ia32_sqrtrndsd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty, llvm_v2f64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_rndscale_ps_512 : GCCBuiltin<"__builtin_ia32_rndscaleps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_rndscale_pd_512 : GCCBuiltin<"__builtin_ia32_rndscalepd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_sqrt_pd_512 : GCCBuiltin<"__builtin_ia32_sqrtpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty], [IntrNoMem]>;
+ def int_x86_avx512_sqrt_ps_512 : GCCBuiltin<"__builtin_ia32_sqrtps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty], [IntrNoMem]>;
+
+ def int_x86_avx512_rcp14_ps_512 : GCCBuiltin<"__builtin_ia32_rcp14ps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rcp14_pd_512 : GCCBuiltin<"__builtin_ia32_rcp14pd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rcp14_ss : GCCBuiltin<"__builtin_ia32_rcp14ss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rcp14_sd : GCCBuiltin<"__builtin_ia32_rcp14sd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_ps_512 : GCCBuiltin<"__builtin_ia32_rsqrt14ps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_pd_512 : GCCBuiltin<"__builtin_ia32_rsqrt14pd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_ss : GCCBuiltin<"__builtin_ia32_rsqrt14ss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt14_sd : GCCBuiltin<"__builtin_ia32_rsqrt14sd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_rcp28_ps_512 : GCCBuiltin<"__builtin_ia32_rcp28ps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rcp28_pd_512 : GCCBuiltin<"__builtin_ia32_rcp28pd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rcp28_ss : GCCBuiltin<"__builtin_ia32_rcp28ss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rcp28_sd : GCCBuiltin<"__builtin_ia32_rcp28sd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_ps_512 : GCCBuiltin<"__builtin_ia32_rsqrt28ps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_pd_512 : GCCBuiltin<"__builtin_ia32_rsqrt28pd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_ss : GCCBuiltin<"__builtin_ia32_rsqrt28ss">,
+ Intrinsic<[llvm_v4f32_ty], [llvm_v4f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_rsqrt28_sd : GCCBuiltin<"__builtin_ia32_rsqrt28sd">,
+ Intrinsic<[llvm_v2f64_ty], [llvm_v2f64_ty],
+ [IntrNoMem]>;
+}
+
+// Integer shift ops.
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_psll_dq : GCCBuiltin<"__builtin_ia32_pslldqi512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrl_dq : GCCBuiltin<"__builtin_ia32_psrldqi512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psll_dq_bs : GCCBuiltin<"__builtin_ia32_pslldqi512_byteshift">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+ def int_x86_avx512_psrl_dq_bs : GCCBuiltin<"__builtin_ia32_psrldqi512_byteshift">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_i32_ty], [IntrNoMem]>;
+}
+
+// Gather and Scatter ops
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_gather_dpd_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherdpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_i8_ty,
+ llvm_v8i32_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem]>;
+ def int_x86_avx512_gather_dps_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherdps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16f32_ty, llvm_i16_ty,
+ llvm_v16i32_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem]>;
+ def int_x86_avx512_gather_qpd_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherqpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8f64_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem]>;
+ def int_x86_avx512_gather_qps_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherqps512">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8f32_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem]>;
+
+ def int_x86_avx512_gather_dpd_512 : GCCBuiltin<"__builtin_ia32_gatherdpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8i32_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadMem]>;
+ def int_x86_avx512_gather_dps_512 : GCCBuiltin<"__builtin_ia32_gatherdps512">,
+ Intrinsic<[llvm_v16f32_ty], [llvm_v16i32_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadMem]>;
+ def int_x86_avx512_gather_qpd_512 : GCCBuiltin<"__builtin_ia32_gatherqpd512">,
+ Intrinsic<[llvm_v8f64_ty], [llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadArgMem]>;
+ def int_x86_avx512_gather_qps_512 : GCCBuiltin<"__builtin_ia32_gatherqps512">,
+ Intrinsic<[llvm_v8f32_ty], [llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadMem]>;
+
+ def int_x86_avx512_gather_dpq_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherdpq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_i8_ty,
+ llvm_v8i32_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem]>;
+ def int_x86_avx512_gather_dpi_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherdpi512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_i16_ty,
+ llvm_v16i32_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+ def int_x86_avx512_gather_qpq_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherqpq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadArgMem]>;
+ def int_x86_avx512_gather_qpi_mask_512 : GCCBuiltin<"__builtin_ia32_mask_gatherqpi512">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i32_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrReadMem]>;
+
+ def int_x86_avx512_gather_dpq_512 : GCCBuiltin<"__builtin_ia32_gatherdpq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i32_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadArgMem]>;
+ def int_x86_avx512_gather_dpi_512 : GCCBuiltin<"__builtin_ia32_gatherdpi512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadArgMem]>;
+ def int_x86_avx512_gather_qpq_512 : GCCBuiltin<"__builtin_ia32_gatherqpq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadArgMem]>;
+ def int_x86_avx512_gather_qpi_512 : GCCBuiltin<"__builtin_ia32_gatherqpi512">,
+ Intrinsic<[llvm_v8i32_ty], [llvm_v8i64_ty, llvm_ptr_ty,
+ llvm_i32_ty],
+ [IntrReadArgMem]>;
+// scatter
+ def int_x86_avx512_scatter_dpd_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterdpd512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i32_ty, llvm_v8f64_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_dps_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterdps512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i16_ty,
+ llvm_v16i32_ty, llvm_v16f32_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_qpd_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterqpd512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_v8f64_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_qps_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterqps512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_v8f32_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+
+ def int_x86_avx512_scatter_dpd_512 : GCCBuiltin<"__builtin_ia32_scatterdpd512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v8i32_ty, llvm_v8f64_ty,
+ llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_dps_512 : GCCBuiltin<"__builtin_ia32_scatterdps512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v16i32_ty, llvm_v16f32_ty,
+ llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_qpd_512 : GCCBuiltin<"__builtin_ia32_scatterqpd512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v8i64_ty, llvm_v8f64_ty,
+ llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_qps_512 : GCCBuiltin<"__builtin_ia32_scatterqps512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v8i64_ty, llvm_v8f32_ty,
+ llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+
+ def int_x86_avx512_scatter_dpq_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterdpq512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty, llvm_v8i32_ty,
+ llvm_v8i64_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_dpi_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterdpi512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i16_ty,
+ llvm_v16i32_ty, llvm_v16i32_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_qpq_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterqpq512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_v8i64_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+ def int_x86_avx512_scatter_qpi_mask_512 : GCCBuiltin<"__builtin_ia32_mask_scatterqpi512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_i8_ty,
+ llvm_v8i64_ty, llvm_v8i32_ty, llvm_i32_ty],
+ [IntrReadWriteArgMem]>;
+
+ def int_x86_avx512_scatter_dpq_512 : GCCBuiltin<"__builtin_ia32_scatterdpq512">,
+ Intrinsic<[], [llvm_ptr_ty,
+ llvm_v8i32_ty, llvm_v8i64_ty, llvm_i32_ty],
+ []>;
+ def int_x86_avx512_scatter_dpi_512 : GCCBuiltin<"__builtin_ia32_scatterdpi512">,
+ Intrinsic<[], [llvm_ptr_ty,
+ llvm_v16i32_ty, llvm_v16i32_ty, llvm_i32_ty],
+ []>;
+ def int_x86_avx512_scatter_qpq_512 : GCCBuiltin<"__builtin_ia32_scatterqpq512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v8i64_ty, llvm_v8i64_ty,
+ llvm_i32_ty],
+ []>;
+ def int_x86_avx512_scatter_qpi_512 : GCCBuiltin<"__builtin_ia32_scatterqpi512">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_v8i64_ty, llvm_v8i32_ty,
+ llvm_i32_ty],
+ []>;
+}
+
+// AVX-512 conflict detection
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_conflict_d_512 : GCCBuiltin<"__builtin_ia32_conflictd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty],
+ []>;
+ def int_x86_avx512_conflict_d_mask_512 :
+ GCCBuiltin<"__builtin_ia32_mask_conflictd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty,
+ llvm_v16i1_ty, llvm_v16i32_ty],
+ []>;
+ def int_x86_avx512_conflict_d_maskz_512:
+ GCCBuiltin<"__builtin_ia32_maskz_conflictd512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i1_ty, llvm_v16i32_ty],
+ []>;
+
+ def int_x86_avx512_conflict_q_512 : GCCBuiltin<"__builtin_ia32_conflictq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty],
+ []>;
+ def int_x86_avx512_conflict_q_mask_512 :
+ GCCBuiltin<"__builtin_ia32_mask_conflictq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i64_ty,
+ llvm_v8i1_ty, llvm_v8i64_ty],
+ []>;
+ def int_x86_avx512_conflict_q_maskz_512:
+ GCCBuiltin<"__builtin_ia32_maskz_conflictq512">,
+ Intrinsic<[llvm_v8i64_ty], [llvm_v8i1_ty, llvm_v8i64_ty],
+ []>;
+}
+
+// Vector blend
+let TargetPrefix = "x86" in { // All intrinsics start with "llvm.x86.".
+ def int_x86_avx512_mskblend_ps_512 : GCCBuiltin<"__builtin_ia32_mskblendps512">,
+ Intrinsic<[llvm_v16f32_ty],
+ [llvm_v16i1_ty, llvm_v16f32_ty, llvm_v16f32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mskblend_pd_512 : GCCBuiltin<"__builtin_ia32_mskblendpd512">,
+ Intrinsic<[llvm_v8f64_ty],
+ [llvm_v8i1_ty, llvm_v8f64_ty, llvm_v8f64_ty],
+ [IntrNoMem]>;
+
+ def int_x86_avx512_mskblend_d_512 : GCCBuiltin<"__builtin_ia32_mskblendd512">,
+ Intrinsic<[llvm_v16i32_ty],
+ [llvm_v16i1_ty, llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_mskblend_q_512 : GCCBuiltin<"__builtin_ia32_mskblendq512">,
+ Intrinsic<[llvm_v8i64_ty],
+ [llvm_v8i1_ty, llvm_v8i64_ty, llvm_v8i64_ty],
+ [IntrNoMem]>;
+}
+
+// Misc.
+let TargetPrefix = "x86" in {
+ def int_x86_avx512_cmpeq_pi_512 : GCCBuiltin<"__builtin_ia32_cmpeqpi512">,
+ Intrinsic<[llvm_i16_ty], [llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+ def int_x86_avx512_and_pi : GCCBuiltin<"__builtin_ia32_andpi512">,
+ Intrinsic<[llvm_v16i32_ty], [llvm_v16i32_ty, llvm_v16i32_ty],
+ [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// SHA intrinsics
+let TargetPrefix = "x86" in {
+ def int_x86_sha1rnds4 : GCCBuiltin<"__builtin_ia32_sha1rnds4">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_i8_ty],
+ [IntrNoMem]>;
+ def int_x86_sha1nexte : GCCBuiltin<"__builtin_ia32_sha1nexte">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha1msg1 : GCCBuiltin<"__builtin_ia32_sha1msg1">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha1msg2 : GCCBuiltin<"__builtin_ia32_sha1msg2">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha256rnds2 : GCCBuiltin<"__builtin_ia32_sha256rnds2">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+ def int_x86_sha256msg1 : GCCBuiltin<"__builtin_ia32_sha256msg1">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+ def int_x86_sha256msg2 : GCCBuiltin<"__builtin_ia32_sha256msg2">,
+ Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
+}
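The property lists in these definitions ([IntrNoMem], [IntrReadArgMem], and so on) surface as attributes on the declared intrinsic function. A hedged sketch of observing that from C++ follows; the intrinsic IDs are the names TableGen generates from the defs above, and the attribute mapping is assumed from how existing intrinsics with the same properties behave.

#include "llvm/IR/Function.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include <cassert>

// Illustrative: [IntrNoMem] surfaces as readnone, [IntrReadArgMem] as readonly.
void checkIntrinsicMemoryAttrs(llvm::Module &M) {
  llvm::Function *Sha =
      llvm::Intrinsic::getDeclaration(&M, llvm::Intrinsic::x86_sha1rnds4);
  assert(Sha->doesNotAccessMemory() && "declared with [IntrNoMem]");

  llvm::Function *Gather =
      llvm::Intrinsic::getDeclaration(&M,
                                      llvm::Intrinsic::x86_avx512_gather_qpd_512);
  assert(Gather->onlyReadsMemory() && "declared with [IntrReadArgMem]");
}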
diff --git a/include/llvm/IR/LegacyPassManager.h b/include/llvm/IR/LegacyPassManager.h
new file mode 100644
index 0000000..fa1436e
--- /dev/null
+++ b/include/llvm/IR/LegacyPassManager.h
@@ -0,0 +1,111 @@
+//===- LegacyPassManager.h - Legacy Container for Passes --------*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file defines the legacy PassManager class. This class is used to hold,
+// maintain, and optimize execution of Passes. The PassManager class ensures
+// that analysis results are available before a pass runs, and that Passes are
+// destroyed when the PassManager is destroyed.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_IR_LEGACYPASSMANAGER_H
+#define LLVM_IR_LEGACYPASSMANAGER_H
+
+#include "llvm/Pass.h"
+#include "llvm/Support/CBindingWrapping.h"
+
+namespace llvm {
+
+class Pass;
+class Module;
+
+namespace legacy {
+
+class PassManagerImpl;
+class FunctionPassManagerImpl;
+
+/// PassManagerBase - An abstract interface to allow code to add passes to
+/// a pass manager without having to hard-code what kind of pass manager
+/// it is.
+class PassManagerBase {
+public:
+ virtual ~PassManagerBase();
+
+ /// add - Add a pass to the queue of passes to run. This passes ownership of
+ /// the Pass to the PassManager. When the PassManager is destroyed, the pass
+ /// will be destroyed as well, so there is no need to delete the pass. This
+ /// implies that all passes MUST be allocated with 'new'.
+ virtual void add(Pass *P) = 0;
+};
+
+/// PassManager manages ModulePassManagers
+class PassManager : public PassManagerBase {
+public:
+
+ PassManager();
+ ~PassManager();
+
+ /// add - Add a pass to the queue of passes to run. This passes ownership of
+ /// the Pass to the PassManager. When the PassManager is destroyed, the pass
+ /// will be destroyed as well, so there is no need to delete the pass. This
+ /// implies that all passes MUST be allocated with 'new'.
+ void add(Pass *P);
+
+ /// run - Execute all of the passes scheduled for execution. Keep track of
+ /// whether any of the passes modifies the module, and if so, return true.
+ bool run(Module &M);
+
+private:
+  /// PassManagerImpl is the actual class. PassManager is just the
+  /// wrapper that publishes a simple pass manager interface.
+ PassManagerImpl *PM;
+};
+
+/// FunctionPassManager manages FunctionPasses and BasicBlockPassManagers.
+class FunctionPassManager : public PassManagerBase {
+public:
+ /// FunctionPassManager ctor - This initializes the pass manager. It needs,
+ /// but does not take ownership of, the specified Module.
+ explicit FunctionPassManager(Module *M);
+ ~FunctionPassManager();
+
+ /// add - Add a pass to the queue of passes to run. This passes
+ /// ownership of the Pass to the PassManager. When the
+  /// FunctionPassManager is destroyed, the pass will be destroyed as well, so
+ /// there is no need to delete the pass.
+ /// This implies that all passes MUST be allocated with 'new'.
+ void add(Pass *P);
+
+ /// run - Execute all of the passes scheduled for execution. Keep
+ /// track of whether any of the passes modifies the function, and if
+ /// so, return true.
+ ///
+ bool run(Function &F);
+
+ /// doInitialization - Run all of the initializers for the function passes.
+ ///
+ bool doInitialization();
+
+ /// doFinalization - Run all of the finalizers for the function passes.
+ ///
+ bool doFinalization();
+
+private:
+ FunctionPassManagerImpl *FPM;
+ Module *M;
+};
+
+} // End legacy namespace
+
+// Create wrappers for C Binding types (see CBindingWrapping.h).
+DEFINE_STDCXX_CONVERSION_FUNCTIONS(legacy::PassManagerBase, LLVMPassManagerRef)
+
+} // End llvm namespace
+
+#endif
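A minimal usage sketch of the interface declared above; the pass creator used here, createPromoteMemoryToRegisterPass() from llvm/Transforms/Scalar.h, is only a stand-in for whatever passes a client would actually schedule.

#include "llvm/IR/LegacyPassManager.h"
#include "llvm/IR/Module.h"
#include "llvm/Transforms/Scalar.h" // createPromoteMemoryToRegisterPass(), as an example

// Sketch: run a module-level pipeline, then a per-function pipeline
// (the pattern a JIT would use with FunctionPassManager).
bool runExamplePipelines(llvm::Module &M) {
  using namespace llvm;

  legacy::PassManager MPM;              // owns and deletes the passes it is given
  MPM.add(createPromoteMemoryToRegisterPass());
  bool Changed = MPM.run(M);

  legacy::FunctionPassManager FPM(&M);  // does not take ownership of M
  FPM.add(createPromoteMemoryToRegisterPass());
  FPM.doInitialization();
  for (Module::iterator F = M.begin(), E = M.end(); F != E; ++F)
    if (!F->isDeclaration())
      Changed |= FPM.run(*F);
  FPM.doFinalization();
  return Changed;
}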
diff --git a/include/llvm/IR/LegacyPassManagers.h b/include/llvm/IR/LegacyPassManagers.h
new file mode 100644
index 0000000..d256a3e
--- /dev/null
+++ b/include/llvm/IR/LegacyPassManagers.h
@@ -0,0 +1,470 @@
+//===- LegacyPassManagers.h - Legacy Pass Infrastructure --------*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file declares the LLVM Pass Manager infrastructure.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_PASSMANAGERS_H
+#define LLVM_PASSMANAGERS_H
+
+#include "llvm/ADT/ArrayRef.h"
+#include "llvm/ADT/DenseMap.h"
+#include "llvm/ADT/SmallPtrSet.h"
+#include "llvm/ADT/SmallVector.h"
+#include "llvm/Pass.h"
+#include <map>
+#include <vector>
+
+//===----------------------------------------------------------------------===//
+// Overview:
+// The Pass Manager Infrastructure manages passes. Its responsibilities are:
+//
+// o Manage optimization pass execution order
+// o Make required Analysis information available before pass P is run
+// o Release memory occupied by dead passes
+// o If Analysis information is dirtied by a pass then regenerate Analysis
+// information before it is consumed by another pass.
+//
+// Pass Manager Infrastructure uses multiple pass managers. They are
+// PassManager, FunctionPassManager, MPPassManager, FPPassManager, BBPassManager.
+// This class hierarchy uses multiple inheritance but pass managers do not
+// derive from another pass manager.
+//
+// PassManager and FunctionPassManager are the two top-level pass managers that
+// represent the external interface of this entire pass manager infrastructure.
+//
+// Important classes :
+//
+// [o] class PMTopLevelManager;
+//
+// Two top level managers, PassManager and FunctionPassManager, derive from
+// PMTopLevelManager. PMTopLevelManager manages information used by top level
+// managers such as last user info.
+//
+// [o] class PMDataManager;
+//
+// PMDataManager manages information, e.g. list of available analysis info,
+// used by a pass manager to manage execution order of passes. It also provides
+// a place to implement common pass manager APIs. All pass managers derive from
+// PMDataManager.
+//
+// [o] class BBPassManager : public FunctionPass, public PMDataManager;
+//
+// BBPassManager manages BasicBlockPasses.
+//
+// [o] class FunctionPassManager;
+//
+// This is an external interface used by the JIT to manage FunctionPasses. This
+// interface relies on FunctionPassManagerImpl to do all the tasks.
+//
+// [o] class FunctionPassManagerImpl : public ModulePass, PMDataManager,
+// public PMTopLevelManager;
+//
+// FunctionPassManagerImpl is a top level manager. It manages FPPassManagers.
+//
+// [o] class FPPassManager : public ModulePass, public PMDataManager;
+//
+// FPPassManager manages FunctionPasses and BBPassManagers
+//
+// [o] class MPPassManager : public Pass, public PMDataManager;
+//
+// MPPassManager manages ModulePasses and FPPassManagers
+//
+// [o] class PassManager;
+//
+// This is an external interface used by various tools to manage passes. It
+// relies on PassManagerImpl to do all the tasks.
+//
+// [o] class PassManagerImpl : public Pass, public PMDataManager,
+// public PMTopLevelManager
+//
+// PassManagerImpl is a top level pass manager responsible for managing
+// MPPassManagers.
+//===----------------------------------------------------------------------===//
+
+#include "llvm/Support/PrettyStackTrace.h"
+
+namespace llvm {
+ class Module;
+ class Pass;
+ class StringRef;
+ class Value;
+ class Timer;
+ class PMDataManager;
+
+// enums for debugging strings
+enum PassDebuggingString {
+ EXECUTION_MSG, // "Executing Pass '"
+ MODIFICATION_MSG, // "' Made Modification '"
+ FREEING_MSG, // " Freeing Pass '"
+ ON_BASICBLOCK_MSG, // "' on BasicBlock '" + PassName + "'...\n"
+ ON_FUNCTION_MSG, // "' on Function '" + FunctionName + "'...\n"
+ ON_MODULE_MSG, // "' on Module '" + ModuleName + "'...\n"
+ ON_REGION_MSG, // " 'on Region ...\n'"
+ ON_LOOP_MSG, // " 'on Loop ...\n'"
+ ON_CG_MSG // "' on Call Graph ...\n'"
+};
+
+/// PassManagerPrettyStackEntry - This is used to print information about
+/// which pass is running when/if a stack trace is generated.
+class PassManagerPrettyStackEntry : public PrettyStackTraceEntry {
+ Pass *P;
+ Value *V;
+ Module *M;
+public:
+ explicit PassManagerPrettyStackEntry(Pass *p)
+ : P(p), V(0), M(0) {} // When P is releaseMemory'd.
+ PassManagerPrettyStackEntry(Pass *p, Value &v)
+ : P(p), V(&v), M(0) {} // When P is run on V
+ PassManagerPrettyStackEntry(Pass *p, Module &m)
+ : P(p), V(0), M(&m) {} // When P is run on M
+
+ /// print - Emit information about this stack frame to OS.
+ virtual void print(raw_ostream &OS) const;
+};
+
+
+//===----------------------------------------------------------------------===//
+// PMStack
+//
+/// PMStack - This class implements a stack data structure of PMDataManager
+/// pointers.
+///
+/// Top level pass managers (see PassManager.cpp) maintain active Pass Managers
+/// using a PMStack. Each Pass implements assignPassManager() to connect itself
+/// with an appropriate manager. assignPassManager() walks the PMStack to find
+/// a suitable manager.
+class PMStack {
+public:
+ typedef std::vector<PMDataManager *>::const_reverse_iterator iterator;
+ iterator begin() const { return S.rbegin(); }
+ iterator end() const { return S.rend(); }
+
+ void pop();
+ PMDataManager *top() const { return S.back(); }
+ void push(PMDataManager *PM);
+ bool empty() const { return S.empty(); }
+
+ void dump() const;
+
+private:
+ std::vector<PMDataManager *> S;
+};
+
+
+//===----------------------------------------------------------------------===//
+// PMTopLevelManager
+//
+/// PMTopLevelManager manages LastUser info and collects common APIs used by
+/// top level pass managers.
+class PMTopLevelManager {
+protected:
+ explicit PMTopLevelManager(PMDataManager *PMDM);
+
+ unsigned getNumContainedManagers() const {
+ return (unsigned)PassManagers.size();
+ }
+
+ void initializeAllAnalysisInfo();
+
+private:
+ virtual PMDataManager *getAsPMDataManager() = 0;
+ virtual PassManagerType getTopLevelPassManagerType() = 0;
+
+public:
+ /// Schedule pass P for execution. Make sure that passes required by
+ /// P are run before P is run. Update analysis info maintained by
+ /// the manager. Remove dead passes. This is a recursive function.
+ void schedulePass(Pass *P);
+
+ /// Set pass P as the last user of the given analysis passes.
+ void setLastUser(ArrayRef<Pass*> AnalysisPasses, Pass *P);
+
+ /// Collect passes whose last user is P
+ void collectLastUses(SmallVectorImpl<Pass *> &LastUses, Pass *P);
+
+ /// Find the pass that implements Analysis AID. Search immutable
+ /// passes and all pass managers. If the desired pass is not found,
+ /// return NULL.
+ Pass *findAnalysisPass(AnalysisID AID);
+
+ /// Find analysis usage information for the pass P.
+ AnalysisUsage *findAnalysisUsage(Pass *P);
+
+ virtual ~PMTopLevelManager();
+
+ /// Add immutable pass and initialize it.
+ inline void addImmutablePass(ImmutablePass *P) {
+ P->initializePass();
+ ImmutablePasses.push_back(P);
+ }
+
+ inline SmallVectorImpl<ImmutablePass *>& getImmutablePasses() {
+ return ImmutablePasses;
+ }
+
+ void addPassManager(PMDataManager *Manager) {
+ PassManagers.push_back(Manager);
+ }
+
+ // Add Manager into the list of managers that are not directly
+ // maintained by this top level pass manager
+ inline void addIndirectPassManager(PMDataManager *Manager) {
+ IndirectPassManagers.push_back(Manager);
+ }
+
+ // Print passes managed by this top level manager.
+ void dumpPasses() const;
+ void dumpArguments() const;
+
+ // Active Pass Managers
+ PMStack activeStack;
+
+protected:
+
+ /// Collection of pass managers
+ SmallVector<PMDataManager *, 8> PassManagers;
+
+private:
+
+ /// Collection of pass managers that are not directly maintained
+ /// by this pass manager
+ SmallVector<PMDataManager *, 8> IndirectPassManagers;
+
+ // Map to keep track of the last user of each analysis pass.
+ // LastUser->second is the last user of LastUser->first.
+ DenseMap<Pass *, Pass *> LastUser;
+
+ // Map to keep track of passes that are last used by a pass.
+ // This inverse map is initialized at PM->run() based on
+ // LastUser map.
+ DenseMap<Pass *, SmallPtrSet<Pass *, 8> > InversedLastUser;
+
+ /// Immutable passes are managed by top level manager.
+ SmallVector<ImmutablePass *, 8> ImmutablePasses;
+
+ DenseMap<Pass *, AnalysisUsage *> AnUsageMap;
+};
+
+
+
+//===----------------------------------------------------------------------===//
+// PMDataManager
+
+/// PMDataManager provides the common place to manage the analysis data
+/// used by pass managers.
+class PMDataManager {
+public:
+
+ explicit PMDataManager() : TPM(NULL), Depth(0) {
+ initializeAnalysisInfo();
+ }
+
+ virtual ~PMDataManager();
+
+ virtual Pass *getAsPass() = 0;
+
+ /// Augment AvailableAnalysis by adding analysis made available by pass P.
+ void recordAvailableAnalysis(Pass *P);
+
+ /// verifyPreservedAnalysis -- Verify the analyses preserved by pass P.
+ void verifyPreservedAnalysis(Pass *P);
+
+ /// Remove analyses that are not preserved by the pass
+ void removeNotPreservedAnalysis(Pass *P);
+
+ /// Remove dead passes used by P.
+ void removeDeadPasses(Pass *P, StringRef Msg,
+ enum PassDebuggingString);
+
+ /// Remove P.
+ void freePass(Pass *P, StringRef Msg,
+ enum PassDebuggingString);
+
+ /// Add pass P into the PassVector. Update
+ /// AvailableAnalysis appropriately if ProcessAnalysis is true.
+ void add(Pass *P, bool ProcessAnalysis = true);
+
+ /// Add RequiredPass into list of lower level passes required by pass P.
+ /// RequiredPass is run on the fly by Pass Manager when P requests it
+ /// through getAnalysis interface.
+ virtual void addLowerLevelRequiredPass(Pass *P, Pass *RequiredPass);
+
+ virtual Pass *getOnTheFlyPass(Pass *P, AnalysisID PI, Function &F);
+
+ /// Initialize available analysis information.
+ void initializeAnalysisInfo() {
+ AvailableAnalysis.clear();
+ for (unsigned i = 0; i < PMT_Last; ++i)
+ InheritedAnalysis[i] = NULL;
+ }
+
+ // Return true if P preserves high level analysis used by other
+ // passes that are managed by this manager.
+ bool preserveHigherLevelAnalysis(Pass *P);
+
+
+ /// Populate RequiredPasses with the analysis passes that are required by
+ /// pass P and are available. Populate ReqPassNotAvailable with the analysis
+ /// passes that are required by pass P but are not available.
+ void collectRequiredAnalysis(SmallVectorImpl<Pass *> &RequiredPasses,
+ SmallVectorImpl<AnalysisID> &ReqPassNotAvailable,
+ Pass *P);
+
+ /// All Required analyses should be available to the pass as it runs! Here
+ /// we fill in the AnalysisImpls member of the pass so that it can
+ /// successfully use the getAnalysis() method to retrieve the
+ /// implementations it needs.
+ void initializeAnalysisImpl(Pass *P);
+
+ /// Find the pass that implements Analysis AID. If the desired pass is not
+ /// found, return NULL.
+ Pass *findAnalysisPass(AnalysisID AID, bool Direction);
+
+ // Access toplevel manager
+ PMTopLevelManager *getTopLevelManager() { return TPM; }
+ void setTopLevelManager(PMTopLevelManager *T) { TPM = T; }
+
+ unsigned getDepth() const { return Depth; }
+ void setDepth(unsigned newDepth) { Depth = newDepth; }
+
+ // Print routines used by debug-pass
+ void dumpLastUses(Pass *P, unsigned Offset) const;
+ void dumpPassArguments() const;
+ void dumpPassInfo(Pass *P, enum PassDebuggingString S1,
+ enum PassDebuggingString S2, StringRef Msg);
+ void dumpRequiredSet(const Pass *P) const;
+ void dumpPreservedSet(const Pass *P) const;
+
+ unsigned getNumContainedPasses() const {
+ return (unsigned)PassVector.size();
+ }
+
+ virtual PassManagerType getPassManagerType() const {
+ assert ( 0 && "Invalid use of getPassManagerType");
+ return PMT_Unknown;
+ }
+
+ DenseMap<AnalysisID, Pass*> *getAvailableAnalysis() {
+ return &AvailableAnalysis;
+ }
+
+ // Collect AvailableAnalysis from all the active Pass Managers.
+ void populateInheritedAnalysis(PMStack &PMS) {
+ unsigned Index = 0;
+ for (PMStack::iterator I = PMS.begin(), E = PMS.end();
+ I != E; ++I)
+ InheritedAnalysis[Index++] = (*I)->getAvailableAnalysis();
+ }
+
+protected:
+
+ // Top level manager.
+ PMTopLevelManager *TPM;
+
+ // Collection of passes that are managed by this manager
+ SmallVector<Pass *, 16> PassVector;
+
+ // Collection of Analysis provided by the parent pass manager and
+ // used by the current pass manager. At any time there cannot be more
+ // than PMT_Last active pass managers.
+ DenseMap<AnalysisID, Pass *> *InheritedAnalysis[PMT_Last];
+
+ /// isPassDebuggingExecutionsOrMore - Return true if -debug-pass=Executions
+ /// or higher is specified.
+ bool isPassDebuggingExecutionsOrMore() const;
+
+private:
+ void dumpAnalysisUsage(StringRef Msg, const Pass *P,
+ const AnalysisUsage::VectorType &Set) const;
+
+ // Set of available Analysis. This information is used while scheduling
+ // passes. If a pass requires an analysis which is not available, then
+ // the required analysis pass is scheduled to run before the pass itself is
+ // scheduled to run.
+ DenseMap<AnalysisID, Pass*> AvailableAnalysis;
+
+ // Collection of higher level analysis used by the pass managed by
+ // this manager.
+ SmallVector<Pass *, 8> HigherLevelAnalysis;
+
+ unsigned Depth;
+};
+
+//===----------------------------------------------------------------------===//
+// FPPassManager
+//
+/// FPPassManager manages BBPassManagers and FunctionPasses.
+/// It batches all function passes and basic block pass managers together and
+/// sequences them to process one function at a time before processing the
+/// next function.
+class FPPassManager : public ModulePass, public PMDataManager {
+public:
+ static char ID;
+ explicit FPPassManager()
+ : ModulePass(ID), PMDataManager() { }
+
+ /// run - Execute all of the passes scheduled for execution. Keep track of
+ /// whether any of the passes modifies the module, and if so, return true.
+ bool runOnFunction(Function &F);
+ bool runOnModule(Module &M);
+
+ /// cleanup - After running all passes, clean up pass manager cache.
+ void cleanup();
+
+ /// doInitialization - Overrides ModulePass doInitialization for global
+ /// initialization tasks
+ ///
+ using ModulePass::doInitialization;
+
+ /// doInitialization - Run all of the initializers for the function passes.
+ ///
+ bool doInitialization(Module &M);
+
+ /// doFinalization - Overrides ModulePass doFinalization for global
+ /// finalization tasks
+ ///
+ using ModulePass::doFinalization;
+
+ /// doFinalization - Run all of the finalizers for the function passes.
+ ///
+ bool doFinalization(Module &M);
+
+ virtual PMDataManager *getAsPMDataManager() { return this; }
+ virtual Pass *getAsPass() { return this; }
+
+ /// Pass Manager itself does not invalidate any analysis info.
+ void getAnalysisUsage(AnalysisUsage &Info) const {
+ Info.setPreservesAll();
+ }
+
+ // Print passes managed by this manager
+ void dumpPassStructure(unsigned Offset);
+
+ virtual const char *getPassName() const {
+ return "Function Pass Manager";
+ }
+
+ FunctionPass *getContainedPass(unsigned N) {
+ assert ( N < PassVector.size() && "Pass number out of range!");
+ FunctionPass *FP = static_cast<FunctionPass *>(PassVector[N]);
+ return FP;
+ }
+
+ virtual PassManagerType getPassManagerType() const {
+ return PMT_FunctionPassManager;
+ }
+};
+
+Timer *getPassTimer(Pass *);
+
+}
+
+#endif
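The overview above says each Pass implements assignPassManager() and walks the PMStack to find a suitable manager. The following is a rough sketch of that pattern, not the actual implementation; the helper name is illustrative only, and the real code handles more cases (e.g. creating a new FPPassManager when none is found).

  #include "llvm/IR/LegacyPassManagers.h"
  using namespace llvm;

  // Illustration: pop managers that cannot host a FunctionPass (anything
  // "below" a function pass manager on the stack), then hand the pass to
  // whichever manager remains on top.
  void assignToSuitableManager(Pass *P, PMStack &PMS) {
    while (!PMS.empty() &&
           PMS.top()->getPassManagerType() > PMT_FunctionPassManager)
      PMS.pop();
    PMS.top()->add(P); // the manager takes ownership of P
  }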
diff --git a/include/llvm/IR/Metadata.h b/include/llvm/IR/Metadata.h
index acd84d7..9659c2e 100644
--- a/include/llvm/IR/Metadata.h
+++ b/include/llvm/IR/Metadata.h
@@ -28,6 +28,10 @@ template<typename ValueSubClass, typename ItemParentClass>
class SymbolTableListTraits;
+enum LLVMConstants LLVM_ENUM_INT_TYPE(uint32_t) {
+ DEBUG_METADATA_VERSION = 1 // Current debug info version number.
+};
+
//===----------------------------------------------------------------------===//
/// MDString - a single uniqued string.
/// These are used to efficiently contain a byte sequence for metadata.
@@ -161,6 +165,9 @@ public:
return V->getValueID() == MDNodeVal;
}
+ /// Check whether MDNode is a vtable access.
+ bool isTBAAVtableAccess() const;
+
/// Methods for metadata merging.
static MDNode *getMostGenericTBAA(MDNode *A, MDNode *B);
static MDNode *getMostGenericFPMath(MDNode *A, MDNode *B);
diff --git a/include/llvm/IR/Module.h b/include/llvm/IR/Module.h
index 3dbc5ff..b30a9a3 100644
--- a/include/llvm/IR/Module.h
+++ b/include/llvm/IR/Module.h
@@ -391,7 +391,7 @@ public:
/// @name Named Metadata Accessors
/// @{
- /// getNamedMetadata - Return the NamedMDNode in the module with the
+ /// getNamedMetadata - Return the first NamedMDNode in the module with the
/// specified name. This method returns null if a NamedMDNode with the
/// specified name is not found.
NamedMDNode *getNamedMetadata(const Twine &Name) const;
diff --git a/include/llvm/IR/Operator.h b/include/llvm/IR/Operator.h
index 13ab72c..5b9bee7 100644
--- a/include/llvm/IR/Operator.h
+++ b/include/llvm/IR/Operator.h
@@ -439,8 +439,8 @@ public:
/// offset of this GEP if the GEP is in fact constant. If the GEP is not
/// all-constant, it returns false and the value of the offset APInt is
/// undefined (it is *not* preserved!). The APInt passed into this routine
- /// must be at least as wide as the IntPtr type for the address space of
- /// the base GEP pointer.
+ /// must be exactly as wide as the IntPtr type for the address space of the
+ /// base GEP pointer.
bool accumulateConstantOffset(const DataLayout &DL, APInt &Offset) const {
assert(Offset.getBitWidth() ==
DL.getPointerSizeInBits(getPointerAddressSpace()) &&
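The doc fix above tightens the requirement on the APInt passed to GEPOperator::accumulateConstantOffset: its width must equal the pointer size for the GEP's address space, no wider. A small hedged usage sketch follows; the helper name is an illustration, not part of the header.

  #include "llvm/ADT/APInt.h"
  #include "llvm/IR/DataLayout.h"
  #include "llvm/IR/Operator.h"
  using namespace llvm;

  // Returns true and fills ByteOffset when every index of the GEP is constant.
  bool getConstantGEPOffset(const GEPOperator *GEP, const DataLayout &DL,
                            APInt &ByteOffset) {
    // The width must match the pointer size of the GEP's address space exactly.
    unsigned Width = DL.getPointerSizeInBits(GEP->getPointerAddressSpace());
    ByteOffset = APInt(Width, 0);
    return GEP->accumulateConstantOffset(DL, ByteOffset);
  }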
diff --git a/include/llvm/IR/PassManager.h b/include/llvm/IR/PassManager.h
new file mode 100644
index 0000000..833547a
--- /dev/null
+++ b/include/llvm/IR/PassManager.h
@@ -0,0 +1,383 @@
+//===- PassManager.h - Pass management infrastructure -----------*- C++ -*-===//
+//
+// The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+/// \file
+///
+/// This header defines various interfaces for pass management in LLVM. There
+/// is no "pass" interface in LLVM per se. Instead, an instance of any class
+/// which supports a method to 'run' it over a unit of IR can be used as
+/// a pass. A pass manager is generally a tool to collect a sequence of passes
+/// which run over a particular IR construct, and run each of them in sequence
+/// over each such construct in the containing IR construct. As there is no
+/// containing IR construct for a Module, a manager for passes over modules
+/// forms the base case which runs its managed passes in sequence over the
+/// single module provided.
+///
+/// The core IR library provides managers for running passes over
+/// modules and functions.
+///
+/// * FunctionPassManager can run over a Module, runs each pass over
+/// a Function.
+/// * ModulePassManager must be directly run, runs each pass over the Module.
+///
+/// Note that the implementations of the pass managers use concept-based
+/// polymorphism as outlined in the "Value Semantics and Concept-based
+/// Polymorphism" talk (or its abbreviated sibling "Inheritance Is The Base
+/// Class of Evil") by Sean Parent:
+/// * http://github.com/sean-parent/sean-parent.github.com/wiki/Papers-and-Presentations
+/// * http://www.youtube.com/watch?v=_BpMYeUFXv8
+/// * http://channel9.msdn.com/Events/GoingNative/2013/Inheritance-Is-The-Base-Class-of-Evil
+///
+//===----------------------------------------------------------------------===//
+
+#include "llvm/ADT/DenseMap.h"
+#include "llvm/ADT/polymorphic_ptr.h"
+#include "llvm/Support/type_traits.h"
+#include "llvm/IR/Function.h"
+#include "llvm/IR/Module.h"
+#include <list>
+#include <vector>
+
+namespace llvm {
+
+class Module;
+class Function;
+
+/// \brief Implementation details of the pass manager interfaces.
+namespace detail {
+
+/// \brief Template for the abstract base class used to dispatch
+/// polymorphically over pass objects.
+template <typename T> struct PassConcept {
+ // Boilerplate necessary for the container of derived classes.
+ virtual ~PassConcept() {}
+ virtual PassConcept *clone() = 0;
+
+ /// \brief The polymorphic API which runs the pass over a given IR entity.
+ virtual bool run(T Arg) = 0;
+};
+
+/// \brief A template wrapper used to implement the polymorphic API.
+///
+/// Can be instantiated for any object which provides a \c run method
+/// accepting a \c T. It requires the pass to be a copyable
+/// object.
+template <typename T, typename PassT> struct PassModel : PassConcept<T> {
+ PassModel(PassT Pass) : Pass(llvm_move(Pass)) {}
+ virtual PassModel *clone() { return new PassModel(Pass); }
+ virtual bool run(T Arg) { return Pass.run(Arg); }
+ PassT Pass;
+};
+
+}
+
+class AnalysisManager;
+
+class ModulePassManager {
+public:
+ ModulePassManager(Module *M, AnalysisManager *AM = 0) : M(M), AM(AM) {}
+
+ template <typename ModulePassT> void addPass(ModulePassT Pass) {
+ Passes.push_back(new ModulePassModel<ModulePassT>(llvm_move(Pass)));
+ }
+
+ void run();
+
+private:
+ // Pull in the concept type and model template specialized for modules.
+ typedef detail::PassConcept<Module *> ModulePassConcept;
+ template <typename PassT>
+ struct ModulePassModel : detail::PassModel<Module *, PassT> {
+ ModulePassModel(PassT Pass) : detail::PassModel<Module *, PassT>(Pass) {}
+ };
+
+ Module *M;
+ AnalysisManager *AM;
+ std::vector<polymorphic_ptr<ModulePassConcept> > Passes;
+};
+
+class FunctionPassManager {
+public:
+ FunctionPassManager(AnalysisManager *AM = 0) : AM(AM) {}
+
+ template <typename FunctionPassT> void addPass(FunctionPassT Pass) {
+ Passes.push_back(new FunctionPassModel<FunctionPassT>(llvm_move(Pass)));
+ }
+
+ bool run(Module *M);
+
+private:
+ // Pull in the concept type and model template specialized for functions.
+ typedef detail::PassConcept<Function *> FunctionPassConcept;
+ template <typename PassT>
+ struct FunctionPassModel : detail::PassModel<Function *, PassT> {
+ FunctionPassModel(PassT Pass)
+ : detail::PassModel<Function *, PassT>(Pass) {}
+ };
+
+ AnalysisManager *AM;
+ std::vector<polymorphic_ptr<FunctionPassConcept> > Passes;
+};
+
+
+/// \brief An analysis manager to coordinate and cache analyses run over
+/// a module.
+///
+/// The analysis manager is typically used by passes in a pass pipeline
+/// (consisting potentially of several individual pass managers) over a module
+/// of IR. It provides registration of available analyses, declaring
+/// requirements on support for specific analyses, running of a specific
+/// analysis over a specific unit of IR to compute an analysis result, and
+/// caching of the analysis results to reuse them across multiple passes.
+///
+/// It is the responsibility of callers to use the invalidation API to
+/// invalidate analysis results when the IR they correspond to changes. The
+/// \c ModulePassManager and \c FunctionPassManager do this automatically.
+class AnalysisManager {
+public:
+ AnalysisManager(Module *M) : M(M) {}
+
+ /// \brief Get the result of an analysis pass for this module.
+ ///
+ /// If there is not a valid cached result in the manager already, this will
+ /// re-run the analysis to produce a valid result.
+ ///
+ /// The module passed in must be the same module as the analysis manager was
+ /// constructed around.
+ template <typename PassT>
+ const typename PassT::Result &getResult(Module *M) {
+ assert(ModuleAnalysisPasses.count(PassT::ID()) &&
+ "This analysis pass was not registered prior to being queried");
+
+ const AnalysisResultConcept<Module> &ResultConcept =
+ getResultImpl(PassT::ID(), M);
+ typedef AnalysisResultModel<Module, typename PassT::Result> ResultModelT;
+ return static_cast<const ResultModelT &>(ResultConcept).Result;
+ }
+
+ /// \brief Get the result of an analysis pass for a function.
+ ///
+ /// If there is not a valid cached result in the manager already, this will
+ /// re-run the analysis to produce a valid result.
+ template <typename PassT>
+ const typename PassT::Result &getResult(Function *F) {
+ assert(FunctionAnalysisPasses.count(PassT::ID()) &&
+ "This analysis pass was not registered prior to being queried");
+
+ const AnalysisResultConcept<Function> &ResultConcept =
+ getResultImpl(PassT::ID(), F);
+ typedef AnalysisResultModel<Function, typename PassT::Result> ResultModelT;
+ return static_cast<const ResultModelT &>(ResultConcept).Result;
+ }
+
+ /// \brief Register an analysis pass with the manager.
+ ///
+ /// This provides an initialized and set-up analysis pass to the analysis
+ /// manager. Whoever is setting up analysis passes must use this to populate
+ /// the manager with all of the analysis passes available.
+ template <typename PassT> void registerAnalysisPass(PassT Pass) {
+ registerAnalysisPassImpl<PassT>(llvm_move(Pass));
+ }
+
+ /// \brief Invalidate a specific analysis pass for an IR module.
+ ///
+ /// Note that the analysis result can disregard invalidation.
+ template <typename PassT> void invalidate(Module *M) {
+ invalidateImpl(PassT::ID(), M);
+ }
+
+ /// \brief Invalidate a specific analysis pass for an IR function.
+ ///
+ /// Note that the analysis result can disregard invalidation.
+ template <typename PassT> void invalidate(Function *F) {
+ invalidateImpl(PassT::ID(), F);
+ }
+
+ /// \brief Invalidate analyses cached for an IR Module.
+ ///
+ /// Note that specific analysis results can disregard invalidation by
+ /// overriding their invalidate method.
+ ///
+ /// The module must be the module this analysis manager was constructed
+ /// around.
+ void invalidateAll(Module *M);
+
+ /// \brief Invalidate analyses cached for an IR Function.
+ ///
+ /// Note that specific analysis results can disregard invalidation by
+ /// overriding the invalidate method.
+ void invalidateAll(Function *F);
+
+private:
+ /// \brief Abstract concept of an analysis result.
+ ///
+ /// This concept is parameterized over the IR unit that this result pertains
+ /// to.
+ template <typename IRUnitT> struct AnalysisResultConcept {
+ virtual ~AnalysisResultConcept() {}
+ virtual AnalysisResultConcept *clone() = 0;
+
+ /// \brief Method to try and mark a result as invalid.
+ ///
+ /// When the outer \c AnalysisManager detects a change in some underlying
+ /// unit of the IR, it will call this method on all of the results cached.
+ ///
+ /// \returns true if the result should indeed be invalidated (the default).
+ virtual bool invalidate(IRUnitT *IR) = 0;
+ };
+
+ /// \brief Wrapper to model the analysis result concept.
+ ///
+ /// Can wrap any type which implements a suitable invalidate member and model
+ /// the AnalysisResultConcept for the AnalysisManager.
+ template <typename IRUnitT, typename ResultT>
+ struct AnalysisResultModel : AnalysisResultConcept<IRUnitT> {
+ AnalysisResultModel(ResultT Result) : Result(llvm_move(Result)) {}
+ virtual AnalysisResultModel *clone() {
+ return new AnalysisResultModel(Result);
+ }
+
+ /// \brief The model delegates to the \c ResultT method.
+ virtual bool invalidate(IRUnitT *IR) { return Result.invalidate(IR); }
+
+ ResultT Result;
+ };
+
+ /// \brief Abstract concept of an analysis pass.
+ ///
+ /// This concept is parameterized over the IR unit that it can run over and
+ /// produce an analysis result.
+ template <typename IRUnitT> struct AnalysisPassConcept {
+ virtual ~AnalysisPassConcept() {}
+ virtual AnalysisPassConcept *clone() = 0;
+
+ /// \brief Method to run this analysis over a unit of IR.
+ /// \returns The analysis result object to be queried by users; the caller
+ /// takes ownership.
+ virtual AnalysisResultConcept<IRUnitT> *run(IRUnitT *IR) = 0;
+ };
+
+ /// \brief Wrapper to model the analysis pass concept.
+ ///
+ /// Can wrap any type which implements a suitable \c run method. The method
+ /// must accept the IRUnitT as an argument and produce an object which can be
+ /// wrapped in a \c AnalysisResultModel.
+ template <typename PassT>
+ struct AnalysisPassModel : AnalysisPassConcept<typename PassT::IRUnitT> {
+ AnalysisPassModel(PassT Pass) : Pass(llvm_move(Pass)) {}
+ virtual AnalysisPassModel *clone() { return new AnalysisPassModel(Pass); }
+
+ // FIXME: Replace PassT::IRUnitT with type traits when we use C++11.
+ typedef typename PassT::IRUnitT IRUnitT;
+
+ // FIXME: Replace PassT::Result with type traits when we use C++11.
+ typedef AnalysisResultModel<IRUnitT, typename PassT::Result> ResultModelT;
+
+ /// \brief The model delegates to the \c PassT::run method.
+ ///
+ /// The return is wrapped in an \c AnalysisResultModel.
+ virtual ResultModelT *run(IRUnitT *IR) {
+ return new ResultModelT(Pass.run(IR));
+ }
+
+ PassT Pass;
+ };
+
+
+ /// \brief Get a module pass result, running the pass if necessary.
+ const AnalysisResultConcept<Module> &getResultImpl(void *PassID, Module *M);
+
+ /// \brief Get a function pass result, running the pass if necessary.
+ const AnalysisResultConcept<Function> &getResultImpl(void *PassID,
+ Function *F);
+
+ /// \brief Invalidate a module pass result.
+ void invalidateImpl(void *PassID, Module *M);
+
+ /// \brief Invalidate a function pass result.
+ void invalidateImpl(void *PassID, Function *F);
+
+
+ /// \brief Module pass specific implementation of registration.
+ template <typename PassT>
+ typename enable_if<is_same<typename PassT::IRUnitT, Module> >::type
+ registerAnalysisPassImpl(PassT Pass) {
+ assert(!ModuleAnalysisPasses.count(PassT::ID()) &&
+ "Registered the same analysis pass twice!");
+ ModuleAnalysisPasses[PassT::ID()] =
+ new AnalysisPassModel<PassT>(llvm_move(Pass));
+ }
+
+ /// \brief Function pass specific implementation of registration.
+ template <typename PassT>
+ typename enable_if<is_same<typename PassT::IRUnitT, Function> >::type
+ registerAnalysisPassImpl(PassT Pass) {
+ assert(!FunctionAnalysisPasses.count(PassT::ID()) &&
+ "Registered the same analysis pass twice!");
+ FunctionAnalysisPasses[PassT::ID()] =
+ new AnalysisPassModel<PassT>(llvm_move(Pass));
+ }
+
+
+ /// \brief Map type from module analysis pass ID to pass concept pointer.
+ typedef DenseMap<void *, polymorphic_ptr<AnalysisPassConcept<Module> > >
+ ModuleAnalysisPassMapT;
+
+ /// \brief Collection of module analysis passes, indexed by ID.
+ ModuleAnalysisPassMapT ModuleAnalysisPasses;
+
+ /// \brief Map type from module analysis pass ID to pass result concept pointer.
+ typedef DenseMap<void *, polymorphic_ptr<AnalysisResultConcept<Module> > >
+ ModuleAnalysisResultMapT;
+
+ /// \brief Cache of computed module analysis results for this module.
+ ModuleAnalysisResultMapT ModuleAnalysisResults;
+
+
+ /// \brief Map type from function analysis pass ID to pass concept pointer.
+ typedef DenseMap<void *, polymorphic_ptr<AnalysisPassConcept<Function> > >
+ FunctionAnalysisPassMapT;
+
+ /// \brief Collection of function analysis passes, indexed by ID.
+ FunctionAnalysisPassMapT FunctionAnalysisPasses;
+
+ /// \brief List of function analysis pass IDs and associated concept pointers.
+ ///
+ /// Requires iterators to be valid across appending new entries and arbitrary
+ /// erases. Provides both the pass ID and concept pointer such that it is
+ /// half of a bijection and provides storage for the actual result concept.
+ typedef std::list<
+ std::pair<void *, polymorphic_ptr<AnalysisResultConcept<Function> > > >
+ FunctionAnalysisResultListT;
+
+ /// \brief Map type from function pointer to our custom list type.
+ typedef DenseMap<Function *, FunctionAnalysisResultListT> FunctionAnalysisResultListMapT;
+
+ /// \brief Map from function to a list of function analysis results.
+ ///
+ /// Provides linear time removal of all analysis results for a function and
+ /// the ultimate storage for a particular cached analysis result.
+ FunctionAnalysisResultListMapT FunctionAnalysisResultLists;
+
+ /// \brief Map type from a pair of analysis ID and function pointer to an
+ /// iterator into a particular result list.
+ typedef DenseMap<std::pair<void *, Function *>,
+ FunctionAnalysisResultListT::iterator>
+ FunctionAnalysisResultMapT;
+
+ /// \brief Map from an analysis ID and function to a particular cached
+ /// analysis result.
+ FunctionAnalysisResultMapT FunctionAnalysisResults;
+
+ /// \brief Module handle for the \c AnalysisManager.
+ Module *M;
+};
+
+}
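A pass or analysis written against this interface is just a value type with the right members; nothing derives from a base class. The sketch below is illustrative only (MyCounterAnalysis and MyPrinterPass are assumed names, not anything defined by this header) and shows the members the templates above expect: run(), and for analyses ID(), IRUnitT, and a Result with invalidate().

  #include "llvm/IR/Module.h"
  #include "llvm/IR/PassManager.h"
  using namespace llvm;

  // Hypothetical module analysis: counts the functions in a module.
  struct MyCounterAnalysis {
    typedef Module IRUnitT;             // unit of IR the analysis runs over

    struct Result {
      unsigned NumFunctions;
      // Results may veto invalidation; returning true accepts it.
      bool invalidate(Module *) { return true; }
    };

    static void *ID() { static char PassID; return &PassID; }

    Result run(Module *M) {
      Result R;
      R.NumFunctions = (unsigned)M->size();
      return R;
    }
  };

  // Hypothetical module pass: only a 'run' method returning whether it
  // changed the IR is required.
  struct MyPrinterPass {
    bool run(Module *M) {
      (void)M; // inspect or transform M here
      return false;
    }
  };

  // Typical wiring (illustrative):
  //   AnalysisManager AM(&M);
  //   AM.registerAnalysisPass(MyCounterAnalysis());
  //   ModulePassManager MPM(&M, &AM);
  //   MPM.addPass(MyPrinterPass());
  //   MPM.run();
  //   unsigned N = AM.getResult<MyCounterAnalysis>(&M).NumFunctions;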
diff --git a/include/llvm/IR/Type.h b/include/llvm/IR/Type.h
index 1bf8789..3cfb84e 100644
--- a/include/llvm/IR/Type.h
+++ b/include/llvm/IR/Type.h
@@ -324,6 +324,14 @@ public:
subtype_iterator subtype_begin() const { return ContainedTys; }
subtype_iterator subtype_end() const { return &ContainedTys[NumContainedTys];}
+ typedef std::reverse_iterator<subtype_iterator> subtype_reverse_iterator;
+ subtype_reverse_iterator subtype_rbegin() const {
+ return subtype_reverse_iterator(subtype_end());
+ }
+ subtype_reverse_iterator subtype_rend() const {
+ return subtype_reverse_iterator(subtype_begin());
+ }
+
/// getContainedType - This method is used to implement the type iterator
/// (defined a the end of the file). For derived types, this returns the
/// types 'contained' in the derived type.
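The reverse subtype iterators added above make it easy to walk a type's contained types from last to first. A small hedged example (the function is illustrative, not part of the header):

  #include "llvm/IR/Type.h"
  #include "llvm/Support/raw_ostream.h"
  using namespace llvm;

  // Print the contained types of Ty in reverse order, e.g. the last struct
  // element first.
  void printSubtypesInReverse(Type *Ty, raw_ostream &OS) {
    for (Type::subtype_reverse_iterator I = Ty->subtype_rbegin(),
                                        E = Ty->subtype_rend();
         I != E; ++I) {
      (*I)->print(OS);
      OS << "\n";
    }
  }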
diff --git a/include/llvm/IR/Value.h b/include/llvm/IR/Value.h
index 5fba3d5..e1361fe 100644
--- a/include/llvm/IR/Value.h
+++ b/include/llvm/IR/Value.h
@@ -22,26 +22,29 @@
namespace llvm {
-class Constant;
+class APInt;
class Argument;
-class Instruction;
+class AssemblyAnnotationWriter;
class BasicBlock;
-class GlobalValue;
+class Constant;
+class DataLayout;
class Function;
-class GlobalVariable;
class GlobalAlias;
+class GlobalValue;
+class GlobalVariable;
class InlineAsm;
-class ValueSymbolTable;
-template<typename ValueTy> class StringMapEntry;
-typedef StringMapEntry<Value*> ValueName;
-class raw_ostream;
-class AssemblyAnnotationWriter;
-class ValueHandleBase;
+class Instruction;
class LLVMContext;
-class Twine;
class MDNode;
-class Type;
class StringRef;
+class Twine;
+class Type;
+class ValueHandleBase;
+class ValueSymbolTable;
+class raw_ostream;
+
+template<typename ValueTy> class StringMapEntry;
+typedef StringMapEntry<Value*> ValueName;
//===----------------------------------------------------------------------===//
// Value Class
@@ -260,37 +263,53 @@ public:
/// this value.
bool hasValueHandle() const { return HasValueHandle; }
- /// \brief This method strips off any unneeded pointer casts,
- /// all-zero GEPs and aliases from the specified value, returning the original
- /// uncasted value. If this is called on a non-pointer value, it returns
- /// 'this'.
+ /// \brief Strips off any unneeded pointer casts, all-zero GEPs and aliases
+ /// from the specified value, returning the original uncasted value.
+ ///
+ /// If this is called on a non-pointer value, it returns 'this'.
Value *stripPointerCasts();
const Value *stripPointerCasts() const {
return const_cast<Value*>(this)->stripPointerCasts();
}
- /// \brief This method strips off any unneeded pointer casts and
- /// all-zero GEPs from the specified value, returning the original
- /// uncasted value. If this is called on a non-pointer value, it returns
- /// 'this'.
+ /// \brief Strips off any unneeded pointer casts and all-zero GEPs from the
+ /// specified value, returning the original uncasted value.
+ ///
+ /// If this is called on a non-pointer value, it returns 'this'.
Value *stripPointerCastsNoFollowAliases();
const Value *stripPointerCastsNoFollowAliases() const {
return const_cast<Value*>(this)->stripPointerCastsNoFollowAliases();
}
- /// stripInBoundsConstantOffsets - This method strips off unneeded pointer casts and
- /// all-constant GEPs from the specified value, returning the original
- /// pointer value. If this is called on a non-pointer value, it returns
- /// 'this'.
+ /// \brief Strips off unneeded pointer casts and all-constant GEPs from the
+ /// specified value, returning the original pointer value.
+ ///
+ /// If this is called on a non-pointer value, it returns 'this'.
Value *stripInBoundsConstantOffsets();
const Value *stripInBoundsConstantOffsets() const {
return const_cast<Value*>(this)->stripInBoundsConstantOffsets();
}
- /// stripInBoundsOffsets - This method strips off unneeded pointer casts and
- /// any in-bounds Offsets from the specified value, returning the original
- /// pointer value. If this is called on a non-pointer value, it returns
- /// 'this'.
+ /// \brief Like \c stripInBoundsConstantOffsets, but also accumulates the
+ /// constant offset stripped.
+ ///
+ /// Stores the resulting constant offset stripped into the APInt provided.
+ /// The provided APInt will be extended or truncated as needed to be the
+ /// correct bitwidth for an offset of this pointer type.
+ ///
+ /// If this is called on a non-pointer value, it returns 'this'.
+ Value *stripAndAccumulateInBoundsConstantOffsets(const DataLayout &DL,
+ APInt &Offset);
+ const Value *stripAndAccumulateInBoundsConstantOffsets(const DataLayout &DL,
+ APInt &Offset) const {
+ return const_cast<Value *>(this)
+ ->stripAndAccumulateInBoundsConstantOffsets(DL, Offset);
+ }
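The doc comment above notes that the APInt passed in is extended or truncated to the pointer's offset width, so callers need not pre-size it exactly. A short hedged sketch of the new accumulate-and-strip API (the helper name is illustrative only):

  #include "llvm/ADT/APInt.h"
  #include "llvm/IR/DataLayout.h"
  #include "llvm/IR/Value.h"
  using namespace llvm;

  // Walk to the underlying base pointer and report the in-bounds constant
  // byte offset that was stripped along the way.
  Value *getBaseAndOffset(Value *Ptr, const DataLayout &DL, int64_t &Offset) {
    APInt Off(64, 0); // resized as needed to the pointer's offset width
    Value *Base = Ptr->stripAndAccumulateInBoundsConstantOffsets(DL, Off);
    Offset = Off.getSExtValue(); // assumes the offset fits in 64 bits
    return Base;
  }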
+
+ /// \brief Strips off unneeded pointer casts and any in-bounds offsets from
+ /// the specified value, returning the original pointer value.
+ ///
+ /// If this is called on a non-pointer value, it returns 'this'.
Value *stripInBoundsOffsets();
const Value *stripInBoundsOffsets() const {
return const_cast<Value*>(this)->stripInBoundsOffsets();