aboutsummaryrefslogtreecommitdiff
path: root/llvm/include/llvm/IR
diff options
context:
space:
mode:
authorDimitry Andric <dim@FreeBSD.org>2021-07-29 20:15:26 +0000
committerDimitry Andric <dim@FreeBSD.org>2021-07-29 20:15:26 +0000
commit344a3780b2e33f6ca763666c380202b18aab72a3 (patch)
treef0b203ee6eb71d7fdd792373e3c81eb18d6934dd /llvm/include/llvm/IR
parentb60736ec1405bb0a8dd40989f67ef4c93da068ab (diff)
downloadsrc-344a3780b2e33f6ca763666c380202b18aab72a3.tar.gz
src-344a3780b2e33f6ca763666c380202b18aab72a3.zip
the upstream release/13.x branch was created.
Diffstat (limited to 'llvm/include/llvm/IR')
-rw-r--r--llvm/include/llvm/IR/Argument.h10
-rw-r--r--llvm/include/llvm/IR/Attributes.h125
-rw-r--r--llvm/include/llvm/IR/Attributes.td191
-rw-r--r--llvm/include/llvm/IR/BasicBlock.h6
-rw-r--r--llvm/include/llvm/IR/BuiltinGCs.h33
-rw-r--r--llvm/include/llvm/IR/CFG.h16
-rw-r--r--llvm/include/llvm/IR/CallingConv.h8
-rw-r--r--llvm/include/llvm/IR/Comdat.h10
-rw-r--r--llvm/include/llvm/IR/Constant.h28
-rw-r--r--llvm/include/llvm/IR/ConstantRange.h4
-rw-r--r--llvm/include/llvm/IR/Constants.h211
-rw-r--r--llvm/include/llvm/IR/DIBuilder.h16
-rw-r--r--llvm/include/llvm/IR/DataLayout.h43
-rw-r--r--llvm/include/llvm/IR/DebugInfo.h28
-rw-r--r--llvm/include/llvm/IR/DebugInfoMetadata.h199
-rw-r--r--llvm/include/llvm/IR/DebugLoc.h2
-rw-r--r--llvm/include/llvm/IR/DerivedTypes.h93
-rw-r--r--llvm/include/llvm/IR/DiagnosticInfo.h65
-rw-r--r--llvm/include/llvm/IR/Dominators.h3
-rw-r--r--llvm/include/llvm/IR/FPEnv.h10
-rw-r--r--llvm/include/llvm/IR/Function.h40
-rw-r--r--llvm/include/llvm/IR/GCStrategy.h136
-rw-r--r--llvm/include/llvm/IR/GetElementPtrTypeIterator.h206
-rw-r--r--llvm/include/llvm/IR/GlobalIFunc.h2
-rw-r--r--llvm/include/llvm/IR/GlobalIndirectSymbol.h5
-rw-r--r--llvm/include/llvm/IR/GlobalObject.h2
-rw-r--r--llvm/include/llvm/IR/IRBuilder.h326
-rw-r--r--llvm/include/llvm/IR/InlineAsm.h6
-rw-r--r--llvm/include/llvm/IR/InstVisitor.h3
-rw-r--r--llvm/include/llvm/IR/InstrTypes.h116
-rw-r--r--llvm/include/llvm/IR/Instruction.h35
-rw-r--r--llvm/include/llvm/IR/Instructions.h193
-rw-r--r--llvm/include/llvm/IR/IntrinsicInst.h338
-rw-r--r--llvm/include/llvm/IR/Intrinsics.h25
-rw-r--r--llvm/include/llvm/IR/Intrinsics.td199
-rw-r--r--llvm/include/llvm/IR/IntrinsicsAArch64.td156
-rw-r--r--llvm/include/llvm/IR/IntrinsicsAMDGPU.td238
-rw-r--r--llvm/include/llvm/IR/IntrinsicsARM.td34
-rw-r--r--llvm/include/llvm/IR/IntrinsicsHexagon.td120
-rw-r--r--llvm/include/llvm/IR/IntrinsicsHexagonDep.td7352
-rw-r--r--llvm/include/llvm/IR/IntrinsicsNVVM.td581
-rw-r--r--llvm/include/llvm/IR/IntrinsicsPowerPC.td286
-rw-r--r--llvm/include/llvm/IR/IntrinsicsRISCV.td265
-rw-r--r--llvm/include/llvm/IR/IntrinsicsSystemZ.td22
-rw-r--r--llvm/include/llvm/IR/IntrinsicsWebAssembly.td183
-rw-r--r--llvm/include/llvm/IR/IntrinsicsX86.td32
-rw-r--r--llvm/include/llvm/IR/LLVMContext.h50
-rw-r--r--llvm/include/llvm/IR/LegacyPassManager.h4
-rw-r--r--llvm/include/llvm/IR/LegacyPassManagers.h9
-rw-r--r--llvm/include/llvm/IR/MatrixBuilder.h16
-rw-r--r--llvm/include/llvm/IR/Metadata.def1
-rw-r--r--llvm/include/llvm/IR/Metadata.h58
-rw-r--r--llvm/include/llvm/IR/Module.h61
-rw-r--r--llvm/include/llvm/IR/ModuleSlotTracker.h32
-rw-r--r--llvm/include/llvm/IR/ModuleSummaryIndex.h37
-rw-r--r--llvm/include/llvm/IR/ModuleSummaryIndexYAML.h6
-rw-r--r--llvm/include/llvm/IR/Operator.h25
-rw-r--r--llvm/include/llvm/IR/PassInstrumentation.h37
-rw-r--r--llvm/include/llvm/IR/PassManager.h64
-rw-r--r--llvm/include/llvm/IR/PassManagerImpl.h15
-rw-r--r--llvm/include/llvm/IR/PassManagerInternal.h2
-rw-r--r--llvm/include/llvm/IR/PatternMatch.h129
-rw-r--r--llvm/include/llvm/IR/PseudoProbe.h33
-rw-r--r--llvm/include/llvm/IR/ReplaceConstant.h32
-rw-r--r--llvm/include/llvm/IR/SafepointIRVerifier.h6
-rw-r--r--llvm/include/llvm/IR/Statepoint.h117
-rw-r--r--llvm/include/llvm/IR/Type.h14
-rw-r--r--llvm/include/llvm/IR/VPIntrinsics.def92
-rw-r--r--llvm/include/llvm/IR/Value.h64
-rw-r--r--llvm/include/llvm/IR/ValueMap.h24
-rw-r--r--llvm/include/llvm/IR/ValueSymbolTable.h17
71 files changed, 7598 insertions, 5349 deletions
diff --git a/llvm/include/llvm/IR/Argument.h b/llvm/include/llvm/IR/Argument.h
index 76d780485ea0..dcf658f439b4 100644
--- a/llvm/include/llvm/IR/Argument.h
+++ b/llvm/include/llvm/IR/Argument.h
@@ -102,6 +102,8 @@ public:
/// If this is a byval or inalloca argument, return its alignment.
MaybeAlign getParamAlign() const;
+ MaybeAlign getParamStackAlign() const;
+
/// If this is a byval argument, return its type.
Type *getParamByValType() const;
@@ -111,6 +113,9 @@ public:
/// If this is a byref argument, return its type.
Type *getParamByRefType() const;
+ /// If this is an inalloca argument, return its type.
+ Type *getParamInAllocaType() const;
+
/// Return true if this argument has the nest attribute.
bool hasNestAttr() const;
@@ -120,6 +125,9 @@ public:
/// Return true if this argument has the nocapture attribute.
bool hasNoCaptureAttr() const;
+ /// Return true if this argument has the nofree attribute.
+ bool hasNoFreeAttr() const;
+
/// Return true if this argument has the sret attribute.
bool hasStructRetAttr() const;
@@ -154,6 +162,8 @@ public:
/// Remove attributes from an argument.
void removeAttr(Attribute::AttrKind Kind);
+ void removeAttrs(const AttrBuilder &B);
+
/// Check if an argument has a given attribute.
bool hasAttribute(Attribute::AttrKind Kind) const;
diff --git a/llvm/include/llvm/IR/Attributes.h b/llvm/include/llvm/IR/Attributes.h
index b4056540663f..d7bd3edb3d4c 100644
--- a/llvm/include/llvm/IR/Attributes.h
+++ b/llvm/include/llvm/IR/Attributes.h
@@ -18,6 +18,7 @@
#include "llvm-c/Types.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Optional.h"
+#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Config/llvm-config.h"
@@ -70,14 +71,29 @@ public:
enum AttrKind {
// IR-Level Attributes
None, ///< No attributes have been set
- #define GET_ATTR_NAMES
- #define ATTRIBUTE_ENUM(ENUM_NAME, OTHER) ENUM_NAME,
+ #define GET_ATTR_ENUM
#include "llvm/IR/Attributes.inc"
EndAttrKinds, ///< Sentinal value useful for loops
EmptyKey, ///< Use as Empty key for DenseMap of AttrKind
TombstoneKey, ///< Use as Tombstone key for DenseMap of AttrKind
};
+ static const unsigned NumTypeAttrKinds = LastTypeAttr - FirstTypeAttr + 1;
+
+ static bool isEnumAttrKind(AttrKind Kind) {
+ return Kind >= FirstEnumAttr && Kind <= LastEnumAttr;
+ }
+ static bool isIntAttrKind(AttrKind Kind) {
+ return Kind >= FirstIntAttr && Kind <= LastIntAttr;
+ }
+ static bool isTypeAttrKind(AttrKind Kind) {
+ return Kind >= FirstTypeAttr && Kind <= LastTypeAttr;
+ }
+
+ static bool canUseAsFnAttr(AttrKind Kind);
+ static bool canUseAsParamAttr(AttrKind Kind);
+ static bool canUseAsRetAttr(AttrKind Kind);
+
private:
AttributeImpl *pImpl = nullptr;
@@ -107,10 +123,13 @@ public:
static Attribute getWithAllocSizeArgs(LLVMContext &Context,
unsigned ElemSizeArg,
const Optional<unsigned> &NumElemsArg);
+ static Attribute getWithVScaleRangeArgs(LLVMContext &Context,
+ unsigned MinValue, unsigned MaxValue);
static Attribute getWithByValType(LLVMContext &Context, Type *Ty);
static Attribute getWithStructRetType(LLVMContext &Context, Type *Ty);
static Attribute getWithByRefType(LLVMContext &Context, Type *Ty);
static Attribute getWithPreallocatedType(LLVMContext &Context, Type *Ty);
+ static Attribute getWithInAllocaType(LLVMContext &Context, Type *Ty);
/// For a typed attribute, return the equivalent attribute with the type
/// changed to \p ReplacementTy.
@@ -123,9 +142,6 @@ public:
static StringRef getNameFromAttrKind(Attribute::AttrKind AttrKind);
- /// Return true if and only if the attribute has an Argument.
- static bool doesAttrKindHaveArgument(Attribute::AttrKind AttrKind);
-
/// Return true if the provided string matches the IR name of an attribute.
/// example: "noalias" return true but not "NoAlias"
static bool isExistingAttribute(StringRef Name);
@@ -157,13 +173,17 @@ public:
bool hasAttribute(StringRef Val) const;
/// Return the attribute's kind as an enum (Attribute::AttrKind). This
- /// requires the attribute to be an enum or integer attribute.
+ /// requires the attribute to be an enum, integer, or type attribute.
Attribute::AttrKind getKindAsEnum() const;
/// Return the attribute's value as an integer. This requires that the
/// attribute be an integer attribute.
uint64_t getValueAsInt() const;
+ /// Return the attribute's value as a boolean. This requires that the
+ /// attribute be a string attribute.
+ bool getValueAsBool() const;
+
/// Return the attribute's kind as a string. This requires the
/// attribute to be a string attribute.
StringRef getKindAsString() const;
@@ -196,10 +216,17 @@ public:
/// if not known).
std::pair<unsigned, Optional<unsigned>> getAllocSizeArgs() const;
+ /// Returns the argument numbers for the vscale_range attribute (or pair(0, 0)
+ /// if not known).
+ std::pair<unsigned, unsigned> getVScaleRangeArgs() const;
+
/// The Attribute is converted to a string of equivalent mnemonic. This
/// is, presumably, for writing out the mnemonics for the assembly writer.
std::string getAsString(bool InAttrGrp = false) const;
+ /// Return true if this attribute belongs to the LLVMContext.
+ bool hasParentContext(LLVMContext &C) const;
+
/// Equality and non-equality operators.
bool operator==(Attribute A) const { return pImpl == A.pImpl; }
bool operator!=(Attribute A) const { return pImpl != A.pImpl; }
@@ -318,9 +345,15 @@ public:
Type *getStructRetType() const;
Type *getByRefType() const;
Type *getPreallocatedType() const;
+ Type *getInAllocaType() const;
+ Type *getElementType() const;
std::pair<unsigned, Optional<unsigned>> getAllocSizeArgs() const;
+ std::pair<unsigned, unsigned> getVScaleRangeArgs() const;
std::string getAsString(bool InAttrGrp = false) const;
+ /// Return true if this attribute set belongs to the LLVMContext.
+ bool hasParentContext(LLVMContext &C) const;
+
using iterator = const Attribute *;
iterator begin() const;
@@ -571,6 +604,13 @@ public:
return addAllocSizeAttr(C, ArgNo + FirstArgIndex, ElemSizeArg, NumElemsArg);
}
+ /// Add the vscale_range attribute to the attribute set at the given index.
+ /// Returns a new list because attribute lists are immutable.
+ LLVM_NODISCARD AttributeList addVScaleRangeAttr(LLVMContext &C,
+ unsigned Index,
+ unsigned MinValue,
+ unsigned MaxValue);
+
//===--------------------------------------------------------------------===//
// AttributeList Accessors
//===--------------------------------------------------------------------===//
@@ -651,6 +691,9 @@ public:
/// Return the alignment for the specified function parameter.
MaybeAlign getParamAlignment(unsigned ArgNo) const;
+ /// Return the stack alignment for the specified function parameter.
+ MaybeAlign getParamStackAlignment(unsigned ArgNo) const;
+
/// Return the byval type for the specified function parameter.
Type *getParamByValType(unsigned ArgNo) const;
@@ -663,6 +706,12 @@ public:
/// Return the preallocated type for the specified function parameter.
Type *getParamPreallocatedType(unsigned ArgNo) const;
+ /// Return the inalloca type for the specified function parameter.
+ Type *getParamInAllocaType(unsigned ArgNo) const;
+
+ /// Return the elementtype type for the specified function parameter.
+ Type *getParamElementType(unsigned ArgNo) const;
+
/// Get the stack alignment.
MaybeAlign getStackAlignment(unsigned Index) const;
@@ -689,9 +738,15 @@ public:
std::pair<unsigned, Optional<unsigned>>
getAllocSizeArgs(unsigned Index) const;
+ /// Get the vscale_range argument numbers (or pair(0, 0) if unknown).
+ std::pair<unsigned, unsigned> getVScaleRangeArgs(unsigned Index) const;
+
/// Return the attributes at the index as a string.
std::string getAsString(unsigned Index, bool InAttrGrp = false) const;
+ /// Return true if this attribute list belongs to the LLVMContext.
+ bool hasParentContext(LLVMContext &C) const;
+
//===--------------------------------------------------------------------===//
// AttributeList Introspection
//===--------------------------------------------------------------------===//
@@ -719,6 +774,8 @@ public:
/// Return true if there are no attributes.
bool isEmpty() const { return pImpl == nullptr; }
+ void print(raw_ostream &O) const;
+
void dump() const;
};
@@ -756,16 +813,16 @@ template <> struct DenseMapInfo<AttributeList> {
/// equality, presence of attributes, etc.
class AttrBuilder {
std::bitset<Attribute::EndAttrKinds> Attrs;
- std::map<std::string, std::string, std::less<>> TargetDepAttrs;
+ std::map<SmallString<32>, SmallString<32>, std::less<>> TargetDepAttrs;
MaybeAlign Alignment;
MaybeAlign StackAlignment;
uint64_t DerefBytes = 0;
uint64_t DerefOrNullBytes = 0;
uint64_t AllocSizeArgs = 0;
- Type *ByValType = nullptr;
- Type *StructRetType = nullptr;
- Type *ByRefType = nullptr;
- Type *PreallocatedType = nullptr;
+ uint64_t VScaleRangeArgs = 0;
+ std::array<Type *, Attribute::NumTypeAttrKinds> TypeAttrs = {};
+
+ Optional<unsigned> kindToTypeIndex(Attribute::AttrKind Kind) const;
public:
AttrBuilder() = default;
@@ -783,8 +840,8 @@ public:
AttrBuilder &addAttribute(Attribute::AttrKind Val) {
assert((unsigned)Val < Attribute::EndAttrKinds &&
"Attribute out of range!");
- assert(!Attribute::doesAttrKindHaveArgument(Val) &&
- "Adding integer attribute without adding a value!");
+ assert(Attribute::isEnumAttrKind(Val) &&
+ "Adding integer/type attribute without an argument!");
Attrs[Val] = true;
return *this;
}
@@ -848,22 +905,34 @@ public:
/// dereferenceable_or_null attribute exists (zero is returned otherwise).
uint64_t getDereferenceableOrNullBytes() const { return DerefOrNullBytes; }
+ /// Retrieve type for the given type attribute.
+ Type *getTypeAttr(Attribute::AttrKind Kind) const;
+
/// Retrieve the byval type.
- Type *getByValType() const { return ByValType; }
+ Type *getByValType() const { return getTypeAttr(Attribute::ByVal); }
/// Retrieve the sret type.
- Type *getStructRetType() const { return StructRetType; }
+ Type *getStructRetType() const { return getTypeAttr(Attribute::StructRet); }
/// Retrieve the byref type.
- Type *getByRefType() const { return ByRefType; }
+ Type *getByRefType() const { return getTypeAttr(Attribute::ByRef); }
/// Retrieve the preallocated type.
- Type *getPreallocatedType() const { return PreallocatedType; }
+ Type *getPreallocatedType() const {
+ return getTypeAttr(Attribute::Preallocated);
+ }
+
+ /// Retrieve the inalloca type.
+ Type *getInAllocaType() const { return getTypeAttr(Attribute::InAlloca); }
/// Retrieve the allocsize args, if the allocsize attribute exists. If it
/// doesn't exist, pair(0, 0) is returned.
std::pair<unsigned, Optional<unsigned>> getAllocSizeArgs() const;
+ /// Retrieve the vscale_range args, if the vscale_range attribute exists. If
+ /// it doesn't exist, pair(0, 0) is returned.
+ std::pair<unsigned, unsigned> getVScaleRangeArgs() const;
+
/// This turns an alignment into the form used internally in Attribute.
/// This call has no effect if Align is not set.
AttrBuilder &addAlignmentAttr(MaybeAlign Align);
@@ -900,6 +969,12 @@ public:
AttrBuilder &addAllocSizeAttr(unsigned ElemSizeArg,
const Optional<unsigned> &NumElemsArg);
+ /// This turns two ints into the form used internally in Attribute.
+ AttrBuilder &addVScaleRangeAttr(unsigned MinValue, unsigned MaxValue);
+
+ /// Add a type attribute with the given type.
+ AttrBuilder &addTypeAttr(Attribute::AttrKind Kind, Type *Ty);
+
/// This turns a byval type into the form used internally in Attribute.
AttrBuilder &addByValAttr(Type *Ty);
@@ -912,16 +987,23 @@ public:
/// This turns a preallocated type into the form used internally in Attribute.
AttrBuilder &addPreallocatedAttr(Type *Ty);
+ /// This turns an inalloca type into the form used internally in Attribute.
+ AttrBuilder &addInAllocaAttr(Type *Ty);
+
/// Add an allocsize attribute, using the representation returned by
/// Attribute.getIntValue().
AttrBuilder &addAllocSizeAttrFromRawRepr(uint64_t RawAllocSizeRepr);
+ /// Add a vscale_range attribute, using the representation returned by
+ /// Attribute.getIntValue().
+ AttrBuilder &addVScaleRangeAttrFromRawRepr(uint64_t RawVScaleRangeRepr);
+
/// Return true if the builder contains no target-independent
/// attributes.
bool empty() const { return Attrs.none(); }
// Iterators for target-dependent attributes.
- using td_type = std::pair<std::string, std::string>;
+ using td_type = decltype(TargetDepAttrs)::value_type;
using td_iterator = decltype(TargetDepAttrs)::iterator;
using td_const_iterator = decltype(TargetDepAttrs)::const_iterator;
using td_range = iterator_range<td_iterator>;
@@ -950,6 +1032,13 @@ namespace AttributeFuncs {
/// Which attributes cannot be applied to a type.
AttrBuilder typeIncompatible(Type *Ty);
+/// Get param/return attributes which imply immediate undefined behavior if an
+/// invalid value is passed. For example, this includes noundef (where undef
+/// implies UB), but not nonnull (where null implies poison). It also does not
+/// include attributes like nocapture, which constrain the function
+/// implementation rather than the passed value.
+AttrBuilder getUBImplyingAttributes();
+
/// \returns Return true if the two functions have compatible target-independent
/// attributes for inlining purposes.
bool areInlineCompatible(const Function &Caller, const Function &Callee);
diff --git a/llvm/include/llvm/IR/Attributes.td b/llvm/include/llvm/IR/Attributes.td
index f7ffc888c65a..99b474161df7 100644
--- a/llvm/include/llvm/IR/Attributes.td
+++ b/llvm/include/llvm/IR/Attributes.td
@@ -10,225 +10,246 @@
//
//===----------------------------------------------------------------------===//
+/// Attribute property base class.
+class AttrProperty;
+
+/// Can be used as function attribute.
+def FnAttr : AttrProperty;
+
+/// Can be used as parameter attribute.
+def ParamAttr : AttrProperty;
+
+/// Can be used as return attribute.
+def RetAttr : AttrProperty;
+
/// Attribute base class.
-class Attr<string S> {
+class Attr<string S, list<AttrProperty> P> {
// String representation of this attribute in the IR.
string AttrString = S;
+ list<AttrProperty> Properties = P;
}
/// Enum attribute.
-class EnumAttr<string S> : Attr<S>;
+class EnumAttr<string S, list<AttrProperty> P> : Attr<S, P>;
/// Int attribute.
-class IntAttr<string S> : Attr<S>;
-
-/// StringBool attribute.
-class StrBoolAttr<string S> : Attr<S>;
+class IntAttr<string S, list<AttrProperty> P> : Attr<S, P>;
/// Type attribute.
-class TypeAttr<string S> : Attr<S>;
+class TypeAttr<string S, list<AttrProperty> P> : Attr<S, P>;
+
+/// StringBool attribute.
+class StrBoolAttr<string S> : Attr<S, []>;
/// Target-independent enum attributes.
/// Alignment of parameter (5 bits) stored as log2 of alignment with +1 bias.
/// 0 means unaligned (different from align(1)).
-def Alignment : IntAttr<"align">;
+def Alignment : IntAttr<"align", [ParamAttr, RetAttr]>;
/// The result of the function is guaranteed to point to a number of bytes that
/// we can determine if we know the value of the function's arguments.
-def AllocSize : IntAttr<"allocsize">;
+def AllocSize : IntAttr<"allocsize", [FnAttr]>;
/// inline=always.
-def AlwaysInline : EnumAttr<"alwaysinline">;
+def AlwaysInline : EnumAttr<"alwaysinline", [FnAttr]>;
/// Function can access memory only using pointers based on its arguments.
-def ArgMemOnly : EnumAttr<"argmemonly">;
+def ArgMemOnly : EnumAttr<"argmemonly", [FnAttr]>;
/// Callee is recognized as a builtin, despite nobuiltin attribute on its
/// declaration.
-def Builtin : EnumAttr<"builtin">;
+def Builtin : EnumAttr<"builtin", [FnAttr]>;
/// Pass structure by value.
-def ByVal : TypeAttr<"byval">;
+def ByVal : TypeAttr<"byval", [ParamAttr]>;
/// Mark in-memory ABI type.
-def ByRef : TypeAttr<"byref">;
+def ByRef : TypeAttr<"byref", [ParamAttr]>;
/// Parameter or return value may not contain uninitialized or poison bits.
-def NoUndef : EnumAttr<"noundef">;
+def NoUndef : EnumAttr<"noundef", [ParamAttr, RetAttr]>;
/// Marks function as being in a cold path.
-def Cold : EnumAttr<"cold">;
+def Cold : EnumAttr<"cold", [FnAttr]>;
/// Can only be moved to control-equivalent blocks.
-def Convergent : EnumAttr<"convergent">;
+def Convergent : EnumAttr<"convergent", [FnAttr]>;
/// Marks function as being in a hot path and frequently called.
-def Hot: EnumAttr<"hot">;
+def Hot: EnumAttr<"hot", [FnAttr]>;
/// Pointer is known to be dereferenceable.
-def Dereferenceable : IntAttr<"dereferenceable">;
+def Dereferenceable : IntAttr<"dereferenceable", [ParamAttr, RetAttr]>;
/// Pointer is either null or dereferenceable.
-def DereferenceableOrNull : IntAttr<"dereferenceable_or_null">;
+def DereferenceableOrNull : IntAttr<"dereferenceable_or_null",
+ [ParamAttr, RetAttr]>;
+
+/// Provide pointer element type to intrinsic.
+def ElementType : TypeAttr<"elementtype", [ParamAttr]>;
/// Function may only access memory that is inaccessible from IR.
-def InaccessibleMemOnly : EnumAttr<"inaccessiblememonly">;
+def InaccessibleMemOnly : EnumAttr<"inaccessiblememonly", [FnAttr]>;
/// Function may only access memory that is either inaccessible from the IR,
/// or pointed to by its pointer arguments.
-def InaccessibleMemOrArgMemOnly : EnumAttr<"inaccessiblemem_or_argmemonly">;
+def InaccessibleMemOrArgMemOnly : EnumAttr<"inaccessiblemem_or_argmemonly",
+ [FnAttr]>;
/// Pass structure in an alloca.
-def InAlloca : EnumAttr<"inalloca">;
+def InAlloca : TypeAttr<"inalloca", [ParamAttr]>;
/// Source said inlining was desirable.
-def InlineHint : EnumAttr<"inlinehint">;
+def InlineHint : EnumAttr<"inlinehint", [FnAttr]>;
/// Force argument to be passed in register.
-def InReg : EnumAttr<"inreg">;
+def InReg : EnumAttr<"inreg", [ParamAttr, RetAttr]>;
/// Build jump-instruction tables and replace refs.
-def JumpTable : EnumAttr<"jumptable">;
+def JumpTable : EnumAttr<"jumptable", [FnAttr]>;
/// Function must be optimized for size first.
-def MinSize : EnumAttr<"minsize">;
+def MinSize : EnumAttr<"minsize", [FnAttr]>;
/// Naked function.
-def Naked : EnumAttr<"naked">;
+def Naked : EnumAttr<"naked", [FnAttr]>;
/// Nested function static chain.
-def Nest : EnumAttr<"nest">;
+def Nest : EnumAttr<"nest", [ParamAttr]>;
/// Considered to not alias after call.
-def NoAlias : EnumAttr<"noalias">;
+def NoAlias : EnumAttr<"noalias", [ParamAttr, RetAttr]>;
/// Callee isn't recognized as a builtin.
-def NoBuiltin : EnumAttr<"nobuiltin">;
+def NoBuiltin : EnumAttr<"nobuiltin", [FnAttr]>;
/// Function cannot enter into caller's translation unit.
-def NoCallback : EnumAttr<"nocallback">;
+def NoCallback : EnumAttr<"nocallback", [FnAttr]>;
/// Function creates no aliases of pointer.
-def NoCapture : EnumAttr<"nocapture">;
+def NoCapture : EnumAttr<"nocapture", [ParamAttr]>;
/// Call cannot be duplicated.
-def NoDuplicate : EnumAttr<"noduplicate">;
+def NoDuplicate : EnumAttr<"noduplicate", [FnAttr]>;
/// Function does not deallocate memory.
-def NoFree : EnumAttr<"nofree">;
+def NoFree : EnumAttr<"nofree", [FnAttr, ParamAttr]>;
/// Disable implicit floating point insts.
-def NoImplicitFloat : EnumAttr<"noimplicitfloat">;
+def NoImplicitFloat : EnumAttr<"noimplicitfloat", [FnAttr]>;
/// inline=never.
-def NoInline : EnumAttr<"noinline">;
+def NoInline : EnumAttr<"noinline", [FnAttr]>;
/// Function is called early and/or often, so lazy binding isn't worthwhile.
-def NonLazyBind : EnumAttr<"nonlazybind">;
+def NonLazyBind : EnumAttr<"nonlazybind", [FnAttr]>;
/// Disable merging for specified functions or call sites.
-def NoMerge : EnumAttr<"nomerge">;
+def NoMerge : EnumAttr<"nomerge", [FnAttr]>;
/// Pointer is known to be not null.
-def NonNull : EnumAttr<"nonnull">;
+def NonNull : EnumAttr<"nonnull", [ParamAttr, RetAttr]>;
/// The function does not recurse.
-def NoRecurse : EnumAttr<"norecurse">;
+def NoRecurse : EnumAttr<"norecurse", [FnAttr]>;
/// Disable redzone.
-def NoRedZone : EnumAttr<"noredzone">;
+def NoRedZone : EnumAttr<"noredzone", [FnAttr]>;
/// Mark the function as not returning.
-def NoReturn : EnumAttr<"noreturn">;
+def NoReturn : EnumAttr<"noreturn", [FnAttr]>;
/// Function does not synchronize.
-def NoSync : EnumAttr<"nosync">;
+def NoSync : EnumAttr<"nosync", [FnAttr]>;
/// Disable Indirect Branch Tracking.
-def NoCfCheck : EnumAttr<"nocf_check">;
+def NoCfCheck : EnumAttr<"nocf_check", [FnAttr]>;
-/// Function should be instrumented.
-def NoProfile : EnumAttr<"noprofile">;
+/// Function should not be instrumented.
+def NoProfile : EnumAttr<"noprofile", [FnAttr]>;
/// Function doesn't unwind stack.
-def NoUnwind : EnumAttr<"nounwind">;
+def NoUnwind : EnumAttr<"nounwind", [FnAttr]>;
+
+/// No SanitizeCoverage instrumentation.
+def NoSanitizeCoverage : EnumAttr<"nosanitize_coverage", [FnAttr]>;
/// Null pointer in address space zero is valid.
-def NullPointerIsValid : EnumAttr<"null_pointer_is_valid">;
+def NullPointerIsValid : EnumAttr<"null_pointer_is_valid", [FnAttr]>;
/// Select optimizations for best fuzzing signal.
-def OptForFuzzing : EnumAttr<"optforfuzzing">;
+def OptForFuzzing : EnumAttr<"optforfuzzing", [FnAttr]>;
/// opt_size.
-def OptimizeForSize : EnumAttr<"optsize">;
+def OptimizeForSize : EnumAttr<"optsize", [FnAttr]>;
/// Function must not be optimized.
-def OptimizeNone : EnumAttr<"optnone">;
+def OptimizeNone : EnumAttr<"optnone", [FnAttr]>;
/// Similar to byval but without a copy.
-def Preallocated : TypeAttr<"preallocated">;
+def Preallocated : TypeAttr<"preallocated", [FnAttr, ParamAttr]>;
/// Function does not access memory.
-def ReadNone : EnumAttr<"readnone">;
+def ReadNone : EnumAttr<"readnone", [FnAttr, ParamAttr]>;
/// Function only reads from memory.
-def ReadOnly : EnumAttr<"readonly">;
+def ReadOnly : EnumAttr<"readonly", [FnAttr, ParamAttr]>;
/// Return value is always equal to this argument.
-def Returned : EnumAttr<"returned">;
+def Returned : EnumAttr<"returned", [ParamAttr]>;
/// Parameter is required to be a trivial constant.
-def ImmArg : EnumAttr<"immarg">;
+def ImmArg : EnumAttr<"immarg", [ParamAttr]>;
/// Function can return twice.
-def ReturnsTwice : EnumAttr<"returns_twice">;
+def ReturnsTwice : EnumAttr<"returns_twice", [FnAttr]>;
/// Safe Stack protection.
-def SafeStack : EnumAttr<"safestack">;
+def SafeStack : EnumAttr<"safestack", [FnAttr]>;
/// Shadow Call Stack protection.
-def ShadowCallStack : EnumAttr<"shadowcallstack">;
+def ShadowCallStack : EnumAttr<"shadowcallstack", [FnAttr]>;
/// Sign extended before/after call.
-def SExt : EnumAttr<"signext">;
+def SExt : EnumAttr<"signext", [ParamAttr, RetAttr]>;
/// Alignment of stack for function (3 bits) stored as log2 of alignment with
/// +1 bias 0 means unaligned (different from alignstack=(1)).
-def StackAlignment : IntAttr<"alignstack">;
+def StackAlignment : IntAttr<"alignstack", [FnAttr, ParamAttr]>;
/// Function can be speculated.
-def Speculatable : EnumAttr<"speculatable">;
+def Speculatable : EnumAttr<"speculatable", [FnAttr]>;
/// Stack protection.
-def StackProtect : EnumAttr<"ssp">;
+def StackProtect : EnumAttr<"ssp", [FnAttr]>;
/// Stack protection required.
-def StackProtectReq : EnumAttr<"sspreq">;
+def StackProtectReq : EnumAttr<"sspreq", [FnAttr]>;
/// Strong Stack protection.
-def StackProtectStrong : EnumAttr<"sspstrong">;
+def StackProtectStrong : EnumAttr<"sspstrong", [FnAttr]>;
/// Function was called in a scope requiring strict floating point semantics.
-def StrictFP : EnumAttr<"strictfp">;
+def StrictFP : EnumAttr<"strictfp", [FnAttr]>;
/// Hidden pointer to structure to return.
-def StructRet : TypeAttr<"sret">;
+def StructRet : TypeAttr<"sret", [ParamAttr]>;
/// AddressSanitizer is on.
-def SanitizeAddress : EnumAttr<"sanitize_address">;
+def SanitizeAddress : EnumAttr<"sanitize_address", [FnAttr]>;
/// ThreadSanitizer is on.
-def SanitizeThread : EnumAttr<"sanitize_thread">;
+def SanitizeThread : EnumAttr<"sanitize_thread", [FnAttr]>;
/// MemorySanitizer is on.
-def SanitizeMemory : EnumAttr<"sanitize_memory">;
+def SanitizeMemory : EnumAttr<"sanitize_memory", [FnAttr]>;
/// HWAddressSanitizer is on.
-def SanitizeHWAddress : EnumAttr<"sanitize_hwaddress">;
+def SanitizeHWAddress : EnumAttr<"sanitize_hwaddress", [FnAttr]>;
/// MemTagSanitizer is on.
-def SanitizeMemTag : EnumAttr<"sanitize_memtag">;
+def SanitizeMemTag : EnumAttr<"sanitize_memtag", [FnAttr]>;
/// Speculative Load Hardening is enabled.
///
@@ -236,28 +257,35 @@ def SanitizeMemTag : EnumAttr<"sanitize_memtag">;
/// inlining) and a conservative merge strategy where inlining an attributed
/// body will add the attribute to the caller. This ensures that code carrying
/// this attribute will always be lowered with hardening enabled.
-def SpeculativeLoadHardening : EnumAttr<"speculative_load_hardening">;
+def SpeculativeLoadHardening : EnumAttr<"speculative_load_hardening",
+ [FnAttr]>;
/// Argument is swift error.
-def SwiftError : EnumAttr<"swifterror">;
+def SwiftError : EnumAttr<"swifterror", [ParamAttr]>;
/// Argument is swift self/context.
-def SwiftSelf : EnumAttr<"swiftself">;
+def SwiftSelf : EnumAttr<"swiftself", [ParamAttr]>;
+
+/// Argument is swift async context.
+def SwiftAsync : EnumAttr<"swiftasync", [ParamAttr]>;
/// Function must be in a unwind table.
-def UWTable : EnumAttr<"uwtable">;
+def UWTable : EnumAttr<"uwtable", [FnAttr]>;
+
+/// Minimum/Maximum vscale value for function.
+def VScaleRange : IntAttr<"vscale_range", [FnAttr]>;
/// Function always comes back to callsite.
-def WillReturn : EnumAttr<"willreturn">;
+def WillReturn : EnumAttr<"willreturn", [FnAttr]>;
/// Function only writes to memory.
-def WriteOnly : EnumAttr<"writeonly">;
+def WriteOnly : EnumAttr<"writeonly", [FnAttr, ParamAttr]>;
/// Zero extended before/after call.
-def ZExt : EnumAttr<"zeroext">;
+def ZExt : EnumAttr<"zeroext", [ParamAttr, RetAttr]>;
/// Function is required to make Forward Progress.
-def MustProgress : TypeAttr<"mustprogress">;
+def MustProgress : EnumAttr<"mustprogress", [FnAttr]>;
/// Target-independent string attributes.
def LessPreciseFPMAD : StrBoolAttr<"less-precise-fpmad">;
@@ -288,6 +316,7 @@ def : CompatRule<"isEqual<SanitizeMemTagAttr>">;
def : CompatRule<"isEqual<SafeStackAttr>">;
def : CompatRule<"isEqual<ShadowCallStackAttr>">;
def : CompatRule<"isEqual<UseSampleProfileAttr>">;
+def : CompatRule<"isEqual<NoProfileAttr>">;
class MergeRule<string F> {
// The name of the function called to merge the attributes of the caller and
diff --git a/llvm/include/llvm/IR/BasicBlock.h b/llvm/include/llvm/IR/BasicBlock.h
index b86bb16e1239..0af4ec4ef138 100644
--- a/llvm/include/llvm/IR/BasicBlock.h
+++ b/llvm/include/llvm/IR/BasicBlock.h
@@ -480,6 +480,10 @@ public:
/// Return true if it is legal to hoist instructions into this block.
bool isLegalToHoistInto() const;
+ /// Return true if this is the entry block of the containing function.
+ /// This method can only be used on blocks that have a parent function.
+ bool isEntryBlock() const;
+
Optional<uint64_t> getIrrLoopHeaderWeight() const;
/// Returns true if the Order field of child Instructions is valid.
@@ -509,7 +513,7 @@ public:
void validateInstrOrdering() const;
private:
-#if defined(_AIX) && (!defined(__GNUC__) || defined(__ibmxl__))
+#if defined(_AIX) && (!defined(__GNUC__) || defined(__clang__))
// Except for GCC; by default, AIX compilers store bit-fields in 4-byte words
// and give the `pack` pragma push semantics.
#define BEGIN_TWO_BYTE_PACK() _Pragma("pack(2)")
diff --git a/llvm/include/llvm/IR/BuiltinGCs.h b/llvm/include/llvm/IR/BuiltinGCs.h
new file mode 100644
index 000000000000..16aff01dbcf3
--- /dev/null
+++ b/llvm/include/llvm/IR/BuiltinGCs.h
@@ -0,0 +1,33 @@
+//===-- BuiltinGCs.h - Garbage collector linkage hacks --------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file contains hack functions to force linking in the builtin GC
+// components.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_IR_BUILTINGCS_H
+#define LLVM_IR_BUILTINGCS_H
+
+namespace llvm {
+
+/// FIXME: Collector instances are not useful on their own. These no longer
+/// serve any purpose except to link in the plugins.
+
+/// Ensure the definition of the builtin GCs gets linked in
+void linkAllBuiltinGCs();
+
+/// Ensure the ocaml-compatible metadata printer gets linked in.
+void linkOcamlGCPrinter();
+
+/// Ensure the erlang-compatible metadata printer gets linked in.
+void linkErlangGCPrinter();
+
+} // namespace llvm
+
+#endif // LLVM_IR_BUILTINGCS_H
diff --git a/llvm/include/llvm/IR/CFG.h b/llvm/include/llvm/IR/CFG.h
index f798b1af6c83..b872e2626981 100644
--- a/llvm/include/llvm/IR/CFG.h
+++ b/llvm/include/llvm/IR/CFG.h
@@ -40,10 +40,15 @@ class Use;
//===----------------------------------------------------------------------===//
template <class Ptr, class USE_iterator> // Predecessor Iterator
-class PredIterator : public std::iterator<std::forward_iterator_tag,
- Ptr, ptrdiff_t, Ptr*, Ptr*> {
- using super =
- std::iterator<std::forward_iterator_tag, Ptr, ptrdiff_t, Ptr*, Ptr*>;
+class PredIterator {
+public:
+ using iterator_category = std::forward_iterator_tag;
+ using value_type = Ptr;
+ using difference_type = std::ptrdiff_t;
+ using pointer = Ptr *;
+ using reference = Ptr *;
+
+private:
using Self = PredIterator<Ptr, USE_iterator>;
USE_iterator It;
@@ -59,9 +64,6 @@ class PredIterator : public std::iterator<std::forward_iterator_tag,
}
public:
- using pointer = typename super::pointer;
- using reference = typename super::reference;
-
PredIterator() = default;
explicit inline PredIterator(Ptr *bb) : It(bb->user_begin()) {
advancePastNonTerminators();
diff --git a/llvm/include/llvm/IR/CallingConv.h b/llvm/include/llvm/IR/CallingConv.h
index 6a4e368b2e9d..fd2854246522 100644
--- a/llvm/include/llvm/IR/CallingConv.h
+++ b/llvm/include/llvm/IR/CallingConv.h
@@ -86,6 +86,11 @@ namespace CallingConv {
/// and has no return value. All register values are preserved.
CFGuard_Check = 19,
+ /// SwiftTail - This follows the Swift calling convention in how arguments
+ /// are passed but guarantees tail calls will be made by making the callee
+ /// clean up their stack.
+ SwiftTail = 20,
+
// Target - This is the start of the target-specific calling conventions,
// e.g. fastcall and thiscall on X86.
FirstTargetCC = 64,
@@ -244,6 +249,9 @@ namespace CallingConv {
/// Calling convention used for AMD graphics targets.
AMDGPU_Gfx = 100,
+ /// M68k_INTR - Calling convention used for M68k interrupt routines.
+ M68k_INTR = 101,
+
/// The highest possible calling convention ID. Must be some 2^k - 1.
MaxID = 1023
};
diff --git a/llvm/include/llvm/IR/Comdat.h b/llvm/include/llvm/IR/Comdat.h
index f712a16dd318..01a047d36455 100644
--- a/llvm/include/llvm/IR/Comdat.h
+++ b/llvm/include/llvm/IR/Comdat.h
@@ -31,11 +31,11 @@ template <typename ValueTy> class StringMapEntry;
class Comdat {
public:
enum SelectionKind {
- Any, ///< The linker may choose any COMDAT.
- ExactMatch, ///< The data referenced by the COMDAT must be the same.
- Largest, ///< The linker will choose the largest COMDAT.
- NoDuplicates, ///< No other Module may specify this COMDAT.
- SameSize, ///< The data referenced by the COMDAT must be the same size.
+ Any, ///< The linker may choose any COMDAT.
+ ExactMatch, ///< The data referenced by the COMDAT must be the same.
+ Largest, ///< The linker will choose the largest COMDAT.
+ NoDeduplicate, ///< No deduplication is performed.
+ SameSize, ///< The data referenced by the COMDAT must be the same size.
};
Comdat(const Comdat &) = delete;
diff --git a/llvm/include/llvm/IR/Constant.h b/llvm/include/llvm/IR/Constant.h
index 0190aca27b72..4e2022b36e30 100644
--- a/llvm/include/llvm/IR/Constant.h
+++ b/llvm/include/llvm/IR/Constant.h
@@ -130,11 +130,13 @@ public:
bool isConstantUsed() const;
/// This method classifies the entry according to whether or not it may
- /// generate a relocation entry. This must be conservative, so if it might
- /// codegen to a relocatable entry, it should say so.
+ /// generate a relocation entry (either static or dynamic). This must be
+ /// conservative, so if it might codegen to a relocatable entry, it should say
+ /// so.
///
/// FIXME: This really should not be in IR.
bool needsRelocation() const;
+ bool needsDynamicRelocation() const;
/// For aggregates (struct/array/vector) return the constant that corresponds
/// to the specified element if possible, or null if not. This can return null
@@ -214,6 +216,28 @@ public:
/// both must either be scalars or vectors with the same element count. If no
/// changes are made, the constant C is returned.
static Constant *mergeUndefsWith(Constant *C, Constant *Other);
+
+ /// Return true if a constant is ConstantData or a ConstantAggregate or
+ /// ConstantExpr that contain only ConstantData.
+ bool isManifestConstant() const;
+
+private:
+ enum PossibleRelocationsTy {
+ /// This constant requires no relocations. That is, it holds simple
+ /// constants (like integrals).
+ NoRelocation = 0,
+
+ /// This constant holds static relocations that can be resolved by the
+ /// static linker.
+ LocalRelocation = 1,
+
+ /// This constant holds dynamic relocations that the dynamic linker will
+ /// need to resolve.
+ GlobalRelocation = 2,
+ };
+
+ /// Determine what potential relocations may be needed by this constant.
+ PossibleRelocationsTy getRelocationInfo() const;
};
} // end namespace llvm
diff --git a/llvm/include/llvm/IR/ConstantRange.h b/llvm/include/llvm/IR/ConstantRange.h
index 20e8e67436a4..44b8c395c89e 100644
--- a/llvm/include/llvm/IR/ConstantRange.h
+++ b/llvm/include/llvm/IR/ConstantRange.h
@@ -124,6 +124,10 @@ public:
static ConstantRange makeExactICmpRegion(CmpInst::Predicate Pred,
const APInt &Other);
+ /// Does the predicate \p Pred hold between ranges this and \p Other?
+ /// NOTE: false does not mean that inverse predicate holds!
+ bool icmp(CmpInst::Predicate Pred, const ConstantRange &Other) const;
+
/// Produce the largest range containing all X such that "X BinOp Y" is
/// guaranteed not to wrap (overflow) for *all* Y in Other. However, there may
/// be *some* Y in Other for which additional X not contained in the result
diff --git a/llvm/include/llvm/IR/Constants.h b/llvm/include/llvm/IR/Constants.h
index ac802232c23d..1f716a45b70f 100644
--- a/llvm/include/llvm/IR/Constants.h
+++ b/llvm/include/llvm/IR/Constants.h
@@ -58,9 +58,11 @@ class ConstantData : public Constant {
protected:
explicit ConstantData(Type *Ty, ValueTy VT) : Constant(Ty, VT, nullptr, 0) {}
- void *operator new(size_t s) { return User::operator new(s, 0); }
+ void *operator new(size_t S) { return User::operator new(S, 0); }
public:
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
+
ConstantData(const ConstantData &) = delete;
/// Methods to support type inquiry through isa, cast, and dyn_cast.
@@ -79,7 +81,7 @@ class ConstantInt final : public ConstantData {
APInt Val;
- ConstantInt(IntegerType *Ty, const APInt& V);
+ ConstantInt(IntegerType *Ty, const APInt &V);
void destroyConstantImpl();
@@ -95,16 +97,15 @@ public:
/// If Ty is a vector type, return a Constant with a splat of the given
/// value. Otherwise return a ConstantInt for the given value.
- static Constant *get(Type *Ty, uint64_t V, bool isSigned = false);
+ static Constant *get(Type *Ty, uint64_t V, bool IsSigned = false);
/// Return a ConstantInt with the specified integer value for the specified
/// type. If the type is wider than 64 bits, the value will be zero-extended
- /// to fit the type, unless isSigned is true, in which case the value will
+ /// to fit the type, unless IsSigned is true, in which case the value will
/// be interpreted as a 64-bit signed integer and sign-extended to fit
/// the type.
/// Get a ConstantInt for a specific value.
- static ConstantInt *get(IntegerType *Ty, uint64_t V,
- bool isSigned = false);
+ static ConstantInt *get(IntegerType *Ty, uint64_t V, bool IsSigned = false);
/// Return a ConstantInt with the specified value for the specified type. The
   /// value V will be canonicalized to an unsigned APInt. Accessing it with
@@ -120,19 +121,16 @@ public:
/// Return a ConstantInt constructed from the string strStart with the given
/// radix.
- static ConstantInt *get(IntegerType *Ty, StringRef Str,
- uint8_t radix);
+ static ConstantInt *get(IntegerType *Ty, StringRef Str, uint8_t Radix);
/// If Ty is a vector type, return a Constant with a splat of the given
/// value. Otherwise return a ConstantInt for the given value.
- static Constant *get(Type* Ty, const APInt& V);
+ static Constant *get(Type *Ty, const APInt &V);
/// Return the constant as an APInt value reference. This allows clients to
/// obtain a full-precision copy of the value.
/// Return the constant's value.
- inline const APInt &getValue() const {
- return Val;
- }
+ inline const APInt &getValue() const { return Val; }
/// getBitWidth - Return the bitwidth of this constant.
unsigned getBitWidth() const { return Val.getBitWidth(); }
@@ -141,17 +139,13 @@ public:
/// has been zero extended as appropriate for the type of this constant. Note
/// that this method can assert if the value does not fit in 64 bits.
/// Return the zero extended value.
- inline uint64_t getZExtValue() const {
- return Val.getZExtValue();
- }
+ inline uint64_t getZExtValue() const { return Val.getZExtValue(); }
/// Return the constant as a 64-bit integer value after it has been sign
/// extended as appropriate for the type of this constant. Note that
/// this method can assert if the value does not fit in 64 bits.
/// Return the sign extended value.
- inline int64_t getSExtValue() const {
- return Val.getSExtValue();
- }
+ inline int64_t getSExtValue() const { return Val.getSExtValue(); }
/// Return the constant as an llvm::MaybeAlign.
/// Note that this method can assert if the value does not fit in 64 bits or
@@ -171,9 +165,7 @@ public:
/// within is equal to a constant. This only works for very small values,
/// because this is all that can be represented with all types.
/// Determine if this constant's value is same as an unsigned char.
- bool equalsInt(uint64_t V) const {
- return Val == V;
- }
+ bool equalsInt(uint64_t V) const { return Val == V; }
/// getType - Specialize the getType() method to always return an IntegerType,
/// which reduces the amount of casting needed in parts of the compiler.
@@ -199,33 +191,27 @@ public:
/// This is just a convenience method to make client code smaller for a
/// common code. It also correctly performs the comparison without the
/// potential for an assertion from getZExtValue().
- bool isZero() const {
- return Val.isNullValue();
- }
+ bool isZero() const { return Val.isNullValue(); }
/// This is just a convenience method to make client code smaller for a
/// common case. It also correctly performs the comparison without the
/// potential for an assertion from getZExtValue().
/// Determine if the value is one.
- bool isOne() const {
- return Val.isOneValue();
- }
+ bool isOne() const { return Val.isOneValue(); }
/// This function will return true iff every bit in this constant is set
/// to true.
/// @returns true iff this constant's bits are all set to true.
/// Determine if the value is all ones.
- bool isMinusOne() const {
- return Val.isAllOnesValue();
- }
+ bool isMinusOne() const { return Val.isAllOnesValue(); }
/// This function will return true iff this constant represents the largest
/// value that may be represented by the constant's type.
/// @returns true iff this is the largest value that may be represented
/// by this type.
/// Determine if the value is maximal.
- bool isMaxValue(bool isSigned) const {
- if (isSigned)
+ bool isMaxValue(bool IsSigned) const {
+ if (IsSigned)
return Val.isMaxSignedValue();
else
return Val.isMaxValue();
@@ -236,8 +222,8 @@ public:
/// @returns true if this is the smallest value that may be represented by
/// this type.
/// Determine if the value is minimal.
- bool isMinValue(bool isSigned) const {
- if (isSigned)
+ bool isMinValue(bool IsSigned) const {
+ if (IsSigned)
return Val.isMinSignedValue();
else
return Val.isMinValue();
@@ -248,9 +234,7 @@ public:
/// value.
/// @returns true iff this constant is greater or equal to the given number.
/// Determine if the value is greater or equal to the given number.
- bool uge(uint64_t Num) const {
- return Val.uge(Num);
- }
+ bool uge(uint64_t Num) const { return Val.uge(Num); }
/// getLimitedValue - If the value is smaller than the specified limit,
/// return it, otherwise return the limit value. This causes the value
@@ -275,7 +259,7 @@ class ConstantFP final : public ConstantData {
APFloat Val;
- ConstantFP(Type *Ty, const APFloat& V);
+ ConstantFP(Type *Ty, const APFloat &V);
void destroyConstantImpl();
@@ -291,15 +275,16 @@ public:
/// for the specified value in the specified type. This should only be used
/// for simple constant values like 2.0/1.0 etc, that are known-valid both as
/// host double and as the target format.
- static Constant *get(Type* Ty, double V);
+ static Constant *get(Type *Ty, double V);
/// If Ty is a vector type, return a Constant with a splat of the given
/// value. Otherwise return a ConstantFP for the given value.
static Constant *get(Type *Ty, const APFloat &V);
- static Constant *get(Type* Ty, StringRef Str);
+ static Constant *get(Type *Ty, StringRef Str);
static ConstantFP *get(LLVMContext &Context, const APFloat &V);
- static Constant *getNaN(Type *Ty, bool Negative = false, uint64_t Payload = 0);
+ static Constant *getNaN(Type *Ty, bool Negative = false,
+ uint64_t Payload = 0);
static Constant *getQNaN(Type *Ty, bool Negative = false,
APInt *Payload = nullptr);
static Constant *getSNaN(Type *Ty, bool Negative = false,
@@ -377,7 +362,7 @@ public:
Constant *getElementValue(unsigned Idx) const;
/// Return the number of elements in the array, vector, or struct.
- unsigned getNumElements() const;
+ ElementCount getElementCount() const;
/// Methods for support type inquiry through isa, cast, and dyn_cast:
///
@@ -432,7 +417,7 @@ class ConstantArray final : public ConstantAggregate {
public:
// ConstantArray accessors
- static Constant *get(ArrayType *T, ArrayRef<Constant*> V);
+ static Constant *get(ArrayType *T, ArrayRef<Constant *> V);
private:
static Constant *getImpl(ArrayType *T, ArrayRef<Constant *> V);
@@ -464,32 +449,31 @@ class ConstantStruct final : public ConstantAggregate {
public:
// ConstantStruct accessors
- static Constant *get(StructType *T, ArrayRef<Constant*> V);
+ static Constant *get(StructType *T, ArrayRef<Constant *> V);
template <typename... Csts>
static std::enable_if_t<are_base_of<Constant, Csts...>::value, Constant *>
- get(StructType *T, Csts *... Vs) {
- SmallVector<Constant *, 8> Values({Vs...});
- return get(T, Values);
+ get(StructType *T, Csts *...Vs) {
+ return get(T, ArrayRef<Constant *>({Vs...}));
}
/// Return an anonymous struct that has the specified elements.
/// If the struct is possibly empty, then you must specify a context.
- static Constant *getAnon(ArrayRef<Constant*> V, bool Packed = false) {
+ static Constant *getAnon(ArrayRef<Constant *> V, bool Packed = false) {
return get(getTypeForElements(V, Packed), V);
}
- static Constant *getAnon(LLVMContext &Ctx,
- ArrayRef<Constant*> V, bool Packed = false) {
+ static Constant *getAnon(LLVMContext &Ctx, ArrayRef<Constant *> V,
+ bool Packed = false) {
return get(getTypeForElements(Ctx, V, Packed), V);
}
/// Return an anonymous struct type to use for a constant with the specified
/// set of elements. The list must not be empty.
- static StructType *getTypeForElements(ArrayRef<Constant*> V,
+ static StructType *getTypeForElements(ArrayRef<Constant *> V,
bool Packed = false);
/// This version of the method allows an empty list.
static StructType *getTypeForElements(LLVMContext &Ctx,
- ArrayRef<Constant*> V,
+ ArrayRef<Constant *> V,
bool Packed = false);
/// Specialization - reduce amount of casting.
@@ -517,7 +501,7 @@ class ConstantVector final : public ConstantAggregate {
public:
// ConstantVector accessors
- static Constant *get(ArrayRef<Constant*> V);
+ static Constant *get(ArrayRef<Constant *> V);
private:
static Constant *getImpl(ArrayRef<Constant *> V);
@@ -575,10 +559,10 @@ public:
//===----------------------------------------------------------------------===//
/// ConstantDataSequential - A vector or array constant whose element type is a
-/// simple 1/2/4/8-byte integer or float/double, and whose elements are just
-/// simple data values (i.e. ConstantInt/ConstantFP). This Constant node has no
-/// operands because it stores all of the elements of the constant as densely
-/// packed data, instead of as Value*'s.
+/// simple 1/2/4/8-byte integer or half/bfloat/float/double, and whose elements
+/// are just simple data values (i.e. ConstantInt/ConstantFP). This Constant
+/// node has no operands because it stores all of the elements of the constant
+/// as densely packed data, instead of as Value*'s.
///
/// This is the common base class of ConstantDataArray and ConstantDataVector.
///
@@ -667,7 +651,7 @@ public:
StringRef getAsCString() const {
assert(isCString() && "Isn't a C string");
StringRef Str = getAsString();
- return Str.substr(0, Str.size()-1);
+ return Str.substr(0, Str.size() - 1);
}
/// Return the raw, underlying, bytes of this data. Note that this is an
@@ -717,13 +701,14 @@ public:
return ConstantDataArray::get(Context, makeArrayRef(Elts));
}
- /// get() constructor - Return a constant with array type with an element
+ /// getRaw() constructor - Return a constant with array type with an element
/// count and element type matching the NumElements and ElementTy parameters
/// passed in. Note that this can return a ConstantAggregateZero object.
- /// ElementTy needs to be one of i8/i16/i32/i64/float/double. Data is the
- /// buffer containing the elements. Be careful to make sure Data uses the
+ /// ElementTy must be one of i8/i16/i32/i64/half/bfloat/float/double. Data is
+ /// the buffer containing the elements. Be careful to make sure Data uses the
/// right endianness, the buffer will be used as-is.
- static Constant *getRaw(StringRef Data, uint64_t NumElements, Type *ElementTy) {
+ static Constant *getRaw(StringRef Data, uint64_t NumElements,
+ Type *ElementTy) {
Type *Ty = ArrayType::get(ElementTy, NumElements);
return getImpl(Data, Ty);
}
@@ -788,6 +773,18 @@ public:
static Constant *get(LLVMContext &Context, ArrayRef<float> Elts);
static Constant *get(LLVMContext &Context, ArrayRef<double> Elts);
+ /// getRaw() constructor - Return a constant with vector type with an element
+ /// count and element type matching the NumElements and ElementTy parameters
+ /// passed in. Note that this can return a ConstantAggregateZero object.
+ /// ElementTy must be one of i8/i16/i32/i64/half/bfloat/float/double. Data is
+ /// the buffer containing the elements. Be careful to make sure Data uses the
+ /// right endianness, the buffer will be used as-is.
+ static Constant *getRaw(StringRef Data, uint64_t NumElements,
+ Type *ElementTy) {
+ Type *Ty = VectorType::get(ElementTy, ElementCount::getFixed(NumElements));
+ return getImpl(Data, Ty);
+ }
+
/// getFP() constructors - Return a constant of vector type with a float
/// element type taken from argument `ElementType', and count taken from
/// argument `Elts'. The amount of bits of the contained type must match the
@@ -800,7 +797,7 @@ public:
/// Return a ConstantVector with the specified constant in each element.
   /// The specified constant has to be of a compatible type (i8/i16/
- /// i32/i64/float/double) and must be a ConstantFP or ConstantInt.
+ /// i32/i64/half/bfloat/float/double) and must be a ConstantFP or ConstantInt.
static Constant *getSplat(unsigned NumElts, Constant *Elt);
/// Returns true if this is a splat constant, meaning that all elements have
@@ -853,12 +850,14 @@ class BlockAddress final : public Constant {
BlockAddress(Function *F, BasicBlock *BB);
- void *operator new(size_t s) { return User::operator new(s, 2); }
+ void *operator new(size_t S) { return User::operator new(S, 2); }
void destroyConstantImpl();
Value *handleOperandChangeImpl(Value *From, Value *To);
public:
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
+
/// Return a BlockAddress for the specified function and basic block.
static BlockAddress *get(Function *F, BasicBlock *BB);
@@ -874,8 +873,8 @@ public:
/// Transparently provide more efficient getOperand methods.
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
- Function *getFunction() const { return (Function*)Op<0>().get(); }
- BasicBlock *getBasicBlock() const { return (BasicBlock*)Op<1>().get(); }
+ Function *getFunction() const { return (Function *)Op<0>().get(); }
+ BasicBlock *getBasicBlock() const { return (BasicBlock *)Op<1>().get(); }
/// Methods for support type inquiry through isa, cast, and dyn_cast:
static bool classof(const Value *V) {
@@ -884,9 +883,8 @@ public:
};
template <>
-struct OperandTraits<BlockAddress> :
- public FixedNumOperandTraits<BlockAddress, 2> {
-};
+struct OperandTraits<BlockAddress>
+ : public FixedNumOperandTraits<BlockAddress, 2> {};
DEFINE_TRANSPARENT_OPERAND_ACCESSORS(BlockAddress, Value)
@@ -898,12 +896,14 @@ class DSOLocalEquivalent final : public Constant {
DSOLocalEquivalent(GlobalValue *GV);
- void *operator new(size_t s) { return User::operator new(s, 1); }
+ void *operator new(size_t S) { return User::operator new(S, 1); }
void destroyConstantImpl();
Value *handleOperandChangeImpl(Value *From, Value *To);
public:
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
+
/// Return a DSOLocalEquivalent for the specified global value.
static DSOLocalEquivalent *get(GlobalValue *GV);
@@ -975,17 +975,18 @@ public:
///
static Constant *getOffsetOf(Type *Ty, Constant *FieldNo);
- static Constant *getNeg(Constant *C, bool HasNUW = false, bool HasNSW =false);
+ static Constant *getNeg(Constant *C, bool HasNUW = false,
+ bool HasNSW = false);
static Constant *getFNeg(Constant *C);
static Constant *getNot(Constant *C);
- static Constant *getAdd(Constant *C1, Constant *C2,
- bool HasNUW = false, bool HasNSW = false);
+ static Constant *getAdd(Constant *C1, Constant *C2, bool HasNUW = false,
+ bool HasNSW = false);
static Constant *getFAdd(Constant *C1, Constant *C2);
- static Constant *getSub(Constant *C1, Constant *C2,
- bool HasNUW = false, bool HasNSW = false);
+ static Constant *getSub(Constant *C1, Constant *C2, bool HasNUW = false,
+ bool HasNSW = false);
static Constant *getFSub(Constant *C1, Constant *C2);
- static Constant *getMul(Constant *C1, Constant *C2,
- bool HasNUW = false, bool HasNSW = false);
+ static Constant *getMul(Constant *C1, Constant *C2, bool HasNUW = false,
+ bool HasNSW = false);
static Constant *getFMul(Constant *C1, Constant *C2);
static Constant *getUDiv(Constant *C1, Constant *C2, bool isExact = false);
static Constant *getSDiv(Constant *C1, Constant *C2, bool isExact = false);
@@ -997,8 +998,8 @@ public:
static Constant *getOr(Constant *C1, Constant *C2);
static Constant *getXor(Constant *C1, Constant *C2);
static Constant *getUMin(Constant *C1, Constant *C2);
- static Constant *getShl(Constant *C1, Constant *C2,
- bool HasNUW = false, bool HasNSW = false);
+ static Constant *getShl(Constant *C1, Constant *C2, bool HasNUW = false,
+ bool HasNSW = false);
static Constant *getLShr(Constant *C1, Constant *C2, bool isExact = false);
static Constant *getAShr(Constant *C1, Constant *C2, bool isExact = false);
static Constant *getTrunc(Constant *C, Type *Ty, bool OnlyIfReduced = false);
@@ -1107,48 +1108,47 @@ public:
bool OnlyIfReduced = false);
// Create a ZExt or BitCast cast constant expression
- static Constant *getZExtOrBitCast(
- Constant *C, ///< The constant to zext or bitcast
- Type *Ty ///< The type to zext or bitcast C to
+ static Constant *
+ getZExtOrBitCast(Constant *C, ///< The constant to zext or bitcast
+ Type *Ty ///< The type to zext or bitcast C to
);
// Create a SExt or BitCast cast constant expression
- static Constant *getSExtOrBitCast(
- Constant *C, ///< The constant to sext or bitcast
- Type *Ty ///< The type to sext or bitcast C to
+ static Constant *
+ getSExtOrBitCast(Constant *C, ///< The constant to sext or bitcast
+ Type *Ty ///< The type to sext or bitcast C to
);
// Create a Trunc or BitCast cast constant expression
- static Constant *getTruncOrBitCast(
- Constant *C, ///< The constant to trunc or bitcast
- Type *Ty ///< The type to trunc or bitcast C to
+ static Constant *
+ getTruncOrBitCast(Constant *C, ///< The constant to trunc or bitcast
+ Type *Ty ///< The type to trunc or bitcast C to
);
/// Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant
/// expression.
- static Constant *getPointerCast(
- Constant *C, ///< The pointer value to be casted (operand 0)
- Type *Ty ///< The type to which cast should be made
+ static Constant *
+ getPointerCast(Constant *C, ///< The pointer value to be casted (operand 0)
+ Type *Ty ///< The type to which cast should be made
);
/// Create a BitCast or AddrSpaceCast for a pointer type depending on
/// the address space.
static Constant *getPointerBitCastOrAddrSpaceCast(
- Constant *C, ///< The constant to addrspacecast or bitcast
- Type *Ty ///< The type to bitcast or addrspacecast C to
+ Constant *C, ///< The constant to addrspacecast or bitcast
+ Type *Ty ///< The type to bitcast or addrspacecast C to
);
/// Create a ZExt, Bitcast or Trunc for integer -> integer casts
- static Constant *getIntegerCast(
- Constant *C, ///< The integer constant to be casted
- Type *Ty, ///< The integer type to cast to
- bool isSigned ///< Whether C should be treated as signed or not
+ static Constant *
+ getIntegerCast(Constant *C, ///< The integer constant to be casted
+ Type *Ty, ///< The integer type to cast to
+ bool IsSigned ///< Whether C should be treated as signed or not
);
/// Create a FPExt, Bitcast or FPTrunc for fp -> fp casts
- static Constant *getFPCast(
- Constant *C, ///< The integer constant to be casted
- Type *Ty ///< The integer type to cast to
+ static Constant *getFPCast(Constant *C, ///< The integer constant to be casted
+ Type *Ty ///< The integer type to cast to
);
/// Return true if this is a convert constant expression
@@ -1178,7 +1178,7 @@ public:
/// folding if possible.
///
/// \param OnlyIfReducedTy see \a getWithOperands() docs.
- static Constant *get(unsigned Opcode, Constant *C1, unsigned Flags = 0,
+ static Constant *get(unsigned Opcode, Constant *C1, unsigned Flags = 0,
Type *OnlyIfReducedTy = nullptr);
/// get - Return a binary or shift operator constant expression,
@@ -1213,7 +1213,7 @@ public:
Optional<unsigned> InRangeIndex = None,
Type *OnlyIfReducedTy = nullptr) {
return getGetElementPtr(
- Ty, C, makeArrayRef((Value * const *)IdxList.data(), IdxList.size()),
+ Ty, C, makeArrayRef((Value *const *)IdxList.data(), IdxList.size()),
InBounds, InRangeIndex, OnlyIfReducedTy);
}
static Constant *getGetElementPtr(Type *Ty, Constant *C, Constant *Idx,
@@ -1294,7 +1294,7 @@ public:
/// This returns the current constant expression with the operands replaced
/// with the specified values. The specified array must have the same number
/// of operands as our current one.
- Constant *getWithOperands(ArrayRef<Constant*> Ops) const {
+ Constant *getWithOperands(ArrayRef<Constant *> Ops) const {
return getWithOperands(Ops, getType());
}
@@ -1334,9 +1334,8 @@ private:
};
template <>
-struct OperandTraits<ConstantExpr> :
- public VariadicOperandTraits<ConstantExpr, 1> {
-};
+struct OperandTraits<ConstantExpr>
+ : public VariadicOperandTraits<ConstantExpr, 1> {};
DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ConstantExpr, Constant)
diff --git a/llvm/include/llvm/IR/DIBuilder.h b/llvm/include/llvm/IR/DIBuilder.h
index e0238567f251..23ac47ca4d81 100644
--- a/llvm/include/llvm/IR/DIBuilder.h
+++ b/llvm/include/llvm/IR/DIBuilder.h
@@ -181,7 +181,9 @@ namespace llvm {
DIFile *File);
/// Create a single enumerator value.
- DIEnumerator *createEnumerator(StringRef Name, int64_t Val, bool IsUnsigned = false);
+ DIEnumerator *createEnumerator(StringRef Name, APSInt Value);
+ DIEnumerator *createEnumerator(StringRef Name, uint64_t Val,
+ bool IsUnsigned = false);
/// Create a DWARF unspecified type.
DIBasicType *createUnspecifiedType(StringRef Name);
@@ -538,6 +540,18 @@ namespace llvm {
uint64_t SizeInBits, uint32_t AlignInBits, DINodeArray Elements,
DIType *UnderlyingType, StringRef UniqueIdentifier = "", bool IsScoped = false);
+ /// Create debugging information entry for a set.
+ /// \param Scope Scope in which this set is defined.
+ /// \param Name Set name.
+ /// \param File File where this set is defined.
+ /// \param LineNo Line number.
+ /// \param SizeInBits Set size.
+ /// \param AlignInBits Set alignment.
+ /// \param Ty Base type of the set.
+ DIDerivedType *createSetType(DIScope *Scope, StringRef Name, DIFile *File,
+ unsigned LineNo, uint64_t SizeInBits,
+ uint32_t AlignInBits, DIType *Ty);
+
/// Create subroutine type.
/// \param ParameterTypes An array of subroutine parameter types. This
/// includes return type at 0th index.
diff --git a/llvm/include/llvm/IR/DataLayout.h b/llvm/include/llvm/IR/DataLayout.h
index eb031613a935..300f73c12df0 100644
--- a/llvm/include/llvm/IR/DataLayout.h
+++ b/llvm/include/llvm/IR/DataLayout.h
@@ -29,6 +29,7 @@
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/Alignment.h"
+#include "llvm/Support/TrailingObjects.h"
#include "llvm/Support/TypeSize.h"
#include <cassert>
#include <cstdint>
@@ -260,10 +261,7 @@ public:
///
/// The width is specified in bits.
bool isLegalInteger(uint64_t Width) const {
- for (unsigned LegalIntWidth : LegalIntWidths)
- if (LegalIntWidth == Width)
- return true;
- return false;
+ return llvm::is_contained(LegalIntWidths, Width);
}
bool isIllegalInteger(uint64_t Width) const { return !isLegalInteger(Width); }
@@ -591,25 +589,6 @@ public:
///
/// This includes an explicitly requested alignment (if the global has one).
Align getPreferredAlign(const GlobalVariable *GV) const;
-
- /// Returns the preferred alignment of the specified global.
- ///
- /// This includes an explicitly requested alignment (if the global has one).
- LLVM_ATTRIBUTE_DEPRECATED(
- inline unsigned getPreferredAlignment(const GlobalVariable *GV) const,
- "Use getPreferredAlign instead") {
- return getPreferredAlign(GV).value();
- }
-
- /// Returns the preferred alignment of the specified global, returned
- /// in log form.
- ///
- /// This includes an explicitly requested alignment (if the global has one).
- LLVM_ATTRIBUTE_DEPRECATED(
- inline unsigned getPreferredAlignmentLog(const GlobalVariable *GV) const,
- "Inline where needed") {
- return Log2(getPreferredAlign(GV));
- }
};
inline DataLayout *unwrap(LLVMTargetDataRef P) {
@@ -622,12 +601,11 @@ inline LLVMTargetDataRef wrap(const DataLayout *P) {
/// Used to lazily calculate structure layout information for a target machine,
/// based on the DataLayout structure.
-class StructLayout {
+class StructLayout final : public TrailingObjects<StructLayout, uint64_t> {
uint64_t StructSize;
Align StructAlignment;
unsigned IsPadded : 1;
unsigned NumElements : 31;
- uint64_t MemberOffsets[1]; // variable sized array!
public:
uint64_t getSizeInBytes() const { return StructSize; }
@@ -644,9 +622,18 @@ public:
/// index that contains it.
unsigned getElementContainingOffset(uint64_t Offset) const;
+ MutableArrayRef<uint64_t> getMemberOffsets() {
+ return llvm::makeMutableArrayRef(getTrailingObjects<uint64_t>(),
+ NumElements);
+ }
+
+ ArrayRef<uint64_t> getMemberOffsets() const {
+ return llvm::makeArrayRef(getTrailingObjects<uint64_t>(), NumElements);
+ }
+
uint64_t getElementOffset(unsigned Idx) const {
assert(Idx < NumElements && "Invalid element idx!");
- return MemberOffsets[Idx];
+ return getMemberOffsets()[Idx];
}
uint64_t getElementOffsetInBits(unsigned Idx) const {
@@ -657,6 +644,10 @@ private:
friend class DataLayout; // Only DataLayout can create this class
StructLayout(StructType *ST, const DataLayout &DL);
+
+ size_t numTrailingObjects(OverloadToken<uint64_t>) const {
+ return NumElements;
+ }
};
// The implementation of this method is provided inline as it is particularly
diff --git a/llvm/include/llvm/IR/DebugInfo.h b/llvm/include/llvm/IR/DebugInfo.h
index e7c1d9a90677..eba422a9fde6 100644
--- a/llvm/include/llvm/IR/DebugInfo.h
+++ b/llvm/include/llvm/IR/DebugInfo.h
@@ -19,15 +19,33 @@
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
+#include "llvm/ADT/TinyPtrVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/IR/DebugInfoMetadata.h"
namespace llvm {
+class DbgDeclareInst;
+class DbgValueInst;
class DbgVariableIntrinsic;
class Instruction;
class Module;
+/// Finds all intrinsics declaring local variables as living in the memory that
+/// 'V' points to. This may include a mix of dbg.declare and
+/// dbg.addr intrinsics.
+TinyPtrVector<DbgVariableIntrinsic *> FindDbgAddrUses(Value *V);
+
+/// Like \c FindDbgAddrUses, but only returns dbg.declare intrinsics, not
+/// dbg.addr.
+TinyPtrVector<DbgDeclareInst *> FindDbgDeclareUses(Value *V);
+
+/// Finds the llvm.dbg.value intrinsics describing a value.
+void findDbgValues(SmallVectorImpl<DbgValueInst *> &DbgValues, Value *V);
+
+/// Finds the debug info intrinsics describing a value.
+void findDbgUsers(SmallVectorImpl<DbgVariableIntrinsic *> &DbgInsts, Value *V);
+
/// Find subprogram that is enclosing this scope.
DISubprogram *getDISubprogram(const MDNode *Scope);
@@ -52,11 +70,11 @@ bool stripDebugInfo(Function &F);
bool stripNonLineTableDebugInfo(Module &M);
/// Update the debug locations contained within the MD_loop metadata attached
-/// to the instruction \p I, if one exists. \p Updater is applied to each debug
-/// location in the MD_loop metadata: the returned value is included in the
+/// to the instruction \p I, if one exists. \p Updater is applied to Metadata
+/// operand in the MD_loop metadata: the returned value is included in the
/// updated loop metadata node if it is non-null.
void updateLoopMetadataDebugLocations(
- Instruction &I, function_ref<DILocation *(const DILocation &)> Updater);
+ Instruction &I, function_ref<Metadata *(Metadata *)> Updater);
/// Return Debug Info Metadata Version by checking module flags.
unsigned getDebugMetadataVersionFromModule(const Module &M);
@@ -81,6 +99,9 @@ public:
/// Process debug info location.
void processLocation(const Module &M, const DILocation *Loc);
+ /// Process subprogram.
+ void processSubprogram(DISubprogram *SP);
+
/// Clear all lists.
void reset();
@@ -89,7 +110,6 @@ private:
void processCompileUnit(DICompileUnit *CU);
void processScope(DIScope *Scope);
- void processSubprogram(DISubprogram *SP);
void processType(DIType *DT);
bool addCompileUnit(DICompileUnit *CU);
bool addGlobalVariable(DIGlobalVariableExpression *DIG);
diff --git a/llvm/include/llvm/IR/DebugInfoMetadata.h b/llvm/include/llvm/IR/DebugInfoMetadata.h
index 22dd5ee6efac..20a032f04909 100644
--- a/llvm/include/llvm/IR/DebugInfoMetadata.h
+++ b/llvm/include/llvm/IR/DebugInfoMetadata.h
@@ -26,6 +26,8 @@
#include "llvm/IR/Constants.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Support/Casting.h"
+#include "llvm/Support/CommandLine.h"
+#include "llvm/Support/Discriminator.h"
#include <cassert>
#include <climits>
#include <cstddef>
@@ -60,6 +62,8 @@
namespace llvm {
+extern cl::opt<bool> EnableFSDiscriminator;
+
class DITypeRefArray {
const MDTuple *N = nullptr;
@@ -80,11 +84,16 @@ public:
return cast_or_null<DIType>(N->getOperand(I));
}
- class iterator : std::iterator<std::input_iterator_tag, DIType *,
- std::ptrdiff_t, void, DIType *> {
+ class iterator {
MDNode::op_iterator I = nullptr;
public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = DIType *;
+ using difference_type = std::ptrdiff_t;
+ using pointer = void;
+ using reference = DIType *;
+
iterator() = default;
explicit iterator(MDNode::op_iterator I) : I(I) {}
@@ -148,7 +157,7 @@ protected:
void setTag(unsigned Tag) { SubclassData16 = Tag; }
public:
- unsigned getTag() const { return SubclassData16; }
+ dwarf::Tag getTag() const { return (dwarf::Tag)SubclassData16; }
/// Debug info flags.
///
@@ -257,7 +266,7 @@ public:
/// Return a (temporary) clone of this.
TempGenericDINode clone() const { return cloneImpl(); }
- unsigned getTag() const { return SubclassData16; }
+ dwarf::Tag getTag() const { return (dwarf::Tag)SubclassData16; }
StringRef getHeader() const { return getStringOperand(0); }
MDString *getRawHeader() const { return getOperandAs<MDString>(0); }
@@ -335,10 +344,9 @@ public:
Metadata *getRawStride() const { return getOperand(3).get(); }
- typedef PointerUnion<ConstantInt*, DIVariable*> CountType;
typedef PointerUnion<ConstantInt *, DIVariable *, DIExpression *> BoundType;
- CountType getCount() const;
+ BoundType getCount() const;
BoundType getLowerBound() const;
@@ -1572,31 +1580,6 @@ class DILocation : public MDNode {
ShouldCreate);
}
- /// With a given unsigned int \p U, use up to 13 bits to represent it.
- /// old_bit 1~5 --> new_bit 1~5
- /// old_bit 6~12 --> new_bit 7~13
- /// new_bit_6 is 0 if higher bits (7~13) are all 0
- static unsigned getPrefixEncodingFromUnsigned(unsigned U) {
- U &= 0xfff;
- return U > 0x1f ? (((U & 0xfe0) << 1) | (U & 0x1f) | 0x20) : U;
- }
-
- /// Reverse transformation as getPrefixEncodingFromUnsigned.
- static unsigned getUnsignedFromPrefixEncoding(unsigned U) {
- if (U & 1)
- return 0;
- U >>= 1;
- return (U & 0x20) ? (((U >> 1) & 0xfe0) | (U & 0x1f)) : (U & 0x1f);
- }
-
- /// Returns the next component stored in discriminator.
- static unsigned getNextComponentInDiscriminator(unsigned D) {
- if ((D & 1) == 0)
- return D >> ((D & 0x40) ? 14 : 7);
- else
- return D >> 1;
- }
-
TempDILocation cloneImpl() const {
// Get the raw scope/inlinedAt since it is possible to invoke this on
// a DILocation containing temporary metadata.
@@ -1604,14 +1587,6 @@ class DILocation : public MDNode {
getRawInlinedAt(), isImplicitCode());
}
- static unsigned encodeComponent(unsigned C) {
- return (C == 0) ? 1U : (getPrefixEncodingFromUnsigned(C) << 1);
- }
-
- static unsigned encodingBits(unsigned C) {
- return (C == 0) ? 1 : (C > 0x1f ? 14 : 7);
- }
-
public:
// Disallow replacing operands.
void replaceOperandWith(unsigned I, Metadata *New) = delete;
@@ -1643,8 +1618,8 @@ public:
/// written explicitly by the user (e.g. cleanup stuff in C++ put on a closing
/// bracket). It's useful for code coverage to not show a counter on "empty"
/// lines.
- bool isImplicitCode() const { return ImplicitCode; }
- void setImplicitCode(bool ImplicitCode) { this->ImplicitCode = ImplicitCode; }
+ bool isImplicitCode() const { return SubclassData1; }
+ void setImplicitCode(bool ImplicitCode) { SubclassData1 = ImplicitCode; }
DIFile *getFile() const { return getScope()->getFile(); }
StringRef getFilename() const { return getScope()->getFilename(); }
@@ -1758,8 +1733,22 @@ public:
static
const DILocation *getMergedLocations(ArrayRef<const DILocation *> Locs);
+  /// Return the masked discriminator value for an input discriminator value D
+  /// (i.e. zero out the (B+1)-th and above bits of D; B is 0-based).
+ // Example: an input of (0x1FF, 7) returns 0xFF.
+ static unsigned getMaskedDiscriminator(unsigned D, unsigned B) {
+ return (D & getN1Bits(B));
+ }
+
+ /// Return the bits used for base discriminators.
+ static unsigned getBaseDiscriminatorBits() { return getBaseFSBitEnd(); }
+
/// Returns the base discriminator for a given encoded discriminator \p D.
- static unsigned getBaseDiscriminatorFromDiscriminator(unsigned D) {
+ static unsigned
+ getBaseDiscriminatorFromDiscriminator(unsigned D,
+ bool IsFSDiscriminator = false) {
+ if (IsFSDiscriminator)
+ return getMaskedDiscriminator(D, getBaseDiscriminatorBits());
return getUnsignedFromPrefixEncoding(D);
}
@@ -1781,6 +1770,8 @@ public:
/// Returns the duplication factor for a given encoded discriminator \p D, or
/// 1 if no value or 0 is encoded.
static unsigned getDuplicationFactorFromDiscriminator(unsigned D) {
+ if (EnableFSDiscriminator)
+ return 1;
D = getNextComponentInDiscriminator(D);
unsigned Ret = getUnsignedFromPrefixEncoding(D);
if (Ret == 0)
@@ -1951,6 +1942,7 @@ public:
unsigned getVirtualIndex() const { return VirtualIndex; }
int getThisAdjustment() const { return ThisAdjustment; }
unsigned getScopeLine() const { return ScopeLine; }
+ void setScopeLine(unsigned L) { assert(isDistinct()); ScopeLine = L; }
DIFlags getFlags() const { return Flags; }
DISPFlags getSPFlags() const { return SPFlags; }
bool isLocalToUnit() const { return getSPFlags() & SPFlagLocalToUnit; }
@@ -2010,6 +2002,8 @@ public:
StringRef getName() const { return getStringOperand(2); }
StringRef getLinkageName() const { return getStringOperand(3); }
+ /// Only used by clients of CloneFunction, and only right after the cloning.
+ void replaceLinkageName(MDString *LN) { replaceOperandWith(3, LN); }
DISubroutineType *getType() const {
return cast_or_null<DISubroutineType>(getRawType());
@@ -2175,11 +2169,6 @@ public:
(Scope, File, Discriminator))
TempDILexicalBlockFile clone() const { return cloneImpl(); }
-
- // TODO: Remove these once they're gone from DILexicalBlockBase.
- unsigned getLine() const = delete;
- unsigned getColumn() const = delete;
-
unsigned getDiscriminator() const { return Discriminator; }
static bool classof(const Metadata *MD) {
@@ -2211,7 +2200,8 @@ DILocation::cloneWithDiscriminator(unsigned Discriminator) const {
}
unsigned DILocation::getBaseDiscriminator() const {
- return getBaseDiscriminatorFromDiscriminator(getDiscriminator());
+ return getBaseDiscriminatorFromDiscriminator(getDiscriminator(),
+ EnableFSDiscriminator);
}
unsigned DILocation::getDuplicationFactor() const {
@@ -2224,6 +2214,14 @@ unsigned DILocation::getCopyIdentifier() const {
Optional<const DILocation *> DILocation::cloneWithBaseDiscriminator(unsigned D) const {
unsigned BD, DF, CI;
+
+ if (EnableFSDiscriminator) {
+ BD = getBaseDiscriminator();
+ if (D == BD)
+ return this;
+ return cloneWithDiscriminator(D);
+ }
+
decodeDiscriminator(getDiscriminator(), BD, DF, CI);
if (D == BD)
return this;
@@ -2233,6 +2231,8 @@ Optional<const DILocation *> DILocation::cloneWithBaseDiscriminator(unsigned D)
}
Optional<const DILocation *> DILocation::cloneByMultiplyingDuplicationFactor(unsigned DF) const {
+ assert(!EnableFSDiscriminator && "FSDiscriminator should not call this.");
+
DF *= getDuplicationFactor();
if (DF <= 1)
return this;
@@ -2587,11 +2587,20 @@ public:
return Elements[I];
}
- /// Determine whether this represents a standalone constant value.
- bool isConstant() const;
+ enum SignedOrUnsignedConstant { SignedConstant, UnsignedConstant };
+  /// Determine whether this represents a constant value; if so,
+  /// return its sign information.
+ llvm::Optional<SignedOrUnsignedConstant> isConstant() const;
- /// Determine whether this represents a standalone signed constant value.
- bool isSignedConstant() const;
+ /// Return the number of unique location operands referred to (via
+ /// DW_OP_LLVM_arg) in this expression; this is not necessarily the number of
+ /// instances of DW_OP_LLVM_arg within the expression.
+ /// For example, for the expression:
+ /// (DW_OP_LLVM_arg 0, DW_OP_LLVM_arg 1, DW_OP_plus,
+ /// DW_OP_LLVM_arg 0, DW_OP_mul)
+ /// This function would return 2, as there are two unique location operands
+ /// (0 and 1).
+ uint64_t getNumLocationOperands() const;
using element_iterator = ArrayRef<uint64_t>::iterator;
@@ -2633,11 +2642,16 @@ public:
};
/// An iterator for expression operands.
- class expr_op_iterator
- : public std::iterator<std::input_iterator_tag, ExprOperand> {
+ class expr_op_iterator {
ExprOperand Op;
public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = ExprOperand;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
expr_op_iterator() = default;
explicit expr_op_iterator(element_iterator I) : Op(I) {}
@@ -2735,6 +2749,10 @@ public:
/// return true with an offset of zero.
bool extractIfOffset(int64_t &Offset) const;
+ /// Returns true iff this DIExpression contains at least one instance of
+ /// `DW_OP_LLVM_arg, n` for all n in [0, N).
+ bool hasAllLocationOps(unsigned N) const;
+
/// Checks if the last 4 elements of the expression are DW_OP_constu <DWARF
/// Address Space> DW_OP_swap DW_OP_xderef and extracts the <DWARF Address
/// Space>.
@@ -2775,6 +2793,23 @@ public:
static DIExpression *appendToStack(const DIExpression *Expr,
ArrayRef<uint64_t> Ops);
+ /// Create a copy of \p Expr by appending the given list of \p Ops to each
+ /// instance of the operand `DW_OP_LLVM_arg, \p ArgNo`. This is used to
+ /// modify a specific location used by \p Expr, such as when salvaging that
+ /// location.
+ static DIExpression *appendOpsToArg(const DIExpression *Expr,
+ ArrayRef<uint64_t> Ops, unsigned ArgNo,
+ bool StackValue = false);
+
+ /// Create a copy of \p Expr with each instance of
+ /// `DW_OP_LLVM_arg, \p OldArg` replaced with `DW_OP_LLVM_arg, \p NewArg`,
+ /// and each instance of `DW_OP_LLVM_arg, Arg` with `DW_OP_LLVM_arg, Arg - 1`
+ /// for all Arg > \p OldArg.
+ /// This is used when replacing one of the operands of a debug value list
+ /// with another operand in the same list and deleting the old operand.
+ static DIExpression *replaceArg(const DIExpression *Expr, uint64_t OldArg,
+ uint64_t NewArg);
+
/// Create a DIExpression to describe one part of an aggregate variable that
/// is fragmented across multiple Values. The DW_OP_LLVM_fragment operation
/// will be appended to the elements of \c Expr. If \c Expr already contains
@@ -3234,12 +3269,6 @@ public:
return "";
}
- Optional<StringRef> getSource() const {
- if (auto *F = getFile())
- return F->getSource();
- return None;
- }
-
MDString *getRawName() const { return getOperandAs<MDString>(0); }
Metadata *getRawFile() const { return getOperand(1); }
MDString *getRawGetterName() const { return getOperandAs<MDString>(2); }
@@ -3512,6 +3541,52 @@ public:
}
};
+/// List of ValueAsMetadata, to be used as an argument to a dbg.value
+/// intrinsic.
+class DIArgList : public MDNode {
+ friend class LLVMContextImpl;
+ friend class MDNode;
+ using iterator = SmallVectorImpl<ValueAsMetadata *>::iterator;
+
+ SmallVector<ValueAsMetadata *, 4> Args;
+
+ DIArgList(LLVMContext &C, StorageType Storage,
+ ArrayRef<ValueAsMetadata *> Args)
+ : MDNode(C, DIArgListKind, Storage, None),
+ Args(Args.begin(), Args.end()) {
+ track();
+ }
+ ~DIArgList() { untrack(); }
+
+ static DIArgList *getImpl(LLVMContext &Context,
+ ArrayRef<ValueAsMetadata *> Args,
+ StorageType Storage, bool ShouldCreate = true);
+
+ TempDIArgList cloneImpl() const {
+ return getTemporary(getContext(), getArgs());
+ }
+
+ void track();
+ void untrack();
+ void dropAllReferences();
+
+public:
+ DEFINE_MDNODE_GET(DIArgList, (ArrayRef<ValueAsMetadata *> Args), (Args))
+
+ TempDIArgList clone() const { return cloneImpl(); }
+
+ ArrayRef<ValueAsMetadata *> getArgs() const { return Args; }
+
+ iterator args_begin() { return Args.begin(); }
+ iterator args_end() { return Args.end(); }
+
+ static bool classof(const Metadata *MD) {
+ return MD->getMetadataID() == DIArgListKind;
+ }
+
+ void handleChangedOperand(void *Ref, Metadata *New);
+};
+
/// Identifies a unique instance of a variable.
///
/// Storage for identifying a potentially inlined instance of a variable,
@@ -3546,10 +3621,10 @@ public:
InlinedAt(InlinedAt) {}
const DILocalVariable *getVariable() const { return Variable; }
- const Optional<FragmentInfo> getFragment() const { return Fragment; }
+ Optional<FragmentInfo> getFragment() const { return Fragment; }
const DILocation *getInlinedAt() const { return InlinedAt; }
- const FragmentInfo getFragmentOrDefault() const {
+ FragmentInfo getFragmentOrDefault() const {
return Fragment.getValueOr(DefaultFragment);
}
diff --git a/llvm/include/llvm/IR/DebugLoc.h b/llvm/include/llvm/IR/DebugLoc.h
index 4824f2e9f2fd..4c48f048d60c 100644
--- a/llvm/include/llvm/IR/DebugLoc.h
+++ b/llvm/include/llvm/IR/DebugLoc.h
@@ -113,4 +113,4 @@ namespace llvm {
} // end namespace llvm
-#endif /* LLVM_SUPPORT_DEBUGLOC_H */
+#endif // LLVM_IR_DEBUGLOC_H
diff --git a/llvm/include/llvm/IR/DerivedTypes.h b/llvm/include/llvm/IR/DerivedTypes.h
index c3d97f4520e1..b68a912b5f70 100644
--- a/llvm/include/llvm/IR/DerivedTypes.h
+++ b/llvm/include/llvm/IR/DerivedTypes.h
@@ -244,8 +244,7 @@ public:
static std::enable_if_t<are_base_of<Type, Tys...>::value, StructType *>
create(StringRef Name, Type *elt1, Tys *... elts) {
assert(elt1 && "Cannot create a struct type with no elements with this");
- SmallVector<llvm::Type *, 8> StructFields({elt1, elts...});
- return create(StructFields, Name);
+ return create(ArrayRef<Type *>({elt1, elts...}), Name);
}
/// This static method is the primary way to create a literal StructType.
@@ -263,8 +262,7 @@ public:
get(Type *elt1, Tys *... elts) {
assert(elt1 && "Cannot create a struct type with no elements with this");
LLVMContext &Ctx = elt1->getContext();
- SmallVector<llvm::Type *, 8> StructFields({elt1, elts...});
- return llvm::StructType::get(Ctx, StructFields);
+ return StructType::get(Ctx, ArrayRef<Type *>({elt1, elts...}));
}
/// Return the type with the specified name, or null if there is none by that
@@ -306,8 +304,7 @@ public:
std::enable_if_t<are_base_of<Type, Tys...>::value, void>
setBody(Type *elt1, Tys *... elts) {
assert(elt1 && "Cannot create a struct type with no elements with this");
- SmallVector<llvm::Type *, 8> StructFields({elt1, elts...});
- setBody(StructFields);
+ setBody(ArrayRef<Type *>({elt1, elts...}));
}
/// Return true if the specified type is valid as a element type.
@@ -318,7 +315,7 @@ public:
element_iterator element_begin() const { return ContainedTys; }
element_iterator element_end() const { return &ContainedTys[NumContainedTys];}
- ArrayRef<Type *> const elements() const {
+ ArrayRef<Type *> elements() const {
return makeArrayRef(element_begin(), element_end());
}
@@ -421,15 +418,6 @@ public:
VectorType(const VectorType &) = delete;
VectorType &operator=(const VectorType &) = delete;
- /// Get the number of elements in this vector. It does not make sense to call
- /// this function on a scalable vector, and this will be moved into
- /// FixedVectorType in a future commit
- LLVM_ATTRIBUTE_DEPRECATED(
- inline unsigned getNumElements() const,
- "Calling this function via a base VectorType is deprecated. Either call "
- "getElementCount() and handle the case where Scalable is true or cast to "
- "FixedVectorType.");
-
Type *getElementType() const { return ContainedType; }
/// This static method is the primary way to construct an VectorType.
@@ -532,21 +520,6 @@ public:
}
};
-unsigned VectorType::getNumElements() const {
- ElementCount EC = getElementCount();
-#ifdef STRICT_FIXED_SIZE_VECTORS
- assert(!EC.isScalable() &&
- "Request for fixed number of elements from scalable vector");
-#else
- if (EC.isScalable())
- WithColor::warning()
- << "The code that requested the fixed number of elements has made the "
- "assumption that this vector is not scalable. This assumption was "
- "not correct, and this may lead to broken code\n";
-#endif
- return EC.getKnownMinValue();
-}
-
/// Class to represent fixed width SIMD vectors
class FixedVectorType : public VectorType {
protected:
@@ -657,6 +630,7 @@ inline ElementCount VectorType::getElementCount() const {
/// Class to represent pointers.
class PointerType : public Type {
explicit PointerType(Type *ElType, unsigned AddrSpace);
+ explicit PointerType(LLVMContext &C, unsigned AddrSpace);
Type *PointeeTy;
@@ -667,14 +641,40 @@ public:
/// This constructs a pointer to an object of the specified type in a numbered
/// address space.
static PointerType *get(Type *ElementType, unsigned AddressSpace);
+ /// This constructs an opaque pointer to an object in a numbered address
+ /// space.
+ static PointerType *get(LLVMContext &C, unsigned AddressSpace);
/// This constructs a pointer to an object of the specified type in the
- /// generic address space (address space zero).
+ /// default address space (address space zero).
static PointerType *getUnqual(Type *ElementType) {
return PointerType::get(ElementType, 0);
}
- Type *getElementType() const { return PointeeTy; }
+ /// This constructs an opaque pointer to an object in the
+ /// default address space (address space zero).
+ static PointerType *getUnqual(LLVMContext &C) {
+ return PointerType::get(C, 0);
+ }
+
+ /// This constructs a pointer type with the same pointee type as input
+  /// PointerType (or an opaque pointer if the input PointerType is opaque) and the
+ /// given address space. This is only useful during the opaque pointer
+ /// transition.
+ /// TODO: remove after opaque pointer transition is complete.
+ static PointerType *getWithSamePointeeType(PointerType *PT,
+ unsigned AddressSpace) {
+ if (PT->isOpaque())
+ return get(PT->getContext(), AddressSpace);
+ return get(PT->getElementType(), AddressSpace);
+ }
+
+ Type *getElementType() const {
+ assert(!isOpaque() && "Attempting to get element type of opaque pointer");
+ return PointeeTy;
+ }
+
+ bool isOpaque() const { return !PointeeTy; }
/// Return true if the specified type is valid as a element type.
static bool isValidElementType(Type *ElemTy);
@@ -685,6 +685,22 @@ public:
/// Return the address space of the Pointer type.
inline unsigned getAddressSpace() const { return getSubclassData(); }
+ /// Return true if either this is an opaque pointer type or if this pointee
+ /// type matches Ty. Primarily used for checking if an instruction's pointer
+ /// operands are valid types. Will be useless after non-opaque pointers are
+ /// removed.
+ bool isOpaqueOrPointeeTypeMatches(Type *Ty) {
+ return isOpaque() || PointeeTy == Ty;
+ }
+
+ /// Return true if both pointer types have the same element type. Two opaque
+ /// pointers are considered to have the same element type, while an opaque
+ /// and a non-opaque pointer have different element types.
+ /// TODO: Remove after opaque pointer transition is complete.
+ bool hasSameElementTypeAs(PointerType *Other) {
+ return PointeeTy == Other->PointeeTy;
+ }
+
/// Implement support type inquiry through isa, cast, and dyn_cast.
static bool classof(const Type *T) {
return T->getTypeID() == PointerTyID;
@@ -701,14 +717,17 @@ Type *Type::getExtendedType() const {
return cast<IntegerType>(this)->getExtendedType();
}
+Type *Type::getWithNewType(Type *EltTy) const {
+ if (auto *VTy = dyn_cast<VectorType>(this))
+ return VectorType::get(EltTy, VTy->getElementCount());
+ return EltTy;
+}
+
Type *Type::getWithNewBitWidth(unsigned NewBitWidth) const {
assert(
isIntOrIntVectorTy() &&
"Original type expected to be a vector of integers or a scalar integer.");
- Type *NewType = getIntNTy(getContext(), NewBitWidth);
- if (auto *VTy = dyn_cast<VectorType>(this))
- NewType = VectorType::get(NewType, VTy->getElementCount());
- return NewType;
+ return getWithNewType(getIntNTy(getContext(), NewBitWidth));
}
unsigned Type::getPointerAddressSpace() const {
diff --git a/llvm/include/llvm/IR/DiagnosticInfo.h b/llvm/include/llvm/IR/DiagnosticInfo.h
index c457072d50f1..5064f4f4edf7 100644
--- a/llvm/include/llvm/IR/DiagnosticInfo.h
+++ b/llvm/include/llvm/IR/DiagnosticInfo.h
@@ -78,6 +78,7 @@ enum DiagnosticKind {
DK_MIRParser,
DK_PGOProfile,
DK_Unsupported,
+ DK_SrcMgr,
DK_FirstPluginKind // Must be last value to work with
// getNextAvailablePluginDiagnosticKind
};
@@ -130,7 +131,7 @@ using DiagnosticHandlerFunction = std::function<void(const DiagnosticInfo &)>;
class DiagnosticInfoInlineAsm : public DiagnosticInfo {
private:
/// Optional line information. 0 if not set.
- unsigned LocCookie = 0;
+ uint64_t LocCookie = 0;
/// Message to be reported.
const Twine &MsgStr;
/// Optional origin of the problem.
@@ -148,7 +149,7 @@ public:
/// \p MsgStr gives the message.
/// This class does not copy \p MsgStr, therefore the reference must be valid
/// for the whole life time of the Diagnostic.
- DiagnosticInfoInlineAsm(unsigned LocCookie, const Twine &MsgStr,
+ DiagnosticInfoInlineAsm(uint64_t LocCookie, const Twine &MsgStr,
DiagnosticSeverity Severity = DS_Error)
: DiagnosticInfo(DK_InlineAsm, Severity), LocCookie(LocCookie),
MsgStr(MsgStr) {}
@@ -161,7 +162,7 @@ public:
DiagnosticInfoInlineAsm(const Instruction &I, const Twine &MsgStr,
DiagnosticSeverity Severity = DS_Error);
- unsigned getLocCookie() const { return LocCookie; }
+ uint64_t getLocCookie() const { return LocCookie; }
const Twine &getMsgStr() const { return MsgStr; }
const Instruction *getInstruction() const { return Instr; }
@@ -219,7 +220,7 @@ public:
DiagnosticInfoStackSize(const Function &Fn, uint64_t StackSize,
DiagnosticSeverity Severity = DS_Warning,
uint64_t StackLimit = 0)
- : DiagnosticInfoResourceLimit(Fn, "stack size", StackSize, Severity,
+ : DiagnosticInfoResourceLimit(Fn, "stack frame size", StackSize, Severity,
DK_StackSize, StackLimit) {}
uint64_t getStackSize() const { return getResourceSize(); }
@@ -381,7 +382,7 @@ public:
/// Return a string with the location information for this diagnostic
/// in the format "file:line:col". If location information is not available,
/// it returns "<unknown>:0:0".
- const std::string getLocationStr() const;
+ std::string getLocationStr() const;
/// Return location information for this diagnostic in three parts:
/// the relative source file path, line number and column.
@@ -742,6 +743,11 @@ public:
OptimizationRemarkMissed(const char *PassName, StringRef RemarkName,
const Instruction *Inst);
+ /// Same as above but \p F is used to derive code region and debug
+ /// location.
+ OptimizationRemarkMissed(const char *PassName, StringRef RemarkName,
+ const Function *F);
+
static bool classof(const DiagnosticInfo *DI) {
return DI->getKind() == DK_OptimizationRemarkMissed;
}
@@ -794,6 +800,11 @@ public:
OptimizationRemarkAnalysis(const char *PassName, StringRef RemarkName,
const Instruction *Inst);
+ /// Same as above but \p F is used to derive code region and debug
+ /// location.
+ OptimizationRemarkAnalysis(const char *PassName, StringRef RemarkName,
+ const Function *F);
+
static bool classof(const DiagnosticInfo *DI) {
return DI->getKind() == DK_OptimizationRemarkAnalysis;
}
@@ -916,6 +927,7 @@ private:
};
/// Diagnostic information for machine IR parser.
+// FIXME: Remove this, use DiagnosticInfoSrcMgr instead.
class DiagnosticInfoMIRParser : public DiagnosticInfo {
const SMDiagnostic &Diagnostic;
@@ -1015,6 +1027,49 @@ public:
void print(DiagnosticPrinter &DP) const override;
};
+static DiagnosticSeverity getDiagnosticSeverity(SourceMgr::DiagKind DK) {
+ switch (DK) {
+ case llvm::SourceMgr::DK_Error:
+ return DS_Error;
+ break;
+ case llvm::SourceMgr::DK_Warning:
+ return DS_Warning;
+ break;
+ case llvm::SourceMgr::DK_Note:
+ return DS_Note;
+ break;
+ case llvm::SourceMgr::DK_Remark:
+ return DS_Remark;
+ break;
+ }
+ llvm_unreachable("unknown SourceMgr::DiagKind");
+}
+
+/// Diagnostic information for SMDiagnostic reporting.
+class DiagnosticInfoSrcMgr : public DiagnosticInfo {
+ const SMDiagnostic &Diagnostic;
+
+ // For inlineasm !srcloc translation.
+ bool InlineAsmDiag;
+ unsigned LocCookie;
+
+public:
+ DiagnosticInfoSrcMgr(const SMDiagnostic &Diagnostic,
+ bool InlineAsmDiag = true, unsigned LocCookie = 0)
+ : DiagnosticInfo(DK_SrcMgr, getDiagnosticSeverity(Diagnostic.getKind())),
+ Diagnostic(Diagnostic), InlineAsmDiag(InlineAsmDiag),
+ LocCookie(LocCookie) {}
+
+ bool isInlineAsmDiag() const { return InlineAsmDiag; }
+ const SMDiagnostic &getSMDiag() const { return Diagnostic; }
+ unsigned getLocCookie() const { return LocCookie; }
+ void print(DiagnosticPrinter &DP) const override;
+
+ static bool classof(const DiagnosticInfo *DI) {
+ return DI->getKind() == DK_SrcMgr;
+ }
+};
+
} // end namespace llvm
#endif // LLVM_IR_DIAGNOSTICINFO_H
diff --git a/llvm/include/llvm/IR/Dominators.h b/llvm/include/llvm/IR/Dominators.h
index 08dbccaf2c01..4d140c3ad0f2 100644
--- a/llvm/include/llvm/IR/Dominators.h
+++ b/llvm/include/llvm/IR/Dominators.h
@@ -165,6 +165,9 @@ class DominatorTree : public DominatorTreeBase<BasicBlock, false> {
// Ensure base-class overloads are visible.
using Base::dominates;
+ /// Return true if the (end of the) basic block BB dominates the use U.
+ bool dominates(const BasicBlock *BB, const Use &U) const;
+
/// Return true if value Def dominates use U, in the sense that Def is
/// available at U, and could be substituted as the used value without
/// violating the SSA dominance requirement.
diff --git a/llvm/include/llvm/IR/FPEnv.h b/llvm/include/llvm/IR/FPEnv.h
index f00cb735932f..621540000b5c 100644
--- a/llvm/include/llvm/IR/FPEnv.h
+++ b/llvm/include/llvm/IR/FPEnv.h
@@ -12,8 +12,8 @@
//
//===----------------------------------------------------------------------===//
-#ifndef LLVM_IR_FLOATINGPOINT_H
-#define LLVM_IR_FLOATINGPOINT_H
+#ifndef LLVM_IR_FPENV_H
+#define LLVM_IR_FPENV_H
#include "llvm/ADT/FloatingPointMode.h"
#include "llvm/ADT/Optional.h"
@@ -52,5 +52,11 @@ Optional<fp::ExceptionBehavior> StrToExceptionBehavior(StringRef);
/// For any ExceptionBehavior enumerator, returns a string valid as
/// input in constrained intrinsic exception behavior metadata.
Optional<StringRef> ExceptionBehaviorToStr(fp::ExceptionBehavior);
+
+/// Returns true if the exception handling behavior and rounding mode
+/// match what is used in the default floating point environment.
+inline bool isDefaultFPEnvironment(fp::ExceptionBehavior EB, RoundingMode RM) {
+ return EB == fp::ebIgnore && RM == RoundingMode::NearestTiesToEven;
+}
}
#endif
diff --git a/llvm/include/llvm/IR/Function.h b/llvm/include/llvm/IR/Function.h
index 7e209bb3769b..e0094e2afff2 100644
--- a/llvm/include/llvm/IR/Function.h
+++ b/llvm/include/llvm/IR/Function.h
@@ -153,6 +153,16 @@ public:
static Function *Create(FunctionType *Ty, LinkageTypes Linkage,
const Twine &N, Module &M);
+ /// Creates a function with some attributes recorded in llvm.module.flags
+ /// applied.
+ ///
+ /// Use this when synthesizing new functions that need attributes that would
+ /// have been set by command line options.
+ static Function *createWithDefaultAttr(FunctionType *Ty, LinkageTypes Linkage,
+ unsigned AddrSpace,
+ const Twine &N = "",
+ Module *M = nullptr);
+
// Provide fast operand accessors.
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
@@ -426,6 +436,10 @@ public:
/// removes the attribute from the list of attributes.
void removeParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs);
+ /// removes noundef and other attributes that imply undefined behavior if a
+ /// `undef` or `poison` value is passed from the list of attributes.
+ void removeParamUndefImplyingAttrs(unsigned ArgNo);
+
/// check if an attributes is in the list of attributes.
bool hasAttribute(unsigned i, Attribute::AttrKind Kind) const {
return getAttributes().hasAttribute(i, Kind);
@@ -479,6 +493,10 @@ public:
return AttributeSets.getParamAlignment(ArgNo);
}
+ MaybeAlign getParamStackAlign(unsigned ArgNo) const {
+ return AttributeSets.getParamStackAlignment(ArgNo);
+ }
+
/// Extract the byval type for a parameter.
Type *getParamByValType(unsigned ArgNo) const {
return AttributeSets.getParamByValType(ArgNo);
@@ -489,6 +507,11 @@ public:
return AttributeSets.getParamStructRetType(ArgNo);
}
+ /// Extract the inalloca type for a parameter.
+ Type *getParamInAllocaType(unsigned ArgNo) const {
+ return AttributeSets.getParamInAllocaType(ArgNo);
+ }
+
/// Extract the byref type for a parameter.
Type *getParamByRefType(unsigned ArgNo) const {
return AttributeSets.getParamByRefType(ArgNo);
@@ -624,6 +647,14 @@ public:
addFnAttr(Attribute::NoFree);
}
+ /// Determine if the call can synchronize with other threads
+ bool hasNoSync() const {
+ return hasFnAttribute(Attribute::NoSync);
+ }
+ void setNoSync() {
+ addFnAttr(Attribute::NoSync);
+ }
+
/// Determine if the function is known not to recurse, directly or
/// indirectly.
bool doesNotRecurse() const {
@@ -872,11 +903,14 @@ public:
/// hasAddressTaken - returns true if there are any uses of this function
/// other than direct calls or invokes to it, or blockaddress expressions.
- /// Optionally passes back an offending user for diagnostic purposes and
- /// ignores callback uses.
+ /// Optionally passes back an offending user for diagnostic purposes,
+ /// ignores callback uses, assume-like pointer annotation calls, and
+ /// references in llvm.used and llvm.compiler.used variables.
///
bool hasAddressTaken(const User ** = nullptr,
- bool IgnoreCallbackUses = false) const;
+ bool IgnoreCallbackUses = false,
+ bool IgnoreAssumeLikeCalls = true,
+ bool IngoreLLVMUsed = false) const;
/// isDefTriviallyDead - Return true if it is trivially safe to remove
/// this function definition from the module (because it isn't externally
diff --git a/llvm/include/llvm/IR/GCStrategy.h b/llvm/include/llvm/IR/GCStrategy.h
new file mode 100644
index 000000000000..a69958d596c6
--- /dev/null
+++ b/llvm/include/llvm/IR/GCStrategy.h
@@ -0,0 +1,136 @@
+//===- llvm/CodeGen/GCStrategy.h - Garbage collection -----------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// GCStrategy coordinates code generation algorithms and implements some itself
+// in order to generate code compatible with a target code generator as
+// specified in a function's 'gc' attribute. Algorithms are enabled by setting
+// flags in a subclass's constructor, and some virtual methods can be
+// overridden.
+//
+// GCStrategy is relevant for implementations using either gc.root or
+// gc.statepoint based lowering strategies, but is currently focused mostly on
+// options for gc.root. This will change over time.
+//
+// When requested by a subclass of GCStrategy, the gc.root implementation will
+// populate GCModuleInfo and GCFunctionInfo with information about each Function in
+// the Module that opts in to garbage collection. Specifically:
+//
+// - Safe points
+// Garbage collection is generally only possible at certain points in code.
+// GCStrategy can request that the collector insert such points:
+//
+// - At and after any call to a subroutine
+// - Before returning from the current function
+// - Before backwards branches (loops)
+//
+// - Roots
+// When a reference to a GC-allocated object exists on the stack, it must be
+// stored in an alloca registered with llvm.gcroot.
+//
+// This information can be used to emit the metadata tables which are required by
+// the target garbage collector runtime.
+//
+// When used with gc.statepoint, information about safepoint and roots can be
+// found in the binary StackMap section after code generation. Safepoint
+// placement is currently the responsibility of the frontend, though late
+// insertion support is planned. gc.statepoint does not currently support
+// custom stack map formats; such can be generated by parsing the standard
+// stack map section if desired.
+//
+// The read and write barrier support can be used with either implementation.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_IR_GCSTRATEGY_H
+#define LLVM_IR_GCSTRATEGY_H
+
+#include "llvm/ADT/None.h"
+#include "llvm/ADT/Optional.h"
+#include "llvm/Support/Registry.h"
+#include <string>
+
+namespace llvm {
+
+class Type;
+
+/// GCStrategy describes a garbage collector algorithm's code generation
+/// requirements, and provides overridable hooks for those needs which cannot
+/// be abstractly described. GCStrategy objects must be looked up through
+/// the Function. The objects themselves are owned by the Context and must
+/// be immutable.
+class GCStrategy {
+private:
+ friend class GCModuleInfo;
+
+ std::string Name;
+
+protected:
+ bool UseStatepoints = false; /// Uses gc.statepoints as opposed to gc.roots,
+ /// if set, none of the other options can be
+ /// anything but their default values.
+
+ bool NeededSafePoints = false; ///< if set, calls are inferred to be safepoints
+ bool UsesMetadata = false; ///< If set, backend must emit metadata tables.
+
+public:
+ GCStrategy();
+ virtual ~GCStrategy() = default;
+
+ /// Return the name of the GC strategy. This is the value of the collector
+ /// name string specified on functions which use this strategy.
+ const std::string &getName() const { return Name; }
+
+ /// Returns true if this strategy is expecting the use of gc.statepoints,
+ /// and false otherwise.
+ bool useStatepoints() const { return UseStatepoints; }
+
+ /** @name Statepoint Specific Properties */
+ ///@{
+
+ /// If the type specified can be reliably distinguished, returns true for
+ /// pointers to GC managed locations and false for pointers to non-GC
+ /// managed locations. Note a GCStrategy can always return 'None' (i.e. an
+ /// empty optional indicating it can't reliably distinguish).
+ virtual Optional<bool> isGCManagedPointer(const Type *Ty) const {
+ return None;
+ }
+ ///@}
+
+ /** @name GCRoot Specific Properties
+ * These properties and overrides only apply to collector strategies using
+ * GCRoot.
+ */
+ ///@{
+
+ /// True if safe points need to be inferred on call sites
+ bool needsSafePoints() const { return NeededSafePoints; }
+
+ /// If set, appropriate metadata tables must be emitted by the back-end
+ /// (assembler, JIT, or otherwise). For statepoint, this method is
+ /// currently unsupported. The stackmap information can be found in the
+ /// StackMap section as described in the documentation.
+ bool usesMetadata() const { return UsesMetadata; }
+
+ ///@}
+};
+
+/// Subclasses of GCStrategy are made available for use during compilation by
+/// adding them to the global GCRegistry. This can be done either within the
+/// LLVM source tree or via a loadable plugin. An example registration
+/// would be:
+/// static GCRegistry::Add<CustomGC> X("custom-name",
+/// "my custom supper fancy gc strategy");
+///
+/// Note that to use a custom GCMetadataPrinter w/gc.roots, you must also
+/// register your GCMetadataPrinter subclass with the
+/// GCMetadataPrinterRegistry as well.
+using GCRegistry = Registry<GCStrategy>;
+
+} // end namespace llvm
+
+#endif // LLVM_IR_GCSTRATEGY_H
diff --git a/llvm/include/llvm/IR/GetElementPtrTypeIterator.h b/llvm/include/llvm/IR/GetElementPtrTypeIterator.h
index 6293305a2639..ed854e458da2 100644
--- a/llvm/include/llvm/IR/GetElementPtrTypeIterator.h
+++ b/llvm/include/llvm/IR/GetElementPtrTypeIterator.h
@@ -27,106 +27,112 @@
namespace llvm {
- template<typename ItTy = User::const_op_iterator>
- class generic_gep_type_iterator
- : public std::iterator<std::forward_iterator_tag, Type *, ptrdiff_t> {
- using super = std::iterator<std::forward_iterator_tag, Type *, ptrdiff_t>;
-
- ItTy OpIt;
- PointerUnion<StructType *, Type *> CurTy;
- enum : uint64_t { Unbounded = -1ull };
- uint64_t NumElements = Unbounded;
-
- generic_gep_type_iterator() = default;
-
- public:
- static generic_gep_type_iterator begin(Type *Ty, ItTy It) {
- generic_gep_type_iterator I;
- I.CurTy = Ty;
- I.OpIt = It;
- return I;
- }
-
- static generic_gep_type_iterator end(ItTy It) {
- generic_gep_type_iterator I;
- I.OpIt = It;
- return I;
- }
-
- bool operator==(const generic_gep_type_iterator& x) const {
- return OpIt == x.OpIt;
- }
-
- bool operator!=(const generic_gep_type_iterator& x) const {
- return !operator==(x);
- }
-
- // FIXME: Make this the iterator's operator*() after the 4.0 release.
- // operator*() had a different meaning in earlier releases, so we're
- // temporarily not giving this iterator an operator*() to avoid a subtle
- // semantics break.
- Type *getIndexedType() const {
- if (auto *T = CurTy.dyn_cast<Type *>())
- return T;
- return CurTy.get<StructType *>()->getTypeAtIndex(getOperand());
- }
-
- Value *getOperand() const { return const_cast<Value *>(&**OpIt); }
-
- generic_gep_type_iterator& operator++() { // Preincrement
- Type *Ty = getIndexedType();
- if (auto *ATy = dyn_cast<ArrayType>(Ty)) {
- CurTy = ATy->getElementType();
- NumElements = ATy->getNumElements();
- } else if (auto *VTy = dyn_cast<VectorType>(Ty)) {
- CurTy = VTy->getElementType();
- if (isa<ScalableVectorType>(VTy))
- NumElements = Unbounded;
- else
- NumElements = cast<FixedVectorType>(VTy)->getNumElements();
- } else
- CurTy = dyn_cast<StructType>(Ty);
- ++OpIt;
- return *this;
- }
-
- generic_gep_type_iterator operator++(int) { // Postincrement
- generic_gep_type_iterator tmp = *this; ++*this; return tmp;
- }
-
- // All of the below API is for querying properties of the "outer type", i.e.
- // the type that contains the indexed type. Most of the time this is just
- // the type that was visited immediately prior to the indexed type, but for
- // the first element this is an unbounded array of the GEP's source element
- // type, for which there is no clearly corresponding IR type (we've
- // historically used a pointer type as the outer type in this case, but
- // pointers will soon lose their element type).
- //
- // FIXME: Most current users of this class are just interested in byte
- // offsets (a few need to know whether the outer type is a struct because
- // they are trying to replace a constant with a variable, which is only
- // legal for arrays, e.g. canReplaceOperandWithVariable in SimplifyCFG.cpp);
- // we should provide a more minimal API here that exposes not much more than
- // that.
-
- bool isStruct() const { return CurTy.is<StructType *>(); }
- bool isSequential() const { return CurTy.is<Type *>(); }
-
- StructType *getStructType() const { return CurTy.get<StructType *>(); }
-
- StructType *getStructTypeOrNull() const {
- return CurTy.dyn_cast<StructType *>();
- }
-
- bool isBoundedSequential() const {
- return isSequential() && NumElements != Unbounded;
- }
-
- uint64_t getSequentialNumElements() const {
- assert(isBoundedSequential());
- return NumElements;
- }
- };
+template <typename ItTy = User::const_op_iterator>
+class generic_gep_type_iterator {
+
+ ItTy OpIt;
+ PointerUnion<StructType *, Type *> CurTy;
+ enum : uint64_t { Unbounded = -1ull };
+ uint64_t NumElements = Unbounded;
+
+ generic_gep_type_iterator() = default;
+
+public:
+ using iterator_category = std::forward_iterator_tag;
+ using value_type = Type *;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
+ static generic_gep_type_iterator begin(Type *Ty, ItTy It) {
+ generic_gep_type_iterator I;
+ I.CurTy = Ty;
+ I.OpIt = It;
+ return I;
+ }
+
+ static generic_gep_type_iterator end(ItTy It) {
+ generic_gep_type_iterator I;
+ I.OpIt = It;
+ return I;
+ }
+
+ bool operator==(const generic_gep_type_iterator &x) const {
+ return OpIt == x.OpIt;
+ }
+
+ bool operator!=(const generic_gep_type_iterator &x) const {
+ return !operator==(x);
+ }
+
+ // FIXME: Make this the iterator's operator*() after the 4.0 release.
+ // operator*() had a different meaning in earlier releases, so we're
+ // temporarily not giving this iterator an operator*() to avoid a subtle
+ // semantics break.
+ Type *getIndexedType() const {
+ if (auto *T = CurTy.dyn_cast<Type *>())
+ return T;
+ return CurTy.get<StructType *>()->getTypeAtIndex(getOperand());
+ }
+
+ Value *getOperand() const { return const_cast<Value *>(&**OpIt); }
+
+ generic_gep_type_iterator &operator++() { // Preincrement
+ Type *Ty = getIndexedType();
+ if (auto *ATy = dyn_cast<ArrayType>(Ty)) {
+ CurTy = ATy->getElementType();
+ NumElements = ATy->getNumElements();
+ } else if (auto *VTy = dyn_cast<VectorType>(Ty)) {
+ CurTy = VTy->getElementType();
+ if (isa<ScalableVectorType>(VTy))
+ NumElements = Unbounded;
+ else
+ NumElements = cast<FixedVectorType>(VTy)->getNumElements();
+ } else
+ CurTy = dyn_cast<StructType>(Ty);
+ ++OpIt;
+ return *this;
+ }
+
+ generic_gep_type_iterator operator++(int) { // Postincrement
+ generic_gep_type_iterator tmp = *this;
+ ++*this;
+ return tmp;
+ }
+
+ // All of the below API is for querying properties of the "outer type", i.e.
+ // the type that contains the indexed type. Most of the time this is just
+ // the type that was visited immediately prior to the indexed type, but for
+ // the first element this is an unbounded array of the GEP's source element
+ // type, for which there is no clearly corresponding IR type (we've
+ // historically used a pointer type as the outer type in this case, but
+ // pointers will soon lose their element type).
+ //
+ // FIXME: Most current users of this class are just interested in byte
+ // offsets (a few need to know whether the outer type is a struct because
+ // they are trying to replace a constant with a variable, which is only
+ // legal for arrays, e.g. canReplaceOperandWithVariable in SimplifyCFG.cpp);
+ // we should provide a more minimal API here that exposes not much more than
+ // that.
+
+ bool isStruct() const { return CurTy.is<StructType *>(); }
+ bool isSequential() const { return CurTy.is<Type *>(); }
+
+ StructType *getStructType() const { return CurTy.get<StructType *>(); }
+
+ StructType *getStructTypeOrNull() const {
+ return CurTy.dyn_cast<StructType *>();
+ }
+
+ bool isBoundedSequential() const {
+ return isSequential() && NumElements != Unbounded;
+ }
+
+ uint64_t getSequentialNumElements() const {
+ assert(isBoundedSequential());
+ return NumElements;
+ }
+};
using gep_type_iterator = generic_gep_type_iterator<>;
diff --git a/llvm/include/llvm/IR/GlobalIFunc.h b/llvm/include/llvm/IR/GlobalIFunc.h
index 0fdae917878a..ddd29c8a4a19 100644
--- a/llvm/include/llvm/IR/GlobalIFunc.h
+++ b/llvm/include/llvm/IR/GlobalIFunc.h
@@ -6,7 +6,7 @@
//
//===----------------------------------------------------------------------===//
///
-/// \brief
+/// \file
/// This file contains the declaration of the GlobalIFunc class, which
/// represents a single indirect function in the IR. Indirect function uses
/// ELF symbol type extension to mark that the address of a declaration should
diff --git a/llvm/include/llvm/IR/GlobalIndirectSymbol.h b/llvm/include/llvm/IR/GlobalIndirectSymbol.h
index d996237aa3ef..e45c7529885d 100644
--- a/llvm/include/llvm/IR/GlobalIndirectSymbol.h
+++ b/llvm/include/llvm/IR/GlobalIndirectSymbol.h
@@ -35,9 +35,8 @@ public:
GlobalIndirectSymbol &operator=(const GlobalIndirectSymbol &) = delete;
// allocate space for exactly one operand
- void *operator new(size_t s) {
- return User::operator new(s, 1);
- }
+ void *operator new(size_t S) { return User::operator new(S, 1); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
/// Provide fast operand accessors
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Constant);
diff --git a/llvm/include/llvm/IR/GlobalObject.h b/llvm/include/llvm/IR/GlobalObject.h
index d01abdc3b625..341fbec66080 100644
--- a/llvm/include/llvm/IR/GlobalObject.h
+++ b/llvm/include/llvm/IR/GlobalObject.h
@@ -18,8 +18,6 @@
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Alignment.h"
-#include <string>
-#include <utility>
namespace llvm {
diff --git a/llvm/include/llvm/IR/IRBuilder.h b/llvm/include/llvm/IR/IRBuilder.h
index 9cefc9aa764c..8998ad0f94a9 100644
--- a/llvm/include/llvm/IR/IRBuilder.h
+++ b/llvm/include/llvm/IR/IRBuilder.h
@@ -647,8 +647,11 @@ public:
TBAAStructTag, ScopeTag, NoAliasTag);
}
- CallInst *CreateMemCpyInline(Value *Dst, MaybeAlign DstAlign, Value *Src,
- MaybeAlign SrcAlign, Value *Size);
+ CallInst *
+ CreateMemCpyInline(Value *Dst, MaybeAlign DstAlign, Value *Src,
+ MaybeAlign SrcAlign, Value *Size, bool IsVolatile = false,
+ MDNode *TBAATag = nullptr, MDNode *TBAAStructTag = nullptr,
+ MDNode *ScopeTag = nullptr, MDNode *NoAliasTag = nullptr);
/// Create and insert an element unordered-atomic memcpy between the
/// specified pointers.
@@ -664,32 +667,6 @@ public:
MDNode *TBAAStructTag = nullptr, MDNode *ScopeTag = nullptr,
MDNode *NoAliasTag = nullptr);
- LLVM_ATTRIBUTE_DEPRECATED(CallInst *CreateElementUnorderedAtomicMemCpy(
- Value *Dst, unsigned DstAlign, Value *Src,
- unsigned SrcAlign, uint64_t Size,
- uint32_t ElementSize, MDNode *TBAATag = nullptr,
- MDNode *TBAAStructTag = nullptr,
- MDNode *ScopeTag = nullptr,
- MDNode *NoAliasTag = nullptr),
- "Use the version that takes Align instead") {
- return CreateElementUnorderedAtomicMemCpy(
- Dst, Align(DstAlign), Src, Align(SrcAlign), getInt64(Size), ElementSize,
- TBAATag, TBAAStructTag, ScopeTag, NoAliasTag);
- }
-
- LLVM_ATTRIBUTE_DEPRECATED(CallInst *CreateElementUnorderedAtomicMemCpy(
- Value *Dst, unsigned DstAlign, Value *Src,
- unsigned SrcAlign, Value *Size,
- uint32_t ElementSize, MDNode *TBAATag = nullptr,
- MDNode *TBAAStructTag = nullptr,
- MDNode *ScopeTag = nullptr,
- MDNode *NoAliasTag = nullptr),
- "Use the version that takes Align instead") {
- return CreateElementUnorderedAtomicMemCpy(
- Dst, Align(DstAlign), Src, Align(SrcAlign), Size, ElementSize, TBAATag,
- TBAAStructTag, ScopeTag, NoAliasTag);
- }
-
CallInst *CreateMemMove(Value *Dst, MaybeAlign DstAlign, Value *Src,
MaybeAlign SrcAlign, uint64_t Size,
bool isVolatile = false, MDNode *TBAATag = nullptr,
@@ -720,32 +697,6 @@ public:
MDNode *TBAAStructTag = nullptr, MDNode *ScopeTag = nullptr,
MDNode *NoAliasTag = nullptr);
- LLVM_ATTRIBUTE_DEPRECATED(CallInst *CreateElementUnorderedAtomicMemMove(
- Value *Dst, unsigned DstAlign, Value *Src,
- unsigned SrcAlign, uint64_t Size,
- uint32_t ElementSize, MDNode *TBAATag = nullptr,
- MDNode *TBAAStructTag = nullptr,
- MDNode *ScopeTag = nullptr,
- MDNode *NoAliasTag = nullptr),
- "Use the version that takes Align instead") {
- return CreateElementUnorderedAtomicMemMove(
- Dst, Align(DstAlign), Src, Align(SrcAlign), getInt64(Size), ElementSize,
- TBAATag, TBAAStructTag, ScopeTag, NoAliasTag);
- }
-
- LLVM_ATTRIBUTE_DEPRECATED(CallInst *CreateElementUnorderedAtomicMemMove(
- Value *Dst, unsigned DstAlign, Value *Src,
- unsigned SrcAlign, Value *Size,
- uint32_t ElementSize, MDNode *TBAATag = nullptr,
- MDNode *TBAAStructTag = nullptr,
- MDNode *ScopeTag = nullptr,
- MDNode *NoAliasTag = nullptr),
- "Use the version that takes Align instead") {
- return CreateElementUnorderedAtomicMemMove(
- Dst, Align(DstAlign), Src, Align(SrcAlign), Size, ElementSize, TBAATag,
- TBAAStructTag, ScopeTag, NoAliasTag);
- }
-
/// Create a vector fadd reduction intrinsic of the source vector.
/// The first parameter is a scalar accumulator value for ordered reductions.
CallInst *CreateFAddReduce(Value *Acc, Value *Src);
@@ -801,52 +752,19 @@ public:
CallInst *CreateInvariantStart(Value *Ptr, ConstantInt *Size = nullptr);
/// Create a call to Masked Load intrinsic
- LLVM_ATTRIBUTE_DEPRECATED(
- CallInst *CreateMaskedLoad(Value *Ptr, unsigned Alignment, Value *Mask,
- Value *PassThru = nullptr,
- const Twine &Name = ""),
- "Use the version that takes Align instead") {
- return CreateMaskedLoad(Ptr, assumeAligned(Alignment), Mask, PassThru,
- Name);
- }
- CallInst *CreateMaskedLoad(Value *Ptr, Align Alignment, Value *Mask,
+ CallInst *CreateMaskedLoad(Type *Ty, Value *Ptr, Align Alignment, Value *Mask,
Value *PassThru = nullptr, const Twine &Name = "");
/// Create a call to Masked Store intrinsic
- LLVM_ATTRIBUTE_DEPRECATED(CallInst *CreateMaskedStore(Value *Val, Value *Ptr,
- unsigned Alignment,
- Value *Mask),
- "Use the version that takes Align instead") {
- return CreateMaskedStore(Val, Ptr, assumeAligned(Alignment), Mask);
- }
-
CallInst *CreateMaskedStore(Value *Val, Value *Ptr, Align Alignment,
Value *Mask);
/// Create a call to Masked Gather intrinsic
- LLVM_ATTRIBUTE_DEPRECATED(
- CallInst *CreateMaskedGather(Value *Ptrs, unsigned Alignment,
- Value *Mask = nullptr,
- Value *PassThru = nullptr,
- const Twine &Name = ""),
- "Use the version that takes Align instead") {
- return CreateMaskedGather(Ptrs, Align(Alignment), Mask, PassThru, Name);
- }
-
- /// Create a call to Masked Gather intrinsic
- CallInst *CreateMaskedGather(Value *Ptrs, Align Alignment,
+ CallInst *CreateMaskedGather(Type *Ty, Value *Ptrs, Align Alignment,
Value *Mask = nullptr, Value *PassThru = nullptr,
const Twine &Name = "");
/// Create a call to Masked Scatter intrinsic
- LLVM_ATTRIBUTE_DEPRECATED(
- CallInst *CreateMaskedScatter(Value *Val, Value *Ptrs, unsigned Alignment,
- Value *Mask = nullptr),
- "Use the version that takes Align instead") {
- return CreateMaskedScatter(Val, Ptrs, Align(Alignment), Mask);
- }
-
- /// Create a call to Masked Scatter intrinsic
CallInst *CreateMaskedScatter(Value *Val, Value *Ptrs, Align Alignment,
Value *Mask = nullptr);
@@ -935,10 +853,21 @@ public:
Type *ResultType,
const Twine &Name = "");
+ /// Create a call to the experimental.gc.pointer.base intrinsic to get the
+ /// base pointer for the specified derived pointer.
+ CallInst *CreateGCGetPointerBase(Value *DerivedPtr, const Twine &Name = "");
+
+ /// Create a call to the experimental.gc.get.pointer.offset intrinsic to get
+ /// the offset of the specified derived pointer from its base.
+ CallInst *CreateGCGetPointerOffset(Value *DerivedPtr, const Twine &Name = "");
+
/// Create a call to llvm.vscale, multiplied by \p Scaling. The type of VScale
/// will be the same type as that of \p Scaling.
Value *CreateVScale(Constant *Scaling, const Twine &Name = "");
+ /// Creates a vector of type \p DstType with the linear sequence <0, 1, ...>
+ Value *CreateStepVector(Type *DstType, const Twine &Name = "");
+
/// Create a call to intrinsic \p ID with 1 operand which is mangled on its
/// type.
CallInst *CreateUnaryIntrinsic(Intrinsic::ID ID, Value *V,
@@ -979,6 +908,13 @@ public:
return CreateBinaryIntrinsic(Intrinsic::maximum, LHS, RHS, nullptr, Name);
}
+ /// Create a call to the arithmetic_fence intrinsic.
+ CallInst *CreateArithmeticFence(Value *Val, Type *DstType,
+ const Twine &Name = "") {
+ return CreateIntrinsic(Intrinsic::arithmetic_fence, DstType, Val, nullptr,
+ Name);
+ }
+
/// Create a call to the experimental.vector.extract intrinsic.
CallInst *CreateExtractVector(Type *DstType, Value *SrcVec, Value *Idx,
const Twine &Name = "") {
@@ -1619,6 +1555,18 @@ public:
return Insert(BinOp, Name);
}
+ Value *CreateLogicalAnd(Value *Cond1, Value *Cond2, const Twine &Name = "") {
+ assert(Cond2->getType()->isIntOrIntVectorTy(1));
+ return CreateSelect(Cond1, Cond2,
+ ConstantInt::getNullValue(Cond2->getType()), Name);
+ }
+
+ Value *CreateLogicalOr(Value *Cond1, Value *Cond2, const Twine &Name = "") {
+ assert(Cond2->getType()->isIntOrIntVectorTy(1));
+ return CreateSelect(Cond1, ConstantInt::getAllOnesValue(Cond2->getType()),
+ Cond2, Name);
+ }
+
CallInst *CreateConstrainedFPBinOp(
Intrinsic::ID ID, Value *L, Value *R, Instruction *FMFSource = nullptr,
const Twine &Name = "", MDNode *FPMathTag = nullptr,
@@ -1719,17 +1667,27 @@ public:
}
// Deprecated [opaque pointer types]
- LoadInst *CreateLoad(Value *Ptr, const char *Name) {
+ LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateLoad(Value *Ptr,
+ const char *Name),
+ "Use the version that explicitly specifies the "
+ "loaded type instead") {
return CreateLoad(Ptr->getType()->getPointerElementType(), Ptr, Name);
}
// Deprecated [opaque pointer types]
- LoadInst *CreateLoad(Value *Ptr, const Twine &Name = "") {
+ LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateLoad(Value *Ptr,
+ const Twine &Name = ""),
+ "Use the version that explicitly specifies the "
+ "loaded type instead") {
return CreateLoad(Ptr->getType()->getPointerElementType(), Ptr, Name);
}
// Deprecated [opaque pointer types]
- LoadInst *CreateLoad(Value *Ptr, bool isVolatile, const Twine &Name = "") {
+ LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateLoad(Value *Ptr,
+ bool isVolatile,
+ const Twine &Name = ""),
+ "Use the version that explicitly specifies the "
+ "loaded type instead") {
return CreateLoad(Ptr->getType()->getPointerElementType(), Ptr, isVolatile,
Name);
}
@@ -1738,35 +1696,16 @@ public:
return CreateAlignedStore(Val, Ptr, MaybeAlign(), isVolatile);
}
- LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr,
- unsigned Align,
- const char *Name),
- "Use the version that takes NaybeAlign instead") {
- return CreateAlignedLoad(Ty, Ptr, MaybeAlign(Align), Name);
- }
LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align,
const char *Name) {
return CreateAlignedLoad(Ty, Ptr, Align, /*isVolatile*/false, Name);
}
- LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr,
- unsigned Align,
- const Twine &Name = ""),
- "Use the version that takes MaybeAlign instead") {
- return CreateAlignedLoad(Ty, Ptr, MaybeAlign(Align), Name);
- }
LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align,
const Twine &Name = "") {
return CreateAlignedLoad(Ty, Ptr, Align, /*isVolatile*/false, Name);
}
- LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr,
- unsigned Align,
- bool isVolatile,
- const Twine &Name = ""),
- "Use the version that takes MaybeAlign instead") {
- return CreateAlignedLoad(Ty, Ptr, MaybeAlign(Align), isVolatile, Name);
- }
LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr, MaybeAlign Align,
bool isVolatile, const Twine &Name = "") {
if (!Align) {
@@ -1778,53 +1717,33 @@ public:
// Deprecated [opaque pointer types]
LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateAlignedLoad(Value *Ptr,
- unsigned Align,
+ MaybeAlign Align,
const char *Name),
- "Use the version that takes MaybeAlign instead") {
+ "Use the version that explicitly specifies the "
+ "loaded type instead") {
return CreateAlignedLoad(Ptr->getType()->getPointerElementType(), Ptr,
- MaybeAlign(Align), Name);
+ Align, Name);
}
// Deprecated [opaque pointer types]
LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateAlignedLoad(Value *Ptr,
- unsigned Align,
+ MaybeAlign Align,
const Twine &Name = ""),
- "Use the version that takes MaybeAlign instead") {
+ "Use the version that explicitly specifies the "
+ "loaded type instead") {
return CreateAlignedLoad(Ptr->getType()->getPointerElementType(), Ptr,
- MaybeAlign(Align), Name);
+ Align, Name);
}
// Deprecated [opaque pointer types]
LLVM_ATTRIBUTE_DEPRECATED(LoadInst *CreateAlignedLoad(Value *Ptr,
- unsigned Align,
+ MaybeAlign Align,
bool isVolatile,
const Twine &Name = ""),
- "Use the version that takes MaybeAlign instead") {
- return CreateAlignedLoad(Ptr->getType()->getPointerElementType(), Ptr,
- MaybeAlign(Align), isVolatile, Name);
- }
- // Deprecated [opaque pointer types]
- LoadInst *CreateAlignedLoad(Value *Ptr, MaybeAlign Align, const char *Name) {
- return CreateAlignedLoad(Ptr->getType()->getPointerElementType(), Ptr,
- Align, Name);
- }
- // Deprecated [opaque pointer types]
- LoadInst *CreateAlignedLoad(Value *Ptr, MaybeAlign Align,
- const Twine &Name = "") {
- return CreateAlignedLoad(Ptr->getType()->getPointerElementType(), Ptr,
- Align, Name);
- }
- // Deprecated [opaque pointer types]
- LoadInst *CreateAlignedLoad(Value *Ptr, MaybeAlign Align, bool isVolatile,
- const Twine &Name = "") {
+ "Use the version that explicitly specifies the "
+ "loaded type instead") {
return CreateAlignedLoad(Ptr->getType()->getPointerElementType(), Ptr,
Align, isVolatile, Name);
}
- LLVM_ATTRIBUTE_DEPRECATED(
- StoreInst *CreateAlignedStore(Value *Val, Value *Ptr, unsigned Align,
- bool isVolatile = false),
- "Use the version that takes MaybeAlign instead") {
- return CreateAlignedStore(Val, Ptr, MaybeAlign(Align), isVolatile);
- }
StoreInst *CreateAlignedStore(Value *Val, Value *Ptr, MaybeAlign Align,
bool isVolatile = false) {
if (!Align) {
@@ -1839,26 +1758,38 @@ public:
return Insert(new FenceInst(Context, Ordering, SSID), Name);
}
- AtomicCmpXchgInst *CreateAtomicCmpXchg(
- Value *Ptr, Value *Cmp, Value *New, AtomicOrdering SuccessOrdering,
- AtomicOrdering FailureOrdering, SyncScope::ID SSID = SyncScope::System) {
- const DataLayout &DL = BB->getModule()->getDataLayout();
- Align Alignment(DL.getTypeStoreSize(New->getType()));
- return Insert(new AtomicCmpXchgInst(
- Ptr, Cmp, New, Alignment, SuccessOrdering, FailureOrdering, SSID));
+ AtomicCmpXchgInst *
+ CreateAtomicCmpXchg(Value *Ptr, Value *Cmp, Value *New, MaybeAlign Align,
+ AtomicOrdering SuccessOrdering,
+ AtomicOrdering FailureOrdering,
+ SyncScope::ID SSID = SyncScope::System) {
+ if (!Align) {
+ const DataLayout &DL = BB->getModule()->getDataLayout();
+ Align = llvm::Align(DL.getTypeStoreSize(New->getType()));
+ }
+
+ return Insert(new AtomicCmpXchgInst(Ptr, Cmp, New, *Align, SuccessOrdering,
+ FailureOrdering, SSID));
}
- AtomicRMWInst *CreateAtomicRMW(AtomicRMWInst::BinOp Op, Value *Ptr, Value *Val,
+ AtomicRMWInst *CreateAtomicRMW(AtomicRMWInst::BinOp Op, Value *Ptr,
+ Value *Val, MaybeAlign Align,
AtomicOrdering Ordering,
SyncScope::ID SSID = SyncScope::System) {
- const DataLayout &DL = BB->getModule()->getDataLayout();
- Align Alignment(DL.getTypeStoreSize(Val->getType()));
- return Insert(new AtomicRMWInst(Op, Ptr, Val, Alignment, Ordering, SSID));
+ if (!Align) {
+ const DataLayout &DL = BB->getModule()->getDataLayout();
+ Align = llvm::Align(DL.getTypeStoreSize(Val->getType()));
+ }
+
+ return Insert(new AtomicRMWInst(Op, Ptr, Val, *Align, Ordering, SSID));
}
- Value *CreateGEP(Value *Ptr, ArrayRef<Value *> IdxList,
- const Twine &Name = "") {
- return CreateGEP(nullptr, Ptr, IdxList, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateGEP(Value *Ptr, ArrayRef<Value *> IdxList,
+ const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateGEP(Ptr->getType()->getScalarType()->getPointerElementType(),
+ Ptr, IdxList, Name);
}
Value *CreateGEP(Type *Ty, Value *Ptr, ArrayRef<Value *> IdxList,
@@ -1875,9 +1806,13 @@ public:
return Insert(GetElementPtrInst::Create(Ty, Ptr, IdxList), Name);
}
- Value *CreateInBoundsGEP(Value *Ptr, ArrayRef<Value *> IdxList,
- const Twine &Name = "") {
- return CreateInBoundsGEP(nullptr, Ptr, IdxList, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateInBoundsGEP(Value *Ptr, ArrayRef<Value *> IdxList,
+ const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateInBoundsGEP(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, IdxList,
+ Name);
}
Value *CreateInBoundsGEP(Type *Ty, Value *Ptr, ArrayRef<Value *> IdxList,
@@ -1895,10 +1830,6 @@ public:
return Insert(GetElementPtrInst::CreateInBounds(Ty, Ptr, IdxList), Name);
}
- Value *CreateGEP(Value *Ptr, Value *Idx, const Twine &Name = "") {
- return CreateGEP(nullptr, Ptr, Idx, Name);
- }
-
Value *CreateGEP(Type *Ty, Value *Ptr, Value *Idx, const Twine &Name = "") {
if (auto *PC = dyn_cast<Constant>(Ptr))
if (auto *IC = dyn_cast<Constant>(Idx))
@@ -1914,8 +1845,13 @@ public:
return Insert(GetElementPtrInst::CreateInBounds(Ty, Ptr, Idx), Name);
}
- Value *CreateConstGEP1_32(Value *Ptr, unsigned Idx0, const Twine &Name = "") {
- return CreateConstGEP1_32(nullptr, Ptr, Idx0, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateConstGEP1_32(Value *Ptr, unsigned Idx0,
+ const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateConstGEP1_32(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, Idx0,
+ Name);
}
Value *CreateConstGEP1_32(Type *Ty, Value *Ptr, unsigned Idx0,
@@ -1974,8 +1910,13 @@ public:
return Insert(GetElementPtrInst::Create(Ty, Ptr, Idx), Name);
}
- Value *CreateConstGEP1_64(Value *Ptr, uint64_t Idx0, const Twine &Name = "") {
- return CreateConstGEP1_64(nullptr, Ptr, Idx0, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateConstGEP1_64(Value *Ptr, uint64_t Idx0,
+ const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateConstGEP1_64(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, Idx0,
+ Name);
}
Value *CreateConstInBoundsGEP1_64(Type *Ty, Value *Ptr, uint64_t Idx0,
@@ -1988,9 +1929,13 @@ public:
return Insert(GetElementPtrInst::CreateInBounds(Ty, Ptr, Idx), Name);
}
- Value *CreateConstInBoundsGEP1_64(Value *Ptr, uint64_t Idx0,
- const Twine &Name = "") {
- return CreateConstInBoundsGEP1_64(nullptr, Ptr, Idx0, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateConstInBoundsGEP1_64(Value *Ptr, uint64_t Idx0,
+ const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateConstInBoundsGEP1_64(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, Idx0,
+ Name);
}
Value *CreateConstGEP2_64(Type *Ty, Value *Ptr, uint64_t Idx0, uint64_t Idx1,
@@ -2006,9 +1951,13 @@ public:
return Insert(GetElementPtrInst::Create(Ty, Ptr, Idxs), Name);
}
- Value *CreateConstGEP2_64(Value *Ptr, uint64_t Idx0, uint64_t Idx1,
- const Twine &Name = "") {
- return CreateConstGEP2_64(nullptr, Ptr, Idx0, Idx1, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateConstGEP2_64(Value *Ptr, uint64_t Idx0, uint64_t Idx1,
+ const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateConstGEP2_64(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, Idx0,
+ Idx1, Name);
}
Value *CreateConstInBoundsGEP2_64(Type *Ty, Value *Ptr, uint64_t Idx0,
@@ -2024,9 +1973,13 @@ public:
return Insert(GetElementPtrInst::CreateInBounds(Ty, Ptr, Idxs), Name);
}
- Value *CreateConstInBoundsGEP2_64(Value *Ptr, uint64_t Idx0, uint64_t Idx1,
- const Twine &Name = "") {
- return CreateConstInBoundsGEP2_64(nullptr, Ptr, Idx0, Idx1, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateConstInBoundsGEP2_64(Value *Ptr, uint64_t Idx0,
+ uint64_t Idx1, const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateConstInBoundsGEP2_64(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, Idx0,
+ Idx1, Name);
}
Value *CreateStructGEP(Type *Ty, Value *Ptr, unsigned Idx,
@@ -2034,8 +1987,12 @@ public:
return CreateConstInBoundsGEP2_32(Ty, Ptr, 0, Idx, Name);
}
- Value *CreateStructGEP(Value *Ptr, unsigned Idx, const Twine &Name = "") {
- return CreateConstInBoundsGEP2_32(nullptr, Ptr, 0, Idx, Name);
+ LLVM_ATTRIBUTE_DEPRECATED(
+ Value *CreateStructGEP(Value *Ptr, unsigned Idx, const Twine &Name = ""),
+ "Use the version with explicit element type instead") {
+ return CreateConstInBoundsGEP2_32(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, 0, Idx,
+ Name);
}
/// Same as CreateGlobalString, but return a pointer with "i8*" type
@@ -2597,6 +2554,19 @@ public:
/// address space before call and casted back to Ptr type after call.
Value *CreateStripInvariantGroup(Value *Ptr);
+ /// Return a vector value that contains the vector V reversed
+ Value *CreateVectorReverse(Value *V, const Twine &Name = "");
+
+ /// Return a vector splice intrinsic if using scalable vectors, otherwise
+ /// return a shufflevector. If the immediate is positive, a vector is
+ /// extracted from concat(V1, V2), starting at Imm. If the immediate
+ /// is negative, we extract -Imm elements from V1 and the remaining
+ /// elements from V2. Imm is a signed integer in the range
+ /// -VL <= Imm < VL (where VL is the runtime vector length of the
+ /// source/result vector)
+ Value *CreateVectorSplice(Value *V1, Value *V2, int64_t Imm,
+ const Twine &Name = "");
+
/// Return a vector value that contains \arg V broadcasted to \p
/// NumElts elements.
Value *CreateVectorSplat(unsigned NumElts, Value *V, const Twine &Name = "");
diff --git a/llvm/include/llvm/IR/InlineAsm.h b/llvm/include/llvm/IR/InlineAsm.h
index b6f377093337..1a0767aca142 100644
--- a/llvm/include/llvm/IR/InlineAsm.h
+++ b/llvm/include/llvm/IR/InlineAsm.h
@@ -44,10 +44,11 @@ private:
bool HasSideEffects;
bool IsAlignStack;
AsmDialect Dialect;
+ bool CanThrow;
InlineAsm(FunctionType *Ty, const std::string &AsmString,
const std::string &Constraints, bool hasSideEffects,
- bool isAlignStack, AsmDialect asmDialect);
+ bool isAlignStack, AsmDialect asmDialect, bool canThrow);
/// When the ConstantUniqueMap merges two types and makes two InlineAsms
/// identical, it destroys one of them with this method.
@@ -62,11 +63,12 @@ public:
static InlineAsm *get(FunctionType *Ty, StringRef AsmString,
StringRef Constraints, bool hasSideEffects,
bool isAlignStack = false,
- AsmDialect asmDialect = AD_ATT);
+ AsmDialect asmDialect = AD_ATT, bool canThrow = false);
bool hasSideEffects() const { return HasSideEffects; }
bool isAlignStack() const { return IsAlignStack; }
AsmDialect getDialect() const { return Dialect; }
+ bool canThrow() const { return CanThrow; }
/// getType - InlineAsm's are always pointers.
///
diff --git a/llvm/include/llvm/IR/InstVisitor.h b/llvm/include/llvm/IR/InstVisitor.h
index 4dbdc66d1366..585129904dd4 100644
--- a/llvm/include/llvm/IR/InstVisitor.h
+++ b/llvm/include/llvm/IR/InstVisitor.h
@@ -209,6 +209,9 @@ public:
RetTy visitDbgInfoIntrinsic(DbgInfoIntrinsic &I){ DELEGATE(IntrinsicInst); }
RetTy visitMemSetInst(MemSetInst &I) { DELEGATE(MemIntrinsic); }
RetTy visitMemCpyInst(MemCpyInst &I) { DELEGATE(MemTransferInst); }
+ RetTy visitMemCpyInlineInst(MemCpyInlineInst &I) {
+ DELEGATE(MemTransferInst);
+ }
RetTy visitMemMoveInst(MemMoveInst &I) { DELEGATE(MemTransferInst); }
RetTy visitMemTransferInst(MemTransferInst &I) { DELEGATE(MemIntrinsic); }
RetTy visitMemIntrinsic(MemIntrinsic &I) { DELEGATE(IntrinsicInst); }
diff --git a/llvm/include/llvm/IR/InstrTypes.h b/llvm/include/llvm/IR/InstrTypes.h
index f42ef48de6b3..ef2c279ed455 100644
--- a/llvm/include/llvm/IR/InstrTypes.h
+++ b/llvm/include/llvm/IR/InstrTypes.h
@@ -68,9 +68,8 @@ protected:
public:
// allocate space for exactly one operand
- void *operator new(size_t s) {
- return User::operator new(s, 1);
- }
+ void *operator new(size_t S) { return User::operator new(S, 1); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
/// Transparently provide more efficient getOperand methods.
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
@@ -203,9 +202,8 @@ protected:
public:
// allocate space for exactly two operands
- void *operator new(size_t s) {
- return User::operator new(s, 2);
- }
+ void *operator new(size_t S) { return User::operator new(S, 2); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
/// Transparently provide more efficient getOperand methods.
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
@@ -248,11 +246,11 @@ public:
}
#include "llvm/IR/Instruction.def"
- static BinaryOperator *CreateWithCopiedFlags(BinaryOps Opc,
- Value *V1, Value *V2,
- Instruction *CopyO,
- const Twine &Name = "") {
- BinaryOperator *BO = Create(Opc, V1, V2, Name);
+ static BinaryOperator *
+ CreateWithCopiedFlags(BinaryOps Opc, Value *V1, Value *V2, Instruction *CopyO,
+ const Twine &Name = "",
+ Instruction *InsertBefore = nullptr) {
+ BinaryOperator *BO = Create(Opc, V1, V2, Name, InsertBefore);
BO->copyIRFlags(CopyO);
return BO;
}
@@ -769,9 +767,8 @@ protected:
public:
// allocate space for exactly two operands
- void *operator new(size_t s) {
- return User::operator new(s, 2);
- }
+ void *operator new(size_t S) { return User::operator new(S, 2); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
/// Construct a compare instruction, given the opcode, the predicate and
/// the two operands. Optionally (if InstBefore is specified) insert the
@@ -1214,6 +1211,24 @@ public:
static CallBase *Create(CallBase *CB, ArrayRef<OperandBundleDef> Bundles,
Instruction *InsertPt = nullptr);
+ /// Create a clone of \p CB with the operand bundle with the tag matching
+ /// \p Bundle's tag replaced with Bundle, and insert it before \p InsertPt.
+ ///
+ /// The returned call instruction is identical \p CI in every way except that
+ /// the specified operand bundle has been replaced.
+ static CallBase *Create(CallBase *CB,
+ OperandBundleDef Bundle,
+ Instruction *InsertPt = nullptr);
+
+ /// Create a clone of \p CB with operand bundle \p OB added.
+ static CallBase *addOperandBundle(CallBase *CB, uint32_t ID,
+ OperandBundleDef OB,
+ Instruction *InsertPt = nullptr);
+
+ /// Create a clone of \p CB with operand bundle \p ID removed.
+ static CallBase *removeOperandBundle(CallBase *CB, uint32_t ID,
+ Instruction *InsertPt = nullptr);
+
static bool classof(const Instruction *I) {
return I->getOpcode() == Instruction::Call ||
I->getOpcode() == Instruction::Invoke ||
@@ -1424,8 +1439,7 @@ public:
/// type.
void setCalledFunction(FunctionType *FTy, Value *Fn) {
this->FTy = FTy;
- assert(FTy == cast<FunctionType>(
- cast<PointerType>(Fn->getType())->getElementType()));
+ assert(cast<PointerType>(Fn->getType())->isOpaqueOrPointeeTypeMatches(FTy));
// This function doesn't mutate the return type, only the function
// type. Seems broken, but I'm just gonna stick an assert in for now.
assert(getType() == FTy->getReturnType());
@@ -1537,6 +1551,13 @@ public:
setAttributes(PAL);
}
+ /// Removes the attributes from the given argument
+ void removeParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
+ AttributeList PAL = getAttributes();
+ PAL = PAL.removeParamAttributes(getContext(), ArgNo, Attrs);
+ setAttributes(PAL);
+ }
+
/// adds the dereferenceable attribute to the list of attributes.
void addDereferenceableAttr(unsigned i, uint64_t Bytes) {
AttributeList PAL = getAttributes();
@@ -1644,6 +1665,17 @@ public:
paramHasAttr(ArgNo, Attribute::Preallocated);
}
+ /// Determine whether passing undef to this argument is undefined behavior.
+ /// If passing undef to this argument is UB, passing poison is UB as well
+ /// because poison is more undefined than undef.
+ bool isPassingUndefUB(unsigned ArgNo) const {
+ return paramHasAttr(ArgNo, Attribute::NoUndef) ||
+ // dereferenceable implies noundef.
+ paramHasAttr(ArgNo, Attribute::Dereferenceable) ||
+ // dereferenceable implies noundef, and null is a well-defined value.
+ paramHasAttr(ArgNo, Attribute::DereferenceableOrNull);
+ }
+
/// Determine if there are is an inalloca argument. Only the last argument can
/// have the inalloca attribute.
bool hasInAllocaArgument() const {
@@ -1670,39 +1702,43 @@ public:
dataOperandHasImpliedAttr(OpNo + 1, Attribute::ReadNone);
}
- LLVM_ATTRIBUTE_DEPRECATED(unsigned getRetAlignment() const,
- "Use getRetAlign() instead") {
- if (const auto MA = Attrs.getRetAlignment())
- return MA->value();
- return 0;
- }
-
/// Extract the alignment of the return value.
MaybeAlign getRetAlign() const { return Attrs.getRetAlignment(); }
/// Extract the alignment for a call or parameter (0=unknown).
- LLVM_ATTRIBUTE_DEPRECATED(unsigned getParamAlignment(unsigned ArgNo) const,
- "Use getParamAlign() instead") {
- if (const auto MA = Attrs.getParamAlignment(ArgNo))
- return MA->value();
- return 0;
- }
-
- /// Extract the alignment for a call or parameter (0=unknown).
MaybeAlign getParamAlign(unsigned ArgNo) const {
return Attrs.getParamAlignment(ArgNo);
}
+ MaybeAlign getParamStackAlign(unsigned ArgNo) const {
+ return Attrs.getParamStackAlignment(ArgNo);
+ }
+
/// Extract the byval type for a call or parameter.
Type *getParamByValType(unsigned ArgNo) const {
- Type *Ty = Attrs.getParamByValType(ArgNo);
- return Ty ? Ty : getArgOperand(ArgNo)->getType()->getPointerElementType();
+ if (auto *Ty = Attrs.getParamByValType(ArgNo))
+ return Ty;
+ if (const Function *F = getCalledFunction())
+ return F->getAttributes().getParamByValType(ArgNo);
+ return nullptr;
}
/// Extract the preallocated type for a call or parameter.
Type *getParamPreallocatedType(unsigned ArgNo) const {
- Type *Ty = Attrs.getParamPreallocatedType(ArgNo);
- return Ty ? Ty : getArgOperand(ArgNo)->getType()->getPointerElementType();
+ if (auto *Ty = Attrs.getParamPreallocatedType(ArgNo))
+ return Ty;
+ if (const Function *F = getCalledFunction())
+ return F->getAttributes().getParamPreallocatedType(ArgNo);
+ return nullptr;
+ }
+
+ /// Extract the preallocated type for a call or parameter.
+ Type *getParamInAllocaType(unsigned ArgNo) const {
+ if (auto *Ty = Attrs.getParamInAllocaType(ArgNo))
+ return Ty;
+ if (const Function *F = getCalledFunction())
+ return F->getAttributes().getParamInAllocaType(ArgNo);
+ return nullptr;
}
/// Extract the number of dereferenceable bytes for a call or
@@ -1757,9 +1793,6 @@ public:
return doesNotAccessMemory() || hasFnAttr(Attribute::ReadOnly);
}
- /// Returns true if this function is guaranteed to return.
- bool willReturn() const { return hasFnAttr(Attribute::WillReturn); }
-
void setOnlyReadsMemory() {
addAttribute(AttributeList::FunctionIndex, Attribute::ReadOnly);
}
@@ -1987,12 +2020,7 @@ public:
/// Return true if this operand bundle user has operand bundles that
/// may read from the heap.
- bool hasReadingOperandBundles() const {
- // Implementation note: this is a conservative implementation of operand
- // bundle semantics, where *any* operand bundle forces a callsite to be at
- // least readonly.
- return hasOperandBundles();
- }
+ bool hasReadingOperandBundles() const;
/// Return true if this operand bundle user has operand bundles that
/// may write to the heap.
diff --git a/llvm/include/llvm/IR/Instruction.h b/llvm/include/llvm/IR/Instruction.h
index d2a55f89fac9..deb85cf277fe 100644
--- a/llvm/include/llvm/IR/Instruction.h
+++ b/llvm/include/llvm/IR/Instruction.h
@@ -170,6 +170,11 @@ public:
bool isExceptionalTerminator() const {
return isExceptionalTerminator(getOpcode());
}
+
+ /// It checks if this instruction is the only user of at least one of
+ /// its operands.
+ bool isOnlyUserOfAnyOperand();
+
bool isIndirectTerminator() const {
return isIndirectTerminator(getOpcode());
}
@@ -327,6 +332,8 @@ public:
/// @{
/// Passes are required to drop metadata they don't understand. This is a
/// convenience method for passes to do so.
+ /// dropUndefImplyingAttrsAndUnknownMetadata should be used instead of
+ /// this API if the Instruction being modified is a call.
void dropUnknownNonDebugMetadata(ArrayRef<unsigned> KnownIDs);
void dropUnknownNonDebugMetadata() {
return dropUnknownNonDebugMetadata(None);
@@ -386,6 +393,13 @@ public:
/// having non-poison inputs.
void dropPoisonGeneratingFlags();
+ /// This function drops non-debug unknown metadata (through
+ /// dropUnknownNonDebugMetadata). For calls, it also drops parameter and
+ /// return attributes that can cause undefined behaviour. Both of these should
+ /// be done by passes which move instructions in IR.
+ void
+ dropUndefImplyingAttrsAndUnknownMetadata(ArrayRef<unsigned> KnownIDs = {});
+
/// Determine whether the exact flag is set.
bool isExact() const;
@@ -597,6 +611,9 @@ public:
/// Return true if this atomic instruction stores to memory.
bool hasAtomicStore() const;
+ /// Return true if this instruction has a volatile memory access.
+ bool isVolatile() const;
+
/// Return true if this instruction may throw an exception.
bool mayThrow() const;
@@ -619,11 +636,16 @@ public:
/// Return true if the instruction may have side effects.
///
+ /// Side effects are:
+ /// * Writing to memory.
+ /// * Unwinding.
+ /// * Not returning (e.g. an infinite loop).
+ ///
/// Note that this does not consider malloc and alloca to have side
/// effects because the newly allocated memory is completely invisible to
/// instructions which don't use the returned value. For cases where this
/// matters, isSafeToSpeculativelyExecute may be more appropriate.
- bool mayHaveSideEffects() const { return mayWriteToMemory() || mayThrow(); }
+ bool mayHaveSideEffects() const;
/// Return true if the instruction can be removed if the result is unused.
///
@@ -633,6 +655,10 @@ public:
/// generated program.
bool isSafeToRemove() const;
+ /// Return true if the instruction will return (unwinding is considered as
+ /// a form of returning control flow here).
+ bool willReturn() const;
+
/// Return true if the instruction is a variety of EH-block.
bool isEHPad() const {
switch (getOpcode()) {
@@ -650,6 +676,13 @@ public:
/// llvm.lifetime.end marker.
bool isLifetimeStartOrEnd() const;
+ /// Return true if the instruction is a llvm.launder.invariant.group or
+ /// llvm.strip.invariant.group.
+ bool isLaunderOrStripInvariantGroup() const;
+
+ /// Return true if the instruction is a DbgInfoIntrinsic or PseudoProbeInst.
+ bool isDebugOrPseudoInst() const;
+
/// Return a pointer to the next non-debug instruction in the same basic
/// block as 'this', or nullptr if no such instruction exists. Skip any pseudo
/// operations if \c SkipPseudoOp is true.
diff --git a/llvm/include/llvm/IR/Instructions.h b/llvm/include/llvm/IR/Instructions.h
index 00ecc2aa7f37..0c43a56daa33 100644
--- a/llvm/include/llvm/IR/Instructions.h
+++ b/llvm/include/llvm/IR/Instructions.h
@@ -17,6 +17,7 @@
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Bitfields.h"
+#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
@@ -332,9 +333,8 @@ public:
AtomicOrdering Order, SyncScope::ID SSID, BasicBlock *InsertAtEnd);
// allocate space for exactly two operands
- void *operator new(size_t s) {
- return User::operator new(s, 2);
- }
+ void *operator new(size_t S) { return User::operator new(S, 2); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
/// Return true if this is a store to a volatile memory location.
bool isVolatile() const { return getSubclassData<VolatileField>(); }
@@ -462,9 +462,8 @@ public:
BasicBlock *InsertAtEnd);
// allocate space for exactly zero operands
- void *operator new(size_t s) {
- return User::operator new(s, 0);
- }
+ void *operator new(size_t S) { return User::operator new(S, 0); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
/// Returns the ordering constraint of this fence instruction.
AtomicOrdering getOrdering() const {
@@ -546,9 +545,8 @@ public:
BasicBlock *InsertAtEnd);
// allocate space for exactly three operands
- void *operator new(size_t s) {
- return User::operator new(s, 3);
- }
+ void *operator new(size_t S) { return User::operator new(S, 3); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
using VolatileField = BoolBitfieldElementT<0>;
using WeakField = BoolBitfieldElementT<VolatileField::NextBit>;
@@ -590,6 +588,18 @@ public:
/// Transparently provide more efficient getOperand methods.
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
+ static bool isValidSuccessOrdering(AtomicOrdering Ordering) {
+ return Ordering != AtomicOrdering::NotAtomic &&
+ Ordering != AtomicOrdering::Unordered;
+ }
+
+ static bool isValidFailureOrdering(AtomicOrdering Ordering) {
+ return Ordering != AtomicOrdering::NotAtomic &&
+ Ordering != AtomicOrdering::Unordered &&
+ Ordering != AtomicOrdering::AcquireRelease &&
+ Ordering != AtomicOrdering::Release;
+ }
+
/// Returns the success ordering constraint of this cmpxchg instruction.
AtomicOrdering getSuccessOrdering() const {
return getSubclassData<SuccessOrderingField>();
@@ -597,8 +607,8 @@ public:
/// Sets the success ordering constraint of this cmpxchg instruction.
void setSuccessOrdering(AtomicOrdering Ordering) {
- assert(Ordering != AtomicOrdering::NotAtomic &&
- "CmpXchg instructions can only be atomic.");
+ assert(isValidSuccessOrdering(Ordering) &&
+ "invalid CmpXchg success ordering");
setSubclassData<SuccessOrderingField>(Ordering);
}
@@ -609,11 +619,25 @@ public:
/// Sets the failure ordering constraint of this cmpxchg instruction.
void setFailureOrdering(AtomicOrdering Ordering) {
- assert(Ordering != AtomicOrdering::NotAtomic &&
- "CmpXchg instructions can only be atomic.");
+ assert(isValidFailureOrdering(Ordering) &&
+ "invalid CmpXchg failure ordering");
setSubclassData<FailureOrderingField>(Ordering);
}
+ /// Returns a single ordering which is at least as strong as both the
+ /// success and failure orderings for this cmpxchg.
+ AtomicOrdering getMergedOrdering() const {
+ if (getFailureOrdering() == AtomicOrdering::SequentiallyConsistent)
+ return AtomicOrdering::SequentiallyConsistent;
+ if (getFailureOrdering() == AtomicOrdering::Acquire) {
+ if (getSuccessOrdering() == AtomicOrdering::Monotonic)
+ return AtomicOrdering::Acquire;
+ if (getSuccessOrdering() == AtomicOrdering::Release)
+ return AtomicOrdering::AcquireRelease;
+ }
+ return getSuccessOrdering();
+ }
+
/// Returns the synchronization scope ID of this cmpxchg instruction.
SyncScope::ID getSyncScopeID() const {
return SSID;
@@ -765,9 +789,8 @@ public:
BasicBlock *InsertAtEnd);
// allocate space for exactly two operands
- void *operator new(size_t s) {
- return User::operator new(s, 2);
- }
+ void *operator new(size_t S) { return User::operator new(S, 2); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
using VolatileField = BoolBitfieldElementT<0>;
using AtomicOrderingField =
@@ -933,13 +956,9 @@ public:
const Twine &NameStr = "",
Instruction *InsertBefore = nullptr) {
unsigned Values = 1 + unsigned(IdxList.size());
- if (!PointeeType)
- PointeeType =
- cast<PointerType>(Ptr->getType()->getScalarType())->getElementType();
- else
- assert(
- PointeeType ==
- cast<PointerType>(Ptr->getType()->getScalarType())->getElementType());
+ assert(PointeeType && "Must specify element type");
+ assert(cast<PointerType>(Ptr->getType()->getScalarType())
+ ->isOpaqueOrPointeeTypeMatches(PointeeType));
return new (Values) GetElementPtrInst(PointeeType, Ptr, IdxList, Values,
NameStr, InsertBefore);
}
@@ -949,26 +968,24 @@ public:
const Twine &NameStr,
BasicBlock *InsertAtEnd) {
unsigned Values = 1 + unsigned(IdxList.size());
- if (!PointeeType)
- PointeeType =
- cast<PointerType>(Ptr->getType()->getScalarType())->getElementType();
- else
- assert(
- PointeeType ==
- cast<PointerType>(Ptr->getType()->getScalarType())->getElementType());
+ assert(PointeeType && "Must specify element type");
+ assert(cast<PointerType>(Ptr->getType()->getScalarType())
+ ->isOpaqueOrPointeeTypeMatches(PointeeType));
return new (Values) GetElementPtrInst(PointeeType, Ptr, IdxList, Values,
NameStr, InsertAtEnd);
}
- /// Create an "inbounds" getelementptr. See the documentation for the
- /// "inbounds" flag in LangRef.html for details.
- static GetElementPtrInst *CreateInBounds(Value *Ptr,
- ArrayRef<Value *> IdxList,
- const Twine &NameStr = "",
- Instruction *InsertBefore = nullptr){
- return CreateInBounds(nullptr, Ptr, IdxList, NameStr, InsertBefore);
+ LLVM_ATTRIBUTE_DEPRECATED(static GetElementPtrInst *CreateInBounds(
+ Value *Ptr, ArrayRef<Value *> IdxList, const Twine &NameStr = "",
+ Instruction *InsertBefore = nullptr),
+ "Use the version with explicit element type instead") {
+ return CreateInBounds(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, IdxList,
+ NameStr, InsertBefore);
}
+ /// Create an "inbounds" getelementptr. See the documentation for the
+ /// "inbounds" flag in LangRef.html for details.
static GetElementPtrInst *
CreateInBounds(Type *PointeeType, Value *Ptr, ArrayRef<Value *> IdxList,
const Twine &NameStr = "",
@@ -979,11 +996,13 @@ public:
return GEP;
}
- static GetElementPtrInst *CreateInBounds(Value *Ptr,
- ArrayRef<Value *> IdxList,
- const Twine &NameStr,
- BasicBlock *InsertAtEnd) {
- return CreateInBounds(nullptr, Ptr, IdxList, NameStr, InsertAtEnd);
+ LLVM_ATTRIBUTE_DEPRECATED(static GetElementPtrInst *CreateInBounds(
+ Value *Ptr, ArrayRef<Value *> IdxList, const Twine &NameStr,
+ BasicBlock *InsertAtEnd),
+ "Use the version with explicit element type instead") {
+ return CreateInBounds(
+ Ptr->getType()->getScalarType()->getPointerElementType(), Ptr, IdxList,
+ NameStr, InsertAtEnd);
}
static GetElementPtrInst *CreateInBounds(Type *PointeeType, Value *Ptr,
@@ -1005,8 +1024,8 @@ public:
void setResultElementType(Type *Ty) { ResultElementType = Ty; }
Type *getResultElementType() const {
- assert(ResultElementType ==
- cast<PointerType>(getType()->getScalarType())->getElementType());
+ assert(cast<PointerType>(getType()->getScalarType())
+ ->isOpaqueOrPointeeTypeMatches(ResultElementType));
return ResultElementType;
}
@@ -1072,8 +1091,12 @@ public:
/// instruction, which may be a vector of pointers.
static Type *getGEPReturnType(Type *ElTy, Value *Ptr,
ArrayRef<Value *> IdxList) {
- Type *PtrTy = PointerType::get(checkGEPType(getIndexedType(ElTy, IdxList)),
- Ptr->getType()->getPointerAddressSpace());
+ PointerType *OrigPtrTy = cast<PointerType>(Ptr->getType()->getScalarType());
+ unsigned AddrSpace = OrigPtrTy->getAddressSpace();
+ Type *ResultElemTy = checkGEPType(getIndexedType(ElTy, IdxList));
+ Type *PtrTy = OrigPtrTy->isOpaque()
+ ? PointerType::get(OrigPtrTy->getContext(), AddrSpace)
+ : PointerType::get(ResultElemTy, AddrSpace);
// Vector GEP
if (auto *PtrVTy = dyn_cast<VectorType>(Ptr->getType())) {
ElementCount EltCount = PtrVTy->getElementCount();
@@ -1122,7 +1145,9 @@ public:
/// must be at least as wide as the IntPtr type for the address space of
/// the base GEP pointer.
bool accumulateConstantOffset(const DataLayout &DL, APInt &Offset) const;
-
+ bool collectOffset(const DataLayout &DL, unsigned BitWidth,
+ MapVector<Value *, APInt> &VariableOffsets,
+ APInt &ConstantOffset) const;
// Methods for support type inquiry through isa, cast, and dyn_cast:
static bool classof(const Instruction *I) {
return (I->getOpcode() == Instruction::GetElementPtr);
@@ -1146,8 +1171,8 @@ GetElementPtrInst::GetElementPtrInst(Type *PointeeType, Value *Ptr,
Values, InsertBefore),
SourceElementType(PointeeType),
ResultElementType(getIndexedType(PointeeType, IdxList)) {
- assert(ResultElementType ==
- cast<PointerType>(getType()->getScalarType())->getElementType());
+ assert(cast<PointerType>(getType()->getScalarType())
+ ->isOpaqueOrPointeeTypeMatches(ResultElementType));
init(Ptr, IdxList, NameStr);
}
@@ -1160,8 +1185,8 @@ GetElementPtrInst::GetElementPtrInst(Type *PointeeType, Value *Ptr,
Values, InsertAtEnd),
SourceElementType(PointeeType),
ResultElementType(getIndexedType(PointeeType, IdxList)) {
- assert(ResultElementType ==
- cast<PointerType>(getType()->getScalarType())->getElementType());
+ assert(cast<PointerType>(getType()->getScalarType())
+ ->isOpaqueOrPointeeTypeMatches(ResultElementType));
init(Ptr, IdxList, NameStr);
}
@@ -1585,16 +1610,6 @@ public:
static CallInst *Create(CallInst *CI, ArrayRef<OperandBundleDef> Bundles,
Instruction *InsertPt = nullptr);
- /// Create a clone of \p CI with a different set of operand bundles and
- /// insert it before \p InsertPt.
- ///
- /// The returned call instruction is identical \p CI in every way except that
- /// the operand bundle for the new instruction is set to the operand bundle
- /// in \p Bundle.
- static CallInst *CreateWithReplacedBundle(CallInst *CI,
- OperandBundleDef Bundle,
- Instruction *InsertPt = nullptr);
-
/// Generate the IR for a call to malloc:
/// 1. Compute the malloc call's argument as the specified type's size,
/// possibly multiplied by the array size if the array size is not
@@ -2015,7 +2030,8 @@ public:
ShuffleVectorInst(Value *V1, Value *V2, ArrayRef<int> Mask,
const Twine &NameStr, BasicBlock *InsertAtEnd);
- void *operator new(size_t s) { return User::operator new(s, 2); }
+ void *operator new(size_t S) { return User::operator new(S, 2); }
+ void operator delete(void *Ptr) { return User::operator delete(Ptr); }
/// Swap the operands and adjust the mask to preserve the semantics
/// of the instruction.
@@ -2472,9 +2488,8 @@ protected:
public:
// allocate space for exactly two operands
- void *operator new(size_t s) {
- return User::operator new(s, 2);
- }
+ void *operator new(size_t S) { return User::operator new(S, 2); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
static InsertValueInst *Create(Value *Agg, Value *Val,
ArrayRef<unsigned> Idxs,
@@ -2591,6 +2606,7 @@ class PHINode : public Instruction {
Instruction *InsertBefore = nullptr)
: Instruction(Ty, Instruction::PHI, nullptr, 0, InsertBefore),
ReservedSpace(NumReservedValues) {
+ assert(!Ty->isTokenTy() && "PHI nodes cannot have token type!");
setName(NameStr);
allocHungoffUses(ReservedSpace);
}
@@ -2599,6 +2615,7 @@ class PHINode : public Instruction {
BasicBlock *InsertAtEnd)
: Instruction(Ty, Instruction::PHI, nullptr, 0, InsertAtEnd),
ReservedSpace(NumReservedValues) {
+ assert(!Ty->isTokenTy() && "PHI nodes cannot have token type!");
setName(NameStr);
allocHungoffUses(ReservedSpace);
}
@@ -2848,9 +2865,7 @@ private:
const Twine &NameStr, BasicBlock *InsertAtEnd);
// Allocate space for exactly zero operands.
- void *operator new(size_t s) {
- return User::operator new(s);
- }
+ void *operator new(size_t S) { return User::operator new(S); }
void growOperands(unsigned Size);
void init(unsigned NumReservedValues, const Twine &NameStr);
@@ -2862,6 +2877,8 @@ protected:
LandingPadInst *cloneImpl() const;
public:
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
+
/// Constructors - NumReservedClauses is a hint for the number of incoming
/// clauses that this landingpad will have (use 0 if you really have no idea).
static LandingPadInst *Create(Type *RetTy, unsigned NumReservedClauses,
@@ -3180,9 +3197,7 @@ class SwitchInst : public Instruction {
BasicBlock *InsertAtEnd);
// allocate space for exactly zero operands
- void *operator new(size_t s) {
- return User::operator new(s);
- }
+ void *operator new(size_t S) { return User::operator new(S); }
void init(Value *Value, BasicBlock *Default, unsigned NumReserved);
void growOperands();
@@ -3194,6 +3209,8 @@ protected:
SwitchInst *cloneImpl() const;
public:
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
+
// -2
static const unsigned DefaultPseudoIndex = static_cast<unsigned>(~0L-1);
@@ -3578,9 +3595,7 @@ class IndirectBrInst : public Instruction {
IndirectBrInst(Value *Address, unsigned NumDests, BasicBlock *InsertAtEnd);
// allocate space for exactly zero operands
- void *operator new(size_t s) {
- return User::operator new(s);
- }
+ void *operator new(size_t S) { return User::operator new(S); }
void init(Value *Address, unsigned NumDests);
void growOperands();
@@ -3592,6 +3607,8 @@ protected:
IndirectBrInst *cloneImpl() const;
public:
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
+
/// Iterator type that casts an operand to a basic block.
///
/// This only makes sense because the successors are stored as adjacent
@@ -3824,16 +3841,6 @@ public:
static InvokeInst *Create(InvokeInst *II, ArrayRef<OperandBundleDef> Bundles,
Instruction *InsertPt = nullptr);
- /// Create a clone of \p II with a different set of operand bundles and
- /// insert it before \p InsertPt.
- ///
- /// The returned invoke instruction is identical to \p II in every way except
- /// that the operand bundle for the new instruction is set to the operand
- /// bundle in \p Bundle.
- static InvokeInst *CreateWithReplacedBundle(InvokeInst *II,
- OperandBundleDef Bundles,
- Instruction *InsertPt = nullptr);
-
// get*Dest - Return the destination basic blocks...
BasicBlock *getNormalDest() const {
return cast<BasicBlock>(Op<NormalDestOpEndIdx>());
@@ -4239,7 +4246,7 @@ class CatchSwitchInst : public Instruction {
BasicBlock *InsertAtEnd);
// allocate space for exactly zero operands
- void *operator new(size_t s) { return User::operator new(s); }
+ void *operator new(size_t S) { return User::operator new(S); }
void init(Value *ParentPad, BasicBlock *UnwindDest, unsigned NumReserved);
void growOperands(unsigned Size);
@@ -4251,6 +4258,8 @@ protected:
CatchSwitchInst *cloneImpl() const;
public:
+ void operator delete(void *Ptr) { return User::operator delete(Ptr); }
+
static CatchSwitchInst *Create(Value *ParentPad, BasicBlock *UnwindDest,
unsigned NumHandlers,
const Twine &NameStr = "",
@@ -4679,9 +4688,8 @@ public:
explicit UnreachableInst(LLVMContext &C, BasicBlock *InsertAtEnd);
// allocate space for exactly zero operands
- void *operator new(size_t s) {
- return User::operator new(s, 0);
- }
+ void *operator new(size_t S) { return User::operator new(S, 0); }
+ void operator delete(void *Ptr) { User::operator delete(Ptr); }
unsigned getNumSuccessors() const { return 0; }
@@ -5298,6 +5306,15 @@ inline unsigned getLoadStoreAddressSpace(Value *I) {
return cast<StoreInst>(I)->getPointerAddressSpace();
}
+/// A helper function that returns the type of a load or store instruction.
+inline Type *getLoadStoreType(Value *I) {
+ assert((isa<LoadInst>(I) || isa<StoreInst>(I)) &&
+ "Expected Load or Store instruction");
+ if (auto *LI = dyn_cast<LoadInst>(I))
+ return LI->getType();
+ return cast<StoreInst>(I)->getValueOperand()->getType();
+}
+
//===----------------------------------------------------------------------===//
// FreezeInst Class
//===----------------------------------------------------------------------===//
diff --git a/llvm/include/llvm/IR/IntrinsicInst.h b/llvm/include/llvm/IR/IntrinsicInst.h
index 9d68f3fdde6c..6b42cb949050 100644
--- a/llvm/include/llvm/IR/IntrinsicInst.h
+++ b/llvm/include/llvm/IR/IntrinsicInst.h
@@ -24,6 +24,7 @@
#define LLVM_IR_INTRINSICINST_H
#include "llvm/IR/Constants.h"
+#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/FPEnv.h"
#include "llvm/IR/Function.h"
@@ -82,6 +83,29 @@ public:
}
}
+  // Checks if the intrinsic is an assume-like intrinsic (assume, sideeffect,
+ bool isAssumeLikeIntrinsic() const {
+ switch (getIntrinsicID()) {
+ default: break;
+ case Intrinsic::assume:
+ case Intrinsic::sideeffect:
+ case Intrinsic::pseudoprobe:
+ case Intrinsic::dbg_declare:
+ case Intrinsic::dbg_value:
+ case Intrinsic::dbg_label:
+ case Intrinsic::invariant_start:
+ case Intrinsic::invariant_end:
+ case Intrinsic::lifetime_start:
+ case Intrinsic::lifetime_end:
+ case Intrinsic::experimental_noalias_scope_decl:
+ case Intrinsic::objectsize:
+ case Intrinsic::ptr_annotation:
+ case Intrinsic::var_annotation:
+ return true;
+ }
+ return false;
+ }
+
// Methods for support type inquiry through isa, cast, and dyn_cast:
static bool classof(const CallInst *I) {
if (const Function *CF = I->getCalledFunction())
@@ -123,10 +147,84 @@ public:
/// This is the common base class for debug info intrinsics for variables.
class DbgVariableIntrinsic : public DbgInfoIntrinsic {
public:
- /// Get the location corresponding to the variable referenced by the debug
+  // Iterator for ValueAsMetadata that internally uses direct pointer iteration
+  // over either a ValueAsMetadata* or a ValueAsMetadata**, dereferencing to the
+  // ValueAsMetadata.
+ class location_op_iterator
+ : public iterator_facade_base<location_op_iterator,
+ std::bidirectional_iterator_tag, Value *> {
+ PointerUnion<ValueAsMetadata *, ValueAsMetadata **> I;
+
+ public:
+ location_op_iterator(ValueAsMetadata *SingleIter) : I(SingleIter) {}
+ location_op_iterator(ValueAsMetadata **MultiIter) : I(MultiIter) {}
+
+ location_op_iterator(const location_op_iterator &R) : I(R.I) {}
+ location_op_iterator &operator=(const location_op_iterator &R) {
+ I = R.I;
+ return *this;
+ }
+ bool operator==(const location_op_iterator &RHS) const {
+ return I == RHS.I;
+ }
+ const Value *operator*() const {
+ ValueAsMetadata *VAM = I.is<ValueAsMetadata *>()
+ ? I.get<ValueAsMetadata *>()
+ : *I.get<ValueAsMetadata **>();
+ return VAM->getValue();
+ };
+ Value *operator*() {
+ ValueAsMetadata *VAM = I.is<ValueAsMetadata *>()
+ ? I.get<ValueAsMetadata *>()
+ : *I.get<ValueAsMetadata **>();
+ return VAM->getValue();
+ }
+ location_op_iterator &operator++() {
+ if (I.is<ValueAsMetadata *>())
+ I = I.get<ValueAsMetadata *>() + 1;
+ else
+ I = I.get<ValueAsMetadata **>() + 1;
+ return *this;
+ }
+ location_op_iterator &operator--() {
+ if (I.is<ValueAsMetadata *>())
+ I = I.get<ValueAsMetadata *>() - 1;
+ else
+ I = I.get<ValueAsMetadata **>() - 1;
+ return *this;
+ }
+ };
+
+ /// Get the locations corresponding to the variable referenced by the debug
/// info intrinsic. Depending on the intrinsic, this could be the
/// variable's value or its address.
- Value *getVariableLocation(bool AllowNullOp = true) const;
+ iterator_range<location_op_iterator> location_ops() const;
+
+ Value *getVariableLocationOp(unsigned OpIdx) const;
+
+ void replaceVariableLocationOp(Value *OldValue, Value *NewValue);
+ void replaceVariableLocationOp(unsigned OpIdx, Value *NewValue);
+ /// Adding a new location operand will always result in this intrinsic using
+ /// an ArgList, and must always be accompanied by a new expression that uses
+ /// the new operand.
+ void addVariableLocationOps(ArrayRef<Value *> NewValues,
+ DIExpression *NewExpr);
+
+ void setVariable(DILocalVariable *NewVar) {
+ setArgOperand(1, MetadataAsValue::get(NewVar->getContext(), NewVar));
+ }
+
+ void setExpression(DIExpression *NewExpr) {
+ setArgOperand(2, MetadataAsValue::get(NewExpr->getContext(), NewExpr));
+ }
+
+ unsigned getNumVariableLocationOps() const {
+ if (hasArgList())
+ return cast<DIArgList>(getRawLocation())->getArgs().size();
+ return 1;
+ }
+
+ bool hasArgList() const { return isa<DIArgList>(getRawLocation()); }
/// Does this describe the address of a local variable. True for dbg.addr
/// and dbg.declare, but not dbg.value, which describes its value.
@@ -134,6 +232,24 @@ public:
return getIntrinsicID() != Intrinsic::dbg_value;
}
+ void setUndef() {
+ // TODO: When/if we remove duplicate values from DIArgLists, we don't need
+ // this set anymore.
+ SmallPtrSet<Value *, 4> RemovedValues;
+ for (Value *OldValue : location_ops()) {
+ if (!RemovedValues.insert(OldValue).second)
+ continue;
+ Value *Undef = UndefValue::get(OldValue->getType());
+ replaceVariableLocationOp(OldValue, Undef);
+ }
+ }
+
+ bool isUndef() const {
+ return (getNumVariableLocationOps() == 0 &&
+ !getExpression()->isComplex()) ||
+ any_of(location_ops(), [](Value *V) { return isa<UndefValue>(V); });
+ }
+
DILocalVariable *getVariable() const {
return cast<DILocalVariable>(getRawVariable());
}
@@ -142,6 +258,10 @@ public:
return cast<DIExpression>(getRawExpression());
}
+ Metadata *getRawLocation() const {
+ return cast<MetadataAsValue>(getArgOperand(0))->getMetadata();
+ }
+
Metadata *getRawVariable() const {
return cast<MetadataAsValue>(getArgOperand(1))->getMetadata();
}
@@ -150,6 +270,13 @@ public:
return cast<MetadataAsValue>(getArgOperand(2))->getMetadata();
}
+ /// Use of this should generally be avoided; instead,
+ /// replaceVariableLocationOp and addVariableLocationOps should be used where
+ /// possible to avoid creating invalid state.
+ void setRawLocation(Metadata *Location) {
+ return setArgOperand(0, MetadataAsValue::get(getContext(), Location));
+ }
+
/// Get the size (in bits) of the variable, or fragment of the variable that
/// is described.
Optional<uint64_t> getFragmentSizeInBits() const;
@@ -170,12 +297,21 @@ public:
return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
}
/// @}
+private:
+ void setArgOperand(unsigned i, Value *v) {
+ DbgInfoIntrinsic::setArgOperand(i, v);
+ }
+ void setOperand(unsigned i, Value *v) { DbgInfoIntrinsic::setOperand(i, v); }
};
/// This represents the llvm.dbg.declare instruction.
class DbgDeclareInst : public DbgVariableIntrinsic {
public:
- Value *getAddress() const { return getVariableLocation(); }
+ Value *getAddress() const {
+ assert(getNumVariableLocationOps() == 1 &&
+ "dbg.declare must have exactly 1 location operand.");
+ return getVariableLocationOp(0);
+ }
/// \name Casting methods
/// @{
@@ -191,7 +327,11 @@ public:
/// This represents the llvm.dbg.addr instruction.
class DbgAddrIntrinsic : public DbgVariableIntrinsic {
public:
- Value *getAddress() const { return getVariableLocation(); }
+ Value *getAddress() const {
+ assert(getNumVariableLocationOps() == 1 &&
+ "dbg.addr must have exactly 1 location operand.");
+ return getVariableLocationOp(0);
+ }
/// \name Casting methods
/// @{
@@ -206,8 +346,13 @@ public:
/// This represents the llvm.dbg.value instruction.
class DbgValueInst : public DbgVariableIntrinsic {
public:
- Value *getValue() const {
- return getVariableLocation(/* AllowNullOp = */ false);
+ // The default argument should only be used in ISel, and the default option
+ // should be removed once ISel support for multiple location ops is complete.
+ Value *getValue(unsigned OpIdx = 0) const {
+ return getVariableLocationOp(OpIdx);
+ }
+ iterator_range<location_op_iterator> getValues() const {
+ return location_ops();
}
/// \name Casting methods
@@ -244,43 +389,63 @@ public:
/// This is the common base class for vector predication intrinsics.
class VPIntrinsic : public IntrinsicInst {
public:
- static Optional<int> GetMaskParamPos(Intrinsic::ID IntrinsicID);
- static Optional<int> GetVectorLengthParamPos(Intrinsic::ID IntrinsicID);
+ /// \brief Declares a llvm.vp.* intrinsic in \p M that matches the parameters
+ /// \p Params.
+ static Function *getDeclarationForParams(Module *M, Intrinsic::ID,
+ ArrayRef<Value *> Params);
+
+ static Optional<unsigned> getMaskParamPos(Intrinsic::ID IntrinsicID);
+ static Optional<unsigned> getVectorLengthParamPos(Intrinsic::ID IntrinsicID);
/// The llvm.vp.* intrinsics for this instruction Opcode
- static Intrinsic::ID GetForOpcode(unsigned OC);
+ static Intrinsic::ID getForOpcode(unsigned OC);
// Whether \p ID is a VP intrinsic ID.
- static bool IsVPIntrinsic(Intrinsic::ID);
+ static bool isVPIntrinsic(Intrinsic::ID);
- /// \return the mask parameter or nullptr.
+ /// \return The mask parameter or nullptr.
Value *getMaskParam() const;
+ void setMaskParam(Value *);
- /// \return the vector length parameter or nullptr.
+ /// \return The vector length parameter or nullptr.
Value *getVectorLengthParam() const;
+ void setVectorLengthParam(Value *);
- /// \return whether the vector length param can be ignored.
+ /// \return Whether the vector length param can be ignored.
bool canIgnoreVectorLengthParam() const;
- /// \return the static element count (vector number of elements) the vector
+ /// \return The static element count (vector number of elements) the vector
/// length parameter applies to.
ElementCount getStaticVectorLength() const;
+ /// \return The alignment of the pointer used by this load/store/gather or
+ /// scatter.
+ MaybeAlign getPointerAlignment() const;
+ // MaybeAlign setPointerAlignment(Align NewAlign); // TODO
+
+  /// \return The pointer operand of this load, store, gather or scatter.
+ Value *getMemoryPointerParam() const;
+ static Optional<unsigned> getMemoryPointerParamPos(Intrinsic::ID);
+
+ /// \return The data (payload) operand of this store or scatter.
+ Value *getMemoryDataParam() const;
+ static Optional<unsigned> getMemoryDataParamPos(Intrinsic::ID);
+
// Methods for support type inquiry through isa, cast, and dyn_cast:
static bool classof(const IntrinsicInst *I) {
- return IsVPIntrinsic(I->getIntrinsicID());
+ return isVPIntrinsic(I->getIntrinsicID());
}
static bool classof(const Value *V) {
return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
}
// Equivalent non-predicated opcode
- unsigned getFunctionalOpcode() const {
- return GetFunctionalOpcodeForVP(getIntrinsicID());
+ Optional<unsigned> getFunctionalOpcode() const {
+ return getFunctionalOpcodeForVP(getIntrinsicID());
}
// Equivalent non-predicated opcode
- static unsigned GetFunctionalOpcodeForVP(Intrinsic::ID ID);
+ static Optional<unsigned> getFunctionalOpcodeForVP(Intrinsic::ID ID);
};
/// This is the common base class for constrained floating point intrinsics.
@@ -290,6 +455,7 @@ public:
bool isTernaryOp() const;
Optional<RoundingMode> getRoundingMode() const;
Optional<fp::ExceptionBehavior> getExceptionBehavior() const;
+ bool isDefaultFPEnvironment() const;
// Methods for support type inquiry through isa, cast, and dyn_cast:
static bool classof(const IntrinsicInst *I);
@@ -318,6 +484,47 @@ public:
}
};
+/// This class represents min/max intrinsics.
+class MinMaxIntrinsic : public IntrinsicInst {
+public:
+ static bool classof(const IntrinsicInst *I) {
+ switch (I->getIntrinsicID()) {
+ case Intrinsic::umin:
+ case Intrinsic::umax:
+ case Intrinsic::smin:
+ case Intrinsic::smax:
+ return true;
+ default:
+ return false;
+ }
+ }
+ static bool classof(const Value *V) {
+ return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
+ }
+
+ Value *getLHS() const { return const_cast<Value *>(getArgOperand(0)); }
+ Value *getRHS() const { return const_cast<Value *>(getArgOperand(1)); }
+
+ /// Returns the comparison predicate underlying the intrinsic.
+ ICmpInst::Predicate getPredicate() const {
+ switch (getIntrinsicID()) {
+ case Intrinsic::umin:
+ return ICmpInst::Predicate::ICMP_ULT;
+ case Intrinsic::umax:
+ return ICmpInst::Predicate::ICMP_UGT;
+ case Intrinsic::smin:
+ return ICmpInst::Predicate::ICMP_SLT;
+ case Intrinsic::smax:
+ return ICmpInst::Predicate::ICMP_SGT;
+ default:
+ llvm_unreachable("Invalid intrinsic");
+ }
+ }
+
+ /// Whether the intrinsic is signed or unsigned.
+ bool isSigned() const { return ICmpInst::isSigned(getPredicate()); };
+};
+
/// This class represents an intrinsic that is based on a binary operation.
/// This includes op.with.overflow and saturating add/sub intrinsics.
class BinaryOpIntrinsic : public IntrinsicInst {
@@ -708,7 +915,8 @@ class MemCpyInst : public MemTransferInst {
public:
// Methods for support type inquiry through isa, cast, and dyn_cast:
static bool classof(const IntrinsicInst *I) {
- return I->getIntrinsicID() == Intrinsic::memcpy;
+ return I->getIntrinsicID() == Intrinsic::memcpy ||
+ I->getIntrinsicID() == Intrinsic::memcpy_inline;
}
static bool classof(const Value *V) {
return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
@@ -728,10 +936,10 @@ public:
};
/// This class wraps the llvm.memcpy.inline intrinsic.
-class MemCpyInlineInst : public MemTransferInst {
+class MemCpyInlineInst : public MemCpyInst {
public:
ConstantInt *getLength() const {
- return cast<ConstantInt>(MemTransferInst::getLength());
+ return cast<ConstantInt>(MemCpyInst::getLength());
}
// Methods for support type inquiry through isa, cast, and dyn_cast:
static bool classof(const IntrinsicInst *I) {
@@ -981,12 +1189,16 @@ public:
return cast<ConstantInt>(const_cast<Value *>(getArgOperand(0)));
}
+ ConstantInt *getIndex() const {
+ return cast<ConstantInt>(const_cast<Value *>(getArgOperand(1)));
+ }
+
ConstantInt *getAttributes() const {
return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
}
- ConstantInt *getIndex() const {
- return cast<ConstantInt>(const_cast<Value *>(getArgOperand(1)));
+ ConstantInt *getFactor() const {
+ return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
}
};
@@ -1012,6 +1224,86 @@ public:
}
};
+// Defined in Statepoint.h -- NOT a subclass of IntrinsicInst
+class GCStatepointInst;
+
+/// Common base class for representing values projected from a statepoint.
+/// Currently, the only projections available are gc.result and gc.relocate.
+class GCProjectionInst : public IntrinsicInst {
+public:
+ static bool classof(const IntrinsicInst *I) {
+ return I->getIntrinsicID() == Intrinsic::experimental_gc_relocate ||
+ I->getIntrinsicID() == Intrinsic::experimental_gc_result;
+ }
+
+ static bool classof(const Value *V) {
+ return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
+ }
+
+ /// Return true if this relocate is tied to the invoke statepoint.
+ /// This includes relocates which are on the unwinding path.
+ bool isTiedToInvoke() const {
+ const Value *Token = getArgOperand(0);
+
+ return isa<LandingPadInst>(Token) || isa<InvokeInst>(Token);
+ }
+
+ /// The statepoint with which this gc.relocate is associated.
+ const GCStatepointInst *getStatepoint() const;
+};
+
+/// Represents calls to the gc.relocate intrinsic.
+class GCRelocateInst : public GCProjectionInst {
+public:
+ static bool classof(const IntrinsicInst *I) {
+ return I->getIntrinsicID() == Intrinsic::experimental_gc_relocate;
+ }
+
+ static bool classof(const Value *V) {
+ return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
+ }
+
+ /// The index into the associate statepoint's argument list
+ /// which contains the base pointer of the pointer whose
+ /// relocation this gc.relocate describes.
+ unsigned getBasePtrIndex() const {
+ return cast<ConstantInt>(getArgOperand(1))->getZExtValue();
+ }
+
+ /// The index into the associate statepoint's argument list which
+ /// contains the pointer whose relocation this gc.relocate describes.
+ unsigned getDerivedPtrIndex() const {
+ return cast<ConstantInt>(getArgOperand(2))->getZExtValue();
+ }
+
+ Value *getBasePtr() const;
+ Value *getDerivedPtr() const;
+};
+
+/// Represents calls to the gc.result intrinsic.
+class GCResultInst : public GCProjectionInst {
+public:
+ static bool classof(const IntrinsicInst *I) {
+ return I->getIntrinsicID() == Intrinsic::experimental_gc_result;
+ }
+
+ static bool classof(const Value *V) {
+ return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
+ }
+};
+
+
+/// This represents the llvm.assume intrinsic.
+class AssumeInst : public IntrinsicInst {
+public:
+ static bool classof(const IntrinsicInst *I) {
+ return I->getIntrinsicID() == Intrinsic::assume;
+ }
+ static bool classof(const Value *V) {
+ return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
+ }
+};
+
} // end namespace llvm
#endif // LLVM_IR_INTRINSICINST_H
diff --git a/llvm/include/llvm/IR/Intrinsics.h b/llvm/include/llvm/IR/Intrinsics.h
index f9b6c098a3f2..80a2f5a8cd3e 100644
--- a/llvm/include/llvm/IR/Intrinsics.h
+++ b/llvm/include/llvm/IR/Intrinsics.h
@@ -55,12 +55,23 @@ namespace Intrinsic {
/// version of getName if overloads are required.
StringRef getName(ID id);
- /// Return the LLVM name for an intrinsic, such as "llvm.ppc.altivec.lvx".
- /// Note, this version of getName supports overloads, but is less efficient
- /// than the StringRef version of this function. If no overloads are
- /// requried, it is safe to use this version, but better to use the StringRef
- /// version.
- std::string getName(ID id, ArrayRef<Type*> Tys);
+ /// Return the LLVM name for an intrinsic, without encoded types for
+ /// overloading, such as "llvm.ssa.copy".
+ StringRef getBaseName(ID id);
+
+ /// Return the LLVM name for an intrinsic, such as "llvm.ppc.altivec.lvx" or
+ /// "llvm.ssa.copy.p0s_s.1". Note, this version of getName supports overloads.
+ /// This is less efficient than the StringRef version of this function. If no
+ /// overloads are required, it is safe to use this version, but better to use
+ /// the StringRef version. If one of the types is based on an unnamed type, a
+ /// function type will be computed. Providing FT will avoid this computation.
+ std::string getName(ID Id, ArrayRef<Type *> Tys, Module *M,
+ FunctionType *FT = nullptr);
+
+ /// Return the LLVM name for an intrinsic. This is a special version only to
+ /// be used by LLVMIntrinsicCopyOverloadedName. It only supports overloads
+ /// based on named types.
+ std::string getNameNoUnnamedTypes(ID Id, ArrayRef<Type *> Tys);
/// Return the function type for an intrinsic.
FunctionType *getType(LLVMContext &Context, ID id,
@@ -233,6 +244,8 @@ namespace Intrinsic {
// Checks if the intrinsic name matches with its signature and if not
// returns the declaration with the same signature and remangled name.
+ // An existing GlobalValue with the wanted name but with a wrong prototype
+ // or of the wrong kind will be renamed by adding ".renamed" to the name.
llvm::Optional<Function*> remangleIntrinsicFunction(Function *F);
} // End Intrinsic namespace
diff --git a/llvm/include/llvm/IR/Intrinsics.td b/llvm/include/llvm/IR/Intrinsics.td
index b2bfc6e6f9e6..28fcc13266b1 100644
--- a/llvm/include/llvm/IR/Intrinsics.td
+++ b/llvm/include/llvm/IR/Intrinsics.td
@@ -134,10 +134,14 @@ def IntrWillReturn : IntrinsicProperty<1>;
// Parallels the cold attribute on LLVM IR functions.
def IntrCold : IntrinsicProperty;
-// IntrNoduplicate - Calls to this intrinsic cannot be duplicated.
+// IntrNoDuplicate - Calls to this intrinsic cannot be duplicated.
// Parallels the noduplicate attribute on LLVM IR functions.
def IntrNoDuplicate : IntrinsicProperty;
+// IntrNoMerge - Calls to this intrinsic cannot be merged.
+// Parallels the nomerge attribute on LLVM IR functions.
+def IntrNoMerge : IntrinsicProperty;
+
// IntrConvergent - Calls to this intrinsic are convergent and may not be made
// control-dependent on any additional values.
// Parallels the convergent attribute on LLVM IR functions.
@@ -212,7 +216,7 @@ class LLVMVectorOfAnyPointersToElt<int num> : LLVMMatchType<num>;
class LLVMVectorElementType<int num> : LLVMMatchType<num>;
// Match the type of another intrinsic parameter that is expected to be a
-// vector type, but change the element count to be half as many
+// vector type, but change the element count to be half as many.
class LLVMHalfElementsVectorType<int num> : LLVMMatchType<num>;
// Match the type of another intrinsic parameter that is expected to be a
@@ -360,7 +364,7 @@ class Intrinsic<list<LLVMType> ret_types,
bit isTarget = false;
}
-// Intrinisc with default attributes (disable_default_attributes = false).
+// Intrinsic with default attributes (disable_default_attributes = false).
class DefaultAttrsIntrinsic<list<LLVMType> ret_types,
list<LLVMType> param_types = [],
list<IntrinsicProperty> intr_properties = [],
@@ -446,6 +450,9 @@ def int_objc_storeWeak : Intrinsic<[llvm_ptr_ty],
llvm_ptr_ty]>;
def int_objc_clang_arc_use : Intrinsic<[],
[llvm_vararg_ty]>;
+def int_objc_clang_arc_noop_use : DefaultAttrsIntrinsic<[],
+ [llvm_vararg_ty],
+ [IntrInaccessibleMemOnly]>;
def int_objc_unsafeClaimAutoreleasedReturnValue : Intrinsic<[llvm_ptr_ty],
[llvm_ptr_ty]>;
def int_objc_retainedObject : Intrinsic<[llvm_ptr_ty],
@@ -472,7 +479,12 @@ def int_objc_arc_annotation_bottomup_bbstart : Intrinsic<[],
def int_objc_arc_annotation_bottomup_bbend : Intrinsic<[],
[llvm_ptrptr_ty,
llvm_ptrptr_ty]>;
+//===--------------- Swift asynchronous context intrinsics ----------------===//
+// Returns the location of the Swift asynchronous context (usually stored just
+// before the frame pointer), and triggers the creation of a null context if it
+// would otherwise be unneeded.
+def int_swift_async_context_addr : Intrinsic<[llvm_ptrptr_ty], [], [IntrNoMem]>;
//===--------------------- Code Generator Intrinsics ----------------------===//
//
@@ -509,6 +521,16 @@ def int_eh_recoverfp : DefaultAttrsIntrinsic<[llvm_ptr_ty],
[llvm_ptr_ty, llvm_ptr_ty],
[IntrNoMem]>;
+// To mark the beginning/end of a try-scope for Windows SEH -EHa
+// calls/invokes to these intrinsics are placed to model control flows
+// caused by HW exceptions under option -EHa.
+// calls/invokes to these intrinsics will be discarded during a codegen pass
+// after EH tables are generated
+def int_seh_try_begin : Intrinsic<[], [], [IntrWriteMem, IntrWillReturn]>;
+def int_seh_try_end : Intrinsic<[], [], [IntrWriteMem, IntrWillReturn]>;
+def int_seh_scope_begin : Intrinsic<[], [], [IntrNoMem]>;
+def int_seh_scope_end : Intrinsic<[], [], [IntrNoMem]>;
+
// Note: we treat stacksave/stackrestore as writemem because we don't otherwise
// model their dependencies on allocas.
def int_stacksave : DefaultAttrsIntrinsic<[llvm_ptr_ty]>,
@@ -534,10 +556,10 @@ def int_pcmarker : DefaultAttrsIntrinsic<[], [llvm_i32_ty]>;
def int_readcyclecounter : DefaultAttrsIntrinsic<[llvm_i64_ty]>;
-// The assume intrinsic is marked as arbitrarily writing so that proper
-// control dependencies will be maintained.
-def int_assume : DefaultAttrsIntrinsic<[], [llvm_i1_ty], [IntrWillReturn,
- NoUndef<ArgIndex<0>>]>;
+// The assume intrinsic is marked InaccessibleMemOnly so that proper control
+// dependencies will be maintained.
+def int_assume : DefaultAttrsIntrinsic<
+ [], [llvm_i1_ty], [IntrInaccessibleMemOnly, NoUndef<ArgIndex<0>>]>;
// 'llvm.experimental.noalias.scope.decl' intrinsic: Inserted at the location of
// noalias scope declaration. Makes it possible to identify that a noalias scope
@@ -578,10 +600,10 @@ def int_call_preallocated_teardown : DefaultAttrsIntrinsic<[], [llvm_token_ty]>;
//===------------------- Standard C Library Intrinsics --------------------===//
//
-def int_memcpy : DefaultAttrsIntrinsic<[],
+def int_memcpy : Intrinsic<[],
[llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty,
llvm_i1_ty],
- [IntrArgMemOnly, IntrWillReturn,
+ [IntrArgMemOnly, IntrWillReturn, IntrNoFree,
NoCapture<ArgIndex<0>>, NoCapture<ArgIndex<1>>,
NoAlias<ArgIndex<0>>, NoAlias<ArgIndex<1>>,
WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>,
@@ -592,25 +614,26 @@ def int_memcpy : DefaultAttrsIntrinsic<[],
// external function.
// The third argument (specifying the size) must be a constant.
def int_memcpy_inline
- : DefaultAttrsIntrinsic<[],
+ : Intrinsic<[],
[llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty, llvm_i1_ty],
- [IntrArgMemOnly, IntrWillReturn,
+ [IntrArgMemOnly, IntrWillReturn, IntrNoFree,
NoCapture<ArgIndex<0>>, NoCapture<ArgIndex<1>>,
NoAlias<ArgIndex<0>>, NoAlias<ArgIndex<1>>,
WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>,
ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
-def int_memmove : DefaultAttrsIntrinsic<[],
+def int_memmove : Intrinsic<[],
[llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty,
llvm_i1_ty],
- [IntrArgMemOnly, IntrWillReturn,
+ [IntrArgMemOnly, IntrWillReturn, IntrNoFree,
NoCapture<ArgIndex<0>>, NoCapture<ArgIndex<1>>,
WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>,
ImmArg<ArgIndex<3>>]>;
-def int_memset : DefaultAttrsIntrinsic<[],
+def int_memset : Intrinsic<[],
[llvm_anyptr_ty, llvm_i8_ty, llvm_anyint_ty,
llvm_i1_ty],
[IntrWriteMem, IntrArgMemOnly, IntrWillReturn,
+ IntrNoFree,
NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>,
ImmArg<ArgIndex<3>>]>;
@@ -629,7 +652,7 @@ let IntrProperties = [IntrNoMem, IntrSpeculatable, IntrWillReturn] in {
// rounding mode. LLVM purposely does not model changes to the FP
// environment so they can be treated as readnone.
def int_sqrt : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
- def int_powi : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, llvm_i32_ty]>;
+ def int_powi : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>, llvm_anyint_ty]>;
def int_sin : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_cos : DefaultAttrsIntrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>]>;
def int_pow : DefaultAttrsIntrinsic<[llvm_anyfloat_ty],
@@ -689,6 +712,7 @@ def int_objectsize : DefaultAttrsIntrinsic<[llvm_anyint_ty],
let IntrProperties = [IntrInaccessibleMemOnly, IntrWillReturn] in {
def int_flt_rounds : DefaultAttrsIntrinsic<[llvm_i32_ty], []>;
+ def int_set_rounding : DefaultAttrsIntrinsic<[], [llvm_i32_ty]>;
}
//===--------------- Constrained Floating Point Intrinsics ----------------===//
@@ -964,18 +988,19 @@ def int_eh_sjlj_setup_dispatch : Intrinsic<[], []>;
//===---------------- Generic Variable Attribute Intrinsics----------------===//
//
-def int_var_annotation : DefaultAttrsIntrinsic<[],
- [llvm_ptr_ty, llvm_ptr_ty,
- llvm_ptr_ty, llvm_i32_ty, llvm_ptr_ty],
- [IntrWillReturn], "llvm.var.annotation">;
-def int_ptr_annotation : DefaultAttrsIntrinsic<[LLVMAnyPointerType<llvm_anyint_ty>],
- [LLVMMatchType<0>, llvm_ptr_ty, llvm_ptr_ty,
- llvm_i32_ty, llvm_ptr_ty],
- [IntrWillReturn], "llvm.ptr.annotation">;
-def int_annotation : DefaultAttrsIntrinsic<[llvm_anyint_ty],
- [LLVMMatchType<0>, llvm_ptr_ty,
- llvm_ptr_ty, llvm_i32_ty],
- [IntrWillReturn], "llvm.annotation">;
+def int_var_annotation : DefaultAttrsIntrinsic<
+ [], [llvm_ptr_ty, llvm_ptr_ty, llvm_ptr_ty, llvm_i32_ty, llvm_ptr_ty],
+ [IntrInaccessibleMemOnly], "llvm.var.annotation">;
+
+def int_ptr_annotation : DefaultAttrsIntrinsic<
+ [LLVMAnyPointerType<llvm_anyint_ty>],
+ [LLVMMatchType<0>, llvm_ptr_ty, llvm_ptr_ty, llvm_i32_ty, llvm_ptr_ty],
+ [IntrInaccessibleMemOnly], "llvm.ptr.annotation">;
+
+def int_annotation : DefaultAttrsIntrinsic<
+ [llvm_anyint_ty],
+ [LLVMMatchType<0>, llvm_ptr_ty, llvm_ptr_ty, llvm_i32_ty],
+ [IntrInaccessibleMemOnly], "llvm.annotation">;
// Annotates the current program point with metadata strings which are emitted
// as CodeView debug info records. This is expensive, as it disables inlining
@@ -1174,13 +1199,21 @@ def int_experimental_gc_statepoint : Intrinsic<[llvm_token_ty],
ImmArg<ArgIndex<4>>]>;
def int_experimental_gc_result : Intrinsic<[llvm_any_ty], [llvm_token_ty],
- [IntrReadMem]>;
+ [IntrNoMem]>;
def int_experimental_gc_relocate : Intrinsic<[llvm_any_ty],
[llvm_token_ty, llvm_i32_ty,
llvm_i32_ty],
- [IntrReadMem, ImmArg<ArgIndex<1>>,
+ [IntrNoMem, ImmArg<ArgIndex<1>>,
ImmArg<ArgIndex<2>>]>;
+def int_experimental_gc_get_pointer_base : Intrinsic<[llvm_anyptr_ty],
+ [llvm_anyptr_ty], [IntrNoMem, IntrWillReturn,
+ ReadNone<ArgIndex<0>>, NoCapture<ArgIndex<0>>]>;
+
+def int_experimental_gc_get_pointer_offset : Intrinsic<[llvm_i64_ty],
+ [llvm_anyptr_ty], [IntrNoMem, IntrWillReturn,
+ ReadNone<ArgIndex<0>>, NoCapture<ArgIndex<0>>]>;
+
//===------------------------ Coroutine Intrinsics ---------------===//
// These are documented in docs/Coroutines.rst
@@ -1212,9 +1245,10 @@ def int_coro_async_context_dealloc : Intrinsic<[],
def int_coro_async_resume : Intrinsic<[llvm_ptr_ty],
[],
[]>;
-def int_coro_suspend_async : Intrinsic<[llvm_ptr_ty, llvm_ptr_ty, llvm_ptr_ty],
- [llvm_ptr_ty, llvm_ptr_ty, llvm_vararg_ty],
- []>;
+def int_coro_async_size_replace : Intrinsic<[], [llvm_ptr_ty, llvm_ptr_ty], []>;
+def int_coro_suspend_async
+ : Intrinsic<[llvm_any_ty],
+ [llvm_i32_ty, llvm_ptr_ty, llvm_ptr_ty, llvm_vararg_ty], []>;
def int_coro_prepare_async : Intrinsic<[llvm_ptr_ty], [llvm_ptr_ty],
[IntrNoMem]>;
def int_coro_begin : Intrinsic<[llvm_ptr_ty], [llvm_token_ty, llvm_ptr_ty],
@@ -1298,9 +1332,12 @@ def int_sideeffect : DefaultAttrsIntrinsic<[], [], [IntrInaccessibleMemOnly, Int
// Like the sideeffect intrinsic defined above, this intrinsic is treated by the
// optimizer as having opaque side effects so that it won't be get rid of or moved
// out of the block it probes.
-def int_pseudoprobe : Intrinsic<[], [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty],
+def int_pseudoprobe : Intrinsic<[], [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty, llvm_i64_ty],
[IntrInaccessibleMemOnly, IntrWillReturn]>;
+// Arithmetic fence intrinsic.
+def int_arithmetic_fence : Intrinsic<[llvm_anyfloat_ty], [LLVMMatchType<0>], [IntrNoMem]>;
+
// Intrinsics to support half precision floating point format
let IntrProperties = [IntrNoMem, IntrWillReturn] in {
def int_convert_to_fp16 : DefaultAttrsIntrinsic<[llvm_i16_ty], [llvm_anyfloat_ty]>;
@@ -1327,7 +1364,36 @@ def int_is_constant : DefaultAttrsIntrinsic<[llvm_i1_ty], [llvm_any_ty],
def int_ptrmask: DefaultAttrsIntrinsic<[llvm_anyptr_ty], [LLVMMatchType<0>, llvm_anyint_ty],
[IntrNoMem, IntrSpeculatable, IntrWillReturn]>;
+def int_experimental_stepvector : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [], [IntrNoMem]>;
+
//===---------------- Vector Predication Intrinsics --------------===//
+// Memory Intrinsics
+def int_vp_store : DefaultAttrsIntrinsic<[],
+ [ llvm_anyvector_ty,
+ LLVMAnyPointerType<LLVMMatchType<0>>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty],
+ [ NoCapture<ArgIndex<1>>, IntrNoSync, IntrWriteMem, IntrArgMemOnly, IntrWillReturn ]>;
+
+def int_vp_load : DefaultAttrsIntrinsic<[ llvm_anyvector_ty],
+ [ LLVMAnyPointerType<LLVMMatchType<0>>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty],
+ [ NoCapture<ArgIndex<0>>, IntrNoSync, IntrReadMem, IntrWillReturn, IntrArgMemOnly ]>;
+
+def int_vp_gather: DefaultAttrsIntrinsic<[ llvm_anyvector_ty],
+ [ LLVMVectorOfAnyPointersToElt<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty],
+ [ IntrReadMem, IntrNoSync, IntrWillReturn, IntrArgMemOnly ]>;
+
+def int_vp_scatter: DefaultAttrsIntrinsic<[],
+ [ llvm_anyvector_ty,
+ LLVMVectorOfAnyPointersToElt<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty],
+ [ IntrArgMemOnly, IntrNoSync, IntrWillReturn ]>; // TODO allow IntrNoCapture for vectors of pointers
// Speculatable Binary operators
let IntrProperties = [IntrSpeculatable, IntrNoMem, IntrNoSync, IntrWillReturn] in {
@@ -1402,6 +1468,36 @@ let IntrProperties = [IntrNoMem, IntrNoSync, IntrWillReturn] in {
llvm_i32_ty]>;
}
+// Floating-point arithmetic.
+let IntrProperties =
+ [IntrSpeculatable, IntrNoMem, IntrNoSync, IntrWillReturn] in {
+ def int_vp_fadd : DefaultAttrsIntrinsic<[ llvm_anyvector_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty]>;
+ def int_vp_fsub : DefaultAttrsIntrinsic<[ llvm_anyvector_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty]>;
+ def int_vp_fmul : DefaultAttrsIntrinsic<[ llvm_anyvector_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty]>;
+ def int_vp_fdiv : DefaultAttrsIntrinsic<[ llvm_anyvector_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty]>;
+ def int_vp_frem : DefaultAttrsIntrinsic<[ llvm_anyvector_ty ],
+ [ LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_i32_ty]>;
+}
+
def int_get_active_lane_mask:
DefaultAttrsIntrinsic<[llvm_anyvector_ty],
[llvm_anyint_ty, LLVMMatchType<1>],
@@ -1448,7 +1544,7 @@ def int_masked_compressstore:
// Test whether a pointer is associated with a type metadata identifier.
def int_type_test : DefaultAttrsIntrinsic<[llvm_i1_ty], [llvm_ptr_ty, llvm_metadata_ty],
- [IntrNoMem, IntrWillReturn]>;
+ [IntrNoMem, IntrWillReturn, IntrSpeculatable]>;
// Safely loads a function pointer from a virtual table pointer using type metadata.
def int_type_checked_load : DefaultAttrsIntrinsic<[llvm_ptr_ty, llvm_i1_ty],
@@ -1490,22 +1586,24 @@ def int_xray_typedevent : Intrinsic<[], [llvm_i16_ty, llvm_ptr_ty, llvm_i32_ty],
def int_memcpy_element_unordered_atomic
: Intrinsic<[],
[llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty, llvm_i32_ty],
- [IntrArgMemOnly, IntrWillReturn, NoCapture<ArgIndex<0>>,
- NoCapture<ArgIndex<1>>, WriteOnly<ArgIndex<0>>,
- ReadOnly<ArgIndex<1>>, ImmArg<ArgIndex<3>>]>;
+ [IntrArgMemOnly, IntrWillReturn, IntrNoSync,
+ NoCapture<ArgIndex<0>>, NoCapture<ArgIndex<1>>,
+ WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>,
+ ImmArg<ArgIndex<3>>]>;
// @llvm.memmove.element.unordered.atomic.*(dest, src, length, elementsize)
def int_memmove_element_unordered_atomic
: Intrinsic<[],
[llvm_anyptr_ty, llvm_anyptr_ty, llvm_anyint_ty, llvm_i32_ty],
- [IntrArgMemOnly, IntrWillReturn, NoCapture<ArgIndex<0>>,
- NoCapture<ArgIndex<1>>, WriteOnly<ArgIndex<0>>,
- ReadOnly<ArgIndex<1>>, ImmArg<ArgIndex<3>>]>;
+ [IntrArgMemOnly, IntrWillReturn, IntrNoSync,
+ NoCapture<ArgIndex<0>>, NoCapture<ArgIndex<1>>,
+ WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>,
+ ImmArg<ArgIndex<3>>]>;
// @llvm.memset.element.unordered.atomic.*(dest, value, length, elementsize)
def int_memset_element_unordered_atomic
: Intrinsic<[], [llvm_anyptr_ty, llvm_i8_ty, llvm_anyint_ty, llvm_i32_ty],
- [IntrWriteMem, IntrArgMemOnly, IntrWillReturn,
+ [IntrWriteMem, IntrArgMemOnly, IntrWillReturn, IntrNoSync,
NoCapture<ArgIndex<0>>, WriteOnly<ArgIndex<0>>,
ImmArg<ArgIndex<3>>]>;
@@ -1592,6 +1690,12 @@ def int_start_loop_iterations :
def int_test_set_loop_iterations :
DefaultAttrsIntrinsic<[llvm_i1_ty], [llvm_anyint_ty], [IntrNoDuplicate]>;
+// Same as the above, but produces an extra value (the same as the input
+// operand) to be fed into the loop.
+def int_test_start_loop_iterations :
+ DefaultAttrsIntrinsic<[llvm_anyint_ty, llvm_i1_ty], [LLVMMatchType<0>],
+ [IntrNoDuplicate]>;
+
// Decrement loop counter by the given argument. Return false if the loop
// should exit.
def int_loop_decrement :
@@ -1632,6 +1736,12 @@ def int_preserve_struct_access_index : DefaultAttrsIntrinsic<[llvm_anyptr_ty],
ImmArg<ArgIndex<1>>,
ImmArg<ArgIndex<2>>]>;
+//===------------ Intrinsics to perform common vector shuffles ------------===//
+
+def int_experimental_vector_reverse : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>],
+ [IntrNoMem]>;
+
//===---------- Intrinsics to query properties of scalable vectors --------===//
def int_vscale : DefaultAttrsIntrinsic<[llvm_anyint_ty], [], [IntrNoMem]>;
@@ -1644,6 +1754,13 @@ def int_experimental_vector_extract : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
[llvm_anyvector_ty, llvm_i64_ty],
[IntrNoMem, ImmArg<ArgIndex<1>>]>;
+//===---------- Named shufflevector intrinsics ------===//
+def int_experimental_vector_splice : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>,
+ LLVMMatchType<0>,
+ llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
//===----------------------------------------------------------------------===//
//===----------------------------------------------------------------------===//
diff --git a/llvm/include/llvm/IR/IntrinsicsAArch64.td b/llvm/include/llvm/IR/IntrinsicsAArch64.td
index da3085171b19..87e0f83f85b7 100644
--- a/llvm/include/llvm/IR/IntrinsicsAArch64.td
+++ b/llvm/include/llvm/IR/IntrinsicsAArch64.td
@@ -44,6 +44,19 @@ def int_aarch64_fjcvtzs : DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_double_ty],
def int_aarch64_cls: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i32_ty], [IntrNoMem]>;
def int_aarch64_cls64: DefaultAttrsIntrinsic<[llvm_i32_ty], [llvm_i64_ty], [IntrNoMem]>;
+def int_aarch64_frint32z
+ : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
+ [ IntrNoMem ]>;
+def int_aarch64_frint64z
+ : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
+ [ IntrNoMem ]>;
+def int_aarch64_frint32x
+ : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
+ [ IntrNoMem ]>;
+def int_aarch64_frint64x
+ : DefaultAttrsIntrinsic<[ llvm_anyfloat_ty ], [ LLVMMatchType<0> ],
+ [ IntrNoMem ]>;
+
//===----------------------------------------------------------------------===//
// HINT
@@ -431,9 +444,6 @@ let TargetPrefix = "aarch64", IntrProperties = [IntrNoMem] in {
def int_aarch64_neon_ursqrte : AdvSIMD_1VectorArg_Intrinsic;
def int_aarch64_neon_frsqrte : AdvSIMD_1FloatArg_Intrinsic;
- // Vector Bitwise Reverse
- def int_aarch64_neon_rbit : AdvSIMD_1VectorArg_Intrinsic;
-
// Vector Conversions Between Half-Precision and Single-Precision.
def int_aarch64_neon_vcvtfp2hf
: DefaultAttrsIntrinsic<[llvm_v4i16_ty], [llvm_v4f32_ty], [IntrNoMem]>;
@@ -458,9 +468,11 @@ let TargetPrefix = "aarch64", IntrProperties = [IntrNoMem] in {
def int_aarch64_neon_fcvtzs : AdvSIMD_FPToIntRounding_Intrinsic;
def int_aarch64_neon_fcvtzu : AdvSIMD_FPToIntRounding_Intrinsic;
- // Vector FP Rounding: only ties to even is unrepresented by a normal
- // intrinsic.
- def int_aarch64_neon_frintn : AdvSIMD_1FloatArg_Intrinsic;
+ // v8.5-A Vector FP Rounding
+ def int_aarch64_neon_frint32x : AdvSIMD_1FloatArg_Intrinsic;
+ def int_aarch64_neon_frint32z : AdvSIMD_1FloatArg_Intrinsic;
+ def int_aarch64_neon_frint64x : AdvSIMD_1FloatArg_Intrinsic;
+ def int_aarch64_neon_frint64z : AdvSIMD_1FloatArg_Intrinsic;
// Scalar FP->Int conversions
@@ -475,7 +487,7 @@ let TargetPrefix = "aarch64", IntrProperties = [IntrNoMem] in {
def int_aarch64_neon_udot : AdvSIMD_Dot_Intrinsic;
def int_aarch64_neon_sdot : AdvSIMD_Dot_Intrinsic;
-// v8.6-A Matrix Multiply Intrinsics
+ // v8.6-A Matrix Multiply Intrinsics
def int_aarch64_neon_ummla : AdvSIMD_MatMul_Intrinsic;
def int_aarch64_neon_smmla : AdvSIMD_MatMul_Intrinsic;
def int_aarch64_neon_usmmla : AdvSIMD_MatMul_Intrinsic;
@@ -675,10 +687,19 @@ def int_aarch64_neon_tbx4 : AdvSIMD_Tbx4_Intrinsic;
let TargetPrefix = "aarch64" in {
class FPCR_Get_Intrinsic
: DefaultAttrsIntrinsic<[llvm_i64_ty], [], [IntrNoMem, IntrHasSideEffects]>;
+ class FPCR_Set_Intrinsic
+ : DefaultAttrsIntrinsic<[], [llvm_i64_ty], [IntrNoMem, IntrHasSideEffects]>;
+ class RNDR_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_i64_ty, llvm_i1_ty], [], [IntrNoMem, IntrHasSideEffects]>;
}
// FPCR
def int_aarch64_get_fpcr : FPCR_Get_Intrinsic;
+def int_aarch64_set_fpcr : FPCR_Set_Intrinsic;
+
+// Armv8.5-A Random number generation intrinsics
+def int_aarch64_rndr : RNDR_Intrinsic;
+def int_aarch64_rndrrs : RNDR_Intrinsic;
let TargetPrefix = "aarch64" in {
class Crypto_AES_DataKey_Intrinsic
@@ -711,6 +732,42 @@ let TargetPrefix = "aarch64" in {
class Crypto_SHA_8Hash4Schedule_Intrinsic
: DefaultAttrsIntrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
[IntrNoMem]>;
+
+ // SHA512 intrinsic taking 2 arguments
+ class Crypto_SHA512_2Arg_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty], [IntrNoMem]>;
+
+  // SHA512 intrinsic taking 3 arguments
+ class Crypto_SHA512_3Arg_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+ // SHA3 Intrinsics taking 3 arguments
+ class Crypto_SHA3_3Arg_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem]>;
+
+ // SHA3 Intrinsic taking 2 arguments
+ class Crypto_SHA3_2Arg_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty],
+ [IntrNoMem]>;
+
+  // SHA3 intrinsic taking 2 arguments and 1 immediate
+ class Crypto_SHA3_2ArgImm_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_v2i64_ty], [llvm_v2i64_ty, llvm_v2i64_ty, llvm_i64_ty],
+ [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+ class Crypto_SM3_3Vector_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty],
+ [IntrNoMem]>;
+
+ class Crypto_SM3_3VectorIndexed_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty, llvm_v4i32_ty, llvm_i64_ty],
+ [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+ class Crypto_SM4_2Vector_Intrinsic
+ : Intrinsic<[llvm_v4i32_ty], [llvm_v4i32_ty, llvm_v4i32_ty], [IntrNoMem]>;
}
// AES
@@ -734,6 +791,31 @@ def int_aarch64_crypto_sha256h2 : Crypto_SHA_8Hash4Schedule_Intrinsic;
def int_aarch64_crypto_sha256su0 : Crypto_SHA_8Schedule_Intrinsic;
def int_aarch64_crypto_sha256su1 : Crypto_SHA_12Schedule_Intrinsic;
+// SHA3
+def int_aarch64_crypto_eor3s : Crypto_SHA3_3Arg_Intrinsic;
+def int_aarch64_crypto_eor3u : Crypto_SHA3_3Arg_Intrinsic;
+def int_aarch64_crypto_bcaxs : Crypto_SHA3_3Arg_Intrinsic;
+def int_aarch64_crypto_bcaxu : Crypto_SHA3_3Arg_Intrinsic;
+def int_aarch64_crypto_rax1 : Crypto_SHA3_2Arg_Intrinsic;
+def int_aarch64_crypto_xar : Crypto_SHA3_2ArgImm_Intrinsic;
+
+// SHA512
+def int_aarch64_crypto_sha512h : Crypto_SHA512_3Arg_Intrinsic;
+def int_aarch64_crypto_sha512h2 : Crypto_SHA512_3Arg_Intrinsic;
+def int_aarch64_crypto_sha512su0 : Crypto_SHA512_2Arg_Intrinsic;
+def int_aarch64_crypto_sha512su1 : Crypto_SHA512_3Arg_Intrinsic;
+
+// SM3 & SM4
+def int_aarch64_crypto_sm3partw1 : Crypto_SM3_3Vector_Intrinsic;
+def int_aarch64_crypto_sm3partw2 : Crypto_SM3_3Vector_Intrinsic;
+def int_aarch64_crypto_sm3ss1 : Crypto_SM3_3Vector_Intrinsic;
+def int_aarch64_crypto_sm3tt1a : Crypto_SM3_3VectorIndexed_Intrinsic;
+def int_aarch64_crypto_sm3tt1b : Crypto_SM3_3VectorIndexed_Intrinsic;
+def int_aarch64_crypto_sm3tt2a : Crypto_SM3_3VectorIndexed_Intrinsic;
+def int_aarch64_crypto_sm3tt2b : Crypto_SM3_3VectorIndexed_Intrinsic;
+def int_aarch64_crypto_sm4e : Crypto_SM4_2Vector_Intrinsic;
+def int_aarch64_crypto_sm4ekey : Crypto_SM4_2Vector_Intrinsic;
+
//===----------------------------------------------------------------------===//
// CRC32
@@ -880,6 +962,12 @@ let TargetPrefix = "aarch64" in { // All intrinsics start with "llvm.aarch64.".
LLVMPointerToElt<0>],
[IntrReadMem, IntrArgMemOnly]>;
+ class AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ LLVMPointerToElt<0>],
+ [IntrInaccessibleMemOrArgMemOnly]>;
+
class AdvSIMD_1Vec_PredStore_Intrinsic
: DefaultAttrsIntrinsic<[],
[llvm_anyvector_ty,
@@ -1312,6 +1400,15 @@ class AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic
],
[IntrReadMem, IntrArgMemOnly]>;
+class AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ LLVMPointerToElt<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i64_ty>
+ ],
+ [IntrInaccessibleMemOrArgMemOnly]>;
+
class AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic
: DefaultAttrsIntrinsic<[llvm_anyvector_ty],
[
@@ -1321,6 +1418,15 @@ class AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic
],
[IntrReadMem, IntrArgMemOnly]>;
+class AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ LLVMPointerToElt<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i32_ty>
+ ],
+ [IntrInaccessibleMemOrArgMemOnly]>;
+
class AdvSIMD_GatherLoad_VS_Intrinsic
: DefaultAttrsIntrinsic<[llvm_anyvector_ty],
[
@@ -1330,6 +1436,15 @@ class AdvSIMD_GatherLoad_VS_Intrinsic
],
[IntrReadMem]>;
+class AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic
+ : DefaultAttrsIntrinsic<[llvm_anyvector_ty],
+ [
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
+ llvm_anyvector_ty,
+ llvm_i64_ty
+ ],
+ [IntrInaccessibleMemOrArgMemOnly]>;
+
class AdvSIMD_ScatterStore_SV_64b_Offsets_Intrinsic
: DefaultAttrsIntrinsic<[],
[
@@ -1421,8 +1536,8 @@ def int_aarch64_sve_ld3 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
def int_aarch64_sve_ld4 : AdvSIMD_ManyVec_PredLoad_Intrinsic;
def int_aarch64_sve_ldnt1 : AdvSIMD_1Vec_PredLoad_Intrinsic;
-def int_aarch64_sve_ldnf1 : AdvSIMD_1Vec_PredLoad_Intrinsic;
-def int_aarch64_sve_ldff1 : AdvSIMD_1Vec_PredLoad_Intrinsic;
+def int_aarch64_sve_ldnf1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;
+def int_aarch64_sve_ldff1 : AdvSIMD_1Vec_PredLoad_WriteFFR_Intrinsic;
def int_aarch64_sve_ld1rq : AdvSIMD_1Vec_PredLoad_Intrinsic;
def int_aarch64_sve_ld1ro : AdvSIMD_1Vec_PredLoad_Intrinsic;
@@ -1479,7 +1594,6 @@ def int_aarch64_sve_prfd_gather_scalar_offset : SVE_gather_prf_VS;
def int_aarch64_sve_dup : AdvSIMD_SVE_DUP_Intrinsic;
def int_aarch64_sve_dup_x : AdvSIMD_SVE_DUP_Unpred_Intrinsic;
-
def int_aarch64_sve_index : AdvSIMD_SVE_Index_Intrinsic;
//
@@ -1607,10 +1721,10 @@ def int_aarch64_sve_cntp : AdvSIMD_SVE_CNTP_Intrinsic;
// FFR manipulation
//
-def int_aarch64_sve_rdffr : GCCBuiltin<"__builtin_sve_svrdffr">, DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], []>;
-def int_aarch64_sve_rdffr_z : GCCBuiltin<"__builtin_sve_svrdffr_z">, DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [llvm_nxv16i1_ty]>;
-def int_aarch64_sve_setffr : GCCBuiltin<"__builtin_sve_svsetffr">, DefaultAttrsIntrinsic<[], []>;
-def int_aarch64_sve_wrffr : GCCBuiltin<"__builtin_sve_svwrffr">, DefaultAttrsIntrinsic<[], [llvm_nxv16i1_ty]>;
+def int_aarch64_sve_rdffr : GCCBuiltin<"__builtin_sve_svrdffr">, DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [], [IntrReadMem, IntrInaccessibleMemOnly]>;
+def int_aarch64_sve_rdffr_z : GCCBuiltin<"__builtin_sve_svrdffr_z">, DefaultAttrsIntrinsic<[llvm_nxv16i1_ty], [llvm_nxv16i1_ty], [IntrReadMem, IntrInaccessibleMemOnly]>;
+def int_aarch64_sve_setffr : GCCBuiltin<"__builtin_sve_svsetffr">, DefaultAttrsIntrinsic<[], [], [IntrWriteMem, IntrInaccessibleMemOnly]>;
+def int_aarch64_sve_wrffr : GCCBuiltin<"__builtin_sve_svwrffr">, DefaultAttrsIntrinsic<[], [llvm_nxv16i1_ty], [IntrWriteMem, IntrInaccessibleMemOnly]>;
//
// Saturating scalar arithmetic
@@ -1961,24 +2075,24 @@ def int_aarch64_sve_ld1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_Intrinsic;
//
// 64 bit unscaled offsets
-def int_aarch64_sve_ldff1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;
+def int_aarch64_sve_ldff1_gather : AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic;
// 64 bit scaled offsets
-def int_aarch64_sve_ldff1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_Intrinsic;
+def int_aarch64_sve_ldff1_gather_index : AdvSIMD_GatherLoad_SV_64b_Offsets_WriteFFR_Intrinsic;
// 32 bit unscaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
-def int_aarch64_sve_ldff1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
-def int_aarch64_sve_ldff1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
+def int_aarch64_sve_ldff1_gather_sxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
+def int_aarch64_sve_ldff1_gather_uxtw : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
// 32 bit scaled offsets, sign (sxtw) or zero (uxtw) extended to 64 bits
-def int_aarch64_sve_ldff1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
-def int_aarch64_sve_ldff1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_Intrinsic;
+def int_aarch64_sve_ldff1_gather_sxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
+def int_aarch64_sve_ldff1_gather_uxtw_index : AdvSIMD_GatherLoad_SV_32b_Offsets_WriteFFR_Intrinsic;
//
// First-faulting gather loads: vector base + scalar offset
//
-def int_aarch64_sve_ldff1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_Intrinsic;
+def int_aarch64_sve_ldff1_gather_scalar_offset : AdvSIMD_GatherLoad_VS_WriteFFR_Intrinsic;
//
diff --git a/llvm/include/llvm/IR/IntrinsicsAMDGPU.td b/llvm/include/llvm/IR/IntrinsicsAMDGPU.td
index ac2291f9d43b..46a7aeb39c9a 100644
--- a/llvm/include/llvm/IR/IntrinsicsAMDGPU.td
+++ b/llvm/include/llvm/IR/IntrinsicsAMDGPU.td
@@ -508,12 +508,14 @@ class arglistconcat<list<list<AMDGPUArg>> arglists, int shift = 0> {
// Represent texture/image types / dimensionality.
class AMDGPUDimProps<bits<3> enc, string name, string asmsuffix,
- list<string> coord_names, list<string> slice_names> {
+ list<string> coord_names, list<string> slice_names,
+ bit msaa = 0> {
AMDGPUDimProps Dim = !cast<AMDGPUDimProps>(NAME);
string Name = name; // e.g. "2darraymsaa"
string AsmSuffix = asmsuffix; // e.g. 2D_MSAA_ARRAY (used in assembly strings)
bits<3> Encoding = enc;
bit DA = 0; // DA bit in MIMG encoding
+ bit MSAA = msaa;
list<AMDGPUArg> CoordSliceArgs =
makeArgList<!listconcat(coord_names, slice_names), llvm_anyfloat_ty>.ret;
@@ -536,9 +538,9 @@ let DA = 1 in {
def AMDGPUDim1DArray : AMDGPUDimProps<0x4, "1darray", "1D_ARRAY", ["s"], ["slice"]>;
def AMDGPUDim2DArray : AMDGPUDimProps<0x5, "2darray", "2D_ARRAY", ["s", "t"], ["slice"]>;
}
-def AMDGPUDim2DMsaa : AMDGPUDimProps<0x6, "2dmsaa", "2D_MSAA", ["s", "t"], ["fragid"]>;
+def AMDGPUDim2DMsaa : AMDGPUDimProps<0x6, "2dmsaa", "2D_MSAA", ["s", "t"], ["fragid"], 1>;
let DA = 1 in {
- def AMDGPUDim2DArrayMsaa : AMDGPUDimProps<0x7, "2darraymsaa", "2D_MSAA_ARRAY", ["s", "t"], ["slice", "fragid"]>;
+ def AMDGPUDim2DArrayMsaa : AMDGPUDimProps<0x7, "2darraymsaa", "2D_MSAA_ARRAY", ["s", "t"], ["slice", "fragid"], 1>;
}
def AMDGPUDims {
@@ -798,10 +800,15 @@ defset list<AMDGPUImageDimIntrinsic> AMDGPUImageDimIntrinsics = {
"STORE_MIP", [], [AMDGPUArg<llvm_anyfloat_ty, "vdata">],
[IntrWriteMem, IntrWillReturn], [SDNPMemOperand], 1>;
- defm int_amdgcn_image_msaa_load
- : AMDGPUImageDimIntrinsicsAll<"MSAA_LOAD", [llvm_any_ty], [], [IntrReadMem],
- [SDNPMemOperand]>,
- AMDGPUImageDMaskIntrinsic;
+ //////////////////////////////////////////////////////////////////////////
+ // MSAA intrinsics
+ //////////////////////////////////////////////////////////////////////////
+ foreach dim = AMDGPUDims.Msaa in {
+ def int_amdgcn_image_msaa_load_x # _ # dim.Name:
+ AMDGPUImageDimIntrinsic<
+ AMDGPUDimNoSampleProfile<"MSAA_LOAD_X", dim, [llvm_any_ty], []>,
+ [IntrReadMem], [SDNPMemOperand]>;
+ }
//////////////////////////////////////////////////////////////////////////
// sample and getlod intrinsics
@@ -1029,6 +1036,10 @@ def int_amdgcn_raw_buffer_atomic_cmpswap : Intrinsic<
// gfx908 intrinsic
def int_amdgcn_raw_buffer_atomic_fadd : AMDGPURawBufferAtomic<llvm_anyfloat_ty>;
+// gfx90a intrinsics
+def int_amdgcn_raw_buffer_atomic_fmin : AMDGPURawBufferAtomic<llvm_anyfloat_ty>;
+def int_amdgcn_raw_buffer_atomic_fmax : AMDGPURawBufferAtomic<llvm_anyfloat_ty>;
+
class AMDGPUStructBufferAtomic<LLVMType data_ty = llvm_any_ty, bit NoRtn = false> : Intrinsic <
!if(NoRtn, [], [data_ty]),
[!if(NoRtn, data_ty, LLVMMatchType<0>), // vdata(VGPR)
@@ -1066,6 +1077,10 @@ def int_amdgcn_struct_buffer_atomic_cmpswap : Intrinsic<
// gfx908 intrinsic
def int_amdgcn_struct_buffer_atomic_fadd : AMDGPUStructBufferAtomic<llvm_anyfloat_ty>;
+// gfx90a intrinsics
+def int_amdgcn_struct_buffer_atomic_fmin : AMDGPUStructBufferAtomic<llvm_anyfloat_ty>;
+def int_amdgcn_struct_buffer_atomic_fmax : AMDGPUStructBufferAtomic<llvm_anyfloat_ty>;
+
// Obsolescent tbuffer intrinsics.
def int_amdgcn_tbuffer_load : Intrinsic <
@@ -1275,6 +1290,10 @@ def int_amdgcn_s_decperflevel :
IntrHasSideEffects, IntrWillReturn]> {
}
+def int_amdgcn_s_sethalt :
+ Intrinsic<[], [llvm_i32_ty], [ImmArg<ArgIndex<0>>, IntrNoMem,
+ IntrHasSideEffects, IntrWillReturn]>;
+
def int_amdgcn_s_getreg :
GCCBuiltin<"__builtin_amdgcn_s_getreg">,
Intrinsic<[llvm_i32_ty], [llvm_i32_ty],
@@ -1349,13 +1368,18 @@ def int_amdgcn_interp_p2_f16 :
[IntrNoMem, IntrSpeculatable, IntrWillReturn,
ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>]>;
-// Pixel shaders only: whether the current pixel is live (i.e. not a helper
-// invocation for derivative computation).
+// Deprecated: use llvm.amdgcn.live.mask instead.
def int_amdgcn_ps_live : Intrinsic <
[llvm_i1_ty],
[],
[IntrNoMem, IntrWillReturn]>;
+// Query currently live lanes.
+// Returns true if lane is live (and not a helper lane).
+def int_amdgcn_live_mask : Intrinsic <[llvm_i1_ty],
+ [], [IntrReadMem, IntrInaccessibleMemOnly, IntrWillReturn]
+>;
+
def int_amdgcn_mbcnt_lo :
GCCBuiltin<"__builtin_amdgcn_mbcnt_lo">,
Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
@@ -1585,15 +1609,29 @@ def int_amdgcn_endpgm : GCCBuiltin<"__builtin_amdgcn_endpgm">,
Intrinsic<[], [], [IntrNoReturn, IntrCold, IntrNoMem, IntrHasSideEffects]
>;
+// If false, mark all active lanes as helper lanes until the end of program.
+def int_amdgcn_wqm_demote : Intrinsic<[],
+ [llvm_i1_ty], [IntrWriteMem, IntrInaccessibleMemOnly]
+>;
+
// Copies the active channels of the source value to the destination value,
// with the guarantee that the source value is computed as if the entire
// program were executed in Whole Wavefront Mode, i.e. with all channels
-// enabled, with a few exceptions: - Phi nodes with require WWM return an
+// enabled, with a few exceptions: - Phi nodes which require WWM return an
// undefined value.
+def int_amdgcn_strict_wwm : Intrinsic<[llvm_any_ty],
+ [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable,
+ IntrConvergent, IntrWillReturn]
+>;
+// Deprecated. Use int_amdgcn_strict_wwm instead.
def int_amdgcn_wwm : Intrinsic<[llvm_any_ty],
[LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable,
IntrConvergent, IntrWillReturn]
>;
+def int_amdgcn_strict_wqm : Intrinsic<[llvm_any_ty],
+ [LLVMMatchType<0>], [IntrNoMem, IntrSpeculatable,
+ IntrConvergent, IntrWillReturn]
+>;
// Given a value, copies it while setting all the inactive lanes to a given
// value. Note that OpenGL helper lanes are considered active, so if the
@@ -1678,6 +1716,12 @@ def int_amdgcn_ds_bpermute :
Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
[IntrNoMem, IntrConvergent, IntrWillReturn]>;
+// llvm.amdgcn.perm <src0> <src1> <selector>
+def int_amdgcn_perm :
+ GCCBuiltin<"__builtin_amdgcn_perm">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrSpeculatable, IntrWillReturn]>;
+
//===----------------------------------------------------------------------===//
// GFX10 Intrinsics
//===----------------------------------------------------------------------===//
@@ -1844,146 +1888,54 @@ def int_amdgcn_udot8 :
def int_amdgcn_global_atomic_fadd : AMDGPUGlobalAtomicRtn<llvm_anyfloat_ty>;
-// llvm.amdgcn.mfma.f32.* vdst, srcA, srcB, srcC, cbsz, abid, blgp
-def int_amdgcn_mfma_f32_32x32x1f32 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_32x32x1f32">,
- Intrinsic<[llvm_v32f32_ty],
- [llvm_float_ty, llvm_float_ty, llvm_v32f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_16x16x1f32 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_16x16x1f32">,
- Intrinsic<[llvm_v16f32_ty],
- [llvm_float_ty, llvm_float_ty, llvm_v16f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_4x4x1f32 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_4x4x1f32">,
- Intrinsic<[llvm_v4f32_ty],
- [llvm_float_ty, llvm_float_ty, llvm_v4f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+// llvm.amdgcn.mfma.*.* vdst, srcA, srcB, srcC, cbsz, abid, blgp
+class AMDGPUMfmaIntrinsic<LLVMType DestTy, LLVMType SrcABTy> :
+ GCCBuiltin<!subst("int", "__builtin", NAME)>,
+ Intrinsic<[DestTy],
+ [SrcABTy, SrcABTy, DestTy,
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
[IntrConvergent, IntrNoMem, IntrWillReturn,
ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-def int_amdgcn_mfma_f32_32x32x2f32 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_32x32x2f32">,
- Intrinsic<[llvm_v16f32_ty],
- [llvm_float_ty, llvm_float_ty, llvm_v16f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
+def int_amdgcn_mfma_f32_32x32x1f32 : AMDGPUMfmaIntrinsic<llvm_v32f32_ty, llvm_float_ty>;
+def int_amdgcn_mfma_f32_16x16x1f32 : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_float_ty>;
+def int_amdgcn_mfma_f32_4x4x1f32 : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_float_ty>;
+def int_amdgcn_mfma_f32_32x32x2f32 : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_float_ty>;
+def int_amdgcn_mfma_f32_16x16x4f32 : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_float_ty>;
+def int_amdgcn_mfma_f32_32x32x4f16 : AMDGPUMfmaIntrinsic<llvm_v32f32_ty, llvm_v4f16_ty>;
+def int_amdgcn_mfma_f32_16x16x4f16 : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_v4f16_ty>;
+def int_amdgcn_mfma_f32_4x4x4f16 : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_v4f16_ty>;
+def int_amdgcn_mfma_f32_32x32x8f16 : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_v4f16_ty>;
+def int_amdgcn_mfma_f32_16x16x16f16 : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_v4f16_ty>;
+def int_amdgcn_mfma_i32_32x32x4i8 : AMDGPUMfmaIntrinsic<llvm_v32i32_ty, llvm_i32_ty>;
+def int_amdgcn_mfma_i32_16x16x4i8 : AMDGPUMfmaIntrinsic<llvm_v16i32_ty, llvm_i32_ty>;
+def int_amdgcn_mfma_i32_4x4x4i8 : AMDGPUMfmaIntrinsic<llvm_v4i32_ty, llvm_i32_ty>;
+def int_amdgcn_mfma_i32_32x32x8i8 : AMDGPUMfmaIntrinsic<llvm_v16i32_ty, llvm_i32_ty>;
+def int_amdgcn_mfma_i32_16x16x16i8 : AMDGPUMfmaIntrinsic<llvm_v4i32_ty, llvm_i32_ty>;
+def int_amdgcn_mfma_f32_32x32x2bf16 : AMDGPUMfmaIntrinsic<llvm_v32f32_ty, llvm_v2i16_ty>;
+def int_amdgcn_mfma_f32_16x16x2bf16 : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_v2i16_ty>;
+def int_amdgcn_mfma_f32_4x4x2bf16 : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_v2i16_ty>;
+def int_amdgcn_mfma_f32_32x32x4bf16 : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_v2i16_ty>;
+def int_amdgcn_mfma_f32_16x16x8bf16 : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_v2i16_ty>;
-def int_amdgcn_mfma_f32_16x16x4f32 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_16x16x4f32">,
- Intrinsic<[llvm_v4f32_ty],
- [llvm_float_ty, llvm_float_ty, llvm_v4f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_32x32x4f16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_32x32x4f16">,
- Intrinsic<[llvm_v32f32_ty],
- [llvm_v4f16_ty, llvm_v4f16_ty, llvm_v32f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_16x16x4f16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_16x16x4f16">,
- Intrinsic<[llvm_v16f32_ty],
- [llvm_v4f16_ty, llvm_v4f16_ty, llvm_v16f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_4x4x4f16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_4x4x4f16">,
- Intrinsic<[llvm_v4f32_ty],
- [llvm_v4f16_ty, llvm_v4f16_ty, llvm_v4f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_32x32x8f16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_32x32x8f16">,
- Intrinsic<[llvm_v16f32_ty],
- [llvm_v4f16_ty, llvm_v4f16_ty, llvm_v16f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_16x16x16f16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_16x16x16f16">,
- Intrinsic<[llvm_v4f32_ty],
- [llvm_v4f16_ty, llvm_v4f16_ty, llvm_v4f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_i32_32x32x4i8 : GCCBuiltin<"__builtin_amdgcn_mfma_i32_32x32x4i8">,
- Intrinsic<[llvm_v32i32_ty],
- [llvm_i32_ty, llvm_i32_ty, llvm_v32i32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_i32_16x16x4i8 : GCCBuiltin<"__builtin_amdgcn_mfma_i32_16x16x4i8">,
- Intrinsic<[llvm_v16i32_ty],
- [llvm_i32_ty, llvm_i32_ty, llvm_v16i32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_i32_4x4x4i8 : GCCBuiltin<"__builtin_amdgcn_mfma_i32_4x4x4i8">,
- Intrinsic<[llvm_v4i32_ty],
- [llvm_i32_ty, llvm_i32_ty, llvm_v4i32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_i32_32x32x8i8 : GCCBuiltin<"__builtin_amdgcn_mfma_i32_32x32x8i8">,
- Intrinsic<[llvm_v16i32_ty],
- [llvm_i32_ty, llvm_i32_ty, llvm_v16i32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_i32_16x16x16i8 : GCCBuiltin<"__builtin_amdgcn_mfma_i32_16x16x16i8">,
- Intrinsic<[llvm_v4i32_ty],
- [llvm_i32_ty, llvm_i32_ty, llvm_v4i32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_32x32x2bf16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_32x32x2bf16">,
- Intrinsic<[llvm_v32f32_ty],
- [llvm_v2i16_ty, llvm_v2i16_ty, llvm_v32f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
-
-def int_amdgcn_mfma_f32_16x16x2bf16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_16x16x2bf16">,
- Intrinsic<[llvm_v16f32_ty],
- [llvm_v2i16_ty, llvm_v2i16_ty, llvm_v16f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
+//===----------------------------------------------------------------------===//
+// gfx90a intrinsics
+//===----------------------------------------------------------------------===//
-def int_amdgcn_mfma_f32_4x4x2bf16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_4x4x2bf16">,
- Intrinsic<[llvm_v4f32_ty],
- [llvm_v2i16_ty, llvm_v2i16_ty, llvm_v4f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
+def int_amdgcn_global_atomic_fmin : AMDGPUGlobalAtomicRtn<llvm_anyfloat_ty>;
+def int_amdgcn_global_atomic_fmax : AMDGPUGlobalAtomicRtn<llvm_anyfloat_ty>;
+def int_amdgcn_flat_atomic_fadd : AMDGPUGlobalAtomicRtn<llvm_anyfloat_ty>;
+def int_amdgcn_flat_atomic_fmin : AMDGPUGlobalAtomicRtn<llvm_anyfloat_ty>;
+def int_amdgcn_flat_atomic_fmax : AMDGPUGlobalAtomicRtn<llvm_anyfloat_ty>;
-def int_amdgcn_mfma_f32_32x32x4bf16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_32x32x4bf16">,
- Intrinsic<[llvm_v16f32_ty],
- [llvm_v2i16_ty, llvm_v2i16_ty, llvm_v16f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
+def int_amdgcn_mfma_f32_32x32x4bf16_1k : AMDGPUMfmaIntrinsic<llvm_v32f32_ty, llvm_v4i16_ty>;
+def int_amdgcn_mfma_f32_16x16x4bf16_1k : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_v4i16_ty>;
+def int_amdgcn_mfma_f32_4x4x4bf16_1k : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_v4i16_ty>;
+def int_amdgcn_mfma_f32_32x32x8bf16_1k : AMDGPUMfmaIntrinsic<llvm_v16f32_ty, llvm_v4i16_ty>;
+def int_amdgcn_mfma_f32_16x16x16bf16_1k : AMDGPUMfmaIntrinsic<llvm_v4f32_ty, llvm_v4i16_ty>;
-def int_amdgcn_mfma_f32_16x16x8bf16 : GCCBuiltin<"__builtin_amdgcn_mfma_f32_16x16x8bf16">,
- Intrinsic<[llvm_v4f32_ty],
- [llvm_v2i16_ty, llvm_v2i16_ty, llvm_v4f32_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
- [IntrConvergent, IntrNoMem, IntrWillReturn,
- ImmArg<ArgIndex<3>>, ImmArg<ArgIndex<4>>, ImmArg<ArgIndex<5>>]>;
+def int_amdgcn_mfma_f64_16x16x4f64 : AMDGPUMfmaIntrinsic<llvm_v4f64_ty, llvm_double_ty>;
+def int_amdgcn_mfma_f64_4x4x4f64 : AMDGPUMfmaIntrinsic<llvm_double_ty, llvm_double_ty>;
//===----------------------------------------------------------------------===//
// Special Intrinsics for backend internal use only. No frontend
diff --git a/llvm/include/llvm/IR/IntrinsicsARM.td b/llvm/include/llvm/IR/IntrinsicsARM.td
index 0eb27cc34462..52702fe7e731 100644
--- a/llvm/include/llvm/IR/IntrinsicsARM.td
+++ b/llvm/include/llvm/IR/IntrinsicsARM.td
@@ -913,9 +913,10 @@ def int_arm_mve_vmaxnma_predicated: Intrinsic<[llvm_anyvector_ty],
multiclass MVEPredicated<list<LLVMType> rets, list<LLVMType> params,
LLVMType pred = llvm_anyvector_ty,
- list<IntrinsicProperty> props = [IntrNoMem]> {
- def "": Intrinsic<rets, params, props>;
- def _predicated: Intrinsic<rets, params # [pred], props>;
+ list<IntrinsicProperty> props = [IntrNoMem],
+ list<SDNodeProperty> sdprops = []> {
+ def "": Intrinsic<rets, params, props, "", sdprops>;
+ def _predicated: Intrinsic<rets, params # [pred], props, "", sdprops>;
}
multiclass MVEPredicatedM<list<LLVMType> rets, list<LLVMType> params,
LLVMType pred = llvm_anyvector_ty,
@@ -963,16 +964,17 @@ defm int_arm_mve_vcvt_widen: MVEMXPredicated<[llvm_v4f32_ty], [],
defm int_arm_mve_vldr_gather_base: MVEPredicated<
[llvm_anyvector_ty], [llvm_anyvector_ty, llvm_i32_ty],
- llvm_anyvector_ty, [IntrReadMem]>;
+ llvm_anyvector_ty, [IntrReadMem], [SDNPMemOperand]>;
defm int_arm_mve_vldr_gather_base_wb: MVEPredicated<
[llvm_anyvector_ty, llvm_anyvector_ty],
- [LLVMMatchType<1>, llvm_i32_ty], llvm_anyvector_ty, [IntrReadMem]>;
+ [LLVMMatchType<1>, llvm_i32_ty], llvm_anyvector_ty, [IntrReadMem],
+ [SDNPMemOperand]>;
defm int_arm_mve_vstr_scatter_base: MVEPredicated<
[], [llvm_anyvector_ty, llvm_i32_ty, llvm_anyvector_ty],
- llvm_anyvector_ty, [IntrWriteMem]>;
+ llvm_anyvector_ty, [IntrWriteMem], [SDNPMemOperand]>;
defm int_arm_mve_vstr_scatter_base_wb: MVEPredicated<
[llvm_anyvector_ty], [LLVMMatchType<0>, llvm_i32_ty, llvm_anyvector_ty],
- llvm_anyvector_ty, [IntrWriteMem]>;
+ llvm_anyvector_ty, [IntrWriteMem], [SDNPMemOperand]>;
// gather_offset takes three i32 parameters. The first is the size of
// memory element loaded, in bits. The second is a left bit shift to
@@ -985,10 +987,12 @@ defm int_arm_mve_vstr_scatter_base_wb: MVEPredicated<
// narrows rather than widening, it doesn't have the last one.
defm int_arm_mve_vldr_gather_offset: MVEPredicated<
[llvm_anyvector_ty], [llvm_anyptr_ty, llvm_anyvector_ty,
- llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], llvm_anyvector_ty, [IntrReadMem]>;
+ llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], llvm_anyvector_ty, [IntrReadMem],
+ [SDNPMemOperand]>;
defm int_arm_mve_vstr_scatter_offset: MVEPredicated<
[], [llvm_anyptr_ty, llvm_anyvector_ty, llvm_anyvector_ty,
- llvm_i32_ty, llvm_i32_ty], llvm_anyvector_ty, [IntrWriteMem]>;
+ llvm_i32_ty, llvm_i32_ty], llvm_anyvector_ty, [IntrWriteMem],
+ [SDNPMemOperand]>;
def int_arm_mve_shl_imm_predicated: Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, llvm_i32_ty, llvm_anyvector_ty, LLVMMatchType<0>],
@@ -1132,11 +1136,15 @@ defm int_arm_mve_vcmlaq : MVEPredicated<
[llvm_i32_ty, LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
llvm_anyvector_ty>;
-def int_arm_mve_vld2q: Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>], [llvm_anyptr_ty], [IntrReadMem, IntrArgMemOnly]>;
-def int_arm_mve_vld4q: Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>], [llvm_anyptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_mve_vld2q: Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>], [llvm_anyptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
+def int_arm_mve_vld4q: Intrinsic<[llvm_anyvector_ty, LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>], [llvm_anyptr_ty],
+ [IntrReadMem, IntrArgMemOnly]>;
-def int_arm_mve_vst2q: Intrinsic<[], [llvm_anyptr_ty, llvm_anyvector_ty, LLVMMatchType<1>, llvm_i32_ty], [IntrWriteMem, IntrArgMemOnly]>;
-def int_arm_mve_vst4q: Intrinsic<[], [llvm_anyptr_ty, llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>, LLVMMatchType<1>, llvm_i32_ty], [IntrWriteMem, IntrArgMemOnly]>;
+def int_arm_mve_vst2q: Intrinsic<[], [llvm_anyptr_ty, llvm_anyvector_ty, LLVMMatchType<1>, llvm_i32_ty],
+ [IntrWriteMem, IntrArgMemOnly], "", [SDNPMemOperand]>;
+def int_arm_mve_vst4q: Intrinsic<[], [llvm_anyptr_ty, llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<1>, LLVMMatchType<1>, llvm_i32_ty],
+ [IntrWriteMem, IntrArgMemOnly], "", [SDNPMemOperand]>;
// MVE vector absolute difference and accumulate across vector
// The first operand is an 'unsigned' flag. The remaining operands are:
diff --git a/llvm/include/llvm/IR/IntrinsicsHexagon.td b/llvm/include/llvm/IR/IntrinsicsHexagon.td
index fe16a361ba3d..212262c28706 100644
--- a/llvm/include/llvm/IR/IntrinsicsHexagon.td
+++ b/llvm/include/llvm/IR/IntrinsicsHexagon.td
@@ -253,6 +253,124 @@ Hexagon_v32i32_v32i32v16i32i64_rtt_Intrinsic<"HEXAGON_V6_vrmpyub_rtt_acc">;
def int_hexagon_V6_vrmpyub_rtt_acc_128B :
Hexagon_v64i32_v64i32v32i32i64_rtt_Intrinsic<"HEXAGON_V6_vrmpyub_rtt_acc_128B">;
+// HVX conditional loads/stores
+
+class Hexagon_pred_vload_imm<LLVMType ValTy>
+ : Hexagon_NonGCC_Intrinsic<
+ [ValTy],
+ [llvm_i1_ty, LLVMPointerType<ValTy>, llvm_i32_ty],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<ArgIndex<1>>,
+ ImmArg<ArgIndex<2>>]>;
+
+class Hexagon_pred_vload_imm_64B: Hexagon_pred_vload_imm<llvm_v16i32_ty>;
+class Hexagon_pred_vload_imm_128B: Hexagon_pred_vload_imm<llvm_v32i32_ty>;
+
+def int_hexagon_V6_vL32b_pred_ai: Hexagon_pred_vload_imm_64B;
+def int_hexagon_V6_vL32b_npred_ai: Hexagon_pred_vload_imm_64B;
+def int_hexagon_V6_vL32b_nt_pred_ai: Hexagon_pred_vload_imm_64B;
+def int_hexagon_V6_vL32b_nt_npred_ai: Hexagon_pred_vload_imm_64B;
+def int_hexagon_V6_vL32b_pred_ai_128B: Hexagon_pred_vload_imm_128B;
+def int_hexagon_V6_vL32b_npred_ai_128B: Hexagon_pred_vload_imm_128B;
+def int_hexagon_V6_vL32b_nt_pred_ai_128B: Hexagon_pred_vload_imm_128B;
+def int_hexagon_V6_vL32b_nt_npred_ai_128B: Hexagon_pred_vload_imm_128B;
+
+class Hexagom_pred_vload_upd<LLVMType ValTy, bit TakesImm>
+ : Hexagon_NonGCC_Intrinsic<
+ [ValTy, LLVMPointerType<ValTy>],
+ [llvm_i1_ty, LLVMPointerType<ValTy>, llvm_i32_ty],
+ !if(TakesImm,
+ [IntrReadMem, IntrArgMemOnly, NoCapture<ArgIndex<1>>,
+ ImmArg<ArgIndex<2>>],
+ [IntrReadMem, IntrArgMemOnly, NoCapture<ArgIndex<1>>])>;
+
+class Hexagom_pred_vload_upd_64B<bit TakesImm>
+ : Hexagom_pred_vload_upd<llvm_v16i32_ty, TakesImm>;
+class Hexagom_pred_vload_upd_128B<bit TakesImm>
+ : Hexagom_pred_vload_upd<llvm_v32i32_ty, TakesImm>;
+
+def int_hexagon_V6_vL32b_pred_pi: Hexagom_pred_vload_upd_64B<1>;
+def int_hexagon_V6_vL32b_npred_pi: Hexagom_pred_vload_upd_64B<1>;
+def int_hexagon_V6_vL32b_nt_pred_pi: Hexagom_pred_vload_upd_64B<1>;
+def int_hexagon_V6_vL32b_nt_npred_pi: Hexagom_pred_vload_upd_64B<1>;
+def int_hexagon_V6_vL32b_pred_pi_128B: Hexagom_pred_vload_upd_128B<1>;
+def int_hexagon_V6_vL32b_npred_pi_128B: Hexagom_pred_vload_upd_128B<1>;
+def int_hexagon_V6_vL32b_nt_pred_pi_128B: Hexagom_pred_vload_upd_128B<1>;
+def int_hexagon_V6_vL32b_nt_npred_pi_128B: Hexagom_pred_vload_upd_128B<1>;
+
+def int_hexagon_V6_vL32b_pred_ppu: Hexagom_pred_vload_upd_64B<0>;
+def int_hexagon_V6_vL32b_npred_ppu: Hexagom_pred_vload_upd_64B<0>;
+def int_hexagon_V6_vL32b_nt_pred_ppu: Hexagom_pred_vload_upd_64B<0>;
+def int_hexagon_V6_vL32b_nt_npred_ppu: Hexagom_pred_vload_upd_64B<0>;
+def int_hexagon_V6_vL32b_pred_ppu_128B: Hexagom_pred_vload_upd_128B<0>;
+def int_hexagon_V6_vL32b_npred_ppu_128B: Hexagom_pred_vload_upd_128B<0>;
+def int_hexagon_V6_vL32b_nt_pred_ppu_128B: Hexagom_pred_vload_upd_128B<0>;
+def int_hexagon_V6_vL32b_nt_npred_ppu_128B: Hexagom_pred_vload_upd_128B<0>;
+
+
+class Hexagon_pred_vstore_imm<LLVMType ValTy>
+ : Hexagon_NonGCC_Intrinsic<
+ [],
+ [llvm_i1_ty, LLVMPointerType<ValTy>, llvm_i32_ty, ValTy],
+ [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<1>>,
+ ImmArg<ArgIndex<2>>]>;
+
+class Hexagon_pred_vstore_imm_64B: Hexagon_pred_vstore_imm<llvm_v16i32_ty>;
+class Hexagon_pred_vstore_imm_128B: Hexagon_pred_vstore_imm<llvm_v32i32_ty>;
+
+def int_hexagon_V6_vS32b_pred_ai: Hexagon_pred_vstore_imm_64B;
+def int_hexagon_V6_vS32b_npred_ai: Hexagon_pred_vstore_imm_64B;
+def int_hexagon_V6_vS32Ub_pred_ai: Hexagon_pred_vstore_imm_64B;
+def int_hexagon_V6_vS32Ub_npred_ai: Hexagon_pred_vstore_imm_64B;
+def int_hexagon_V6_vS32b_nt_pred_ai: Hexagon_pred_vstore_imm_64B;
+def int_hexagon_V6_vS32b_nt_npred_ai: Hexagon_pred_vstore_imm_64B;
+def int_hexagon_V6_vS32b_pred_ai_128B: Hexagon_pred_vstore_imm_128B;
+def int_hexagon_V6_vS32b_npred_ai_128B: Hexagon_pred_vstore_imm_128B;
+def int_hexagon_V6_vS32Ub_pred_ai_128B: Hexagon_pred_vstore_imm_128B;
+def int_hexagon_V6_vS32Ub_npred_ai_128B: Hexagon_pred_vstore_imm_128B;
+def int_hexagon_V6_vS32b_nt_pred_ai_128B: Hexagon_pred_vstore_imm_128B;
+def int_hexagon_V6_vS32b_nt_npred_ai_128B: Hexagon_pred_vstore_imm_128B;
+
+class Hexagon_pred_vstore_upd<LLVMType ValTy, bit TakesImm>
+ : Hexagon_NonGCC_Intrinsic<
+ [LLVMPointerType<ValTy>],
+ [llvm_i1_ty, LLVMPointerType<ValTy>, llvm_i32_ty, ValTy],
+ !if(TakesImm,
+ [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<1>>,
+ ImmArg<ArgIndex<2>>],
+ [IntrWriteMem, IntrArgMemOnly, NoCapture<ArgIndex<1>>])>;
+
+class Hexagon_pred_vstore_upd_64B<bit TakesImm>
+ : Hexagon_pred_vstore_upd<llvm_v16i32_ty, TakesImm>;
+class Hexagon_pred_vstore_upd_128B<bit TakesImm>
+ : Hexagon_pred_vstore_upd<llvm_v32i32_ty, TakesImm>;
+
+def int_hexagon_V6_vS32b_pred_pi: Hexagon_pred_vstore_upd_64B<1>;
+def int_hexagon_V6_vS32b_npred_pi: Hexagon_pred_vstore_upd_64B<1>;
+def int_hexagon_V6_vS32Ub_pred_pi: Hexagon_pred_vstore_upd_64B<1>;
+def int_hexagon_V6_vS32Ub_npred_pi: Hexagon_pred_vstore_upd_64B<1>;
+def int_hexagon_V6_vS32b_nt_pred_pi: Hexagon_pred_vstore_upd_64B<1>;
+def int_hexagon_V6_vS32b_nt_npred_pi: Hexagon_pred_vstore_upd_64B<1>;
+def int_hexagon_V6_vS32b_pred_pi_128B: Hexagon_pred_vstore_upd_128B<1>;
+def int_hexagon_V6_vS32b_npred_pi_128B: Hexagon_pred_vstore_upd_128B<1>;
+def int_hexagon_V6_vS32Ub_pred_pi_128B: Hexagon_pred_vstore_upd_128B<1>;
+def int_hexagon_V6_vS32Ub_npred_pi_128B: Hexagon_pred_vstore_upd_128B<1>;
+def int_hexagon_V6_vS32b_nt_pred_pi_128B: Hexagon_pred_vstore_upd_128B<1>;
+def int_hexagon_V6_vS32b_nt_npred_pi_128B: Hexagon_pred_vstore_upd_128B<1>;
+
+def int_hexagon_V6_vS32b_pred_ppu: Hexagon_pred_vstore_upd_64B<0>;
+def int_hexagon_V6_vS32b_npred_ppu: Hexagon_pred_vstore_upd_64B<0>;
+def int_hexagon_V6_vS32Ub_pred_ppu: Hexagon_pred_vstore_upd_64B<0>;
+def int_hexagon_V6_vS32Ub_npred_ppu: Hexagon_pred_vstore_upd_64B<0>;
+def int_hexagon_V6_vS32b_nt_pred_ppu: Hexagon_pred_vstore_upd_64B<0>;
+def int_hexagon_V6_vS32b_nt_npred_ppu: Hexagon_pred_vstore_upd_64B<0>;
+def int_hexagon_V6_vS32b_pred_ppu_128B: Hexagon_pred_vstore_upd_128B<0>;
+def int_hexagon_V6_vS32b_npred_ppu_128B: Hexagon_pred_vstore_upd_128B<0>;
+def int_hexagon_V6_vS32Ub_pred_ppu_128B: Hexagon_pred_vstore_upd_128B<0>;
+def int_hexagon_V6_vS32Ub_npred_ppu_128B: Hexagon_pred_vstore_upd_128B<0>;
+def int_hexagon_V6_vS32b_nt_pred_ppu_128B: Hexagon_pred_vstore_upd_128B<0>;
+def int_hexagon_V6_vS32b_nt_npred_ppu_128B: Hexagon_pred_vstore_upd_128B<0>;
+
+
// HVX Vector predicate casts.
// These intrinsics do not emit (nor do they correspond to) any instructions,
// they are no-ops.
@@ -265,6 +383,8 @@ Hexagon_NonGCC_Intrinsic<[llvm_anyvector_ty], [llvm_anyvector_ty], [IntrNoMem]>;
// Masked vector stores
//
+// These are all deprecated, the intrinsics matching instruction names
+// should be used instead, e.g. int_hexagon_V6_vS32b_qpred_ai, etc.
class Hexagon_custom_vms_Intrinsic
: Hexagon_NonGCC_Intrinsic<
diff --git a/llvm/include/llvm/IR/IntrinsicsHexagonDep.td b/llvm/include/llvm/IR/IntrinsicsHexagonDep.td
index 198b6a7ab0d1..6799273bf805 100644
--- a/llvm/include/llvm/IR/IntrinsicsHexagonDep.td
+++ b/llvm/include/llvm/IR/IntrinsicsHexagonDep.td
@@ -8,111 +8,48 @@
// Automatically generated file, do not edit!
//===----------------------------------------------------------------------===//
-// tag : C2_cmpeq
-class Hexagon_i32_i32i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty],
- intr_properties>;
-
-// tag : C2_cmpeqp
-class Hexagon_i32_i64i64_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i64_ty,llvm_i64_ty],
- intr_properties>;
-
-// tag : C2_not
+// tag : A2_abs
class Hexagon_i32_i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
[llvm_i32_ty], [llvm_i32_ty],
intr_properties>;
-// tag : C4_and_and
-class Hexagon_i32_i32i32i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty,llvm_i32_ty],
- intr_properties>;
-
-// tag : C2_vmux
-class Hexagon_i64_i32i64i64_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i32_ty,llvm_i64_ty,llvm_i64_ty],
- intr_properties>;
-
-// tag : C2_mask
-class Hexagon_i64_i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i32_ty],
- intr_properties>;
-
-// tag : A4_vcmpbeqi
-class Hexagon_i32_i64i32_Intrinsic<string GCCIntSuffix,
+// tag : A2_absp
+class Hexagon_i64_i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i64_ty,llvm_i32_ty],
+ [llvm_i64_ty], [llvm_i64_ty],
intr_properties>;
-// tag : A4_boundscheck
-class Hexagon_i32_i32i64_Intrinsic<string GCCIntSuffix,
+// tag : A2_add
+class Hexagon_custom_i32_i32i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i32_ty,llvm_i64_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : M2_mpyd_acc_hh_s0
-class Hexagon_i64_i64i32i32_Intrinsic<string GCCIntSuffix,
+// tag : A2_addh_h16_hh
+class Hexagon_i32_i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i64_ty,llvm_i32_ty,llvm_i32_ty],
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : M2_mpyd_hh_s0
-class Hexagon_i64_i32i32_Intrinsic<string GCCIntSuffix,
+// tag : A2_addp
+class Hexagon_custom_i64_i64i64_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i32_ty,llvm_i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty],
intr_properties>;
-// tag : M2_vmpy2es_s0
+// tag : A2_addpsat
class Hexagon_i64_i64i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
[llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty],
intr_properties>;
-// tag : M2_vmac2es_s0
-class Hexagon_i64_i64i64i64_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty,llvm_i64_ty],
- intr_properties>;
-
-// tag : M2_vrcmpys_s1
-class Hexagon_i64_i64i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i64_ty,llvm_i32_ty],
- intr_properties>;
-
-// tag : M2_vrcmpys_acc_s1
-class Hexagon_i64_i64i64i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty,llvm_i32_ty],
- intr_properties>;
-
-// tag : S4_vrcrotate_acc
-class Hexagon_i64_i64i64i32i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty,llvm_i32_ty,llvm_i32_ty],
- intr_properties>;
-
// tag : A2_addsp
class Hexagon_i64_i32i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
@@ -120,109 +57,109 @@ class Hexagon_i64_i32i64_Intrinsic<string GCCIntSuffix,
[llvm_i64_ty], [llvm_i32_ty,llvm_i64_ty],
intr_properties>;
-// tag : A2_vconj
-class Hexagon_i64_i64_Intrinsic<string GCCIntSuffix,
+// tag : A2_combineii
+class Hexagon_i64_i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_i64_ty],
+ [llvm_i64_ty], [llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : A2_sat
-class Hexagon_i32_i64_Intrinsic<string GCCIntSuffix,
+// tag : A2_neg
+class Hexagon_custom_i32_i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i64_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_i32_ty], [llvm_i32_ty],
intr_properties>;
-// tag : F2_sfadd
-class Hexagon_float_floatfloat_Intrinsic<string GCCIntSuffix,
+// tag : A2_roundsat
+class Hexagon_i32_i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_float_ty], [llvm_float_ty,llvm_float_ty],
+ [llvm_i32_ty], [llvm_i64_ty],
intr_properties>;
-// tag : F2_sffma
-class Hexagon_float_floatfloatfloat_Intrinsic<string GCCIntSuffix,
+// tag : A2_sxtw
+class Hexagon_i64_i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_float_ty], [llvm_float_ty,llvm_float_ty,llvm_float_ty],
+ [llvm_i64_ty], [llvm_i32_ty],
intr_properties>;
-// tag : F2_sffma_sc
-class Hexagon_float_floatfloatfloati32_Intrinsic<string GCCIntSuffix,
+// tag : A2_vcmpbeq
+class Hexagon_i32_i64i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_float_ty], [llvm_float_ty,llvm_float_ty,llvm_float_ty,llvm_i32_ty],
+ [llvm_i32_ty], [llvm_i64_ty,llvm_i64_ty],
intr_properties>;
-// tag : F2_sfcmpeq
-class Hexagon_i32_floatfloat_Intrinsic<string GCCIntSuffix,
+// tag : A2_vraddub_acc
+class Hexagon_i64_i64i64i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_float_ty,llvm_float_ty],
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty,llvm_i64_ty],
intr_properties>;
-// tag : F2_sfclass
-class Hexagon_i32_floati32_Intrinsic<string GCCIntSuffix,
+// tag : A4_boundscheck
+class Hexagon_i32_i32i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_float_ty,llvm_i32_ty],
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i64_ty],
intr_properties>;
-// tag : F2_sfimm_p
-class Hexagon_float_i32_Intrinsic<string GCCIntSuffix,
+// tag : A4_tlbmatch
+class Hexagon_i32_i64i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_float_ty], [llvm_i32_ty],
+ [llvm_i32_ty], [llvm_i64_ty,llvm_i32_ty],
intr_properties>;
-// tag : F2_sffixupr
-class Hexagon_float_float_Intrinsic<string GCCIntSuffix,
+// tag : A4_vrmaxh
+class Hexagon_i64_i64i64i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_float_ty], [llvm_float_ty],
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty,llvm_i32_ty],
intr_properties>;
-// tag : F2_dfadd
-class Hexagon_double_doubledouble_Intrinsic<string GCCIntSuffix,
+// tag : A7_croundd_ri
+class Hexagon_i64_i64i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_double_ty], [llvm_double_ty,llvm_double_ty],
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i32_ty],
intr_properties>;
-// tag : F2_dfmpylh
-class Hexagon_double_doubledoubledouble_Intrinsic<string GCCIntSuffix,
+// tag : C2_mux
+class Hexagon_i32_i32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_double_ty], [llvm_double_ty,llvm_double_ty,llvm_double_ty],
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : F2_dfcmpeq
-class Hexagon_i32_doubledouble_Intrinsic<string GCCIntSuffix,
+// tag : C2_vmux
+class Hexagon_i64_i32i64i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_double_ty,llvm_double_ty],
+ [llvm_i64_ty], [llvm_i32_ty,llvm_i64_ty,llvm_i64_ty],
intr_properties>;
-// tag : F2_dfclass
-class Hexagon_i32_doublei32_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_d2df
+class Hexagon_double_i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_double_ty,llvm_i32_ty],
+ [llvm_double_ty], [llvm_i64_ty],
intr_properties>;
-// tag : F2_dfimm_p
-class Hexagon_double_i32_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_d2sf
+class Hexagon_float_i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_double_ty], [llvm_i32_ty],
+ [llvm_float_ty], [llvm_i64_ty],
intr_properties>;
-// tag : F2_conv_sf2df
-class Hexagon_double_float_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_df2d
+class Hexagon_i64_double_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_double_ty], [llvm_float_ty],
+ [llvm_i64_ty], [llvm_double_ty],
intr_properties>;
// tag : F2_conv_df2sf
@@ -232,697 +169,662 @@ class Hexagon_float_double_Intrinsic<string GCCIntSuffix,
[llvm_float_ty], [llvm_double_ty],
intr_properties>;
-// tag : F2_conv_ud2sf
-class Hexagon_float_i64_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_df2uw
+class Hexagon_i32_double_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_float_ty], [llvm_i64_ty],
+ [llvm_i32_ty], [llvm_double_ty],
intr_properties>;
-// tag : F2_conv_ud2df
-class Hexagon_double_i64_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_sf2d
+class Hexagon_i64_float_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_double_ty], [llvm_i64_ty],
+ [llvm_i64_ty], [llvm_float_ty],
intr_properties>;
-// tag : F2_conv_sf2uw
-class Hexagon_i32_float_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_sf2df
+class Hexagon_double_float_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_float_ty],
+ [llvm_double_ty], [llvm_float_ty],
intr_properties>;
-// tag : F2_conv_sf2ud
-class Hexagon_i64_float_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_sf2uw
+class Hexagon_i32_float_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_float_ty],
+ [llvm_i32_ty], [llvm_float_ty],
intr_properties>;
-// tag : F2_conv_df2uw
-class Hexagon_i32_double_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_uw2df
+class Hexagon_double_i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_double_ty],
+ [llvm_double_ty], [llvm_i32_ty],
intr_properties>;
-// tag : F2_conv_df2ud
-class Hexagon_i64_double_Intrinsic<string GCCIntSuffix,
+// tag : F2_conv_uw2sf
+class Hexagon_float_i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_double_ty],
+ [llvm_float_ty], [llvm_i32_ty],
intr_properties>;
-// tag : S2_insert
-class Hexagon_i32_i32i32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : F2_dfadd
+class Hexagon_double_doubledouble_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [llvm_double_ty], [llvm_double_ty,llvm_double_ty],
intr_properties>;
-// tag : S2_insert_rp
-class Hexagon_i32_i32i32i64_Intrinsic<string GCCIntSuffix,
+// tag : F2_dfclass
+class Hexagon_i32_doublei32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty,llvm_i64_ty],
+ [llvm_i32_ty], [llvm_double_ty,llvm_i32_ty],
intr_properties>;
-// tag : Y2_dcfetch
-class Hexagon__ptr_Intrinsic<string GCCIntSuffix,
+// tag : F2_dfcmpeq
+class Hexagon_i32_doubledouble_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [], [llvm_ptr_ty],
+ [llvm_i32_ty], [llvm_double_ty,llvm_double_ty],
intr_properties>;
-// tag : Y4_l2fetch
-class Hexagon__ptri32_Intrinsic<string GCCIntSuffix,
+// tag : F2_dfmpyhh
+class Hexagon_double_doubledoubledouble_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [], [llvm_ptr_ty,llvm_i32_ty],
+ [llvm_double_ty], [llvm_double_ty,llvm_double_ty,llvm_double_ty],
intr_properties>;
-// tag : Y5_l2fetch
-class Hexagon__ptri64_Intrinsic<string GCCIntSuffix,
+// tag : F2_sfadd
+class Hexagon_float_floatfloat_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [], [llvm_ptr_ty,llvm_i64_ty],
+ [llvm_float_ty], [llvm_float_ty,llvm_float_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32_Intrinsic<string GCCIntSuffix,
+// tag : F2_sfclass
+class Hexagon_i32_floati32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty],
+ [llvm_i32_ty], [llvm_float_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32_Intrinsic<string GCCIntSuffix,
+// tag : F2_sfcmpeq
+class Hexagon_i32_floatfloat_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty],
+ [llvm_i32_ty], [llvm_float_ty,llvm_float_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32__Intrinsic<string GCCIntSuffix,
+// tag : F2_sffixupr
+class Hexagon_float_float_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [],
+ [llvm_float_ty], [llvm_float_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32__Intrinsic<string GCCIntSuffix,
+// tag : F2_sffma
+class Hexagon_float_floatfloatfloat_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [],
+ [llvm_float_ty], [llvm_float_ty,llvm_float_ty,llvm_float_ty],
intr_properties>;
-// tag :
-class Hexagon_i32_v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : F2_sffma_sc
+class Hexagon_float_floatfloatfloati32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_float_ty], [llvm_float_ty,llvm_float_ty,llvm_float_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32i64_Intrinsic<string GCCIntSuffix,
+// tag : M2_cmaci_s0
+class Hexagon_i64_i64i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i64_ty],
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_i64_v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : M2_dpmpyss_s0
+class Hexagon_custom_i64_i32i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i64_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_i64_ty], [llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v32i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : S2_asl_i_p
+class Hexagon_custom_i64_i64i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_i32_Intrinsic<string GCCIntSuffix,
+// tag : S2_insert
+class Hexagon_i32_i32i32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_i32_ty],
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty,llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_i32_Intrinsic<string GCCIntSuffix,
+// tag : S2_insert_rp
+class Hexagon_i32_i32i32i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_i32_ty],
+ [llvm_i32_ty], [llvm_i32_ty,llvm_i32_ty,llvm_i64_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_i64_Intrinsic<string GCCIntSuffix,
+// tag : S2_insertp
+class Hexagon_i64_i64i64i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_i64_ty],
+ [llvm_i64_ty], [llvm_i64_ty,llvm_i64_ty,llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_extractw
+class Hexagon_i32_v16i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ [llvm_i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_extractw
+class Hexagon_i32_v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_hi
+class Hexagon_v16i32_v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
+ [llvm_v16i32_ty], [llvm_v32i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32v64i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_hi
+class Hexagon_v32i32_v64i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty],
+ [llvm_v32i32_ty], [llvm_v64i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_lvsplatb
+class Hexagon_v16i32_i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v32i32_ty],
+ [llvm_v16i32_ty], [llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v64i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_lvsplatb
+class Hexagon_v32i32_i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v64i32_ty],
+ [llvm_v32i32_ty], [llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v4i32_v32i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_pred_and
+class Hexagon_custom_v64i1_v64i1v64i1_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v4i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v64i1_ty], [llvm_v64i1_ty,llvm_v64i1_ty],
intr_properties>;
-// tag :
-class Hexagon_v4i32_v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_pred_and
+class Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v4i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v128i1_ty], [llvm_v128i1_ty,llvm_v128i1_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v4i32v32i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_pred_not
+class Hexagon_custom_v64i1_v64i1_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v4i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v64i1_ty], [llvm_v64i1_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v8i32v64i32v64i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_pred_not
+class Hexagon_custom_v128i1_v128i1_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v8i32_ty,llvm_v64i32_ty,llvm_v64i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v128i1_ty], [llvm_v128i1_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v64i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_pred_scalar2
+class Hexagon_custom_v64i1_i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v64i1_ty], [llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v64i32v64i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_pred_scalar2
+class Hexagon_custom_v128i1_i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v128i1_ty], [llvm_i32_ty],
intr_properties>;
-// tag :
+// tag : V6_v6mpyhubs10
class Hexagon_v32i32_v32i32v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
[llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_v6mpyhubs10
+class Hexagon_v64i32_v64i32v64i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_v6mpyhubs10_vxx
+class Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_v6mpyhubs10_vxx
+class Hexagon_v64i32_v64i32v64i32v64i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32v64i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vS32b_nqpred_ai
+class Hexagon_custom__v64i1ptrv16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v64i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [], [llvm_v64i1_ty,llvm_ptr_ty,llvm_v16i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32v4i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vS32b_nqpred_ai
+class Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v4i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [], [llvm_v128i1_ty,llvm_ptr_ty,llvm_v32i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32v64i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vabsb
+class Hexagon_v16i32_v16i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty],
+ [llvm_v16i32_ty], [llvm_v16i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v32i32v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vabsb
+class Hexagon_v32i32_v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v32i32_v32i32v32i32i64_Intrinsic<string GCCIntSuffix,
+// tag : V6_vabsdiffh
+class Hexagon_v16i32_v16i32v16i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i64_ty],
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32v32i32v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vabsdiffh
+class Hexagon_v32i32_v32i32v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag :
-class Hexagon_v64i32_v64i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddb_dv
+class Hexagon_v64i32_v64i32v64i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty],
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty],
intr_properties>;
-// tag : V6_vS32b_qpred_ai
-class Hexagon_custom__v64i1ptrv16i32_Intrinsic<
+// tag : V6_vaddbnq
+class Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [], [llvm_v64i1_ty,llvm_ptr_ty,llvm_v16i32_ty],
+ [llvm_v16i32_ty], [llvm_v64i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vS32b_qpred_ai
-class Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<
+// tag : V6_vaddbnq
+class Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [], [llvm_v128i1_ty,llvm_ptr_ty,llvm_v32i32_ty],
- intr_properties>;
-
-// tag : V6_valignb
-class Hexagon_v16i32_v16i32v16i32i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
- intr_properties>;
-
-// tag : V6_vror
-class Hexagon_v16i32_v16i32i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
- intr_properties>;
-
-// tag : V6_vunpackub
-class Hexagon_v32i32_v16i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v16i32_ty],
+ [llvm_v32i32_ty], [llvm_v128i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vunpackob
-class Hexagon_v32i32_v32i32v16i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddcarry
+class Hexagon_custom_v16i32v64i1_v16i32v16i32v64i1_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v16i32_ty,llvm_v64i1_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v64i1_ty],
intr_properties>;
-// tag : V6_vpackeb
-class Hexagon_v16i32_v16i32v16i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddcarry
+class Hexagon_custom_v32i32v128i1_v32i32v32i32v128i1_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v32i32_ty,llvm_v128i1_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v128i1_ty],
intr_properties>;
-// tag : V6_vdmpyhvsat_acc
-class Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddcarrysat
+class Hexagon_custom_v16i32_v16i32v16i32v64i1_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v64i1_ty],
intr_properties>;
-// tag : V6_vdmpyhisat
-class Hexagon_v16i32_v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddcarrysat
+class Hexagon_custom_v32i32_v32i32v32i32v128i1_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v128i1_ty],
intr_properties>;
-// tag : V6_vdmpyhisat_acc
-class Hexagon_v16i32_v16i32v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddhw
+class Hexagon_v32i32_v16i32v16i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vdmpyhisat_acc
-class Hexagon_v32i32_v32i32v64i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddhw
+class Hexagon_v64i32_v32i32v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v64i32_ty,llvm_i32_ty],
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vrmpyubi
-class Hexagon_v32i32_v32i32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddhw_acc
+class Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vrmpyubi
-class Hexagon_v64i32_v64i32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaddhw_acc
+class Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vrmpyubi_acc
-class Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_valignb
+class Hexagon_v16i32_v16i32v16i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty,llvm_i32_ty],
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vrmpyubi_acc
-class Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vandnqrt
+class Hexagon_custom_v16i32_v64i1i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty,llvm_i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v16i32_ty], [llvm_v64i1_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vasr_into
-class Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vandnqrt
+class Hexagon_custom_v32i32_v128i1i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v32i32_ty], [llvm_v128i1_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vaddcarrysat
-class Hexagon_custom_v16i32_v16i32v16i32v64i1_Intrinsic<
+// tag : V6_vandnqrt_acc
+class Hexagon_custom_v16i32_v16i32v64i1i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v64i1_ty],
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v64i1_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vaddcarrysat
-class Hexagon_custom_v32i32_v32i32v32i32v128i1_Intrinsic_128B<
+// tag : V6_vandnqrt_acc
+class Hexagon_custom_v32i32_v32i32v128i1i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v128i1_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v128i1_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vaddcarry
-class Hexagon_custom_v16i32v64i1_v16i32v16i32v64i1_Intrinsic<
+// tag : V6_vandvnqv
+class Hexagon_custom_v16i32_v64i1v16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v16i32_ty,llvm_v64i1_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v64i1_ty],
+ [llvm_v16i32_ty], [llvm_v64i1_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vaddcarry
-class Hexagon_custom_v32i32v128i1_v32i32v32i32v128i1_Intrinsic_128B<
+// tag : V6_vandvnqv
+class Hexagon_custom_v32i32_v128i1v32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty,llvm_v128i1_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v128i1_ty],
+ [llvm_v32i32_ty], [llvm_v128i1_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vaddubh
-class Hexagon_v32i32_v16i32v16i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vandvrt
+class Hexagon_custom_v64i1_v16i32i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v64i1_ty], [llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vd0
-class Hexagon_v16i32__Intrinsic<string GCCIntSuffix,
+// tag : V6_vandvrt
+class Hexagon_custom_v128i1_v32i32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [],
+ : Hexagon_NonGCC_Intrinsic<
+ [llvm_v128i1_ty], [llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vaddbq
-class Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic<
+// tag : V6_vandvrt_acc
+class Hexagon_custom_v64i1_v64i1v16i32i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v16i32_ty], [llvm_v64i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [llvm_v64i1_ty], [llvm_v64i1_ty,llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vaddbq
-class Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B<
+// tag : V6_vandvrt_acc
+class Hexagon_custom_v128i1_v128i1v32i32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty], [llvm_v128i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ [llvm_v128i1_ty], [llvm_v128i1_ty,llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vabsb
-class Hexagon_v16i32_v16i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaslh
+class Hexagon_v16i32_v16i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v16i32_ty],
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vmpyub
-class Hexagon_v32i32_v16i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vaslh
+class Hexagon_v32i32_v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vmpyub
-class Hexagon_v64i32_v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vassignp
+class Hexagon_v64i32_v64i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_v64i32_ty], [llvm_v64i32_ty],
intr_properties>;
-// tag : V6_vmpyub_acc
-class Hexagon_v32i32_v32i32v16i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vd0
+class Hexagon_v16i32__Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [llvm_v16i32_ty], [],
intr_properties>;
-// tag : V6_vmpyub_acc
-class Hexagon_v64i32_v64i32v32i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vd0
+class Hexagon_v32i32__Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ [llvm_v32i32_ty], [],
intr_properties>;
-// tag : V6_vandqrt
-class Hexagon_custom_v16i32_v64i1i32_Intrinsic<
+// tag : V6_vdd0
+class Hexagon_v64i32__Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v16i32_ty], [llvm_v64i1_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [],
intr_properties>;
-// tag : V6_vandqrt
-class Hexagon_custom_v32i32_v128i1i32_Intrinsic_128B<
+// tag : V6_vdealvdd
+class Hexagon_v32i32_v16i32v16i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty], [llvm_v128i1_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vandqrt_acc
-class Hexagon_custom_v16i32_v16i32v64i1i32_Intrinsic<
+// tag : V6_vdealvdd
+class Hexagon_v64i32_v32i32v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v64i1_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vandqrt_acc
-class Hexagon_custom_v32i32_v32i32v128i1i32_Intrinsic_128B<
+// tag : V6_vdmpybus_dv
+class Hexagon_v64i32_v64i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v128i1_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vandvrt
-class Hexagon_custom_v64i1_v16i32i32_Intrinsic<
+// tag : V6_vdmpyhisat
+class Hexagon_v16i32_v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v64i1_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vandvrt
-class Hexagon_custom_v128i1_v32i32i32_Intrinsic_128B<
+// tag : V6_vdmpyhisat
+class Hexagon_v32i32_v64i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v128i1_ty], [llvm_v32i32_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v64i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vandvrt_acc
-class Hexagon_custom_v64i1_v64i1v16i32i32_Intrinsic<
+// tag : V6_vdmpyhisat_acc
+class Hexagon_v16i32_v16i32v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v64i1_ty], [llvm_v64i1_ty,llvm_v16i32_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vandvrt_acc
-class Hexagon_custom_v128i1_v128i1v32i32i32_Intrinsic_128B<
+// tag : V6_vdmpyhisat_acc
+class Hexagon_v32i32_v32i32v64i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v128i1_ty], [llvm_v128i1_ty,llvm_v32i32_ty,llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v64i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vandvqv
-class Hexagon_custom_v16i32_v64i1v16i32_Intrinsic<
+// tag : V6_vdmpyhvsat_acc
+class Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v16i32_ty], [llvm_v64i1_ty,llvm_v16i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vandvqv
-class Hexagon_custom_v32i32_v128i1v32i32_Intrinsic_128B<
+// tag : V6_vdmpyhvsat_acc
+class Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty], [llvm_v128i1_ty,llvm_v32i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vgtw
+// tag : V6_veqb
class Hexagon_custom_v64i1_v16i32v16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
[llvm_v64i1_ty], [llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vgtw
+// tag : V6_veqb
class Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
[llvm_v128i1_ty], [llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vgtw_and
+// tag : V6_veqb_and
class Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
[llvm_v64i1_ty], [llvm_v64i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vgtw_and
+// tag : V6_veqb_and
class Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
[llvm_v128i1_ty], [llvm_v128i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_pred_scalar2
-class Hexagon_custom_v64i1_i32_Intrinsic<
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v64i1_ty], [llvm_i32_ty],
- intr_properties>;
-
-// tag : V6_pred_scalar2
-class Hexagon_custom_v128i1_i32_Intrinsic_128B<
+// tag : V6_vgathermh
+class Hexagon__ptri32i32v16i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v128i1_ty], [llvm_i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_shuffeqw
-class Hexagon_custom_v64i1_v64i1v64i1_Intrinsic<
+// tag : V6_vgathermh
+class Hexagon__ptri32i32v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v64i1_ty], [llvm_v64i1_ty,llvm_v64i1_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_shuffeqw
-class Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B<
+// tag : V6_vgathermhq
+class Hexagon_custom__ptrv64i1i32i32v16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v128i1_ty], [llvm_v128i1_ty,llvm_v128i1_ty],
+ [], [llvm_ptr_ty,llvm_v64i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_pred_not
-class Hexagon_custom_v64i1_v64i1_Intrinsic<
+// tag : V6_vgathermhq
+class Hexagon_custom__ptrv128i1i32i32v32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v64i1_ty], [llvm_v64i1_ty],
+ [], [llvm_ptr_ty,llvm_v128i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_pred_not
-class Hexagon_custom_v128i1_v128i1_Intrinsic_128B<
+// tag : V6_vgathermhw
+class Hexagon__ptri32i32v64i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [llvm_v128i1_ty], [llvm_v128i1_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,llvm_v64i32_ty],
intr_properties>;
-// tag : V6_vswap
-class Hexagon_custom_v32i32_v64i1v16i32v16i32_Intrinsic<
+// tag : V6_vgathermhwq
+class Hexagon_custom__ptrv64i1i32i32v32i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty], [llvm_v64i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
+ [], [llvm_ptr_ty,llvm_v64i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vswap
-class Hexagon_custom_v64i32_v128i1v32i32v32i32_Intrinsic_128B<
+// tag : V6_vgathermhwq
+class Hexagon_custom__ptrv128i1i32i32v64i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v64i32_ty], [llvm_v128i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
- intr_properties>;
-
-// tag : V6_vshuffvdd
-class Hexagon_v32i32_v16i32v16i32i32_Intrinsic<string GCCIntSuffix,
- list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
+ [], [llvm_ptr_ty,llvm_v128i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v64i32_ty],
intr_properties>;
-// tag : V6_extractw
-class Hexagon_i32_v16i32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vlut4
+class Hexagon_v16i32_v16i32i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
+ [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i64_ty],
intr_properties>;
-// tag : V6_lvsplatw
-class Hexagon_v16i32_i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vlut4
+class Hexagon_v32i32_v32i32i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_i32_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i64_ty],
intr_properties>;
// tag : V6_vlutvvb_oracc
@@ -939,6 +841,13 @@ class Hexagon_v32i32_v32i32v16i32v16i32i32_Intrinsic<string GCCIntSuffix,
[llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
+// tag : V6_vlutvwh_oracc
+class Hexagon_v64i32_v64i32v32i32v32i32i32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty],
+ intr_properties>;
+
// tag : V6_vmpahhsat
class Hexagon_v16i32_v16i32v16i32i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
@@ -946,91 +855,119 @@ class Hexagon_v16i32_v16i32v16i32i64_Intrinsic<string GCCIntSuffix,
[llvm_v16i32_ty], [llvm_v16i32_ty,llvm_v16i32_ty,llvm_i64_ty],
intr_properties>;
-// tag : V6_vlut4
-class Hexagon_v16i32_v16i32i64_Intrinsic<string GCCIntSuffix,
+// tag : V6_vmpahhsat
+class Hexagon_v32i32_v32i32v32i32i64_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v16i32_ty,llvm_i64_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i64_ty],
intr_properties>;
-// tag : V6_hi
-class Hexagon_v16i32_v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vmpybus
+class Hexagon_v32i32_v16i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [llvm_v16i32_ty], [llvm_v32i32_ty],
+ [llvm_v32i32_ty], [llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vgathermw
-class Hexagon__ptri32i32v16i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vmpybus
+class Hexagon_v64i32_v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,llvm_v16i32_ty],
+ [llvm_v64i32_ty], [llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vgathermw
-class Hexagon__ptri32i32v32i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vmpybus_acc
+class Hexagon_v32i32_v32i32v16i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,llvm_v32i32_ty],
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vgathermhw
-class Hexagon__ptri32i32v64i32_Intrinsic<string GCCIntSuffix,
+// tag : V6_vmpybus_acc
+class Hexagon_v64i32_v64i32v32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
- [], [llvm_ptr_ty,llvm_i32_ty,llvm_i32_ty,llvm_v64i32_ty],
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vgathermwq
-class Hexagon_custom__ptrv64i1i32i32v16i32_Intrinsic<
+// tag : V6_vprefixqb
+class Hexagon_custom_v16i32_v64i1_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [], [llvm_ptr_ty,llvm_v64i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v16i32_ty],
+ [llvm_v16i32_ty], [llvm_v64i1_ty],
intr_properties>;
-// tag : V6_vgathermwq
-class Hexagon_custom__ptrv128i1i32i32v32i32_Intrinsic_128B<
+// tag : V6_vprefixqb
+class Hexagon_custom_v32i32_v128i1_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [], [llvm_ptr_ty,llvm_v128i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v32i32_ty],
+ [llvm_v32i32_ty], [llvm_v128i1_ty],
intr_properties>;
-// tag : V6_vgathermhwq
-class Hexagon_custom__ptrv64i1i32i32v32i32_Intrinsic<
+// tag : V6_vrmpybusi
+class Hexagon_v32i32_v32i32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [], [llvm_ptr_ty,llvm_v64i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v32i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_i32_ty,llvm_i32_ty],
intr_properties>;
-// tag : V6_vgathermhwq
-class Hexagon_custom__ptrv128i1i32i32v64i32_Intrinsic_128B<
+// tag : V6_vrmpybusi
+class Hexagon_v64i32_v64i32i32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
- : Hexagon_NonGCC_Intrinsic<
- [], [llvm_ptr_ty,llvm_v128i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v64i32_ty],
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_i32_ty,llvm_i32_ty],
+ intr_properties>;
+
+// tag : V6_vrmpybusi_acc
+class Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v32i32_ty,llvm_i32_ty,llvm_i32_ty],
+ intr_properties>;
+
+// tag : V6_vrmpybusi_acc
+class Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v64i32_ty,llvm_i32_ty,llvm_i32_ty],
+ intr_properties>;
+
+// tag : V6_vsb
+class Hexagon_v32i32_v16i32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v16i32_ty],
+ intr_properties>;
+
+// tag : V6_vsb
+class Hexagon_v64i32_v32i32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vscattermw
+// tag : V6_vscattermh
class Hexagon__i32i32v16i32v16i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
[], [llvm_i32_ty,llvm_i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vscattermw
+// tag : V6_vscattermh
class Hexagon__i32i32v32i32v32i32_Intrinsic<string GCCIntSuffix,
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_Intrinsic<GCCIntSuffix,
[], [llvm_i32_ty,llvm_i32_ty,llvm_v32i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vscattermwq
+// tag : V6_vscattermhq
class Hexagon_custom__v64i1i32i32v16i32v16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
[], [llvm_v64i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vscattermwq
+// tag : V6_vscattermhq
class Hexagon_custom__v128i1i32i32v32i32v32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
@@ -1065,927 +1002,1027 @@ class Hexagon_custom__v128i1i32i32v64i32v32i32_Intrinsic_128B<
[], [llvm_v128i1_ty,llvm_i32_ty,llvm_i32_ty,llvm_v64i32_ty,llvm_v32i32_ty],
intr_properties>;
-// tag : V6_vprefixqb
-class Hexagon_custom_v16i32_v64i1_Intrinsic<
+// tag : V6_vswap
+class Hexagon_custom_v32i32_v64i1v16i32v16i32_Intrinsic<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v16i32_ty], [llvm_v64i1_ty],
+ [llvm_v32i32_ty], [llvm_v64i1_ty,llvm_v16i32_ty,llvm_v16i32_ty],
intr_properties>;
-// tag : V6_vprefixqb
-class Hexagon_custom_v32i32_v128i1_Intrinsic_128B<
+// tag : V6_vswap
+class Hexagon_custom_v64i32_v128i1v32i32v32i32_Intrinsic_128B<
list<IntrinsicProperty> intr_properties = [IntrNoMem]>
: Hexagon_NonGCC_Intrinsic<
- [llvm_v32i32_ty], [llvm_v128i1_ty],
+ [llvm_v64i32_ty], [llvm_v128i1_ty,llvm_v32i32_ty,llvm_v32i32_ty],
+ intr_properties>;
+
+// tag : V6_vunpackob
+class Hexagon_v32i32_v32i32v16i32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v32i32_ty], [llvm_v32i32_ty,llvm_v16i32_ty],
+ intr_properties>;
+
+// tag : V6_vunpackob
+class Hexagon_v64i32_v64i32v32i32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_v64i32_ty], [llvm_v64i32_ty,llvm_v32i32_ty],
+ intr_properties>;
+
+// tag : Y2_dccleana
+class Hexagon__ptr_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty],
+ intr_properties>;
+
+// tag : Y4_l2fetch
+class Hexagon__ptri32_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i32_ty],
+ intr_properties>;
+
+// tag : Y5_l2fetch
+class Hexagon__ptri64_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_i64_ty],
+ intr_properties>;
+
+// tag : Y6_dmlink
+class Hexagon__ptrptr_Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [], [llvm_ptr_ty,llvm_ptr_ty],
+ intr_properties>;
+
+// tag : Y6_dmpause
+class Hexagon_i32__Intrinsic<string GCCIntSuffix,
+ list<IntrinsicProperty> intr_properties = [IntrNoMem]>
+ : Hexagon_Intrinsic<GCCIntSuffix,
+ [llvm_i32_ty], [],
intr_properties>;
// V5 Scalar Instructions.
-def int_hexagon_C2_cmpeq :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpeq">;
+def int_hexagon_A2_abs :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_abs">;
-def int_hexagon_C2_cmpgt :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgt">;
+def int_hexagon_A2_absp :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_absp">;
-def int_hexagon_C2_cmpgtu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgtu">;
+def int_hexagon_A2_abssat :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_abssat">;
-def int_hexagon_C2_cmpeqp :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_C2_cmpeqp">;
+def int_hexagon_A2_add :
+Hexagon_custom_i32_i32i32_Intrinsic;
-def int_hexagon_C2_cmpgtp :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_C2_cmpgtp">;
+def int_hexagon_A2_addh_h16_hh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_hh">;
-def int_hexagon_C2_cmpgtup :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_C2_cmpgtup">;
+def int_hexagon_A2_addh_h16_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_hl">;
-def int_hexagon_A4_rcmpeqi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_addh_h16_lh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_lh">;
-def int_hexagon_A4_rcmpneqi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpneqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_addh_h16_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_ll">;
-def int_hexagon_A4_rcmpeq :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpeq">;
+def int_hexagon_A2_addh_h16_sat_hh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_hh">;
-def int_hexagon_A4_rcmpneq :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpneq">;
+def int_hexagon_A2_addh_h16_sat_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_hl">;
-def int_hexagon_C2_bitsset :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_bitsset">;
+def int_hexagon_A2_addh_h16_sat_lh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_lh">;
-def int_hexagon_C2_bitsclr :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_bitsclr">;
+def int_hexagon_A2_addh_h16_sat_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_ll">;
-def int_hexagon_C4_nbitsset :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_nbitsset">;
+def int_hexagon_A2_addh_l16_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_hl">;
-def int_hexagon_C4_nbitsclr :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_nbitsclr">;
+def int_hexagon_A2_addh_l16_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_ll">;
-def int_hexagon_C2_cmpeqi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_addh_l16_sat_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_sat_hl">;
-def int_hexagon_C2_cmpgti :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_addh_l16_sat_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_sat_ll">;
-def int_hexagon_C2_cmpgtui :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_addi :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_C2_cmpgei :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgei", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_addp :
+Hexagon_custom_i64_i64i64_Intrinsic;
-def int_hexagon_C2_cmpgeui :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgeui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_addpsat :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_addpsat">;
-def int_hexagon_C2_cmplt :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmplt">;
+def int_hexagon_A2_addsat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addsat">;
-def int_hexagon_C2_cmpltu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpltu">;
+def int_hexagon_A2_addsp :
+Hexagon_i64_i32i64_Intrinsic<"HEXAGON_A2_addsp">;
-def int_hexagon_C2_bitsclri :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_bitsclri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_and :
+Hexagon_custom_i32_i32i32_Intrinsic;
-def int_hexagon_C4_nbitsclri :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_nbitsclri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_andir :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_C4_cmpneqi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmpneqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_andp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_andp">;
-def int_hexagon_C4_cmpltei :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmpltei", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_aslh :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_aslh">;
-def int_hexagon_C4_cmplteui :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmplteui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_asrh :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_asrh">;
-def int_hexagon_C4_cmpneq :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmpneq">;
+def int_hexagon_A2_combine_hh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_hh">;
-def int_hexagon_C4_cmplte :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmplte">;
+def int_hexagon_A2_combine_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_hl">;
-def int_hexagon_C4_cmplteu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmplteu">;
+def int_hexagon_A2_combine_lh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_lh">;
-def int_hexagon_C2_and :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_and">;
+def int_hexagon_A2_combine_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_ll">;
-def int_hexagon_C2_or :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_or">;
+def int_hexagon_A2_combineii :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A2_combineii", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_C2_xor :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_xor">;
+def int_hexagon_A2_combinew :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A2_combinew">;
-def int_hexagon_C2_andn :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_andn">;
+def int_hexagon_A2_max :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_max">;
-def int_hexagon_C2_not :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_not">;
+def int_hexagon_A2_maxp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_maxp">;
-def int_hexagon_C2_orn :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_orn">;
+def int_hexagon_A2_maxu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_maxu">;
-def int_hexagon_C4_and_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_and">;
+def int_hexagon_A2_maxup :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_maxup">;
-def int_hexagon_C4_and_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_or">;
+def int_hexagon_A2_min :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_min">;
-def int_hexagon_C4_or_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_and">;
+def int_hexagon_A2_minp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_minp">;
-def int_hexagon_C4_or_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_or">;
+def int_hexagon_A2_minu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_minu">;
-def int_hexagon_C4_and_andn :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_andn">;
+def int_hexagon_A2_minup :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_minup">;
-def int_hexagon_C4_and_orn :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_orn">;
+def int_hexagon_A2_neg :
+Hexagon_custom_i32_i32_Intrinsic;
-def int_hexagon_C4_or_andn :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_andn">;
+def int_hexagon_A2_negp :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_negp">;
-def int_hexagon_C4_or_orn :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_orn">;
+def int_hexagon_A2_negsat :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_negsat">;
-def int_hexagon_C2_pxfer_map :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_pxfer_map">;
+def int_hexagon_A2_not :
+Hexagon_custom_i32_i32_Intrinsic;
-def int_hexagon_C2_any8 :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_any8">;
+def int_hexagon_A2_notp :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_notp">;
-def int_hexagon_C2_all8 :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_all8">;
+def int_hexagon_A2_or :
+Hexagon_custom_i32_i32i32_Intrinsic;
-def int_hexagon_C2_vitpack :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_vitpack">;
+def int_hexagon_A2_orir :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_C2_mux :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_mux">;
+def int_hexagon_A2_orp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_orp">;
-def int_hexagon_C2_muxii :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_muxii", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_A2_roundsat :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_A2_roundsat">;
-def int_hexagon_C2_muxir :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_muxir", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_A2_sat :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_A2_sat">;
-def int_hexagon_C2_muxri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_muxri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_satb :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_satb">;
-def int_hexagon_C2_vmux :
-Hexagon_i64_i32i64i64_Intrinsic<"HEXAGON_C2_vmux">;
+def int_hexagon_A2_sath :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_sath">;
-def int_hexagon_C2_mask :
-Hexagon_i64_i32_Intrinsic<"HEXAGON_C2_mask">;
+def int_hexagon_A2_satub :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_satub">;
-def int_hexagon_A2_vcmpbeq :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpbeq">;
+def int_hexagon_A2_satuh :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_satuh">;
-def int_hexagon_A4_vcmpbeqi :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpbeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_sub :
+Hexagon_custom_i32_i32i32_Intrinsic;
-def int_hexagon_A4_vcmpbeq_any :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A4_vcmpbeq_any">;
+def int_hexagon_A2_subh_h16_hh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_hh">;
-def int_hexagon_A2_vcmpbgtu :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpbgtu">;
+def int_hexagon_A2_subh_h16_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_hl">;
-def int_hexagon_A4_vcmpbgtui :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpbgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_subh_h16_lh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_lh">;
-def int_hexagon_A4_vcmpbgt :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A4_vcmpbgt">;
+def int_hexagon_A2_subh_h16_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_ll">;
-def int_hexagon_A4_vcmpbgti :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpbgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_subh_h16_sat_hh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_hh">;
-def int_hexagon_A4_cmpbeq :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbeq">;
+def int_hexagon_A2_subh_h16_sat_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_hl">;
-def int_hexagon_A4_cmpbeqi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_subh_h16_sat_lh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_lh">;
-def int_hexagon_A4_cmpbgtu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgtu">;
+def int_hexagon_A2_subh_h16_sat_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_ll">;
-def int_hexagon_A4_cmpbgtui :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_subh_l16_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_hl">;
-def int_hexagon_A4_cmpbgt :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgt">;
+def int_hexagon_A2_subh_l16_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_ll">;
-def int_hexagon_A4_cmpbgti :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_subh_l16_sat_hl :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_sat_hl">;
-def int_hexagon_A2_vcmpheq :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpheq">;
+def int_hexagon_A2_subh_l16_sat_ll :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_sat_ll">;
-def int_hexagon_A2_vcmphgt :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmphgt">;
+def int_hexagon_A2_subp :
+Hexagon_custom_i64_i64i64_Intrinsic;
-def int_hexagon_A2_vcmphgtu :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmphgtu">;
+def int_hexagon_A2_subri :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_A4_vcmpheqi :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpheqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_subsat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subsat">;
-def int_hexagon_A4_vcmphgti :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmphgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_svaddh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svaddh">;
-def int_hexagon_A4_vcmphgtui :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmphgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_svaddhs :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svaddhs">;
-def int_hexagon_A4_cmpheq :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpheq">;
+def int_hexagon_A2_svadduhs :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svadduhs">;
-def int_hexagon_A4_cmphgt :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgt">;
+def int_hexagon_A2_svavgh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svavgh">;
-def int_hexagon_A4_cmphgtu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgtu">;
+def int_hexagon_A2_svavghs :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svavghs">;
-def int_hexagon_A4_cmpheqi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpheqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_svnavgh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svnavgh">;
-def int_hexagon_A4_cmphgti :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_svsubh :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svsubh">;
-def int_hexagon_A4_cmphgtui :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_svsubhs :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svsubhs">;
-def int_hexagon_A2_vcmpweq :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpweq">;
+def int_hexagon_A2_svsubuhs :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svsubuhs">;
-def int_hexagon_A2_vcmpwgt :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpwgt">;
+def int_hexagon_A2_swiz :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_swiz">;
-def int_hexagon_A2_vcmpwgtu :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpwgtu">;
+def int_hexagon_A2_sxtb :
+Hexagon_custom_i32_i32_Intrinsic;
-def int_hexagon_A4_vcmpweqi :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpweqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_sxth :
+Hexagon_custom_i32_i32_Intrinsic;
-def int_hexagon_A4_vcmpwgti :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpwgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_sxtw :
+Hexagon_i64_i32_Intrinsic<"HEXAGON_A2_sxtw">;
-def int_hexagon_A4_vcmpwgtui :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpwgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_A2_tfr :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_tfr">;
-def int_hexagon_A4_boundscheck :
-Hexagon_i32_i32i64_Intrinsic<"HEXAGON_A4_boundscheck">;
+def int_hexagon_A2_tfrih :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_tfrih", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_A4_tlbmatch :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_tlbmatch">;
+def int_hexagon_A2_tfril :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_tfril", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_C2_tfrpr :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_tfrpr">;
+def int_hexagon_A2_tfrp :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_tfrp">;
-def int_hexagon_C2_tfrrp :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_tfrrp">;
+def int_hexagon_A2_tfrpi :
+Hexagon_i64_i32_Intrinsic<"HEXAGON_A2_tfrpi", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_C4_fastcorner9 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_fastcorner9">;
+def int_hexagon_A2_tfrsi :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_tfrsi", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_C4_fastcorner9_not :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_fastcorner9_not">;
+def int_hexagon_A2_vabsh :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabsh">;
-def int_hexagon_M2_mpy_acc_hh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hh_s0">;
+def int_hexagon_A2_vabshsat :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabshsat">;
-def int_hexagon_M2_mpy_acc_hh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hh_s1">;
+def int_hexagon_A2_vabsw :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabsw">;
-def int_hexagon_M2_mpy_acc_hl_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hl_s0">;
+def int_hexagon_A2_vabswsat :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabswsat">;
-def int_hexagon_M2_mpy_acc_hl_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hl_s1">;
+def int_hexagon_A2_vaddb_map :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddb_map">;
-def int_hexagon_M2_mpy_acc_lh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_lh_s0">;
+def int_hexagon_A2_vaddh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddh">;
-def int_hexagon_M2_mpy_acc_lh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_lh_s1">;
+def int_hexagon_A2_vaddhs :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddhs">;
-def int_hexagon_M2_mpy_acc_ll_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_ll_s0">;
+def int_hexagon_A2_vaddub :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddub">;
-def int_hexagon_M2_mpy_acc_ll_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_ll_s1">;
+def int_hexagon_A2_vaddubs :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddubs">;
-def int_hexagon_M2_mpy_nac_hh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hh_s0">;
+def int_hexagon_A2_vadduhs :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vadduhs">;
-def int_hexagon_M2_mpy_nac_hh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hh_s1">;
+def int_hexagon_A2_vaddw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddw">;
-def int_hexagon_M2_mpy_nac_hl_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hl_s0">;
+def int_hexagon_A2_vaddws :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddws">;
-def int_hexagon_M2_mpy_nac_hl_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hl_s1">;
+def int_hexagon_A2_vavgh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgh">;
-def int_hexagon_M2_mpy_nac_lh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_lh_s0">;
+def int_hexagon_A2_vavghcr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavghcr">;
-def int_hexagon_M2_mpy_nac_lh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_lh_s1">;
+def int_hexagon_A2_vavghr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavghr">;
-def int_hexagon_M2_mpy_nac_ll_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_ll_s0">;
+def int_hexagon_A2_vavgub :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgub">;
-def int_hexagon_M2_mpy_nac_ll_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_ll_s1">;
+def int_hexagon_A2_vavgubr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgubr">;
-def int_hexagon_M2_mpy_acc_sat_hh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hh_s0">;
+def int_hexagon_A2_vavguh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguh">;
-def int_hexagon_M2_mpy_acc_sat_hh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hh_s1">;
+def int_hexagon_A2_vavguhr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguhr">;
-def int_hexagon_M2_mpy_acc_sat_hl_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hl_s0">;
+def int_hexagon_A2_vavguw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguw">;
-def int_hexagon_M2_mpy_acc_sat_hl_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hl_s1">;
+def int_hexagon_A2_vavguwr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguwr">;
-def int_hexagon_M2_mpy_acc_sat_lh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_lh_s0">;
+def int_hexagon_A2_vavgw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgw">;
-def int_hexagon_M2_mpy_acc_sat_lh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_lh_s1">;
+def int_hexagon_A2_vavgwcr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgwcr">;
-def int_hexagon_M2_mpy_acc_sat_ll_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_ll_s0">;
+def int_hexagon_A2_vavgwr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgwr">;
-def int_hexagon_M2_mpy_acc_sat_ll_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_ll_s1">;
+def int_hexagon_A2_vcmpbeq :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpbeq">;
-def int_hexagon_M2_mpy_nac_sat_hh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hh_s0">;
+def int_hexagon_A2_vcmpbgtu :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpbgtu">;
-def int_hexagon_M2_mpy_nac_sat_hh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hh_s1">;
+def int_hexagon_A2_vcmpheq :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpheq">;
-def int_hexagon_M2_mpy_nac_sat_hl_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hl_s0">;
+def int_hexagon_A2_vcmphgt :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmphgt">;
-def int_hexagon_M2_mpy_nac_sat_hl_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hl_s1">;
+def int_hexagon_A2_vcmphgtu :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmphgtu">;
-def int_hexagon_M2_mpy_nac_sat_lh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_lh_s0">;
+def int_hexagon_A2_vcmpweq :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpweq">;
-def int_hexagon_M2_mpy_nac_sat_lh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_lh_s1">;
+def int_hexagon_A2_vcmpwgt :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpwgt">;
-def int_hexagon_M2_mpy_nac_sat_ll_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_ll_s0">;
+def int_hexagon_A2_vcmpwgtu :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A2_vcmpwgtu">;
-def int_hexagon_M2_mpy_nac_sat_ll_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_ll_s1">;
+def int_hexagon_A2_vconj :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vconj">;
-def int_hexagon_M2_mpy_hh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hh_s0">;
+def int_hexagon_A2_vmaxb :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxb">;
-def int_hexagon_M2_mpy_hh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hh_s1">;
+def int_hexagon_A2_vmaxh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxh">;
-def int_hexagon_M2_mpy_hl_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hl_s0">;
+def int_hexagon_A2_vmaxub :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxub">;
-def int_hexagon_M2_mpy_hl_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hl_s1">;
+def int_hexagon_A2_vmaxuh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxuh">;
-def int_hexagon_M2_mpy_lh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_lh_s0">;
+def int_hexagon_A2_vmaxuw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxuw">;
-def int_hexagon_M2_mpy_lh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_lh_s1">;
+def int_hexagon_A2_vmaxw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxw">;
-def int_hexagon_M2_mpy_ll_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_ll_s0">;
+def int_hexagon_A2_vminb :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminb">;
-def int_hexagon_M2_mpy_ll_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_ll_s1">;
+def int_hexagon_A2_vminh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminh">;
-def int_hexagon_M2_mpy_sat_hh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hh_s0">;
+def int_hexagon_A2_vminub :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminub">;
-def int_hexagon_M2_mpy_sat_hh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hh_s1">;
+def int_hexagon_A2_vminuh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminuh">;
-def int_hexagon_M2_mpy_sat_hl_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hl_s0">;
+def int_hexagon_A2_vminuw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminuw">;
-def int_hexagon_M2_mpy_sat_hl_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hl_s1">;
+def int_hexagon_A2_vminw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminw">;
-def int_hexagon_M2_mpy_sat_lh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_lh_s0">;
+def int_hexagon_A2_vnavgh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgh">;
-def int_hexagon_M2_mpy_sat_lh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_lh_s1">;
+def int_hexagon_A2_vnavghcr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavghcr">;
-def int_hexagon_M2_mpy_sat_ll_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_ll_s0">;
+def int_hexagon_A2_vnavghr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavghr">;
-def int_hexagon_M2_mpy_sat_ll_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_ll_s1">;
+def int_hexagon_A2_vnavgw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgw">;
-def int_hexagon_M2_mpy_rnd_hh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hh_s0">;
+def int_hexagon_A2_vnavgwcr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgwcr">;
-def int_hexagon_M2_mpy_rnd_hh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hh_s1">;
+def int_hexagon_A2_vnavgwr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgwr">;
-def int_hexagon_M2_mpy_rnd_hl_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hl_s0">;
+def int_hexagon_A2_vraddub :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vraddub">;
-def int_hexagon_M2_mpy_rnd_hl_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hl_s1">;
+def int_hexagon_A2_vraddub_acc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_A2_vraddub_acc">;
-def int_hexagon_M2_mpy_rnd_lh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_lh_s0">;
+def int_hexagon_A2_vrsadub :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vrsadub">;
-def int_hexagon_M2_mpy_rnd_lh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_lh_s1">;
+def int_hexagon_A2_vrsadub_acc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_A2_vrsadub_acc">;
-def int_hexagon_M2_mpy_rnd_ll_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_ll_s0">;
+def int_hexagon_A2_vsubb_map :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubb_map">;
-def int_hexagon_M2_mpy_rnd_ll_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_ll_s1">;
+def int_hexagon_A2_vsubh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubh">;
-def int_hexagon_M2_mpy_sat_rnd_hh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hh_s0">;
+def int_hexagon_A2_vsubhs :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubhs">;
-def int_hexagon_M2_mpy_sat_rnd_hh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hh_s1">;
+def int_hexagon_A2_vsubub :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubub">;
-def int_hexagon_M2_mpy_sat_rnd_hl_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hl_s0">;
+def int_hexagon_A2_vsububs :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsububs">;
-def int_hexagon_M2_mpy_sat_rnd_hl_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hl_s1">;
+def int_hexagon_A2_vsubuhs :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubuhs">;
-def int_hexagon_M2_mpy_sat_rnd_lh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_lh_s0">;
+def int_hexagon_A2_vsubw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubw">;
-def int_hexagon_M2_mpy_sat_rnd_lh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_lh_s1">;
+def int_hexagon_A2_vsubws :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubws">;
-def int_hexagon_M2_mpy_sat_rnd_ll_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_ll_s0">;
+def int_hexagon_A2_xor :
+Hexagon_custom_i32_i32i32_Intrinsic;
-def int_hexagon_M2_mpy_sat_rnd_ll_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_ll_s1">;
+def int_hexagon_A2_xorp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_xorp">;
-def int_hexagon_M2_mpyd_acc_hh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hh_s0">;
+def int_hexagon_A2_zxtb :
+Hexagon_custom_i32_i32_Intrinsic;
-def int_hexagon_M2_mpyd_acc_hh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hh_s1">;
+def int_hexagon_A2_zxth :
+Hexagon_custom_i32_i32_Intrinsic;
-def int_hexagon_M2_mpyd_acc_hl_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hl_s0">;
+def int_hexagon_A4_andn :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_andn">;
-def int_hexagon_M2_mpyd_acc_hl_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hl_s1">;
+def int_hexagon_A4_andnp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A4_andnp">;
-def int_hexagon_M2_mpyd_acc_lh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_lh_s0">;
+def int_hexagon_A4_bitsplit :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_bitsplit">;
-def int_hexagon_M2_mpyd_acc_lh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_lh_s1">;
+def int_hexagon_A4_bitspliti :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_bitspliti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_acc_ll_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_ll_s0">;
+def int_hexagon_A4_boundscheck :
+Hexagon_i32_i32i64_Intrinsic<"HEXAGON_A4_boundscheck">;
-def int_hexagon_M2_mpyd_acc_ll_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_ll_s1">;
+def int_hexagon_A4_cmpbeq :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbeq">;
-def int_hexagon_M2_mpyd_nac_hh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hh_s0">;
+def int_hexagon_A4_cmpbeqi :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_nac_hh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hh_s1">;
+def int_hexagon_A4_cmpbgt :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgt">;
-def int_hexagon_M2_mpyd_nac_hl_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hl_s0">;
+def int_hexagon_A4_cmpbgti :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_nac_hl_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hl_s1">;
+def int_hexagon_A4_cmpbgtu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgtu">;
-def int_hexagon_M2_mpyd_nac_lh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_lh_s0">;
+def int_hexagon_A4_cmpbgtui :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpbgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_nac_lh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_lh_s1">;
+def int_hexagon_A4_cmpheq :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpheq">;
-def int_hexagon_M2_mpyd_nac_ll_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_ll_s0">;
+def int_hexagon_A4_cmpheqi :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmpheqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_nac_ll_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_ll_s1">;
+def int_hexagon_A4_cmphgt :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgt">;
-def int_hexagon_M2_mpyd_hh_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hh_s0">;
+def int_hexagon_A4_cmphgti :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_hh_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hh_s1">;
+def int_hexagon_A4_cmphgtu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgtu">;
-def int_hexagon_M2_mpyd_hl_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hl_s0">;
+def int_hexagon_A4_cmphgtui :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cmphgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_hl_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hl_s1">;
+def int_hexagon_A4_combineir :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_combineir", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_M2_mpyd_lh_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_lh_s0">;
+def int_hexagon_A4_combineri :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_combineri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_lh_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_lh_s1">;
+def int_hexagon_A4_cround_ri :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cround_ri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_ll_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_ll_s0">;
+def int_hexagon_A4_cround_rr :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cround_rr">;
-def int_hexagon_M2_mpyd_ll_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_ll_s1">;
+def int_hexagon_A4_modwrapu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_modwrapu">;
-def int_hexagon_M2_mpyd_rnd_hh_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hh_s0">;
+def int_hexagon_A4_orn :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_orn">;
-def int_hexagon_M2_mpyd_rnd_hh_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hh_s1">;
+def int_hexagon_A4_ornp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A4_ornp">;
-def int_hexagon_M2_mpyd_rnd_hl_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hl_s0">;
+def int_hexagon_A4_rcmpeq :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpeq">;
-def int_hexagon_M2_mpyd_rnd_hl_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hl_s1">;
+def int_hexagon_A4_rcmpeqi :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_rnd_lh_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_lh_s0">;
+def int_hexagon_A4_rcmpneq :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpneq">;
-def int_hexagon_M2_mpyd_rnd_lh_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_lh_s1">;
+def int_hexagon_A4_rcmpneqi :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_rcmpneqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_rnd_ll_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_ll_s0">;
+def int_hexagon_A4_round_ri :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_ri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyd_rnd_ll_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_ll_s1">;
+def int_hexagon_A4_round_ri_sat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_ri_sat", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_acc_hh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hh_s0">;
+def int_hexagon_A4_round_rr :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_rr">;
-def int_hexagon_M2_mpyu_acc_hh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hh_s1">;
+def int_hexagon_A4_round_rr_sat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_rr_sat">;
-def int_hexagon_M2_mpyu_acc_hl_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hl_s0">;
+def int_hexagon_A4_tlbmatch :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_tlbmatch">;
-def int_hexagon_M2_mpyu_acc_hl_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hl_s1">;
+def int_hexagon_A4_vcmpbeq_any :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A4_vcmpbeq_any">;
-def int_hexagon_M2_mpyu_acc_lh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_lh_s0">;
+def int_hexagon_A4_vcmpbeqi :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpbeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_acc_lh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_lh_s1">;
+def int_hexagon_A4_vcmpbgt :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A4_vcmpbgt">;
-def int_hexagon_M2_mpyu_acc_ll_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_ll_s0">;
+def int_hexagon_A4_vcmpbgti :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpbgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_acc_ll_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_ll_s1">;
+def int_hexagon_A4_vcmpbgtui :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpbgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_nac_hh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hh_s0">;
+def int_hexagon_A4_vcmpheqi :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpheqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_nac_hh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hh_s1">;
+def int_hexagon_A4_vcmphgti :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmphgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_nac_hl_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hl_s0">;
+def int_hexagon_A4_vcmphgtui :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmphgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_nac_hl_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hl_s1">;
+def int_hexagon_A4_vcmpweqi :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpweqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_nac_lh_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_lh_s0">;
+def int_hexagon_A4_vcmpwgti :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpwgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_nac_lh_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_lh_s1">;
+def int_hexagon_A4_vcmpwgtui :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_A4_vcmpwgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyu_nac_ll_s0 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_ll_s0">;
+def int_hexagon_A4_vrmaxh :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxh">;
-def int_hexagon_M2_mpyu_nac_ll_s1 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_ll_s1">;
+def int_hexagon_A4_vrmaxuh :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxuh">;
-def int_hexagon_M2_mpyu_hh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hh_s0">;
+def int_hexagon_A4_vrmaxuw :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxuw">;
-def int_hexagon_M2_mpyu_hh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hh_s1">;
+def int_hexagon_A4_vrmaxw :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxw">;
-def int_hexagon_M2_mpyu_hl_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hl_s0">;
+def int_hexagon_A4_vrminh :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminh">;
-def int_hexagon_M2_mpyu_hl_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hl_s1">;
+def int_hexagon_A4_vrminuh :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminuh">;
-def int_hexagon_M2_mpyu_lh_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_lh_s0">;
+def int_hexagon_A4_vrminuw :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminuw">;
-def int_hexagon_M2_mpyu_lh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_lh_s1">;
+def int_hexagon_A4_vrminw :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminw">;
-def int_hexagon_M2_mpyu_ll_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_ll_s0">;
+def int_hexagon_A5_vaddhubs :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A5_vaddhubs">;
-def int_hexagon_M2_mpyu_ll_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_ll_s1">;
+def int_hexagon_C2_all8 :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_all8">;
-def int_hexagon_M2_mpyud_acc_hh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hh_s0">;
+def int_hexagon_C2_and :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_and">;
-def int_hexagon_M2_mpyud_acc_hh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hh_s1">;
+def int_hexagon_C2_andn :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_andn">;
-def int_hexagon_M2_mpyud_acc_hl_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hl_s0">;
+def int_hexagon_C2_any8 :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_any8">;
-def int_hexagon_M2_mpyud_acc_hl_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hl_s1">;
+def int_hexagon_C2_bitsclr :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_bitsclr">;
-def int_hexagon_M2_mpyud_acc_lh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_lh_s0">;
+def int_hexagon_C2_bitsclri :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_bitsclri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyud_acc_lh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_lh_s1">;
+def int_hexagon_C2_bitsset :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_bitsset">;
-def int_hexagon_M2_mpyud_acc_ll_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_ll_s0">;
+def int_hexagon_C2_cmpeq :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpeq">;
-def int_hexagon_M2_mpyud_acc_ll_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_ll_s1">;
+def int_hexagon_C2_cmpeqi :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpeqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyud_nac_hh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hh_s0">;
+def int_hexagon_C2_cmpeqp :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_C2_cmpeqp">;
-def int_hexagon_M2_mpyud_nac_hh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hh_s1">;
+def int_hexagon_C2_cmpgei :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgei", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyud_nac_hl_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hl_s0">;
+def int_hexagon_C2_cmpgeui :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgeui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyud_nac_hl_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hl_s1">;
+def int_hexagon_C2_cmpgt :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgt">;
-def int_hexagon_M2_mpyud_nac_lh_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_lh_s0">;
+def int_hexagon_C2_cmpgti :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyud_nac_lh_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_lh_s1">;
+def int_hexagon_C2_cmpgtp :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_C2_cmpgtp">;
-def int_hexagon_M2_mpyud_nac_ll_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_ll_s0">;
+def int_hexagon_C2_cmpgtu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgtu">;
-def int_hexagon_M2_mpyud_nac_ll_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_ll_s1">;
+def int_hexagon_C2_cmpgtui :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpgtui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyud_hh_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hh_s0">;
+def int_hexagon_C2_cmpgtup :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_C2_cmpgtup">;
-def int_hexagon_M2_mpyud_hh_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hh_s1">;
+def int_hexagon_C2_cmplt :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmplt">;
-def int_hexagon_M2_mpyud_hl_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hl_s0">;
+def int_hexagon_C2_cmpltu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_cmpltu">;
-def int_hexagon_M2_mpyud_hl_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hl_s1">;
+def int_hexagon_C2_mask :
+Hexagon_i64_i32_Intrinsic<"HEXAGON_C2_mask">;
-def int_hexagon_M2_mpyud_lh_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_lh_s0">;
+def int_hexagon_C2_mux :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_mux">;
-def int_hexagon_M2_mpyud_lh_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_lh_s1">;
+def int_hexagon_C2_muxii :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_muxii", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_M2_mpyud_ll_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_ll_s0">;
+def int_hexagon_C2_muxir :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_muxir", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_M2_mpyud_ll_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_ll_s1">;
+def int_hexagon_C2_muxri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C2_muxri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpysmi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpysmi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_C2_not :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_not">;
-def int_hexagon_M2_macsip :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_macsip", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_C2_or :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_or">;
-def int_hexagon_M2_macsin :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_macsin", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_C2_orn :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_orn">;
-def int_hexagon_M2_dpmpyss_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_dpmpyss_s0">;
+def int_hexagon_C2_pxfer_map :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_pxfer_map">;
-def int_hexagon_M2_dpmpyss_acc_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyss_acc_s0">;
+def int_hexagon_C2_tfrpr :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_tfrpr">;
-def int_hexagon_M2_dpmpyss_nac_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyss_nac_s0">;
+def int_hexagon_C2_tfrrp :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_C2_tfrrp">;
-def int_hexagon_M2_dpmpyuu_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_dpmpyuu_s0">;
+def int_hexagon_C2_vitpack :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_vitpack">;
-def int_hexagon_M2_dpmpyuu_acc_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyuu_acc_s0">;
+def int_hexagon_C2_vmux :
+Hexagon_i64_i32i64i64_Intrinsic<"HEXAGON_C2_vmux">;
-def int_hexagon_M2_dpmpyuu_nac_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyuu_nac_s0">;
+def int_hexagon_C2_xor :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C2_xor">;
-def int_hexagon_M2_mpy_up :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_up">;
+def int_hexagon_C4_and_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_and">;
-def int_hexagon_M2_mpy_up_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_up_s1">;
+def int_hexagon_C4_and_andn :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_andn">;
-def int_hexagon_M2_mpy_up_s1_sat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_up_s1_sat">;
+def int_hexagon_C4_and_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_or">;
-def int_hexagon_M2_mpyu_up :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_up">;
+def int_hexagon_C4_and_orn :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_and_orn">;
-def int_hexagon_M2_mpysu_up :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpysu_up">;
+def int_hexagon_C4_cmplte :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmplte">;
-def int_hexagon_M2_dpmpyss_rnd_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_dpmpyss_rnd_s0">;
+def int_hexagon_C4_cmpltei :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmpltei", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M4_mac_up_s1_sat :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mac_up_s1_sat">;
+def int_hexagon_C4_cmplteu :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmplteu">;
-def int_hexagon_M4_nac_up_s1_sat :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_nac_up_s1_sat">;
+def int_hexagon_C4_cmplteui :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmplteui", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_mpyi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyi">;
+def int_hexagon_C4_cmpneq :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmpneq">;
-def int_hexagon_M2_mpyui :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyui">;
+def int_hexagon_C4_cmpneqi :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_cmpneqi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_maci :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_maci">;
+def int_hexagon_C4_fastcorner9 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_fastcorner9">;
-def int_hexagon_M2_acci :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_acci">;
+def int_hexagon_C4_fastcorner9_not :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_fastcorner9_not">;
-def int_hexagon_M2_accii :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_accii", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_C4_nbitsclr :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_nbitsclr">;
-def int_hexagon_M2_nacci :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_nacci">;
+def int_hexagon_C4_nbitsclri :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_nbitsclri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_naccii :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_naccii", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_C4_nbitsset :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_C4_nbitsset">;
-def int_hexagon_M2_subacc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_subacc">;
+def int_hexagon_C4_or_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_and">;
-def int_hexagon_M4_mpyrr_addr :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyrr_addr">;
+def int_hexagon_C4_or_andn :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_andn">;
-def int_hexagon_M4_mpyri_addr_u2 :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyri_addr_u2", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_C4_or_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_or">;
-def int_hexagon_M4_mpyri_addr :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyri_addr", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_C4_or_orn :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_C4_or_orn">;
-def int_hexagon_M4_mpyri_addi :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyri_addi", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_F2_conv_d2df :
+Hexagon_double_i64_Intrinsic<"HEXAGON_F2_conv_d2df">;
-def int_hexagon_M4_mpyrr_addi :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyrr_addi", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_F2_conv_d2sf :
+Hexagon_float_i64_Intrinsic<"HEXAGON_F2_conv_d2sf">;
-def int_hexagon_M2_vmpy2s_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s0">;
+def int_hexagon_F2_conv_df2d :
+Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2d">;
-def int_hexagon_M2_vmpy2s_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s1">;
+def int_hexagon_F2_conv_df2d_chop :
+Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2d_chop">;
-def int_hexagon_M2_vmac2s_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2s_s0">;
+def int_hexagon_F2_conv_df2sf :
+Hexagon_float_double_Intrinsic<"HEXAGON_F2_conv_df2sf">;
-def int_hexagon_M2_vmac2s_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2s_s1">;
+def int_hexagon_F2_conv_df2ud :
+Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2ud">;
-def int_hexagon_M2_vmpy2su_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2su_s0">;
+def int_hexagon_F2_conv_df2ud_chop :
+Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2ud_chop">;
-def int_hexagon_M2_vmpy2su_s1 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2su_s1">;
+def int_hexagon_F2_conv_df2uw :
+Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2uw">;
-def int_hexagon_M2_vmac2su_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2su_s0">;
+def int_hexagon_F2_conv_df2uw_chop :
+Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2uw_chop">;
-def int_hexagon_M2_vmac2su_s1 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2su_s1">;
+def int_hexagon_F2_conv_df2w :
+Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2w">;
-def int_hexagon_M2_vmpy2s_s0pack :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s0pack">;
+def int_hexagon_F2_conv_df2w_chop :
+Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2w_chop">;
-def int_hexagon_M2_vmpy2s_s1pack :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s1pack">;
+def int_hexagon_F2_conv_sf2d :
+Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2d">;
-def int_hexagon_M2_vmac2 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2">;
+def int_hexagon_F2_conv_sf2d_chop :
+Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2d_chop">;
-def int_hexagon_M2_vmpy2es_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vmpy2es_s0">;
+def int_hexagon_F2_conv_sf2df :
+Hexagon_double_float_Intrinsic<"HEXAGON_F2_conv_sf2df">;
-def int_hexagon_M2_vmpy2es_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vmpy2es_s1">;
+def int_hexagon_F2_conv_sf2ud :
+Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2ud">;
-def int_hexagon_M2_vmac2es_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vmac2es_s0">;
+def int_hexagon_F2_conv_sf2ud_chop :
+Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2ud_chop">;
-def int_hexagon_M2_vmac2es_s1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vmac2es_s1">;
+def int_hexagon_F2_conv_sf2uw :
+Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2uw">;
-def int_hexagon_M2_vmac2es :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vmac2es">;
+def int_hexagon_F2_conv_sf2uw_chop :
+Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2uw_chop">;
-def int_hexagon_M2_vrmac_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrmac_s0">;
+def int_hexagon_F2_conv_sf2w :
+Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2w">;
-def int_hexagon_M2_vrmpy_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrmpy_s0">;
+def int_hexagon_F2_conv_sf2w_chop :
+Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2w_chop">;
-def int_hexagon_M2_vdmpyrs_s0 :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vdmpyrs_s0">;
+def int_hexagon_F2_conv_ud2df :
+Hexagon_double_i64_Intrinsic<"HEXAGON_F2_conv_ud2df">;
-def int_hexagon_M2_vdmpyrs_s1 :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vdmpyrs_s1">;
+def int_hexagon_F2_conv_ud2sf :
+Hexagon_float_i64_Intrinsic<"HEXAGON_F2_conv_ud2sf">;
-def int_hexagon_M5_vrmpybuu :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M5_vrmpybuu">;
+def int_hexagon_F2_conv_uw2df :
+Hexagon_double_i32_Intrinsic<"HEXAGON_F2_conv_uw2df">;
-def int_hexagon_M5_vrmacbuu :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M5_vrmacbuu">;
+def int_hexagon_F2_conv_uw2sf :
+Hexagon_float_i32_Intrinsic<"HEXAGON_F2_conv_uw2sf">;
-def int_hexagon_M5_vrmpybsu :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M5_vrmpybsu">;
+def int_hexagon_F2_conv_w2df :
+Hexagon_double_i32_Intrinsic<"HEXAGON_F2_conv_w2df">;
-def int_hexagon_M5_vrmacbsu :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M5_vrmacbsu">;
+def int_hexagon_F2_conv_w2sf :
+Hexagon_float_i32_Intrinsic<"HEXAGON_F2_conv_w2sf">;
-def int_hexagon_M5_vmpybuu :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M5_vmpybuu">;
+def int_hexagon_F2_dfclass :
+Hexagon_i32_doublei32_Intrinsic<"HEXAGON_F2_dfclass", [IntrNoMem, Throws, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M5_vmpybsu :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M5_vmpybsu">;
+def int_hexagon_F2_dfcmpeq :
+Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpeq", [IntrNoMem, Throws]>;
-def int_hexagon_M5_vmacbuu :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M5_vmacbuu">;
+def int_hexagon_F2_dfcmpge :
+Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpge", [IntrNoMem, Throws]>;
-def int_hexagon_M5_vmacbsu :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M5_vmacbsu">;
+def int_hexagon_F2_dfcmpgt :
+Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpgt", [IntrNoMem, Throws]>;
-def int_hexagon_M5_vdmpybsu :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M5_vdmpybsu">;
+def int_hexagon_F2_dfcmpuo :
+Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpuo", [IntrNoMem, Throws]>;
-def int_hexagon_M5_vdmacbsu :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M5_vdmacbsu">;
+def int_hexagon_F2_dfimm_n :
+Hexagon_double_i32_Intrinsic<"HEXAGON_F2_dfimm_n", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_M2_vdmacs_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vdmacs_s0">;
+def int_hexagon_F2_dfimm_p :
+Hexagon_double_i32_Intrinsic<"HEXAGON_F2_dfimm_p", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_M2_vdmacs_s1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vdmacs_s1">;
+def int_hexagon_F2_sfadd :
+Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfadd", [IntrNoMem, Throws]>;
-def int_hexagon_M2_vdmpys_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vdmpys_s0">;
+def int_hexagon_F2_sfclass :
+Hexagon_i32_floati32_Intrinsic<"HEXAGON_F2_sfclass", [IntrNoMem, Throws, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_M2_vdmpys_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vdmpys_s1">;
+def int_hexagon_F2_sfcmpeq :
+Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpeq", [IntrNoMem, Throws]>;
-def int_hexagon_M2_cmpyrs_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrs_s0">;
+def int_hexagon_F2_sfcmpge :
+Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpge", [IntrNoMem, Throws]>;
-def int_hexagon_M2_cmpyrs_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrs_s1">;
+def int_hexagon_F2_sfcmpgt :
+Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpgt", [IntrNoMem, Throws]>;
-def int_hexagon_M2_cmpyrsc_s0 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrsc_s0">;
+def int_hexagon_F2_sfcmpuo :
+Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpuo", [IntrNoMem, Throws]>;
-def int_hexagon_M2_cmpyrsc_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrsc_s1">;
+def int_hexagon_F2_sffixupd :
+Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sffixupd", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sffixupn :
+Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sffixupn", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sffixupr :
+Hexagon_float_float_Intrinsic<"HEXAGON_F2_sffixupr", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sffma :
+Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffma", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sffma_lib :
+Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffma_lib", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sffma_sc :
+Hexagon_float_floatfloatfloati32_Intrinsic<"HEXAGON_F2_sffma_sc", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sffms :
+Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffms", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sffms_lib :
+Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffms_lib", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sfimm_n :
+Hexagon_float_i32_Intrinsic<"HEXAGON_F2_sfimm_n", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
+
+def int_hexagon_F2_sfimm_p :
+Hexagon_float_i32_Intrinsic<"HEXAGON_F2_sfimm_p", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
+
+def int_hexagon_F2_sfmax :
+Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfmax", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sfmin :
+Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfmin", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sfmpy :
+Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfmpy", [IntrNoMem, Throws]>;
+
+def int_hexagon_F2_sfsub :
+Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfsub", [IntrNoMem, Throws]>;
+
+def int_hexagon_M2_acci :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_acci">;
+
+def int_hexagon_M2_accii :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_accii", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_M2_cmaci_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cmaci_s0">;
+
+def int_hexagon_M2_cmacr_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cmacr_s0">;
def int_hexagon_M2_cmacs_s0 :
Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cmacs_s0">;
@@ -1999,6 +2036,24 @@ Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cmacsc_s0">;
def int_hexagon_M2_cmacsc_s1 :
Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cmacsc_s1">;
+def int_hexagon_M2_cmpyi_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_cmpyi_s0">;
+
+def int_hexagon_M2_cmpyr_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_cmpyr_s0">;
+
+def int_hexagon_M2_cmpyrs_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrs_s0">;
+
+def int_hexagon_M2_cmpyrs_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrs_s1">;
+
+def int_hexagon_M2_cmpyrsc_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrsc_s0">;
+
+def int_hexagon_M2_cmpyrsc_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_cmpyrsc_s1">;
+
def int_hexagon_M2_cmpys_s0 :
Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_cmpys_s0">;
@@ -2023,44 +2078,47 @@ Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cnacsc_s0">;
def int_hexagon_M2_cnacsc_s1 :
Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cnacsc_s1">;
-def int_hexagon_M2_vrcmpys_s1 :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_M2_vrcmpys_s1">;
+def int_hexagon_M2_dpmpyss_acc_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyss_acc_s0">;
-def int_hexagon_M2_vrcmpys_acc_s1 :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_M2_vrcmpys_acc_s1">;
+def int_hexagon_M2_dpmpyss_nac_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyss_nac_s0">;
-def int_hexagon_M2_vrcmpys_s1rp :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M2_vrcmpys_s1rp">;
+def int_hexagon_M2_dpmpyss_rnd_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_dpmpyss_rnd_s0">;
-def int_hexagon_M2_mmacls_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_s0">;
+def int_hexagon_M2_dpmpyss_s0 :
+Hexagon_custom_i64_i32i32_Intrinsic;
-def int_hexagon_M2_mmacls_s1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_s1">;
+def int_hexagon_M2_dpmpyuu_acc_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyuu_acc_s0">;
-def int_hexagon_M2_mmachs_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmachs_s0">;
+def int_hexagon_M2_dpmpyuu_nac_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_dpmpyuu_nac_s0">;
-def int_hexagon_M2_mmachs_s1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmachs_s1">;
+def int_hexagon_M2_dpmpyuu_s0 :
+Hexagon_custom_i64_i32i32_Intrinsic;
-def int_hexagon_M2_mmpyl_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_s0">;
+def int_hexagon_M2_hmmpyh_rs1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyh_rs1">;
-def int_hexagon_M2_mmpyl_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_s1">;
+def int_hexagon_M2_hmmpyh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyh_s1">;
-def int_hexagon_M2_mmpyh_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_s0">;
+def int_hexagon_M2_hmmpyl_rs1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyl_rs1">;
-def int_hexagon_M2_mmpyh_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_s1">;
+def int_hexagon_M2_hmmpyl_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyl_s1">;
-def int_hexagon_M2_mmacls_rs0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_rs0">;
+def int_hexagon_M2_maci :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_maci">;
-def int_hexagon_M2_mmacls_rs1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_rs1">;
+def int_hexagon_M2_macsin :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_macsin", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_M2_macsip :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_macsip", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
def int_hexagon_M2_mmachs_rs0 :
Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmachs_rs0">;
@@ -2068,53 +2126,41 @@ Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmachs_rs0">;
def int_hexagon_M2_mmachs_rs1 :
Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmachs_rs1">;
-def int_hexagon_M2_mmpyl_rs0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_rs0">;
-
-def int_hexagon_M2_mmpyl_rs1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_rs1">;
-
-def int_hexagon_M2_mmpyh_rs0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_rs0">;
-
-def int_hexagon_M2_mmpyh_rs1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_rs1">;
-
-def int_hexagon_M4_vrmpyeh_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_s0">;
+def int_hexagon_M2_mmachs_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmachs_s0">;
-def int_hexagon_M4_vrmpyeh_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_s1">;
+def int_hexagon_M2_mmachs_s1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmachs_s1">;
-def int_hexagon_M4_vrmpyeh_acc_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_acc_s0">;
+def int_hexagon_M2_mmacls_rs0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_rs0">;
-def int_hexagon_M4_vrmpyeh_acc_s1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_acc_s1">;
+def int_hexagon_M2_mmacls_rs1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_rs1">;
-def int_hexagon_M4_vrmpyoh_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_s0">;
+def int_hexagon_M2_mmacls_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_s0">;
-def int_hexagon_M4_vrmpyoh_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_s1">;
+def int_hexagon_M2_mmacls_s1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacls_s1">;
-def int_hexagon_M4_vrmpyoh_acc_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_acc_s0">;
+def int_hexagon_M2_mmacuhs_rs0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_rs0">;
-def int_hexagon_M4_vrmpyoh_acc_s1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_acc_s1">;
+def int_hexagon_M2_mmacuhs_rs1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_rs1">;
-def int_hexagon_M2_hmmpyl_rs1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyl_rs1">;
+def int_hexagon_M2_mmacuhs_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_s0">;
-def int_hexagon_M2_hmmpyh_rs1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyh_rs1">;
+def int_hexagon_M2_mmacuhs_s1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_s1">;
-def int_hexagon_M2_hmmpyl_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyl_s1">;
+def int_hexagon_M2_mmaculs_rs0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmaculs_rs0">;
-def int_hexagon_M2_hmmpyh_s1 :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_hmmpyh_s1">;
+def int_hexagon_M2_mmaculs_rs1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmaculs_rs1">;
def int_hexagon_M2_mmaculs_s0 :
Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmaculs_s0">;
@@ -2122,41 +2168,29 @@ Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmaculs_s0">;
def int_hexagon_M2_mmaculs_s1 :
Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmaculs_s1">;
-def int_hexagon_M2_mmacuhs_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_s0">;
-
-def int_hexagon_M2_mmacuhs_s1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_s1">;
-
-def int_hexagon_M2_mmpyul_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_s0">;
-
-def int_hexagon_M2_mmpyul_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_s1">;
-
-def int_hexagon_M2_mmpyuh_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyuh_s0">;
+def int_hexagon_M2_mmpyh_rs0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_rs0">;
-def int_hexagon_M2_mmpyuh_s1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyuh_s1">;
+def int_hexagon_M2_mmpyh_rs1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_rs1">;
-def int_hexagon_M2_mmaculs_rs0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmaculs_rs0">;
+def int_hexagon_M2_mmpyh_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_s0">;
-def int_hexagon_M2_mmaculs_rs1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmaculs_rs1">;
+def int_hexagon_M2_mmpyh_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyh_s1">;
-def int_hexagon_M2_mmacuhs_rs0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_rs0">;
+def int_hexagon_M2_mmpyl_rs0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_rs0">;
-def int_hexagon_M2_mmacuhs_rs1 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_mmacuhs_rs1">;
+def int_hexagon_M2_mmpyl_rs1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_rs1">;
-def int_hexagon_M2_mmpyul_rs0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_rs0">;
+def int_hexagon_M2_mmpyl_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_s0">;
-def int_hexagon_M2_mmpyul_rs1 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_rs1">;
+def int_hexagon_M2_mmpyl_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyl_s1">;
def int_hexagon_M2_mmpyuh_rs0 :
Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyuh_rs0">;
@@ -2164,1442 +2198,1388 @@ Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyuh_rs0">;
def int_hexagon_M2_mmpyuh_rs1 :
Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyuh_rs1">;
-def int_hexagon_M2_vrcmaci_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmaci_s0">;
+def int_hexagon_M2_mmpyuh_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyuh_s0">;
-def int_hexagon_M2_vrcmacr_s0 :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmacr_s0">;
+def int_hexagon_M2_mmpyuh_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyuh_s1">;
-def int_hexagon_M2_vrcmaci_s0c :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmaci_s0c">;
+def int_hexagon_M2_mmpyul_rs0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_rs0">;
-def int_hexagon_M2_vrcmacr_s0c :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmacr_s0c">;
+def int_hexagon_M2_mmpyul_rs1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_rs1">;
-def int_hexagon_M2_cmaci_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cmaci_s0">;
+def int_hexagon_M2_mmpyul_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_s0">;
-def int_hexagon_M2_cmacr_s0 :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_cmacr_s0">;
+def int_hexagon_M2_mmpyul_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_mmpyul_s1">;
-def int_hexagon_M2_vrcmpyi_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyi_s0">;
+def int_hexagon_M2_mpy_acc_hh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hh_s0">;
-def int_hexagon_M2_vrcmpyr_s0 :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyr_s0">;
+def int_hexagon_M2_mpy_acc_hh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hh_s1">;
-def int_hexagon_M2_vrcmpyi_s0c :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyi_s0c">;
+def int_hexagon_M2_mpy_acc_hl_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hl_s0">;
-def int_hexagon_M2_vrcmpyr_s0c :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyr_s0c">;
+def int_hexagon_M2_mpy_acc_hl_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_hl_s1">;
-def int_hexagon_M2_cmpyi_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_cmpyi_s0">;
+def int_hexagon_M2_mpy_acc_lh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_lh_s0">;
-def int_hexagon_M2_cmpyr_s0 :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_cmpyr_s0">;
+def int_hexagon_M2_mpy_acc_lh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_lh_s1">;
-def int_hexagon_M4_cmpyi_wh :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyi_wh">;
+def int_hexagon_M2_mpy_acc_ll_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_ll_s0">;
-def int_hexagon_M4_cmpyr_wh :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyr_wh">;
+def int_hexagon_M2_mpy_acc_ll_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_ll_s1">;
-def int_hexagon_M4_cmpyi_whc :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyi_whc">;
+def int_hexagon_M2_mpy_acc_sat_hh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hh_s0">;
-def int_hexagon_M4_cmpyr_whc :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyr_whc">;
+def int_hexagon_M2_mpy_acc_sat_hh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hh_s1">;
-def int_hexagon_M2_vcmpy_s0_sat_i :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s0_sat_i">;
+def int_hexagon_M2_mpy_acc_sat_hl_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hl_s0">;
-def int_hexagon_M2_vcmpy_s0_sat_r :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s0_sat_r">;
+def int_hexagon_M2_mpy_acc_sat_hl_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_hl_s1">;
-def int_hexagon_M2_vcmpy_s1_sat_i :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s1_sat_i">;
+def int_hexagon_M2_mpy_acc_sat_lh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_lh_s0">;
-def int_hexagon_M2_vcmpy_s1_sat_r :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s1_sat_r">;
+def int_hexagon_M2_mpy_acc_sat_lh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_lh_s1">;
-def int_hexagon_M2_vcmac_s0_sat_i :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vcmac_s0_sat_i">;
+def int_hexagon_M2_mpy_acc_sat_ll_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_ll_s0">;
-def int_hexagon_M2_vcmac_s0_sat_r :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vcmac_s0_sat_r">;
+def int_hexagon_M2_mpy_acc_sat_ll_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_acc_sat_ll_s1">;
-def int_hexagon_S2_vcrotate :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_vcrotate">;
+def int_hexagon_M2_mpy_hh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hh_s0">;
-def int_hexagon_S4_vrcrotate_acc :
-Hexagon_i64_i64i64i32i32_Intrinsic<"HEXAGON_S4_vrcrotate_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_M2_mpy_hh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hh_s1">;
-def int_hexagon_S4_vrcrotate :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_S4_vrcrotate", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M2_mpy_hl_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hl_s0">;
-def int_hexagon_S2_vcnegh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_vcnegh">;
+def int_hexagon_M2_mpy_hl_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_hl_s1">;
-def int_hexagon_S2_vrcnegh :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_vrcnegh">;
+def int_hexagon_M2_mpy_lh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_lh_s0">;
-def int_hexagon_M4_pmpyw :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M4_pmpyw">;
+def int_hexagon_M2_mpy_lh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_lh_s1">;
-def int_hexagon_M4_vpmpyh :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M4_vpmpyh">;
+def int_hexagon_M2_mpy_ll_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_ll_s0">;
-def int_hexagon_M4_pmpyw_acc :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M4_pmpyw_acc">;
+def int_hexagon_M2_mpy_ll_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_ll_s1">;
-def int_hexagon_M4_vpmpyh_acc :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M4_vpmpyh_acc">;
+def int_hexagon_M2_mpy_nac_hh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hh_s0">;
-def int_hexagon_A2_add :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_add">;
+def int_hexagon_M2_mpy_nac_hh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hh_s1">;
-def int_hexagon_A2_sub :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_sub">;
+def int_hexagon_M2_mpy_nac_hl_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hl_s0">;
-def int_hexagon_A2_addsat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addsat">;
+def int_hexagon_M2_mpy_nac_hl_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_hl_s1">;
-def int_hexagon_A2_subsat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subsat">;
+def int_hexagon_M2_mpy_nac_lh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_lh_s0">;
-def int_hexagon_A2_addi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpy_nac_lh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_lh_s1">;
-def int_hexagon_A2_addh_l16_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_ll">;
+def int_hexagon_M2_mpy_nac_ll_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_ll_s0">;
-def int_hexagon_A2_addh_l16_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_hl">;
+def int_hexagon_M2_mpy_nac_ll_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_ll_s1">;
-def int_hexagon_A2_addh_l16_sat_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_sat_ll">;
+def int_hexagon_M2_mpy_nac_sat_hh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hh_s0">;
-def int_hexagon_A2_addh_l16_sat_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_l16_sat_hl">;
+def int_hexagon_M2_mpy_nac_sat_hh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hh_s1">;
-def int_hexagon_A2_subh_l16_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_ll">;
+def int_hexagon_M2_mpy_nac_sat_hl_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hl_s0">;
-def int_hexagon_A2_subh_l16_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_hl">;
+def int_hexagon_M2_mpy_nac_sat_hl_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_hl_s1">;
-def int_hexagon_A2_subh_l16_sat_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_sat_ll">;
+def int_hexagon_M2_mpy_nac_sat_lh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_lh_s0">;
-def int_hexagon_A2_subh_l16_sat_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_l16_sat_hl">;
+def int_hexagon_M2_mpy_nac_sat_lh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_lh_s1">;
-def int_hexagon_A2_addh_h16_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_ll">;
+def int_hexagon_M2_mpy_nac_sat_ll_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_ll_s0">;
-def int_hexagon_A2_addh_h16_lh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_lh">;
+def int_hexagon_M2_mpy_nac_sat_ll_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpy_nac_sat_ll_s1">;
-def int_hexagon_A2_addh_h16_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_hl">;
+def int_hexagon_M2_mpy_rnd_hh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hh_s0">;
-def int_hexagon_A2_addh_h16_hh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_hh">;
+def int_hexagon_M2_mpy_rnd_hh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hh_s1">;
-def int_hexagon_A2_addh_h16_sat_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_ll">;
+def int_hexagon_M2_mpy_rnd_hl_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hl_s0">;
-def int_hexagon_A2_addh_h16_sat_lh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_lh">;
+def int_hexagon_M2_mpy_rnd_hl_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_hl_s1">;
-def int_hexagon_A2_addh_h16_sat_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_hl">;
+def int_hexagon_M2_mpy_rnd_lh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_lh_s0">;
-def int_hexagon_A2_addh_h16_sat_hh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_addh_h16_sat_hh">;
+def int_hexagon_M2_mpy_rnd_lh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_lh_s1">;
-def int_hexagon_A2_subh_h16_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_ll">;
+def int_hexagon_M2_mpy_rnd_ll_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_ll_s0">;
-def int_hexagon_A2_subh_h16_lh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_lh">;
+def int_hexagon_M2_mpy_rnd_ll_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_rnd_ll_s1">;
-def int_hexagon_A2_subh_h16_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_hl">;
+def int_hexagon_M2_mpy_sat_hh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hh_s0">;
-def int_hexagon_A2_subh_h16_hh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_hh">;
+def int_hexagon_M2_mpy_sat_hh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hh_s1">;
-def int_hexagon_A2_subh_h16_sat_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_ll">;
+def int_hexagon_M2_mpy_sat_hl_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hl_s0">;
-def int_hexagon_A2_subh_h16_sat_lh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_lh">;
+def int_hexagon_M2_mpy_sat_hl_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_hl_s1">;
-def int_hexagon_A2_subh_h16_sat_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_hl">;
+def int_hexagon_M2_mpy_sat_lh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_lh_s0">;
-def int_hexagon_A2_subh_h16_sat_hh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subh_h16_sat_hh">;
+def int_hexagon_M2_mpy_sat_lh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_lh_s1">;
-def int_hexagon_A2_aslh :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_aslh">;
+def int_hexagon_M2_mpy_sat_ll_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_ll_s0">;
-def int_hexagon_A2_asrh :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_asrh">;
+def int_hexagon_M2_mpy_sat_ll_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_ll_s1">;
-def int_hexagon_A2_addp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_addp">;
+def int_hexagon_M2_mpy_sat_rnd_hh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hh_s0">;
-def int_hexagon_A2_addpsat :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_addpsat">;
+def int_hexagon_M2_mpy_sat_rnd_hh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hh_s1">;
-def int_hexagon_A2_addsp :
-Hexagon_i64_i32i64_Intrinsic<"HEXAGON_A2_addsp">;
+def int_hexagon_M2_mpy_sat_rnd_hl_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hl_s0">;
-def int_hexagon_A2_subp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_subp">;
+def int_hexagon_M2_mpy_sat_rnd_hl_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_hl_s1">;
-def int_hexagon_A2_neg :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_neg">;
+def int_hexagon_M2_mpy_sat_rnd_lh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_lh_s0">;
-def int_hexagon_A2_negsat :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_negsat">;
+def int_hexagon_M2_mpy_sat_rnd_lh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_lh_s1">;
-def int_hexagon_A2_abs :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_abs">;
+def int_hexagon_M2_mpy_sat_rnd_ll_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_ll_s0">;
-def int_hexagon_A2_abssat :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_abssat">;
+def int_hexagon_M2_mpy_sat_rnd_ll_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_sat_rnd_ll_s1">;
-def int_hexagon_A2_vconj :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vconj">;
+def int_hexagon_M2_mpy_up :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_up">;
-def int_hexagon_A2_negp :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_negp">;
+def int_hexagon_M2_mpy_up_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_up_s1">;
-def int_hexagon_A2_absp :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_absp">;
+def int_hexagon_M2_mpy_up_s1_sat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpy_up_s1_sat">;
-def int_hexagon_A2_max :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_max">;
+def int_hexagon_M2_mpyd_acc_hh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hh_s0">;
-def int_hexagon_A2_maxu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_maxu">;
+def int_hexagon_M2_mpyd_acc_hh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hh_s1">;
-def int_hexagon_A2_min :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_min">;
+def int_hexagon_M2_mpyd_acc_hl_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hl_s0">;
-def int_hexagon_A2_minu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_minu">;
+def int_hexagon_M2_mpyd_acc_hl_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_hl_s1">;
-def int_hexagon_A2_maxp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_maxp">;
+def int_hexagon_M2_mpyd_acc_lh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_lh_s0">;
-def int_hexagon_A2_maxup :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_maxup">;
+def int_hexagon_M2_mpyd_acc_lh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_lh_s1">;
-def int_hexagon_A2_minp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_minp">;
+def int_hexagon_M2_mpyd_acc_ll_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_ll_s0">;
-def int_hexagon_A2_minup :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_minup">;
+def int_hexagon_M2_mpyd_acc_ll_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_acc_ll_s1">;
-def int_hexagon_A2_tfr :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_tfr">;
+def int_hexagon_M2_mpyd_hh_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hh_s0">;
-def int_hexagon_A2_tfrsi :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_tfrsi", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_M2_mpyd_hh_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hh_s1">;
-def int_hexagon_A2_tfrp :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_tfrp">;
+def int_hexagon_M2_mpyd_hl_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hl_s0">;
-def int_hexagon_A2_tfrpi :
-Hexagon_i64_i32_Intrinsic<"HEXAGON_A2_tfrpi", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_M2_mpyd_hl_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_hl_s1">;
-def int_hexagon_A2_zxtb :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_zxtb">;
+def int_hexagon_M2_mpyd_lh_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_lh_s0">;
-def int_hexagon_A2_sxtb :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_sxtb">;
+def int_hexagon_M2_mpyd_lh_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_lh_s1">;
-def int_hexagon_A2_zxth :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_zxth">;
+def int_hexagon_M2_mpyd_ll_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_ll_s0">;
-def int_hexagon_A2_sxth :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_sxth">;
+def int_hexagon_M2_mpyd_ll_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_ll_s1">;
-def int_hexagon_A2_combinew :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A2_combinew">;
+def int_hexagon_M2_mpyd_nac_hh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hh_s0">;
-def int_hexagon_A4_combineri :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_combineri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpyd_nac_hh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hh_s1">;
-def int_hexagon_A4_combineir :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_combineir", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_M2_mpyd_nac_hl_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hl_s0">;
-def int_hexagon_A2_combineii :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A2_combineii", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpyd_nac_hl_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_hl_s1">;
-def int_hexagon_A2_combine_hh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_hh">;
+def int_hexagon_M2_mpyd_nac_lh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_lh_s0">;
-def int_hexagon_A2_combine_hl :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_hl">;
+def int_hexagon_M2_mpyd_nac_lh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_lh_s1">;
-def int_hexagon_A2_combine_lh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_lh">;
+def int_hexagon_M2_mpyd_nac_ll_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_ll_s0">;
-def int_hexagon_A2_combine_ll :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_combine_ll">;
+def int_hexagon_M2_mpyd_nac_ll_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyd_nac_ll_s1">;
-def int_hexagon_A2_tfril :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_tfril", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpyd_rnd_hh_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hh_s0">;
-def int_hexagon_A2_tfrih :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_tfrih", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpyd_rnd_hh_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hh_s1">;
-def int_hexagon_A2_and :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_and">;
+def int_hexagon_M2_mpyd_rnd_hl_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hl_s0">;
-def int_hexagon_A2_or :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_or">;
+def int_hexagon_M2_mpyd_rnd_hl_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_hl_s1">;
-def int_hexagon_A2_xor :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_xor">;
+def int_hexagon_M2_mpyd_rnd_lh_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_lh_s0">;
-def int_hexagon_A2_not :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_not">;
+def int_hexagon_M2_mpyd_rnd_lh_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_lh_s1">;
-def int_hexagon_M2_xor_xacc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_xor_xacc">;
+def int_hexagon_M2_mpyd_rnd_ll_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_ll_s0">;
-def int_hexagon_M4_xor_xacc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_xor_xacc">;
+def int_hexagon_M2_mpyd_rnd_ll_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyd_rnd_ll_s1">;
-def int_hexagon_A4_andn :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_andn">;
+def int_hexagon_M2_mpyi :
+Hexagon_custom_i32_i32i32_Intrinsic;
-def int_hexagon_A4_orn :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_orn">;
+def int_hexagon_M2_mpysmi :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_A4_andnp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A4_andnp">;
+def int_hexagon_M2_mpysu_up :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpysu_up">;
-def int_hexagon_A4_ornp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A4_ornp">;
+def int_hexagon_M2_mpyu_acc_hh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hh_s0">;
-def int_hexagon_S4_addaddi :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_addaddi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M2_mpyu_acc_hh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hh_s1">;
-def int_hexagon_S4_subaddi :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_subaddi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpyu_acc_hl_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hl_s0">;
-def int_hexagon_M4_and_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_and">;
+def int_hexagon_M2_mpyu_acc_hl_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_hl_s1">;
-def int_hexagon_M4_and_andn :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_andn">;
+def int_hexagon_M2_mpyu_acc_lh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_lh_s0">;
-def int_hexagon_M4_and_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_or">;
+def int_hexagon_M2_mpyu_acc_lh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_lh_s1">;
-def int_hexagon_M4_and_xor :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_xor">;
+def int_hexagon_M2_mpyu_acc_ll_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_ll_s0">;
-def int_hexagon_M4_or_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_and">;
+def int_hexagon_M2_mpyu_acc_ll_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_acc_ll_s1">;
-def int_hexagon_M4_or_andn :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_andn">;
+def int_hexagon_M2_mpyu_hh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hh_s0">;
-def int_hexagon_M4_or_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_or">;
+def int_hexagon_M2_mpyu_hh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hh_s1">;
-def int_hexagon_M4_or_xor :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_xor">;
+def int_hexagon_M2_mpyu_hl_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hl_s0">;
-def int_hexagon_S4_or_andix :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_or_andix", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M2_mpyu_hl_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_hl_s1">;
-def int_hexagon_S4_or_andi :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_or_andi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M2_mpyu_lh_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_lh_s0">;
-def int_hexagon_S4_or_ori :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_or_ori", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M2_mpyu_lh_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_lh_s1">;
-def int_hexagon_M4_xor_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_xor_and">;
+def int_hexagon_M2_mpyu_ll_s0 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_ll_s0">;
-def int_hexagon_M4_xor_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_xor_or">;
+def int_hexagon_M2_mpyu_ll_s1 :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_ll_s1">;
-def int_hexagon_M4_xor_andn :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_xor_andn">;
+def int_hexagon_M2_mpyu_nac_hh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hh_s0">;
-def int_hexagon_A2_subri :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_subri", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_M2_mpyu_nac_hh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hh_s1">;
-def int_hexagon_A2_andir :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_andir", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpyu_nac_hl_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hl_s0">;
-def int_hexagon_A2_orir :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_orir", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_mpyu_nac_hl_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_hl_s1">;
-def int_hexagon_A2_andp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_andp">;
+def int_hexagon_M2_mpyu_nac_lh_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_lh_s0">;
-def int_hexagon_A2_orp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_orp">;
+def int_hexagon_M2_mpyu_nac_lh_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_lh_s1">;
-def int_hexagon_A2_xorp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_xorp">;
+def int_hexagon_M2_mpyu_nac_ll_s0 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_ll_s0">;
-def int_hexagon_A2_notp :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_notp">;
+def int_hexagon_M2_mpyu_nac_ll_s1 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mpyu_nac_ll_s1">;
-def int_hexagon_A2_sxtw :
-Hexagon_i64_i32_Intrinsic<"HEXAGON_A2_sxtw">;
+def int_hexagon_M2_mpyu_up :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_mpyu_up">;
-def int_hexagon_A2_sat :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_A2_sat">;
+def int_hexagon_M2_mpyud_acc_hh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hh_s0">;
-def int_hexagon_A2_roundsat :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_A2_roundsat">;
+def int_hexagon_M2_mpyud_acc_hh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hh_s1">;
-def int_hexagon_A2_sath :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_sath">;
+def int_hexagon_M2_mpyud_acc_hl_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hl_s0">;
-def int_hexagon_A2_satuh :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_satuh">;
+def int_hexagon_M2_mpyud_acc_hl_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_hl_s1">;
-def int_hexagon_A2_satub :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_satub">;
+def int_hexagon_M2_mpyud_acc_lh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_lh_s0">;
-def int_hexagon_A2_satb :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_satb">;
+def int_hexagon_M2_mpyud_acc_lh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_lh_s1">;
-def int_hexagon_A2_vaddub :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddub">;
+def int_hexagon_M2_mpyud_acc_ll_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_ll_s0">;
-def int_hexagon_A2_vaddb_map :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddb_map">;
+def int_hexagon_M2_mpyud_acc_ll_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_acc_ll_s1">;
-def int_hexagon_A2_vaddubs :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddubs">;
+def int_hexagon_M2_mpyud_hh_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hh_s0">;
-def int_hexagon_A2_vaddh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddh">;
+def int_hexagon_M2_mpyud_hh_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hh_s1">;
-def int_hexagon_A2_vaddhs :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddhs">;
+def int_hexagon_M2_mpyud_hl_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hl_s0">;
-def int_hexagon_A2_vadduhs :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vadduhs">;
+def int_hexagon_M2_mpyud_hl_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_hl_s1">;
-def int_hexagon_A5_vaddhubs :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A5_vaddhubs">;
+def int_hexagon_M2_mpyud_lh_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_lh_s0">;
-def int_hexagon_A2_vaddw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddw">;
+def int_hexagon_M2_mpyud_lh_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_lh_s1">;
-def int_hexagon_A2_vaddws :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vaddws">;
+def int_hexagon_M2_mpyud_ll_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_ll_s0">;
-def int_hexagon_S4_vxaddsubw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxaddsubw">;
+def int_hexagon_M2_mpyud_ll_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_mpyud_ll_s1">;
-def int_hexagon_S4_vxsubaddw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxsubaddw">;
+def int_hexagon_M2_mpyud_nac_hh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hh_s0">;
-def int_hexagon_S4_vxaddsubh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxaddsubh">;
+def int_hexagon_M2_mpyud_nac_hh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hh_s1">;
-def int_hexagon_S4_vxsubaddh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxsubaddh">;
+def int_hexagon_M2_mpyud_nac_hl_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hl_s0">;
-def int_hexagon_S4_vxaddsubhr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxaddsubhr">;
+def int_hexagon_M2_mpyud_nac_hl_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_hl_s1">;
-def int_hexagon_S4_vxsubaddhr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxsubaddhr">;
+def int_hexagon_M2_mpyud_nac_lh_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_lh_s0">;
-def int_hexagon_A2_svavgh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svavgh">;
+def int_hexagon_M2_mpyud_nac_lh_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_lh_s1">;
-def int_hexagon_A2_svavghs :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svavghs">;
+def int_hexagon_M2_mpyud_nac_ll_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_ll_s0">;
-def int_hexagon_A2_svnavgh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svnavgh">;
+def int_hexagon_M2_mpyud_nac_ll_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_mpyud_nac_ll_s1">;
-def int_hexagon_A2_svaddh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svaddh">;
+def int_hexagon_M2_mpyui :
+Hexagon_custom_i32_i32i32_Intrinsic;
-def int_hexagon_A2_svaddhs :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svaddhs">;
+def int_hexagon_M2_nacci :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_nacci">;
-def int_hexagon_A2_svadduhs :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svadduhs">;
+def int_hexagon_M2_naccii :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_naccii", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_A2_svsubh :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svsubh">;
+def int_hexagon_M2_subacc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_subacc">;
-def int_hexagon_A2_svsubhs :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svsubhs">;
+def int_hexagon_M2_vabsdiffh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vabsdiffh">;
-def int_hexagon_A2_svsubuhs :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A2_svsubuhs">;
+def int_hexagon_M2_vabsdiffw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vabsdiffw">;
-def int_hexagon_A2_vraddub :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vraddub">;
+def int_hexagon_M2_vcmac_s0_sat_i :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vcmac_s0_sat_i">;
-def int_hexagon_A2_vraddub_acc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_A2_vraddub_acc">;
+def int_hexagon_M2_vcmac_s0_sat_r :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vcmac_s0_sat_r">;
-def int_hexagon_M2_vraddh :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vraddh">;
+def int_hexagon_M2_vcmpy_s0_sat_i :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s0_sat_i">;
-def int_hexagon_M2_vradduh :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vradduh">;
+def int_hexagon_M2_vcmpy_s0_sat_r :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s0_sat_r">;
-def int_hexagon_A2_vsubub :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubub">;
+def int_hexagon_M2_vcmpy_s1_sat_i :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s1_sat_i">;
-def int_hexagon_A2_vsubb_map :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubb_map">;
+def int_hexagon_M2_vcmpy_s1_sat_r :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vcmpy_s1_sat_r">;
-def int_hexagon_A2_vsububs :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsububs">;
+def int_hexagon_M2_vdmacs_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vdmacs_s0">;
-def int_hexagon_A2_vsubh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubh">;
+def int_hexagon_M2_vdmacs_s1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vdmacs_s1">;
-def int_hexagon_A2_vsubhs :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubhs">;
+def int_hexagon_M2_vdmpyrs_s0 :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vdmpyrs_s0">;
-def int_hexagon_A2_vsubuhs :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubuhs">;
+def int_hexagon_M2_vdmpyrs_s1 :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vdmpyrs_s1">;
-def int_hexagon_A2_vsubw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubw">;
+def int_hexagon_M2_vdmpys_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vdmpys_s0">;
-def int_hexagon_A2_vsubws :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vsubws">;
+def int_hexagon_M2_vdmpys_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vdmpys_s1">;
-def int_hexagon_A2_vabsh :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabsh">;
+def int_hexagon_M2_vmac2 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2">;
-def int_hexagon_A2_vabshsat :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabshsat">;
+def int_hexagon_M2_vmac2es :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vmac2es">;
-def int_hexagon_A2_vabsw :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabsw">;
+def int_hexagon_M2_vmac2es_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vmac2es_s0">;
-def int_hexagon_A2_vabswsat :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_A2_vabswsat">;
+def int_hexagon_M2_vmac2es_s1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vmac2es_s1">;
-def int_hexagon_M2_vabsdiffw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vabsdiffw">;
+def int_hexagon_M2_vmac2s_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2s_s0">;
-def int_hexagon_M2_vabsdiffh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vabsdiffh">;
+def int_hexagon_M2_vmac2s_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2s_s1">;
-def int_hexagon_A2_vrsadub :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vrsadub">;
+def int_hexagon_M2_vmac2su_s0 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2su_s0">;
-def int_hexagon_A2_vrsadub_acc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_A2_vrsadub_acc">;
+def int_hexagon_M2_vmac2su_s1 :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M2_vmac2su_s1">;
-def int_hexagon_A2_vavgub :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgub">;
+def int_hexagon_M2_vmpy2es_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vmpy2es_s0">;
-def int_hexagon_A2_vavguh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguh">;
+def int_hexagon_M2_vmpy2es_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vmpy2es_s1">;
-def int_hexagon_A2_vavgh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgh">;
+def int_hexagon_M2_vmpy2s_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s0">;
-def int_hexagon_A2_vnavgh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgh">;
+def int_hexagon_M2_vmpy2s_s0pack :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s0pack">;
-def int_hexagon_A2_vavgw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgw">;
+def int_hexagon_M2_vmpy2s_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s1">;
-def int_hexagon_A2_vnavgw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgw">;
+def int_hexagon_M2_vmpy2s_s1pack :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_M2_vmpy2s_s1pack">;
-def int_hexagon_A2_vavgwr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgwr">;
+def int_hexagon_M2_vmpy2su_s0 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2su_s0">;
-def int_hexagon_A2_vnavgwr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgwr">;
+def int_hexagon_M2_vmpy2su_s1 :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M2_vmpy2su_s1">;
-def int_hexagon_A2_vavgwcr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgwcr">;
+def int_hexagon_M2_vraddh :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vraddh">;
-def int_hexagon_A2_vnavgwcr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavgwcr">;
+def int_hexagon_M2_vradduh :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M2_vradduh">;
-def int_hexagon_A2_vavghcr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavghcr">;
+def int_hexagon_M2_vrcmaci_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmaci_s0">;
-def int_hexagon_A2_vnavghcr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavghcr">;
+def int_hexagon_M2_vrcmaci_s0c :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmaci_s0c">;
-def int_hexagon_A2_vavguw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguw">;
+def int_hexagon_M2_vrcmacr_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmacr_s0">;
-def int_hexagon_A2_vavguwr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguwr">;
+def int_hexagon_M2_vrcmacr_s0c :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrcmacr_s0c">;
-def int_hexagon_A2_vavgubr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavgubr">;
+def int_hexagon_M2_vrcmpyi_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyi_s0">;
-def int_hexagon_A2_vavguhr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavguhr">;
+def int_hexagon_M2_vrcmpyi_s0c :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyi_s0c">;
-def int_hexagon_A2_vavghr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vavghr">;
+def int_hexagon_M2_vrcmpyr_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyr_s0">;
-def int_hexagon_A2_vnavghr :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vnavghr">;
+def int_hexagon_M2_vrcmpyr_s0c :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrcmpyr_s0c">;
-def int_hexagon_A4_round_ri :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_ri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_vrcmpys_acc_s1 :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_M2_vrcmpys_acc_s1">;
-def int_hexagon_A4_round_rr :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_rr">;
+def int_hexagon_M2_vrcmpys_s1 :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_M2_vrcmpys_s1">;
-def int_hexagon_A4_round_ri_sat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_ri_sat", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_vrcmpys_s1rp :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M2_vrcmpys_s1rp">;
-def int_hexagon_A4_round_rr_sat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_round_rr_sat">;
+def int_hexagon_M2_vrmac_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M2_vrmac_s0">;
-def int_hexagon_A4_cround_ri :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cround_ri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M2_vrmpy_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M2_vrmpy_s0">;
-def int_hexagon_A4_cround_rr :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_cround_rr">;
+def int_hexagon_M2_xor_xacc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_xor_xacc">;
-def int_hexagon_A4_vrminh :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminh">;
+def int_hexagon_M4_and_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_and">;
-def int_hexagon_A4_vrmaxh :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxh">;
+def int_hexagon_M4_and_andn :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_andn">;
-def int_hexagon_A4_vrminuh :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminuh">;
+def int_hexagon_M4_and_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_or">;
-def int_hexagon_A4_vrmaxuh :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxuh">;
+def int_hexagon_M4_and_xor :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_and_xor">;
-def int_hexagon_A4_vrminw :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminw">;
+def int_hexagon_M4_cmpyi_wh :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyi_wh">;
-def int_hexagon_A4_vrmaxw :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxw">;
+def int_hexagon_M4_cmpyi_whc :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyi_whc">;
-def int_hexagon_A4_vrminuw :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrminuw">;
+def int_hexagon_M4_cmpyr_wh :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyr_wh">;
-def int_hexagon_A4_vrmaxuw :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_A4_vrmaxuw">;
+def int_hexagon_M4_cmpyr_whc :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_M4_cmpyr_whc">;
-def int_hexagon_A2_vminb :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminb">;
+def int_hexagon_M4_mac_up_s1_sat :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mac_up_s1_sat">;
-def int_hexagon_A2_vmaxb :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxb">;
+def int_hexagon_M4_mpyri_addi :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyri_addi", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_A2_vminub :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminub">;
+def int_hexagon_M4_mpyri_addr :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyri_addr", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_A2_vmaxub :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxub">;
+def int_hexagon_M4_mpyri_addr_u2 :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyri_addr_u2", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_A2_vminh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminh">;
+def int_hexagon_M4_mpyrr_addi :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyrr_addi", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_A2_vmaxh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxh">;
+def int_hexagon_M4_mpyrr_addr :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_mpyrr_addr">;
-def int_hexagon_A2_vminuh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminuh">;
+def int_hexagon_M4_nac_up_s1_sat :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_nac_up_s1_sat">;
-def int_hexagon_A2_vmaxuh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxuh">;
+def int_hexagon_M4_or_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_and">;
-def int_hexagon_A2_vminw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminw">;
+def int_hexagon_M4_or_andn :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_andn">;
-def int_hexagon_A2_vmaxw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxw">;
+def int_hexagon_M4_or_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_or">;
-def int_hexagon_A2_vminuw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vminuw">;
+def int_hexagon_M4_or_xor :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_or_xor">;
-def int_hexagon_A2_vmaxuw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_A2_vmaxuw">;
+def int_hexagon_M4_pmpyw :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M4_pmpyw">;
-def int_hexagon_A4_modwrapu :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A4_modwrapu">;
+def int_hexagon_M4_pmpyw_acc :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M4_pmpyw_acc">;
-def int_hexagon_F2_sfadd :
-Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfadd", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vpmpyh :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M4_vpmpyh">;
-def int_hexagon_F2_sfsub :
-Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfsub", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vpmpyh_acc :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M4_vpmpyh_acc">;
-def int_hexagon_F2_sfmpy :
-Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfmpy", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyeh_acc_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_acc_s0">;
-def int_hexagon_F2_sffma :
-Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffma", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyeh_acc_s1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_acc_s1">;
-def int_hexagon_F2_sffma_sc :
-Hexagon_float_floatfloatfloati32_Intrinsic<"HEXAGON_F2_sffma_sc", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyeh_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_s0">;
-def int_hexagon_F2_sffms :
-Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffms", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyeh_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyeh_s1">;
-def int_hexagon_F2_sffma_lib :
-Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffma_lib", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyoh_acc_s0 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_acc_s0">;
-def int_hexagon_F2_sffms_lib :
-Hexagon_float_floatfloatfloat_Intrinsic<"HEXAGON_F2_sffms_lib", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyoh_acc_s1 :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_acc_s1">;
-def int_hexagon_F2_sfcmpeq :
-Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpeq", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyoh_s0 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_s0">;
-def int_hexagon_F2_sfcmpgt :
-Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpgt", [IntrNoMem, Throws]>;
+def int_hexagon_M4_vrmpyoh_s1 :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M4_vrmpyoh_s1">;
-def int_hexagon_F2_sfcmpge :
-Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpge", [IntrNoMem, Throws]>;
+def int_hexagon_M4_xor_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_xor_and">;
-def int_hexagon_F2_sfcmpuo :
-Hexagon_i32_floatfloat_Intrinsic<"HEXAGON_F2_sfcmpuo", [IntrNoMem, Throws]>;
+def int_hexagon_M4_xor_andn :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_xor_andn">;
-def int_hexagon_F2_sfmax :
-Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfmax", [IntrNoMem, Throws]>;
+def int_hexagon_M4_xor_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M4_xor_or">;
-def int_hexagon_F2_sfmin :
-Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sfmin", [IntrNoMem, Throws]>;
+def int_hexagon_M4_xor_xacc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M4_xor_xacc">;
-def int_hexagon_F2_sfclass :
-Hexagon_i32_floati32_Intrinsic<"HEXAGON_F2_sfclass", [IntrNoMem, Throws, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_M5_vdmacbsu :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M5_vdmacbsu">;
-def int_hexagon_F2_sfimm_p :
-Hexagon_float_i32_Intrinsic<"HEXAGON_F2_sfimm_p", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_M5_vdmpybsu :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M5_vdmpybsu">;
-def int_hexagon_F2_sfimm_n :
-Hexagon_float_i32_Intrinsic<"HEXAGON_F2_sfimm_n", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_M5_vmacbsu :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M5_vmacbsu">;
-def int_hexagon_F2_sffixupn :
-Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sffixupn", [IntrNoMem, Throws]>;
+def int_hexagon_M5_vmacbuu :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_M5_vmacbuu">;
-def int_hexagon_F2_sffixupd :
-Hexagon_float_floatfloat_Intrinsic<"HEXAGON_F2_sffixupd", [IntrNoMem, Throws]>;
+def int_hexagon_M5_vmpybsu :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M5_vmpybsu">;
-def int_hexagon_F2_sffixupr :
-Hexagon_float_float_Intrinsic<"HEXAGON_F2_sffixupr", [IntrNoMem, Throws]>;
+def int_hexagon_M5_vmpybuu :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_M5_vmpybuu">;
-def int_hexagon_F2_dfcmpeq :
-Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpeq", [IntrNoMem, Throws]>;
+def int_hexagon_M5_vrmacbsu :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M5_vrmacbsu">;
-def int_hexagon_F2_dfcmpgt :
-Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpgt", [IntrNoMem, Throws]>;
+def int_hexagon_M5_vrmacbuu :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M5_vrmacbuu">;
-def int_hexagon_F2_dfcmpge :
-Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpge", [IntrNoMem, Throws]>;
+def int_hexagon_M5_vrmpybsu :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M5_vrmpybsu">;
-def int_hexagon_F2_dfcmpuo :
-Hexagon_i32_doubledouble_Intrinsic<"HEXAGON_F2_dfcmpuo", [IntrNoMem, Throws]>;
+def int_hexagon_M5_vrmpybuu :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M5_vrmpybuu">;
-def int_hexagon_F2_dfclass :
-Hexagon_i32_doublei32_Intrinsic<"HEXAGON_F2_dfclass", [IntrNoMem, Throws, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_addasl_rrri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_addasl_rrri", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_dfimm_p :
-Hexagon_double_i32_Intrinsic<"HEXAGON_F2_dfimm_p", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_S2_asl_i_p :
+Hexagon_custom_i64_i64i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_F2_dfimm_n :
-Hexagon_double_i32_Intrinsic<"HEXAGON_F2_dfimm_n", [IntrNoMem, Throws, ImmArg<ArgIndex<0>>]>;
+def int_hexagon_S2_asl_i_p_acc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_sf2df :
-Hexagon_double_float_Intrinsic<"HEXAGON_F2_conv_sf2df">;
+def int_hexagon_S2_asl_i_p_and :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_df2sf :
-Hexagon_float_double_Intrinsic<"HEXAGON_F2_conv_df2sf">;
+def int_hexagon_S2_asl_i_p_nac :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_uw2sf :
-Hexagon_float_i32_Intrinsic<"HEXAGON_F2_conv_uw2sf">;
+def int_hexagon_S2_asl_i_p_or :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_uw2df :
-Hexagon_double_i32_Intrinsic<"HEXAGON_F2_conv_uw2df">;
+def int_hexagon_S2_asl_i_p_xacc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_w2sf :
-Hexagon_float_i32_Intrinsic<"HEXAGON_F2_conv_w2sf">;
+def int_hexagon_S2_asl_i_r :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_F2_conv_w2df :
-Hexagon_double_i32_Intrinsic<"HEXAGON_F2_conv_w2df">;
+def int_hexagon_S2_asl_i_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_ud2sf :
-Hexagon_float_i64_Intrinsic<"HEXAGON_F2_conv_ud2sf">;
+def int_hexagon_S2_asl_i_r_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_ud2df :
-Hexagon_double_i64_Intrinsic<"HEXAGON_F2_conv_ud2df">;
+def int_hexagon_S2_asl_i_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_d2sf :
-Hexagon_float_i64_Intrinsic<"HEXAGON_F2_conv_d2sf">;
+def int_hexagon_S2_asl_i_r_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_d2df :
-Hexagon_double_i64_Intrinsic<"HEXAGON_F2_conv_d2df">;
+def int_hexagon_S2_asl_i_r_sat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_sat", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_F2_conv_sf2uw :
-Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2uw">;
+def int_hexagon_S2_asl_i_r_xacc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_F2_conv_sf2w :
-Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2w">;
+def int_hexagon_S2_asl_i_vh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_i_vh", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_F2_conv_sf2ud :
-Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2ud">;
+def int_hexagon_S2_asl_i_vw :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_i_vw", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_F2_conv_sf2d :
-Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2d">;
+def int_hexagon_S2_asl_r_p :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_r_p">;
-def int_hexagon_F2_conv_df2uw :
-Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2uw">;
+def int_hexagon_S2_asl_r_p_acc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_acc">;
-def int_hexagon_F2_conv_df2w :
-Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2w">;
+def int_hexagon_S2_asl_r_p_and :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_and">;
-def int_hexagon_F2_conv_df2ud :
-Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2ud">;
+def int_hexagon_S2_asl_r_p_nac :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_nac">;
-def int_hexagon_F2_conv_df2d :
-Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2d">;
+def int_hexagon_S2_asl_r_p_or :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_or">;
-def int_hexagon_F2_conv_sf2uw_chop :
-Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2uw_chop">;
+def int_hexagon_S2_asl_r_p_xor :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_xor">;
-def int_hexagon_F2_conv_sf2w_chop :
-Hexagon_i32_float_Intrinsic<"HEXAGON_F2_conv_sf2w_chop">;
+def int_hexagon_S2_asl_r_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asl_r_r">;
-def int_hexagon_F2_conv_sf2ud_chop :
-Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2ud_chop">;
+def int_hexagon_S2_asl_r_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_acc">;
-def int_hexagon_F2_conv_sf2d_chop :
-Hexagon_i64_float_Intrinsic<"HEXAGON_F2_conv_sf2d_chop">;
+def int_hexagon_S2_asl_r_r_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_and">;
-def int_hexagon_F2_conv_df2uw_chop :
-Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2uw_chop">;
+def int_hexagon_S2_asl_r_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_nac">;
-def int_hexagon_F2_conv_df2w_chop :
-Hexagon_i32_double_Intrinsic<"HEXAGON_F2_conv_df2w_chop">;
+def int_hexagon_S2_asl_r_r_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_or">;
-def int_hexagon_F2_conv_df2ud_chop :
-Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2ud_chop">;
+def int_hexagon_S2_asl_r_r_sat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_sat">;
-def int_hexagon_F2_conv_df2d_chop :
-Hexagon_i64_double_Intrinsic<"HEXAGON_F2_conv_df2d_chop">;
+def int_hexagon_S2_asl_r_vh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_r_vh">;
-def int_hexagon_S2_asr_r_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_r_r">;
+def int_hexagon_S2_asl_r_vw :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_r_vw">;
-def int_hexagon_S2_asl_r_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asl_r_r">;
+def int_hexagon_S2_asr_i_p :
+Hexagon_custom_i64_i64i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_lsr_r_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r">;
+def int_hexagon_S2_asr_i_p_acc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_lsl_r_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r">;
+def int_hexagon_S2_asr_i_p_and :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asr_r_p :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_r_p">;
+def int_hexagon_S2_asr_i_p_nac :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asl_r_p :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_r_p">;
+def int_hexagon_S2_asr_i_p_or :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_lsr_r_p :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p">;
+def int_hexagon_S2_asr_i_p_rnd :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_rnd", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_lsl_r_p :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p">;
+def int_hexagon_S2_asr_i_p_rnd_goodsyntax :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_rnd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_asr_r_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_acc">;
+def int_hexagon_S2_asr_i_r :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_asl_r_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_acc">;
+def int_hexagon_S2_asr_i_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_lsr_r_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_acc">;
+def int_hexagon_S2_asr_i_r_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_lsl_r_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_acc">;
+def int_hexagon_S2_asr_i_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asr_r_p_acc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_acc">;
+def int_hexagon_S2_asr_i_r_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asl_r_p_acc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_acc">;
+def int_hexagon_S2_asr_i_r_rnd :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_rnd", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_lsr_r_p_acc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_acc">;
+def int_hexagon_S2_asr_i_r_rnd_goodsyntax :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_rnd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_lsl_r_p_acc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_acc">;
+def int_hexagon_S2_asr_i_svw_trun :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S2_asr_i_svw_trun", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_asr_r_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_nac">;
+def int_hexagon_S2_asr_i_vh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_vh", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_asl_r_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_nac">;
+def int_hexagon_S2_asr_i_vw :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_vw", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_lsr_r_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_nac">;
+def int_hexagon_S2_asr_r_p :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_r_p">;
-def int_hexagon_S2_lsl_r_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_nac">;
+def int_hexagon_S2_asr_r_p_acc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_acc">;
+
+def int_hexagon_S2_asr_r_p_and :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_and">;
def int_hexagon_S2_asr_r_p_nac :
Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_nac">;
-def int_hexagon_S2_asl_r_p_nac :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_nac">;
+def int_hexagon_S2_asr_r_p_or :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_or">;
-def int_hexagon_S2_lsr_r_p_nac :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_nac">;
+def int_hexagon_S2_asr_r_p_xor :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_xor">;
-def int_hexagon_S2_lsl_r_p_nac :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_nac">;
+def int_hexagon_S2_asr_r_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_r_r">;
+
+def int_hexagon_S2_asr_r_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_acc">;
def int_hexagon_S2_asr_r_r_and :
Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_and">;
-def int_hexagon_S2_asl_r_r_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_and">;
-
-def int_hexagon_S2_lsr_r_r_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_and">;
-
-def int_hexagon_S2_lsl_r_r_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_and">;
+def int_hexagon_S2_asr_r_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_nac">;
def int_hexagon_S2_asr_r_r_or :
Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_or">;
-def int_hexagon_S2_asl_r_r_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_or">;
-
-def int_hexagon_S2_lsr_r_r_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_or">;
+def int_hexagon_S2_asr_r_r_sat :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_sat">;
-def int_hexagon_S2_lsl_r_r_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_or">;
+def int_hexagon_S2_asr_r_svw_trun :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S2_asr_r_svw_trun">;
-def int_hexagon_S2_asr_r_p_and :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_and">;
+def int_hexagon_S2_asr_r_vh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_r_vh">;
-def int_hexagon_S2_asl_r_p_and :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_and">;
+def int_hexagon_S2_asr_r_vw :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_r_vw">;
-def int_hexagon_S2_lsr_r_p_and :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_and">;
+def int_hexagon_S2_brev :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_brev">;
-def int_hexagon_S2_lsl_r_p_and :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_and">;
+def int_hexagon_S2_brevp :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_brevp">;
-def int_hexagon_S2_asr_r_p_or :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_or">;
+def int_hexagon_S2_cl0 :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_cl0">;
-def int_hexagon_S2_asl_r_p_or :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_or">;
+def int_hexagon_S2_cl0p :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_cl0p">;
-def int_hexagon_S2_lsr_r_p_or :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_or">;
+def int_hexagon_S2_cl1 :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_cl1">;
-def int_hexagon_S2_lsl_r_p_or :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_or">;
+def int_hexagon_S2_cl1p :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_cl1p">;
-def int_hexagon_S2_asr_r_p_xor :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_r_p_xor">;
+def int_hexagon_S2_clb :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_clb">;
-def int_hexagon_S2_asl_r_p_xor :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_r_p_xor">;
+def int_hexagon_S2_clbnorm :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_clbnorm">;
-def int_hexagon_S2_lsr_r_p_xor :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_xor">;
+def int_hexagon_S2_clbp :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_clbp">;
-def int_hexagon_S2_lsl_r_p_xor :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_xor">;
+def int_hexagon_S2_clrbit_i :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_clrbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_asr_r_r_sat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_r_r_sat">;
+def int_hexagon_S2_clrbit_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_clrbit_r">;
-def int_hexagon_S2_asl_r_r_sat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asl_r_r_sat">;
+def int_hexagon_S2_ct0 :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_ct0">;
-def int_hexagon_S2_asr_i_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_i_r", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_ct0p :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_ct0p">;
-def int_hexagon_S2_lsr_i_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_ct1 :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_ct1">;
-def int_hexagon_S2_asl_i_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asl_i_r", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_ct1p :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_ct1p">;
-def int_hexagon_S2_asr_i_p :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_p", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_deinterleave :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_deinterleave">;
-def int_hexagon_S2_lsr_i_p :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_extractu :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_extractu", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asl_i_p :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_i_p", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_extractu_rp :
+Hexagon_i32_i32i64_Intrinsic<"HEXAGON_S2_extractu_rp">;
-def int_hexagon_S2_asr_i_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_extractup :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_S2_extractup", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_lsr_i_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_extractup_rp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_extractup_rp">;
-def int_hexagon_S2_asl_i_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_insert :
+Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_insert", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_S2_asr_i_p_acc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_insert_rp :
+Hexagon_i32_i32i32i64_Intrinsic<"HEXAGON_S2_insert_rp">;
-def int_hexagon_S2_lsr_i_p_acc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_insertp :
+Hexagon_i64_i64i64i32i32_Intrinsic<"HEXAGON_S2_insertp", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_S2_asl_i_p_acc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_insertp_rp :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_S2_insertp_rp">;
-def int_hexagon_S2_asr_i_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_interleave :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_interleave">;
-def int_hexagon_S2_lsr_i_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lfsp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_lfsp">;
-def int_hexagon_S2_asl_i_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_p :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p">;
-def int_hexagon_S2_asr_i_p_nac :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_p_acc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_acc">;
-def int_hexagon_S2_lsr_i_p_nac :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_p_and :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_and">;
-def int_hexagon_S2_asl_i_p_nac :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_p_nac :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_nac">;
-def int_hexagon_S2_lsr_i_r_xacc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_p_or :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_or">;
-def int_hexagon_S2_asl_i_r_xacc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_p_xor :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsl_r_p_xor">;
-def int_hexagon_S2_lsr_i_p_xacc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r">;
-def int_hexagon_S2_asl_i_p_xacc :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_acc">;
-def int_hexagon_S2_asr_i_r_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_r_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_and">;
-def int_hexagon_S2_lsr_i_r_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_nac">;
-def int_hexagon_S2_asl_i_r_and :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_r_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsl_r_r_or">;
-def int_hexagon_S2_asr_i_r_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_vh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsl_r_vh">;
-def int_hexagon_S2_lsr_i_r_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsl_r_vw :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsl_r_vw">;
-def int_hexagon_S2_asl_i_r_or :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_p :
+Hexagon_custom_i64_i64i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_asr_i_p_and :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_p_acc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
def int_hexagon_S2_lsr_i_p_and :
Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asl_i_p_and :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-
-def int_hexagon_S2_asr_i_p_or :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_p_nac :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
def int_hexagon_S2_lsr_i_p_or :
Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asl_i_p_or :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_asl_i_p_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-
-def int_hexagon_S2_asl_i_r_sat :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asl_i_r_sat", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
-def int_hexagon_S2_asr_i_r_rnd :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_rnd", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
-def int_hexagon_S2_asr_i_r_rnd_goodsyntax :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_asr_i_r_rnd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
-def int_hexagon_S2_asr_i_p_rnd :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_rnd", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
-def int_hexagon_S2_asr_i_p_rnd_goodsyntax :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_p_rnd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
-def int_hexagon_S4_lsli :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_lsli", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
-
-def int_hexagon_S2_addasl_rrri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_addasl_rrri", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_p_xacc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_i_p_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_andi_asl_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_andi_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_r :
+Hexagon_custom_i32_i32i32_Intrinsic<[IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S4_ori_asl_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_ori_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_addi_asl_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_addi_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_r_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_subi_asl_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_subi_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_andi_lsr_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_andi_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_r_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_ori_lsr_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_ori_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_r_xacc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_i_r_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_addi_lsr_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_addi_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_vh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_i_vh", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S4_subi_lsr_ri :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_subi_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_i_vw :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_i_vw", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_valignib :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_valignib", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_r_p :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p">;
-def int_hexagon_S2_valignrb :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_valignrb">;
+def int_hexagon_S2_lsr_r_p_acc :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_acc">;
-def int_hexagon_S2_vspliceib :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_vspliceib", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_lsr_r_p_and :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_and">;
-def int_hexagon_S2_vsplicerb :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_vsplicerb">;
+def int_hexagon_S2_lsr_r_p_nac :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_nac">;
-def int_hexagon_S2_vsplatrh :
-Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vsplatrh">;
+def int_hexagon_S2_lsr_r_p_or :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_or">;
-def int_hexagon_S2_vsplatrb :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_vsplatrb">;
+def int_hexagon_S2_lsr_r_p_xor :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_lsr_r_p_xor">;
-def int_hexagon_S2_insert :
-Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_insert", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_S2_lsr_r_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r">;
-def int_hexagon_S2_tableidxb_goodsyntax :
-Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxb_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_S2_lsr_r_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_acc">;
-def int_hexagon_S2_tableidxh_goodsyntax :
-Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxh_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_S2_lsr_r_r_and :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_and">;
-def int_hexagon_S2_tableidxw_goodsyntax :
-Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxw_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_S2_lsr_r_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_nac">;
-def int_hexagon_S2_tableidxd_goodsyntax :
-Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_S2_lsr_r_r_or :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_lsr_r_r_or">;
-def int_hexagon_A4_bitspliti :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_bitspliti", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_lsr_r_vh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_r_vh">;
-def int_hexagon_A4_bitsplit :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_A4_bitsplit">;
+def int_hexagon_S2_lsr_r_vw :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_r_vw">;
-def int_hexagon_S4_extract :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_extract", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_packhl :
+Hexagon_i64_i32i32_Intrinsic<"HEXAGON_S2_packhl">;
-def int_hexagon_S2_extractu :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S2_extractu", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_parityp :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_S2_parityp">;
-def int_hexagon_S2_insertp :
-Hexagon_i64_i64i64i32i32_Intrinsic<"HEXAGON_S2_insertp", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_S2_setbit_i :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_setbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S4_extractp :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_S4_extractp", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_setbit_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_setbit_r">;
-def int_hexagon_S2_extractup :
-Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_S2_extractup", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S2_shuffeb :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffeb">;
-def int_hexagon_S2_insert_rp :
-Hexagon_i32_i32i32i64_Intrinsic<"HEXAGON_S2_insert_rp">;
+def int_hexagon_S2_shuffeh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffeh">;
-def int_hexagon_S4_extract_rp :
-Hexagon_i32_i32i64_Intrinsic<"HEXAGON_S4_extract_rp">;
+def int_hexagon_S2_shuffob :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffob">;
-def int_hexagon_S2_extractu_rp :
-Hexagon_i32_i32i64_Intrinsic<"HEXAGON_S2_extractu_rp">;
+def int_hexagon_S2_shuffoh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffoh">;
-def int_hexagon_S2_insertp_rp :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_S2_insertp_rp">;
+def int_hexagon_S2_svsathb :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_svsathb">;
-def int_hexagon_S4_extractp_rp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_extractp_rp">;
+def int_hexagon_S2_svsathub :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_svsathub">;
-def int_hexagon_S2_extractup_rp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_extractup_rp">;
+def int_hexagon_S2_tableidxb_goodsyntax :
+Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxb_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_S2_tstbit_i :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_tstbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_tableidxd_goodsyntax :
+Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_S4_ntstbit_i :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_ntstbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_tableidxh_goodsyntax :
+Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxh_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_S2_setbit_i :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_setbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_tableidxw_goodsyntax :
+Hexagon_i32_i32i32i32i32_Intrinsic<"HEXAGON_S2_tableidxw_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<2>>, ImmArg<ArgIndex<3>>]>;
def int_hexagon_S2_togglebit_i :
Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_togglebit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_clrbit_i :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_clrbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
-def int_hexagon_S2_tstbit_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_tstbit_r">;
-
-def int_hexagon_S4_ntstbit_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_ntstbit_r">;
-
-def int_hexagon_S2_setbit_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_setbit_r">;
-
def int_hexagon_S2_togglebit_r :
Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_togglebit_r">;
-def int_hexagon_S2_clrbit_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_clrbit_r">;
-
-def int_hexagon_S2_asr_i_vh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_vh", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_tstbit_i :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_tstbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_lsr_i_vh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_i_vh", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_tstbit_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_tstbit_r">;
-def int_hexagon_S2_asl_i_vh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_i_vh", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_valignib :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_valignib", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_asr_r_vh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_r_vh">;
+def int_hexagon_S2_valignrb :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_valignrb">;
-def int_hexagon_S5_asrhub_rnd_sat_goodsyntax :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S5_asrhub_rnd_sat_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_vcnegh :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_vcnegh">;
-def int_hexagon_S5_asrhub_sat :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S5_asrhub_sat", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_vcrotate :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_vcrotate">;
-def int_hexagon_S5_vasrhrnd_goodsyntax :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S5_vasrhrnd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_vrcnegh :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_vrcnegh">;
-def int_hexagon_S2_asl_r_vh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_r_vh">;
+def int_hexagon_S2_vrndpackwh :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vrndpackwh">;
-def int_hexagon_S2_lsr_r_vh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_r_vh">;
+def int_hexagon_S2_vrndpackwhs :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vrndpackwhs">;
-def int_hexagon_S2_lsl_r_vh :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsl_r_vh">;
+def int_hexagon_S2_vsathb :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsathb">;
-def int_hexagon_S2_asr_i_vw :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_i_vw", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_vsathb_nopack :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsathb_nopack">;
-def int_hexagon_S2_asr_i_svw_trun :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S2_asr_i_svw_trun", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_vsathub :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsathub">;
-def int_hexagon_S2_asr_r_svw_trun :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S2_asr_r_svw_trun">;
+def int_hexagon_S2_vsathub_nopack :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsathub_nopack">;
-def int_hexagon_S2_lsr_i_vw :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_i_vw", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_vsatwh :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsatwh">;
-def int_hexagon_S2_asl_i_vw :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_i_vw", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S2_vsatwh_nopack :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsatwh_nopack">;
-def int_hexagon_S2_asr_r_vw :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asr_r_vw">;
+def int_hexagon_S2_vsatwuh :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsatwuh">;
-def int_hexagon_S2_asl_r_vw :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_asl_r_vw">;
+def int_hexagon_S2_vsatwuh_nopack :
+Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsatwuh_nopack">;
-def int_hexagon_S2_lsr_r_vw :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsr_r_vw">;
+def int_hexagon_S2_vsplatrb :
+Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_vsplatrb">;
-def int_hexagon_S2_lsl_r_vw :
-Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S2_lsl_r_vw">;
+def int_hexagon_S2_vsplatrh :
+Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vsplatrh">;
-def int_hexagon_S2_vrndpackwh :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vrndpackwh">;
+def int_hexagon_S2_vspliceib :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_vspliceib", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_vrndpackwhs :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vrndpackwhs">;
+def int_hexagon_S2_vsplicerb :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S2_vsplicerb">;
def int_hexagon_S2_vsxtbh :
Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vsxtbh">;
-def int_hexagon_S2_vzxtbh :
-Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vzxtbh">;
-
-def int_hexagon_S2_vsathub :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsathub">;
-
-def int_hexagon_S2_svsathub :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_svsathub">;
+def int_hexagon_S2_vsxthw :
+Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vsxthw">;
-def int_hexagon_S2_svsathb :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_svsathb">;
+def int_hexagon_S2_vtrunehb :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vtrunehb">;
-def int_hexagon_S2_vsathb :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsathb">;
+def int_hexagon_S2_vtrunewh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_vtrunewh">;
def int_hexagon_S2_vtrunohb :
Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vtrunohb">;
-def int_hexagon_S2_vtrunewh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_vtrunewh">;
-
def int_hexagon_S2_vtrunowh :
Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_vtrunowh">;
-def int_hexagon_S2_vtrunehb :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vtrunehb">;
-
-def int_hexagon_S2_vsxthw :
-Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vsxthw">;
+def int_hexagon_S2_vzxtbh :
+Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vzxtbh">;
def int_hexagon_S2_vzxthw :
Hexagon_i64_i32_Intrinsic<"HEXAGON_S2_vzxthw">;
-def int_hexagon_S2_vsatwh :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsatwh">;
+def int_hexagon_S4_addaddi :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_addaddi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_vsatwuh :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_vsatwuh">;
+def int_hexagon_S4_addi_asl_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_addi_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_packhl :
-Hexagon_i64_i32i32_Intrinsic<"HEXAGON_S2_packhl">;
+def int_hexagon_S4_addi_lsr_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_addi_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_A2_swiz :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_A2_swiz">;
+def int_hexagon_S4_andi_asl_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_andi_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_vsathub_nopack :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsathub_nopack">;
+def int_hexagon_S4_andi_lsr_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_andi_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_vsathb_nopack :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsathb_nopack">;
+def int_hexagon_S4_clbaddi :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_clbaddi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_vsatwh_nopack :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsatwh_nopack">;
+def int_hexagon_S4_clbpaddi :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S4_clbpaddi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_vsatwuh_nopack :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_vsatwuh_nopack">;
+def int_hexagon_S4_clbpnorm :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S4_clbpnorm">;
-def int_hexagon_S2_shuffob :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffob">;
+def int_hexagon_S4_extract :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_extract", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_shuffeb :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffeb">;
+def int_hexagon_S4_extract_rp :
+Hexagon_i32_i32i64_Intrinsic<"HEXAGON_S4_extract_rp">;
-def int_hexagon_S2_shuffoh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffoh">;
+def int_hexagon_S4_extractp :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_S4_extractp", [IntrNoMem, ImmArg<ArgIndex<1>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_shuffeh :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_shuffeh">;
+def int_hexagon_S4_extractp_rp :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_extractp_rp">;
-def int_hexagon_S5_popcountp :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S5_popcountp">;
+def int_hexagon_S4_lsli :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_lsli", [IntrNoMem, ImmArg<ArgIndex<0>>]>;
-def int_hexagon_S4_parity :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_parity">;
+def int_hexagon_S4_ntstbit_i :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_ntstbit_i", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_parityp :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_S2_parityp">;
+def int_hexagon_S4_ntstbit_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_ntstbit_r">;
-def int_hexagon_S2_lfsp :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S2_lfsp">;
+def int_hexagon_S4_or_andi :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_or_andi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_clbnorm :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_clbnorm">;
+def int_hexagon_S4_or_andix :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_or_andix", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_clbaddi :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_clbaddi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S4_or_ori :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_or_ori", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_clbpnorm :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S4_clbpnorm">;
+def int_hexagon_S4_ori_asl_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_ori_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S4_clbpaddi :
-Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S4_clbpaddi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+def int_hexagon_S4_ori_lsr_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_ori_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_clb :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_clb">;
+def int_hexagon_S4_parity :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S4_parity">;
-def int_hexagon_S2_cl0 :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_cl0">;
+def int_hexagon_S4_subaddi :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_subaddi", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_cl1 :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_cl1">;
+def int_hexagon_S4_subi_asl_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_subi_asl_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_clbp :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_clbp">;
+def int_hexagon_S4_subi_lsr_ri :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S4_subi_lsr_ri", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_cl0p :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_cl0p">;
+def int_hexagon_S4_vrcrotate :
+Hexagon_i64_i64i32i32_Intrinsic<"HEXAGON_S4_vrcrotate", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S2_cl1p :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_cl1p">;
+def int_hexagon_S4_vrcrotate_acc :
+Hexagon_i64_i64i64i32i32_Intrinsic<"HEXAGON_S4_vrcrotate_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_S2_brev :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_brev">;
+def int_hexagon_S4_vxaddsubh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxaddsubh">;
-def int_hexagon_S2_brevp :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_brevp">;
+def int_hexagon_S4_vxaddsubhr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxaddsubhr">;
-def int_hexagon_S2_ct0 :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_ct0">;
+def int_hexagon_S4_vxaddsubw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxaddsubw">;
-def int_hexagon_S2_ct1 :
-Hexagon_i32_i32_Intrinsic<"HEXAGON_S2_ct1">;
+def int_hexagon_S4_vxsubaddh :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxsubaddh">;
-def int_hexagon_S2_ct0p :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_ct0p">;
+def int_hexagon_S4_vxsubaddhr :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxsubaddhr">;
-def int_hexagon_S2_ct1p :
-Hexagon_i32_i64_Intrinsic<"HEXAGON_S2_ct1p">;
+def int_hexagon_S4_vxsubaddw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_S4_vxsubaddw">;
-def int_hexagon_S2_interleave :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_interleave">;
+def int_hexagon_S5_asrhub_rnd_sat_goodsyntax :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S5_asrhub_rnd_sat_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S2_deinterleave :
-Hexagon_i64_i64_Intrinsic<"HEXAGON_S2_deinterleave">;
+def int_hexagon_S5_asrhub_sat :
+Hexagon_i32_i64i32_Intrinsic<"HEXAGON_S5_asrhub_sat", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_Y2_dcfetch :
-Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dcfetch", []>;
+def int_hexagon_S5_popcountp :
+Hexagon_i32_i64_Intrinsic<"HEXAGON_S5_popcountp">;
-def int_hexagon_Y2_dczeroa :
-Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dczeroa", []>;
+def int_hexagon_S5_vasrhrnd_goodsyntax :
+Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S5_vasrhrnd_goodsyntax", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
def int_hexagon_Y2_dccleana :
Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dccleana", []>;
@@ -3607,9 +3587,15 @@ Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dccleana", []>;
def int_hexagon_Y2_dccleaninva :
Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dccleaninva", []>;
+def int_hexagon_Y2_dcfetch :
+Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dcfetch", []>;
+
def int_hexagon_Y2_dcinva :
Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dcinva", []>;
+def int_hexagon_Y2_dczeroa :
+Hexagon__ptr_Intrinsic<"HEXAGON_Y2_dczeroa", []>;
+
def int_hexagon_Y4_l2fetch :
Hexagon__ptri32_Intrinsic<"HEXAGON_Y4_l2fetch", []>;
@@ -3618,41 +3604,41 @@ Hexagon__ptri64_Intrinsic<"HEXAGON_Y5_l2fetch", []>;
// V60 Scalar Instructions.
-def int_hexagon_S6_rol_i_r :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S6_rol_i_r", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
def int_hexagon_S6_rol_i_p :
Hexagon_i64_i64i32_Intrinsic<"HEXAGON_S6_rol_i_p", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-def int_hexagon_S6_rol_i_r_acc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-
def int_hexagon_S6_rol_i_p_acc :
Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S6_rol_i_p_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S6_rol_i_r_nac :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S6_rol_i_p_and :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S6_rol_i_p_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
def int_hexagon_S6_rol_i_p_nac :
Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S6_rol_i_p_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S6_rol_i_r_xacc :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S6_rol_i_p_or :
+Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S6_rol_i_p_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
def int_hexagon_S6_rol_i_p_xacc :
Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S6_rol_i_p_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S6_rol_i_r :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S6_rol_i_r", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+
+def int_hexagon_S6_rol_i_r_acc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_acc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
def int_hexagon_S6_rol_i_r_and :
Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S6_rol_i_r_nac :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_nac", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
def int_hexagon_S6_rol_i_r_or :
Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_S6_rol_i_p_and :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S6_rol_i_p_and", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-
-def int_hexagon_S6_rol_i_p_or :
-Hexagon_i64_i64i64i32_Intrinsic<"HEXAGON_S6_rol_i_p_or", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_S6_rol_i_r_xacc :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_S6_rol_i_r_xacc", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
// V62 Scalar Instructions.
@@ -3678,73 +3664,22 @@ Hexagon_i32_i64i64_Intrinsic<"HEXAGON_A6_vcmpbeq_notany">;
// V66 Scalar Instructions.
-def int_hexagon_M2_mnaci :
-Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mnaci">;
-
def int_hexagon_F2_dfadd :
Hexagon_double_doubledouble_Intrinsic<"HEXAGON_F2_dfadd", [IntrNoMem, Throws]>;
def int_hexagon_F2_dfsub :
Hexagon_double_doubledouble_Intrinsic<"HEXAGON_F2_dfsub", [IntrNoMem, Throws]>;
+def int_hexagon_M2_mnaci :
+Hexagon_i32_i32i32i32_Intrinsic<"HEXAGON_M2_mnaci">;
+
def int_hexagon_S2_mask :
Hexagon_i32_i32i32_Intrinsic<"HEXAGON_S2_mask", [IntrNoMem, ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<1>>]>;
// V67 Scalar Instructions.
-def int_hexagon_M7_dcmpyrw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyrw">;
-
-def int_hexagon_M7_dcmpyrw_acc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyrw_acc">;
-
-def int_hexagon_M7_dcmpyrwc :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyrwc">;
-
-def int_hexagon_M7_dcmpyrwc_acc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyrwc_acc">;
-
-def int_hexagon_M7_dcmpyiw :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyiw">;
-
-def int_hexagon_M7_dcmpyiw_acc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyiw_acc">;
-
-def int_hexagon_M7_dcmpyiwc :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyiwc">;
-
-def int_hexagon_M7_dcmpyiwc_acc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyiwc_acc">;
-
-def int_hexagon_M7_vdmpy :
-Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_vdmpy">;
-
-def int_hexagon_M7_vdmpy_acc :
-Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_vdmpy_acc">;
-
-def int_hexagon_M7_wcmpyrw :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrw">;
-
-def int_hexagon_M7_wcmpyrwc :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrwc">;
-
-def int_hexagon_M7_wcmpyiw :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiw">;
-
-def int_hexagon_M7_wcmpyiwc :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiwc">;
-
-def int_hexagon_M7_wcmpyrw_rnd :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrw_rnd">;
-
-def int_hexagon_M7_wcmpyrwc_rnd :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrwc_rnd">;
-
-def int_hexagon_M7_wcmpyiw_rnd :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiw_rnd">;
-
-def int_hexagon_M7_wcmpyiwc_rnd :
-Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiwc_rnd">;
+def int_hexagon_A7_clip :
+Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A7_clip", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
def int_hexagon_A7_croundd_ri :
Hexagon_i64_i64i32_Intrinsic<"HEXAGON_A7_croundd_ri", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
@@ -3752,9 +3687,6 @@ Hexagon_i64_i64i32_Intrinsic<"HEXAGON_A7_croundd_ri", [IntrNoMem, ImmArg<ArgInde
def int_hexagon_A7_croundd_rr :
Hexagon_i64_i64i32_Intrinsic<"HEXAGON_A7_croundd_rr">;
-def int_hexagon_A7_clip :
-Hexagon_i32_i32i32_Intrinsic<"HEXAGON_A7_clip", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
-
def int_hexagon_A7_vclip :
Hexagon_i64_i64i32_Intrinsic<"HEXAGON_A7_vclip", [IntrNoMem, ImmArg<ArgIndex<1>>]>;
@@ -3767,436 +3699,420 @@ Hexagon_double_doubledouble_Intrinsic<"HEXAGON_F2_dfmin", [IntrNoMem, Throws]>;
def int_hexagon_F2_dfmpyfix :
Hexagon_double_doubledouble_Intrinsic<"HEXAGON_F2_dfmpyfix", [IntrNoMem, Throws]>;
-def int_hexagon_F2_dfmpyll :
-Hexagon_double_doubledouble_Intrinsic<"HEXAGON_F2_dfmpyll", [IntrNoMem, Throws]>;
-
-def int_hexagon_F2_dfmpylh :
-Hexagon_double_doubledoubledouble_Intrinsic<"HEXAGON_F2_dfmpylh", [IntrNoMem, Throws]>;
-
def int_hexagon_F2_dfmpyhh :
Hexagon_double_doubledoubledouble_Intrinsic<"HEXAGON_F2_dfmpyhh", [IntrNoMem, Throws]>;
-// V60 HVX Instructions.
-
-def int_hexagon_V6_vS32b_qpred_ai :
-Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
-
-def int_hexagon_V6_vS32b_qpred_ai_128B :
-Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
-
-def int_hexagon_V6_vS32b_nqpred_ai :
-Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
+def int_hexagon_F2_dfmpylh :
+Hexagon_double_doubledoubledouble_Intrinsic<"HEXAGON_F2_dfmpylh", [IntrNoMem, Throws]>;
-def int_hexagon_V6_vS32b_nqpred_ai_128B :
-Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
+def int_hexagon_F2_dfmpyll :
+Hexagon_double_doubledouble_Intrinsic<"HEXAGON_F2_dfmpyll", [IntrNoMem, Throws]>;
-def int_hexagon_V6_vS32b_nt_qpred_ai :
-Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
+def int_hexagon_M7_dcmpyiw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyiw">;
-def int_hexagon_V6_vS32b_nt_qpred_ai_128B :
-Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
+def int_hexagon_M7_dcmpyiw_acc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyiw_acc">;
-def int_hexagon_V6_vS32b_nt_nqpred_ai :
-Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
+def int_hexagon_M7_dcmpyiwc :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyiwc">;
-def int_hexagon_V6_vS32b_nt_nqpred_ai_128B :
-Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
+def int_hexagon_M7_dcmpyiwc_acc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyiwc_acc">;
-def int_hexagon_V6_valignb :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_valignb">;
+def int_hexagon_M7_dcmpyrw :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyrw">;
-def int_hexagon_V6_valignb_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_valignb_128B">;
+def int_hexagon_M7_dcmpyrw_acc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyrw_acc">;
-def int_hexagon_V6_vlalignb :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlalignb">;
+def int_hexagon_M7_dcmpyrwc :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_dcmpyrwc">;
-def int_hexagon_V6_vlalignb_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlalignb_128B">;
+def int_hexagon_M7_dcmpyrwc_acc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_dcmpyrwc_acc">;
-def int_hexagon_V6_valignbi :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_valignbi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M7_vdmpy :
+Hexagon_i64_i64i64_Intrinsic<"HEXAGON_M7_vdmpy">;
-def int_hexagon_V6_valignbi_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_valignbi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M7_vdmpy_acc :
+Hexagon_i64_i64i64i64_Intrinsic<"HEXAGON_M7_vdmpy_acc">;
-def int_hexagon_V6_vlalignbi :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlalignbi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M7_wcmpyiw :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiw">;
-def int_hexagon_V6_vlalignbi_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlalignbi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_M7_wcmpyiw_rnd :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiw_rnd">;
-def int_hexagon_V6_vror :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vror">;
+def int_hexagon_M7_wcmpyiwc :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiwc">;
-def int_hexagon_V6_vror_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vror_128B">;
+def int_hexagon_M7_wcmpyiwc_rnd :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyiwc_rnd">;
-def int_hexagon_V6_vunpackub :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackub">;
+def int_hexagon_M7_wcmpyrw :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrw">;
-def int_hexagon_V6_vunpackub_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackub_128B">;
+def int_hexagon_M7_wcmpyrw_rnd :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrw_rnd">;
-def int_hexagon_V6_vunpackb :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackb">;
+def int_hexagon_M7_wcmpyrwc :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrwc">;
-def int_hexagon_V6_vunpackb_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackb_128B">;
+def int_hexagon_M7_wcmpyrwc_rnd :
+Hexagon_i32_i64i64_Intrinsic<"HEXAGON_M7_wcmpyrwc_rnd">;
-def int_hexagon_V6_vunpackuh :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackuh">;
+// V68 Scalar Instructions.
-def int_hexagon_V6_vunpackuh_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackuh_128B">;
+def int_hexagon_Y6_dmlink :
+Hexagon__ptrptr_Intrinsic<"HEXAGON_Y6_dmlink", [IntrArgMemOnly, IntrHasSideEffects]>;
-def int_hexagon_V6_vunpackh :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackh">;
+def int_hexagon_Y6_dmpause :
+Hexagon_i32__Intrinsic<"HEXAGON_Y6_dmpause", [IntrArgMemOnly, IntrHasSideEffects]>;
-def int_hexagon_V6_vunpackh_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackh_128B">;
+def int_hexagon_Y6_dmpoll :
+Hexagon_i32__Intrinsic<"HEXAGON_Y6_dmpoll", [IntrArgMemOnly, IntrHasSideEffects]>;
-def int_hexagon_V6_vunpackob :
-Hexagon_v32i32_v32i32v16i32_Intrinsic<"HEXAGON_V6_vunpackob">;
+def int_hexagon_Y6_dmresume :
+Hexagon__ptr_Intrinsic<"HEXAGON_Y6_dmresume", [IntrArgMemOnly, IntrHasSideEffects]>;
-def int_hexagon_V6_vunpackob_128B :
-Hexagon_v64i32_v64i32v32i32_Intrinsic<"HEXAGON_V6_vunpackob_128B">;
+def int_hexagon_Y6_dmstart :
+Hexagon__ptr_Intrinsic<"HEXAGON_Y6_dmstart", [IntrArgMemOnly, IntrHasSideEffects]>;
-def int_hexagon_V6_vunpackoh :
-Hexagon_v32i32_v32i32v16i32_Intrinsic<"HEXAGON_V6_vunpackoh">;
+def int_hexagon_Y6_dmwait :
+Hexagon_i32__Intrinsic<"HEXAGON_Y6_dmwait", [IntrArgMemOnly, IntrHasSideEffects]>;
-def int_hexagon_V6_vunpackoh_128B :
-Hexagon_v64i32_v64i32v32i32_Intrinsic<"HEXAGON_V6_vunpackoh_128B">;
+// V60 HVX Instructions.
-def int_hexagon_V6_vpackeb :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackeb">;
+def int_hexagon_V6_extractw :
+Hexagon_i32_v16i32i32_Intrinsic<"HEXAGON_V6_extractw">;
-def int_hexagon_V6_vpackeb_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackeb_128B">;
+def int_hexagon_V6_extractw_128B :
+Hexagon_i32_v32i32i32_Intrinsic<"HEXAGON_V6_extractw_128B">;
-def int_hexagon_V6_vpackeh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackeh">;
+def int_hexagon_V6_hi :
+Hexagon_v16i32_v32i32_Intrinsic<"HEXAGON_V6_hi">;
-def int_hexagon_V6_vpackeh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackeh_128B">;
+def int_hexagon_V6_hi_128B :
+Hexagon_v32i32_v64i32_Intrinsic<"HEXAGON_V6_hi_128B">;
-def int_hexagon_V6_vpackob :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackob">;
+def int_hexagon_V6_lo :
+Hexagon_v16i32_v32i32_Intrinsic<"HEXAGON_V6_lo">;
-def int_hexagon_V6_vpackob_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackob_128B">;
+def int_hexagon_V6_lo_128B :
+Hexagon_v32i32_v64i32_Intrinsic<"HEXAGON_V6_lo_128B">;
-def int_hexagon_V6_vpackoh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackoh">;
+def int_hexagon_V6_lvsplatw :
+Hexagon_v16i32_i32_Intrinsic<"HEXAGON_V6_lvsplatw">;
-def int_hexagon_V6_vpackoh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackoh_128B">;
+def int_hexagon_V6_lvsplatw_128B :
+Hexagon_v32i32_i32_Intrinsic<"HEXAGON_V6_lvsplatw_128B">;
-def int_hexagon_V6_vpackhub_sat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackhub_sat">;
+def int_hexagon_V6_pred_and :
+Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
-def int_hexagon_V6_vpackhub_sat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackhub_sat_128B">;
+def int_hexagon_V6_pred_and_128B :
+Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
-def int_hexagon_V6_vpackhb_sat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackhb_sat">;
+def int_hexagon_V6_pred_and_n :
+Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
-def int_hexagon_V6_vpackhb_sat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackhb_sat_128B">;
+def int_hexagon_V6_pred_and_n_128B :
+Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
-def int_hexagon_V6_vpackwuh_sat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackwuh_sat">;
+def int_hexagon_V6_pred_not :
+Hexagon_custom_v64i1_v64i1_Intrinsic;
-def int_hexagon_V6_vpackwuh_sat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackwuh_sat_128B">;
+def int_hexagon_V6_pred_not_128B :
+Hexagon_custom_v128i1_v128i1_Intrinsic_128B;
-def int_hexagon_V6_vpackwh_sat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackwh_sat">;
+def int_hexagon_V6_pred_or :
+Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
-def int_hexagon_V6_vpackwh_sat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackwh_sat_128B">;
+def int_hexagon_V6_pred_or_128B :
+Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
-def int_hexagon_V6_vzb :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vzb">;
+def int_hexagon_V6_pred_or_n :
+Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
-def int_hexagon_V6_vzb_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vzb_128B">;
+def int_hexagon_V6_pred_or_n_128B :
+Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
-def int_hexagon_V6_vsb :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vsb">;
+def int_hexagon_V6_pred_scalar2 :
+Hexagon_custom_v64i1_i32_Intrinsic;
-def int_hexagon_V6_vsb_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vsb_128B">;
+def int_hexagon_V6_pred_scalar2_128B :
+Hexagon_custom_v128i1_i32_Intrinsic_128B;
-def int_hexagon_V6_vzh :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vzh">;
+def int_hexagon_V6_pred_xor :
+Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
-def int_hexagon_V6_vzh_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vzh_128B">;
+def int_hexagon_V6_pred_xor_128B :
+Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
-def int_hexagon_V6_vsh :
-Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vsh">;
+def int_hexagon_V6_vS32b_nqpred_ai :
+Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
-def int_hexagon_V6_vsh_128B :
-Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vsh_128B">;
+def int_hexagon_V6_vS32b_nqpred_ai_128B :
+Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
-def int_hexagon_V6_vdmpybus :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpybus">;
+def int_hexagon_V6_vS32b_nt_nqpred_ai :
+Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
-def int_hexagon_V6_vdmpybus_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_128B">;
+def int_hexagon_V6_vS32b_nt_nqpred_ai_128B :
+Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
-def int_hexagon_V6_vdmpybus_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_acc">;
+def int_hexagon_V6_vS32b_nt_qpred_ai :
+Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
-def int_hexagon_V6_vdmpybus_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_acc_128B">;
+def int_hexagon_V6_vS32b_nt_qpred_ai_128B :
+Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
-def int_hexagon_V6_vdmpybus_dv :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv">;
+def int_hexagon_V6_vS32b_qpred_ai :
+Hexagon_custom__v64i1ptrv16i32_Intrinsic<[IntrWriteMem]>;
-def int_hexagon_V6_vdmpybus_dv_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv_128B">;
+def int_hexagon_V6_vS32b_qpred_ai_128B :
+Hexagon_custom__v128i1ptrv32i32_Intrinsic_128B<[IntrWriteMem]>;
-def int_hexagon_V6_vdmpybus_dv_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv_acc">;
+def int_hexagon_V6_vabsdiffh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffh">;
-def int_hexagon_V6_vdmpybus_dv_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv_acc_128B">;
+def int_hexagon_V6_vabsdiffh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffh_128B">;
-def int_hexagon_V6_vdmpyhb :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb">;
+def int_hexagon_V6_vabsdiffub :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffub">;
-def int_hexagon_V6_vdmpyhb_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_128B">;
+def int_hexagon_V6_vabsdiffub_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffub_128B">;
-def int_hexagon_V6_vdmpyhb_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_acc">;
+def int_hexagon_V6_vabsdiffuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffuh">;
-def int_hexagon_V6_vdmpyhb_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_acc_128B">;
+def int_hexagon_V6_vabsdiffuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffuh_128B">;
-def int_hexagon_V6_vdmpyhb_dv :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv">;
+def int_hexagon_V6_vabsdiffw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffw">;
-def int_hexagon_V6_vdmpyhb_dv_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_128B">;
+def int_hexagon_V6_vabsdiffw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffw_128B">;
-def int_hexagon_V6_vdmpyhb_dv_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_acc">;
+def int_hexagon_V6_vabsh :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsh">;
-def int_hexagon_V6_vdmpyhb_dv_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_acc_128B">;
+def int_hexagon_V6_vabsh_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsh_128B">;
-def int_hexagon_V6_vdmpyhvsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat">;
+def int_hexagon_V6_vabsh_sat :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsh_sat">;
-def int_hexagon_V6_vdmpyhvsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat_128B">;
+def int_hexagon_V6_vabsh_sat_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsh_sat_128B">;
-def int_hexagon_V6_vdmpyhvsat_acc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat_acc">;
+def int_hexagon_V6_vabsw :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsw">;
-def int_hexagon_V6_vdmpyhvsat_acc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat_acc_128B">;
+def int_hexagon_V6_vabsw_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsw_128B">;
-def int_hexagon_V6_vdmpyhsat :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat">;
+def int_hexagon_V6_vabsw_sat :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsw_sat">;
-def int_hexagon_V6_vdmpyhsat_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat_128B">;
+def int_hexagon_V6_vabsw_sat_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsw_sat_128B">;
-def int_hexagon_V6_vdmpyhsat_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat_acc">;
+def int_hexagon_V6_vaddb :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddb">;
-def int_hexagon_V6_vdmpyhsat_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat_acc_128B">;
+def int_hexagon_V6_vaddb_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddb_128B">;
-def int_hexagon_V6_vdmpyhisat :
-Hexagon_v16i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat">;
+def int_hexagon_V6_vaddb_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddb_dv">;
-def int_hexagon_V6_vdmpyhisat_128B :
-Hexagon_v32i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat_128B">;
+def int_hexagon_V6_vaddb_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddb_dv_128B">;
-def int_hexagon_V6_vdmpyhisat_acc :
-Hexagon_v16i32_v16i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat_acc">;
+def int_hexagon_V6_vaddbnq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vdmpyhisat_acc_128B :
-Hexagon_v32i32_v32i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat_acc_128B">;
+def int_hexagon_V6_vaddbnq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vdmpyhsusat :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat">;
+def int_hexagon_V6_vaddbq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vdmpyhsusat_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat_128B">;
+def int_hexagon_V6_vaddbq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vdmpyhsusat_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat_acc">;
+def int_hexagon_V6_vaddh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddh">;
-def int_hexagon_V6_vdmpyhsusat_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat_acc_128B">;
+def int_hexagon_V6_vaddh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddh_128B">;
-def int_hexagon_V6_vdmpyhsuisat :
-Hexagon_v16i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat">;
+def int_hexagon_V6_vaddh_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddh_dv">;
-def int_hexagon_V6_vdmpyhsuisat_128B :
-Hexagon_v32i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_128B">;
+def int_hexagon_V6_vaddh_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddh_dv_128B">;
-def int_hexagon_V6_vdmpyhsuisat_acc :
-Hexagon_v16i32_v16i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_acc">;
+def int_hexagon_V6_vaddhnq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vdmpyhsuisat_acc_128B :
-Hexagon_v32i32_v32i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_acc_128B">;
+def int_hexagon_V6_vaddhnq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vtmpyb :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyb">;
+def int_hexagon_V6_vaddhq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vtmpyb_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyb_128B">;
+def int_hexagon_V6_vaddhq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vtmpyb_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyb_acc">;
+def int_hexagon_V6_vaddhsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddhsat">;
-def int_hexagon_V6_vtmpyb_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyb_acc_128B">;
+def int_hexagon_V6_vaddhsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddhsat_128B">;
-def int_hexagon_V6_vtmpybus :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vtmpybus">;
+def int_hexagon_V6_vaddhsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddhsat_dv">;
-def int_hexagon_V6_vtmpybus_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vtmpybus_128B">;
+def int_hexagon_V6_vaddhsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddhsat_dv_128B">;
-def int_hexagon_V6_vtmpybus_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vtmpybus_acc">;
+def int_hexagon_V6_vaddhw :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddhw">;
-def int_hexagon_V6_vtmpybus_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vtmpybus_acc_128B">;
+def int_hexagon_V6_vaddhw_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddhw_128B">;
-def int_hexagon_V6_vtmpyhb :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb">;
+def int_hexagon_V6_vaddubh :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddubh">;
-def int_hexagon_V6_vtmpyhb_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb_128B">;
+def int_hexagon_V6_vaddubh_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddubh_128B">;
-def int_hexagon_V6_vtmpyhb_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb_acc">;
+def int_hexagon_V6_vaddubsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddubsat">;
-def int_hexagon_V6_vtmpyhb_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb_acc_128B">;
+def int_hexagon_V6_vaddubsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddubsat_128B">;
-def int_hexagon_V6_vrmpyub :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vrmpyub">;
+def int_hexagon_V6_vaddubsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddubsat_dv">;
-def int_hexagon_V6_vrmpyub_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vrmpyub_128B">;
+def int_hexagon_V6_vaddubsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddubsat_dv_128B">;
-def int_hexagon_V6_vrmpyub_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vrmpyub_acc">;
+def int_hexagon_V6_vadduhsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduhsat">;
-def int_hexagon_V6_vrmpyub_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vrmpyub_acc_128B">;
+def int_hexagon_V6_vadduhsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhsat_128B">;
-def int_hexagon_V6_vrmpyubv :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpyubv">;
+def int_hexagon_V6_vadduhsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhsat_dv">;
-def int_hexagon_V6_vrmpyubv_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpyubv_128B">;
+def int_hexagon_V6_vadduhsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vadduhsat_dv_128B">;
-def int_hexagon_V6_vrmpyubv_acc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpyubv_acc">;
+def int_hexagon_V6_vadduhw :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduhw">;
-def int_hexagon_V6_vrmpyubv_acc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpyubv_acc_128B">;
+def int_hexagon_V6_vadduhw_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhw_128B">;
-def int_hexagon_V6_vrmpybv :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybv">;
+def int_hexagon_V6_vaddw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddw">;
-def int_hexagon_V6_vrmpybv_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybv_128B">;
+def int_hexagon_V6_vaddw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddw_128B">;
-def int_hexagon_V6_vrmpybv_acc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybv_acc">;
+def int_hexagon_V6_vaddw_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddw_dv">;
-def int_hexagon_V6_vrmpybv_acc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybv_acc_128B">;
+def int_hexagon_V6_vaddw_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddw_dv_128B">;
-def int_hexagon_V6_vrmpyubi :
-Hexagon_v32i32_v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vaddwnq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vrmpyubi_128B :
-Hexagon_v64i32_v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vaddwnq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vrmpyubi_acc :
-Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vaddwq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vrmpyubi_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi_acc_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vaddwq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vrmpybus :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vrmpybus">;
+def int_hexagon_V6_vaddwsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddwsat">;
-def int_hexagon_V6_vrmpybus_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vrmpybus_128B">;
+def int_hexagon_V6_vaddwsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddwsat_128B">;
-def int_hexagon_V6_vrmpybus_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vrmpybus_acc">;
+def int_hexagon_V6_vaddwsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddwsat_dv">;
-def int_hexagon_V6_vrmpybus_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vrmpybus_acc_128B">;
+def int_hexagon_V6_vaddwsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddwsat_dv_128B">;
-def int_hexagon_V6_vrmpybusi :
-Hexagon_v32i32_v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_valignb :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_valignb">;
-def int_hexagon_V6_vrmpybusi_128B :
-Hexagon_v64i32_v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_valignb_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_valignb_128B">;
-def int_hexagon_V6_vrmpybusi_acc :
-Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_valignbi :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_valignbi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vrmpybusi_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi_acc_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_valignbi_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_valignbi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vrmpybusv :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybusv">;
+def int_hexagon_V6_vand :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vand">;
-def int_hexagon_V6_vrmpybusv_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybusv_128B">;
+def int_hexagon_V6_vand_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vand_128B">;
-def int_hexagon_V6_vrmpybusv_acc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybusv_acc">;
+def int_hexagon_V6_vandqrt :
+Hexagon_custom_v16i32_v64i1i32_Intrinsic;
-def int_hexagon_V6_vrmpybusv_acc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybusv_acc_128B">;
+def int_hexagon_V6_vandqrt_128B :
+Hexagon_custom_v32i32_v128i1i32_Intrinsic_128B;
-def int_hexagon_V6_vdsaduh :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdsaduh">;
+def int_hexagon_V6_vandqrt_acc :
+Hexagon_custom_v16i32_v16i32v64i1i32_Intrinsic;
-def int_hexagon_V6_vdsaduh_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdsaduh_128B">;
+def int_hexagon_V6_vandqrt_acc_128B :
+Hexagon_custom_v32i32_v32i32v128i1i32_Intrinsic_128B;
-def int_hexagon_V6_vdsaduh_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdsaduh_acc">;
+def int_hexagon_V6_vandvrt :
+Hexagon_custom_v64i1_v16i32i32_Intrinsic;
-def int_hexagon_V6_vdsaduh_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vdsaduh_acc_128B">;
+def int_hexagon_V6_vandvrt_128B :
+Hexagon_custom_v128i1_v32i32i32_Intrinsic_128B;
-def int_hexagon_V6_vrsadubi :
-Hexagon_v32i32_v32i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vandvrt_acc :
+Hexagon_custom_v64i1_v64i1v16i32i32_Intrinsic;
-def int_hexagon_V6_vrsadubi_128B :
-Hexagon_v64i32_v64i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vandvrt_acc_128B :
+Hexagon_custom_v128i1_v128i1v32i32i32_Intrinsic_128B;
-def int_hexagon_V6_vrsadubi_acc :
-Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vaslh :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vaslh">;
-def int_hexagon_V6_vrsadubi_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi_acc_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vaslh_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vaslh_128B">;
-def int_hexagon_V6_vasrw :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vasrw">;
+def int_hexagon_V6_vaslhv :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaslhv">;
-def int_hexagon_V6_vasrw_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vasrw_128B">;
+def int_hexagon_V6_vaslhv_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaslhv_128B">;
def int_hexagon_V6_vaslw :
Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vaslw">;
@@ -4204,17 +4120,11 @@ Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vaslw">;
def int_hexagon_V6_vaslw_128B :
Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vaslw_128B">;
-def int_hexagon_V6_vlsrw :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vlsrw">;
-
-def int_hexagon_V6_vlsrw_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vlsrw_128B">;
-
-def int_hexagon_V6_vasrwv :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vasrwv">;
+def int_hexagon_V6_vaslw_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vaslw_acc">;
-def int_hexagon_V6_vasrwv_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vasrwv_128B">;
+def int_hexagon_V6_vaslw_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vaslw_acc_128B">;
def int_hexagon_V6_vaslwv :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaslwv">;
@@ -4222,29 +4132,29 @@ Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaslwv">;
def int_hexagon_V6_vaslwv_128B :
Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaslwv_128B">;
-def int_hexagon_V6_vlsrwv :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vlsrwv">;
-
-def int_hexagon_V6_vlsrwv_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vlsrwv_128B">;
-
def int_hexagon_V6_vasrh :
Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vasrh">;
def int_hexagon_V6_vasrh_128B :
Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vasrh_128B">;
-def int_hexagon_V6_vaslh :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vaslh">;
+def int_hexagon_V6_vasrhbrndsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhbrndsat">;
-def int_hexagon_V6_vaslh_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vaslh_128B">;
+def int_hexagon_V6_vasrhbrndsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhbrndsat_128B">;
-def int_hexagon_V6_vlsrh :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vlsrh">;
+def int_hexagon_V6_vasrhubrndsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhubrndsat">;
-def int_hexagon_V6_vlsrh_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vlsrh_128B">;
+def int_hexagon_V6_vasrhubrndsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhubrndsat_128B">;
+
+def int_hexagon_V6_vasrhubsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhubsat">;
+
+def int_hexagon_V6_vasrhubsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhubsat_128B">;
def int_hexagon_V6_vasrhv :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vasrhv">;
@@ -4252,17 +4162,17 @@ Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vasrhv">;
def int_hexagon_V6_vasrhv_128B :
Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vasrhv_128B">;
-def int_hexagon_V6_vaslhv :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaslhv">;
+def int_hexagon_V6_vasrw :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vasrw">;
-def int_hexagon_V6_vaslhv_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaslhv_128B">;
+def int_hexagon_V6_vasrw_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vasrw_128B">;
-def int_hexagon_V6_vlsrhv :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vlsrhv">;
+def int_hexagon_V6_vasrw_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrw_acc">;
-def int_hexagon_V6_vlsrhv_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vlsrhv_128B">;
+def int_hexagon_V6_vasrw_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrw_acc_128B">;
def int_hexagon_V6_vasrwh :
Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwh">;
@@ -4270,647 +4180,629 @@ Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwh">;
def int_hexagon_V6_vasrwh_128B :
Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrwh_128B">;
-def int_hexagon_V6_vasrwhsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwhsat">;
-
-def int_hexagon_V6_vasrwhsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrwhsat_128B">;
-
def int_hexagon_V6_vasrwhrndsat :
Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwhrndsat">;
def int_hexagon_V6_vasrwhrndsat_128B :
Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrwhrndsat_128B">;
+def int_hexagon_V6_vasrwhsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwhsat">;
+
+def int_hexagon_V6_vasrwhsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrwhsat_128B">;
+
def int_hexagon_V6_vasrwuhsat :
Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwuhsat">;
def int_hexagon_V6_vasrwuhsat_128B :
Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrwuhsat_128B">;
-def int_hexagon_V6_vroundwh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundwh">;
-
-def int_hexagon_V6_vroundwh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundwh_128B">;
-
-def int_hexagon_V6_vroundwuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundwuh">;
-
-def int_hexagon_V6_vroundwuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundwuh_128B">;
-
-def int_hexagon_V6_vasrhubsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhubsat">;
-
-def int_hexagon_V6_vasrhubsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhubsat_128B">;
-
-def int_hexagon_V6_vasrhubrndsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhubrndsat">;
+def int_hexagon_V6_vasrwv :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vasrwv">;
-def int_hexagon_V6_vasrhubrndsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhubrndsat_128B">;
+def int_hexagon_V6_vasrwv_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vasrwv_128B">;
-def int_hexagon_V6_vasrhbrndsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhbrndsat">;
+def int_hexagon_V6_vassign :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vassign">;
-def int_hexagon_V6_vasrhbrndsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhbrndsat_128B">;
+def int_hexagon_V6_vassign_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vassign_128B">;
-def int_hexagon_V6_vroundhb :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundhb">;
+def int_hexagon_V6_vassignp :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vassignp">;
-def int_hexagon_V6_vroundhb_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundhb_128B">;
+def int_hexagon_V6_vassignp_128B :
+Hexagon_v64i32_v64i32_Intrinsic<"HEXAGON_V6_vassignp_128B">;
-def int_hexagon_V6_vroundhub :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundhub">;
+def int_hexagon_V6_vavgh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgh">;
-def int_hexagon_V6_vroundhub_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundhub_128B">;
+def int_hexagon_V6_vavgh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgh_128B">;
-def int_hexagon_V6_vaslw_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vaslw_acc">;
+def int_hexagon_V6_vavghrnd :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavghrnd">;
-def int_hexagon_V6_vaslw_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vaslw_acc_128B">;
+def int_hexagon_V6_vavghrnd_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavghrnd_128B">;
-def int_hexagon_V6_vasrw_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrw_acc">;
+def int_hexagon_V6_vavgub :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgub">;
-def int_hexagon_V6_vasrw_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrw_acc_128B">;
+def int_hexagon_V6_vavgub_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgub_128B">;
-def int_hexagon_V6_vaddb :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddb">;
+def int_hexagon_V6_vavgubrnd :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgubrnd">;
-def int_hexagon_V6_vaddb_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddb_128B">;
+def int_hexagon_V6_vavgubrnd_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgubrnd_128B">;
-def int_hexagon_V6_vsubb :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubb">;
+def int_hexagon_V6_vavguh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguh">;
-def int_hexagon_V6_vsubb_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubb_128B">;
+def int_hexagon_V6_vavguh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguh_128B">;
-def int_hexagon_V6_vaddb_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddb_dv">;
+def int_hexagon_V6_vavguhrnd :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguhrnd">;
-def int_hexagon_V6_vaddb_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddb_dv_128B">;
+def int_hexagon_V6_vavguhrnd_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguhrnd_128B">;
-def int_hexagon_V6_vsubb_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubb_dv">;
+def int_hexagon_V6_vavgw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgw">;
-def int_hexagon_V6_vsubb_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubb_dv_128B">;
+def int_hexagon_V6_vavgw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgw_128B">;
-def int_hexagon_V6_vaddh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddh">;
+def int_hexagon_V6_vavgwrnd :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgwrnd">;
-def int_hexagon_V6_vaddh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddh_128B">;
+def int_hexagon_V6_vavgwrnd_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgwrnd_128B">;
-def int_hexagon_V6_vsubh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubh">;
+def int_hexagon_V6_vcl0h :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vcl0h">;
-def int_hexagon_V6_vsubh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubh_128B">;
+def int_hexagon_V6_vcl0h_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vcl0h_128B">;
-def int_hexagon_V6_vaddh_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddh_dv">;
+def int_hexagon_V6_vcl0w :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vcl0w">;
-def int_hexagon_V6_vaddh_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddh_dv_128B">;
+def int_hexagon_V6_vcl0w_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vcl0w_128B">;
-def int_hexagon_V6_vsubh_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubh_dv">;
+def int_hexagon_V6_vcombine :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vcombine">;
-def int_hexagon_V6_vsubh_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubh_dv_128B">;
+def int_hexagon_V6_vcombine_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vcombine_128B">;
-def int_hexagon_V6_vaddw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddw">;
+def int_hexagon_V6_vd0 :
+Hexagon_v16i32__Intrinsic<"HEXAGON_V6_vd0">;
-def int_hexagon_V6_vaddw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddw_128B">;
+def int_hexagon_V6_vd0_128B :
+Hexagon_v32i32__Intrinsic<"HEXAGON_V6_vd0_128B">;
-def int_hexagon_V6_vsubw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubw">;
+def int_hexagon_V6_vdealb :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vdealb">;
-def int_hexagon_V6_vsubw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubw_128B">;
+def int_hexagon_V6_vdealb_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vdealb_128B">;
-def int_hexagon_V6_vaddw_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddw_dv">;
+def int_hexagon_V6_vdealb4w :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vdealb4w">;
-def int_hexagon_V6_vaddw_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddw_dv_128B">;
+def int_hexagon_V6_vdealb4w_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vdealb4w_128B">;
-def int_hexagon_V6_vsubw_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubw_dv">;
+def int_hexagon_V6_vdealh :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vdealh">;
-def int_hexagon_V6_vsubw_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubw_dv_128B">;
+def int_hexagon_V6_vdealh_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vdealh_128B">;
-def int_hexagon_V6_vaddubsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddubsat">;
+def int_hexagon_V6_vdealvdd :
+Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdealvdd">;
-def int_hexagon_V6_vaddubsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddubsat_128B">;
+def int_hexagon_V6_vdealvdd_128B :
+Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdealvdd_128B">;
-def int_hexagon_V6_vaddubsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddubsat_dv">;
+def int_hexagon_V6_vdelta :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vdelta">;
-def int_hexagon_V6_vaddubsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddubsat_dv_128B">;
+def int_hexagon_V6_vdelta_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vdelta_128B">;
-def int_hexagon_V6_vsububsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsububsat">;
+def int_hexagon_V6_vdmpybus :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpybus">;
-def int_hexagon_V6_vsububsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsububsat_128B">;
+def int_hexagon_V6_vdmpybus_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_128B">;
-def int_hexagon_V6_vsububsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsububsat_dv">;
+def int_hexagon_V6_vdmpybus_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_acc">;
-def int_hexagon_V6_vsububsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsububsat_dv_128B">;
+def int_hexagon_V6_vdmpybus_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_acc_128B">;
-def int_hexagon_V6_vadduhsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduhsat">;
+def int_hexagon_V6_vdmpybus_dv :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv">;
-def int_hexagon_V6_vadduhsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhsat_128B">;
+def int_hexagon_V6_vdmpybus_dv_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv_128B">;
-def int_hexagon_V6_vadduhsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhsat_dv">;
+def int_hexagon_V6_vdmpybus_dv_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv_acc">;
-def int_hexagon_V6_vadduhsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vadduhsat_dv_128B">;
+def int_hexagon_V6_vdmpybus_dv_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpybus_dv_acc_128B">;
-def int_hexagon_V6_vsubuhsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubuhsat">;
+def int_hexagon_V6_vdmpyhb :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb">;
-def int_hexagon_V6_vsubuhsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuhsat_128B">;
+def int_hexagon_V6_vdmpyhb_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_128B">;
-def int_hexagon_V6_vsubuhsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuhsat_dv">;
+def int_hexagon_V6_vdmpyhb_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_acc">;
-def int_hexagon_V6_vsubuhsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubuhsat_dv_128B">;
+def int_hexagon_V6_vdmpyhb_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_acc_128B">;
-def int_hexagon_V6_vaddhsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddhsat">;
+def int_hexagon_V6_vdmpyhb_dv :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv">;
-def int_hexagon_V6_vaddhsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddhsat_128B">;
+def int_hexagon_V6_vdmpyhb_dv_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_128B">;
-def int_hexagon_V6_vaddhsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddhsat_dv">;
+def int_hexagon_V6_vdmpyhb_dv_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_acc">;
-def int_hexagon_V6_vaddhsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddhsat_dv_128B">;
+def int_hexagon_V6_vdmpyhb_dv_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhb_dv_acc_128B">;
-def int_hexagon_V6_vsubhsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubhsat">;
+def int_hexagon_V6_vdmpyhisat :
+Hexagon_v16i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat">;
-def int_hexagon_V6_vsubhsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubhsat_128B">;
+def int_hexagon_V6_vdmpyhisat_128B :
+Hexagon_v32i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat_128B">;
-def int_hexagon_V6_vsubhsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubhsat_dv">;
+def int_hexagon_V6_vdmpyhisat_acc :
+Hexagon_v16i32_v16i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat_acc">;
-def int_hexagon_V6_vsubhsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubhsat_dv_128B">;
+def int_hexagon_V6_vdmpyhisat_acc_128B :
+Hexagon_v32i32_v32i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhisat_acc_128B">;
-def int_hexagon_V6_vaddwsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddwsat">;
+def int_hexagon_V6_vdmpyhsat :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat">;
-def int_hexagon_V6_vaddwsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddwsat_128B">;
+def int_hexagon_V6_vdmpyhsat_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat_128B">;
-def int_hexagon_V6_vaddwsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddwsat_dv">;
+def int_hexagon_V6_vdmpyhsat_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat_acc">;
-def int_hexagon_V6_vaddwsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddwsat_dv_128B">;
+def int_hexagon_V6_vdmpyhsat_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsat_acc_128B">;
-def int_hexagon_V6_vsubwsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubwsat">;
+def int_hexagon_V6_vdmpyhsuisat :
+Hexagon_v16i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat">;
-def int_hexagon_V6_vsubwsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubwsat_128B">;
+def int_hexagon_V6_vdmpyhsuisat_128B :
+Hexagon_v32i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_128B">;
-def int_hexagon_V6_vsubwsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubwsat_dv">;
+def int_hexagon_V6_vdmpyhsuisat_acc :
+Hexagon_v16i32_v16i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_acc">;
-def int_hexagon_V6_vsubwsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubwsat_dv_128B">;
+def int_hexagon_V6_vdmpyhsuisat_acc_128B :
+Hexagon_v32i32_v32i32v64i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsuisat_acc_128B">;
-def int_hexagon_V6_vavgub :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgub">;
+def int_hexagon_V6_vdmpyhsusat :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat">;
-def int_hexagon_V6_vavgub_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgub_128B">;
+def int_hexagon_V6_vdmpyhsusat_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat_128B">;
-def int_hexagon_V6_vavgubrnd :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgubrnd">;
+def int_hexagon_V6_vdmpyhsusat_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat_acc">;
-def int_hexagon_V6_vavgubrnd_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgubrnd_128B">;
+def int_hexagon_V6_vdmpyhsusat_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdmpyhsusat_acc_128B">;
-def int_hexagon_V6_vavguh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguh">;
+def int_hexagon_V6_vdmpyhvsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat">;
-def int_hexagon_V6_vavguh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguh_128B">;
+def int_hexagon_V6_vdmpyhvsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat_128B">;
-def int_hexagon_V6_vavguhrnd :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguhrnd">;
+def int_hexagon_V6_vdmpyhvsat_acc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat_acc">;
-def int_hexagon_V6_vavguhrnd_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguhrnd_128B">;
+def int_hexagon_V6_vdmpyhvsat_acc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vdmpyhvsat_acc_128B">;
-def int_hexagon_V6_vavgh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgh">;
+def int_hexagon_V6_vdsaduh :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vdsaduh">;
-def int_hexagon_V6_vavgh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgh_128B">;
+def int_hexagon_V6_vdsaduh_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vdsaduh_128B">;
-def int_hexagon_V6_vavghrnd :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavghrnd">;
+def int_hexagon_V6_vdsaduh_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdsaduh_acc">;
-def int_hexagon_V6_vavghrnd_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavghrnd_128B">;
+def int_hexagon_V6_vdsaduh_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vdsaduh_acc_128B">;
-def int_hexagon_V6_vnavgh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgh">;
+def int_hexagon_V6_veqb :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vnavgh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgh_128B">;
+def int_hexagon_V6_veqb_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vavgw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgw">;
+def int_hexagon_V6_veqb_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vavgw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgw_128B">;
+def int_hexagon_V6_veqb_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vavgwrnd :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgwrnd">;
+def int_hexagon_V6_veqb_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vavgwrnd_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgwrnd_128B">;
+def int_hexagon_V6_veqb_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vnavgw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgw">;
+def int_hexagon_V6_veqb_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vnavgw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgw_128B">;
+def int_hexagon_V6_veqb_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsdiffub :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffub">;
+def int_hexagon_V6_veqh :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsdiffub_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffub_128B">;
+def int_hexagon_V6_veqh_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsdiffuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffuh">;
+def int_hexagon_V6_veqh_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsdiffuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffuh_128B">;
+def int_hexagon_V6_veqh_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsdiffh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffh">;
+def int_hexagon_V6_veqh_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsdiffh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffh_128B">;
+def int_hexagon_V6_veqh_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsdiffw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vabsdiffw">;
+def int_hexagon_V6_veqh_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsdiffw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vabsdiffw_128B">;
+def int_hexagon_V6_veqh_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vnavgub :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgub">;
+def int_hexagon_V6_veqw :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vnavgub_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgub_128B">;
+def int_hexagon_V6_veqw_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddubh :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddubh">;
+def int_hexagon_V6_veqw_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddubh_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddubh_128B">;
+def int_hexagon_V6_veqw_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsububh :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsububh">;
+def int_hexagon_V6_veqw_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsububh_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsububh_128B">;
+def int_hexagon_V6_veqw_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddhw :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddhw">;
+def int_hexagon_V6_veqw_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddhw_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddhw_128B">;
+def int_hexagon_V6_veqw_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubhw :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubhw">;
+def int_hexagon_V6_vgtb :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubhw_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubhw_128B">;
+def int_hexagon_V6_vgtb_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vadduhw :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduhw">;
+def int_hexagon_V6_vgtb_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vadduhw_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhw_128B">;
+def int_hexagon_V6_vgtb_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubuhw :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubuhw">;
+def int_hexagon_V6_vgtb_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubuhw_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuhw_128B">;
+def int_hexagon_V6_vgtb_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vd0 :
-Hexagon_v16i32__Intrinsic<"HEXAGON_V6_vd0">;
+def int_hexagon_V6_vgtb_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vd0_128B :
-Hexagon_v32i32__Intrinsic<"HEXAGON_V6_vd0_128B">;
+def int_hexagon_V6_vgtb_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddbq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgth :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddbq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgth_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubbq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgth_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubbq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgth_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddbnq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgth_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddbnq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgth_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubbnq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgth_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubbnq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgth_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddhq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtub :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddhq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtub_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubhq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtub_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubhq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtub_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddhnq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtub_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddhnq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtub_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubhnq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtub_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubhnq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtub_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddwq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtuh :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddwq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtuh_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubwq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtuh_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubwq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtuh_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vaddwnq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtuh_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vaddwnq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtuh_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vsubwnq :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vgtuh_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vsubwnq_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vgtuh_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsh :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsh">;
+def int_hexagon_V6_vgtuw :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsh_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsh_128B">;
+def int_hexagon_V6_vgtuw_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsh_sat :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsh_sat">;
+def int_hexagon_V6_vgtuw_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsh_sat_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsh_sat_128B">;
+def int_hexagon_V6_vgtuw_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsw :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsw">;
+def int_hexagon_V6_vgtuw_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsw_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsw_128B">;
+def int_hexagon_V6_vgtuw_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vabsw_sat :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsw_sat">;
+def int_hexagon_V6_vgtuw_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vabsw_sat_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsw_sat_128B">;
+def int_hexagon_V6_vgtuw_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vmpybv :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybv">;
+def int_hexagon_V6_vgtw :
+Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vmpybv_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybv_128B">;
+def int_hexagon_V6_vgtw_128B :
+Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vmpybv_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybv_acc">;
+def int_hexagon_V6_vgtw_and :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vmpybv_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybv_acc_128B">;
+def int_hexagon_V6_vgtw_and_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vmpyubv :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyubv">;
+def int_hexagon_V6_vgtw_or :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vmpyubv_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyubv_128B">;
+def int_hexagon_V6_vgtw_or_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vmpyubv_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyubv_acc">;
+def int_hexagon_V6_vgtw_xor :
+Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vmpyubv_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyubv_acc_128B">;
+def int_hexagon_V6_vgtw_xor_128B :
+Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vmpybusv :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybusv">;
+def int_hexagon_V6_vinsertwr :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vinsertwr">;
-def int_hexagon_V6_vmpybusv_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybusv_128B">;
+def int_hexagon_V6_vinsertwr_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vinsertwr_128B">;
-def int_hexagon_V6_vmpybusv_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybusv_acc">;
+def int_hexagon_V6_vlalignb :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlalignb">;
-def int_hexagon_V6_vmpybusv_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybusv_acc_128B">;
+def int_hexagon_V6_vlalignb_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlalignb_128B">;
-def int_hexagon_V6_vmpabusv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpabusv">;
+def int_hexagon_V6_vlalignbi :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlalignbi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vmpabusv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vmpabusv_128B">;
+def int_hexagon_V6_vlalignbi_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlalignbi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vmpabuuv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpabuuv">;
+def int_hexagon_V6_vlsrh :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vlsrh">;
-def int_hexagon_V6_vmpabuuv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vmpabuuv_128B">;
+def int_hexagon_V6_vlsrh_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vlsrh_128B">;
-def int_hexagon_V6_vmpyhv :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhv">;
+def int_hexagon_V6_vlsrhv :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vlsrhv">;
-def int_hexagon_V6_vmpyhv_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhv_128B">;
+def int_hexagon_V6_vlsrhv_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vlsrhv_128B">;
-def int_hexagon_V6_vmpyhv_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhv_acc">;
+def int_hexagon_V6_vlsrw :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vlsrw">;
-def int_hexagon_V6_vmpyhv_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhv_acc_128B">;
+def int_hexagon_V6_vlsrw_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vlsrw_128B">;
-def int_hexagon_V6_vmpyuhv :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyuhv">;
+def int_hexagon_V6_vlsrwv :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vlsrwv">;
-def int_hexagon_V6_vmpyuhv_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyuhv_128B">;
+def int_hexagon_V6_vlsrwv_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vlsrwv_128B">;
-def int_hexagon_V6_vmpyuhv_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyuhv_acc">;
+def int_hexagon_V6_vlutvvb :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb">;
-def int_hexagon_V6_vmpyuhv_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyuhv_acc_128B">;
+def int_hexagon_V6_vlutvvb_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_128B">;
-def int_hexagon_V6_vmpyhvsrs :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhvsrs">;
+def int_hexagon_V6_vlutvvb_oracc :
+Hexagon_v16i32_v16i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracc">;
-def int_hexagon_V6_vmpyhvsrs_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhvsrs_128B">;
+def int_hexagon_V6_vlutvvb_oracc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracc_128B">;
-def int_hexagon_V6_vmpyhus :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhus">;
+def int_hexagon_V6_vlutvwh :
+Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh">;
-def int_hexagon_V6_vmpyhus_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhus_128B">;
+def int_hexagon_V6_vlutvwh_128B :
+Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_128B">;
-def int_hexagon_V6_vmpyhus_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhus_acc">;
+def int_hexagon_V6_vlutvwh_oracc :
+Hexagon_v32i32_v32i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracc">;
-def int_hexagon_V6_vmpyhus_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhus_acc_128B">;
+def int_hexagon_V6_vlutvwh_oracc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracc_128B">;
-def int_hexagon_V6_vmpyih :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyih">;
+def int_hexagon_V6_vmaxh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxh">;
-def int_hexagon_V6_vmpyih_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyih_128B">;
+def int_hexagon_V6_vmaxh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxh_128B">;
-def int_hexagon_V6_vmpyih_acc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyih_acc">;
+def int_hexagon_V6_vmaxub :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxub">;
-def int_hexagon_V6_vmpyih_acc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyih_acc_128B">;
+def int_hexagon_V6_vmaxub_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxub_128B">;
-def int_hexagon_V6_vmpyewuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyewuh">;
+def int_hexagon_V6_vmaxuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxuh">;
-def int_hexagon_V6_vmpyewuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyewuh_128B">;
+def int_hexagon_V6_vmaxuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxuh_128B">;
-def int_hexagon_V6_vmpyowh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh">;
+def int_hexagon_V6_vmaxw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxw">;
-def int_hexagon_V6_vmpyowh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_128B">;
+def int_hexagon_V6_vmaxw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxw_128B">;
-def int_hexagon_V6_vmpyowh_rnd :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd">;
+def int_hexagon_V6_vminh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminh">;
-def int_hexagon_V6_vmpyowh_rnd_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_128B">;
+def int_hexagon_V6_vminh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminh_128B">;
-def int_hexagon_V6_vmpyowh_sacc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_sacc">;
+def int_hexagon_V6_vminub :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminub">;
-def int_hexagon_V6_vmpyowh_sacc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_sacc_128B">;
+def int_hexagon_V6_vminub_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminub_128B">;
-def int_hexagon_V6_vmpyowh_rnd_sacc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_sacc">;
+def int_hexagon_V6_vminuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminuh">;
-def int_hexagon_V6_vmpyowh_rnd_sacc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_sacc_128B">;
+def int_hexagon_V6_vminuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminuh_128B">;
-def int_hexagon_V6_vmpyieoh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyieoh">;
+def int_hexagon_V6_vminw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminw">;
-def int_hexagon_V6_vmpyieoh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyieoh_128B">;
+def int_hexagon_V6_vminw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminw_128B">;
-def int_hexagon_V6_vmpyiewuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiewuh">;
+def int_hexagon_V6_vmpabus :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpabus">;
-def int_hexagon_V6_vmpyiewuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiewuh_128B">;
+def int_hexagon_V6_vmpabus_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vmpabus_128B">;
-def int_hexagon_V6_vmpyiowh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiowh">;
+def int_hexagon_V6_vmpabus_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpabus_acc">;
-def int_hexagon_V6_vmpyiowh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiowh_128B">;
+def int_hexagon_V6_vmpabus_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vmpabus_acc_128B">;
-def int_hexagon_V6_vmpyiewh_acc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiewh_acc">;
+def int_hexagon_V6_vmpabusv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpabusv">;
-def int_hexagon_V6_vmpyiewh_acc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiewh_acc_128B">;
+def int_hexagon_V6_vmpabusv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vmpabusv_128B">;
-def int_hexagon_V6_vmpyiewuh_acc :
-Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiewuh_acc">;
+def int_hexagon_V6_vmpabuuv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpabuuv">;
-def int_hexagon_V6_vmpyiewuh_acc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiewuh_acc_128B">;
+def int_hexagon_V6_vmpabuuv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vmpabuuv_128B">;
-def int_hexagon_V6_vmpyub :
-Hexagon_v32i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyub">;
+def int_hexagon_V6_vmpahb :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpahb">;
-def int_hexagon_V6_vmpyub_128B :
-Hexagon_v64i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyub_128B">;
+def int_hexagon_V6_vmpahb_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vmpahb_128B">;
-def int_hexagon_V6_vmpyub_acc :
-Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyub_acc">;
+def int_hexagon_V6_vmpahb_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpahb_acc">;
-def int_hexagon_V6_vmpyub_acc_128B :
-Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyub_acc_128B">;
+def int_hexagon_V6_vmpahb_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vmpahb_acc_128B">;
def int_hexagon_V6_vmpybus :
Hexagon_v32i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpybus">;
@@ -4924,29 +4816,35 @@ Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpybus_acc">;
def int_hexagon_V6_vmpybus_acc_128B :
Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpybus_acc_128B">;
-def int_hexagon_V6_vmpabus :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpabus">;
+def int_hexagon_V6_vmpybusv :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybusv">;
-def int_hexagon_V6_vmpabus_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vmpabus_128B">;
+def int_hexagon_V6_vmpybusv_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybusv_128B">;
-def int_hexagon_V6_vmpabus_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpabus_acc">;
+def int_hexagon_V6_vmpybusv_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybusv_acc">;
-def int_hexagon_V6_vmpabus_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vmpabus_acc_128B">;
+def int_hexagon_V6_vmpybusv_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybusv_acc_128B">;
-def int_hexagon_V6_vmpahb :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpahb">;
+def int_hexagon_V6_vmpybv :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybv">;
-def int_hexagon_V6_vmpahb_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vmpahb_128B">;
+def int_hexagon_V6_vmpybv_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybv_128B">;
-def int_hexagon_V6_vmpahb_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpahb_acc">;
+def int_hexagon_V6_vmpybv_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpybv_acc">;
-def int_hexagon_V6_vmpahb_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vmpahb_acc_128B">;
+def int_hexagon_V6_vmpybv_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpybv_acc_128B">;
+
+def int_hexagon_V6_vmpyewuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyewuh">;
+
+def int_hexagon_V6_vmpyewuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyewuh_128B">;
def int_hexagon_V6_vmpyh :
Hexagon_v32i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyh">;
@@ -4960,29 +4858,83 @@ Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyhsat_acc">;
def int_hexagon_V6_vmpyhsat_acc_128B :
Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyhsat_acc_128B">;
+def int_hexagon_V6_vmpyhsrs :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyhsrs">;
+
+def int_hexagon_V6_vmpyhsrs_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyhsrs_128B">;
+
def int_hexagon_V6_vmpyhss :
Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyhss">;
def int_hexagon_V6_vmpyhss_128B :
Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyhss_128B">;
-def int_hexagon_V6_vmpyhsrs :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyhsrs">;
+def int_hexagon_V6_vmpyhus :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhus">;
-def int_hexagon_V6_vmpyhsrs_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyhsrs_128B">;
+def int_hexagon_V6_vmpyhus_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhus_128B">;
-def int_hexagon_V6_vmpyuh :
-Hexagon_v32i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyuh">;
+def int_hexagon_V6_vmpyhus_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhus_acc">;
-def int_hexagon_V6_vmpyuh_128B :
-Hexagon_v64i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyuh_128B">;
+def int_hexagon_V6_vmpyhus_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhus_acc_128B">;
-def int_hexagon_V6_vmpyuh_acc :
-Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyuh_acc">;
+def int_hexagon_V6_vmpyhv :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhv">;
-def int_hexagon_V6_vmpyuh_acc_128B :
-Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyuh_acc_128B">;
+def int_hexagon_V6_vmpyhv_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhv_128B">;
+
+def int_hexagon_V6_vmpyhv_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhv_acc">;
+
+def int_hexagon_V6_vmpyhv_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhv_acc_128B">;
+
+def int_hexagon_V6_vmpyhvsrs :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyhvsrs">;
+
+def int_hexagon_V6_vmpyhvsrs_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyhvsrs_128B">;
+
+def int_hexagon_V6_vmpyieoh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyieoh">;
+
+def int_hexagon_V6_vmpyieoh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyieoh_128B">;
+
+def int_hexagon_V6_vmpyiewh_acc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiewh_acc">;
+
+def int_hexagon_V6_vmpyiewh_acc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiewh_acc_128B">;
+
+def int_hexagon_V6_vmpyiewuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiewuh">;
+
+def int_hexagon_V6_vmpyiewuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiewuh_128B">;
+
+def int_hexagon_V6_vmpyiewuh_acc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiewuh_acc">;
+
+def int_hexagon_V6_vmpyiewuh_acc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiewuh_acc_128B">;
+
+def int_hexagon_V6_vmpyih :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyih">;
+
+def int_hexagon_V6_vmpyih_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyih_128B">;
+
+def int_hexagon_V6_vmpyih_acc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyih_acc">;
+
+def int_hexagon_V6_vmpyih_acc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyih_acc_128B">;
def int_hexagon_V6_vmpyihb :
Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyihb">;
@@ -4996,6 +4948,12 @@ Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyihb_acc">;
def int_hexagon_V6_vmpyihb_acc_128B :
Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyihb_acc_128B">;
+def int_hexagon_V6_vmpyiowh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyiowh">;
+
+def int_hexagon_V6_vmpyiowh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyiowh_128B">;
+
def int_hexagon_V6_vmpyiwb :
Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyiwb">;
@@ -5020,371 +4978,311 @@ Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyiwh_acc">;
def int_hexagon_V6_vmpyiwh_acc_128B :
Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyiwh_acc_128B">;
-def int_hexagon_V6_vand :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vand">;
-
-def int_hexagon_V6_vand_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vand_128B">;
-
-def int_hexagon_V6_vor :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vor">;
-
-def int_hexagon_V6_vor_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vor_128B">;
-
-def int_hexagon_V6_vxor :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vxor">;
-
-def int_hexagon_V6_vxor_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vxor_128B">;
-
-def int_hexagon_V6_vnot :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vnot">;
-
-def int_hexagon_V6_vnot_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vnot_128B">;
-
-def int_hexagon_V6_vandqrt :
-Hexagon_custom_v16i32_v64i1i32_Intrinsic;
-
-def int_hexagon_V6_vandqrt_128B :
-Hexagon_custom_v32i32_v128i1i32_Intrinsic_128B;
-
-def int_hexagon_V6_vandqrt_acc :
-Hexagon_custom_v16i32_v16i32v64i1i32_Intrinsic;
-
-def int_hexagon_V6_vandqrt_acc_128B :
-Hexagon_custom_v32i32_v32i32v128i1i32_Intrinsic_128B;
-
-def int_hexagon_V6_vandvrt :
-Hexagon_custom_v64i1_v16i32i32_Intrinsic;
-
-def int_hexagon_V6_vandvrt_128B :
-Hexagon_custom_v128i1_v32i32i32_Intrinsic_128B;
-
-def int_hexagon_V6_vandvrt_acc :
-Hexagon_custom_v64i1_v64i1v16i32i32_Intrinsic;
-
-def int_hexagon_V6_vandvrt_acc_128B :
-Hexagon_custom_v128i1_v128i1v32i32i32_Intrinsic_128B;
-
-def int_hexagon_V6_vgtw :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
-
-def int_hexagon_V6_vgtw_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
-
-def int_hexagon_V6_vgtw_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
-
-def int_hexagon_V6_vgtw_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
-
-def int_hexagon_V6_vgtw_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyowh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh">;
-def int_hexagon_V6_vgtw_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyowh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_128B">;
-def int_hexagon_V6_vgtw_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyowh_rnd :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd">;
-def int_hexagon_V6_vgtw_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyowh_rnd_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_128B">;
-def int_hexagon_V6_veqw :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyowh_rnd_sacc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_sacc">;
-def int_hexagon_V6_veqw_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyowh_rnd_sacc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_rnd_sacc_128B">;
-def int_hexagon_V6_veqw_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyowh_sacc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_sacc">;
-def int_hexagon_V6_veqw_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyowh_sacc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_sacc_128B">;
-def int_hexagon_V6_veqw_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyub :
+Hexagon_v32i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyub">;
-def int_hexagon_V6_veqw_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyub_128B :
+Hexagon_v64i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyub_128B">;
-def int_hexagon_V6_veqw_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyub_acc :
+Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyub_acc">;
-def int_hexagon_V6_veqw_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyub_acc_128B :
+Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyub_acc_128B">;
-def int_hexagon_V6_vgth :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyubv :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyubv">;
-def int_hexagon_V6_vgth_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyubv_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyubv_128B">;
-def int_hexagon_V6_vgth_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyubv_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyubv_acc">;
-def int_hexagon_V6_vgth_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyubv_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyubv_acc_128B">;
-def int_hexagon_V6_vgth_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyuh :
+Hexagon_v32i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyuh">;
-def int_hexagon_V6_vgth_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyuh_128B :
+Hexagon_v64i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyuh_128B">;
-def int_hexagon_V6_vgth_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyuh_acc :
+Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyuh_acc">;
-def int_hexagon_V6_vgth_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyuh_acc_128B :
+Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyuh_acc_128B">;
-def int_hexagon_V6_veqh :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyuhv :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyuhv">;
-def int_hexagon_V6_veqh_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyuhv_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyuhv_128B">;
-def int_hexagon_V6_veqh_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmpyuhv_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyuhv_acc">;
-def int_hexagon_V6_veqh_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmpyuhv_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyuhv_acc_128B">;
-def int_hexagon_V6_veqh_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vmux :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_veqh_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vmux_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_veqh_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vnavgh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgh">;
-def int_hexagon_V6_veqh_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vnavgh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgh_128B">;
-def int_hexagon_V6_vgtb :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vnavgub :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgub">;
-def int_hexagon_V6_vgtb_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vnavgub_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgub_128B">;
-def int_hexagon_V6_vgtb_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vnavgw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgw">;
-def int_hexagon_V6_vgtb_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vnavgw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgw_128B">;
-def int_hexagon_V6_vgtb_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vnormamth :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vnormamth">;
-def int_hexagon_V6_vgtb_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vnormamth_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vnormamth_128B">;
-def int_hexagon_V6_vgtb_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vnormamtw :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vnormamtw">;
-def int_hexagon_V6_vgtb_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vnormamtw_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vnormamtw_128B">;
-def int_hexagon_V6_veqb :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vnot :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vnot">;
-def int_hexagon_V6_veqb_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vnot_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vnot_128B">;
-def int_hexagon_V6_veqb_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vor :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vor">;
-def int_hexagon_V6_veqb_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vor_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vor_128B">;
-def int_hexagon_V6_veqb_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackeb :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackeb">;
-def int_hexagon_V6_veqb_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackeb_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackeb_128B">;
-def int_hexagon_V6_veqb_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackeh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackeh">;
-def int_hexagon_V6_veqb_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackeh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackeh_128B">;
-def int_hexagon_V6_vgtuw :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackhb_sat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackhb_sat">;
-def int_hexagon_V6_vgtuw_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackhb_sat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackhb_sat_128B">;
-def int_hexagon_V6_vgtuw_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackhub_sat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackhub_sat">;
-def int_hexagon_V6_vgtuw_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackhub_sat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackhub_sat_128B">;
-def int_hexagon_V6_vgtuw_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackob :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackob">;
-def int_hexagon_V6_vgtuw_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackob_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackob_128B">;
-def int_hexagon_V6_vgtuw_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackoh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackoh">;
-def int_hexagon_V6_vgtuw_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackoh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackoh_128B">;
-def int_hexagon_V6_vgtuh :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackwh_sat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackwh_sat">;
-def int_hexagon_V6_vgtuh_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackwh_sat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackwh_sat_128B">;
-def int_hexagon_V6_vgtuh_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpackwuh_sat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vpackwuh_sat">;
-def int_hexagon_V6_vgtuh_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpackwuh_sat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vpackwuh_sat_128B">;
-def int_hexagon_V6_vgtuh_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vpopcounth :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vpopcounth">;
-def int_hexagon_V6_vgtuh_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vpopcounth_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vpopcounth_128B">;
-def int_hexagon_V6_vgtuh_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vrdelta :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrdelta">;
-def int_hexagon_V6_vgtuh_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vrdelta_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrdelta_128B">;
-def int_hexagon_V6_vgtub :
-Hexagon_custom_v64i1_v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vrmpybus :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vrmpybus">;
-def int_hexagon_V6_vgtub_128B :
-Hexagon_custom_v128i1_v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vrmpybus_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vrmpybus_128B">;
-def int_hexagon_V6_vgtub_and :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vrmpybus_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vrmpybus_acc">;
-def int_hexagon_V6_vgtub_and_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vrmpybus_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vrmpybus_acc_128B">;
-def int_hexagon_V6_vgtub_or :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vrmpybusi :
+Hexagon_v32i32_v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vgtub_or_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vrmpybusi_128B :
+Hexagon_v64i32_v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vgtub_xor :
-Hexagon_custom_v64i1_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vrmpybusi_acc :
+Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_V6_vgtub_xor_128B :
-Hexagon_custom_v128i1_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vrmpybusi_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpybusi_acc_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_V6_pred_or :
-Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+def int_hexagon_V6_vrmpybusv :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybusv">;
-def int_hexagon_V6_pred_or_128B :
-Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+def int_hexagon_V6_vrmpybusv_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybusv_128B">;
-def int_hexagon_V6_pred_and :
-Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+def int_hexagon_V6_vrmpybusv_acc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybusv_acc">;
-def int_hexagon_V6_pred_and_128B :
-Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+def int_hexagon_V6_vrmpybusv_acc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybusv_acc_128B">;
-def int_hexagon_V6_pred_not :
-Hexagon_custom_v64i1_v64i1_Intrinsic;
+def int_hexagon_V6_vrmpybv :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybv">;
-def int_hexagon_V6_pred_not_128B :
-Hexagon_custom_v128i1_v128i1_Intrinsic_128B;
+def int_hexagon_V6_vrmpybv_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybv_128B">;
-def int_hexagon_V6_pred_xor :
-Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+def int_hexagon_V6_vrmpybv_acc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpybv_acc">;
-def int_hexagon_V6_pred_xor_128B :
-Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+def int_hexagon_V6_vrmpybv_acc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpybv_acc_128B">;
-def int_hexagon_V6_pred_and_n :
-Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+def int_hexagon_V6_vrmpyub :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vrmpyub">;
-def int_hexagon_V6_pred_and_n_128B :
-Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+def int_hexagon_V6_vrmpyub_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vrmpyub_128B">;
-def int_hexagon_V6_pred_or_n :
-Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+def int_hexagon_V6_vrmpyub_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vrmpyub_acc">;
-def int_hexagon_V6_pred_or_n_128B :
-Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+def int_hexagon_V6_vrmpyub_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vrmpyub_acc_128B">;
-def int_hexagon_V6_pred_scalar2 :
-Hexagon_custom_v64i1_i32_Intrinsic;
+def int_hexagon_V6_vrmpyubi :
+Hexagon_v32i32_v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_pred_scalar2_128B :
-Hexagon_custom_v128i1_i32_Intrinsic_128B;
+def int_hexagon_V6_vrmpyubi_128B :
+Hexagon_v64i32_v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vmux :
-Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vrmpyubi_acc :
+Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_V6_vmux_128B :
-Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vrmpyubi_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<"HEXAGON_V6_vrmpyubi_acc_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_V6_vswap :
-Hexagon_custom_v32i32_v64i1v16i32v16i32_Intrinsic;
+def int_hexagon_V6_vrmpyubv :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpyubv">;
-def int_hexagon_V6_vswap_128B :
-Hexagon_custom_v64i32_v128i1v32i32v32i32_Intrinsic_128B;
+def int_hexagon_V6_vrmpyubv_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpyubv_128B">;
-def int_hexagon_V6_vmaxub :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxub">;
+def int_hexagon_V6_vrmpyubv_acc :
+Hexagon_v16i32_v16i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vrmpyubv_acc">;
-def int_hexagon_V6_vmaxub_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxub_128B">;
+def int_hexagon_V6_vrmpyubv_acc_128B :
+Hexagon_v32i32_v32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vrmpyubv_acc_128B">;
-def int_hexagon_V6_vminub :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminub">;
+def int_hexagon_V6_vror :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vror">;
-def int_hexagon_V6_vminub_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminub_128B">;
+def int_hexagon_V6_vror_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vror_128B">;
-def int_hexagon_V6_vmaxuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxuh">;
+def int_hexagon_V6_vroundhb :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundhb">;
-def int_hexagon_V6_vmaxuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxuh_128B">;
+def int_hexagon_V6_vroundhb_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundhb_128B">;
-def int_hexagon_V6_vminuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminuh">;
+def int_hexagon_V6_vroundhub :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundhub">;
-def int_hexagon_V6_vminuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminuh_128B">;
+def int_hexagon_V6_vroundhub_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundhub_128B">;
-def int_hexagon_V6_vmaxh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxh">;
+def int_hexagon_V6_vroundwh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundwh">;
-def int_hexagon_V6_vmaxh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxh_128B">;
+def int_hexagon_V6_vroundwh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundwh_128B">;
-def int_hexagon_V6_vminh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminh">;
+def int_hexagon_V6_vroundwuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vroundwuh">;
-def int_hexagon_V6_vminh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminh_128B">;
+def int_hexagon_V6_vroundwuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vroundwuh_128B">;
-def int_hexagon_V6_vmaxw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxw">;
+def int_hexagon_V6_vrsadubi :
+Hexagon_v32i32_v32i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vmaxw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmaxw_128B">;
+def int_hexagon_V6_vrsadubi_128B :
+Hexagon_v64i32_v64i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
-def int_hexagon_V6_vminw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminw">;
+def int_hexagon_V6_vrsadubi_acc :
+Hexagon_v32i32_v32i32v32i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi_acc", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
-def int_hexagon_V6_vminw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminw_128B">;
+def int_hexagon_V6_vrsadubi_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32i32_Intrinsic<"HEXAGON_V6_vrsadubi_acc_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
def int_hexagon_V6_vsathub :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsathub">;
@@ -5398,17 +5296,17 @@ Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsatwh">;
def int_hexagon_V6_vsatwh_128B :
Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsatwh_128B">;
-def int_hexagon_V6_vshuffeb :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshuffeb">;
+def int_hexagon_V6_vsb :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vsb">;
-def int_hexagon_V6_vshuffeb_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshuffeb_128B">;
+def int_hexagon_V6_vsb_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vsb_128B">;
-def int_hexagon_V6_vshuffob :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshuffob">;
+def int_hexagon_V6_vsh :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vsh">;
-def int_hexagon_V6_vshuffob_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshuffob_128B">;
+def int_hexagon_V6_vsh_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vsh_128B">;
def int_hexagon_V6_vshufeh :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufeh">;
@@ -5416,11 +5314,29 @@ Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufeh">;
def int_hexagon_V6_vshufeh_128B :
Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshufeh_128B">;
-def int_hexagon_V6_vshufoh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufoh">;
+def int_hexagon_V6_vshuffb :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vshuffb">;
-def int_hexagon_V6_vshufoh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshufoh_128B">;
+def int_hexagon_V6_vshuffb_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vshuffb_128B">;
+
+def int_hexagon_V6_vshuffeb :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshuffeb">;
+
+def int_hexagon_V6_vshuffeb_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshuffeb_128B">;
+
+def int_hexagon_V6_vshuffh :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vshuffh">;
+
+def int_hexagon_V6_vshuffh_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vshuffh_128B">;
+
+def int_hexagon_V6_vshuffob :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshuffob">;
+
+def int_hexagon_V6_vshuffob_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshuffob_128B">;
def int_hexagon_V6_vshuffvdd :
Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vshuffvdd">;
@@ -5428,11 +5344,11 @@ Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vshuffvdd">;
def int_hexagon_V6_vshuffvdd_128B :
Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vshuffvdd_128B">;
-def int_hexagon_V6_vdealvdd :
-Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vdealvdd">;
+def int_hexagon_V6_vshufoeb :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufoeb">;
-def int_hexagon_V6_vdealvdd_128B :
-Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vdealvdd_128B">;
+def int_hexagon_V6_vshufoeb_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshufoeb_128B">;
def int_hexagon_V6_vshufoeh :
Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufoeh">;
@@ -5440,217 +5356,277 @@ Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufoeh">;
def int_hexagon_V6_vshufoeh_128B :
Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshufoeh_128B">;
-def int_hexagon_V6_vshufoeb :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufoeb">;
+def int_hexagon_V6_vshufoh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vshufoh">;
-def int_hexagon_V6_vshufoeb_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshufoeb_128B">;
+def int_hexagon_V6_vshufoh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vshufoh_128B">;
-def int_hexagon_V6_vdealh :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vdealh">;
+def int_hexagon_V6_vsubb :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubb">;
-def int_hexagon_V6_vdealh_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vdealh_128B">;
+def int_hexagon_V6_vsubb_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubb_128B">;
-def int_hexagon_V6_vdealb :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vdealb">;
+def int_hexagon_V6_vsubb_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubb_dv">;
-def int_hexagon_V6_vdealb_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vdealb_128B">;
+def int_hexagon_V6_vsubb_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubb_dv_128B">;
-def int_hexagon_V6_vdealb4w :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vdealb4w">;
+def int_hexagon_V6_vsubbnq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vdealb4w_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vdealb4w_128B">;
+def int_hexagon_V6_vsubbnq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vshuffh :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vshuffh">;
+def int_hexagon_V6_vsubbq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vshuffh_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vshuffh_128B">;
+def int_hexagon_V6_vsubbq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vshuffb :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vshuffb">;
+def int_hexagon_V6_vsubh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubh">;
-def int_hexagon_V6_vshuffb_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vshuffb_128B">;
+def int_hexagon_V6_vsubh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubh_128B">;
-def int_hexagon_V6_extractw :
-Hexagon_i32_v16i32i32_Intrinsic<"HEXAGON_V6_extractw">;
+def int_hexagon_V6_vsubh_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubh_dv">;
-def int_hexagon_V6_extractw_128B :
-Hexagon_i32_v32i32i32_Intrinsic<"HEXAGON_V6_extractw_128B">;
+def int_hexagon_V6_vsubh_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubh_dv_128B">;
-def int_hexagon_V6_vinsertwr :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vinsertwr">;
+def int_hexagon_V6_vsubhnq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vinsertwr_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vinsertwr_128B">;
+def int_hexagon_V6_vsubhnq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_lvsplatw :
-Hexagon_v16i32_i32_Intrinsic<"HEXAGON_V6_lvsplatw">;
+def int_hexagon_V6_vsubhq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_lvsplatw_128B :
-Hexagon_v32i32_i32_Intrinsic<"HEXAGON_V6_lvsplatw_128B">;
+def int_hexagon_V6_vsubhq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vassignp :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vassignp">;
+def int_hexagon_V6_vsubhsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubhsat">;
-def int_hexagon_V6_vassignp_128B :
-Hexagon_v64i32_v64i32_Intrinsic<"HEXAGON_V6_vassignp_128B">;
+def int_hexagon_V6_vsubhsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubhsat_128B">;
-def int_hexagon_V6_vassign :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vassign">;
+def int_hexagon_V6_vsubhsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubhsat_dv">;
-def int_hexagon_V6_vassign_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vassign_128B">;
+def int_hexagon_V6_vsubhsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubhsat_dv_128B">;
-def int_hexagon_V6_vcombine :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vcombine">;
+def int_hexagon_V6_vsubhw :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubhw">;
-def int_hexagon_V6_vcombine_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vcombine_128B">;
+def int_hexagon_V6_vsubhw_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubhw_128B">;
-def int_hexagon_V6_vdelta :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vdelta">;
+def int_hexagon_V6_vsububh :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsububh">;
-def int_hexagon_V6_vdelta_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vdelta_128B">;
+def int_hexagon_V6_vsububh_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsububh_128B">;
-def int_hexagon_V6_vrdelta :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrdelta">;
+def int_hexagon_V6_vsububsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsububsat">;
-def int_hexagon_V6_vrdelta_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrdelta_128B">;
+def int_hexagon_V6_vsububsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsububsat_128B">;
-def int_hexagon_V6_vcl0w :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vcl0w">;
+def int_hexagon_V6_vsububsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsububsat_dv">;
-def int_hexagon_V6_vcl0w_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vcl0w_128B">;
+def int_hexagon_V6_vsububsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsububsat_dv_128B">;
-def int_hexagon_V6_vcl0h :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vcl0h">;
+def int_hexagon_V6_vsubuhsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubuhsat">;
-def int_hexagon_V6_vcl0h_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vcl0h_128B">;
+def int_hexagon_V6_vsubuhsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuhsat_128B">;
-def int_hexagon_V6_vnormamtw :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vnormamtw">;
+def int_hexagon_V6_vsubuhsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuhsat_dv">;
-def int_hexagon_V6_vnormamtw_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vnormamtw_128B">;
+def int_hexagon_V6_vsubuhsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubuhsat_dv_128B">;
-def int_hexagon_V6_vnormamth :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vnormamth">;
+def int_hexagon_V6_vsubuhw :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubuhw">;
-def int_hexagon_V6_vnormamth_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vnormamth_128B">;
+def int_hexagon_V6_vsubuhw_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuhw_128B">;
-def int_hexagon_V6_vpopcounth :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vpopcounth">;
+def int_hexagon_V6_vsubw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubw">;
-def int_hexagon_V6_vpopcounth_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vpopcounth_128B">;
+def int_hexagon_V6_vsubw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubw_128B">;
-def int_hexagon_V6_vlutvvb :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb">;
+def int_hexagon_V6_vsubw_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubw_dv">;
-def int_hexagon_V6_vlutvvb_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_128B">;
+def int_hexagon_V6_vsubw_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubw_dv_128B">;
-def int_hexagon_V6_vlutvvb_oracc :
-Hexagon_v16i32_v16i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracc">;
+def int_hexagon_V6_vsubwnq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vlutvvb_oracc_128B :
-Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracc_128B">;
+def int_hexagon_V6_vsubwnq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vlutvwh :
-Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh">;
+def int_hexagon_V6_vsubwq :
+Hexagon_custom_v16i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_vlutvwh_128B :
-Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_128B">;
+def int_hexagon_V6_vsubwq_128B :
+Hexagon_custom_v32i32_v128i1v32i32v32i32_Intrinsic_128B;
-def int_hexagon_V6_vlutvwh_oracc :
-Hexagon_v32i32_v32i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracc">;
+def int_hexagon_V6_vsubwsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubwsat">;
-def int_hexagon_V6_vlutvwh_oracc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracc_128B">;
+def int_hexagon_V6_vsubwsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubwsat_128B">;
-def int_hexagon_V6_hi :
-Hexagon_v16i32_v32i32_Intrinsic<"HEXAGON_V6_hi">;
+def int_hexagon_V6_vsubwsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubwsat_dv">;
-def int_hexagon_V6_hi_128B :
-Hexagon_v32i32_v64i32_Intrinsic<"HEXAGON_V6_hi_128B">;
+def int_hexagon_V6_vsubwsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubwsat_dv_128B">;
-def int_hexagon_V6_lo :
-Hexagon_v16i32_v32i32_Intrinsic<"HEXAGON_V6_lo">;
+def int_hexagon_V6_vswap :
+Hexagon_custom_v32i32_v64i1v16i32v16i32_Intrinsic;
-def int_hexagon_V6_lo_128B :
-Hexagon_v32i32_v64i32_Intrinsic<"HEXAGON_V6_lo_128B">;
+def int_hexagon_V6_vswap_128B :
+Hexagon_custom_v64i32_v128i1v32i32v32i32_Intrinsic_128B;
-// V62 HVX Instructions.
+def int_hexagon_V6_vtmpyb :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyb">;
-def int_hexagon_V6_vlsrb :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vlsrb">;
+def int_hexagon_V6_vtmpyb_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyb_128B">;
-def int_hexagon_V6_vlsrb_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vlsrb_128B">;
+def int_hexagon_V6_vtmpyb_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyb_acc">;
-def int_hexagon_V6_vasrwuhrndsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwuhrndsat">;
+def int_hexagon_V6_vtmpyb_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyb_acc_128B">;
-def int_hexagon_V6_vasrwuhrndsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrwuhrndsat_128B">;
+def int_hexagon_V6_vtmpybus :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vtmpybus">;
-def int_hexagon_V6_vasruwuhrndsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruwuhrndsat">;
+def int_hexagon_V6_vtmpybus_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vtmpybus_128B">;
-def int_hexagon_V6_vasruwuhrndsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruwuhrndsat_128B">;
+def int_hexagon_V6_vtmpybus_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vtmpybus_acc">;
-def int_hexagon_V6_vasrhbsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhbsat">;
+def int_hexagon_V6_vtmpybus_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vtmpybus_acc_128B">;
-def int_hexagon_V6_vasrhbsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhbsat_128B">;
+def int_hexagon_V6_vtmpyhb :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb">;
-def int_hexagon_V6_vrounduwuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrounduwuh">;
+def int_hexagon_V6_vtmpyhb_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb_128B">;
-def int_hexagon_V6_vrounduwuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrounduwuh_128B">;
+def int_hexagon_V6_vtmpyhb_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb_acc">;
-def int_hexagon_V6_vrounduhub :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrounduhub">;
+def int_hexagon_V6_vtmpyhb_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vtmpyhb_acc_128B">;
-def int_hexagon_V6_vrounduhub_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrounduhub_128B">;
+def int_hexagon_V6_vunpackb :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackb">;
-def int_hexagon_V6_vadduwsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduwsat">;
+def int_hexagon_V6_vunpackb_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackb_128B">;
-def int_hexagon_V6_vadduwsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduwsat_128B">;
+def int_hexagon_V6_vunpackh :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackh">;
-def int_hexagon_V6_vadduwsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduwsat_dv">;
+def int_hexagon_V6_vunpackh_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackh_128B">;
-def int_hexagon_V6_vadduwsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vadduwsat_dv_128B">;
+def int_hexagon_V6_vunpackob :
+Hexagon_v32i32_v32i32v16i32_Intrinsic<"HEXAGON_V6_vunpackob">;
-def int_hexagon_V6_vsubuwsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubuwsat">;
+def int_hexagon_V6_vunpackob_128B :
+Hexagon_v64i32_v64i32v32i32_Intrinsic<"HEXAGON_V6_vunpackob_128B">;
-def int_hexagon_V6_vsubuwsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuwsat_128B">;
+def int_hexagon_V6_vunpackoh :
+Hexagon_v32i32_v32i32v16i32_Intrinsic<"HEXAGON_V6_vunpackoh">;
-def int_hexagon_V6_vsubuwsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuwsat_dv">;
+def int_hexagon_V6_vunpackoh_128B :
+Hexagon_v64i32_v64i32v32i32_Intrinsic<"HEXAGON_V6_vunpackoh_128B">;
-def int_hexagon_V6_vsubuwsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubuwsat_dv_128B">;
+def int_hexagon_V6_vunpackub :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackub">;
+
+def int_hexagon_V6_vunpackub_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackub_128B">;
+
+def int_hexagon_V6_vunpackuh :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vunpackuh">;
+
+def int_hexagon_V6_vunpackuh_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vunpackuh_128B">;
+
+def int_hexagon_V6_vxor :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vxor">;
+
+def int_hexagon_V6_vxor_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vxor_128B">;
+
+def int_hexagon_V6_vzb :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vzb">;
+
+def int_hexagon_V6_vzb_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vzb_128B">;
+
+def int_hexagon_V6_vzh :
+Hexagon_v32i32_v16i32_Intrinsic<"HEXAGON_V6_vzh">;
+
+def int_hexagon_V6_vzh_128B :
+Hexagon_v64i32_v32i32_Intrinsic<"HEXAGON_V6_vzh_128B">;
+
+// V62 HVX Instructions.
+
+def int_hexagon_V6_lvsplatb :
+Hexagon_v16i32_i32_Intrinsic<"HEXAGON_V6_lvsplatb">;
+
+def int_hexagon_V6_lvsplatb_128B :
+Hexagon_v32i32_i32_Intrinsic<"HEXAGON_V6_lvsplatb_128B">;
+
+def int_hexagon_V6_lvsplath :
+Hexagon_v16i32_i32_Intrinsic<"HEXAGON_V6_lvsplath">;
+
+def int_hexagon_V6_lvsplath_128B :
+Hexagon_v32i32_i32_Intrinsic<"HEXAGON_V6_lvsplath_128B">;
+
+def int_hexagon_V6_pred_scalar2v2 :
+Hexagon_custom_v64i1_i32_Intrinsic;
+
+def int_hexagon_V6_pred_scalar2v2_128B :
+Hexagon_custom_v128i1_i32_Intrinsic_128B;
+
+def int_hexagon_V6_shuffeqh :
+Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+
+def int_hexagon_V6_shuffeqh_128B :
+Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+
+def int_hexagon_V6_shuffeqw :
+Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+
+def int_hexagon_V6_shuffeqw_128B :
+Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
def int_hexagon_V6_vaddbsat :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddbsat">;
@@ -5664,41 +5640,23 @@ Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddbsat_dv">;
def int_hexagon_V6_vaddbsat_dv_128B :
Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vaddbsat_dv_128B">;
-def int_hexagon_V6_vsubbsat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubbsat">;
-
-def int_hexagon_V6_vsubbsat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubbsat_128B">;
-
-def int_hexagon_V6_vsubbsat_dv :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubbsat_dv">;
-
-def int_hexagon_V6_vsubbsat_dv_128B :
-Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubbsat_dv_128B">;
-
def int_hexagon_V6_vaddcarry :
Hexagon_custom_v16i32v64i1_v16i32v16i32v64i1_Intrinsic;
def int_hexagon_V6_vaddcarry_128B :
Hexagon_custom_v32i32v128i1_v32i32v32i32v128i1_Intrinsic_128B;
-def int_hexagon_V6_vsubcarry :
-Hexagon_custom_v16i32v64i1_v16i32v16i32v64i1_Intrinsic;
-
-def int_hexagon_V6_vsubcarry_128B :
-Hexagon_custom_v32i32v128i1_v32i32v32i32v128i1_Intrinsic_128B;
-
-def int_hexagon_V6_vaddububb_sat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddububb_sat">;
+def int_hexagon_V6_vaddclbh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddclbh">;
-def int_hexagon_V6_vaddububb_sat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddububb_sat_128B">;
+def int_hexagon_V6_vaddclbh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddclbh_128B">;
-def int_hexagon_V6_vsubububb_sat :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubububb_sat">;
+def int_hexagon_V6_vaddclbw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddclbw">;
-def int_hexagon_V6_vsubububb_sat_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubububb_sat_128B">;
+def int_hexagon_V6_vaddclbw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddclbw_128B">;
def int_hexagon_V6_vaddhw_acc :
Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddhw_acc">;
@@ -5706,53 +5664,35 @@ Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddhw_acc">;
def int_hexagon_V6_vaddhw_acc_128B :
Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddhw_acc_128B">;
-def int_hexagon_V6_vadduhw_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduhw_acc">;
-
-def int_hexagon_V6_vadduhw_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhw_acc_128B">;
-
def int_hexagon_V6_vaddubh_acc :
Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddubh_acc">;
def int_hexagon_V6_vaddubh_acc_128B :
Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddubh_acc_128B">;
-def int_hexagon_V6_vmpyewuh_64 :
-Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyewuh_64">;
-
-def int_hexagon_V6_vmpyewuh_64_128B :
-Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyewuh_64_128B">;
-
-def int_hexagon_V6_vmpyowh_64_acc :
-Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_64_acc">;
-
-def int_hexagon_V6_vmpyowh_64_acc_128B :
-Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_64_acc_128B">;
-
-def int_hexagon_V6_vmpauhb :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpauhb">;
+def int_hexagon_V6_vaddububb_sat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddububb_sat">;
-def int_hexagon_V6_vmpauhb_128B :
-Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vmpauhb_128B">;
+def int_hexagon_V6_vaddububb_sat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddububb_sat_128B">;
-def int_hexagon_V6_vmpauhb_acc :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpauhb_acc">;
+def int_hexagon_V6_vadduhw_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduhw_acc">;
-def int_hexagon_V6_vmpauhb_acc_128B :
-Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vmpauhb_acc_128B">;
+def int_hexagon_V6_vadduhw_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduhw_acc_128B">;
-def int_hexagon_V6_vmpyiwub :
-Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub">;
+def int_hexagon_V6_vadduwsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vadduwsat">;
-def int_hexagon_V6_vmpyiwub_128B :
-Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub_128B">;
+def int_hexagon_V6_vadduwsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduwsat_128B">;
-def int_hexagon_V6_vmpyiwub_acc :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub_acc">;
+def int_hexagon_V6_vadduwsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vadduwsat_dv">;
-def int_hexagon_V6_vmpyiwub_acc_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub_acc_128B">;
+def int_hexagon_V6_vadduwsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vadduwsat_dv_128B">;
def int_hexagon_V6_vandnqrt :
Hexagon_custom_v16i32_v64i1i32_Intrinsic;
@@ -5766,35 +5706,77 @@ Hexagon_custom_v16i32_v16i32v64i1i32_Intrinsic;
def int_hexagon_V6_vandnqrt_acc_128B :
Hexagon_custom_v32i32_v32i32v128i1i32_Intrinsic_128B;
-def int_hexagon_V6_vandvqv :
+def int_hexagon_V6_vandvnqv :
Hexagon_custom_v16i32_v64i1v16i32_Intrinsic;
-def int_hexagon_V6_vandvqv_128B :
+def int_hexagon_V6_vandvnqv_128B :
Hexagon_custom_v32i32_v128i1v32i32_Intrinsic_128B;
-def int_hexagon_V6_vandvnqv :
+def int_hexagon_V6_vandvqv :
Hexagon_custom_v16i32_v64i1v16i32_Intrinsic;
-def int_hexagon_V6_vandvnqv_128B :
+def int_hexagon_V6_vandvqv_128B :
Hexagon_custom_v32i32_v128i1v32i32_Intrinsic_128B;
-def int_hexagon_V6_pred_scalar2v2 :
-Hexagon_custom_v64i1_i32_Intrinsic;
+def int_hexagon_V6_vasrhbsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrhbsat">;
-def int_hexagon_V6_pred_scalar2v2_128B :
-Hexagon_custom_v128i1_i32_Intrinsic_128B;
+def int_hexagon_V6_vasrhbsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrhbsat_128B">;
-def int_hexagon_V6_shuffeqw :
-Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+def int_hexagon_V6_vasruwuhrndsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruwuhrndsat">;
-def int_hexagon_V6_shuffeqw_128B :
-Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+def int_hexagon_V6_vasruwuhrndsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruwuhrndsat_128B">;
-def int_hexagon_V6_shuffeqh :
-Hexagon_custom_v64i1_v64i1v64i1_Intrinsic;
+def int_hexagon_V6_vasrwuhrndsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrwuhrndsat">;
-def int_hexagon_V6_shuffeqh_128B :
-Hexagon_custom_v128i1_v128i1v128i1_Intrinsic_128B;
+def int_hexagon_V6_vasrwuhrndsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrwuhrndsat_128B">;
+
+def int_hexagon_V6_vlsrb :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vlsrb">;
+
+def int_hexagon_V6_vlsrb_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vlsrb_128B">;
+
+def int_hexagon_V6_vlutvvb_nm :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_nm">;
+
+def int_hexagon_V6_vlutvvb_nm_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_nm_128B">;
+
+def int_hexagon_V6_vlutvvb_oracci :
+Hexagon_v16i32_v16i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracci", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+def int_hexagon_V6_vlutvvb_oracci_128B :
+Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracci_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+def int_hexagon_V6_vlutvvbi :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvbi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_V6_vlutvvbi_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvbi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_V6_vlutvwh_nm :
+Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_nm">;
+
+def int_hexagon_V6_vlutvwh_nm_128B :
+Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_nm_128B">;
+
+def int_hexagon_V6_vlutvwh_oracci :
+Hexagon_v32i32_v32i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracci", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+def int_hexagon_V6_vlutvwh_oracci_128B :
+Hexagon_v64i32_v64i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracci_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+def int_hexagon_V6_vlutvwhi :
+Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwhi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_V6_vlutvwhi_128B :
+Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwhi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
def int_hexagon_V6_vmaxb :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmaxb">;
@@ -5808,91 +5790,109 @@ Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vminb">;
def int_hexagon_V6_vminb_128B :
Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vminb_128B">;
-def int_hexagon_V6_vsatuwuh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsatuwuh">;
+def int_hexagon_V6_vmpauhb :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpauhb">;
-def int_hexagon_V6_vsatuwuh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsatuwuh_128B">;
+def int_hexagon_V6_vmpauhb_128B :
+Hexagon_v64i32_v64i32i32_Intrinsic<"HEXAGON_V6_vmpauhb_128B">;
-def int_hexagon_V6_lvsplath :
-Hexagon_v16i32_i32_Intrinsic<"HEXAGON_V6_lvsplath">;
+def int_hexagon_V6_vmpauhb_acc :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpauhb_acc">;
-def int_hexagon_V6_lvsplath_128B :
-Hexagon_v32i32_i32_Intrinsic<"HEXAGON_V6_lvsplath_128B">;
+def int_hexagon_V6_vmpauhb_acc_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vmpauhb_acc_128B">;
-def int_hexagon_V6_lvsplatb :
-Hexagon_v16i32_i32_Intrinsic<"HEXAGON_V6_lvsplatb">;
+def int_hexagon_V6_vmpyewuh_64 :
+Hexagon_v32i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyewuh_64">;
-def int_hexagon_V6_lvsplatb_128B :
-Hexagon_v32i32_i32_Intrinsic<"HEXAGON_V6_lvsplatb_128B">;
+def int_hexagon_V6_vmpyewuh_64_128B :
+Hexagon_v64i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyewuh_64_128B">;
-def int_hexagon_V6_vaddclbw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddclbw">;
+def int_hexagon_V6_vmpyiwub :
+Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub">;
-def int_hexagon_V6_vaddclbw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddclbw_128B">;
+def int_hexagon_V6_vmpyiwub_128B :
+Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub_128B">;
-def int_hexagon_V6_vaddclbh :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vaddclbh">;
+def int_hexagon_V6_vmpyiwub_acc :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub_acc">;
-def int_hexagon_V6_vaddclbh_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vaddclbh_128B">;
+def int_hexagon_V6_vmpyiwub_acc_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyiwub_acc_128B">;
-def int_hexagon_V6_vlutvvbi :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvbi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vmpyowh_64_acc :
+Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vmpyowh_64_acc">;
-def int_hexagon_V6_vlutvvbi_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvbi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vmpyowh_64_acc_128B :
+Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vmpyowh_64_acc_128B">;
-def int_hexagon_V6_vlutvvb_oracci :
-Hexagon_v16i32_v16i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracci", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vrounduhub :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrounduhub">;
-def int_hexagon_V6_vlutvvb_oracci_128B :
-Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_oracci_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vrounduhub_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrounduhub_128B">;
-def int_hexagon_V6_vlutvwhi :
-Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwhi", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vrounduwuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrounduwuh">;
-def int_hexagon_V6_vlutvwhi_128B :
-Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwhi_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+def int_hexagon_V6_vrounduwuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrounduwuh_128B">;
-def int_hexagon_V6_vlutvwh_oracci :
-Hexagon_v32i32_v32i32v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracci", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vsatuwuh :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsatuwuh">;
-def int_hexagon_V6_vlutvwh_oracci_128B :
-Hexagon_v64i32_v64i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_oracci_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+def int_hexagon_V6_vsatuwuh_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsatuwuh_128B">;
-def int_hexagon_V6_vlutvvb_nm :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_nm">;
+def int_hexagon_V6_vsubbsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubbsat">;
-def int_hexagon_V6_vlutvvb_nm_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvvb_nm_128B">;
+def int_hexagon_V6_vsubbsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubbsat_128B">;
-def int_hexagon_V6_vlutvwh_nm :
-Hexagon_v32i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_nm">;
+def int_hexagon_V6_vsubbsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubbsat_dv">;
-def int_hexagon_V6_vlutvwh_nm_128B :
-Hexagon_v64i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vlutvwh_nm_128B">;
+def int_hexagon_V6_vsubbsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubbsat_dv_128B">;
-// V65 HVX Instructions.
+def int_hexagon_V6_vsubcarry :
+Hexagon_custom_v16i32v64i1_v16i32v16i32v64i1_Intrinsic;
-def int_hexagon_V6_vasruwuhsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruwuhsat">;
+def int_hexagon_V6_vsubcarry_128B :
+Hexagon_custom_v32i32v128i1_v32i32v32i32v128i1_Intrinsic_128B;
-def int_hexagon_V6_vasruwuhsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruwuhsat_128B">;
+def int_hexagon_V6_vsubububb_sat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubububb_sat">;
-def int_hexagon_V6_vasruhubsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruhubsat">;
+def int_hexagon_V6_vsubububb_sat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubububb_sat_128B">;
-def int_hexagon_V6_vasruhubsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruhubsat_128B">;
+def int_hexagon_V6_vsubuwsat :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsubuwsat">;
-def int_hexagon_V6_vasruhubrndsat :
-Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruhubrndsat">;
+def int_hexagon_V6_vsubuwsat_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuwsat_128B">;
-def int_hexagon_V6_vasruhubrndsat_128B :
-Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruhubrndsat_128B">;
+def int_hexagon_V6_vsubuwsat_dv :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsubuwsat_dv">;
+
+def int_hexagon_V6_vsubuwsat_dv_128B :
+Hexagon_v64i32_v64i32v64i32_Intrinsic<"HEXAGON_V6_vsubuwsat_dv_128B">;
+
+// V65 HVX Instructions.
+
+def int_hexagon_V6_vabsb :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsb">;
+
+def int_hexagon_V6_vabsb_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsb_128B">;
+
+def int_hexagon_V6_vabsb_sat :
+Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsb_sat">;
+
+def int_hexagon_V6_vabsb_sat_128B :
+Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsb_sat_128B">;
def int_hexagon_V6_vaslh_acc :
Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vaslh_acc">;
@@ -5906,17 +5906,23 @@ Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasrh_acc">;
def int_hexagon_V6_vasrh_acc_128B :
Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasrh_acc_128B">;
-def int_hexagon_V6_vavguw :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguw">;
+def int_hexagon_V6_vasruhubrndsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruhubrndsat">;
-def int_hexagon_V6_vavguw_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguw_128B">;
+def int_hexagon_V6_vasruhubrndsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruhubrndsat_128B">;
-def int_hexagon_V6_vavguwrnd :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguwrnd">;
+def int_hexagon_V6_vasruhubsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruhubsat">;
-def int_hexagon_V6_vavguwrnd_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguwrnd_128B">;
+def int_hexagon_V6_vasruhubsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruhubsat_128B">;
+
+def int_hexagon_V6_vasruwuhsat :
+Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vasruwuhsat">;
+
+def int_hexagon_V6_vasruwuhsat_128B :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vasruwuhsat_128B">;
def int_hexagon_V6_vavgb :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgb">;
@@ -5930,11 +5936,17 @@ Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavgbrnd">;
def int_hexagon_V6_vavgbrnd_128B :
Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavgbrnd_128B">;
-def int_hexagon_V6_vnavgb :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgb">;
+def int_hexagon_V6_vavguw :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguw">;
-def int_hexagon_V6_vnavgb_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgb_128B">;
+def int_hexagon_V6_vavguw_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguw_128B">;
+
+def int_hexagon_V6_vavguwrnd :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vavguwrnd">;
+
+def int_hexagon_V6_vavguwrnd_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vavguwrnd_128B">;
def int_hexagon_V6_vdd0 :
Hexagon_v32i32__Intrinsic<"HEXAGON_V6_vdd0">;
@@ -5942,17 +5954,47 @@ Hexagon_v32i32__Intrinsic<"HEXAGON_V6_vdd0">;
def int_hexagon_V6_vdd0_128B :
Hexagon_v64i32__Intrinsic<"HEXAGON_V6_vdd0_128B">;
-def int_hexagon_V6_vabsb :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsb">;
+def int_hexagon_V6_vgathermh :
+Hexagon__ptri32i32v16i32_Intrinsic<"HEXAGON_V6_vgathermh", [IntrArgMemOnly]>;
-def int_hexagon_V6_vabsb_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsb_128B">;
+def int_hexagon_V6_vgathermh_128B :
+Hexagon__ptri32i32v32i32_Intrinsic<"HEXAGON_V6_vgathermh_128B", [IntrArgMemOnly]>;
-def int_hexagon_V6_vabsb_sat :
-Hexagon_v16i32_v16i32_Intrinsic<"HEXAGON_V6_vabsb_sat">;
+def int_hexagon_V6_vgathermhq :
+Hexagon_custom__ptrv64i1i32i32v16i32_Intrinsic<[IntrArgMemOnly]>;
-def int_hexagon_V6_vabsb_sat_128B :
-Hexagon_v32i32_v32i32_Intrinsic<"HEXAGON_V6_vabsb_sat_128B">;
+def int_hexagon_V6_vgathermhq_128B :
+Hexagon_custom__ptrv128i1i32i32v32i32_Intrinsic_128B<[IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermhw :
+Hexagon__ptri32i32v32i32_Intrinsic<"HEXAGON_V6_vgathermhw", [IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermhw_128B :
+Hexagon__ptri32i32v64i32_Intrinsic<"HEXAGON_V6_vgathermhw_128B", [IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermhwq :
+Hexagon_custom__ptrv64i1i32i32v32i32_Intrinsic<[IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermhwq_128B :
+Hexagon_custom__ptrv128i1i32i32v64i32_Intrinsic_128B<[IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermw :
+Hexagon__ptri32i32v16i32_Intrinsic<"HEXAGON_V6_vgathermw", [IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermw_128B :
+Hexagon__ptri32i32v32i32_Intrinsic<"HEXAGON_V6_vgathermw_128B", [IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermwq :
+Hexagon_custom__ptrv64i1i32i32v16i32_Intrinsic<[IntrArgMemOnly]>;
+
+def int_hexagon_V6_vgathermwq_128B :
+Hexagon_custom__ptrv128i1i32i32v32i32_Intrinsic_128B<[IntrArgMemOnly]>;
+
+def int_hexagon_V6_vlut4 :
+Hexagon_v16i32_v16i32i64_Intrinsic<"HEXAGON_V6_vlut4">;
+
+def int_hexagon_V6_vlut4_128B :
+Hexagon_v32i32_v32i32i64_Intrinsic<"HEXAGON_V6_vlut4_128B">;
def int_hexagon_V6_vmpabuu :
Hexagon_v32i32_v32i32i32_Intrinsic<"HEXAGON_V6_vmpabuu">;
@@ -5966,12 +6008,6 @@ Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpabuu_acc">;
def int_hexagon_V6_vmpabuu_acc_128B :
Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_vmpabuu_acc_128B">;
-def int_hexagon_V6_vmpyh_acc :
-Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyh_acc">;
-
-def int_hexagon_V6_vmpyh_acc_128B :
-Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyh_acc_128B">;
-
def int_hexagon_V6_vmpahhsat :
Hexagon_v16i32_v16i32v16i32i64_Intrinsic<"HEXAGON_V6_vmpahhsat">;
@@ -5990,11 +6026,11 @@ Hexagon_v16i32_v16i32v16i32i64_Intrinsic<"HEXAGON_V6_vmpsuhuhsat">;
def int_hexagon_V6_vmpsuhuhsat_128B :
Hexagon_v32i32_v32i32v32i32i64_Intrinsic<"HEXAGON_V6_vmpsuhuhsat_128B">;
-def int_hexagon_V6_vlut4 :
-Hexagon_v16i32_v16i32i64_Intrinsic<"HEXAGON_V6_vlut4">;
+def int_hexagon_V6_vmpyh_acc :
+Hexagon_v32i32_v32i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyh_acc">;
-def int_hexagon_V6_vlut4_128B :
-Hexagon_v32i32_v32i32i64_Intrinsic<"HEXAGON_V6_vlut4_128B">;
+def int_hexagon_V6_vmpyh_acc_128B :
+Hexagon_v64i32_v64i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyh_acc_128B">;
def int_hexagon_V6_vmpyuhe :
Hexagon_v16i32_v16i32i32_Intrinsic<"HEXAGON_V6_vmpyuhe">;
@@ -6008,47 +6044,29 @@ Hexagon_v16i32_v16i32v16i32i32_Intrinsic<"HEXAGON_V6_vmpyuhe_acc">;
def int_hexagon_V6_vmpyuhe_acc_128B :
Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_vmpyuhe_acc_128B">;
-def int_hexagon_V6_vgathermw :
-Hexagon__ptri32i32v16i32_Intrinsic<"HEXAGON_V6_vgathermw", [IntrArgMemOnly]>;
-
-def int_hexagon_V6_vgathermw_128B :
-Hexagon__ptri32i32v32i32_Intrinsic<"HEXAGON_V6_vgathermw_128B", [IntrArgMemOnly]>;
-
-def int_hexagon_V6_vgathermh :
-Hexagon__ptri32i32v16i32_Intrinsic<"HEXAGON_V6_vgathermh", [IntrArgMemOnly]>;
-
-def int_hexagon_V6_vgathermh_128B :
-Hexagon__ptri32i32v32i32_Intrinsic<"HEXAGON_V6_vgathermh_128B", [IntrArgMemOnly]>;
-
-def int_hexagon_V6_vgathermhw :
-Hexagon__ptri32i32v32i32_Intrinsic<"HEXAGON_V6_vgathermhw", [IntrArgMemOnly]>;
-
-def int_hexagon_V6_vgathermhw_128B :
-Hexagon__ptri32i32v64i32_Intrinsic<"HEXAGON_V6_vgathermhw_128B", [IntrArgMemOnly]>;
-
-def int_hexagon_V6_vgathermwq :
-Hexagon_custom__ptrv64i1i32i32v16i32_Intrinsic<[IntrArgMemOnly]>;
+def int_hexagon_V6_vnavgb :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vnavgb">;
-def int_hexagon_V6_vgathermwq_128B :
-Hexagon_custom__ptrv128i1i32i32v32i32_Intrinsic_128B<[IntrArgMemOnly]>;
+def int_hexagon_V6_vnavgb_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vnavgb_128B">;
-def int_hexagon_V6_vgathermhq :
-Hexagon_custom__ptrv64i1i32i32v16i32_Intrinsic<[IntrArgMemOnly]>;
+def int_hexagon_V6_vprefixqb :
+Hexagon_custom_v16i32_v64i1_Intrinsic;
-def int_hexagon_V6_vgathermhq_128B :
-Hexagon_custom__ptrv128i1i32i32v32i32_Intrinsic_128B<[IntrArgMemOnly]>;
+def int_hexagon_V6_vprefixqb_128B :
+Hexagon_custom_v32i32_v128i1_Intrinsic_128B;
-def int_hexagon_V6_vgathermhwq :
-Hexagon_custom__ptrv64i1i32i32v32i32_Intrinsic<[IntrArgMemOnly]>;
+def int_hexagon_V6_vprefixqh :
+Hexagon_custom_v16i32_v64i1_Intrinsic;
-def int_hexagon_V6_vgathermhwq_128B :
-Hexagon_custom__ptrv128i1i32i32v64i32_Intrinsic_128B<[IntrArgMemOnly]>;
+def int_hexagon_V6_vprefixqh_128B :
+Hexagon_custom_v32i32_v128i1_Intrinsic_128B;
-def int_hexagon_V6_vscattermw :
-Hexagon__i32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vscattermw", [IntrWriteMem]>;
+def int_hexagon_V6_vprefixqw :
+Hexagon_custom_v16i32_v64i1_Intrinsic;
-def int_hexagon_V6_vscattermw_128B :
-Hexagon__i32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vscattermw_128B", [IntrWriteMem]>;
+def int_hexagon_V6_vprefixqw_128B :
+Hexagon_custom_v32i32_v128i1_Intrinsic_128B;
def int_hexagon_V6_vscattermh :
Hexagon__i32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vscattermh", [IntrWriteMem]>;
@@ -6056,24 +6074,12 @@ Hexagon__i32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vscattermh", [IntrWriteMem]>;
def int_hexagon_V6_vscattermh_128B :
Hexagon__i32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vscattermh_128B", [IntrWriteMem]>;
-def int_hexagon_V6_vscattermw_add :
-Hexagon__i32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vscattermw_add", [IntrWriteMem]>;
-
-def int_hexagon_V6_vscattermw_add_128B :
-Hexagon__i32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vscattermw_add_128B", [IntrWriteMem]>;
-
def int_hexagon_V6_vscattermh_add :
Hexagon__i32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vscattermh_add", [IntrWriteMem]>;
def int_hexagon_V6_vscattermh_add_128B :
Hexagon__i32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vscattermh_add_128B", [IntrWriteMem]>;
-def int_hexagon_V6_vscattermwq :
-Hexagon_custom__v64i1i32i32v16i32v16i32_Intrinsic<[IntrWriteMem]>;
-
-def int_hexagon_V6_vscattermwq_128B :
-Hexagon_custom__v128i1i32i32v32i32v32i32_Intrinsic_128B<[IntrWriteMem]>;
-
def int_hexagon_V6_vscattermhq :
Hexagon_custom__v64i1i32i32v16i32v16i32_Intrinsic<[IntrWriteMem]>;
@@ -6086,43 +6092,43 @@ Hexagon__i32i32v32i32v16i32_Intrinsic<"HEXAGON_V6_vscattermhw", [IntrWriteMem]>;
def int_hexagon_V6_vscattermhw_128B :
Hexagon__i32i32v64i32v32i32_Intrinsic<"HEXAGON_V6_vscattermhw_128B", [IntrWriteMem]>;
-def int_hexagon_V6_vscattermhwq :
-Hexagon_custom__v64i1i32i32v32i32v16i32_Intrinsic<[IntrWriteMem]>;
-
-def int_hexagon_V6_vscattermhwq_128B :
-Hexagon_custom__v128i1i32i32v64i32v32i32_Intrinsic_128B<[IntrWriteMem]>;
-
def int_hexagon_V6_vscattermhw_add :
Hexagon__i32i32v32i32v16i32_Intrinsic<"HEXAGON_V6_vscattermhw_add", [IntrWriteMem]>;
def int_hexagon_V6_vscattermhw_add_128B :
Hexagon__i32i32v64i32v32i32_Intrinsic<"HEXAGON_V6_vscattermhw_add_128B", [IntrWriteMem]>;
-def int_hexagon_V6_vprefixqb :
-Hexagon_custom_v16i32_v64i1_Intrinsic;
+def int_hexagon_V6_vscattermhwq :
+Hexagon_custom__v64i1i32i32v32i32v16i32_Intrinsic<[IntrWriteMem]>;
-def int_hexagon_V6_vprefixqb_128B :
-Hexagon_custom_v32i32_v128i1_Intrinsic_128B;
+def int_hexagon_V6_vscattermhwq_128B :
+Hexagon_custom__v128i1i32i32v64i32v32i32_Intrinsic_128B<[IntrWriteMem]>;
-def int_hexagon_V6_vprefixqh :
-Hexagon_custom_v16i32_v64i1_Intrinsic;
+def int_hexagon_V6_vscattermw :
+Hexagon__i32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vscattermw", [IntrWriteMem]>;
-def int_hexagon_V6_vprefixqh_128B :
-Hexagon_custom_v32i32_v128i1_Intrinsic_128B;
+def int_hexagon_V6_vscattermw_128B :
+Hexagon__i32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vscattermw_128B", [IntrWriteMem]>;
-def int_hexagon_V6_vprefixqw :
-Hexagon_custom_v16i32_v64i1_Intrinsic;
+def int_hexagon_V6_vscattermw_add :
+Hexagon__i32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vscattermw_add", [IntrWriteMem]>;
-def int_hexagon_V6_vprefixqw_128B :
-Hexagon_custom_v32i32_v128i1_Intrinsic_128B;
+def int_hexagon_V6_vscattermw_add_128B :
+Hexagon__i32i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vscattermw_add_128B", [IntrWriteMem]>;
+
+def int_hexagon_V6_vscattermwq :
+Hexagon_custom__v64i1i32i32v16i32v16i32_Intrinsic<[IntrWriteMem]>;
+
+def int_hexagon_V6_vscattermwq_128B :
+Hexagon_custom__v128i1i32i32v32i32v32i32_Intrinsic_128B<[IntrWriteMem]>;
// V66 HVX Instructions.
-def int_hexagon_V6_vrotr :
-Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrotr">;
+def int_hexagon_V6_vaddcarrysat :
+Hexagon_custom_v16i32_v16i32v16i32v64i1_Intrinsic;
-def int_hexagon_V6_vrotr_128B :
-Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrotr_128B">;
+def int_hexagon_V6_vaddcarrysat_128B :
+Hexagon_custom_v32i32_v32i32v32i32v128i1_Intrinsic_128B;
def int_hexagon_V6_vasr_into :
Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vasr_into">;
@@ -6130,11 +6136,11 @@ Hexagon_v32i32_v32i32v16i32v16i32_Intrinsic<"HEXAGON_V6_vasr_into">;
def int_hexagon_V6_vasr_into_128B :
Hexagon_v64i32_v64i32v32i32v32i32_Intrinsic<"HEXAGON_V6_vasr_into_128B">;
-def int_hexagon_V6_vaddcarrysat :
-Hexagon_custom_v16i32_v16i32v16i32v64i1_Intrinsic;
+def int_hexagon_V6_vrotr :
+Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vrotr">;
-def int_hexagon_V6_vaddcarrysat_128B :
-Hexagon_custom_v32i32_v32i32v32i32v128i1_Intrinsic_128B;
+def int_hexagon_V6_vrotr_128B :
+Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vrotr_128B">;
def int_hexagon_V6_vsatdw :
Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsatdw">;
@@ -6142,3 +6148,29 @@ Hexagon_v16i32_v16i32v16i32_Intrinsic<"HEXAGON_V6_vsatdw">;
def int_hexagon_V6_vsatdw_128B :
Hexagon_v32i32_v32i32v32i32_Intrinsic<"HEXAGON_V6_vsatdw_128B">;
+// V68 HVX Instructions.
+
+def int_hexagon_V6_v6mpyhubs10 :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_v6mpyhubs10", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_V6_v6mpyhubs10_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_v6mpyhubs10_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_V6_v6mpyhubs10_vxx :
+Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_v6mpyhubs10_vxx", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+def int_hexagon_V6_v6mpyhubs10_vxx_128B :
+Hexagon_v64i32_v64i32v64i32v64i32i32_Intrinsic<"HEXAGON_V6_v6mpyhubs10_vxx_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+def int_hexagon_V6_v6mpyvubs10 :
+Hexagon_v32i32_v32i32v32i32i32_Intrinsic<"HEXAGON_V6_v6mpyvubs10", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_V6_v6mpyvubs10_128B :
+Hexagon_v64i32_v64i32v64i32i32_Intrinsic<"HEXAGON_V6_v6mpyvubs10_128B", [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+
+def int_hexagon_V6_v6mpyvubs10_vxx :
+Hexagon_v32i32_v32i32v32i32v32i32i32_Intrinsic<"HEXAGON_V6_v6mpyvubs10_vxx", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
+def int_hexagon_V6_v6mpyvubs10_vxx_128B :
+Hexagon_v64i32_v64i32v64i32v64i32i32_Intrinsic<"HEXAGON_V6_v6mpyvubs10_vxx_128B", [IntrNoMem, ImmArg<ArgIndex<3>>]>;
+
diff --git a/llvm/include/llvm/IR/IntrinsicsNVVM.td b/llvm/include/llvm/IR/IntrinsicsNVVM.td
index 2ab48cfc4bb7..cc43d23bec1c 100644
--- a/llvm/include/llvm/IR/IntrinsicsNVVM.td
+++ b/llvm/include/llvm/IR/IntrinsicsNVVM.td
@@ -31,7 +31,11 @@
// * llvm.nvvm.max.ull --> ibid.
// * llvm.nvvm.h2f --> llvm.convert.to.fp16.f32
-def llvm_anyi64ptr_ty : LLVMAnyPointerType<llvm_i64_ty>; // (space)i64*
+def llvm_global_i8ptr_ty : LLVMQualPointerType<llvm_i8_ty, 1>; // (global)i8*
+def llvm_shared_i8ptr_ty : LLVMQualPointerType<llvm_i8_ty, 3>; // (shared)i8*
+def llvm_i64ptr_ty : LLVMPointerType<llvm_i64_ty>; // i64*
+def llvm_any_i64ptr_ty : LLVMAnyPointerType<llvm_i64_ty>; // (space)i64*
+def llvm_shared_i64ptr_ty : LLVMQualPointerType<llvm_i64_ty, 3>; // (shared)i64*
//
// MISC
@@ -48,13 +52,27 @@ class WMMA_REGS<string Geom, string Frag, string PtxEltType> {
string gft = Geom#":"#Frag#":"#ptx_elt_type;
string ft = frag#":"#ptx_elt_type;
list<LLVMType> regs = !cond(
- // mma.sync.m8n8k4 uses smaller a/b fragments than wmma fp ops
+ // mma fp ops use smaller fragments than wmma fp ops
!eq(gft,"m8n8k4:a:f16") : !listsplat(llvm_v2f16_ty, 2),
!eq(gft,"m8n8k4:b:f16") : !listsplat(llvm_v2f16_ty, 2),
-
- // fp16 -> fp16/fp32 @ m16n16k16/m8n32k16/m32n8k16
- // All currently supported geometries use the same fragment format,
- // so we only need to consider {fragment, type}.
+ !eq(gft,"m16n8k8:a:f16") : !listsplat(llvm_v2f16_ty, 2),
+ !eq(gft,"m16n8k8:b:f16") : [llvm_v2f16_ty],
+ !eq(gft,"m16n8k8:c:f16") : !listsplat(llvm_v2f16_ty, 2),
+ !eq(gft,"m16n8k8:d:f16") : !listsplat(llvm_v2f16_ty, 2),
+ !eq(gft,"m16n8k8:c:f32") : !listsplat(llvm_float_ty, 4),
+ !eq(gft,"m16n8k8:d:f32") : !listsplat(llvm_float_ty, 4),
+ !eq(gft,"m16n8k16:a:f16") : !listsplat(llvm_v2f16_ty, 4),
+ !eq(gft,"m16n8k16:b:f16") : !listsplat(llvm_v2f16_ty, 2),
+ !eq(gft,"m16n8k16:c:f16") : !listsplat(llvm_v2f16_ty, 2),
+ !eq(gft,"m16n8k16:d:f16") : !listsplat(llvm_v2f16_ty, 2),
+ !eq(gft,"m16n8k16:c:f32") : !listsplat(llvm_float_ty, 4),
+ !eq(gft,"m16n8k16:d:f32") : !listsplat(llvm_float_ty, 4),
+ !eq(gft,"m16n8k4:c:f32") : !listsplat(llvm_float_ty, 4),
+ !eq(gft,"m16n8k4:d:f32") : !listsplat(llvm_float_ty, 4),
+
+ // wmma fp16 -> fp16/fp32 @ m16n16k16/m8n32k16/m32n8k16
+ // All other supported geometries use the same fragment format for f32 and
+ // f16, so we only need to consider {fragment, type}.
!eq(ft,"a:f16") : !listsplat(llvm_v2f16_ty, 8),
!eq(ft,"b:f16") : !listsplat(llvm_v2f16_ty, 8),
!eq(ft,"c:f16") : !listsplat(llvm_v2f16_ty, 4),
@@ -62,7 +80,36 @@ class WMMA_REGS<string Geom, string Frag, string PtxEltType> {
!eq(ft,"c:f32") : !listsplat(llvm_float_ty, 8),
!eq(ft,"d:f32") : !listsplat(llvm_float_ty, 8),
- // u8/s8 -> s32 @ m16n16k16/m8n32k16/m32n8k16
+ // wmma tf32 -> s32 @ m16n16k8
+ !eq(gft,"m16n16k8:a:tf32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n16k8:b:tf32") : !listsplat(llvm_i32_ty, 4),
+
+ // mma tf32 -> s32 @ m16n16k8/m16n8k8
+ !eq(gft,"m16n8k4:a:tf32") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k4:b:tf32") : [llvm_i32_ty],
+ !eq(gft,"m16n8k8:a:tf32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k8:b:tf32") : !listsplat(llvm_i32_ty, 2),
+
+ !eq(gft,"m8n8k4:a:f64") : [llvm_double_ty],
+ !eq(gft,"m8n8k4:b:f64") : [llvm_double_ty],
+ !eq(gft,"m8n8k4:c:f64") : !listsplat(llvm_double_ty, 2),
+ !eq(gft,"m8n8k4:d:f64") : !listsplat(llvm_double_ty, 2),
+
+ // wmma bf16 -> s32 @ m16n16k16/m8n32k16/m32n8k16
+ !eq(gft,"m16n16k16:a:bf16") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n16k16:b:bf16") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m8n32k16:a:bf16") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m8n32k16:b:bf16") : !listsplat(llvm_i32_ty, 8),
+ !eq(gft,"m32n8k16:a:bf16") : !listsplat(llvm_i32_ty, 8),
+ !eq(gft,"m32n8k16:b:bf16") : !listsplat(llvm_i32_ty, 2),
+
+ // mma bf16 -> s32 @ m16n8k16/m16n8k8
+ !eq(gft,"m16n8k16:a:bf16") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k16:b:bf16") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k8:a:bf16") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k8:b:bf16") : [llvm_i32_ty],
+
+ // wmma u8/s8 -> s32 @ m16n16k16/m8n32k16/m32n8k16
!eq(gft,"m16n16k16:a:u8") : !listsplat(llvm_i32_ty, 2),
!eq(gft,"m16n16k16:a:s8") : !listsplat(llvm_i32_ty, 2),
!eq(gft,"m16n16k16:b:u8") : !listsplat(llvm_i32_ty, 2),
@@ -84,17 +131,65 @@ class WMMA_REGS<string Geom, string Frag, string PtxEltType> {
!eq(gft,"m32n8k16:c:s32") : !listsplat(llvm_i32_ty, 8),
!eq(gft,"m32n8k16:d:s32") : !listsplat(llvm_i32_ty, 8),
- // u4/s4/b1 -> s32 @ m8n8k32 (u4/s4), m8n8k128(b1)
- !eq(gft,"m8n8k128:a:b1") : [llvm_i32_ty],
+ // mma u8/s8 -> s32 @ m8n8k16/m16n8k16/m16n8k32
+ !eq(gft,"m8n8k16:a:u8") : [llvm_i32_ty],
+ !eq(gft,"m8n8k16:a:s8") : [llvm_i32_ty],
+ !eq(gft,"m8n8k16:b:u8") : [llvm_i32_ty],
+ !eq(gft,"m8n8k16:b:s8") : [llvm_i32_ty],
+ !eq(gft,"m8n8k16:c:s32") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m8n8k16:d:s32") : !listsplat(llvm_i32_ty, 2),
+
+ !eq(gft,"m16n8k16:a:u8") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k16:a:s8") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k16:b:u8") : [llvm_i32_ty],
+ !eq(gft,"m16n8k16:b:s8") : [llvm_i32_ty],
+ !eq(gft,"m16n8k16:c:s32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k16:d:s32") : !listsplat(llvm_i32_ty, 4),
+
+ !eq(gft,"m16n8k32:a:u8") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k32:a:s8") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k32:b:u8") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k32:b:s8") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k32:c:s32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k32:d:s32") : !listsplat(llvm_i32_ty, 4),
+
+ // wmma/mma u4/s4 -> s32 @ m8n8k32 (u4/s4)
!eq(gft,"m8n8k32:a:u4") : [llvm_i32_ty],
!eq(gft,"m8n8k32:a:s4") : [llvm_i32_ty],
- !eq(gft,"m8n8k128:b:b1") : [llvm_i32_ty],
!eq(gft,"m8n8k32:b:u4") : [llvm_i32_ty],
!eq(gft,"m8n8k32:b:s4") : [llvm_i32_ty],
- !eq(gft,"m8n8k128:c:s32") : !listsplat(llvm_i32_ty, 2),
- !eq(gft,"m8n8k128:d:s32") : !listsplat(llvm_i32_ty, 2),
!eq(gft,"m8n8k32:c:s32") : !listsplat(llvm_i32_ty, 2),
!eq(gft,"m8n8k32:d:s32") : !listsplat(llvm_i32_ty, 2),
+
+ !eq(gft,"m16n8k32:a:u4") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k32:a:s4") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k32:b:u4") : [llvm_i32_ty],
+ !eq(gft,"m16n8k32:b:s4") : [llvm_i32_ty],
+ !eq(gft,"m16n8k32:c:s32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k32:d:s32") : !listsplat(llvm_i32_ty, 4),
+
+ !eq(gft,"m16n8k64:a:u4") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k64:a:s4") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k64:b:u4") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k64:b:s4") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k64:c:s32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k64:d:s32") : !listsplat(llvm_i32_ty, 4),
+
+ // wmma/mma b1 -> s32 @ m8n8k128(b1)
+ !eq(gft,"m8n8k128:a:b1") : [llvm_i32_ty],
+ !eq(gft,"m8n8k128:b:b1") : [llvm_i32_ty],
+ !eq(gft,"m8n8k128:c:s32") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m8n8k128:d:s32") : !listsplat(llvm_i32_ty, 2),
+
+ !eq(gft,"m16n8k128:a:b1") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k128:b:b1") : [llvm_i32_ty],
+ !eq(gft,"m16n8k128:c:s32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k128:d:s32") : !listsplat(llvm_i32_ty, 4),
+
+ !eq(gft,"m16n8k256:a:b1") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k256:b:b1") : !listsplat(llvm_i32_ty, 2),
+ !eq(gft,"m16n8k256:c:s32") : !listsplat(llvm_i32_ty, 4),
+ !eq(gft,"m16n8k256:d:s32") : !listsplat(llvm_i32_ty, 4),
);
}
@@ -121,39 +216,46 @@ class WMMA_NAME_LDST<string Op, WMMA_REGS Frag, string Layout, int WithStride> {
class MMA_SIGNATURE<WMMA_REGS A, WMMA_REGS B, WMMA_REGS C, WMMA_REGS D> {
list<WMMA_REGS> id_frags = !cond(
- // int and sub-int ops are identified by input type.
- !eq(A.ptx_elt_type, "s8") : [A],
- !eq(A.ptx_elt_type, "u8") : [A],
- !eq(A.ptx_elt_type, "s4") : [A],
- !eq(A.ptx_elt_type, "u4") : [A],
- !eq(A.ptx_elt_type, "b1") : [A],
- // the rest are FP ops identified by accumulator & result type.
- true: [D, C]
+ // FP16 ops are identified by accumulator & result type.
+ !eq(A.ptx_elt_type, "f16") : [D, C],
+ // other ops are identified by input types.
+ !ne(A.ptx_elt_type, B.ptx_elt_type): [A, B],
+ true: [A]
);
string ret = !foldl("", id_frags, a, b, !strconcat(a, ".", b.ptx_elt_type));
}
-class WMMA_NAME_MMA<string ALayout, string BLayout, int Satfinite,
- WMMA_REGS A, WMMA_REGS B, WMMA_REGS C, WMMA_REGS D> {
+class WMMA_NAME<string ALayout, string BLayout, int Satfinite, string Rnd, string b1op,
+ WMMA_REGS A, WMMA_REGS B, WMMA_REGS C, WMMA_REGS D> {
string signature = MMA_SIGNATURE<A, B, C, D>.ret;
- string llvm = !if(
- !eq(A.geom, "m8n8k4"),
- "llvm.nvvm.mma.m8n8k4"
- # "." # ALayout
- # "." # BLayout
- # signature,
- "llvm.nvvm.wmma."
- # A.geom
- # ".mma"
- # "." # ALayout
- # "." # BLayout
- # signature
- # !if(Satfinite, ".satfinite", ""));
+ string llvm = "llvm.nvvm.wmma."
+ # A.geom
+ # ".mma"
+ # b1op
+ # "." # ALayout
+ # "." # BLayout
+ # !if(!ne(Rnd, ""), !strconcat(".", Rnd), "")
+ # signature
+ # !if(Satfinite, ".satfinite", "");
string record = !subst(".", "_",
!subst("llvm.", "int_", llvm));
}
+class MMA_NAME<string ALayout, string BLayout, int Satfinite, string b1op,
+ WMMA_REGS A, WMMA_REGS B, WMMA_REGS C, WMMA_REGS D> {
+ string signature = MMA_SIGNATURE<A, B, C, D>.ret;
+ string llvm = "llvm.nvvm.mma"
+ # b1op
+ # "." # A.geom
+ # "." # ALayout
+ # "." # BLayout
+ # !if(Satfinite, ".satfinite", "")
+ # signature;
+ string record = !subst(".", "_",
+ !subst("llvm.", "int_", llvm));
+}
+
// Generates list of 4-tuples of WMMA_REGS representing a valid MMA op.
// Geom: list of supported geometries.
// TypeN: PTX type of the corresponding fragment's element.
@@ -184,14 +286,18 @@ class MMA_LDST_OPS<list<string> Geom, list<string> Frags, list<string> Types> {
list<string> ops = !foreach(x, ret, x.gft);
}
-
-
// Creates list of valid combinations of fragments. This is the master list that
// drives generation of corresponding intrinsics and instructions.
class NVVM_MMA_OPS<int _ = 0> {
- list<list<WMMA_REGS>> fp_mma_ops = MMA_OPS<
+ list<list<WMMA_REGS>> tf32_wmma_ops = MMA_OPS<
+ ["m16n16k8"],
+ ["tf32"], [], ["f32"], []>.ret;
+ list<list<WMMA_REGS>> bf16_wmma_ops = MMA_OPS<
+ ["m16n16k16", "m32n8k16", "m8n32k16"],
+ ["bf16"], [], ["f32"], []>.ret;
+ list<list<WMMA_REGS>> f64_wmma_ops = MMA_OPS<
["m8n8k4"],
- ["f16"], [], ["f16", "f32"], ["f16", "f32"]>.ret;
+ ["f64"], [], ["f64"], []>.ret;
list<list<WMMA_REGS>> fp_wmma_ops = MMA_OPS<
["m16n16k16", "m32n8k16", "m8n32k16"],
["f16"], [], ["f16", "f32"], ["f16", "f32"]>.ret;
@@ -204,16 +310,50 @@ class NVVM_MMA_OPS<int _ = 0> {
list<list<WMMA_REGS>> bit_wmma_ops = MMA_OPS<
["m8n8k128"],
["b1"], [], ["s32"], []>.ret;
+ list<list<WMMA_REGS>> all_wmma_ops = !listconcat(
+ tf32_wmma_ops, bf16_wmma_ops, f64_wmma_ops,
+ fp_wmma_ops, int_wmma_ops, subint_wmma_ops, bit_wmma_ops);
+
+ list<list<WMMA_REGS>> tf32_mma_ops = MMA_OPS<
+ ["m16n8k4", "m16n8k8"],
+ ["tf32"], [], ["f32"], []>.ret;
+ list<list<WMMA_REGS>> bf16_mma_ops = MMA_OPS<
+ ["m16n8k16", "m16n8k8"],
+ ["bf16"], [], ["f32"], []>.ret;
+ list<list<WMMA_REGS>> f64_mma_ops = MMA_OPS<
+ ["m8n8k4"],
+ ["f64"], [], ["f64"], []>.ret;
+ list<list<WMMA_REGS>> fp_mma_ops = MMA_OPS<
+ ["m8n8k4", "m16n8k8", "m16n8k16"],
+ ["f16"], [], ["f16", "f32"], ["f16", "f32"]>.ret;
+ list<list<WMMA_REGS>> int_mma_ops = MMA_OPS<
+ ["m8n8k16", "m16n8k16", "m16n8k32"],
+ ["s8", "u8"], ["s8", "u8"], ["s32"], []>.ret;
+ list<list<WMMA_REGS>> subint_mma_ops = MMA_OPS<
+ ["m8n8k32", "m16n8k32", "m16n8k64"],
+ ["s4", "u4"], ["s4", "u4"], ["s32"], []>.ret;
+ list<list<WMMA_REGS>> bit_mma_ops = MMA_OPS<
+ ["m8n8k128", "m16n8k128", "m16n8k256"],
+ ["b1"], [], ["s32"], []>.ret;
list<list<WMMA_REGS>> all_mma_ops = !listconcat(
- fp_mma_ops, fp_wmma_ops, int_wmma_ops,
- subint_wmma_ops, bit_wmma_ops);
+ tf32_mma_ops, bf16_mma_ops, f64_mma_ops,
+ fp_mma_ops, int_mma_ops, subint_mma_ops, bit_mma_ops);
list<WMMA_REGS> ldst_ab_ops = MMA_LDST_OPS<
["m16n16k16", "m32n8k16", "m8n32k16"],
- ["a", "b"], ["f16", "u8", "s8"]>.ret;
+ ["a", "b"], ["f16", "u8", "s8", "bf16"]>.ret;
list<WMMA_REGS> ldst_cd_ops = MMA_LDST_OPS<
["m16n16k16", "m32n8k16", "m8n32k16"],
["c", "d"], ["f16", "f32", "s32"]>.ret;
+ list<WMMA_REGS> ldst_tf32_ab_ops = MMA_LDST_OPS<
+ ["m16n16k8"],
+ ["a", "b"], ["tf32"]>.ret;
+ list<WMMA_REGS> ldst_tf32_cd_ops = MMA_LDST_OPS<
+ ["m16n16k8"],
+ ["c", "d"], ["f32"]>.ret;
+ list<WMMA_REGS> ldst_f64_abcd_ops = MMA_LDST_OPS<
+ ["m8n8k4"],
+ ["a", "b", "c", "d"], ["f64"]>.ret;
list<WMMA_REGS> ldst_subint_ab_ops = MMA_LDST_OPS<
["m8n8k32"], ["a", "b"], ["s4","u4"]>.ret;
list<WMMA_REGS> ldst_bit_ab_ops = MMA_LDST_OPS<
@@ -221,6 +361,9 @@ class NVVM_MMA_OPS<int _ = 0> {
list<WMMA_REGS> ldst_subint_cd_ops = MMA_LDST_OPS<
["m8n8k32", "m8n8k128"], ["c", "d"], ["s32"]>.ret;
list<WMMA_REGS> all_ldst_ops = !listconcat(ldst_ab_ops, ldst_cd_ops,
+ ldst_tf32_ab_ops,
+ ldst_tf32_cd_ops,
+ ldst_f64_abcd_ops,
ldst_subint_ab_ops,
ldst_bit_ab_ops,
ldst_subint_cd_ops);
@@ -231,69 +374,117 @@ class NVVM_MMA_OPS<int _ = 0> {
def NVVM_MMA_OPS : NVVM_MMA_OPS;
-// Returns true if this combination of layout/satf is supported; false otherwise.
-// MMA ops must provide all parameters. Loads and stores -- only frags and layout_a.
-// The class is used to prevent generation of records for the unsupported variants.
+
+// Returns true if this combination of fragment and layout for WMMA load/store
+// ops is supported; false otherwise.
+// E.g.
+// if NVVM_WMMA_LDST_SUPPORTED<...>.ret then
+// def : FOO<>; // The record will only be defined for supported ops.
+//
+class NVVM_WMMA_LDST_SUPPORTED<WMMA_REGS frag, string layout> {
+ string f = frag.frag;
+ string t = frag.ptx_elt_type;
+
+ bit ret = !cond(
+ // Sub-int load and store requires A fragment to be of row layout and B
+ // fragments to be of column layout.
+ !and(!or(!eq(t, "b1"),
+ !eq(t, "u4"),
+ !eq(t, "s4")),
+ !or(!and(!eq(f, "a"),
+ !ne(layout, "row")),
+ !and(!eq(f, "b"),
+ !ne(layout, "col")))) : false,
+ true: true
+ );
+}
+
+// Returns true if this combination of layout/satf/rnd for WMMA ops is
+// supported; false otherwise.
+// E.g.
+// if NVVM_WMMA_SUPPORTED<...>.ret then
+// def : FOO<>; // The record will only be defined for supported ops.
+//
+class NVVM_WMMA_SUPPORTED<list<WMMA_REGS> frags, string layout_a, string layout_b, int satf, string rnd> {
+ // WMMA ops check both layouts.
+ string layout = layout_a # ":" # layout_b;
+ string t = frags[0].ptx_elt_type;
+
+ bit ret = !cond(
+ // only f64 wmma functions support rnd options
+ // any non f64 type that uses a rnd value is invalid
+ !and(!ne(t, "f64"), !ne(rnd, "")) : false,
+
+ // satf is only valid for select types
+ !and(!eq(satf, 1),
+ !ne(t, "s8"),
+ !ne(t, "u8"),
+ !ne(t, "s4"),
+ !ne(t, "u4"),
+ !ne(t, "f16")): false,
+
+ // Sub-int wmma requires row/column layout
+ !and(!or(!eq(t, "s4"),
+ !eq(t, "u4"),
+ !eq(t, "b1")),
+ !ne(layout, "row:col")) : false,
+ true: true
+ );
+}
+
+class NVVM_MMA_B1OPS<list<WMMA_REGS> frags> {
+ list<string> ret = !cond(
+ !eq(frags[0].ptx_elt_type, "b1") : [".xor.popc", ".and.popc"],
+ true: [""]
+ );
+}
+
+// Returns true if this combination of layout/satf for MMA ops is supported;
+// false otherwise.
// E.g.
// if NVVM_MMA_SUPPORTED<...>.ret then
// def : FOO<>; // The record will only be defined for supported ops.
//
-class NVVM_MMA_SUPPORTED<list<WMMA_REGS> frags, string layout_a, string layout_b="-", int satf=-1> {
+class NVVM_MMA_SUPPORTED<list<WMMA_REGS> frags, string layout_a, string layout_b, int satf> {
// MMA ops check both layouts.
- string mma = frags[0].ptx_elt_type
- # ":" # layout_a
- # ":" # layout_b;
- // Load ops only need type/fragment/layout.
- string ld = frags[0].ptx_elt_type
- # ":" # frags[0].frag
- # ":" # layout_a
- ;
- string ldf = frags[0].ptx_elt_type
- # ":" # frags[0].frag
- ;
- string t = frags[0].ptx_elt_type;
+ string layout = layout_a # ":" # layout_b;
+ string a_type = frags[0].ptx_elt_type;
+ string b_type = frags[1].ptx_elt_type;
+ string c_type = frags[2].ptx_elt_type;
+ string d_type = frags[3].ptx_elt_type;
+ string geom = frags[0].geom;
// gcd is a shortcut used to identify instructions that depend on
- // geom+frag_c+frag_d. Not all instances of this class have all fragments
- // specified. If there are not enough fragments, the tail evaluates to '?'.
- string gcd = frags[0].geom
- # ":"
- # !if(!eq(!size(frags), 4),
- frags[2].ptx_elt_type # frags[3].ptx_elt_type,
- "?");
+ // geom+frag_c+frag_d.
+ string gcd = geom # ":" # c_type # d_type;
bit ret = !cond(
- // Sub-int MMA only supports fixed A/B layout.
- // b1 does not support .satf.
- !eq(mma#":"#satf, "b1:row:col:0") : true,
- // mma.m8n8k4 has no .satf modifier.
- !and(!eq(frags[0].geom, "m8n8k4"),
- !ne(satf, 0)): false,
-
- // mma.m8n8k4 has no C=f32 D=f16 variant.
+
+ // Limit satf to valid types
+ !and(!eq(satf, 1),
+ !ne(a_type, "s8"),
+ !ne(a_type, "u8"),
+ !ne(a_type, "s4"),
+ !ne(a_type, "u4")): false,
+
+ // m8n8k4 has no C=f32 D=f16 variant.
!eq(gcd, "m8n8k4:f32f16"): false,
- !eq(mma, "s4:row:col") : true,
- !eq(mma, "u4:row:col") : true,
- !eq(mma, "s4:row:col") : true,
- !eq(mma, "u4:row:col") : true,
- // Sub-int load/stores have fixed layout for A and B.
- !and(!eq(layout_b, "-"), // It's a Load or Store op
- !or(!eq(ld, "b1:a:row"),
- !eq(ld, "b1:b:col"),
- !eq(ldf, "b1:c"),
- !eq(ldf, "b1:d"),
- !eq(ld, "s4:a:row"),
- !eq(ld, "s4:b:col"),
- !eq(ldf, "s4:c"),
- !eq(ldf, "s4:d"),
- !eq(ld, "u4:a:row"),
- !eq(ld, "u4:b:col"),
- !eq(ldf, "u4:c"),
- !eq(ldf, "u4:d"))) : true,
- // All other sub-int ops are not supported.
- !eq(t, "b1") : false,
- !eq(t, "s4") : false,
- !eq(t, "u4") : false,
- // All other (non sub-int) are OK.
+
+ // only m8n8k4 for f16 does not require row:col layout
+ !and(!ne(layout, "row:col"),
+ !or(!ne(geom, "m8n8k4"),
+ !ne(a_type, "f16"))) : false,
+
+ // m16n8k8 requires A and B to be the same type and C and D to be the same
+ // type.
+ !and(!eq(geom, "m16n8k8"),
+ !or(!ne(a_type, b_type),
+ !ne(c_type, d_type))): false,
+
+ // m16n8k8 requires C and D to be the same type.
+ !and(!eq(geom, "m16n8k8"),
+ !ne(c_type, d_type)): false,
+
+ // All other are OK.
true: true
);
}
@@ -1052,6 +1243,110 @@ let TargetPrefix = "nvvm" in {
def int_nvvm_membar_sys : GCCBuiltin<"__nvvm_membar_sys">,
Intrinsic<[], [], []>;
+// Async Copy
+def int_nvvm_cp_async_mbarrier_arrive :
+ GCCBuiltin<"__nvvm_cp_async_mbarrier_arrive">,
+ Intrinsic<[],[llvm_i64ptr_ty],[IntrConvergent]>;
+def int_nvvm_cp_async_mbarrier_arrive_shared :
+ GCCBuiltin<"__nvvm_cp_async_mbarrier_arrive_shared">,
+ Intrinsic<[],[llvm_shared_i64ptr_ty],[IntrConvergent]>;
+def int_nvvm_cp_async_mbarrier_arrive_noinc :
+ GCCBuiltin<"__nvvm_cp_async_mbarrier_arrive_noinc">,
+ Intrinsic<[],[llvm_i64ptr_ty],[IntrConvergent]>;
+def int_nvvm_cp_async_mbarrier_arrive_noinc_shared :
+ GCCBuiltin<"__nvvm_cp_async_mbarrier_arrive_noinc_shared">,
+ Intrinsic<[],[llvm_shared_i64ptr_ty],[IntrConvergent]>;
+
+def int_nvvm_cp_async_ca_shared_global_4 :
+ GCCBuiltin<"__nvvm_cp_async_ca_shared_global_4">,
+ Intrinsic<[],[llvm_shared_i8ptr_ty, llvm_global_i8ptr_ty],
+ [IntrArgMemOnly, NoAlias<ArgIndex<0>>, NoAlias<ArgIndex<1>>,
+ WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>],
+ "llvm.nvvm.cp.async.ca.shared.global.4">;
+def int_nvvm_cp_async_ca_shared_global_8 :
+ GCCBuiltin<"__nvvm_cp_async_ca_shared_global_8">,
+ Intrinsic<[],[llvm_shared_i8ptr_ty, llvm_global_i8ptr_ty],
+ [IntrArgMemOnly, NoAlias<ArgIndex<0>>, NoAlias<ArgIndex<1>>,
+ WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>],
+ "llvm.nvvm.cp.async.ca.shared.global.8">;
+def int_nvvm_cp_async_ca_shared_global_16 :
+ GCCBuiltin<"__nvvm_cp_async_ca_shared_global_16">,
+ Intrinsic<[],[llvm_shared_i8ptr_ty, llvm_global_i8ptr_ty],
+ [IntrArgMemOnly, NoAlias<ArgIndex<0>>, NoAlias<ArgIndex<1>>,
+ WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>],
+ "llvm.nvvm.cp.async.ca.shared.global.16">;
+def int_nvvm_cp_async_cg_shared_global_16 :
+ GCCBuiltin<"__nvvm_cp_async_cg_shared_global_16">,
+ Intrinsic<[],[llvm_shared_i8ptr_ty, llvm_global_i8ptr_ty],
+ [IntrArgMemOnly, NoAlias<ArgIndex<0>>, NoAlias<ArgIndex<1>>,
+ WriteOnly<ArgIndex<0>>, ReadOnly<ArgIndex<1>>],
+ "llvm.nvvm.cp.async.cg.shared.global.16">;
+
+def int_nvvm_cp_async_commit_group :
+ GCCBuiltin<"__nvvm_cp_async_commit_group">,
+ Intrinsic<[],[],[]>;
+
+def int_nvvm_cp_async_wait_group :
+ GCCBuiltin<"__nvvm_cp_async_wait_group">,
+ Intrinsic<[],[llvm_i32_ty],[ImmArg<ArgIndex<0>>]>;
+
+def int_nvvm_cp_async_wait_all :
+ GCCBuiltin<"__nvvm_cp_async_wait_all">,
+ Intrinsic<[],[],[]>;
+
+// mbarrier
+def int_nvvm_mbarrier_init : GCCBuiltin<"__nvvm_mbarrier_init">,
+ Intrinsic<[],[llvm_i64ptr_ty, llvm_i32_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_init_shared :
+ GCCBuiltin<"__nvvm_mbarrier_init_shared">,
+ Intrinsic<[],[llvm_shared_i64ptr_ty, llvm_i32_ty],[IntrConvergent]>;
+
+def int_nvvm_mbarrier_inval : GCCBuiltin<"__nvvm_mbarrier_inval">,
+ Intrinsic<[],[llvm_i64ptr_ty],
+ [IntrConvergent, IntrWriteMem, IntrArgMemOnly,
+ WriteOnly<ArgIndex<0>>, NoCapture<ArgIndex<0>>]>;
+def int_nvvm_mbarrier_inval_shared :
+ GCCBuiltin<"__nvvm_mbarrier_inval_shared">,
+ Intrinsic<[],[llvm_shared_i64ptr_ty],
+ [IntrConvergent, IntrWriteMem, IntrArgMemOnly,
+ WriteOnly<ArgIndex<0>>, NoCapture<ArgIndex<0>>]>;
+
+def int_nvvm_mbarrier_arrive : GCCBuiltin<"__nvvm_mbarrier_arrive">,
+ Intrinsic<[llvm_i64_ty],[llvm_i64ptr_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_arrive_shared :
+ GCCBuiltin<"__nvvm_mbarrier_arrive_shared">,
+ Intrinsic<[llvm_i64_ty],[llvm_shared_i64ptr_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_arrive_noComplete :
+ GCCBuiltin<"__nvvm_mbarrier_arrive_noComplete">,
+ Intrinsic<[llvm_i64_ty],[llvm_i64ptr_ty, llvm_i32_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_arrive_noComplete_shared :
+ GCCBuiltin<"__nvvm_mbarrier_arrive_noComplete_shared">,
+ Intrinsic<[llvm_i64_ty],[llvm_shared_i64ptr_ty, llvm_i32_ty],[IntrConvergent]>;
+
+def int_nvvm_mbarrier_arrive_drop :
+ GCCBuiltin<"__nvvm_mbarrier_arrive_drop">,
+ Intrinsic<[llvm_i64_ty],[llvm_i64ptr_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_arrive_drop_shared :
+ GCCBuiltin<"__nvvm_mbarrier_arrive_drop_shared">,
+ Intrinsic<[llvm_i64_ty],[llvm_shared_i64ptr_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_arrive_drop_noComplete :
+ GCCBuiltin<"__nvvm_mbarrier_arrive_drop_noComplete">,
+ Intrinsic<[llvm_i64_ty],[llvm_i64ptr_ty, llvm_i32_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_arrive_drop_noComplete_shared :
+ GCCBuiltin<"__nvvm_mbarrier_arrive_drop_noComplete_shared">,
+ Intrinsic<[llvm_i64_ty],[llvm_shared_i64ptr_ty, llvm_i32_ty],[IntrConvergent]>;
+
+def int_nvvm_mbarrier_test_wait :
+ GCCBuiltin<"__nvvm_mbarrier_test_wait">,
+ Intrinsic<[llvm_i1_ty],[llvm_i64ptr_ty, llvm_i64_ty],[IntrConvergent]>;
+def int_nvvm_mbarrier_test_wait_shared :
+ GCCBuiltin<"__nvvm_mbarrier_test_wait_shared">,
+ Intrinsic<[llvm_i1_ty],[llvm_shared_i64ptr_ty, llvm_i64_ty],[IntrConvergent]>;
+
+def int_nvvm_mbarrier_pending_count :
+ GCCBuiltin<"__nvvm_mbarrier_pending_count">,
+ Intrinsic<[llvm_i32_ty],[llvm_i64_ty],[IntrNoMem, IntrConvergent]>;
+
// Generated within nvvm. Use for ldu on sm_20 or later. Second arg is the
// pointer's alignment.
def int_nvvm_ldu_global_i : Intrinsic<[llvm_anyint_ty],
@@ -1141,7 +1436,7 @@ def int_nvvm_move_ptr : Intrinsic<[llvm_anyptr_ty], [llvm_anyptr_ty],
// For getting the handle from a texture or surface variable
def int_nvvm_texsurf_handle
- : Intrinsic<[llvm_i64_ty], [llvm_metadata_ty, llvm_anyi64ptr_ty],
+ : Intrinsic<[llvm_i64_ty], [llvm_metadata_ty, llvm_any_i64ptr_ty],
[IntrNoMem], "llvm.nvvm.texsurf.handle">;
def int_nvvm_texsurf_handle_internal
: Intrinsic<[llvm_i64_ty], [llvm_anyptr_ty],
@@ -4097,6 +4392,49 @@ def int_nvvm_match_all_sync_i64p :
[IntrInaccessibleMemOnly, IntrConvergent], "llvm.nvvm.match.all.sync.i64p">;
//
+// REDUX.SYNC
+//
+// redux.sync.min.u32 dst, src, membermask;
+def int_nvvm_redux_sync_umin : GCCBuiltin<"__nvvm_redux_sync_umin">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+// redux.sync.max.u32 dst, src, membermask;
+def int_nvvm_redux_sync_umax : GCCBuiltin<"__nvvm_redux_sync_umax">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+// redux.sync.add.s32 dst, src, membermask;
+def int_nvvm_redux_sync_add : GCCBuiltin<"__nvvm_redux_sync_add">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+// redux.sync.min.s32 dst, src, membermask;
+def int_nvvm_redux_sync_min : GCCBuiltin<"__nvvm_redux_sync_min">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+// redux.sync.max.s32 dst, src, membermask;
+def int_nvvm_redux_sync_max : GCCBuiltin<"__nvvm_redux_sync_max">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+// redux.sync.and.b32 dst, src, membermask;
+def int_nvvm_redux_sync_and : GCCBuiltin<"__nvvm_redux_sync_and">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+// redux.sync.xor.b32 dst, src, membermask;
+def int_nvvm_redux_sync_xor : GCCBuiltin<"__nvvm_redux_sync_xor">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+// redux.sync.or.b32 dst, src, membermask;
+def int_nvvm_redux_sync_or : GCCBuiltin<"__nvvm_redux_sync_or">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty],
+ [IntrConvergent, IntrInaccessibleMemOnly]>;
+
+//
// WMMA instructions
//
// WMMA.LOAD
@@ -4120,36 +4458,63 @@ class NVVM_WMMA_ST<WMMA_REGS Frag, string Layout, int WithStride>
foreach layout = ["row", "col"] in {
foreach stride = [0, 1] in {
foreach frag = NVVM_MMA_OPS.all_ld_ops in
- if NVVM_MMA_SUPPORTED<[frag], layout>.ret then
+ if NVVM_WMMA_LDST_SUPPORTED<frag, layout>.ret then
def WMMA_NAME_LDST<"load", frag, layout, stride>.record
: NVVM_WMMA_LD<frag, layout, stride>;
foreach frag = NVVM_MMA_OPS.all_st_ops in
- if NVVM_MMA_SUPPORTED<[frag], layout>.ret then
+ if NVVM_WMMA_LDST_SUPPORTED<frag, layout>.ret then
def WMMA_NAME_LDST<"store", frag, layout, stride>.record
: NVVM_WMMA_ST<frag, layout, stride>;
}
}
// WMMA.MMA
-class NVVM_WMMA_MMA<string ALayout, string BLayout, int Satfinite,
+class NVVM_WMMA_MMA<string ALayout, string BLayout, int Satfinite, string rnd, string b1op,
WMMA_REGS A, WMMA_REGS B,
WMMA_REGS C, WMMA_REGS D>
: Intrinsic<D.regs,
!listconcat(A.regs, B.regs, C.regs),
[IntrNoMem],
- WMMA_NAME_MMA<ALayout, BLayout, Satfinite, A, B, C, D>.llvm>;
+ WMMA_NAME<ALayout, BLayout, Satfinite, rnd, b1op, A, B, C, D>.llvm>;
+
+foreach layout_a = ["row", "col"] in {
+ foreach layout_b = ["row", "col"] in {
+ foreach satf = [0, 1] in {
+ foreach rnd = ["", "rn", "rz", "rm", "rp"] in {
+ foreach op = NVVM_MMA_OPS.all_wmma_ops in {
+ foreach b1op = NVVM_MMA_B1OPS<op>.ret in {
+ if NVVM_WMMA_SUPPORTED<op, layout_a, layout_b, satf, rnd>.ret then {
+ def WMMA_NAME<layout_a, layout_b, satf, rnd, b1op,
+ op[0], op[1], op[2], op[3]>.record
+ : NVVM_WMMA_MMA<layout_a, layout_b, satf, rnd, b1op,
+ op[0], op[1], op[2], op[3]>;
+ }
+ } // b1op
+ } // op
+ } // rnd
+ } // satf
+ } // layout_b
+} // layout_a
+
+// MMA
+class NVVM_MMA<string ALayout, string BLayout, int Satfinite, string b1op,
+ WMMA_REGS A, WMMA_REGS B, WMMA_REGS C, WMMA_REGS D>
+ : Intrinsic<D.regs,
+ !listconcat(A.regs, B.regs, C.regs),
+ [IntrNoMem],
+ MMA_NAME<ALayout, BLayout, Satfinite, b1op, A, B, C, D>.llvm>;
foreach layout_a = ["row", "col"] in {
foreach layout_b = ["row", "col"] in {
foreach satf = [0, 1] in {
foreach op = NVVM_MMA_OPS.all_mma_ops in {
- if NVVM_MMA_SUPPORTED<op, layout_a, layout_b, satf>.ret then {
- def WMMA_NAME_MMA<layout_a, layout_b, satf,
- op[0], op[1], op[2], op[3]>.record
- : NVVM_WMMA_MMA<layout_a, layout_b, satf,
- op[0], op[1], op[2], op[3]>;
- }
- }
+ foreach b1op = NVVM_MMA_B1OPS<op>.ret in {
+ if NVVM_MMA_SUPPORTED<op, layout_a, layout_b, satf>.ret then {
+ def MMA_NAME<layout_a, layout_b, satf, b1op, op[0], op[1], op[2], op[3]>.record
+ : NVVM_MMA<layout_a, layout_b, satf, b1op, op[0], op[1], op[2], op[3]>;
+ }
+ } // b1op
+ } // op
} // satf
} // layout_b
} // layout_a
diff --git a/llvm/include/llvm/IR/IntrinsicsPowerPC.td b/llvm/include/llvm/IR/IntrinsicsPowerPC.td
index 075b6252d9a5..92d3bdea37ed 100644
--- a/llvm/include/llvm/IR/IntrinsicsPowerPC.td
+++ b/llvm/include/llvm/IR/IntrinsicsPowerPC.td
@@ -20,35 +20,15 @@ let TargetPrefix = "ppc" in { // All intrinsics start with "llvm.ppc.".
def int_ppc_dcba : Intrinsic<[], [llvm_ptr_ty], []>;
def int_ppc_dcbf : GCCBuiltin<"__builtin_dcbf">,
Intrinsic<[], [llvm_ptr_ty], [IntrArgMemOnly]>;
- def int_ppc_dcbfl : Intrinsic<[], [llvm_ptr_ty], [IntrArgMemOnly]>;
- def int_ppc_dcbflp : Intrinsic<[], [llvm_ptr_ty], [IntrArgMemOnly]>;
def int_ppc_dcbfps : Intrinsic<[], [llvm_ptr_ty], [IntrArgMemOnly]>;
def int_ppc_dcbstps : Intrinsic<[], [llvm_ptr_ty], [IntrArgMemOnly]>;
def int_ppc_dcbi : Intrinsic<[], [llvm_ptr_ty], []>;
- def int_ppc_dcbst : Intrinsic<[], [llvm_ptr_ty], []>;
- def int_ppc_dcbt : Intrinsic<[], [llvm_ptr_ty],
- [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
- def int_ppc_dcbtst: Intrinsic<[], [llvm_ptr_ty],
- [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
def int_ppc_dcbt_with_hint: Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty],
[IntrArgMemOnly, NoCapture<ArgIndex<0>>, ImmArg<ArgIndex<1>>]>;
def int_ppc_dcbtst_with_hint: Intrinsic<[], [llvm_ptr_ty, llvm_i32_ty],
[IntrArgMemOnly, NoCapture<ArgIndex<0>>, ImmArg<ArgIndex<1>>]>;
- def int_ppc_dcbz : Intrinsic<[], [llvm_ptr_ty], []>;
def int_ppc_dcbzl : Intrinsic<[], [llvm_ptr_ty], []>;
- // Population Count in each Byte.
- def int_ppc_popcntb : Intrinsic<[llvm_i64_ty], [llvm_i64_ty], [IntrNoMem]>;
-
- // sync instruction (i.e. sync 0, a.k.a hwsync)
- def int_ppc_sync : Intrinsic<[], [], []>;
- // isync instruction
- def int_ppc_isync : Intrinsic<[], [], []>;
- // lwsync is sync 1
- def int_ppc_lwsync : Intrinsic<[], [], []>;
- // eieio instruction
- def int_ppc_eieio : Intrinsic<[],[],[]>;
-
// Get content from current FPSCR register
def int_ppc_readflm : GCCBuiltin<"__builtin_readflm">,
Intrinsic<[llvm_double_ty], [], [IntrNoMem]>;
@@ -132,9 +112,45 @@ let TargetPrefix = "ppc" in { // All intrinsics start with "llvm.ppc.".
: GCCBuiltin<"__builtin_vsx_scalar_insert_exp_qp">,
Intrinsic <[llvm_f128_ty], [llvm_f128_ty, llvm_i64_ty], [IntrNoMem]>;
+ // Intrinsics defined to maintain XL compatibility
+ def int_ppc_tdw
+ : GCCBuiltin<"__builtin_ppc_tdw">,
+ Intrinsic <[], [llvm_i64_ty, llvm_i64_ty, llvm_i32_ty], [ImmArg<ArgIndex<2>>]>;
+ def int_ppc_tw
+ : GCCBuiltin<"__builtin_ppc_tw">,
+ Intrinsic <[], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty], [ImmArg<ArgIndex<2>>]>;
+ def int_ppc_trapd
+ : GCCBuiltin<"__builtin_ppc_trapd">,
+ Intrinsic <[], [llvm_i64_ty], []>;
+ def int_ppc_trap
+ : GCCBuiltin<"__builtin_ppc_trap">,
+ Intrinsic <[], [llvm_i32_ty], []>;
+ def int_ppc_fcfid
+ : GCCBuiltin<"__builtin_ppc_fcfid">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fcfud
+ : GCCBuiltin<"__builtin_ppc_fcfud">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fctid
+ : GCCBuiltin<"__builtin_ppc_fctid">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fctidz
+ : GCCBuiltin<"__builtin_ppc_fctidz">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fctiw
+ : GCCBuiltin<"__builtin_ppc_fctiw">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fctiwz
+ : GCCBuiltin<"__builtin_ppc_fctiwz">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fctudz
+ : GCCBuiltin<"__builtin_ppc_fctudz">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fctuwz
+ : GCCBuiltin<"__builtin_ppc_fctuwz">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
}
-
let TargetPrefix = "ppc" in { // All PPC intrinsics start with "llvm.ppc.".
/// PowerPC_Vec_Intrinsic - Base class for all altivec intrinsics.
class PowerPC_Vec_Intrinsic<string GCCIntSuffix, list<LLVMType> ret_types,
@@ -1239,6 +1255,12 @@ def int_ppc_vsx_xvcvdpsxws :
def int_ppc_vsx_xvcvdpuxws :
PowerPC_VSX_Intrinsic<"xvcvdpuxws", [llvm_v4i32_ty],
[llvm_v2f64_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvspsxds :
+ PowerPC_VSX_Intrinsic<"xvcvspsxds", [llvm_v2i64_ty],
+ [llvm_v4f32_ty], [IntrNoMem]>;
+def int_ppc_vsx_xvcvspuxds :
+ PowerPC_VSX_Intrinsic<"xvcvspuxds", [llvm_v2i64_ty],
+ [llvm_v4f32_ty], [IntrNoMem]>;
def int_ppc_vsx_xvcvsxwdp :
PowerPC_VSX_Intrinsic<"xvcvsxwdp", [llvm_v2f64_ty],
[llvm_v4i32_ty], [IntrNoMem]>;
@@ -1495,3 +1517,225 @@ let TargetPrefix = "ppc" in {
[llvm_v512i1_ty, llvm_v16i8_ty, llvm_v16i8_ty, llvm_i32_ty,
llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
}
+
+// XL Compat intrinsics.
+let TargetPrefix = "ppc" in {
+ def int_ppc_dcbfl : GCCBuiltin<"__builtin_ppc_dcbfl">,
+ Intrinsic<[], [llvm_ptr_ty], [IntrArgMemOnly]>;
+ def int_ppc_dcbflp : GCCBuiltin<"__builtin_ppc_dcbflp">,
+ Intrinsic<[], [llvm_ptr_ty], [IntrArgMemOnly]>;
+ def int_ppc_dcbst : GCCBuiltin<"__builtin_ppc_dcbst">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_ppc_dcbt : GCCBuiltin<"__builtin_ppc_dcbt">,
+ Intrinsic<[], [llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
+ def int_ppc_dcbtst : GCCBuiltin<"__builtin_ppc_dcbtst">,
+ Intrinsic<[], [llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
+ def int_ppc_dcbz : GCCBuiltin<"__builtin_ppc_dcbz">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+ def int_ppc_icbt : GCCBuiltin<"__builtin_ppc_icbt">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
+
+ // Population Count in each Byte.
+ def int_ppc_popcntb : Intrinsic<[llvm_anyint_ty], [llvm_anyint_ty], [IntrNoMem]>;
+
+ // sync instruction (i.e. sync 0, a.k.a hwsync)
+ def int_ppc_sync : GCCBuiltin<"__builtin_ppc_sync">,
+ Intrinsic<[], [], []>;
+ def int_ppc_iospace_sync : GCCBuiltin<"__builtin_ppc_iospace_sync">,
+ Intrinsic<[], [], []>;
+ // isync instruction
+ def int_ppc_isync : GCCBuiltin<"__builtin_ppc_isync">,
+ Intrinsic<[], [], []>;
+ // lwsync is sync 1
+ def int_ppc_lwsync : GCCBuiltin<"__builtin_ppc_lwsync">,
+ Intrinsic<[], [], []>;
+ def int_ppc_iospace_lwsync : GCCBuiltin<"__builtin_ppc_iospace_lwsync">,
+ Intrinsic<[], [], []>;
+ // eieio instruction
+ def int_ppc_eieio : GCCBuiltin<"__builtin_ppc_eieio">,
+ Intrinsic<[],[],[]>;
+ def int_ppc_iospace_eieio : GCCBuiltin<"__builtin_ppc_iospace_eieio">,
+ Intrinsic<[],[],[]>;
+ def int_ppc_stdcx : GCCBuiltin<"__builtin_ppc_stdcx">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i64_ty],
+ [IntrWriteMem]>;
+ def int_ppc_stwcx : GCCBuiltin<"__builtin_ppc_stwcx">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrWriteMem]>;
+ def int_ppc_sthcx
+ : Intrinsic<[llvm_i32_ty], [ llvm_ptr_ty, llvm_i32_ty ], [IntrWriteMem]>;
+ def int_ppc_stbcx : GCCBuiltin<"__builtin_ppc_stbcx">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty, llvm_i32_ty],
+ [IntrWriteMem]>;
+ def int_ppc_dcbtstt : GCCBuiltin<"__builtin_ppc_dcbtstt">,
+ Intrinsic<[], [llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
+ def int_ppc_dcbtt : GCCBuiltin<"__builtin_ppc_dcbtt">,
+ Intrinsic<[], [llvm_ptr_ty],
+ [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
+ def int_ppc_mftbu : GCCBuiltin<"__builtin_ppc_mftbu">,
+ Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>;
+ def int_ppc_mfmsr : GCCBuiltin<"__builtin_ppc_mfmsr">,
+ Intrinsic<[llvm_i32_ty], [], [IntrNoMem]>;
+ def int_ppc_mfspr
+ : Intrinsic<[llvm_anyint_ty], [llvm_i32_ty], [ImmArg<ArgIndex<0>>]>;
+ def int_ppc_mtmsr
+ : GCCBuiltin<"__builtin_ppc_mtmsr">, Intrinsic<[], [llvm_i32_ty], []>;
+ def int_ppc_mtspr
+ : Intrinsic<[], [llvm_i32_ty, llvm_anyint_ty], [ImmArg<ArgIndex<0>>]>;
+ def int_ppc_stfiw : GCCBuiltin<"__builtin_ppc_stfiw">,
+ Intrinsic<[], [llvm_ptr_ty, llvm_double_ty],
+ [IntrWriteMem]>;
+ // compare
+ def int_ppc_cmpeqb
+ : GCCBuiltin<"__builtin_ppc_cmpeqb">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_ppc_cmprb
+ : GCCBuiltin<"__builtin_ppc_cmprb">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<0>>]>;
+ def int_ppc_setb
+ : GCCBuiltin<"__builtin_ppc_setb">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_ppc_cmpb
+ : Intrinsic<[llvm_anyint_ty], [llvm_anyint_ty, llvm_anyint_ty],
+ [IntrNoMem]>;
+ // multiply
+ def int_ppc_mulhd
+ : GCCBuiltin<"__builtin_ppc_mulhd">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_ppc_mulhdu
+ : GCCBuiltin<"__builtin_ppc_mulhdu">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_ppc_mulhw
+ : GCCBuiltin<"__builtin_ppc_mulhw">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_ppc_mulhwu
+ : GCCBuiltin<"__builtin_ppc_mulhwu">,
+ Intrinsic<[llvm_i32_ty], [llvm_i32_ty, llvm_i32_ty], [IntrNoMem]>;
+ def int_ppc_maddhd
+ : GCCBuiltin<"__builtin_ppc_maddhd">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_ppc_maddhdu
+ : GCCBuiltin<"__builtin_ppc_maddhdu">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ def int_ppc_maddld
+ : GCCBuiltin<"__builtin_ppc_maddld">,
+ Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
+ // load
+ def int_ppc_load2r
+ : GCCBuiltin<"__builtin_ppc_load2r">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+ def int_ppc_load4r
+ : GCCBuiltin<"__builtin_ppc_load4r">,
+ Intrinsic<[llvm_i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+ def int_ppc_load8r
+ : GCCBuiltin<"__builtin_ppc_load8r">,
+ Intrinsic<[llvm_i64_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+ // store
+ def int_ppc_store2r
+ : GCCBuiltin<"__builtin_ppc_store2r">,
+ Intrinsic<[], [llvm_i32_ty, llvm_ptr_ty], [IntrWriteMem]>;
+ def int_ppc_store4r
+ : GCCBuiltin<"__builtin_ppc_store4r">,
+ Intrinsic<[], [llvm_i32_ty, llvm_ptr_ty], [IntrWriteMem]>;
+ def int_ppc_store8r
+ : GCCBuiltin<"__builtin_ppc_store8r">,
+ Intrinsic<[], [llvm_i64_ty, llvm_ptr_ty], [IntrWriteMem]>;
+ def int_ppc_insert_exp
+ : GCCBuiltin<"__builtin_ppc_insert_exp">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty, llvm_i64_ty],
+ [IntrNoMem]>;
+ def int_ppc_extract_exp
+ : GCCBuiltin<"__builtin_ppc_extract_exp">,
+ Intrinsic <[llvm_i32_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_extract_sig
+ : GCCBuiltin<"__builtin_ppc_extract_sig">,
+ Intrinsic <[llvm_i64_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_mtfsb0
+ : GCCBuiltin<"__builtin_ppc_mtfsb0">,
+ Intrinsic <[], [llvm_i32_ty],
+ [IntrNoMem, IntrHasSideEffects, ImmArg<ArgIndex<0>>]>;
+ def int_ppc_mtfsb1
+ : GCCBuiltin<"__builtin_ppc_mtfsb1">,
+ Intrinsic <[], [llvm_i32_ty],
+ [IntrNoMem, IntrHasSideEffects, ImmArg<ArgIndex<0>>]>;
+ def int_ppc_mtfsf :
+ Intrinsic <[], [llvm_i32_ty, llvm_double_ty],
+ [IntrNoMem, IntrHasSideEffects, ImmArg<ArgIndex<0>>]>;
+ def int_ppc_mtfsfi
+ : GCCBuiltin<"__builtin_ppc_mtfsfi">,
+ Intrinsic <[], [llvm_i32_ty, llvm_i32_ty],
+ [IntrNoMem, IntrHasSideEffects,
+ ImmArg<ArgIndex<0>>,ImmArg<ArgIndex<1>>]>;
+ def int_ppc_fmsub
+ : GCCBuiltin<"__builtin_ppc_fmsub">,
+ Intrinsic <[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty],
+ [IntrNoMem]>;
+ def int_ppc_fmsubs
+ : GCCBuiltin<"__builtin_ppc_fmsubs">,
+ Intrinsic <[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem]>;
+ def int_ppc_fnmadd
+ : GCCBuiltin<"__builtin_ppc_fnmadd">,
+ Intrinsic <[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty],
+ [IntrNoMem]>;
+ def int_ppc_fnmadds
+ : GCCBuiltin<"__builtin_ppc_fnmadds">,
+ Intrinsic <[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem]>;
+ def int_ppc_fnmsub
+ : GCCBuiltin<"__builtin_ppc_fnmsub">,
+ Intrinsic <[llvm_double_ty],
+ [llvm_double_ty, llvm_double_ty, llvm_double_ty],
+ [IntrNoMem]>;
+ def int_ppc_fnmsubs
+ : GCCBuiltin<"__builtin_ppc_fnmsubs">,
+ Intrinsic <[llvm_float_ty],
+ [llvm_float_ty, llvm_float_ty, llvm_float_ty],
+ [IntrNoMem]>;
+ def int_ppc_fre
+ : GCCBuiltin<"__builtin_ppc_fre">,
+ Intrinsic <[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fres
+ : GCCBuiltin<"__builtin_ppc_fres">,
+ Intrinsic <[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+
+ def int_ppc_fsel : GCCBuiltin<"__builtin_ppc_fsel">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty, llvm_double_ty,
+ llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_fsels : GCCBuiltin<"__builtin_ppc_fsels">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty, llvm_float_ty,
+ llvm_float_ty], [IntrNoMem]>;
+ def int_ppc_frsqrte : GCCBuiltin<"__builtin_ppc_frsqrte">,
+ Intrinsic<[llvm_double_ty], [llvm_double_ty], [IntrNoMem]>;
+ def int_ppc_frsqrtes : GCCBuiltin<"__builtin_ppc_frsqrtes">,
+ Intrinsic<[llvm_float_ty], [llvm_float_ty], [IntrNoMem]>;
+}
+
+//===----------------------------------------------------------------------===//
+// PowerPC Atomic Intrinsic Definitions.
+let TargetPrefix = "ppc" in {
+ class AtomicRMW128Intrinsic
+ : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_ptr_ty, llvm_i64_ty, llvm_i64_ty],
+ [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
+ def int_ppc_atomicrmw_xchg_i128 : AtomicRMW128Intrinsic;
+ def int_ppc_atomicrmw_add_i128 : AtomicRMW128Intrinsic;
+ def int_ppc_atomicrmw_sub_i128 : AtomicRMW128Intrinsic;
+ def int_ppc_atomicrmw_and_i128 : AtomicRMW128Intrinsic;
+ def int_ppc_atomicrmw_or_i128 : AtomicRMW128Intrinsic;
+ def int_ppc_atomicrmw_xor_i128 : AtomicRMW128Intrinsic;
+ def int_ppc_atomicrmw_nand_i128 : AtomicRMW128Intrinsic;
+ def int_ppc_cmpxchg_i128 : Intrinsic<[llvm_i64_ty, llvm_i64_ty],
+ [llvm_ptr_ty,
+ llvm_i64_ty, llvm_i64_ty,
+ llvm_i64_ty, llvm_i64_ty],
+ [IntrArgMemOnly, NoCapture<ArgIndex<0>>]>;
+}
diff --git a/llvm/include/llvm/IR/IntrinsicsRISCV.td b/llvm/include/llvm/IR/IntrinsicsRISCV.td
index ab5b09b72ac3..a46709bf09d1 100644
--- a/llvm/include/llvm/IR/IntrinsicsRISCV.td
+++ b/llvm/include/llvm/IR/IntrinsicsRISCV.td
@@ -68,6 +68,53 @@ let TargetPrefix = "riscv" in {
} // TargetPrefix = "riscv"
//===----------------------------------------------------------------------===//
+// Bitmanip (Bit Manipulation) Extension
+
+let TargetPrefix = "riscv" in {
+
+ class BitManipGPRIntrinsics
+ : Intrinsic<[llvm_any_ty],
+ [LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable, IntrWillReturn]>;
+ class BitManipGPRGPRIntrinsics
+ : Intrinsic<[llvm_any_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>],
+ [IntrNoMem, IntrSpeculatable, IntrWillReturn]>;
+
+ // Zbb
+ def int_riscv_orc_b : BitManipGPRIntrinsics;
+
+ // Zbc
+ def int_riscv_clmul : BitManipGPRGPRIntrinsics;
+ def int_riscv_clmulh : BitManipGPRGPRIntrinsics;
+ def int_riscv_clmulr : BitManipGPRGPRIntrinsics;
+
+ // Zbe
+ def int_riscv_bcompress : BitManipGPRGPRIntrinsics;
+ def int_riscv_bdecompress : BitManipGPRGPRIntrinsics;
+
+ // Zbp
+ def int_riscv_grev : BitManipGPRGPRIntrinsics;
+ def int_riscv_gorc : BitManipGPRGPRIntrinsics;
+ def int_riscv_shfl : BitManipGPRGPRIntrinsics;
+ def int_riscv_unshfl : BitManipGPRGPRIntrinsics;
+ def int_riscv_xperm_n : BitManipGPRGPRIntrinsics;
+ def int_riscv_xperm_b : BitManipGPRGPRIntrinsics;
+ def int_riscv_xperm_h : BitManipGPRGPRIntrinsics;
+ def int_riscv_xperm_w : BitManipGPRGPRIntrinsics;
+
+ // Zbr
+ def int_riscv_crc32_b : BitManipGPRIntrinsics;
+ def int_riscv_crc32_h : BitManipGPRIntrinsics;
+ def int_riscv_crc32_w : BitManipGPRIntrinsics;
+ def int_riscv_crc32_d : BitManipGPRIntrinsics;
+ def int_riscv_crc32c_b : BitManipGPRIntrinsics;
+ def int_riscv_crc32c_h : BitManipGPRIntrinsics;
+ def int_riscv_crc32c_w : BitManipGPRIntrinsics;
+ def int_riscv_crc32c_d : BitManipGPRIntrinsics;
+} // TargetPrefix = "riscv"
+
+//===----------------------------------------------------------------------===//
// Vectors
class RISCVVIntrinsic {
@@ -75,7 +122,7 @@ class RISCVVIntrinsic {
// operand, so they have to be extended. If set to zero then the intrinsic
// does not have any operand that must be extended.
Intrinsic IntrinsicID = !cast<Intrinsic>(NAME);
- bits<4> ExtendOperand = 0;
+ bits<4> SplatOperand = 0;
}
let TargetPrefix = "riscv" in {
@@ -231,19 +278,56 @@ let TargetPrefix = "riscv" in {
[LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic;
// For destination vector type is the same as first and second source vector.
- // Input: (vector_in, vector_in, vl)
- class RISCVBinaryAAAMask
+ // Input: (vector_in, int_vector_in, vl)
+ class RISCVRGatherVVNoMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMVectorOfBitcastsToInt<0>, llvm_anyint_ty],
+ [IntrNoMem]>, RISCVVIntrinsic;
+ // For destination vector type is the same as first and second source vector.
+ // Input: (vector_in, vector_in, int_vector_in, vl)
+ class RISCVRGatherVVMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, LLVMVectorOfBitcastsToInt<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
+ [IntrNoMem]>, RISCVVIntrinsic;
+ // Input: (vector_in, int16_vector_in, vl)
+ class RISCVRGatherEI16VVNoMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMScalarOrSameVectorWidth<0, llvm_i16_ty>,
+ llvm_anyint_ty],
+ [IntrNoMem]>, RISCVVIntrinsic;
+ // For destination vector type is the same as first and second source vector.
+ // Input: (vector_in, vector_in, int16_vector_in, vl)
+ class RISCVRGatherEI16VVMask
: Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, LLVMMatchType<0>,
+ LLVMScalarOrSameVectorWidth<0, llvm_i16_ty>,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic;
+ // For destination vector type is the same as first source vector, and the
+ // second operand is XLen.
+ // Input: (vector_in, xlen_in, vl)
+ class RISCVGatherVXNoMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyint_ty, LLVMMatchType<1>],
+ [IntrNoMem]>, RISCVVIntrinsic {
+ }
+ // For destination vector type is the same as first source vector (with mask).
+ // Second operand is XLen.
+ // Input: (maskedoff, vector_in, xlen_in, mask, vl)
+ class RISCVGatherVXMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyint_ty,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, LLVMMatchType<1>],
+ [IntrNoMem]>, RISCVVIntrinsic {
+ }
// For destination vector type is the same as first source vector.
// Input: (vector_in, vector_in/scalar_in, vl)
class RISCVBinaryAAXNoMask
: Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, llvm_any_ty, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For destination vector type is the same as first source vector (with mask).
// Input: (maskedoff, vector_in, vector_in/scalar_in, mask, vl)
@@ -252,15 +336,30 @@ let TargetPrefix = "riscv" in {
[LLVMMatchType<0>, LLVMMatchType<0>, llvm_any_ty,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 3;
+ let SplatOperand = 3;
}
+ // For destination vector type is the same as first source vector. The
+ // second source operand must match the destination type or be an XLen scalar.
+ // Input: (vector_in, vector_in/scalar_in, vl)
+ class RISCVBinaryAAShiftNoMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_any_ty, llvm_anyint_ty],
+ [IntrNoMem]>, RISCVVIntrinsic;
+ // For destination vector type is the same as first source vector (with mask).
+ // The second source operand must match the destination type or be an XLen scalar.
+ // Input: (maskedoff, vector_in, vector_in/scalar_in, mask, vl)
+ class RISCVBinaryAAShiftMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, llvm_any_ty,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
+ [IntrNoMem]>, RISCVVIntrinsic;
// For destination vector type is NOT the same as first source vector.
// Input: (vector_in, vector_in/scalar_in, vl)
class RISCVBinaryABXNoMask
: Intrinsic<[llvm_anyvector_ty],
[llvm_anyvector_ty, llvm_any_ty, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For destination vector type is NOT the same as first source vector (with mask).
// Input: (maskedoff, vector_in, vector_in/scalar_in, mask, vl)
@@ -269,8 +368,23 @@ let TargetPrefix = "riscv" in {
[LLVMMatchType<0>, llvm_anyvector_ty, llvm_any_ty,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 3;
+ let SplatOperand = 3;
}
+ // For destination vector type is NOT the same as first source vector. The
+ // second source operand must match the destination type or be an XLen scalar.
+ // Input: (vector_in, vector_in/scalar_in, vl)
+ class RISCVBinaryABShiftNoMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [llvm_anyvector_ty, llvm_any_ty, llvm_anyint_ty],
+ [IntrNoMem]>, RISCVVIntrinsic;
+ // For destination vector type is NOT the same as first source vector (with mask).
+ // The second source operand must match the destination type or be an XLen scalar.
+ // Input: (maskedoff, vector_in, vector_in/scalar_in, mask, vl)
+ class RISCVBinaryABShiftMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_anyvector_ty, llvm_any_ty,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
+ [IntrNoMem]>, RISCVVIntrinsic;
// For binary operations with V0 as input.
// Input: (vector_in, vector_in/scalar_in, V0, vl)
class RISCVBinaryWithV0
@@ -279,7 +393,7 @@ let TargetPrefix = "riscv" in {
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For binary operations with mask type output and V0 as input.
// Output: (mask type output)
@@ -290,7 +404,7 @@ let TargetPrefix = "riscv" in {
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>,
llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For binary operations with mask type output.
// Output: (mask type output)
@@ -299,7 +413,7 @@ let TargetPrefix = "riscv" in {
: Intrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
[llvm_anyvector_ty, llvm_any_ty, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For binary operations with mask type output without mask.
// Output: (mask type output)
@@ -308,7 +422,7 @@ let TargetPrefix = "riscv" in {
: Intrinsic<[LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>],
[llvm_anyvector_ty, llvm_any_ty, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For binary operations with mask type output with mask.
// Output: (mask type output)
@@ -319,7 +433,7 @@ let TargetPrefix = "riscv" in {
llvm_anyvector_ty, llvm_any_ty,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 3;
+ let SplatOperand = 3;
}
// For FP classify operations.
// Output: (bit mask type output)
@@ -343,7 +457,7 @@ let TargetPrefix = "riscv" in {
: Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, llvm_any_ty, llvm_anyint_ty],
[IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For Saturating binary operations with mask.
// The destination vector type is the same as first source vector.
@@ -353,27 +467,42 @@ let TargetPrefix = "riscv" in {
[LLVMMatchType<0>, LLVMMatchType<0>, llvm_any_ty,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
[IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic {
- let ExtendOperand = 3;
+ let SplatOperand = 3;
}
// For Saturating binary operations.
+ // The destination vector type is the same as first source vector.
+ // The second source operand matches the destination type or is an XLen scalar.
+ // Input: (vector_in, vector_in/scalar_in, vl)
+ class RISCVSaturatingBinaryAAShiftNoMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, llvm_any_ty, llvm_anyint_ty],
+ [IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic;
+ // For Saturating binary operations with mask.
+ // The destination vector type is the same as first source vector.
+ // The second source operand matches the destination type or is an XLen scalar.
+ // Input: (maskedoff, vector_in, vector_in/scalar_in, mask, vl)
+ class RISCVSaturatingBinaryAAShiftMask
+ : Intrinsic<[llvm_anyvector_ty],
+ [LLVMMatchType<0>, LLVMMatchType<0>, llvm_any_ty,
+ LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
+ [IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic;
+ // For Saturating binary operations.
// The destination vector type is NOT the same as first source vector.
+ // The second source operand matches the destination type or is an XLen scalar.
// Input: (vector_in, vector_in/scalar_in, vl)
- class RISCVSaturatingBinaryABXNoMask
+ class RISCVSaturatingBinaryABShiftNoMask
: Intrinsic<[llvm_anyvector_ty],
[llvm_anyvector_ty, llvm_any_ty, llvm_anyint_ty],
- [IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
- }
+ [IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic;
// For Saturating binary operations with mask.
// The destination vector type is NOT the same as first source vector (with mask).
+ // The second source operand matches the destination type or is an XLen scalar.
// Input: (maskedoff, vector_in, vector_in/scalar_in, mask, vl)
- class RISCVSaturatingBinaryABXMask
+ class RISCVSaturatingBinaryABShiftMask
: Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, llvm_anyvector_ty, llvm_any_ty,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
- [IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic {
- let ExtendOperand = 3;
- }
+ [IntrNoMem, IntrHasSideEffects]>, RISCVVIntrinsic;
class RISCVTernaryAAAXNoMask
: Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, LLVMMatchType<0>, llvm_anyint_ty,
@@ -389,28 +518,28 @@ let TargetPrefix = "riscv" in {
[LLVMMatchType<0>, llvm_any_ty, LLVMMatchType<0>,
llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
class RISCVTernaryAAXAMask
: Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, llvm_any_ty, LLVMMatchType<0>,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
class RISCVTernaryWideNoMask
: Intrinsic< [llvm_anyvector_ty],
[LLVMMatchType<0>, llvm_any_ty, llvm_anyvector_ty,
llvm_anyint_ty],
[IntrNoMem] >, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
class RISCVTernaryWideMask
: Intrinsic< [llvm_anyvector_ty],
[LLVMMatchType<0>, llvm_any_ty, llvm_anyvector_ty,
LLVMScalarOrSameVectorWidth<0, llvm_i1_ty>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
+ let SplatOperand = 2;
}
// For Reduction ternary operations.
// For destination vector type is the same as first and third source vector.
@@ -433,15 +562,15 @@ let TargetPrefix = "riscv" in {
// Output: (scalar type)
// Input: (vector_in, vl)
class RISCVMaskUnarySOutNoMask
- : Intrinsic<[llvm_anyint_ty],
- [llvm_anyvector_ty, LLVMMatchType<0>],
+ : Intrinsic<[LLVMMatchType<1>],
+ [llvm_anyvector_ty, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic;
// For unary operations with scalar type output with mask
// Output: (scalar type)
// Input: (vector_in, mask, vl)
class RISCVMaskUnarySOutMask
- : Intrinsic<[llvm_anyint_ty],
- [llvm_anyvector_ty, LLVMMatchType<1>, LLVMMatchType<0>],
+ : Intrinsic<[LLVMMatchType<1>],
+ [llvm_anyvector_ty, LLVMMatchType<0>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic;
// For destination vector type is NOT the same as source vector.
// Input: (vector_in, vl)
@@ -688,12 +817,36 @@ let TargetPrefix = "riscv" in {
def "int_riscv_" # NAME : RISCVBinaryAAXNoMask;
def "int_riscv_" # NAME # "_mask" : RISCVBinaryAAXMask;
}
+  // Like RISCVBinaryAAX, but the second operand is used as a shift amount so it
+ // must be a vector or an XLen scalar.
+ multiclass RISCVBinaryAAShift {
+ def "int_riscv_" # NAME : RISCVBinaryAAShiftNoMask;
+ def "int_riscv_" # NAME # "_mask" : RISCVBinaryAAShiftMask;
+ }
+ multiclass RISCVRGatherVV {
+ def "int_riscv_" # NAME : RISCVRGatherVVNoMask;
+ def "int_riscv_" # NAME # "_mask" : RISCVRGatherVVMask;
+ }
+ multiclass RISCVRGatherVX {
+ def "int_riscv_" # NAME : RISCVGatherVXNoMask;
+ def "int_riscv_" # NAME # "_mask" : RISCVGatherVXMask;
+ }
+ multiclass RISCVRGatherEI16VV {
+ def "int_riscv_" # NAME : RISCVRGatherEI16VVNoMask;
+ def "int_riscv_" # NAME # "_mask" : RISCVRGatherEI16VVMask;
+ }
// ABX means the destination type(A) is different from the first source
// type(B). X means any type for the second source operand.
multiclass RISCVBinaryABX {
def "int_riscv_" # NAME : RISCVBinaryABXNoMask;
def "int_riscv_" # NAME # "_mask" : RISCVBinaryABXMask;
}
+  // Like RISCVBinaryABX, but the second operand is used as a shift amount so it
+ // must be a vector or an XLen scalar.
+ multiclass RISCVBinaryABShift {
+ def "int_riscv_" # NAME : RISCVBinaryABShiftNoMask;
+ def "int_riscv_" # NAME # "_mask" : RISCVBinaryABShiftMask;
+ }
multiclass RISCVBinaryWithV0 {
def "int_riscv_" # NAME : RISCVBinaryWithV0;
}
@@ -707,9 +860,13 @@ let TargetPrefix = "riscv" in {
def "int_riscv_" # NAME : RISCVSaturatingBinaryAAXNoMask;
def "int_riscv_" # NAME # "_mask" : RISCVSaturatingBinaryAAXMask;
}
- multiclass RISCVSaturatingBinaryABX {
- def "int_riscv_" # NAME : RISCVSaturatingBinaryABXNoMask;
- def "int_riscv_" # NAME # "_mask" : RISCVSaturatingBinaryABXMask;
+ multiclass RISCVSaturatingBinaryAAShift {
+ def "int_riscv_" # NAME : RISCVSaturatingBinaryAAShiftNoMask;
+ def "int_riscv_" # NAME # "_mask" : RISCVSaturatingBinaryAAShiftMask;
+ }
+ multiclass RISCVSaturatingBinaryABShift {
+ def "int_riscv_" # NAME : RISCVSaturatingBinaryABShiftNoMask;
+ def "int_riscv_" # NAME # "_mask" : RISCVSaturatingBinaryABShiftMask;
}
multiclass RISCVTernaryAAAX {
def "int_riscv_" # NAME : RISCVTernaryAAAXNoMask;
@@ -790,6 +947,9 @@ let TargetPrefix = "riscv" in {
defm vsoxei : RISCVIStore;
defm vsuxei : RISCVIStore;
+ def int_riscv_vle1 : RISCVUSLoad;
+ def int_riscv_vse1 : RISCVUSStore;
+
defm vamoswap : RISCVAMO;
defm vamoadd : RISCVAMO;
defm vamoxor : RISCVAMO;
@@ -828,12 +988,12 @@ let TargetPrefix = "riscv" in {
defm vor : RISCVBinaryAAX;
defm vxor : RISCVBinaryAAX;
- defm vsll : RISCVBinaryAAX;
- defm vsrl : RISCVBinaryAAX;
- defm vsra : RISCVBinaryAAX;
+ defm vsll : RISCVBinaryAAShift;
+ defm vsrl : RISCVBinaryAAShift;
+ defm vsra : RISCVBinaryAAShift;
- defm vnsrl : RISCVBinaryABX;
- defm vnsra : RISCVBinaryABX;
+ defm vnsrl : RISCVBinaryABShift;
+ defm vnsra : RISCVBinaryABShift;
defm vmseq : RISCVCompare;
defm vmsne : RISCVCompare;
@@ -843,6 +1003,8 @@ let TargetPrefix = "riscv" in {
defm vmsle : RISCVCompare;
defm vmsgtu : RISCVCompare;
defm vmsgt : RISCVCompare;
+ defm vmsgeu : RISCVCompare;
+ defm vmsge : RISCVCompare;
defm vminu : RISCVBinaryAAX;
defm vmin : RISCVBinaryAAX;
@@ -894,9 +1056,7 @@ let TargetPrefix = "riscv" in {
[IntrNoMem]>, RISCVVIntrinsic;
def int_riscv_vmv_v_x : Intrinsic<[llvm_anyint_ty],
[LLVMVectorElementType<0>, llvm_anyint_ty],
- [IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 1;
- }
+ [IntrNoMem]>, RISCVVIntrinsic;
def int_riscv_vfmv_v_f : Intrinsic<[llvm_anyfloat_ty],
[LLVMVectorElementType<0>, llvm_anyint_ty],
[IntrNoMem]>, RISCVVIntrinsic;
@@ -907,9 +1067,7 @@ let TargetPrefix = "riscv" in {
def int_riscv_vmv_s_x : Intrinsic<[llvm_anyint_ty],
[LLVMMatchType<0>, LLVMVectorElementType<0>,
llvm_anyint_ty],
- [IntrNoMem]>, RISCVVIntrinsic {
- let ExtendOperand = 2;
- }
+ [IntrNoMem]>, RISCVVIntrinsic;
def int_riscv_vfmv_f_s : Intrinsic<[LLVMVectorElementType<0>],
[llvm_anyfloat_ty],
@@ -940,8 +1098,8 @@ let TargetPrefix = "riscv" in {
defm vfwnmsac : RISCVTernaryWide;
defm vfsqrt : RISCVUnaryAA;
- defm vfrsqrte7 : RISCVUnaryAA;
- defm vfrece7 : RISCVUnaryAA;
+ defm vfrsqrt7 : RISCVUnaryAA;
+ defm vfrec7 : RISCVUnaryAA;
defm vfmin : RISCVBinaryAAX;
defm vfmax : RISCVBinaryAAX;
@@ -962,10 +1120,11 @@ let TargetPrefix = "riscv" in {
defm vfslide1up : RISCVBinaryAAX;
defm vfslide1down : RISCVBinaryAAX;
- defm vrgather : RISCVBinaryAAX;
- defm vrgatherei16 : RISCVBinaryAAX;
+ defm vrgather_vv : RISCVRGatherVV;
+ defm vrgather_vx : RISCVRGatherVX;
+ defm vrgatherei16_vv : RISCVRGatherEI16VV;
- def "int_riscv_vcompress" : RISCVBinaryAAAMask;
+ def "int_riscv_vcompress" : RISCVUnaryAAMask;
defm vaaddu : RISCVSaturatingBinaryAAX;
defm vaadd : RISCVSaturatingBinaryAAX;
@@ -974,11 +1133,11 @@ let TargetPrefix = "riscv" in {
defm vsmul : RISCVSaturatingBinaryAAX;
- defm vssrl : RISCVSaturatingBinaryAAX;
- defm vssra : RISCVSaturatingBinaryAAX;
+ defm vssrl : RISCVSaturatingBinaryAAShift;
+ defm vssra : RISCVSaturatingBinaryAAShift;
- defm vnclipu : RISCVSaturatingBinaryABX;
- defm vnclip : RISCVSaturatingBinaryABX;
+ defm vnclipu : RISCVSaturatingBinaryABShift;
+ defm vnclip : RISCVSaturatingBinaryABShift;
defm vmfeq : RISCVCompare;
defm vmfne : RISCVCompare;
diff --git a/llvm/include/llvm/IR/IntrinsicsSystemZ.td b/llvm/include/llvm/IR/IntrinsicsSystemZ.td
index b0c5cf0148fe..81435e98bea0 100644
--- a/llvm/include/llvm/IR/IntrinsicsSystemZ.td
+++ b/llvm/include/llvm/IR/IntrinsicsSystemZ.td
@@ -436,6 +436,28 @@ let TargetPrefix = "s390" in {
def int_s390_vstrszb : SystemZTernaryConvCC<llvm_v16i8_ty, llvm_v16i8_ty>;
def int_s390_vstrszh : SystemZTernaryConvCC<llvm_v16i8_ty, llvm_v8i16_ty>;
def int_s390_vstrszf : SystemZTernaryConvCC<llvm_v16i8_ty, llvm_v4i32_ty>;
+
+ // Instructions from the NNP-assist Facility
+ def int_s390_vclfnhs : GCCBuiltin<"__builtin_s390_vclfnhs">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+ def int_s390_vclfnls : GCCBuiltin<"__builtin_s390_vclfnls">,
+ Intrinsic<[llvm_v4f32_ty],
+ [llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+ def int_s390_vcrnfs : GCCBuiltin<"__builtin_s390_vcrnfs">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v4f32_ty, llvm_v4f32_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<2>>]>;
+ def int_s390_vcfn : GCCBuiltin<"__builtin_s390_vcfn">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<1>>]>;
+ def int_s390_vcnf : GCCBuiltin<"__builtin_s390_vcnf">,
+ Intrinsic<[llvm_v8i16_ty],
+ [llvm_v8i16_ty, llvm_i32_ty],
+ [IntrNoMem, ImmArg<ArgIndex<1>>]>;
}
//===----------------------------------------------------------------------===//
diff --git a/llvm/include/llvm/IR/IntrinsicsWebAssembly.td b/llvm/include/llvm/IR/IntrinsicsWebAssembly.td
index d306d0ccb90d..11990554037d 100644
--- a/llvm/include/llvm/IR/IntrinsicsWebAssembly.td
+++ b/llvm/include/llvm/IR/IntrinsicsWebAssembly.td
@@ -115,11 +115,11 @@ def int_wasm_shuffle :
llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty,
llvm_i32_ty, llvm_i32_ty, llvm_i32_ty, llvm_i32_ty],
[IntrNoMem, IntrSpeculatable]>;
-def int_wasm_sub_saturate_signed :
+def int_wasm_sub_sat_signed :
Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, LLVMMatchType<0>],
[IntrNoMem, IntrSpeculatable]>;
-def int_wasm_sub_saturate_unsigned :
+def int_wasm_sub_sat_unsigned :
Intrinsic<[llvm_anyvector_ty],
[LLVMMatchType<0>, LLVMMatchType<0>],
[IntrNoMem, IntrSpeculatable]>;
@@ -143,14 +143,6 @@ def int_wasm_bitmask :
Intrinsic<[llvm_i32_ty],
[llvm_anyvector_ty],
[IntrNoMem, IntrSpeculatable]>;
-def int_wasm_qfma :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_qfms :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
def int_wasm_dot :
Intrinsic<[llvm_v4i32_ty],
[llvm_v8i16_ty, llvm_v8i16_ty],
@@ -165,134 +157,11 @@ def int_wasm_narrow_unsigned :
[llvm_anyvector_ty, LLVMMatchType<1>],
[IntrNoMem, IntrSpeculatable]>;
-// TODO: Replace these intrinsics with normal ISel patterns once i32x4 to i64x2
-// widening is merged to the proposal.
-def int_wasm_widen_low_signed :
- Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_widen_high_signed :
- Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_widen_low_unsigned :
- Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_widen_high_unsigned :
- Intrinsic<[llvm_v2i64_ty], [llvm_v4i32_ty], [IntrNoMem, IntrSpeculatable]>;
-
-def int_wasm_q15mulr_saturate_signed :
+def int_wasm_q15mulr_sat_signed :
Intrinsic<[llvm_v8i16_ty],
[llvm_v8i16_ty, llvm_v8i16_ty],
[IntrNoMem, IntrSpeculatable]>;
-// TODO: Replace these intrinsics with normal ISel patterns
-def int_wasm_pmin :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>, LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_pmax :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>, LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-
-// TODO: Replace these instrinsics with normal ISel patterns once the
-// rounding instructions are merged to the proposal
-// (https://github.com/WebAssembly/simd/pull/232).
-def int_wasm_ceil :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_floor :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_trunc :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_nearest :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-
-// TODO: Replace these intrinsic with normal ISel patterns once the
-// load_zero instructions are merged to the proposal.
-def int_wasm_load32_zero :
- Intrinsic<[llvm_v4i32_ty],
- [LLVMPointerType<llvm_i32_ty>],
- [IntrReadMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-
-def int_wasm_load64_zero :
- Intrinsic<[llvm_v2i64_ty],
- [LLVMPointerType<llvm_i64_ty>],
- [IntrReadMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-
-// These intrinsics do not mark their lane index arguments as immediate because
-// that changes the corresponding SDNode from ISD::Constant to
-// ISD::TargetConstant, which would require extra complications in the ISel
-// tablegen patterns. TODO: Replace these intrinsic with normal ISel patterns
-// once the load_lane instructions are merged to the proposal.
-def int_wasm_load8_lane :
- Intrinsic<[llvm_v16i8_ty],
- [LLVMPointerType<llvm_i8_ty>, llvm_v16i8_ty, llvm_i32_ty],
- [IntrReadMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-def int_wasm_load16_lane :
- Intrinsic<[llvm_v8i16_ty],
- [LLVMPointerType<llvm_i16_ty>, llvm_v8i16_ty, llvm_i32_ty],
- [IntrReadMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-def int_wasm_load32_lane :
- Intrinsic<[llvm_v4i32_ty],
- [LLVMPointerType<llvm_i32_ty>, llvm_v4i32_ty, llvm_i32_ty],
- [IntrReadMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-def int_wasm_load64_lane :
- Intrinsic<[llvm_v2i64_ty],
- [LLVMPointerType<llvm_i64_ty>, llvm_v2i64_ty, llvm_i32_ty],
- [IntrReadMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-def int_wasm_store8_lane :
- Intrinsic<[],
- [LLVMPointerType<llvm_i8_ty>, llvm_v16i8_ty, llvm_i32_ty],
- [IntrWriteMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-def int_wasm_store16_lane :
- Intrinsic<[],
- [LLVMPointerType<llvm_i16_ty>, llvm_v8i16_ty, llvm_i32_ty],
- [IntrWriteMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-def int_wasm_store32_lane :
- Intrinsic<[],
- [LLVMPointerType<llvm_i32_ty>, llvm_v4i32_ty, llvm_i32_ty],
- [IntrWriteMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-def int_wasm_store64_lane :
- Intrinsic<[],
- [LLVMPointerType<llvm_i64_ty>, llvm_v2i64_ty, llvm_i32_ty],
- [IntrWriteMem, IntrArgMemOnly],
- "", [SDNPMemOperand]>;
-
-// TODO: Replace this intrinsic with normal ISel patterns once popcnt is merged
-// to the proposal.
-def int_wasm_popcnt :
- Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty], [IntrNoMem, IntrSpeculatable]>;
-
-def int_wasm_extmul_low_signed :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMSubdivide2VectorType<0>, LLVMSubdivide2VectorType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_extmul_high_signed :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMSubdivide2VectorType<0>, LLVMSubdivide2VectorType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_extmul_low_unsigned :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMSubdivide2VectorType<0>, LLVMSubdivide2VectorType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_extmul_high_unsigned :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMSubdivide2VectorType<0>, LLVMSubdivide2VectorType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-
def int_wasm_extadd_pairwise_signed :
Intrinsic<[llvm_anyvector_ty],
[LLVMSubdivide2VectorType<0>],
@@ -302,52 +171,6 @@ def int_wasm_extadd_pairwise_unsigned :
[LLVMSubdivide2VectorType<0>],
[IntrNoMem, IntrSpeculatable]>;
-def int_wasm_signselect :
- Intrinsic<[llvm_anyvector_ty],
- [LLVMMatchType<0>, LLVMMatchType<0>, LLVMMatchType<0>],
- [IntrNoMem, IntrSpeculatable]>;
-
-// TODO: Remove this intrinsic and the associated builtin if i64x2.eq gets
-// merged to the proposal.
-def int_wasm_eq :
- Intrinsic<[llvm_v2i64_ty],
- [llvm_v2i64_ty, llvm_v2i64_ty],
- [IntrNoMem, IntrSpeculatable]>;
-
-// TODO: Remove this after experiments have been run. Use the target-agnostic
-// int_prefetch if this becomes specified at some point.
-def int_wasm_prefetch_t :
- Intrinsic<[], [llvm_ptr_ty],
- [IntrInaccessibleMemOrArgMemOnly, IntrWillReturn,
- ReadOnly<ArgIndex<0>>, NoCapture<ArgIndex<0>>],
- "", [SDNPMemOperand]>;
-
-def int_wasm_prefetch_nt :
- Intrinsic<[], [llvm_ptr_ty],
- [IntrInaccessibleMemOrArgMemOnly, IntrWillReturn,
- ReadOnly<ArgIndex<0>>, NoCapture<ArgIndex<0>>],
- "", [SDNPMemOperand]>;
-
-// TODO: Remove these if possible if they are merged to the spec.
-def int_wasm_convert_low_signed :
- Intrinsic<[llvm_v2f64_ty], [llvm_v4i32_ty],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_convert_low_unsigned :
- Intrinsic<[llvm_v2f64_ty], [llvm_v4i32_ty],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_trunc_saturate_zero_signed :
- Intrinsic<[llvm_v4i32_ty], [llvm_v2f64_ty],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_trunc_saturate_zero_unsigned :
- Intrinsic<[llvm_v4i32_ty], [llvm_v2f64_ty],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_demote_zero :
- Intrinsic<[llvm_v4f32_ty], [llvm_v2f64_ty],
- [IntrNoMem, IntrSpeculatable]>;
-def int_wasm_promote_low :
- Intrinsic<[llvm_v2f64_ty], [llvm_v4f32_ty],
- [IntrNoMem, IntrSpeculatable]>;
-
//===----------------------------------------------------------------------===//
// Thread-local storage intrinsics
//===----------------------------------------------------------------------===//
diff --git a/llvm/include/llvm/IR/IntrinsicsX86.td b/llvm/include/llvm/IR/IntrinsicsX86.td
index bba12139976e..5848356b5b1a 100644
--- a/llvm/include/llvm/IR/IntrinsicsX86.td
+++ b/llvm/include/llvm/IR/IntrinsicsX86.td
@@ -5042,17 +5042,43 @@ let TargetPrefix = "x86" in {
[ImmArg<ArgIndex<0>>, ImmArg<ArgIndex<1>>,
ImmArg<ArgIndex<2>>]>;
// AMX - internal intrinsics
+ def int_x86_ldtilecfg_internal :
+ GCCBuiltin<"__builtin_ia32_tile_loadconfig_internal">,
+ Intrinsic<[], [llvm_ptr_ty], []>;
def int_x86_tileloadd64_internal :
GCCBuiltin<"__builtin_ia32_tileloadd64_internal">,
Intrinsic<[llvm_x86amx_ty],
[llvm_i16_ty, llvm_i16_ty, llvm_ptr_ty, llvm_i64_ty],
[]>;
+ def int_x86_tileloaddt164_internal :
+ GCCBuiltin<"__builtin_ia32_tileloaddt164_internal">,
+ Intrinsic<[llvm_x86amx_ty],
+ [llvm_i16_ty, llvm_i16_ty, llvm_ptr_ty, llvm_i64_ty],
+ []>;
def int_x86_tdpbssd_internal :
GCCBuiltin<"__builtin_ia32_tdpbssd_internal">,
Intrinsic<[llvm_x86amx_ty],
[llvm_i16_ty, llvm_i16_ty, llvm_i16_ty,
llvm_x86amx_ty, llvm_x86amx_ty,
llvm_x86amx_ty], []>;
+ def int_x86_tdpbsud_internal :
+ GCCBuiltin<"__builtin_ia32_tdpbsud_internal">,
+ Intrinsic<[llvm_x86amx_ty],
+ [llvm_i16_ty, llvm_i16_ty, llvm_i16_ty,
+ llvm_x86amx_ty, llvm_x86amx_ty,
+ llvm_x86amx_ty], []>;
+ def int_x86_tdpbusd_internal :
+ GCCBuiltin<"__builtin_ia32_tdpbusd_internal">,
+ Intrinsic<[llvm_x86amx_ty],
+ [llvm_i16_ty, llvm_i16_ty, llvm_i16_ty,
+ llvm_x86amx_ty, llvm_x86amx_ty,
+ llvm_x86amx_ty], []>;
+ def int_x86_tdpbuud_internal :
+ GCCBuiltin<"__builtin_ia32_tdpbuud_internal">,
+ Intrinsic<[llvm_x86amx_ty],
+ [llvm_i16_ty, llvm_i16_ty, llvm_i16_ty,
+ llvm_x86amx_ty, llvm_x86amx_ty,
+ llvm_x86amx_ty], []>;
def int_x86_tilestored64_internal :
GCCBuiltin<"__builtin_ia32_tilestored64_internal">,
Intrinsic<[], [llvm_i16_ty, llvm_i16_ty, llvm_ptr_ty,
@@ -5061,6 +5087,12 @@ let TargetPrefix = "x86" in {
GCCBuiltin<"__builtin_ia32_tilezero_internal">,
Intrinsic<[llvm_x86amx_ty], [llvm_i16_ty, llvm_i16_ty],
[]>;
+ def int_x86_tdpbf16ps_internal :
+ GCCBuiltin<"__builtin_ia32_tdpbf16ps_internal">,
+ Intrinsic<[llvm_x86amx_ty],
+ [llvm_i16_ty, llvm_i16_ty, llvm_i16_ty,
+ llvm_x86amx_ty, llvm_x86amx_ty,
+ llvm_x86amx_ty], []>;
}
//===----------------------------------------------------------------------===//
diff --git a/llvm/include/llvm/IR/LLVMContext.h b/llvm/include/llvm/IR/LLVMContext.h
index 8f8a35d07c64..bc605f108340 100644
--- a/llvm/include/llvm/IR/LLVMContext.h
+++ b/llvm/include/llvm/IR/LLVMContext.h
@@ -87,12 +87,13 @@ public:
/// operand bundle tags without comparing strings. Keep this in sync with
/// LLVMContext::LLVMContext().
enum : unsigned {
- OB_deopt = 0, // "deopt"
- OB_funclet = 1, // "funclet"
- OB_gc_transition = 2, // "gc-transition"
- OB_cfguardtarget = 3, // "cfguardtarget"
- OB_preallocated = 4, // "preallocated"
- OB_gc_live = 5, // "gc-live"
+ OB_deopt = 0, // "deopt"
+ OB_funclet = 1, // "funclet"
+ OB_gc_transition = 2, // "gc-transition"
+ OB_cfguardtarget = 3, // "cfguardtarget"
+ OB_preallocated = 4, // "preallocated"
+ OB_gc_live = 5, // "gc-live"
+ OB_clang_arc_attachedcall = 6, // "clang.arc.attachedcall"
};
/// getMDKindID - Return a unique non-zero ID for the specified metadata kind.
@@ -152,31 +153,10 @@ public:
void enableDebugTypeODRUniquing();
void disableDebugTypeODRUniquing();
- using InlineAsmDiagHandlerTy = void (*)(const SMDiagnostic&, void *Context,
- unsigned LocCookie);
-
/// Defines the type of a yield callback.
/// \see LLVMContext::setYieldCallback.
using YieldCallbackTy = void (*)(LLVMContext *Context, void *OpaqueHandle);
- /// setInlineAsmDiagnosticHandler - This method sets a handler that is invoked
- /// when problems with inline asm are detected by the backend. The first
- /// argument is a function pointer and the second is a context pointer that
- /// gets passed into the DiagHandler.
- ///
- /// LLVMContext doesn't take ownership or interpret either of these
- /// pointers.
- void setInlineAsmDiagnosticHandler(InlineAsmDiagHandlerTy DiagHandler,
- void *DiagContext = nullptr);
-
- /// getInlineAsmDiagnosticHandler - Return the diagnostic handler set by
- /// setInlineAsmDiagnosticHandler.
- InlineAsmDiagHandlerTy getInlineAsmDiagnosticHandler() const;
-
- /// getInlineAsmDiagnosticContext - Return the diagnostic context set by
- /// setInlineAsmDiagnosticHandler.
- void *getInlineAsmDiagnosticContext() const;
-
/// setDiagnosticHandlerCallBack - This method sets a handler call back
/// that is invoked when the backend needs to report anything to the user.
/// The first argument is a function pointer and the second is a context pointer
@@ -189,10 +169,11 @@ public:
DiagnosticHandler::DiagnosticHandlerTy DiagHandler,
void *DiagContext = nullptr, bool RespectFilters = false);
- /// setDiagnosticHandler - This method sets unique_ptr to object of DiagnosticHandler
- /// to provide custom diagnostic handling. The first argument is unique_ptr of object
- /// of type DiagnosticHandler or a derived of that. The third argument should be
- /// set to true if the handler only expects enabled diagnostics.
+ /// setDiagnosticHandler - This method sets unique_ptr to object of
+ /// DiagnosticHandler to provide custom diagnostic handling. The first
+ /// argument is unique_ptr of object of type DiagnosticHandler or a derived
+ /// of that. The second argument should be set to true if the handler only
+ /// expects enabled diagnostics.
///
/// Ownership of this pointer is moved to LLVMContextImpl.
void setDiagnosticHandler(std::unique_ptr<DiagnosticHandler> &&DH,
@@ -210,7 +191,7 @@ public:
/// setDiagnosticHandler.
const DiagnosticHandler *getDiagHandlerPtr() const;
- /// getDiagnosticHandler - transfers owenership of DiagnosticHandler unique_ptr
+ /// getDiagnosticHandler - transfers ownership of DiagnosticHandler unique_ptr
/// to caller.
std::unique_ptr<DiagnosticHandler> getDiagnosticHandler();
@@ -309,7 +290,7 @@ public:
/// be prepared to drop the erroneous construct on the floor and "not crash".
/// The generated code need not be correct. The error message will be
/// implicitly prefixed with "error: " and should not end with a ".".
- void emitError(unsigned LocCookie, const Twine &ErrorStr);
+ void emitError(uint64_t LocCookie, const Twine &ErrorStr);
void emitError(const Instruction *I, const Twine &ErrorStr);
void emitError(const Twine &ErrorStr);
@@ -324,6 +305,9 @@ public:
/// LLVMContext is used by compilation.
void setOptPassGate(OptPassGate&);
+ /// Whether typed pointers are supported. If false, all pointers are opaque.
+ bool supportsTypedPointers() const;
+
private:
// Module needs access to the add/removeModule methods.
friend class Module;
diff --git a/llvm/include/llvm/IR/LegacyPassManager.h b/llvm/include/llvm/IR/LegacyPassManager.h
index 2b87143276b9..2459f0a5450a 100644
--- a/llvm/include/llvm/IR/LegacyPassManager.h
+++ b/llvm/include/llvm/IR/LegacyPassManager.h
@@ -26,6 +26,10 @@ class Module;
namespace legacy {
+// Whether or not -debug-pass has been specified. For use to check if it's
+// specified alongside the new PM.
+bool debugPassSpecified();
+
class PassManagerImpl;
class FunctionPassManagerImpl;
diff --git a/llvm/include/llvm/IR/LegacyPassManagers.h b/llvm/include/llvm/IR/LegacyPassManagers.h
index f4fae184e428..0bcb408d4929 100644
--- a/llvm/include/llvm/IR/LegacyPassManagers.h
+++ b/llvm/include/llvm/IR/LegacyPassManagers.h
@@ -335,8 +335,8 @@ public:
/// Initialize available analysis information.
void initializeAnalysisInfo() {
AvailableAnalysis.clear();
- for (unsigned i = 0; i < PMT_Last; ++i)
- InheritedAnalysis[i] = nullptr;
+ for (auto &IA : InheritedAnalysis)
+ IA = nullptr;
}
// Return true if P preserves high level analysis used by other
@@ -392,9 +392,8 @@ public:
// Collect AvailableAnalysis from all the active Pass Managers.
void populateInheritedAnalysis(PMStack &PMS) {
unsigned Index = 0;
- for (PMStack::iterator I = PMS.begin(), E = PMS.end();
- I != E; ++I)
- InheritedAnalysis[Index++] = (*I)->getAvailableAnalysis();
+ for (PMDataManager *PMDM : PMS)
+ InheritedAnalysis[Index++] = PMDM->getAvailableAnalysis();
}
/// Set the initial size of the module if the user has specified that they
diff --git a/llvm/include/llvm/IR/MatrixBuilder.h b/llvm/include/llvm/IR/MatrixBuilder.h
index 084b1d49569e..b14127df2182 100644
--- a/llvm/include/llvm/IR/MatrixBuilder.h
+++ b/llvm/include/llvm/IR/MatrixBuilder.h
@@ -215,6 +215,22 @@ public:
return B.CreateMul(LHS, RHS);
}
+ /// Divide matrix \p LHS by scalar \p RHS. If the operands are integers, \p
+ /// IsUnsigned indicates whether UDiv or SDiv should be used.
+ Value *CreateScalarDiv(Value *LHS, Value *RHS, bool IsUnsigned) {
+ assert(LHS->getType()->isVectorTy() && !RHS->getType()->isVectorTy());
+ assert(!isa<ScalableVectorType>(LHS->getType()) &&
+ "LHS Assumed to be fixed width");
+ RHS =
+ B.CreateVectorSplat(cast<VectorType>(LHS->getType())->getElementCount(),
+ RHS, "scalar.splat");
+ return cast<VectorType>(LHS->getType())
+ ->getElementType()
+ ->isFloatingPointTy()
+ ? B.CreateFDiv(LHS, RHS)
+ : (IsUnsigned ? B.CreateUDiv(LHS, RHS) : B.CreateSDiv(LHS, RHS));
+ }
+
/// Extracts the element at (\p RowIdx, \p ColumnIdx) from \p Matrix.
Value *CreateExtractElement(Value *Matrix, Value *RowIdx, Value *ColumnIdx,
unsigned NumRows, Twine const &Name = "") {
diff --git a/llvm/include/llvm/IR/Metadata.def b/llvm/include/llvm/IR/Metadata.def
index f31be8d1bc0c..bbf349e6b508 100644
--- a/llvm/include/llvm/IR/Metadata.def
+++ b/llvm/include/llvm/IR/Metadata.def
@@ -114,6 +114,7 @@ HANDLE_SPECIALIZED_MDNODE_BRANCH(DIMacroNode)
HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIMacro)
HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIMacroFile)
HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DICommonBlock)
+HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIArgList)
HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIStringType)
HANDLE_SPECIALIZED_MDNODE_LEAF_UNIQUABLE(DIGenericSubrange)
diff --git a/llvm/include/llvm/IR/Metadata.h b/llvm/include/llvm/IR/Metadata.h
index 0b87416befe9..c5840564454e 100644
--- a/llvm/include/llvm/IR/Metadata.h
+++ b/llvm/include/llvm/IR/Metadata.h
@@ -52,6 +52,10 @@ enum LLVMConstants : uint32_t {
DEBUG_METADATA_VERSION = 3 // Current debug info version number.
};
+/// Magic number in the value profile metadata showing a target has been
+/// promoted for the instruction and shouldn't be promoted again.
+const uint64_t NOMORE_ICP_MAGICNUM = -1;
+
/// Root of the metadata hierarchy.
///
/// This is a root class for typeless data in the IR.
@@ -67,10 +71,8 @@ protected:
/// Storage flag for non-uniqued, otherwise unowned, metadata.
unsigned char Storage : 7;
- // TODO: expose remaining bits to subclasses.
-
- unsigned char ImplicitCode : 1;
+ unsigned char SubclassData1 : 1;
unsigned short SubclassData16 = 0;
unsigned SubclassData32 = 0;
@@ -82,7 +84,7 @@ public:
protected:
Metadata(unsigned ID, StorageType Storage)
- : SubclassID(ID), Storage(Storage), ImplicitCode(false) {
+ : SubclassID(ID), Storage(Storage), SubclassData1(false) {
static_assert(sizeof(*this) == 8, "Metadata fields poorly packed");
}
@@ -301,6 +303,9 @@ public:
/// Replace all uses of this with \c MD, which is allowed to be null.
void replaceAllUsesWith(Metadata *MD);
+ /// Returns the list of all DIArgList users of this.
+ SmallVector<Metadata *> getAllArgListUsers();
+
/// Resolve all uses of this.
///
/// Resolve all uses of this, turning off RAUW permanently. If \c
@@ -380,6 +385,10 @@ public:
Type *getType() const { return V->getType(); }
LLVMContext &getContext() const { return V->getContext(); }
+ SmallVector<Metadata *> getAllArgListUsers() {
+ return ReplaceableMetadataImpl::getAllArgListUsers();
+ }
+
static void handleDeletion(Value *V);
static void handleRAUW(Value *From, Value *To);
@@ -667,12 +676,18 @@ struct AAMDNodes {
/// The tag specifying the noalias scope.
MDNode *NoAlias = nullptr;
+ // Shift tbaa Metadata node to start off bytes later
+ static MDNode *shiftTBAA(MDNode *M, size_t off);
+
+ // Shift tbaa.struct Metadata node to start off bytes later
+ static MDNode *shiftTBAAStruct(MDNode *M, size_t off);
+
/// Given two sets of AAMDNodes that apply to the same pointer,
/// give the best AAMDNodes that are compatible with both (i.e. a set of
/// nodes whose allowable aliasing conclusions are a subset of those
/// allowable by both of the inputs). However, for efficiency
/// reasons, do not create any new MDNodes.
- AAMDNodes intersect(const AAMDNodes &Other) {
+ AAMDNodes intersect(const AAMDNodes &Other) const {
AAMDNodes Result;
Result.TBAA = Other.TBAA == TBAA ? TBAA : nullptr;
Result.TBAAStruct = Other.TBAAStruct == TBAAStruct ? TBAAStruct : nullptr;
@@ -680,6 +695,18 @@ struct AAMDNodes {
Result.NoAlias = Other.NoAlias == NoAlias ? NoAlias : nullptr;
return Result;
}
+
+ /// Create a new AAMDNode that describes this AAMDNode after applying a
+ /// constant offset to the start of the pointer.
+ AAMDNodes shift(size_t Offset) const {
+ AAMDNodes Result;
+ Result.TBAA = TBAA ? shiftTBAA(TBAA, Offset) : nullptr;
+ Result.TBAAStruct =
+ TBAAStruct ? shiftTBAAStruct(TBAAStruct, Offset) : nullptr;
+ Result.Scope = Scope;
+ Result.NoAlias = NoAlias;
+ return Result;
+ }
};
// Specialize DenseMapInfo for AAMDNodes.
@@ -1220,13 +1247,16 @@ public:
///
/// An iterator that transforms an \a MDNode::iterator into an iterator over a
/// particular Metadata subclass.
-template <class T>
-class TypedMDOperandIterator
- : public std::iterator<std::input_iterator_tag, T *, std::ptrdiff_t, void,
- T *> {
+template <class T> class TypedMDOperandIterator {
MDNode::op_iterator I = nullptr;
public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = T *;
+ using difference_type = std::ptrdiff_t;
+ using pointer = void;
+ using reference = T *;
+
TypedMDOperandIterator() = default;
explicit TypedMDOperandIterator(MDNode::op_iterator I) : I(I) {}
@@ -1365,9 +1395,7 @@ class NamedMDNode : public ilist_node<NamedMDNode> {
explicit NamedMDNode(const Twine &N);
- template<class T1, class T2>
- class op_iterator_impl :
- public std::iterator<std::bidirectional_iterator_tag, T2> {
+ template <class T1, class T2> class op_iterator_impl {
friend class NamedMDNode;
const NamedMDNode *Node = nullptr;
@@ -1376,6 +1404,12 @@ class NamedMDNode : public ilist_node<NamedMDNode> {
op_iterator_impl(const NamedMDNode *N, unsigned i) : Node(N), Idx(i) {}
public:
+ using iterator_category = std::bidirectional_iterator_tag;
+ using value_type = T2;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
op_iterator_impl() = default;
bool operator==(const op_iterator_impl &o) const { return Idx == o.Idx; }
diff --git a/llvm/include/llvm/IR/Module.h b/llvm/include/llvm/IR/Module.h
index 3664b275114d..81e29d9b86e8 100644
--- a/llvm/include/llvm/IR/Module.h
+++ b/llvm/include/llvm/IR/Module.h
@@ -197,6 +197,14 @@ private:
///< Format: (arch)(sub)-(vendor)-(sys0-(abi)
NamedMDSymTabType NamedMDSymTab; ///< NamedMDNode names.
DataLayout DL; ///< DataLayout associated with the module
+ StringMap<unsigned>
+ CurrentIntrinsicIds; ///< Keep track of the current unique id count for
+ ///< the specified intrinsic basename.
+ DenseMap<std::pair<Intrinsic::ID, const FunctionType *>, unsigned>
+ UniquedIntrinsicNames; ///< Keep track of uniqued names of intrinsics
+ ///< based on unnamed types. The combination of
+ ///< ID and FunctionType maps to the extension that
+ ///< is used to make the intrinsic name unique.
friend class Constant;
@@ -221,7 +229,7 @@ public:
/// Returns the number of non-debug IR instructions in the module.
/// This is equivalent to the sum of the IR instruction counts of each
/// function contained in the module.
- unsigned getInstructionCount();
+ unsigned getInstructionCount() const;
/// Get the module's original source file name. When compiling from
/// bitcode, this is taken from a bitcode record where it was recorded.
@@ -331,6 +339,11 @@ public:
std::vector<StructType *> getIdentifiedStructTypes() const;
+ /// Return a unique name for an intrinsic whose mangling is based on an
+ /// unnamed type. The Proto represents the function prototype.
+ std::string getUniqueIntrinsicName(StringRef BaseName, Intrinsic::ID Id,
+ const FunctionType *Proto);
+
/// @}
/// @name Function Accessors
/// @{
@@ -705,14 +718,19 @@ public:
}
/// An iterator for DICompileUnits that skips those marked NoDebug.
- class debug_compile_units_iterator
- : public std::iterator<std::input_iterator_tag, DICompileUnit *> {
+ class debug_compile_units_iterator {
NamedMDNode *CUs;
unsigned Idx;
void SkipNoDebugCUs();
public:
+ using iterator_category = std::input_iterator_tag;
+ using value_type = DICompileUnit *;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
explicit debug_compile_units_iterator(NamedMDNode *CUs, unsigned Idx)
: CUs(CUs), Idx(Idx) {
SkipNoDebugCUs();
@@ -805,6 +823,9 @@ public:
/// Returns the Dwarf Version by checking module flags.
unsigned getDwarfVersion() const;
+ /// Returns the DWARF format by checking module flags.
+ bool isDwarf64() const;
+
/// Returns the CodeView Version by checking module flags.
/// Returns zero if not present in module.
unsigned getCodeViewFlag() const;
@@ -865,6 +886,33 @@ public:
/// Set that PLT should be avoid for RTLib calls.
void setRtLibUseGOT();
+ /// Get/set whether synthesized functions should get the uwtable attribute.
+ bool getUwtable() const;
+ void setUwtable();
+
+ /// Get/set whether synthesized functions should get the "frame-pointer"
+ /// attribute.
+ FramePointerKind getFramePointer() const;
+ void setFramePointer(FramePointerKind Kind);
+
+ /// Get/set what kind of stack protector guard to use.
+ StringRef getStackProtectorGuard() const;
+ void setStackProtectorGuard(StringRef Kind);
+
+ /// Get/set which register to use as the stack protector guard register. The
+ /// empty string is equivalent to "global". Other values may be "tls" or
+ /// "sysreg".
+ StringRef getStackProtectorGuardReg() const;
+ void setStackProtectorGuardReg(StringRef Reg);
+
+ /// Get/set what offset from the stack protector to use.
+ int getStackProtectorGuardOffset() const;
+ void setStackProtectorGuardOffset(int Offset);
+
+ /// Get/set the stack alignment overridden from the default.
+ unsigned getOverrideStackAlignment() const;
+ void setOverrideStackAlignment(unsigned Align);
+
/// @name Utility functions for querying and setting the build SDK version
/// @{
@@ -885,10 +933,11 @@ public:
void setPartialSampleProfileRatio(const ModuleSummaryIndex &Index);
};
-/// Given "llvm.used" or "llvm.compiler.used" as a global name, collect
-/// the initializer elements of that global in Set and return the global itself.
+/// Given "llvm.used" or "llvm.compiler.used" as a global name, collect the
+/// initializer elements of that global in a SmallVector and return the global
+/// itself.
GlobalVariable *collectUsedGlobalVariables(const Module &M,
- SmallPtrSetImpl<GlobalValue *> &Set,
+ SmallVectorImpl<GlobalValue *> &Vec,
bool CompilerUsed);
/// An raw_ostream inserter for modules.
diff --git a/llvm/include/llvm/IR/ModuleSlotTracker.h b/llvm/include/llvm/IR/ModuleSlotTracker.h
index 85f8ff938366..37cfc0f07280 100644
--- a/llvm/include/llvm/IR/ModuleSlotTracker.h
+++ b/llvm/include/llvm/IR/ModuleSlotTracker.h
@@ -9,7 +9,10 @@
#ifndef LLVM_IR_MODULESLOTTRACKER_H
#define LLVM_IR_MODULESLOTTRACKER_H
+#include <functional>
#include <memory>
+#include <utility>
+#include <vector>
namespace llvm {
@@ -17,6 +20,18 @@ class Module;
class Function;
class SlotTracker;
class Value;
+class MDNode;
+
+/// Abstract interface of slot tracker storage.
+class AbstractSlotTrackerStorage {
+public:
+ virtual ~AbstractSlotTrackerStorage();
+
+ virtual unsigned getNextMetadataSlot() = 0;
+
+ virtual void createMetadataSlot(const MDNode *) = 0;
+ virtual int getMetadataSlot(const MDNode *) = 0;
+};
/// Manage lifetime of a slot tracker for printing IR.
///
@@ -36,6 +51,11 @@ class ModuleSlotTracker {
const Function *F = nullptr;
SlotTracker *Machine = nullptr;
+ std::function<void(AbstractSlotTrackerStorage *, const Module *, bool)>
+ ProcessModuleHookFn;
+ std::function<void(AbstractSlotTrackerStorage *, const Function *, bool)>
+ ProcessFunctionHookFn;
+
public:
/// Wrap a preinitialized SlotTracker.
ModuleSlotTracker(SlotTracker &Machine, const Module *M,
@@ -52,7 +72,7 @@ public:
bool ShouldInitializeAllMetadata = true);
/// Destructor to clean up storage.
- ~ModuleSlotTracker();
+ virtual ~ModuleSlotTracker();
/// Lazily creates a slot tracker.
SlotTracker *getMachine();
@@ -72,6 +92,16 @@ public:
/// this method.
/// Return -1 if the value is not in the function's SlotTracker.
int getLocalSlot(const Value *V);
+
+ void setProcessHook(
+ std::function<void(AbstractSlotTrackerStorage *, const Module *, bool)>);
+ void setProcessHook(std::function<void(AbstractSlotTrackerStorage *,
+ const Function *, bool)>);
+
+ using MachineMDNodeListType =
+ std::vector<std::pair<unsigned, const MDNode *>>;
+
+ void collectMDNodes(MachineMDNodeListType &L, unsigned LB, unsigned UB) const;
};
} // end namespace llvm
diff --git a/llvm/include/llvm/IR/ModuleSummaryIndex.h b/llvm/include/llvm/IR/ModuleSummaryIndex.h
index d5a7ad63737a..4b84f6b0408d 100644
--- a/llvm/include/llvm/IR/ModuleSummaryIndex.h
+++ b/llvm/include/llvm/IR/ModuleSummaryIndex.h
@@ -45,6 +45,8 @@
namespace llvm {
+template <class GraphType> struct GraphTraits;
+
namespace yaml {
template <typename T> struct MappingTraits;
@@ -223,7 +225,14 @@ struct ValueInfo {
return RefAndFlags.getPointer();
}
- bool isDSOLocal() const;
+ /// Returns the most constraining visibility among summaries. The
+ /// visibilities, ordered from least to most constraining, are: default,
+ /// protected and hidden.
+ GlobalValue::VisibilityTypes getELFVisibility() const;
+
+ /// Checks if all summaries are DSO local (have the flag set). When DSOLocal
+ /// propagation has been done, set the parameter to enable fast check.
+ bool isDSOLocal(bool WithDSOLocalPropagation = false) const;
/// Checks if all copies are eligible for auto-hiding (have flag set).
bool canAutoHide() const;
@@ -294,6 +303,9 @@ public:
/// types based on global summary-based analysis.
unsigned Linkage : 4;
+ /// Indicates the visibility.
+ unsigned Visibility : 2;
+
/// Indicate if the global value cannot be imported (e.g. it cannot
/// be renamed or references something that can't be renamed).
unsigned NotEligibleToImport : 1;
@@ -322,10 +334,12 @@ public:
/// Convenience Constructors
explicit GVFlags(GlobalValue::LinkageTypes Linkage,
+ GlobalValue::VisibilityTypes Visibility,
bool NotEligibleToImport, bool Live, bool IsLocal,
bool CanAutoHide)
- : Linkage(Linkage), NotEligibleToImport(NotEligibleToImport),
- Live(Live), DSOLocal(IsLocal), CanAutoHide(CanAutoHide) {}
+ : Linkage(Linkage), Visibility(Visibility),
+ NotEligibleToImport(NotEligibleToImport), Live(Live),
+ DSOLocal(IsLocal), CanAutoHide(CanAutoHide) {}
};
private:
@@ -410,6 +424,13 @@ public:
bool canAutoHide() const { return Flags.CanAutoHide; }
+ GlobalValue::VisibilityTypes getVisibility() const {
+ return (GlobalValue::VisibilityTypes)Flags.Visibility;
+ }
+ void setVisibility(GlobalValue::VisibilityTypes Vis) {
+ Flags.Visibility = (unsigned)Vis;
+ }
+
/// Flag that this global value cannot be imported.
void setNotEligibleToImport() { Flags.NotEligibleToImport = true; }
@@ -594,6 +615,7 @@ public:
return FunctionSummary(
FunctionSummary::GVFlags(
GlobalValue::LinkageTypes::AvailableExternallyLinkage,
+ GlobalValue::DefaultVisibility,
/*NotEligibleToImport=*/true, /*Live=*/true, /*IsLocal=*/false,
/*CanAutoHide=*/false),
/*NumInsts=*/0, FunctionSummary::FFlags{}, /*EntryCount=*/0,
@@ -1037,6 +1059,10 @@ private:
/// read/write only.
bool WithAttributePropagation = false;
+ /// Indicates that summary-based DSOLocal propagation has run and the flag in
+ /// every summary of a GV is synchronized.
+ bool WithDSOLocalPropagation = false;
+
/// Indicates that summary-based synthetic entry count propagation has run
bool HasSyntheticEntryCounts = false;
@@ -1192,6 +1218,9 @@ public:
WithAttributePropagation = true;
}
+ bool withDSOLocalPropagation() const { return WithDSOLocalPropagation; }
+ void setWithDSOLocalPropagation() { WithDSOLocalPropagation = true; }
+
bool isReadOnly(const GlobalVarSummary *GVS) const {
return WithAttributePropagation && GVS->maybeReadOnly();
}
@@ -1495,7 +1524,7 @@ public:
/// Print out strongly connected components for debugging.
void dumpSCCs(raw_ostream &OS);
- /// Analyze index and detect unmodified globals
+ /// Do the access attribute and DSOLocal propagation in combined index.
void propagateAttributes(const DenseSet<GlobalValue::GUID> &PreservedSymbols);
/// Checks if we can import global variable from another module.
diff --git a/llvm/include/llvm/IR/ModuleSummaryIndexYAML.h b/llvm/include/llvm/IR/ModuleSummaryIndexYAML.h
index f7fa16df1100..f09ee0efa5b2 100644
--- a/llvm/include/llvm/IR/ModuleSummaryIndexYAML.h
+++ b/llvm/include/llvm/IR/ModuleSummaryIndexYAML.h
@@ -136,7 +136,7 @@ template <> struct MappingTraits<TypeIdSummary> {
};
struct FunctionSummaryYaml {
- unsigned Linkage;
+ unsigned Linkage, Visibility;
bool NotEligibleToImport, Live, IsLocal, CanAutoHide;
std::vector<uint64_t> Refs;
std::vector<uint64_t> TypeTests;
@@ -178,6 +178,7 @@ namespace yaml {
template <> struct MappingTraits<FunctionSummaryYaml> {
static void mapping(IO &io, FunctionSummaryYaml& summary) {
io.mapOptional("Linkage", summary.Linkage);
+ io.mapOptional("Visibility", summary.Visibility);
io.mapOptional("NotEligibleToImport", summary.NotEligibleToImport);
io.mapOptional("Live", summary.Live);
io.mapOptional("Local", summary.IsLocal);
@@ -224,6 +225,7 @@ template <> struct CustomMappingTraits<GlobalValueSummaryMapTy> {
Elem.SummaryList.push_back(std::make_unique<FunctionSummary>(
GlobalValueSummary::GVFlags(
static_cast<GlobalValue::LinkageTypes>(FSum.Linkage),
+ static_cast<GlobalValue::VisibilityTypes>(FSum.Visibility),
FSum.NotEligibleToImport, FSum.Live, FSum.IsLocal,
FSum.CanAutoHide),
/*NumInsts=*/0, FunctionSummary::FFlags{}, /*EntryCount=*/0, Refs,
@@ -244,7 +246,7 @@ template <> struct CustomMappingTraits<GlobalValueSummaryMapTy> {
for (auto &VI : FSum->refs())
Refs.push_back(VI.getGUID());
FSums.push_back(FunctionSummaryYaml{
- FSum->flags().Linkage,
+ FSum->flags().Linkage, FSum->flags().Visibility,
static_cast<bool>(FSum->flags().NotEligibleToImport),
static_cast<bool>(FSum->flags().Live),
static_cast<bool>(FSum->flags().DSOLocal),
diff --git a/llvm/include/llvm/IR/Operator.h b/llvm/include/llvm/IR/Operator.h
index acfacbd6c74e..d0bce742cc96 100644
--- a/llvm/include/llvm/IR/Operator.h
+++ b/llvm/include/llvm/IR/Operator.h
@@ -14,6 +14,7 @@
#ifndef LLVM_IR_OPERATOR_H
#define LLVM_IR_OPERATOR_H
+#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/IR/Constants.h"
@@ -239,6 +240,9 @@ public:
void operator&=(const FastMathFlags &OtherFlags) {
Flags &= OtherFlags.Flags;
}
+ void operator|=(const FastMathFlags &OtherFlags) {
+ Flags |= OtherFlags.Flags;
+ }
};
/// Utility class for floating point operations which can have
@@ -484,6 +488,14 @@ public:
inline op_iterator idx_end() { return op_end(); }
inline const_op_iterator idx_end() const { return op_end(); }
+ inline iterator_range<op_iterator> indices() {
+ return make_range(idx_begin(), idx_end());
+ }
+
+ inline iterator_range<const_op_iterator> indices() const {
+ return make_range(idx_begin(), idx_end());
+ }
+
Value *getPointerOperand() {
return getOperand(0);
}
@@ -540,7 +552,7 @@ public:
}
unsigned countNonConstantIndices() const {
- return count_if(make_range(idx_begin(), idx_end()), [](const Use& use) {
+ return count_if(indices(), [](const Use& use) {
return !isa<ConstantInt>(*use);
});
}
@@ -568,6 +580,17 @@ public:
bool accumulateConstantOffset(
const DataLayout &DL, APInt &Offset,
function_ref<bool(Value &, APInt &)> ExternalAnalysis = nullptr) const;
+
+ static bool accumulateConstantOffset(
+ Type *SourceType, ArrayRef<const Value *> Index, const DataLayout &DL,
+ APInt &Offset,
+ function_ref<bool(Value &, APInt &)> ExternalAnalysis = nullptr);
+
+ /// Collect the offset of this GEP as a map of Values to their associated
+ /// APInt multipliers, as well as a total Constant Offset.
+ bool collectOffset(const DataLayout &DL, unsigned BitWidth,
+ MapVector<Value *, APInt> &VariableOffsets,
+ APInt &ConstantOffset) const;
};
class PtrToIntOperator
diff --git a/llvm/include/llvm/IR/PassInstrumentation.h b/llvm/include/llvm/IR/PassInstrumentation.h
index 291f324b159a..8e81f30b2289 100644
--- a/llvm/include/llvm/IR/PassInstrumentation.h
+++ b/llvm/include/llvm/IR/PassInstrumentation.h
@@ -54,6 +54,7 @@
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringMap.h"
#include <type_traits>
+#include <vector>
namespace llvm {
@@ -81,6 +82,8 @@ public:
using AfterPassInvalidatedFunc = void(StringRef, const PreservedAnalyses &);
using BeforeAnalysisFunc = void(StringRef, Any);
using AfterAnalysisFunc = void(StringRef, Any);
+ using AnalysisInvalidatedFunc = void(StringRef, Any);
+ using AnalysesClearedFunc = void(StringRef);
public:
PassInstrumentationCallbacks() {}
@@ -123,6 +126,16 @@ public:
AfterAnalysisCallbacks.emplace_back(std::move(C));
}
+ template <typename CallableT>
+ void registerAnalysisInvalidatedCallback(CallableT C) {
+ AnalysisInvalidatedCallbacks.emplace_back(std::move(C));
+ }
+
+ template <typename CallableT>
+ void registerAnalysesClearedCallback(CallableT C) {
+ AnalysesClearedCallbacks.emplace_back(std::move(C));
+ }
+
/// Add a class name to pass name mapping for use by pass instrumentation.
void addClassToPassName(StringRef ClassName, StringRef PassName);
/// Get the pass name for a given pass class name.
@@ -152,6 +165,12 @@ private:
/// These are run on analyses that have been run.
SmallVector<llvm::unique_function<AfterAnalysisFunc>, 4>
AfterAnalysisCallbacks;
+ /// These are run on analyses that have been invalidated.
+ SmallVector<llvm::unique_function<AnalysisInvalidatedFunc>, 4>
+ AnalysisInvalidatedCallbacks;
+ /// These are run on analyses that have been cleared.
+ SmallVector<llvm::unique_function<AnalysesClearedFunc>, 4>
+ AnalysesClearedCallbacks;
StringMap<std::string> ClassToPassName;
};
@@ -256,6 +275,24 @@ public:
C(Analysis.name(), llvm::Any(&IR));
}
+ /// AnalysisInvalidated instrumentation point - takes \p Analysis instance
+ /// that has just been invalidated and constant reference to IR it operated
+ /// on.
+ template <typename IRUnitT, typename PassT>
+ void runAnalysisInvalidated(const PassT &Analysis, const IRUnitT &IR) const {
+ if (Callbacks)
+ for (auto &C : Callbacks->AnalysisInvalidatedCallbacks)
+ C(Analysis.name(), llvm::Any(&IR));
+ }
+
+ /// AnalysesCleared instrumentation point - takes name of IR that analyses
+ /// operated on.
+ void runAnalysesCleared(StringRef Name) const {
+ if (Callbacks)
+ for (auto &C : Callbacks->AnalysesClearedCallbacks)
+ C(Name);
+ }
+
/// Handle invalidation from the pass manager when PassInstrumentation
/// is used as the result of PassInstrumentationAnalysis.
///
diff --git a/llvm/include/llvm/IR/PassManager.h b/llvm/include/llvm/IR/PassManager.h
index c669565aa33b..8e592bfb0c78 100644
--- a/llvm/include/llvm/IR/PassManager.h
+++ b/llvm/include/llvm/IR/PassManager.h
@@ -467,21 +467,16 @@ class PassManager : public PassInfoMixin<
PassManager<IRUnitT, AnalysisManagerT, ExtraArgTs...>> {
public:
/// Construct a pass manager.
- ///
- /// If \p DebugLogging is true, we'll log our progress to llvm::dbgs().
- explicit PassManager(bool DebugLogging = false) : DebugLogging(DebugLogging) {}
+ explicit PassManager() {}
// FIXME: These are equivalent to the default move constructor/move
// assignment. However, using = default triggers linker errors due to the
// explicit instantiations below. Find away to use the default and remove the
// duplicated code here.
- PassManager(PassManager &&Arg)
- : Passes(std::move(Arg.Passes)),
- DebugLogging(std::move(Arg.DebugLogging)) {}
+ PassManager(PassManager &&Arg) : Passes(std::move(Arg.Passes)) {}
PassManager &operator=(PassManager &&RHS) {
Passes = std::move(RHS.Passes);
- DebugLogging = std::move(RHS.DebugLogging);
return *this;
}
@@ -499,9 +494,6 @@ public:
detail::getAnalysisResult<PassInstrumentationAnalysis>(
AM, IR, std::tuple<ExtraArgTs...>(ExtraArgs...));
- if (DebugLogging)
- dbgs() << "Starting " << getTypeName<IRUnitT>() << " pass manager run.\n";
-
for (unsigned Idx = 0, Size = Passes.size(); Idx != Size; ++Idx) {
auto *P = Passes[Idx].get();
@@ -542,20 +534,17 @@ public:
// need to inspect each one individually.
PA.preserveSet<AllAnalysesOn<IRUnitT>>();
- if (DebugLogging)
- dbgs() << "Finished " << getTypeName<IRUnitT>() << " pass manager run.\n";
-
return PA;
}
template <typename PassT>
std::enable_if_t<!std::is_same<PassT, PassManager>::value>
- addPass(PassT Pass) {
+ addPass(PassT &&Pass) {
using PassModelT =
detail::PassModel<IRUnitT, PassT, PreservedAnalyses, AnalysisManagerT,
ExtraArgTs...>;
- Passes.emplace_back(new PassModelT(std::move(Pass)));
+ Passes.emplace_back(new PassModelT(std::forward<PassT>(Pass)));
}
/// When adding a pass manager pass that has the same type as this pass
@@ -580,9 +569,6 @@ protected:
detail::PassConcept<IRUnitT, AnalysisManagerT, ExtraArgTs...>;
std::vector<std::unique_ptr<PassConceptT>> Passes;
-
- /// Flag indicating whether we should do debug logging.
- bool DebugLogging;
};
extern template class PassManager<Module>;
@@ -746,9 +732,7 @@ public:
};
/// Construct an empty analysis manager.
- ///
- /// If \p DebugLogging is true, we'll log our progress to llvm::dbgs().
- AnalysisManager(bool DebugLogging = false);
+ AnalysisManager();
AnalysisManager(AnalysisManager &&);
AnalysisManager &operator=(AnalysisManager &&);
@@ -860,16 +844,6 @@ public:
return true;
}
- /// Invalidate a specific analysis pass for an IR unit.
- ///
- /// Note that the analysis result can disregard invalidation, if it determines
- /// it is in fact still valid.
- template <typename PassT> void invalidate(IRUnitT &IR) {
- assert(AnalysisPasses.count(PassT::ID()) &&
- "This analysis pass was not registered prior to being invalidated");
- invalidateImpl(PassT::ID(), IR);
- }
-
/// Invalidate cached analyses for an IR unit.
///
/// Walk through all of the analyses pertaining to this unit of IR and
@@ -904,20 +878,6 @@ private:
return RI == AnalysisResults.end() ? nullptr : &*RI->second->second;
}
- /// Invalidate a pass result for a IR unit.
- void invalidateImpl(AnalysisKey *ID, IRUnitT &IR) {
- typename AnalysisResultMapT::iterator RI =
- AnalysisResults.find({ID, &IR});
- if (RI == AnalysisResults.end())
- return;
-
- if (DebugLogging)
- dbgs() << "Invalidating analysis: " << this->lookUpPass(ID).name()
- << " on " << IR.getName() << "\n";
- AnalysisResultLists[&IR].erase(RI->second);
- AnalysisResults.erase(RI);
- }
-
/// Map type from analysis pass ID to pass concept pointer.
using AnalysisPassMapT =
DenseMap<AnalysisKey *, std::unique_ptr<PassConceptT>>;
@@ -934,9 +894,6 @@ private:
/// Map from an analysis ID and IR unit to a particular cached
/// analysis result.
AnalysisResultMapT AnalysisResults;
-
- /// Indicates whether we log to \c llvm::dbgs().
- bool DebugLogging;
};
extern template class AnalysisManager<Module>;
@@ -1249,13 +1206,13 @@ private:
/// templated adaptor.
template <typename FunctionPassT>
ModuleToFunctionPassAdaptor
-createModuleToFunctionPassAdaptor(FunctionPassT Pass) {
+createModuleToFunctionPassAdaptor(FunctionPassT &&Pass) {
using PassModelT =
detail::PassModel<Function, FunctionPassT, PreservedAnalyses,
FunctionAnalysisManager>;
return ModuleToFunctionPassAdaptor(
- std::make_unique<PassModelT>(std::move(Pass)));
+ std::make_unique<PassModelT>(std::forward<FunctionPassT>(Pass)));
}
/// A utility pass template to force an analysis result to be available.
@@ -1327,7 +1284,8 @@ struct InvalidateAllAnalysesPass : PassInfoMixin<InvalidateAllAnalysesPass> {
template <typename PassT>
class RepeatedPass : public PassInfoMixin<RepeatedPass<PassT>> {
public:
- RepeatedPass(int Count, PassT P) : Count(Count), P(std::move(P)) {}
+ RepeatedPass(int Count, PassT &&P)
+ : Count(Count), P(std::forward<PassT>(P)) {}
template <typename IRUnitT, typename AnalysisManagerT, typename... Ts>
PreservedAnalyses run(IRUnitT &IR, AnalysisManagerT &AM, Ts &&... Args) {
@@ -1360,8 +1318,8 @@ private:
};
template <typename PassT>
-RepeatedPass<PassT> createRepeatedPass(int Count, PassT P) {
- return RepeatedPass<PassT>(Count, std::move(P));
+RepeatedPass<PassT> createRepeatedPass(int Count, PassT &&P) {
+ return RepeatedPass<PassT>(Count, std::forward<PassT>(P));
}
} // end namespace llvm
diff --git a/llvm/include/llvm/IR/PassManagerImpl.h b/llvm/include/llvm/IR/PassManagerImpl.h
index 71a86d1efb15..bb4fbe98b082 100644
--- a/llvm/include/llvm/IR/PassManagerImpl.h
+++ b/llvm/include/llvm/IR/PassManagerImpl.h
@@ -5,7 +5,7 @@
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
-///
+/// \file
/// Provides implementations for PassManager and AnalysisManager template
/// methods. These classes should be explicitly instantiated for any IR unit,
/// and files doing the explicit instantiation should include this header.
@@ -20,9 +20,7 @@
namespace llvm {
template <typename IRUnitT, typename... ExtraArgTs>
-inline AnalysisManager<IRUnitT, ExtraArgTs...>::AnalysisManager(
- bool DebugLogging)
- : DebugLogging(DebugLogging) {}
+inline AnalysisManager<IRUnitT, ExtraArgTs...>::AnalysisManager() {}
template <typename IRUnitT, typename... ExtraArgTs>
inline AnalysisManager<IRUnitT, ExtraArgTs...>::AnalysisManager(
@@ -37,8 +35,8 @@ template <typename IRUnitT, typename... ExtraArgTs>
inline void
AnalysisManager<IRUnitT, ExtraArgTs...>::clear(IRUnitT &IR,
llvm::StringRef Name) {
- if (DebugLogging)
- dbgs() << "Clearing all analysis results for: " << Name << "\n";
+ if (auto *PI = getCachedResult<PassInstrumentationAnalysis>(IR))
+ PI->runAnalysesCleared(Name);
auto ResultsListI = AnalysisResultLists.find(&IR);
if (ResultsListI == AnalysisResultLists.end())
@@ -133,9 +131,8 @@ inline void AnalysisManager<IRUnitT, ExtraArgTs...>::invalidate(
continue;
}
- if (DebugLogging)
- dbgs() << "Invalidating analysis: " << this->lookUpPass(ID).name()
- << " on " << IR.getName() << "\n";
+ if (auto *PI = getCachedResult<PassInstrumentationAnalysis>(IR))
+ PI->runAnalysisInvalidated(this->lookUpPass(ID), IR);
I = ResultsList.erase(I);
AnalysisResults.erase({ID, &IR});
diff --git a/llvm/include/llvm/IR/PassManagerInternal.h b/llvm/include/llvm/IR/PassManagerInternal.h
index 986ed0b5a7ac..8f42e69f3063 100644
--- a/llvm/include/llvm/IR/PassManagerInternal.h
+++ b/llvm/include/llvm/IR/PassManagerInternal.h
@@ -28,7 +28,7 @@ template <typename IRUnitT> class AllAnalysesOn;
template <typename IRUnitT, typename... ExtraArgTs> class AnalysisManager;
class PreservedAnalyses;
-/// Implementation details of the pass manager interfaces.
+// Implementation details of the pass manager interfaces.
namespace detail {
/// Template for the abstract base class used to dispatch
diff --git a/llvm/include/llvm/IR/PatternMatch.h b/llvm/include/llvm/IR/PatternMatch.h
index 166ad23de969..cbd429f84ee4 100644
--- a/llvm/include/llvm/IR/PatternMatch.h
+++ b/llvm/include/llvm/IR/PatternMatch.h
@@ -88,8 +88,52 @@ inline class_match<BinaryOperator> m_BinOp() {
/// Matches any compare instruction and ignore it.
inline class_match<CmpInst> m_Cmp() { return class_match<CmpInst>(); }
-/// Match an arbitrary undef constant.
-inline class_match<UndefValue> m_Undef() { return class_match<UndefValue>(); }
+struct undef_match {
+ static bool check(const Value *V) {
+ if (isa<UndefValue>(V))
+ return true;
+
+ const auto *CA = dyn_cast<ConstantAggregate>(V);
+ if (!CA)
+ return false;
+
+ SmallPtrSet<const ConstantAggregate *, 8> Seen;
+ SmallVector<const ConstantAggregate *, 8> Worklist;
+
+ // Either UndefValue, PoisonValue, or an aggregate that only contains
+ // these is accepted by matcher.
+ // CheckValue returns false if CA cannot satisfy this constraint.
+ auto CheckValue = [&](const ConstantAggregate *CA) {
+ for (const Value *Op : CA->operand_values()) {
+ if (isa<UndefValue>(Op))
+ continue;
+
+ const auto *CA = dyn_cast<ConstantAggregate>(Op);
+ if (!CA)
+ return false;
+ if (Seen.insert(CA).second)
+ Worklist.emplace_back(CA);
+ }
+
+ return true;
+ };
+
+ if (!CheckValue(CA))
+ return false;
+
+ while (!Worklist.empty()) {
+ if (!CheckValue(Worklist.pop_back_val()))
+ return false;
+ }
+ return true;
+ }
+ template <typename ITy> bool match(ITy *V) { return check(V); }
+};
+
+/// Match an arbitrary undef constant. This matches poison as well.
+/// If this is an aggregate and contains a non-aggregate element that is
+/// neither undef nor poison, the aggregate is not matched.
+inline auto m_Undef() { return undef_match(); }
/// Match an arbitrary poison constant.
inline class_match<PoisonValue> m_Poison() { return class_match<PoisonValue>(); }
@@ -708,6 +752,10 @@ inline bind_ty<UnaryOperator> m_UnOp(UnaryOperator *&I) { return I; }
inline bind_ty<BinaryOperator> m_BinOp(BinaryOperator *&I) { return I; }
/// Match a with overflow intrinsic, capturing it if we match.
inline bind_ty<WithOverflowInst> m_WithOverflowInst(WithOverflowInst *&I) { return I; }
+inline bind_ty<const WithOverflowInst>
+m_WithOverflowInst(const WithOverflowInst *&I) {
+ return I;
+}
/// Match a Constant, capturing the value if we match.
inline bind_ty<Constant> m_Constant(Constant *&C) { return C; }
@@ -763,7 +811,12 @@ template <typename Class> struct deferredval_ty {
template <typename ITy> bool match(ITy *const V) { return V == Val; }
};
-/// A commutative-friendly version of m_Specific().
+/// Like m_Specific(), but works if the specific value to match is determined
+/// as part of the same match() expression. For example:
+/// m_Add(m_Value(X), m_Specific(X)) is incorrect, because m_Specific() will
+/// bind X before the pattern match starts.
+/// m_Add(m_Value(X), m_Deferred(X)) is correct, and will check against
+/// whichever value m_Value(X) populated.
inline deferredval_ty<Value> m_Deferred(Value *const &V) { return V; }
inline deferredval_ty<const Value> m_Deferred(const Value *const &V) {
return V;
@@ -1115,10 +1168,10 @@ struct OverflowingBinaryOp_match {
if (auto *Op = dyn_cast<OverflowingBinaryOperator>(V)) {
if (Op->getOpcode() != Opcode)
return false;
- if (WrapFlags & OverflowingBinaryOperator::NoUnsignedWrap &&
+ if ((WrapFlags & OverflowingBinaryOperator::NoUnsignedWrap) &&
!Op->hasNoUnsignedWrap())
return false;
- if (WrapFlags & OverflowingBinaryOperator::NoSignedWrap &&
+ if ((WrapFlags & OverflowingBinaryOperator::NoSignedWrap) &&
!Op->hasNoSignedWrap())
return false;
return L.match(Op->getOperand(0)) && R.match(Op->getOperand(1));
@@ -1703,6 +1756,7 @@ m_Br(const Cond_t &C, const TrueBlock_t &T, const FalseBlock_t &F) {
template <typename CmpInst_t, typename LHS_t, typename RHS_t, typename Pred_t,
bool Commutable = false>
struct MaxMin_match {
+ using PredType = Pred_t;
LHS_t L;
RHS_t R;
@@ -1731,10 +1785,10 @@ struct MaxMin_match {
return false;
// At this point we have a select conditioned on a comparison. Check that
// it is the values returned by the select that are being compared.
- Value *TrueVal = SI->getTrueValue();
- Value *FalseVal = SI->getFalseValue();
- Value *LHS = Cmp->getOperand(0);
- Value *RHS = Cmp->getOperand(1);
+ auto *TrueVal = SI->getTrueValue();
+ auto *FalseVal = SI->getFalseValue();
+ auto *LHS = Cmp->getOperand(0);
+ auto *RHS = Cmp->getOperand(1);
if ((TrueVal != LHS || FalseVal != RHS) &&
(TrueVal != RHS || FalseVal != LHS))
return false;
@@ -2055,6 +2109,14 @@ template <Intrinsic::ID IntrID> inline IntrinsicID_match m_Intrinsic() {
return IntrinsicID_match(IntrID);
}
+/// Matches MaskedLoad Intrinsic.
+template <typename Opnd0, typename Opnd1, typename Opnd2, typename Opnd3>
+inline typename m_Intrinsic_Ty<Opnd0, Opnd1, Opnd2, Opnd3>::Ty
+m_MaskedLoad(const Opnd0 &Op0, const Opnd1 &Op1, const Opnd2 &Op2,
+ const Opnd3 &Op3) {
+ return m_Intrinsic<Intrinsic::masked_load>(Op0, Op1, Op2, Op3);
+}
+
template <Intrinsic::ID IntrID, typename T0>
inline typename m_Intrinsic_Ty<T0>::Ty m_Intrinsic(const T0 &Op0) {
return m_CombineAnd(m_Intrinsic<IntrID>(), m_Argument<0>(Op0));
@@ -2314,9 +2376,13 @@ template <int Ind, typename Opnd_t> struct ExtractValue_match {
ExtractValue_match(const Opnd_t &V) : Val(V) {}
template <typename OpTy> bool match(OpTy *V) {
- if (auto *I = dyn_cast<ExtractValueInst>(V))
- return I->getNumIndices() == 1 && I->getIndices()[0] == Ind &&
- Val.match(I->getAggregateOperand());
+ if (auto *I = dyn_cast<ExtractValueInst>(V)) {
+ // If Ind is -1, don't inspect indices
+ if (Ind != -1 &&
+ !(I->getNumIndices() == 1 && I->getIndices()[0] == (unsigned)Ind))
+ return false;
+ return Val.match(I->getAggregateOperand());
+ }
return false;
}
};
@@ -2328,6 +2394,13 @@ inline ExtractValue_match<Ind, Val_t> m_ExtractValue(const Val_t &V) {
return ExtractValue_match<Ind, Val_t>(V);
}
+/// Match an ExtractValue instruction with any index.
+/// For example m_ExtractValue(...)
+template <typename Val_t>
+inline ExtractValue_match<-1, Val_t> m_ExtractValue(const Val_t &V) {
+ return ExtractValue_match<-1, Val_t>(V);
+}
+
/// Matcher for a single index InsertValue instruction.
template <int Ind, typename T0, typename T1> struct InsertValue_match {
T0 Op0;
@@ -2356,14 +2429,6 @@ inline InsertValue_match<Ind, Val_t, Elt_t> m_InsertValue(const Val_t &Val,
/// `ptrtoint(gep <vscale x 1 x i8>, <vscale x 1 x i8>* null, i32 1>`
/// under the right conditions determined by DataLayout.
struct VScaleVal_match {
-private:
- template <typename Base, typename Offset>
- inline BinaryOp_match<Base, Offset, Instruction::GetElementPtr>
- m_OffsetGep(const Base &B, const Offset &O) {
- return BinaryOp_match<Base, Offset, Instruction::GetElementPtr>(B, O);
- }
-
-public:
const DataLayout &DL;
VScaleVal_match(const DataLayout &DL) : DL(DL) {}
@@ -2371,12 +2436,16 @@ public:
if (m_Intrinsic<Intrinsic::vscale>().match(V))
return true;
- if (m_PtrToInt(m_OffsetGep(m_Zero(), m_SpecificInt(1))).match(V)) {
- Type *PtrTy = cast<Operator>(V)->getOperand(0)->getType();
- auto *DerefTy = PtrTy->getPointerElementType();
- if (isa<ScalableVectorType>(DerefTy) &&
- DL.getTypeAllocSizeInBits(DerefTy).getKnownMinSize() == 8)
- return true;
+ Value *Ptr;
+ if (m_PtrToInt(m_Value(Ptr)).match(V)) {
+ if (auto *GEP = dyn_cast<GEPOperator>(Ptr)) {
+ auto *DerefTy = GEP->getSourceElementType();
+ if (GEP->getNumIndices() == 1 && isa<ScalableVectorType>(DerefTy) &&
+ m_Zero().match(GEP->getPointerOperand()) &&
+ m_SpecificInt(1).match(GEP->idx_begin()->get()) &&
+ DL.getTypeAllocSizeInBits(DerefTy).getKnownMinSize() == 8)
+ return true;
+ }
}
return false;
@@ -2431,6 +2500,9 @@ m_LogicalAnd(const LHS &L, const RHS &R) {
return LogicalOp_match<LHS, RHS, Instruction::And>(L, R);
}
+/// Matches L && R where L and R are arbitrary values.
+inline auto m_LogicalAnd() { return m_LogicalAnd(m_Value(), m_Value()); }
+
/// Matches L || R either in the form of L | R or L ? true : R.
/// Note that the latter form is poison-blocking.
template <typename LHS, typename RHS>
@@ -2439,6 +2511,11 @@ m_LogicalOr(const LHS &L, const RHS &R) {
return LogicalOp_match<LHS, RHS, Instruction::Or>(L, R);
}
+/// Matches L || R where L and R are arbitrary values.
+inline auto m_LogicalOr() {
+ return m_LogicalOr(m_Value(), m_Value());
+}
+
} // end namespace PatternMatch
} // end namespace llvm
diff --git a/llvm/include/llvm/IR/PseudoProbe.h b/llvm/include/llvm/IR/PseudoProbe.h
index e0370c264102..53100f049910 100644
--- a/llvm/include/llvm/IR/PseudoProbe.h
+++ b/llvm/include/llvm/IR/PseudoProbe.h
@@ -16,28 +16,43 @@
#include "llvm/ADT/Optional.h"
#include <cassert>
#include <cstdint>
+#include <limits>
namespace llvm {
class Instruction;
+class BasicBlock;
constexpr const char *PseudoProbeDescMetadataName = "llvm.pseudo_probe_desc";
enum class PseudoProbeType { Block = 0, IndirectCall, DirectCall };
+enum class PseudoProbeAttributes {
+ Reserved = 0x1, // Reserved for future use.
+};
+
+// The saturated distrution factor representing 100% for block probes.
+constexpr static uint64_t PseudoProbeFullDistributionFactor =
+ std::numeric_limits<uint64_t>::max();
+
struct PseudoProbeDwarfDiscriminator {
+public:
// The following APIs encodes/decodes per-probe information to/from a
// 32-bit integer which is organized as:
// [2:0] - 0x7, this is reserved for regular discriminator,
// see DWARF discriminator encoding rule
// [18:3] - probe id
- // [25:19] - reserved
+ // [25:19] - probe distribution factor
// [28:26] - probe type, see PseudoProbeType
// [31:29] - reserved for probe attributes
- static uint32_t packProbeData(uint32_t Index, uint32_t Type) {
+ static uint32_t packProbeData(uint32_t Index, uint32_t Type, uint32_t Flags,
+ uint32_t Factor) {
assert(Index <= 0xFFFF && "Probe index too big to encode, exceeding 2^16");
assert(Type <= 0x7 && "Probe type too big to encode, exceeding 7");
- return (Index << 3) | (Type << 26) | 0x7;
+ assert(Flags <= 0x7);
+ assert(Factor <= 100 &&
+ "Probe distribution factor too big to encode, exceeding 100");
+ return (Index << 3) | (Factor << 19) | (Type << 26) | 0x7;
}
static uint32_t extractProbeIndex(uint32_t Value) {
@@ -51,16 +66,28 @@ struct PseudoProbeDwarfDiscriminator {
static uint32_t extractProbeAttributes(uint32_t Value) {
return (Value >> 29) & 0x7;
}
+
+ static uint32_t extractProbeFactor(uint32_t Value) {
+ return (Value >> 19) & 0x7F;
+ }
+
+ // The saturated distrution factor representing 100% for callsites.
+ constexpr static uint8_t FullDistributionFactor = 100;
};
struct PseudoProbe {
uint32_t Id;
uint32_t Type;
uint32_t Attr;
+ // Distribution factor that estimates the portion of the real execution count.
+ // A saturated distribution factor stands for 1.0 or 100%. A pesudo probe has
+ // a factor with the value ranged from 0.0 to 1.0.
+ float Factor;
};
Optional<PseudoProbe> extractProbe(const Instruction &Inst);
+void setProbeDistributionFactor(Instruction &Inst, float Factor);
} // end namespace llvm
#endif // LLVM_IR_PSEUDOPROBE_H
diff --git a/llvm/include/llvm/IR/ReplaceConstant.h b/llvm/include/llvm/IR/ReplaceConstant.h
index 753f6d558ef8..4d95143a4bd2 100644
--- a/llvm/include/llvm/IR/ReplaceConstant.h
+++ b/llvm/include/llvm/IR/ReplaceConstant.h
@@ -16,6 +16,8 @@
#include "llvm/IR/Constants.h"
#include "llvm/IR/Instruction.h"
+#include <map>
+#include <vector>
namespace llvm {
@@ -23,6 +25,36 @@ namespace llvm {
/// it before \p Instr.
Instruction *createReplacementInstr(ConstantExpr *CE, Instruction *Instr);
+/// The given instruction \p I contains given constant expression \p CE as one
+/// of its operands, possibly nested within constant expression trees. Convert
+/// all reachable paths from contant expression operands of \p I to \p CE into
+/// corresponding instructions, insert them before \p I, update operands of \p I
+/// accordingly, and if required, return all such converted instructions at
+/// \p Insts.
+void convertConstantExprsToInstructions(
+ Instruction *I, ConstantExpr *CE,
+ SmallPtrSetImpl<Instruction *> *Insts = nullptr);
+
+/// The given instruction \p I contains constant expression CE within the
+/// constant expression trees of it`s constant expression operands, and
+/// \p CEPaths holds all the reachable paths (to CE) from such constant
+/// expression trees of \p I. Convert constant expressions within these paths
+/// into corresponding instructions, insert them before \p I, update operands of
+/// \p I accordingly, and if required, return all such converted instructions at
+/// \p Insts.
+void convertConstantExprsToInstructions(
+ Instruction *I,
+ std::map<Use *, std::vector<std::vector<ConstantExpr *>>> &CEPaths,
+ SmallPtrSetImpl<Instruction *> *Insts = nullptr);
+
+/// Given an instruction \p I which uses given constant expression \p CE as
+/// operand, either directly or nested within other constant expressions, return
+/// all reachable paths from the constant expression operands of \p I to \p CE,
+/// and return collected paths at \p CEPaths.
+void collectConstantExprPaths(
+ Instruction *I, ConstantExpr *CE,
+ std::map<Use *, std::vector<std::vector<ConstantExpr *>>> &CEPaths);
+
} // end namespace llvm
#endif // LLVM_IR_REPLACECONSTANT_H
diff --git a/llvm/include/llvm/IR/SafepointIRVerifier.h b/llvm/include/llvm/IR/SafepointIRVerifier.h
index ec5527954adc..76b147e690be 100644
--- a/llvm/include/llvm/IR/SafepointIRVerifier.h
+++ b/llvm/include/llvm/IR/SafepointIRVerifier.h
@@ -15,8 +15,8 @@
//
//===----------------------------------------------------------------------===//
-#ifndef LLVM_IR_SAFEPOINT_IR_VERIFIER
-#define LLVM_IR_SAFEPOINT_IR_VERIFIER
+#ifndef LLVM_IR_SAFEPOINTIRVERIFIER_H
+#define LLVM_IR_SAFEPOINTIRVERIFIER_H
#include "llvm/IR/PassManager.h"
@@ -43,4 +43,4 @@ public:
};
}
-#endif // LLVM_IR_SAFEPOINT_IR_VERIFIER
+#endif // LLVM_IR_SAFEPOINTIRVERIFIER_H
diff --git a/llvm/include/llvm/IR/Statepoint.h b/llvm/include/llvm/IR/Statepoint.h
index 6ce15839df46..c6251b9bf5c9 100644
--- a/llvm/include/llvm/IR/Statepoint.h
+++ b/llvm/include/llvm/IR/Statepoint.h
@@ -52,6 +52,8 @@ enum class StatepointFlags {
MaskAll = 3 ///< A bitmask that includes all valid flags.
};
+// These two are defined in IntrinsicInst since they're part of the
+// IntrinsicInst class hierarchy.
class GCRelocateInst;
class GCResultInst;
@@ -203,105 +205,10 @@ public:
/// path of invoke.
inline std::vector<const GCRelocateInst *> getGCRelocates() const;
- /// Get the experimental_gc_result call tied to this statepoint if there is
- /// one, otherwise return nullptr.
- const GCResultInst *getGCResult() const {
- for (auto *U : users())
- if (auto *GRI = dyn_cast<GCResultInst>(U))
- return GRI;
- return nullptr;
- }
-};
-
-/// Common base class for representing values projected from a statepoint.
-/// Currently, the only projections available are gc.result and gc.relocate.
-class GCProjectionInst : public IntrinsicInst {
-public:
- static bool classof(const IntrinsicInst *I) {
- return I->getIntrinsicID() == Intrinsic::experimental_gc_relocate ||
- I->getIntrinsicID() == Intrinsic::experimental_gc_result;
- }
-
- static bool classof(const Value *V) {
- return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
- }
-
- /// Return true if this relocate is tied to the invoke statepoint.
- /// This includes relocates which are on the unwinding path.
- bool isTiedToInvoke() const {
- const Value *Token = getArgOperand(0);
-
- return isa<LandingPadInst>(Token) || isa<InvokeInst>(Token);
- }
-
- /// The statepoint with which this gc.relocate is associated.
- const GCStatepointInst *getStatepoint() const {
- const Value *Token = getArgOperand(0);
-
- // This takes care both of relocates for call statepoints and relocates
- // on normal path of invoke statepoint.
- if (!isa<LandingPadInst>(Token))
- return cast<GCStatepointInst>(Token);
-
- // This relocate is on exceptional path of an invoke statepoint
- const BasicBlock *InvokeBB =
- cast<Instruction>(Token)->getParent()->getUniquePredecessor();
-
- assert(InvokeBB && "safepoints should have unique landingpads");
- assert(InvokeBB->getTerminator() &&
- "safepoint block should be well formed");
-
- return cast<GCStatepointInst>(InvokeBB->getTerminator());
- }
-};
-
-/// Represents calls to the gc.relocate intrinsic.
-class GCRelocateInst : public GCProjectionInst {
-public:
- static bool classof(const IntrinsicInst *I) {
- return I->getIntrinsicID() == Intrinsic::experimental_gc_relocate;
- }
-
- static bool classof(const Value *V) {
- return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
- }
-
- /// The index into the associate statepoint's argument list
- /// which contains the base pointer of the pointer whose
- /// relocation this gc.relocate describes.
- unsigned getBasePtrIndex() const {
- return cast<ConstantInt>(getArgOperand(1))->getZExtValue();
- }
-
- /// The index into the associate statepoint's argument list which
- /// contains the pointer whose relocation this gc.relocate describes.
- unsigned getDerivedPtrIndex() const {
- return cast<ConstantInt>(getArgOperand(2))->getZExtValue();
- }
-
- Value *getBasePtr() const {
- if (auto Opt = getStatepoint()->getOperandBundle(LLVMContext::OB_gc_live))
- return *(Opt->Inputs.begin() + getBasePtrIndex());
- return *(getStatepoint()->arg_begin() + getBasePtrIndex());
- }
-
- Value *getDerivedPtr() const {
- if (auto Opt = getStatepoint()->getOperandBundle(LLVMContext::OB_gc_live))
- return *(Opt->Inputs.begin() + getDerivedPtrIndex());
- return *(getStatepoint()->arg_begin() + getDerivedPtrIndex());
- }
-};
-
-/// Represents calls to the gc.result intrinsic.
-class GCResultInst : public GCProjectionInst {
-public:
- static bool classof(const IntrinsicInst *I) {
- return I->getIntrinsicID() == Intrinsic::experimental_gc_result;
- }
-
- static bool classof(const Value *V) {
- return isa<IntrinsicInst>(V) && classof(cast<IntrinsicInst>(V));
- }
+ /// Returns pair of boolean flags. The first one is true is there is
+ /// a gc.result intrinsic in the same block as statepoint. The second flag
+ /// is true if there is an intrinsic outside of the block with statepoint.
+ inline std::pair<bool, bool> getGCResultLocality() const;
};
std::vector<const GCRelocateInst *> GCStatepointInst::getGCRelocates() const {
@@ -329,6 +236,18 @@ std::vector<const GCRelocateInst *> GCStatepointInst::getGCRelocates() const {
return Result;
}
+std::pair<bool, bool> GCStatepointInst::getGCResultLocality() const {
+ std::pair<bool, bool> Res(false, false);
+ for (auto *U : users())
+ if (auto *GRI = dyn_cast<GCResultInst>(U)) {
+ if (GRI->getParent() == this->getParent())
+ Res.first = true;
+ else
+ Res.second = true;
+ }
+ return Res;
+}
+
/// Call sites that get wrapped by a gc.statepoint (currently only in
/// RewriteStatepointsForGC and potentially in other passes in the future) can
/// have attributes that describe properties of gc.statepoint call they will be
diff --git a/llvm/include/llvm/IR/Type.h b/llvm/include/llvm/IR/Type.h
index 756c69dd6ae9..430bc34a47e7 100644
--- a/llvm/include/llvm/IR/Type.h
+++ b/llvm/include/llvm/IR/Type.h
@@ -28,7 +28,6 @@
namespace llvm {
-template<class GraphType> struct GraphTraits;
class IntegerType;
class LLVMContext;
class PointerType;
@@ -228,6 +227,9 @@ public:
/// True if this is an instance of PointerType.
bool isPointerTy() const { return getTypeID() == PointerTyID; }
+ /// True if this is an instance of an opaque PointerType.
+ bool isOpaquePointerTy() const;
+
/// Return true if this is a pointer type or a vector of pointer types.
bool isPtrOrPtrVectorTy() const { return getScalarType()->isPointerTy(); }
@@ -308,6 +310,10 @@ public:
/// ppc long double), this method returns -1.
int getFPMantissaWidth() const;
+ /// Return whether the type is IEEE compatible, as defined by the eponymous
+ /// method in APFloat.
+ bool isIEEE() const { return APFloat::getZero(getFltSemantics()).isIEEE(); }
+
/// If this is a vector type, return the element type, otherwise return
/// 'this'.
inline Type *getScalarType() const {
@@ -376,6 +382,11 @@ public:
return ContainedTys[0];
}
+ /// Given vector type, change the element type,
+ /// whilst keeping the old number of elements.
+ /// For non-vectors simply returns \p EltTy.
+ inline Type *getWithNewType(Type *EltTy) const;
+
/// Given an integer or vector type, change the lane bitwidth to NewBitwidth,
/// whilst keeping the old number of lanes.
inline Type *getWithNewBitWidth(unsigned NewBitWidth) const;
@@ -475,6 +486,7 @@ public:
/// Return a pointer to the current type. This is equivalent to
/// PointerType::get(Foo, AddrSpace).
+ /// TODO: Remove this after opaque pointer transition is complete.
PointerType *getPointerTo(unsigned AddrSpace = 0) const;
private:
diff --git a/llvm/include/llvm/IR/VPIntrinsics.def b/llvm/include/llvm/IR/VPIntrinsics.def
index 981548c6dde9..92e2cd3a2783 100644
--- a/llvm/include/llvm/IR/VPIntrinsics.def
+++ b/llvm/include/llvm/IR/VPIntrinsics.def
@@ -33,8 +33,9 @@
#endif
// Register a new VP SDNode and begin its property scope.
-// When the SDNode scope is nested within a VP intrinsic scope, it is implicitly registered as the canonical SDNode for this VP intrinsic.
-// There is one VP intrinsic that maps directly to one SDNode that goes by the
+// When the SDNode scope is nested within a VP intrinsic scope, it is
+// implicitly registered as the canonical SDNode for this VP intrinsic. There
+// is one VP intrinsic that maps directly to one SDNode that goes by the
// same name. Since the operands are also the same, we open the property
// scopes for both the VPIntrinsic and the SDNode at once.
// \p SDOPC The SelectionDAG Node id (eg VP_ADD).
@@ -88,6 +89,28 @@ END_REGISTER_VP_SDNODE(SDOPC)
#define HANDLE_VP_TO_OPC(OPC)
#endif
+// Whether the intrinsic may have a rounding mode or exception behavior operand
+// bundle.
+// \p HASROUND '1' if the intrinsic can have a rounding mode operand bundle,
+// '0' otherwise.
+// \p HASEXCEPT '1' if the intrinsic can have an exception behavior operand
+// bundle, '0' otherwise.
+// \p INTRINID The constrained fp intrinsic this VP intrinsic corresponds to.
+#ifndef HANDLE_VP_TO_CONSTRAINEDFP
+#define HANDLE_VP_TO_CONSTRAINEDFP(HASROUND, HASEXCEPT, INTRINID)
+#endif
+
+// Map this VP intrinsic to its canonical functional intrinsic.
+#ifndef HANDLE_VP_TO_INTRIN
+#define HANDLE_VP_TO_INTRIN(ID)
+#endif
+
+// This VP Intrinsic is a memory operation
+// The pointer arg is at POINTERPOS and the data arg is at DATAPOS.
+#ifndef HANDLE_VP_IS_MEMOP
+#define HANDLE_VP_IS_MEMOP(VPID, POINTERPOS, DATAPOS)
+#endif
+
/// } Property Macros
///// Integer Arithmetic {
@@ -146,6 +169,68 @@ HELPER_REGISTER_BINARY_INT_VP(vp_xor, VP_XOR, Xor)
///// } Integer Arithmetic
+///// Floating-Point Arithmetic {
+
+// Specialized helper macro for floating-point binary operators
+// <operation>(%x, %y, %mask, %evl).
+#ifdef HELPER_REGISTER_BINARY_FP_VP
+#error \
+ "The internal helper macro HELPER_REGISTER_BINARY_FP_VP is already defined!"
+#endif
+#define HELPER_REGISTER_BINARY_FP_VP(OPSUFFIX, SDOPC, OPC) \
+ BEGIN_REGISTER_VP(vp_##OPSUFFIX, 2, 3, SDOPC, -1) \
+ HANDLE_VP_TO_OPC(OPC) \
+ HANDLE_VP_TO_CONSTRAINEDFP(1, 1, experimental_constrained_##OPSUFFIX) \
+ END_REGISTER_VP(vp_##OPSUFFIX, SDOPC)
+
+// llvm.vp.fadd(x,y,mask,vlen)
+HELPER_REGISTER_BINARY_FP_VP(fadd, VP_FADD, FAdd)
+
+// llvm.vp.fsub(x,y,mask,vlen)
+HELPER_REGISTER_BINARY_FP_VP(fsub, VP_FSUB, FSub)
+
+// llvm.vp.fmul(x,y,mask,vlen)
+HELPER_REGISTER_BINARY_FP_VP(fmul, VP_FMUL, FMul)
+
+// llvm.vp.fdiv(x,y,mask,vlen)
+HELPER_REGISTER_BINARY_FP_VP(fdiv, VP_FDIV, FDiv)
+
+// llvm.vp.frem(x,y,mask,vlen)
+HELPER_REGISTER_BINARY_FP_VP(frem, VP_FREM, FRem)
+
+#undef HELPER_REGISTER_BINARY_FP_VP
+
+///// } Floating-Point Arithmetic
+
+///// Memory Operations {
+// llvm.vp.store(ptr,val,mask,vlen)
+BEGIN_REGISTER_VP(vp_store, 2, 3, VP_STORE, 0)
+HANDLE_VP_TO_OPC(Store)
+HANDLE_VP_TO_INTRIN(masked_store)
+HANDLE_VP_IS_MEMOP(vp_store, 1, 0)
+END_REGISTER_VP(vp_store, VP_STORE)
+
+// llvm.vp.scatter(ptr,val,mask,vlen)
+BEGIN_REGISTER_VP(vp_scatter, 2, 3, VP_SCATTER, 0)
+HANDLE_VP_TO_INTRIN(masked_scatter)
+HANDLE_VP_IS_MEMOP(vp_scatter, 1, 0)
+END_REGISTER_VP(vp_scatter, VP_SCATTER)
+
+// llvm.vp.load(ptr,mask,vlen)
+BEGIN_REGISTER_VP(vp_load, 1, 2, VP_LOAD, -1)
+HANDLE_VP_TO_OPC(Load)
+HANDLE_VP_TO_INTRIN(masked_load)
+HANDLE_VP_IS_MEMOP(vp_load, 0, None)
+END_REGISTER_VP(vp_load, VP_LOAD)
+
+// llvm.vp.gather(ptr,mask,vlen)
+BEGIN_REGISTER_VP(vp_gather, 1, 2, VP_GATHER, -1)
+HANDLE_VP_TO_INTRIN(masked_gather)
+HANDLE_VP_IS_MEMOP(vp_gather, 0, None)
+END_REGISTER_VP(vp_gather, VP_GATHER)
+
+///// } Memory Operations
+
#undef BEGIN_REGISTER_VP
#undef BEGIN_REGISTER_VP_INTRINSIC
@@ -154,3 +239,6 @@ HELPER_REGISTER_BINARY_INT_VP(vp_xor, VP_XOR, Xor)
#undef END_REGISTER_VP_INTRINSIC
#undef END_REGISTER_VP_SDNODE
#undef HANDLE_VP_TO_OPC
+#undef HANDLE_VP_TO_CONSTRAINEDFP
+#undef HANDLE_VP_TO_INTRIN
+#undef HANDLE_VP_IS_MEMOP
diff --git a/llvm/include/llvm/IR/Value.h b/llvm/include/llvm/IR/Value.h
index 2a9912d46c89..2ad1c9e8c300 100644
--- a/llvm/include/llvm/IR/Value.h
+++ b/llvm/include/llvm/IR/Value.h
@@ -123,8 +123,7 @@ protected:
private:
template <typename UseT> // UseT == 'Use' or 'const Use'
- class use_iterator_impl
- : public std::iterator<std::forward_iterator_tag, UseT *> {
+ class use_iterator_impl {
friend class Value;
UseT *U;
@@ -132,6 +131,12 @@ private:
explicit use_iterator_impl(UseT *u) : U(u) {}
public:
+ using iterator_category = std::forward_iterator_tag;
+ using value_type = UseT *;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
use_iterator_impl() : U() {}
bool operator==(const use_iterator_impl &x) const { return U == x.U; }
@@ -162,13 +167,18 @@ private:
};
template <typename UserTy> // UserTy == 'User' or 'const User'
- class user_iterator_impl
- : public std::iterator<std::forward_iterator_tag, UserTy *> {
+ class user_iterator_impl {
use_iterator_impl<Use> UI;
explicit user_iterator_impl(Use *U) : UI(U) {}
friend class Value;
public:
+ using iterator_category = std::forward_iterator_tag;
+ using value_type = UserTy *;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
user_iterator_impl() = default;
bool operator==(const user_iterator_impl &x) const { return UI == x.UI; }
@@ -301,27 +311,15 @@ public:
/// Go through the uses list for this definition and make each use point
/// to "V" if the callback ShouldReplace returns true for the given Use.
/// Unlike replaceAllUsesWith() this function does not support basic block
- /// values or constant users.
+ /// values.
void replaceUsesWithIf(Value *New,
- llvm::function_ref<bool(Use &U)> ShouldReplace) {
- assert(New && "Value::replaceUsesWithIf(<null>) is invalid!");
- assert(New->getType() == getType() &&
- "replaceUses of value with new value of different type!");
-
- for (use_iterator UI = use_begin(), E = use_end(); UI != E;) {
- Use &U = *UI;
- ++UI;
- if (!ShouldReplace(U))
- continue;
- U.set(New);
- }
- }
+ llvm::function_ref<bool(Use &U)> ShouldReplace);
/// replaceUsesOutsideBlock - Go through the uses list for this definition and
/// make each use point to "V" instead of "this" when the use is outside the
/// block. 'This's use list is expected to have at least one element.
/// Unlike replaceAllUsesWith() this function does not support basic block
- /// values or constant users.
+ /// values.
void replaceUsesOutsideBlock(Value *V, BasicBlock *BB);
//----------------------------------------------------------------------
@@ -460,6 +458,9 @@ public:
/// This is specialized because it is a common request and does not require
/// traversing the whole use list.
Use *getSingleUndroppableUse();
+ const Use *getSingleUndroppableUse() const {
+ return const_cast<Value *>(this)->getSingleUndroppableUse();
+ }
/// Return true if there this value.
///
@@ -552,6 +553,9 @@ public:
/// Return true if there is metadata referencing this value.
bool isUsedByMetadata() const { return IsUsedByMD; }
+ // Return true if this value is only transitively referenced by metadata.
+ bool isTransitiveUsedByMetadataOnly() const;
+
protected:
/// Get the current metadata attachments for the given kind, if any.
///
@@ -651,15 +655,16 @@ public:
->stripPointerCastsSameRepresentation());
}
- /// Strip off pointer casts, all-zero GEPs and invariant group info.
+ /// Strip off pointer casts, all-zero GEPs, single-argument phi nodes and
+ /// invariant group info.
///
/// Returns the original uncasted value. If this is called on a non-pointer
/// value, it returns 'this'. This function should be used only in
/// Alias analysis.
- const Value *stripPointerCastsAndInvariantGroups() const;
- Value *stripPointerCastsAndInvariantGroups() {
+ const Value *stripPointerCastsForAliasAnalysis() const;
+ Value *stripPointerCastsForAliasAnalysis() {
return const_cast<Value *>(static_cast<const Value *>(this)
- ->stripPointerCastsAndInvariantGroups());
+ ->stripPointerCastsForAliasAnalysis());
}
/// Strip off pointer casts and all-constant inbounds GEPs.
@@ -734,13 +739,24 @@ public:
static_cast<const Value *>(this)->stripInBoundsOffsets(Func));
}
+ /// Return true if the memory object referred to by V can be freed in the
+ /// scope for which the SSA value defining the allocation is statically
+ /// defined. E.g. deallocation after the static scope of a value does not
+ /// count, but a deallocation before that does.
+ bool canBeFreed() const;
+
/// Returns the number of bytes known to be dereferenceable for the
/// pointer value.
///
/// If CanBeNull is set by this function the pointer can either be null or be
/// dereferenceable up to the returned number of bytes.
+ ///
+ /// If CanBeFreed is true, the pointer is known to be dereferenceable at
+ /// point of definition only. Caller must prove that allocation is not
+ /// deallocated between point of definition and use.
uint64_t getPointerDereferenceableBytes(const DataLayout &DL,
- bool &CanBeNull) const;
+ bool &CanBeNull,
+ bool &CanBeFreed) const;
/// Returns an alignment of the pointer value.
///
diff --git a/llvm/include/llvm/IR/ValueMap.h b/llvm/include/llvm/IR/ValueMap.h
index a5a06b76dbf6..67f275cc06d9 100644
--- a/llvm/include/llvm/IR/ValueMap.h
+++ b/llvm/include/llvm/IR/ValueMap.h
@@ -323,17 +323,19 @@ struct DenseMapInfo<ValueMapCallbackVH<KeyT, ValueT, Config>> {
}
};
-template<typename DenseMapT, typename KeyT>
-class ValueMapIterator :
- public std::iterator<std::forward_iterator_tag,
- std::pair<KeyT, typename DenseMapT::mapped_type>,
- ptrdiff_t> {
+template <typename DenseMapT, typename KeyT> class ValueMapIterator {
using BaseT = typename DenseMapT::iterator;
using ValueT = typename DenseMapT::mapped_type;
BaseT I;
public:
+ using iterator_category = std::forward_iterator_tag;
+ using value_type = std::pair<KeyT, typename DenseMapT::mapped_type>;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
ValueMapIterator() : I() {}
ValueMapIterator(BaseT I) : I(I) {}
@@ -375,17 +377,19 @@ public:
}
};
-template<typename DenseMapT, typename KeyT>
-class ValueMapConstIterator :
- public std::iterator<std::forward_iterator_tag,
- std::pair<KeyT, typename DenseMapT::mapped_type>,
- ptrdiff_t> {
+template <typename DenseMapT, typename KeyT> class ValueMapConstIterator {
using BaseT = typename DenseMapT::const_iterator;
using ValueT = typename DenseMapT::mapped_type;
BaseT I;
public:
+ using iterator_category = std::forward_iterator_tag;
+ using value_type = std::pair<KeyT, typename DenseMapT::mapped_type>;
+ using difference_type = std::ptrdiff_t;
+ using pointer = value_type *;
+ using reference = value_type &;
+
ValueMapConstIterator() : I() {}
ValueMapConstIterator(BaseT I) : I(I) {}
ValueMapConstIterator(ValueMapIterator<DenseMapT, KeyT> Other)
diff --git a/llvm/include/llvm/IR/ValueSymbolTable.h b/llvm/include/llvm/IR/ValueSymbolTable.h
index 105ea73857af..43d00268f4b2 100644
--- a/llvm/include/llvm/IR/ValueSymbolTable.h
+++ b/llvm/include/llvm/IR/ValueSymbolTable.h
@@ -60,18 +60,23 @@ public:
/// @name Constructors
/// @{
- ValueSymbolTable() : vmap(0) {}
+ ValueSymbolTable(int MaxNameSize = -1) : vmap(0), MaxNameSize(MaxNameSize) {}
~ValueSymbolTable();
-/// @}
-/// @name Accessors
-/// @{
+ /// @}
+ /// @name Accessors
+ /// @{
/// This method finds the value with the given \p Name in the
/// symbol table.
/// @returns the value associated with the \p Name
/// Lookup a named Value.
- Value *lookup(StringRef Name) const { return vmap.lookup(Name); }
+ Value *lookup(StringRef Name) const {
+ if (MaxNameSize > -1 && Name.size() > (unsigned)MaxNameSize)
+ Name = Name.substr(0, std::max(1u, (unsigned)MaxNameSize));
+
+ return vmap.lookup(Name);
+ }
/// @returns true iff the symbol table is empty
/// Determine if the symbol table is empty
@@ -128,6 +133,8 @@ private:
/// @{
ValueMap vmap; ///< The map that holds the symbol table.
+ int MaxNameSize; ///< The maximum size for each name. If the limit is
+ ///< exceeded, the name is capped.
mutable uint32_t LastUnique = 0; ///< Counter for tracking unique names
/// @}