about summary refs log tree commit diff
path: root/llvm/lib/Target/AArch64/AArch64ISelLowering.h
diff options
context:
space:
mode:
Diffstat (limited to 'llvm/lib/Target/AArch64/AArch64ISelLowering.h')
-rw-r--r--  llvm/lib/Target/AArch64/AArch64ISelLowering.h  58
1 files changed, 55 insertions, 3 deletions
diff --git a/llvm/lib/Target/AArch64/AArch64ISelLowering.h b/llvm/lib/Target/AArch64/AArch64ISelLowering.h
index 00fa96bc4e6d..672dfc4fcbc0 100644
--- a/llvm/lib/Target/AArch64/AArch64ISelLowering.h
+++ b/llvm/lib/Target/AArch64/AArch64ISelLowering.h
@@ -155,6 +155,14 @@ enum NodeType : unsigned {
SMAXV,
UMAXV,
+ SMAXV_PRED,
+ UMAXV_PRED,
+ SMINV_PRED,
+ UMINV_PRED,
+ ORV_PRED,
+ EORV_PRED,
+ ANDV_PRED,
+
// Vector bitwise negation
NOT,
@@ -196,6 +204,43 @@ enum NodeType : unsigned {
UUNPKHI,
UUNPKLO,
+ CLASTA_N,
+ CLASTB_N,
+ LASTA,
+ LASTB,
+ REV,
+ TBL,
+
+ INSR,
+ PTEST,
+ PTRUE,
+
+ // Unsigned gather loads.
+ GLD1,
+ GLD1_SCALED,
+ GLD1_UXTW,
+ GLD1_SXTW,
+ GLD1_UXTW_SCALED,
+ GLD1_SXTW_SCALED,
+ GLD1_IMM,
+
+ // Signed gather loads
+ GLD1S,
+ GLD1S_SCALED,
+ GLD1S_UXTW,
+ GLD1S_SXTW,
+ GLD1S_UXTW_SCALED,
+ GLD1S_SXTW_SCALED,
+ GLD1S_IMM,
+ // Scatter store
+ SST1,
+ SST1_SCALED,
+ SST1_UXTW,
+ SST1_SXTW,
+ SST1_UXTW_SCALED,
+ SST1_SXTW_SCALED,
+ SST1_IMM,
+
// NEON Load/Store with post-increment base updates
LD2post = ISD::FIRST_TARGET_MEMORY_OPCODE,
LD3post,
@@ -224,8 +269,10 @@ enum NodeType : unsigned {
STG,
STZG,
ST2G,
- STZ2G
+ STZ2G,
+ LDP,
+ STP
};
} // end namespace AArch64ISD
@@ -396,7 +443,9 @@ public:
/// Return true if an FMA operation is faster than a pair of fmul and fadd
/// instructions. fmuladd intrinsics will be expanded to FMAs when this method
/// returns true, otherwise fmuladd is expanded to fmul + fadd.
- bool isFMAFasterThanFMulAndFAdd(EVT VT) const override;
+ bool isFMAFasterThanFMulAndFAdd(const MachineFunction &MF,
+ EVT VT) const override;
+ bool isFMAFasterThanFMulAndFAdd(const Function &F, Type *Ty) const override;
const MCPhysReg *getScratchRegisters(CallingConv::ID CC) const override;
@@ -648,6 +697,8 @@ private:
SDValue LowerGlobalTLSAddress(SDValue Op, SelectionDAG &DAG) const;
SDValue LowerDarwinGlobalTLSAddress(SDValue Op, SelectionDAG &DAG) const;
SDValue LowerELFGlobalTLSAddress(SDValue Op, SelectionDAG &DAG) const;
+ SDValue LowerELFTLSLocalExec(const GlobalValue *GV, SDValue ThreadBase,
+ const SDLoc &DL, SelectionDAG &DAG) const;
SDValue LowerELFTLSDescCallSeq(SDValue SymAddr, const SDLoc &DL,
SelectionDAG &DAG) const;
SDValue LowerWindowsGlobalTLSAddress(SDValue Op, SelectionDAG &DAG) const;
@@ -713,7 +764,7 @@ private:
unsigned combineRepeatedFPDivisors() const override;
ConstraintType getConstraintType(StringRef Constraint) const override;
- Register getRegisterByName(const char* RegName, EVT VT,
+ Register getRegisterByName(const char* RegName, LLT VT,
const MachineFunction &MF) const override;
/// Examine constraint string and operand type and determine a weight value.
@@ -741,6 +792,7 @@ private:
return TargetLowering::getInlineAsmMemConstraint(ConstraintCode);
}
+ bool isVectorLoadExtDesirable(SDValue ExtVal) const override;
bool isUsedByReturnOnly(SDNode *N, SDValue &Chain) const override;
bool mayBeEmittedAsTailCall(const CallInst *CI) const override;
bool getIndexedAddressParts(SDNode *Op, SDValue &Base, SDValue &Offset,