Lines matching defs:X86TargetLowering, restricted to /freebsd-13-stable/contrib/llvm-project/llvm/lib/Target/X86/

103 X86TargetLowering::X86TargetLowering(const X86TargetMachine &TM,
2031 bool X86TargetLowering::useLoadStackGuardNode() const {
2035 bool X86TargetLowering::useStackGuardXorFP() const {
2040 SDValue X86TargetLowering::emitStackGuardXorFP(SelectionDAG &DAG, SDValue Val,
2049 X86TargetLowering::getPreferredVectorAction(MVT VT) const {
2095 MVT X86TargetLowering::getRegisterTypeForCallingConv(LLVMContext &Context,
2113 unsigned X86TargetLowering::getNumRegistersForCallingConv(LLVMContext &Context,
2131 unsigned X86TargetLowering::getVectorTypeBreakdownForCallingConv(
2159 EVT X86TargetLowering::getSetCCResultType(const DataLayout &DL,
2219 unsigned X86TargetLowering::getByValTypeAlignment(Type *Ty,
2239 EVT X86TargetLowering::getOptimalMemOpType(
2285 bool X86TargetLowering::isSafeMemOpType(MVT VT) const {
2293 bool X86TargetLowering::allowsMisalignedMemoryAccesses(
2328 unsigned X86TargetLowering::getJumpTableEncoding() const {
2338 bool X86TargetLowering::useSoftFloat() const {
2342 void X86TargetLowering::markLibCallAttributes(MachineFunction *MF, unsigned CC,
2371 X86TargetLowering::LowerCustomJumpTableEntry(const MachineJumpTableInfo *MJTI,
2382 SDValue X86TargetLowering::getPICJumpTableRelocBase(SDValue Table,
2394 const MCExpr *X86TargetLowering::
2406 X86TargetLowering::findRepresentativeClass(const TargetRegisterInfo *TRI,
2432 unsigned X86TargetLowering::getAddressSpace() const {
2450 Value *X86TargetLowering::getIRStackGuard(IRBuilder<> &IRB) const {
2469 void X86TargetLowering::insertSSPDeclarations(Module &M) const {
2493 Value *X86TargetLowering::getSDagStackGuard(const Module &M) const {
2502 Function *X86TargetLowering::getSSPStackGuardCheck(const Module &M) const {
2511 Value *X86TargetLowering::getSafeStackPointerLocation(IRBuilder<> &IRB) const {
2534 bool X86TargetLowering::isNoopAddrSpaceCast(unsigned SrcAS,
2549 bool X86TargetLowering::CanLowerReturn(
2557 const MCPhysReg *X86TargetLowering::getScratchRegisters(CallingConv::ID) const {
2621 X86TargetLowering::LowerReturn(SDValue Chain, CallingConv::ID CallConv,
2824 bool X86TargetLowering::isUsedByReturnOnly(SDNode *N, SDValue &Chain) const {
2861 EVT X86TargetLowering::getTypeForExtReturn(LLVMContext &Context, EVT VT,
2975 SDValue X86TargetLowering::LowerCallResult(
3159 bool X86TargetLowering::mayBeEmittedAsTailCall(const CallInst *CI) const {
3171 X86TargetLowering::LowerMemArgument(SDValue Chain, CallingConv::ID CallConv,
3548 SDValue X86TargetLowering::LowerFormalArguments(
3772 SDValue X86TargetLowering::LowerMemOpCallTo(SDValue Chain, SDValue StackPtr,
3792 SDValue X86TargetLowering::EmitTailCallLoadRetAddr(
3836 X86TargetLowering::LowerCall(TargetLowering::CallLoweringInfo &CLI,
4426 X86TargetLowering::GetAlignedArgumentStackSize(const unsigned StackSize,
4531 bool X86TargetLowering::IsEligibleForTailCallOptimization(
4725 X86TargetLowering::createFastISel(FunctionLoweringInfo &funcInfo,
4816 SDValue X86TargetLowering::getReturnAddressFrameIndex(SelectionDAG &DAG) const {
5021 bool X86TargetLowering::getTgtMemIntrinsic(IntrinsicInfo &Info,
5088 bool X86TargetLowering::isFPImmLegal(const APFloat &Imm, EVT VT,
5097 bool X86TargetLowering::shouldReduceLoadWidth(SDNode *Load,
5133 bool X86TargetLowering::shouldConvertConstantLoadToIntImm(const APInt &Imm,
5143 bool X86TargetLowering::reduceSelectOfFPConstantLoads(EVT CmpOpVT) const {
5152 bool X86TargetLowering::convertSelectOfConstantsToMath(EVT VT) const {
5161 bool X86TargetLowering::decomposeMulByConstant(LLVMContext &Context, EVT VT,
5190 bool X86TargetLowering::isExtractSubvectorCheap(EVT ResVT, EVT SrcVT,
5204 bool X86TargetLowering::shouldScalarizeBinop(SDValue VecOp) const {
5223 bool X86TargetLowering::shouldFormOverflowOp(unsigned Opcode, EVT VT,
5231 bool X86TargetLowering::isCheapToSpeculateCttz() const {
5236 bool X86TargetLowering::isCheapToSpeculateCtlz() const {
5241 bool X86TargetLowering::isLoadBitCastBeneficial(EVT LoadVT, EVT BitcastVT,
5259 bool X86TargetLowering::canMergeStoresTo(unsigned AddressSpace, EVT MemVT,
5277 bool X86TargetLowering::isCtlzFast() const {
5281 bool X86TargetLowering::isMaskAndCmp0FoldingBeneficial(
5286 bool X86TargetLowering::hasAndNotCompare(SDValue Y) const {
5302 bool X86TargetLowering::hasAndNot(SDValue Y) const {
5319 bool X86TargetLowering::hasBitTest(SDValue X, SDValue Y) const {
5323 bool X86TargetLowering::
5346 bool X86TargetLowering::shouldFoldConstantShiftPairToMask(
5364 bool X86TargetLowering::shouldFoldMaskToVariableShiftPair(SDValue Y) const {
5378 bool X86TargetLowering::shouldExpandShift(SelectionDAG &DAG,
5386 bool X86TargetLowering::shouldSplatInsEltVarIndex(EVT VT) const {
5392 MVT X86TargetLowering::hasFastEqualityCompare(unsigned NumBits) const {
6344 X86TargetLowering::getTargetConstantFromLoad(LoadSDNode *LD) const {
10109 X86TargetLowering::LowerBUILD_VECTOR(SDValue Op, SelectionDAG &DAG) const {
18001 SDValue X86TargetLowering::LowerVSELECT(SDValue Op, SelectionDAG &DAG) const {
18188 X86TargetLowering::LowerEXTRACT_VECTOR_ELT(SDValue Op,
18358 SDValue X86TargetLowering::LowerINSERT_VECTOR_ELT(SDValue Op,
18582 unsigned X86TargetLowering::getGlobalWrapperKind(
18607 X86TargetLowering::LowerConstantPool(SDValue Op, SelectionDAG &DAG) const {
18629 SDValue X86TargetLowering::LowerJumpTable(SDValue Op, SelectionDAG &DAG) const {
18650 SDValue X86TargetLowering::LowerExternalSymbol(SDValue Op,
18656 X86TargetLowering::LowerBlockAddress(SDValue Op, SelectionDAG &DAG) const {
18678 SDValue X86TargetLowering::LowerGlobalOrExternal(SDValue Op, SelectionDAG &DAG,
18750 X86TargetLowering::LowerGlobalAddress(SDValue Op, SelectionDAG &DAG) const {
18902 X86TargetLowering::LowerGlobalTLSAddress(SDValue Op, SelectionDAG &DAG) const {
19414 SDValue X86TargetLowering::LowerSINT_TO_FP(SDValue Op,
19502 std::pair<SDValue, SDValue> X86TargetLowering::BuildFILD(
19920 SDValue X86TargetLowering::LowerUINT_TO_FP(SDValue Op,
20054 X86TargetLowering::FP_TO_INTHelper(SDValue Op, SelectionDAG &DAG,
20533 SDValue X86TargetLowering::LowerTRUNCATE(SDValue Op, SelectionDAG &DAG) const {
20688 SDValue X86TargetLowering::LowerFP_TO_INT(SDValue Op, SelectionDAG &DAG) const {
20936 SDValue X86TargetLowering::LowerLRINT_LLRINT(SDValue Op,
20948 SDValue X86TargetLowering::LRINT_LLRINTHelper(SDNode *N,
20992 SDValue X86TargetLowering::LowerFP_EXTEND(SDValue Op, SelectionDAG &DAG) const {
21015 SDValue X86TargetLowering::LowerFP_ROUND(SDValue Op, SelectionDAG &DAG) const {
21179 SDValue X86TargetLowering::lowerFaddFsub(SDValue Op, SelectionDAG &DAG) const {
21801 bool X86TargetLowering::isFsqrtCheap(SDValue Op, SelectionDAG &DAG) const {
21815 SDValue X86TargetLowering::getSqrtEstimate(SDValue Op,
21848 SDValue X86TargetLowering::getRecipEstimate(SDValue Op, SelectionDAG &DAG,
21886 unsigned X86TargetLowering::combineRepeatedFPDivisors() const {
21891 X86TargetLowering::BuildSDIVPow2(SDNode *N, const APInt &Divisor,
22708 SDValue X86TargetLowering::emitFlagsForSetcc(SDValue Op0, SDValue Op1,
22778 SDValue X86TargetLowering::LowerSETCC(SDValue Op, SelectionDAG &DAG) const {
22838 SDValue X86TargetLowering::LowerSETCCCARRY(SDValue Op, SelectionDAG &DAG) const {
22948 SDValue X86TargetLowering::LowerSELECT(SDValue Op, SelectionDAG &DAG) const {
23669 SDValue X86TargetLowering::LowerBRCOND(SDValue Op, SelectionDAG &DAG) const {
23792 X86TargetLowering::LowerDYNAMIC_STACKALLOC(SDValue Op,
23887 SDValue X86TargetLowering::LowerVASTART(SDValue Op, SelectionDAG &DAG) const {
23944 SDValue X86TargetLowering::LowerVAARG(SDValue Op, SelectionDAG &DAG) const {
24347 SDValue X86TargetLowering::LowerINTRINSIC_WO_CHAIN(SDValue Op,
25801 SDValue X86TargetLowering::LowerRETURNADDR(SDValue Op,
25828 SDValue X86TargetLowering::LowerADDROFRETURNADDR(SDValue Op,
25834 SDValue X86TargetLowering::LowerFRAMEADDR(SDValue Op, SelectionDAG &DAG) const {
25874 Register X86TargetLowering::getRegisterByName(const char* RegName, LLT VT,
25905 SDValue X86TargetLowering::LowerFRAME_TO_ARGS_OFFSET(SDValue Op,
25911 Register X86TargetLowering::getExceptionPointerRegister(
25919 Register X86TargetLowering::getExceptionSelectorRegister(
25926 bool X86TargetLowering::needsFixedCatchObjects() const {
25930 SDValue X86TargetLowering::LowerEH_RETURN(SDValue Op, SelectionDAG &DAG) const {
25956 SDValue X86TargetLowering::lowerEH_SJLJ_SETJMP(SDValue Op,
25974 SDValue X86TargetLowering::lowerEH_SJLJ_LONGJMP(SDValue Op,
25981 SDValue X86TargetLowering::lowerEH_SJLJ_SETUP_DISPATCH(SDValue Op,
25992 SDValue X86TargetLowering::LowerINIT_TRAMPOLINE(SDValue Op,
26140 SDValue X86TargetLowering::LowerFLT_ROUNDS_(SDValue Op,
26892 SDValue X86TargetLowering::LowerWin64_i128OP(SDValue Op, SelectionDAG &DAG) const {
27947 bool X86TargetLowering::needsCmpXchgNb(Type *MemType) const {
27958 bool X86TargetLowering::shouldExpandAtomicStoreInIR(StoreInst *SI) const {
27974 X86TargetLowering::shouldExpandAtomicLoadInIR(LoadInst *LI) const {
27992 X86TargetLowering::shouldExpandAtomicRMWInIR(AtomicRMWInst *AI) const {
28033 X86TargetLowering::lowerIdempotentRMWIntoFencedLoad(AtomicRMWInst *AI) const {
28099 bool X86TargetLowering::lowerAtomicStoreAsStoreSDNode(const StoreInst &SI) const {
28104 bool X86TargetLowering::lowerAtomicLoadAsLoadSDNode(const LoadInst &LI) const {
29147 SDValue X86TargetLowering::LowerGC_TRANSITION(SDValue Op,
29168 SDValue X86TargetLowering::LowerF128Call(SDValue Op, SelectionDAG &DAG,
29202 SDValue X86TargetLowering::LowerOperation(SDValue Op, SelectionDAG &DAG) const {
29345 void X86TargetLowering::LowerOperationWrapper(SDNode *N,
29372 void X86TargetLowering::ReplaceNodeResults(SDNode *N,
30298 const char *X86TargetLowering::getTargetNodeName(unsigned Opcode) const {
30666 bool X86TargetLowering::isLegalAddressingMode(const DataLayout &DL,
30718 bool X86TargetLowering::isVectorShiftByScalarCheap(Type *Ty) const {
30746 bool X86TargetLowering::isBinOp(unsigned Opcode) const {
30761 bool X86TargetLowering::isCommutativeBinOp(unsigned Opcode) const {
30778 bool X86TargetLowering::isTruncateFree(Type *Ty1, Type *Ty2) const {
30786 bool X86TargetLowering::allowTruncateForTailCall(Type *Ty1, Type *Ty2) const {
30800 bool X86TargetLowering::isLegalICmpImmediate(int64_t Imm) const {
30804 bool X86TargetLowering::isLegalAddImmediate(int64_t Imm) const {
30809 bool X86TargetLowering::isLegalStoreImmediate(int64_t Imm) const {
30813 bool X86TargetLowering::isTruncateFree(EVT VT1, EVT VT2) const {
30821 bool X86TargetLowering::isZExtFree(Type *Ty1, Type *Ty2) const {
30826 bool X86TargetLowering::isZExtFree(EVT VT1, EVT VT2) const {
30831 bool X86TargetLowering::isZExtFree(SDValue Val, EVT VT2) const {
30855 bool X86TargetLowering::shouldSinkOperands(Instruction *I,
30882 bool X86TargetLowering::shouldConvertPhiType(Type *From, Type *To) const {
30888 bool X86TargetLowering::isVectorLoadExtDesirable(SDValue ExtVal) const {
30901 bool X86TargetLowering::isFMAFasterThanFMulAndFAdd(const MachineFunction &MF,
30922 bool X86TargetLowering::isNarrowingProfitable(EVT VT1, EVT VT2) const {
30931 bool X86TargetLowering::isShuffleMaskLegal(ArrayRef<int> Mask, EVT VT) const {
30948 bool X86TargetLowering::isVectorClearMaskLegal(ArrayRef<int> Mask,
30960 bool X86TargetLowering::areJTsAllowed(const Function *Fn) const {
31086 X86TargetLowering::EmitVAARG64WithCustomInserter(MachineInstr &MI,
31344 MachineBasicBlock *X86TargetLowering::EmitVAStartSaveXMMRegsWithCustomInserter(
31530 X86TargetLowering::EmitLoweredCascadedSelect(MachineInstr &FirstCMOV,
31686 X86TargetLowering::EmitLoweredSelect(MachineInstr &MI,
31839 X86TargetLowering::EmitLoweredProbedAlloca(MachineInstr &MI,
31933 X86TargetLowering::EmitLoweredSegAlloca(MachineInstr &MI,
32067 X86TargetLowering::EmitLoweredCatchRet(MachineInstr &MI,
32102 X86TargetLowering::EmitLoweredTLSAddr(MachineInstr &MI,
32131 X86TargetLowering::EmitLoweredTLSCall(MachineInstr &MI,
32274 X86TargetLowering::EmitLoweredIndirectThunk(MachineInstr &MI,
32338 void X86TargetLowering::emitSetJmpShadowStackFix(MachineInstr &MI,
32381 X86TargetLowering::emitEHSjLjSetJmp(MachineInstr &MI,
32541 X86TargetLowering::emitLongJmpShadowStackFix(MachineInstr &MI,
32722 X86TargetLowering::emitEHSjLjLongJmp(MachineInstr &MI,
32804 void X86TargetLowering::SetupEntryBlockForSjLj(MachineInstr &MI,
32855 X86TargetLowering::EmitSjLjDispatchBlock(MachineInstr &MI,
33083 X86TargetLowering::EmitInstrWithCustomInserter(MachineInstr &MI,
33317 // After X86TargetLowering::ReplaceNodeResults CMPXCHG8B is glued to its
33444 X86TargetLowering::targetShrinkDemandedConstant(SDValue Op,
33535 void X86TargetLowering::computeKnownBitsForTargetNode(const SDValue Op,
33787 unsigned X86TargetLowering::ComputeNumSignBitsForTargetNode(
33926 SDValue X86TargetLowering::unwrapAddress(SDValue N) const {
36954 bool X86TargetLowering::SimplifyDemandedVectorEltsForTargetShuffle(
37021 bool X86TargetLowering::SimplifyDemandedVectorEltsForTargetNode(
37498 bool X86TargetLowering::SimplifyDemandedBitsForTargetNode(
37842 SDValue X86TargetLowering::SimplifyMultipleUseDemandedBitsForTargetNode(
45250 SDValue X86TargetLowering::getNegatedExpression(SDValue Op, SelectionDAG &DAG,
45924 // 1) X86TargetLowering::EmitLoweredSelect later can do merging of two
47935 const X86TargetLowering *TLI = Subtarget.getTargetLowering();
48854 SDValue X86TargetLowering::PerformDAGCombine(SDNode *N,
49025 bool X86TargetLowering::isTypeDesirableForOp(unsigned Opc, EVT VT) const {
49071 SDValue X86TargetLowering::expandIndirectJTBranch(const SDLoc& dl,
49087 bool X86TargetLowering::IsDesirableToPromoteOp(SDValue Op, EVT &PVT) const {
49207 bool X86TargetLowering::ExpandInlineAsm(CallInst *CI) const {
49318 X86TargetLowering::ConstraintType
49319 X86TargetLowering::getConstraintType(StringRef Constraint) const {
49385 X86TargetLowering::getSingleConstraintMatchWeight(
49530 const char *X86TargetLowering::
49543 SDValue X86TargetLowering::LowerAsmOutputForConstraint(
49570 void X86TargetLowering::LowerAsmOperandForConstraint(SDValue Op,
49745 X86TargetLowering::getRegForInlineAsmConstraint(const TargetRegisterInfo *TRI,
50127 int X86TargetLowering::getScalingFactorCost(const DataLayout &DL,
50155 bool X86TargetLowering::isIntDivCheap(EVT VT, AttributeList Attr) const {
50167 void X86TargetLowering::initializeSplitCSR(MachineBasicBlock *Entry) const {
50177 void X86TargetLowering::insertCopiesSplitCSR(
50216 bool X86TargetLowering::supportSwiftError() const {
50221 bool X86TargetLowering::hasStackProbeSymbol(MachineFunction &MF) const {
50226 bool X86TargetLowering::hasInlineStackProbe(MachineFunction &MF) const {
50244 X86TargetLowering::getStackProbeSymbolName(MachineFunction &MF) const {
50267 X86TargetLowering::getStackProbeSize(MachineFunction &MF) const {
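The entries above index the member functions that X86TargetLowering defines in this file. As a rough illustration of the pattern those definitions follow, the sketch below shows a stand-in base class and two overridden hooks. It is a minimal sketch, not the real llvm::TargetLowering API: the class names and signatures here are simplified stand-ins, although isCheapToSpeculateCtlz and isLegalICmpImmediate mirror the intent of hooks that appear in the list.

    // Minimal, self-contained sketch (not the actual LLVM code) of the
    // hook-override pattern used by X86TargetLowering: the backend
    // subclasses a target-lowering base class and overrides virtual hooks
    // that tell instruction selection what the target can do cheaply.
    #include <cstdint>

    class TargetLoweringBaseSketch {
    public:
      virtual ~TargetLoweringBaseSketch() = default;
      // Conservative generic defaults; the real LLVM hooks of these names
      // live on llvm::TargetLoweringBase with richer signatures.
      virtual bool isCheapToSpeculateCtlz() const { return false; }
      virtual bool isLegalICmpImmediate(int64_t) const { return true; }
    };

    class X86LikeLoweringSketch : public TargetLoweringBaseSketch {
    public:
      explicit X86LikeLoweringSketch(bool HasLZCNT) : HasLZCNT(HasLZCNT) {}

      // Mirrors the intent of X86TargetLowering::isCheapToSpeculateCtlz:
      // speculating CTLZ is only cheap when the subtarget has LZCNT.
      bool isCheapToSpeculateCtlz() const override { return HasLZCNT; }

      // Mirrors the intent of X86TargetLowering::isLegalICmpImmediate:
      // x86 compare instructions take a signed 32-bit immediate.
      bool isLegalICmpImmediate(int64_t Imm) const override {
        return Imm == static_cast<int32_t>(Imm);
      }

    private:
      bool HasLZCNT; // illustrative stand-in for a subtarget feature query
    };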