Lines Matching refs:DL (uses of the DataLayout &DL parameter, keyed by source line number; short illustrative sketches follow the main groups of matches)

16                                      const DataLayout &DL) {
27   uint64_t StoreSize = DL.getTypeSizeInBits(StoredTy);
34   if (StoreSize < DL.getTypeSizeInBits(LoadTy))
38   if (DL.isNonIntegralPointerType(StoredVal->getType()->getScalarType()) !=
39       DL.isNonIntegralPointerType(LoadTy->getScalarType())) {
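
The three matches above are the whole legality gate for forwarding a store to a load: the store must be at least as wide as the load, and non-integral pointers must never be reinterpreted as integers or vice versa. A minimal sketch of that gate, assuming only the DataLayout calls shown in the listing (the name canCoerceSketch is illustrative, not the file's):

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Type.h"
    #include "llvm/IR/Value.h"
    using namespace llvm;

    // Illustrative sketch of the coercion gate (cf. lines 27-39).
    static bool canCoerceSketch(Value *StoredVal, Type *LoadTy,
                                const DataLayout &DL) {
      uint64_t StoreSize = DL.getTypeSizeInBits(StoredVal->getType());
      // The store must cover at least as many bits as the load (line 34).
      if (StoreSize < DL.getTypeSizeInBits(LoadTy))
        return false;
      // Never coerce between non-integral pointers and ordinary values
      // (lines 38-39): their bit patterns are not semantically stable.
      if (DL.isNonIntegralPointerType(StoredVal->getType()->getScalarType()) !=
          DL.isNonIntegralPointerType(LoadTy->getScalarType()))
        return false;
      return true;
    }
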
54                                                const DataLayout &DL) {
55   assert(canCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, DL) &&
58     if (auto *FoldedStoredVal = ConstantFoldConstant(C, DL))
64   uint64_t StoredValSize = DL.getTypeSizeInBits(StoredValTy);
65   uint64_t LoadedValSize = DL.getTypeSizeInBits(LoadedTy);
75         StoredValTy = DL.getIntPtrType(StoredValTy);
81         TypeToCastTo = DL.getIntPtrType(TypeToCastTo);
92       if (auto *FoldedStoredVal = ConstantFoldConstant(C, DL))
105     StoredValTy = DL.getIntPtrType(StoredValTy);
117   if (DL.isBigEndian()) {
118     uint64_t ShiftAmt = DL.getTypeStoreSizeInBits(StoredValTy) -
119                         DL.getTypeStoreSizeInBits(LoadedTy);
138     if (auto *FoldedStoredVal = ConstantFoldConstant(C, DL))
151                                       IRBuilder<> &IRB, const DataLayout &DL) {
152   return coerceAvailableValueToLoadTypeHelper(StoredVal, LoadedTy, IRB, DL);
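
Lines 54-152 materialize the stored value as the loaded type: pointers are round-tripped through getIntPtrType, same-size values are bitcast, wider values are truncated, and big-endian targets first shift the wanted bytes down into the low bits. A condensed sketch of the narrowing path, using a plain IRBuilder instead of the file's templated helper; coerceSketch is an illustrative name and LoadedTy is assumed to be a strictly narrower integer type:

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    // Illustrative sketch of the narrowing path (cf. lines 105-119).
    static Value *coerceSketch(Value *StoredVal, Type *LoadedTy, IRBuilder<> &B,
                               const DataLayout &DL) {
      Type *StoredValTy = StoredVal->getType();
      // Pointers first become integers so they can be shifted and truncated
      // (lines 75 and 105).
      if (StoredValTy->isPtrOrPtrVectorTy()) {
        StoredValTy = DL.getIntPtrType(StoredValTy);
        StoredVal = B.CreatePtrToInt(StoredVal, StoredValTy);
      }
      if (DL.isBigEndian()) {
        // Big-endian targets keep the loaded bytes in the high bits, so shift
        // them down before truncating (lines 117-119).
        uint64_t ShiftAmt = DL.getTypeStoreSizeInBits(StoredValTy) -
                            DL.getTypeStoreSizeInBits(LoadedTy);
        StoredVal = B.CreateLShr(StoredVal, ShiftAmt);
      }
      return B.CreateTrunc(StoredVal, LoadedTy);
    }
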
166                                           const DataLayout &DL) {
174       GetPointerBaseWithConstantOffset(WritePtr, StoreOffset, DL);
175   Value *LoadBase = GetPointerBaseWithConstantOffset(LoadPtr, LoadOffset, DL);
187   uint64_t LoadSize = DL.getTypeSizeInBits(LoadTy);
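
analyzeLoadFromClobberingWrite (lines 166-187) is the shared core of the analyze* entry points that follow: strip both pointers to base plus constant byte offset, give up when the bases differ, and otherwise require the load to read only bytes the write produced. A sketch with the file's return convention (-1 for unknown, otherwise the byte offset of the load within the store); overlapSketch is an illustrative name:

    #include "llvm/Analysis/ValueTracking.h"
    #include "llvm/IR/DataLayout.h"
    using namespace llvm;

    // Illustrative sketch of the base+offset overlap test (cf. lines 166-187).
    static int overlapSketch(Type *LoadTy, Value *LoadPtr, Value *WritePtr,
                             uint64_t WriteSizeInBits, const DataLayout &DL) {
      int64_t StoreOffset = 0, LoadOffset = 0;
      Value *StoreBase =
          GetPointerBaseWithConstantOffset(WritePtr, StoreOffset, DL);
      Value *LoadBase = GetPointerBaseWithConstantOffset(LoadPtr, LoadOffset, DL);
      if (StoreBase != LoadBase)
        return -1; // Different bases: overlap cannot be proven.
      uint64_t LoadSize = DL.getTypeSizeInBits(LoadTy);
      // The load must begin at or after the store and end inside the bytes
      // the store wrote.
      if (LoadOffset < StoreOffset ||
          uint64_t(LoadOffset - StoreOffset) * 8 + LoadSize > WriteSizeInBits)
        return -1;
      return int(LoadOffset - StoreOffset); // Byte offset of load in store.
    }
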
219                                    StoreInst *DepSI, const DataLayout &DL) {
228   if (DL.isNonIntegralPointerType(StoredVal->getType()->getScalarType()) !=
229       DL.isNonIntegralPointerType(LoadTy->getScalarType())) {
238       DL.getTypeSizeInBits(DepSI->getValueOperand()->getType());
240                                         DL);
247                                   const DataLayout &DL) {
253   if (DL.isNonIntegralPointerType(DepLI->getType()->getScalarType()) !=
254       DL.isNonIntegralPointerType(LoadTy->getScalarType()))
258   uint64_t DepSize = DL.getTypeSizeInBits(DepLI->getType());
259   int R = analyzeLoadFromClobberingWrite(LoadTy, LoadPtr, DepPtr, DepSize, DL);
267       GetPointerBaseWithConstantOffset(LoadPtr, LoadOffs, DL);
268   unsigned LoadSize = DL.getTypeStoreSize(LoadTy);
280   return analyzeLoadFromClobberingWrite(LoadTy, LoadPtr, DepPtr, Size * 8, DL);
284                                      MemIntrinsic *MI, const DataLayout &DL) {
294     if (DL.isNonIntegralPointerType(LoadTy->getScalarType())) {
300                                           MemSizeInBits, DL);
312   GlobalVariable *GV = dyn_cast<GlobalVariable>(GetUnderlyingObject(Src, DL));
318                                               MemSizeInBits, DL);
324   if (DL.isNonIntegralPointerType(LoadTy->getScalarType()))
338   if (ConstantFoldLoadFromConstPtr(Src, LoadTy, DL))
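
For memory intrinsics the same overlap test is reused (lines 300 and 318); the extra wrinkle is a memcpy whose source is a constant global, which only helps if a load from the source would constant-fold. A sketch of that check; memTransferFoldsSketch is an illustrative name and Src stands for the transfer source adjusted to the load offset:

    #include "llvm/Analysis/ConstantFolding.h"
    #include "llvm/Analysis/ValueTracking.h"
    #include "llvm/IR/GlobalVariable.h"
    using namespace llvm;

    // Illustrative sketch of the memcpy-from-constant case (cf. lines 312-338).
    static bool memTransferFoldsSketch(Constant *Src, Type *LoadTy,
                                       const DataLayout &DL) {
      auto *GV = dyn_cast<GlobalVariable>(GetUnderlyingObject(Src, DL));
      // Only definitively initialized constant globals can be read at
      // compile time (cf. the checks around line 312).
      if (!GV || !GV->isConstant() || !GV->hasDefinitiveInitializer())
        return false;
      // Succeed only if a load of LoadTy from Src constant-folds (line 338).
      return ConstantFoldLoadFromConstPtr(Src, LoadTy, DL) != nullptr;
    }
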
346                                      const DataLayout &DL) {
358   uint64_t StoreSize = (DL.getTypeSizeInBits(SrcVal->getType()) + 7) / 8;
359   uint64_t LoadSize = (DL.getTypeSizeInBits(LoadTy) + 7) / 8;
363     SrcVal = Helper.CreatePtrToInt(SrcVal, DL.getIntPtrType(SrcVal->getType()));
369   if (DL.isLittleEndian())
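
getStoreValueForLoadHelper carves the loaded bytes out of a wider stored value: sizes are rounded up to whole bytes, pointers become integers, and the shift amount depends on byte order, since byte Offset sits near the LSB on little-endian targets and near the MSB on big-endian ones. A sketch; extractBytesSketch is an illustrative name and the final truncation is left to the caller:

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    // Illustrative sketch of the byte-extraction shift (cf. lines 346-369).
    static Value *extractBytesSketch(Value *SrcVal, unsigned Offset, Type *LoadTy,
                                     IRBuilder<> &B, const DataLayout &DL) {
      // Sizes rounded up to whole bytes, as on lines 358-359.
      uint64_t StoreSize = (DL.getTypeSizeInBits(SrcVal->getType()) + 7) / 8;
      uint64_t LoadSize = (DL.getTypeSizeInBits(LoadTy) + 7) / 8;
      // Pointers become integers so they can be shifted (line 363).
      if (SrcVal->getType()->isPtrOrPtrVectorTy())
        SrcVal = B.CreatePtrToInt(SrcVal, DL.getIntPtrType(SrcVal->getType()));
      // Little-endian: byte Offset sits at bit Offset*8 from the LSB (line 369);
      // big-endian counts from the most significant end instead.
      uint64_t ShiftAmt = DL.isLittleEndian()
                              ? uint64_t(Offset) * 8
                              : (StoreSize - LoadSize - Offset) * 8;
      if (ShiftAmt)
        SrcVal = B.CreateLShr(SrcVal, ShiftAmt);
      return SrcVal;
    }
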
388                             Instruction *InsertPt, const DataLayout &DL) {
391   SrcVal = getStoreValueForLoadHelper(SrcVal, Offset, LoadTy, Builder, DL);
392   return coerceAvailableValueToLoadTypeHelper(SrcVal, LoadTy, Builder, DL);
396                                        Type *LoadTy, const DataLayout &DL) {
398   SrcVal = getStoreValueForLoadHelper(SrcVal, Offset, LoadTy, F, DL);
399   return coerceAvailableValueToLoadTypeHelper(SrcVal, LoadTy, F, DL);
408                            Instruction *InsertPt, const DataLayout &DL) {
411   unsigned SrcValStoreSize = DL.getTypeStoreSize(SrcVal->getType());
412   unsigned LoadSize = DL.getTypeStoreSize(LoadTy);
442     if (DL.isBigEndian())
450   return getStoreValueForLoad(SrcVal, Offset, LoadTy, InsertPt, DL);
454                                       Type *LoadTy, const DataLayout &DL) {
455   unsigned SrcValStoreSize = DL.getTypeStoreSize(SrcVal->getType());
456   unsigned LoadSize = DL.getTypeStoreSize(LoadTy);
459   return getConstantStoreValueForLoad(SrcVal, Offset, LoadTy, DL);
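
Both load-reuse variants begin with the same bounds check: the requested bytes must lie inside the bytes the earlier load actually produced, measured with getTypeStoreSize rather than the padded alloc size. In sketch form (rangeFitsSketch is an illustrative name):

    #include "llvm/IR/DataLayout.h"
    using namespace llvm;

    // Illustrative bounds check shared by both variants (cf. lines 411-412
    // and 455-459).
    static bool rangeFitsSketch(Type *SrcTy, unsigned Offset, Type *LoadTy,
                                const DataLayout &DL) {
      // Store sizes count the bytes actually written, without alloc padding.
      unsigned SrcValStoreSize = DL.getTypeStoreSize(SrcTy);
      unsigned LoadSize = DL.getTypeStoreSize(LoadTy);
      return Offset + LoadSize <= SrcValStoreSize;
    }
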
465                                        const DataLayout &DL) {
467   uint64_t LoadSize = DL.getTypeSizeInBits(LoadTy) / 8;
497     return coerceAvailableValueToLoadTypeHelper(Val, LoadTy, Helper, DL);
514   return ConstantFoldLoadFromConstPtr(Src, LoadTy, DL);
521                               const DataLayout &DL) {
524                                                           LoadTy, Builder, DL);
528                                          Type *LoadTy, const DataLayout &DL) {
536                                                                 LoadTy, F, DL);
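
Finally, the memset path of getMemInstValueForLoadHelper manufactures the loaded value outright: a LoadSize-byte load from memset-filled memory is LoadSize copies of the fill byte, built with shifts and ors and then passed to the coercion helper (line 497). A simplified sketch that replicates one byte per step where the file doubles the pattern; memsetValueSketch is an illustrative name and LoadTy is assumed to be at least one byte wide:

    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    // Illustrative sketch of the memset case (cf. lines 465-497).
    static Value *memsetValueSketch(Value *ByteVal, Type *LoadTy, IRBuilder<> &B,
                                    const DataLayout &DL) {
      uint64_t LoadSize = DL.getTypeSizeInBits(LoadTy) / 8; // line 467
      Value *Val = B.CreateZExtOrBitCast(ByteVal, B.getIntNTy(LoadSize * 8));
      // Each step ors in one more copy of the fill byte one byte higher,
      // so after LoadSize - 1 steps all LoadSize bytes hold the fill value.
      for (uint64_t i = 1; i < LoadSize; ++i)
        Val = B.CreateOr(Val, B.CreateShl(Val, 8));
      // The result is then coerced to LoadTy (line 497).
      return Val;
    }
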