@@ -494,20 +494,6 @@ static LinearExpression GetLinearExpression(
494
494
return Val;
495
495
}
496
496
497
- /// To ensure a pointer offset fits in an integer of size IndexSize
498
- /// (in bits) when that size is smaller than the maximum index size. This is
499
- /// an issue, for example, in particular for 32b pointers with negative indices
500
- /// that rely on two's complement wrap-arounds for precise alias information
501
- /// where the maximum index size is 64b.
502
- static void adjustToIndexSize (APInt &Offset, unsigned IndexSize) {
503
- assert (IndexSize <= Offset.getBitWidth () && " Invalid IndexSize!" );
504
- unsigned ShiftBits = Offset.getBitWidth () - IndexSize;
505
- if (ShiftBits != 0 ) {
506
- Offset <<= ShiftBits;
507
- Offset.ashrInPlace (ShiftBits);
508
- }
509
- }
510
-
511
497
namespace {
512
498
// A linear transformation of a Value; this class represents
513
499
// ZExt(SExt(Trunc(V, TruncBits), SExtBits), ZExtBits) * Scale.
@@ -594,9 +580,9 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
594
580
SearchTimes++;
595
581
const Instruction *CxtI = dyn_cast<Instruction>(V);
596
582
597
- unsigned MaxIndexSize = DL.getMaxIndexSizeInBits ( );
583
+ unsigned IndexSize = DL.getIndexTypeSizeInBits (V-> getType () );
598
584
DecomposedGEP Decomposed;
599
- Decomposed.Offset = APInt (MaxIndexSize , 0 );
585
+ Decomposed.Offset = APInt (IndexSize , 0 );
600
586
do {
601
587
// See if this is a bitcast or GEP.
602
588
const Operator *Op = dyn_cast<Operator>(V);
@@ -614,7 +600,14 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
614
600
615
601
if (Op->getOpcode () == Instruction::BitCast ||
616
602
Op->getOpcode () == Instruction::AddrSpaceCast) {
617
- V = Op->getOperand (0 );
603
+ Value *NewV = Op->getOperand (0 );
604
+ // Don't look through casts between address spaces with differing index
605
+ // widths.
606
+ if (DL.getIndexTypeSizeInBits (NewV->getType ()) != IndexSize) {
607
+ Decomposed.Base = V;
608
+ return Decomposed;
609
+ }
610
+ V = NewV;
618
611
continue ;
619
612
}
620
613
@@ -651,12 +644,8 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
651
644
652
645
assert (GEPOp->getSourceElementType ()->isSized () && " GEP must be sized" );
653
646
654
- unsigned AS = GEPOp->getPointerAddressSpace ();
655
647
// Walk the indices of the GEP, accumulating them into BaseOff/VarIndices.
656
648
gep_type_iterator GTI = gep_type_begin (GEPOp);
657
- unsigned IndexSize = DL.getIndexSizeInBits (AS);
658
- // Assume all GEP operands are constants until proven otherwise.
659
- bool GepHasConstantOffset = true ;
660
649
for (User::const_op_iterator I = GEPOp->op_begin () + 1 , E = GEPOp->op_end ();
661
650
I != E; ++I, ++GTI) {
662
651
const Value *Index = *I;
@@ -684,7 +673,7 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
684
673
}
685
674
686
675
Decomposed.Offset += AllocTypeSize.getFixedValue () *
687
- CIdx->getValue ().sextOrTrunc (MaxIndexSize );
676
+ CIdx->getValue ().sextOrTrunc (IndexSize );
688
677
continue ;
689
678
}
690
679
@@ -694,8 +683,6 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
694
683
return Decomposed;
695
684
}
696
685
697
- GepHasConstantOffset = false ;
698
-
699
686
// If the integer type is smaller than the index size, it is implicitly
700
687
// sign extended or truncated to index size.
701
688
bool NUSW = GEPOp->hasNoUnsignedSignedWrap ();
@@ -710,8 +697,8 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
710
697
// Scale by the type size.
711
698
unsigned TypeSize = AllocTypeSize.getFixedValue ();
712
699
LE = LE.mul (APInt (IndexSize, TypeSize), NUW, NUSW);
713
- Decomposed.Offset += LE.Offset . sext (MaxIndexSize) ;
714
- APInt Scale = LE.Scale . sext (MaxIndexSize) ;
700
+ Decomposed.Offset += LE.Offset ;
701
+ APInt Scale = LE.Scale ;
715
702
if (!LE.IsNUW )
716
703
Decomposed.NWFlags = Decomposed.NWFlags .withoutNoUnsignedWrap ();
717
704
@@ -731,21 +718,13 @@ BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
731
718
}
732
719
}
733
720
734
- // Make sure that we have a scale that makes sense for this target's
735
- // index size.
736
- adjustToIndexSize (Scale, IndexSize);
737
-
738
721
if (!!Scale) {
739
722
VariableGEPIndex Entry = {LE.Val , Scale, CxtI, LE.IsNSW ,
740
723
/* IsNegated */ false };
741
724
Decomposed.VarIndices .push_back (Entry);
742
725
}
743
726
}
744
727
745
- // Take care of wrap-arounds
746
- if (GepHasConstantOffset)
747
- adjustToIndexSize (Decomposed.Offset , IndexSize);
748
-
749
728
// Analyze the base pointer next.
750
729
V = GEPOp->getOperand (0 );
751
730
} while (--MaxLookup);
@@ -1084,6 +1063,14 @@ AliasResult BasicAAResult::aliasGEP(
1084
1063
const GEPOperator *GEP1, LocationSize V1Size,
1085
1064
const Value *V2, LocationSize V2Size,
1086
1065
const Value *UnderlyingV1, const Value *UnderlyingV2, AAQueryInfo &AAQI) {
1066
+ auto BaseObjectsAlias = [&]() {
1067
+ AliasResult BaseAlias =
1068
+ AAQI.AAR .alias (MemoryLocation::getBeforeOrAfter (UnderlyingV1),
1069
+ MemoryLocation::getBeforeOrAfter (UnderlyingV2), AAQI);
1070
+ return BaseAlias == AliasResult::NoAlias ? AliasResult::NoAlias
1071
+ : AliasResult::MayAlias;
1072
+ };
1073
+
1087
1074
if (!V1Size.hasValue () && !V2Size.hasValue ()) {
1088
1075
// TODO: This limitation exists for compile-time reasons. Relax it if we
1089
1076
// can avoid exponential pathological cases.
@@ -1092,11 +1079,7 @@ AliasResult BasicAAResult::aliasGEP(
1092
1079
1093
1080
// If both accesses have unknown size, we can only check whether the base
1094
1081
// objects don't alias.
1095
- AliasResult BaseAlias =
1096
- AAQI.AAR .alias (MemoryLocation::getBeforeOrAfter (UnderlyingV1),
1097
- MemoryLocation::getBeforeOrAfter (UnderlyingV2), AAQI);
1098
- return BaseAlias == AliasResult::NoAlias ? AliasResult::NoAlias
1099
- : AliasResult::MayAlias;
1082
+ return BaseObjectsAlias ();
1100
1083
}
1101
1084
1102
1085
DominatorTree *DT = getDT (AAQI);
@@ -1107,6 +1090,10 @@ AliasResult BasicAAResult::aliasGEP(
1107
1090
if (DecompGEP1.Base == GEP1 && DecompGEP2.Base == V2)
1108
1091
return AliasResult::MayAlias;
1109
1092
1093
+ // Fall back to base objects if pointers have different index widths.
1094
+ if (DecompGEP1.Offset .getBitWidth () != DecompGEP2.Offset .getBitWidth ())
1095
+ return BaseObjectsAlias ();
1096
+
1110
1097
// Swap GEP1 and GEP2 if GEP2 has more variable indices.
1111
1098
if (DecompGEP1.VarIndices .size () < DecompGEP2.VarIndices .size ()) {
1112
1099
std::swap (DecompGEP1, DecompGEP2);
0 commit comments