PR for llvm/llvm-project#53559 #66

Merged (1 commit, Feb 21, 2022)
2 changes: 1 addition & 1 deletion llvm/include/llvm/Analysis/MemoryBuiltins.h
@@ -210,7 +210,6 @@ class ObjectSizeOffsetVisitor
SizeOffsetType visitConstantPointerNull(ConstantPointerNull&);
SizeOffsetType visitExtractElementInst(ExtractElementInst &I);
SizeOffsetType visitExtractValueInst(ExtractValueInst &I);
SizeOffsetType visitGEPOperator(GEPOperator &GEP);
SizeOffsetType visitGlobalAlias(GlobalAlias &GA);
SizeOffsetType visitGlobalVariable(GlobalVariable &GV);
SizeOffsetType visitIntToPtrInst(IntToPtrInst&);
@@ -221,6 +220,7 @@ class ObjectSizeOffsetVisitor
SizeOffsetType visitInstruction(Instruction &I);

private:
SizeOffsetType computeImpl(Value *V);
bool CheckedZextOrTrunc(APInt &I);
};

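For readers not familiar with this header: the visitor reports results as SizeOffsetType, a (size, offset) pair of APInts, and marks "unknown" with a default-constructed APInt whose bit width is at most 1, which is what knownSize(), knownOffset(), and bothKnown() test for. A paraphrased sketch of those surrounding declarations, based on my reading of the header at the time and not part of this diff:

```cpp
// Paraphrased context, not part of this diff: how MemoryBuiltins.h encodes
// "unknown" results around the declarations changed above.
#include "llvm/ADT/APInt.h"
#include <utility>

namespace sketch {
using SizeOffsetType = std::pair<llvm::APInt, llvm::APInt>;

// A default-constructed APInt (bit width <= 1) is the "unknown" sentinel.
inline SizeOffsetType unknown() { return {llvm::APInt(), llvm::APInt()}; }

inline bool knownSize(const SizeOffsetType &SO) { return SO.first.getBitWidth() > 1; }
inline bool knownOffset(const SizeOffsetType &SO) { return SO.second.getBitWidth() > 1; }
inline bool bothKnown(const SizeOffsetType &SO) {
  return knownSize(SO) && knownOffset(SO);
}
} // namespace sketch
```

This is why the new compute() below can test `SOT.second.getBitWidth() > 1` to decide whether the stripped constant offset may be re-applied.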
51 changes: 33 additions & 18 deletions llvm/lib/Analysis/MemoryBuiltins.cpp
@@ -573,18 +573,48 @@ ObjectSizeOffsetVisitor::ObjectSizeOffsetVisitor(const DataLayout &DL,
}

SizeOffsetType ObjectSizeOffsetVisitor::compute(Value *V) {
unsigned InitialIntTyBits = DL.getIndexTypeSizeInBits(V->getType());

// Stripping pointer casts can strip address space casts which can change the
// index type size. The invariant is that we use the value type to determine
// the index type size and if we stripped address space casts we have to
// readjust the APInt as we pass it upwards in order for the APInt to match
// the type the caller passed in.
APInt Offset(InitialIntTyBits, 0);
V = V->stripAndAccumulateConstantOffsets(
DL, Offset, /* AllowNonInbounds */ true, /* AllowInvariantGroup */ true);

// Later we use the index type size and zero but it will match the type of the
// value that is passed to computeImpl.
IntTyBits = DL.getIndexTypeSizeInBits(V->getType());
Zero = APInt::getZero(IntTyBits);

V = V->stripPointerCasts();
bool IndexTypeSizeChanged = InitialIntTyBits != IntTyBits;
if (!IndexTypeSizeChanged && Offset.isZero())
return computeImpl(V);

// We stripped an address space cast that changed the index type size or we
// accumulated some constant offset (or both). Readjust the bit width to match
// the argument index type size and apply the offset, as required.
SizeOffsetType SOT = computeImpl(V);
if (IndexTypeSizeChanged) {
if (knownSize(SOT) && !::CheckedZextOrTrunc(SOT.first, InitialIntTyBits))
SOT.first = APInt();
if (knownOffset(SOT) && !::CheckedZextOrTrunc(SOT.second, InitialIntTyBits))
SOT.second = APInt();
}
// If the computed offset is "unknown" we cannot add the stripped offset.
return {SOT.first,
SOT.second.getBitWidth() > 1 ? SOT.second + Offset : SOT.second};
}

SizeOffsetType ObjectSizeOffsetVisitor::computeImpl(Value *V) {
if (Instruction *I = dyn_cast<Instruction>(V)) {
// If we have already seen this instruction, bail out. Cycles can happen in
// unreachable code after constant propagation.
if (!SeenInsts.insert(I).second)
return unknown();

if (GEPOperator *GEP = dyn_cast<GEPOperator>(V))
return visitGEPOperator(*GEP);
return visit(*I);
}
if (Argument *A = dyn_cast<Argument>(V))
@@ -597,12 +627,6 @@ SizeOffsetType ObjectSizeOffsetVisitor::compute(Value *V) {
return visitGlobalVariable(*GV);
if (UndefValue *UV = dyn_cast<UndefValue>(V))
return visitUndefValue(*UV);
if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
if (CE->getOpcode() == Instruction::IntToPtr)
return unknown(); // clueless
if (CE->getOpcode() == Instruction::GetElementPtr)
return visitGEPOperator(cast<GEPOperator>(*CE));
}

LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor::compute() unhandled value: "
<< *V << '\n');
@@ -682,15 +706,6 @@ ObjectSizeOffsetVisitor::visitExtractValueInst(ExtractValueInst&) {
return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitGEPOperator(GEPOperator &GEP) {
SizeOffsetType PtrData = compute(GEP.getPointerOperand());
APInt Offset(DL.getIndexTypeSizeInBits(GEP.getPointerOperand()->getType()), 0);
if (!bothKnown(PtrData) || !GEP.accumulateConstantOffset(DL, Offset))
return unknown();

return std::make_pair(PtrData.first, PtrData.second + Offset);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitGlobalAlias(GlobalAlias &GA) {
if (GA.isInterposable())
return unknown();
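One piece the new compute() calls but this diff does not show is the file-local helper invoked as ::CheckedZextOrTrunc. Its intent, paraphrased as a standalone sketch (the name and body here are my approximation, not copied from the PR):

```cpp
// Approximation of the static helper compute() invokes as ::CheckedZextOrTrunc:
// resize I to IntTyBits, but report failure if truncating would drop set bits.
#include "llvm/ADT/APInt.h"
using llvm::APInt;

static bool checkedZextOrTruncSketch(APInt &I, unsigned IntTyBits) {
  // Truncation only loses information when the value needs more than
  // IntTyBits bits; otherwise zero-extend or truncate to the target width.
  if (I.getBitWidth() > IntTyBits && I.getActiveBits() > IntTyBits)
    return false;
  if (I.getBitWidth() != IntTyBits)
    I = I.zextOrTrunc(IntTyBits);
  return true;
}
```

In compute() above, a false return makes the corresponding size or offset fall back to the "unknown" sentinel instead of silently wrapping when re-widening to the caller's index type.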
39 changes: 38 additions & 1 deletion llvm/test/Transforms/InstCombine/builtin-dynamic-object-size.ll
@@ -1,6 +1,6 @@
; RUN: opt -instcombine -S < %s | FileCheck %s

target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128"
target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128-p7:32:32"
target triple = "x86_64-apple-macosx10.14.0"

; Function Attrs: nounwind ssp uwtable
@@ -152,6 +152,43 @@ if.end: ; preds = %if.else, %if.then
; CHECK-NEXT: br i1 false, label %if.else, label %if.then
; CHECK: call void @fortified_chk(i8* %obj, i64 [[SZ]])

@p7 = internal addrspace(7) global i8 0

; Gracefully handle AS cast when the address spaces have different pointer widths.
define i64 @as_cast(i1 %c) {
; CHECK: [[TMP0:%.*]] = select i1 %c, i64 64, i64 1
; CHECK: [[NOT:%.*]] = xor i1 %c, true
; CHECK: [[NEG:%.*]] = sext i1 [[NOT]] to i64
; CHECK: [[TMP1:%.*]] = add nsw i64 [[TMP0]], [[NEG]]
; CHECK: [[TMP2:%.*]] = icmp ne i64 [[TMP1]], -1
; CHECK: call void @llvm.assume(i1 [[TMP2]])
; CHECK: ret i64 [[TMP1]]
;
entry:
%p0 = tail call i8* @malloc(i64 64)
%gep = getelementptr i8, i8 addrspace(7)* @p7, i32 1
%as = addrspacecast i8 addrspace(7)* %gep to i8*
%select = select i1 %c, i8* %p0, i8* %as
%calc_size = tail call i64 @llvm.objectsize.i64.p0i8(i8* %select, i1 false, i1 true, i1 true)
ret i64 %calc_size
}

define i64 @constexpr_as_cast(i1 %c) {
; CHECK: [[TMP0:%.*]] = select i1 %c, i64 64, i64 1
; CHECK: [[NOT:%.*]] = xor i1 %c, true
; CHECK: [[NEG:%.*]] = sext i1 [[NOT]] to i64
; CHECK: [[TMP1:%.*]] = add nsw i64 [[TMP0]], [[NEG]]
; CHECK: [[TMP2:%.*]] = icmp ne i64 [[TMP1]], -1
; CHECK: call void @llvm.assume(i1 [[TMP2]])
; CHECK: ret i64 [[TMP1]]
;
entry:
%p0 = tail call i8* @malloc(i64 64)
%select = select i1 %c, i8* %p0, i8* addrspacecast (i8 addrspace(7)* getelementptr (i8, i8 addrspace(7)* @p7, i32 1) to i8*)
%calc_size = tail call i64 @llvm.objectsize.i64.p0i8(i8* %select, i1 false, i1 true, i1 true)
ret i64 %calc_size
}

declare void @bury(i32) local_unnamed_addr #2

; Function Attrs: nounwind allocsize(0)
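A side note on the datalayout change at the top of this test: adding "p7:32:32" gives address space 7 a 32-bit index type while the default address space keeps 64-bit indices, which is exactly what exercises the re-widening path in compute(). A quick self-contained check of that claim (my own snippet using the standard DataLayout API, not part of the PR):

```cpp
// Sanity check (not part of the PR) that "p7:32:32" gives address space 7
// a 32-bit index type under this test's datalayout string.
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Type.h"
#include <cassert>
using namespace llvm;

int main() {
  LLVMContext Ctx;
  DataLayout DL("e-m:o-i64:64-f80:128-n8:16:32:64-S128-p7:32:32");
  Type *P0 = Type::getInt8PtrTy(Ctx, /*AddressSpace=*/0);
  Type *P7 = Type::getInt8PtrTy(Ctx, /*AddressSpace=*/7);
  assert(DL.getIndexTypeSizeInBits(P0) == 64 && "default AS: 64-bit index");
  assert(DL.getIndexTypeSizeInBits(P7) == 32 && "addrspace(7): 32-bit index");
  return 0;
}
```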