Use the register of CAST for contained index operator (#74275)
* Rename test files

* Fix index scale for cast

* Do not contain index if indir is not containable.

* Revert "Do not contain index if indir is not containable."

This reverts commit e79d17d92ceb0eed5ae1bfd03c2d1d6b171ab17f.

* Instead try to contain the LEA

* IsSafeToContainMem() check

* Do IsSafeToContainMem() only if needed

* Add test case

* Fix merge error

* Fix the test case

* Address review comment
kunalspathak committed Sep 5, 2022
1 parent db4a954 commit fd11209
Showing 6 changed files with 89 additions and 24 deletions.
24 changes: 19 additions & 5 deletions src/coreclr/jit/codegenarmarch.cpp
@@ -4458,12 +4458,26 @@ void CodeGen::genLeaInstruction(GenTreeAddrMode* lea)
     else
     {
 #ifdef TARGET_ARM64
-        // Handle LEA with "contained" BFIZ
-        if (index->isContained() && index->OperIs(GT_BFIZ))
+        if (index->isContained())
         {
-            assert(scale == 0);
-            scale = (DWORD)index->gtGetOp2()->AsIntConCommon()->IconValue();
-            index = index->gtGetOp1()->gtGetOp1();
+            if (index->OperIs(GT_BFIZ))
+            {
+                // Handle LEA with "contained" BFIZ
+                assert(scale == 0);
+                scale = (DWORD)index->gtGetOp2()->AsIntConCommon()->IconValue();
+                index = index->gtGetOp1()->gtGetOp1();
+            }
+            else if (index->OperIs(GT_CAST))
+            {
+                index = index->AsCast()->gtGetOp1();
+            }
+            else
+            {
+                // Only BFIZ/CAST nodes should be present for contained index on ARM64.
+                // If there are more, we need to handle them here.
+                unreached();
+            }
         }
 #endif

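For readers outside the JIT, here is a minimal standalone C++ sketch of the dispatch above. It is a toy model only: Oper, Node, and unwrapContainedIndex are invented for this example (real GenTree nodes carry far more state), but it shows the shape of the fix in the commit title: a contained BFIZ contributes a scale and hands over its CAST's source node, while a contained CAST simply redirects codegen to its source operand's register.

// Toy model of the new codegen dispatch: given a contained index node,
// find the node whose register the LEA should actually consume.
#include <cassert>
#include <cstdio>

enum Oper { OP_LCL_VAR, OP_CAST, OP_BFIZ };

struct Node {
    Oper  oper;
    Node* op1;     // CAST: source operand; BFIZ: the contained CAST
    int   shiftBy; // BFIZ only: left-shift amount, becomes the scale
};

Node* unwrapContainedIndex(Node* index, unsigned* scale) {
    if (index->oper == OP_BFIZ) {
        assert(*scale == 0);
        *scale = (unsigned)index->shiftBy;
        return index->op1->op1; // BFIZ -> CAST -> source register
    } else if (index->oper == OP_CAST) {
        return index->op1;      // the fix: use the CAST source's register
    }
    // mirrors unreached(): no other contained index operator is expected
    assert(!"only BFIZ/CAST expected as contained index on ARM64");
    return nullptr;
}

int main() {
    Node src  = {OP_LCL_VAR, nullptr, 0};
    Node cast = {OP_CAST, &src, 0};
    unsigned scale = 0;
    Node* reg = unwrapContainedIndex(&cast, &scale);
    printf("%s, scale=%u\n", reg == &src ? "uses cast source" : "?", scale);
}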
52 changes: 33 additions & 19 deletions src/coreclr/jit/lower.cpp
@@ -5362,29 +5362,43 @@ bool Lowering::TryCreateAddrMode(GenTree* addr, bool isContainable, GenTree* parent)
     }

 #ifdef TARGET_ARM64
-    if ((index != nullptr) && index->OperIs(GT_CAST) && (scale == 1) && (offset == 0) && varTypeIsByte(targetType))
-    {
-        index->AsCast()->CastOp()->ClearContained(); // Uncontain any memory operands.
-        MakeSrcContained(addrMode, index);
-    }
-
-    // Check if we can "contain" LEA(BFIZ) in order to extend 32bit index to 64bit as part of load/store.
-    if ((index != nullptr) && index->OperIs(GT_BFIZ) && index->gtGetOp1()->OperIs(GT_CAST) &&
-        index->gtGetOp2()->IsCnsIntOrI() && !varTypeIsStruct(targetType))
+    if (index != nullptr)
     {
-        // BFIZ node is a binary op where op1 is GT_CAST and op2 is GT_CNS_INT
-        GenTreeCast* cast = index->gtGetOp1()->AsCast();
-        assert(cast->isContained());
-
-        const unsigned shiftBy = (unsigned)index->gtGetOp2()->AsIntCon()->IconValue();
-
-        // 'scale' and 'offset' have to be unset since we're going to use [base + index * SXTW/UXTW scale] form
-        // where there is no room for additional offsets/scales on ARM64. 'shiftBy' has to match target's width.
-        if (cast->CastOp()->TypeIs(TYP_INT) && cast->TypeIs(TYP_LONG) && (genTypeSize(targetType) == (1U << shiftBy)) &&
-            (scale == 1) && (offset == 0))
-        {
-            // TODO: Make sure that genCreateAddrMode marks such BFIZ candidates as GTF_DONT_CSE for better CQ.
-            MakeSrcContained(addrMode, index);
+        if (index->OperIs(GT_CAST) && (scale == 1) && (offset == 0) && varTypeIsByte(targetType))
+        {
+            if (IsSafeToContainMem(parent, index))
+            {
+                // Check containment safety against the parent node - this will ensure that LEA with the contained
+                // index will itself always be contained. We do not support uncontained LEAs with contained indices.
+                index->AsCast()->CastOp()->ClearContained(); // Uncontain any memory operands.
+                MakeSrcContained(addrMode, index);
+            }
+        }
+        else if (index->OperIs(GT_BFIZ) && index->gtGetOp1()->OperIs(GT_CAST) && index->gtGetOp2()->IsCnsIntOrI() &&
+                 !varTypeIsStruct(targetType))
+        {
+            // Check if we can "contain" LEA(BFIZ) in order to extend 32bit index to 64bit as part of load/store.
+            // BFIZ node is a binary op where op1 is GT_CAST and op2 is GT_CNS_INT
+            GenTreeCast* cast = index->gtGetOp1()->AsCast();
+            assert(cast->isContained());
+
+            const unsigned shiftBy = (unsigned)index->gtGetOp2()->AsIntCon()->IconValue();
+
+            // 'scale' and 'offset' have to be unset since we're going to use [base + index * SXTW/UXTW scale] form
+            // where there is no room for additional offsets/scales on ARM64. 'shiftBy' has to match target's width.
+            if (cast->CastOp()->TypeIs(TYP_INT) && cast->TypeIs(TYP_LONG) &&
+                (genTypeSize(targetType) == (1U << shiftBy)) && (scale == 1) && (offset == 0))
+            {
+                if (IsSafeToContainMem(parent, index))
+                {
+                    // Check containment safety against the parent node - this will ensure that LEA with the contained
+                    // index will itself always be contained. We do not support uncontained LEAs with contained indices.
+
+                    // TODO: Make sure that genCreateAddrMode marks such BFIZ candidates as GTF_DONT_CSE for better CQ.
+                    MakeSrcContained(addrMode, index);
+                }
+            }
         }
     }
 #endif
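Two conditions gate containment in this hunk. First, IsSafeToContainMem(parent, index) now ensures the LEA itself will be contained, since uncontained LEAs with contained indices are unsupported; judging by the commit history above, containing the index without that guarantee appears to be the failure mode this commit addresses. Second, for the BFIZ form the shift amount must match the access width. A small self-contained C++ sketch of that width check (shiftMatchesWidth is an invented helper name, not JIT code):

// Sketch of the BFIZ width check: the index shift folds into a
// [base + index, SXTW #shiftBy] addressing form only when 1 << shiftBy
// equals the size of the element being loaded or stored.
#include <cstdio>

static bool shiftMatchesWidth(unsigned elementSize, unsigned shiftBy) {
    return elementSize == (1u << shiftBy);
}

int main() {
    printf("4-byte access, index << 2: %s\n", shiftMatchesWidth(4, 2) ? "containable" : "not containable");
    printf("8-byte access, index << 2: %s\n", shiftMatchesWidth(8, 2) ? "containable" : "not containable");
}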
27 changes: 27 additions & 0 deletions src/tests/JIT/Regression/JitBlue/Runtime_74117/Runtime_74117.cs
@@ -0,0 +1,27 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System.Runtime.CompilerServices;

unsafe class Runtime_74117
{
    public unsafe static int Main(string[] args)
    {
        byte a = 5;
        Problem(ref a, 0);
        return 100;
    }

    [MethodImpl(MethodImplOptions.NoInlining)]
    static byte GetByte() => 1;

    [MethodImpl(MethodImplOptions.NoInlining)]
    private static void Problem(ref byte x, int a)
    {
        // Taking the address of 'a' keeps it address-exposed on the stack, so
        // the int->long cast of the index below consumes a memory operand.
        JitUse(&a);
        // Byte store at "ref x + a": the int index is cast to a 64-bit address,
        // exercising the contained-CAST-index path this commit fixes.
        Unsafe.Add(ref x, a) = GetByte();
    }

    [MethodImpl(MethodImplOptions.NoInlining)]
    public static void JitUse<T>(T* arg) where T : unmanaged { }
}
10 changes: 10 additions & 0 deletions src/tests/JIT/Regression/JitBlue/Runtime_74117/Runtime_74117.csproj
@@ -0,0 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <Optimize>True</Optimize>
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <ItemGroup>
    <Compile Include="$(MSBuildProjectName).cs" />
  </ItemGroup>
</Project>
