Skip to content

Commit 2cfc21b

Browse files
committed
Auto merge of rust-lang#137058 - scottmcm:trunc-unchecked, r=nikic
Emit `trunc nuw` for unchecked shifts and `to_immediate_scalar`

- For shifts this shrinks the IR by no longer needing an `assume` while still providing the UB information
- Having this on the `i8`→`i1` truncations will hopefully help with some places that have to load `i8`s or pass those in LLVM structs without range information
2 parents ed49386 + 0fb9186 commit 2cfc21b

File tree

15 files changed

+240
-138
lines changed

15 files changed

+240
-138
lines changed

compiler/rustc_codegen_gcc/src/builder.rs

+9-5
Original file line numberDiff line numberDiff line change
@@ -989,10 +989,14 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
989989
OperandValue::Ref(place.val)
990990
} else if place.layout.is_gcc_immediate() {
991991
let load = self.load(place.layout.gcc_type(self), place.val.llval, place.val.align);
992-
if let abi::BackendRepr::Scalar(ref scalar) = place.layout.backend_repr {
993-
scalar_load_metadata(self, load, scalar);
994-
}
995-
OperandValue::Immediate(self.to_immediate(load, place.layout))
992+
OperandValue::Immediate(
993+
if let abi::BackendRepr::Scalar(ref scalar) = place.layout.backend_repr {
994+
scalar_load_metadata(self, load, scalar);
995+
self.to_immediate_scalar(load, *scalar)
996+
} else {
997+
load
998+
},
999+
)
9961000
} else if let abi::BackendRepr::ScalarPair(ref a, ref b) = place.layout.backend_repr {
9971001
let b_offset = a.size(self).align_to(b.align(self).abi);
9981002

@@ -1694,7 +1698,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
16941698

16951699
fn to_immediate_scalar(&mut self, val: Self::Value, scalar: abi::Scalar) -> Self::Value {
16961700
if scalar.is_bool() {
1697-
return self.trunc(val, self.cx().type_i1());
1701+
return self.unchecked_utrunc(val, self.cx().type_i1());
16981702
}
16991703
val
17001704
}

compiler/rustc_codegen_gcc/src/intrinsic/mod.rs

+8-3
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ use gccjit::FunctionType;
99
use gccjit::{ComparisonOp, Function, RValue, ToRValue, Type, UnaryOp};
1010
#[cfg(feature = "master")]
1111
use rustc_abi::ExternAbi;
12-
use rustc_abi::HasDataLayout;
12+
use rustc_abi::{BackendRepr, HasDataLayout};
1313
use rustc_codegen_ssa::MemFlags;
1414
use rustc_codegen_ssa::base::wants_msvc_seh;
1515
use rustc_codegen_ssa::common::IntPredicate;
@@ -181,14 +181,19 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc
181181
sym::volatile_load | sym::unaligned_volatile_load => {
182182
let tp_ty = fn_args.type_at(0);
183183
let ptr = args[0].immediate();
184+
let layout = self.layout_of(tp_ty);
184185
let load = if let PassMode::Cast { cast: ref ty, pad_i32: _ } = fn_abi.ret.mode {
185186
let gcc_ty = ty.gcc_type(self);
186187
self.volatile_load(gcc_ty, ptr)
187188
} else {
188-
self.volatile_load(self.layout_of(tp_ty).gcc_type(self), ptr)
189+
self.volatile_load(layout.gcc_type(self), ptr)
189190
};
190191
// TODO(antoyo): set alignment.
191-
self.to_immediate(load, self.layout_of(tp_ty))
192+
if let BackendRepr::Scalar(scalar) = layout.backend_repr {
193+
self.to_immediate_scalar(load, scalar)
194+
} else {
195+
load
196+
}
192197
}
193198
sym::volatile_store => {
194199
let dst = args[0].deref(self.cx());

compiler/rustc_codegen_llvm/src/builder.rs

+34-4
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,13 @@ use smallvec::SmallVec;
2929
use tracing::{debug, instrument};
3030

3131
use crate::abi::FnAbiLlvmExt;
32-
use crate::attributes;
3332
use crate::common::Funclet;
3433
use crate::context::{CodegenCx, SimpleCx};
3534
use crate::llvm::{self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, Metadata, True};
3635
use crate::type_::Type;
3736
use crate::type_of::LayoutLlvmExt;
3837
use crate::value::Value;
38+
use crate::{attributes, llvm_util};
3939

4040
#[must_use]
4141
pub(crate) struct GenericBuilder<'a, 'll, CX: Borrow<SimpleCx<'ll>>> {
@@ -606,7 +606,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
606606

607607
fn to_immediate_scalar(&mut self, val: Self::Value, scalar: abi::Scalar) -> Self::Value {
608608
if scalar.is_bool() {
609-
return self.trunc(val, self.cx().type_i1());
609+
return self.unchecked_utrunc(val, self.cx().type_i1());
610610
}
611611
val
612612
}
@@ -746,10 +746,12 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
746746
let load = self.load(llty, place.val.llval, place.val.align);
747747
if let abi::BackendRepr::Scalar(scalar) = place.layout.backend_repr {
748748
scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
749+
self.to_immediate_scalar(load, scalar)
750+
} else {
751+
load
749752
}
750-
load
751753
});
752-
OperandValue::Immediate(self.to_immediate(llval, place.layout))
754+
OperandValue::Immediate(llval)
753755
} else if let abi::BackendRepr::ScalarPair(a, b) = place.layout.backend_repr {
754756
let b_offset = a.size(self).align_to(b.align(self).abi);
755757

@@ -942,6 +944,34 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
942944
unsafe { llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED) }
943945
}
944946

947+
fn unchecked_utrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
948+
debug_assert_ne!(self.val_ty(val), dest_ty);
949+
950+
let trunc = self.trunc(val, dest_ty);
951+
if llvm_util::get_version() >= (19, 0, 0) {
952+
unsafe {
953+
if llvm::LLVMIsAInstruction(trunc).is_some() {
954+
llvm::LLVMSetNUW(trunc, True);
955+
}
956+
}
957+
}
958+
trunc
959+
}
960+
961+
fn unchecked_strunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
962+
debug_assert_ne!(self.val_ty(val), dest_ty);
963+
964+
let trunc = self.trunc(val, dest_ty);
965+
if llvm_util::get_version() >= (19, 0, 0) {
966+
unsafe {
967+
if llvm::LLVMIsAInstruction(trunc).is_some() {
968+
llvm::LLVMSetNSW(trunc, True);
969+
}
970+
}
971+
}
972+
trunc
973+
}
974+
945975
fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
946976
unsafe { llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED) }
947977
}

compiler/rustc_codegen_ssa/src/base.rs

+2-8
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ use rustc_middle::query::Providers;
2424
use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
2525
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
2626
use rustc_session::Session;
27-
use rustc_session::config::{self, CrateType, EntryFnType, OptLevel, OutputType};
27+
use rustc_session::config::{self, CrateType, EntryFnType, OutputType};
2828
use rustc_span::{DUMMY_SP, Symbol, sym};
2929
use rustc_trait_selection::infer::{BoundRegionConversionTime, TyCtxtInferExt};
3030
use rustc_trait_selection::traits::{ObligationCause, ObligationCtxt};
@@ -364,13 +364,7 @@ pub(crate) fn build_shift_expr_rhs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
364364
let rhs_sz = bx.cx().int_width(rhs_llty);
365365
let lhs_sz = bx.cx().int_width(lhs_llty);
366366
if lhs_sz < rhs_sz {
367-
if is_unchecked && bx.sess().opts.optimize != OptLevel::No {
368-
// FIXME: Use `trunc nuw` once that's available
369-
let inrange = bx.icmp(IntPredicate::IntULE, rhs, mask);
370-
bx.assume(inrange);
371-
}
372-
373-
bx.trunc(rhs, lhs_llty)
367+
if is_unchecked { bx.unchecked_utrunc(rhs, lhs_llty) } else { bx.trunc(rhs, lhs_llty) }
374368
} else if lhs_sz > rhs_sz {
375369
// We zero-extend even if the RHS is signed. So e.g. `(x: i32) << -1i8` will zero-extend the
376370
// RHS to `255i32`. But then we mask the shift amount to be within the size of the LHS

compiler/rustc_codegen_ssa/src/mir/block.rs

+5-5
Original file line numberDiff line numberDiff line change
@@ -1040,7 +1040,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
10401040
let (idx, _) = op.layout.non_1zst_field(bx).expect(
10411041
"not exactly one non-1-ZST field in a `DispatchFromDyn` type",
10421042
);
1043-
op = op.extract_field(bx, idx);
1043+
op = op.extract_field(self, bx, idx);
10441044
}
10451045

10461046
// Now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
@@ -1072,7 +1072,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
10721072
let (idx, _) = op.layout.non_1zst_field(bx).expect(
10731073
"not exactly one non-1-ZST field in a `DispatchFromDyn` type",
10741074
);
1075-
op = op.extract_field(bx, idx);
1075+
op = op.extract_field(self, bx, idx);
10761076
}
10771077

10781078
// Make sure that we've actually unwrapped the rcvr down
@@ -1572,9 +1572,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
15721572
if scalar.is_bool() {
15731573
bx.range_metadata(llval, WrappingRange { start: 0, end: 1 });
15741574
}
1575+
// We store bools as `i8` so we need to truncate to `i1`.
1576+
llval = bx.to_immediate_scalar(llval, scalar);
15751577
}
1576-
// We store bools as `i8` so we need to truncate to `i1`.
1577-
llval = bx.to_immediate(llval, arg.layout);
15781578
}
15791579
}
15801580

@@ -1604,7 +1604,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
16041604
} else {
16051605
// If the tuple is immediate, the elements are as well.
16061606
for i in 0..tuple.layout.fields.count() {
1607-
let op = tuple.extract_field(bx, i);
1607+
let op = tuple.extract_field(self, bx, i);
16081608
self.codegen_argument(bx, op, llargs, &args[i]);
16091609
}
16101610
}

compiler/rustc_codegen_ssa/src/mir/operand.rs

+67-64
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,14 @@
1-
use std::assert_matches::assert_matches;
21
use std::fmt;
32

43
use arrayvec::ArrayVec;
54
use either::Either;
65
use rustc_abi as abi;
76
use rustc_abi::{Align, BackendRepr, Size};
8-
use rustc_middle::bug;
97
use rustc_middle::mir::interpret::{Pointer, Scalar, alloc_range};
108
use rustc_middle::mir::{self, ConstValue};
119
use rustc_middle::ty::Ty;
1210
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
11+
use rustc_middle::{bug, span_bug};
1312
use tracing::debug;
1413

1514
use super::place::{PlaceRef, PlaceValue};
@@ -352,79 +351,83 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
352351

353352
pub(crate) fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
354353
&self,
354+
fx: &mut FunctionCx<'a, 'tcx, Bx>,
355355
bx: &mut Bx,
356356
i: usize,
357357
) -> Self {
358358
let field = self.layout.field(bx.cx(), i);
359359
let offset = self.layout.fields.offset(i);
360360

361-
let mut val = match (self.val, self.layout.backend_repr) {
362-
// If the field is ZST, it has no data.
363-
_ if field.is_zst() => OperandValue::ZeroSized,
364-
365-
// Newtype of a scalar, scalar pair or vector.
366-
(OperandValue::Immediate(_) | OperandValue::Pair(..), _)
367-
if field.size == self.layout.size =>
368-
{
369-
assert_eq!(offset.bytes(), 0);
370-
self.val
361+
let val = if field.is_zst() {
362+
OperandValue::ZeroSized
363+
} else if field.size == self.layout.size {
364+
assert_eq!(offset.bytes(), 0);
365+
if let Some(field_val) = fx.codegen_transmute_operand(bx, *self, field) {
366+
field_val
367+
} else {
368+
// we have to go through memory for things like
369+
// Newtype vector of array, e.g. #[repr(simd)] struct S([i32; 4]);
370+
let place = PlaceRef::alloca(bx, field);
371+
self.val.store(bx, place.val.with_type(self.layout));
372+
bx.load_operand(place).val
371373
}
372-
373-
// Extract a scalar component from a pair.
374-
(OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
375-
if offset.bytes() == 0 {
376-
assert_eq!(field.size, a.size(bx.cx()));
377-
OperandValue::Immediate(a_llval)
378-
} else {
379-
assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
380-
assert_eq!(field.size, b.size(bx.cx()));
381-
OperandValue::Immediate(b_llval)
374+
} else {
375+
let (in_scalar, imm) = match (self.val, self.layout.backend_repr) {
376+
// Extract a scalar component from a pair.
377+
(OperandValue::Pair(a_llval, b_llval), BackendRepr::ScalarPair(a, b)) => {
378+
if offset.bytes() == 0 {
379+
assert_eq!(field.size, a.size(bx.cx()));
380+
(Some(a), a_llval)
381+
} else {
382+
assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
383+
assert_eq!(field.size, b.size(bx.cx()));
384+
(Some(b), b_llval)
385+
}
382386
}
383-
}
384387

385-
// `#[repr(simd)]` types are also immediate.
386-
(OperandValue::Immediate(llval), BackendRepr::Vector { .. }) => {
387-
OperandValue::Immediate(bx.extract_element(llval, bx.cx().const_usize(i as u64)))
388-
}
388+
// `#[repr(simd)]` types are also immediate.
389+
(OperandValue::Immediate(llval), BackendRepr::Vector { .. }) => {
390+
(None, bx.extract_element(llval, bx.cx().const_usize(i as u64)))
391+
}
389392

390-
_ => bug!("OperandRef::extract_field({:?}): not applicable", self),
393+
_ => {
394+
span_bug!(fx.mir.span, "OperandRef::extract_field({:?}): not applicable", self)
395+
}
396+
};
397+
OperandValue::Immediate(match field.backend_repr {
398+
BackendRepr::Vector { .. } => imm,
399+
BackendRepr::Scalar(out_scalar) => {
400+
let Some(in_scalar) = in_scalar else {
401+
span_bug!(
402+
fx.mir.span,
403+
"OperandRef::extract_field({:?}): missing input scalar for output scalar",
404+
self
405+
)
406+
};
407+
if in_scalar != out_scalar {
408+
// If the backend and backend_immediate types might differ,
409+
// flip back to the backend type then to the new immediate.
410+
// This avoids nop truncations, but still handles things like
411+
// Bools in union fields needs to be truncated.
412+
let backend = bx.from_immediate(imm);
413+
bx.to_immediate_scalar(backend, out_scalar)
414+
} else {
415+
imm
416+
}
417+
}
418+
BackendRepr::Memory { sized: true } => {
419+
span_bug!(
420+
fx.mir.span,
421+
"Projecting into a simd type with padding doesn't work; \
422+
See <https://github.com/rust-lang/rust/issues/137108>",
423+
);
424+
}
425+
BackendRepr::Uninhabited
426+
| BackendRepr::ScalarPair(_, _)
427+
| BackendRepr::Memory { sized: false } => bug!(),
428+
})
391429
};
392430

393-
match (&mut val, field.backend_repr) {
394-
(OperandValue::ZeroSized, _) => {}
395-
(
396-
OperandValue::Immediate(llval),
397-
BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) | BackendRepr::Vector { .. },
398-
) => {
399-
// Bools in union fields needs to be truncated.
400-
*llval = bx.to_immediate(*llval, field);
401-
}
402-
(OperandValue::Pair(a, b), BackendRepr::ScalarPair(a_abi, b_abi)) => {
403-
// Bools in union fields needs to be truncated.
404-
*a = bx.to_immediate_scalar(*a, a_abi);
405-
*b = bx.to_immediate_scalar(*b, b_abi);
406-
}
407-
// Newtype vector of array, e.g. #[repr(simd)] struct S([i32; 4]);
408-
(OperandValue::Immediate(llval), BackendRepr::Memory { sized: true }) => {
409-
assert_matches!(self.layout.backend_repr, BackendRepr::Vector { .. });
410-
411-
let llfield_ty = bx.cx().backend_type(field);
412-
413-
// Can't bitcast an aggregate, so round trip through memory.
414-
let llptr = bx.alloca(field.size, field.align.abi);
415-
bx.store(*llval, llptr, field.align.abi);
416-
*llval = bx.load(llfield_ty, llptr, field.align.abi);
417-
}
418-
(
419-
OperandValue::Immediate(_),
420-
BackendRepr::Uninhabited | BackendRepr::Memory { sized: false },
421-
) => {
422-
bug!()
423-
}
424-
(OperandValue::Pair(..), _) => bug!(),
425-
(OperandValue::Ref(..), _) => bug!(),
426-
}
427-
428431
OperandRef { val, layout: field }
429432
}
430433
}
@@ -587,7 +590,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
587590
"Bad PlaceRef: destructing pointers should use cast/PtrMetadata, \
588591
but tried to access field {f:?} of pointer {o:?}",
589592
);
590-
o = o.extract_field(bx, f.index());
593+
o = o.extract_field(self, bx, f.index());
591594
}
592595
mir::ProjectionElem::Index(_)
593596
| mir::ProjectionElem::ConstantIndex { .. } => {

compiler/rustc_codegen_ssa/src/mir/rvalue.rs

+3-1
Original file line numberDiff line numberDiff line change
@@ -231,7 +231,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
231231
///
232232
/// Returns `None` for cases that can't work in that framework, such as for
233233
/// `Immediate`->`Ref` that needs an `alloc` to get the location.
234-
fn codegen_transmute_operand(
234+
pub(crate) fn codegen_transmute_operand(
235235
&mut self,
236236
bx: &mut Bx,
237237
operand: OperandRef<'tcx, Bx::Value>,
@@ -260,6 +260,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
260260
OperandValue::Ref(source_place_val) => {
261261
assert_eq!(source_place_val.llextra, None);
262262
assert_matches!(operand_kind, OperandValueKind::Ref);
263+
// The existing alignment is part of `source_place_val`,
264+
// so that alignment will be used, not `cast`'s.
263265
Some(bx.load_operand(source_place_val.with_type(cast)).val)
264266
}
265267
OperandValue::ZeroSized => {

0 commit comments

Comments (0)