Skip to content

Commit a7a6c64

Browse files
committed
Auto merge of rust-lang#135335 - oli-obk:push-zxwssomxxtnq, r=saethlin
codegen: store ScalarPair via memset when one side is undef and the other side can be memset. Basically, since `undef` can be any byte, it can also be the byte(s) that are in the non-undef parts of a value. So we can just treat the `undef` as not being there and only look at the initialized bytes and memset over them. fixes rust-lang#104290, based on rust-lang#135258
2 parents ebbe638 + 8f5f5e5 commit a7a6c64

File tree

9 files changed

+125
-30
lines changed

9 files changed

+125
-30
lines changed

compiler/rustc_codegen_gcc/src/common.rs

+5
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,11 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
6464
if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) }
6565
}
6666

67+
fn is_undef(&self, _val: RValue<'gcc>) -> bool {
68+
// FIXME: actually check for undef
69+
false
70+
}
71+
6772
fn const_undef(&self, typ: Type<'gcc>) -> RValue<'gcc> {
6873
let local = self.current_func.borrow().expect("func").new_local(None, typ, "undefined");
6974
if typ.is_struct().is_some() {

compiler/rustc_codegen_llvm/src/common.rs

+4
Original file line numberDiff line numberDiff line change
@@ -126,6 +126,10 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
126126
unsafe { llvm::LLVMGetUndef(t) }
127127
}
128128

129+
fn is_undef(&self, v: &'ll Value) -> bool {
130+
unsafe { llvm::LLVMIsUndef(v) == True }
131+
}
132+
129133
fn const_poison(&self, t: &'ll Type) -> &'ll Value {
130134
unsafe { llvm::LLVMGetPoison(t) }
131135
}

compiler/rustc_codegen_llvm/src/llvm/ffi.rs

+1
Original file line numberDiff line numberDiff line change
@@ -918,6 +918,7 @@ unsafe extern "C" {
918918
pub fn LLVMMetadataTypeInContext(C: &Context) -> &Type;
919919

920920
// Operations on all values
921+
pub fn LLVMIsUndef(Val: &Value) -> Bool;
921922
pub fn LLVMTypeOf(Val: &Value) -> &Type;
922923
pub fn LLVMGetValueName2(Val: &Value, Length: *mut size_t) -> *const c_char;
923924
pub fn LLVMSetValueName2(Val: &Value, Name: *const c_char, NameLen: size_t);

compiler/rustc_codegen_ssa/src/mir/operand.rs

+37-22
Original file line numberDiff line numberDiff line change
@@ -204,14 +204,30 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
204204
let alloc_align = alloc.inner().align;
205205
assert!(alloc_align >= layout.align.abi);
206206

207+
// Returns `None` when the value is partially undefined or any byte of it has provenance.
208+
// Otherwise returns the value or (if the entire value is undef) returns an undef.
207209
let read_scalar = |start, size, s: abi::Scalar, ty| {
210+
let range = alloc_range(start, size);
208211
match alloc.0.read_scalar(
209212
bx,
210-
alloc_range(start, size),
213+
range,
211214
/*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
212215
) {
213-
Ok(val) => bx.scalar_to_backend(val, s, ty),
214-
Err(_) => bx.const_poison(ty),
216+
Ok(val) => Some(bx.scalar_to_backend(val, s, ty)),
217+
Err(_) => {
218+
// We may have failed due to partial provenance or unexpected provenance,
219+
// continue down the normal code path if so.
220+
if alloc.0.provenance().range_empty(range, &bx.tcx())
221+
// Since `read_scalar` failed, but there were no relocations involved, the
222+
// bytes must be partially or fully uninitialized. Thus we can now unwrap the
223+
// information about the range of uninit bytes and check if it's the full range.
224+
&& alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range
225+
{
226+
Some(bx.const_undef(ty))
227+
} else {
228+
None
229+
}
230+
}
215231
}
216232
};
217233

@@ -222,16 +238,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
222238
// check that walks over the type of `mplace` to make sure it is truly correct to treat this
223239
// like a `Scalar` (or `ScalarPair`).
224240
match layout.backend_repr {
225-
BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
241+
BackendRepr::Scalar(s) => {
226242
let size = s.size(bx);
227243
assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
228-
let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
229-
OperandRef { val: OperandValue::Immediate(val), layout }
244+
if let Some(val) = read_scalar(offset, size, s, bx.immediate_backend_type(layout)) {
245+
return OperandRef { val: OperandValue::Immediate(val), layout };
246+
}
230247
}
231-
BackendRepr::ScalarPair(
232-
a @ abi::Scalar::Initialized { .. },
233-
b @ abi::Scalar::Initialized { .. },
234-
) => {
248+
BackendRepr::ScalarPair(a, b) => {
235249
let (a_size, b_size) = (a.size(bx), b.size(bx));
236250
let b_offset = (offset + a_size).align_to(b.align(bx).abi);
237251
assert!(b_offset.bytes() > 0);
@@ -247,20 +261,21 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
247261
b,
248262
bx.scalar_pair_element_backend_type(layout, 1, true),
249263
);
250-
OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
251-
}
252-
_ if layout.is_zst() => OperandRef::zero_sized(layout),
253-
_ => {
254-
// Neither a scalar nor scalar pair. Load from a place
255-
// FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
256-
// same `ConstAllocation`?
257-
let init = bx.const_data_from_alloc(alloc);
258-
let base_addr = bx.static_addr_of(init, alloc_align, None);
259-
260-
let llval = bx.const_ptr_byte_offset(base_addr, offset);
261-
bx.load_operand(PlaceRef::new_sized(llval, layout))
264+
if let (Some(a_val), Some(b_val)) = (a_val, b_val) {
265+
return OperandRef { val: OperandValue::Pair(a_val, b_val), layout };
266+
}
262267
}
268+
_ if layout.is_zst() => return OperandRef::zero_sized(layout),
269+
_ => {}
263270
}
271+
// Neither a scalar nor scalar pair. Load from a place
272+
// FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
273+
// same `ConstAllocation`?
274+
let init = bx.const_data_from_alloc(alloc);
275+
let base_addr = bx.static_addr_of(init, alloc_align, None);
276+
277+
let llval = bx.const_ptr_byte_offset(base_addr, offset);
278+
bx.load_operand(PlaceRef::new_sized(llval, layout))
264279
}
265280

266281
/// Asserts that this operand refers to a scalar and returns

compiler/rustc_codegen_ssa/src/mir/rvalue.rs

+24-2
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
88
use rustc_middle::{bug, mir, span_bug};
99
use rustc_session::config::OptLevel;
1010
use rustc_span::{DUMMY_SP, Span};
11-
use tracing::{debug, instrument};
11+
use tracing::{debug, instrument, trace};
1212

1313
use super::operand::{OperandRef, OperandValue};
1414
use super::place::PlaceRef;
@@ -93,6 +93,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
9393
return;
9494
}
9595

96+
// If `v` is an integer constant whose value is just a single byte repeated N times,
97+
// emit a `memset` filling the entire `dest` with that byte.
9698
let try_init_all_same = |bx: &mut Bx, v| {
9799
let start = dest.val.llval;
98100
let size = bx.const_usize(dest.layout.size.bytes());
@@ -117,13 +119,33 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
117119
false
118120
};
119121

122+
trace!(?cg_elem.val);
120123
match cg_elem.val {
121124
OperandValue::Immediate(v) => {
122125
if try_init_all_same(bx, v) {
123126
return;
124127
}
125128
}
126-
_ => (),
129+
OperandValue::Pair(a, b) => {
130+
let a_is_undef = bx.cx().is_undef(a);
131+
match (a_is_undef, bx.cx().is_undef(b)) {
132+
// Can happen for uninit unions
133+
(true, true) => {
134+
// FIXME: can we produce better output here?
135+
}
136+
(false, true) | (true, false) => {
137+
let val = if a_is_undef { b } else { a };
138+
if try_init_all_same(bx, val) {
139+
return;
140+
}
141+
}
142+
(false, false) => {
143+
// FIXME: if both are the same value, use try_init_all_same
144+
}
145+
}
146+
}
147+
OperandValue::ZeroSized => unreachable!("checked above"),
148+
OperandValue::Ref(..) => {}
127149
}
128150

129151
let count = self

compiler/rustc_codegen_ssa/src/traits/consts.rs

+1
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ pub trait ConstCodegenMethods<'tcx>: BackendTypes {
99
/// Generate an uninitialized value (matching uninitialized memory in MIR).
1010
/// Whether memory is initialized or not is tracked byte-for-byte.
1111
fn const_undef(&self, t: Self::Type) -> Self::Value;
12+
fn is_undef(&self, v: Self::Value) -> bool;
1213
/// Generate a fake value. Poison always affects the entire value, even if just a single byte is
1314
/// poison. This can only be used in codepaths that are already UB, i.e., UB-free Rust code
1415
/// (including code that e.g. copies uninit memory with `MaybeUninit`) can never encounter a

compiler/rustc_middle/src/mir/interpret/allocation.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,7 @@ impl AllocError {
222222
}
223223

224224
/// The information that makes up a memory access: offset and size.
225-
#[derive(Copy, Clone)]
225+
#[derive(Copy, Clone, PartialEq)]
226226
pub struct AllocRange {
227227
pub start: Size,
228228
pub size: Size,

tests/codegen/overaligned-constant.rs

-2
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,6 @@ pub fn overaligned_constant() {
1717
// CHECK-LABEL: @overaligned_constant
1818
// CHECK: [[full:%_.*]] = alloca [32 x i8], align 8
1919
// CHECK: call void @llvm.memcpy.p0.p0.i64(ptr align 8 [[full]], ptr align 8 @0, i64 32, i1 false)
20-
// CHECK: %b.0 = load i32, ptr @0, align 4
21-
// CHECK: %b.1 = load i32, ptr getelementptr inbounds ({{.*}}), align 4
2220
let mut s = S(1);
2321

2422
s.0 = 3;

tests/codegen/slice-init.rs

+52-3
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,8 @@
22

33
#![crate_type = "lib"]
44

5+
use std::mem::MaybeUninit;
6+
57
// CHECK-LABEL: @zero_sized_elem
68
#[no_mangle]
79
pub fn zero_sized_elem() {
@@ -76,17 +78,64 @@ pub fn u16_init_one_bytes() -> [u16; N] {
7678
[const { u16::from_be_bytes([1, 1]) }; N]
7779
}
7880

79-
// FIXME: undef bytes can just be initialized with the same value as the
80-
// defined bytes, if the defines bytes are all the same.
8181
// CHECK-LABEL: @option_none_init
8282
#[no_mangle]
8383
pub fn option_none_init() -> [Option<u8>; N] {
84+
// CHECK-NOT: select
85+
// CHECK-NOT: br
86+
// CHECK-NOT: switch
87+
// CHECK-NOT: icmp
88+
// CHECK: call void @llvm.memset.p0
89+
[const { None }; N]
90+
}
91+
92+
// If there is partial provenance or some bytes are initialized and some are not,
93+
// we can't really do better than initialize bytes or groups of bytes together.
94+
// CHECK-LABEL: @option_maybe_uninit_init
95+
#[no_mangle]
96+
pub fn option_maybe_uninit_init() -> [MaybeUninit<u16>; N] {
97+
// CHECK-NOT: select
98+
// CHECK: br label %repeat_loop_header{{.*}}
99+
// CHECK-NOT: switch
100+
// CHECK: icmp
101+
// CHECK-NOT: call void @llvm.memset.p0
102+
[const {
103+
let mut val: MaybeUninit<u16> = MaybeUninit::uninit();
104+
let ptr = val.as_mut_ptr() as *mut u8;
105+
unsafe {
106+
ptr.write(0);
107+
}
108+
val
109+
}; N]
110+
}
111+
112+
#[repr(packed)]
113+
struct Packed {
114+
start: u8,
115+
ptr: &'static (),
116+
rest: u16,
117+
rest2: u8,
118+
}
119+
120+
// If there is partial provenance or some bytes are initialized and some are not,
121+
// we can't really do better than initialize bytes or groups of bytes together.
122+
// CHECK-LABEL: @option_maybe_uninit_provenance
123+
#[no_mangle]
124+
pub fn option_maybe_uninit_provenance() -> [MaybeUninit<Packed>; N] {
84125
// CHECK-NOT: select
85126
// CHECK: br label %repeat_loop_header{{.*}}
86127
// CHECK-NOT: switch
87128
// CHECK: icmp
88129
// CHECK-NOT: call void @llvm.memset.p0
89-
[None; N]
130+
[const {
131+
let mut val: MaybeUninit<Packed> = MaybeUninit::uninit();
132+
unsafe {
133+
let ptr = &raw mut (*val.as_mut_ptr()).ptr;
134+
static HAS_ADDR: () = ();
135+
ptr.write_unaligned(&HAS_ADDR);
136+
}
137+
val
138+
}; N]
90139
}
91140

92141
// Use an opaque function to prevent rustc from removing useless drops.

0 commit comments

Comments
 (0)