Unrolled build for rust-lang#129724
Rollup merge of rust-lang#129724 - nnethercote:rm-Option-bang, r=fee1-dead

Remove `Option<!>` return types.

Several compiler functions have `Option<!>` for their return type. That's odd. The only valid return value is `None`, so why is this type used?

Because it lets you write certain patterns slightly more concisely. E.g. if you have these common patterns:
```
    let Some(a) = f() else { return };
    let Ok(b) = g() else { return };
```
you can shorten them to these:
```
    let a = f()?;
    let b = g().ok()?;
```
Huh.
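For concreteness, here is a minimal sketch of both versions side by side. The helpers `f` and `g` are hypothetical (not compiler code), and outside of rustc the `Option<!>` signature only compiles on nightly with `#![feature(never_type)]`:
```
#![feature(never_type)] // `!` inside `Option<_>` is unstable outside the compiler

// Hypothetical helpers, for illustration only.
fn f() -> Option<u32> { Some(1) }
fn g() -> Result<u32, ()> { Ok(2) }

// `?` works here only because the enclosing function returns `Option<_>`;
// since it never produces a `Some`, the payload type is `!` (never).
fn terse(out: &mut Vec<u32>) -> Option<!> {
    let a = f()?;
    let b = g().ok()?;
    out.push(a + b);
    None
}

// The same logic with a unit return type and explicit early returns.
fn explicit(out: &mut Vec<u32>) {
    let Some(a) = f() else { return };
    let Ok(b) = g() else { return };
    out.push(a + b);
}
```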

An `Option` return type typically designates success/failure. How should I interpret the type signature of a function that always returns (i.e. doesn't panic), does useful work (modifying `&mut` arguments), and yet only ever fails? This idiom subverts the type system for a cute syntactic trick.

Furthermore, returning `Option<!>` from a function F makes things syntactically more convenient within F, but makes things worse at F's callsites. The callsites can themselves use `?` with F but should not, because they will get an unconditional early return, which is almost certainly not desirable. Instead the return value should be ignored. (Note that some of the callsites of `process_operand`, `process_immediate`, and `process_assign` actually do use `?`, though the early return doesn't matter in these cases because nothing of significance comes after those calls. Ugh.)
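A hedged illustration of that callsite hazard, again with hypothetical functions (and the same nightly feature gate) rather than the actual compiler code:
```
// Always returns `None`; its real work is the side effect on `out`.
fn process(out: &mut Vec<u32>) -> Option<!> {
    out.push(1);
    None
}

fn caller(out: &mut Vec<u32>) -> Option<usize> {
    // This compiles, but `process` always returns `None`, so the `?` is an
    // unconditional early return and nothing after it ever runs.
    process(out)?;
    out.push(2); // effectively dead code
    Some(out.len())
}
```
Once `process` returns `()` instead, the `?` at the callsite no longer compiles, which is exactly the kind of misuse the rewrite rules out.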

When I first saw this pattern I had no idea how to interpret it, and it took me several minutes of close reading to understand everything I've written above. I even started a Zulip thread about it to make sure I understood it properly. "Save a few characters by introducing types so weird that compiler devs have to discuss it on Zulip" feels like a bad trade-off to me. This commit removes all the `Option<!>` return types and uses `else`/`return` (or something similar) in place of the relevant `?` uses. The result is slightly more verbose but much easier to understand.

r? `@cjgillot`
rust-timer authored Aug 31, 2024
2 parents fa72f07 + fa4f892 commit d101b3f
Showing 3 changed files with 68 additions and 66 deletions.
12 changes: 7 additions & 5 deletions compiler/rustc_mir_transform/src/dataflow_const_prop.rs
@@ -382,7 +382,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
place: PlaceIndex,
mut operand: OpTy<'tcx>,
projection: &[PlaceElem<'tcx>],
) -> Option<!> {
) {
for &(mut proj_elem) in projection {
if let PlaceElem::Index(index) = proj_elem {
if let FlatSet::Elem(index) = state.get(index.into(), &self.map)
@@ -391,10 +391,14 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
{
proj_elem = PlaceElem::ConstantIndex { offset, min_length, from_end: false };
} else {
return None;
return;
}
}
operand = self.ecx.project(&operand, proj_elem).ok()?;
operand = if let Ok(operand) = self.ecx.project(&operand, proj_elem) {
operand
} else {
return;
}
}

self.map.for_each_projection_value(
@@ -426,8 +430,6 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
}
},
);

None
}

fn binary_op(
110 changes: 56 additions & 54 deletions compiler/rustc_mir_transform/src/jump_threading.rs
@@ -191,26 +191,26 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {

/// Recursion entry point to find threading opportunities.
#[instrument(level = "trace", skip(self))]
fn start_from_switch(&mut self, bb: BasicBlock) -> Option<!> {
fn start_from_switch(&mut self, bb: BasicBlock) {
let bbdata = &self.body[bb];
if bbdata.is_cleanup || self.loop_headers.contains(bb) {
return None;
return;
}
let (discr, targets) = bbdata.terminator().kind.as_switch()?;
let discr = discr.place()?;
let Some((discr, targets)) = bbdata.terminator().kind.as_switch() else { return };
let Some(discr) = discr.place() else { return };
debug!(?discr, ?bb);

let discr_ty = discr.ty(self.body, self.tcx).ty;
let discr_layout = self.ecx.layout_of(discr_ty).ok()?;
let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return };

let discr = self.map.find(discr.as_ref())?;
let Some(discr) = self.map.find(discr.as_ref()) else { return };
debug!(?discr);

let cost = CostChecker::new(self.tcx, self.param_env, None, self.body);
let mut state = State::new_reachable();

let conds = if let Some((value, then, else_)) = targets.as_static_if() {
let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
self.arena.alloc_from_iter([
Condition { value, polarity: Polarity::Eq, target: then },
Condition { value, polarity: Polarity::Ne, target: else_ },
@@ -225,7 +225,6 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
state.insert_value_idx(discr, conds, self.map);

self.find_opportunity(bb, state, cost, 0);
None
}

/// Recursively walk statements backwards from this bb's terminator to find threading
@@ -364,18 +363,17 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
lhs: PlaceIndex,
rhs: ImmTy<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
let register_opportunity = |c: Condition| {
debug!(?bb, ?c.target, "register");
self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target })
};

let conditions = state.try_get_idx(lhs, self.map)?;
if let Immediate::Scalar(Scalar::Int(int)) = *rhs {
if let Some(conditions) = state.try_get_idx(lhs, self.map)
&& let Immediate::Scalar(Scalar::Int(int)) = *rhs
{
conditions.iter_matches(int).for_each(register_opportunity);
}

None
}

/// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
@@ -428,22 +426,23 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
lhs: PlaceIndex,
rhs: &Operand<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
match rhs {
// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
Operand::Constant(constant) => {
let constant =
self.ecx.eval_mir_constant(&constant.const_, constant.span, None).ok()?;
let Ok(constant) =
self.ecx.eval_mir_constant(&constant.const_, constant.span, None)
else {
return;
};
self.process_constant(bb, lhs, constant, state);
}
// Transfer the conditions on the copied rhs.
Operand::Move(rhs) | Operand::Copy(rhs) => {
let rhs = self.map.find(rhs.as_ref())?;
let Some(rhs) = self.map.find(rhs.as_ref()) else { return };
state.insert_place_idx(rhs, lhs, self.map);
}
}

None
}

#[instrument(level = "trace", skip(self))]
@@ -453,32 +452,34 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
lhs_place: &Place<'tcx>,
rhs: &Rvalue<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
let lhs = self.map.find(lhs_place.as_ref())?;
) {
let Some(lhs) = self.map.find(lhs_place.as_ref()) else { return };
match rhs {
Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state)?,
Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state),
// Transfer the conditions on the copy rhs.
Rvalue::CopyForDeref(rhs) => {
self.process_operand(bb, lhs, &Operand::Copy(*rhs), state)?
}
Rvalue::CopyForDeref(rhs) => self.process_operand(bb, lhs, &Operand::Copy(*rhs), state),
Rvalue::Discriminant(rhs) => {
let rhs = self.map.find_discr(rhs.as_ref())?;
let Some(rhs) = self.map.find_discr(rhs.as_ref()) else { return };
state.insert_place_idx(rhs, lhs, self.map);
}
// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
Rvalue::Aggregate(box ref kind, ref operands) => {
let agg_ty = lhs_place.ty(self.body, self.tcx).ty;
let lhs = match kind {
// Do not support unions.
AggregateKind::Adt(.., Some(_)) => return None,
AggregateKind::Adt(.., Some(_)) => return,
AggregateKind::Adt(_, variant_index, ..) if agg_ty.is_enum() => {
if let Some(discr_target) = self.map.apply(lhs, TrackElem::Discriminant)
&& let Ok(discr_value) =
self.ecx.discriminant_for_variant(agg_ty, *variant_index)
{
self.process_immediate(bb, discr_target, discr_value, state);
}
self.map.apply(lhs, TrackElem::Variant(*variant_index))?
if let Some(idx) = self.map.apply(lhs, TrackElem::Variant(*variant_index)) {
idx
} else {
return;
}
}
_ => lhs,
};
@@ -490,8 +491,8 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
}
// Transfer the conditions on the copy rhs, after inversing polarity.
Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => {
let conditions = state.try_get_idx(lhs, self.map)?;
let place = self.map.find(place.as_ref())?;
let Some(conditions) = state.try_get_idx(lhs, self.map) else { return };
let Some(place) = self.map.find(place.as_ref()) else { return };
let conds = conditions.map(self.arena, Condition::inv);
state.insert_value_idx(place, conds, self.map);
}
@@ -502,21 +503,25 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
box (Operand::Move(place) | Operand::Copy(place), Operand::Constant(value))
| box (Operand::Constant(value), Operand::Move(place) | Operand::Copy(place)),
) => {
let conditions = state.try_get_idx(lhs, self.map)?;
let place = self.map.find(place.as_ref())?;
let Some(conditions) = state.try_get_idx(lhs, self.map) else { return };
let Some(place) = self.map.find(place.as_ref()) else { return };
let equals = match op {
BinOp::Eq => ScalarInt::TRUE,
BinOp::Ne => ScalarInt::FALSE,
_ => return None,
_ => return,
};
if value.const_.ty().is_floating_point() {
// Floating point equality does not follow bit-patterns.
// -0.0 and NaN both have special rules for equality,
// and therefore we cannot use integer comparisons for them.
// Avoid handling them, though this could be extended in the future.
return None;
return;
}
let value = value.const_.normalize(self.tcx, self.param_env).try_to_scalar_int()?;
let Some(value) =
value.const_.normalize(self.tcx, self.param_env).try_to_scalar_int()
else {
return;
};
let conds = conditions.map(self.arena, |c| Condition {
value,
polarity: if c.matches(equals) { Polarity::Eq } else { Polarity::Ne },
Expand All @@ -527,8 +532,6 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {

_ => {}
}

None
}

#[instrument(level = "trace", skip(self))]
@@ -537,7 +540,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
bb: BasicBlock,
stmt: &Statement<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
let register_opportunity = |c: Condition| {
debug!(?bb, ?c.target, "register");
self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target })
@@ -550,12 +553,12 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
// If we expect `discriminant(place) ?= A`,
// we have an opportunity if `variant_index ?= A`.
StatementKind::SetDiscriminant { box place, variant_index } => {
let discr_target = self.map.find_discr(place.as_ref())?;
let Some(discr_target) = self.map.find_discr(place.as_ref()) else { return };
let enum_ty = place.ty(self.body, self.tcx).ty;
// `SetDiscriminant` may be a no-op if the assigned variant is the untagged variant
// of a niche encoding. If we cannot ensure that we write to the discriminant, do
// nothing.
let enum_layout = self.ecx.layout_of(enum_ty).ok()?;
let Ok(enum_layout) = self.ecx.layout_of(enum_ty) else { return };
let writes_discriminant = match enum_layout.variants {
Variants::Single { index } => {
assert_eq!(index, *variant_index);
@@ -568,24 +571,25 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
} => *variant_index != untagged_variant,
};
if writes_discriminant {
let discr = self.ecx.discriminant_for_variant(enum_ty, *variant_index).ok()?;
self.process_immediate(bb, discr_target, discr, state)?;
let Ok(discr) = self.ecx.discriminant_for_variant(enum_ty, *variant_index)
else {
return;
};
self.process_immediate(bb, discr_target, discr, state);
}
}
// If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`.
StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(
Operand::Copy(place) | Operand::Move(place),
)) => {
let conditions = state.try_get(place.as_ref(), self.map)?;
let Some(conditions) = state.try_get(place.as_ref(), self.map) else { return };
conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity);
}
StatementKind::Assign(box (lhs_place, rhs)) => {
self.process_assign(bb, lhs_place, rhs, state)?;
self.process_assign(bb, lhs_place, rhs, state);
}
_ => {}
}

None
}

#[instrument(level = "trace", skip(self, state, cost))]
@@ -638,17 +642,17 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
targets: &SwitchTargets,
target_bb: BasicBlock,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
debug_assert_ne!(target_bb, START_BLOCK);
debug_assert_eq!(self.body.basic_blocks.predecessors()[target_bb].len(), 1);

let discr = discr.place()?;
let Some(discr) = discr.place() else { return };
let discr_ty = discr.ty(self.body, self.tcx).ty;
let discr_layout = self.ecx.layout_of(discr_ty).ok()?;
let conditions = state.try_get(discr.as_ref(), self.map)?;
let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return };
let Some(conditions) = state.try_get(discr.as_ref(), self.map) else { return };

if let Some((value, _)) = targets.iter().find(|&(_, target)| target == target_bb) {
let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
debug_assert_eq!(targets.iter().filter(|&(_, target)| target == target_bb).count(), 1);

// We are inside `target_bb`. Since we have a single predecessor, we know we passed
Expand All @@ -662,7 +666,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
} else if let Some((value, _, else_bb)) = targets.as_static_if()
&& target_bb == else_bb
{
let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };

// We only know that `discr != value`. That's much weaker information than
// the equality we had in the previous arm. All we can conclude is that
Expand All @@ -675,8 +679,6 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
}
}
}

None
}
}

12 changes: 5 additions & 7 deletions compiler/rustc_mir_transform/src/known_panics_lint.rs
@@ -469,12 +469,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
msg: &AssertKind<Operand<'tcx>>,
cond: &Operand<'tcx>,
location: Location,
) -> Option<!> {
let value = &self.eval_operand(cond)?;
) {
let Some(value) = &self.eval_operand(cond) else { return };
trace!("assertion on {:?} should be {:?}", value, expected);

let expected = Scalar::from_bool(expected);
let value_const = self.use_ecx(|this| this.ecx.read_scalar(value))?;
let Some(value_const) = self.use_ecx(|this| this.ecx.read_scalar(value)) else { return };

if expected != value_const {
// Poison all places this operand references so that further code
@@ -516,14 +516,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
AssertKind::BoundsCheck { len, index }
}
// Remaining overflow errors are already covered by checks on the binary operators.
AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => return None,
AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => return,
// Need proper const propagator for these.
_ => return None,
_ => return,
};
self.report_assert_as_lint(location, AssertLintKind::UnconditionalPanic, msg);
}

None
}

fn ensure_not_propagated(&self, local: Local) {
