15 changes: 13 additions & 2 deletions compiler/rustc_borrowck/src/type_check/canonical.rs
@@ -343,8 +343,15 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
return;
}

// FIXME: Ideally MIR types are normalized, but this is not always true.
let mir_ty = self.normalize(mir_ty, Locations::All(span));
// This is a hack. `body.local_decls` are not necessarily normalized in the old
// solver due to not deeply normalizing in writeback. So we must re-normalize here.
//
// I am not sure of a test case where this actually matters. There is a similar
// hack in `equate_inputs_and_outputs` which does have associated test cases.
let mir_ty = match self.infcx.next_trait_solver() {
true => mir_ty,
false => self.normalize(mir_ty, Locations::All(span)),
};

let cause = ObligationCause::dummy_with_span(span);
let param_env = self.infcx.param_env;
@@ -353,6 +360,10 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
ConstraintCategory::Boring,
type_op::custom::CustomTypeOp::new(
|ocx| {
// The `AscribeUserType` query would normally emit a WF
// obligation for the unnormalized `user_ty` here. This is
// where the "incorrectly skips the WF checks we normally do"
// happens.
let user_ty = ocx.normalize(&cause, param_env, user_ty);
ocx.eq(&cause, param_env, user_ty, mir_ty)?;
Ok(())
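
The normalization hack above is easier to see in ordinary Rust than in compiler internals. Below is a minimal sketch of the kind of ascription involved: a user-written associated-type projection that only matches the MIR local's type after normalization. Whether this exact snippet exercises the old-solver re-normalization path is an assumption made purely for illustration.

```rust
// Minimal sketch in plain Rust (not compiler code). The ascribed type is written
// as an associated-type projection, so the type checker must normalize it before
// it can be equated with the MIR local's type.
trait Project {
    type Out;
}

impl Project for u32 {
    type Out = u8;
}

fn main() {
    // `<u32 as Project>::Out` is the user-written (unnormalized) type; the MIR
    // local for `x` carries the normalized type `u8`.
    let x: <u32 as Project>::Out = 5;
    println!("{x}");
}
```
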
77 changes: 54 additions & 23 deletions compiler/rustc_borrowck/src/type_check/input_output.rs
@@ -126,6 +126,31 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
}

// FIXME(BoxyUwU): This should probably be part of a larger borrowck dev-guide chapter
//
/// Enforce that the types of the locals corresponding to the inputs and output of
/// the body are equal to those of the (normalized) signature.
///
/// This is necessary for two reasons:
/// - Locals in the MIR all start out with `'erased` regions and then are replaced
/// with unconstrained nll vars. If we have a function returning `&'a u32` then
/// the local `_0: &'?10 u32` needs to have its region var equated with the nll
/// var representing `'a`, i.e. borrow check must uphold that `'?10 = 'a`.
/// - When computing the normalized signature we may introduce new unconstrained nll
/// vars due to higher ranked where clauses ([#136547]). We then wind up with implied
/// bounds involving these vars.
///
/// For this reason it is important that we equate with the *normalized* signature
/// which was produced when computing implied bounds. If we do not do so then we will
/// wind up with implied bounds on nll vars which cannot actually be used, because the
/// nll var never gets related to anything.
///
/// For 'closure-like' bodies this function effectively relates the *inferred* signature
/// of the closure against the locals corresponding to the closure's inputs/output. It *does
/// not* relate the user-provided types for the signature to the locals; that is handled
/// separately by [`TypeChecker::check_signature_annotation`].
///
/// [#136547]: <https://www.github.com/rust-lang/rust/issues/136547>
#[instrument(skip(self), level = "debug")]
pub(super) fn equate_inputs_and_outputs(&mut self, normalized_inputs_and_output: &[Ty<'tcx>]) {
let (&normalized_output_ty, normalized_input_tys) =
@@ -173,38 +198,44 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
);
}

// Return types are a bit more complex. They may contain opaque `impl Trait` types.
let mir_output_ty = self.body.local_decls[RETURN_PLACE].ty;
// Equate expected output ty with the type of the RETURN_PLACE in MIR
let mir_output_ty = self.body.return_ty();
let output_span = self.body.local_decls[RETURN_PLACE].source_info.span;
self.equate_normalized_input_or_output(normalized_output_ty, mir_output_ty, output_span);
}

#[instrument(skip(self), level = "debug")]
fn equate_normalized_input_or_output(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, span: Span) {
if self.infcx.next_trait_solver() {
return self
.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
.unwrap_or_else(|terr| {
span_mirbug!(
self,
Location::START,
"equate_normalized_input_or_output: `{a:?}=={b:?}` failed with `{terr:?}`",
);
});
}

// This is a hack. `body.local_decls` are not necessarily normalized in the old
// solver due to not deeply normalizing in writeback. So we must re-normalize here.
//
// However, in most cases normalizing is unnecessary so we only do so if it may be
// necessary for type equality to hold. This leads to some (very minor) performance
// wins.
if let Err(_) =
self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
{
// FIXME(jackh726): This is a hack. It's somewhat like
// `rustc_traits::normalize_after_erasing_regions`. Ideally, we'd
// like to normalize *before* inserting into `local_decls`, but
// doing so ends up causing some other trouble.
let b = self.normalize(b, Locations::All(span));

// Note: if we have to introduce new placeholders during normalization above, then we
// won't have added those universes to the universe info, which we would want in
// `relate_tys`.
if let Err(terr) =
self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
{
span_mirbug!(
self,
Location::START,
"equate_normalized_input_or_output: `{:?}=={:?}` failed with `{:?}`",
a,
b,
terr
);
}
}
self.eq_types(a, b, Locations::All(span), ConstraintCategory::BoringNoLocation)
.unwrap_or_else(|terr| {
span_mirbug!(
self,
Location::START,
"equate_normalized_input_or_output: `{a:?}=={b:?}` failed with `{terr:?}`",
);
});
};
}
}
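
To make the doc comment on `equate_inputs_and_outputs` concrete, here is a toy example in plain Rust of the two cases it describes. The MIR details in the comments are a simplified paraphrase, not compiler output.

```rust
// `first` has an explicit signature: in MIR its return local `_0` starts out as
// `&'erased u32`, and borrowck must equate its fresh region variable with the
// nll var standing in for `'a`.
fn first<'a>(xs: &'a [u32]) -> &'a u32 {
    &xs[0]
}

fn main() {
    let data = vec![1, 2, 3];
    println!("{}", first(&data));

    // For the closure, the signature `fn(&u32) -> u32` is *inferred*; it is that
    // inferred signature which gets related to the closure body's locals, while
    // user-written annotations go through `check_signature_annotation` instead.
    let double = |x: &u32| *x * 2;
    println!("{}", double(&5));
}
```
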
23 changes: 13 additions & 10 deletions compiler/rustc_borrowck/src/type_check/mod.rs
@@ -123,16 +123,19 @@ pub(crate) fn type_check<'tcx>(
known_type_outlives_obligations,
} = free_region_relations::create(infcx, universal_regions, &mut constraints);

let pre_obligations = infcx.take_registered_region_obligations();
assert!(
pre_obligations.is_empty(),
"there should be no incoming region obligations = {pre_obligations:#?}",
);
let pre_assumptions = infcx.take_registered_region_assumptions();
assert!(
pre_assumptions.is_empty(),
"there should be no incoming region assumptions = {pre_assumptions:#?}",
);
{
// Scope these variables so it's clear they're not used later
let pre_obligations = infcx.take_registered_region_obligations();
assert!(
pre_obligations.is_empty(),
"there should be no incoming region obligations = {pre_obligations:#?}",
);
let pre_assumptions = infcx.take_registered_region_assumptions();
assert!(
pre_assumptions.is_empty(),
"there should be no incoming region assumptions = {pre_assumptions:#?}",
);
}

debug!(?normalized_inputs_and_output);

5 changes: 2 additions & 3 deletions compiler/rustc_middle/src/ty/util.rs
@@ -609,9 +609,8 @@ impl<'tcx> TyCtxt<'tcx> {
/// have the same `DefKind`.
///
/// Note that closures have a `DefId`, but the closure *expression* also has a
/// `HirId` that is located within the context where the closure appears (and, sadly,
/// a corresponding `NodeId`, since those are not yet phased out). The parent of
/// the closure's `DefId` will also be the context where it appears.
/// `HirId` that is located within the context where the closure appears. The
/// parent of the closure's `DefId` will also be the context where it appears.
pub fn is_closure_like(self, def_id: DefId) -> bool {
matches!(self.def_kind(def_id), DefKind::Closure)
}
10 changes: 10 additions & 0 deletions compiler/rustc_parse/messages.ftl
@@ -822,9 +822,19 @@ parse_struct_literal_body_without_path =
struct literal body without path
.suggestion = you might have forgotten to add the struct literal inside the block

parse_struct_literal_body_without_path_late =
struct literal body without path
.label = struct name missing for struct literal
.suggestion = add the correct type

parse_struct_literal_not_allowed_here = struct literals are not allowed here
.suggestion = surround the struct literal with parentheses

parse_struct_literal_placeholder_path =
placeholder `_` is not allowed for the path in struct literals
.label = not allowed in struct literals
.suggestion = replace it with the correct type

parse_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
.help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)

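
For reference, the kind of source these two new messages target looks roughly like the snippet below. The erroneous lines are commented out so the example still compiles, and the rendered diagnostics may differ from the raw message templates above.

```rust
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    // `parse_struct_literal_placeholder_path`: the path of the struct literal is
    // the placeholder `_`.
    // let p = _ { x: 1, y: 2 };

    // `parse_struct_literal_body_without_path_late`: a struct literal body that
    // is missing its path entirely.
    // let q = { x: 1, y: 2 };

    // What the suggestions steer towards: name the concrete type.
    let p = Point { x: 1, y: 2 };
    println!("{} {}", p.x, p.y);
}
```
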
19 changes: 19 additions & 0 deletions compiler/rustc_parse/src/errors.rs
@@ -3684,3 +3684,22 @@ pub(crate) struct ImplReuseInherentImpl {
#[primary_span]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_struct_literal_placeholder_path)]
pub(crate) struct StructLiteralPlaceholderPath {
#[primary_span]
#[label]
#[suggestion(applicability = "has-placeholders", code = "/* Type */", style = "verbose")]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_struct_literal_body_without_path_late)]
pub(crate) struct StructLiteralWithoutPathLate {
#[primary_span]
#[label]
pub span: Span,
#[suggestion(applicability = "has-placeholders", code = "/* Type */ ", style = "verbose")]
pub suggestion_span: Span,
}
45 changes: 45 additions & 0 deletions compiler/rustc_parse/src/parser/expr.rs
@@ -1468,6 +1468,9 @@ impl<'a> Parser<'a> {
} else if this.check(exp!(OpenParen)) {
this.parse_expr_tuple_parens(restrictions)
} else if this.check(exp!(OpenBrace)) {
if let Some(expr) = this.maybe_recover_bad_struct_literal_path(false)? {
return Ok(expr);
}
this.parse_expr_block(None, lo, BlockCheckMode::Default)
} else if this.check(exp!(Or)) || this.check(exp!(OrOr)) {
this.parse_expr_closure().map_err(|mut err| {
@@ -1542,6 +1545,9 @@ impl<'a> Parser<'a> {
} else if this.check_keyword(exp!(Let)) {
this.parse_expr_let(restrictions)
} else if this.eat_keyword(exp!(Underscore)) {
if let Some(expr) = this.maybe_recover_bad_struct_literal_path(true)? {
return Ok(expr);
}
Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
} else if this.token_uninterpolated_span().at_least_rust_2018() {
// `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
@@ -3698,6 +3704,45 @@ impl<'a> Parser<'a> {
}
}

fn maybe_recover_bad_struct_literal_path(
&mut self,
is_underscore_entry_point: bool,
) -> PResult<'a, Option<Box<Expr>>> {
if self.may_recover()
&& self.check_noexpect(&token::OpenBrace)
&& (!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
&& self.is_likely_struct_lit())
{
let span = if is_underscore_entry_point {
self.prev_token.span
} else {
self.token.span.shrink_to_lo()
};

self.bump(); // {
let expr = self.parse_expr_struct(
None,
Path::from_ident(Ident::new(kw::Underscore, span)),
false,
)?;

let guar = if is_underscore_entry_point {
self.dcx().create_err(errors::StructLiteralPlaceholderPath { span }).emit()
} else {
self.dcx()
.create_err(errors::StructLiteralWithoutPathLate {
span: expr.span,
suggestion_span: expr.span.shrink_to_lo(),
})
.emit()
};

Ok(Some(self.mk_expr_err(expr.span, guar)))
} else {
Ok(None)
}
}

pub(super) fn parse_struct_fields(
&mut self,
pth: ast::Path,
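
The new `maybe_recover_bad_struct_literal_path` follows the parser's usual recover-and-continue shape: detect the mistake, emit a diagnostic, and still return an expression node (via `mk_expr_err`) so parsing can proceed. A standalone sketch of that shape, using invented `ToyParser`/`ToyExpr` types rather than rustc's real API:

```rust
// `ToyParser`, `ToyExpr`, and the message strings are invented for illustration;
// they are not rustc types.
#[derive(Debug)]
enum ToyExpr {
    Struct { path: String, fields: Vec<(String, i64)> },
    Error,
}

struct ToyParser {
    errors: Vec<String>,
}

impl ToyParser {
    /// Called when a `{ field: value, .. }` body shows up with no path before it.
    fn recover_struct_lit_without_path(&mut self, fields: Vec<(String, i64)>) -> ToyExpr {
        // Still parse the body under a placeholder path (the PR uses `kw::Underscore`)
        // so the tokens are consumed and parsing can keep going ...
        let recovered = ToyExpr::Struct { path: "_".to_string(), fields };
        // ... then report the problem and hand back an error node, mirroring
        // `mk_expr_err`, instead of aborting the parse.
        self.errors.push(format!("struct literal body without path: {recovered:?}"));
        ToyExpr::Error
    }
}

fn main() {
    let mut parser = ToyParser { errors: Vec::new() };
    let expr = parser.recover_struct_lit_without_path(vec![("x".into(), 1), ("y".into(), 2)]);
    println!("recovered node: {expr:?}");
    println!("collected errors: {:?}", parser.errors);
}
```
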
39 changes: 18 additions & 21 deletions compiler/rustc_query_impl/src/plumbing.rs
@@ -35,6 +35,8 @@ use rustc_span::def_id::LOCAL_CRATE;

use crate::QueryConfigRestored;

/// Implements [`QueryContext`] for use by [`rustc_query_system`], since that
/// crate does not have direct access to [`TyCtxt`].
#[derive(Copy, Clone)]
pub struct QueryCtxt<'tcx> {
pub tcx: TyCtxt<'tcx>,
@@ -47,15 +49,6 @@ impl<'tcx> QueryCtxt<'tcx> {
}
}

impl<'tcx> std::ops::Deref for QueryCtxt<'tcx> {
type Target = TyCtxt<'tcx>;

#[inline]
fn deref(&self) -> &Self::Target {
&self.tcx
}
}

impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
type Deps = rustc_middle::dep_graph::DepsType;
type DepContext = TyCtxt<'tcx>;
@@ -69,14 +62,16 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> {
impl QueryContext for QueryCtxt<'_> {
#[inline]
fn jobserver_proxy(&self) -> &Proxy {
&*self.jobserver_proxy
&self.tcx.jobserver_proxy
}

#[inline]
fn next_job_id(self) -> QueryJobId {
QueryJobId(
NonZero::new(self.query_system.jobs.fetch_add(1, std::sync::atomic::Ordering::Relaxed))
.unwrap(),
NonZero::new(
self.tcx.query_system.jobs.fetch_add(1, std::sync::atomic::Ordering::Relaxed),
)
.unwrap(),
)
}

@@ -113,7 +108,8 @@ impl QueryContext for QueryCtxt<'_> {
self,
prev_dep_node_index: SerializedDepNodeIndex,
) -> Option<QuerySideEffect> {
self.query_system
self.tcx
.query_system
.on_disk_cache
.as_ref()
.and_then(|c| c.load_side_effect(self.tcx, prev_dep_node_index))
@@ -122,7 +118,7 @@ impl QueryContext for QueryCtxt<'_> {
#[inline(never)]
#[cold]
fn store_side_effect(self, dep_node_index: DepNodeIndex, side_effect: QuerySideEffect) {
if let Some(c) = self.query_system.on_disk_cache.as_ref() {
if let Some(c) = self.tcx.query_system.on_disk_cache.as_ref() {
c.store_side_effect(dep_node_index, side_effect)
}
}
@@ -140,7 +136,9 @@ impl QueryContext for QueryCtxt<'_> {
// as `self`, so we use `with_related_context` to relate the 'tcx lifetimes
// when accessing the `ImplicitCtxt`.
tls::with_related_context(self.tcx, move |current_icx| {
if depth_limit && !self.recursion_limit().value_within_limit(current_icx.query_depth) {
if depth_limit
&& !self.tcx.recursion_limit().value_within_limit(current_icx.query_depth)
{
self.depth_limit_error(token);
}

@@ -161,16 +159,16 @@ impl QueryContext for QueryCtxt<'_> {
let query_map = self.collect_active_jobs(true).expect("failed to collect active queries");
let (info, depth) = job.find_dep_kind_root(query_map);

let suggested_limit = match self.recursion_limit() {
let suggested_limit = match self.tcx.recursion_limit() {
Limit(0) => Limit(2),
limit => limit * 2,
};

self.sess.dcx().emit_fatal(QueryOverflow {
self.tcx.sess.dcx().emit_fatal(QueryOverflow {
span: info.job.span,
note: QueryOverflowNote { desc: info.query.description, depth },
suggested_limit,
crate_name: self.crate_name(LOCAL_CRATE),
crate_name: self.tcx.crate_name(LOCAL_CRATE),
});
}
}
@@ -367,7 +365,7 @@ pub(crate) fn encode_query_results<'a, 'tcx, Q>(
Q: super::QueryConfigRestored<'tcx>,
Q::RestoredValue: Encodable<CacheEncoder<'a, 'tcx>>,
{
let _timer = qcx.profiler().generic_activity_with_arg("encode_query_results_for", query.name());
let _timer = qcx.tcx.prof.generic_activity_with_arg("encode_query_results_for", query.name());

assert!(query.query_state(qcx).all_inactive());
let cache = query.query_cache(qcx);
@@ -389,8 +387,7 @@ pub(crate) fn query_key_hash_verify<'tcx>(
query: impl QueryConfig<QueryCtxt<'tcx>>,
qcx: QueryCtxt<'tcx>,
) {
let _timer =
qcx.profiler().generic_activity_with_arg("query_key_hash_verify_for", query.name());
let _timer = qcx.tcx.prof.generic_activity_with_arg("query_key_hash_verify_for", query.name());

let mut map = UnordMap::default();

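
The bulk of the plumbing changes drop the `Deref<Target = TyCtxt<'tcx>>` impl for `QueryCtxt` and spell out `self.tcx` at each use site. A small self-contained sketch of the trade-off, with invented `Wrapper`/`Ctxt` types standing in for `QueryCtxt`/`TyCtxt`:

```rust
use std::ops::Deref;

// Invented stand-ins for illustration; not the real `QueryCtxt`/`TyCtxt`.
struct Ctxt {
    crate_name: &'static str,
}

struct Wrapper {
    ctxt: Ctxt,
    job_counter: u32,
}

// The pattern the PR removes: `Deref` on a wrapper that is not a smart pointer.
// It makes `w.crate_name` compile, but hides which type actually owns the field.
impl Deref for Wrapper {
    type Target = Ctxt;
    fn deref(&self) -> &Ctxt {
        &self.ctxt
    }
}

fn main() {
    let w = Wrapper { ctxt: Ctxt { crate_name: "demo" }, job_counter: 0 };

    // Via `Deref` coercion: concise, but the call site does not say whether
    // `crate_name` lives on `Wrapper` or on `Ctxt`.
    println!("{}", w.crate_name);

    // The explicit style the PR switches to: always go through the field.
    println!("{} (jobs started: {})", w.ctxt.crate_name, w.job_counter);
}
```

Explicit field access is the direction the PR takes: slightly more verbose, but every call site now names the type that actually owns the data.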