From 32c9ffb9ccb1dae15d473c8c4462eb80b4d35fc7 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Jul 2022 08:25:47 +1000 Subject: [PATCH 1/8] Clarify args terminology. The deriving code has inconsistent terminology to describe args. In some places it distinguishes between: - the `&self` arg (if present), versus - all other args. In other places it distinguishes between: - the `&self` arg (if present) and any other arguments with the same type (in practice there is at most one, e.g. in `PartialEq::eq`), versus - all other args. The terms "self_args" and "nonself_args" are sometimes used for the former distinction, and sometimes for the latter. "args" is also sometimes used for "all other args". This commit makes the code consistently uses "self_args"/"nonself_args" for the former and "selflike_args"/"nonselflike_args" for the latter. This change makes the code easier to read. The commit also adds a panic on an impossible path (the `Self_` case) in `extract_arg_details`. --- .../src/deriving/clone.rs | 2 +- .../src/deriving/cmp/eq.rs | 2 +- .../src/deriving/cmp/ord.rs | 2 +- .../src/deriving/cmp/partial_eq.rs | 2 +- .../src/deriving/cmp/partial_ord.rs | 2 +- .../src/deriving/debug.rs | 4 +- .../src/deriving/decodable.rs | 7 +- .../src/deriving/default.rs | 2 +- .../src/deriving/encodable.rs | 7 +- .../src/deriving/generic/mod.rs | 222 ++++++++++-------- .../rustc_builtin_macros/src/deriving/hash.rs | 4 +- 11 files changed, 147 insertions(+), 109 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/clone.rs b/compiler/rustc_builtin_macros/src/deriving/clone.rs index 0a55d4e0ce0a6..a67d16d6b2f20 100644 --- a/compiler/rustc_builtin_macros/src/deriving/clone.rs +++ b/compiler/rustc_builtin_macros/src/deriving/clone.rs @@ -80,7 +80,7 @@ pub fn expand_deriving_clone( name: sym::clone, generics: Bounds::empty(), explicit_self: true, - args: Vec::new(), + nonself_args: Vec::new(), ret_ty: Self_, attributes: attrs, unify_fieldless_variants: false, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs index c1a2ebcc14601..4e798bf6acb10 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/eq.rs @@ -32,7 +32,7 @@ pub fn expand_deriving_eq( name: sym::assert_receiver_is_total_eq, generics: Bounds::empty(), explicit_self: true, - args: vec![], + nonself_args: vec![], ret_ty: Unit, attributes: attrs, unify_fieldless_variants: true, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs index bec59aac5eee1..d80a2293e6674 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs @@ -28,7 +28,7 @@ pub fn expand_deriving_ord( name: sym::cmp, generics: Bounds::empty(), explicit_self: true, - args: vec![(self_ref(), sym::other)], + nonself_args: vec![(self_ref(), sym::other)], ret_ty: Path(path_std!(cmp::Ordering)), attributes: attrs, unify_fieldless_variants: true, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs index b44c290d12f56..a9a0634836b83 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs @@ -69,7 +69,7 @@ pub fn expand_deriving_partial_eq( name: $name, generics: Bounds::empty(), explicit_self: true, - args: vec![(self_ref(), 
sym::other)], + nonself_args: vec![(self_ref(), sym::other)], ret_ty: Path(path_local!(bool)), attributes: attrs, unify_fieldless_variants: true, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs index 5769f08f49482..c8c9a6fbb5793 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs @@ -26,7 +26,7 @@ pub fn expand_deriving_partial_ord( name: sym::partial_cmp, generics: Bounds::empty(), explicit_self: true, - args: vec![(self_ref(), sym::other)], + nonself_args: vec![(self_ref(), sym::other)], ret_ty, attributes: attrs, unify_fieldless_variants: true, diff --git a/compiler/rustc_builtin_macros/src/deriving/debug.rs b/compiler/rustc_builtin_macros/src/deriving/debug.rs index e04898287608b..71f77ea8b6a39 100644 --- a/compiler/rustc_builtin_macros/src/deriving/debug.rs +++ b/compiler/rustc_builtin_macros/src/deriving/debug.rs @@ -28,7 +28,7 @@ pub fn expand_deriving_debug( name: sym::fmt, generics: Bounds::empty(), explicit_self: true, - args: vec![(fmtr, sym::f)], + nonself_args: vec![(fmtr, sym::f)], ret_ty: Path(path_std!(fmt::Result)), attributes: Vec::new(), unify_fieldless_variants: false, @@ -53,7 +53,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> // We want to make sure we have the ctxt set so that we can use unstable methods let span = cx.with_def_site_ctxt(span); let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked)); - let fmt = substr.nonself_args[0].clone(); + let fmt = substr.nonselflike_args[0].clone(); // Struct and tuples are similar enough that we use the same code for both, // with some extra pieces for structs due to the field names. 
diff --git a/compiler/rustc_builtin_macros/src/deriving/decodable.rs b/compiler/rustc_builtin_macros/src/deriving/decodable.rs index b9f2a75082224..d688143a2a5c6 100644 --- a/compiler/rustc_builtin_macros/src/deriving/decodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/decodable.rs @@ -36,7 +36,10 @@ pub fn expand_deriving_rustc_decodable( )], }, explicit_self: false, - args: vec![(Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), sym::d)], + nonself_args: vec![( + Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), + sym::d, + )], ret_ty: Path(Path::new_( pathvec_std!(result::Result), vec![ @@ -63,7 +66,7 @@ fn decodable_substructure( substr: &Substructure<'_>, krate: Symbol, ) -> BlockOrExpr { - let decoder = substr.nonself_args[0].clone(); + let decoder = substr.nonselflike_args[0].clone(); let recurse = vec![ Ident::new(krate, trait_span), Ident::new(sym::Decodable, trait_span), diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs b/compiler/rustc_builtin_macros/src/deriving/default.rs index 90d5cdbc0a078..5177690917f21 100644 --- a/compiler/rustc_builtin_macros/src/deriving/default.rs +++ b/compiler/rustc_builtin_macros/src/deriving/default.rs @@ -34,7 +34,7 @@ pub fn expand_deriving_default( name: kw::Default, generics: Bounds::empty(), explicit_self: false, - args: Vec::new(), + nonself_args: Vec::new(), ret_ty: Self_, attributes: attrs, unify_fieldless_variants: false, diff --git a/compiler/rustc_builtin_macros/src/deriving/encodable.rs b/compiler/rustc_builtin_macros/src/deriving/encodable.rs index 0dfce114bfc53..c89558f6b86eb 100644 --- a/compiler/rustc_builtin_macros/src/deriving/encodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/encodable.rs @@ -120,7 +120,10 @@ pub fn expand_deriving_rustc_encodable( )], }, explicit_self: true, - args: vec![(Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), sym::s)], + nonself_args: vec![( + Ref(Box::new(Path(Path::new_local(typaram))), Mutability::Mut), + sym::s, + )], ret_ty: Path(Path::new_( pathvec_std!(result::Result), vec![ @@ -147,7 +150,7 @@ fn encodable_substructure( substr: &Substructure<'_>, krate: Symbol, ) -> BlockOrExpr { - let encoder = substr.nonself_args[0].clone(); + let encoder = substr.nonselflike_args[0].clone(); // throw an underscore in front to suppress unused variable warnings let blkarg = Ident::new(sym::_e, trait_span); let blkencoder = cx.expr_ident(trait_span, blkarg); diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index e618255b0c6fd..cafca507bd446 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -227,8 +227,8 @@ pub struct MethodDef<'a> { /// Is there is a `&self` argument? If not, it is a static function. pub explicit_self: bool, - /// Arguments other than the self argument - pub args: Vec<(Ty, Symbol)>, + /// Arguments other than the self argument. 
+ pub nonself_args: Vec<(Ty, Symbol)>, /// Returns type pub ret_ty: Ty, @@ -245,8 +245,8 @@ pub struct MethodDef<'a> { pub struct Substructure<'a> { /// ident of self pub type_ident: Ident, - /// verbatim access to any non-self arguments - pub nonself_args: &'a [P], + /// verbatim access to any non-selflike arguments + pub nonselflike_args: &'a [P], pub fields: &'a SubstructureFields<'a>, } @@ -782,8 +782,8 @@ impl<'a> TraitDef<'a> { .methods .iter() .map(|method_def| { - let (explicit_self, self_args, nonself_args, tys) = - method_def.split_self_nonself_args(cx, self, type_ident, generics); + let (explicit_self, selflike_args, nonselflike_args, nonself_arg_tys) = + method_def.extract_arg_details(cx, self, type_ident, generics); let body = if from_scratch || method_def.is_static() { method_def.expand_static_struct_method_body( @@ -791,7 +791,7 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - &nonself_args, + &nonselflike_args, ) } else { method_def.expand_struct_method_body( @@ -799,14 +799,22 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - &self_args, - &nonself_args, + &selflike_args, + &nonselflike_args, use_temporaries, is_packed, ) }; - method_def.create_method(cx, self, type_ident, generics, explicit_self, tys, body) + method_def.create_method( + cx, + self, + type_ident, + generics, + explicit_self, + nonself_arg_tys, + body, + ) }) .collect(); @@ -831,8 +839,8 @@ impl<'a> TraitDef<'a> { .methods .iter() .map(|method_def| { - let (explicit_self, self_args, nonself_args, tys) = - method_def.split_self_nonself_args(cx, self, type_ident, generics); + let (explicit_self, selflike_args, nonselflike_args, nonself_arg_tys) = + method_def.extract_arg_details(cx, self, type_ident, generics); let body = if from_scratch || method_def.is_static() { method_def.expand_static_enum_method_body( @@ -840,7 +848,7 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - &nonself_args, + &nonselflike_args, ) } else { method_def.expand_enum_method_body( @@ -848,12 +856,20 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args, - &nonself_args, + selflike_args, + &nonselflike_args, ) }; - method_def.create_method(cx, self, type_ident, generics, explicit_self, tys, body) + method_def.create_method( + cx, + self, + type_ident, + generics, + explicit_self, + nonself_arg_tys, + body, + ) }) .collect(); @@ -867,11 +883,11 @@ impl<'a> MethodDef<'a> { cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'_>, type_ident: Ident, - nonself_args: &[P], + nonselflike_args: &[P], fields: &SubstructureFields<'_>, ) -> BlockOrExpr { let span = trait_.span; - let substructure = Substructure { type_ident, nonself_args, fields }; + let substructure = Substructure { type_ident, nonselflike_args, fields }; let mut f = self.combine_substructure.borrow_mut(); let f: &mut CombineSubstructureFunc<'_> = &mut *f; f(cx, span, &substructure) @@ -891,49 +907,52 @@ impl<'a> MethodDef<'a> { !self.explicit_self } - fn split_self_nonself_args( + // The return value includes: + // - explicit_self: The `&self` arg, if present. + // - selflike_args: Expressions for `&self` (if present) and also any other + // args with the same type (e.g. the `other` arg in `PartialEq::eq`). + // - nonselflike_args: Expressions for all the remaining args. + // - nonself_arg_tys: Additional information about all the args other than + // `&self`. 
+ fn extract_arg_details( &self, cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'_>, type_ident: Ident, generics: &Generics, ) -> (Option, Vec>, Vec>, Vec<(Ident, P)>) { - let mut self_args = Vec::new(); - let mut nonself_args = Vec::new(); - let mut arg_tys = Vec::new(); + let mut selflike_args = Vec::new(); + let mut nonselflike_args = Vec::new(); + let mut nonself_arg_tys = Vec::new(); let span = trait_.span; - let ast_explicit_self = if self.explicit_self { + let explicit_self = if self.explicit_self { let (self_expr, explicit_self) = ty::get_explicit_self(cx, span); - self_args.push(self_expr); + selflike_args.push(self_expr); Some(explicit_self) } else { None }; - for (ty, name) in self.args.iter() { + for (ty, name) in self.nonself_args.iter() { let ast_ty = ty.to_ty(cx, span, type_ident, generics); let ident = Ident::new(*name, span); - arg_tys.push((ident, ast_ty)); + nonself_arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(span, ident); match *ty { // for static methods, just treat any Self // arguments as a normal arg - Self_ if !self.is_static() => { - self_args.push(arg_expr); - } Ref(ref ty, _) if matches!(**ty, Self_) && !self.is_static() => { - self_args.push(cx.expr_deref(span, arg_expr)) - } - _ => { - nonself_args.push(arg_expr); + selflike_args.push(cx.expr_deref(span, arg_expr)) } + Self_ => cx.span_bug(span, "`Self` in non-return position"), + _ => nonselflike_args.push(arg_expr), } } - (ast_explicit_self, self_args, nonself_args, arg_tys) + (explicit_self, selflike_args, nonselflike_args, nonself_arg_tys) } fn create_method( @@ -943,7 +962,7 @@ impl<'a> MethodDef<'a> { type_ident: Ident, generics: &Generics, explicit_self: Option, - arg_types: Vec<(Ident, P)>, + nonself_arg_tys: Vec<(Ident, P)>, body: BlockOrExpr, ) -> P { let span = trait_.span; @@ -951,12 +970,13 @@ impl<'a> MethodDef<'a> { let fn_generics = self.generics.to_generics(cx, span, type_ident, generics); let args = { - let self_args = explicit_self.map(|explicit_self| { + let self_arg = explicit_self.map(|explicit_self| { let ident = Ident::with_dummy_span(kw::SelfLower).with_span_pos(span); ast::Param::from_self(ast::AttrVec::default(), explicit_self, ident) }); - let nonself_args = arg_types.into_iter().map(|(name, ty)| cx.param(span, name, ty)); - self_args.into_iter().chain(nonself_args).collect() + let nonself_args = + nonself_arg_tys.into_iter().map(|(name, ty)| cx.param(span, name, ty)); + self_arg.into_iter().chain(nonself_args).collect() }; let ret_type = self.get_ret_ty(cx, trait_, generics, type_ident); @@ -1024,8 +1044,8 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef<'b>, struct_def: &'b VariantData, type_ident: Ident, - self_args: &[P], - nonself_args: &[P], + selflike_args: &[P], + nonselflike_args: &[P], use_temporaries: bool, is_packed: bool, ) -> BlockOrExpr { @@ -1033,9 +1053,9 @@ impl<'a> MethodDef<'a> { let span = trait_.span; let mut patterns = Vec::new(); - for (i, self_arg) in self_args.iter().enumerate() { + for (i, selflike_arg) in selflike_args.iter().enumerate() { let ident_exprs = if !is_packed { - trait_.create_struct_field_accesses(cx, self_arg, struct_def) + trait_.create_struct_field_accesses(cx, selflike_arg, struct_def) } else { // Get the pattern for the let-destructuring. // @@ -1084,7 +1104,7 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &Struct(struct_def, fields), ); @@ -1092,8 +1112,10 @@ impl<'a> MethodDef<'a> { body } else { // Do the let-destructuring. 
- let mut stmts: Vec<_> = iter::zip(self_args, patterns) - .map(|(arg_expr, pat)| cx.stmt_let_pat(span, pat, arg_expr.clone())) + let mut stmts: Vec<_> = iter::zip(selflike_args, patterns) + .map(|(selflike_arg_expr, pat)| { + cx.stmt_let_pat(span, pat, selflike_arg_expr.clone()) + }) .collect(); stmts.extend(std::mem::take(&mut body.0)); BlockOrExpr(stmts, body.1) @@ -1106,7 +1128,7 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef<'_>, struct_def: &VariantData, type_ident: Ident, - nonself_args: &[P], + nonselflike_args: &[P], ) -> BlockOrExpr { let summary = trait_.summarise_struct(cx, struct_def); @@ -1114,7 +1136,7 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &StaticStruct(struct_def, summary), ) } @@ -1148,7 +1170,7 @@ impl<'a> MethodDef<'a> { /// } /// } /// ``` - /// Creates a match for a tuple of all `self_args`, where either all + /// Creates a match for a tuple of all `selflike_args`, where either all /// variants match, or it falls into a catch-all for when one variant /// does not match. /// @@ -1161,33 +1183,33 @@ impl<'a> MethodDef<'a> { /// a simple equality check (for PartialEq). /// /// The catch-all handler is provided access the variant index values - /// for each of the self-args, carried in precomputed variables. + /// for each of the selflike_args, carried in precomputed variables. fn expand_enum_method_body<'b>( &self, cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'b>, enum_def: &'b EnumDef, type_ident: Ident, - mut self_args: Vec>, - nonself_args: &[P], + mut selflike_args: Vec>, + nonselflike_args: &[P], ) -> BlockOrExpr { let span = trait_.span; let variants = &enum_def.variants; - let self_arg_names = iter::once("__self".to_string()) + let selflike_arg_names = iter::once("__self".to_string()) .chain( - self_args + selflike_args .iter() .enumerate() .skip(1) - .map(|(arg_count, _self_arg)| format!("__arg_{}", arg_count)), + .map(|(arg_count, _selflike_arg)| format!("__arg_{}", arg_count)), ) .collect::>(); // The `vi_idents` will be bound, solely in the catch-all, to - // a series of let statements mapping each self_arg to an int + // a series of let statements mapping each selflike_arg to an int // value corresponding to its discriminant. - let vi_idents = self_arg_names + let vi_idents = selflike_arg_names .iter() .map(|name| { let vi_suffix = format!("{}_vi", name); @@ -1206,18 +1228,18 @@ impl<'a> MethodDef<'a> { // (Variant1, Variant1, ...) => Body1 // (Variant2, Variant2, ...) => Body2 // ... - // where each tuple has length = self_args.len() + // where each tuple has length = selflike_args.len() let mut match_arms: Vec = variants .iter() .enumerate() .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty())) .map(|(index, variant)| { - let mk_self_pat = |cx: &mut ExtCtxt<'_>, self_arg_name: &str| { + let mk_selflike_pat = |cx: &mut ExtCtxt<'_>, selflike_arg_name: &str| { let (p, idents) = trait_.create_enum_variant_pattern( cx, type_ident, variant, - self_arg_name, + selflike_arg_name, ast::Mutability::Not, ); (cx.pat(span, PatKind::Ref(p, ast::Mutability::Not)), idents) @@ -1225,17 +1247,17 @@ impl<'a> MethodDef<'a> { // A single arm has form (&VariantK, &VariantK, ...) => BodyK // (see "Final wrinkle" note below for why.) 
- let mut subpats = Vec::with_capacity(self_arg_names.len()); - let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); - let first_self_pat_idents = { - let (p, idents) = mk_self_pat(cx, &self_arg_names[0]); + let mut subpats = Vec::with_capacity(selflike_arg_names.len()); + let mut selflike_pats_idents = Vec::with_capacity(selflike_arg_names.len() - 1); + let first_selflike_pat_idents = { + let (p, idents) = mk_selflike_pat(cx, &selflike_arg_names[0]); subpats.push(p); idents }; - for self_arg_name in &self_arg_names[1..] { - let (p, idents) = mk_self_pat(cx, &self_arg_name); + for selflike_arg_name in &selflike_arg_names[1..] { + let (p, idents) = mk_selflike_pat(cx, &selflike_arg_name); subpats.push(p); - self_pats_idents.push(idents); + selflike_pats_idents.push(idents); } // Here is the pat = `(&VariantK, &VariantK, ...)` @@ -1250,24 +1272,24 @@ impl<'a> MethodDef<'a> { // we are in. // All of the Self args have the same variant in these - // cases. So we transpose the info in self_pats_idents + // cases. So we transpose the info in selflike_pats_idents // to gather the getter expressions together, in the // form that EnumMatching expects. // The transposition is driven by walking across the - // arg fields of the variant for the first self pat. - let field_tuples = first_self_pat_idents + // arg fields of the variant for the first selflike pat. + let field_tuples = first_selflike_pat_idents .into_iter() .enumerate() // For each arg field of self, pull out its getter expr ... .map(|(field_index, (span, opt_ident, self_getter_expr, attrs))| { // ... but FieldInfo also wants getter expr // for matching other arguments of Self type; - // so walk across the *other* self_pats_idents + // so walk across the *other* selflike_pats_idents // and pull out getter for same field in each // of them (using `field_index` tracked above). // That is the heart of the transposition. - let others = self_pats_idents + let others = selflike_pats_idents .iter() .map(|fields| { let (_, _opt_ident, ref other_getter_expr, _) = fields[field_index]; @@ -1298,7 +1320,13 @@ impl<'a> MethodDef<'a> { // Build up code associated with such a case. let substructure = EnumMatching(index, variants.len(), variant, field_tuples); let arm_expr = self - .call_substructure_method(cx, trait_, type_ident, nonself_args, &substructure) + .call_substructure_method( + cx, + trait_, + type_ident, + nonselflike_args, + &substructure, + ) .into_expr(cx, span); cx.arm(span, single_pat, arm_expr) @@ -1316,13 +1344,13 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &substructure, ) .into_expr(cx, span), ) } - _ if variants.len() > 1 && self_args.len() > 1 => { + _ if variants.len() > 1 && selflike_args.len() > 1 => { // Since we know that all the arguments will match if we reach // the match expression we add the unreachable intrinsics as the // result of the catch all which should help llvm in optimizing it @@ -1349,8 +1377,8 @@ impl<'a> MethodDef<'a> { // catch-all `_` match, it would trigger the // unreachable-pattern error. // - if variants.len() > 1 && self_args.len() > 1 { - // Build a series of let statements mapping each self_arg + if variants.len() > 1 && selflike_args.len() > 1 { + // Build a series of let statements mapping each selflike_arg // to its discriminant value. 
// // i.e., for `enum E { A, B(1), C(T, T) }`, and a deriving @@ -1365,10 +1393,14 @@ impl<'a> MethodDef<'a> { // We also build an expression which checks whether all discriminants are equal: // `__self_vi == __arg_1_vi && __self_vi == __arg_2_vi && ...` let mut discriminant_test = cx.expr_bool(span, true); - for (i, (&ident, self_arg)) in iter::zip(&vi_idents, &self_args).enumerate() { - let self_addr = cx.expr_addr_of(span, self_arg.clone()); - let variant_value = - deriving::call_intrinsic(cx, span, sym::discriminant_value, vec![self_addr]); + for (i, (&ident, selflike_arg)) in iter::zip(&vi_idents, &selflike_args).enumerate() { + let selflike_addr = cx.expr_addr_of(span, selflike_arg.clone()); + let variant_value = deriving::call_intrinsic( + cx, + span, + sym::discriminant_value, + vec![selflike_addr], + ); let let_stmt = cx.stmt_let(span, false, ident, variant_value); index_let_stmts.push(let_stmt); @@ -1389,18 +1421,18 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &catch_all_substructure, ) .into_expr(cx, span); - // Final wrinkle: the self_args are expressions that deref + // Final wrinkle: the selflike_args are expressions that deref // down to desired places, but we cannot actually deref // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - self_args.map_in_place(|self_arg| cx.expr_addr_of(span, self_arg)); - let match_arg = cx.expr(span, ast::ExprKind::Tup(self_args)); + selflike_args.map_in_place(|selflike_arg| cx.expr_addr_of(span, selflike_arg)); + let match_arg = cx.expr(span, ast::ExprKind::Tup(selflike_args)); // Lastly we create an expression which branches on all discriminants being equal // if discriminant_test { @@ -1469,16 +1501,16 @@ impl<'a> MethodDef<'a> { BlockOrExpr(vec![], Some(deriving::call_unreachable(cx, span))) } else { - // Final wrinkle: the self_args are expressions that deref + // Final wrinkle: the selflike_args are expressions that deref // down to desired places, but we cannot actually deref // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - self_args.map_in_place(|self_arg| cx.expr_addr_of(span, self_arg)); - let match_arg = if self_args.len() == 1 { - self_args.pop().unwrap() + selflike_args.map_in_place(|selflike_arg| cx.expr_addr_of(span, selflike_arg)); + let match_arg = if selflike_args.len() == 1 { + selflike_args.pop().unwrap() } else { - cx.expr(span, ast::ExprKind::Tup(self_args)) + cx.expr(span, ast::ExprKind::Tup(selflike_args)) }; BlockOrExpr(vec![], Some(cx.expr_match(span, match_arg, match_arms))) } @@ -1490,7 +1522,7 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef<'_>, enum_def: &EnumDef, type_ident: Ident, - nonself_args: &[P], + nonselflike_args: &[P], ) -> BlockOrExpr { let summary = enum_def .variants @@ -1505,7 +1537,7 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, - nonself_args, + nonselflike_args, &StaticEnum(enum_def, summary), ) } @@ -1609,7 +1641,7 @@ impl<'a> TraitDef<'a> { fn create_struct_field_accesses( &self, cx: &mut ExtCtxt<'_>, - mut self_arg: &P, + mut selflike_arg: &P, struct_def: &'a VariantData, ) -> Vec<(Span, Option, P, &'a [ast::Attribute])> { let mut ident_exprs = Vec::new(); @@ -1617,8 +1649,8 @@ impl<'a> TraitDef<'a> { let sp = struct_field.span.with_ctxt(self.span.ctxt()); // We don't the need the deref, if there is one. 
- if let ast::ExprKind::Unary(ast::UnOp::Deref, inner) = &self_arg.kind { - self_arg = inner; + if let ast::ExprKind::Unary(ast::UnOp::Deref, inner) = &selflike_arg.kind { + selflike_arg = inner; } // Note: we must use `struct_field.span` rather than `span` in the @@ -1628,7 +1660,7 @@ impl<'a> TraitDef<'a> { let val = cx.expr( sp, ast::ExprKind::Field( - self_arg.clone(), + selflike_arg.clone(), struct_field.ident.unwrap_or_else(|| { Ident::from_str_and_span(&i.to_string(), struct_field.span) }), diff --git a/compiler/rustc_builtin_macros/src/deriving/hash.rs b/compiler/rustc_builtin_macros/src/deriving/hash.rs index c3f7d09886b3a..6ff36e7f4ed03 100644 --- a/compiler/rustc_builtin_macros/src/deriving/hash.rs +++ b/compiler/rustc_builtin_macros/src/deriving/hash.rs @@ -30,7 +30,7 @@ pub fn expand_deriving_hash( name: sym::hash, generics: Bounds { bounds: vec![(typaram, vec![path_std!(hash::Hasher)])] }, explicit_self: true, - args: vec![(Ref(Box::new(Path(arg)), Mutability::Mut), sym::state)], + nonself_args: vec![(Ref(Box::new(Path(arg)), Mutability::Mut), sym::state)], ret_ty: Unit, attributes: vec![], unify_fieldless_variants: true, @@ -49,7 +49,7 @@ fn hash_substructure( trait_span: Span, substr: &Substructure<'_>, ) -> BlockOrExpr { - let [state_expr] = substr.nonself_args else { + let [state_expr] = substr.nonselflike_args else { cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`"); }; let call_hash = |span, thing_expr| { From d3057b5ca78cc05823f2dc75cb774bbffc5403a6 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Jul 2022 08:47:04 +1000 Subject: [PATCH 2/8] Rename `FieldInfo` fields. Use `self_exprs` and `other_selflike_exprs` in a manner similar to the previous commit. --- .../src/deriving/clone.rs | 2 +- .../src/deriving/cmp/ord.rs | 20 ++++++++----- .../src/deriving/cmp/partial_eq.rs | 17 ++++++----- .../src/deriving/cmp/partial_ord.rs | 20 ++++++++----- .../src/deriving/debug.rs | 4 +-- .../src/deriving/encodable.rs | 8 ++--- .../src/deriving/generic/mod.rs | 29 ++++++++++--------- .../rustc_builtin_macros/src/deriving/hash.rs | 4 ++- 8 files changed, 59 insertions(+), 45 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/clone.rs b/compiler/rustc_builtin_macros/src/deriving/clone.rs index a67d16d6b2f20..a45b6e0407a93 100644 --- a/compiler/rustc_builtin_macros/src/deriving/clone.rs +++ b/compiler/rustc_builtin_macros/src/deriving/clone.rs @@ -161,7 +161,7 @@ fn cs_clone( let all_fields; let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]); let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo<'_>| { - let args = vec![cx.expr_addr_of(field.span, field.self_.clone())]; + let args = vec![cx.expr_addr_of(field.span, field.self_expr.clone())]; cx.expr_call_global(field.span, fn_path.clone(), args) }; diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs index d80a2293e6674..a60db068f27de 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs @@ -74,17 +74,19 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> Bl // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. 
false, - |cx, span, old, self_f, other_fs| { + |cx, span, old, self_expr, other_selflike_exprs| { // match new { // ::std::cmp::Ordering::Equal => old, // cmp => cmp // } let new = { - let [other_f] = other_fs else { + let [other_expr] = other_selflike_exprs else { cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); }; - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; + let args = vec![ + cx.expr_addr_of(span, self_expr), + cx.expr_addr_of(span, other_expr.clone()), + ]; cx.expr_call_global(span, cmp_path.clone(), args) }; @@ -94,13 +96,15 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> Bl cx.expr_match(span, new, vec![eq_arm, neq_arm]) }, |cx, args| match args { - Some((span, self_f, other_fs)) => { + Some((span, self_expr, other_selflike_exprs)) => { let new = { - let [other_f] = other_fs else { + let [other_expr] = other_selflike_exprs else { cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); }; - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; + let args = vec![ + cx.expr_addr_of(span, self_expr), + cx.expr_addr_of(span, other_expr.clone()), + ]; cx.expr_call_global(span, cmp_path.clone(), args) }; diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs index a9a0634836b83..a18d78b8476c5 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs @@ -23,25 +23,28 @@ pub fn expand_deriving_partial_eq( combiner: BinOpKind, base: bool, ) -> BlockOrExpr { - let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P, other_fs: &[P]| { - let [other_f] = other_fs else { + let op = |cx: &mut ExtCtxt<'_>, + span: Span, + self_expr: P, + other_selflike_exprs: &[P]| { + let [other_expr] = other_selflike_exprs else { cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"); }; - cx.expr_binary(span, op, self_f, other_f.clone()) + cx.expr_binary(span, op, self_expr, other_expr.clone()) }; let expr = cs_fold( true, // use foldl - |cx, span, subexpr, self_f, other_fs| { - let eq = op(cx, span, self_f, other_fs); + |cx, span, subexpr, self_expr, other_selflike_exprs| { + let eq = op(cx, span, self_expr, other_selflike_exprs); cx.expr_binary(span, combiner, subexpr, eq) }, |cx, args| { match args { - Some((span, self_f, other_fs)) => { + Some((span, self_expr, other_selflike_exprs)) => { // Special-case the base case to generate cleaner code. - op(cx, span, self_f, other_fs) + op(cx, span, self_expr, other_selflike_exprs) } None => cx.expr_bool(span, base), } diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs index c8c9a6fbb5793..b809eaf8eecac 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs @@ -72,19 +72,21 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. 
false, - |cx, span, old, self_f, other_fs| { + |cx, span, old, self_expr, other_selflike_exprs| { // match new { // Some(::std::cmp::Ordering::Equal) => old, // cmp => cmp // } let new = { - let [other_f] = other_fs else { + let [other_expr] = other_selflike_exprs else { cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"); }; - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; + let args = vec![ + cx.expr_addr_of(span, self_expr), + cx.expr_addr_of(span, other_expr.clone()), + ]; cx.expr_call_global(span, partial_cmp_path.clone(), args) }; @@ -95,13 +97,15 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ cx.expr_match(span, new, vec![eq_arm, neq_arm]) }, |cx: &mut ExtCtxt<'_>, args: Option<(Span, P, &[P])>| match args { - Some((span, self_f, other_fs)) => { + Some((span, self_expr, other_selflike_exprs)) => { let new = { - let [other_f] = other_fs else { + let [other_expr] = other_selflike_exprs else { cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); }; - let args = - vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())]; + let args = vec![ + cx.expr_addr_of(span, self_expr), + cx.expr_addr_of(span, other_expr.clone()), + ]; cx.expr_call_global(span, partial_cmp_path.clone(), args) }; diff --git a/compiler/rustc_builtin_macros/src/deriving/debug.rs b/compiler/rustc_builtin_macros/src/deriving/debug.rs index 71f77ea8b6a39..b99198054def8 100644 --- a/compiler/rustc_builtin_macros/src/deriving/debug.rs +++ b/compiler/rustc_builtin_macros/src/deriving/debug.rs @@ -96,7 +96,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> args.push(name); } // Use double indirection to make sure this works for unsized types - let field = cx.expr_addr_of(field.span, field.self_.clone()); + let field = cx.expr_addr_of(field.span, field.self_expr.clone()); let field = cx.expr_addr_of(field.span, field); args.push(field); } @@ -116,7 +116,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> } // Use double indirection to make sure this works for unsized types - let value_ref = cx.expr_addr_of(field.span, field.self_.clone()); + let value_ref = cx.expr_addr_of(field.span, field.self_expr.clone()); value_exprs.push(cx.expr_addr_of(field.span, value_ref)); } diff --git a/compiler/rustc_builtin_macros/src/deriving/encodable.rs b/compiler/rustc_builtin_macros/src/deriving/encodable.rs index c89558f6b86eb..49dbe51f7626c 100644 --- a/compiler/rustc_builtin_macros/src/deriving/encodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/encodable.rs @@ -168,12 +168,12 @@ fn encodable_substructure( let fn_emit_struct_field_path = cx.def_site_path(&[sym::rustc_serialize, sym::Encoder, sym::emit_struct_field]); let mut stmts = Vec::new(); - for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() { + for (i, &FieldInfo { name, ref self_expr, span, .. 
}) in fields.iter().enumerate() { let name = match name { Some(id) => id.name, None => Symbol::intern(&format!("_field{}", i)), }; - let self_ref = cx.expr_addr_of(span, self_.clone()); + let self_ref = cx.expr_addr_of(span, self_expr.clone()); let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); let lambda = cx.lambda1(span, enc, blkarg); let call = cx.expr_call_global( @@ -237,8 +237,8 @@ fn encodable_substructure( let mut stmts = Vec::new(); if !fields.is_empty() { let last = fields.len() - 1; - for (i, &FieldInfo { ref self_, span, .. }) in fields.iter().enumerate() { - let self_ref = cx.expr_addr_of(span, self_.clone()); + for (i, &FieldInfo { ref self_expr, span, .. }) in fields.iter().enumerate() { + let self_ref = cx.expr_addr_of(span, self_expr.clone()); let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); let lambda = cx.lambda1(span, enc, blkarg); diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index cafca507bd446..6ee7f72f9a4ab 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -258,10 +258,10 @@ pub struct FieldInfo<'a> { pub name: Option, /// The expression corresponding to this field of `self` /// (specifically, a reference to it). - pub self_: P, + pub self_expr: P, /// The expressions corresponding to references to this field in - /// the other `Self` arguments. - pub other: Vec>, + /// the other selflike arguments. + pub other_selflike_exprs: Vec>, /// The attributes on the field pub attrs: &'a [ast::Attribute], } @@ -1080,13 +1080,13 @@ impl<'a> MethodDef<'a> { let fields = if !raw_fields.is_empty() { let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter()); let first_field = raw_fields.next().unwrap(); - let mut other_fields: Vec> = raw_fields.collect(); + let mut nonself_fields: Vec> = raw_fields.collect(); first_field - .map(|(span, opt_id, field, attrs)| FieldInfo { + .map(|(span, opt_id, expr, attrs)| FieldInfo { span: span.with_ctxt(trait_.span.ctxt()), name: opt_id, - self_: field, - other: other_fields + self_expr: expr, + other_selflike_exprs: nonself_fields .iter_mut() .map(|l| { let (.., ex, _) = l.next().unwrap(); @@ -1289,7 +1289,7 @@ impl<'a> MethodDef<'a> { // and pull out getter for same field in each // of them (using `field_index` tracked above). // That is the heart of the transposition. 
- let others = selflike_pats_idents + let other_selflike_exprs = selflike_pats_idents .iter() .map(|fields| { let (_, _opt_ident, ref other_getter_expr, _) = fields[field_index]; @@ -1307,8 +1307,8 @@ impl<'a> MethodDef<'a> { FieldInfo { span, name: opt_ident, - self_: self_getter_expr, - other: others, + self_expr: self_getter_expr, + other_selflike_exprs, attrs, } }) @@ -1712,12 +1712,13 @@ where let (base, rest) = match (all_fields.is_empty(), use_foldl) { (false, true) => { let (first, rest) = all_fields.split_first().unwrap(); - let args = (first.span, first.self_.clone(), &first.other[..]); + let args = + (first.span, first.self_expr.clone(), &first.other_selflike_exprs[..]); (b(cx, Some(args)), rest) } (false, false) => { let (last, rest) = all_fields.split_last().unwrap(); - let args = (last.span, last.self_.clone(), &last.other[..]); + let args = (last.span, last.self_expr.clone(), &last.other_selflike_exprs[..]); (b(cx, Some(args)), rest) } (true, _) => (b(cx, None), &all_fields[..]), @@ -1725,11 +1726,11 @@ where if use_foldl { rest.iter().fold(base, |old, field| { - f(cx, field.span, old, field.self_.clone(), &field.other) + f(cx, field.span, old, field.self_expr.clone(), &field.other_selflike_exprs) }) } else { rest.iter().rev().fold(base, |old, field| { - f(cx, field.span, old, field.self_.clone(), &field.other) + f(cx, field.span, old, field.self_expr.clone(), &field.other_selflike_exprs) }) } } diff --git a/compiler/rustc_builtin_macros/src/deriving/hash.rs b/compiler/rustc_builtin_macros/src/deriving/hash.rs index 6ff36e7f4ed03..2213cd6d37d2d 100644 --- a/compiler/rustc_builtin_macros/src/deriving/hash.rs +++ b/compiler/rustc_builtin_macros/src/deriving/hash.rs @@ -82,7 +82,9 @@ fn hash_substructure( }; stmts.extend( - fields.iter().map(|FieldInfo { ref self_, span, .. }| call_hash(*span, self_.clone())), + fields + .iter() + .map(|FieldInfo { ref self_expr, span, .. }| call_hash(*span, self_expr.clone())), ); BlockOrExpr::new_stmts(stmts) } From 27571da5fae9ee5f4eb34582a91ba06ad0a2ab13 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Jul 2022 08:59:17 +1000 Subject: [PATCH 3/8] Remove `FieldInfo::attrs`. It's unused. This also removes the need for the lifetime on `FieldInfo`, which is nice. --- .../src/deriving/clone.rs | 2 +- .../src/deriving/generic/mod.rs | 30 ++++++++----------- 2 files changed, 14 insertions(+), 18 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/clone.rs b/compiler/rustc_builtin_macros/src/deriving/clone.rs index a45b6e0407a93..9cd72ed0c67b2 100644 --- a/compiler/rustc_builtin_macros/src/deriving/clone.rs +++ b/compiler/rustc_builtin_macros/src/deriving/clone.rs @@ -160,7 +160,7 @@ fn cs_clone( let ctor_path; let all_fields; let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]); - let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo<'_>| { + let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo| { let args = vec![cx.expr_addr_of(field.span, field.self_expr.clone())]; cx.expr_call_global(field.span, fn_path.clone(), args) }; diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 6ee7f72f9a4ab..9781687d06150 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -251,7 +251,7 @@ pub struct Substructure<'a> { } /// Summary of the relevant parts of a struct/enum field. 
-pub struct FieldInfo<'a> { +pub struct FieldInfo { pub span: Span, /// None for tuple structs/normal enum variants, Some for normal /// structs/struct enum variants. @@ -262,8 +262,6 @@ pub struct FieldInfo<'a> { /// The expressions corresponding to references to this field in /// the other selflike arguments. pub other_selflike_exprs: Vec>, - /// The attributes on the field - pub attrs: &'a [ast::Attribute], } /// Fields for a static method @@ -276,11 +274,11 @@ pub enum StaticFields { /// A summary of the possible sets of fields. pub enum SubstructureFields<'a> { - Struct(&'a ast::VariantData, Vec>), + Struct(&'a ast::VariantData, Vec), /// Matching variants of the enum: variant index, variant count, ast::Variant, /// fields: the field name is only non-`None` in the case of a struct /// variant. - EnumMatching(usize, usize, &'a ast::Variant, Vec>), + EnumMatching(usize, usize, &'a ast::Variant, Vec), /// Non-matching variants of the enum, but with all state hidden from the /// consequent code. The field is a list of `Ident`s bound to the variant @@ -1082,18 +1080,17 @@ impl<'a> MethodDef<'a> { let first_field = raw_fields.next().unwrap(); let mut nonself_fields: Vec> = raw_fields.collect(); first_field - .map(|(span, opt_id, expr, attrs)| FieldInfo { + .map(|(span, opt_id, expr)| FieldInfo { span: span.with_ctxt(trait_.span.ctxt()), name: opt_id, self_expr: expr, other_selflike_exprs: nonself_fields .iter_mut() .map(|l| { - let (.., ex, _) = l.next().unwrap(); + let (_, _, ex) = l.next().unwrap(); ex }) .collect(), - attrs, }) .collect() } else { @@ -1282,7 +1279,7 @@ impl<'a> MethodDef<'a> { .into_iter() .enumerate() // For each arg field of self, pull out its getter expr ... - .map(|(field_index, (span, opt_ident, self_getter_expr, attrs))| { + .map(|(field_index, (span, opt_ident, self_getter_expr))| { // ... but FieldInfo also wants getter expr // for matching other arguments of Self type; // so walk across the *other* selflike_pats_idents @@ -1292,7 +1289,7 @@ impl<'a> MethodDef<'a> { let other_selflike_exprs = selflike_pats_idents .iter() .map(|fields| { - let (_, _opt_ident, ref other_getter_expr, _) = fields[field_index]; + let (_, _opt_ident, ref other_getter_expr) = fields[field_index]; // All Self args have same variant, so // opt_idents are the same. 
(Assert @@ -1309,10 +1306,9 @@ impl<'a> MethodDef<'a> { name: opt_ident, self_expr: self_getter_expr, other_selflike_exprs, - attrs, } }) - .collect::>>(); + .collect::>(); // Now, for some given VariantK, we have built up // expressions for referencing every field of every @@ -1598,7 +1594,7 @@ impl<'a> TraitDef<'a> { prefix: &str, mutbl: ast::Mutability, use_temporaries: bool, - ) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { + ) -> (P, Vec<(Span, Option, P)>) { let mut paths = Vec::new(); let mut ident_exprs = Vec::new(); for (i, struct_field) in struct_def.fields().iter().enumerate() { @@ -1607,7 +1603,7 @@ impl<'a> TraitDef<'a> { paths.push(ident.with_span_pos(sp)); let val = cx.expr_path(cx.path_ident(sp, ident)); let val = if use_temporaries { val } else { cx.expr_deref(sp, val) }; - ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..])); + ident_exprs.push((sp, struct_field.ident, val)); } let subpats = self.create_subpatterns(cx, paths, mutbl, use_temporaries); @@ -1643,7 +1639,7 @@ impl<'a> TraitDef<'a> { cx: &mut ExtCtxt<'_>, mut selflike_arg: &P, struct_def: &'a VariantData, - ) -> Vec<(Span, Option, P, &'a [ast::Attribute])> { + ) -> Vec<(Span, Option, P)> { let mut ident_exprs = Vec::new(); for (i, struct_field) in struct_def.fields().iter().enumerate() { let sp = struct_field.span.with_ctxt(self.span.ctxt()); @@ -1666,7 +1662,7 @@ impl<'a> TraitDef<'a> { }), ), ); - ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..])); + ident_exprs.push((sp, struct_field.ident, val)); } ident_exprs } @@ -1678,7 +1674,7 @@ impl<'a> TraitDef<'a> { variant: &'a ast::Variant, prefix: &str, mutbl: ast::Mutability, - ) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { + ) -> (P, Vec<(Span, Option, P)>) { let sp = variant.span.with_ctxt(self.span.ctxt()); let variant_path = cx.path(sp, vec![enum_ident, variant.ident]); let use_temporaries = false; // enums can't be repr(packed) From 7f1dfcab679a1c8248b16e34902a578c2f7bf5e2 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Jul 2022 09:04:41 +1000 Subject: [PATCH 4/8] Avoid transposes in deriving code. The deriving code has some complex parts involving iterations over selflike args and also fields within structs and enum variants. The return types for a few functions demonstrate this: - `TraitDef::create_{struct_pattern,enum_variant_pattern}` returns a `(P, Vec<(Span, Option, P)>)` - `TraitDef::create_struct_field_accesses` returns a `Vec<(Span, Option, P)>`. This results in per-field data stored within per-selflike-arg data, with lots of repetition within the per-field data elements. This then has to be "transposed" in two places (`expand_struct_method_body` and `expand_enum_method_body`) into per-self-like-arg data stored within per-field data. It's all quite clumsy and confusing. This commit rearranges things greatly. Data is obtained in the needed form up-front, avoiding the need for transposition. Also, various functions are split, removed, and added, to make things clearer and avoid tuple return values. The diff is hard to read, which reflects the messiness of the original code -- there wasn't an easy way to break these changes into small pieces. (Sorry!) It's a net reduction of 35 lines and a readability improvement. The generated code is unchanged. 
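To make the shape change concrete, here is a rough stand-alone sketch of the transposition the old code performed (the types and names below are simplified stand-ins, not the real compiler ones); after this commit the per-field form is built directly, so this step is no longer needed:

    // Old shape: the outer Vec is per selflike arg, the inner Vec is per field.
    // Target shape: one entry per field, holding that field's expr for `self`
    // plus the exprs for the same field in the other selflike args.
    struct FieldInfoSketch {
        self_expr: String,                 // expr for this field of `self`
        other_selflike_exprs: Vec<String>, // exprs for this field of `other`, etc.
    }

    fn transpose(per_arg: Vec<Vec<String>>) -> Vec<FieldInfoSketch> {
        let num_fields = per_arg.first().map_or(0, |fields| fields.len());
        (0..num_fields)
            .map(|i| FieldInfoSketch {
                self_expr: per_arg[0][i].clone(),
                other_selflike_exprs: per_arg[1..]
                    .iter()
                    .map(|fields| fields[i].clone())
                    .collect(),
            })
            .collect()
    }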
--- .../src/deriving/generic/mod.rs | 401 ++++++++---------- 1 file changed, 183 insertions(+), 218 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 9781687d06150..47ba6f5017559 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -1047,67 +1047,40 @@ impl<'a> MethodDef<'a> { use_temporaries: bool, is_packed: bool, ) -> BlockOrExpr { - let mut raw_fields = Vec::new(); // Vec<[fields of self], [fields of next Self arg], [etc]> let span = trait_.span; - let mut patterns = Vec::new(); - - for (i, selflike_arg) in selflike_args.iter().enumerate() { - let ident_exprs = if !is_packed { - trait_.create_struct_field_accesses(cx, selflike_arg, struct_def) - } else { - // Get the pattern for the let-destructuring. - // - // We could use `type_ident` instead of `Self`, but in the case of a type parameter - // shadowing the struct name, that causes a second, unnecessary E0578 error. #97343 - let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]); - let (pat, ident_exprs) = trait_.create_struct_pattern( - cx, - struct_path, - struct_def, - &format!("__self_{}", i), - ast::Mutability::Not, - use_temporaries, - ); - patterns.push(pat); - ident_exprs - }; - raw_fields.push(ident_exprs); - } - - // transpose raw_fields - let fields = if !raw_fields.is_empty() { - let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter()); - let first_field = raw_fields.next().unwrap(); - let mut nonself_fields: Vec> = raw_fields.collect(); - first_field - .map(|(span, opt_id, expr)| FieldInfo { - span: span.with_ctxt(trait_.span.ctxt()), - name: opt_id, - self_expr: expr, - other_selflike_exprs: nonself_fields - .iter_mut() - .map(|l| { - let (_, _, ex) = l.next().unwrap(); - ex - }) - .collect(), - }) - .collect() - } else { - cx.span_bug(span, "no `self` parameter for method in generic `derive`") + assert!(selflike_args.len() == 1 || selflike_args.len() == 2); + + let mk_body = |cx, selflike_fields| { + self.call_substructure_method( + cx, + trait_, + type_ident, + nonselflike_args, + &Struct(struct_def, selflike_fields), + ) }; - let mut body = self.call_substructure_method( - cx, - trait_, - type_ident, - nonselflike_args, - &Struct(struct_def, fields), - ); - if !is_packed { - body + let selflike_fields = + trait_.create_struct_field_access_fields(cx, selflike_args, struct_def); + mk_body(cx, selflike_fields) } else { + let prefixes: Vec<_> = + (0..selflike_args.len()).map(|i| format!("__self_{}", i)).collect(); + let selflike_fields = + trait_.create_struct_pattern_fields(cx, struct_def, &prefixes, use_temporaries); + let mut body = mk_body(cx, selflike_fields); + + let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]); + let patterns = trait_.create_struct_patterns( + cx, + struct_path, + struct_def, + &prefixes, + ast::Mutability::Not, + use_temporaries, + ); + // Do the let-destructuring. 
let mut stmts: Vec<_> = iter::zip(selflike_args, patterns) .map(|(selflike_arg_expr, pat)| { @@ -1193,7 +1166,7 @@ impl<'a> MethodDef<'a> { let span = trait_.span; let variants = &enum_def.variants; - let selflike_arg_names = iter::once("__self".to_string()) + let prefixes = iter::once("__self".to_string()) .chain( selflike_args .iter() @@ -1206,7 +1179,7 @@ impl<'a> MethodDef<'a> { // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each selflike_arg to an int // value corresponding to its discriminant. - let vi_idents = selflike_arg_names + let vi_idents = prefixes .iter() .map(|name| { let vi_suffix = format!("{}_vi", name); @@ -1226,36 +1199,37 @@ impl<'a> MethodDef<'a> { // (Variant2, Variant2, ...) => Body2 // ... // where each tuple has length = selflike_args.len() + let mut match_arms: Vec = variants .iter() .enumerate() .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty())) .map(|(index, variant)| { - let mk_selflike_pat = |cx: &mut ExtCtxt<'_>, selflike_arg_name: &str| { - let (p, idents) = trait_.create_enum_variant_pattern( - cx, - type_ident, - variant, - selflike_arg_name, - ast::Mutability::Not, - ); - (cx.pat(span, PatKind::Ref(p, ast::Mutability::Not)), idents) - }; - // A single arm has form (&VariantK, &VariantK, ...) => BodyK // (see "Final wrinkle" note below for why.) - let mut subpats = Vec::with_capacity(selflike_arg_names.len()); - let mut selflike_pats_idents = Vec::with_capacity(selflike_arg_names.len() - 1); - let first_selflike_pat_idents = { - let (p, idents) = mk_selflike_pat(cx, &selflike_arg_names[0]); - subpats.push(p); - idents - }; - for selflike_arg_name in &selflike_arg_names[1..] { - let (p, idents) = mk_selflike_pat(cx, &selflike_arg_name); - subpats.push(p); - selflike_pats_idents.push(idents); - } + + let use_temporaries = false; // enums can't be repr(packed) + let fields = trait_.create_struct_pattern_fields( + cx, + &variant.data, + &prefixes, + use_temporaries, + ); + + let sp = variant.span.with_ctxt(trait_.span.ctxt()); + let variant_path = cx.path(sp, vec![type_ident, variant.ident]); + let mut subpats: Vec<_> = trait_ + .create_struct_patterns( + cx, + variant_path, + &variant.data, + &prefixes, + ast::Mutability::Not, + use_temporaries, + ) + .into_iter() + .map(|p| cx.pat(span, PatKind::Ref(p, ast::Mutability::Not))) + .collect(); // Here is the pat = `(&VariantK, &VariantK, ...)` let single_pat = if subpats.len() == 1 { @@ -1267,54 +1241,12 @@ impl<'a> MethodDef<'a> { // For the BodyK, we need to delegate to our caller, // passing it an EnumMatching to indicate which case // we are in. - - // All of the Self args have the same variant in these - // cases. So we transpose the info in selflike_pats_idents - // to gather the getter expressions together, in the - // form that EnumMatching expects. - - // The transposition is driven by walking across the - // arg fields of the variant for the first selflike pat. - let field_tuples = first_selflike_pat_idents - .into_iter() - .enumerate() - // For each arg field of self, pull out its getter expr ... - .map(|(field_index, (span, opt_ident, self_getter_expr))| { - // ... but FieldInfo also wants getter expr - // for matching other arguments of Self type; - // so walk across the *other* selflike_pats_idents - // and pull out getter for same field in each - // of them (using `field_index` tracked above). - // That is the heart of the transposition. 
- let other_selflike_exprs = selflike_pats_idents - .iter() - .map(|fields| { - let (_, _opt_ident, ref other_getter_expr) = fields[field_index]; - - // All Self args have same variant, so - // opt_idents are the same. (Assert - // here to make it self-evident that - // it is okay to ignore `_opt_ident`.) - assert!(opt_ident == _opt_ident); - - other_getter_expr.clone() - }) - .collect::>>(); - - FieldInfo { - span, - name: opt_ident, - self_expr: self_getter_expr, - other_selflike_exprs, - } - }) - .collect::>(); - + // // Now, for some given VariantK, we have built up // expressions for referencing every field of every // Self arg, assuming all are instances of VariantK. // Build up code associated with such a case. - let substructure = EnumMatching(index, variants.len(), variant, field_tuples); + let substructure = EnumMatching(index, variants.len(), variant, fields); let arm_expr = self .call_substructure_method( cx, @@ -1566,119 +1498,152 @@ impl<'a> TraitDef<'a> { } } - fn create_subpatterns( + fn create_struct_patterns( &self, cx: &mut ExtCtxt<'_>, - field_paths: Vec, + struct_path: ast::Path, + struct_def: &'a VariantData, + prefixes: &[String], mutbl: ast::Mutability, use_temporaries: bool, ) -> Vec> { - field_paths + prefixes .iter() - .map(|path| { - let binding_mode = if use_temporaries { - ast::BindingMode::ByValue(ast::Mutability::Not) - } else { - ast::BindingMode::ByRef(mutbl) - }; - cx.pat(path.span, PatKind::Ident(binding_mode, *path, None)) + .map(|prefix| { + let pieces: Vec<_> = struct_def + .fields() + .iter() + .enumerate() + .map(|(i, struct_field)| { + let sp = struct_field.span.with_ctxt(self.span.ctxt()); + let binding_mode = if use_temporaries { + ast::BindingMode::ByValue(ast::Mutability::Not) + } else { + ast::BindingMode::ByRef(mutbl) + }; + let ident = self.mk_pattern_ident(prefix, i); + let path = ident.with_span_pos(sp); + ( + sp, + struct_field.ident, + cx.pat(path.span, PatKind::Ident(binding_mode, path, None)), + ) + }) + .collect(); + + let struct_path = struct_path.clone(); + match *struct_def { + VariantData::Struct(..) => { + let field_pats = pieces + .into_iter() + .map(|(sp, ident, pat)| { + if ident.is_none() { + cx.span_bug( + sp, + "a braced struct with unnamed fields in `derive`", + ); + } + ast::PatField { + ident: ident.unwrap(), + is_shorthand: false, + attrs: ast::AttrVec::new(), + id: ast::DUMMY_NODE_ID, + span: pat.span.with_ctxt(self.span.ctxt()), + pat, + is_placeholder: false, + } + }) + .collect(); + cx.pat_struct(self.span, struct_path, field_pats) + } + VariantData::Tuple(..) => { + let subpats = pieces.into_iter().map(|(_, _, subpat)| subpat).collect(); + cx.pat_tuple_struct(self.span, struct_path, subpats) + } + VariantData::Unit(..) 
=> cx.pat_path(self.span, struct_path), + } }) .collect() } - fn create_struct_pattern( - &self, - cx: &mut ExtCtxt<'_>, - struct_path: ast::Path, - struct_def: &'a VariantData, - prefix: &str, - mutbl: ast::Mutability, - use_temporaries: bool, - ) -> (P, Vec<(Span, Option, P)>) { - let mut paths = Vec::new(); - let mut ident_exprs = Vec::new(); - for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = struct_field.span.with_ctxt(self.span.ctxt()); - let ident = Ident::from_str_and_span(&format!("{}_{}", prefix, i), self.span); - paths.push(ident.with_span_pos(sp)); - let val = cx.expr_path(cx.path_ident(sp, ident)); - let val = if use_temporaries { val } else { cx.expr_deref(sp, val) }; - ident_exprs.push((sp, struct_field.ident, val)); - } - - let subpats = self.create_subpatterns(cx, paths, mutbl, use_temporaries); - let pattern = match *struct_def { - VariantData::Struct(..) => { - let field_pats = iter::zip(subpats, &ident_exprs) - .map(|(pat, &(sp, ident, ..))| { - if ident.is_none() { - cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); - } - ast::PatField { - ident: ident.unwrap(), - is_shorthand: false, - attrs: ast::AttrVec::new(), - id: ast::DUMMY_NODE_ID, - span: pat.span.with_ctxt(self.span.ctxt()), - pat, - is_placeholder: false, - } - }) - .collect(); - cx.pat_struct(self.span, struct_path, field_pats) - } - VariantData::Tuple(..) => cx.pat_tuple_struct(self.span, struct_path, subpats), - VariantData::Unit(..) => cx.pat_path(self.span, struct_path), - }; + fn create_fields(&self, struct_def: &'a VariantData, mk_exprs: F) -> Vec + where + F: Fn(usize, &ast::FieldDef, Span) -> Vec>, + { + struct_def + .fields() + .iter() + .enumerate() + .map(|(i, struct_field)| { + // For this field, get an expr for each selflike_arg. E.g. for + // `PartialEq::eq`, one for each of `&self` and `other`. + let sp = struct_field.span.with_ctxt(self.span.ctxt()); + let mut exprs: Vec<_> = mk_exprs(i, struct_field, sp); + let self_expr = exprs.remove(0); + let other_selflike_exprs = exprs; + FieldInfo { + span: sp.with_ctxt(self.span.ctxt()), + name: struct_field.ident, + self_expr, + other_selflike_exprs, + } + }) + .collect() + } - (pattern, ident_exprs) + fn mk_pattern_ident(&self, prefix: &str, i: usize) -> Ident { + Ident::from_str_and_span(&format!("{}_{}", prefix, i), self.span) } - fn create_struct_field_accesses( + fn create_struct_pattern_fields( &self, cx: &mut ExtCtxt<'_>, - mut selflike_arg: &P, struct_def: &'a VariantData, - ) -> Vec<(Span, Option, P)> { - let mut ident_exprs = Vec::new(); - for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = struct_field.span.with_ctxt(self.span.ctxt()); - - // We don't the need the deref, if there is one. - if let ast::ExprKind::Unary(ast::UnOp::Deref, inner) = &selflike_arg.kind { - selflike_arg = inner; - } - - // Note: we must use `struct_field.span` rather than `span` in the - // `unwrap_or_else` case otherwise the hygiene is wrong and we get - // "field `0` of struct `Point` is private" errors on tuple - // structs. 
- let val = cx.expr( - sp, - ast::ExprKind::Field( - selflike_arg.clone(), - struct_field.ident.unwrap_or_else(|| { - Ident::from_str_and_span(&i.to_string(), struct_field.span) - }), - ), - ); - ident_exprs.push((sp, struct_field.ident, val)); - } - ident_exprs + prefixes: &[String], + use_temporaries: bool, + ) -> Vec { + self.create_fields(struct_def, |i, _struct_field, sp| { + prefixes + .iter() + .map(|prefix| { + let ident = self.mk_pattern_ident(prefix, i); + let expr = cx.expr_path(cx.path_ident(sp, ident)); + if use_temporaries { expr } else { cx.expr_deref(sp, expr) } + }) + .collect() + }) } - fn create_enum_variant_pattern( + fn create_struct_field_access_fields( &self, cx: &mut ExtCtxt<'_>, - enum_ident: Ident, - variant: &'a ast::Variant, - prefix: &str, - mutbl: ast::Mutability, - ) -> (P, Vec<(Span, Option, P)>) { - let sp = variant.span.with_ctxt(self.span.ctxt()); - let variant_path = cx.path(sp, vec![enum_ident, variant.ident]); - let use_temporaries = false; // enums can't be repr(packed) - self.create_struct_pattern(cx, variant_path, &variant.data, prefix, mutbl, use_temporaries) + selflike_args: &[P], + struct_def: &'a VariantData, + ) -> Vec { + self.create_fields(struct_def, |i, struct_field, sp| { + selflike_args + .iter() + .map(|mut selflike_arg| { + // We don't the need the deref, if there is one. + if let ast::ExprKind::Unary(ast::UnOp::Deref, inner) = &selflike_arg.kind { + selflike_arg = inner; + } + // Note: we must use `struct_field.span` rather than `span` in the + // `unwrap_or_else` case otherwise the hygiene is wrong and we get + // "field `0` of struct `Point` is private" errors on tuple + // structs. + cx.expr( + sp, + ast::ExprKind::Field( + selflike_arg.clone(), + struct_field.ident.unwrap_or_else(|| { + Ident::from_str_and_span(&i.to_string(), struct_field.span) + }), + ), + ) + }) + .collect() + }) } } From 65d0bfbca569b91eec6199b4629bc53db417efed Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Jul 2022 09:50:36 +1000 Subject: [PATCH 5/8] Cut down large comment about zero-variant enums. When deriving functions for zero-variant enums, we just generated a function body that calls `std::instrincs::unreachable`. There is a large comment with some not-very-useful historical discussion about alternatives, including some discussion of feature-gating zero-variant enums, which is clearly irrelevant today. This commit cuts the comment down greatly. 
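
For illustration, deriving on a zero-variant enum still compiles; the generated
method bodies simply can never run, roughly an `unsafe` call to
`::core::intrinsics::unreachable()`. A minimal sketch of the user-visible
behaviour (the `Never` enum and `debug_never` function here are hypothetical,
not part of this patch):

    // Deriving on a zero-variant enum is accepted; the derived bodies can
    // never execute because no value of the type can be constructed.
    #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
    enum Never {}

    // Unreachable in practice: there is no way to obtain a `&Never`.
    #[allow(dead_code)]
    fn debug_never(x: &Never) -> String {
        format!("{:?}", x)
    }

    fn main() {
        // Nothing to call the derived methods on, but everything compiles.
        println!("zero-variant enum derives compile");
    }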
--- .../src/deriving/generic/mod.rs | 52 ++----------------- 1 file changed, 3 insertions(+), 49 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 47ba6f5017559..70a97c32b4869 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -1378,55 +1378,9 @@ impl<'a> MethodDef<'a> { let arm_expr = cx.expr_if(span, discriminant_test, all_match, Some(arm_expr)); BlockOrExpr(index_let_stmts, Some(arm_expr)) } else if variants.is_empty() { - // As an additional wrinkle, For a zero-variant enum A, - // currently the compiler - // will accept `fn (a: &Self) { match *a { } }` - // but rejects `fn (a: &Self) { match (&*a,) { } }` - // as well as `fn (a: &Self) { match ( *a,) { } }` - // - // This means that the strategy of building up a tuple of - // all Self arguments fails when Self is a zero variant - // enum: rustc rejects the expanded program, even though - // the actual code tends to be impossible to execute (at - // least safely), according to the type system. - // - // The most expedient fix for this is to just let the - // code fall through to the catch-all. But even this is - // error-prone, since the catch-all as defined above would - // generate code like this: - // - // _ => { let __self0 = match *self { }; - // let __self1 = match *__arg_0 { }; - // } - // - // Which is yields bindings for variables which type - // inference cannot resolve to unique types. - // - // One option to the above might be to add explicit type - // annotations. But the *only* reason to go down that path - // would be to try to make the expanded output consistent - // with the case when the number of enum variants >= 1. - // - // That just isn't worth it. In fact, trying to generate - // sensible code for *any* deriving on a zero-variant enum - // does not make sense. But at the same time, for now, we - // do not want to cause a compile failure just because the - // user happened to attach a deriving to their - // zero-variant enum. - // - // Instead, just generate a failing expression for the - // zero variant case, skipping matches and also skipping - // delegating back to the end user code entirely. - // - // (See also #4499 and #12609; note that some of the - // discussions there influence what choice we make here; - // e.g., if we feature-gate `match x { ... }` when x refers - // to an uninhabited type (e.g., a zero-variant enum or a - // type holding such an enum), but do not feature-gate - // zero-variant enums themselves, then attempting to - // derive Debug on such a type could here generate code - // that needs the feature gate enabled.) - + // There is no sensible code to be generated for *any* deriving on + // a zero-variant enum. So we just generate a failing expression + // for the zero variant case. BlockOrExpr(vec![], Some(deriving::call_unreachable(cx, span))) } else { // Final wrinkle: the selflike_args are expressions that deref From 559398fa7860fff2b4058c302efb6f14312b0fe4 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Jul 2022 10:21:37 +1000 Subject: [PATCH 6/8] Fix some inconsistencies. This makes `cs_cmp`, `cs_partial_cmp`, and `cs_op` (for `PartialEq`) more similar. It also fixes some out of date comments. 
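
For reference, a hand-written impl that mirrors the shape described by the
updated `cs_cmp` comment below; the `Point` struct and its `x`/`y` fields are
hypothetical, and real derive output uses absolute `::core::...` paths rather
than the prelude names used here:

    use std::cmp::Ordering;

    #[derive(PartialEq, Eq)]
    struct Point {
        x: u32,
        y: u32,
    }

    // Roughly the shape `#[derive(Ord)]` produces: compare the first field,
    // and only fall through to the next field on `Ordering::Equal`.
    impl Ord for Point {
        fn cmp(&self, other: &Self) -> Ordering {
            match Ord::cmp(&self.x, &other.x) {
                Ordering::Equal => Ord::cmp(&self.y, &other.y),
                cmp => cmp,
            }
        }
    }

    impl PartialOrd for Point {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }

    fn main() {
        let a = Point { x: 1, y: 2 };
        let b = Point { x: 1, y: 3 };
        assert_eq!(a.cmp(&b), Ordering::Less);
    }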
--- .../src/deriving/cmp/ord.rs | 42 ++++++------------- .../src/deriving/cmp/partial_eq.rs | 16 ++++--- .../src/deriving/cmp/partial_ord.rs | 31 +++++--------- 3 files changed, 31 insertions(+), 58 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs index a60db068f27de..1856be87a20dd 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs @@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*; use crate::deriving::generic::*; use crate::deriving::path_std; -use rustc_ast::ptr::P; -use rustc_ast::{self as ast, MetaItem}; +use rustc_ast::MetaItem; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::symbol::{sym, Ident}; use rustc_span::Span; @@ -40,43 +39,25 @@ pub fn expand_deriving_ord( trait_def.expand(cx, mitem, item, push) } -pub fn ordering_collapsed( - cx: &mut ExtCtxt<'_>, - span: Span, - self_arg_tags: &[Ident], -) -> P { - let lft = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[0])); - let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); - let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); - cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt]) -} - pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr { let test_id = Ident::new(sym::cmp, span); - let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); - + let equal_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); let cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); // Builds: // - // match ::std::cmp::Ord::cmp(&self_field1, &other_field1) { - // ::std::cmp::Ordering::Equal => - // match ::std::cmp::Ord::cmp(&self_field2, &other_field2) { - // ::std::cmp::Ordering::Equal => { - // ... - // } - // cmp => cmp - // }, - // cmp => cmp + // match ::core::cmp::Ord::cmp(&self.x, &other.x) { + // ::std::cmp::Ordering::Equal => + // ::core::cmp::Ord::cmp(&self.y, &other.y), + // cmp => cmp, // } - // let expr = cs_fold( // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. 
false, |cx, span, old, self_expr, other_selflike_exprs| { // match new { - // ::std::cmp::Ordering::Equal => old, + // ::core::cmp::Ordering::Equal => old, // cmp => cmp // } let new = { @@ -90,7 +71,7 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> Bl cx.expr_call_global(span, cmp_path.clone(), args) }; - let eq_arm = cx.arm(span, cx.pat_path(span, equals_path.clone()), old); + let eq_arm = cx.arm(span, cx.pat_path(span, equal_path.clone()), old); let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); cx.expr_match(span, new, vec![eq_arm, neq_arm]) @@ -110,13 +91,16 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> Bl new } - None => cx.expr_path(equals_path.clone()), + None => cx.expr_path(equal_path.clone()), }, Box::new(|cx, span, tag_tuple| { if tag_tuple.len() != 2 { cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`") } else { - ordering_collapsed(cx, span, tag_tuple) + let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); + let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); + let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); + cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt]) } }), cx, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs index a18d78b8476c5..e4af0166577e1 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs @@ -36,18 +36,16 @@ pub fn expand_deriving_partial_eq( let expr = cs_fold( true, // use foldl - |cx, span, subexpr, self_expr, other_selflike_exprs| { + |cx, span, old, self_expr, other_selflike_exprs| { let eq = op(cx, span, self_expr, other_selflike_exprs); - cx.expr_binary(span, combiner, subexpr, eq) + cx.expr_binary(span, combiner, old, eq) }, - |cx, args| { - match args { - Some((span, self_expr, other_selflike_exprs)) => { - // Special-case the base case to generate cleaner code. - op(cx, span, self_expr, other_selflike_exprs) - } - None => cx.expr_bool(span, base), + |cx, args| match args { + Some((span, self_expr, other_selflike_exprs)) => { + // Special-case the base case to generate cleaner code. 
+ op(cx, span, self_expr, other_selflike_exprs) } + None => cx.expr_bool(span, base), }, Box::new(|cx, span, _| cx.expr_bool(span, !base)), cx, diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs index b809eaf8eecac..bf52c63fad4b7 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs @@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*; use crate::deriving::generic::*; use crate::deriving::{path_std, pathvec_std}; -use rustc_ast::ptr::P; -use rustc_ast::{Expr, MetaItem}; +use rustc_ast::MetaItem; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::symbol::{sym, Ident}; use rustc_span::Span; @@ -50,34 +49,25 @@ pub fn expand_deriving_partial_ord( pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> BlockOrExpr { let test_id = Ident::new(sym::cmp, span); - let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); - let ordering_expr = cx.expr_path(ordering.clone()); - + let equal_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); let partial_cmp_path = cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); // Builds: // - // match ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1) { - // ::std::option::Option::Some(::std::cmp::Ordering::Equal) => - // match ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2) { - // ::std::option::Option::Some(::std::cmp::Ordering::Equal) => { - // ... - // } - // cmp => cmp - // }, - // cmp => cmp + // match ::core::cmp::PartialOrd::partial_cmp(&self.x, &other.x) { + // ::core::option::Option::Some(::core::cmp::Ordering::Equal) => + // ::core::cmp::PartialOrd::partial_cmp(&self.y, &other.y), + // cmp => cmp, // } - // let expr = cs_fold( // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. false, |cx, span, old, self_expr, other_selflike_exprs| { // match new { - // Some(::std::cmp::Ordering::Equal) => old, + // Some(::core::cmp::Ordering::Equal) => old, // cmp => cmp // } - let new = { let [other_expr] = other_selflike_exprs else { cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"); @@ -91,12 +81,13 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ cx.expr_call_global(span, partial_cmp_path.clone(), args) }; - let eq_arm = cx.arm(span, cx.pat_some(span, cx.pat_path(span, ordering.clone())), old); + let eq_arm = + cx.arm(span, cx.pat_some(span, cx.pat_path(span, equal_path.clone())), old); let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); cx.expr_match(span, new, vec![eq_arm, neq_arm]) }, - |cx: &mut ExtCtxt<'_>, args: Option<(Span, P, &[P])>| match args { + |cx, args| match args { Some((span, self_expr, other_selflike_exprs)) => { let new = { let [other_expr] = other_selflike_exprs else { @@ -111,7 +102,7 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ new } - None => cx.expr_some(span, ordering_expr.clone()), + None => cx.expr_some(span, cx.expr_path(equal_path.clone())), }, Box::new(|cx, span, tag_tuple| { if tag_tuple.len() != 2 { From 16a286b003477fe07c06c5030f0ae8298c3e78ec Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Jul 2022 11:23:55 +1000 Subject: [PATCH 7/8] Simplify `cs_fold`. 
`cs_fold` has four distinct cases, covered by three different function arguments: - first field - combine current field with previous results - no fields - non-matching enum variants This commit clarifies things by replacing the three function arguments with one that takes a new `CsFold` type with four slightly different) cases - single field - combine result for current field with results for previous fields - no fields - non-matching enum variants This makes the code shorter and clearer. --- .../src/deriving/cmp/ord.rs | 75 +++++++---------- .../src/deriving/cmp/partial_eq.rs | 37 +++------ .../src/deriving/cmp/partial_ord.rs | 81 +++++++----------- .../src/deriving/generic/mod.rs | 83 ++++++++++--------- 4 files changed, 114 insertions(+), 162 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs index 1856be87a20dd..859e995356e80 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/ord.rs @@ -55,57 +55,38 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> Bl // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. false, - |cx, span, old, self_expr, other_selflike_exprs| { - // match new { - // ::core::cmp::Ordering::Equal => old, - // cmp => cmp - // } - let new = { - let [other_expr] = other_selflike_exprs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); - }; + cx, + span, + substr, + |cx, fold| match fold { + CsFold::Single(field) => { + let [other_expr] = &field.other_selflike_exprs[..] else { + cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`"); + }; let args = vec![ - cx.expr_addr_of(span, self_expr), - cx.expr_addr_of(span, other_expr.clone()), + cx.expr_addr_of(field.span, field.self_expr.clone()), + cx.expr_addr_of(field.span, other_expr.clone()), ]; - cx.expr_call_global(span, cmp_path.clone(), args) - }; - - let eq_arm = cx.arm(span, cx.pat_path(span, equal_path.clone()), old); - let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); - - cx.expr_match(span, new, vec![eq_arm, neq_arm]) - }, - |cx, args| match args { - Some((span, self_expr, other_selflike_exprs)) => { - let new = { - let [other_expr] = other_selflike_exprs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); - }; - let args = vec![ - cx.expr_addr_of(span, self_expr), - cx.expr_addr_of(span, other_expr.clone()), - ]; - cx.expr_call_global(span, cmp_path.clone(), args) - }; - - new + cx.expr_call_global(field.span, cmp_path.clone(), args) } - None => cx.expr_path(equal_path.clone()), - }, - Box::new(|cx, span, tag_tuple| { - if tag_tuple.len() != 2 { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`") - } else { - let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); - let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); - let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); - cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt]) + CsFold::Combine(span, expr1, expr2) => { + let eq_arm = cx.arm(span, cx.pat_path(span, equal_path.clone()), expr1); + let neq_arm = + cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); + cx.expr_match(span, expr2, vec![eq_arm, neq_arm]) } - }), - cx, - span, - substr, + CsFold::Fieldless => cx.expr_path(equal_path.clone()), + CsFold::EnumNonMatching(span, tag_tuple) => { + if 
tag_tuple.len() != 2 { + cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`") + } else { + let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); + let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); + let fn_cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); + cx.expr_call_global(span, fn_cmp_path, vec![lft, rgt]) + } + } + }, ); BlockOrExpr::new_expr(expr) } diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs index e4af0166577e1..724c639984cca 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_eq.rs @@ -2,8 +2,7 @@ use crate::deriving::generic::ty::*; use crate::deriving::generic::*; use crate::deriving::{path_local, path_std}; -use rustc_ast::ptr::P; -use rustc_ast::{BinOpKind, Expr, MetaItem}; +use rustc_ast::{BinOpKind, MetaItem}; use rustc_expand::base::{Annotatable, ExtCtxt}; use rustc_span::symbol::sym; use rustc_span::Span; @@ -23,34 +22,22 @@ pub fn expand_deriving_partial_eq( combiner: BinOpKind, base: bool, ) -> BlockOrExpr { - let op = |cx: &mut ExtCtxt<'_>, - span: Span, - self_expr: P, - other_selflike_exprs: &[P]| { - let [other_expr] = other_selflike_exprs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"); - }; - - cx.expr_binary(span, op, self_expr, other_expr.clone()) - }; - let expr = cs_fold( true, // use foldl - |cx, span, old, self_expr, other_selflike_exprs| { - let eq = op(cx, span, self_expr, other_selflike_exprs); - cx.expr_binary(span, combiner, old, eq) - }, - |cx, args| match args { - Some((span, self_expr, other_selflike_exprs)) => { - // Special-case the base case to generate cleaner code. - op(cx, span, self_expr, other_selflike_exprs) - } - None => cx.expr_bool(span, base), - }, - Box::new(|cx, span, _| cx.expr_bool(span, !base)), cx, span, substr, + |cx, fold| match fold { + CsFold::Single(field) => { + let [other_expr] = &field.other_selflike_exprs[..] else { + cx.span_bug(field.span, "not exactly 2 arguments in `derive(PartialEq)`"); + }; + cx.expr_binary(field.span, op, field.self_expr.clone(), other_expr.clone()) + } + CsFold::Combine(span, expr1, expr2) => cx.expr_binary(span, combiner, expr1, expr2), + CsFold::Fieldless => cx.expr_bool(span, base), + CsFold::EnumNonMatching(span, _tag_tuple) => cx.expr_bool(span, !base), + }, ); BlockOrExpr::new_expr(expr) } diff --git a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs index bf52c63fad4b7..3f9843922dad7 100644 --- a/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs +++ b/compiler/rustc_builtin_macros/src/deriving/cmp/partial_ord.rs @@ -63,61 +63,40 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_ // foldr nests the if-elses correctly, leaving the first field // as the outermost one, and the last as the innermost. false, - |cx, span, old, self_expr, other_selflike_exprs| { - // match new { - // Some(::core::cmp::Ordering::Equal) => old, - // cmp => cmp - // } - let new = { - let [other_expr] = other_selflike_exprs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"); - }; - + cx, + span, + substr, + |cx, fold| match fold { + CsFold::Single(field) => { + let [other_expr] = &field.other_selflike_exprs[..] 
else { + cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`"); + }; let args = vec![ - cx.expr_addr_of(span, self_expr), - cx.expr_addr_of(span, other_expr.clone()), + cx.expr_addr_of(field.span, field.self_expr.clone()), + cx.expr_addr_of(field.span, other_expr.clone()), ]; - - cx.expr_call_global(span, partial_cmp_path.clone(), args) - }; - - let eq_arm = - cx.arm(span, cx.pat_some(span, cx.pat_path(span, equal_path.clone())), old); - let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); - - cx.expr_match(span, new, vec![eq_arm, neq_arm]) - }, - |cx, args| match args { - Some((span, self_expr, other_selflike_exprs)) => { - let new = { - let [other_expr] = other_selflike_exprs else { - cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"); - }; - let args = vec![ - cx.expr_addr_of(span, self_expr), - cx.expr_addr_of(span, other_expr.clone()), - ]; - cx.expr_call_global(span, partial_cmp_path.clone(), args) - }; - - new + cx.expr_call_global(field.span, partial_cmp_path.clone(), args) } - None => cx.expr_some(span, cx.expr_path(equal_path.clone())), - }, - Box::new(|cx, span, tag_tuple| { - if tag_tuple.len() != 2 { - cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") - } else { - let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); - let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); - let fn_partial_cmp_path = - cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); - cx.expr_call_global(span, fn_partial_cmp_path, vec![lft, rgt]) + CsFold::Combine(span, expr1, expr2) => { + let eq_arm = + cx.arm(span, cx.pat_some(span, cx.pat_path(span, equal_path.clone())), expr1); + let neq_arm = + cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id)); + cx.expr_match(span, expr2, vec![eq_arm, neq_arm]) } - }), - cx, - span, - substr, + CsFold::Fieldless => cx.expr_some(span, cx.expr_path(equal_path.clone())), + CsFold::EnumNonMatching(span, tag_tuple) => { + if tag_tuple.len() != 2 { + cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") + } else { + let lft = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[0])); + let rgt = cx.expr_addr_of(span, cx.expr_ident(span, tag_tuple[1])); + let fn_partial_cmp_path = + cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]); + cx.expr_call_global(span, fn_partial_cmp_path, vec![lft, rgt]) + } + } + }, ); BlockOrExpr::new_expr(expr) } diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 70a97c32b4869..5cad71467a15e 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -296,11 +296,6 @@ pub enum SubstructureFields<'a> { pub type CombineSubstructureFunc<'a> = Box, Span, &Substructure<'_>) -> BlockOrExpr + 'a>; -/// Deal with non-matching enum variants. The slice is the identifiers holding -/// the variant index value for each of the `Self` arguments. -pub type EnumNonMatchCollapsedFunc<'a> = - Box, Span, &[Ident]) -> P + 'a>; - pub fn combine_substructure( f: CombineSubstructureFunc<'_>, ) -> RefCell> { @@ -1601,55 +1596,65 @@ impl<'a> TraitDef<'a> { } } -/// Function to fold over fields, with three cases, to generate more efficient and concise code. -/// When the `substructure` has grouped fields, there are two cases: -/// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`). 
-/// One or more fields: call the base case function on the first value (which depends on -/// `use_fold`), and use that as the base case. Then perform `cs_fold` on the remainder of the -/// fields. -/// When the `substructure` is an `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f` -/// is returned. Statics may not be folded over. -pub fn cs_fold( +/// The function passed to `cs_fold` is called repeatedly with a value of this +/// type. It describes one part of the code generation. The result is always an +/// expression. +pub enum CsFold<'a> { + /// The basic case: a field expression for one or more selflike args. E.g. + /// for `PartialEq::eq` this is something like `self.x == other.x`. + Single(&'a FieldInfo), + + /// The combination of two field expressions. E.g. for `PartialEq::eq` this + /// is something like ` && `. + Combine(Span, P, P), + + // The fallback case for a struct or enum variant with no fields. + Fieldless, + + /// The fallback case for non-matching enum variants. The slice is the + /// identifiers holding the variant index value for each of the `Self` + /// arguments. + EnumNonMatching(Span, &'a [Ident]), +} + +/// Folds over fields, combining the expressions for each field in a sequence. +/// Statics may not be folded over. +pub fn cs_fold( use_foldl: bool, - mut f: F, - mut b: B, - mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, cx: &mut ExtCtxt<'_>, trait_span: Span, substructure: &Substructure<'_>, + mut f: F, ) -> P where - F: FnMut(&mut ExtCtxt<'_>, Span, P, P, &[P]) -> P, - B: FnMut(&mut ExtCtxt<'_>, Option<(Span, P, &[P])>) -> P, + F: FnMut(&mut ExtCtxt<'_>, CsFold<'_>) -> P, { match *substructure.fields { EnumMatching(.., ref all_fields) | Struct(_, ref all_fields) => { - let (base, rest) = match (all_fields.is_empty(), use_foldl) { - (false, true) => { - let (first, rest) = all_fields.split_first().unwrap(); - let args = - (first.span, first.self_expr.clone(), &first.other_selflike_exprs[..]); - (b(cx, Some(args)), rest) - } - (false, false) => { - let (last, rest) = all_fields.split_last().unwrap(); - let args = (last.span, last.self_expr.clone(), &last.other_selflike_exprs[..]); - (b(cx, Some(args)), rest) - } - (true, _) => (b(cx, None), &all_fields[..]), + if all_fields.is_empty() { + return f(cx, CsFold::Fieldless); + } + + let (base_field, rest) = if use_foldl { + all_fields.split_first().unwrap() + } else { + all_fields.split_last().unwrap() + }; + + let base_expr = f(cx, CsFold::Single(base_field)); + + let op = |old, field: &FieldInfo| { + let new = f(cx, CsFold::Single(field)); + f(cx, CsFold::Combine(field.span, old, new)) }; if use_foldl { - rest.iter().fold(base, |old, field| { - f(cx, field.span, old, field.self_expr.clone(), &field.other_selflike_exprs) - }) + rest.iter().fold(base_expr, op) } else { - rest.iter().rev().fold(base, |old, field| { - f(cx, field.span, old, field.self_expr.clone(), &field.other_selflike_exprs) - }) + rest.iter().rfold(base_expr, op) } } - EnumNonMatchingCollapsed(tuple) => enum_nonmatch_f(cx, trait_span, tuple), + EnumNonMatchingCollapsed(tuple) => f(cx, CsFold::EnumNonMatching(trait_span, tuple)), StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"), } } From 0578697a63f3b531f6ffeb18a186eff372bf67f1 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Fri, 8 Jul 2022 07:57:34 +1000 Subject: [PATCH 8/8] Minor updates based on review comments. 
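
One of the review changes below swaps a `matches!`-based guard for a `box`
pattern when recognising `&Self` arguments. A minimal, hypothetical sketch of
that pattern style, using a stand-in `Ty` enum rather than the real deriving
types; `box_patterns` is unstable, so this needs a nightly compiler:

    #![feature(box_patterns)]

    enum Ty {
        Self_,
        Unit,
        Ref(Box<Ty>),
    }

    fn is_selflike(ty: &Ty) -> bool {
        match ty {
            // `Ref(box Self_)` matches a `Ref` whose boxed payload is `Self_`,
            // replacing the older `Ref(ref inner) if matches!(**inner, Self_)`
            // style of guard.
            Ty::Ref(box Ty::Self_) => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_selflike(&Ty::Ref(Box::new(Ty::Self_))));
        assert!(!is_selflike(&Ty::Ref(Box::new(Ty::Unit))));
        assert!(!is_selflike(&Ty::Self_));
    }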
--- .../src/deriving/generic/mod.rs | 25 ++++++++----------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 5cad71467a15e..74e18bffc2ec9 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -245,7 +245,8 @@ pub struct MethodDef<'a> { pub struct Substructure<'a> { /// ident of self pub type_ident: Ident, - /// verbatim access to any non-selflike arguments + /// Verbatim access to any non-selflike arguments, i.e. arguments that + /// don't have type `&Self`. pub nonselflike_args: &'a [P], pub fields: &'a SubstructureFields<'a>, } @@ -934,10 +935,9 @@ impl<'a> MethodDef<'a> { let arg_expr = cx.expr_ident(span, ident); - match *ty { - // for static methods, just treat any Self - // arguments as a normal arg - Ref(ref ty, _) if matches!(**ty, Self_) && !self.is_static() => { + match ty { + // Selflike (`&Self`) arguments only occur in non-static methods. + Ref(box Self_, _) if !self.is_static() => { selflike_args.push(cx.expr_deref(span, arg_expr)) } Self_ => cx.span_bug(span, "`Self` in non-return position"), @@ -1459,11 +1459,8 @@ impl<'a> TraitDef<'a> { prefixes .iter() .map(|prefix| { - let pieces: Vec<_> = struct_def - .fields() - .iter() - .enumerate() - .map(|(i, struct_field)| { + let pieces_iter = + struct_def.fields().iter().enumerate().map(|(i, struct_field)| { let sp = struct_field.span.with_ctxt(self.span.ctxt()); let binding_mode = if use_temporaries { ast::BindingMode::ByValue(ast::Mutability::Not) @@ -1477,14 +1474,12 @@ impl<'a> TraitDef<'a> { struct_field.ident, cx.pat(path.span, PatKind::Ident(binding_mode, path, None)), ) - }) - .collect(); + }); let struct_path = struct_path.clone(); match *struct_def { VariantData::Struct(..) => { - let field_pats = pieces - .into_iter() + let field_pats = pieces_iter .map(|(sp, ident, pat)| { if ident.is_none() { cx.span_bug( @@ -1506,7 +1501,7 @@ impl<'a> TraitDef<'a> { cx.pat_struct(self.span, struct_path, field_pats) } VariantData::Tuple(..) => { - let subpats = pieces.into_iter().map(|(_, _, subpat)| subpat).collect(); + let subpats = pieces_iter.map(|(_, _, subpat)| subpat).collect(); cx.pat_tuple_struct(self.span, struct_path, subpats) } VariantData::Unit(..) => cx.pat_path(self.span, struct_path),