diff --git a/.github/workflows/post-merge.yml b/.github/workflows/post-merge.yml index 2bc06d83c5b7c..31e075f45d646 100644 --- a/.github/workflows/post-merge.yml +++ b/.github/workflows/post-merge.yml @@ -36,7 +36,7 @@ jobs: cd src/ci/citool echo "Post-merge analysis result" > output.log - cargo run --release post-merge-analysis ${PARENT_COMMIT} ${{ github.sha }} >> output.log + cargo run --release post-merge-report ${PARENT_COMMIT} ${{ github.sha }} >> output.log cat output.log gh pr comment ${HEAD_PR} -F output.log diff --git a/compiler/rustc_attr_data_structures/src/lib.rs b/compiler/rustc_attr_data_structures/src/lib.rs index f4986d1d1872a..389d8c2413bfa 100644 --- a/compiler/rustc_attr_data_structures/src/lib.rs +++ b/compiler/rustc_attr_data_structures/src/lib.rs @@ -35,13 +35,17 @@ pub trait HashStableContext: rustc_ast::HashStableContext + rustc_abi::HashStabl /// like [`Span`]s and empty tuples, are gracefully skipped so they don't clutter the /// representation much. pub trait PrintAttribute { - fn print_something(&self) -> bool; + /// Whether or not this will render as something meaningful, or if it's skipped + /// (which will force the containing struct to also skip printing a comma + /// and the field name). + fn should_render(&self) -> bool; + fn print_attribute(&self, p: &mut Printer); } impl PrintAttribute for &T { - fn print_something(&self) -> bool { - T::print_something(self) + fn should_render(&self) -> bool { + T::should_render(self) } fn print_attribute(&self, p: &mut Printer) { @@ -49,9 +53,10 @@ impl PrintAttribute for &T { } } impl PrintAttribute for Option { - fn print_something(&self) -> bool { - self.as_ref().is_some_and(|x| x.print_something()) + fn should_render(&self) -> bool { + self.as_ref().is_some_and(|x| x.should_render()) } + fn print_attribute(&self, p: &mut Printer) { if let Some(i) = self { T::print_attribute(i, p) @@ -59,9 +64,10 @@ impl PrintAttribute for Option { } } impl PrintAttribute for ThinVec { - fn print_something(&self) -> bool { - self.is_empty() || self[0].print_something() + fn should_render(&self) -> bool { + self.is_empty() || self[0].should_render() } + fn print_attribute(&self, p: &mut Printer) { let mut last_printed = false; p.word("["); @@ -70,7 +76,7 @@ impl PrintAttribute for ThinVec { p.word_space(","); } i.print_attribute(p); - last_printed = i.print_something(); + last_printed = i.should_render(); } p.word("]"); } @@ -78,7 +84,7 @@ impl PrintAttribute for ThinVec { macro_rules! print_skip { ($($t: ty),* $(,)?) => {$( impl PrintAttribute for $t { - fn print_something(&self) -> bool { false } + fn should_render(&self) -> bool { false } fn print_attribute(&self, _: &mut Printer) { } })* }; @@ -87,7 +93,7 @@ macro_rules! print_skip { macro_rules! print_disp { ($($t: ty),* $(,)?) => {$( impl PrintAttribute for $t { - fn print_something(&self) -> bool { true } + fn should_render(&self) -> bool { true } fn print_attribute(&self, p: &mut Printer) { p.word(format!("{}", self)); } @@ -97,7 +103,7 @@ macro_rules! print_disp { macro_rules! print_debug { ($($t: ty),* $(,)?) => {$( impl PrintAttribute for $t { - fn print_something(&self) -> bool { true } + fn should_render(&self) -> bool { true } fn print_attribute(&self, p: &mut Printer) { p.word(format!("{:?}", self)); } @@ -106,37 +112,39 @@ macro_rules! print_debug { } macro_rules! 
print_tup { - (num_print_something $($ts: ident)*) => { 0 $(+ $ts.print_something() as usize)* }; + (num_should_render $($ts: ident)*) => { 0 $(+ $ts.should_render() as usize)* }; () => {}; ($t: ident $($ts: ident)*) => { #[allow(non_snake_case, unused)] impl<$t: PrintAttribute, $($ts: PrintAttribute),*> PrintAttribute for ($t, $($ts),*) { - fn print_something(&self) -> bool { + fn should_render(&self) -> bool { let ($t, $($ts),*) = self; - print_tup!(num_print_something $t $($ts)*) != 0 + print_tup!(num_should_render $t $($ts)*) != 0 } fn print_attribute(&self, p: &mut Printer) { let ($t, $($ts),*) = self; - let parens = print_tup!(num_print_something $t $($ts)*) > 1; + let parens = print_tup!(num_should_render $t $($ts)*) > 1; if parens { - p.word("("); + p.popen(); } - let mut printed_anything = $t.print_something(); + let mut printed_anything = $t.should_render(); $t.print_attribute(p); $( - if printed_anything && $ts.print_something() { - p.word_space(","); + if $ts.should_render() { + if printed_anything { + p.word_space(","); + } printed_anything = true; } $ts.print_attribute(p); )* if parens { - p.word(")"); + p.pclose(); } } } @@ -147,8 +155,8 @@ macro_rules! print_tup { print_tup!(A B C D E F G H); print_skip!(Span, ()); -print_disp!(Symbol, u16, bool, NonZero); -print_debug!(UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency); +print_disp!(u16, bool, NonZero); +print_debug!(Symbol, UintTy, IntTy, Align, AttrStyle, CommentKind, Transparency); /// Finds attributes in sequences of attributes by pattern matching. /// diff --git a/compiler/rustc_builtin_macros/Cargo.toml b/compiler/rustc_builtin_macros/Cargo.toml index b5f4f2efd1f5b..1289d21308b7a 100644 --- a/compiler/rustc_builtin_macros/Cargo.toml +++ b/compiler/rustc_builtin_macros/Cargo.toml @@ -3,10 +3,6 @@ name = "rustc_builtin_macros" version = "0.0.0" edition = "2024" - -[lints.rust] -unexpected_cfgs = { level = "warn", check-cfg = ['cfg(llvm_enzyme)'] } - [lib] doctest = false diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 7741f6668c382..3c61bfd1c93f5 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -240,7 +240,7 @@ declare_features! ( /// Added for testing unstable lints; perma-unstable. (internal, test_unstable_lint, "1.60.0", None), /// Helps with formatting for `group_imports = "StdExternalCrate"`. - (unstable, unqualified_local_imports, "1.83.0", None), + (unstable, unqualified_local_imports, "1.83.0", Some(138299)), /// Use for stable + negative coherence and strict coherence depending on trait's /// rustc_strict_coherence value. 
(unstable, with_negative_coherence, "1.60.0", None), diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs index ace5e34b38249..fa061c806180f 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/errors.rs @@ -1520,7 +1520,7 @@ fn generics_args_err_extend<'a>( }) .collect(); if args.len() > 1 - && let Some(span) = args.into_iter().last() + && let Some(span) = args.into_iter().next_back() { err.note( "generic arguments are not allowed on both an enum and its variant's path \ diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index 2572ff50eb74f..163d9a1b9d975 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -118,9 +118,9 @@ impl<'a> State<'a> { self.hardbreak() } hir::Attribute::Parsed(pa) => { - self.word("#[attr=\""); + self.word("#[attr = "); pa.print_attribute(self); - self.word("\")]"); + self.word("]"); self.hardbreak() } } diff --git a/compiler/rustc_macros/src/print_attribute.rs b/compiler/rustc_macros/src/print_attribute.rs index 3c6e30b851bf7..42d94e72ee942 100644 --- a/compiler/rustc_macros/src/print_attribute.rs +++ b/compiler/rustc_macros/src/print_attribute.rs @@ -16,12 +16,14 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok let name = field.ident.as_ref().unwrap(); let string_name = name.to_string(); disps.push(quote! { - if __printed_anything && #name.print_something() { - __p.word_space(","); + if #name.should_render() { + if __printed_anything { + __p.word_space(","); + } + __p.word(#string_name); + __p.word_space(":"); __printed_anything = true; } - __p.word(#string_name); - __p.word_space(":"); #name.print_attribute(__p); }); field_names.push(name); @@ -31,10 +33,11 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok quote! { {#(#field_names),*} }, quote! { __p.word(#string_name); - if true #(&& !#field_names.print_something())* { + if true #(&& !#field_names.should_render())* { return; } + __p.nbsp(); __p.word("{"); #(#disps)* __p.word("}"); @@ -48,8 +51,10 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok for idx in 0..fields_unnamed.unnamed.len() { let name = format_ident!("f{idx}"); disps.push(quote! { - if __printed_anything && #name.print_something() { - __p.word_space(","); + if #name.should_render() { + if __printed_anything { + __p.word_space(","); + } __printed_anything = true; } #name.print_attribute(__p); @@ -62,13 +67,13 @@ fn print_fields(name: &Ident, fields: &Fields) -> (TokenStream, TokenStream, Tok quote! { __p.word(#string_name); - if true #(&& !#field_names.print_something())* { + if true #(&& !#field_names.should_render())* { return; } - __p.word("("); + __p.popen(); #(#disps)* - __p.word(")"); + __p.pclose(); }, quote! { true }, ) @@ -138,7 +143,7 @@ pub(crate) fn print_attribute(input: Structure<'_>) -> TokenStream { input.gen_impl(quote! 
{ #[allow(unused)] gen impl PrintAttribute for @Self { - fn print_something(&self) -> bool { #printed } + fn should_render(&self) -> bool { #printed } fn print_attribute(&self, __p: &mut rustc_ast_pretty::pp::Printer) { #code } } }) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index bb227a58cf19d..716ababb00802 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -2107,7 +2107,7 @@ impl<'a> Parser<'a> { ast::GenericBound::Trait(poly) => Some(poly), _ => None, }) - .last() + .next_back() { err.span_suggestion_verbose( poly.span.shrink_to_hi(), diff --git a/compiler/rustc_type_ir/Cargo.toml b/compiler/rustc_type_ir/Cargo.toml index 7b2593b96e37f..4adf715792666 100644 --- a/compiler/rustc_type_ir/Cargo.toml +++ b/compiler/rustc_type_ir/Cargo.toml @@ -33,6 +33,3 @@ nightly = [ "rustc_index/nightly", "rustc_ast_ir/nightly", ] - -[lints.rust] -unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] } diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs index 42886e90f997d..10ae43ac3fcd5 100644 --- a/library/core/src/iter/traits/iterator.rs +++ b/library/core/src/iter/traits/iterator.rs @@ -1825,10 +1825,19 @@ pub trait Iterator { Inspect::new(self, f) } - /// Borrows an iterator, rather than consuming it. + /// Creates a "by reference" adapter for this instance of `Iterator`. /// - /// This is useful to allow applying iterator adapters while still - /// retaining ownership of the original iterator. + /// Consuming method calls (direct or indirect calls to `next`) + /// on the "by reference" adapter will consume the original iterator, + /// but ownership-taking methods (those with a `self` parameter) + /// only take ownership of the "by reference" iterator. + /// + /// This is useful for applying ownership-taking methods + /// (such as `take` in the example below) + /// without giving up ownership of the original iterator, + /// so you can use the original iterator afterwards. + /// + /// Uses [impl Iterator for &mut I { type Item = I::Item; ...}](https://doc.rust-lang.org/nightly/std/iter/trait.Iterator.html#impl-Iterator-for-%26mut+I). /// /// # Examples /// @@ -4024,6 +4033,9 @@ where } } +/// Implements `Iterator` for mutable references to iterators, such as those produced by [`Iterator::by_ref`]. +/// +/// This implementation passes all method calls on to the original iterator. 
#[stable(feature = "rust1", since = "1.0.0")] impl Iterator for &mut I { type Item = I::Item; diff --git a/library/std/src/net/test.rs b/library/std/src/net/test.rs index a5c3983cd89ec..df48b2f2420c3 100644 --- a/library/std/src/net/test.rs +++ b/library/std/src/net/test.rs @@ -31,3 +31,14 @@ pub fn tsa(a: A) -> Result, String> { Err(e) => Err(e.to_string()), } } + +pub fn compare_ignore_zoneid(a: &SocketAddr, b: &SocketAddr) -> bool { + match (a, b) { + (SocketAddr::V6(a), SocketAddr::V6(b)) => { + a.ip().segments() == b.ip().segments() + && a.flowinfo() == b.flowinfo() + && a.port() == b.port() + } + _ => a == b, + } +} diff --git a/library/std/src/net/udp/tests.rs b/library/std/src/net/udp/tests.rs index 1c8c58d187957..91da3135f97c6 100644 --- a/library/std/src/net/udp/tests.rs +++ b/library/std/src/net/udp/tests.rs @@ -1,4 +1,4 @@ -use crate::net::test::{next_test_ip4, next_test_ip6}; +use crate::net::test::{compare_ignore_zoneid, next_test_ip4, next_test_ip6}; use crate::net::*; use crate::sync::mpsc::channel; use crate::thread; @@ -46,7 +46,7 @@ fn socket_smoke_test_ip4() { let (nread, src) = t!(server.recv_from(&mut buf)); assert_eq!(nread, 1); assert_eq!(buf[0], 99); - assert_eq!(src, client_ip); + assert_eq!(compare_ignore_zoneid(&src, &client_ip), true); rx2.recv().unwrap(); }) } @@ -78,7 +78,9 @@ fn udp_clone_smoke() { let _t = thread::spawn(move || { let mut buf = [0, 0]; - assert_eq!(sock2.recv_from(&mut buf).unwrap(), (1, addr1)); + let res = sock2.recv_from(&mut buf).unwrap(); + assert_eq!(res.0, 1); + assert_eq!(compare_ignore_zoneid(&res.1, &addr1), true); assert_eq!(buf[0], 1); t!(sock2.send_to(&[2], &addr1)); }); @@ -94,7 +96,9 @@ fn udp_clone_smoke() { }); tx1.send(()).unwrap(); let mut buf = [0, 0]; - assert_eq!(sock1.recv_from(&mut buf).unwrap(), (1, addr2)); + let res = sock1.recv_from(&mut buf).unwrap(); + assert_eq!(res.0, 1); + assert_eq!(compare_ignore_zoneid(&res.1, &addr2), true); rx2.recv().unwrap(); }) } diff --git a/library/std/tests/floats/f16.rs b/library/std/tests/floats/f16.rs index 5180f3d40f3a7..3f563249c9c0d 100644 --- a/library/std/tests/floats/f16.rs +++ b/library/std/tests/floats/f16.rs @@ -461,18 +461,16 @@ fn test_recip() { #[test] #[cfg(reliable_f16_math)] fn test_powi() { - // FIXME(llvm19): LLVM misoptimizes `powi.f16` - // - // let nan: f16 = f16::NAN; - // let inf: f16 = f16::INFINITY; - // let neg_inf: f16 = f16::NEG_INFINITY; - // assert_eq!(1.0f16.powi(1), 1.0); - // assert_approx_eq!((-3.1f16).powi(2), 9.61, TOL_0); - // assert_approx_eq!(5.9f16.powi(-2), 0.028727, TOL_N2); - // assert_eq!(8.3f16.powi(0), 1.0); - // assert!(nan.powi(2).is_nan()); - // assert_eq!(inf.powi(3), inf); - // assert_eq!(neg_inf.powi(2), inf); + let nan: f16 = f16::NAN; + let inf: f16 = f16::INFINITY; + let neg_inf: f16 = f16::NEG_INFINITY; + assert_eq!(1.0f16.powi(1), 1.0); + assert_approx_eq!((-3.1f16).powi(2), 9.61, TOL_0); + assert_approx_eq!(5.9f16.powi(-2), 0.028727, TOL_N2); + assert_eq!(8.3f16.powi(0), 1.0); + assert!(nan.powi(2).is_nan()); + assert_eq!(inf.powi(3), inf); + assert_eq!(neg_inf.powi(2), inf); } #[test] @@ -820,14 +818,13 @@ fn test_total_cmp() { 1 << (f16::MANTISSA_DIGITS - 2) } - // FIXME(f16_f128): test subnormals when powf is available - // fn min_subnorm() -> f16 { - // f16::MIN_POSITIVE / f16::powf(2.0, f16::MANTISSA_DIGITS as f16 - 1.0) - // } + fn min_subnorm() -> f16 { + f16::MIN_POSITIVE / f16::powf(2.0, f16::MANTISSA_DIGITS as f16 - 1.0) + } - // fn max_subnorm() -> f16 { - // f16::MIN_POSITIVE - min_subnorm() - // } + fn 
max_subnorm() -> f16 { + f16::MIN_POSITIVE - min_subnorm() + } fn q_nan() -> f16 { f16::from_bits(f16::NAN.to_bits() | quiet_bit_mask()) @@ -846,12 +843,12 @@ fn test_total_cmp() { assert_eq!(Ordering::Equal, (-1.5_f16).total_cmp(&-1.5)); assert_eq!(Ordering::Equal, (-0.5_f16).total_cmp(&-0.5)); assert_eq!(Ordering::Equal, (-f16::MIN_POSITIVE).total_cmp(&-f16::MIN_POSITIVE)); - // assert_eq!(Ordering::Equal, (-max_subnorm()).total_cmp(&-max_subnorm())); - // assert_eq!(Ordering::Equal, (-min_subnorm()).total_cmp(&-min_subnorm())); + assert_eq!(Ordering::Equal, (-max_subnorm()).total_cmp(&-max_subnorm())); + assert_eq!(Ordering::Equal, (-min_subnorm()).total_cmp(&-min_subnorm())); assert_eq!(Ordering::Equal, (-0.0_f16).total_cmp(&-0.0)); assert_eq!(Ordering::Equal, 0.0_f16.total_cmp(&0.0)); - // assert_eq!(Ordering::Equal, min_subnorm().total_cmp(&min_subnorm())); - // assert_eq!(Ordering::Equal, max_subnorm().total_cmp(&max_subnorm())); + assert_eq!(Ordering::Equal, min_subnorm().total_cmp(&min_subnorm())); + assert_eq!(Ordering::Equal, max_subnorm().total_cmp(&max_subnorm())); assert_eq!(Ordering::Equal, f16::MIN_POSITIVE.total_cmp(&f16::MIN_POSITIVE)); assert_eq!(Ordering::Equal, 0.5_f16.total_cmp(&0.5)); assert_eq!(Ordering::Equal, 1.0_f16.total_cmp(&1.0)); @@ -870,13 +867,13 @@ fn test_total_cmp() { assert_eq!(Ordering::Less, (-1.5_f16).total_cmp(&-1.0)); assert_eq!(Ordering::Less, (-1.0_f16).total_cmp(&-0.5)); assert_eq!(Ordering::Less, (-0.5_f16).total_cmp(&-f16::MIN_POSITIVE)); - // assert_eq!(Ordering::Less, (-f16::MIN_POSITIVE).total_cmp(&-max_subnorm())); - // assert_eq!(Ordering::Less, (-max_subnorm()).total_cmp(&-min_subnorm())); - // assert_eq!(Ordering::Less, (-min_subnorm()).total_cmp(&-0.0)); + assert_eq!(Ordering::Less, (-f16::MIN_POSITIVE).total_cmp(&-max_subnorm())); + assert_eq!(Ordering::Less, (-max_subnorm()).total_cmp(&-min_subnorm())); + assert_eq!(Ordering::Less, (-min_subnorm()).total_cmp(&-0.0)); assert_eq!(Ordering::Less, (-0.0_f16).total_cmp(&0.0)); - // assert_eq!(Ordering::Less, 0.0_f16.total_cmp(&min_subnorm())); - // assert_eq!(Ordering::Less, min_subnorm().total_cmp(&max_subnorm())); - // assert_eq!(Ordering::Less, max_subnorm().total_cmp(&f16::MIN_POSITIVE)); + assert_eq!(Ordering::Less, 0.0_f16.total_cmp(&min_subnorm())); + assert_eq!(Ordering::Less, min_subnorm().total_cmp(&max_subnorm())); + assert_eq!(Ordering::Less, max_subnorm().total_cmp(&f16::MIN_POSITIVE)); assert_eq!(Ordering::Less, f16::MIN_POSITIVE.total_cmp(&0.5)); assert_eq!(Ordering::Less, 0.5_f16.total_cmp(&1.0)); assert_eq!(Ordering::Less, 1.0_f16.total_cmp(&1.5)); @@ -894,13 +891,13 @@ fn test_total_cmp() { assert_eq!(Ordering::Greater, (-1.0_f16).total_cmp(&-1.5)); assert_eq!(Ordering::Greater, (-0.5_f16).total_cmp(&-1.0)); assert_eq!(Ordering::Greater, (-f16::MIN_POSITIVE).total_cmp(&-0.5)); - // assert_eq!(Ordering::Greater, (-max_subnorm()).total_cmp(&-f16::MIN_POSITIVE)); - // assert_eq!(Ordering::Greater, (-min_subnorm()).total_cmp(&-max_subnorm())); - // assert_eq!(Ordering::Greater, (-0.0_f16).total_cmp(&-min_subnorm())); + assert_eq!(Ordering::Greater, (-max_subnorm()).total_cmp(&-f16::MIN_POSITIVE)); + assert_eq!(Ordering::Greater, (-min_subnorm()).total_cmp(&-max_subnorm())); + assert_eq!(Ordering::Greater, (-0.0_f16).total_cmp(&-min_subnorm())); assert_eq!(Ordering::Greater, 0.0_f16.total_cmp(&-0.0)); - // assert_eq!(Ordering::Greater, min_subnorm().total_cmp(&0.0)); - // assert_eq!(Ordering::Greater, max_subnorm().total_cmp(&min_subnorm())); - // 
assert_eq!(Ordering::Greater, f16::MIN_POSITIVE.total_cmp(&max_subnorm())); + assert_eq!(Ordering::Greater, min_subnorm().total_cmp(&0.0)); + assert_eq!(Ordering::Greater, max_subnorm().total_cmp(&min_subnorm())); + assert_eq!(Ordering::Greater, f16::MIN_POSITIVE.total_cmp(&max_subnorm())); assert_eq!(Ordering::Greater, 0.5_f16.total_cmp(&f16::MIN_POSITIVE)); assert_eq!(Ordering::Greater, 1.0_f16.total_cmp(&0.5)); assert_eq!(Ordering::Greater, 1.5_f16.total_cmp(&1.0)); @@ -918,12 +915,12 @@ fn test_total_cmp() { assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-1.0)); assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-0.5)); assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-f16::MIN_POSITIVE)); - // assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-max_subnorm())); - // assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-min_subnorm())); + assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-max_subnorm())); + assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-min_subnorm())); assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&-0.0)); assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&0.0)); - // assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&min_subnorm())); - // assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&max_subnorm())); + assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&min_subnorm())); + assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&max_subnorm())); assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&f16::MIN_POSITIVE)); assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&0.5)); assert_eq!(Ordering::Less, (-q_nan()).total_cmp(&1.0)); @@ -940,12 +937,12 @@ fn test_total_cmp() { assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-1.0)); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-0.5)); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-f16::MIN_POSITIVE)); - // assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-max_subnorm())); - // assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-min_subnorm())); + assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-max_subnorm())); + assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-min_subnorm())); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&-0.0)); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&0.0)); - // assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&min_subnorm())); - // assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&max_subnorm())); + assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&min_subnorm())); + assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&max_subnorm())); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&f16::MIN_POSITIVE)); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&0.5)); assert_eq!(Ordering::Less, (-s_nan()).total_cmp(&1.0)); diff --git a/src/ci/citool/Cargo.lock b/src/ci/citool/Cargo.lock index 46343a7b86e8a..c061ec6ebdcee 100644 --- a/src/ci/citool/Cargo.lock +++ b/src/ci/citool/Cargo.lock @@ -107,6 +107,7 @@ dependencies = [ "build_helper", "clap", "csv", + "glob-match", "insta", "serde", "serde_json", @@ -308,6 +309,12 @@ dependencies = [ "wasi", ] +[[package]] +name = "glob-match" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985c9503b412198aa4197559e9a318524ebc4519c229bfa05a535828c950b9d" + [[package]] name = "hashbrown" version = "0.15.2" diff --git a/src/ci/citool/Cargo.toml b/src/ci/citool/Cargo.toml index c486f2977a1cb..dde09224afe84 100644 --- a/src/ci/citool/Cargo.toml +++ b/src/ci/citool/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" anyhow = "1" clap = { version = "4.5", features = ["derive"] } csv = "1" +glob-match = "0.2" serde = { version = "1", features 
= ["derive"] } serde_yaml = "0.9" serde_json = "1" diff --git a/src/ci/citool/src/jobs.rs b/src/ci/citool/src/jobs.rs new file mode 100644 index 0000000000000..45a188fb234a1 --- /dev/null +++ b/src/ci/citool/src/jobs.rs @@ -0,0 +1,244 @@ +#[cfg(test)] +mod tests; + +use std::collections::BTreeMap; + +use serde_yaml::Value; + +use crate::GitHubContext; + +/// Representation of a job loaded from the `src/ci/github-actions/jobs.yml` file. +#[derive(serde::Deserialize, Debug, Clone)] +pub struct Job { + /// Name of the job, e.g. mingw-check + pub name: String, + /// GitHub runner on which the job should be executed + pub os: String, + pub env: BTreeMap, + /// Should the job be only executed on a specific channel? + #[serde(default)] + pub only_on_channel: Option, + /// Do not cancel the whole workflow if this job fails. + #[serde(default)] + pub continue_on_error: Option, + /// Free additional disk space in the job, by removing unused packages. + #[serde(default)] + pub free_disk: Option, +} + +impl Job { + /// By default, the Docker image of a job is based on its name. + /// However, it can be overridden by its IMAGE environment variable. + pub fn image(&self) -> String { + self.env + .get("IMAGE") + .map(|v| v.as_str().expect("IMAGE value should be a string").to_string()) + .unwrap_or_else(|| self.name.clone()) + } + + fn is_linux(&self) -> bool { + self.os.contains("ubuntu") + } +} + +#[derive(serde::Deserialize, Debug)] +struct JobEnvironments { + #[serde(rename = "pr")] + pr_env: BTreeMap, + #[serde(rename = "try")] + try_env: BTreeMap, + #[serde(rename = "auto")] + auto_env: BTreeMap, +} + +#[derive(serde::Deserialize, Debug)] +pub struct JobDatabase { + #[serde(rename = "pr")] + pub pr_jobs: Vec, + #[serde(rename = "try")] + pub try_jobs: Vec, + #[serde(rename = "auto")] + pub auto_jobs: Vec, + + /// Shared environments for the individual run types. + envs: JobEnvironments, +} + +impl JobDatabase { + /// Find `auto` jobs that correspond to the passed `pattern`. + /// Patterns are matched using the glob syntax. + /// For example `dist-*` matches all jobs starting with `dist-`. + fn find_auto_jobs_by_pattern(&self, pattern: &str) -> Vec { + self.auto_jobs + .iter() + .filter(|j| glob_match::glob_match(pattern, &j.name)) + .cloned() + .collect() + } +} + +pub fn load_job_db(db: &str) -> anyhow::Result { + let mut db: Value = serde_yaml::from_str(&db)?; + + // We need to expand merge keys (<<), because serde_yaml can't deal with them + // `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges. + db.apply_merge()?; + db.apply_merge()?; + + let db: JobDatabase = serde_yaml::from_value(db)?; + Ok(db) +} + +/// Representation of a job outputted to a GitHub Actions workflow. +#[derive(serde::Serialize, Debug)] +struct GithubActionsJob { + /// The main identifier of the job, used by CI scripts to determine what should be executed. + name: String, + /// Helper label displayed in GitHub Actions interface, containing the job name and a run type + /// prefix (PR/try/auto). + full_name: String, + os: String, + env: BTreeMap, + #[serde(skip_serializing_if = "Option::is_none")] + continue_on_error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + free_disk: Option, +} + +/// Skip CI jobs that are not supposed to be executed on the given `channel`. 
+fn skip_jobs(jobs: Vec<Job>, channel: &str) -> Vec<Job> {
+    jobs.into_iter()
+        .filter(|job| {
+            job.only_on_channel.is_none() || job.only_on_channel.as_deref() == Some(channel)
+        })
+        .collect()
+}
+
+/// Type of workflow that is being executed on CI
+#[derive(Debug)]
+pub enum RunType {
+    /// Workflows that run after a push to a PR branch
+    PullRequest,
+    /// Try run started with @bors try
+    TryJob { job_patterns: Option<Vec<String>> },
+    /// Merge attempt workflow
+    AutoJob,
+}
+
+/// Maximum number of custom try jobs that can be requested in a single
+/// `@bors try` request.
+const MAX_TRY_JOBS_COUNT: usize = 20;
+
+fn calculate_jobs(
+    run_type: &RunType,
+    db: &JobDatabase,
+    channel: &str,
+) -> anyhow::Result<Vec<GithubActionsJob>> {
+    let (jobs, prefix, base_env) = match run_type {
+        RunType::PullRequest => (db.pr_jobs.clone(), "PR", &db.envs.pr_env),
+        RunType::TryJob { job_patterns } => {
+            let jobs = if let Some(patterns) = job_patterns {
+                let mut jobs: Vec<Job> = vec![];
+                let mut unknown_patterns = vec![];
+                for pattern in patterns {
+                    let matched_jobs = db.find_auto_jobs_by_pattern(pattern);
+                    if matched_jobs.is_empty() {
+                        unknown_patterns.push(pattern.clone());
+                    } else {
+                        for job in matched_jobs {
+                            if !jobs.iter().any(|j| j.name == job.name) {
+                                jobs.push(job);
+                            }
+                        }
+                    }
+                }
+                if !unknown_patterns.is_empty() {
+                    return Err(anyhow::anyhow!(
+                        "Patterns `{}` did not match any auto jobs",
+                        unknown_patterns.join(", ")
+                    ));
+                }
+                if jobs.len() > MAX_TRY_JOBS_COUNT {
+                    return Err(anyhow::anyhow!(
+                        "It is only possible to schedule up to {MAX_TRY_JOBS_COUNT} custom jobs, received {} custom jobs expanded from {} pattern(s)",
+                        jobs.len(),
+                        patterns.len()
+                    ));
+                }
+                jobs
+            } else {
+                db.try_jobs.clone()
+            };
+            (jobs, "try", &db.envs.try_env)
+        }
+        RunType::AutoJob => (db.auto_jobs.clone(), "auto", &db.envs.auto_env),
+    };
+    let jobs = skip_jobs(jobs, channel);
+    let jobs = jobs
+        .into_iter()
+        .map(|job| {
+            let mut env: BTreeMap<String, serde_json::Value> = crate::yaml_map_to_json(base_env);
+            env.extend(crate::yaml_map_to_json(&job.env));
+            let full_name = format!("{prefix} - {}", job.name);
+
+            GithubActionsJob {
+                name: job.name,
+                full_name,
+                os: job.os,
+                env,
+                continue_on_error: job.continue_on_error,
+                free_disk: job.free_disk,
+            }
+        })
+        .collect();
+
+    Ok(jobs)
+}
+
+pub fn calculate_job_matrix(
+    db: JobDatabase,
+    gh_ctx: GitHubContext,
+    channel: &str,
+) -> anyhow::Result<()> {
+    let run_type = gh_ctx.get_run_type().ok_or_else(|| {
+        anyhow::anyhow!("Cannot determine the type of workflow that is being executed")
+    })?;
+    eprintln!("Run type: {run_type:?}");
+
+    let jobs = calculate_jobs(&run_type, &db, channel)?;
+    if jobs.is_empty() {
+        return Err(anyhow::anyhow!("Computed job list is empty"));
+    }
+
+    let run_type = match run_type {
+        RunType::PullRequest => "pr",
+        RunType::TryJob { .. } => "try",
+        RunType::AutoJob => "auto",
+    };
+
+    eprintln!("Output");
+    eprintln!("jobs={jobs:?}");
+    eprintln!("run_type={run_type}");
+    println!("jobs={}", serde_json::to_string(&jobs)?);
+    println!("run_type={run_type}");
+
+    Ok(())
+}
+
+pub fn find_linux_job<'a>(jobs: &'a [Job], name: &str) -> anyhow::Result<&'a Job> {
+    let Some(job) = jobs.iter().find(|j| j.name == name) else {
+        let available_jobs: Vec<&Job> = jobs.iter().filter(|j| j.is_linux()).collect();
+        let mut available_jobs =
+            available_jobs.iter().map(|j| j.name.to_string()).collect::<Vec<_>>();
+        available_jobs.sort();
+        return Err(anyhow::anyhow!(
+            "Job {name} not found. The following jobs are available:\n{}",
+            available_jobs.join(", ")
+        ));
+    };
+    if !job.is_linux() {
+        return Err(anyhow::anyhow!("Only Linux jobs can be executed locally"));
+    }
+
+    Ok(job)
+}
diff --git a/src/ci/citool/src/jobs/tests.rs b/src/ci/citool/src/jobs/tests.rs
new file mode 100644
index 0000000000000..a489656fa5dc7
--- /dev/null
+++ b/src/ci/citool/src/jobs/tests.rs
@@ -0,0 +1,64 @@
+use crate::jobs::{JobDatabase, load_job_db};
+
+#[test]
+fn lookup_job_pattern() {
+    let db = load_job_db(
+        r#"
+envs:
+  pr:
+  try:
+  auto:
+
+pr:
+try:
+auto:
+  - name: dist-a
+    os: ubuntu
+    env: {}
+  - name: dist-a-alt
+    os: ubuntu
+    env: {}
+  - name: dist-b
+    os: ubuntu
+    env: {}
+  - name: dist-b-alt
+    os: ubuntu
+    env: {}
+  - name: test-a
+    os: ubuntu
+    env: {}
+  - name: test-a-alt
+    os: ubuntu
+    env: {}
+  - name: test-i686
+    os: ubuntu
+    env: {}
+  - name: dist-i686
+    os: ubuntu
+    env: {}
+  - name: test-msvc-i686-1
+    os: ubuntu
+    env: {}
+  - name: test-msvc-i686-2
+    os: ubuntu
+    env: {}
+"#,
+    )
+    .unwrap();
+    check_pattern(&db, "dist-*", &["dist-a", "dist-a-alt", "dist-b", "dist-b-alt", "dist-i686"]);
+    check_pattern(&db, "*-alt", &["dist-a-alt", "dist-b-alt", "test-a-alt"]);
+    check_pattern(&db, "dist*-alt", &["dist-a-alt", "dist-b-alt"]);
+    check_pattern(
+        &db,
+        "*i686*",
+        &["test-i686", "dist-i686", "test-msvc-i686-1", "test-msvc-i686-2"],
+    );
+}
+
+#[track_caller]
+fn check_pattern(db: &JobDatabase, pattern: &str, expected: &[&str]) {
+    let jobs =
+        db.find_auto_jobs_by_pattern(pattern).into_iter().map(|j| j.name).collect::<Vec<_>>();
+
+    assert_eq!(jobs, expected);
+}
diff --git a/src/ci/citool/src/main.rs b/src/ci/citool/src/main.rs
index 52e7638d98bdf..cd690ebeb0625 100644
--- a/src/ci/citool/src/main.rs
+++ b/src/ci/citool/src/main.rs
@@ -1,5 +1,6 @@
 mod cpu_usage;
 mod datadog;
+mod jobs;
 mod merge_report;
 mod metrics;
 mod utils;
@@ -10,10 +11,12 @@ use std::process::Command;
 use anyhow::Context;
 use clap::Parser;
+use jobs::JobDatabase;
 use serde_yaml::Value;
 use crate::cpu_usage::load_cpu_usage;
 use crate::datadog::upload_datadog_metric;
+use crate::jobs::RunType;
 use crate::merge_report::post_merge_report;
 use crate::metrics::postprocess_metrics;
 use crate::utils::load_env_var;
@@ -22,104 +25,6 @@ const CI_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/..");
 const DOCKER_DIRECTORY: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../docker");
 const JOBS_YML_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../github-actions/jobs.yml");
 
-/// Representation of a job loaded from the `src/ci/github-actions/jobs.yml` file.
-#[derive(serde::Deserialize, Debug, Clone)]
-struct Job {
-    /// Name of the job, e.g. mingw-check
-    name: String,
-    /// GitHub runner on which the job should be executed
-    os: String,
-    env: BTreeMap<String, Value>,
-    /// Should the job be only executed on a specific channel?
-    #[serde(default)]
-    only_on_channel: Option<String>,
-    /// Rest of attributes that will be passed through to GitHub actions
-    #[serde(flatten)]
-    extra_keys: BTreeMap<String, Value>,
-}
-
-impl Job {
-    fn is_linux(&self) -> bool {
-        self.os.contains("ubuntu")
-    }
-
-    /// By default, the Docker image of a job is based on its name.
-    /// However, it can be overridden by its IMAGE environment variable.
- fn image(&self) -> String { - self.env - .get("IMAGE") - .map(|v| v.as_str().expect("IMAGE value should be a string").to_string()) - .unwrap_or_else(|| self.name.clone()) - } -} - -#[derive(serde::Deserialize, Debug)] -struct JobEnvironments { - #[serde(rename = "pr")] - pr_env: BTreeMap, - #[serde(rename = "try")] - try_env: BTreeMap, - #[serde(rename = "auto")] - auto_env: BTreeMap, -} - -#[derive(serde::Deserialize, Debug)] -struct JobDatabase { - #[serde(rename = "pr")] - pr_jobs: Vec, - #[serde(rename = "try")] - try_jobs: Vec, - #[serde(rename = "auto")] - auto_jobs: Vec, - - /// Shared environments for the individual run types. - envs: JobEnvironments, -} - -impl JobDatabase { - fn find_auto_job_by_name(&self, name: &str) -> Option { - self.auto_jobs.iter().find(|j| j.name == name).cloned() - } -} - -fn load_job_db(path: &Path) -> anyhow::Result { - let db = utils::read_to_string(path)?; - let mut db: Value = serde_yaml::from_str(&db)?; - - // We need to expand merge keys (<<), because serde_yaml can't deal with them - // `apply_merge` only applies the merge once, so do it a few times to unwrap nested merges. - db.apply_merge()?; - db.apply_merge()?; - - let db: JobDatabase = serde_yaml::from_value(db)?; - Ok(db) -} - -/// Representation of a job outputted to a GitHub Actions workflow. -#[derive(serde::Serialize, Debug)] -struct GithubActionsJob { - /// The main identifier of the job, used by CI scripts to determine what should be executed. - name: String, - /// Helper label displayed in GitHub Actions interface, containing the job name and a run type - /// prefix (PR/try/auto). - full_name: String, - os: String, - env: BTreeMap, - #[serde(flatten)] - extra_keys: BTreeMap, -} - -/// Type of workflow that is being executed on CI -#[derive(Debug)] -enum RunType { - /// Workflows that run after a push to a PR branch - PullRequest, - /// Try run started with @bors try - TryJob { custom_jobs: Option> }, - /// Merge attempt workflow - AutoJob, -} - struct GitHubContext { event_name: String, branch_ref: String, @@ -130,24 +35,31 @@ impl GitHubContext { fn get_run_type(&self) -> Option { match (self.event_name.as_str(), self.branch_ref.as_str()) { ("pull_request", _) => Some(RunType::PullRequest), - ("push", "refs/heads/try-perf") => Some(RunType::TryJob { custom_jobs: None }), + ("push", "refs/heads/try-perf") => Some(RunType::TryJob { job_patterns: None }), ("push", "refs/heads/try" | "refs/heads/automation/bors/try") => { - let custom_jobs = self.get_custom_jobs(); - let custom_jobs = if !custom_jobs.is_empty() { Some(custom_jobs) } else { None }; - Some(RunType::TryJob { custom_jobs }) + let patterns = self.get_try_job_patterns(); + let patterns = if !patterns.is_empty() { Some(patterns) } else { None }; + Some(RunType::TryJob { job_patterns: patterns }) } ("push", "refs/heads/auto") => Some(RunType::AutoJob), _ => None, } } - /// Tries to parse names of specific CI jobs that should be executed in the form of - /// try-job: - /// from the commit message of the passed GitHub context. 
- fn get_custom_jobs(&self) -> Vec { + /// Tries to parse patterns of CI jobs that should be executed + /// from the commit message of the passed GitHub context + /// + /// They can be specified in the form of + /// try-job: + /// or + /// try-job: `` + /// (to avoid GitHub rendering the glob patterns as Markdown) + fn get_try_job_patterns(&self) -> Vec { if let Some(ref msg) = self.commit_message { msg.lines() .filter_map(|line| line.trim().strip_prefix("try-job: ")) + // Strip backticks if present + .map(|l| l.trim_matches('`')) .map(|l| l.trim().to_string()) .collect() } else { @@ -164,15 +76,6 @@ fn load_github_ctx() -> anyhow::Result { Ok(GitHubContext { event_name, branch_ref: load_env_var("GITHUB_REF")?, commit_message }) } -/// Skip CI jobs that are not supposed to be executed on the given `channel`. -fn skip_jobs(jobs: Vec, channel: &str) -> Vec { - jobs.into_iter() - .filter(|job| { - job.only_on_channel.is_none() || job.only_on_channel.as_deref() == Some(channel) - }) - .collect() -} - fn yaml_map_to_json(map: &BTreeMap) -> BTreeMap { map.into_iter() .map(|(key, value)| { @@ -184,124 +87,13 @@ fn yaml_map_to_json(map: &BTreeMap) -> BTreeMap anyhow::Result> { - let (jobs, prefix, base_env) = match run_type { - RunType::PullRequest => (db.pr_jobs.clone(), "PR", &db.envs.pr_env), - RunType::TryJob { custom_jobs } => { - let jobs = if let Some(custom_jobs) = custom_jobs { - if custom_jobs.len() > MAX_TRY_JOBS_COUNT { - return Err(anyhow::anyhow!( - "It is only possible to schedule up to {MAX_TRY_JOBS_COUNT} custom jobs, received {} custom jobs", - custom_jobs.len() - )); - } - - let mut jobs = vec![]; - let mut unknown_jobs = vec![]; - for custom_job in custom_jobs { - if let Some(job) = db.find_auto_job_by_name(custom_job) { - jobs.push(job); - } else { - unknown_jobs.push(custom_job.clone()); - } - } - if !unknown_jobs.is_empty() { - return Err(anyhow::anyhow!( - "Custom job(s) `{}` not found in auto jobs", - unknown_jobs.join(", ") - )); - } - jobs - } else { - db.try_jobs.clone() - }; - (jobs, "try", &db.envs.try_env) - } - RunType::AutoJob => (db.auto_jobs.clone(), "auto", &db.envs.auto_env), - }; - let jobs = skip_jobs(jobs, channel); - let jobs = jobs - .into_iter() - .map(|job| { - let mut env: BTreeMap = yaml_map_to_json(base_env); - env.extend(yaml_map_to_json(&job.env)); - let full_name = format!("{prefix} - {}", job.name); - - GithubActionsJob { - name: job.name, - full_name, - os: job.os, - env, - extra_keys: yaml_map_to_json(&job.extra_keys), - } - }) - .collect(); - - Ok(jobs) -} - -fn calculate_job_matrix( - db: JobDatabase, - gh_ctx: GitHubContext, - channel: &str, -) -> anyhow::Result<()> { - let run_type = gh_ctx.get_run_type().ok_or_else(|| { - anyhow::anyhow!("Cannot determine the type of workflow that is being executed") - })?; - eprintln!("Run type: {run_type:?}"); - - let jobs = calculate_jobs(&run_type, &db, channel)?; - if jobs.is_empty() { - return Err(anyhow::anyhow!("Computed job list is empty")); - } - - let run_type = match run_type { - RunType::PullRequest => "pr", - RunType::TryJob { .. 
} => "try", - RunType::AutoJob => "auto", - }; - - eprintln!("Output"); - eprintln!("jobs={jobs:?}"); - eprintln!("run_type={run_type}"); - println!("jobs={}", serde_json::to_string(&jobs)?); - println!("run_type={run_type}"); - - Ok(()) -} - -fn find_linux_job<'a>(jobs: &'a [Job], name: &str) -> anyhow::Result<&'a Job> { - let Some(job) = jobs.iter().find(|j| j.name == name) else { - let available_jobs: Vec<&Job> = jobs.iter().filter(|j| j.is_linux()).collect(); - let mut available_jobs = - available_jobs.iter().map(|j| j.name.to_string()).collect::>(); - available_jobs.sort(); - return Err(anyhow::anyhow!( - "Job {name} not found. The following jobs are available:\n{}", - available_jobs.join(", ") - )); - }; - if !job.is_linux() { - return Err(anyhow::anyhow!("Only Linux jobs can be executed locally")); - } - - Ok(job) -} - fn run_workflow_locally(db: JobDatabase, job_type: JobType, name: String) -> anyhow::Result<()> { let jobs = match job_type { JobType::Auto => &db.auto_jobs, JobType::PR => &db.pr_jobs, }; - let job = find_linux_job(jobs, &name).with_context(|| format!("Cannot find job {name}"))?; + let job = + jobs::find_linux_job(jobs, &name).with_context(|| format!("Cannot find job {name}"))?; let mut custom_env: BTreeMap = BTreeMap::new(); // Replicate src/ci/scripts/setup-environment.sh @@ -385,7 +177,7 @@ enum Args { } #[derive(clap::ValueEnum, Clone)] -enum JobType { +pub enum JobType { /// Merge attempt ("auto") job Auto, /// Pull request job @@ -395,7 +187,10 @@ enum JobType { fn main() -> anyhow::Result<()> { let args = Args::parse(); let default_jobs_file = Path::new(JOBS_YML_PATH); - let load_db = |jobs_path| load_job_db(jobs_path).context("Cannot load jobs.yml"); + let load_db = |jobs_path| { + let db = utils::read_to_string(jobs_path)?; + Ok::<_, anyhow::Error>(jobs::load_job_db(&db).context("Cannot load jobs.yml")?) + }; match args { Args::CalculateJobMatrix { jobs_file } => { @@ -407,7 +202,7 @@ fn main() -> anyhow::Result<()> { .trim() .to_string(); - calculate_job_matrix(load_db(jobs_path)?, gh_ctx, &channel) + jobs::calculate_job_matrix(load_db(jobs_path)?, gh_ctx, &channel) .context("Failed to calculate job matrix")?; } Args::RunJobLocally { job_type, name } => { diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs index 5dd662280f0f3..17e42d49286fe 100644 --- a/src/ci/citool/src/merge_report.rs +++ b/src/ci/citool/src/merge_report.rs @@ -4,7 +4,7 @@ use std::collections::HashMap; use anyhow::Context; use build_helper::metrics::{JsonRoot, TestOutcome}; -use crate::JobDatabase; +use crate::jobs::JobDatabase; use crate::metrics::get_test_suites; type Sha = String; diff --git a/src/doc/rustc-dev-guide/src/tests/ci.md b/src/doc/rustc-dev-guide/src/tests/ci.md index ae6adb678af14..0c0f750a45d72 100644 --- a/src/doc/rustc-dev-guide/src/tests/ci.md +++ b/src/doc/rustc-dev-guide/src/tests/ci.md @@ -133,29 +133,37 @@ There are several use-cases for try builds: Again, a working compiler build is needed for this, which can be produced by the [dist-x86_64-linux] CI job. - Run a specific CI job (e.g. Windows tests) on a PR, to quickly test if it - passes the test suite executed by that job. You can select which CI jobs will - be executed in the try build by adding up to 10 lines containing `try-job: - ` to the PR description. All such specified jobs will be executed - in the try build once the `@bors try` command is used on the PR. 
If no try
-  jobs are specified in this way, the jobs defined in the `try` section of
-  [`jobs.yml`] will be executed by default.
+  passes the test suite executed by that job.
+
+You can select which CI jobs will
+be executed in the try build by adding lines containing `try-job:
+<job pattern>` to the PR description. All such specified jobs will be executed
+in the try build once the `@bors try` command is used on the PR. If no try
+jobs are specified in this way, the jobs defined in the `try` section of
+[`jobs.yml`] will be executed by default.
+
+Each pattern can either be an exact name of a job or a glob pattern that matches multiple jobs,
+for example `*msvc*` or `*-alt`. You can start at most 20 jobs in a single try build. When using
+glob patterns, you might want to wrap them in backticks (`` ` ``) to avoid GitHub rendering
+the pattern as Markdown.
 
 > **Using `try-job` PR description directives**
 >
-> 1. Identify which set of try-jobs (max 10) you would like to exercise. You can
+> 1. Identify which set of try-jobs you would like to exercise. You can
 >    find the name of the CI jobs in [`jobs.yml`].
 >
-> 2. Amend PR description to include (usually at the end of the PR description)
->    e.g.
+> 2. Amend PR description to include a set of patterns (usually at the end
+>    of the PR description), for example:
 >
 > ```text
 > This PR fixes #123456.
 >
 > try-job: x86_64-msvc
 > try-job: test-various
+> try-job: `*-alt`
 > ```
 >
-> Each `try-job` directive must be on its own line.
+> Each `try-job` pattern must be on its own line.
 >
 > 3. Run the prescribed try jobs with `@bors try`. As aforementioned, this
 >    requires the user to either (1) have `try` permissions or (2) be delegated
diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md
index b1d7e5421c1b4..2f86021a4859d 100644
--- a/src/doc/rustc/src/SUMMARY.md
+++ b/src/doc/rustc/src/SUMMARY.md
@@ -72,6 +72,7 @@
 - [powerpc-unknown-linux-gnuspe](platform-support/powerpc-unknown-linux-gnuspe.md)
 - [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md)
 - [powerpc64-ibm-aix](platform-support/aix.md)
+ - [powerpc64le-unknown-linux-gnu](platform-support/powerpc64le-unknown-linux-gnu.md)
 - [powerpc64le-unknown-linux-musl](platform-support/powerpc64le-unknown-linux-musl.md)
 - [riscv32e\*-unknown-none-elf](platform-support/riscv32e-unknown-none-elf.md)
 - [riscv32i\*-unknown-none-elf](platform-support/riscv32-unknown-none-elf.md)
diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md
index f78ab151b9c24..486aa1e091cfc 100644
--- a/src/doc/rustc/src/platform-support.md
+++ b/src/doc/rustc/src/platform-support.md
@@ -96,7 +96,7 @@ target | notes
 [`loongarch64-unknown-linux-musl`](platform-support/loongarch-linux.md) | LoongArch64 Linux, LP64D ABI (kernel 5.19, musl 1.2.5)
 `powerpc-unknown-linux-gnu` | PowerPC Linux (kernel 3.2, glibc 2.17)
 `powerpc64-unknown-linux-gnu` | PPC64 Linux (kernel 3.2, glibc 2.17)
-`powerpc64le-unknown-linux-gnu` | PPC64LE Linux (kernel 3.10, glibc 2.17)
+[`powerpc64le-unknown-linux-gnu`](platform-support/powerpc64le-unknown-linux-gnu.md) | PPC64LE Linux (kernel 3.10, glibc 2.17)
 [`powerpc64le-unknown-linux-musl`](platform-support/powerpc64le-unknown-linux-musl.md) | PPC64LE Linux (kernel 4.19, musl 1.2.3)
 [`riscv64gc-unknown-linux-gnu`](platform-support/riscv64gc-unknown-linux-gnu.md) | RISC-V Linux (kernel 4.20, glibc 2.29)
 [`riscv64gc-unknown-linux-musl`](platform-support/riscv64gc-unknown-linux-musl.md) | RISC-V Linux (kernel 4.20, musl 1.2.3)
diff --git
a/src/doc/rustc/src/platform-support/powerpc64le-unknown-linux-gnu.md b/src/doc/rustc/src/platform-support/powerpc64le-unknown-linux-gnu.md new file mode 100644 index 0000000000000..6cb34b2a777a5 --- /dev/null +++ b/src/doc/rustc/src/platform-support/powerpc64le-unknown-linux-gnu.md @@ -0,0 +1,47 @@ +# `powerpc64le-unknown-linux-gnu` + +**Tier: 2** + +Target for 64-bit little endian PowerPC Linux programs + +## Target maintainers + +- David Tenty `daltenty@ibm.com`, https://github.com/daltenty +- Chris Cambly, `ccambly@ca.ibm.com`, https://github.com/gilamn5tr + +## Requirements + +Building the target itself requires a 64-bit little endian PowerPC compiler that is supported by `cc-rs`. + +## Building the target + +The target can be built by enabling it for a `rustc` build. + +```toml +[build] +target = ["powerpc64le-unknown-linux-gnu"] +``` + +Make sure your C compiler is included in `$PATH`, then add it to the `config.toml`: + +```toml +[target.powerpc64le-unknown-linux-gnu] +cc = "powerpc64le-linux-gnu-gcc" +cxx = "powerpc64le-linux-gnu-g++" +ar = "powerpc64le-linux-gnu-ar" +linker = "powerpc64le-linux-gnu-gcc" +``` + +## Building Rust programs + +This target is distributed through `rustup`, and requires no special +configuration. + +## Cross-compilation + +This target can be cross-compiled from any host. + +## Testing + +This target can be tested as normal with `x.py` on a 64-bit little endian +PowerPC host or via QEMU emulation. diff --git a/src/doc/unstable-book/src/compiler-flags/crate-attr.md b/src/doc/unstable-book/src/compiler-flags/crate-attr.md new file mode 100644 index 0000000000000..8c9c501a23e3d --- /dev/null +++ b/src/doc/unstable-book/src/compiler-flags/crate-attr.md @@ -0,0 +1,16 @@ +# `crate-attr` + +The tracking issue for this feature is: [#138287](https://github.com/rust-lang/rust/issues/138287). + +------------------------ + +The `-Z crate-attr` flag allows you to inject attributes into the crate root. +For example, `-Z crate-attr=crate_name="test"` acts as if `#![crate_name="test"]` were present before the first source line of the crate root. + +To inject multiple attributes, pass `-Z crate-attr` multiple times. + +Formally, the expansion behaves as follows: +1. The crate is parsed as if `-Z crate-attr` were not present. +2. The attributes in `-Z crate-attr` are parsed. +3. The attributes are injected at the top of the crate root. +4. Macro expansion is performed. 
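
To make the injection concrete, here is a small, hypothetical illustration (not part of the patch): compiling the file below with `-Z crate-attr=feature(rustc_attrs)` behaves as if the injected attribute were the first inner attribute of the crate root, which is why the `#[rustc_dummy]` marker is accepted on a nightly compiler. The file name and its contents are made up for illustration; the flag usage mirrors the tests added later in this diff.

```rust
// Hypothetical example file (example.rs); the flag is real but unstable,
// so this only works with a nightly rustc:
//
//     rustc -Zcrate-attr='feature(rustc_attrs)' --crate-type=lib example.rs
//
// After step 3 above, the compiler effectively sees:
//
//     #![feature(rustc_attrs)]   // injected by -Z crate-attr
//     #![allow(dead_code)]       // attributes written in the source are kept
//
#![allow(dead_code)]

#[rustc_dummy] // accepted only because the injected feature gate enables it
fn demo() {}
```
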
diff --git a/tests/debuginfo/pretty-huge-vec.rs b/tests/debuginfo/pretty-huge-vec.rs index 093fbc5b12d2e..6938158e365e5 100644 --- a/tests/debuginfo/pretty-huge-vec.rs +++ b/tests/debuginfo/pretty-huge-vec.rs @@ -1,5 +1,6 @@ //@ ignore-windows-gnu: #128981 //@ ignore-android: FIXME(#10381) +//@ ignore-aix: FIXME(#137965) //@ compile-flags:-g // === GDB TESTS =================================================================================== diff --git a/tests/pretty/hir-pretty-attr.pp b/tests/pretty/hir-pretty-attr.pp index 586810b004662..d8cc8c424ca5f 100644 --- a/tests/pretty/hir-pretty-attr.pp +++ b/tests/pretty/hir-pretty-attr.pp @@ -6,6 +6,6 @@ //@ pretty-mode:hir //@ pp-exact:hir-pretty-attr.pp -#[attr="Repr([ReprC, ReprPacked(Align(4 bytes)), ReprTransparent])")] +#[attr = Repr([ReprC, ReprPacked(Align(4 bytes)), ReprTransparent])] struct Example { } diff --git a/tests/rustdoc-json/enums/discriminant/struct.rs b/tests/rustdoc-json/enums/discriminant/struct.rs index 82437f5ef03bb..f2bed77902b03 100644 --- a/tests/rustdoc-json/enums/discriminant/struct.rs +++ b/tests/rustdoc-json/enums/discriminant/struct.rs @@ -1,5 +1,5 @@ #[repr(i32)] -//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr=\"Repr([ReprInt(SignedInt(I32))])\")]\n"]' +//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr = Repr([ReprInt(SignedInt(I32))])]\n"]' pub enum Foo { //@ is "$.index[*][?(@.name=='Struct')].inner.variant.discriminant" null //@ count "$.index[*][?(@.name=='Struct')].inner.variant.kind.struct.fields[*]" 0 diff --git a/tests/rustdoc-json/enums/discriminant/tuple.rs b/tests/rustdoc-json/enums/discriminant/tuple.rs index 25bba07e8f796..201c1cdc88e7e 100644 --- a/tests/rustdoc-json/enums/discriminant/tuple.rs +++ b/tests/rustdoc-json/enums/discriminant/tuple.rs @@ -1,5 +1,5 @@ #[repr(u32)] -//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr=\"Repr([ReprInt(UnsignedInt(U32))])\")]\n"]' +//@ is "$.index[*][?(@.name=='Foo')].attrs" '["#[attr = Repr([ReprInt(UnsignedInt(U32))])]\n"]' pub enum Foo { //@ is "$.index[*][?(@.name=='Tuple')].inner.variant.discriminant" null //@ count "$.index[*][?(@.name=='Tuple')].inner.variant.kind.tuple[*]" 0 diff --git a/tests/ui/attributes/z-crate-attr.rs b/tests/ui/attributes/z-crate-attr/basic.rs similarity index 100% rename from tests/ui/attributes/z-crate-attr.rs rename to tests/ui/attributes/z-crate-attr/basic.rs diff --git a/tests/ui/attributes/z-crate-attr/cfg-false.rs b/tests/ui/attributes/z-crate-attr/cfg-false.rs new file mode 100644 index 0000000000000..db37cfdd08637 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/cfg-false.rs @@ -0,0 +1,7 @@ +// Ensure that `-Z crate-attr=cfg(FALSE)` can comment out the whole crate +//@ compile-flags: --crate-type=lib -Zcrate-attr=cfg(FALSE) +//@ check-pass + +// NOTE: duplicate items are load-bearing +fn foo() {} +fn foo() {} diff --git a/tests/ui/attributes/z-crate-attr/comments.rs b/tests/ui/attributes/z-crate-attr/comments.rs new file mode 100644 index 0000000000000..c1ab041f34477 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/comments.rs @@ -0,0 +1,5 @@ +//@ check-pass +//@ compile-flags: -Zcrate-attr=/*hi-there*/feature(rustc_attrs) + +#[rustc_dummy] +fn main() {} diff --git a/tests/ui/attributes/z-crate-attr/crate-name.rs b/tests/ui/attributes/z-crate-attr/crate-name.rs new file mode 100644 index 0000000000000..d49830390e2ec --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/crate-name.rs @@ -0,0 +1,6 @@ +// Ensure that `crate_name` and `crate_type` can be set through `-Z crate-attr`. 
+//@ check-pass +//@ compile-flags: -Zcrate-attr=crate_name="override" +fn main() { + assert_eq!(module_path!(), "r#override"); +} diff --git a/tests/ui/attributes/z-crate-attr/crate-type.rs b/tests/ui/attributes/z-crate-attr/crate-type.rs new file mode 100644 index 0000000000000..0e7411865af9c --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/crate-type.rs @@ -0,0 +1,3 @@ +//@ check-pass +//@ compile-flags: -Zcrate-attr=crate_type="lib" +// notice the lack of `main` is load-bearing diff --git a/tests/ui/attributes/z-crate-attr/garbage.rs b/tests/ui/attributes/z-crate-attr/garbage.rs new file mode 100644 index 0000000000000..ec81dd1bcaa08 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/garbage.rs @@ -0,0 +1,4 @@ +// Show diagnostics for invalid tokens +//@ compile-flags: -Zcrate-attr=`%~@$# +//@ error-pattern:unknown start of token +fn main() {} diff --git a/tests/ui/attributes/z-crate-attr/garbage.stderr b/tests/ui/attributes/z-crate-attr/garbage.stderr new file mode 100644 index 0000000000000..082046e31f8cd --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/garbage.stderr @@ -0,0 +1,20 @@ +error: unknown start of token: ` + --> :1:1 + | +LL | `%~@$# + | ^ + | +help: Unicode character '`' (Grave Accent) looks like ''' (Single Quote), but it is not + | +LL - `%~@$# +LL + '%~@$# + | + +error: expected identifier, found `%` + --> :1:2 + | +LL | `%~@$# + | ^ expected identifier + +error: aborting due to 2 previous errors + diff --git a/tests/ui/attributes/z-crate-attr/injection.rs b/tests/ui/attributes/z-crate-attr/injection.rs new file mode 100644 index 0000000000000..0c5c81ca71a92 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/injection.rs @@ -0,0 +1,3 @@ +//@ compile-flags: '-Zcrate-attr=feature(yeet_expr)]fn main(){}#[inline' +//@ error-pattern:unexpected closing delimiter +fn foo() {} diff --git a/tests/ui/attributes/z-crate-attr/injection.stderr b/tests/ui/attributes/z-crate-attr/injection.stderr new file mode 100644 index 0000000000000..6fec98baf8dfe --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/injection.stderr @@ -0,0 +1,8 @@ +error: unexpected closing delimiter: `]` + --> :1:19 + | +LL | feature(yeet_expr)]fn main(){}#[inline + | ^ unexpected closing delimiter + +error: aborting due to 1 previous error + diff --git a/tests/ui/attributes/z-crate-attr/inner-attr.rs b/tests/ui/attributes/z-crate-attr/inner-attr.rs new file mode 100644 index 0000000000000..522c906dcd817 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/inner-attr.rs @@ -0,0 +1,4 @@ +//@ compile-flags: -Zcrate-attr=#![feature(foo)] +//@ error-pattern:expected identifier + +fn main() {} diff --git a/tests/ui/attributes/z-crate-attr/inner-attr.stderr b/tests/ui/attributes/z-crate-attr/inner-attr.stderr new file mode 100644 index 0000000000000..06a063d310b6b --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/inner-attr.stderr @@ -0,0 +1,8 @@ +error: expected identifier, found `#` + --> :1:1 + | +LL | #![feature(foo)] + | ^ expected identifier + +error: aborting due to 1 previous error + diff --git a/tests/ui/attributes/z-crate-attr/multiple.rs b/tests/ui/attributes/z-crate-attr/multiple.rs new file mode 100644 index 0000000000000..ee13253f62551 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/multiple.rs @@ -0,0 +1,3 @@ +//@ compile-flags: -Zcrate-attr=feature(foo),feature(bar) +//@ error-pattern:invalid crate attr +fn main() {} diff --git a/tests/ui/attributes/z-crate-attr/multiple.stderr b/tests/ui/attributes/z-crate-attr/multiple.stderr new file mode 100644 index 
0000000000000..9f968a7e1346f --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/multiple.stderr @@ -0,0 +1,8 @@ +error: invalid crate attribute + --> :1:1 + | +LL | feature(foo),feature(bar) + | ^^^^^^^^^^^^^ + +error: aborting due to 1 previous error + diff --git a/tests/ui/attributes/z-crate-attr/respect-existing-attrs.rs b/tests/ui/attributes/z-crate-attr/respect-existing-attrs.rs new file mode 100644 index 0000000000000..71f2559998fba --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/respect-existing-attrs.rs @@ -0,0 +1,9 @@ +// Make sure that existing root attributes are still respected even when `-Zcrate-attr` is present. +//@ run-pass +//@ compile-flags: -Zcrate-attr=feature(rustc_attrs) +#![crate_name = "override"] + +#[rustc_dummy] +fn main() { + assert_eq!(module_path!(), "r#override"); +} diff --git a/tests/ui/attributes/z-crate-attr/shebang.rs b/tests/ui/attributes/z-crate-attr/shebang.rs new file mode 100644 index 0000000000000..195393acaf5ec --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/shebang.rs @@ -0,0 +1,6 @@ +#!/usr/bin/env -S cargo +nightly -Zscript +// Make sure that shebangs are still allowed even when `-Zcrate-attr` is present. +//@ check-pass +//@ compile-flags: -Zcrate-attr=feature(rustc_attrs) +#[rustc_dummy] +fn main() {} diff --git a/tests/ui/attributes/z-crate-attr/unbalanced-paren.rs b/tests/ui/attributes/z-crate-attr/unbalanced-paren.rs new file mode 100644 index 0000000000000..fc1d7f39a5972 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/unbalanced-paren.rs @@ -0,0 +1,4 @@ +// Show diagnostics for unbalanced parens. +//@ compile-flags: -Zcrate-attr=( +//@ error-pattern:unclosed delimiter +fn main() {} diff --git a/tests/ui/attributes/z-crate-attr/unbalanced-paren.stderr b/tests/ui/attributes/z-crate-attr/unbalanced-paren.stderr new file mode 100644 index 0000000000000..47b1b764ba9a7 --- /dev/null +++ b/tests/ui/attributes/z-crate-attr/unbalanced-paren.stderr @@ -0,0 +1,10 @@ +error: this file contains an unclosed delimiter + --> :1:2 + | +LL | ( + | -^ + | | + | unclosed delimiter + +error: aborting due to 1 previous error + diff --git a/tests/ui/consts/large_const_alloc.rs b/tests/ui/consts/large_const_alloc.rs index 14edc1bb69610..3573a018630f6 100644 --- a/tests/ui/consts/large_const_alloc.rs +++ b/tests/ui/consts/large_const_alloc.rs @@ -2,6 +2,9 @@ // on 32bit and 16bit platforms it is plausible that the maximum allocation size will succeed // FIXME (#135952) In some cases on AArch64 Linux the diagnostic does not trigger //@ ignore-aarch64-unknown-linux-gnu +// AIX will allow the allocation to go through, and get SIGKILL when zero initializing +// the overcommitted page. 
+//@ ignore-aix const FOO: () = { // 128 TiB, unlikely anyone has that much RAM diff --git a/tests/ui/consts/large_const_alloc.stderr b/tests/ui/consts/large_const_alloc.stderr index fa7d5977a9567..f3f3de7af63ac 100644 --- a/tests/ui/consts/large_const_alloc.stderr +++ b/tests/ui/consts/large_const_alloc.stderr @@ -1,11 +1,11 @@ error[E0080]: evaluation of constant value failed - --> $DIR/large_const_alloc.rs:8:13 + --> $DIR/large_const_alloc.rs:11:13 | LL | let x = [0_u8; (1 << 47) - 1]; | ^^^^^^^^^^^^^^^^^^^^^ tried to allocate more memory than available to compiler error[E0080]: could not evaluate static initializer - --> $DIR/large_const_alloc.rs:13:13 + --> $DIR/large_const_alloc.rs:16:13 | LL | let x = [0_u8; (1 << 47) - 1]; | ^^^^^^^^^^^^^^^^^^^^^ tried to allocate more memory than available to compiler diff --git a/tests/ui/consts/promoted_running_out_of_memory_issue-130687.rs b/tests/ui/consts/promoted_running_out_of_memory_issue-130687.rs index 53618e2e86acd..75765596fa172 100644 --- a/tests/ui/consts/promoted_running_out_of_memory_issue-130687.rs +++ b/tests/ui/consts/promoted_running_out_of_memory_issue-130687.rs @@ -5,6 +5,9 @@ //@ only-64bit // FIXME (#135952) In some cases on AArch64 Linux the diagnostic does not trigger //@ ignore-aarch64-unknown-linux-gnu +// AIX will allow the allocation to go through, and get SIGKILL when zero initializing +// the overcommitted page. +//@ ignore-aix pub struct Data([u8; (1 << 47) - 1]); const _: &'static Data = &Data([0; (1 << 47) - 1]); diff --git a/tests/ui/consts/promoted_running_out_of_memory_issue-130687.stderr b/tests/ui/consts/promoted_running_out_of_memory_issue-130687.stderr index aac805dbd8c79..02180c1e4c683 100644 --- a/tests/ui/consts/promoted_running_out_of_memory_issue-130687.stderr +++ b/tests/ui/consts/promoted_running_out_of_memory_issue-130687.stderr @@ -1,5 +1,5 @@ error[E0080]: evaluation of constant value failed - --> $DIR/promoted_running_out_of_memory_issue-130687.rs:10:32 + --> $DIR/promoted_running_out_of_memory_issue-130687.rs:13:32 | LL | const _: &'static Data = &Data([0; (1 << 47) - 1]); | ^^^^^^^^^^^^^^^^^^ tried to allocate more memory than available to compiler diff --git a/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr b/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr index bc8edd847cc0f..30e36acb871a4 100644 --- a/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr +++ b/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr @@ -5,6 +5,7 @@ LL | #![allow(unqualified_local_imports)] | ^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: the `unqualified_local_imports` lint is unstable + = note: see issue #138299 for more information = help: add `#![feature(unqualified_local_imports)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date = note: `#[warn(unknown_lints)]` on by default diff --git a/tests/ui/unpretty/deprecated-attr.rs b/tests/ui/unpretty/deprecated-attr.rs index 24a32d8a9acf9..dda362a595e24 100644 --- a/tests/ui/unpretty/deprecated-attr.rs +++ b/tests/ui/unpretty/deprecated-attr.rs @@ -1,8 +1,6 @@ //@ compile-flags: -Zunpretty=hir //@ check-pass -// FIXME(jdonszelmann): the pretty printing output for deprecated (and possibly more attrs) is -// slightly broken. 
#[deprecated] pub struct PlainDeprecated; diff --git a/tests/ui/unpretty/deprecated-attr.stdout b/tests/ui/unpretty/deprecated-attr.stdout index 675351351a0c6..42de7b4533e51 100644 --- a/tests/ui/unpretty/deprecated-attr.stdout +++ b/tests/ui/unpretty/deprecated-attr.stdout @@ -5,24 +5,21 @@ extern crate std; //@ compile-flags: -Zunpretty=hir //@ check-pass -// FIXME(jdonszelmann): the pretty printing output for deprecated (and possibly more attrs) is -// slightly broken. -#[attr="Deprecation{deprecation: Deprecation{since: Unspecifiednote: -suggestion: }span: }")] +#[attr = Deprecation {deprecation: Deprecation {since: Unspecified}}] struct PlainDeprecated; -#[attr="Deprecation{deprecation: Deprecation{since: Unspecifiednote: -here's why this is deprecatedsuggestion: }span: }")] +#[attr = Deprecation {deprecation: Deprecation {since: Unspecified, note: +"here's why this is deprecated"}}] struct DirectNote; -#[attr="Deprecation{deprecation: Deprecation{since: Unspecifiednote: -here's why this is deprecatedsuggestion: }span: }")] +#[attr = Deprecation {deprecation: Deprecation {since: Unspecified, note: +"here's why this is deprecated"}}] struct ExplicitNote; -#[attr="Deprecation{deprecation: Deprecation{since: NonStandard(1.2.3)note: -here's why this is deprecatedsuggestion: }span: }")] +#[attr = Deprecation {deprecation: Deprecation {since: NonStandard("1.2.3"), +note: "here's why this is deprecated"}}] struct SinceAndNote; -#[attr="Deprecation{deprecation: Deprecation{since: NonStandard(1.2.3)note: -here's why this is deprecatedsuggestion: }span: }")] +#[attr = Deprecation {deprecation: Deprecation {since: NonStandard("1.2.3"), +note: "here's why this is deprecated"}}] struct FlippedOrder;
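
The cleaner `#[attr = ...]` output above is driven by the `should_render` change at the top of this diff: a field that renders nothing also suppresses its field name and the separating comma. A minimal, self-contained sketch of that rule, using a plain `String` buffer and a made-up `Deprecation` stand-in rather than rustc's real `Printer` and generated code, looks like this:

```rust
// Simplified sketch of the "skip fields that render nothing" rule; the names
// below are illustrative stand-ins, not rustc's real types.

trait ShouldRender {
    fn should_render(&self) -> bool;
    fn render(&self, out: &mut String);
}

impl ShouldRender for Option<String> {
    fn should_render(&self) -> bool {
        self.is_some()
    }
    fn render(&self, out: &mut String) {
        if let Some(s) = self {
            out.push_str(&format!("{s:?}"));
        }
    }
}

// Stand-in for a parsed attribute with optional parts, like `Deprecation`.
struct Deprecation {
    since: Option<String>,
    note: Option<String>,
}

fn render_deprecation(d: &Deprecation) -> String {
    let mut out = String::from("Deprecation {");
    let mut printed_anything = false;
    for (name, field) in [("since", &d.since), ("note", &d.note)] {
        // The field name and the comma are only emitted when the field itself
        // renders, mirroring the generated `print_attribute` code above.
        if field.should_render() {
            if printed_anything {
                out.push_str(", ");
            }
            out.push_str(name);
            out.push_str(": ");
            field.render(&mut out);
            printed_anything = true;
        }
    }
    out.push('}');
    out
}

fn main() {
    let d = Deprecation { since: None, note: Some("here's why this is deprecated".into()) };
    // Prints `Deprecation {note: "here's why this is deprecated"}`:
    // no dangling `since:` label and no leading comma.
    println!("{}", render_deprecation(&d));
}
```
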