diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 65736bb16fc05..54c8f156f9da7 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -11,6 +11,8 @@ pub(crate) mod types;
 pub(crate) mod utils;
 
 use rustc_ast as ast;
+use rustc_ast::token::{Token, TokenKind};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_attr as attr;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry};
 use rustc_hir as hir;
@@ -2079,8 +2081,8 @@ impl<'hir> hir::intravisit::Visitor<'hir> for OneLevelVisitor<'hir> {
     fn visit_item(&mut self, item: &'hir hir::Item<'hir>) {
         if self.item.is_none()
             && item.ident == self.looking_for
-            && matches!(item.kind, hir::ItemKind::Use(_, _))
-            || item.owner_id.def_id == self.target_def_id
+            && (matches!(item.kind, hir::ItemKind::Use(_, _))
+                || item.owner_id.def_id == self.target_def_id)
         {
             self.item = Some(item);
         }
@@ -2096,34 +2098,149 @@ fn get_all_import_attributes<'hir>(
     tcx: TyCtxt<'hir>,
     target_def_id: LocalDefId,
     attributes: &mut Vec<ast::Attribute>,
+    is_inline: bool,
 ) {
     let hir_map = tcx.hir();
     let mut visitor = OneLevelVisitor::new(hir_map, target_def_id);
     let mut visited = FxHashSet::default();
     // If the item is an import and has at least a path with two parts, we go into it.
-    while let hir::ItemKind::Use(path, _) = item.kind &&
-        path.segments.len() > 1 &&
-        let hir::def::Res::Def(_, def_id) = path.segments[path.segments.len() - 2].res &&
-        visited.insert(def_id)
-    {
-        if let Some(hir::Node::Item(parent_item)) = hir_map.get_if_local(def_id) {
-            // We add the attributes from this import into the list.
-            attributes.extend_from_slice(hir_map.attrs(item.hir_id()));
-            // We get the `Ident` we will be looking for into `item`.
-            let looking_for = path.segments[path.segments.len() - 1].ident;
-            visitor.reset(looking_for);
-            hir::intravisit::walk_item(&mut visitor, parent_item);
-            if let Some(i) = visitor.item {
-                item = i;
-            } else {
-                break;
+    while let hir::ItemKind::Use(path, _) = item.kind && visited.insert(item.hir_id()) {
+        // We add the attributes from this import into the list.
+        add_without_unwanted_attributes(attributes, hir_map.attrs(item.hir_id()), is_inline);
+
+        let def_id = if path.segments.len() > 1 {
+            match path.segments[path.segments.len() - 2].res {
+                hir::def::Res::Def(_, def_id) => def_id,
+                _ => break,
+            }
+        } else {
+            // If the path doesn't have a parent, then the parent is the current module.
+            tcx.parent(item.owner_id.def_id.to_def_id())
+        };
+
+        let Some(parent) = hir_map.get_if_local(def_id) else { break };
+
+        // We get the `Ident` we will be looking for into `item`.
+        let looking_for = path.segments[path.segments.len() - 1].ident;
+        visitor.reset(looking_for);
+
+        match parent {
+            hir::Node::Item(parent_item) => {
+                hir::intravisit::walk_item(&mut visitor, parent_item);
+            }
+            hir::Node::Crate(m) => {
+                hir::intravisit::walk_mod(
+                    &mut visitor,
+                    m,
+                    tcx.local_def_id_to_hir_id(def_id.as_local().unwrap()),
+                );
             }
+            _ => break,
+        }
+        if let Some(i) = visitor.item {
+            item = i;
         } else {
             break;
         }
     }
 }
 
+fn filter_tokens_from_list(
+    args_tokens: TokenStream,
+    should_retain: impl Fn(&TokenTree) -> bool,
+) -> Vec<TokenTree> {
+    let mut tokens = Vec::with_capacity(args_tokens.len());
+    let mut skip_next_comma = false;
+    for token in args_tokens.into_trees() {
+        match token {
+            TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => {
+                skip_next_comma = false;
+            }
+            token if should_retain(&token) => {
+                skip_next_comma = false;
+                tokens.push(token);
+            }
+            _ => {
+                skip_next_comma = true;
+            }
+        }
+    }
+    tokens
+}
+
+/// When inlining items, we merge their attributes (and all the reexports' attributes too) with
+/// the final reexport. For example:
+///
+/// ```ignore (just an example)
+/// #[doc(hidden, cfg(feature = "foo"))]
+/// pub struct Foo;
+///
+/// #[doc(cfg(feature = "bar"))]
+/// #[doc(hidden, no_inline)]
+/// pub use Foo as Foo1;
+///
+/// #[doc(inline)]
+/// pub use Foo1 as Bar;
+/// ```
+///
+/// So `Bar` at the end will have both `cfg(feature = "...")` attributes. However, we don't want
+/// to merge all attributes, so we filter out the following ones:
+/// * `doc(inline)`
+/// * `doc(no_inline)`
+/// * `doc(hidden)`
+fn add_without_unwanted_attributes(
+    attrs: &mut Vec<ast::Attribute>,
+    new_attrs: &[ast::Attribute],
+    is_inline: bool,
+) {
+    // If it's `#[doc(inline)]`, we don't want all attributes; otherwise we keep everything.
+    if !is_inline {
+        attrs.extend_from_slice(new_attrs);
+        return;
+    }
+    for attr in new_attrs {
+        let mut attr = attr.clone();
+        match attr.kind {
+            ast::AttrKind::Normal(ref mut normal) => {
+                if let [ident] = &*normal.item.path.segments &&
+                    let ident = ident.ident.name &&
+                    ident == sym::doc
+                {
+                    match normal.item.args {
+                        ast::AttrArgs::Delimited(ref mut args) => {
+                            let tokens =
+                                filter_tokens_from_list(args.tokens.clone(), |token| {
+                                    !matches!(
+                                        token,
+                                        TokenTree::Token(
+                                            Token {
+                                                kind: TokenKind::Ident(
+                                                    sym::hidden | sym::inline | sym::no_inline,
+                                                    _,
+                                                ),
+                                                ..
+                                            },
+                                            _,
+                                        ),
+                                    )
+                                });
+                            args.tokens = TokenStream::new(tokens);
+                            attrs.push(attr);
+                        }
+                        ast::AttrArgs::Empty | ast::AttrArgs::Eq(..) => {
+                            attrs.push(attr);
+                            continue;
+                        }
+                    }
+                }
+            }
+            ast::AttrKind::DocComment(..) => {
+                attrs.push(attr);
+            }
+        }
+    }
+}
+
 fn clean_maybe_renamed_item<'tcx>(
     cx: &mut DocContext<'tcx>,
     item: &hir::Item<'tcx>,
@@ -2212,19 +2329,20 @@ fn clean_maybe_renamed_item<'tcx>(
         {
             // First, we add the attributes from the current import.
             extra_attrs.extend_from_slice(inline::load_attrs(cx, import_id.to_def_id()));
+            let is_inline = extra_attrs.lists(sym::doc).get_word_attr(sym::inline).is_some();
             // Then we get all the various imports' attributes.
-            get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut extra_attrs);
+            get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut extra_attrs, is_inline);
+            add_without_unwanted_attributes(&mut extra_attrs, inline::load_attrs(cx, def_id), is_inline);
+        } else {
+            // We only keep the item's attributes.
+            extra_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
         }
 
-        let mut item = if !extra_attrs.is_empty() {
-            extra_attrs.extend_from_slice(inline::load_attrs(cx, def_id));
-            let attrs = Attributes::from_ast(&extra_attrs);
-            let cfg = extra_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
+        let attrs = Attributes::from_ast(&extra_attrs);
+        let cfg = extra_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg);
 
-            Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg)
-        } else {
-            Item::from_def_id_and_parts(def_id, Some(name), kind, cx)
-        };
+        let mut item =
+            Item::from_def_id_and_attrs_and_parts(def_id, Some(name), kind, Box::new(attrs), cfg);
         item.inline_stmt_id = import_id.map(|def_id| def_id.to_def_id());
         vec![item]
     })
diff --git a/tests/rustdoc/reexport-attr-merge.rs b/tests/rustdoc/reexport-attr-merge.rs
new file mode 100644
index 0000000000000..f6c23a1365f46
--- /dev/null
+++ b/tests/rustdoc/reexport-attr-merge.rs
@@ -0,0 +1,33 @@
+// Regression test for .
+// The goal is to ensure that `doc(hidden)`, `doc(inline)` and `doc(no_inline)`
+// are not copied from an item when it is inlined.
+
+#![crate_name = "foo"]
+#![feature(doc_cfg)]
+
+// @has 'foo/index.html'
+
+#[doc(hidden, cfg(feature = "foo"))]
+pub struct Foo;
+
+#[doc(hidden, no_inline, cfg(feature = "bar"))]
+pub use Foo as Foo1;
+
+#[doc(hidden, inline)]
+pub use Foo1 as Foo2;
+
+// First we ensure that only the reexport `Bar2` and the inlined struct `Bar`
+// are generated.
+// @count - '//a[@class="struct"]' 2
+// Then we check that both `cfg` attributes are displayed.
+// @has - '//*[@class="stab portability"]' 'foo'
+// @has - '//*[@class="stab portability"]' 'bar'
+// And finally we check that the only element displayed is `Bar`.
+// @has - '//a[@class="struct"]' 'Bar'
+#[doc(inline)]
+pub use Foo2 as Bar;
+
+// This one should appear but `Bar2` won't be linked because there is no
+// `#[doc(inline)]`.
+// @has - '//*[@id="reexport.Bar2"]' 'pub use Foo2 as Bar2;'
+pub use Foo2 as Bar2;
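
For readers who want to see the comma handling in isolation: below is a minimal, self-contained sketch of the filtering idea behind `filter_tokens_from_list`, written against a made-up `Tok` enum and a hypothetical `filter_doc_tokens` helper instead of rustc_ast's real `TokenStream`/`TokenTree` types, so it can be compiled and run outside the compiler.

```rust
// Simplified stand-in for rustc_ast's `TokenTree`; `Tok` and `filter_doc_tokens`
// are hypothetical names used only for this sketch.
#[derive(Debug, Clone, PartialEq)]
enum Tok {
    // An identifier such as `hidden`, `inline` or `no_inline`.
    Ident(&'static str),
    // The `,` separating the arguments of `doc(...)`.
    Comma,
    // Anything else we keep opaque here, e.g. `cfg(feature = "foo")`.
    Other(&'static str),
}

// Keeps every token accepted by `should_retain`, and also drops the comma that
// immediately follows a dropped token so the rebuilt list stays well-formed.
fn filter_doc_tokens(tokens: Vec<Tok>, should_retain: impl Fn(&Tok) -> bool) -> Vec<Tok> {
    let mut kept = Vec::with_capacity(tokens.len());
    let mut skip_next_comma = false;
    for token in tokens {
        match token {
            Tok::Comma if skip_next_comma => skip_next_comma = false,
            token if should_retain(&token) => {
                skip_next_comma = false;
                kept.push(token);
            }
            _ => skip_next_comma = true,
        }
    }
    kept
}

fn main() {
    // Roughly the argument tokens of `#[doc(hidden, cfg(feature = "foo"), no_inline)]`.
    let tokens = vec![
        Tok::Ident("hidden"),
        Tok::Comma,
        Tok::Other("cfg(feature = \"foo\")"),
        Tok::Comma,
        Tok::Ident("no_inline"),
    ];
    let kept = filter_doc_tokens(tokens, |token: &Tok| {
        !matches!(token, Tok::Ident("hidden" | "inline" | "no_inline"))
    });
    // Only the `cfg(...)` part survives, i.e. `#[doc(cfg(feature = "foo"))]`.
    assert_eq!(kept, vec![Tok::Other("cfg(feature = \"foo\")")]);
    println!("{kept:?}");
}
```

The `skip_next_comma` flag is the key design point: when `hidden`, `inline` or `no_inline` is dropped from the `doc(...)` argument list, the comma that followed it has to be dropped as well, otherwise the rebuilt attribute token stream would contain a dangling separator.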