Remove RefCell around module_trait_cache #82402

Merged · 2 commits · Mar 7, 2021
10 changes: 5 additions & 5 deletions src/librustdoc/core.rs
@@ -76,7 +76,7 @@ crate struct DocContext<'tcx> {
///
/// See `collect_intra_doc_links::traits_implemented_by` for more details.
/// `map<module, set<trait>>`
crate module_trait_cache: RefCell<FxHashMap<DefId, FxHashSet<DefId>>>,
crate module_trait_cache: FxHashMap<DefId, FxHashSet<DefId>>,
/// This same cache is used throughout rustdoc, including in [`crate::html::render`].
crate cache: Cache,
/// Used by [`clean::inline`] to tell if an item has already been inlined.
@@ -169,13 +169,13 @@ impl<'tcx> DocContext<'tcx> {

/// Like `hir().local_def_id_to_hir_id()`, but skips calling it on fake DefIds.
/// (This avoids a slice-index-out-of-bounds panic.)
crate fn as_local_hir_id(&self, def_id: DefId) -> Option<HirId> {
crate fn as_local_hir_id(tcx: TyCtxt<'_>, def_id: DefId) -> Option<HirId> {
if MAX_DEF_IDX.with(|m| {
m.borrow().get(&def_id.krate).map(|&idx| idx <= def_id.index).unwrap_or(false)
}) {
None
} else {
def_id.as_local().map(|def_id| self.tcx.hir().local_def_id_to_hir_id(def_id))
def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id))
}
}
}
@@ -450,7 +450,7 @@ crate fn run_global_ctxt(
.cloned()
.filter(|trait_def_id| tcx.trait_is_auto(*trait_def_id))
.collect(),
module_trait_cache: RefCell::new(FxHashMap::default()),
module_trait_cache: FxHashMap::default(),
cache: Cache::new(access_levels, render_options.document_private),
inlined: FxHashSet::default(),
output_format,
@@ -479,7 +479,7 @@ crate fn run_global_ctxt(
https://doc.rust-lang.org/nightly/rustdoc/how-to-write-documentation.html";
tcx.struct_lint_node(
crate::lint::MISSING_CRATE_LEVEL_DOCS,
ctxt.as_local_hir_id(m.def_id).unwrap(),
DocContext::as_local_hir_id(tcx, m.def_id).unwrap(),
|lint| {
let mut diag =
lint.build("no documentation found for this crate's top-level module");
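The hunks above are the heart of the change: `module_trait_cache` drops its `RefCell`, and `as_local_hir_id` becomes an associated function taking `TyCtxt` so it no longer needs `&self`. A minimal sketch of the before/after ownership pattern, using placeholder types rather than rustdoc's real ones:

```rust
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};

// Before: the cache is reached through `&self` and mutated via interior
// mutability, so aliasing is only checked at runtime.
struct CtxBefore {
    module_trait_cache: RefCell<HashMap<u32, HashSet<u32>>>,
}

impl CtxBefore {
    fn traits_for(&self, module: u32) -> HashSet<u32> {
        self.module_trait_cache
            .borrow_mut() // panics if another borrow is live
            .entry(module)
            .or_default()
            .clone()
    }
}

// After: a plain map behind `&mut self`, so exclusive access is enforced
// by the borrow checker at compile time instead.
struct CtxAfter {
    module_trait_cache: HashMap<u32, HashSet<u32>>,
}

impl CtxAfter {
    fn traits_for(&mut self, module: u32) -> HashSet<u32> {
        self.module_trait_cache.entry(module).or_default().clone()
    }
}
```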
9 changes: 6 additions & 3 deletions src/librustdoc/passes/check_code_block_syntax.rs
@@ -48,9 +48,12 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
let buffer = buffer.borrow();

if buffer.has_errors || is_empty {
let mut diag = if let Some(sp) =
super::source_span_for_markdown_range(self.cx, &dox, &code_block.range, &item.attrs)
{
let mut diag = if let Some(sp) = super::source_span_for_markdown_range(
self.cx.tcx,
&dox,
&code_block.range,
&item.attrs,
) {
let (warning_message, suggest_using_text) = if buffer.has_errors {
("could not parse code block as Rust code", true)
} else {
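Same idea as in core.rs: helpers that only need the type context now receive `self.cx.tcx` instead of the whole `self.cx`, so calling them no longer ties up a borrow of `DocContext`. A rough illustration with stand-in types (these are not rustc's actual definitions or signatures):

```rust
use std::ops::Range;

#[derive(Clone, Copy)]
struct TyCtxt<'tcx>(&'tcx ()); // stand-in: rustc's TyCtxt is a small Copy handle

struct DocContext<'tcx> {
    tcx: TyCtxt<'tcx>,
    module_trait_cache: Vec<u32>, // some other, independently mutable state
}

// Taking only `TyCtxt` leaves the rest of `DocContext` free for the caller.
fn source_span_for_markdown_range(_tcx: TyCtxt<'_>, _md_range: &Range<usize>) -> Option<usize> {
    None
}

fn check(cx: &mut DocContext<'_>) {
    let tcx = cx.tcx;              // cheap copy of the handle
    cx.module_trait_cache.push(0); // mutable borrow of the context is still allowed
    let _sp = source_span_for_markdown_range(tcx, &(0..1));
}
```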
67 changes: 34 additions & 33 deletions src/librustdoc/passes/collect_intra_doc_links.rs
@@ -484,21 +484,23 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
/// Resolves a string as a path within a particular namespace. Returns an
/// optional URL fragment in the case of variants and methods.
fn resolve<'path>(
&self,
&mut self,
path_str: &'path str,
ns: Namespace,
module_id: DefId,
extra_fragment: &Option<String>,
) -> Result<(Res, Option<String>), ErrorKind<'path>> {
let cx = &self.cx;
let tcx = self.cx.tcx;

if let Some(res) = self.resolve_path(path_str, ns, module_id) {
match res {
// FIXME(#76467): make this fallthrough to lookup the associated
// item a separate function.
Res::Def(DefKind::AssocFn | DefKind::AssocConst, _) => assert_eq!(ns, ValueNS),
Res::Def(DefKind::AssocTy, _) => assert_eq!(ns, TypeNS),
Res::Def(DefKind::Variant, _) => return handle_variant(cx, res, extra_fragment),
Res::Def(DefKind::Variant, _) => {
return handle_variant(self.cx, res, extra_fragment);
}
// Not a trait item; just return what we found.
Res::Primitive(ty) => {
if extra_fragment.is_some() {
@@ -565,13 +567,12 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
) => {
debug!("looking for associated item named {} for item {:?}", item_name, did);
// Checks if item_name belongs to `impl SomeItem`
let assoc_item = cx
.tcx
let assoc_item = tcx
.inherent_impls(did)
.iter()
.flat_map(|&imp| {
cx.tcx.associated_items(imp).find_by_name_and_namespace(
cx.tcx,
tcx.associated_items(imp).find_by_name_and_namespace(
tcx,
Ident::with_dummy_span(item_name),
ns,
imp,
@@ -587,7 +588,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
// something like [`ambi_fn`](<SomeStruct as SomeTrait>::ambi_fn)
.or_else(|| {
let kind =
resolve_associated_trait_item(did, module_id, item_name, ns, &self.cx);
resolve_associated_trait_item(did, module_id, item_name, ns, self.cx);
debug!("got associated item kind {:?}", kind);
kind
});
@@ -611,7 +612,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
debug!("looking for variants or fields named {} for {:?}", item_name, did);
// FIXME(jynelson): why is this different from
// `variant_field`?
match cx.tcx.type_of(did).kind() {
match tcx.type_of(did).kind() {
ty::Adt(def, _) => {
let field = if def.is_enum() {
def.all_fields().find(|item| item.ident.name == item_name)
@@ -652,10 +653,9 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
None
}
}
Res::Def(DefKind::Trait, did) => cx
.tcx
Res::Def(DefKind::Trait, did) => tcx
.associated_items(did)
.find_by_name_and_namespace(cx.tcx, Ident::with_dummy_span(item_name), ns, did)
.find_by_name_and_namespace(tcx, Ident::with_dummy_span(item_name), ns, did)
.map(|item| {
let kind = match item.kind {
ty::AssocKind::Const => "associatedconstant",
@@ -699,7 +699,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
/// This returns the `Res` even if it was erroneous for some reason
/// (such as having invalid URL fragments or being in the wrong namespace).
fn check_full_res(
&self,
&mut self,
ns: Namespace,
path_str: &str,
module_id: DefId,
@@ -733,7 +733,7 @@ fn resolve_associated_trait_item(
module: DefId,
item_name: Symbol,
ns: Namespace,
cx: &DocContext<'_>,
cx: &mut DocContext<'_>,
) -> Option<(ty::AssocKind, DefId)> {
// FIXME: this should also consider blanket impls (`impl<T> X for T`). Unfortunately
// `get_auto_trait_and_blanket_impls` is broken because the caching behavior is wrong. In the
@@ -758,10 +758,10 @@
///
/// NOTE: this cannot be a query because more traits could be available when more crates are compiled!
/// So it is not stable to serialize cross-crate.
fn traits_implemented_by(cx: &DocContext<'_>, type_: DefId, module: DefId) -> FxHashSet<DefId> {
let mut cache = cx.module_trait_cache.borrow_mut();
let in_scope_traits = cache.entry(module).or_insert_with(|| {
cx.enter_resolver(|resolver| {
fn traits_implemented_by(cx: &mut DocContext<'_>, type_: DefId, module: DefId) -> FxHashSet<DefId> {
let mut resolver = cx.resolver.borrow_mut();
let in_scope_traits = cx.module_trait_cache.entry(module).or_insert_with(|| {
resolver.access(|resolver| {
let parent_scope = &ParentScope::module(resolver.get_module(module), resolver);
resolver
.traits_in_scope(None, parent_scope, SyntaxContext::root(), None)
Expand All @@ -771,13 +771,14 @@ fn traits_implemented_by(cx: &DocContext<'_>, type_: DefId, module: DefId) -> Fx
})
});

let ty = cx.tcx.type_of(type_);
let tcx = cx.tcx;
let ty = tcx.type_of(type_);
let iter = in_scope_traits.iter().flat_map(|&trait_| {
trace!("considering explicit impl for trait {:?}", trait_);

// Look at each trait implementation to see if it's an impl for `did`
cx.tcx.find_map_relevant_impl(trait_, ty, |impl_| {
let trait_ref = cx.tcx.impl_trait_ref(impl_).expect("this is not an inherent impl");
tcx.find_map_relevant_impl(trait_, ty, |impl_| {
let trait_ref = tcx.impl_trait_ref(impl_).expect("this is not an inherent impl");
// Check if these are the same type.
let impl_type = trait_ref.self_ty();
trace!(
@@ -1146,7 +1147,7 @@ impl LinkCollector<'_, '_> {
suggest_disambiguator(resolved, diag, path_str, dox, sp, &ori_link.range);
};
report_diagnostic(
self.cx,
self.cx.tcx,
BROKEN_INTRA_DOC_LINKS,
&msg,
&item,
@@ -1220,7 +1221,7 @@ impl LinkCollector<'_, '_> {
&& !self.cx.tcx.features().intra_doc_pointers
{
let span = super::source_span_for_markdown_range(
self.cx,
self.cx.tcx,
dox,
&ori_link.range,
&item.attrs,
@@ -1308,7 +1309,7 @@ impl LinkCollector<'_, '_> {
/// After parsing the disambiguator, resolve the main part of the link.
// FIXME(jynelson): wow this is just so much
fn resolve_with_disambiguator(
&self,
&mut self,
key: &ResolutionInfo,
diag: DiagnosticInfo<'_>,
) -> Option<(Res, Option<String>)> {
@@ -1674,15 +1675,15 @@ impl Suggestion {
/// parameter of the callback will contain it, and the primary span of the diagnostic will be set
/// to it.
fn report_diagnostic(
cx: &DocContext<'_>,
tcx: TyCtxt<'_>,
lint: &'static Lint,
msg: &str,
item: &Item,
dox: &str,
link_range: &Range<usize>,
decorate: impl FnOnce(&mut DiagnosticBuilder<'_>, Option<rustc_span::Span>),
) {
let hir_id = match cx.as_local_hir_id(item.def_id) {
let hir_id = match DocContext::as_local_hir_id(tcx, item.def_id) {
Some(hir_id) => hir_id,
None => {
// If non-local, no need to check anything.
@@ -1694,10 +1695,10 @@ fn report_diagnostic(
let attrs = &item.attrs;
let sp = span_of_attrs(attrs).unwrap_or(item.source.span());

cx.tcx.struct_span_lint_hir(lint, hir_id, sp, |lint| {
tcx.struct_span_lint_hir(lint, hir_id, sp, |lint| {
let mut diag = lint.build(msg);

let span = super::source_span_for_markdown_range(cx, dox, link_range, attrs);
let span = super::source_span_for_markdown_range(tcx, dox, link_range, attrs);

if let Some(sp) = span {
diag.set_span(sp);
@@ -1732,7 +1733,7 @@ fn report_diagnostic(
/// handled earlier. For example, if passed `Item::Crate(std)` and `path_str`
/// `std::io::Error::x`, this will resolve `std::io::Error`.
fn resolution_failure(
collector: &LinkCollector<'_, '_>,
collector: &mut LinkCollector<'_, '_>,
item: &Item,
path_str: &str,
disambiguator: Option<Disambiguator>,
@@ -1742,7 +1743,7 @@
) {
let tcx = collector.cx.tcx;
report_diagnostic(
collector.cx,
tcx,
BROKEN_INTRA_DOC_LINKS,
&format!("unresolved link to `{}`", path_str),
item,
@@ -1973,7 +1974,7 @@ fn anchor_failure(
),
};

report_diagnostic(cx, BROKEN_INTRA_DOC_LINKS, &msg, item, dox, &link_range, |diag, sp| {
report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, item, dox, &link_range, |diag, sp| {
if let Some(sp) = sp {
diag.span_label(sp, "contains invalid anchor");
}
@@ -2013,7 +2014,7 @@ fn ambiguity_error(
}
}

report_diagnostic(cx, BROKEN_INTRA_DOC_LINKS, &msg, item, dox, &link_range, |diag, sp| {
report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, item, dox, &link_range, |diag, sp| {
if let Some(sp) = sp {
diag.span_label(sp, "ambiguous link");
} else {
@@ -2066,7 +2067,7 @@ fn privacy_error(cx: &DocContext<'_>, item: &Item, path_str: &str, dox: &str, li
let msg =
format!("public documentation for `{}` links to private item `{}`", item_name, path_str);

report_diagnostic(cx, PRIVATE_INTRA_DOC_LINKS, &msg, item, dox, &link.range, |diag, sp| {
report_diagnostic(cx.tcx, PRIVATE_INTRA_DOC_LINKS, &msg, item, dox, &link.range, |diag, sp| {
if let Some(sp) = sp {
diag.span_label(sp, "this item is private");
}
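One detail in `traits_implemented_by` above is easy to miss: with the `RefCell` gone, the `or_insert_with` closure can no longer capture `cx` wholesale, because `cx.module_trait_cache` is already mutably borrowed. The resolver handle is therefore taken out first, and the closure captures only that. A small sketch of the split-borrow shape, with placeholder field types:

```rust
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};

struct Ctx {
    resolver: RefCell<Vec<u32>>,                    // stand-in for the shared resolver
    module_trait_cache: HashMap<u32, HashSet<u32>>, // the cache this PR un-RefCells
}

fn traits_implemented_by(cx: &mut Ctx, module: u32) -> HashSet<u32> {
    // Borrow the resolver *before* calling `entry`, so the closure below
    // captures only `resolver` while `cx.module_trait_cache` is mutably
    // borrowed. Disjoint field borrows like this are accepted by the borrow
    // checker; capturing `cx` itself inside the closure would not be.
    let resolver = cx.resolver.borrow_mut();
    let in_scope_traits = cx
        .module_trait_cache
        .entry(module)
        .or_insert_with(|| resolver.iter().copied().collect());
    in_scope_traits.clone()
}

fn main() {
    let mut cx = Ctx { resolver: RefCell::new(vec![1, 2]), module_trait_cache: HashMap::new() };
    assert_eq!(traits_implemented_by(&mut cx, 0).len(), 2);
}
```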
2 changes: 1 addition & 1 deletion src/librustdoc/passes/doc_test_lints.rs
@@ -73,7 +73,7 @@ crate fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -> boo
}

crate fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) {
let hir_id = match cx.as_local_hir_id(item.def_id) {
let hir_id = match DocContext::as_local_hir_id(cx.tcx, item.def_id) {
Some(hir_id) => hir_id,
None => {
// If non-local, no need to check anything.
9 changes: 5 additions & 4 deletions src/librustdoc/passes/html_tags.rs
@@ -167,7 +167,8 @@ fn extract_tags(

impl<'a, 'tcx> DocFolder for InvalidHtmlTagsLinter<'a, 'tcx> {
fn fold_item(&mut self, item: Item) -> Option<Item> {
let hir_id = match self.cx.as_local_hir_id(item.def_id) {
let tcx = self.cx.tcx;
let hir_id = match DocContext::as_local_hir_id(tcx, item.def_id) {
Some(hir_id) => hir_id,
None => {
// If non-local, no need to check anything.
@@ -176,13 +177,13 @@ impl<'a, 'tcx> DocFolder for InvalidHtmlTagsLinter<'a, 'tcx> {
};
let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
if !dox.is_empty() {
let cx = &self.cx;
let report_diag = |msg: &str, range: &Range<usize>| {
let sp = match super::source_span_for_markdown_range(cx, &dox, range, &item.attrs) {
let sp = match super::source_span_for_markdown_range(tcx, &dox, range, &item.attrs)
{
Some(sp) => sp,
None => span_of_attrs(&item.attrs).unwrap_or(item.source.span()),
};
cx.tcx.struct_span_lint_hir(crate::lint::INVALID_HTML_TAGS, hir_id, sp, |lint| {
tcx.struct_span_lint_hir(crate::lint::INVALID_HTML_TAGS, hir_id, sp, |lint| {
lint.build(msg).emit()
});
};
5 changes: 3 additions & 2 deletions src/librustdoc/passes/mod.rs
@@ -1,6 +1,7 @@
//! Contains information about "passes", used to modify crate information during the documentation
//! process.

use rustc_middle::ty::TyCtxt;
use rustc_span::{InnerSpan, Span, DUMMY_SP};
use std::ops::Range;

@@ -167,7 +168,7 @@ crate fn span_of_attrs(attrs: &clean::Attributes) -> Option<Span> {
/// attributes are not all sugared doc comments. It's difficult to calculate the correct span in
/// that case due to escaping and other source features.
crate fn source_span_for_markdown_range(
cx: &DocContext<'_>,
tcx: TyCtxt<'_>,
markdown: &str,
md_range: &Range<usize>,
attrs: &clean::Attributes,
@@ -179,7 +180,7 @@ crate fn source_span_for_markdown_range(
return None;
}

let snippet = cx.sess().source_map().span_to_snippet(span_of_attrs(attrs)?).ok()?;
let snippet = tcx.sess.source_map().span_to_snippet(span_of_attrs(attrs)?).ok()?;

let starting_line = markdown[..md_range.start].matches('\n').count();
let ending_line = starting_line + markdown[md_range.start..md_range.end].matches('\n').count();
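For the hunk just above: `source_span_for_markdown_range` now takes `tcx` only to reach `tcx.sess.source_map()`; the rest is offset arithmetic over the collapsed markdown. A self-contained sketch of the line-counting step visible in the diff (the span reconstruction itself is omitted):

```rust
use std::ops::Range;

/// Work out which (0-based) source lines a byte range of the collapsed
/// markdown falls on, mirroring the `starting_line` / `ending_line` lines above.
fn markdown_lines(markdown: &str, md_range: &Range<usize>) -> (usize, usize) {
    let starting_line = markdown[..md_range.start].matches('\n').count();
    let ending_line =
        starting_line + markdown[md_range.start..md_range.end].matches('\n').count();
    (starting_line, ending_line)
}

fn main() {
    let dox = "line zero\nline one\nline two";
    // A range that begins on line 1 and ends on line 2.
    assert_eq!(markdown_lines(dox, &(12..22)), (1, 2));
}
```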
4 changes: 2 additions & 2 deletions src/librustdoc/passes/non_autolinks.rs
@@ -60,7 +60,7 @@ crate fn check_non_autolinks(krate: Crate, cx: &mut DocContext<'_>) -> Crate {

impl<'a, 'tcx> DocFolder for NonAutolinksLinter<'a, 'tcx> {
fn fold_item(&mut self, item: Item) -> Option<Item> {
let hir_id = match self.cx.as_local_hir_id(item.def_id) {
let hir_id = match DocContext::as_local_hir_id(self.cx.tcx, item.def_id) {
Some(hir_id) => hir_id,
None => {
// If non-local, no need to check anything.
@@ -70,7 +70,7 @@ impl<'a, 'tcx> DocFolder for NonAutolinksLinter<'a, 'tcx> {
let dox = item.attrs.collapsed_doc_value().unwrap_or_default();
if !dox.is_empty() {
let report_diag = |cx: &DocContext<'_>, msg: &str, url: &str, range: Range<usize>| {
let sp = super::source_span_for_markdown_range(cx, &dox, &range, &item.attrs)
let sp = super::source_span_for_markdown_range(cx.tcx, &dox, &range, &item.attrs)
.or_else(|| span_of_attrs(&item.attrs))
.unwrap_or(item.source.span());
cx.tcx.struct_span_lint_hir(crate::lint::NON_AUTOLINKS, hir_id, sp, |lint| {