diff --git a/.mailmap b/.mailmap index 9034aae171417..3ff9e94ee5410 100644 --- a/.mailmap +++ b/.mailmap @@ -95,9 +95,9 @@ Herman J. Radtke III Herman J. Radtke III Ivan Ivaschenko J. J. Weber -Jakub Bukaj -Jakub Bukaj -Jakub Bukaj Jakub Bukaj +Jakub Adam Wieczorek +Jakub Adam Wieczorek +Jakub Adam Wieczorek James Deng James Miller James Perry diff --git a/src/Cargo.lock b/src/Cargo.lock index d8306c66daf84..7dc8374e3e8fd 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -1086,7 +1086,7 @@ dependencies = [ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "open 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "pulldown-cmark 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1383,7 +1383,7 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.1.0" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2104,7 +2104,7 @@ dependencies = [ name = "rustdoc" version = "0.0.0" dependencies = [ - "pulldown-cmark 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2886,7 +2886,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8" "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903" "checksum pulldown-cmark 0.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "378e941dbd392c101f2cb88097fa4d7167bc421d4b88de3ff7dbee503bc3233b" -"checksum pulldown-cmark 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a656fdb8b6848f896df5e478a0eb9083681663e37dcb77dd16981ff65329fe8b" +"checksum pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d6fdf85cda6cadfae5428a54661d431330b312bc767ddbc57adbedc24da66e32" "checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4" "checksum quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "07589615d719a60c8dd8a4622e7946465dfef20d1a428f969e3443e7386d5f45" "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index 266cae3c122f4..e6759d1bad940 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -343,7 +343,7 @@ impl Ordering { /// v.sort_by_key(|&num| (num > 3, Reverse(num))); /// assert_eq!(v, vec![3, 2, 1, 6, 5, 4]); /// ``` -#[derive(PartialEq, Eq, Debug)] +#[derive(PartialEq, Eq, Debug, Copy, Clone)] #[stable(feature = "reverse_cmp_key", since = "1.19.0")] pub struct Reverse(#[stable(feature = "reverse_cmp_key", since = "1.19.0")] pub T); diff --git 
a/src/libcore/iter/iterator.rs b/src/libcore/iter/iterator.rs index 33adb3f49dd0d..c1a0518cd2288 100644 --- a/src/libcore/iter/iterator.rs +++ b/src/libcore/iter/iterator.rs @@ -168,7 +168,7 @@ pub trait Iterator { /// This function might panic if the iterator has more than [`usize::MAX`] /// elements. /// - /// [`usize::MAX`]: ../../std/isize/constant.MAX.html + /// [`usize::MAX`]: ../../std/usize/constant.MAX.html /// /// # Examples /// diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 560dcf295b278..43330b63f9b9c 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -1209,7 +1209,7 @@ Basic usage: ``` ", $Feature, "let x: ", stringify!($SelfT), " = 2; // or any other integer type -assert_eq!(x.pow(4), 16);", +assert_eq!(x.pow(5), 32);", $EndFeature, " ```"), #[stable(feature = "rust1", since = "1.0.0")] @@ -2364,7 +2364,7 @@ assert_eq!(0x10", stringify!($SelfT), ".overflowing_shr(132), (0x1, true));", $E Basic usage: ``` -", $Feature, "assert_eq!(2", stringify!($SelfT), ".pow(4), 16);", $EndFeature, " +", $Feature, "assert_eq!(2", stringify!($SelfT), ".pow(5), 32);", $EndFeature, " ```"), #[stable(feature = "rust1", since = "1.0.0")] #[inline] diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 43cc437e1e7e3..61a58a6030623 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -72,6 +72,10 @@ impl DefPathTable { index } + pub fn next_id(&self, address_space: DefIndexAddressSpace) -> DefIndex { + DefIndex::from_array_index(self.index_to_key[address_space.index()].len(), address_space) + } + #[inline(always)] pub fn def_key(&self, index: DefIndex) -> DefKey { self.index_to_key[index.address_space().index()] diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 07c5b319970f8..7a386c144b738 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -180,7 +180,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { // for each body-id in this map, which will process the // obligations within. This is expected to be done 'late enough' // that all type inference variables have been bound and so forth. 
- region_obligations: RefCell)>>, + pub region_obligations: RefCell)>>, } /// A map returned by `skolemize_late_bound_regions()` indicating the skolemized @@ -1555,11 +1555,20 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { InferOk { value, obligations } } - fn borrow_region_constraints(&self) -> RefMut<'_, RegionConstraintCollector<'tcx>> { + pub fn borrow_region_constraints(&self) -> RefMut<'_, RegionConstraintCollector<'tcx>> { RefMut::map( self.region_constraints.borrow_mut(), |c| c.as_mut().expect("region constraints already solved")) } + + /// Clears the selection, evaluation, and projection caches. This is useful when + /// repeatedly attempting to select an Obligation while changing only + /// its ParamEnv, since FulfillmentContext doesn't use 'probe'. + pub fn clear_caches(&self) { + self.selection_cache.clear(); + self.evaluation_cache.clear(); + self.projection_cache.borrow_mut().clear(); + } } impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> { diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 68d81a2dee352..be196192371fd 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -82,7 +82,7 @@ pub type VarOrigins = IndexVec; /// Describes constraints between the region variables and other /// regions, as well as other conditions that must be verified, or /// assumptions that can be made. -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct RegionConstraintData<'tcx> { /// Constraints of the form `A <= B`, where either `A` or `B` can /// be a region variable (or neither, as it happens). @@ -142,7 +142,7 @@ pub enum Constraint<'tcx> { /// outlive `RS`. Therefore verify that `R <= RS[i]` for some /// `i`. Inference variables may be involved (but this verification /// step doesn't influence inference). -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Verify<'tcx> { pub kind: GenericKind<'tcx>, pub origin: SubregionOrigin<'tcx>, @@ -159,7 +159,7 @@ pub enum GenericKind<'tcx> { /// When we introduce a verification step, we wish to test that a /// particular region (let's call it `'min`) meets some bound. /// The bound is described by the following grammar: -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum VerifyBound<'tcx> { /// B = exists {R} --> some 'r in {R} must outlive 'min /// @@ -288,6 +288,10 @@ impl<'tcx> RegionConstraintCollector<'tcx> { &self.var_origins } + + pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> { + &self.data + } + /// Once all the constraints have been gathered, extract out the final data. /// /// Not legal during a snapshot.
diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 41cc8ca601ac0..31836f7e3c57b 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -32,8 +32,8 @@ use syntax_pos::{Span, DUMMY_SP}; pub use self::coherence::{orphan_check, overlapping_impls, OrphanCheckErr, OverlapResult}; pub use self::fulfill::FulfillmentContext; pub use self::project::MismatchedProjectionTypes; -pub use self::project::{normalize, normalize_projection_type, Normalized}; -pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal}; +pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; +pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal, Normalized}; pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote}; diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index ae539f07336d5..0d0476e7c21dd 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -1596,6 +1596,10 @@ impl<'tcx> ProjectionCache<'tcx> { } } + pub fn clear(&mut self) { + self.map.clear(); + } + pub fn snapshot(&mut self) -> ProjectionCacheSnapshot { ProjectionCacheSnapshot { snapshot: self.map.snapshot() } } diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 4ed25646d436d..cfeb456acefe6 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -93,6 +93,11 @@ pub struct SelectionContext<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { inferred_obligations: SnapshotVec>, intercrate_ambiguity_causes: Option>, + + /// Controls whether or not to filter out negative impls when selecting. + /// This is used in librustdoc to distinguish between the lack of an impl + /// and a negative impl + allow_negative_impls: bool } #[derive(Clone, Debug)] @@ -424,6 +429,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { intercrate: None, inferred_obligations: SnapshotVec::new(), intercrate_ambiguity_causes: None, + allow_negative_impls: false, } } @@ -436,6 +442,20 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { intercrate: Some(mode), inferred_obligations: SnapshotVec::new(), intercrate_ambiguity_causes: None, + allow_negative_impls: false, + } + } + + pub fn with_negative(infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, + allow_negative_impls: bool) -> SelectionContext<'cx, 'gcx, 'tcx> { + debug!("with_negative({:?})", allow_negative_impls); + SelectionContext { + infcx, + freshener: infcx.freshener(), + intercrate: None, + inferred_obligations: SnapshotVec::new(), + intercrate_ambiguity_causes: None, + allow_negative_impls, } } @@ -1086,7 +1106,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { fn filter_negative_impls(&self, candidate: SelectionCandidate<'tcx>) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { if let ImplCandidate(def_id) = candidate { - if self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative { + if !self.allow_negative_impls && + self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative { return Err(Unimplemented) } } @@ -3337,6 +3358,10 @@ impl<'tcx> SelectionCache<'tcx> { hashmap: RefCell::new(FxHashMap()) } } + + pub fn clear(&self) { + *self.hashmap.borrow_mut() = FxHashMap() + } } impl<'tcx> EvaluationCache<'tcx> { @@ -3345,6 +3370,10 @@ impl<'tcx> EvaluationCache<'tcx> { hashmap: RefCell::new(FxHashMap()) } } + + pub fn clear(&self) { + *self.hashmap.borrow_mut() = 
FxHashMap() + } } impl<'o,'tcx> TraitObligationStack<'o,'tcx> { diff --git a/src/librustc_data_structures/snapshot_map/mod.rs b/src/librustc_data_structures/snapshot_map/mod.rs index cd7143ad3ce84..cede6f147821b 100644 --- a/src/librustc_data_structures/snapshot_map/mod.rs +++ b/src/librustc_data_structures/snapshot_map/mod.rs @@ -45,6 +45,11 @@ impl SnapshotMap } } + pub fn clear(&mut self) { + self.map.clear(); + self.undo_log.clear(); + } + pub fn insert(&mut self, key: K, value: V) -> bool { match self.map.insert(key.clone(), value) { None => { diff --git a/src/librustc_resolve/diagnostics.rs b/src/librustc_resolve/diagnostics.rs index 8a29155d12d5b..c1e6b4f5a17d9 100644 --- a/src/librustc_resolve/diagnostics.rs +++ b/src/librustc_resolve/diagnostics.rs @@ -325,6 +325,8 @@ Erroneous code example: extern crate core; struct core; + +fn main() {} ``` There are two possible solutions: diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index e8c3966f23f08..9f3e44f56dae2 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -337,7 +337,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { lint::builtin::TYVAR_BEHIND_RAW_POINTER, scope_expr_id, span, - &format!("the type of this value must be known in this context")); + &format!("type annotations needed")); } } else { let t = self.structurally_resolved_type(span, final_ty); diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index d21bfb674c7fc..91fa3c5da6991 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -5052,9 +5052,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { ty } else { if !self.is_tainted_by_errors() { - type_error_struct!(self.tcx.sess, sp, ty, E0619, - "the type of this value must be known in this context") - .emit(); + self.need_type_info((**self).body_id, sp, ty); } self.demand_suptype(sp, self.tcx.types.err, ty); self.tcx.types.err diff --git a/src/librustc_typeck/diagnostics.rs b/src/librustc_typeck/diagnostics.rs index f59948e9fc42f..1c0e084832ebc 100644 --- a/src/librustc_typeck/diagnostics.rs +++ b/src/librustc_typeck/diagnostics.rs @@ -4368,12 +4368,13 @@ i_am_a_function(); "##, E0619: r##" +#### Note: this error code is no longer emitted by the compiler. The type-checker needed to know the type of an expression, but that type had not yet been inferred. Erroneous code example: -```compile_fail,E0619 +```compile_fail let mut x = vec![]; match x.pop() { Some(v) => { diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml index 09d0a0f610b7b..88b0f4486223c 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -10,5 +10,5 @@ path = "lib.rs" doctest = false [dependencies] -pulldown-cmark = { version = "0.1.0", default-features = false } +pulldown-cmark = { version = "0.1.2", default-features = false } tempdir = "0.3" diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs new file mode 100644 index 0000000000000..f1bba0e836189 --- /dev/null +++ b/src/librustdoc/clean/auto_trait.rs @@ -0,0 +1,1492 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use rustc::ty::TypeFoldable; + +use super::*; + +pub struct AutoTraitFinder<'a, 'tcx: 'a, 'rcx: 'a> { + pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>, +} + +impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> { + pub fn get_with_def_id(&self, def_id: DefId) -> Vec { + let ty = self.cx.tcx.type_of(def_id); + + let def_ctor: fn(DefId) -> Def = match ty.sty { + ty::TyAdt(adt, _) => match adt.adt_kind() { + AdtKind::Struct => Def::Struct, + AdtKind::Enum => Def::Enum, + AdtKind::Union => Def::Union, + }, + _ => panic!("Unexpected type {:?}", def_id), + }; + + self.get_auto_trait_impls(def_id, def_ctor, None) + } + + pub fn get_with_node_id(&self, id: ast::NodeId, name: String) -> Vec { + let item = &self.cx.tcx.hir.expect_item(id).node; + let did = self.cx.tcx.hir.local_def_id(id); + + let def_ctor = match *item { + hir::ItemStruct(_, _) => Def::Struct, + hir::ItemUnion(_, _) => Def::Union, + hir::ItemEnum(_, _) => Def::Enum, + _ => panic!("Unexpected type {:?} {:?}", item, id), + }; + + self.get_auto_trait_impls(did, def_ctor, Some(name)) + } + + pub fn get_auto_trait_impls( + &self, + def_id: DefId, + def_ctor: fn(DefId) -> Def, + name: Option, + ) -> Vec { + if self.cx + .tcx + .get_attrs(def_id) + .lists("doc") + .has_word("hidden") + { + debug!( + "get_auto_trait_impls(def_id={:?}, def_ctor={:?}): item has doc('hidden'), \ + aborting", + def_id, def_ctor + ); + return Vec::new(); + } + + let tcx = self.cx.tcx; + let generics = self.cx.tcx.generics_of(def_id); + + debug!( + "get_auto_trait_impls(def_id={:?}, def_ctor={:?}, generics={:?}", + def_id, def_ctor, generics + ); + let auto_traits: Vec<_> = self.cx + .send_trait + .and_then(|send_trait| { + self.get_auto_trait_impl_for( + def_id, + name.clone(), + generics.clone(), + def_ctor, + send_trait, + ) + }) + .into_iter() + .chain(self.get_auto_trait_impl_for( + def_id, + name.clone(), + generics.clone(), + def_ctor, + tcx.require_lang_item(lang_items::SyncTraitLangItem), + ).into_iter()) + .collect(); + + debug!( + "get_auto_traits: type {:?} auto_traits {:?}", + def_id, auto_traits + ); + auto_traits + } + + fn get_auto_trait_impl_for( + &self, + def_id: DefId, + name: Option, + generics: ty::Generics, + def_ctor: fn(DefId) -> Def, + trait_def_id: DefId, + ) -> Option { + if !self.cx + .generated_synthetics + .borrow_mut() + .insert((def_id, trait_def_id)) + { + debug!( + "get_auto_trait_impl_for(def_id={:?}, generics={:?}, def_ctor={:?}, \ + trait_def_id={:?}): already generated, aborting", + def_id, generics, def_ctor, trait_def_id + ); + return None; + } + + let result = self.find_auto_trait_generics(def_id, trait_def_id, &generics); + + if result.is_auto() { + let trait_ = hir::TraitRef { + path: get_path_for_type(self.cx.tcx, trait_def_id, hir::def::Def::Trait), + ref_id: ast::DUMMY_NODE_ID, + }; + + let polarity; + + let new_generics = match result { + AutoTraitResult::PositiveImpl(new_generics) => { + polarity = None; + new_generics + } + AutoTraitResult::NegativeImpl => { + polarity = Some(ImplPolarity::Negative); + + // For negative impls, we use the generic params, but *not* the predicates, + // from the original type. Otherwise, the displayed impl appears to be a + // conditional negative impl, when it's really unconditional. + // + // For example, consider the struct Foo(*mut T). Using + // the original predicates in our impl would cause us to generate + // `impl !Send for Foo`, which makes it appear that Foo + // implements Send where T is not copy. 
+ // + // Instead, we generate `impl !Send for Foo`, which better + // expresses the fact that `Foo` never implements `Send`, + // regardless of the choice of `T`. + let real_generics = (&generics, &Default::default()); + + // Clean the generics, but ignore the '?Sized' bounds generated + // by the `Clean` impl + let clean_generics = real_generics.clean(self.cx); + + Generics { + params: clean_generics.params, + where_predicates: Vec::new(), + } + } + _ => unreachable!(), + }; + + let path = get_path_for_type(self.cx.tcx, def_id, def_ctor); + let mut segments = path.segments.into_vec(); + let last = segments.pop().unwrap(); + + let real_name = name.as_ref().map(|n| Symbol::from(n.as_str())); + + segments.push(hir::PathSegment::new( + real_name.unwrap_or(last.name), + self.generics_to_path_params(generics.clone()), + false, + )); + + let new_path = hir::Path { + span: path.span, + def: path.def, + segments: HirVec::from_vec(segments), + }; + + let ty = hir::Ty { + id: ast::DUMMY_NODE_ID, + node: hir::Ty_::TyPath(hir::QPath::Resolved(None, P(new_path))), + span: DUMMY_SP, + hir_id: hir::DUMMY_HIR_ID, + }; + + return Some(Item { + source: Span::empty(), + name: None, + attrs: Default::default(), + visibility: None, + def_id: self.next_def_id(def_id.krate), + stability: None, + deprecation: None, + inner: ImplItem(Impl { + unsafety: hir::Unsafety::Normal, + generics: new_generics, + provided_trait_methods: FxHashSet(), + trait_: Some(trait_.clean(self.cx)), + for_: ty.clean(self.cx), + items: Vec::new(), + polarity, + synthetic: true, + }), + }); + } + None + } + + fn generics_to_path_params(&self, generics: ty::Generics) -> hir::PathParameters { + let lifetimes = HirVec::from_vec( + generics + .regions + .iter() + .map(|p| { + let name = if p.name == "" { + hir::LifetimeName::Static + } else { + hir::LifetimeName::Name(p.name) + }; + + hir::Lifetime { + id: ast::DUMMY_NODE_ID, + span: DUMMY_SP, + name, + } + }) + .collect(), + ); + let types = HirVec::from_vec( + generics + .types + .iter() + .map(|p| P(self.ty_param_to_ty(p.clone()))) + .collect(), + ); + + hir::PathParameters { + lifetimes: lifetimes, + types: types, + bindings: HirVec::new(), + parenthesized: false, + } + } + + fn ty_param_to_ty(&self, param: ty::TypeParameterDef) -> hir::Ty { + debug!("ty_param_to_ty({:?}) {:?}", param, param.def_id); + hir::Ty { + id: ast::DUMMY_NODE_ID, + node: hir::Ty_::TyPath(hir::QPath::Resolved( + None, + P(hir::Path { + span: DUMMY_SP, + def: Def::TyParam(param.def_id), + segments: HirVec::from_vec(vec![hir::PathSegment::from_name(param.name)]), + }), + )), + span: DUMMY_SP, + hir_id: hir::DUMMY_HIR_ID, + } + } + + fn find_auto_trait_generics( + &self, + did: DefId, + trait_did: DefId, + generics: &ty::Generics, + ) -> AutoTraitResult { + let tcx = self.cx.tcx; + let ty = self.cx.tcx.type_of(did); + + let orig_params = tcx.param_env(did); + + let trait_ref = ty::TraitRef { + def_id: trait_did, + substs: tcx.mk_substs_trait(ty, &[]), + }; + + let trait_pred = ty::Binder(trait_ref); + + let bail_out = tcx.infer_ctxt().enter(|infcx| { + let mut selcx = SelectionContext::with_negative(&infcx, true); + let result = selcx.select(&Obligation::new( + ObligationCause::dummy(), + orig_params, + trait_pred.to_poly_trait_predicate(), + )); + match result { + Ok(Some(Vtable::VtableImpl(_))) => { + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): \ + manual impl found, bailing out", + did, trait_did, generics + ); + return true; + } + _ => return false, + }; + }); + + // If an 
explicit impl exists, it always takes priority over an auto impl + if bail_out { + return AutoTraitResult::ExplicitImpl; + } + + return tcx.infer_ctxt().enter(|mut infcx| { + let mut fresh_preds = FxHashSet(); + + // Due to the way projections are handled by SelectionContext, we need to run + // evaluate_predicates twice: once on the original param env, and once on the result of + // the first evaluate_predicates call. + // + // The problem is this: most of rustc, including SelectionContext and traits::project, + // are designed to work with a concrete usage of a type (e.g. Vec + // fn() { Vec }. This information will generally never change - given + // the 'T' in fn() { ... }, we'll never know anything else about 'T'. + // If we're unable to prove that 'T' implements a particular trait, we're done - + // there's nothing left to do but error out. + // + // However, synthesizing an auto trait impl works differently. Here, we start out with + // a set of initial conditions - the ParamEnv of the struct/enum/union we're dealing + // with - and progressively discover the conditions we need to fulfill for it to + // implement a certain auto trait. This ends up breaking two assumptions made by trait + // selection and projection: + // + // * We can always cache the result of a particular trait selection for the lifetime of + // an InfCtxt + // * Given a projection bound such as '::SomeItem = K', if 'T: + // SomeTrait' doesn't hold, then we don't need to care about the 'SomeItem = K' + // + // We fix the first assumption by manually clearing out all of the InferCtxt's caches + // in between calls to SelectionContext.select. This allows us to keep all of the + // intermediate types we create bound to the 'tcx lifetime, rather than needing to lift + // them between calls. + // + // We fix the second assumption by reprocessing the result of our first call to + // evaluate_predicates. Using the example of '::SomeItem = K', our first + // pass will pick up 'T: SomeTrait', but not 'SomeItem = K'. On our second pass, + // traits::project will see that 'T: SomeTrait' is in our ParamEnv, allowing + // SelectionContext to return it back to us. + + let (new_env, user_env) = match self.evaluate_predicates( + &mut infcx, + did, + trait_did, + ty, + orig_params.clone(), + orig_params, + &mut fresh_preds, + false, + ) { + Some(e) => e, + None => return AutoTraitResult::NegativeImpl, + }; + + let (full_env, full_user_env) = self.evaluate_predicates( + &mut infcx, + did, + trait_did, + ty, + new_env.clone(), + user_env, + &mut fresh_preds, + true, + ).unwrap_or_else(|| { + panic!( + "Failed to fully process: {:?} {:?} {:?}", + ty, trait_did, orig_params + ) + }); + + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): fulfilling \ + with {:?}", + did, trait_did, generics, full_env + ); + infcx.clear_caches(); + + // At this point, we already have all of the bounds we need. FulfillmentContext is used + // to store all of the necessary region/lifetime bounds in the InferContext, as well as + // an additional sanity check. 
+ let mut fulfill = FulfillmentContext::new(); + fulfill.register_bound( + &infcx, + full_env, + ty, + trait_did, + ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID), + ); + fulfill.select_all_or_error(&infcx).unwrap_or_else(|e| { + panic!( + "Unable to fulfill trait {:?} for '{:?}': {:?}", + trait_did, ty, e + ) + }); + + let names_map: FxHashMap = generics + .regions + .iter() + .map(|l| (l.name.as_str().to_string(), l.clean(self.cx))) + .collect(); + + let body_ids: FxHashSet<_> = infcx + .region_obligations + .borrow() + .iter() + .map(|&(id, _)| id) + .collect(); + + for id in body_ids { + infcx.process_registered_region_obligations(&[], None, full_env.clone(), id); + } + + let region_data = infcx + .borrow_region_constraints() + .region_constraint_data() + .clone(); + + let lifetime_predicates = self.handle_lifetimes(®ion_data, &names_map); + let vid_to_region = self.map_vid_to_region(®ion_data); + + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): computed \ + lifetime information '{:?}' '{:?}'", + did, trait_did, generics, lifetime_predicates, vid_to_region + ); + + let new_generics = self.param_env_to_generics( + infcx.tcx, + did, + full_user_env, + generics.clone(), + lifetime_predicates, + vid_to_region, + ); + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): finished with \ + {:?}", + did, trait_did, generics, new_generics + ); + return AutoTraitResult::PositiveImpl(new_generics); + }); + } + + fn clean_pred<'c, 'd, 'cx>( + &self, + infcx: &InferCtxt<'c, 'd, 'cx>, + p: ty::Predicate<'cx>, + ) -> ty::Predicate<'cx> { + infcx.freshen(p) + } + + fn evaluate_nested_obligations<'b, 'c, 'd, 'cx, + T: Iterator>>>( + &self, + ty: ty::Ty, + nested: T, + computed_preds: &'b mut FxHashSet>, + fresh_preds: &'b mut FxHashSet>, + predicates: &'b mut VecDeque>, + select: &mut traits::SelectionContext<'c, 'd, 'cx>, + only_projections: bool, + ) -> bool { + let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID); + + for (obligation, predicate) in nested + .filter(|o| o.recursion_depth == 1) + .map(|o| (o.clone(), o.predicate.clone())) + { + let is_new_pred = + fresh_preds.insert(self.clean_pred(select.infcx(), predicate.clone())); + + match &predicate { + &ty::Predicate::Trait(ref p) => { + let substs = &p.skip_binder().trait_ref.substs; + + if self.is_of_param(substs) && !only_projections && is_new_pred { + computed_preds.insert(predicate); + } + predicates.push_back(p.clone()); + } + &ty::Predicate::Projection(p) => { + // If the projection isn't all type vars, then + // we don't want to add it as a bound + if self.is_of_param(p.skip_binder().projection_ty.substs) && is_new_pred { + computed_preds.insert(predicate); + } else { + match traits::poly_project_and_unify_type( + select, + &obligation.with(p.clone()), + ) { + Err(e) => { + debug!( + "evaluate_nested_obligations: Unable to unify predicate \ + '{:?}' '{:?}', bailing out", + ty, e + ); + return false; + } + Ok(Some(v)) => { + if !self.evaluate_nested_obligations( + ty, + v.clone().iter().cloned(), + computed_preds, + fresh_preds, + predicates, + select, + only_projections, + ) { + return false; + } + } + Ok(None) => { + panic!("Unexpected result when selecting {:?} {:?}", ty, obligation) + } + } + } + } + &ty::Predicate::RegionOutlives(ref binder) => { + if let Err(_) = select + .infcx() + .region_outlives_predicate(&dummy_cause, binder) + { + return false; + } + } + &ty::Predicate::TypeOutlives(ref binder) => { + match ( + binder.no_late_bound_regions(), + 
binder.map_bound_ref(|pred| pred.0).no_late_bound_regions(), + ) { + (None, Some(t_a)) => { + select.infcx().register_region_obligation( + ast::DUMMY_NODE_ID, + RegionObligation { + sup_type: t_a, + sub_region: select.infcx().tcx.types.re_static, + cause: dummy_cause.clone(), + }, + ); + } + (Some(ty::OutlivesPredicate(t_a, r_b)), _) => { + select.infcx().register_region_obligation( + ast::DUMMY_NODE_ID, + RegionObligation { + sup_type: t_a, + sub_region: r_b, + cause: dummy_cause.clone(), + }, + ); + } + _ => {} + }; + } + _ => panic!("Unexpected predicate {:?} {:?}", ty, predicate), + }; + } + return true; + } + + // The core logic responsible for computing the bounds for our synthesized impl. + // + // To calculate the bounds, we call SelectionContext.select in a loop. Like FulfillmentContext, + // we recursively select the nested obligations of predicates we encounter. However, whenever we + // encounter an UnimplementedError involving a type parameter, we add it to our ParamEnv. Since + // our goal is to determine when a particular type implements an auto trait, Unimplemented + // errors tell us what conditions need to be met. + // + // This method ends up working somewhat similarly to FulfillmentContext, but with a few key + // differences. FulfillmentContext works under the assumption that it's dealing with concrete + // user code. Accordingly, it considers all possible ways that a Predicate could be met - which + // isn't always what we want for a synthesized impl. For example, given the predicate 'T: + // Iterator', FulfillmentContext can end up reporting an Unimplemented error for T: + // IntoIterator - since there's an implementation of Iterator where T: IntoIterator, + // FulfillmentContext will drive SelectionContext to consider that impl before giving up. If we + // were to rely on FulfillmentContext's decision, we might end up synthesizing an impl like + // this: + // 'impl Send for Foo where T: IntoIterator' + // + // While it might be technically true that Foo implements Send where T: IntoIterator, + // the bound is overly restrictive - it's really only necessary that T: Iterator. + // + // For this reason, evaluate_predicates handles predicates with type variables specially. When + // we encounter an Unimplemented error for a bound such as 'T: Iterator', we immediately add it + // to our ParamEnv, and add it to our stack for recursive evaluation. When we later select it, + // we'll pick up any nested bounds, without ever inferring that 'T: IntoIterator' needs to + // hold. + // + // One additional consideration is supertrait bounds. Normally, a ParamEnv is only ever + // constructed once for a given type. As part of the construction process, the ParamEnv will + // have any supertrait bounds normalized - e.g. if we have a type 'struct Foo', the + // ParamEnv will contain 'T: Copy' and 'T: Clone', since 'Copy: Clone'. When we construct our + // own ParamEnv, we need to do this ourselves, through traits::elaborate_predicates, or else + // SelectionContext will choke on the missing predicates. However, this should never show up in + // the final synthesized generics: we don't want our generated docs page to contain something + // like 'T: Copy + Clone', as that's redundant. Therefore, we keep track of a separate + // 'user_env', which only holds the predicates that will actually be displayed to the user.
+ fn evaluate_predicates<'b, 'gcx, 'c>( + &self, + infcx: &mut InferCtxt<'b, 'tcx, 'c>, + ty_did: DefId, + trait_did: DefId, + ty: ty::Ty<'c>, + param_env: ty::ParamEnv<'c>, + user_env: ty::ParamEnv<'c>, + fresh_preds: &mut FxHashSet>, + only_projections: bool, + ) -> Option<(ty::ParamEnv<'c>, ty::ParamEnv<'c>)> { + let tcx = infcx.tcx; + + let mut select = traits::SelectionContext::new(&infcx); + + let mut already_visited = FxHashSet(); + let mut predicates = VecDeque::new(); + predicates.push_back(ty::Binder(ty::TraitPredicate { + trait_ref: ty::TraitRef { + def_id: trait_did, + substs: infcx.tcx.mk_substs_trait(ty, &[]), + }, + })); + + let mut computed_preds: FxHashSet<_> = param_env.caller_bounds.iter().cloned().collect(); + let mut user_computed_preds: FxHashSet<_> = + user_env.caller_bounds.iter().cloned().collect(); + + let mut new_env = param_env.clone(); + let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID); + + while let Some(pred) = predicates.pop_front() { + infcx.clear_caches(); + + if !already_visited.insert(pred.clone()) { + continue; + } + + let result = select.select(&Obligation::new(dummy_cause.clone(), new_env, pred)); + + match &result { + &Ok(Some(ref vtable)) => { + let obligations = vtable.clone().nested_obligations().into_iter(); + + if !self.evaluate_nested_obligations( + ty, + obligations, + &mut user_computed_preds, + fresh_preds, + &mut predicates, + &mut select, + only_projections, + ) { + return None; + } + } + &Ok(None) => {} + &Err(SelectionError::Unimplemented) => { + if self.is_of_param(pred.skip_binder().trait_ref.substs) { + already_visited.remove(&pred); + user_computed_preds.insert(ty::Predicate::Trait(pred.clone())); + predicates.push_back(pred); + } else { + debug!( + "evaluate_nested_obligations: Unimplemented found, bailing: {:?} {:?} \ + {:?}", + ty, + pred, + pred.skip_binder().trait_ref.substs + ); + return None; + } + } + _ => panic!("Unexpected error for '{:?}': {:?}", ty, result), + }; + + computed_preds.extend(user_computed_preds.iter().cloned()); + let normalized_preds = + traits::elaborate_predicates(tcx, computed_preds.clone().into_iter().collect()); + new_env = ty::ParamEnv::new(tcx.mk_predicates(normalized_preds), param_env.reveal); + } + + let final_user_env = ty::ParamEnv::new( + tcx.mk_predicates(user_computed_preds.into_iter()), + user_env.reveal, + ); + debug!( + "evaluate_nested_obligations(ty_did={:?}, trait_did={:?}): succeeded with '{:?}' \ + '{:?}'", + ty_did, trait_did, new_env, final_user_env + ); + + return Some((new_env, final_user_env)); + } + + fn is_of_param(&self, substs: &Substs) -> bool { + if substs.is_noop() { + return false; + } + + return match substs.type_at(0).sty { + ty::TyParam(_) => true, + ty::TyProjection(p) => self.is_of_param(p.substs), + _ => false, + }; + } + + fn get_lifetime(&self, region: Region, names_map: &FxHashMap) -> Lifetime { + self.region_name(region) + .map(|name| { + names_map.get(&name).unwrap_or_else(|| { + panic!("Missing lifetime with name {:?} for {:?}", name, region) + }) + }) + .unwrap_or(&Lifetime::statik()) + .clone() + } + + fn region_name(&self, region: Region) -> Option { + match region { + &ty::ReEarlyBound(r) => Some(r.name.as_str().to_string()), + _ => None, + } + } + + // This is very similar to handle_lifetimes. 
However, instead of matching ty::Region's + // to each other, we match ty::RegionVid's to ty::Region's + fn map_vid_to_region<'cx>( + &self, + regions: &RegionConstraintData<'cx>, + ) -> FxHashMap> { + let mut vid_map: FxHashMap, RegionDeps<'cx>> = FxHashMap(); + let mut finished_map = FxHashMap(); + + for constraint in regions.constraints.keys() { + match constraint { + &Constraint::VarSubVar(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::RegionVid(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::RegionVid(r1)); + } + &Constraint::RegSubVar(region, vid) => { + { + let deps1 = vid_map + .entry(RegionTarget::Region(region)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(vid)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::Region(region)); + } + &Constraint::VarSubReg(vid, region) => { + finished_map.insert(vid, region); + } + &Constraint::RegSubReg(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::Region(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::Region(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::Region(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::Region(r1)); + } + } + } + + while !vid_map.is_empty() { + let target = vid_map.keys().next().expect("Keys somehow empty").clone(); + let deps = vid_map.remove(&target).expect("Entry somehow missing"); + + for smaller in deps.smaller.iter() { + for larger in deps.larger.iter() { + match (smaller, larger) { + (&RegionTarget::Region(_), &RegionTarget::Region(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + (&RegionTarget::RegionVid(v1), &RegionTarget::Region(r1)) => { + finished_map.insert(v1, r1); + } + (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => { + // Do nothing - we don't care about regions that are smaller than vids + } + (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + } + } + } + } + finished_map + } + + // This method calculates two things: Lifetime constraints of the form 'a: 'b, + // and region constraints of the form ReVar: 'a + // + // This is essentially a simplified version of lexical_region_resolve. However, + // handle_lifetimes determines what *needs be* true in order for an impl to hold. + // lexical_region_resolve, along with much of the rest of the compiler, is concerned + // with determining if a given set up constraints/predicates *are* met, given some + // starting conditions (e.g. user-provided code). 
For this reason, it's easier + // to perform the calculations we need on our own, rather than trying to make + // existing inference/solver code do what we want. + fn handle_lifetimes<'cx>( + &self, + regions: &RegionConstraintData<'cx>, + names_map: &FxHashMap, + ) -> Vec { + // Our goal is to 'flatten' the list of constraints by eliminating + // all intermediate RegionVids. At the end, all constraints should + // be between Regions (aka region variables). This gives us the information + // we need to create the Generics. + let mut finished = FxHashMap(); + + let mut vid_map: FxHashMap = FxHashMap(); + + // Flattening is done in two parts. First, we insert all of the constraints + // into a map. Each RegionTarget (either a RegionVid or a Region) maps + // to its smaller and larger regions. Note that 'larger' regions correspond + // to sub-regions in Rust code (e.g. in 'a: 'b, 'a is the larger region). + for constraint in regions.constraints.keys() { + match constraint { + &Constraint::VarSubVar(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::RegionVid(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::RegionVid(r1)); + } + &Constraint::RegSubVar(region, vid) => { + let deps = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps.smaller.insert(RegionTarget::Region(region)); + } + &Constraint::VarSubReg(vid, region) => { + let deps = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps.larger.insert(RegionTarget::Region(region)); + } + &Constraint::RegSubReg(r1, r2) => { + // The constraint is already in the form that we want, so we're done with it + // Desired order is 'larger, smaller', so flip then + if self.region_name(r1) != self.region_name(r2) { + finished + .entry(self.region_name(r2).unwrap()) + .or_insert_with(|| Vec::new()) + .push(r1); + } + } + } + } + + // Here, we 'flatten' the map one element at a time. + // All of the element's sub and super regions are connected + // to each other. For example, if we have a graph that looks like this: + // + // (A, B) - C - (D, E) + // Where (A, B) are subregions, and (D,E) are super-regions + // + // then after deleting 'C', the graph will look like this: + // ... - A - (D, E ...) + // ... - B - (D, E, ...) + // (A, B, ...) - D - ... + // (A, B, ...) - E - ... + // + // where '...' signifies the existing sub and super regions of an entry + // When two adjacent ty::Regions are encountered, we've computed a final + // constraint, and add it to our list. Since we make sure to never re-add + // deleted items, this process will always finish. 
+ while !vid_map.is_empty() { + let target = vid_map.keys().next().expect("Keys somehow empty").clone(); + let deps = vid_map.remove(&target).expect("Entry somehow missing"); + + for smaller in deps.smaller.iter() { + for larger in deps.larger.iter() { + match (smaller, larger) { + (&RegionTarget::Region(r1), &RegionTarget::Region(r2)) => { + if self.region_name(r1) != self.region_name(r2) { + finished + .entry(self.region_name(r2).unwrap()) + .or_insert_with(|| Vec::new()) + .push(r1) // Larger, smaller + } + } + (&RegionTarget::RegionVid(_), &RegionTarget::Region(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + } + (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let deps = v.into_mut(); + deps.smaller.insert(*smaller); + deps.smaller.remove(&target); + } + } + (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + } + } + } + } + + let lifetime_predicates = names_map + .iter() + .flat_map(|(name, lifetime)| { + let empty = Vec::new(); + let bounds: FxHashSet = finished + .get(name) + .unwrap_or(&empty) + .iter() + .map(|region| self.get_lifetime(region, names_map)) + .collect(); + + if bounds.is_empty() { + return None; + } + Some(WherePredicate::RegionPredicate { + lifetime: lifetime.clone(), + bounds: bounds.into_iter().collect(), + }) + }) + .collect(); + + lifetime_predicates + } + + fn extract_for_generics<'b, 'c, 'd>( + &self, + tcx: TyCtxt<'b, 'c, 'd>, + pred: ty::Predicate<'d>, + ) -> FxHashSet { + pred.walk_tys() + .flat_map(|t| { + let mut regions = FxHashSet(); + tcx.collect_regions(&t, &mut regions); + + regions.into_iter().flat_map(|r| { + match r { + // We only care about late bound regions, as we need to add them + // to the 'for<>' section + &ty::ReLateBound(_, ty::BoundRegion::BrNamed(_, name)) => { + Some(GenericParam::Lifetime(Lifetime(name.as_str().to_string()))) + } + &ty::ReVar(_) | &ty::ReEarlyBound(_) => None, + _ => panic!("Unexpected region type {:?}", r), + } + }) + }) + .collect() + } + + fn make_final_bounds<'b, 'c, 'cx>( + &self, + ty_to_bounds: FxHashMap>, + ty_to_fn: FxHashMap, Option)>, + lifetime_to_bounds: FxHashMap>, + ) -> Vec { + ty_to_bounds + .into_iter() + .flat_map(|(ty, mut bounds)| { + if let Some(data) = ty_to_fn.get(&ty) { + let (poly_trait, output) = + (data.0.as_ref().unwrap().clone(), data.1.as_ref().cloned()); + let new_ty = match &poly_trait.trait_ { + &Type::ResolvedPath { + ref path, + ref typarams, + ref did, + ref is_generic, + } => { + let mut new_path = path.clone(); + let last_segment = new_path.segments.pop().unwrap(); + + let (old_input, old_output) = match last_segment.params { + PathParameters::AngleBracketed { types, .. } => (types, None), + PathParameters::Parenthesized { inputs, output, .. 
} => { + (inputs, output) + } + }; + + if old_output.is_some() && old_output != output { + panic!( + "Output mismatch for {:?} {:?} {:?}", + ty, old_output, data.1 + ); + } + + let new_params = PathParameters::Parenthesized { + inputs: old_input, + output, + }; + + new_path.segments.push(PathSegment { + name: last_segment.name, + params: new_params, + }); + + Type::ResolvedPath { + path: new_path, + typarams: typarams.clone(), + did: did.clone(), + is_generic: *is_generic, + } + } + _ => panic!("Unexpected data: {:?}, {:?}", ty, data), + }; + bounds.insert(TyParamBound::TraitBound( + PolyTrait { + trait_: new_ty, + generic_params: poly_trait.generic_params, + }, + hir::TraitBoundModifier::None, + )); + } + if bounds.is_empty() { + return None; + } + + Some(WherePredicate::BoundPredicate { + ty, + bounds: bounds.into_iter().collect(), + }) + }) + .chain( + lifetime_to_bounds + .into_iter() + .filter(|&(_, ref bounds)| !bounds.is_empty()) + .map(|(lifetime, bounds)| WherePredicate::RegionPredicate { + lifetime, + bounds: bounds.into_iter().collect(), + }), + ) + .collect() + } + + // Converts the calculated ParamEnv and lifetime information to a clean::Generics, suitable for + // display on the docs page. Cleaning the Predicates produces sub-optimal WherePredicate's, + // so we fix them up: + // + // * Multiple bounds for the same type are coalesced into one: e.g. 'T: Copy', 'T: Debug' + // becomes 'T: Copy + Debug' + // * Fn bounds are handled specially - instead of leaving it as 'T: Fn(), = + // K', we use the dedicated syntax 'T: Fn() -> K' + // * We explicitly add a '?Sized' bound if we didn't find any 'Sized' predicates for a type + fn param_env_to_generics<'b, 'c, 'cx>( + &self, + tcx: TyCtxt<'b, 'c, 'cx>, + did: DefId, + param_env: ty::ParamEnv<'cx>, + type_generics: ty::Generics, + mut existing_predicates: Vec, + vid_to_region: FxHashMap>, + ) -> Generics { + debug!( + "param_env_to_generics(did={:?}, param_env={:?}, type_generics={:?}, \ + existing_predicates={:?})", + did, param_env, type_generics, existing_predicates + ); + + // The `Sized` trait must be handled specially, since we only display it when + // it is *not* required (i.e. '?Sized') + let sized_trait = self.cx + .tcx + .require_lang_item(lang_items::SizedTraitLangItem); + + let mut replacer = RegionReplacer { + vid_to_region: &vid_to_region, + tcx, + }; + + let orig_bounds: FxHashSet<_> = self.cx.tcx.param_env(did).caller_bounds.iter().collect(); + let clean_where_predicates = param_env + .caller_bounds + .iter() + .filter(|p| { + !orig_bounds.contains(p) || match p { + &&ty::Predicate::Trait(pred) => pred.def_id() == sized_trait, + _ => false, + } + }) + .map(|p| { + let replaced = p.fold_with(&mut replacer); + (replaced.clone(), replaced.clean(self.cx)) + }); + + let full_generics = (&type_generics, &tcx.predicates_of(did)); + let Generics { + params: mut generic_params, + .. + } = full_generics.clean(self.cx); + + let mut has_sized = FxHashSet(); + let mut ty_to_bounds = FxHashMap(); + let mut lifetime_to_bounds = FxHashMap(); + let mut ty_to_traits: FxHashMap> = FxHashMap(); + + let mut ty_to_fn: FxHashMap, Option)> = FxHashMap(); + + for (orig_p, p) in clean_where_predicates { + match p { + WherePredicate::BoundPredicate { ty, mut bounds } => { + // Writing a projection trait bound of the form + // ::Name : ?Sized + // is illegal, because ?Sized bounds can only + // be written in the (here, nonexistent) definition + // of the type.
+ // Therefore, we make sure that we never add a ?Sized + // bound for projections + match &ty { + &Type::QPath { .. } => { + has_sized.insert(ty.clone()); + } + _ => {} + } + + if bounds.is_empty() { + continue; + } + + let mut for_generics = self.extract_for_generics(tcx, orig_p.clone()); + + assert!(bounds.len() == 1); + let mut b = bounds.pop().unwrap(); + + if b.is_sized_bound(self.cx) { + has_sized.insert(ty.clone()); + } else if !b.get_trait_type() + .and_then(|t| { + ty_to_traits + .get(&ty) + .map(|bounds| bounds.contains(&strip_type(t.clone()))) + }) + .unwrap_or(false) + { + // If we've already added a projection bound for the same type, don't add + // this, as it would be a duplicate + + // Handle any 'Fn/FnOnce/FnMut' bounds specially, + // as we want to combine them with any 'Output' qpaths + // later + + let is_fn = match &mut b { + &mut TyParamBound::TraitBound(ref mut p, _) => { + // Insert regions into the for_generics hash map first, to ensure + // that we don't end up with duplicate bounds (e.g. for<'b, 'b>) + for_generics.extend(p.generic_params.clone()); + p.generic_params = for_generics.into_iter().collect(); + self.is_fn_ty(&tcx, &p.trait_) + } + _ => false, + }; + + let poly_trait = b.get_poly_trait().unwrap(); + + if is_fn { + ty_to_fn + .entry(ty.clone()) + .and_modify(|e| *e = (Some(poly_trait.clone()), e.1.clone())) + .or_insert(((Some(poly_trait.clone())), None)); + + ty_to_bounds + .entry(ty.clone()) + .or_insert_with(|| FxHashSet()); + } else { + ty_to_bounds + .entry(ty.clone()) + .or_insert_with(|| FxHashSet()) + .insert(b.clone()); + } + } + } + WherePredicate::RegionPredicate { lifetime, bounds } => { + lifetime_to_bounds + .entry(lifetime) + .or_insert_with(|| FxHashSet()) + .extend(bounds); + } + WherePredicate::EqPredicate { lhs, rhs } => { + match &lhs { + &Type::QPath { + name: ref left_name, + ref self_type, + ref trait_, + } => { + let ty = &*self_type; + match **trait_ { + Type::ResolvedPath { + path: ref trait_path, + ref typarams, + ref did, + ref is_generic, + } => { + let mut new_trait_path = trait_path.clone(); + + if self.is_fn_ty(&tcx, trait_) && left_name == FN_OUTPUT_NAME { + ty_to_fn + .entry(*ty.clone()) + .and_modify(|e| *e = (e.0.clone(), Some(rhs.clone()))) + .or_insert((None, Some(rhs))); + continue; + } + + // FIXME: Remove this scope when NLL lands + { + let params = + &mut new_trait_path.segments.last_mut().unwrap().params; + + match params { + // Convert somethiung like ' = u8' + // to 'T: Iterator' + &mut PathParameters::AngleBracketed { + ref mut bindings, + .. + } => { + bindings.push(TypeBinding { + name: left_name.clone(), + ty: rhs, + }); + } + &mut PathParameters::Parenthesized { .. } => { + existing_predicates.push( + WherePredicate::EqPredicate { + lhs: lhs.clone(), + rhs, + }, + ); + continue; // If something other than a Fn ends up + // with parenthesis, leave it alone + } + } + } + + let bounds = ty_to_bounds + .entry(*ty.clone()) + .or_insert_with(|| FxHashSet()); + + bounds.insert(TyParamBound::TraitBound( + PolyTrait { + trait_: Type::ResolvedPath { + path: new_trait_path, + typarams: typarams.clone(), + did: did.clone(), + is_generic: *is_generic, + }, + generic_params: Vec::new(), + }, + hir::TraitBoundModifier::None, + )); + + // Remove any existing 'plain' bound (e.g. 'T: Iterator`) so + // that we don't see a + // duplicate bound like `T: Iterator + Iterator` + // on the docs page. 
bounds.remove(&TyParamBound::TraitBound( + PolyTrait { + trait_: *trait_.clone(), + generic_params: Vec::new(), + }, + hir::TraitBoundModifier::None, + )); + // Avoid creating any new duplicate bounds later in the outer + // loop + ty_to_traits + .entry(*ty.clone()) + .or_insert_with(|| FxHashSet()) + .insert(*trait_.clone()); + } + _ => panic!("Unexpected trait {:?} for {:?}", trait_, did), + } + } + _ => panic!("Unexpected LHS {:?} for {:?}", lhs, did), + } + } + }; + } + + let final_bounds = self.make_final_bounds(ty_to_bounds, ty_to_fn, lifetime_to_bounds); + + existing_predicates.extend(final_bounds); + + for p in generic_params.iter_mut() { + match p { + &mut GenericParam::Type(ref mut ty) => { + // We never want something like 'impl' + ty.default.take(); + + let generic_ty = Type::Generic(ty.name.clone()); + + if !has_sized.contains(&generic_ty) { + ty.bounds.insert(0, TyParamBound::maybe_sized(self.cx)); + } + } + _ => {} + } + } + + self.sort_where_predicates(&mut existing_predicates); + + Generics { + params: generic_params, + where_predicates: existing_predicates, + } + } + + // Ensure that the predicates are in a consistent order. The precise + // ordering doesn't actually matter, but it's important that + // a given set of predicates always appears in the same order - + // both for visual consistency between 'rustdoc' runs, and to + // make writing tests much easier + fn sort_where_predicates(&self, predicates: &mut Vec) { + // We should never have identical bounds - and if we do, + // they're visually identical as well. Therefore, using + // an unstable sort is fine. + predicates.sort_unstable_by(|first, second| { + // This might look horrendously hacky, but it's actually not that bad. + // + // For performance reasons, we use several different FxHashMaps + // in the process of computing the final set of where predicates. + // However, the iteration order of a HashMap is completely unspecified. + // In fact, the iteration of an FxHashMap can even vary between platforms, + // since FxHasher has different behavior for 32-bit and 64-bit platforms. + // + // Obviously, it's extremely undesirable for documentation rendering + // to be dependent on the platform it's run on. Apart from being confusing + // to end users, it makes writing tests much more difficult, as predicates + // can appear in any order in the final result. + // + // To solve this problem, we sort WherePredicates by their Debug + // string. The thing to keep in mind is that we don't really + // care what the final order is - we're synthesizing an impl + // ourselves, so any order can be considered equally valid. + // By sorting the predicates, however, we ensure that for + // a given codebase, all auto-trait impls always render + // in exactly the same way. + // + // Using the Debug implementation for sorting prevents + // us from needing to write quite a bit of almost + // entirely useless code (e.g. how should two + // Types be sorted relative to each other). + // This approach is probably somewhat slower, but + // the small number of items involved (impls + // rarely have more than a few bounds) means + // that it shouldn't matter in practice. + format!("{:?}", first).cmp(&format!("{:?}", second)) + }); + } + + fn is_fn_ty(&self, tcx: &TyCtxt, ty: &Type) -> bool { + match &ty { + &&Type::ResolvedPath { ref did, ..
} => { + *did == tcx.require_lang_item(lang_items::FnTraitLangItem) + || *did == tcx.require_lang_item(lang_items::FnMutTraitLangItem) + || *did == tcx.require_lang_item(lang_items::FnOnceTraitLangItem) + } + _ => false, + } + } + + // This is an ugly hack, but it's the simplest way to handle synthetic impls without greatly + // refactoring either librustdoc or librustc. In particular, allowing new DefIds to be + // registered after the AST is constructed would require storing the defid mapping in a + // RefCell, decreasing the performance for normal compilation for very little gain. + // + // Instead, we construct 'fake' def ids, which start immediately after the last DefId in + // DefIndexAddressSpace::Low. In the Debug impl for clean::Item, we explicitly check for fake + // def ids, as we'll end up with a panic if we use the DefId Debug impl for fake DefIds + fn next_def_id(&self, crate_num: CrateNum) -> DefId { + let start_def_id = { + let next_id = if crate_num == LOCAL_CRATE { + self.cx + .tcx + .hir + .definitions() + .def_path_table() + .next_id(DefIndexAddressSpace::Low) + } else { + self.cx + .cstore + .def_path_table(crate_num) + .next_id(DefIndexAddressSpace::Low) + }; + + DefId { + krate: crate_num, + index: next_id, + } + }; + + let mut fake_ids = self.cx.fake_def_ids.borrow_mut(); + + let def_id = fake_ids.entry(crate_num).or_insert(start_def_id).clone(); + fake_ids.insert( + crate_num, + DefId { + krate: crate_num, + index: DefIndex::from_array_index( + def_id.index.as_array_index() + 1, + def_id.index.address_space(), + ), + }, + ); + + MAX_DEF_ID.with(|m| { + m.borrow_mut() + .entry(def_id.krate.clone()) + .or_insert(start_def_id); + }); + + self.cx.all_fake_def_ids.borrow_mut().insert(def_id); + + def_id.clone() + } +} + +// Replaces all ReVars in a type with ty::Region's, using the provided map +struct RegionReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { + vid_to_region: &'a FxHashMap>, + tcx: TyCtxt<'a, 'gcx, 'tcx>, +} + +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } + + fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { + (match r { + &ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(), + _ => None, + }).unwrap_or_else(|| r.super_fold_with(self)) + } +} diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index 5eb3e38d5b371..a769771f8aa88 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -25,7 +25,7 @@ use syntax_pos::Span; use html::escape::Escape; -#[derive(Clone, RustcEncodable, RustcDecodable, Debug, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq, Hash)] pub enum Cfg { /// Accepts all configurations. 
True, diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 9aba399b3b09f..d4233309627f5 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -12,8 +12,8 @@ use std::collections::BTreeMap; use std::io; -use std::iter::once; use std::rc::Rc; +use std::iter::once; use syntax::ast; use rustc::hir; @@ -25,7 +25,7 @@ use rustc::util::nodemap::FxHashSet; use core::{DocContext, DocAccessLevels}; use doctree; -use clean::{self, GetDefId}; +use clean::{self, GetDefId, get_auto_traits_with_def_id}; use super::Clean; @@ -50,7 +50,7 @@ pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name) let inner = match def { Def::Trait(did) => { record_extern_fqn(cx, did, clean::TypeKind::Trait); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::TraitItem(build_external_trait(cx, did)) } Def::Fn(did) => { @@ -59,27 +59,27 @@ pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name) } Def::Struct(did) => { record_extern_fqn(cx, did, clean::TypeKind::Struct); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::StructItem(build_struct(cx, did)) } Def::Union(did) => { record_extern_fqn(cx, did, clean::TypeKind::Union); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::UnionItem(build_union(cx, did)) } Def::TyAlias(did) => { record_extern_fqn(cx, did, clean::TypeKind::Typedef); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::TypedefItem(build_type_alias(cx, did), false) } Def::Enum(did) => { record_extern_fqn(cx, did, clean::TypeKind::Enum); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::EnumItem(build_enum(cx, did)) } Def::TyForeign(did) => { record_extern_fqn(cx, did, clean::TypeKind::Foreign); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::ForeignTypeItem } // Never inline enum variants but leave them shown as re-exports. @@ -125,6 +125,11 @@ pub fn load_attrs(cx: &DocContext, did: DefId) -> clean::Attributes { /// These names are used later on by HTML rendering to generate things like /// source links back to the original item. 
pub fn record_extern_fqn(cx: &DocContext, did: DefId, kind: clean::TypeKind) { + if did.is_local() { + debug!("record_extern_fqn(did={:?}, kind={:?}): def_id is local, aborting", did, kind); + return; + } + let crate_name = cx.tcx.crate_name(did.krate).to_string(); let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| { // extern blocks have an empty name @@ -144,6 +149,7 @@ pub fn record_extern_fqn(cx: &DocContext, did: DefId, kind: clean::TypeKind) { } pub fn build_external_trait(cx: &DocContext, did: DefId) -> clean::Trait { + let auto_trait = cx.tcx.trait_def(did).has_auto_impl; let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect(); let predicates = cx.tcx.predicates_of(did); let generics = (cx.tcx.generics_of(did), &predicates).clean(cx); @@ -152,6 +158,7 @@ pub fn build_external_trait(cx: &DocContext, did: DefId) -> clean::Trait { let is_spotlight = load_attrs(cx, did).has_doc_flag("spotlight"); let is_auto = cx.tcx.trait_is_auto(did); clean::Trait { + auto: auto_trait, unsafety: cx.tcx.trait_def(did).unsafety, generics, items: trait_items, @@ -227,7 +234,7 @@ fn build_type_alias(cx: &DocContext, did: DefId) -> clean::Typedef { } } -pub fn build_impls(cx: &DocContext, did: DefId) -> Vec { +pub fn build_impls(cx: &DocContext, did: DefId, auto_traits: bool) -> Vec { let tcx = cx.tcx; let mut impls = Vec::new(); @@ -235,6 +242,16 @@ pub fn build_impls(cx: &DocContext, did: DefId) -> Vec { build_impl(cx, did, &mut impls); } + if auto_traits { + let auto_impls = get_auto_traits_with_def_id(cx, did); + let mut renderinfo = cx.renderinfo.borrow_mut(); + + let new_impls: Vec = auto_impls.into_iter() + .filter(|i| renderinfo.inlined.insert(i.def_id)).collect(); + + impls.extend(new_impls); + } + // If this is the first time we've inlined something from another crate, then // we inline *all* impls from all the crates into this crate.
Note that there's // currently no way for us to filter this based on type, and we likely need @@ -347,13 +364,14 @@ pub fn build_impl(cx: &DocContext, did: DefId, ret: &mut Vec) { ret.push(clean::Item { inner: clean::ImplItem(clean::Impl { - unsafety: hir::Unsafety::Normal, // FIXME: this should be decoded + unsafety: hir::Unsafety::Normal, + generics: (tcx.generics_of(did), &predicates).clean(cx), provided_trait_methods: provided, trait_, for_, - generics: (tcx.generics_of(did), &predicates).clean(cx), items: trait_items, polarity: Some(polarity.clean(cx)), + synthetic: false, }), source: tcx.def_span(did).clean(cx), name: None, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 7f51b8f68ae49..4543b246b83ad 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -26,31 +26,41 @@ use syntax::codemap::Spanned; use syntax::feature_gate::UnstableFeatures; use syntax::ptr::P; use syntax::symbol::keywords; +use syntax::symbol::Symbol; use syntax_pos::{self, DUMMY_SP, Pos, FileName}; use rustc::middle::const_val::ConstVal; use rustc::middle::privacy::AccessLevels; use rustc::middle::resolve_lifetime as rl; +use rustc::ty::fold::TypeFolder; use rustc::middle::lang_items; -use rustc::hir::def::{Def, CtorKind}; -use rustc::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc::hir::{self, HirVec}; +use rustc::hir::def::{self, Def, CtorKind}; +use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc::hir::def_id::DefIndexAddressSpace; +use rustc::traits; use rustc::ty::subst::Substs; -use rustc::ty::{self, Ty, AdtKind}; +use rustc::ty::{self, TyCtxt, Region, RegionVid, Ty, AdtKind}; use rustc::middle::stability; use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_typeck::hir_ty_to_ty; - -use rustc::hir; +use rustc::infer::{InferCtxt, RegionObligation}; +use rustc::infer::region_constraints::{RegionConstraintData, Constraint}; +use rustc::traits::*; +use std::collections::hash_map::Entry; +use std::collections::VecDeque; +use std::fmt; use rustc_const_math::ConstInt; use std::default::Default; use std::{mem, slice, vec}; -use std::iter::FromIterator; +use std::iter::{FromIterator, once}; use std::rc::Rc; +use std::cell::RefCell; use std::sync::Arc; use std::u32; -use core::DocContext; +use core::{self, DocContext}; use doctree; use visit_ast; use html::item_type::ItemType; @@ -59,8 +69,14 @@ use html::markdown::markdown_links; pub mod inline; pub mod cfg; mod simplify; +mod auto_trait; use self::cfg::Cfg; +use self::auto_trait::AutoTraitFinder; + +thread_local!(static MAX_DEF_ID: RefCell> = RefCell::new(FxHashMap())); + +const FN_OUTPUT_NAME: &'static str = "Output"; // extract the stability index for a node from tcx, if possible fn get_stability(cx: &DocContext, def_id: DefId) -> Option { @@ -282,7 +298,7 @@ impl Clean for CrateNum { /// Anything with a source location and set of attributes and, optionally, a /// name. That is, anything that can be documented. This doesn't correspond /// directly to the AST's concept of an item; it's a strict superset. 
-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Item { /// Stringified span pub source: Span, @@ -296,6 +312,26 @@ pub struct Item { pub deprecation: Option, } +impl fmt::Debug for Item { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + + let fake = MAX_DEF_ID.with(|m| m.borrow().get(&self.def_id.krate) + .map(|id| self.def_id >= *id).unwrap_or(false)); + let def_id: &fmt::Debug = if fake { &"**FAKE**" } else { &self.def_id }; + + fmt.debug_struct("Item") + .field("source", &self.source) + .field("name", &self.name) + .field("attrs", &self.attrs) + .field("inner", &self.inner) + .field("visibility", &self.visibility) + .field("def_id", def_id) + .field("stability", &self.stability) + .field("deprecation", &self.deprecation) + .finish() + } +} + impl Item { /// Finds the `doc` attribute as a NameValue and returns the corresponding /// value found. @@ -492,9 +528,9 @@ impl Clean for doctree::Module { let mut items: Vec = vec![]; items.extend(self.extern_crates.iter().map(|x| x.clean(cx))); items.extend(self.imports.iter().flat_map(|x| x.clean(cx))); - items.extend(self.structs.iter().map(|x| x.clean(cx))); - items.extend(self.unions.iter().map(|x| x.clean(cx))); - items.extend(self.enums.iter().map(|x| x.clean(cx))); + items.extend(self.structs.iter().flat_map(|x| x.clean(cx))); + items.extend(self.unions.iter().flat_map(|x| x.clean(cx))); + items.extend(self.enums.iter().flat_map(|x| x.clean(cx))); items.extend(self.fns.iter().map(|x| x.clean(cx))); items.extend(self.foreigns.iter().flat_map(|x| x.clean(cx))); items.extend(self.mods.iter().map(|x| x.clean(cx))); @@ -601,7 +637,7 @@ impl> NestedAttributesExt for I { /// Included files are kept separate from inline doc comments so that proper line-number /// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are /// kept separate because of issue #42760. -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum DocFragment { // FIXME #44229 (misdreavus): sugared and raw doc comments can be brought back together once // hoedown is completely removed from rustdoc. @@ -653,7 +689,7 @@ impl<'a> FromIterator<&'a DocFragment> for String { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug, Default)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)] pub struct Attributes { pub doc_strings: Vec, pub other_attrs: Vec, @@ -1008,7 +1044,7 @@ fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option /// Resolve a string as a macro fn macro_resolve(cx: &DocContext, path_str: &str) -> Option { - use syntax::ext::base::MacroKind; + use syntax::ext::base::{MacroKind, SyntaxExtension}; use syntax::ext::hygiene::Mark; let segment = ast::PathSegment { identifier: ast::Ident::from_str(path_str), @@ -1025,7 +1061,11 @@ fn macro_resolve(cx: &DocContext, path_str: &str) -> Option { let res = resolver .resolve_macro_to_def_inner(mark, &path, MacroKind::Bang, false); if let Ok(def) = res { - Some(def) + if let SyntaxExtension::DeclMacro(..) 
= *resolver.get_macro(def) { + Some(def) + } else { + None + } } else if let Some(def) = resolver.all_macros.get(&path_str.into()) { Some(*def) } else { @@ -1177,7 +1217,7 @@ impl Clean for [ast::Attribute] { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct TyParam { pub name: String, pub did: DefId, @@ -1212,7 +1252,7 @@ impl<'tcx> Clean for ty::TypeParameterDef { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum TyParamBound { RegionBound(Lifetime), TraitBound(PolyTrait, hir::TraitBoundModifier) @@ -1245,6 +1285,21 @@ impl TyParamBound { } false } + + fn get_poly_trait(&self) -> Option { + if let TyParamBound::TraitBound(ref p, _) = *self { + return Some(p.clone()) + } + None + } + + fn get_trait_type(&self) -> Option { + + if let TyParamBound::TraitBound(PolyTrait { ref trait_, .. }, _) = *self { + return Some(trait_.clone()); + } + None + } } impl Clean for hir::TyParamBound { @@ -1363,7 +1418,7 @@ impl<'tcx> Clean>> for Substs<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Lifetime(String); impl Lifetime { @@ -1380,17 +1435,19 @@ impl Lifetime { impl Clean for hir::Lifetime { fn clean(&self, cx: &DocContext) -> Lifetime { - let hir_id = cx.tcx.hir.node_to_hir_id(self.id); - let def = cx.tcx.named_region(hir_id); - match def { - Some(rl::Region::EarlyBound(_, node_id, _)) | - Some(rl::Region::LateBound(_, node_id, _)) | - Some(rl::Region::Free(_, node_id)) => { - if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() { - return lt; + if self.id != ast::DUMMY_NODE_ID { + let hir_id = cx.tcx.hir.node_to_hir_id(self.id); + let def = cx.tcx.named_region(hir_id); + match def { + Some(rl::Region::EarlyBound(_, node_id, _)) | + Some(rl::Region::LateBound(_, node_id, _)) | + Some(rl::Region::Free(_, node_id)) => { + if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() { + return lt; + } } + _ => {} } - _ => {} } Lifetime(self.name.name().to_string()) } @@ -1437,7 +1494,7 @@ impl Clean> for ty::RegionKind { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum WherePredicate { BoundPredicate { ty: Type, bounds: Vec }, RegionPredicate { lifetime: Lifetime, bounds: Vec}, @@ -1562,7 +1619,7 @@ impl<'tcx> Clean for ty::ProjectionTy<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum GenericParam { Lifetime(Lifetime), Type(TyParam), @@ -1577,7 +1634,8 @@ impl Clean for hir::GenericParam { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug, Default)] +// maybe use a Generic enum and use Vec? 
+#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)] pub struct Generics { pub params: Vec, pub where_predicates: Vec, @@ -1747,7 +1805,7 @@ impl Clean for doctree::Function { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct FnDecl { pub inputs: Arguments, pub output: FunctionRetTy, @@ -1765,7 +1823,7 @@ impl FnDecl { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Arguments { pub values: Vec, } @@ -1840,7 +1898,7 @@ impl<'a, 'tcx> Clean for (DefId, ty::PolyFnSig<'tcx>) { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Argument { pub type_: Type, pub name: String, @@ -1870,7 +1928,7 @@ impl Argument { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum FunctionRetTy { Return(Type), DefaultReturn, @@ -1896,6 +1954,7 @@ impl GetDefId for FunctionRetTy { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Trait { + pub auto: bool, pub unsafety: hir::Unsafety, pub items: Vec, pub generics: Generics, @@ -1917,6 +1976,7 @@ impl Clean for doctree::Trait { stability: self.stab.clean(cx), deprecation: self.depr.clean(cx), inner: TraitItem(Trait { + auto: self.is_auto.clean(cx), unsafety: self.unsafety, items: self.items.clean(cx), generics: self.generics.clean(cx), @@ -2158,7 +2218,7 @@ impl<'tcx> Clean for ty::AssociatedItem { } /// A trait reference, which may have higher ranked lifetimes. -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct PolyTrait { pub trait_: Type, pub generic_params: Vec, @@ -2167,7 +2227,7 @@ pub struct PolyTrait { /// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original /// type out of the AST/TyCtxt given one of these, if more information is needed. Most importantly /// it does not preserve mutability or boxes. 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum Type { /// structs/enums/traits (most that'd be an hir::TyPath) ResolvedPath { @@ -2782,10 +2842,13 @@ pub struct Union { pub fields_stripped: bool, } -impl Clean for doctree::Struct { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean> for doctree::Struct { + fn clean(&self, cx: &DocContext) -> Vec { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2798,14 +2861,19 @@ impl Clean for doctree::Struct { fields: self.fields.clean(cx), fields_stripped: false, }), - } + }); + + ret } } -impl Clean for doctree::Union { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean> for doctree::Union { + fn clean(&self, cx: &DocContext) -> Vec { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2818,7 +2886,9 @@ impl Clean for doctree::Union { fields: self.fields.clean(cx), fields_stripped: false, }), - } + }); + + ret } } @@ -2849,10 +2919,13 @@ pub struct Enum { pub variants_stripped: bool, } -impl Clean for doctree::Enum { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean> for doctree::Enum { + fn clean(&self, cx: &DocContext) -> Vec { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2864,7 +2937,9 @@ impl Clean for doctree::Enum { generics: self.generics.clean(cx), variants_stripped: false, }), - } + }); + + ret } } @@ -2989,7 +3064,7 @@ impl Clean for syntax_pos::Span { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Path { pub global: bool, pub def: Def, @@ -3006,7 +3081,7 @@ impl Path { params: PathParameters::AngleBracketed { lifetimes: Vec::new(), types: Vec::new(), - bindings: Vec::new() + bindings: Vec::new(), } }] } @@ -3027,7 +3102,7 @@ impl Clean for hir::Path { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum PathParameters { AngleBracketed { lifetimes: Vec, @@ -3062,7 +3137,7 @@ impl Clean for hir::PathParameters { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct PathSegment { pub name: String, pub params: PathParameters, @@ -3077,6 +3152,50 @@ impl Clean for hir::PathSegment { } } +fn strip_type(ty: Type) -> Type { + match ty { + Type::ResolvedPath { path, typarams, did, is_generic } => { + Type::ResolvedPath { path: strip_path(&path), typarams, did, is_generic } + } + Type::Tuple(inner_tys) => { + Type::Tuple(inner_tys.iter().map(|t| strip_type(t.clone())).collect()) + } + Type::Slice(inner_ty) => Type::Slice(Box::new(strip_type(*inner_ty))), + 
Type::Array(inner_ty, s) => Type::Array(Box::new(strip_type(*inner_ty)), s), + Type::Unique(inner_ty) => Type::Unique(Box::new(strip_type(*inner_ty))), + Type::RawPointer(m, inner_ty) => Type::RawPointer(m, Box::new(strip_type(*inner_ty))), + Type::BorrowedRef { lifetime, mutability, type_ } => { + Type::BorrowedRef { lifetime, mutability, type_: Box::new(strip_type(*type_)) } + } + Type::QPath { name, self_type, trait_ } => { + Type::QPath { + name, + self_type: Box::new(strip_type(*self_type)), trait_: Box::new(strip_type(*trait_)) + } + } + _ => ty + } +} + +fn strip_path(path: &Path) -> Path { + let segments = path.segments.iter().map(|s| { + PathSegment { + name: s.name.clone(), + params: PathParameters::AngleBracketed { + lifetimes: Vec::new(), + types: Vec::new(), + bindings: Vec::new(), + } + } + }).collect(); + + Path { + global: path.global, + def: path.def.clone(), + segments, + } +} + fn qpath_to_string(p: &hir::QPath) -> String { let segments = match *p { hir::QPath::Resolved(_, ref path) => &path.segments, @@ -3125,7 +3244,7 @@ impl Clean for doctree::Typedef { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct BareFunctionDecl { pub unsafety: hir::Unsafety, pub generic_params: Vec, @@ -3198,7 +3317,7 @@ impl Clean for doctree::Constant { } } -#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Copy)] +#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Hash)] pub enum Mutability { Mutable, Immutable, @@ -3213,7 +3332,7 @@ impl Clean for hir::Mutability { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Copy, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Debug, Hash)] pub enum ImplPolarity { Positive, Negative, @@ -3237,6 +3356,20 @@ pub struct Impl { pub for_: Type, pub items: Vec, pub polarity: Option, + pub synthetic: bool, +} + +pub fn get_auto_traits_with_node_id(cx: &DocContext, id: ast::NodeId, name: String) -> Vec { + let finder = AutoTraitFinder { cx }; + finder.get_with_node_id(id, name) +} + +pub fn get_auto_traits_with_def_id(cx: &DocContext, id: DefId) -> Vec { + let finder = AutoTraitFinder { + cx, + }; + + finder.get_with_def_id(id) } impl Clean> for doctree::Impl { @@ -3274,7 +3407,8 @@ impl Clean> for doctree::Impl { for_: self.for_.clean(cx), items, polarity: Some(self.polarity.clean(cx)), - }), + synthetic: false, + }) }); ret } @@ -3294,7 +3428,7 @@ fn build_deref_target_impls(cx: &DocContext, let primitive = match *target { ResolvedPath { did, .. } if did.is_local() => continue, ResolvedPath { did, .. } => { - ret.extend(inline::build_impls(cx, did)); + ret.extend(inline::build_impls(cx, did, true)); continue } _ => match target.primitive_type() { @@ -3514,7 +3648,11 @@ fn print_const_expr(cx: &DocContext, body: hir::BodyId) -> String { fn resolve_type(cx: &DocContext, path: Path, id: ast::NodeId) -> Type { - debug!("resolve_type({:?},{:?})", path, id); + if id == ast::DUMMY_NODE_ID { + debug!("resolve_type({:?})", path); + } else { + debug!("resolve_type({:?},{:?})", path, id); + } let is_generic = match path.def { Def::PrimTy(p) => match p { @@ -3669,7 +3807,7 @@ impl Clean for attr::Deprecation { } /// An equality constraint on an associated type, e.g. 
`A=Bar` in `Foo` -#[derive(Clone, PartialEq, RustcDecodable, RustcEncodable, Debug)] +#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)] pub struct TypeBinding { pub name: String, pub ty: Type @@ -3683,3 +3821,182 @@ impl Clean for hir::TypeBinding { } } } + +pub fn def_id_to_path(cx: &DocContext, did: DefId, name: Option) -> Vec { + let crate_name = name.unwrap_or_else(|| cx.tcx.crate_name(did.krate).to_string()); + let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| { + // extern blocks have an empty name + let s = elem.data.to_string(); + if !s.is_empty() { + Some(s) + } else { + None + } + }); + once(crate_name).chain(relative).collect() +} + +// Start of code copied from rust-clippy + +pub fn get_trait_def_id(tcx: &TyCtxt, path: &[&str], use_local: bool) -> Option { + if use_local { + path_to_def_local(tcx, path) + } else { + path_to_def(tcx, path) + } +} + +pub fn path_to_def_local(tcx: &TyCtxt, path: &[&str]) -> Option { + let krate = tcx.hir.krate(); + let mut items = krate.module.item_ids.clone(); + let mut path_it = path.iter().peekable(); + + loop { + let segment = match path_it.next() { + Some(segment) => segment, + None => return None, + }; + + for item_id in mem::replace(&mut items, HirVec::new()).iter() { + let item = tcx.hir.expect_item(item_id.id); + if item.name == *segment { + if path_it.peek().is_none() { + return Some(tcx.hir.local_def_id(item_id.id)) + } + + items = match &item.node { + &hir::ItemMod(ref m) => m.item_ids.clone(), + _ => panic!("Unexpected item {:?} in path {:?} path") + }; + break; + } + } + } +} + +pub fn path_to_def(tcx: &TyCtxt, path: &[&str]) -> Option { + let crates = tcx.crates(); + + let krate = crates + .iter() + .find(|&&krate| tcx.crate_name(krate) == path[0]); + + if let Some(krate) = krate { + let krate = DefId { + krate: *krate, + index: CRATE_DEF_INDEX, + }; + let mut items = tcx.item_children(krate); + let mut path_it = path.iter().skip(1).peekable(); + + loop { + let segment = match path_it.next() { + Some(segment) => segment, + None => return None, + }; + + for item in mem::replace(&mut items, Rc::new(vec![])).iter() { + if item.ident.name == *segment { + if path_it.peek().is_none() { + return match item.def { + def::Def::Trait(did) => Some(did), + _ => None, + } + } + + items = tcx.item_children(item.def.def_id()); + break; + } + } + } + } else { + None + } +} + +fn get_path_for_type(tcx: TyCtxt, def_id: DefId, def_ctor: fn(DefId) -> Def) -> hir::Path { + struct AbsolutePathBuffer { + names: Vec, + } + + impl ty::item_path::ItemPathBuffer for AbsolutePathBuffer { + fn root_mode(&self) -> &ty::item_path::RootMode { + const ABSOLUTE: &'static ty::item_path::RootMode = &ty::item_path::RootMode::Absolute; + ABSOLUTE + } + + fn push(&mut self, text: &str) { + self.names.push(text.to_owned()); + } + } + + let mut apb = AbsolutePathBuffer { names: vec![] }; + + tcx.push_item_path(&mut apb, def_id); + + hir::Path { + span: DUMMY_SP, + def: def_ctor(def_id), + segments: hir::HirVec::from_vec(apb.names.iter().map(|s| hir::PathSegment { + name: ast::Name::intern(&s), + parameters: None, + infer_types: false, + }).collect()) + } +} + +// End of code copied from rust-clippy + + +#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)] +enum RegionTarget<'tcx> { + Region(Region<'tcx>), + RegionVid(RegionVid) +} + +#[derive(Default, Debug, Clone)] +struct RegionDeps<'tcx> { + larger: FxHashSet>, + smaller: FxHashSet> +} + +#[derive(Eq, PartialEq, Hash, Debug)] +enum SimpleBound { + RegionBound(Lifetime), + 
TraitBound(Vec, Vec, Vec, hir::TraitBoundModifier) +} + +enum AutoTraitResult { + ExplicitImpl, + PositiveImpl(Generics), + NegativeImpl, +} + +impl AutoTraitResult { + fn is_auto(&self) -> bool { + match *self { + AutoTraitResult::PositiveImpl(_) | AutoTraitResult::NegativeImpl => true, + _ => false, + } + } +} + +impl From for SimpleBound { + fn from(bound: TyParamBound) -> Self { + match bound.clone() { + TyParamBound::RegionBound(l) => SimpleBound::RegionBound(l), + TyParamBound::TraitBound(t, mod_) => match t.trait_ { + Type::ResolvedPath { path, typarams, .. } => { + SimpleBound::TraitBound(path.segments, + typarams + .map_or_else(|| Vec::new(), |v| v.iter() + .map(|p| SimpleBound::from(p.clone())) + .collect()), + t.generic_params, + mod_) + } + _ => panic!("Unexpected bound {:?}", bound), + } + } + } +} diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 81babd803a5e9..df7371cdf817b 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -11,13 +11,14 @@ use rustc_lint; use rustc_driver::{self, driver, target_features, abort_on_err}; use rustc::session::{self, config}; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{DefId, CrateNum}; use rustc::hir::def::Def; +use rustc::middle::cstore::CrateStore; use rustc::middle::privacy::AccessLevels; use rustc::ty::{self, TyCtxt, AllArenas}; use rustc::hir::map as hir_map; use rustc::lint; -use rustc::util::nodemap::FxHashMap; +use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_resolve as resolve; use rustc_metadata::creader::CrateLoader; use rustc_metadata::cstore::CStore; @@ -48,6 +49,8 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> { pub resolver: &'a RefCell>, /// The stack of module NodeIds up till this point pub mod_ids: RefCell>, + pub crate_name: Option, + pub cstore: Rc, pub populated_all_crate_impls: Cell, // Note that external items for which `doc(hidden)` applies to are shown as // non-reachable while local items aren't. This is because we're reusing @@ -65,6 +68,11 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> { pub ty_substs: RefCell>, /// Table node id of lifetime parameter definition -> substituted lifetime pub lt_substs: RefCell>, + pub send_trait: Option, + pub fake_def_ids: RefCell>, + pub all_fake_def_ids: RefCell>, + /// Maps (type_id, trait_id) -> auto trait impl + pub generated_synthetics: RefCell> } impl<'a, 'tcx, 'rcx> DocContext<'a, 'tcx, 'rcx> { @@ -107,6 +115,7 @@ pub fn run_core(search_paths: SearchPaths, triple: Option, maybe_sysroot: Option, allow_warnings: bool, + crate_name: Option, force_unstable_if_unmarked: bool) -> (clean::Crate, RenderInfo) { // Parse, resolve, and typecheck the given crate. 
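Editor's aside: the `fake_def_ids` and `all_fake_def_ids` fields added to `DocContext` above are, conceptually, per-crate counters that hand out identifiers past the range of real ones (see `next_def_id` earlier in this patch). The stand-alone sketch below models only that allocation scheme; it is not part of the patch, and the names and integer types stand in for `CrateNum`/`DefId` purely for illustration.

use std::collections::HashMap;

// Hand out a fresh identifier per call, starting at the first index that no
// real item uses, with an independent counter for each crate.
fn next_fake_id(counters: &mut HashMap<u32, u64>, crate_num: u32, first_free: u64) -> u64 {
    let counter = counters.entry(crate_num).or_insert(first_free);
    let id = *counter;
    *counter += 1;
    id
}

fn main() {
    let mut counters = HashMap::new();
    assert_eq!(next_fake_id(&mut counters, 0, 100), 100);
    assert_eq!(next_fake_id(&mut counters, 0, 100), 101);
    // Counters are tracked independently per crate.
    assert_eq!(next_fake_id(&mut counters, 1, 7), 7);
    println!("ok");
}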
@@ -230,9 +239,17 @@ pub fn run_core(search_paths: SearchPaths, .collect() }; + let send_trait = if crate_name == Some("core".to_string()) { + clean::get_trait_def_id(&tcx, &["marker", "Send"], true) + } else { + clean::get_trait_def_id(&tcx, &["core", "marker", "Send"], false) + }; + let ctxt = DocContext { tcx, resolver: &resolver, + crate_name, + cstore: cstore.clone(), populated_all_crate_impls: Cell::new(false), access_levels: RefCell::new(access_levels), external_traits: Default::default(), @@ -240,6 +257,10 @@ pub fn run_core(search_paths: SearchPaths, ty_substs: Default::default(), lt_substs: Default::default(), mod_ids: Default::default(), + send_trait: send_trait, + fake_def_ids: RefCell::new(FxHashMap()), + all_fake_def_ids: RefCell::new(FxHashSet()), + generated_synthetics: RefCell::new(FxHashSet()), }; debug!("crate: {:?}", tcx.hir.krate()); diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 430236f30c4ef..413e5623118ac 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -210,6 +210,7 @@ pub struct Trait { pub depr: Option, } +#[derive(Debug)] pub struct Impl { pub unsafety: hir::Unsafety, pub polarity: hir::ImplPolarity, diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index fedd802ce557f..ae28e5a0923ea 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -471,18 +471,21 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector, position: Sp break 'main; } } - let offset = offset.unwrap_or(0); - let lines = test_s.lines().map(|l| map_line(l).for_code()); - let text = lines.collect::>().join("\n"); - nb_lines += doc[prev_offset..offset].lines().count(); - let line = tests.get_line() + (nb_lines - 1); - let filename = tests.get_filename(); - tests.add_test(text.to_owned(), - block_info.should_panic, block_info.no_run, - block_info.ignore, block_info.test_harness, - block_info.compile_fail, block_info.error_codes, - line, filename, block_info.allow_fail); - prev_offset = offset; + if let Some(offset) = offset { + let lines = test_s.lines().map(|l| map_line(l).for_code()); + let text = lines.collect::>().join("\n"); + nb_lines += doc[prev_offset..offset].lines().count(); + let line = tests.get_line() + (nb_lines - 1); + let filename = tests.get_filename(); + tests.add_test(text.to_owned(), + block_info.should_panic, block_info.no_run, + block_info.ignore, block_info.test_harness, + block_info.compile_fail, block_info.error_codes, + line, filename, block_info.allow_fail); + prev_offset = offset; + } else { + break; + } } Event::Start(Tag::Header(level)) => { register_header = Some(level as u32); @@ -591,7 +594,15 @@ impl<'a> fmt::Display for Markdown<'a> { opts.insert(OPTION_ENABLE_TABLES); opts.insert(OPTION_ENABLE_FOOTNOTES); - let p = Parser::new_ext(md, opts); + let replacer = |_: &str, s: &str| { + if let Some(&(_, ref replace)) = links.into_iter().find(|link| &*link.0 == s) { + Some((replace.clone(), s.to_owned())) + } else { + None + } + }; + + let p = Parser::new_with_broken_link_callback(md, opts, Some(&replacer)); let mut s = String::with_capacity(md.len() * 3 / 2); @@ -662,7 +673,16 @@ impl<'a> fmt::Display for MarkdownSummaryLine<'a> { // This is actually common enough to special-case if md.is_empty() { return Ok(()) } - let p = Parser::new(md); + let replacer = |_: &str, s: &str| { + if let Some(&(_, ref replace)) = links.into_iter().find(|link| &*link.0 == s) { + Some((replace.clone(), s.to_owned())) + } else { + None + } + }; + + let p = 
Parser::new_with_broken_link_callback(md, Options::empty(), + Some(&replacer)); let mut s = String::new(); @@ -731,18 +751,30 @@ pub fn markdown_links(md: &str) -> Vec { opts.insert(OPTION_ENABLE_TABLES); opts.insert(OPTION_ENABLE_FOOTNOTES); - let p = Parser::new_ext(md, opts); - - let iter = Footnotes::new(HeadingLinks::new(p, None)); let mut links = vec![]; + let shortcut_links = RefCell::new(vec![]); - for ev in iter { - if let Event::Start(Tag::Link(dest, _)) = ev { - debug!("found link: {}", dest); - links.push(dest.into_owned()); + { + let push = |_: &str, s: &str| { + shortcut_links.borrow_mut().push(s.to_owned()); + None + }; + let p = Parser::new_with_broken_link_callback(md, opts, + Some(&push)); + + let iter = Footnotes::new(HeadingLinks::new(p, None)); + + for ev in iter { + if let Event::Start(Tag::Link(dest, _)) = ev { + debug!("found link: {}", dest); + links.push(dest.into_owned()); + } } } + let mut shortcut_links = shortcut_links.into_inner(); + links.extend(shortcut_links.drain(..)); + links } diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index d6025920e78b0..e7bb0b03ce248 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -37,7 +37,7 @@ pub use self::ExternalLocation::*; use std::borrow::Cow; use std::cell::RefCell; use std::cmp::Ordering; -use std::collections::{BTreeMap, HashSet}; +use std::collections::{BTreeMap, HashSet, VecDeque}; use std::default::Default; use std::error; use std::fmt::{self, Display, Formatter, Write as FmtWrite}; @@ -270,6 +270,18 @@ pub struct Cache { /// generating explicit hyperlinks to other crates. pub external_paths: FxHashMap, ItemType)>, + /// Maps local def ids of exported types to fully qualified paths. + /// Unlike 'paths', this mapping ignores any renames that occur + /// due to 'use' statements. + /// + /// This map is used when writing out the special 'implementors' + /// javascript file. By using the exact path that the type + /// is declared with, we ensure that each path will be identical + /// to the path used if the corresponding type is inlined. By + /// doing this, we can detect duplicate impls on a trait page, and only display + /// the impl for the inlined type. + pub exact_paths: FxHashMap>, + /// This map contains information about all known traits of this crate. /// Implementations of a crate should inherit the documentation of the /// parent trait if no extra documentation is specified, and default methods @@ -322,6 +334,7 @@ pub struct RenderInfo { pub inlined: FxHashSet, pub external_paths: ::core::ExternalPaths, pub external_typarams: FxHashMap, + pub exact_paths: FxHashMap>, pub deref_trait_did: Option, pub deref_mut_trait_did: Option, pub owned_box_did: Option, @@ -436,7 +449,9 @@ fn init_ids() -> FxHashMap { "required-methods", "provided-methods", "implementors", + "synthetic-implementors", "implementors-list", + "synthetic-implementors-list", "methods", "deref-methods", "implementations", @@ -556,6 +571,7 @@ pub fn run(mut krate: clean::Crate, inlined: _, external_paths, external_typarams, + exact_paths, deref_trait_did, deref_mut_trait_did, owned_box_did, @@ -568,6 +584,7 @@ pub fn run(mut krate: clean::Crate, let mut cache = Cache { impls: FxHashMap(), external_paths, + exact_paths, paths: FxHashMap(), implementors: FxHashMap(), stack: Vec::new(), @@ -873,7 +890,10 @@ themePicker.onclick = function() {{ // should add it. 
if !imp.impl_item.def_id.is_local() { continue } have_impls = true; - write!(implementors, "{},", as_json(&imp.inner_impl().to_string())).unwrap(); + write!(implementors, "{{text:{},synthetic:{},types:{}}},", + as_json(&imp.inner_impl().to_string()), + imp.inner_impl().synthetic, + as_json(&collect_paths_for_type(imp.inner_impl().for_.clone()))).unwrap(); } implementors.push_str("];"); @@ -1856,8 +1876,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, items: &[clean::Item]) -> fmt::Result { document(w, cx, item)?; - let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()) - .collect::>(); + let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::>(); // the order of item types in the listing fn reorder(ty: ItemType) -> u8 { @@ -2201,6 +2220,50 @@ fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, document(w, cx, it) } +fn render_implementor(cx: &Context, implementor: &Impl, w: &mut fmt::Formatter, + implementor_dups: &FxHashMap<&str, (DefId, bool)>) -> Result<(), fmt::Error> { + write!(w, "
  • ")?; + if let Some(l) = (Item { cx, item: &implementor.impl_item }).src_href() { + write!(w, "
    ")?; + write!(w, "[src]", + l, "goto source code")?; + write!(w, "
    ")?; + } + write!(w, "")?; + // If there's already another implementor that has the same abbridged name, use the + // full path, for example in `std::iter::ExactSizeIterator` + let use_absolute = match implementor.inner_impl().for_ { + clean::ResolvedPath { ref path, is_generic: false, .. } | + clean::BorrowedRef { + type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, + .. + } => implementor_dups[path.last_name()].1, + _ => false, + }; + fmt_impl_for_trait_page(&implementor.inner_impl(), w, use_absolute)?; + for it in &implementor.inner_impl().items { + if let clean::TypedefItem(ref tydef, _) = it.inner { + write!(w, " ")?; + assoc_type(w, it, &vec![], Some(&tydef.type_), AssocItemLink::Anchor(None))?; + write!(w, ";")?; + } + } + writeln!(w, "
  • ")?; + Ok(()) +} + +fn render_impls(cx: &Context, w: &mut fmt::Formatter, + traits: Vec<&&Impl>, + containing_item: &clean::Item) -> Result<(), fmt::Error> { + for i in &traits { + let did = i.trait_did().unwrap(); + let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); + render_impl(w, cx, i, assoc_link, + RenderMode::Normal, containing_item.stable_since(), true)?; + } + Ok(()) +} + fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, t: &clean::Trait) -> fmt::Result { let mut bounds = String::new(); @@ -2380,6 +2443,16 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
      "; + + let synthetic_impl_header = " +

      + Auto implementors +

      +
        + "; + + let mut synthetic_types = Vec::new(); + if let Some(implementors) = cache.implementors.get(&it.def_id) { // The DefId is for the first Type found with that name. The bool is // if any Types with the same name but different DefId have been found. @@ -2405,6 +2478,11 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, .partition::, _>(|i| i.inner_impl().for_.def_id() .map_or(true, |d| cache.paths.contains_key(&d))); + + let (synthetic, concrete) = local.iter() + .partition::, _>(|i| i.inner_impl().synthetic); + + if !foreign.is_empty() { write!(w, "

        @@ -2422,42 +2500,35 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, } write!(w, "{}", impl_header)?; + for implementor in concrete { + render_implementor(cx, implementor, w, &implementor_dups)?; + } + write!(w, "

      ")?; - for implementor in local { - write!(w, "
    • ")?; - if let Some(l) = (Item { cx, item: &implementor.impl_item }).src_href() { - write!(w, "
      ")?; - write!(w, "[src]", - l, "goto source code")?; - write!(w, "
      ")?; - } - write!(w, "")?; - // If there's already another implementor that has the same abbridged name, use the - // full path, for example in `std::iter::ExactSizeIterator` - let use_absolute = match implementor.inner_impl().for_ { - clean::ResolvedPath { ref path, is_generic: false, .. } | - clean::BorrowedRef { - type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, - .. - } => implementor_dups[path.last_name()].1, - _ => false, - }; - fmt_impl_for_trait_page(&implementor.inner_impl(), w, use_absolute)?; - for it in &implementor.inner_impl().items { - if let clean::TypedefItem(ref tydef, _) = it.inner { - write!(w, " ")?; - assoc_type(w, it, &vec![], Some(&tydef.type_), AssocItemLink::Anchor(None))?; - write!(w, ";")?; - } + if t.auto { + write!(w, "{}", synthetic_impl_header)?; + for implementor in synthetic { + synthetic_types.extend( + collect_paths_for_type(implementor.inner_impl().for_.clone()) + ); + render_implementor(cx, implementor, w, &implementor_dups)?; } - writeln!(w, "
    • ")?; + write!(w, "
    ")?; } } else { // even without any implementations to write in, we still want the heading and list, so the // implementors javascript file pulled in below has somewhere to write the impls into write!(w, "{}", impl_header)?; + write!(w, "")?; + + if t.auto { + write!(w, "{}", synthetic_impl_header)?; + write!(w, "")?; + } } - write!(w, "")?; + write!(w, r#""#, + as_json(&synthetic_types))?; + write!(w, r#""#, @@ -3075,17 +3146,28 @@ fn render_assoc_items(w: &mut fmt::Formatter, }).is_some(); render_deref_methods(w, cx, impl_, containing_item, has_deref_mut)?; } + + let (synthetic, concrete) = traits + .iter() + .partition::, _>(|t| t.inner_impl().synthetic); + write!(w, "

    Trait Implementations

    +
    ")?; - for i in &traits { - let did = i.trait_did().unwrap(); - let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); - render_impl(w, cx, i, assoc_link, - RenderMode::Normal, containing_item.stable_since(), true)?; - } + render_impls(cx, w, concrete, containing_item)?; + write!(w, "
    ")?; + + write!(w, " +

    + Auto Trait Implementations +

    +
    + ")?; + render_impls(cx, w, synthetic, containing_item)?; + write!(w, "
    ")?; } Ok(()) } @@ -3586,32 +3668,48 @@ fn sidebar_assoc_items(it: &clean::Item) -> String { } } } - let mut links = HashSet::new(); - let ret = v.iter() - .filter_map(|i| { - let is_negative_impl = is_negative_impl(i.inner_impl()); - if let Some(ref i) = i.inner_impl().trait_ { - let i_display = format!("{:#}", i); - let out = Escape(&i_display); - let encoded = small_url_encode(&format!("{:#}", i)); - let generated = format!("{}{}", - encoded, - if is_negative_impl { "!" } else { "" }, - out); - if !links.contains(&generated) && links.insert(generated.clone()) { - Some(generated) + let format_impls = |impls: Vec<&Impl>| { + let mut links = HashSet::new(); + impls.iter() + .filter_map(|i| { + let is_negative_impl = is_negative_impl(i.inner_impl()); + if let Some(ref i) = i.inner_impl().trait_ { + let i_display = format!("{:#}", i); + let out = Escape(&i_display); + let encoded = small_url_encode(&format!("{:#}", i)); + let generated = format!("{}{}", + encoded, + if is_negative_impl { "!" } else { "" }, + out); + if links.insert(generated.clone()) { + Some(generated) + } else { + None + } } else { None } - } else { - None - } - }) - .collect::(); - if !ret.is_empty() { + }) + .collect::() + }; + + let (synthetic, concrete) = v + .iter() + .partition::, _>(|i| i.inner_impl().synthetic); + + let concrete_format = format_impls(concrete); + let synthetic_format = format_impls(synthetic); + + if !concrete_format.is_empty() { out.push_str("\ Trait Implementations"); - out.push_str(&format!("
    {}
    ", ret)); + out.push_str(&format!("
    {}
    ", concrete_format)); + } + + if !synthetic_format.is_empty() { + out.push_str("\ + Auto Trait Implementations"); + out.push_str(&format!("
    {}
    ", synthetic_format)); } } } @@ -3734,7 +3832,7 @@ fn sidebar_trait(fmt: &mut fmt::Formatter, it: &clean::Item, if let Some(implementors) = c.implementors.get(&it.def_id) { let res = implementors.iter() .filter(|i| i.inner_impl().for_.def_id() - .map_or(false, |d| !c.paths.contains_key(&d))) + .map_or(false, |d| !c.paths.contains_key(&d))) .filter_map(|i| { match extract_for_impl_name(&i.impl_item) { Some((ref name, ref url)) => { @@ -3755,6 +3853,10 @@ fn sidebar_trait(fmt: &mut fmt::Formatter, it: &clean::Item, } sidebar.push_str("Implementors"); + if t.auto { + sidebar.push_str("Auto Implementors"); + } sidebar.push_str(&sidebar_assoc_items(it)); @@ -3969,6 +4071,66 @@ fn get_index_type(clean_type: &clean::Type) -> Type { t } +/// Returns a list of all paths used in the type. +/// This is used to help deduplicate imported impls +/// for reexported types. If any of the contained +/// types are re-exported, we don't use the corresponding +/// entry from the js file, as inlining will have already +/// picked up the impl +fn collect_paths_for_type(first_ty: clean::Type) -> Vec { + let mut out = Vec::new(); + let mut visited = FxHashSet(); + let mut work = VecDeque::new(); + let cache = cache(); + + work.push_back(first_ty); + + while let Some(ty) = work.pop_front() { + if !visited.insert(ty.clone()) { + continue; + } + + match ty { + clean::Type::ResolvedPath { did, .. } => { + let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone()); + let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern); + + match fqp { + Some(path) => { + out.push(path.join("::")); + }, + _ => {} + }; + + }, + clean::Type::Tuple(tys) => { + work.extend(tys.into_iter()); + }, + clean::Type::Slice(ty) => { + work.push_back(*ty); + } + clean::Type::Array(ty, _) => { + work.push_back(*ty); + }, + clean::Type::Unique(ty) => { + work.push_back(*ty); + }, + clean::Type::RawPointer(_, ty) => { + work.push_back(*ty); + }, + clean::Type::BorrowedRef { type_, .. } => { + work.push_back(*type_); + }, + clean::Type::QPath { self_type, trait_, .. } => { + work.push_back(*self_type); + work.push_back(*trait_); + }, + _ => {} + } + }; + out +} + fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option { match *clean_type { clean::ResolvedPath { ref path, .. } => { diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index f688be89beebc..5c674cabde5d8 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -1563,14 +1563,31 @@ window.initSidebarItems = initSidebarItems; window.register_implementors = function(imp) { - var list = document.getElementById('implementors-list'); + var implementors = document.getElementById('implementors-list'); + var synthetic_implementors = document.getElementById('synthetic-implementors-list'); + var libs = Object.getOwnPropertyNames(imp); for (var i = 0; i < libs.length; ++i) { if (libs[i] === currentCrate) { continue; } var structs = imp[libs[i]]; + + struct_loop: for (var j = 0; j < structs.length; ++j) { + var struct = structs[j]; + + var list = struct.synthetic ? 
synthetic_implementors : implementors; + + if (struct.synthetic) { + for (var k = 0; k < struct.types.length; k++) { + if (window.inlined_types.has(struct.types[k])) { + continue struct_loop; + } + window.inlined_types.add(struct.types[k]); + } + } + var code = document.createElement('code'); - code.innerHTML = structs[j]; + code.innerHTML = struct.text; var x = code.getElementsByTagName('a'); for (var k = 0; k < x.length; k++) { diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 825558648e1f8..033988fa9d9d8 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -24,6 +24,7 @@ #![feature(test)] #![feature(unicode)] #![feature(vec_remove_item)] +#![feature(entry_and_modify)] extern crate arena; extern crate getopts; @@ -549,7 +550,8 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R { let (mut krate, renderinfo) = core::run_core(paths, cfgs, externs, Input::File(cratefile), triple, maybe_sysroot, - display_warnings, force_unstable_if_unmarked); + display_warnings, crate_name.clone(), + force_unstable_if_unmarked); info!("finished with rustc"); diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 90b6746d91d86..12c4076c9a5cc 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -414,7 +414,8 @@ fn partition_source(s: &str) -> (String, String) { for line in s.lines() { let trimline = line.trim(); let header = trimline.is_whitespace() || - trimline.starts_with("#!["); + trimline.starts_with("#![") || + trimline.starts_with("extern crate"); if !header || after_header { after_header = true; after.push_str(line); @@ -814,8 +815,8 @@ use asdf::qwop; assert_eq!(2+2, 4);"; let expected = "#![allow(unused)] -fn main() { extern crate asdf; +fn main() { use asdf::qwop; assert_eq!(2+2, 4); }".to_string(); diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 3b882827c6147..f692e05d6a259 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -24,12 +24,12 @@ use rustc::hir::def_id::{DefId, LOCAL_CRATE}; use rustc::middle::cstore::{LoadedMacro, CrateStore}; use rustc::middle::privacy::AccessLevel; use rustc::ty::Visibility; -use rustc::util::nodemap::FxHashSet; +use rustc::util::nodemap::{FxHashSet, FxHashMap}; use rustc::hir; use core; -use clean::{self, AttributesExt, NestedAttributesExt}; +use clean::{self, AttributesExt, NestedAttributesExt, def_id_to_path}; use doctree::*; // looks to me like the first two of these are actually @@ -41,7 +41,7 @@ use doctree::*; // framework from syntax? pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> { - cstore: &'a CrateStore, + pub cstore: &'a CrateStore, pub module: Module, pub attrs: hir::HirVec, pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>, @@ -50,6 +50,7 @@ pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> { /// Is the current module and all of its parents public? 
inside_public_path: bool, reexported_macros: FxHashSet, + exact_paths: Option>>, } impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { @@ -66,10 +67,21 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { inlining: false, inside_public_path: true, reexported_macros: FxHashSet(), + exact_paths: Some(FxHashMap()), cstore, } } + fn store_path(&mut self, did: DefId) { + // We can't use the entry api, as that keeps the mutable borrow of self active + // when we try to use cx + let exact_paths = self.exact_paths.as_mut().unwrap(); + if exact_paths.get(&did).is_none() { + let path = def_id_to_path(self.cx, did, self.cx.crate_name.clone()); + exact_paths.insert(did, path); + } + } + fn stability(&self, id: ast::NodeId) -> Option { self.cx.tcx.hir.opt_local_def_id(id) .and_then(|def_id| self.cx.tcx.lookup_stability(def_id)).cloned() @@ -94,6 +106,8 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { krate.exported_macros.iter().map(|def| self.visit_local_macro(def)).collect(); self.module.macros.extend(macro_exports); self.module.is_crate = true; + + self.cx.renderinfo.borrow_mut().exact_paths = self.exact_paths.take().unwrap(); } pub fn visit_variant_data(&mut self, item: &hir::Item, @@ -371,6 +385,12 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { renamed: Option, om: &mut Module) { debug!("Visiting item {:?}", item); let name = renamed.unwrap_or(item.name); + + if item.vis == hir::Public { + let def_id = self.cx.tcx.hir.local_def_id(item.id); + self.store_path(def_id); + } + match item.node { hir::ItemForeignMod(ref fm) => { // If inlining we only want to include public functions. diff --git a/src/libstd/ffi/c_str.rs b/src/libstd/ffi/c_str.rs index e91d3a32a50cd..2519d83043553 100644 --- a/src/libstd/ffi/c_str.rs +++ b/src/libstd/ffi/c_str.rs @@ -91,7 +91,7 @@ use sys; /// /// # Examples /// -/// ```no_run +/// ```ignore (extern-declaration) /// # fn main() { /// use std::ffi::CString; /// use std::os::raw::c_char; @@ -150,7 +150,7 @@ pub struct CString { /// /// Inspecting a foreign C string: /// -/// ```no_run +/// ```ignore (extern-declaration) /// use std::ffi::CStr; /// use std::os::raw::c_char; /// @@ -164,7 +164,7 @@ pub struct CString { /// /// Passing a Rust-originating C string: /// -/// ```no_run +/// ```ignore (extern-declaration) /// use std::ffi::{CString, CStr}; /// use std::os::raw::c_char; /// @@ -180,7 +180,7 @@ pub struct CString { /// /// Converting a foreign C string into a Rust [`String`]: /// -/// ```no_run +/// ```ignore (extern-declaration) /// use std::ffi::CStr; /// use std::os::raw::c_char; /// @@ -307,7 +307,7 @@ impl CString { /// /// # Examples /// - /// ```no_run + /// ```ignore (extern-declaration) /// use std::ffi::CString; /// use std::os::raw::c_char; /// @@ -389,7 +389,7 @@ impl CString { /// Create a `CString`, pass ownership to an `extern` function (via raw pointer), then retake /// ownership with `from_raw`: /// - /// ```no_run + /// ```ignore (extern-declaration) /// use std::ffi::CString; /// use std::os::raw::c_char; /// @@ -882,7 +882,7 @@ impl CStr { /// /// # Examples /// - /// ```no_run + /// ```ignore (extern-declaration) /// # fn main() { /// use std::ffi::CStr; /// use std::os::raw::c_char; diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index aa07f64b67859..d403bf6bfe53c 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -1506,6 +1506,8 @@ pub trait BufRead: Read { /// error is encountered then `buf` may contain some bytes already read in /// the event that all data read so far was valid UTF-8. 
/// + /// [`read_until`]: #method.read_until + /// /// # Examples /// /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In diff --git a/src/libstd/sys/unix/ext/net.rs b/src/libstd/sys/unix/ext/net.rs index 86b0f35be924d..31bdc5ea1f565 100644 --- a/src/libstd/sys/unix/ext/net.rs +++ b/src/libstd/sys/unix/ext/net.rs @@ -415,7 +415,7 @@ impl UnixStream { /// method. /// /// [`None`]: ../../../../std/option/enum.Option.html#variant.None - /// [`read`]: ../../../../std/io/trait.Write.html#tymethod.write + /// [`write`]: ../../../../std/io/trait.Write.html#tymethod.write /// [`Duration`]: ../../../../std/time/struct.Duration.html /// /// # Examples diff --git a/src/test/compile-fail/epoch-raw-pointer-method-2015.rs b/src/test/compile-fail/epoch-raw-pointer-method-2015.rs index a71db040b50e7..6aa83a38b7ee9 100644 --- a/src/test/compile-fail/epoch-raw-pointer-method-2015.rs +++ b/src/test/compile-fail/epoch-raw-pointer-method-2015.rs @@ -18,6 +18,6 @@ fn main() { let x = 0; let y = &x as *const _; let _ = y.is_null(); - //~^ error: the type of this value must be known in this context [tyvar_behind_raw_pointer] + //~^ error: type annotations needed [tyvar_behind_raw_pointer] //~^^ warning: this was previously accepted } diff --git a/src/test/compile-fail/issue-15965.rs b/src/test/compile-fail/issue-15965.rs index 08b896f387bbe..76ba5a0f4b371 100644 --- a/src/test/compile-fail/issue-15965.rs +++ b/src/test/compile-fail/issue-15965.rs @@ -11,7 +11,7 @@ fn main() { return { return () } -//~^ ERROR the type of this value must be known in this context +//~^ ERROR type annotations needed [E0282] () ; } diff --git a/src/test/compile-fail/issue-2151.rs b/src/test/compile-fail/issue-2151.rs index fbd8f9163b5df..3cf971f3f8dfb 100644 --- a/src/test/compile-fail/issue-2151.rs +++ b/src/test/compile-fail/issue-2151.rs @@ -10,5 +10,5 @@ fn main() { let x = panic!(); - x.clone(); //~ ERROR the type of this value must be known in this context + x.clone(); //~ ERROR type annotations needed } diff --git a/src/test/compile-fail/match-vec-mismatch.rs b/src/test/compile-fail/match-vec-mismatch.rs index fed68da006889..998c11979953c 100644 --- a/src/test/compile-fail/match-vec-mismatch.rs +++ b/src/test/compile-fail/match-vec-mismatch.rs @@ -43,6 +43,6 @@ fn main() { fn another_fn_to_avoid_suppression() { match Default::default() { - [] => {} //~ ERROR the type of this value + [] => {} //~ ERROR type annotations needed }; } diff --git a/src/test/compile-fail/pat-tuple-bad-type.rs b/src/test/compile-fail/pat-tuple-bad-type.rs index fd4ab5d253158..251e7b47dcc99 100644 --- a/src/test/compile-fail/pat-tuple-bad-type.rs +++ b/src/test/compile-fail/pat-tuple-bad-type.rs @@ -12,7 +12,7 @@ fn main() { let x; match x { - (..) => {} //~ ERROR the type of this value must be known in this context + (..) 
=> {} //~ ERROR type annotations needed _ => {} } diff --git a/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs b/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs index 12b48b2a6c8aa..8f5bf827fcf1a 100644 --- a/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs +++ b/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs @@ -24,7 +24,7 @@ fn a() { match closure0.take() { Some(c) => { return c(); - //~^ ERROR the type of this value must be known in this context + //~^ ERROR type annotations needed } None => { } } diff --git a/src/test/rustdoc/duplicate_impls/issue-33054.rs b/src/test/rustdoc/duplicate_impls/issue-33054.rs index df6ebcae10756..43a425d4c5e4f 100644 --- a/src/test/rustdoc/duplicate_impls/issue-33054.rs +++ b/src/test/rustdoc/duplicate_impls/issue-33054.rs @@ -11,7 +11,8 @@ // @has issue_33054/impls/struct.Foo.html // @has - '//code' 'impl Foo' // @has - '//code' 'impl Bar for Foo' -// @count - '//*[@class="impl"]' 2 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 +// @count - '//*[@id="main"]/*[@class="impl"]' 1 // @has issue_33054/impls/bar/trait.Bar.html // @has - '//code' 'impl Bar for Foo' // @count - '//*[@class="struct"]' 1 diff --git a/src/test/rustdoc/intra-links.rs b/src/test/rustdoc/intra-links.rs index 4726323e11cef..c822d0f8b21b8 100644 --- a/src/test/rustdoc/intra-links.rs +++ b/src/test/rustdoc/intra-links.rs @@ -77,3 +77,15 @@ pub trait SoAmbiguous {} #[allow(bad_style)] pub fn SoAmbiguous() {} + + +// @has - '//a/@href' '../intra_links/struct.ThisType.html' +// @has - '//a/@href' '../intra_links/struct.ThisType.html#method.this_method' +// @has - '//a/@href' '../intra_links/enum.ThisEnum.html' +// @has - '//a/@href' '../intra_links/enum.ThisEnum.html#ThisVariant.v' +/// Shortcut links for: +/// * [`ThisType`] +/// * [`ThisType::this_method`] +/// * [ThisEnum] +/// * [ThisEnum::ThisVariant] +pub struct SomeOtherType; diff --git a/src/test/rustdoc/issue-21474.rs b/src/test/rustdoc/issue-21474.rs index 36f160acf1cf8..553bbeb0cff39 100644 --- a/src/test/rustdoc/issue-21474.rs +++ b/src/test/rustdoc/issue-21474.rs @@ -17,5 +17,5 @@ mod inner { pub trait Blah { } // @count issue_21474/struct.What.html \ -// '//*[@class="impl"]' 1 +// '//*[@id="implementations-list"]/*[@class="impl"]' 1 pub struct What; diff --git a/src/test/rustdoc/issue-45584.rs b/src/test/rustdoc/issue-45584.rs index 6d6ae3dc94a21..b0e64557be253 100644 --- a/src/test/rustdoc/issue-45584.rs +++ b/src/test/rustdoc/issue-45584.rs @@ -14,12 +14,12 @@ pub trait Bar {} // @has 'foo/struct.Foo1.html' pub struct Foo1; -// @count - '//*[@class="impl"]' 1 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 // @has - '//*[@class="impl"]' "impl Bar for Foo1" impl Bar for Foo1 {} // @has 'foo/struct.Foo2.html' pub struct Foo2; -// @count - '//*[@class="impl"]' 1 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 // @has - '//*[@class="impl"]' "impl Bar<&'static Foo2, Foo2> for u8" impl Bar<&'static Foo2, Foo2> for u8 {} diff --git a/src/test/ui/error-codes/E0619.rs b/src/test/rustdoc/issue-48377.rs similarity index 63% rename from src/test/ui/error-codes/E0619.rs rename to src/test/rustdoc/issue-48377.rs index a5a5ff7218dcf..d098b1a5b02f1 100644 --- a/src/test/ui/error-codes/E0619.rs +++ b/src/test/rustdoc/issue-48377.rs @@ -1,4 +1,4 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// Copyright 2018 The Rust Project Developers. 
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -8,12 +8,16 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-fn main() {
-    let x;
+// compile-flags:--test
 
-    match x {
-        (..) => {} //~ ERROR E0619
-        _ => {}
-    }
-}
-
+//! This is a doc comment
+//!
+//! ```rust
+//! fn main() {}
+//! ```
+//!
+//! With a trailing code fence
+//! ```
+
+/// Some foo function
+pub fn foo() {}
diff --git a/src/test/rustdoc/synthetic_auto/basic.rs b/src/test/rustdoc/synthetic_auto/basic.rs
new file mode 100644
index 0000000000000..8ff84d11a5009
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/basic.rs
@@ -0,0 +1,18 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// @has basic/struct.Foo.html
+// @has - '//code' 'impl<T> Send for Foo<T> where T: Send'
+// @has - '//code' 'impl<T> Sync for Foo<T> where T: Sync'
+// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 0
+// @count - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]' 2
+pub struct Foo<T> {
+    field: T,
+}
diff --git a/src/test/rustdoc/synthetic_auto/complex.rs b/src/test/rustdoc/synthetic_auto/complex.rs
new file mode 100644
index 0000000000000..531798c30c656
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/complex.rs
@@ -0,0 +1,52 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod foo {
+    pub trait MyTrait<'a> {
+        type MyItem: ?Sized;
+    }
+
+    pub struct Inner<'a, Q, R: ?Sized> {
+        field: Q,
+        field3: &'a u8,
+        my_foo: Foo<Q>,
+        field2: R,
+    }
+
+    pub struct Outer<'a, T, K: ?Sized> {
+        my_inner: Inner<'a, T, K>,
+    }
+
+    pub struct Foo<T> {
+        myfield: T,
+    }
+}
+
+// @has complex/struct.NotOuter.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'a, T, K: \
+// ?Sized> Send for NotOuter<'a, T, K> where K: for<'b> Fn((&'b bool, &'a u8)) \
+// -> &'b i8, T: MyTrait<'a>, <T as MyTrait<'a>>::MyItem: Copy, 'a: 'static"
+
+pub use foo::{Foo, Inner as NotInner, MyTrait as NotMyTrait, Outer as NotOuter};
+
+unsafe impl<T> Send for Foo<T>
+where
+    T: NotMyTrait<'static>,
+{
+}
+
+unsafe impl<'a, Q, R: ?Sized> Send for NotInner<'a, Q, R>
+where
+    Q: NotMyTrait<'a>,
+    <Q as NotMyTrait<'a>>::MyItem: Copy,
+    R: for<'b> Fn((&'b bool, &'a u8)) -> &'b i8,
+    Foo<Q>: Send,
+{
+}
diff --git a/src/test/rustdoc/synthetic_auto/lifetimes.rs b/src/test/rustdoc/synthetic_auto/lifetimes.rs
new file mode 100644
index 0000000000000..272925e5db542
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/lifetimes.rs
@@ -0,0 +1,28 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+pub struct Inner<'a, T: 'a> {
+    field: &'a T,
+}
+
+unsafe impl<'a, T> Send for Inner<'a, T>
+where
+    'a: 'static,
+    T: for<'b> Fn(&'b bool) -> &'a u8,
+{}
+
+// @has lifetimes/struct.Foo.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Send \
+// for Foo<'c, K> where K: for<'b> Fn(&'b bool) -> &'c u8, 'c: 'static"
+//
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Sync \
+// for Foo<'c, K> where K: Sync"
+pub struct Foo<'c, K: 'c> {
+    inner_field: Inner<'c, K>,
+}
diff --git a/src/test/rustdoc/synthetic_auto/manual.rs b/src/test/rustdoc/synthetic_auto/manual.rs
new file mode 100644
index 0000000000000..d81e6309dff61
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/manual.rs
@@ -0,0 +1,24 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// @has manual/struct.Foo.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' 'impl<T> Sync for \
+// Foo<T> where T: Sync'
+//
+// @has - '//*[@id="implementations-list"]/*[@class="impl"]/*/code' \
+// 'impl<T> Send for Foo<T>'
+//
+// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1
+// @count - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]' 1
+pub struct Foo<T> {
+    field: T,
+}
+
+unsafe impl<T> Send for Foo<T> {}
diff --git a/src/test/rustdoc/synthetic_auto/negative.rs b/src/test/rustdoc/synthetic_auto/negative.rs
new file mode 100644
index 0000000000000..ec9cb710f1f8c
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/negative.rs
@@ -0,0 +1,23 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Inner<T> {
+    field: *mut T,
+}
+
+// @has negative/struct.Outer.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> !Send for \
+// Outer<T>"
+//
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> \
+// !Sync for Outer<T>"
+pub struct Outer<T> {
+    inner_field: Inner<T>,
+}
diff --git a/src/test/rustdoc/synthetic_auto/nested.rs b/src/test/rustdoc/synthetic_auto/nested.rs
new file mode 100644
index 0000000000000..1f33a8b13cbf8
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/nested.rs
@@ -0,0 +1,28 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+pub struct Inner<T> {
+    field: T,
+}
+
+unsafe impl<T> Send for Inner<T>
+where
+    T: Copy,
+{
+}
+
+// @has nested/struct.Foo.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' 'impl<T> Send for \
+// Foo<T> where T: Copy'
+//
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' \
+// 'impl<T> Sync for Foo<T> where T: Sync'
+pub struct Foo<T> {
+    inner_field: Inner<T>,
+}
diff --git a/src/test/rustdoc/synthetic_auto/no-redundancy.rs b/src/test/rustdoc/synthetic_auto/no-redundancy.rs
new file mode 100644
index 0000000000000..0b37f2ed31790
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/no-redundancy.rs
@@ -0,0 +1,26 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Inner<T> {
+    field: T,
+}
+
+unsafe impl<T> Send for Inner<T>
+where
+    T: Copy + Send,
+{
+}
+
+// @has no_redundancy/struct.Outer.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> Send for \
+// Outer<T> where T: Copy + Send"
+pub struct Outer<T> {
+    inner_field: Inner<T>,
+}
diff --git a/src/test/rustdoc/synthetic_auto/project.rs b/src/test/rustdoc/synthetic_auto/project.rs
new file mode 100644
index 0000000000000..977607fb14826
--- /dev/null
+++ b/src/test/rustdoc/synthetic_auto/project.rs
@@ -0,0 +1,43 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Inner<'a, T: 'a> {
+    field: &'a T,
+}
+
+trait MyTrait {
+    type MyItem;
+}
+
+trait OtherTrait {}
+
+unsafe impl<'a, T> Send for Inner<'a, T>
+where
+    'a: 'static,
+    T: MyTrait,
+{
+}
+unsafe impl<'a, T> Sync for Inner<'a, T>
+where
+    'a: 'static,
+    T: MyTrait,
+    <T as MyTrait>::MyItem: OtherTrait,
+{
+}
+
+// @has project/struct.Foo.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Send \
+// for Foo<'c, K> where K: MyTrait, 'c: 'static"
+//
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Sync \
+// for Foo<'c, K> where K: MyTrait, <K as MyTrait>::MyItem: OtherTrait, 'c: 'static,"
+pub struct Foo<'c, K: 'c> {
+    inner_field: Inner<'c, K>,
+}
diff --git a/src/test/ui/error-codes/E0619.stderr b/src/test/ui/error-codes/E0619.stderr
deleted file mode 100644
index cec336cfcec66..0000000000000
--- a/src/test/ui/error-codes/E0619.stderr
+++ /dev/null
@@ -1,8 +0,0 @@
-error[E0619]: the type of this value must be known in this context
-  --> $DIR/E0619.rs:15:9
-   |
-15 |         (..) => {} //~ ERROR E0619
-   |         ^^^^
-
-error: aborting due to previous error
-
diff --git a/src/test/ui/inference-variable-behind-raw-pointer.stderr b/src/test/ui/inference-variable-behind-raw-pointer.stderr
index d0ee55c092b28..bb1d921f1c61c 100644
--- a/src/test/ui/inference-variable-behind-raw-pointer.stderr
+++ b/src/test/ui/inference-variable-behind-raw-pointer.stderr
@@ -1,4 +1,4 @@
-warning: the type of this value must be known in this context
+warning: type annotations needed
   --> $DIR/inference-variable-behind-raw-pointer.rs:18:13
    |
 18 |     if data.is_null() {}
diff --git a/src/test/ui/span/issue-42234-unknown-receiver-type.rs b/src/test/ui/span/issue-42234-unknown-receiver-type.rs
index d9cdd99c245e6..975c81955e0b3 100644
--- a/src/test/ui/span/issue-42234-unknown-receiver-type.rs
+++ b/src/test/ui/span/issue-42234-unknown-receiver-type.rs
@@ -15,11 +15,11 @@
 fn shines_a_beacon_through_the_darkness() {
     let x: Option<_> = None;
     x.unwrap().method_that_could_exist_on_some_type();
-    //~^ ERROR 17:5: 17:15: the type of this value must be known in this context
+    //~^ ERROR 17:5: 17:15: type annotations needed
 }
 
 fn courier_to_des_moines_and_points_west(data: &[u32]) -> String {
-    data.iter() //~ ERROR 22:5: 23:20: the type of this value must be known in this context
+    data.iter() //~ ERROR 22:5: 23:20: type annotations needed
         .sum::<_>()
         .to_string()
 }
diff --git a/src/test/ui/span/issue-42234-unknown-receiver-type.stderr b/src/test/ui/span/issue-42234-unknown-receiver-type.stderr
index ed756cdc553ce..d87cec642f18e 100644
--- a/src/test/ui/span/issue-42234-unknown-receiver-type.stderr
+++ b/src/test/ui/span/issue-42234-unknown-receiver-type.stderr
@@ -1,15 +1,17 @@
-error[E0619]: the type of this value must be known in this context
+error[E0282]: type annotations needed
   --> $DIR/issue-42234-unknown-receiver-type.rs:17:5
    |
+16 |     let x: Option<_> = None;
+   |         - consider giving `x` a type
 17 |     x.unwrap().method_that_could_exist_on_some_type();
-   |     ^^^^^^^^^^
+   |     ^^^^^^^^^^ cannot infer type for `T`
 
-error[E0619]: the type of this value must be known in this context
+error[E0282]: type annotations needed
   --> $DIR/issue-42234-unknown-receiver-type.rs:22:5
    |
-22 | /     data.iter() //~ ERROR 22:5: 23:20: the type of this value must be known in this context
+22 | /     data.iter() //~ ERROR 22:5: 23:20: type annotations needed
 23 | |         .sum::<_>()
-   | |___________________^
+   | |___________________^ cannot infer type for `_`
 
 error: aborting due to 2 previous errors