Fix fallout in rustdoc and tests.
jseyfried committed Nov 21, 2016
1 parent c9935e4 commit a8e86f0
Showing 30 changed files with 120 additions and 130 deletions.
10 changes: 3 additions & 7 deletions src/librustc/session/config.rs
@@ -1764,9 +1764,7 @@ mod tests {
use std::rc::Rc;
use super::{OutputType, OutputTypes, Externs};
use rustc_back::PanicStrategy;
use syntax::{ast, attr};
use syntax::parse::token::InternedString;
use syntax::codemap::dummy_spanned;
use syntax::symbol::Symbol;

fn optgroups() -> Vec<OptGroup> {
super::rustc_optgroups().into_iter()
@@ -1795,9 +1793,7 @@
let (sessopts, cfg) = build_session_options_and_crate_config(matches);
let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore));
let cfg = build_configuration(&sess, cfg);
assert!(attr::contains(&cfg, &dummy_spanned(ast::MetaItemKind::Word({
InternedString::new("test")
}))));
assert!(cfg.contains(&(Symbol::intern("test"), None)));
}

// When the user supplies --test and --cfg test, don't implicitly add
@@ -1818,7 +1814,7 @@
let sess = build_session(sessopts, &dep_graph, None, registry,
Rc::new(DummyCrateStore));
let cfg = build_configuration(&sess, cfg);
let mut test_items = cfg.iter().filter(|m| m.name() == "test");
let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test");
assert!(test_items.next().is_some());
assert!(test_items.next().is_none());
}
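The config.rs hunks above swap the old spanned-meta-item representation of the crate config for a plain set of interned (name, optional value) pairs. A minimal, self-contained sketch of that pattern, using a toy `Symbol` stand-in (the real `syntax::symbol::Symbol` is an interned, Copy-able index inside the compiler and is not usable from here):

```rust
use std::collections::HashSet;

// Toy stand-in for syntax::symbol::Symbol; the real type is an index into
// the compiler's string interner.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
struct Symbol(String);

impl Symbol {
    fn intern(s: &str) -> Symbol {
        Symbol(s.to_string())
    }
}

fn main() {
    // The crate config now behaves like a set of (name, optional value)
    // pairs rather than a vector of spanned meta items.
    let mut cfg: HashSet<(Symbol, Option<Symbol>)> = HashSet::new();
    cfg.insert((Symbol::intern("test"), None));
    cfg.insert((Symbol::intern("feature"), Some(Symbol::intern("foo"))));

    // The first test above reduces to a plain set lookup ...
    assert!(cfg.contains(&(Symbol::intern("test"), None)));

    // ... and the "exactly one `test` entry" test to an iterator filter.
    let mut test_items = cfg.iter().filter(|&&(ref name, _)| *name == Symbol::intern("test"));
    assert!(test_items.next().is_some());
    assert!(test_items.next().is_none());
}
```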
2 changes: 1 addition & 1 deletion src/librustc_driver/driver.rs
@@ -54,7 +54,7 @@ use syntax::{ast, diagnostics, visit};
use syntax::attr;
use syntax::ext::base::ExtCtxt;
use syntax::parse::{self, PResult};
use syntax::symbol::{self, Symbol};
use syntax::symbol::Symbol;
use syntax::util::node_count::NodeCounter;
use syntax;
use syntax_ext;
6 changes: 3 additions & 3 deletions src/librustc_driver/test.rs
@@ -34,8 +34,8 @@ use syntax::codemap::CodeMap;
use errors;
use errors::emitter::Emitter;
use errors::{Level, DiagnosticBuilder};
use syntax::parse::token;
use syntax::feature_gate::UnstableFeatures;
use syntax::symbol::Symbol;

use rustc::hir;

@@ -288,11 +288,11 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> {

pub fn t_param(&self, index: u32) -> Ty<'tcx> {
let name = format!("T{}", index);
self.infcx.tcx.mk_param(index, token::intern(&name[..]))
self.infcx.tcx.mk_param(index, Symbol::intern(&name[..]))
}

pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region {
let name = token::intern(name);
let name = Symbol::intern(name);
self.infcx.tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
index: index,
name: name,
6 changes: 3 additions & 3 deletions src/librustdoc/clean/mod.rs
@@ -24,9 +24,9 @@ use syntax::abi::Abi;
use syntax::ast;
use syntax::attr;
use syntax::codemap::Spanned;
use syntax::parse::token::keywords;
use syntax::ptr::P;
use syntax::print::pprust as syntax_pprust;
use syntax::symbol::keywords;
use syntax_pos::{self, DUMMY_SP, Pos};

use rustc_trans::back::link;
@@ -242,7 +242,7 @@ impl Clean<ExternalCrate> for CrateNum {
}
});
ExternalCrate {
name: (&cx.sess().cstore.crate_name(self.0)[..]).to_owned(),
name: cx.sess().cstore.crate_name(self.0).to_string(),
attrs: cx.sess().cstore.item_attrs(root).clean(cx),
primitives: primitives,
}
@@ -2577,7 +2577,7 @@ impl Clean<Vec<Item>> for doctree::Import {
// #[doc(no_inline)] attribute is present.
// Don't inline doc(hidden) imports so they can be stripped at a later stage.
let denied = self.vis != hir::Public || self.attrs.iter().any(|a| {
&a.name()[..] == "doc" && match a.meta_item_list() {
a.name() == "doc" && match a.meta_item_list() {
Some(l) => attr::list_contains_name(l, "no_inline") ||
attr::list_contains_name(l, "hidden"),
None => false,
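The `a.name() == "doc"` change above drops the explicit `&...[..]` slicing because the interned-name type can be compared against string slices directly. A rough sketch of that idea with toy types, not the compiler's actual implementation:

```rust
// Toy types only; the real interner and Symbol live in libsyntax.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Symbol(usize);

// Hypothetical lookup table standing in for the string interner.
const STRINGS: [&'static str; 3] = ["doc", "no_inline", "hidden"];

fn resolve(s: Symbol) -> &'static str {
    STRINGS[s.0]
}

// Making the name type comparable against string slices is what lets call
// sites write `name == "doc"` instead of slicing first.
impl<'a> PartialEq<&'a str> for Symbol {
    fn eq(&self, other: &&'a str) -> bool {
        resolve(*self) == *other
    }
}

fn main() {
    let name = Symbol(0);
    assert!(name == "doc");               // new style used throughout this commit
    assert!(&resolve(name)[..] == "doc"); // the older, more roundabout spelling
}
```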
12 changes: 5 additions & 7 deletions src/libsyntax/codemap.rs
@@ -871,6 +871,7 @@ impl CodeMapper for CodeMap {
#[cfg(test)]
mod tests {
use super::*;
use symbol::keywords;
use std::rc::Rc;

#[test]
@@ -1097,10 +1098,9 @@ mod tests {
#[test]
fn t11() {
// Test span_to_expanded_string works with expansion
use ast::Name;
let cm = init_code_map();
let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
let format = ExpnFormat::MacroBang(Name(0u32));
let format = ExpnFormat::MacroBang(keywords::Invalid.name());
let callee = NameAndSpan { format: format,
allow_internal_unstable: false,
span: None };
@@ -1197,11 +1197,9 @@
fn init_expansion_chain(cm: &CodeMap) -> Span {
// Creates an expansion chain containing two recursive calls
// root -> expA -> expA -> expB -> expB -> end
use ast::Name;

let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };

let format_root = ExpnFormat::MacroBang(Name(0u32));
let format_root = ExpnFormat::MacroBang(keywords::Invalid.name());
let callee_root = NameAndSpan { format: format_root,
allow_internal_unstable: false,
span: Some(root) };
@@ -1210,7 +1208,7 @@
let id_a1 = cm.record_expansion(info_a1);
let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 };

let format_a = ExpnFormat::MacroBang(Name(1u32));
let format_a = ExpnFormat::MacroBang(keywords::As.name());
let callee_a = NameAndSpan { format: format_a,
allow_internal_unstable: false,
span: Some(span_a1) };
@@ -1223,7 +1221,7 @@
let id_b1 = cm.record_expansion(info_b1);
let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 };

let format_b = ExpnFormat::MacroBang(Name(2u32));
let format_b = ExpnFormat::MacroBang(keywords::Box.name());
let callee_b = NameAndSpan { format: format_b,
allow_internal_unstable: false,
span: None };
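These codemap tests stop constructing raw `Name(0u32)`-style indices and instead take names from the keyword table. A small sketch, under the assumption that the interner is pre-seeded with the keywords in a fixed order (the hunks above imply the invalid/empty name, `as`, and `box` occupy the first three slots):

```rust
use std::collections::HashMap;

// Sketch of a pre-seeded string interner, illustrating why a test can swap
// a raw index like Name(1) for keywords::As.name(): keywords are interned
// first, so the lowest indices always refer to the same keywords.
struct Interner {
    indices: HashMap<String, u32>,
    strings: Vec<String>,
}

impl Interner {
    fn prefill(seed: &[&str]) -> Interner {
        let mut interner = Interner { indices: HashMap::new(), strings: Vec::new() };
        for s in seed {
            interner.intern(*s);
        }
        interner
    }

    fn intern(&mut self, s: &str) -> u32 {
        if let Some(&idx) = self.indices.get(s) {
            return idx;
        }
        let idx = self.strings.len() as u32;
        self.strings.push(s.to_string());
        self.indices.insert(s.to_string(), idx);
        idx
    }
}

fn main() {
    // Hypothetical seed order, matching what the diff implies.
    let mut interner = Interner::prefill(&["", "as", "box"]);
    assert_eq!(interner.intern(""), 0);     // keywords::Invalid.name()
    assert_eq!(interner.intern("as"), 1);   // keywords::As.name()
    assert_eq!(interner.intern("box"), 2);  // keywords::Box.name()
    assert_eq!(interner.intern("zip"), 3);  // later symbols get fresh indices
}
```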
5 changes: 2 additions & 3 deletions src/libsyntax/fold.rs
@@ -1332,9 +1332,8 @@ pub fn noop_fold_vis<T: Folder>(vis: Visibility, folder: &mut T) -> Visibility {
#[cfg(test)]
mod tests {
use std::io;
use ast;
use ast::{self, Ident};
use util::parser_testing::{string_to_crate, matches_codepattern};
use parse::token;
use print::pprust;
use fold;
use super::*;
@@ -1350,7 +1349,7 @@ mod tests {

impl Folder for ToZzIdentFolder {
fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident {
token::str_to_ident("zz")
Ident::from_str("zz")
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
9 changes: 5 additions & 4 deletions src/libsyntax/parse/lexer/mod.rs
@@ -1702,6 +1702,7 @@ mod tests {
use super::*;

use ast::Ident;
use symbol::Symbol;
use syntax_pos::{BytePos, Span, NO_EXPANSION};
use codemap::CodeMap;
use errors;
@@ -1752,7 +1753,7 @@
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan {
tok: token::Ident(str_to_ident("main")),
tok: token::Ident(Ident::from_str("main")),
sp: Span {
lo: BytePos(24),
hi: BytePos(28),
@@ -1774,7 +1775,7 @@

// make the identifier by looking up the string in the interner
fn mk_ident(id: &str) -> token::Token {
token::Ident(str_to_ident(id))
token::Ident(Ident::from_str(id))
}

#[test]
@@ -1838,7 +1839,7 @@
let cm = Rc::new(CodeMap::new());
let sh = mk_sh(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(token::str_to_ident("'abc")));
token::Lifetime(Ident::from_str("'abc")));
}

#[test]
@@ -1848,7 +1849,7 @@
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token()
.tok,
token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None));
token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
}

#[test]
51 changes: 23 additions & 28 deletions src/libsyntax/parse/mod.rs
@@ -597,12 +597,11 @@ mod tests {
use std::rc::Rc;
use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
use codemap::Spanned;
use ast::{self, PatKind};
use ast::{self, Ident, PatKind};
use abi::Abi;
use attr::first_attr_value_str_by_name;
use parse;
use parse::parser::Parser;
use parse::token::{str_to_ident};
use print::pprust::item_to_string;
use ptr::P;
use tokenstream::{self, TokenTree};
@@ -624,7 +623,7 @@ mod tests {
global: false,
segments: vec![
ast::PathSegment {
identifier: str_to_ident("a"),
identifier: Ident::from_str("a"),
parameters: ast::PathParameters::none(),
}
],
@@ -643,11 +642,11 @@ mod tests {
global: true,
segments: vec![
ast::PathSegment {
identifier: str_to_ident("a"),
identifier: Ident::from_str("a"),
parameters: ast::PathParameters::none(),
},
ast::PathSegment {
identifier: str_to_ident("b"),
identifier: Ident::from_str("b"),
parameters: ast::PathParameters::none(),
}
]
@@ -676,8 +675,8 @@ mod tests {
Some(&TokenTree::Token(_, token::Ident(name_zip))),
Some(&TokenTree::Delimited(_, ref macro_delimed)),
)
if name_macro_rules.name.as_str() == "macro_rules"
&& name_zip.name.as_str() == "zip" => {
if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => {
let tts = &macro_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
@@ -694,8 +693,7 @@ mod tests {
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
)
if first_delimed.delim == token::Paren
&& ident.name.as_str() == "a" => {},
if first_delimed.delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?}", **first_delimed),
}
let tts = &second_delimed.tts[..];
@@ -706,7 +704,7 @@ mod tests {
Some(&TokenTree::Token(_, token::Ident(ident))),
)
if second_delimed.delim == token::Paren
&& ident.name.as_str() == "a" => {},
&& ident.name == "a" => {},
_ => panic!("value 4: {:?}", **second_delimed),
}
},
@@ -722,17 +720,17 @@ mod tests {
let tts = string_to_tts("fn a (b : i32) { b; }".to_string());

let expected = vec![
TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
TokenTree::Delimited(
sp(5, 14),
Rc::new(tokenstream::Delimited {
delim: token::DelimToken::Paren,
open_span: sp(5, 6),
tts: vec![
TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
TokenTree::Token(sp(8, 9), token::Colon),
TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
],
close_span: sp(13, 14),
})),
@@ -742,7 +740,7 @@ mod tests {
delim: token::DelimToken::Brace,
open_span: sp(15, 16),
tts: vec![
TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
TokenTree::Token(sp(18, 19), token::Semi),
],
close_span: sp(20, 21),
@@ -763,7 +761,7 @@ mod tests {
global: false,
segments: vec![
ast::PathSegment {
identifier: str_to_ident("d"),
identifier: Ident::from_str("d"),
parameters: ast::PathParameters::none(),
}
],
@@ -786,7 +784,7 @@ mod tests {
global:false,
segments: vec![
ast::PathSegment {
identifier: str_to_ident("b"),
identifier: Ident::from_str("b"),
parameters: ast::PathParameters::none(),
}
],
@@ -810,7 +808,7 @@ mod tests {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
Spanned{ span:sp(0, 1),
node: str_to_ident("b")
node: Ident::from_str("b")
},
None),
span: sp(0,1)}));
@@ -822,7 +820,7 @@ mod tests {
// this test depends on the intern order of "fn" and "i32"
assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
Some(
P(ast::Item{ident:str_to_ident("a"),
P(ast::Item{ident:Ident::from_str("a"),
attrs:Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Fn(P(ast::FnDecl {
@@ -833,8 +831,7 @@ mod tests {
global:false,
segments: vec![
ast::PathSegment {
identifier:
str_to_ident("i32"),
identifier: Ident::from_str("i32"),
parameters: ast::PathParameters::none(),
}
],
@@ -847,7 +844,7 @@ mod tests {
ast::BindingMode::ByValue(ast::Mutability::Immutable),
Spanned{
span: sp(6,7),
node: str_to_ident("b")},
node: Ident::from_str("b")},
None
),
span: sp(6,7)
@@ -882,9 +879,7 @@ mod tests {
global:false,
segments: vec![
ast::PathSegment {
identifier:
str_to_ident(
"b"),
identifier: Ident::from_str("b"),
parameters:
ast::PathParameters::none(),
}
@@ -996,20 +991,20 @@ mod tests {
let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(&doc[..], "/// doc comment");
assert_eq!(doc, "/// doc comment");

let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap();
let docs = item.attrs.iter().filter(|a| &*a.name() == "doc")
let docs = item.attrs.iter().filter(|a| a.name() == "doc")
.map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
assert_eq!(&docs[..], b);

let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
assert_eq!(&doc[..], "/** doc comment\n * with CRLF */");
assert_eq!(doc, "/** doc comment\n * with CRLF */");
}

#[test]
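Most of the churn in this file is the mechanical move from the free helper `str_to_ident("a")` to the associated constructor `Ident::from_str("a")`. A toy sketch of that shape (the real `Ident` also carries an interned `Symbol` plus hygiene information):

```rust
// Toy types only, standing in for ast::Ident and syntax::symbol::Symbol.
#[derive(Clone, PartialEq, Eq, Debug)]
struct Symbol(String);

impl Symbol {
    fn intern(s: &str) -> Symbol {
        Symbol(s.to_string())
    }
}

#[derive(Clone, PartialEq, Eq, Debug)]
struct Ident {
    name: Symbol,
}

impl Ident {
    // Replaces the old free function `str_to_ident`.
    fn from_str(s: &str) -> Ident {
        Ident { name: Symbol::intern(s) }
    }
}

fn main() {
    // Call sites in the hunks above change mechanically, e.g.
    //   token::Ident(str_to_ident("main")) -> token::Ident(Ident::from_str("main"))
    let main_fn = Ident::from_str("main");
    assert_eq!(main_fn, Ident::from_str("main"));
}
```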