diff --git a/src/libcore/str.rs b/src/libcore/str.rs
index 94df7a5a6c2d9..f94d5a5e4b5e3 100644
--- a/src/libcore/str.rs
+++ b/src/libcore/str.rs
@@ -1764,7 +1764,9 @@ impl<'a> StrSlice<'a> for &'a str {
#[inline]
fn slice(&self, begin: uint, end: uint) -> &'a str {
- assert!(self.is_char_boundary(begin) && self.is_char_boundary(end));
+ assert!(self.is_char_boundary(begin) && self.is_char_boundary(end),
+ "index {} and/or {} in `{}` do not lie on a character boundary", begin,
+ end, *self);
unsafe { raw::slice_bytes(*self, begin, end) }
}
@@ -1775,7 +1777,8 @@ impl<'a> StrSlice<'a> for &'a str {
#[inline]
fn slice_to(&self, end: uint) -> &'a str {
- assert!(self.is_char_boundary(end));
+ assert!(self.is_char_boundary(end), "index {} in `{}` does not lie on \
+ a character boundary", end, *self);
unsafe { raw::slice_bytes(*self, 0, end) }
}
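
For illustration, a minimal sketch of the same assertion pattern in current Rust syntax (`usize` rather than `uint`; the helper name `checked_slice` is hypothetical), showing what the more descriptive panic message buys when an index falls inside a multi-byte character:

```rust
fn checked_slice(s: &str, begin: usize, end: usize) -> &str {
    // Mirrors the patch: name the offending indices and the string in the panic message.
    assert!(
        s.is_char_boundary(begin) && s.is_char_boundary(end),
        "index {} and/or {} in `{}` do not lie on a character boundary",
        begin, end, s
    );
    &s[begin..end]
}

fn main() {
    let s = "héllo"; // the 'é' occupies bytes 1..3, so index 2 is not a char boundary
    println!("{}", checked_slice(s, 0, 3)); // prints "hé"
    // checked_slice(s, 0, 2); // would panic with the descriptive message above
}
```

With the old bare `assert!`, the failure would name neither the indices nor the string being sliced.
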
diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs
index 98a6f7d5ed38d..ae401b9d6f15c 100644
--- a/src/librustc/lint/builtin.rs
+++ b/src/librustc/lint/builtin.rs
@@ -1114,7 +1114,7 @@ impl UnusedMut {
match mode {
ast::BindByValue(ast::MutMutable) => {
if !token::get_ident(ident).get().starts_with("_") {
- mutables.insert_or_update_with(ident.name as uint,
+ mutables.insert_or_update_with(ident.name.uint(),
vec!(id), |_, old| { old.push(id); });
}
}
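
This call site changes because `Name` becomes a tuple struct later in the patch, so the old `ident.name as uint` cast no longer compiles and an accessor is used instead. A self-contained sketch of that newtype-plus-accessor pattern, with hypothetical names and `usize` in place of `uint`:

```rust
/// Hypothetical stand-in for the `Name` newtype introduced by this patch.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Name(u32);

impl Name {
    /// Accessor replacing the old `name as uint` cast.
    fn as_usize(self) -> usize {
        let Name(n) = self;
        n as usize
    }
}

fn main() {
    let n = Name(42);
    // A plain `n as usize` would not compile for a struct; the accessor is used instead.
    assert_eq!(n.as_usize(), 42);
    println!("{:?} -> {}", n, n.as_usize());
}
```
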
diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs
index 8a2b95ae463b4..cc41223688ee0 100644
--- a/src/librustc/metadata/decoder.rs
+++ b/src/librustc/metadata/decoder.rs
@@ -323,7 +323,7 @@ fn item_name(intr: &IdentInterner, item: ebml::Doc) -> ast::Ident {
let string = name.as_str_slice();
match intr.find_equiv(&string) {
None => token::str_to_ident(string),
- Some(val) => ast::Ident::new(val as ast::Name),
+ Some(val) => ast::Ident::new(val),
}
}
diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs
index 11b1687dc5599..fb2b4951ea3d6 100644
--- a/src/librustc/middle/astencode.rs
+++ b/src/librustc/middle/astencode.rs
@@ -1523,7 +1523,7 @@ fn test_basic() {
fn foo() {}
));
}
-
+/* NOTE: When there's a snapshot, update this (yay quasiquoter!)
#[test]
fn test_smalltalk() {
let cx = mk_ctxt();
@@ -1531,6 +1531,7 @@ fn test_smalltalk() {
fn foo() -> int { 3 + 4 } // first smalltalk program ever executed.
));
}
+*/
#[test]
fn test_more() {
diff --git a/src/librustc/middle/trans/consts.rs b/src/librustc/middle/trans/consts.rs
index c35767f99a835..11a8207f8c43e 100644
--- a/src/librustc/middle/trans/consts.rs
+++ b/src/librustc/middle/trans/consts.rs
@@ -42,6 +42,7 @@ use syntax::{ast, ast_util};
pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
-> ValueRef {
let _icx = push_ctxt("trans_lit");
+ debug!("const_lit: {}", lit);
match lit.node {
ast::LitByte(b) => C_integral(Type::uint_from_ty(cx, ast::TyU8), b as u64, false),
ast::LitChar(i) => C_integral(Type::char(cx), i as u64, false),
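
The new `debug!` line can format `lit` directly only because `Lit_` (and `StrStyle`, `Spanned`) gain `Show` derives elsewhere in this patch. A minimal sketch of the same dependency in current Rust, using a hypothetical literal enum and `Debug` in place of `Show`:

```rust
/// Hypothetical literal type; deriving Debug is what makes the logging line compile at all.
#[derive(Debug)]
enum Lit {
    Byte(u8),
    Char(char),
    Str(String),
}

fn const_lit(lit: &Lit) {
    // Stand-in for the patch's `debug!("const_lit: {}", lit)`.
    println!("const_lit: {:?}", lit);
}

fn main() {
    const_lit(&Lit::Byte(7));
    const_lit(&Lit::Char('x'));
    const_lit(&Lit::Str("hi".to_string()));
}
```
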
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 82bb1bd58a6d3..3cb5cdc043962 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -18,7 +18,6 @@ use std::io;
use syntax::parse;
use syntax::parse::lexer;
-use syntax::codemap::{BytePos, Span};
use html::escape::Escape;
@@ -59,38 +58,30 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
None => {}
}
try!(write!(out, "class='rust {}'>\n", class.unwrap_or("")));
- let mut last = BytePos(0);
let mut is_attribute = false;
let mut is_macro = false;
let mut is_macro_nonterminal = false;
loop {
let next = lexer.next_token();
- let test = if next.tok == t::EOF {lexer.pos} else {next.sp.lo};
-
- // The lexer consumes all whitespace and non-doc-comments when iterating
- // between tokens. If this token isn't directly adjacent to our last
- // token, then we need to emit the whitespace/comment.
- //
- // If the gap has any '/' characters then we consider the whole thing a
- // comment. This will classify some whitespace as a comment, but that
- // doesn't matter too much for syntax highlighting purposes.
- if test > last {
- let snip = sess.span_diagnostic.cm.span_to_snippet(Span {
- lo: last,
- hi: test,
- expn_info: None,
- }).unwrap();
- if snip.as_slice().contains("/") {
- try!(write!(out, "{}",
- Escape(snip.as_slice())));
- } else {
- try!(write!(out, "{}", Escape(snip.as_slice())));
- }
- }
- last = next.sp.hi;
+
+ let snip = |sp| sess.span_diagnostic.cm.span_to_snippet(sp).unwrap();
+
if next.tok == t::EOF { break }
let klass = match next.tok {
+ t::WS => {
+ try!(write!(out, "{}", Escape(snip(next.sp).as_slice())));
+ continue
+ },
+ t::COMMENT => {
+ try!(write!(out, "{}",
+ Escape(snip(next.sp).as_slice())));
+ continue
+ },
+ t::SHEBANG(s) => {
+ try!(write!(out, "{}", Escape(s.as_str())));
+ continue
+ },
// If this '&' token is directly adjacent to another token, assume
// that it's the address-of operator instead of the and-operator.
// This allows us to give all pointers their own class (`Box` and
@@ -144,8 +135,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
t::LIT_CHAR(..) | t::LIT_STR(..) | t::LIT_STR_RAW(..) => "string",
// number literals
- t::LIT_INT(..) | t::LIT_UINT(..) | t::LIT_INT_UNSUFFIXED(..) |
- t::LIT_FLOAT(..) | t::LIT_FLOAT_UNSUFFIXED(..) => "number",
+ t::LIT_INTEGER(..) | t::LIT_FLOAT(..) => "number",
// keywords are also included in the identifier set
t::IDENT(ident, _is_mod_sep) => {
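
The rewrite relies on the lexer now emitting whitespace, comments, and shebangs as ordinary tokens (`t::WS`, `t::COMMENT`, `t::SHEBANG`), so the highlighter no longer has to reconstruct the gaps between token spans. A simplified, hypothetical sketch of that classification loop in current Rust:

```rust
/// Hypothetical, much-simplified token stream standing in for the real lexer.
#[derive(Debug)]
enum Tok<'a> {
    Ws(&'a str),
    Comment(&'a str),
    Ident(&'a str),
    Eof,
}

/// Whitespace is emitted verbatim; other tokens get a CSS class.
fn classify(tok: &Tok) -> Option<&'static str> {
    match tok {
        Tok::Ws(_) => None,
        Tok::Comment(_) => Some("comment"),
        Tok::Ident(_) => Some("ident"),
        Tok::Eof => None,
    }
}

fn main() {
    let toks = [Tok::Ident("fn"), Tok::Ws(" "), Tok::Comment("// hi"), Tok::Eof];
    for t in &toks {
        if let Tok::Eof = t { break; }
        match classify(t) {
            Some(class) => println!("<span class='{}'>{:?}</span>", class, t),
            None => println!("{:?}", t),
        }
    }
}
```
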
diff --git a/src/libsyntax/abi.rs b/src/libsyntax/abi.rs
index 9771bc9386b16..5aaf7ed3dba5d 100644
--- a/src/libsyntax/abi.rs
+++ b/src/libsyntax/abi.rs
@@ -60,9 +60,12 @@ pub struct AbiData {
}
pub enum AbiArchitecture {
- RustArch, // Not a real ABI (e.g., intrinsic)
- AllArch, // An ABI that specifies cross-platform defaults (e.g., "C")
- Archs(u32) // Multiple architectures (bitset)
+ /// Not a real ABI (e.g., intrinsic)
+ RustArch,
+ /// An ABI that specifies cross-platform defaults (e.g., "C")
+ AllArch,
+ /// Multiple architectures (bitset)
+ Archs(u32)
}
static AbiDatas: &'static [AbiData] = &[
@@ -84,21 +87,13 @@ static AbiDatas: &'static [AbiData] = &[
AbiData {abi: RustIntrinsic, name: "rust-intrinsic", abi_arch: RustArch},
];
+/// Iterates through each of the defined ABIs.
fn each_abi(op: |abi: Abi| -> bool) -> bool {
- /*!
- *
- * Iterates through each of the defined ABIs.
- */
-
AbiDatas.iter().advance(|abi_data| op(abi_data.abi))
}
+/// Returns the ABI with the given name (if any).
pub fn lookup(name: &str) -> Option {
- /*!
- *
- * Returns the ABI with the given name (if any).
- */
-
let mut res = None;
each_abi(|abi| {
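
As context for the two functions whose comments are converted here, a small self-contained sketch (hypothetical names, current Rust) of the internal-iterator pattern that `each_abi` and `lookup` use, where the callback returns `false` to stop early:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Abi { C, Rust, RustIntrinsic }

static ABI_DATAS: &[(&str, Abi)] =
    &[("C", Abi::C), ("Rust", Abi::Rust), ("rust-intrinsic", Abi::RustIntrinsic)];

/// Iterates through each of the defined ABIs; stops when `op` returns false.
fn each_abi(mut op: impl FnMut(&str, Abi) -> bool) -> bool {
    ABI_DATAS.iter().all(|&(name, abi)| op(name, abi))
}

/// Returns the ABI with the given name (if any).
fn lookup(name: &str) -> Option<Abi> {
    let mut res = None;
    each_abi(|n, abi| {
        if n == name { res = Some(abi); false } else { true }
    });
    res
}

fn main() {
    assert_eq!(lookup("Rust"), Some(Abi::Rust));
    assert_eq!(lookup("stdcall"), None);
    println!("{:?}", lookup("C"));
}
```
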
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 5f3adbdb54df4..ebfc45d22cee9 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -24,7 +24,8 @@ use std::rc::Rc;
use std::gc::{Gc, GC};
use serialize::{Encodable, Decodable, Encoder, Decoder};
-/// A pointer abstraction. FIXME(eddyb) #10676 use Rc in the future.
+/// A pointer abstraction.
+// FIXME(eddyb) #10676 use Rc in the future.
pub type P = Gc;
#[allow(non_snake_case_functions)]
@@ -36,11 +37,11 @@ pub fn P(value: T) -> P {
// FIXME #6993: in librustc, uses of "ident" should be replaced
// by just "Name".
-// an identifier contains a Name (index into the interner
-// table) and a SyntaxContext to track renaming and
-// macro expansion per Flatt et al., "Macros
-// That Work Together"
-#[deriving(Clone, Hash, PartialOrd, Eq, Ord, Show)]
+/// An identifier contains a Name (index into the interner
+/// table) and a SyntaxContext to track renaming and
+/// macro expansion per Flatt et al., "Macros
+/// That Work Together"
+#[deriving(Clone, Hash, PartialOrd, Eq, Ord)]
pub struct Ident {
pub name: Name,
pub ctxt: SyntaxContext
@@ -49,6 +50,16 @@ pub struct Ident {
impl Ident {
/// Construct an identifier with the given name and an empty context:
pub fn new(name: Name) -> Ident { Ident {name: name, ctxt: EMPTY_CTXT}}
+
+ pub fn as_str<'a>(&'a self) -> &'a str {
+ self.name.as_str()
+ }
+}
+
+impl Show for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "\"{}\"#{}", token::get_ident(*self).get(), self.ctxt)
+ }
}
impl PartialEq for Ident {
@@ -95,7 +106,26 @@ pub static ILLEGAL_CTXT : SyntaxContext = 1;
/// A name is a part of an identifier, representing a string or gensym. It's
/// the result of interning.
-pub type Name = u32;
+#[deriving(Eq, Ord, PartialEq, PartialOrd, Hash, Encodable, Decodable, Clone, Show)]
+pub struct Name(pub u32);
+
+impl Name {
+ pub fn as_str<'a>(&'a self) -> &'a str {
+ unsafe {
+ // FIXME #12938: can't use copy_lifetime since &str isn't a &T
+ ::std::mem::transmute(token::get_name(*self).get())
+ }
+ }
+
+ pub fn uint(&self) -> uint {
+ let Name(nm) = *self;
+ nm as uint
+ }
+
+ pub fn ident(&self) -> Ident {
+ Ident { name: *self, ctxt: 0 }
+ }
+}
/// A mark represents a unique id associated with a macro expansion
pub type Mrk = u32;
@@ -122,10 +152,9 @@ pub struct Lifetime {
pub name: Name
}
-// a "Path" is essentially Rust's notion of a name;
-// for instance: std::cmp::PartialEq . It's represented
-// as a sequence of identifiers, along with a bunch
-// of supporting information.
+/// A "Path" is essentially Rust's notion of a name; for instance:
+/// std::cmp::PartialEq. It's represented as a sequence of identifiers,
+/// along with a bunch of supporting information.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct Path {
pub span: Span,
@@ -163,15 +192,15 @@ pub struct DefId {
pub static LOCAL_CRATE: CrateNum = 0;
pub static CRATE_NODE_ID: NodeId = 0;
-// When parsing and doing expansions, we initially give all AST nodes this AST
-// node value. Then later, in the renumber pass, we renumber them to have
-// small, positive ids.
+/// When parsing and doing expansions, we initially give all AST nodes this AST
+/// node value. Then later, in the renumber pass, we renumber them to have
+/// small, positive ids.
pub static DUMMY_NODE_ID: NodeId = -1;
-// The AST represents all type param bounds as types.
-// typeck::collect::compute_bounds matches these against
-// the "special" built-in traits (see middle::lang_items) and
-// detects Copy, Send and Share.
+/// The AST represents all type param bounds as types.
+/// typeck::collect::compute_bounds matches these against
+/// the "special" built-in traits (see middle::lang_items) and
+/// detects Copy, Send and Share.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum TyParamBound {
TraitTyParamBound(TraitRef),
@@ -210,9 +239,9 @@ impl Generics {
}
}
-// The set of MetaItems that define the compilation environment of the crate,
-// used to drive conditional compilation
-pub type CrateConfig = Vec>;
+/// The set of MetaItems that define the compilation environment of the crate,
+/// used to drive conditional compilation
+pub type CrateConfig = Vec> ;
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct Crate {
@@ -289,13 +318,13 @@ pub enum BindingMode {
pub enum Pat_ {
PatWild,
PatWildMulti,
- // A PatIdent may either be a new bound variable,
- // or a nullary enum (in which case the third field
- // is None).
- // In the nullary enum case, the parser can't determine
- // which it is. The resolver determines this, and
- // records this pattern's NodeId in an auxiliary
- // set (of "PatIdents that refer to nullary enums")
+ /// A PatIdent may either be a new bound variable,
+ /// or a nullary enum (in which case the third field
+ /// is None).
+ /// In the nullary enum case, the parser can't determine
+ /// which it is. The resolver determines this, and
+ /// records this pattern's NodeId in an auxiliary
+ /// set (of "PatIdents that refer to nullary enums")
PatIdent(BindingMode, SpannedIdent, Option>),
PatEnum(Path, Option>>), /* "none" means a * pattern where
* we don't bind the fields to names */
@@ -305,8 +334,8 @@ pub enum Pat_ {
PatRegion(Gc), // reference pattern
PatLit(Gc),
PatRange(Gc, Gc),
- // [a, b, ..i, y, z] is represented as
- // PatVec(~[a, b], Some(i), ~[y, z])
+ /// [a, b, ..i, y, z] is represented as:
+ /// PatVec(~[a, b], Some(i), ~[y, z])
PatVec(Vec>, Option>, Vec>),
PatMac(Mac),
}
@@ -319,9 +348,12 @@ pub enum Mutability {
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum ExprVstore {
- ExprVstoreUniq, // ~[1,2,3,4]
- ExprVstoreSlice, // &[1,2,3,4]
- ExprVstoreMutSlice, // &mut [1,2,3,4]
+ /// ~[1, 2, 3, 4]
+ ExprVstoreUniq,
+ /// &[1, 2, 3, 4]
+ ExprVstoreSlice,
+ /// &mut [1, 2, 3, 4]
+ ExprVstoreMutSlice,
}
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
@@ -359,16 +391,16 @@ pub type Stmt = Spanned;
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum Stmt_ {
- // could be an item or a local (let) binding:
+ /// Could be an item or a local (let) binding:
StmtDecl(Gc, NodeId),
- // expr without trailing semi-colon (must have unit type):
+ /// Expr without trailing semi-colon (must have unit type):
StmtExpr(Gc, NodeId),
- // expr with trailing semi-colon (may have any type):
+ /// Expr with trailing semi-colon (may have any type):
StmtSemi(Gc, NodeId),
- // bool: is there a trailing sem-colon?
+ /// bool: is there a trailing semi-colon?
StmtMac(Mac, bool),
}
@@ -397,9 +429,9 @@ pub type Decl = Spanned;
#[deriving(PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum Decl_ {
- // a local (let) binding:
+ /// A local (let) binding:
DeclLocal(Gc),
- // an item binding:
+ /// An item binding:
DeclItem(Gc),
}
@@ -443,7 +475,7 @@ pub struct Expr {
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum Expr_ {
ExprVstore(Gc, ExprVstore),
- // First expr is the place; second expr is the value.
+ /// First expr is the place; second expr is the value.
ExprBox(Gc, Gc),
ExprVec(Vec>),
ExprCall(Gc, Vec>),
@@ -483,130 +515,127 @@ pub enum Expr_ {
ExprMac(Mac),
- // A struct literal expression.
+ /// A struct literal expression.
ExprStruct(Path, Vec , Option> /* base */),
- // A vector literal constructed from one repeated element.
+ /// A vector literal constructed from one repeated element.
ExprRepeat(Gc /* element */, Gc /* count */),
- // No-op: used solely so we can pretty-print faithfully
+ /// No-op: used solely so we can pretty-print faithfully
ExprParen(Gc)
}
-// When the main rust parser encounters a syntax-extension invocation, it
-// parses the arguments to the invocation as a token-tree. This is a very
-// loose structure, such that all sorts of different AST-fragments can
-// be passed to syntax extensions using a uniform type.
-//
-// If the syntax extension is an MBE macro, it will attempt to match its
-// LHS "matchers" against the provided token tree, and if it finds a
-// match, will transcribe the RHS token tree, splicing in any captured
-// macro_parser::matched_nonterminals into the TTNonterminals it finds.
-//
-// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq
-// makes any real sense. You could write them elsewhere but nothing
-// else knows what to do with them, so you'll probably get a syntax
-// error.
-//
+/// When the main rust parser encounters a syntax-extension invocation, it
+/// parses the arguments to the invocation as a token-tree. This is a very
+/// loose structure, such that all sorts of different AST-fragments can
+/// be passed to syntax extensions using a uniform type.
+///
+/// If the syntax extension is an MBE macro, it will attempt to match its
+/// LHS "matchers" against the provided token tree, and if it finds a
+/// match, will transcribe the RHS token tree, splicing in any captured
+/// macro_parser::matched_nonterminals into the TTNonterminals it finds.
+///
+/// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq
+/// makes any real sense. You could write them elsewhere but nothing
+/// else knows what to do with them, so you'll probably get a syntax
+/// error.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
#[doc="For macro invocations; parsing is delegated to the macro"]
pub enum TokenTree {
- // a single token
+ /// A single token
TTTok(Span, ::parse::token::Token),
- // a delimited sequence (the delimiters appear as the first
- // and last elements of the vector)
+ /// A delimited sequence (the delimiters appear as the first
+ /// and last elements of the vector)
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
TTDelim(Rc>),
// These only make sense for right-hand-sides of MBE macros:
- // a kleene-style repetition sequence with a span, a TTForest,
- // an optional separator, and a boolean where true indicates
- // zero or more (..), and false indicates one or more (+).
+ /// A Kleene-style repetition sequence with a span, a TTForest,
+ /// an optional separator, and a boolean where true indicates
+ /// zero or more (..), and false indicates one or more (+).
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
TTSeq(Span, Rc>, Option<::parse::token::Token>, bool),
- // a syntactic variable that will be filled in by macro expansion.
+ /// A syntactic variable that will be filled in by macro expansion.
TTNonterminal(Span, Ident)
}
-//
-// Matchers are nodes defined-by and recognized-by the main rust parser and
-// language, but they're only ever found inside syntax-extension invocations;
-// indeed, the only thing that ever _activates_ the rules in the rust parser
-// for parsing a matcher is a matcher looking for the 'matchers' nonterminal
-// itself. Matchers represent a small sub-language for pattern-matching
-// token-trees, and are thus primarily used by the macro-defining extension
-// itself.
-//
-// MatchTok
-// --------
-//
-// A matcher that matches a single token, denoted by the token itself. So
-// long as there's no $ involved.
-//
-//
-// MatchSeq
-// --------
-//
-// A matcher that matches a sequence of sub-matchers, denoted various
-// possible ways:
-//
-// $(M)* zero or more Ms
-// $(M)+ one or more Ms
-// $(M),+ one or more comma-separated Ms
-// $(A B C);* zero or more semi-separated 'A B C' seqs
-//
-//
-// MatchNonterminal
-// -----------------
-//
-// A matcher that matches one of a few interesting named rust
-// nonterminals, such as types, expressions, items, or raw token-trees. A
-// black-box matcher on expr, for example, binds an expr to a given ident,
-// and that ident can re-occur as an interpolation in the RHS of a
-// macro-by-example rule. For example:
-//
-// $foo:expr => 1 + $foo // interpolate an expr
-// $foo:tt => $foo // interpolate a token-tree
-// $foo:tt => bar! $foo // only other valid interpolation
-// // is in arg position for another
-// // macro
-//
-// As a final, horrifying aside, note that macro-by-example's input is
-// also matched by one of these matchers. Holy self-referential! It is matched
-// by a MatchSeq, specifically this one:
-//
-// $( $lhs:matchers => $rhs:tt );+
-//
-// If you understand that, you have closed to loop and understand the whole
-// macro system. Congratulations.
-//
+/// Matchers are nodes defined-by and recognized-by the main rust parser and
+/// language, but they're only ever found inside syntax-extension invocations;
+/// indeed, the only thing that ever _activates_ the rules in the rust parser
+/// for parsing a matcher is a matcher looking for the 'matchers' nonterminal
+/// itself. Matchers represent a small sub-language for pattern-matching
+/// token-trees, and are thus primarily used by the macro-defining extension
+/// itself.
+///
+/// MatchTok
+/// --------
+///
+/// A matcher that matches a single token, denoted by the token itself. So
+/// long as there's no $ involved.
+///
+///
+/// MatchSeq
+/// --------
+///
+/// A matcher that matches a sequence of sub-matchers, denoted various
+/// possible ways:
+///
+/// $(M)* zero or more Ms
+/// $(M)+ one or more Ms
+/// $(M),+ one or more comma-separated Ms
+/// $(A B C);* zero or more semi-separated 'A B C' seqs
+///
+///
+/// MatchNonterminal
+/// -----------------
+///
+/// A matcher that matches one of a few interesting named rust
+/// nonterminals, such as types, expressions, items, or raw token-trees. A
+/// black-box matcher on expr, for example, binds an expr to a given ident,
+/// and that ident can re-occur as an interpolation in the RHS of a
+/// macro-by-example rule. For example:
+///
+/// $foo:expr => 1 + $foo // interpolate an expr
+/// $foo:tt => $foo // interpolate a token-tree
+/// $foo:tt => bar! $foo // only other valid interpolation
+/// // is in arg position for another
+/// // macro
+///
+/// As a final, horrifying aside, note that macro-by-example's input is
+/// also matched by one of these matchers. Holy self-referential! It is matched
+/// by a MatchSeq, specifically this one:
+///
+/// $( $lhs:matchers => $rhs:tt );+
+///
+/// If you understand that, you have closed the loop and understand the whole
+/// macro system. Congratulations.
pub type Matcher = Spanned;
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum Matcher_ {
- // match one token
+ /// Match one token
MatchTok(::parse::token::Token),
- // match repetitions of a sequence: body, separator, zero ok?,
- // lo, hi position-in-match-array used:
+ /// Match repetitions of a sequence: body, separator, zero ok?,
+ /// lo, hi position-in-match-array used:
MatchSeq(Vec , Option<::parse::token::Token>, bool, uint, uint),
- // parse a Rust NT: name to bind, name of NT, position in match array:
+ /// Parse a Rust NT: name to bind, name of NT, position in match array:
MatchNonterminal(Ident, Ident, uint)
}
pub type Mac = Spanned;
-// represents a macro invocation. The Path indicates which macro
-// is being invoked, and the vector of token-trees contains the source
-// of the macro invocation.
-// There's only one flavor, now, so this could presumably be simplified.
+/// Represents a macro invocation. The Path indicates which macro
+/// is being invoked, and the vector of token-trees contains the source
+/// of the macro invocation.
+/// There's only one flavor, now, so this could presumably be simplified.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum Mac_ {
MacInvocTT(Path, Vec , SyntaxContext), // new macro-invocation
}
-#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
+#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub enum StrStyle {
CookedStr,
RawStr(uint)
@@ -614,7 +643,7 @@ pub enum StrStyle {
pub type Lit = Spanned;
-#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
+#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub enum Lit_ {
LitStr(InternedString, StrStyle),
LitBinary(Rc >),
@@ -659,11 +688,10 @@ pub struct TypeMethod {
pub vis: Visibility,
}
-/// Represents a method declaration in a trait declaration, possibly
-/// including a default implementation
-// A trait method is either required (meaning it doesn't have an
-// implementation, just a signature) or provided (meaning it has a default
-// implementation).
+/// Represents a method declaration in a trait declaration, possibly including
+/// a default implementation. A trait method is either required (meaning it
+/// doesn't have an implementation, just a signature) or provided (meaning it
+/// has a default implementation).
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum TraitMethod {
Required(TypeMethod),
@@ -685,6 +713,16 @@ impl fmt::Show for IntTy {
}
}
+impl IntTy {
+ pub fn suffix_len(&self) -> uint {
+ match *self {
+ TyI => 1,
+ TyI8 => 2,
+ TyI16 | TyI32 | TyI64 => 3,
+ }
+ }
+}
+
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum UintTy {
TyU,
@@ -694,6 +732,16 @@ pub enum UintTy {
TyU64,
}
+impl UintTy {
+ pub fn suffix_len(&self) -> uint {
+ match *self {
+ TyU => 1,
+ TyU8 => 2,
+ TyU16 | TyU32 | TyU64 => 3,
+ }
+ }
+}
+
impl fmt::Show for UintTy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", ast_util::uint_ty_to_string(*self, None))
@@ -712,6 +760,14 @@ impl fmt::Show for FloatTy {
}
}
+impl FloatTy {
+ pub fn suffix_len(&self) -> uint {
+ match *self {
+ TyF32 | TyF64 => 3, // add F128 handling here
+ }
+ }
+}
+
// NB PartialEq method appears below.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct Ty {
@@ -720,7 +776,7 @@ pub struct Ty {
pub span: Span,
}
-// Not represented directly in the AST, referred to by name through a ty_path.
+/// Not represented directly in the AST, referred to by name through a ty_path.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum PrimTy {
TyInt(IntTy),
@@ -753,10 +809,10 @@ pub struct ClosureTy {
pub fn_style: FnStyle,
pub onceness: Onceness,
pub decl: P,
- // Optional optvec distinguishes between "fn()" and "fn:()" so we can
- // implement issue #7264. None means "fn()", which means infer a default
- // bound based on pointer sigil during typeck. Some(Empty) means "fn:()",
- // which means use no bounds (e.g., not even Owned on a ~fn()).
+ /// Optional optvec distinguishes between "fn()" and "fn:()" so we can
+ /// implement issue #7264. None means "fn()", which means infer a default
+ /// bound based on pointer sigil during typeck. Some(Empty) means "fn:()",
+ /// which means use no bounds (e.g., not even Owned on a ~fn()).
pub bounds: Option>,
}
@@ -789,11 +845,11 @@ pub enum Ty_ {
TyUnboxedFn(Gc),
TyTup(Vec
> ),
TyPath(Path, Option>, NodeId), // for #7264; see above
- // No-op; kept solely so that we can pretty-print faithfully
+ /// No-op; kept solely so that we can pretty-print faithfully
TyParen(P),
TyTypeof(Gc),
- // TyInfer means the type should be inferred instead of it having been
- // specified. This can appear anywhere in a type.
+ /// TyInfer means the type should be inferred instead of it having been
+ /// specified. This can appear anywhere in a type.
TyInfer,
}
@@ -854,8 +910,10 @@ pub struct FnDecl {
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum FnStyle {
- UnsafeFn, // declared with "unsafe fn"
- NormalFn, // declared with "fn"
+ /// Declared with "unsafe fn"
+ UnsafeFn,
+ /// Declared with "fn"
+ NormalFn,
}
impl fmt::Show for FnStyle {
@@ -869,18 +927,24 @@ impl fmt::Show for FnStyle {
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum RetStyle {
- NoReturn, // functions with return type _|_ that always
- // raise an error or exit (i.e. never return to the caller)
- Return, // everything else
+ /// Functions with return type ! that always
+ /// raise an error or exit (i.e. never return to the caller)
+ NoReturn,
+ /// Everything else
+ Return,
}
/// Represents the kind of 'self' associated with a method
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum ExplicitSelf_ {
- SelfStatic, // no self
- SelfValue(Ident), // `self`
- SelfRegion(Option, Mutability, Ident), // `&'lt self`, `&'lt mut self`
- SelfUniq(Ident), // `~self`
+ /// No self
+ SelfStatic,
+ /// `self`
+ SelfValue(Ident),
+ /// `&'lt self`, `&'lt mut self`
+ SelfRegion(Option, Mutability, Ident),
+ /// `~self`
+ SelfUniq(Ident)
}
pub type ExplicitSelf = Spanned;
@@ -959,17 +1023,17 @@ pub type ViewPath = Spanned;
#[deriving(PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum ViewPath_ {
- // quux = foo::bar::baz
- //
- // or just
- //
- // foo::bar::baz (with 'baz =' implicitly on the left)
+ /// `quux = foo::bar::baz`
+ ///
+ /// or just
+ ///
+ /// `foo::bar::baz` (with 'baz =' implicitly on the left)
ViewPathSimple(Ident, Path, NodeId),
- // foo::bar::*
+ /// `foo::bar::*`
ViewPathGlob(Path, NodeId),
- // foo::bar::{a,b,c}
+ /// `foo::bar::{a,b,c}`
ViewPathList(Path, Vec , NodeId)
}
@@ -983,20 +1047,20 @@ pub struct ViewItem {
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum ViewItem_ {
- // ident: name used to refer to this crate in the code
- // optional (InternedString,StrStyle): if present, this is a location
- // (containing arbitrary characters) from which to fetch the crate sources
- // For example, extern crate whatever = "github.com/rust-lang/rust"
+ /// Ident: name used to refer to this crate in the code
+ /// optional (InternedString,StrStyle): if present, this is a location
+ /// (containing arbitrary characters) from which to fetch the crate sources
+ /// For example, extern crate whatever = "github.com/rust-lang/rust"
ViewItemExternCrate(Ident, Option<(InternedString,StrStyle)>, NodeId),
ViewItemUse(Gc),
}
-// Meta-data associated with an item
+/// Meta-data associated with an item
pub type Attribute = Spanned;
-// Distinguishes between Attributes that decorate items and Attributes that
-// are contained as statements within items. These two cases need to be
-// distinguished for pretty-printing.
+/// Distinguishes between Attributes that decorate items and Attributes that
+/// are contained as statements within items. These two cases need to be
+/// distinguished for pretty-printing.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum AttrStyle {
AttrOuter,
@@ -1006,7 +1070,7 @@ pub enum AttrStyle {
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct AttrId(pub uint);
-// doc-comments are promoted to attributes that have is_sugared_doc = true
+/// Doc-comments are promoted to attributes that have is_sugared_doc = true
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct Attribute_ {
pub id: AttrId,
@@ -1015,13 +1079,12 @@ pub struct Attribute_ {
pub is_sugared_doc: bool,
}
-/*
- TraitRef's appear in impls.
- resolve maps each TraitRef's ref_id to its defining trait; that's all
- that the ref_id is for. The impl_id maps to the "self type" of this impl.
- If this impl is an ItemImpl, the impl_id is redundant (it could be the
- same as the impl's node id).
- */
+
+/// TraitRef's appear in impls.
+/// resolve maps each TraitRef's ref_id to its defining trait; that's all
+/// that the ref_id is for. The impl_id maps to the "self type" of this impl.
+/// If this impl is an ItemImpl, the impl_id is redundant (it could be the
+/// same as the impl's node id).
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct TraitRef {
pub path: Path,
@@ -1065,7 +1128,8 @@ pub type StructField = Spanned;
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum StructFieldKind {
NamedField(Ident, Visibility),
- UnnamedField(Visibility), // element of a tuple-like struct
+ /// Element of a tuple-like struct
+ UnnamedField(Visibility),
}
impl StructFieldKind {
@@ -1079,12 +1143,15 @@ impl StructFieldKind {
#[deriving(PartialEq, Eq, Encodable, Decodable, Hash)]
pub struct StructDef {
- pub fields: Vec, /* fields, not including ctor */
- /* ID of the constructor. This is only used for tuple- or enum-like
- * structs. */
+ /// Fields, not including ctor
+ pub fields: Vec,
+ /// ID of the constructor. This is only used for tuple- or enum-like
+ /// structs.
pub ctor_id: Option,
- pub super_struct: Option
>, // Super struct, if specified.
- pub is_virtual: bool, // True iff the struct may be inherited from.
+ /// Super struct, if specified.
+ pub super_struct: Option
>,
+ /// True iff the struct may be inherited from.
+ pub is_virtual: bool,
}
/*
@@ -1120,7 +1187,7 @@ pub enum Item_ {
Option, // (optional) trait this impl implements
P, // self
Vec>),
- // a macro invocation (which includes macro definition)
+ /// A macro invocation (which includes macro definition)
ItemMac(Mac),
}
@@ -1140,9 +1207,9 @@ pub enum ForeignItem_ {
ForeignItemStatic(P, /* is_mutbl */ bool),
}
-// The data we save and restore about an inlined item or method. This is not
-// part of the AST that we parse from a file, but it becomes part of the tree
-// that we trans.
+/// The data we save and restore about an inlined item or method. This is not
+/// part of the AST that we parse from a file, but it becomes part of the tree
+/// that we trans.
#[deriving(PartialEq, Eq, Encodable, Decodable, Hash)]
pub enum InlinedItem {
IIItem(Gc),
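
Among the ast.rs changes, `Show` for `Ident` moves from a derive to a manual impl that prints the interned name together with its syntax context as `"name"#ctxt`. A minimal sketch of that kind of hand-written formatting impl in current Rust, with a hypothetical, simplified `Ident`:

```rust
use std::fmt;

/// Hypothetical mirror of the patch's `Ident`; the manual impl plays the role
/// of the old `Show` impl, printing the name and its syntax context.
struct Ident {
    name: String,
    ctxt: u32,
}

impl fmt::Display for Ident {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "\"{}\"#{}", self.name, self.ctxt)
    }
}

fn main() {
    let id = Ident { name: "foo".to_string(), ctxt: 3 };
    assert_eq!(id.to_string(), "\"foo\"#3");
    println!("{}", id);
}
```
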
diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs
index c95ea4a24aadb..25c8e81bdbc91 100644
--- a/src/libsyntax/ast_map.rs
+++ b/src/libsyntax/ast_map.rs
@@ -112,13 +112,13 @@ pub enum Node {
NodeLifetime(Gc),
}
-// The odd layout is to bring down the total size.
+/// The odd layout is to bring down the total size.
#[deriving(Clone)]
enum MapEntry {
- // Placeholder for holes in the map.
+ /// Placeholder for holes in the map.
NotPresent,
- // All the node types, with a parent ID.
+ /// All the node types, with a parent ID.
EntryItem(NodeId, Gc),
EntryForeignItem(NodeId, Gc),
EntryTraitMethod(NodeId, Gc),
@@ -133,14 +133,14 @@ enum MapEntry {
EntryStructCtor(NodeId, Gc),
EntryLifetime(NodeId, Gc),
- // Roots for node trees.
+ /// Roots for node trees.
RootCrate,
RootInlinedParent(P)
}
struct InlinedParent {
path: Vec ,
- // Required by NodeTraitMethod and NodeMethod.
+ /// Required by NodeTraitMethod and NodeMethod.
def_id: DefId
}
@@ -243,7 +243,7 @@ impl Map {
ItemForeignMod(ref nm) => Some(nm.abi),
_ => None
},
// Wrong but OK, because the only inlined foreign items are intrinsics.
Some(RootInlinedParent(_)) => Some(abi::RustIntrinsic),
_ => None
};
@@ -432,8 +432,8 @@ pub trait FoldOps {
pub struct Ctx<'a, F> {
map: &'a Map,
- // The node in which we are currently mapping (an item or a method).
- // When equal to DUMMY_NODE_ID, the next mapped node becomes the parent.
+ /// The node in which we are currently mapping (an item or a method).
+ /// When equal to DUMMY_NODE_ID, the next mapped node becomes the parent.
parent: NodeId,
fold_ops: F
}
@@ -618,9 +618,9 @@ pub fn map_crate(krate: Crate, fold_ops: F) -> (Crate, Map) {
(krate, map)
}
-// Used for items loaded from external crate that are being inlined into this
-// crate. The `path` should be the path to the item but should not include
-// the item itself.
+/// Used for items loaded from external crate that are being inlined into this
+/// crate. The `path` should be the path to the item but should not include
+/// the item itself.
pub fn map_decoded_item(map: &Map,
path: Vec ,
fold_ops: F,
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index 57c60b4a94903..13fe8a1506459 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -101,8 +101,8 @@ pub fn is_path(e: Gc) -> bool {
return match e.node { ExprPath(_) => true, _ => false };
}
-// Get a string representation of a signed int type, with its value.
-// We want to avoid "45int" and "-3int" in favor of "45" and "-3"
+/// Get a string representation of a signed int type, with its value.
+/// We want to avoid "45int" and "-3int" in favor of "45" and "-3"
pub fn int_ty_to_string(t: IntTy, val: Option) -> String {
let s = match t {
TyI if val.is_some() => "i",
@@ -131,8 +131,8 @@ pub fn int_ty_max(t: IntTy) -> u64 {
}
}
-// Get a string representation of an unsigned int type, with its value.
-// We want to avoid "42uint" in favor of "42u"
+/// Get a string representation of an unsigned int type, with its value.
+/// We want to avoid "42uint" in favor of "42u"
pub fn uint_ty_to_string(t: UintTy, val: Option) -> String {
let s = match t {
TyU if val.is_some() => "u",
@@ -249,8 +249,8 @@ pub fn public_methods(ms: Vec> ) -> Vec> {
}).collect()
}
-// extract a TypeMethod from a TraitMethod. if the TraitMethod is
-// a default, pull out the useful fields to make a TypeMethod
+/// Extract a TypeMethod from a TraitMethod. If the TraitMethod is
+/// a default, pull out the useful fields to make a TypeMethod.
pub fn trait_method_to_ty_method(method: &TraitMethod) -> TypeMethod {
match *method {
Required(ref m) => (*m).clone(),
@@ -705,7 +705,7 @@ pub fn segments_name_eq(a : &[ast::PathSegment], b : &[ast::PathSegment]) -> boo
}
}
-// Returns true if this literal is a string and false otherwise.
+/// Returns true if this literal is a string and false otherwise.
pub fn lit_is_str(lit: Gc) -> bool {
match lit.node {
LitStr(..) => true,
@@ -754,14 +754,14 @@ mod test {
#[test] fn idents_name_eq_test() {
assert!(segments_name_eq(
- [Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
+ [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
.iter().map(ident_to_segment).collect::>().as_slice(),
- [Ident{name:3,ctxt:104}, Ident{name:78,ctxt:182}]
+ [Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}]
.iter().map(ident_to_segment).collect::>().as_slice()));
assert!(!segments_name_eq(
- [Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
+ [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
.iter().map(ident_to_segment).collect::>().as_slice(),
- [Ident{name:3,ctxt:104}, Ident{name:77,ctxt:182}]
+ [Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}]
.iter().map(ident_to_segment).collect::>().as_slice()));
}
}
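
These string helpers pair with the `suffix_len` methods added to `IntTy`, `UintTy`, and `FloatTy` in ast.rs. A hypothetical miniature of that suffix bookkeeping in current Rust, written as one exhaustive match so adding a variant forces the helper to be revisited:

```rust
/// Hypothetical stand-in for the integer-type enum.
#[derive(Clone, Copy)]
enum IntTy { I, I8, I16, I32, I64 }

impl IntTy {
    fn suffix(self) -> &'static str {
        match self {
            IntTy::I => "i",
            IntTy::I8 => "i8",
            IntTy::I16 => "i16",
            IntTy::I32 => "i32",
            IntTy::I64 => "i64",
        }
    }

    /// Mirrors the shape of the `suffix_len` methods added in ast.rs.
    fn suffix_len(self) -> usize {
        self.suffix().len()
    }
}

fn main() {
    assert_eq!(IntTy::I.suffix_len(), 1);
    assert_eq!(IntTy::I16.suffix_len(), 3);
    println!("45{}", IntTy::I8.suffix()); // short "45i8" rather than the verbose "45int" form
}
```
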
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 3b2ee4e2a6134..e8b9ec9628f7d 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -46,10 +46,8 @@ pub trait AttrMetaMethods {
/// #[foo="bar"] and #[foo(bar)]
fn name(&self) -> InternedString;
- /**
- * Gets the string value if self is a MetaNameValue variant
- * containing a string, otherwise None.
- */
+ /// Gets the string value if self is a MetaNameValue variant
+ /// containing a string, otherwise None.
fn value_str(&self) -> Option;
/// Gets a list of inner meta items from a list MetaItem type.
fn meta_item_list<'a>(&'a self) -> Option<&'a [Gc]>;
@@ -420,18 +418,16 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[Gc]) {
}
-/**
- * Fold this over attributes to parse #[repr(...)] forms.
- *
- * Valid repr contents: any of the primitive integral type names (see
- * `int_type_of_word`, below) to specify the discriminant type; and `C`, to use
- * the same discriminant size that the corresponding C enum would. These are
- * not allowed on univariant or zero-variant enums, which have no discriminant.
- *
- * If a discriminant type is so specified, then the discriminant will be
- * present (before fields, if any) with that type; reprensentation
- * optimizations which would remove it will not be done.
- */
+/// Fold this over attributes to parse #[repr(...)] forms.
+///
+/// Valid repr contents: any of the primitive integral type names (see
+/// `int_type_of_word`, below) to specify the discriminant type; and `C`, to use
+/// the same discriminant size that the corresponding C enum would. These are
+/// not allowed on univariant or zero-variant enums, which have no discriminant.
+///
+/// If a discriminant type is so specified, then the discriminant will be
+/// present (before fields, if any) with that type; representation
+/// optimizations which would remove it will not be done.
pub fn find_repr_attr(diagnostic: &SpanHandler, attr: &Attribute, acc: ReprAttr)
-> ReprAttr {
let mut acc = acc;
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index b3adf1daf418c..ef4024a8f83fe 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -96,7 +96,7 @@ pub struct Span {
pub static DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_info: None };
-#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)]
+#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Spanned {
pub node: T,
pub span: Span,
@@ -252,15 +252,15 @@ pub struct FileMap {
}
impl FileMap {
- // EFFECT: register a start-of-line offset in the
- // table of line-beginnings.
- // UNCHECKED INVARIANT: these offsets must be added in the right
- // order and must be in the right places; there is shared knowledge
- // about what ends a line between this file and parse.rs
- // WARNING: pos param here is the offset relative to start of CodeMap,
- // and CodeMap will append a newline when adding a filemap without a newline at the end,
- // so the safe way to call this is with value calculated as
- // filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
+ /// EFFECT: register a start-of-line offset in the
+ /// table of line-beginnings.
+ /// UNCHECKED INVARIANT: these offsets must be added in the right
+ /// order and must be in the right places; there is shared knowledge
+ /// about what ends a line between this file and parse.rs
+ /// WARNING: pos param here is the offset relative to start of CodeMap,
+ /// and CodeMap will append a newline when adding a filemap without a newline at the end,
+ /// so the safe way to call this is with value calculated as
+ /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
pub fn next_line(&self, pos: BytePos) {
// the new charpos must be > the last one (or it's the first one).
let mut lines = self.lines.borrow_mut();;
@@ -269,7 +269,7 @@ impl FileMap {
lines.push(pos);
}
- // get a line from the list of pre-computed line-beginnings
+ /// Get a line from the list of pre-computed line-beginnings
pub fn get_line(&self, line: int) -> String {
let mut lines = self.lines.borrow_mut();
let begin: BytePos = *lines.get(line as uint) - self.start_pos;
@@ -428,9 +428,8 @@ impl CodeMap {
FileMapAndBytePos {fm: fm, pos: offset}
}
- // Converts an absolute BytePos to a CharPos relative to the filemap and above.
+ /// Converts an absolute BytePos to a CharPos relative to the filemap and above.
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
- debug!("codemap: converting {:?} to char pos", bpos);
let idx = self.lookup_filemap_idx(bpos);
let files = self.files.borrow();
let map = files.get(idx);
@@ -439,7 +438,7 @@ impl CodeMap {
let mut total_extra_bytes = 0;
for mbc in map.multibyte_chars.borrow().iter() {
- debug!("codemap: {:?}-byte char at {:?}", mbc.bytes, mbc.pos);
+ debug!("{}-byte char at {}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
// count the actual extra bytes.
@@ -514,11 +513,11 @@ impl CodeMap {
let chpos = self.bytepos_to_file_charpos(pos);
let linebpos = *f.lines.borrow().get(a);
let linechpos = self.bytepos_to_file_charpos(linebpos);
- debug!("codemap: byte pos {:?} is on the line at byte pos {:?}",
+ debug!("byte pos {} is on the line at byte pos {}",
pos, linebpos);
- debug!("codemap: char pos {:?} is on the line at char pos {:?}",
+ debug!("char pos {} is on the line at char pos {}",
chpos, linechpos);
- debug!("codemap: byte is on line: {:?}", line);
+ debug!("byte is on line: {}", line);
assert!(chpos >= linechpos);
Loc {
file: f,
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 3805390776e8d..e469f327ae8ba 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -21,7 +21,7 @@ use std::string::String;
use term::WriterWrapper;
use term;
-// maximum number of lines we will print for each error; arbitrary.
+/// Maximum number of lines we will print for each error; arbitrary.
static MAX_LINES: uint = 6u;
#[deriving(Clone)]
@@ -73,9 +73,9 @@ pub struct FatalError;
/// or `.span_bug` rather than a failed assertion, etc.
pub struct ExplicitBug;
-// a span-handler is like a handler but also
-// accepts span information for source-location
-// reporting.
+/// A span-handler is like a handler but also
+/// accepts span information for source-location
+/// reporting.
pub struct SpanHandler {
pub handler: Handler,
pub cm: codemap::CodeMap,
@@ -114,9 +114,9 @@ impl SpanHandler {
}
}
-// a handler deals with errors; certain errors
-// (fatal, bug, unimpl) may cause immediate exit,
-// others log errors for later reporting.
+/// A handler deals with errors; certain errors
+/// (fatal, bug, unimpl) may cause immediate exit,
+/// others log errors for later reporting.
pub struct Handler {
err_count: Cell,
emit: RefCell>,
@@ -442,12 +442,12 @@ fn highlight_lines(err: &mut EmitterWriter,
Ok(())
}
-// Here are the differences between this and the normal `highlight_lines`:
-// `custom_highlight_lines` will always put arrow on the last byte of the
-// span (instead of the first byte). Also, when the span is too long (more
-// than 6 lines), `custom_highlight_lines` will print the first line, then
-// dot dot dot, then last line, whereas `highlight_lines` prints the first
-// six lines.
+/// Here are the differences between this and the normal `highlight_lines`:
+/// `custom_highlight_lines` will always put the arrow on the last byte of the
+/// span (instead of the first byte). Also, when the span is too long (more
+/// than 6 lines), `custom_highlight_lines` will print the first line, then
+/// dot dot dot, then last line, whereas `highlight_lines` prints the first
+/// six lines.
fn custom_highlight_lines(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index a2a442f8b6aa7..9a5c7e86d21c6 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -278,9 +278,9 @@ pub enum SyntaxExtension {
pub type NamedSyntaxExtension = (Name, SyntaxExtension);
pub struct BlockInfo {
- // should macros escape from this scope?
+ /// Should macros escape from this scope?
pub macros_escape: bool,
- // what are the pending renames?
+ /// What are the pending renames?
pub pending_renames: mtwt::RenameList,
}
@@ -293,8 +293,8 @@ impl BlockInfo {
}
}
-// The base map of methods for expanding syntax extension
-// AST nodes into full ASTs
+/// The base map of methods for expanding syntax extension
+/// AST nodes into full ASTs
pub fn syntax_expander_table() -> SyntaxEnv {
// utility function to simplify creating NormalTT syntax extensions
fn builtin_normal_expander(f: MacroExpanderFn) -> SyntaxExtension {
@@ -398,9 +398,9 @@ pub fn syntax_expander_table() -> SyntaxEnv {
syntax_expanders
}
-// One of these is made during expansion and incrementally updated as we go;
-// when a macro expansion occurs, the resulting nodes have the backtrace()
-// -> expn_info of their expansion context stored into their span.
+/// One of these is made during expansion and incrementally updated as we go;
+/// when a macro expansion occurs, the resulting nodes have the backtrace()
+/// -> expn_info of their expansion context stored into their span.
pub struct ExtCtxt<'a> {
pub parse_sess: &'a parse::ParseSess,
pub cfg: ast::CrateConfig,
@@ -535,6 +535,9 @@ impl<'a> ExtCtxt<'a> {
pub fn ident_of(&self, st: &str) -> ast::Ident {
str_to_ident(st)
}
+ pub fn name_of(&self, st: &str) -> ast::Name {
+ token::intern(st)
+ }
}
/// Extract a string literal from the macro expanded version of `expr`,
@@ -579,9 +582,9 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
} else {
match tts[0] {
- ast::TTTok(_, token::LIT_STR(ident))
- | ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
- return Some(token::get_ident(ident).get().to_string())
+ ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
+ ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
+ return Some(parse::raw_str_lit(ident.as_str()))
}
_ => {
cx.span_err(sp,
@@ -612,11 +615,11 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
Some(es)
}
-// in order to have some notion of scoping for macros,
-// we want to implement the notion of a transformation
-// environment.
+/// In order to have some notion of scoping for macros,
+/// we want to implement the notion of a transformation
+/// environment.
-// This environment maps Names to SyntaxExtensions.
+/// This environment maps Names to SyntaxExtensions.
//impl question: how to implement it? Initially, the
// env will contain only macros, so it might be painful
@@ -633,7 +636,6 @@ struct MapChainFrame {
map: HashMap,
}
-// Only generic to make it easy to test
pub struct SyntaxEnv {
chain: Vec ,
}
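
In `get_single_str_from_tts`, string-literal tokens now carry their raw source text, so cooked literals go through `parse::str_lit` (escape processing) while raw literals go through `parse::raw_str_lit` (taken verbatim). A much-simplified, hypothetical sketch of that distinction; these helpers only gesture at the real functions:

```rust
/// Cooked literals need their escapes processed.
fn cooked_str_lit(src: &str) -> String {
    let mut out = String::new();
    let mut chars = src.chars();
    while let Some(c) = chars.next() {
        if c == '\\' {
            match chars.next() {
                Some('n') => out.push('\n'),
                Some('t') => out.push('\t'),
                Some('\\') => out.push('\\'),
                Some('"') => out.push('"'),
                Some(other) => { out.push('\\'); out.push(other); } // unhandled escapes kept as-is
                None => out.push('\\'),
            }
        } else {
            out.push(c);
        }
    }
    out
}

/// Raw literals are taken verbatim, with no escape processing.
fn raw_str_lit(src: &str) -> String {
    src.to_string()
}

fn main() {
    assert_eq!(cooked_str_lit(r"a\nb"), "a\nb");
    assert_eq!(raw_str_lit(r"a\nb"), r"a\nb");
    println!("ok");
}
```
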
diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs
index 652d593c0042c..3b34407edfeaa 100644
--- a/src/libsyntax/ext/deriving/encodable.rs
+++ b/src/libsyntax/ext/deriving/encodable.rs
@@ -8,79 +8,76 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/*!
-
-The compiler code necessary to implement the `#[deriving(Encodable)]`
-(and `Decodable`, in decodable.rs) extension. The idea here is that
-type-defining items may be tagged with `#[deriving(Encodable, Decodable)]`.
-
-For example, a type like:
-
-```ignore
-#[deriving(Encodable, Decodable)]
-struct Node { id: uint }
-```
-
-would generate two implementations like:
-
-```ignore
-impl Encodable for Node {
- fn encode(&self, s: &S) {
- s.emit_struct("Node", 1, || {
- s.emit_field("id", 0, || s.emit_uint(self.id))
- })
- }
-}
-
-impl Decodable for node_id {
- fn decode(d: &D) -> Node {
- d.read_struct("Node", 1, || {
- Node {
- id: d.read_field("x".to_string(), 0, || decode(d))
- }
- })
- }
-}
-```
-
-Other interesting scenarios are whe the item has type parameters or
-references other non-built-in types. A type definition like:
-
-```ignore
-#[deriving(Encodable, Decodable)]
-struct spanned { node: T, span: Span }
-```
-
-would yield functions like:
-
-```ignore
- impl<
- S: Encoder,
- T: Encodable
- > spanned: Encodable {
- fn encode(s: &S) {
- s.emit_rec(|| {
- s.emit_field("node", 0, || self.node.encode(s));
- s.emit_field("span", 1, || self.span.encode(s));
- })
- }
- }
-
- impl<
- D: Decoder,
- T: Decodable
- > spanned: Decodable {
- fn decode(d: &D) -> spanned {
- d.read_rec(|| {
- {
- node: d.read_field("node".to_string(), 0, || decode(d)),
- span: d.read_field("span".to_string(), 1, || decode(d)),
- }
- })
- }
- }
-```
-*/
+//! The compiler code necessary to implement the `#[deriving(Encodable)]`
+//! (and `Decodable`, in decodable.rs) extension. The idea here is that
+//! type-defining items may be tagged with `#[deriving(Encodable, Decodable)]`.
+//!
+//! For example, a type like:
+//!
+//! ```ignore
+//! #[deriving(Encodable, Decodable)]
+//! struct Node { id: uint }
+//! ```
+//!
+//! would generate two implementations like:
+//!
+//! ```ignore
+//! impl Encodable for Node {
+//! fn encode(&self, s: &S) {
+//! s.emit_struct("Node", 1, || {
+//! s.emit_field("id", 0, || s.emit_uint(self.id))
+//! })
+//! }
+//! }
+//!
+//! impl Decodable for node_id {
+//! fn decode(d: &D) -> Node {
+//! d.read_struct("Node", 1, || {
+//! Node {
+//! id: d.read_field("x".to_string(), 0, || decode(d))
+//! }
+//! })
+//! }
+//! }
+//! ```
+//!
+//! Other interesting scenarios are when the item has type parameters or
+//! references other non-built-in types. A type definition like:
+//!
+//! ```ignore
+//! #[deriving(Encodable, Decodable)]
+//! struct spanned { node: T, span: Span }
+//! ```
+//!
+//! would yield functions like:
+//!
+//! ```ignore
+//! impl<
+//! S: Encoder,
+//! T: Encodable
+//! > spanned: Encodable {
+//! fn encode(s: &S) {
+//! s.emit_rec(|| {
+//! s.emit_field("node", 0, || self.node.encode(s));
+//! s.emit_field("span", 1, || self.span.encode(s));
+//! })
+//! }
+//! }
+//!
+//! impl<
+//! D: Decoder,
+//! T: Decodable
+//! > spanned: Decodable {
+//! fn decode(d: &D) -> spanned {
+//! d.read_rec(|| {
+//! {
+//! node: d.read_field("node".to_string(), 0, || decode(d)),
+//! span: d.read_field("span".to_string(), 1, || decode(d)),
+//! }
+//! })
+//! }
+//! }
+//! ```
use ast::{MetaItem, Item, Expr, ExprRet, MutMutable, LitNil};
use codemap::Span;
diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs
index 764c88cc954ed..c9f5936a9bb05 100644
--- a/src/libsyntax/ext/deriving/generic/mod.rs
+++ b/src/libsyntax/ext/deriving/generic/mod.rs
@@ -8,174 +8,170 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/*!
-
-Some code that abstracts away much of the boilerplate of writing
-`deriving` instances for traits. Among other things it manages getting
-access to the fields of the 4 different sorts of structs and enum
-variants, as well as creating the method and impl ast instances.
-
-Supported features (fairly exhaustive):
-
-- Methods taking any number of parameters of any type, and returning
- any type, other than vectors, bottom and closures.
-- Generating `impl`s for types with type parameters and lifetimes
- (e.g. `Option`), the parameters are automatically given the
- current trait as a bound. (This includes separate type parameters
- and lifetimes for methods.)
-- Additional bounds on the type parameters, e.g. the `Ord` instance
- requires an explicit `PartialEq` bound at the
- moment. (`TraitDef.additional_bounds`)
-
-Unsupported: FIXME #6257: calling methods on reference fields,
-e.g. deriving Eq/Ord/Clone don't work on `struct A(&int)`,
-because of how the auto-dereferencing happens.
-
-The most important thing for implementers is the `Substructure` and
-`SubstructureFields` objects. The latter groups 5 possibilities of the
-arguments:
-
-- `Struct`, when `Self` is a struct (including tuple structs, e.g
- `struct T(int, char)`).
-- `EnumMatching`, when `Self` is an enum and all the arguments are the
- same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`)
-- `EnumNonMatching` when `Self` is an enum and the arguments are not
- the same variant (e.g. `None`, `Some(1)` and `None`). If
- `const_nonmatching` is true, this will contain an empty list.
-- `StaticEnum` and `StaticStruct` for static methods, where the type
- being derived upon is either an enum or struct respectively. (Any
- argument with type Self is just grouped among the non-self
- arguments.)
-
-In the first two cases, the values from the corresponding fields in
-all the arguments are grouped together. In the `EnumNonMatching` case
-this isn't possible (different variants have different fields), so the
-fields are grouped by which argument they come from. There are no
-fields with values in the static cases, so these are treated entirely
-differently.
-
-The non-static cases have `Option` in several places associated
-with field `expr`s. This represents the name of the field it is
-associated with. It is only not `None` when the associated field has
-an identifier in the source code. For example, the `x`s in the
-following snippet
-
-```rust
-struct A { x : int }
-
-struct B(int);
-
-enum C {
- C0(int),
- C1 { x: int }
-}
-```
-
-The `int`s in `B` and `C0` don't have an identifier, so the
-`Option`s would be `None` for them.
-
-In the static cases, the structure is summarised, either into the just
-spans of the fields or a list of spans and the field idents (for tuple
-structs and record structs, respectively), or a list of these, for
-enums (one for each variant). For empty struct and empty enum
-variants, it is represented as a count of 0.
-
-# Examples
-
-The following simplified `PartialEq` is used for in-code examples:
-
-```rust
-trait PartialEq {
- fn eq(&self, other: &Self);
-}
-impl PartialEq for int {
- fn eq(&self, other: &int) -> bool {
- *self == *other
- }
-}
-```
-
-Some examples of the values of `SubstructureFields` follow, using the
-above `PartialEq`, `A`, `B` and `C`.
-
-## Structs
-
-When generating the `expr` for the `A` impl, the `SubstructureFields` is
-
-~~~text
-Struct(~[FieldInfo {
- span:
- name: Some(),
- self_: ,
- other: ~[,
- name: None,
-
- ~[]
- }])
-~~~
-
-## Enums
-
-When generating the `expr` for a call with `self == C0(a)` and `other
-== C0(b)`, the SubstructureFields is
-
-~~~text
-EnumMatching(0, ,
- ~[FieldInfo {
- span:
- name: None,
- self_: ,
- other: ~[]
- }])
-~~~
-
-For `C1 {x}` and `C1 {x}`,
-
-~~~text
-EnumMatching(1, ,
- ~[FieldInfo {
- span:
- name: Some(),
- self_: ,
- other: ~[]
- }])
-~~~
-
-For `C0(a)` and `C1 {x}` ,
-
-~~~text
-EnumNonMatching(~[(0, ,
- ~[(, None, )]),
- (1, ,
- ~[(, Some(),
- )])])
-~~~
-
-(and vice versa, but with the order of the outermost list flipped.)
-
-## Static
-
-A static method on the above would result in,
-
-~~~text
-StaticStruct(, Named(~[(, )]))
-
-StaticStruct(, Unnamed(~[]))
-
-StaticEnum(, ~[(, , Unnamed(~[])),
- (, ,
- Named(~[(, )]))])
-~~~
-
-*/
+//! Some code that abstracts away much of the boilerplate of writing
+//! `deriving` instances for traits. Among other things it manages getting
+//! access to the fields of the 4 different sorts of structs and enum
+//! variants, as well as creating the method and impl ast instances.
+//!
+//! Supported features (fairly exhaustive):
+//!
+//! - Methods taking any number of parameters of any type, and returning
+//! any type, other than vectors, bottom and closures.
+//! - Generating `impl`s for types with type parameters and lifetimes
+//! (e.g. `Option`), the parameters are automatically given the
+//! current trait as a bound. (This includes separate type parameters
+//! and lifetimes for methods.)
+//! - Additional bounds on the type parameters, e.g. the `Ord` instance
+//! requires an explicit `PartialEq` bound at the
+//! moment. (`TraitDef.additional_bounds`)
+//!
+//! Unsupported: FIXME #6257: calling methods on reference fields,
+//! e.g. deriving Eq/Ord/Clone don't work on `struct A(&int)`,
+//! because of how the auto-dereferencing happens.
+//!
+//! The most important thing for implementers is the `Substructure` and
+//! `SubstructureFields` objects. The latter groups 5 possibilities of the
+//! arguments:
+//!
+//! - `Struct`, when `Self` is a struct (including tuple structs, e.g.
+//!   `struct T(int, char)`).
+//! - `EnumMatching`, when `Self` is an enum and all the arguments are the
+//!   same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`).
+//! - `EnumNonMatching`, when `Self` is an enum and the arguments are not
+//!   the same variant (e.g. `None`, `Some(1)` and `None`). If
+//!   `const_nonmatching` is true, this will contain an empty list.
+//! - `StaticEnum` and `StaticStruct` for static methods, where the type
+//!   being derived upon is either an enum or struct respectively. (Any
+//!   argument with type `Self` is just grouped among the non-self
+//!   arguments.)
+//!
+//! In the first two cases, the values from the corresponding fields in
+//! all the arguments are grouped together. In the `EnumNonMatching` case
+//! this isn't possible (different variants have different fields), so the
+//! fields are grouped by which argument they come from. There are no
+//! fields with values in the static cases, so these are treated entirely
+//! differently.
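+//!
+//! Concretely, a deriving implementation usually just branches on these
+//! cases when it builds a method body. The following is only an
+//! illustrative sketch: the exact signature, field names and the way the
+//! resulting expression is assembled are elided or made up here.
+//!
+//! ~~~text
+//! // hypothetical outline, not code from this module
+//! |cx, span, substr: &Substructure| {
+//!     match *substr.fields {
+//!         Struct(ref fields) => { /* one FieldInfo per field, arguments grouped */ }
+//!         EnumMatching(index, variant, ref fields) => { /* all args same variant */ }
+//!         EnumNonMatching(variants) => { /* fields grouped per argument */ }
+//!         StaticStruct(..) | StaticEnum(..) => { /* no field values available */ }
+//!     }
+//! }
+//! ~~~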
+//!
+//! The non-static cases have `Option<ident>` in several places associated
+//! with field `expr`s. This represents the name of the field the expression
+//! is associated with; it is only not `None` when the associated field has
+//! an identifier in the source code. For example, the `x`s in the
+//! following snippet
+//!
+//! ```rust
+//! struct A { x : int }
+//!
+//! struct B(int);
+//!
+//! enum C {
+//! C0(int),
+//! C1 { x: int }
+//! }
+//! ```
+//!
+//! The `int`s in `B` and `C0` don't have an identifier, so the
+//! `Option<ident>`s would be `None` for them.
+//!
+//! In the static cases, the structure is summarised: either into just the
+//! spans of the fields, or into a list of spans and field idents (for tuple
+//! structs and record structs, respectively), or, for enums, into a list of
+//! these (one for each variant). An empty struct or empty enum variant is
+//! represented as a count of 0.
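+//!
+//! Schematically, and reusing the `Named`/`Unnamed` notation that appears in
+//! the `## Static` examples below (a sketch, with exact types elided):
+//!
+//! ~~~text
+//! Unnamed(~[<span of field 0>, <span of field 1>, ...])    // tuple structs
+//! Named(~[(<ident of x>, <span of x>), ...])               // record structs
+//! ~~~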
+//!
+//! # Examples
+//!
+//! The following simplified `PartialEq` is used for in-code examples:
+//!
+//! ```rust
+//! trait PartialEq {
+//!     fn eq(&self, other: &Self) -> bool;
+//! }
+//! impl PartialEq for int {
+//! fn eq(&self, other: &int) -> bool {
+//! *self == *other
+//! }
+//! }
+//! ```
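+//!
+//! For orientation, the expression this machinery ends up producing for
+//! `A`'s `eq` boils down to a field-by-field comparison, roughly (a sketch,
+//! not the literal generated code):
+//!
+//! ~~~text
+//! fn eq(&self, other: &A) -> bool {
+//!     self.x.eq(&other.x)
+//! }
+//! ~~~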
+//!
+//! Some examples of the values of `SubstructureFields` follow, using the
+//! above `PartialEq`, `A`, `B` and `C`.
+//!
+//! ## Structs
+//!
+//! When generating the `expr` for the `A` impl, the `SubstructureFields` is
+//!
+//! ~~~text
+//! Struct(~[FieldInfo {
+//!            span: <span of x>
+//!            name: Some(<ident of x>),
+//!            self_: <expr for &self.x>,
+//!            other: ~[<expr for &other.x>]
+//!          }])
+//! ~~~
+//!
+//! For the `B` impl, called with `B(a)` and `B(b)`,
+//!
+//! ~~~text
+//! Struct(~[FieldInfo {
+//!           span: <span of `int`>,
+//!           name: None,
+//!           self_: <expr for &a>,
+//!           other: ~[<expr for &b>]
+//!          }])
+//! ~~~
+//!
+//! ## Enums
+//!
+//! When generating the `expr` for a call with `self == C0(a)` and `other
+//! == C0(b)`, the `SubstructureFields` is
+//!
+//! ~~~text
+//! EnumMatching(0, <ast::Variant for C0>,
+//!              ~[FieldInfo {
+//!                 span: <span of int>
+//!                 name: None,
+//!                 self_: <expr for &a>,
+//!                 other: ~[<expr for &b>]
+//! }])
+//! ~~~
+//!
+//! For `C1 {x}` and `C1 {x}`,
+//!
+//! ~~~text
+//! EnumMatching(1, <ast::Variant for C1>,
+//!              ~[FieldInfo {
+//!                 span: <span of x>
+//!                 name: Some(<ident of x>),
+//!                 self_: <expr for &self.x>,
+//!                 other: ~[<expr for &other.x>]
+//! }])
+//! ~~~
+//!
+//! For `C0(a)` and `C1 {x}`,
+//!
+//! ~~~text
+//! EnumNonMatching(~[(0, <ast::Variant for C0>,
+//!                    ~[(<span of int>, None, <expr for &a>)]),
+//!                   (1, <ast::Variant for C1>,
+//!                    ~[(<span of x>, Some(<ident of x>),
+//!                       <expr for &other.x>)])])
+//! ~~~
+//!
+//! (and vice versa, but with the order of the outermost list flipped.)
+//!
+//! ## Static
+//!
+//! A static method on the above would result in,
+//!
+//! ~~~text
+//! StaticStruct(<ast::StructDef of A>, Named(~[(<ident of x>, <span of x>)]))
+//!
+//! StaticStruct(<ast::StructDef of B>, Unnamed(~[<span of int>]))
+//!
+//! StaticEnum(<ast::EnumDef of C>,
+//!            ~[(<ident of C0>, <span of C0>, Unnamed(~[<span of int>])),
+//!              (<ident of C1>, <span of C1>, Named(~[(<ident of x>, <span of x>)]))])
+//! ~~~
use std::cell::RefCell;
use std::gc::{Gc, GC};
diff --git a/src/libsyntax/ext/deriving/generic/ty.rs b/src/libsyntax/ext/deriving/generic/ty.rs
index b53281f99633f..f6a39d7b2e6c1 100644
--- a/src/libsyntax/ext/deriving/generic/ty.rs
+++ b/src/libsyntax/ext/deriving/generic/ty.rs
@@ -25,8 +25,10 @@ use std::gc::Gc;
/// The types of pointers
pub enum PtrTy<'a> {
- Send, // ~
- Borrowed(Option<&'a str>, ast::Mutability), // &['lifetime] [mut]
+ /// ~
+ Send,
+ /// &'lifetime mut
+ Borrowed(Option<&'a str>, ast::Mutability),
}
/// A path, e.g. `::std::option::Option::<int>` (global). Has support
@@ -83,12 +85,12 @@ impl<'a> Path<'a> {
/// A type. Supports pointers (except for *), Self, and literals
pub enum Ty<'a> {
Self,
- // &/Box/ Ty
+ /// &/Box/ Ty
Ptr(Box<Ty<'a>>, PtrTy<'a>),
- // mod::mod::Type<[lifetime], [Params...]>, including a plain type
- // parameter, and things like `int`
+ /// mod::mod::Type<[lifetime], [Params...]>, including a plain type
+ /// parameter, and things like `int`
Literal(Path<'a>),
- // includes nil
+ /// includes unit
Tuple(Vec<Ty<'a>> )
}
diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs
index 8e673ff246598..05b5131d7e4d3 100644
--- a/src/libsyntax/ext/deriving/show.rs
+++ b/src/libsyntax/ext/deriving/show.rs
@@ -55,8 +55,8 @@ pub fn expand_deriving_show(cx: &mut ExtCtxt,
trait_def.expand(cx, mitem, item, push)
}
-// we construct a format string and then defer to std::fmt, since that
-// knows what's up with formatting at so on.
+/// We construct a format string and then defer to std::fmt, since that
+/// knows what's up with formatting and so on.
fn show_substructure(cx: &mut ExtCtxt, span: Span,
substr: &Substructure) -> Gc<Expr> {
// build `<name>`, `<name>({}, {}, ...)` or `<name> { <field>: {},
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 9fe431cfb6c75..b7d72ae4debc1 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
pub fn expand_expr(e: Gc<ast::Expr>, fld: &mut MacroExpander) -> Gc<ast::Expr> {
}
}
-// Rename loop label and expand its loop body
-//
-// The renaming procedure for loop is different in the sense that the loop
-// body is in a block enclosed by loop head so the renaming of loop label
-// must be propagated to the enclosed context.
+/// Rename loop label and expand its loop body
+///
+/// The renaming procedure for loop is different in the sense that the loop
+/// body is in a block enclosed by loop head so the renaming of loop label
+/// must be propagated to the enclosed context.
fn expand_loop_block(loop_block: P<Block>,
opt_ident: Option<Ident>,
fld: &mut MacroExpander) -> (P<Block>, Option<Ident>) {
@@ -1150,7 +1150,7 @@ mod test {
use super::{pattern_bindings, expand_crate, contains_macro_escape};
use super::{PatIdentFinder, IdentRenamer, PatIdentRenamer};
use ast;
- use ast::{Attribute_, AttrOuter, MetaWord};
+ use ast::{Attribute_, AttrOuter, MetaWord, Name};
use attr;
use codemap;
use codemap::Spanned;
@@ -1665,12 +1665,12 @@ foo_module!()
let f_ident = token::str_to_ident("f");
let x_ident = token::str_to_ident("x");
let int_ident = token::str_to_ident("int");
- let renames = vec!((x_ident,16));
+ let renames = vec!((x_ident,Name(16)));
let mut renamer = IdentRenamer{renames: &renames};
let renamed_crate = renamer.fold_crate(the_crate);
let idents = crate_idents(&renamed_crate);
let resolved : Vec<ast::Name> = idents.iter().map(|id| mtwt::resolve(*id)).collect();
- assert_eq!(resolved,vec!(f_ident.name,16,int_ident.name,16,16,16));
+ assert_eq!(resolved,vec!(f_ident.name,Name(16),int_ident.name,Name(16),Name(16),Name(16)));
}
// test the PatIdentRenamer; only PatIdents get renamed
@@ -1680,13 +1680,13 @@ foo_module!()
let f_ident = token::str_to_ident("f");
let x_ident = token::str_to_ident("x");
let int_ident = token::str_to_ident("int");
- let renames = vec!((x_ident,16));
+ let renames = vec!((x_ident,Name(16)));
let mut renamer = PatIdentRenamer{renames: &renames};
let renamed_crate = renamer.fold_crate(the_crate);
let idents = crate_idents(&renamed_crate);
let resolved : Vec<ast::Name> = idents.iter().map(|id| mtwt::resolve(*id)).collect();
let x_name = x_ident.name;
- assert_eq!(resolved,vec!(f_ident.name,16,int_ident.name,16,x_name,x_name));
+ assert_eq!(resolved,vec!(f_ident.name,Name(16),int_ident.name,Name(16),x_name,x_name));
}
diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs
index f486d2de3398b..786fd953f8901 100644
--- a/src/libsyntax/ext/format.rs
+++ b/src/libsyntax/ext/format.rs
@@ -37,24 +37,24 @@ struct Context<'a, 'b> {
ecx: &'a mut ExtCtxt<'b>,
fmtsp: Span,
- // Parsed argument expressions and the types that we've found so far for
- // them.
+ /// Parsed argument expressions and the types that we've found so far for
+ /// them.
args: Vec<Gc<ast::Expr>>,
arg_types: Vec