Skip to content

Commit

Permalink
char_ref: Move type variable from fns to impl
Browse files Browse the repository at this point in the history
  • Loading branch information
kmcallister committed Mar 25, 2014
1 parent df4b199 commit 3040b49
Showing 1 changed file with 30 additions and 28 deletions.
58 changes: 30 additions & 28 deletions src/tokenizer/char_ref.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,22 +66,6 @@ impl CharRefTokenizer {
}
}

pub fn step<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
if self.result.is_some() {
return Done;
}

debug!("char ref tokenizer stepping in state {:?}", self.state);
match self.state {
Begin => self.do_begin(tokenizer),
Octothorpe => self.do_octothorpe(tokenizer),
Numeric(base) => self.do_numeric(tokenizer, base),
NumericSemicolon => self.do_numeric_semicolon(tokenizer),
Named => self.do_named(tokenizer),
BogusName => self.do_bogus_name(tokenizer),
}
}

// A CharRefTokenizer can only tokenize one character reference,
// so this method consumes the tokenizer.
pub fn get_result(self) -> CharRef {
Expand All @@ -108,8 +92,26 @@ impl CharRefTokenizer {
});
Done
}
}

impl<Tok: SubTok> CharRefTokenizer {
// Advance the character-reference tokenizer by one step, pulling input
// through `tokenizer`. If a result has already been recorded in
// `self.result`, reports `Done` immediately without consuming anything;
// otherwise dispatches to the handler for the current state.
// NOTE(review): `Tok: SubTok` is now a parameter of the enclosing impl
// (this commit's change), so the method itself is no longer generic.
pub fn step(&mut self, tokenizer: &mut Tok) -> Status {
if self.result.is_some() {
return Done;
}

// Trace the current state before dispatching (pre-1.0 `debug!` syntax).
debug!("char ref tokenizer stepping in state {:?}", self.state);
// One handler per state; each returns the next Status for the caller.
match self.state {
Begin => self.do_begin(tokenizer),
Octothorpe => self.do_octothorpe(tokenizer),
Numeric(base) => self.do_numeric(tokenizer, base),
NumericSemicolon => self.do_numeric_semicolon(tokenizer),
Named => self.do_named(tokenizer),
BogusName => self.do_bogus_name(tokenizer),
}
}

fn do_begin<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
fn do_begin(&mut self, tokenizer: &mut Tok) -> Status {
match unwrap_or_return!(tokenizer.peek(), Stuck) {
'\t' | '\n' | '\x0C' | ' ' | '<' | '&'
=> self.finish_none(),
Expand All @@ -130,7 +132,7 @@ impl CharRefTokenizer {
}
}

fn do_octothorpe<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
fn do_octothorpe(&mut self, tokenizer: &mut Tok) -> Status {
let c = unwrap_or_return!(tokenizer.peek(), Stuck);
match c {
'x' | 'X' => {
Expand All @@ -147,7 +149,7 @@ impl CharRefTokenizer {
Progress
}

fn do_numeric<T: SubTok>(&mut self, tokenizer: &mut T, base: u32) -> Status {
fn do_numeric(&mut self, tokenizer: &mut Tok, base: u32) -> Status {
let c = unwrap_or_return!(tokenizer.peek(), Stuck);
match to_digit(c, base as uint) {
Some(n) => {
Expand All @@ -172,15 +174,15 @@ impl CharRefTokenizer {
}
}

fn do_numeric_semicolon<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
fn do_numeric_semicolon(&mut self, tokenizer: &mut Tok) -> Status {
match unwrap_or_return!(tokenizer.peek(), Stuck) {
';' => tokenizer.discard_char(),
_ => tokenizer.emit_error(~"Semicolon missing after numeric character reference"),
};
self.finish_numeric(tokenizer)
}

fn unconsume_numeric<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
fn unconsume_numeric(&mut self, tokenizer: &mut Tok) -> Status {
let mut unconsume = ~"#";
match self.hex_marker {
Some(c) => unconsume.push_char(c),
Expand All @@ -192,7 +194,7 @@ impl CharRefTokenizer {
self.finish_none()
}

fn finish_numeric<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
fn finish_numeric(&mut self, tokenizer: &mut Tok) -> Status {
fn conv(n: u32) -> char {
from_u32(n).expect("invalid char missed by error handling cases")
}
Expand Down Expand Up @@ -223,7 +225,7 @@ impl CharRefTokenizer {
self.finish_one(c)
}

fn do_named<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
fn do_named(&mut self, tokenizer: &mut Tok) -> Status {
let c = unwrap_or_return!(tokenizer.peek(), Stuck);
tokenizer.discard_char();
self.name_buf().push_char(c);
Expand All @@ -244,17 +246,17 @@ impl CharRefTokenizer {
}
}

fn emit_name_error<T: SubTok>(&mut self, tokenizer: &mut T) {
fn emit_name_error(&mut self, tokenizer: &mut Tok) {
let msg = format!("Invalid character reference &{:s}",
self.name_buf().as_slice());
tokenizer.emit_error(msg);
}

fn unconsume_name<T: SubTok>(&mut self, tokenizer: &mut T) {
fn unconsume_name(&mut self, tokenizer: &mut Tok) {
tokenizer.unconsume(self.name_buf_opt.take_unwrap());
}

fn finish_named<T: SubTok>(&mut self, tokenizer: &mut T, end_char: Option<char>) -> Status {
fn finish_named(&mut self, tokenizer: &mut Tok, end_char: Option<char>) -> Status {
match self.name_match {
None => {
match end_char {
Expand Down Expand Up @@ -333,7 +335,7 @@ impl CharRefTokenizer {
}
}

fn do_bogus_name<T: SubTok>(&mut self, tokenizer: &mut T) -> Status {
fn do_bogus_name(&mut self, tokenizer: &mut Tok) -> Status {
let c = unwrap_or_return!(tokenizer.peek(), Stuck);
tokenizer.discard_char();
self.name_buf().push_char(c);
Expand All @@ -346,7 +348,7 @@ impl CharRefTokenizer {
self.finish_none()
}

pub fn end_of_file<T: SubTok>(&mut self, tokenizer: &mut T) {
pub fn end_of_file(&mut self, tokenizer: &mut Tok) {
while self.result.is_none() {
match self.state {
Begin => drop(self.finish_none()),
Expand Down

0 comments on commit 3040b49

Please sign in to comment.