A few cleanups and minor improvements for the lexer #53289

Merged 1 commit on Aug 16, 2018
22 changes: 16 additions & 6 deletions src/libsyntax/parse/lexer/comments.rs
@@ -63,6 +63,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
if !lines.is_empty() && lines[0].chars().all(|c| c == '*') {
i += 1;
}

while i < j && lines[i].trim().is_empty() {
i += 1;
}
@@ -74,9 +75,11 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
.all(|c| c == '*') {
j -= 1;
}

while j > i && lines[j - 1].trim().is_empty() {
j -= 1;
}

lines[i..j].to_vec()
}
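The two `while` loops above are the vertical trimming pass of `strip_doc_comment_decoration`; the blank lines added here just separate the logical steps. As a rough, self-contained sketch of what this part does (simplified, omitting the all-stars line handling shown in the hunk):

```rust
// Simplified sketch mirroring the two `while` loops in the hunk:
// drop leading and trailing blank lines from a doc comment body.
fn vertical_trim(lines: Vec<String>) -> Vec<String> {
    let mut i = 0;
    let mut j = lines.len();

    while i < j && lines[i].trim().is_empty() {
        i += 1;
    }

    while j > i && lines[j - 1].trim().is_empty() {
        j -= 1;
    }

    lines[i..j].to_vec()
}
```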

@@ -85,6 +88,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
let mut i = usize::MAX;
let mut can_trim = true;
let mut first = true;

for line in &lines {
for (j, c) in line.chars().enumerate() {
if j > i || !"* \t".contains(c) {
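This hunk touches the horizontal trimming pass. A rough, hypothetical sketch of the idea it suggests (the function name and exact handling are assumptions for illustration, not the file's actual code):

```rust
// Hypothetical sketch: find the length of the longest prefix made only of
// '*', ' ', or '\t' that every line shares, so block-comment decoration such
// as a leading " * " can be stripped uniformly.
fn common_decoration_prefix(lines: &[String]) -> usize {
    let mut shared = usize::MAX;
    for line in lines {
        let mut len = 0;
        for (j, c) in line.chars().enumerate() {
            if j >= shared || !"* \t".contains(c) {
                break;
            }
            len = j + 1;
        }
        shared = shared.min(len);
    }
    if shared == usize::MAX { 0 } else { shared }
}
```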
@@ -119,7 +123,8 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
}

// one-line comments lose their prefix
-const ONELINERS: &'static [&'static str] = &["///!", "///", "//!", "//"];
+const ONELINERS: &[&str] = &["///!", "///", "//!", "//"];

for prefix in ONELINERS {
if comment.starts_with(*prefix) {
return (&comment[prefix.len()..]).to_string();
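The `ONELINERS` change relies on `'static` lifetime elision in `const` and `static` items (RFC 1623, stable since Rust 1.17): the explicit lifetimes are redundant there. A minimal illustration:

```rust
// Both declarations denote exactly the same type; in const/static items the
// elided lifetime defaults to 'static, so the annotations can be dropped.
const OLD: &'static [&'static str] = &["///", "//!"];
const NEW: &[&str] = &["///", "//!"];

fn main() {
    assert_eq!(OLD, NEW);
}
```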
@@ -205,6 +210,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
let len = s.len();
let mut col = col.to_usize();
let mut cursor: usize = 0;

while col > 0 && cursor < len {
let ch = char_at(s, cursor);
if !ch.is_whitespace() {
@@ -213,7 +219,8 @@ fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
cursor += ch.len_utf8();
col -= 1;
}
-return Some(cursor);
+
+Some(cursor)
}

fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<String>, s: String, col: CharPos) {
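Dropping the trailing `return` in `all_whitespace` is the idiomatic form: the final expression of a function body is its value, while `return` stays useful for early exits. A hypothetical example of the same style:

```rust
// `return` only for the early exit; a plain tail expression for the normal path.
fn first_non_space(s: &str) -> Option<usize> {
    for (i, ch) in s.char_indices() {
        if !ch.is_whitespace() {
            return Some(i); // early exit keeps `return`
        }
    }
    None // tail expression, no `return` needed
}
```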
@@ -246,11 +253,13 @@ fn read_block_comment(rdr: &mut StringReader,
"src_index={}, end_src_index={}, line_begin_pos={}",
src_index, end_src_index, rdr.filemap.line_begin_pos(rdr.pos).to_u32());
let mut n = 0;

while src_index < end_src_index {
let c = char_at(&rdr.src, src_index);
src_index += c.len_utf8();
n += 1;
}

let col = CharPos(n);

rdr.bump();
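The loop above converts a byte offset into a character-based column by walking the source one `char` at a time. A minimal sketch of the same computation (the function name and slicing-based form are assumptions for illustration):

```rust
// Count characters (not bytes) between the start of the line and the comment,
// assuming both positions are valid byte indices on char boundaries in `src`.
fn char_column(src: &str, line_begin: usize, comment_start: usize) -> usize {
    src[line_begin..comment_start].chars().count()
}
```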
@@ -358,10 +367,10 @@ pub struct Literal {
// it appears this function is called only from pprust... that's
// probably not a good thing.
pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut dyn Read)
--> (Vec<Comment>, Vec<Literal>) {
-let mut src = Vec::new();
-srdr.read_to_end(&mut src).unwrap();
-let src = String::from_utf8(src).unwrap();
+-> (Vec<Comment>, Vec<Literal>)
+{
+let mut src = String::new();
+srdr.read_to_string(&mut src).unwrap();
let cm = CodeMap::new(sess.codemap().path_mapping().clone());
let filemap = cm.new_filemap(path, src);
let mut rdr = lexer::StringReader::new_raw(sess, filemap, None);
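Reading straight into a `String` with `read_to_string` replaces the `Vec<u8>` plus `String::from_utf8` round trip: UTF-8 is validated as part of the read rather than in a second pass, and the intermediate byte buffer goes away. Roughly:

```rust
use std::io::Read;

// Old shape: read raw bytes, then validate/convert to a String in a second step.
fn read_all_via_bytes(srdr: &mut dyn Read) -> String {
    let mut src = Vec::new();
    srdr.read_to_end(&mut src).unwrap();
    String::from_utf8(src).unwrap()
}

// New shape: read directly into a String; invalid UTF-8 surfaces as an io::Error.
fn read_all_via_string(srdr: &mut dyn Read) -> String {
    let mut src = String::new();
    srdr.read_to_string(&mut src).unwrap();
    src
}
```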
@@ -370,6 +379,7 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: FileName, srdr: &mut
let mut literals: Vec<Literal> = Vec::new();
let mut code_to_the_left = false; // Only code
let mut anything_to_the_left = false; // Code or comments

while !rdr.is_eof() {
loop {
// Eat all the whitespace and count blank lines.