//! Struct used to index source code, to enable efficient lookup of tokens that
//! are omitted from the AST (e.g., commented lines).

use rustpython_ast::Location;
use rustpython_parser::lexer::{LexResult, Tok};

pub struct Indexer {
    /// The lines (1-indexed) that contain a comment token.
    commented_lines: Vec<usize>,
    /// The lines (1-indexed) that end in a backslash continuation.
    continuation_lines: Vec<usize>,
}
impl Indexer {
    /// Return the lines (1-indexed) that contain a comment token.
    pub fn commented_lines(&self) -> &[usize] {
        &self.commented_lines
    }

    /// Return the lines (1-indexed) that end in a backslash continuation.
    pub fn continuation_lines(&self) -> &[usize] {
        &self.continuation_lines
    }
}
impl From<&[LexResult]> for Indexer {
    fn from(lxr: &[LexResult]) -> Self {
        let mut commented_lines = Vec::new();
        let mut continuation_lines = Vec::new();
        // Track the previous token, so we can measure the gap between where
        // it ended and where the current token starts.
        let mut prev: Option<(&Location, &Tok, &Location)> = None;
        for (start, tok, end) in lxr.iter().flatten() {
            // Index the line on which each comment token starts.
            if matches!(tok, Tok::Comment(_)) {
                commented_lines.push(start.row());
            }
            if let Some((.., prev_tok, prev_end)) = prev {
                // If the previous token wasn't a newline or comment, yet the
                // current token starts on a later line, the intervening lines
                // must have been joined by backslash continuations.
                if !matches!(
                    prev_tok,
                    Tok::Newline | Tok::NonLogicalNewline | Tok::Comment(..)
                ) {
                    for line in prev_end.row()..start.row() {
                        continuation_lines.push(line);
                    }
                }
            }
            prev = Some((start, tok, end));
        }
        Self {
            commented_lines,
            continuation_lines,
        }
    }
}
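
// Usage sketch (illustrative, mirroring the tests below): lex a Python source
// string, build an `Indexer` from the resulting token stream, and query the
// indexed lines.
//
//     let contents = "x = 1  # comment";
//     let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
//     let indexer: Indexer = lxr.as_slice().into();
//     assert_eq!(indexer.commented_lines(), [1]);
//     assert!(indexer.continuation_lines().is_empty());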
#[cfg(test)]
mod tests {
    use rustpython_parser::lexer;
    use rustpython_parser::lexer::LexResult;

    use crate::source_code::Indexer;

    #[test]
    fn continuation() {
        // A single logical line: no continuations.
        let contents = r#"x = 1"#;
        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
        let indexer: Indexer = lxr.as_slice().into();
        assert_eq!(indexer.continuation_lines(), Vec::<usize>::new().as_slice());

        // Comments and blank lines don't create continuations.
        let contents = r#"
# Hello, world!

x = 1

y = 2
"#
        .trim();
        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
        let indexer: Indexer = lxr.as_slice().into();
        assert_eq!(indexer.continuation_lines(), Vec::<usize>::new().as_slice());

        // Backslash continuations are indexed (lines 1, 5, 6, and 11), while
        // implicit continuations within brackets are not.
        let contents = r#"
x = \
    1

if True:
    z = \
        \
        2

(
    "abc" # Foo
    "def" \
    "ghi"
)
"#
        .trim();
        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
        let indexer: Indexer = lxr.as_slice().into();
        assert_eq!(indexer.continuation_lines(), [1, 5, 6, 11]);

        // Backslash continuations are indexed within and after compound
        // statements (lines 9 and 12).
        let contents = r#"
x = 1; import sys
import os

if True:
    x = 1; import sys
import os

if True:
    x = 1; \
        import os

x = 1; \
    import os
"#
        .trim();
        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
        let indexer: Indexer = lxr.as_slice().into();
        assert_eq!(indexer.continuation_lines(), [9, 12]);
    }
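
    // Illustrative companion sketch: `commented_lines` records the 1-indexed
    // start row of every comment token.
    #[test]
    fn comments() {
        let contents = r#"
# one
x = 1  # two
"#
        .trim();
        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();
        let indexer: Indexer = lxr.as_slice().into();
        assert_eq!(indexer.commented_lines(), [1, 2]);
    }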
}