Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(deps): Upgrade to Rust 1.50.0 #6428

Merged
merged 17 commits into from
Feb 18, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 12 additions & 6 deletions lib/prometheus-parser/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -605,7 +605,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedChar { expected: ',', .. }, ..
kind: ErrorKind::ExpectedChar { expected: ',', .. },
..
}
));

Expand All @@ -614,7 +615,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::InvalidMetricKind { .. }, ..
kind: ErrorKind::InvalidMetricKind { .. },
..
}
));

Expand All @@ -623,7 +625,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedSpace { .. }, ..
kind: ErrorKind::ExpectedSpace { .. },
..
}
));

Expand All @@ -632,7 +635,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedChar { expected: '"', .. }, ..
kind: ErrorKind::ExpectedChar { expected: '"', .. },
..
}
));

Expand All @@ -641,7 +645,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedChar { expected: '"', .. }, ..
kind: ErrorKind::ExpectedChar { expected: '"', .. },
..
}
));

Expand All @@ -650,7 +655,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ParseFloatError { .. }, ..
kind: ErrorKind::ParseFloatError { .. },
..
}
));
}
Expand Down
5 changes: 1 addition & 4 deletions lib/prometheus-parser/src/line.rs
Original file line number Diff line number Diff line change
Expand Up @@ -620,10 +620,7 @@ mod test {

let input = wrap(r#"{ a="b" ,, c="d" }"#);
let error = Metric::parse_labels(&input).unwrap_err().into();
assert!(matches!(
error,
ErrorKind::ParseNameError { .. }
));
assert!(matches!(error, ErrorKind::ParseNameError { .. }));
}

#[test]
Expand Down
10 changes: 5 additions & 5 deletions lib/shared/src/conversion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,11 +184,11 @@ fn parse_bool(s: &str) -> Result<bool, Error> {

/// Does the format specifier have a time zone option?
fn format_has_zone(fmt: &str) -> bool {
fmt.find("%Z").is_some()
|| fmt.find("%z").is_some()
|| fmt.find("%:z").is_some()
|| fmt.find("%#z").is_some()
|| fmt.find("%+").is_some()
fmt.contains("%Z")
|| fmt.contains("%z")
|| fmt.contains("%:z")
|| fmt.contains("%#z")
|| fmt.contains("%+")
}

/// Convert a timestamp with a non-UTC time zone into UTC
Expand Down
13 changes: 6 additions & 7 deletions lib/vrl/cli/src/cmd.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,14 +73,13 @@ fn run(opts: &Opts) -> Result<(), Error> {
}
}

#[cfg(feature = "repl")]
fn repl(objects: Vec<Value>) -> Result<(), Error> {
repl::run(objects)
}

#[cfg(not(feature = "repl"))]
fn repl(_: Vec<Value>) -> Result<(), Error> {
Err(Error::ReplFeature)
if cfg!(feature = "repl") {
repl::run(objects);
Ok(())
} else {
Err(Error::ReplFeature)
}
}

fn execute(object: &mut impl Target, source: String) -> Result<Value, Error> {
Expand Down
1 change: 0 additions & 1 deletion lib/vrl/cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ pub enum Error {
#[error("json error")]
Json(#[from] serde_json::Error),

#[cfg(not(feature = "repl"))]
#[error("repl feature disabled, program input required")]
ReplFeature,
}
5 changes: 1 addition & 4 deletions lib/vrl/cli/src/repl.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
use crate::Error;
use indoc::indoc;
use prettytable::{format, Cell, Row, Table};
use regex::Regex;
Expand All @@ -13,7 +12,7 @@ use vrl::{diagnostic::Formatter, state, Runtime, Target, Value};

const DOCS_URL: &str = "https://vector.dev/docs/reference/vrl";

pub(crate) fn run(mut objects: Vec<Value>) -> Result<(), Error> {
pub(crate) fn run(mut objects: Vec<Value>) {
let mut index = 0;
let func_docs_regex = Regex::new(r"^help\sdocs\s(\w{1,})$").unwrap();

Expand Down Expand Up @@ -82,8 +81,6 @@ pub(crate) fn run(mut objects: Vec<Value>) -> Result<(), Error> {
}
}
}

Ok(())
}

fn resolve(
Expand Down
83 changes: 43 additions & 40 deletions lib/vrl/parser/src/lex.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@ use std::iter::Peekable;
use std::str::CharIndices;

pub type Tok<'input> = Token<&'input str>;
pub type Spanned<'input, Loc> = Result<(Loc, Tok<'input>, Loc), Error>;
pub type SpannedResult<'input, Loc> = Result<Spanned<'input, Loc>, Error>;
pub type Spanned<'input, Loc> = (Loc, Tok<'input>, Loc);

#[derive(thiserror::Error, Clone, Debug, PartialEq)]
pub enum Error {
Expand Down Expand Up @@ -447,7 +448,7 @@ impl StringLiteral<&str> {
// -----------------------------------------------------------------------------

impl<'input> Iterator for Lexer<'input> {
type Item = Spanned<'input, usize>;
type Item = SpannedResult<'input, usize>;
Comment on lines -450 to +451
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are these changes needed for the 1.50 upgrade, or did they too sneak in from a VRL merge?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

These ones are intentional. Clippy was complaining about unnecessary_wraps, so I renamed the existing Spanned type to SpannedResult and added a new Spanned type that represents the Ok value of SpannedResult, so I could easily refer to Spanned in the function return types where it was complaining about the unnecessary wrapping. For example: https://github.com/timberio/vector/pull/6428/files/f279014f63a058abe29e0afb8dbc1f1dffc2fb32#diff-3eaef579e2d938c6926fa17614f80aab7455adeeffd8907567b34fee18a07e94R575-R582


fn next(&mut self) -> Option<Self::Item> {
use Token::*;
Expand All @@ -461,7 +462,7 @@ impl<'input> Iterator for Lexer<'input> {
// represent a physical character, instead it is a boundary marker.
if self.query_start(start) {
// dbg!("LQuery"); // NOTE: uncomment this for debugging
return self.token2(start, start + 1, LQuery);
return Some(Ok(self.token2(start, start + 1, LQuery)));
}

// Check if we need to emit a `RQuery` token.
Expand All @@ -470,7 +471,7 @@ impl<'input> Iterator for Lexer<'input> {
// represent a physical character, instead it is a boundary marker.
if let Some(pos) = self.query_end(start) {
// dbg!("RQuery"); // NOTE: uncomment this for debugging
return self.token2(pos, pos + 1, RQuery);
return Some(Ok(self.token2(pos, pos + 1, RQuery)));
}

// Advance the internal iterator and emit the next token, or loop
Expand All @@ -479,26 +480,28 @@ impl<'input> Iterator for Lexer<'input> {
let result = match ch {
'"' => Some(self.string_literal(start)),

';' => self.token(start, SemiColon),
'\n' => self.token(start, Newline),
'\\' => self.token(start, Escape),
';' => Some(Ok(self.token(start, SemiColon))),
'\n' => Some(Ok(self.token(start, Newline))),
'\\' => Some(Ok(self.token(start, Escape))),

'(' => self.open(start, LParen),
'[' => self.open(start, LBracket),
'{' => self.open(start, LBrace),
'}' => self.close(start, RBrace),
']' => self.close(start, RBracket),
')' => self.close(start, RParen),
'(' => Some(Ok(self.open(start, LParen))),
'[' => Some(Ok(self.open(start, LBracket))),
'{' => Some(Ok(self.open(start, LBrace))),
'}' => Some(Ok(self.close(start, RBrace))),
']' => Some(Ok(self.close(start, RBracket))),
')' => Some(Ok(self.close(start, RParen))),

'.' => self.token(start, Dot),
':' => self.token(start, Colon),
',' => self.token(start, Comma),
'.' => Some(Ok(self.token(start, Dot))),
':' => Some(Ok(self.token(start, Colon))),
',' => Some(Ok(self.token(start, Comma))),

'_' if self.test_peek(char::is_alphabetic) => Some(self.internal_test(start)),
'_' => self.token(start, Underscore),
'_' if self.test_peek(char::is_alphabetic) => {
Some(Ok(self.internal_test(start)))
}
'_' => Some(Ok(self.token(start, Underscore))),

'!' if self.test_peek(|ch| ch == '!' || !is_operator(ch)) => {
self.token(start, Bang)
Some(Ok(self.token(start, Bang)))
}

'#' => {
Expand All @@ -510,14 +513,14 @@ impl<'input> Iterator for Lexer<'input> {
's' if self.test_peek(|ch| ch == '\'') => Some(self.raw_string_literal(start)),
't' if self.test_peek(|ch| ch == '\'') => Some(self.timestamp_literal(start)),

ch if is_ident_start(ch) => Some(self.identifier_or_function_call(start)),
ch if is_ident_start(ch) => Some(Ok(self.identifier_or_function_call(start))),
ch if is_digit(ch) || (ch == '-' && self.test_peek(is_digit)) => {
Some(self.numeric_literal(start))
}
ch if is_operator(ch) => Some(self.operator(start)),
ch if is_operator(ch) => Some(Ok(self.operator(start))),
ch if ch.is_whitespace() => continue,

ch => self.token(start, InvalidToken(ch)),
ch => Some(Ok(self.token(start, InvalidToken(ch)))),
};

// dbg!(&result); // NOTE: uncomment this for debugging
Expand All @@ -529,7 +532,7 @@ impl<'input> Iterator for Lexer<'input> {
// queries.
} else if let Some(end) = self.rquery_indices.pop() {
// dbg!("RQuery"); // NOTE: uncomment this for debugging
return self.token2(end, end + 1, RQuery);
return Some(Ok(self.token2(end, end + 1, RQuery)));
}

return None;
Expand All @@ -542,7 +545,7 @@ impl<'input> Iterator for Lexer<'input> {
// -----------------------------------------------------------------------------

impl<'input> Lexer<'input> {
fn open(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
fn open(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
match &token {
Token::LParen => self.open_parens += 1,
Token::LBracket => self.open_brackets += 1,
Expand All @@ -553,7 +556,7 @@ impl<'input> Lexer<'input> {
self.token(start, token)
}

fn close(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
fn close(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
match &token {
Token::RParen => self.open_parens = self.open_parens.saturating_sub(1),
Token::RBracket => self.open_brackets = self.open_brackets.saturating_sub(1),
Expand All @@ -564,7 +567,7 @@ impl<'input> Lexer<'input> {
self.token(start, token)
}

fn token(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
fn token(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
let end = self.next_index();
self.token2(start, end, token)
}
Expand All @@ -574,8 +577,8 @@ impl<'input> Lexer<'input> {
start: usize,
end: usize,
token: Token<&'input str>,
) -> Option<Spanned<'input, usize>> {
Some(Ok((start, token, end)))
) -> Spanned<'input, usize> {
(start, token, end)
}

fn query_end(&mut self, start: usize) -> Option<usize> {
Expand Down Expand Up @@ -640,7 +643,7 @@ impl<'input> Lexer<'input> {
let mut end = 0;
while let Some((pos, ch)) = chars.next() {
let take_until_end =
|result: Spanned<'input, usize>,
|result: SpannedResult<'input, usize>,
last_char: &mut Option<char>,
end: &mut usize,
chars: &mut Peekable<CharIndices<'input>>| {
Expand Down Expand Up @@ -735,7 +738,7 @@ impl<'input> Lexer<'input> {
while let Some((pos, ch)) = chars.peek() {
let pos = *pos;

let literal_check = |result: Spanned<'input, usize>, chars: &mut Peekable<CharIndices<'input>>| match result {
let literal_check = |result: SpannedResult<'input, usize>, chars: &mut Peekable<CharIndices<'input>>| match result {
Err(_) => Err(()),
Ok((_, _, new)) => {
#[allow(clippy::while_let_on_iterator)]
Expand Down Expand Up @@ -854,7 +857,7 @@ impl<'input> Lexer<'input> {
true
}

fn string_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn string_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
let content_start = self.next_index();

loop {
Expand All @@ -876,19 +879,19 @@ impl<'input> Lexer<'input> {
Err(Error::StringLiteral { start })
}

fn regex_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn regex_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
self.quoted_literal(start, Token::RegexLiteral)
}

fn raw_string_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn raw_string_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
self.quoted_literal(start, |c| Token::StringLiteral(StringLiteral::Raw(c)))
}

fn timestamp_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn timestamp_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
self.quoted_literal(start, Token::TimestampLiteral)
}

fn numeric_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn numeric_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
let (end, int) = self.take_while(start, |ch| is_digit(ch) || ch == '_');

match self.peek() {
Expand Down Expand Up @@ -928,7 +931,7 @@ impl<'input> Lexer<'input> {
Token::ident(ident)
};

Ok((start, token, end))
(start, token, end)
}

fn operator(&mut self, start: usize) -> Spanned<'input, usize> {
Expand All @@ -941,21 +944,21 @@ impl<'input> Lexer<'input> {
op => Token::Operator(op),
};

Ok((start, token, end))
(start, token, end)
}

/// Lexes an internal-test token: an identifier introduced by `_` followed by
/// an alphabetic run (the lexer only calls this when the char after `_` is
/// alphabetic). Returns the `(start, token, end)` span triple directly —
/// this path cannot fail, so there is no `Result` wrapper.
fn internal_test(&mut self, start: usize) -> Spanned<'input, usize> {
    // Consume the leading `_` that the caller peeked past.
    self.bump();
    // Take the alphabetic body; `end` is the offset one past the last char.
    let (end, test) = self.take_while(start, char::is_alphabetic);

    (start, Token::InternalTest(test), end)
}
}

fn quoted_literal(
&mut self,
start: usize,
tok: impl Fn(&'input str) -> Tok<'input>,
) -> Spanned<'input, usize> {
) -> SpannedResult<'input, usize> {
self.bump();
let content_start = self.next_index();

Expand Down Expand Up @@ -1122,7 +1125,7 @@ mod test {
use super::*;
use crate::lex::Token::*;

fn lexer(input: &str) -> impl Iterator<Item = Spanned<'_, usize>> + '_ {
fn lexer(input: &str) -> impl Iterator<Item = SpannedResult<'_, usize>> + '_ {
let mut lexer = Lexer::new(input);
Box::new(std::iter::from_fn(move || Some(lexer.next()?)))
}
Expand Down
1 change: 1 addition & 0 deletions lib/vrl/stdlib/src/parse_aws_vpc_flow_log.rs
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ impl Expression for ParseAwsVpcFlowLogFn {

type ParseResult<T> = std::result::Result<T, String>;

#[allow(clippy::unnecessary_wraps)] // match other parse methods
fn identity<'a>(_key: &'a str, value: &'a str) -> ParseResult<&'a str> {
Ok(value)
}
Expand Down
Loading