Skip to content

Commit

Permalink
chore(deps): Upgrade to Rust 1.50.0 (#6428)
Browse files Browse the repository at this point in the history
* chore(deps): Upgrade Rust to 1.50.0

* Replace deprecated compare_and_swap

I believe this is the equivalent compare_exchange based on the table in
the docs:

Original Success  Failure
Relaxed  Relaxed  Relaxed
Acquire  Acquire  Acquire
Release  Release  Relaxed
AcqRel   AcqRel   Acquire
SeqCst   SeqCst   SeqCst

https://doc.rust-lang.org/std/sync/atomic/struct.AtomicUsize.html#migrating-to-compare_exchange-and-compare_exchange_weak

Also resolve all clippy errors.

Signed-off-by: Jesse Szwedko <jesse@szwedko.me>
  • Loading branch information
jszwedko authored Feb 18, 2021
1 parent fafbdb2 commit 5f6e07e
Show file tree
Hide file tree
Showing 39 changed files with 238 additions and 222 deletions.
18 changes: 12 additions & 6 deletions lib/prometheus-parser/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -605,7 +605,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedChar { expected: ',', .. }, ..
kind: ErrorKind::ExpectedChar { expected: ',', .. },
..
}
));

Expand All @@ -614,7 +615,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::InvalidMetricKind { .. }, ..
kind: ErrorKind::InvalidMetricKind { .. },
..
}
));

Expand All @@ -623,7 +625,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedSpace { .. }, ..
kind: ErrorKind::ExpectedSpace { .. },
..
}
));

Expand All @@ -632,7 +635,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedChar { expected: '"', .. }, ..
kind: ErrorKind::ExpectedChar { expected: '"', .. },
..
}
));

Expand All @@ -641,7 +645,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ExpectedChar { expected: '"', .. }, ..
kind: ErrorKind::ExpectedChar { expected: '"', .. },
..
}
));

Expand All @@ -650,7 +655,8 @@ mod test {
assert!(matches!(
error,
ParserError::WithLine {
kind: ErrorKind::ParseFloatError { .. }, ..
kind: ErrorKind::ParseFloatError { .. },
..
}
));
}
Expand Down
5 changes: 1 addition & 4 deletions lib/prometheus-parser/src/line.rs
Original file line number Diff line number Diff line change
Expand Up @@ -620,10 +620,7 @@ mod test {

let input = wrap(r#"{ a="b" ,, c="d" }"#);
let error = Metric::parse_labels(&input).unwrap_err().into();
assert!(matches!(
error,
ErrorKind::ParseNameError { .. }
));
assert!(matches!(error, ErrorKind::ParseNameError { .. }));
}

#[test]
Expand Down
10 changes: 5 additions & 5 deletions lib/shared/src/conversion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -184,11 +184,11 @@ fn parse_bool(s: &str) -> Result<bool, Error> {

/// Does the format specifier have a time zone option?
///
/// Returns `true` when `fmt` contains any `strftime`-style specifier that
/// carries time-zone information: `%Z` (zone name), `%z` / `%:z` / `%#z`
/// (numeric offsets), or `%+` (a full date-time format that embeds an offset).
fn format_has_zone(fmt: &str) -> bool {
    // `str::contains` replaces the old `find(..).is_some()` chain
    // (clippy: search_is_some).
    fmt.contains("%Z")
        || fmt.contains("%z")
        || fmt.contains("%:z")
        || fmt.contains("%#z")
        || fmt.contains("%+")
}

/// Convert a timestamp with a non-UTC time zone into UTC
Expand Down
13 changes: 6 additions & 7 deletions lib/vrl/cli/src/cmd.rs
Original file line number Diff line number Diff line change
Expand Up @@ -73,14 +73,13 @@ fn run(opts: &Opts) -> Result<(), Error> {
}
}

#[cfg(feature = "repl")]
fn repl(objects: Vec<Value>) -> Result<(), Error> {
repl::run(objects)
}

#[cfg(not(feature = "repl"))]
fn repl(_: Vec<Value>) -> Result<(), Error> {
Err(Error::ReplFeature)
if cfg!(feature = "repl") {
repl::run(objects);
Ok(())
} else {
Err(Error::ReplFeature)
}
}

fn execute(object: &mut impl Target, source: String) -> Result<Value, Error> {
Expand Down
1 change: 0 additions & 1 deletion lib/vrl/cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ pub enum Error {
#[error("json error")]
Json(#[from] serde_json::Error),

#[cfg(not(feature = "repl"))]
#[error("repl feature disabled, program input required")]
ReplFeature,
}
5 changes: 1 addition & 4 deletions lib/vrl/cli/src/repl.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
use crate::Error;
use indoc::indoc;
use lazy_static::lazy_static;
use prettytable::{format, Cell, Row, Table};
Expand All @@ -20,7 +19,7 @@ lazy_static! {
const DOCS_URL: &str = "https://vector.dev/docs/reference/vrl";
const ERRORS_URL_ROOT: &str = "https://errors.vrl.dev";

pub(crate) fn run(mut objects: Vec<Value>) -> Result<(), Error> {
pub(crate) fn run(mut objects: Vec<Value>) {
let mut index = 0;
let func_docs_regex = Regex::new(r"^help\sdocs\s(\w{1,})$").unwrap();
let error_docs_regex = Regex::new(r"^help\serror\s(\w{1,})$").unwrap();
Expand Down Expand Up @@ -92,8 +91,6 @@ pub(crate) fn run(mut objects: Vec<Value>) -> Result<(), Error> {
}
}
}

Ok(())
}

fn resolve(
Expand Down
83 changes: 43 additions & 40 deletions lib/vrl/parser/src/lex.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@ use std::iter::Peekable;
use std::str::CharIndices;

pub type Tok<'input> = Token<&'input str>;
pub type Spanned<'input, Loc> = Result<(Loc, Tok<'input>, Loc), Error>;
pub type SpannedResult<'input, Loc> = Result<Spanned<'input, Loc>, Error>;
pub type Spanned<'input, Loc> = (Loc, Tok<'input>, Loc);

#[derive(thiserror::Error, Clone, Debug, PartialEq)]
pub enum Error {
Expand Down Expand Up @@ -447,7 +448,7 @@ impl StringLiteral<&str> {
// -----------------------------------------------------------------------------

impl<'input> Iterator for Lexer<'input> {
type Item = Spanned<'input, usize>;
type Item = SpannedResult<'input, usize>;

fn next(&mut self) -> Option<Self::Item> {
use Token::*;
Expand All @@ -461,7 +462,7 @@ impl<'input> Iterator for Lexer<'input> {
// represent a physical character, instead it is a boundary marker.
if self.query_start(start) {
// dbg!("LQuery"); // NOTE: uncomment this for debugging
return self.token2(start, start + 1, LQuery);
return Some(Ok(self.token2(start, start + 1, LQuery)));
}

// Check if we need to emit a `RQuery` token.
Expand All @@ -470,7 +471,7 @@ impl<'input> Iterator for Lexer<'input> {
// represent a physical character, instead it is a boundary marker.
if let Some(pos) = self.query_end(start) {
// dbg!("RQuery"); // NOTE: uncomment this for debugging
return self.token2(pos, pos + 1, RQuery);
return Some(Ok(self.token2(pos, pos + 1, RQuery)));
}

// Advance the internal iterator and emit the next token, or loop
Expand All @@ -479,26 +480,28 @@ impl<'input> Iterator for Lexer<'input> {
let result = match ch {
'"' => Some(self.string_literal(start)),

';' => self.token(start, SemiColon),
'\n' => self.token(start, Newline),
'\\' => self.token(start, Escape),
';' => Some(Ok(self.token(start, SemiColon))),
'\n' => Some(Ok(self.token(start, Newline))),
'\\' => Some(Ok(self.token(start, Escape))),

'(' => self.open(start, LParen),
'[' => self.open(start, LBracket),
'{' => self.open(start, LBrace),
'}' => self.close(start, RBrace),
']' => self.close(start, RBracket),
')' => self.close(start, RParen),
'(' => Some(Ok(self.open(start, LParen))),
'[' => Some(Ok(self.open(start, LBracket))),
'{' => Some(Ok(self.open(start, LBrace))),
'}' => Some(Ok(self.close(start, RBrace))),
']' => Some(Ok(self.close(start, RBracket))),
')' => Some(Ok(self.close(start, RParen))),

'.' => self.token(start, Dot),
':' => self.token(start, Colon),
',' => self.token(start, Comma),
'.' => Some(Ok(self.token(start, Dot))),
':' => Some(Ok(self.token(start, Colon))),
',' => Some(Ok(self.token(start, Comma))),

'_' if self.test_peek(char::is_alphabetic) => Some(self.internal_test(start)),
'_' => self.token(start, Underscore),
'_' if self.test_peek(char::is_alphabetic) => {
Some(Ok(self.internal_test(start)))
}
'_' => Some(Ok(self.token(start, Underscore))),

'!' if self.test_peek(|ch| ch == '!' || !is_operator(ch)) => {
self.token(start, Bang)
Some(Ok(self.token(start, Bang)))
}

'#' => {
Expand All @@ -510,14 +513,14 @@ impl<'input> Iterator for Lexer<'input> {
's' if self.test_peek(|ch| ch == '\'') => Some(self.raw_string_literal(start)),
't' if self.test_peek(|ch| ch == '\'') => Some(self.timestamp_literal(start)),

ch if is_ident_start(ch) => Some(self.identifier_or_function_call(start)),
ch if is_ident_start(ch) => Some(Ok(self.identifier_or_function_call(start))),
ch if is_digit(ch) || (ch == '-' && self.test_peek(is_digit)) => {
Some(self.numeric_literal(start))
}
ch if is_operator(ch) => Some(self.operator(start)),
ch if is_operator(ch) => Some(Ok(self.operator(start))),
ch if ch.is_whitespace() => continue,

ch => self.token(start, InvalidToken(ch)),
ch => Some(Ok(self.token(start, InvalidToken(ch)))),
};

// dbg!(&result); // NOTE: uncomment this for debugging
Expand All @@ -529,7 +532,7 @@ impl<'input> Iterator for Lexer<'input> {
// queries.
} else if let Some(end) = self.rquery_indices.pop() {
// dbg!("RQuery"); // NOTE: uncomment this for debugging
return self.token2(end, end + 1, RQuery);
return Some(Ok(self.token2(end, end + 1, RQuery)));
}

return None;
Expand All @@ -542,7 +545,7 @@ impl<'input> Iterator for Lexer<'input> {
// -----------------------------------------------------------------------------

impl<'input> Lexer<'input> {
fn open(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
fn open(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
match &token {
Token::LParen => self.open_parens += 1,
Token::LBracket => self.open_brackets += 1,
Expand All @@ -553,7 +556,7 @@ impl<'input> Lexer<'input> {
self.token(start, token)
}

fn close(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
fn close(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
match &token {
Token::RParen => self.open_parens = self.open_parens.saturating_sub(1),
Token::RBracket => self.open_brackets = self.open_brackets.saturating_sub(1),
Expand All @@ -564,7 +567,7 @@ impl<'input> Lexer<'input> {
self.token(start, token)
}

fn token(&mut self, start: usize, token: Token<&'input str>) -> Option<Spanned<'input, usize>> {
fn token(&mut self, start: usize, token: Token<&'input str>) -> Spanned<'input, usize> {
let end = self.next_index();
self.token2(start, end, token)
}
Expand All @@ -574,8 +577,8 @@ impl<'input> Lexer<'input> {
start: usize,
end: usize,
token: Token<&'input str>,
) -> Option<Spanned<'input, usize>> {
Some(Ok((start, token, end)))
) -> Spanned<'input, usize> {
(start, token, end)
}

fn query_end(&mut self, start: usize) -> Option<usize> {
Expand Down Expand Up @@ -640,7 +643,7 @@ impl<'input> Lexer<'input> {
let mut end = 0;
while let Some((pos, ch)) = chars.next() {
let take_until_end =
|result: Spanned<'input, usize>,
|result: SpannedResult<'input, usize>,
last_char: &mut Option<char>,
end: &mut usize,
chars: &mut Peekable<CharIndices<'input>>| {
Expand Down Expand Up @@ -735,7 +738,7 @@ impl<'input> Lexer<'input> {
while let Some((pos, ch)) = chars.peek() {
let pos = *pos;

let literal_check = |result: Spanned<'input, usize>, chars: &mut Peekable<CharIndices<'input>>| match result {
let literal_check = |result: SpannedResult<'input, usize>, chars: &mut Peekable<CharIndices<'input>>| match result {
Err(_) => Err(()),
Ok((_, _, new)) => {
#[allow(clippy::while_let_on_iterator)]
Expand Down Expand Up @@ -854,7 +857,7 @@ impl<'input> Lexer<'input> {
true
}

fn string_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn string_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
let content_start = self.next_index();

loop {
Expand All @@ -876,19 +879,19 @@ impl<'input> Lexer<'input> {
Err(Error::StringLiteral { start })
}

fn regex_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn regex_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
self.quoted_literal(start, Token::RegexLiteral)
}

fn raw_string_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn raw_string_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
self.quoted_literal(start, |c| Token::StringLiteral(StringLiteral::Raw(c)))
}

fn timestamp_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn timestamp_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
self.quoted_literal(start, Token::TimestampLiteral)
}

fn numeric_literal(&mut self, start: usize) -> Spanned<'input, usize> {
fn numeric_literal(&mut self, start: usize) -> SpannedResult<'input, usize> {
let (end, int) = self.take_while(start, |ch| is_digit(ch) || ch == '_');

match self.peek() {
Expand Down Expand Up @@ -928,7 +931,7 @@ impl<'input> Lexer<'input> {
Token::ident(ident)
};

Ok((start, token, end))
(start, token, end)
}

fn operator(&mut self, start: usize) -> Spanned<'input, usize> {
Expand All @@ -941,21 +944,21 @@ impl<'input> Lexer<'input> {
op => Token::Operator(op),
};

Ok((start, token, end))
(start, token, end)
}

fn internal_test(&mut self, start: usize) -> Spanned<'input, usize> {
self.bump();
let (end, test) = self.take_while(start, char::is_alphabetic);

Ok((start, Token::InternalTest(test), end))
(start, Token::InternalTest(test), end)
}

fn quoted_literal(
&mut self,
start: usize,
tok: impl Fn(&'input str) -> Tok<'input>,
) -> Spanned<'input, usize> {
) -> SpannedResult<'input, usize> {
self.bump();
let content_start = self.next_index();

Expand Down Expand Up @@ -1122,7 +1125,7 @@ mod test {
use super::*;
use crate::lex::Token::*;

fn lexer(input: &str) -> impl Iterator<Item = Spanned<'_, usize>> + '_ {
fn lexer(input: &str) -> impl Iterator<Item = SpannedResult<'_, usize>> + '_ {
let mut lexer = Lexer::new(input);
Box::new(std::iter::from_fn(move || Some(lexer.next()?)))
}
Expand Down
1 change: 1 addition & 0 deletions lib/vrl/stdlib/src/parse_aws_vpc_flow_log.rs
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ impl Expression for ParseAwsVpcFlowLogFn {

/// Outcome of parsing a single field: the value, or a `String` error message.
type ParseResult<T> = std::result::Result<T, String>;

/// Hands `value` back untouched, ignoring `_key`.
///
/// The `Ok` wrapper is redundant for an infallible lookup, but keeping the
/// `ParseResult` signature lets this sit alongside the fallible field-parsing
/// helpers and be used interchangeably with them.
#[allow(clippy::unnecessary_wraps)] // match other parse methods
fn identity<'a>(_key: &'a str, value: &'a str) -> ParseResult<&'a str> {
    ParseResult::Ok(value)
}
Expand Down
Loading

0 comments on commit 5f6e07e

Please sign in to comment.