Fixes for Clippy lints #1758

Open · wants to merge 2 commits into base: master
10 changes: 5 additions & 5 deletions parser/src/command/assign.rs
@@ -47,7 +47,7 @@ impl AssignCommand {
if let Some(Token::Dot) | Some(Token::EndOfLine) = toks.peek_token()? {
toks.next_token()?;
*input = toks;
return Ok(Some(AssignCommand::Own));
Ok(Some(AssignCommand::Own))
} else {
return Err(toks.error(ParseError::ExpectedEnd));
}
@@ -97,9 +97,9 @@ impl AssignCommand {
mod tests {
use super::*;

fn parse<'a>(input: &'a str) -> Result<Option<AssignCommand>, Error<'a>> {
fn parse(input: &str) -> Result<Option<AssignCommand>, Error<'_>> {
let mut toks = Tokenizer::new(input);
Ok(AssignCommand::parse(&mut toks)?)
AssignCommand::parse(&mut toks)
}

#[test]
@@ -135,9 +135,9 @@ mod tests {
);
}

fn parse_review<'a>(input: &'a str) -> Result<Option<AssignCommand>, Error<'a>> {
fn parse_review(input: &str) -> Result<Option<AssignCommand>, Error<'_>> {
let mut toks = Tokenizer::new(input);
Ok(AssignCommand::parse_review(&mut toks)?)
AssignCommand::parse_review(&mut toks)
}

#[test]
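A note on the assign.rs hunks above: they cover two Clippy suggestions, needless_return (the value is already the last expression, so the return keyword goes) and, in the test helpers, what looks like needless_lifetimes (the explicit <'a> can be elided). A stand-alone sketch of the lifetime rewrite, with a toy Error type standing in for the parser's real one:

struct Error<'a> {
    msg: &'a str,
}

// Before: the lifetime is written out and threaded through by hand.
fn parse_explicit<'a>(input: &'a str) -> Result<(), Error<'a>> {
    if input.is_empty() {
        Err(Error { msg: input })
    } else {
        Ok(())
    }
}

// After: with a single reference argument, elision already ties the output
// lifetime to the input, so the signature needs no named lifetime at all.
fn parse_elided(input: &str) -> Result<(), Error<'_>> {
    if input.is_empty() {
        Err(Error { msg: input })
    } else {
        Ok(())
    }
}

fn main() {
    assert!(parse_explicit("assign").is_ok());
    match parse_elided("") {
        Err(e) => assert_eq!(e.msg, ""),
        Ok(()) => unreachable!(),
    }
}

Both functions behave identically; only the signature changes.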
6 changes: 3 additions & 3 deletions parser/src/command/glacier.rs
@@ -45,7 +45,7 @@ impl GlacierCommand {
Some(Token::Quote(s)) => {
let source = s.to_owned();
if source.starts_with("https://gist.github.com/") {
return Ok(Some(GlacierCommand { source }));
Ok(Some(GlacierCommand { source }))
} else {
return Err(toks.error(ParseError::InvalidLink));
Member: Why is this return not removed?

Contributor (author): Because Clippy didn't say to remove it, but I suppose it may as well be removed. Will do.

}
@@ -67,9 +67,9 @@ impl GlacierCommand {
mod test {
use super::*;

fn parse<'a>(input: &'a str) -> Result<Option<GlacierCommand>, Error<'a>> {
fn parse(input: &str) -> Result<Option<GlacierCommand>, Error<'_>> {
let mut toks = Tokenizer::new(input);
Ok(GlacierCommand::parse(&mut toks)?)
GlacierCommand::parse(&mut toks)
}

#[test]
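On the question raised in the thread above: once both arms of the trailing if/else produce the value, neither arm needs return, which is the follow-up the author agreed to make. A self-contained sketch of that shape (check_link and its String error are made up for illustration, not the real GlacierCommand API):

fn check_link(source: String) -> Result<Option<String>, String> {
    // The if/else is the last expression, so both branches simply evaluate
    // to the function's value; no `return` keyword in either arm.
    if source.starts_with("https://gist.github.com/") {
        Ok(Some(source))
    } else {
        Err(String::from("invalid link"))
    }
}

fn main() {
    assert!(check_link("https://gist.github.com/abc".to_owned()).is_ok());
    assert!(check_link("https://example.com/abc".to_owned()).is_err());
}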
6 changes: 3 additions & 3 deletions parser/src/command/nominate.rs
@@ -72,17 +72,17 @@ impl NominateCommand {
if let Some(Token::Dot) | Some(Token::EndOfLine) = toks.peek_token()? {
toks.next_token()?;
*input = toks;
return Ok(Some(NominateCommand { team, style }));
Ok(Some(NominateCommand { team, style }))
} else {
return Err(toks.error(ParseError::ExpectedEnd));
Member: Same here.

}
}
}

#[cfg(test)]
fn parse<'a>(input: &'a str) -> Result<Option<NominateCommand>, Error<'a>> {
fn parse(input: &str) -> Result<Option<NominateCommand>, Error<'_>> {
let mut toks = Tokenizer::new(input);
Ok(NominateCommand::parse(&mut toks)?)
NominateCommand::parse(&mut toks)
}

#[test]
2 changes: 1 addition & 1 deletion parser/src/command/note.rs
@@ -29,7 +29,7 @@ impl NoteCommand {
let mut remove = false;
loop {
match toks.next_token()? {
Some(Token::Word(title)) if title == "remove" => {
Some(Token::Word("remove")) => {
remove = true;
continue;
}
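The note.rs change above replaces a bound variable plus guard with the string literal written directly in the pattern; the two forms match exactly the same inputs. A minimal stand-alone version (the Token enum here is a toy one, not the parser's):

enum Token<'a> {
    Word(&'a str),
}

fn is_remove(tok: Option<Token<'_>>) -> bool {
    // Guard form:   matches!(tok, Some(Token::Word(title)) if title == "remove")
    // Literal form, as in the diff above:
    matches!(tok, Some(Token::Word("remove")))
}

fn main() {
    assert!(is_remove(Some(Token::Word("remove"))));
    assert!(!is_remove(Some(Token::Word("keep"))));
    assert!(!is_remove(None));
}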
8 changes: 4 additions & 4 deletions parser/src/command/ping.rs
@@ -47,20 +47,20 @@ impl PingCommand {
if let Some(Token::Dot) | Some(Token::EndOfLine) = toks.peek_token()? {
toks.next_token()?;
*input = toks;
return Ok(Some(PingCommand { team }));
Ok(Some(PingCommand { team }))
} else {
return Err(toks.error(ParseError::ExpectedEnd));
Member: And here.

}
} else {
return Ok(None);
Ok(None)
}
}
}

#[cfg(test)]
fn parse<'a>(input: &'a str) -> Result<Option<PingCommand>, Error<'a>> {
fn parse(input: &str) -> Result<Option<PingCommand>, Error<'_>> {
let mut toks = Tokenizer::new(input);
Ok(PingCommand::parse(&mut toks)?)
PingCommand::parse(&mut toks)
}

#[test]
10 changes: 5 additions & 5 deletions parser/src/command/relabel.rs
@@ -92,13 +92,13 @@ impl LabelDelta {
return Err(input.error(ParseError::ExpectedLabelDelta));
}
};
if delta.starts_with('+') {
if let Some(rest) = delta.strip_prefix('+') {
Ok(LabelDelta::Add(
Label::parse(&delta[1..]).map_err(|e| input.error(e))?,
Label::parse(rest).map_err(|e| input.error(e))?,
))
} else if delta.starts_with('-') {
} else if let Some(rest) = delta.strip_prefix('-') {
Ok(LabelDelta::Remove(
Label::parse(&delta[1..]).map_err(|e| input.error(e))?,
Label::parse(rest).map_err(|e| input.error(e))?,
))
} else {
Ok(LabelDelta::Add(
@@ -165,7 +165,7 @@ impl RelabelCommand {
}

#[cfg(test)]
fn parse<'a>(input: &'a str) -> Result<Option<Vec<LabelDelta>>, Error<'a>> {
fn parse(input: &str) -> Result<Option<Vec<LabelDelta>>, Error<'_>> {
let mut toks = Tokenizer::new(input);
Ok(RelabelCommand::parse(&mut toks)?.map(|c| c.0))
}
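The relabel.rs change above is Clippy's manual_strip rewrite: starts_with followed by slicing off the first byte by hand becomes strip_prefix, which checks and strips in one call and drops the hard-coded index. A stand-alone sketch (classify and its tuple return are invented for the example; the real code builds LabelDelta::Add/Remove):

fn classify(delta: &str) -> (&'static str, &str) {
    if let Some(rest) = delta.strip_prefix('+') {
        ("add", rest)
    } else if let Some(rest) = delta.strip_prefix('-') {
        ("remove", rest)
    } else {
        // No explicit sign defaults to an addition, as in the original code.
        ("add", delta)
    }
}

fn main() {
    assert_eq!(classify("+T-lang"), ("add", "T-lang"));
    assert_eq!(classify("-S-waiting-on-review"), ("remove", "S-waiting-on-review"));
    assert_eq!(classify("T-compiler"), ("add", "T-compiler"));
}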
2 changes: 1 addition & 1 deletion parser/src/command/shortcut.rs
@@ -57,7 +57,7 @@ impl ShortcutCommand {
#[cfg(test)]
fn parse(input: &str) -> Result<Option<ShortcutCommand>, Error<'_>> {
let mut toks = Tokenizer::new(input);
Ok(ShortcutCommand::parse(&mut toks)?)
ShortcutCommand::parse(&mut toks)
}

#[test]
4 changes: 2 additions & 2 deletions parser/src/ignore_block.rs
@@ -13,7 +13,7 @@ impl IgnoreBlocks {
while let Some((event, range)) = parser.next() {
if let Event::Start(Tag::CodeBlock(_)) = event {
let start = range.start;
while let Some((event, range)) = parser.next() {
for (event, range) in parser.by_ref() {
if let Event::End(Tag::CodeBlock(_)) = event {
ignore.push(start..range.end);
break;
@@ -22,7 +22,7 @@ impl IgnoreBlocks {
} else if let Event::Start(Tag::BlockQuote) = event {
let start = range.start;
let mut count = 1;
while let Some((event, range)) = parser.next() {
for (event, range) in parser.by_ref() {
if let Event::Start(Tag::BlockQuote) = event {
count += 1;
} else if let Event::End(Tag::BlockQuote) = event {
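The ignore_block.rs hunks above are Clippy's while_let_on_iterator fix: a while let Some(..) = it.next() loop becomes a for loop over it.by_ref(), and because by_ref only borrows the iterator, the surrounding code can keep pulling from it after the inner loop breaks, which is what the nested code-block and blockquote scanning relies on. A small stand-alone demonstration:

fn split_at_first_negative(values: Vec<i32>) -> (Vec<i32>, Vec<i32>) {
    let mut it = values.into_iter();
    let mut head = Vec::new();
    // Before: while let Some(v) = it.next() { ... }
    for v in it.by_ref() {
        if v < 0 {
            break;
        }
        head.push(v);
    }
    // `it` is still usable here because the for loop only borrowed it.
    let tail: Vec<i32> = it.collect();
    (head, tail)
}

fn main() {
    assert_eq!(
        split_at_first_negative(vec![1, 2, -1, 3]),
        (vec![1, 2], vec![3])
    );
}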
4 changes: 2 additions & 2 deletions parser/src/token.rs
@@ -207,7 +207,7 @@ impl<'a> Tokenizer<'a> {
}
self.advance();
}
Ok(Some(Token::Word(&self.str_from(start))))
Ok(Some(Token::Word(self.str_from(start))))
}

pub fn eat_token(&mut self, token: Token<'a>) -> Result<bool, Error<'a>> {
@@ -222,7 +222,7 @@ impl<'a> Tokenizer<'a> {
}

#[cfg(test)]
fn tokenize<'a>(input: &'a str) -> Result<Vec<Token<'a>>, Error<'a>> {
fn tokenize(input: &str) -> Result<Vec<Token<'_>>, Error<'_>> {
let mut tokens = Vec::new();
let mut gen = Tokenizer::new(input);
while let Some(tok) = gen.next_token()? {
10 changes: 5 additions & 5 deletions src/agenda.rs
@@ -2,7 +2,7 @@ use crate::actions::{Action, Query, QueryKind, QueryMap, Step};
use crate::github;
use std::sync::Arc;

pub fn prioritization<'a>() -> Box<dyn Action> {
pub fn prioritization() -> Box<dyn Action> {
Box::new(Step {
name: "prioritization_agenda",
actions: vec![
@@ -442,7 +442,7 @@ pub fn prioritization<'a>() -> Box<dyn Action> {
})
}

pub fn lang<'a>() -> Box<dyn Action + Send + Sync> {
pub fn lang() -> Box<dyn Action + Send + Sync> {
Box::new(Step {
name: "lang_agenda",
actions: vec![
@@ -561,7 +561,7 @@ pub fn lang<'a>() -> Box<dyn Action + Send + Sync> {
})
}

pub fn lang_planning<'a>() -> Box<dyn Action + Send + Sync> {
pub fn lang_planning() -> Box<dyn Action + Send + Sync> {
Box::new(Step {
name: "lang_planning_agenda",
actions: vec![
@@ -611,7 +611,7 @@ pub fn lang_planning<'a>() -> Box<dyn Action + Send + Sync> {
})
}

pub fn types_planning<'a>() -> Box<dyn Action + Send + Sync> {
pub fn types_planning() -> Box<dyn Action + Send + Sync> {
Box::new(Step {
name: "types_planning_agenda",
actions: vec![
@@ -677,7 +677,7 @@ pub fn types_planning<'a>() -> Box<dyn Action + Send + Sync> {
// Things to add (maybe):
// - Compiler RFCs
// - P-high issues
pub fn compiler_backlog_bonanza<'a>() -> Box<dyn Action> {
pub fn compiler_backlog_bonanza() -> Box<dyn Action> {
Box::new(Step {
name: "compiler_backlog_bonanza",
actions: vec![Query {
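The agenda.rs functions above declared a lifetime parameter that nothing in the signature or body uses, so it can simply be dropped with no effect on callers (Clippy has a lint for exactly this; extra_unused_lifetimes is my best guess at the name). A trivial sketch:

// Before: <'a> is declared but never referenced.
fn agenda_name_before<'a>() -> &'static str {
    "prioritization_agenda"
}

// After: same function, no phantom generic parameter.
fn agenda_name_after() -> &'static str {
    "prioritization_agenda"
}

fn main() {
    assert_eq!(agenda_name_before(), agenda_name_after());
}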
13 changes: 4 additions & 9 deletions src/bin/compiler.rs
@@ -6,15 +6,10 @@ async fn main() -> anyhow::Result<()> {
tracing_subscriber::fmt::init();

let args: Vec<String> = std::env::args().collect();
if args.len() == 2 {
match &args[1][..] {
"backlog_bonanza" => {
let agenda = agenda::compiler_backlog_bonanza();
print!("{}", agenda.call().await?);
return Ok(());
}
_ => {}
}
if args.len() == 2 && args[1] == "backlog_bonanza" {
Member: I think leaving this as-is is better for future-proofing.

let agenda = agenda::compiler_backlog_bonanza();
print!("{}", agenda.call().await?);
return Ok(());
}

eprintln!("Usage: compiler (backlog_bonanza)");
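On the reviewer's future-proofing point above: with the original match on the subcommand name, supporting a second subcommand is just one more arm, while the collapsed args.len() == 2 && args[1] == "backlog_bonanza" form would have to grow back into a match or an if/else chain. A sketch of the extensible shape (dispatch and the commented-out "triage" arm are hypothetical; as_str() is equivalent to the original &args[1][..]):

fn dispatch(args: &[String]) -> Option<&'static str> {
    if args.len() == 2 {
        match args[1].as_str() {
            "backlog_bonanza" => return Some("backlog_bonanza agenda"),
            // A future subcommand slots in as one extra arm:
            // "triage" => return Some("triage agenda"),
            _ => {}
        }
    }
    None
}

fn main() {
    let args: Vec<String> = vec!["compiler".into(), "backlog_bonanza".into()];
    assert_eq!(dispatch(&args), Some("backlog_bonanza agenda"));
    assert_eq!(dispatch(&["compiler".into()]), None);
}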
13 changes: 4 additions & 9 deletions src/bin/types.rs
@@ -6,15 +6,10 @@ async fn main() -> anyhow::Result<()> {
tracing_subscriber::fmt::init();

let args: Vec<String> = std::env::args().collect();
if args.len() == 2 {
match &args[1][..] {
"planning" => {
let agenda = agenda::types_planning();
print!("{}", agenda.call().await?);
return Ok(());
}
_ => {}
}
if args.len() == 2 && args[1] == "planning" {
Member: Same here.

let agenda = agenda::types_planning();
print!("{}", agenda.call().await?);
return Ok(());
}

eprintln!("Usage: types (planning)");
2 changes: 1 addition & 1 deletion src/changelogs/rustc.rs
@@ -24,7 +24,7 @@ impl<'a> RustcFormat<'a> {
}

pub(super) fn parse(mut self, content: &str) -> anyhow::Result<Changelog> {
let ast = comrak::parse_document(&self.arena, &content, &ComrakOptions::default());
let ast = comrak::parse_document(self.arena, content, &ComrakOptions::default());

let mut section_ast = Vec::new();
for child in ast.children() {
6 changes: 4 additions & 2 deletions src/config.rs
@@ -9,9 +9,11 @@ use tracing as log;
static CONFIG_FILE_NAME: &str = "triagebot.toml";
const REFRESH_EVERY: Duration = Duration::from_secs(2 * 60); // Every two minutes

type CacheLeaf = (Result<Arc<Config>, ConfigurationError>, Instant);

lazy_static::lazy_static! {
static ref CONFIG_CACHE:
RwLock<HashMap<String, (Result<Arc<Config>, ConfigurationError>, Instant)>> =
RwLock<HashMap<String, CacheLeaf>> =
RwLock::new(HashMap::new());
}

@@ -378,7 +380,7 @@ mod tests {

[shortcut]
"#;
let config = toml::from_str::<Config>(&config).unwrap();
let config = toml::from_str::<Config>(config).unwrap();
let mut ping_teams = HashMap::new();
ping_teams.insert(
"compiler".to_owned(),
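The config.rs change above pulls the cache's value type out into a CacheLeaf alias, which is the usual response to Clippy's type_complexity warning about deeply nested generics. A small sketch with stand-in Config/ConfigurationError types:

use std::collections::HashMap;
use std::sync::Arc;
use std::time::Instant;

struct Config;
struct ConfigurationError;

// Naming the (result, freshness) pair once...
type CacheLeaf = (Result<Arc<Config>, ConfigurationError>, Instant);
// ...keeps the map type readable instead of nesting four generics deep.
type Cache = HashMap<String, CacheLeaf>;

fn main() {
    let mut cache: Cache = HashMap::new();
    cache.insert("rust-lang/rust".to_owned(), (Ok(Arc::new(Config)), Instant::now()));
    cache.insert("rust-lang/triagebot".to_owned(), (Err(ConfigurationError), Instant::now()));
    assert_eq!(cache.len(), 2);
}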
21 changes: 12 additions & 9 deletions src/db.rs
@@ -95,7 +95,7 @@ async fn make_client() -> anyhow::Result<tokio_postgres::Client> {
let db_url = std::env::var("DATABASE_URL").expect("needs DATABASE_URL");
if db_url.contains("rds.amazonaws.com") {
let cert = &CERTIFICATE_PEM[..];
let cert = Certificate::from_pem(&cert).context("made certificate")?;
let cert = Certificate::from_pem(cert).context("made certificate")?;
let connector = TlsConnector::builder()
.add_root_certificate(cert)
.build()
@@ -205,29 +205,32 @@ pub async fn schedule_job(
anyhow::bail!("Job {} does not exist in the current job list.", job_name);
}

if let Err(_) = get_job_by_name_and_scheduled_at(&db, job_name, &when).await {
if get_job_by_name_and_scheduled_at(db, job_name, &when)
.await
.is_err()
{
// mean there's no job already in the db with that name and scheduled_at
insert_job(&db, job_name, &when, &job_metadata).await?;
insert_job(db, job_name, &when, &job_metadata).await?;
}

Ok(())
}

pub async fn run_scheduled_jobs(ctx: &Context, db: &DbClient) -> anyhow::Result<()> {
let jobs = get_jobs_to_execute(&db).await.unwrap();
let jobs = get_jobs_to_execute(db).await.unwrap();
tracing::trace!("jobs to execute: {:#?}", jobs);

for job in jobs.iter() {
update_job_executed_at(&db, &job.id).await?;
update_job_executed_at(db, &job.id).await?;

match handle_job(&ctx, &job.name, &job.metadata).await {
match handle_job(ctx, &job.name, &job.metadata).await {
Ok(_) => {
tracing::trace!("job successfully executed (id={})", job.id);
delete_job(&db, &job.id).await?;
delete_job(db, &job.id).await?;
}
Err(e) => {
tracing::error!("job failed on execution (id={:?}, error={:?})", job.id, e);
update_job_error_message(&db, &job.id, &e.to_string()).await?;
update_job_error_message(db, &job.id, &e.to_string()).await?;
}
}
}
@@ -242,7 +245,7 @@ async fn handle_job(
metadata: &serde_json::Value,
) -> anyhow::Result<()> {
for job in jobs() {
if &job.name() == &name {
if job.name() == name {
return job.run(ctx, metadata).await;
}
}
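Most of the db.rs churn above is Clippy's needless_borrow (and, for the name comparison, op_ref) advice: when a binding is already a reference, writing &db or &job.name() only adds a layer of borrowing that auto-deref immediately peels back off. A stand-alone sketch with a dummy DbClient:

struct DbClient;

fn count_jobs(_db: &DbClient) -> usize {
    0
}

fn main() {
    let client = DbClient;
    let db: &DbClient = &client;

    // needless_borrow: `&db` is a `&&DbClient` that auto-deref reduces back down.
    let _ = count_jobs(&db);
    // Preferred: pass the reference that is already in hand.
    let _ = count_jobs(db);

    // op_ref, as in `&job.name() == &name`: the extra `&` on both sides of
    // `==` is redundant because the comparison works on the values directly.
    let (a, b) = ("cleanup", "cleanup");
    assert!(&a == &b);
    assert!(a == b);
}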
9 changes: 5 additions & 4 deletions src/db/notifications.rs
@@ -1,5 +1,6 @@
use anyhow::Context as _;
use chrono::{DateTime, FixedOffset};
use std::cmp::Ordering;
use tokio_postgres::Client as DbClient;
use tracing as log;

@@ -222,10 +223,10 @@ pub async fn move_indices(
);
}

if from < to {
notifications[from..=to].rotate_left(1);
} else if to < from {
notifications[to..=from].rotate_right(1);
match from.cmp(&to) {
Ordering::Less => notifications[from..=to].rotate_left(1),
Ordering::Greater => notifications[to..=from].rotate_right(1),
Ordering::Equal => (),
}

for (idx, id) in notifications.into_iter().enumerate() {
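The notifications.rs rewrite above is Clippy's comparison_chain suggestion: an if/else-if that compares the same two values with < and > becomes one match on Ordering, which also makes the equal case explicit instead of silently falling through. A self-contained version of the same rotate logic:

use std::cmp::Ordering;

// Move the element at `from` to position `to`, shifting everything between,
// the same rotate trick move_indices uses on notification rows.
fn move_index(items: &mut [u64], from: usize, to: usize) {
    match from.cmp(&to) {
        Ordering::Less => items[from..=to].rotate_left(1),
        Ordering::Greater => items[to..=from].rotate_right(1),
        Ordering::Equal => (),
    }
}

fn main() {
    let mut ids = [10, 20, 30, 40];
    move_index(&mut ids, 0, 2);
    assert_eq!(ids, [20, 30, 10, 40]);
}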