From 46ebeed14ed26962c58a4cec7578a77010f2f328 Mon Sep 17 00:00:00 2001
From: j-mendez
Date: Tue, 20 Feb 2024 20:43:28 -0500
Subject: [PATCH] chore(clippy): fix clippy warnings

---
 spider/src/page.rs    |  3 +--
 spider/src/website.rs | 44 +++++++++++++++++++------------------------
 2 files changed, 20 insertions(+), 27 deletions(-)

diff --git a/spider/src/page.rs b/spider/src/page.rs
index 3846eceac..aa1d0e7f5 100644
--- a/spider/src/page.rs
+++ b/spider/src/page.rs
@@ -1223,8 +1223,7 @@ impl Page {
 
 #[cfg(test)]
 #[cfg(all(not(feature = "decentralized"), not(feature = "cache")))]
-const TEST_AGENT_NAME: &'static str =
-    concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));
+const TEST_AGENT_NAME: &str = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));
 
 #[cfg(all(
     feature = "headers",
diff --git a/spider/src/website.rs b/spider/src/website.rs
index 3ddbed199..d983cea0c 100644
--- a/spider/src/website.rs
+++ b/spider/src/website.rs
@@ -187,7 +187,7 @@ pub struct Website {
 impl Website {
     /// Initialize Website object with a start link to crawl.
     pub fn new(url: &str) -> Self {
-        let url = if url.starts_with(" ") || url.ends_with(" ") {
+        let url = if url.starts_with(' ') || url.ends_with(' ') {
             url.trim()
         } else {
             url
@@ -1594,10 +1594,9 @@ impl Website {
         self.crawl_concurrent(&client, &handle).await;
         self.sitemap_crawl_chain(&client, &handle, false).await;
         self.set_crawl_status();
-        match join_handle {
-            Some(h) => h.abort(),
-            _ => (),
-        };
+        if let Some(h) = join_handle {
+            h.abort()
+        }
         self.client.replace(client);
     }
 
@@ -1611,10 +1610,9 @@ impl Website {
         };
         self.sitemap_crawl(&client, &handle, false).await;
         self.set_crawl_status();
-        match join_handle {
-            Some(h) => h.abort(),
-            _ => (),
-        };
+        if let Some(h) = join_handle {
+            h.abort()
+        }
         self.client.replace(client);
     }
 
@@ -1629,10 +1627,9 @@ impl Website {
         };
         self.crawl_concurrent_smart(&client, &handle).await;
         self.set_crawl_status();
-        match join_handle {
-            Some(h) => h.abort(),
-            _ => (),
-        };
+        if let Some(h) = join_handle {
+            h.abort()
+        }
         self.client.replace(client);
     }
 
@@ -1653,10 +1650,9 @@ impl Website {
         self.crawl_concurrent_raw(&client, &handle).await;
         self.sitemap_crawl_chain(&client, &handle, false).await;
         self.set_crawl_status();
-        match join_handle {
-            Some(h) => h.abort(),
-            _ => (),
-        };
+        if let Some(h) = join_handle {
+            h.abort()
+        }
         self.client.replace(client);
     }
 
@@ -1671,10 +1667,9 @@ impl Website {
         self.scrape_concurrent(&client, &handle).await;
         self.sitemap_crawl_chain(&client, &handle, true).await;
         self.set_crawl_status();
-        match join_handle {
-            Some(h) => h.abort(),
-            _ => (),
-        };
+        if let Some(h) = join_handle {
+            h.abort()
+        }
         self.client.replace(client);
     }
 
@@ -1689,10 +1684,9 @@ impl Website {
         self.scrape_concurrent_raw(&client, &handle).await;
         self.sitemap_crawl_chain(&client, &handle, true).await;
         self.set_crawl_status();
-        match join_handle {
-            Some(h) => h.abort(),
-            _ => (),
-        };
+        if let Some(h) = join_handle {
+            h.abort()
+        }
         self.client.replace(client);
     }