diff --git a/src/web/error.rs b/src/web/error.rs
index b002712b1..b3bcf8f64 100644
--- a/src/web/error.rs
+++ b/src/web/error.rs
@@ -22,8 +22,6 @@ pub enum Nope {
     OwnerNotFound,
     #[error("Requested crate does not have specified version")]
     VersionNotFound,
-    #[error("Search yielded no results")]
-    NoResults,
     #[error("Internal server error")]
     InternalServerError,
 }
@@ -37,8 +35,7 @@ impl From<Nope> for IronError {
             | Nope::BuildNotFound
             | Nope::CrateNotFound
             | Nope::OwnerNotFound
-            | Nope::VersionNotFound
-            | Nope::NoResults => status::NotFound,
+            | Nope::VersionNotFound => status::NotFound,
             Nope::InternalServerError => status::InternalServerError,
         };
@@ -96,29 +93,6 @@ impl Handler for Nope {
                 .into_response(req)
             }

-            Nope::NoResults => {
-                let mut params = req.url.as_ref().query_pairs();
-
-                if let Some((_, query)) = params.find(|(key, _)| key == "query") {
-                    // this used to be a search
-                    Search {
-                        title: format!("No crates found matching '{}'", query),
-                        search_query: Some(query.into_owned()),
-                        status: Status::NotFound,
-                        ..Default::default()
-                    }
-                    .into_response(req)
-                } else {
-                    // user did a search with no search terms
-                    Search {
-                        title: "No results given for empty search query".to_owned(),
-                        status: Status::NotFound,
-                        ..Default::default()
-                    }
-                    .into_response(req)
-                }
-            }
-
             Nope::InternalServerError => {
                 // something went wrong, details should have been logged
                 ErrorPage {
@@ -151,8 +125,8 @@ pub enum AxumNope {
     OwnerNotFound,
     #[error("Requested crate does not have specified version")]
     VersionNotFound,
-    // #[error("Search yielded no results")]
-    // NoResults,
+    #[error("Search yielded no results")]
+    NoResults,
     #[error("Internal server error")]
     InternalServerError,
     #[error("internal error")]
@@ -207,9 +181,15 @@ impl IntoResponse for AxumNope {
                 }
                 .into_response()
             }
-            // AxumNope::NoResults => {
-            //     todo!("to be implemented when search-handler is migrated to axum")
-            // }
+            AxumNope::NoResults => {
+                // user did a search with no search terms
+                Search {
+                    title: "No results given for empty search query".to_owned(),
+                    status: StatusCode::NOT_FOUND,
+                    ..Default::default()
+                }
+                .into_response()
+            }
             AxumNope::InternalServerError => {
                 // something went wrong, details should have been logged
                 AxumErrorPage {
@@ -254,7 +234,6 @@ impl From<Nope> for AxumNope {
             Nope::CrateNotFound => AxumNope::CrateNotFound,
             Nope::OwnerNotFound => AxumNope::OwnerNotFound,
             Nope::VersionNotFound => AxumNope::VersionNotFound,
-            Nope::NoResults => todo!(),
             Nope::InternalServerError => AxumNope::InternalServerError,
         }
     }
diff --git a/src/web/mod.rs b/src/web/mod.rs
index ad20d58d8..cac1efc18 100644
--- a/src/web/mod.rs
+++ b/src/web/mod.rs
@@ -3,8 +3,8 @@
 pub mod page;

 use crate::utils::get_correct_docsrs_style_file;
-use crate::utils::report_error;
-use anyhow::anyhow;
+use crate::utils::{report_error, spawn_blocking};
+use anyhow::{anyhow, bail, Context as _};
 use serde_json::Value;
 use tracing::{info, instrument};
@@ -92,7 +92,7 @@ mod source;
 mod statics;
 mod strangler;

-use crate::{impl_axum_webpage, impl_webpage, Context};
+use crate::{db::Pool, impl_axum_webpage, impl_webpage, Context};
 use anyhow::Error;
 use axum::{
     extract::Extension,
@@ -123,6 +123,7 @@ use std::{borrow::Cow, net::SocketAddr, sync::Arc};
 use strangler::StranglerService;
 use tower::ServiceBuilder;
 use tower_http::trace::TraceLayer;
+use url::form_urlencoded;

 /// Duration of static files for staticfile and DatabaseFileHandler (in seconds)
 const STATIC_FILE_CACHE_DURATION: u64 = 60 * 60 * 24 * 30 * 12; // 12 months
@@ -428,6 +429,26 @@ fn match_version(
     Err(Nope::VersionNotFound)
 }

+// temporary wrapper around `match_version` for axum handlers.
+// +// FIXME: this can go when we fully migrated to axum / async in web +async fn match_version_axum( + pool: &Pool, + name: &str, + input_version: Option<&str>, +) -> Result { + spawn_blocking({ + let name = name.to_owned(); + let input_version = input_version.map(str::to_owned); + let pool = pool.clone(); + move || { + let mut conn = pool.get()?; + Ok(match_version(&mut conn, &name, input_version.as_deref())?) + } + }) + .await +} + #[instrument(skip_all)] pub(crate) fn build_axum_app( context: &dyn Context, @@ -539,15 +560,29 @@ fn redirect(url: Url) -> Response { resp } -fn axum_redirect(url: &str) -> Result { - if !url.starts_with('/') || url.starts_with("//") { - return Err(anyhow!("invalid redirect URL: {}", url)); +fn axum_redirect(uri: U) -> Result +where + U: TryInto, + >::Error: std::fmt::Debug, +{ + let uri: http::Uri = uri + .try_into() + .map_err(|err| anyhow!("invalid URI: {:?}", err))?; + + if let Some(path_and_query) = uri.path_and_query() { + if path_and_query.as_str().starts_with("//") { + bail!("protocol relative redirects are forbidden"); + } + } else { + // we always want a path to redirect to, even when it's just `/` + bail!("missing path in URI"); } + Ok(( StatusCode::FOUND, [( http::header::LOCATION, - http::HeaderValue::try_from(url).expect("invalid url for redirect"), + http::HeaderValue::try_from(uri.to_string()).context("invalid uri for redirect")?, )], )) } @@ -605,6 +640,29 @@ where } } +/// Parse an URI into a http::Uri struct. +/// When `queries` are given these are added to the URL, +/// with empty `queries` the `?` will be omitted. 
+pub(crate) fn axum_parse_uri_with_params<I, K, V>(uri: &str, queries: I) -> Result<http::Uri, Error>
+where
+    I: IntoIterator,
+    I::Item: Borrow<(K, V)>,
+    K: AsRef<str>,
+    V: AsRef<str>,
+{
+    let mut queries = queries.into_iter().peekable();
+    if queries.peek().is_some() {
+        let query_params: String = form_urlencoded::Serializer::new(String::new())
+            .extend_pairs(queries)
+            .finish();
+        format!("{uri}?{}", query_params)
+            .parse::<http::Uri>()
+            .context("error parsing URL")
+    } else {
+        uri.parse::<http::Uri>().context("error parsing URL")
+    }
+}
+
 /// MetaData used in header
 #[derive(Debug, Clone, PartialEq, Eq, Serialize)]
 pub(crate) struct MetaData {
diff --git a/src/web/releases.rs b/src/web/releases.rs
index 725d7d89b..471e75cec 100644
--- a/src/web/releases.rs
+++ b/src/web/releases.rs
@@ -3,34 +3,27 @@
 use crate::{
     build_queue::QueuedCrate,
     cdn::{self, CrateInvalidation},
-    db::{Pool, PoolClient},
-    impl_axum_webpage, impl_webpage,
+    db::Pool,
+    impl_axum_webpage,
     utils::{report_error, spawn_blocking},
     web::{
-        error::{AxumResult, Nope},
-        match_version,
-        page::WebPage,
-        parse_url_with_params, redirect_base,
+        axum_parse_uri_with_params, axum_redirect,
+        error::{AxumNope, AxumResult},
+        match_version_axum,
     },
-    BuildQueue, Config,
+    BuildQueue, Config, Metrics,
 };
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context as _, Result};
 use axum::{
-    extract::{Extension, Path},
-    response::IntoResponse,
+    extract::{Extension, Path, Query},
+    response::{IntoResponse, Response as AxumResponse},
 };
 use chrono::{DateTime, NaiveDate, Utc};
-use iron::{
-    headers::{ContentType, Expires, HttpDate},
-    mime::{Mime, SubLevel, TopLevel},
-    modifiers::Redirect,
-    status, IronError, IronResult, Request, Response as IronResponse, Url,
-};
 use postgres::Client;
-use router::Router;
 use serde::{Deserialize, Serialize};
 use std::collections::{BTreeMap, HashMap};
 use std::str;
+use std::sync::Arc;
 use tracing::{debug, warn};
 use url::form_urlencoded;
@@ -134,10 +127,7 @@ struct SearchResult {
 /// Get the search results for a crate search query
 ///
 /// This delegates to the crates.io search API.
-fn get_search_results(
-    conn: &mut Client,
-    query_params: &str,
-) -> Result<SearchResult, anyhow::Error> {
+async fn get_search_results(pool: Pool, query_params: &str) -> Result<SearchResult, anyhow::Error> {
     #[derive(Deserialize)]
     struct CratesIoSearchResult {
         crates: Vec<CratesIoCrate>,
@@ -155,8 +145,8 @@ fn get_search_results(
     use crate::utils::APP_USER_AGENT;
     use once_cell::sync::Lazy;
-    use reqwest::blocking::Client as HttpClient;
     use reqwest::header::{HeaderMap, HeaderValue, ACCEPT, USER_AGENT};
+    use reqwest::Client as HttpClient;

     static HTTP_CLIENT: Lazy<HttpClient> = Lazy::new(|| {
         let mut headers = HeaderMap::new();
@@ -187,13 +177,21 @@ fn get_search_results(
         }
     });

-    let releases: CratesIoSearchResult = HTTP_CLIENT.get(url).send()?.error_for_status()?.json()?;
-
-    let names: Vec<_> = releases
-        .crates
-        .into_iter()
-        .map(|krate| krate.name)
-        .collect();
+    let releases: CratesIoSearchResult = HTTP_CLIENT
+        .get(url)
+        .send()
+        .await?
+        .error_for_status()?
+        .json()
+        .await?;
+
+    let names = Arc::new(
+        releases
+            .crates
+            .into_iter()
+            .map(|krate| krate.name)
+            .collect::<Vec<String>>(),
+    );

     // now we're trying to get the docs.rs data for the crates
     // returned by the search.
@@ -202,43 +200,50 @@ fn get_search_results(
     // So for now we are using the version with the youngest release_time.
     // This is different from all other release-list views where we show
     // our latest build.
-    let crates: HashMap<String, Release> = conn
-        .query(
-            "SELECT
-                crates.name,
-                releases.version,
-                releases.description,
-                builds.build_time,
-                releases.target_name,
-                releases.rustdoc_status,
-                repositories.stars
-
-            FROM crates
-            INNER JOIN releases ON crates.latest_version_id = releases.id
-            INNER JOIN builds ON releases.id = builds.rid
-            LEFT JOIN repositories ON releases.repository_id = repositories.id
-
-            WHERE crates.name = ANY($1)",
-            &[&names],
-        )?
-        .into_iter()
-        .map(|row| {
-            let stars: Option<_> = row.get("stars");
-            let name: String = row.get("name");
-            (
-                name.clone(),
-                Release {
-                    name,
-                    version: row.get("version"),
-                    description: row.get("description"),
-                    build_time: row.get("build_time"),
-                    target_name: row.get("target_name"),
-                    rustdoc_status: row.get("rustdoc_status"),
-                    stars: stars.unwrap_or(0),
-                },
-            )
-        })
-        .collect();
+    let crates: HashMap<String, Release> = spawn_blocking({
+        let names = names.clone();
+        move || {
+            let mut conn = pool.get()?;
+            Ok(conn
+                .query(
+                    "SELECT
+                        crates.name,
+                        releases.version,
+                        releases.description,
+                        builds.build_time,
+                        releases.target_name,
+                        releases.rustdoc_status,
+                        repositories.stars
+
+                    FROM crates
+                    INNER JOIN releases ON crates.latest_version_id = releases.id
+                    INNER JOIN builds ON releases.id = builds.rid
+                    LEFT JOIN repositories ON releases.repository_id = repositories.id
+
+                    WHERE crates.name = ANY($1)",
+                    &[&*names],
+                )?
+                .into_iter()
+                .map(|row| {
+                    let stars: Option<_> = row.get("stars");
+                    let name: String = row.get("name");
+                    (
+                        name.clone(),
+                        Release {
+                            name,
+                            version: row.get("version"),
+                            description: row.get("description"),
+                            build_time: row.get("build_time"),
+                            target_name: row.get("target_name"),
+                            rustdoc_status: row.get("rustdoc_status"),
+                            stars: stars.unwrap_or(0),
+                        },
+                    )
+                })
+                .collect())
+        }
+    })
+    .await?;

     Ok(SearchResult {
         // start with the original names from crates.io to keep the original ranking,
@@ -260,18 +265,18 @@ struct HomePage {
     recent_releases: Vec<Release>,
 }

-impl_webpage! {
+impl_axum_webpage! {
     HomePage = "core/home.html",
 }

-pub fn home_page(req: &mut Request) -> IronResult<IronResponse> {
-    let mut conn = extension!(req, Pool).get()?;
-    let recent_releases = ctry!(
-        req,
+pub(crate) async fn home_page(Extension(pool): Extension<Pool>) -> AxumResult<impl IntoResponse> {
+    let recent_releases = spawn_blocking(move || {
+        let mut conn = pool.get()?;
         get_releases(&mut conn, 1, RELEASES_IN_HOME, Order::ReleaseTime, true)
-    );
+    })
+    .await?;

-    HomePage { recent_releases }.into_response(req)
+    Ok(HomePage { recent_releases })
 }

 #[derive(Debug, Clone, PartialEq, Eq, Serialize)]
@@ -279,19 +284,21 @@ struct ReleaseFeed {
     recent_releases: Vec<Release>,
 }

-impl_webpage! {
+impl_axum_webpage! {
     ReleaseFeed = "releases/feed.xml",
-    content_type = ContentType(Mime(TopLevel::Application, SubLevel::Xml, vec![])),
+    content_type = "application/xml",
 }

-pub fn releases_feed_handler(req: &mut Request) -> IronResult<IronResponse> {
-    let mut conn = extension!(req, Pool).get()?;
-    let recent_releases = ctry!(
-        req,
+pub(crate) async fn releases_feed_handler(
+    Extension(pool): Extension<Pool>,
+) -> AxumResult<impl IntoResponse> {
+    let recent_releases = spawn_blocking(move || {
+        let mut conn = pool.get()?;
         get_releases(&mut conn, 1, RELEASES_IN_FEED, Order::ReleaseTime, true)
-    );
+    })
+    .await?;

-    ReleaseFeed { recent_releases }.into_response(req)
+    Ok(ReleaseFeed { recent_releases })
 }

 #[derive(Debug, Clone, PartialEq, Eq, Serialize)]
@@ -402,16 +409,12 @@ pub(crate) async fn releases_failures_by_stars_handler(
     releases_handler(pool, page.map(|p| p.0), ReleaseType::Failures).await
 }

-pub fn owner_handler(req: &mut Request) -> IronResult<IronResponse> {
-    let router = extension!(req, Router);
-    let mut owner = router.find("owner").unwrap();
-    if owner.starts_with('@') {
-        owner = &owner[1..];
-    }
-    match format!("https://crates.io/users/{}", owner).parse() {
-        Ok(url) => Ok(super::redirect(url)),
-        Err(_) => Err(Nope::OwnerNotFound.into()),
-    }
+pub(crate) async fn owner_handler(Path(owner): Path<String>) -> AxumResult<impl IntoResponse> {
+    axum_redirect(format!(
"https://crates.io/users/{}", + owner.strip_prefix('@').unwrap_or(&owner) + )) + .map_err(|_| AxumNope::OwnerNotFound) } #[derive(Debug, Clone, PartialEq, Serialize)] @@ -425,7 +428,7 @@ pub(super) struct Search { /// This should always be `ReleaseType::Search` pub(super) release_type: ReleaseType, #[serde(skip)] - pub(super) status: iron::status::Status, + pub(super) status: http::StatusCode, } impl Default for Search { @@ -437,12 +440,16 @@ impl Default for Search { previous_page_link: None, next_page_link: None, release_type: ReleaseType::Search, - status: iron::status::Ok, + status: http::StatusCode::OK, } } } -fn redirect_to_random_crate(req: &Request, conn: &mut PoolClient) -> IronResult { +async fn redirect_to_random_crate( + config: Arc, + metrics: Arc, + pool: Pool, +) -> AxumResult { // We try to find a random crate and redirect to it. // // The query is efficient, but relies on a static factor which depends @@ -450,11 +457,11 @@ fn redirect_to_random_crate(req: &Request, conn: &mut PoolClient) -> IronResult< // // If random-crate-searches end up being empty, increase that value. - let config = extension!(req, Config); - let rows = ctry!( - req, - conn.query( - "WITH params AS ( + let row = spawn_blocking({ + move || { + let mut conn = pool.get()?; + Ok(conn.query_opt( + "WITH params AS ( -- get maximum possible id-value in crates-table SELECT last_value AS max_id FROM crates_id_seq ) @@ -474,56 +481,53 @@ fn redirect_to_random_crate(req: &Request, conn: &mut PoolClient) -> IronResult< releases.rustdoc_status = TRUE AND repositories.stars >= 100 LIMIT 1", - &[&(config.random_crate_search_view_size as i32)] - ) - ); + &[&(config.random_crate_search_view_size as i32)], + )?) 
+ } + }) + .await?; - if let Some(row) = rows.into_iter().next() { + if let Some(row) = row { let name: String = row.get("name"); let version: String = row.get("version"); let target_name: String = row.get("target_name"); - let url = ctry!( - req, - Url::parse(&format!( - "{}/{}/{}/{}/", - redirect_base(req), - name, - version, - target_name - )), - ); - - let metrics = extension!(req, crate::Metrics).clone(); + metrics.im_feeling_lucky_searches.inc(); - Ok(super::redirect(url)) + Ok(axum_redirect(format!( + "/{}/{}/{}/", + name, version, target_name + ))?) } else { report_error(&anyhow!("found no result in random crate search")); - Err(Nope::NoResults.into()) + Err(AxumNope::NoResults) } } -impl_webpage! { +impl_axum_webpage! { Search = "releases/search_results.html", status = |search| search.status, } -pub fn search_handler(req: &mut Request) -> IronResult { - let url = req.url.as_ref(); - let mut params: HashMap<_, _> = url.query_pairs().collect(); +pub(crate) async fn search_handler( + Extension(pool): Extension, + Extension(config): Extension>, + Extension(metrics): Extension>, + Query(mut params): Query>, +) -> AxumResult { let query = params .get("query") .map(|q| q.to_string()) .unwrap_or_else(|| "".to_string()); - let mut conn = extension!(req, Pool).get()?; - // check if I am feeling lucky button pressed and redirect user to crate page // if there is a match. Also check for paths to items within crates. if params.remove("i-am-feeling-lucky").is_some() || query.contains("::") { // redirect to a random crate if query is empty if query.is_empty() { - return redirect_to_random_crate(req, &mut conn); + return Ok(redirect_to_random_crate(config, metrics, pool) + .await? 
+ .into_response()); } let mut queries = BTreeMap::new(); @@ -531,74 +535,68 @@ pub fn search_handler(req: &mut Request) -> IronResult { let krate = match query.split_once("::") { Some((krate, query)) => { queries.insert("search".into(), query.into()); - krate.to_string() + krate } - None => query.clone(), + None => &query, }; // since we never pass a version into `match_version` here, we'll never get // `MatchVersion::Exact`, so the distinction between `Exact` and `Semver` doesn't // matter - if let Ok(matchver) = match_version(&mut conn, &krate, None) { + if let Ok(matchver) = match_version_axum(&pool, krate, None).await { params.remove("query"); queries.extend(params); let (version, _) = matchver.version.into_parts(); - let krate = matchver.corrected_name.unwrap_or(krate); + let krate = matchver.corrected_name.unwrap_or_else(|| krate.to_string()); - let base = redirect_base(req); - let url = if matchver.rustdoc_status { + let uri = if matchver.rustdoc_status { let target_name = matchver.target_name; - let path = format!("{base}/{krate}/{version}/{target_name}/"); - ctry!(req, parse_url_with_params(&path, queries)) + axum_parse_uri_with_params(&format!("/{krate}/{version}/{target_name}/"), queries)? } else { - ctry!(req, Url::parse(&format!("{base}/crate/{krate}/{version}"))) + format!("/crate/{krate}/{version}") + .parse::() + .context("could not parse redirect URI")? 
}; - let mut resp = IronResponse::with((status::Found, Redirect(url))); - resp.headers.set(Expires(HttpDate(time::now()))); - - return Ok(resp); + return Ok(super::axum_redirect(uri)?.into_response()); } } - let search_result = ctry!( - req, - if let Some(paginate) = params.get("paginate") { - let decoded = base64::decode(paginate.as_bytes()).map_err(|e| -> IronError { - warn!( - "error when decoding pagination base64 string \"{}\": {:?}", - paginate, e - ); - Nope::NoResults.into() - })?; - let query_params = String::from_utf8_lossy(&decoded); - - if !query_params.starts_with('?') { - // sometimes we see plain bytes being passed to `paginate`. - // In these cases we just return `NoResults` and don't call - // the crates.io API. - // The whole point of the `paginate` design is that we don't - // know anything about the pagination args and crates.io can - // change them as they wish, so we cannot do any more checks here. - warn!( - "didn't get query args in `paginate` arguments for search: \"{}\"", - query_params - ); - return Err(Nope::NoResults.into()); - } + let search_result = if let Some(paginate) = params.get("paginate") { + let decoded = base64::decode(paginate.as_bytes()).map_err(|e| { + warn!( + "error when decoding pagination base64 string \"{}\": {:?}", + paginate, e + ); + AxumNope::NoResults + })?; + let query_params = String::from_utf8_lossy(&decoded); + + if !query_params.starts_with('?') { + // sometimes we see plain bytes being passed to `paginate`. + // In these cases we just return `NoResults` and don't call + // the crates.io API. + // The whole point of the `paginate` design is that we don't + // know anything about the pagination args and crates.io can + // change them as they wish, so we cannot do any more checks here. 
+            warn!(
+                "didn't get query args in `paginate` arguments for search: \"{}\"",
+                query_params
+            );
+            return Err(AxumNope::NoResults);
+        }

-            get_search_results(&mut conn, &query_params)
-        } else if !query.is_empty() {
-            let query_params: String = form_urlencoded::Serializer::new(String::new())
-                .append_pair("q", &query)
-                .append_pair("per_page", &RELEASES_IN_RELEASES.to_string())
-                .finish();
+        get_search_results(pool, &query_params).await?
+    } else if !query.is_empty() {
+        let query_params: String = form_urlencoded::Serializer::new(String::new())
+            .append_pair("q", &query)
+            .append_pair("per_page", &RELEASES_IN_RELEASES.to_string())
+            .finish();

-            get_search_results(&mut conn, &format!("?{}", &query_params))
-        } else {
-            return Err(Nope::NoResults.into());
-        }
-    );
+        get_search_results(pool, &format!("?{}", &query_params)).await?
+    } else {
+        return Err(AxumNope::NoResults);
+    };

     let executed_query = search_result.executed_query.unwrap_or_default();

@@ -608,7 +606,7 @@ pub fn search_handler(req: &mut Request) -> IronResult<IronResponse> {
         format!("Search results for '{}'", executed_query)
     };

-    Search {
+    Ok(Search {
         title,
         results: search_result.results,
         search_query: Some(executed_query),
@@ -620,7 +618,7 @@ pub fn search_handler(req: &mut Request) -> IronResult<IronResponse> {
             .map(|params| format!("/releases/search?paginate={}", base64::encode(params))),
         ..Default::default()
     }
-    .into_response(req)
+    .into_response())
 }

 #[derive(Debug, Clone, PartialEq, Serialize)]
@@ -631,16 +629,16 @@ struct ReleaseActivity {
     failures: Vec<i64>,
 }

-impl_webpage! {
+impl_axum_webpage! {
     ReleaseActivity = "releases/activity.html",
 }

-pub fn activity_handler(req: &mut Request) -> IronResult<IronResponse> {
-    let mut conn = extension!(req, Pool).get()?;
-
-    let data: Vec<(NaiveDate, i64, i64)> = ctry!(
-        req,
-        conn.query(
+pub(crate) async fn activity_handler(
+    Extension(pool): Extension<Pool>,
+) -> AxumResult<impl IntoResponse> {
+    let data = spawn_blocking(move || {
+        let mut conn = pool.get()?;
+        Ok(conn.query(
             "
             WITH dates AS (
                 -- we need this series so that days in the statistic that don't have any releases are included
@@ -675,13 +673,13 @@ pub fn activity_handler(req: &mut Request) -> IronResult<IronResponse> {
                 dates.date_
             ",
             &[],
-        )
-    )
-    .into_iter()
-    .map(|row| (row.get(0), row.get(1), row.get(2)))
-    .collect();
+        )?.into_iter()
+        .map(|row| (row.get(0), row.get(1), row.get(2)))
+        .collect::<Vec<(NaiveDate, i64, i64)>>()
+        )
+    }).await?;

-    ReleaseActivity {
+    Ok(ReleaseActivity {
         description: "Monthly release activity",
         dates: data
             .iter()
@@ -689,8 +687,7 @@ pub fn activity_handler(req: &mut Request) -> IronResult<IronResponse> {
             .collect(),
         counts: data.iter().map(|&d| d.1).collect(),
         failures: data.iter().map(|&d| d.2).collect(),
-    }
-    .into_response(req)
+    })
 }

 #[derive(Debug, Clone, PartialEq, Serialize)]
@@ -700,27 +697,33 @@ struct BuildQueuePage {
     active_deployments: Vec<CrateInvalidation>,
 }

-impl_webpage! {
+impl_axum_webpage! {
     BuildQueuePage = "releases/build_queue.html",
 }

-pub fn build_queue_handler(req: &mut Request) -> IronResult<IronResponse> {
-    let mut queue = ctry!(req, extension!(req, BuildQueue).queued_crates());
-    for krate in queue.iter_mut() {
-        // The priority here is inverted: in the database if a crate has a higher priority it
-        // will be built after everything else, which is counter-intuitive for people not
-        // familiar with docs.rs's inner workings.
- krate.priority = -krate.priority; - } +pub(crate) async fn build_queue_handler( + Extension(build_queue): Extension>, + Extension(pool): Extension, +) -> AxumResult { + let (queue, active_deployments) = spawn_blocking(move || { + let mut queue = build_queue.queued_crates()?; + for krate in queue.iter_mut() { + // The priority here is inverted: in the database if a crate has a higher priority it + // will be built after everything else, which is counter-intuitive for people not + // familiar with docs.rs's inner workings. + krate.priority = -krate.priority; + } - let mut conn = extension!(req, Pool).get()?; + let mut conn = pool.get()?; + Ok((queue, cdn::active_crate_invalidations(&mut conn)?)) + }) + .await?; - BuildQueuePage { + Ok(BuildQueuePage { description: "crate documentation scheduled to build & deploy", queue, - active_deployments: ctry!(req, cdn::active_crate_invalidations(&mut conn)), - } - .into_response(req) + active_deployments, + }) } #[cfg(test)] diff --git a/src/web/routes.rs b/src/web/routes.rs index 38ad01de2..a2b80abe3 100644 --- a/src/web/routes.rs +++ b/src/web/routes.rs @@ -53,6 +53,10 @@ pub(super) fn build_axum_routes() -> AxumRouter { "/-/static/*path", get_static(super::statics::static_handler), ) + .route( + "/opensearch.xml", + get_static(|| async { Redirect::permanent("/-/static/opensearch.xml") }), + ) .route( "/sitemap.xml", get_internal(super::sitemap::sitemapindex_handler), @@ -74,6 +78,7 @@ pub(super) fn build_axum_routes() -> AxumRouter { "/about/:subpage", get_internal(super::sitemap::about_handler), ) + .route("/", get_internal(super::releases::home_page)) .route( "/releases", get_internal(super::releases::recent_releases_handler), @@ -114,19 +119,36 @@ pub(super) fn build_axum_routes() -> AxumRouter { "/crate/:name/:version", get_internal(super::crate_details::crate_details_handler), ) + .route( + "/releases/feed", + get_static(super::releases::releases_feed_handler), + ) + .route( + "/releases/:owner", + 
get_internal(super::releases::owner_handler), + ) + .route( + "/releases/:owner/:page", + get_internal(super::releases::owner_handler), + ) + .route( + "/releases/activity", + get_internal(super::releases::activity_handler), + ) + .route( + "/releases/search", + get_internal(super::releases::search_handler), + ) + .route( + "/releases/queue", + get_internal(super::releases::build_queue_handler), + ) } // REFACTOR: Break this into smaller initialization functions pub(super) fn build_routes() -> Routes { let mut routes = Routes::new(); - // This should not need to be served from the root as we reference the inner path in links, - // but clients might have cached the url and need to update it. - routes.static_resource( - "/opensearch.xml", - PermanentRedirect("/-/static/opensearch.xml"), - ); - routes.internal_page( "/-/rustdoc.static/:single", super::rustdoc::static_asset_handler, @@ -150,15 +172,6 @@ pub(super) fn build_routes() -> Routes { storage_change_detection }); - routes.internal_page("/", super::releases::home_page); - - routes.static_resource("/releases/feed", super::releases::releases_feed_handler); - routes.internal_page("/releases/:owner", super::releases::owner_handler); - routes.internal_page("/releases/:owner/:page", super::releases::owner_handler); - routes.internal_page("/releases/activity", super::releases::activity_handler); - routes.internal_page("/releases/search", super::releases::search_handler); - routes.internal_page("/releases/queue", super::releases::build_queue_handler); - routes.internal_page( "/crate/:name/:version/builds", super::builds::build_list_handler,