diff --git a/apps/server/src/middleware/auth.rs b/apps/server/src/middleware/auth.rs index 250d569f4..4ee384b75 100644 --- a/apps/server/src/middleware/auth.rs +++ b/apps/server/src/middleware/auth.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use axum::{ body::Body, extract::{OriginalUri, Path, Request, State}, @@ -9,6 +11,7 @@ use axum::{ use base64::{engine::general_purpose::STANDARD, Engine}; use prefixed_api_key::{PrefixedApiKey, PrefixedApiKeyController}; use prisma_client_rust::or; +use serde::Deserialize; use stump_core::{ db::entity::{APIKeyPermissions, User, UserPermission, API_KEY_PREFIX}, opds::v2_0::{ @@ -44,17 +47,24 @@ use super::host::HostExtractor; /// - They have a valid bearer token (session may not exist) /// - They have valid basic auth credentials (session is created after successful authentication) #[derive(Debug, Clone)] -pub struct RequestContext(User); +pub struct RequestContext { + user: User, + api_key: Option, +} impl RequestContext { /// Get a reference to the current user pub fn user(&self) -> &User { - &self.0 + &self.user } /// Get the ID of the current user pub fn id(&self) -> String { - self.0.id.clone() + self.user.id.clone() + } + + pub fn api_key(&self) -> Option { + self.api_key.clone() } /// Enforce that the current user has all the permissions provided, otherwise return an error @@ -155,7 +165,10 @@ pub async fn auth_middleware( if let Some(user) = session_user { if !user.is_locked { - req.extensions_mut().insert(RequestContext(user)); + req.extensions_mut().insert(RequestContext { + user, + api_key: None, + }); return Ok(next.run(req).await); } } @@ -186,7 +199,7 @@ pub async fn auth_middleware( return Err(APIError::Unauthorized.into_response()); }; - let user = match auth_header { + let req_ctx = match auth_header { _ if auth_header.starts_with("Bearer ") && auth_header.len() > 7 => { let token = auth_header[7..].to_owned(); handle_bearer_auth(token, &ctx.db) @@ -202,11 +215,20 @@ pub async fn auth_middleware( _ => return Err(APIError::Unauthorized.into_response()), }; - req.extensions_mut().insert(RequestContext(user)); + req.extensions_mut().insert(req_ctx); Ok(next.run(req).await) } +#[derive(Debug, Deserialize)] +pub struct APIKeyPath(HashMap); + +impl APIKeyPath { + fn get_key(&self) -> Option { + self.0.get("api_key").cloned() + } +} + /// A middleware to authenticate a user by an API key in a *very* specific way. This middleware /// assumes that a fully qualified API key is provided in the path. This is used for two features today: /// @@ -218,10 +240,15 @@ pub async fn auth_middleware( /// hashing algorithm, therefore the default auth method would not work. pub async fn api_key_middleware( State(ctx): State, - Path(api_key): Path, + Path(params): Path, mut req: Request, next: Next, ) -> Result { + let Some(api_key) = params.get_key() else { + tracing::error!("No API key provided"); + return Err(APIError::Unauthorized.into_response()); + }; + let Ok(pak) = PrefixedApiKey::from_string(api_key.as_str()) else { tracing::error!("Failed to parse API key"); return Err(APIError::Unauthorized.into_response()); @@ -231,7 +258,10 @@ pub async fn api_key_middleware( .await .map_err(|e| e.into_response())?; - req.extensions_mut().insert(RequestContext(user)); + req.extensions_mut().insert(RequestContext { + user, + api_key: Some(api_key), + }); Ok(next.run(req).await) } @@ -338,10 +368,18 @@ pub async fn validate_api_key( /// A function to handle bearer token authentication. 
This function will verify the token and /// return the user if the token is valid. #[tracing::instrument(skip_all)] -async fn handle_bearer_auth(token: String, client: &PrismaClient) -> APIResult { +async fn handle_bearer_auth( + token: String, + client: &PrismaClient, +) -> APIResult { match PrefixedApiKey::from_string(token.as_str()) { Ok(api_key) if api_key.prefix() == API_KEY_PREFIX => { - return validate_api_key(api_key, client).await; + return validate_api_key(api_key, client) + .await + .map(|user| RequestContext { + user, + api_key: Some(token), + }); }, _ => (), }; @@ -371,7 +409,10 @@ async fn handle_bearer_auth(token: String, client: &PrismaClient) -> APIResult APIResult { +) -> APIResult { let decoded_bytes = STANDARD .decode(encoded_credentials.as_bytes()) .map_err(|e| APIError::InternalServerError(e.to_string()))?; @@ -427,7 +468,10 @@ async fn handle_basic_auth( enforce_max_sessions(&user, client).await?; let user = User::from(user); session.insert(SESSION_USER_KEY, user.clone()).await?; - return Ok(user); + return Ok(RequestContext { + user, + api_key: None, + }); } Err(APIError::Unauthorized) @@ -558,14 +602,20 @@ mod tests { #[test] fn test_request_context_user() { let user = User::default(); - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(user.is(request_context.user())); } #[test] fn test_request_context_id() { let user = User::default(); - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert_eq!(user.id, request_context.id()); } @@ -575,7 +625,10 @@ mod tests { is_server_owner: true, ..Default::default() }; - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(request_context .enforce_permissions(&[UserPermission::AccessBookClub]) .is_ok()); @@ -587,7 +640,10 @@ mod tests { permissions: vec![UserPermission::AccessBookClub], ..Default::default() }; - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(request_context .enforce_permissions(&[UserPermission::AccessBookClub]) .is_ok()); @@ -596,7 +652,10 @@ mod tests { #[test] fn test_request_context_enforce_permissions_when_denied() { let user = User::default(); - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(request_context .enforce_permissions(&[UserPermission::AccessBookClub]) .is_err()); @@ -608,7 +667,10 @@ mod tests { permissions: vec![UserPermission::AccessBookClub], ..Default::default() }; - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(request_context .enforce_permissions(&[ UserPermission::AccessBookClub, @@ -623,7 +685,10 @@ mod tests { permissions: vec![UserPermission::AccessBookClub], ..Default::default() }; - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(user.is(&request_context .user_and_enforce_permissions(&[UserPermission::AccessBookClub]) .unwrap())); @@ -632,7 +697,10 @@ mod tests { #[test] fn test_request_context_user_and_enforce_permissions_when_denied() { let user = User::default(); - let request_context = RequestContext(user.clone()); + let 
request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(request_context .user_and_enforce_permissions(&[UserPermission::AccessBookClub]) .is_err()); @@ -644,14 +712,20 @@ mod tests { is_server_owner: true, ..Default::default() }; - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(request_context.enforce_server_owner().is_ok()); } #[test] fn test_request_context_enforce_server_owner_when_not_server_owner() { let user = User::default(); - let request_context = RequestContext(user.clone()); + let request_context = RequestContext { + user: user.clone(), + api_key: None, + }; assert!(request_context.enforce_server_owner().is_err()); } diff --git a/apps/server/src/routers/opds/v1_2.rs b/apps/server/src/routers/opds/v1_2.rs index 4e80e8826..cf74d8109 100644 --- a/apps/server/src/routers/opds/v1_2.rs +++ b/apps/server/src/routers/opds/v1_2.rs @@ -5,6 +5,7 @@ use axum::{ Extension, Router, }; use prisma_client_rust::{chrono, Direction}; +use serde::{Deserialize, Serialize}; use stump_core::{ db::{entity::UserPermission, query::pagination::PageQuery}, filesystem::{ @@ -13,8 +14,8 @@ use stump_core::{ ContentType, }, opds::v1_2::{ - entry::OpdsEntry, - feed::OpdsFeed, + entry::{IntoOPDSEntry, OPDSEntryBuilder, OpdsEntry}, + feed::{OPDSFeedBuilder, OpdsFeed}, link::{OpdsLink, OpdsLinkRel, OpdsLinkType}, }, prisma::{active_reading_session, library, media, series, user}, @@ -62,24 +63,55 @@ pub(crate) fn mount(app_state: AppState) -> Router { .route("/file/:filename", get(download_book)), ); - Router::new().nest( - "/v1.2", - Router::new() - .nest( - "/", - primary_router.clone().layer(middleware::from_fn_with_state( - app_state.clone(), - auth_middleware, - )), - ) - .nest( - "/:api_key/v1.2", - primary_router.layer(middleware::from_fn_with_state( - app_state, - api_key_middleware, - )), - ), - ) + Router::new() + .nest( + "/v1.2", + primary_router.clone().layer(middleware::from_fn_with_state( + app_state.clone(), + auth_middleware, + )), + ) + .nest( + "/:api_key/v1.2", + primary_router.layer(middleware::from_fn_with_state( + app_state, + api_key_middleware, + )), + ) +} + +#[derive(Debug, Serialize, Deserialize)] +struct OPDSURLParams { + #[serde(flatten)] + params: D, + #[serde(default)] + api_key: Option, +} + +#[derive(Debug, Serialize, Deserialize)] +struct OPDSIDURLParams { + id: String, +} + +fn number_or_string_deserializer<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + let value = String::deserialize(deserializer)?; + value.parse::().map_err(serde::de::Error::custom) +} + +#[derive(Debug, Serialize, Deserialize)] +struct OPDSPageURLParams { + id: String, + #[serde(deserialize_with = "number_or_string_deserializer")] + page: i32, +} + +#[derive(Debug, Serialize, Deserialize)] +struct OPDSFilenameURLParams { + id: String, + filename: String, } fn pagination_bounds(page: i64, page_size: i64) -> (i64, i64) { @@ -87,7 +119,15 @@ fn pagination_bounds(page: i64, page_size: i64) -> (i64, i64) { (skip, page_size) } -async fn catalog() -> APIResult { +fn catalog_url(req_ctx: &RequestContext, path: &str) -> String { + if let Some(api_key) = req_ctx.api_key() { + format!("/opds/{}/v1.2/{}", api_key, path) + } else { + format!("/opds/v1.2/{}", path) + } +} + +async fn catalog(Extension(req): Extension) -> APIResult { let entries = vec![ OpdsEntry::new( "keepReading".to_string(), @@ -98,7 +138,7 @@ async fn catalog() -> APIResult { Some(vec![OpdsLink 
{ link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Subsection, - href: String::from("/opds/v1.2/keep-reading"), + href: catalog_url(&req, "keep-reading"), }]), None, ), @@ -111,7 +151,7 @@ async fn catalog() -> APIResult { Some(vec![OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Subsection, - href: String::from("/opds/v1.2/series"), + href: catalog_url(&req, "series"), }]), None, ), @@ -124,7 +164,7 @@ async fn catalog() -> APIResult { Some(vec![OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Subsection, - href: String::from("/opds/v1.2/series/latest"), + href: catalog_url(&req, "series/latest"), }]), None, ), @@ -137,36 +177,10 @@ async fn catalog() -> APIResult { Some(vec![OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Subsection, - href: String::from("/opds/v1.2/libraries"), + href: catalog_url(&req, "libraries"), }]), None, ), - // OpdsEntry::new( - // "allCollections".to_string(), - // chrono::Utc::now().into(), - // "All collections".to_string(), - // Some(String::from("Browse by collection")), - // None, - // Some(vec![OpdsLink { - // link_type: OpdsLinkType::Navigation, - // rel: OpdsLinkRel::Subsection, - // href: String::from("/opds/v1.2/collections"), - // }]), - // None, - // ), - // OpdsEntry::new( - // "allReadLists".to_string(), - // chrono::Utc::now().into(), - // "All read lists".to_string(), - // Some(String::from("Browse by read list")), - // None, - // Some(vec![OpdsLink { - // link_type: OpdsLinkType::Navigation, - // rel: OpdsLinkRel::Subsection, - // href: String::from("/opds/v1.2/readlists"), - // }]), - // None, - // ), // TODO: more? // TODO: get user stored searches, so they don't have to redo them over and over? // e.g. /opds/v1.2/series?search={searchTerms}, /opds/v1.2/libraries?search={searchTerms}, etc. 
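
The `catalog_url` helper introduced above, together with the matching `format_url` helpers added to `OPDSEntryBuilder` and `OPDSFeedBuilder` further down, all produce one of two URL shapes: `/opds/v1.2/{path}` for session, bearer, or basic auth, and `/opds/{api_key}/v1.2/{path}` when the request carried a fully qualified API key in the path. A minimal standalone sketch of that shared rule (the `format_opds_url` name and the key value are illustrative only, not part of the patch):

```rust
// Illustrative sketch only; mirrors the branch inside `catalog_url` / `format_url`.
fn format_opds_url(api_key: Option<&str>, path: &str) -> String {
    match api_key {
        // Path-authenticated requests carry the key forward in every generated link
        Some(key) => format!("/opds/{key}/v1.2/{path}"),
        // Session, bearer, and basic auth requests use the plain prefix
        None => format!("/opds/v1.2/{path}"),
    }
}

fn main() {
    assert_eq!(format_opds_url(None, "keep-reading"), "/opds/v1.2/keep-reading");
    assert_eq!(
        // "stump_example_key" is a placeholder value, not a real key format
        format_opds_url(Some("stump_example_key"), "keep-reading"),
        "/opds/stump_example_key/v1.2/keep-reading"
    );
}
```

Embedding the key in every generated href is presumably what keeps path-authenticated clients working as they navigate from the catalog into sub-feeds.
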
@@ -176,18 +190,13 @@ async fn catalog() -> APIResult { OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::ItSelf, - href: String::from("/opds/v1.2/catalog"), + href: catalog_url(&req, "catalog"), }, OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Start, - href: String::from("/opds/v1.2/catalog"), + href: catalog_url(&req, "catalog"), }, - // OpdsLink { - // link_type: OpdsLinkType::Search, - // rel: OpdsLinkRel::Search, - // href: String::from("/opds/v1.2/search"), - // }, ]; let feed = OpdsFeed::new( @@ -237,7 +246,10 @@ async fn keep_reading( } }); - let entries = books_in_progress.into_iter().map(OpdsEntry::from).collect(); + let entries = books_in_progress + .into_iter() + .map(|m| OPDSEntryBuilder::::new(m, req.api_key()).into_opds_entry()) + .collect::>(); let feed = OpdsFeed::new( "keepReading".to_string(), @@ -246,12 +258,12 @@ async fn keep_reading( OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::ItSelf, - href: String::from("/opds/v1.2/keep-reading"), + href: catalog_url(&req, "keep-reading"), }, OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Start, - href: String::from("/opds/v1.2/catalog"), + href: catalog_url(&req, "catalog"), }, ]), entries, @@ -273,7 +285,12 @@ async fn get_libraries( .find_many(vec![library_not_hidden_from_user_filter(user)]) .exec() .await?; - let entries = libraries.into_iter().map(OpdsEntry::from).collect(); + let entries = libraries + .into_iter() + .map(|l| { + OPDSEntryBuilder::::new(l, req.api_key()).into_opds_entry() + }) + .collect::>(); let feed = OpdsFeed::new( "allLibraries".to_string(), @@ -282,12 +299,12 @@ async fn get_libraries( OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::ItSelf, - href: String::from("/opds/v1.2/libraries"), + href: catalog_url(&req, "libraries"), }, OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Start, - href: String::from("/opds/v1.2/catalog"), + href: catalog_url(&req, "catalog"), }, ]), entries, @@ -298,7 +315,10 @@ async fn get_libraries( async fn get_library_by_id( State(ctx): State, - Path(id): Path, + Path(OPDSURLParams { + params: OPDSIDURLParams { id }, + .. 
+ }): Path>, pagination: Query, Extension(req): Extension, ) -> APIResult { @@ -354,15 +374,23 @@ async fn get_library_by_id( library_series_count, "Fetched library with series" ); - Ok(Xml(OpdsFeed::paginated( + + let entries = library_series + .into_iter() + .map(|s| { + OPDSEntryBuilder::::new(s, req.api_key()).into_opds_entry() + }) + .collect::>(); + + let feed = OPDSFeedBuilder::new(req.api_key()).paginated( library.id.as_str(), library.name.as_str(), + entries, format!("libraries/{}", &library.id).as_str(), - library_series, page.into(), library_series_count, - ) - .build()?)) + )?; + Ok(Xml(feed.build()?)) } else { Err(APIError::NotFound(format!( "Library {library_id} not found" @@ -408,15 +436,23 @@ async fn get_series( }) .await?; - Ok(Xml(OpdsFeed::paginated( + let entries = series + .into_iter() + .map(|s| { + OPDSEntryBuilder::::new(s, req.api_key()).into_opds_entry() + }) + .collect::>(); + + let feed = OPDSFeedBuilder::new(req.api_key()).paginated( "allSeries", "All Series", + entries, "series", - series, page.into(), count, - ) - .build()?)) + )?; + + Ok(Xml(feed.build()?)) } async fn get_latest_series( @@ -456,19 +492,31 @@ async fn get_latest_series( }) .await?; - Ok(Xml(OpdsFeed::paginated( + let entries = series + .into_iter() + .map(|s| { + OPDSEntryBuilder::::new(s, req.api_key()).into_opds_entry() + }) + .collect::>(); + + let feed = OPDSFeedBuilder::new(req.api_key()).paginated( "latestSeries", "Latest Series", + entries, "series/latest", - series, page.into(), count, - ) - .build()?)) + )?; + + Ok(Xml(feed.build()?)) } async fn get_series_by_id( - Path(id): Path, + Path(OPDSURLParams { + params: OPDSIDURLParams { id }, + .. + }): Path>, + // Path((id, _)): Path<(String, String)>, State(ctx): State, pagination: Query, Extension(req): Extension, @@ -518,15 +566,23 @@ async fn get_series_by_id( .await?; if let (Some(series), Some(series_book_count)) = tx_result { - Ok(Xml(OpdsFeed::paginated( + let series_media = series.media().unwrap_or(&Vec::new()).to_owned(); + let entries = series_media + .into_iter() + .map(|m| { + OPDSEntryBuilder::::new(m, req.api_key()).into_opds_entry() + }) + .collect(); + + let feed = OPDSFeedBuilder::new(req.api_key()).paginated( series.id.as_str(), series.name.as_str(), + entries, format!("series/{}", &series.id).as_str(), - series.media().unwrap_or(&Vec::new()).to_owned(), page.into(), series_book_count, - ) - .build()?)) + )?; + Ok(Xml(feed.build()?)) } else { Err(APIError::NotFound(format!("Series {series_id} not found"))) } @@ -557,7 +613,10 @@ fn handle_opds_image_response( /// A handler for GET /opds/v1.2/books/{id}/thumbnail, returns the thumbnail async fn get_book_thumbnail( - Path(id): Path, + Path(OPDSURLParams { + params: OPDSIDURLParams { id }, + .. + }): Path>, State(ctx): State, Extension(req): Extension, ) -> APIResult { @@ -569,7 +628,10 @@ async fn get_book_thumbnail( /// A handler for GET /opds/v1.2/books/{id}/page/{page}, returns the page async fn get_book_page( - Path((id, page)): Path<(String, i32)>, + Path(OPDSURLParams { + params: OPDSPageURLParams { id, page }, + .. + }): Path>, State(ctx): State, pagination: Query, Extension(req): Extension, @@ -650,7 +712,10 @@ async fn get_book_page( /// A handler for GET /opds/v1.2/books/{id}/file/{filename}, returns the book async fn download_book( - Path((id, filename)): Path<(String, String)>, + Path(OPDSURLParams { + params: OPDSFilenameURLParams { id, filename }, + .. 
+ }): Path>, State(ctx): State, Extension(req): Extension, ) -> APIResult { diff --git a/core/src/opds/v1_2/entry.rs b/core/src/opds/v1_2/entry.rs index e291e9b01..71eec65c6 100644 --- a/core/src/opds/v1_2/entry.rs +++ b/core/src/opds/v1_2/entry.rs @@ -104,23 +104,46 @@ impl OpdsEntry { } } -impl From for OpdsEntry { - fn from(l: library::Data) -> Self { +pub trait IntoOPDSEntry { + fn into_opds_entry(self) -> OpdsEntry; +} + +pub struct OPDSEntryBuilder { + data: T, + api_key: Option, +} + +impl OPDSEntryBuilder { + pub fn new(data: T, api_key: Option) -> Self { + Self { data, api_key } + } + + fn format_url(&self, path: &str) -> String { + if let Some(ref api_key) = self.api_key { + format!("/opds/{}/v1.2/{}", api_key, path) + } else { + format!("/opds/v1.2/{}", path) + } + } +} + +impl IntoOPDSEntry for OPDSEntryBuilder { + fn into_opds_entry(self) -> OpdsEntry { let mut links = Vec::new(); let nav_link = OpdsLink::new( OpdsLinkType::Navigation, OpdsLinkRel::Subsection, - format!("/opds/v1.2/libraries/{}", l.id), + self.format_url(&format!("libraries/{}", self.data.id)), ); links.push(nav_link); OpdsEntry { - id: l.id, - updated: l.updated_at, - title: l.name, - content: l.description, + id: self.data.id, + updated: self.data.updated_at, + title: self.data.name, + content: self.data.description, authors: None, links, stream_link: None, @@ -128,23 +151,23 @@ impl From for OpdsEntry { } } -impl From for OpdsEntry { - fn from(s: series::Data) -> Self { +impl IntoOPDSEntry for OPDSEntryBuilder { + fn into_opds_entry(self) -> OpdsEntry { let mut links = Vec::new(); let nav_link = OpdsLink::new( OpdsLinkType::Navigation, OpdsLinkRel::Subsection, - format!("/opds/v1.2/series/{}", s.id), + self.format_url(&format!("series/{}", self.data.id)), ); links.push(nav_link); OpdsEntry { - id: s.id.to_string(), - updated: s.updated_at, - title: s.name, - content: s.description, + id: self.data.id.to_string(), + updated: self.data.updated_at, + title: self.data.name, + content: self.data.description, authors: None, links, stream_link: None, @@ -152,20 +175,16 @@ impl From for OpdsEntry { } } -// TODO: I was panicking here on my hosted server, and added additional safe guards. I need to check what was happening -// once these changes are deployed and I can see the logs on my server. +impl IntoOPDSEntry for OPDSEntryBuilder { + fn into_opds_entry(self) -> OpdsEntry { + let base_url = self.format_url(&format!("books/{}", self.data.id)); -impl From for OpdsEntry { - fn from(value: media::Data) -> Self { - tracing::trace!(book = ?value, "Converting book to OPDS entry"); - - let base_url = format!("/opds/v1.2/books/{}", value.id); - - let path_buf = PathBuf::from(value.path.as_str()); + let path_buf = PathBuf::from(self.data.path.as_str()); let FileParts { file_name, .. 
} = path_buf.file_parts(); let file_name_encoded = encode(&file_name); - let active_reading_session = value + let active_reading_session = self + .data .active_user_reading_sessions() .ok() .and_then(|sessions| sessions.first().cloned()); @@ -180,11 +199,13 @@ impl From for OpdsEntry { vec![1] }; - let page_content_types = get_content_types_for_pages(&value.path, target_pages) - .unwrap_or_else(|error| { - tracing::error!(error = ?error, "Failed to get content types for pages"); - HashMap::default() - }); + let page_content_types = + get_content_types_for_pages(&self.data.path, target_pages).unwrap_or_else( + |error| { + tracing::error!(error = ?error, "Failed to get content types for pages"); + HashMap::default() + }, + ); tracing::trace!(?page_content_types, "Got page content types"); let thumbnail_link_type = page_content_types @@ -196,7 +217,7 @@ impl From for OpdsEntry { .to_owned(); let current_page_link_type = match current_page { - Some(page) if page < value.pages => page_content_types + Some(page) if page < self.data.pages => page_content_types .get(&page) .unwrap_or_else(|| { tracing::error!("Failed to get content type for current page"); @@ -204,7 +225,7 @@ impl From for OpdsEntry { }) .to_owned(), Some(page) => { - tracing::warn!(current_page=?page, book_pages=?value.pages, "Current page is out of bounds!"); + tracing::warn!(current_page=?page, book_pages=?self.data.pages, "Current page is out of bounds!"); thumbnail_link_type }, _ => thumbnail_link_type, @@ -217,8 +238,8 @@ impl From for OpdsEntry { }); let entry_file_acquisition_link_type = - OpdsLinkType::from_extension(&value.extension).unwrap_or_else(|| { - tracing::error!(?value.extension, "Failed to convert file extension to OPDS link type"); + OpdsLinkType::from_extension(&self.data.extension).unwrap_or_else(|| { + tracing::error!(?self.data.extension, "Failed to convert file extension to OPDS link type"); OpdsLinkType::Zip }); @@ -241,16 +262,17 @@ impl From for OpdsEntry { ]; let stream_link = OpdsStreamLink::new( - value.id.clone(), - value.pages.to_string(), + self.data.id.clone(), + self.data.pages.to_string(), current_page_link_type.to_string(), current_page.map(|page| page.to_string()), last_read_at.map(|date| date.to_string()), ); - let mib = value.size as f64 / (1024.0 * 1024.0); + let mib = self.data.size as f64 / (1024.0 * 1024.0); - let metadata = value + let metadata = self + .data .metadata() .ok() .flatten() @@ -263,14 +285,14 @@ impl From for OpdsEntry { let content = match description { Some(s) => Some(format!( "{:.1} MiB - {}
<br/>
{}", - mib, value.extension, s + mib, self.data.extension, s )), - None => Some(format!("{:.1} MiB - {}", mib, value.extension)), + None => Some(format!("{:.1} MiB - {}", mib, self.data.extension)), }; OpdsEntry { - id: value.id.to_string(), - title: value.name, + id: self.data.id.to_string(), + title: self.data.name, updated: chrono::Utc::now().into(), content, links, diff --git a/core/src/opds/v1_2/feed.rs b/core/src/opds/v1_2/feed.rs index c56dbef91..77e6515bc 100644 --- a/core/src/opds/v1_2/feed.rs +++ b/core/src/opds/v1_2/feed.rs @@ -3,11 +3,13 @@ use crate::{ error::CoreError, - opds::v1_2::link::OpdsLink, + opds::v1_2::{ + entry::{IntoOPDSEntry, OPDSEntryBuilder}, + link::OpdsLink, + }, prisma::{library, series}, }; use prisma_client_rust::chrono::{self, DateTime, Utc}; -use tracing::warn; use xml::{writer::XmlEvent, EventWriter}; use super::{ @@ -42,28 +44,6 @@ impl OpdsFeed { } } - pub fn paginated( - id: &str, - title: &str, - href_postfix: &str, - data: Vec, - page: i64, - count: i64, - ) -> OpdsFeed - where - OpdsEntry: From, - { - ( - id.to_string(), - title.to_string(), - href_postfix.to_string(), - data, - page, - count, - ) - .into() - } - /// Build an xml string from the feed. pub fn build(&self) -> Result { self.build_with_datetime(&chrono::Utc::now()) @@ -106,10 +86,24 @@ impl OpdsFeed { } } -// TODO: impl feeds for search results +pub struct OPDSFeedBuilder { + api_key: Option, +} + +impl OPDSFeedBuilder { + pub fn new(api_key: Option) -> Self { + Self { api_key } + } + + fn format_url(&self, path: &str) -> String { + if let Some(ref api_key) = self.api_key { + format!("/opds/{}/v1.2/{}", api_key, path) + } else { + format!("/opds/v1.2/{}", path) + } + } -impl From for OpdsFeed { - fn from(library: library::Data) -> Self { + pub fn library(&self, library: library::Data) -> Result { let id = library.id.clone(); let title = library.name.clone(); @@ -117,167 +111,76 @@ impl From for OpdsFeed { OpdsLink::new( OpdsLinkType::Navigation, OpdsLinkRel::ItSelf, - format!("/opds/v1.2/libraries/{id}"), + self.format_url(&format!("libraries/{}", id)), ), OpdsLink::new( OpdsLinkType::Navigation, OpdsLinkRel::Start, - "/opds/v1.2/catalog".to_string(), + self.format_url("catalog"), ), ]; - let entries = match library.series() { - Ok(series) => series.iter().cloned().map(OpdsEntry::from).collect(), - Err(e) => { - warn!("Failed to get series for library {}: {}", id, e); - vec![] - }, + let Ok(series) = library.series().cloned() else { + return Ok(OpdsFeed::new(id, title, Some(links), vec![])); }; - Self::new(id, title, Some(links), entries) - } -} - -impl From<(library::Data, i64, i64)> for OpdsFeed { - fn from((library, page, count): (library::Data, i64, i64)) -> Self { - let id = library.id.clone(); - let title = library.name.clone(); + let entries = series + .into_iter() + .map(|s| { + OPDSEntryBuilder::::new(s, self.api_key.clone()) + .into_opds_entry() + }) + .collect::>(); - let href_postfix = format!("libraries/{}", &id); - - let data = library.series().unwrap_or(&Vec::new()).to_owned(); - - (id, title, href_postfix, data, page, count).into() - - // let mut links = vec![ - // OpdsLink::new( - // OpdsLinkType::Navigation, - // OpdsLinkRel::ItSelf, - // format!("/opds/v1.2/libraries/{}", id), - // ), - // OpdsLink::new( - // OpdsLinkType::Navigation, - // OpdsLinkRel::Start, - // "/opds/v1.2/catalog".to_string(), - // ), - // ]; - - // let entries = library - // .series() - // .unwrap_or(Vec::::new().as_ref()) - // .to_owned() - // .into_iter() - // .map(OpdsEntry::from) - // 
.collect::>(); - - // if page > 0 { - // links.push(OpdsLink { - // link_type: OpdsLinkType::Navigation, - // rel: OpdsLinkRel::Previous, - // href: format!("/opds/v1.2/libraries?page={}", page - 1), - // }); - // } - - // let total_pages = (count as f32 / 20.0).ceil() as u32; - - // if page < total_pages as u32 && entries.len() == 20 { - // links.push(OpdsLink { - // link_type: OpdsLinkType::Navigation, - // rel: OpdsLinkRel::Next, - // href: format!("/opds/v1.2/libraries?page={}", page + 1), - // }); - // } - - // OpdsFeed::new(id, title, Some(links), entries) + Ok(OpdsFeed::new(id, title, Some(links), entries)) } -} - -impl From<(String, Vec, i64, i64)> for OpdsFeed { - /// Used in /opds/series?page={page}, converting the raw Vector of series into an OPDS feed. - /// The page URL param is also passed in, and is used when generating the OPDS links. - fn from((title, series, page, count): (String, Vec, i64, i64)) -> Self { - let entries = series.into_iter().map(OpdsEntry::from).collect::>(); - - let mut links = vec![ - OpdsLink { - link_type: OpdsLinkType::Navigation, - rel: OpdsLinkRel::ItSelf, - href: String::from("/opds/v1.2/series"), - }, - OpdsLink { - link_type: OpdsLinkType::Navigation, - rel: OpdsLinkRel::Start, - href: String::from("/opds/v1.2/catalog"), - }, - ]; - - if page > 0 { - links.push(OpdsLink { - link_type: OpdsLinkType::Navigation, - rel: OpdsLinkRel::Previous, - href: format!("/opds/v1.2/series?page={}", page - 1), - }); - } - - // TODO: this 20.0 is the page size, which I might make dynamic for OPDS routes... but - // not sure.. - let total_pages = (count as f32 / 20.0).ceil() as u32; - - if page < total_pages as i64 && entries.len() == 20 { - links.push(OpdsLink { - link_type: OpdsLinkType::Navigation, - rel: OpdsLinkRel::Next, - href: format!("/opds/v1.2/series?page={}", page + 1), - }); - } - - OpdsFeed::new("root".to_string(), title, Some(links), entries) - } -} - -impl From<(String, String, String, Vec, i64, i64)> for OpdsFeed -where - OpdsEntry: From, -{ - fn from(tuple: (String, String, String, Vec, i64, i64)) -> OpdsFeed { - let (id, title, href_postfix, data, page, count) = tuple; - - let entries = data.into_iter().map(OpdsEntry::from).collect::>(); + pub fn paginated( + self, + id: &str, + title: &str, + entries: Vec, + href_postfix: &str, + page: i64, + count: i64, + ) -> Result { let mut links = vec![ - OpdsLink { - link_type: OpdsLinkType::Navigation, - rel: OpdsLinkRel::ItSelf, - href: format!("/opds/v1.2/{href_postfix}"), - }, - OpdsLink { - link_type: OpdsLinkType::Navigation, - rel: OpdsLinkRel::Start, - href: "/opds/v1.2/catalog".into(), - }, + OpdsLink::new( + OpdsLinkType::Navigation, + OpdsLinkRel::ItSelf, + self.format_url(&format!("{}?page={}", href_postfix, page)), + ), + OpdsLink::new( + OpdsLinkType::Navigation, + OpdsLinkRel::Start, + self.format_url("catalog"), + ), ]; if page > 0 { links.push(OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Previous, - href: format!("/opds/v1.2/{}?page={}", href_postfix, page - 1), + href: self.format_url(&format!("{}?page={}", href_postfix, page - 1)), }); } - // TODO: this 20.0 is the page size, which I might make dynamic for OPDS routes... but - // not sure.. 
let total_pages = (count as f32 / 20.0).ceil() as u32; if page < total_pages as i64 && entries.len() == 20 { links.push(OpdsLink { link_type: OpdsLinkType::Navigation, rel: OpdsLinkRel::Next, - href: format!("/opds/v1.2/{href_postfix}?page={}", page + 1), + href: self.format_url(&format!("{}?page={}", href_postfix, page + 1)), }); } - OpdsFeed::new(id, title, Some(links), entries) + Ok(OpdsFeed::new( + id.to_string(), + title.to_string(), + Some(links), + entries, + )) } }
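
For context on the `paginated` method that replaces the old tuple `From` impls above: it keeps the same previous/next link rules, just routed through `format_url` so API-key-prefixed paths are preserved. A self-contained sketch of those rules, assuming the hard-coded page size of 20 flagged in the removed TODO (`pagination_hrefs` is an illustrative name, not an API from this patch):

```rust
// Illustrative sketch of the pagination-link rules in `OPDSFeedBuilder::paginated`;
// the 20-item page size matches the hard-coded value noted in the TODO above.
fn pagination_hrefs(
    base: &str,            // already-formatted href postfix, e.g. "/opds/v1.2/series"
    page: i64,             // zero-indexed page number
    count: i64,            // total number of items across all pages
    entries_on_page: usize,
) -> (Option<String>, Option<String>) {
    const PAGE_SIZE: i64 = 20;
    let total_pages = (count as f32 / PAGE_SIZE as f32).ceil() as i64;

    // "previous" exists for every page after the first
    let previous = (page > 0).then(|| format!("{base}?page={}", page - 1));
    // "next" exists only while the current page is full and more pages remain
    let next = (page < total_pages && entries_on_page == PAGE_SIZE as usize)
        .then(|| format!("{base}?page={}", page + 1));

    (previous, next)
}

fn main() {
    // 45 items, second page (page = 1) holding a full 20 entries
    let (prev, next) = pagination_hrefs("/opds/v1.2/series", 1, 45, 20);
    assert_eq!(prev.as_deref(), Some("/opds/v1.2/series?page=0"));
    assert_eq!(next.as_deref(), Some("/opds/v1.2/series?page=2"));
}
```

On the last, partially filled page the `entries.len() == 20` check suppresses the next link even when `page < total_pages` still holds.
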