diff --git a/Cargo.lock b/Cargo.lock index 72f21f71e..6c5ab2685 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1219,6 +1219,8 @@ dependencies = [ "clap", "crucible", "crucible-common", + "crucible-pantry-api", + "crucible-pantry-types", "crucible-smf 0.0.0", "crucible-workspace-hack", "dropshot", @@ -1243,6 +1245,18 @@ dependencies = [ "uuid", ] +[[package]] +name = "crucible-pantry-api" +version = "0.1.0" +dependencies = [ + "crucible-client-types", + "crucible-pantry-types", + "crucible-workspace-hack", + "dropshot", + "schemars", + "serde", +] + [[package]] name = "crucible-pantry-client" version = "0.0.1" @@ -1259,6 +1273,15 @@ dependencies = [ "uuid", ] +[[package]] +name = "crucible-pantry-types" +version = "0.1.0" +dependencies = [ + "crucible-workspace-hack", + "schemars", + "serde", +] + [[package]] name = "crucible-protocol" version = "0.0.0" diff --git a/Cargo.toml b/Cargo.toml index 173d54082..9c94aa7d8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,9 @@ members = [ "nbd_server", "package", "pantry", + "pantry-api", "pantry-client", + "pantry-types", "protocol", "repair-client", "smf", @@ -137,7 +139,9 @@ crucible-control-client = { path = "./control-client" } # cleanup issues in the integration tests! crucible-downstairs = { path = "./downstairs" } crucible-pantry = { path = "./pantry" } +crucible-pantry-api = { path = "./pantry-api" } crucible-pantry-client = { path = "./pantry-client" } +crucible-pantry-types = { path = "./pantry-types" } crucible-protocol = { path = "./protocol" } crucible-smf = { path = "./smf" } dsc-client = { path = "./dsc-client" } diff --git a/pantry-api/Cargo.toml b/pantry-api/Cargo.toml new file mode 100644 index 000000000..8e36b38b0 --- /dev/null +++ b/pantry-api/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "crucible-pantry-api" +version = "0.1.0" +edition = "2021" +license = "MPL-2.0" + +[dependencies] +crucible-client-types.workspace = true +crucible-pantry-types.workspace = true +crucible-workspace-hack.workspace = true +dropshot.workspace = true +schemars.workspace = true +serde.workspace = true diff --git a/pantry-api/src/lib.rs b/pantry-api/src/lib.rs new file mode 100644 index 000000000..9e7cad4a0 --- /dev/null +++ b/pantry-api/src/lib.rs @@ -0,0 +1,258 @@ +// Copyright 2025 Oxide Computer Company + +use crucible_client_types::{ReplaceResult, VolumeConstructionRequest}; +use crucible_pantry_types::*; +use dropshot::{ + HttpError, HttpResponseDeleted, HttpResponseOk, + HttpResponseUpdatedNoContent, Path, RequestContext, TypedBody, +}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +#[dropshot::api_description] +pub trait CruciblePantryApi { + type Context; + + /// Get the Pantry's status + #[endpoint { + method = GET, + path = "/crucible/pantry/0", + }] + async fn pantry_status( + rqctx: RequestContext, + ) -> Result, HttpError>; + + /// Get a current Volume's status + #[endpoint { + method = GET, + path = "/crucible/pantry/0/volume/{id}", + }] + async fn volume_status( + rqctx: RequestContext, + path: Path, + ) -> Result, HttpError>; + + /// Construct a volume from a VolumeConstructionRequest, storing the result in + /// the Pantry. + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}", + }] + async fn attach( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result, HttpError>; + + /// Construct a volume from a VolumeConstructionRequest, storing the result in + /// the Pantry. Activate in a separate job so as not to block the request. 
+ #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/background", + }] + async fn attach_activate_background( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result; + + /// Call a volume's target_replace function + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/replace", + }] + async fn replace( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result, HttpError>; + + /// Poll to see if a Pantry background job is done + #[endpoint { + method = GET, + path = "/crucible/pantry/0/job/{id}/is-finished", + }] + async fn is_job_finished( + rqctx: RequestContext, + path: Path, + ) -> Result, HttpError>; + + /// Block on returning a Pantry background job result, then return 200 OK if the + /// job executed OK, 500 otherwise. + #[endpoint { + method = GET, + path = "/crucible/pantry/0/job/{id}/ok", + }] + async fn job_result_ok( + rqctx: RequestContext, + path: Path, + ) -> Result, HttpError>; + + /// Import data from a URL into a volume + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/import-from-url", + }] + async fn import_from_url( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result, HttpError>; + + /// Take a snapshot of a volume + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/snapshot", + }] + async fn snapshot( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result; + + /// Bulk write data into a volume at a specified offset + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/bulk-write", + }] + async fn bulk_write( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result; + + /// Bulk read data from a volume at a specified offset + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/bulk-read", + }] + async fn bulk_read( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result, HttpError>; + + /// Scrub the volume (copy blocks from read-only parent to subvolumes) + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/scrub", + }] + async fn scrub( + rqctx: RequestContext, + path: Path, + ) -> Result, HttpError>; + + /// Validate the digest of a whole volume + #[endpoint { + method = POST, + path = "/crucible/pantry/0/volume/{id}/validate", + }] + async fn validate( + rqctx: RequestContext, + path: Path, + body: TypedBody, + ) -> Result, HttpError>; + + /// Deactivate a volume, removing it from the Pantry + #[endpoint { + method = DELETE, + path = "/crucible/pantry/0/volume/{id}", + }] + async fn detach( + rqctx: RequestContext, + path: Path, + ) -> Result; +} + +#[derive(Deserialize, JsonSchema)] +pub struct VolumePath { + pub id: String, +} + +#[derive(Deserialize, JsonSchema)] +pub struct AttachRequest { + pub volume_construction_request: VolumeConstructionRequest, +} + +#[derive(Serialize, JsonSchema)] +pub struct AttachResult { + pub id: String, +} + +#[derive(Deserialize, JsonSchema)] +pub struct AttachBackgroundRequest { + pub volume_construction_request: VolumeConstructionRequest, + pub job_id: String, +} + +#[derive(Deserialize, JsonSchema)] +pub struct ReplaceRequest { + pub volume_construction_request: VolumeConstructionRequest, +} + +#[derive(Deserialize, JsonSchema)] +pub struct JobPath { + pub id: String, +} + +#[derive(Serialize, JsonSchema)] +pub struct JobPollResponse { + pub job_is_finished: bool, +} + +#[derive(Serialize, JsonSchema)] +pub struct JobResultOkResponse { + pub job_result_ok: bool, +} + +#[derive(Deserialize, 
JsonSchema)] +pub struct ImportFromUrlRequest { + pub url: String, + pub expected_digest: Option, +} + +#[derive(Serialize, JsonSchema)] +pub struct ImportFromUrlResponse { + pub job_id: String, +} + +#[derive(Deserialize, JsonSchema)] +pub struct SnapshotRequest { + pub snapshot_id: String, +} + +#[derive(Deserialize, JsonSchema)] +pub struct BulkWriteRequest { + pub offset: u64, + pub base64_encoded_data: String, +} + +#[derive(Deserialize, JsonSchema)] +pub struct BulkReadRequest { + pub offset: u64, + pub size: usize, +} + +#[derive(Serialize, JsonSchema)] +pub struct BulkReadResponse { + pub base64_encoded_data: String, +} + +#[derive(Serialize, JsonSchema)] +pub struct ScrubResponse { + pub job_id: String, +} + +#[derive(Deserialize, JsonSchema)] +pub struct ValidateRequest { + pub expected_digest: ExpectedDigest, + + // Size to validate in bytes, starting from offset 0. If not specified, the + // total volume size is used. + pub size_to_validate: Option, +} + +#[derive(Serialize, JsonSchema)] +pub struct ValidateResponse { + pub job_id: String, +} diff --git a/pantry-types/Cargo.toml b/pantry-types/Cargo.toml new file mode 100644 index 000000000..1345916ce --- /dev/null +++ b/pantry-types/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "crucible-pantry-types" +version = "0.1.0" +edition = "2021" +license = "MPL-2.0" + +[dependencies] +crucible-workspace-hack.workspace = true +schemars.workspace = true +serde.workspace = true diff --git a/pantry-types/src/lib.rs b/pantry-types/src/lib.rs new file mode 100644 index 000000000..6254fb277 --- /dev/null +++ b/pantry-types/src/lib.rs @@ -0,0 +1,32 @@ +// Copyright 2025 Oxide Computer Company + +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, JsonSchema)] +pub struct PantryStatus { + /// Which volumes does this Pantry know about? Note this may include volumes + /// that are no longer active, and haven't been garbage collected yet. + pub volumes: Vec, + + /// How many job handles? + pub num_job_handles: usize, +} + +#[derive(Serialize, JsonSchema)] +pub struct VolumeStatus { + /// Is the Volume currently active? + pub active: bool, + + /// Has the Pantry ever seen this Volume active? + pub seen_active: bool, + + /// How many job handles are there for this Volume? + pub num_job_handles: usize, +} + +#[derive(Debug, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum ExpectedDigest { + Sha256(String), +} diff --git a/pantry/Cargo.toml b/pantry/Cargo.toml index b37203c4c..fde5aa0cc 100644 --- a/pantry/Cargo.toml +++ b/pantry/Cargo.toml @@ -10,6 +10,8 @@ bytes.workspace = true base64.workspace = true chrono.workspace = true clap.workspace = true +crucible-pantry-api.workspace = true +crucible-pantry-types.workspace = true dropshot.workspace = true futures.workspace = true http.workspace = true diff --git a/pantry/src/main.rs b/pantry/src/main.rs index 2efce87ef..ba9d4da9b 100644 --- a/pantry/src/main.rs +++ b/pantry/src/main.rs @@ -2,6 +2,7 @@ use anyhow::{anyhow, Result}; use clap::Parser; +use crucible_pantry_api::crucible_pantry_api_mod; use semver::Version; use std::io::Write; use std::net::SocketAddr; @@ -46,7 +47,8 @@ async fn main() -> Result<()> { } fn write_openapi(f: &mut W) -> Result<()> { - let api = server::make_api().map_err(|e| anyhow!(e))?; + // TODO: Switch to OpenAPI manager once available. 
+ let api = crucible_pantry_api_mod::stub_api_description()?; api.openapi("Crucible Pantry", Version::new(0, 0, 1)) .write(f)?; Ok(()) diff --git a/pantry/src/pantry.rs b/pantry/src/pantry.rs index 0251e259d..2c1c8838e 100644 --- a/pantry/src/pantry.rs +++ b/pantry/src/pantry.rs @@ -27,10 +27,7 @@ use crucible::Volume; use crucible::VolumeConstructionRequest; use crucible_common::crucible_bail; use crucible_common::CrucibleError; - -use crate::server::ExpectedDigest; -use crate::server::PantryStatus; -use crate::server::VolumeStatus; +use crucible_pantry_types::{ExpectedDigest, PantryStatus, VolumeStatus}; pub enum ActiveObservation { /// This Pantry has never seen this Volume active diff --git a/pantry/src/server.rs b/pantry/src/server.rs index f6a9448a0..6b9ec9e81 100644 --- a/pantry/src/server.rs +++ b/pantry/src/server.rs @@ -1,5 +1,4 @@ -// Copyright 2022 Oxide Computer Company - +// Copyright 2025 Oxide Computer Company use super::pantry::Pantry; use std::net::SocketAddr; @@ -7,454 +6,239 @@ use std::sync::Arc; use anyhow::{anyhow, Result}; use base64::{engine, Engine}; -use dropshot::endpoint; -use dropshot::HandlerTaskMode; -use dropshot::HttpError; -use dropshot::HttpResponseDeleted; -use dropshot::HttpResponseOk; -use dropshot::HttpResponseUpdatedNoContent; -use dropshot::Path as TypedPath; -use dropshot::RequestContext; -use dropshot::TypedBody; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; +use crucible_pantry_api::*; +use crucible_pantry_types::*; +use dropshot::{ + HandlerTaskMode, HttpError, HttpResponseDeleted, HttpResponseOk, + HttpResponseUpdatedNoContent, Path as TypedPath, RequestContext, TypedBody, +}; use slog::{info, o, Logger}; +use std::result::Result as SResult; -use crucible::ReplaceResult; -use crucible::VolumeConstructionRequest; - -#[derive(Serialize, JsonSchema)] -pub struct PantryStatus { - /// Which volumes does this Pantry know about? Note this may include volumes - /// that are no longer active, and haven't been garbage collected yet. - pub volumes: Vec, - - /// How many job handles? - pub num_job_handles: usize, -} +#[derive(Debug)] +pub(crate) struct CruciblePantryImpl; -/// Get the Pantry's status -#[endpoint { - method = GET, - path = "/crucible/pantry/0", -}] -async fn pantry_status( - rc: RequestContext>, -) -> Result, HttpError> { - let pantry = rc.context(); - - let status = pantry.status().await?; - - Ok(HttpResponseOk(status)) -} +impl CruciblePantryApi for CruciblePantryImpl { + type Context = Arc; -#[derive(Deserialize, JsonSchema)] -struct VolumePath { - pub id: String, -} + async fn pantry_status( + rqctx: RequestContext, + ) -> SResult, HttpError> { + let pantry = rqctx.context(); -#[derive(Serialize, JsonSchema)] -pub struct VolumeStatus { - /// Is the Volume currently active? - pub active: bool, + let status = pantry.status().await?; - /// Has the Pantry ever seen this Volume active? - pub seen_active: bool, + Ok(HttpResponseOk(status)) + } - /// How many job handles are there for this Volume? 
- pub num_job_handles: usize, -} + async fn volume_status( + rqctx: RequestContext, + path: TypedPath, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let pantry = rqctx.context(); -/// Get a current Volume's status -#[endpoint { - method = GET, - path = "/crucible/pantry/0/volume/{id}", -}] -async fn volume_status( - rc: RequestContext>, - path: TypedPath, -) -> Result, HttpError> { - let path = path.into_inner(); - let pantry = rc.context(); - - let status = pantry.volume_status(path.id.clone()).await?; - - Ok(HttpResponseOk(status)) -} + let status = pantry.volume_status(path.id.clone()).await?; -#[derive(Deserialize, JsonSchema)] -struct AttachRequest { - pub volume_construction_request: VolumeConstructionRequest, -} + Ok(HttpResponseOk(status)) + } -#[derive(Serialize, JsonSchema)] -struct AttachResult { - pub id: String, -} + async fn attach( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); -/// Construct a volume from a VolumeConstructionRequest, storing the result in -/// the Pantry. -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}", -}] -async fn attach( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result, HttpError> { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - pantry - .attach(path.id.clone(), body.volume_construction_request) - .await?; - - Ok(HttpResponseOk(AttachResult { id: path.id })) -} + pantry + .attach(path.id.clone(), body.volume_construction_request) + .await?; -#[derive(Deserialize, JsonSchema)] -struct AttachBackgroundRequest { - pub volume_construction_request: VolumeConstructionRequest, - pub job_id: String, -} + Ok(HttpResponseOk(AttachResult { id: path.id })) + } -/// Construct a volume from a VolumeConstructionRequest, storing the result in -/// the Pantry. Activate in a separate job so as not to block the request. 
-#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/background", -}] -async fn attach_activate_background( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - pantry - .attach_activate_background( - path.id.clone(), - body.job_id, - body.volume_construction_request, - ) - .await?; - - Ok(HttpResponseUpdatedNoContent()) -} + async fn attach_activate_background( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); + + pantry + .attach_activate_background( + path.id.clone(), + body.job_id, + body.volume_construction_request, + ) + .await?; + + Ok(HttpResponseUpdatedNoContent()) + } -#[derive(Deserialize, JsonSchema)] -struct ReplaceRequest { - pub volume_construction_request: VolumeConstructionRequest, -} + async fn replace( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); -/// Call a volume's target_replace function -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/replace", -}] -async fn replace( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result, HttpError> { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - let result = pantry - .replace(path.id.clone(), body.volume_construction_request) - .await?; - - Ok(HttpResponseOk(result)) -} + let result = pantry + .replace(path.id.clone(), body.volume_construction_request) + .await?; -#[derive(Deserialize, JsonSchema)] -struct JobPath { - pub id: String, -} + Ok(HttpResponseOk(result)) + } -#[derive(Serialize, JsonSchema)] -struct JobPollResponse { - pub job_is_finished: bool, -} + async fn is_job_finished( + rqctx: RequestContext, + path: TypedPath, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let pantry = rqctx.context(); -/// Poll to see if a Pantry background job is done -#[endpoint { - method = GET, - path = "/crucible/pantry/0/job/{id}/is-finished", -}] -async fn is_job_finished( - rc: RequestContext>, - path: TypedPath, -) -> Result, HttpError> { - let path = path.into_inner(); - let pantry = rc.context(); - - let job_is_finished = pantry.is_job_finished(path.id).await?; - - Ok(HttpResponseOk(JobPollResponse { job_is_finished })) -} + let job_is_finished = pantry.is_job_finished(path.id).await?; -#[derive(Serialize, JsonSchema)] -pub struct JobResultOkResponse { - pub job_result_ok: bool, -} + Ok(HttpResponseOk(JobPollResponse { job_is_finished })) + } -/// Block on returning a Pantry background job result, then return 200 OK if the -/// job executed OK, 500 otherwise. -#[endpoint { - method = GET, - path = "/crucible/pantry/0/job/{id}/ok", -}] -async fn job_result_ok( - rc: RequestContext>, - path: TypedPath, -) -> Result, HttpError> { - let path = path.into_inner(); - let pantry = rc.context(); - - match pantry.get_job_result(path.id).await { - Ok(result) => { - // The inner result is from the tokio task itself. 
- Ok(HttpResponseOk(JobResultOkResponse { - job_result_ok: result.is_ok(), - })) + async fn job_result_ok( + rqctx: RequestContext, + path: TypedPath, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let pantry = rqctx.context(); + + match pantry.get_job_result(path.id).await { + Ok(result) => { + // The inner result is from the tokio task itself. + Ok(HttpResponseOk(JobResultOkResponse { + job_result_ok: result.is_ok(), + })) + } + + // Here is where get_job_result will return 404 if the job id is not + // found, or a 500 if the join_handle.await didn't work. + Err(e) => Err(e), } - - // Here is where get_job_result will return 404 if the job id is not - // found, or a 500 if the join_handle.await didn't work. - Err(e) => Err(e), } -} -#[derive(Debug, Deserialize, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub enum ExpectedDigest { - Sha256(String), -} + async fn import_from_url( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); -#[derive(Deserialize, JsonSchema)] -struct ImportFromUrlRequest { - pub url: String, - pub expected_digest: Option, -} + let job_id = pantry + .import_from_url(path.id.clone(), body.url, body.expected_digest) + .await?; -#[derive(Serialize, JsonSchema)] -struct ImportFromUrlResponse { - pub job_id: String, -} - -/// Import data from a URL into a volume -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/import-from-url", -}] -async fn import_from_url( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result, HttpError> { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - let job_id = pantry - .import_from_url(path.id.clone(), body.url, body.expected_digest) - .await?; - - Ok(HttpResponseOk(ImportFromUrlResponse { job_id })) -} + Ok(HttpResponseOk(ImportFromUrlResponse { job_id })) + } -#[derive(Deserialize, JsonSchema)] -struct SnapshotRequest { - pub snapshot_id: String, -} + async fn snapshot( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); -/// Take a snapshot of a volume -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/snapshot", -}] -async fn snapshot( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - pantry.snapshot(path.id.clone(), body.snapshot_id).await?; - - Ok(HttpResponseUpdatedNoContent()) -} + pantry.snapshot(path.id.clone(), body.snapshot_id).await?; -#[derive(Deserialize, JsonSchema)] -struct BulkWriteRequest { - pub offset: u64, + Ok(HttpResponseUpdatedNoContent()) + } - pub base64_encoded_data: String, -} + async fn bulk_write( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); -/// Bulk write data into a volume at a specified offset -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/bulk-write", -}] -async fn bulk_write( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - let data = engine::general_purpose::STANDARD - .decode(body.base64_encoded_data) - .map_err(|e| 
HttpError::for_bad_request(None, e.to_string()))?; - - pantry - .bulk_write(path.id.clone(), body.offset, data) - .await?; - - Ok(HttpResponseUpdatedNoContent()) -} + let data = engine::general_purpose::STANDARD + .decode(body.base64_encoded_data) + .map_err(|e| HttpError::for_bad_request(None, e.to_string()))?; -#[derive(Deserialize, JsonSchema)] -struct BulkReadRequest { - pub offset: u64, - pub size: usize, -} + pantry + .bulk_write(path.id.clone(), body.offset, data) + .await?; -#[derive(Serialize, JsonSchema)] -struct BulkReadResponse { - pub base64_encoded_data: String, -} - -/// Bulk read data from a volume at a specified offset -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/bulk-read", -}] -async fn bulk_read( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result, HttpError> { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - let data = pantry - .bulk_read(path.id.clone(), body.offset, body.size) - .await?; - - Ok(HttpResponseOk(BulkReadResponse { - base64_encoded_data: engine::general_purpose::STANDARD.encode(data), - })) -} + Ok(HttpResponseUpdatedNoContent()) + } -#[derive(Serialize, JsonSchema)] -struct ScrubResponse { - pub job_id: String, -} + async fn bulk_read( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); + + let data = pantry + .bulk_read(path.id.clone(), body.offset, body.size) + .await?; + + Ok(HttpResponseOk(BulkReadResponse { + base64_encoded_data: engine::general_purpose::STANDARD.encode(data), + })) + } -/// Scrub the volume (copy blocks from read-only parent to subvolumes) -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/scrub", -}] -async fn scrub( - rc: RequestContext>, - path: TypedPath, -) -> Result, HttpError> { - let path = path.into_inner(); - let pantry = rc.context(); - - let job_id = pantry.scrub(path.id.clone()).await?; - - Ok(HttpResponseOk(ScrubResponse { job_id })) -} + async fn scrub( + rqctx: RequestContext, + path: TypedPath, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let pantry = rqctx.context(); -#[derive(Deserialize, JsonSchema)] -struct ValidateRequest { - pub expected_digest: ExpectedDigest, + let job_id = pantry.scrub(path.id.clone()).await?; - // Size to validate in bytes, starting from offset 0. If not specified, the - // total volume size is used. 
- pub size_to_validate: Option, -} + Ok(HttpResponseOk(ScrubResponse { job_id })) + } -#[derive(Serialize, JsonSchema)] -struct ValidateResponse { - pub job_id: String, -} + async fn validate( + rqctx: RequestContext, + path: TypedPath, + body: TypedBody, + ) -> SResult, HttpError> { + let path = path.into_inner(); + let body = body.into_inner(); + let pantry = rqctx.context(); + + let job_id = pantry + .validate( + path.id.clone(), + body.expected_digest, + body.size_to_validate, + ) + .await?; + + Ok(HttpResponseOk(ValidateResponse { job_id })) + } -/// Validate the digest of a whole volume -#[endpoint { - method = POST, - path = "/crucible/pantry/0/volume/{id}/validate", -}] -async fn validate( - rc: RequestContext>, - path: TypedPath, - body: TypedBody, -) -> Result, HttpError> { - let path = path.into_inner(); - let body = body.into_inner(); - let pantry = rc.context(); - - let job_id = pantry - .validate(path.id.clone(), body.expected_digest, body.size_to_validate) - .await?; - - Ok(HttpResponseOk(ValidateResponse { job_id })) -} + async fn detach( + rqctx: RequestContext, + path: TypedPath, + ) -> SResult { + let path = path.into_inner(); + let pantry = rqctx.context(); -/// Deactivate a volume, removing it from the Pantry -#[endpoint { - method = DELETE, - path = "/crucible/pantry/0/volume/{id}", -}] -async fn detach( - rc: RequestContext>, - path: TypedPath, -) -> Result { - let path = path.into_inner(); - let pantry = rc.context(); - - pantry.detach(path.id).await?; - - Ok(HttpResponseDeleted()) -} + pantry.detach(path.id).await?; -pub fn make_api() -> Result< - dropshot::ApiDescription>, - dropshot::ApiDescriptionRegisterError, -> { - let mut api = dropshot::ApiDescription::new(); - - api.register(pantry_status)?; - api.register(volume_status)?; - api.register(attach)?; - api.register(attach_activate_background)?; - api.register(replace)?; - api.register(is_job_finished)?; - api.register(job_result_ok)?; - api.register(import_from_url)?; - api.register(snapshot)?; - api.register(bulk_write)?; - api.register(bulk_read)?; - api.register(scrub)?; - api.register(validate)?; - api.register(detach)?; - - Ok(api) + Ok(HttpResponseDeleted()) + } } pub fn run_server( @@ -462,7 +246,7 @@ pub fn run_server( bind_address: SocketAddr, df: &Arc, ) -> Result<(SocketAddr, tokio::task::JoinHandle>)> { - let api = make_api().map_err(|e| anyhow!(e))?; + let api = crucible_pantry_api_mod::api_description::()?; let server = dropshot::HttpServerStarter::new( &dropshot::ConfigDropshot { diff --git a/workspace-hack/Cargo.toml b/workspace-hack/Cargo.toml index ba73ea1fa..43aa711de 100644 --- a/workspace-hack/Cargo.toml +++ b/workspace-hack/Cargo.toml @@ -30,6 +30,7 @@ futures-core = { version = "0.3" } futures-executor = { version = "0.3" } futures-sink = { version = "0.3" } futures-util = { version = "0.3", features = ["channel", "io", "sink"] } +getrandom = { version = "0.2", default-features = false, features = ["std"] } hashbrown = { version = "0.15" } hex = { version = "0.4", features = ["serde"] } indexmap = { version = "2", features = ["serde"] } @@ -81,6 +82,7 @@ futures-core = { version = "0.3" } futures-executor = { version = "0.3" } futures-sink = { version = "0.3" } futures-util = { version = "0.3", features = ["channel", "io", "sink"] } +getrandom = { version = "0.2", default-features = false, features = ["std"] } hashbrown = { version = "0.15" } hex = { version = "0.4", features = ["serde"] } indexmap = { version = "2", features = ["serde"] } @@ -120,7 +122,6 @@ uuid = { version = "1", 
features = ["serde", "v4"] } [target.x86_64-unknown-linux-gnu.dependencies] bitflags = { version = "2", default-features = false, features = ["std"] } dof = { version = "0.3", default-features = false, features = ["des"] } -getrandom = { version = "0.2", default-features = false, features = ["std"] } hyper = { version = "1", features = ["full"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "ring", "tls12", "webpki-tokio"] } hyper-util = { version = "0.1", features = ["full"] } @@ -134,7 +135,6 @@ tokio-util = { version = "0.7", features = ["codec", "io"] } [target.x86_64-unknown-linux-gnu.build-dependencies] bitflags = { version = "2", default-features = false, features = ["std"] } dof = { version = "0.3", default-features = false, features = ["des"] } -getrandom = { version = "0.2", default-features = false, features = ["std"] } hyper = { version = "1", features = ["full"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "ring", "tls12", "webpki-tokio"] } hyper-util = { version = "0.1", features = ["full"] } @@ -146,7 +146,6 @@ tokio-util = { version = "0.7", features = ["codec", "io"] } [target.aarch64-apple-darwin.dependencies] bitflags = { version = "2", default-features = false, features = ["std"] } -getrandom = { version = "0.2", default-features = false, features = ["std"] } hyper = { version = "1", features = ["full"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "ring", "tls12", "webpki-tokio"] } hyper-util = { version = "0.1", features = ["full"] } @@ -158,7 +157,6 @@ tokio-util = { version = "0.7", features = ["codec", "io"] } [target.aarch64-apple-darwin.build-dependencies] bitflags = { version = "2", default-features = false, features = ["std"] } -getrandom = { version = "0.2", default-features = false, features = ["std"] } hyper = { version = "1", features = ["full"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "ring", "tls12", "webpki-tokio"] } hyper-util = { version = "0.1", features = ["full"] } @@ -170,7 +168,6 @@ tokio-util = { version = "0.7", features = ["codec", "io"] } [target.x86_64-unknown-illumos.dependencies] bitflags = { version = "2", default-features = false, features = ["std"] } dof = { version = "0.3", default-features = false, features = ["des"] } -getrandom = { version = "0.2", default-features = false, features = ["std"] } hyper = { version = "1", features = ["full"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "ring", "tls12", "webpki-tokio"] } hyper-util = { version = "0.1", features = ["full"] } @@ -184,7 +181,6 @@ tokio-util = { version = "0.7", features = ["codec", "io"] } [target.x86_64-unknown-illumos.build-dependencies] bitflags = { version = "2", default-features = false, features = ["std"] } dof = { version = "0.3", default-features = false, features = ["des"] } -getrandom = { version = "0.2", default-features = false, features = ["std"] } hyper = { version = "1", features = ["full"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "ring", "tls12", "webpki-tokio"] } hyper-util = { version = "0.1", features = ["full"] }