From 95e0f4e23ff8cf12f57f464922477286012f62c2 Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Tue, 14 Oct 2025 12:55:08 +0200 Subject: [PATCH 01/12] feat: update ReplicateStatusCause serialization and worker_api to support new WorkflowError format --- Cargo.lock | 2 +- pre-compute/src/api/worker_api.rs | 147 +++++++--------- pre-compute/src/compute/app_runner.rs | 18 +- pre-compute/src/compute/dataset.rs | 48 ++++-- pre-compute/src/compute/errors.rs | 176 +++++++++++++++++--- pre-compute/src/compute/pre_compute_app.rs | 7 +- pre-compute/src/compute/pre_compute_args.rs | 28 ++-- pre-compute/src/compute/utils/env_utils.rs | 2 +- 8 files changed, 279 insertions(+), 149 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c3f7c26..30de312 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4159,7 +4159,7 @@ dependencies = [ [[package]] name = "tee-worker-pre-compute" -version = "0.2.0" +version = "0.3.0" dependencies = [ "aes", "alloy-signer", diff --git a/pre-compute/src/api/worker_api.rs b/pre-compute/src/api/worker_api.rs index 3aeeec4..b9c4e17 100644 --- a/pre-compute/src/api/worker_api.rs +++ b/pre-compute/src/api/worker_api.rs @@ -4,40 +4,6 @@ use crate::compute::{ }; use log::error; use reqwest::{blocking::Client, header::AUTHORIZATION}; -use serde::Serialize; - -/// Represents payload that can be sent to the worker API to report the outcome of the -/// pre‑compute stage. 
-/// -/// The JSON structure expected by the REST endpoint is: -/// ```json -/// { -/// "cause": "" -/// } -/// ``` -/// -/// # Arguments -/// -/// * `cause` - A reference to the ReplicateStatusCause indicating why the pre-compute operation exited -/// -/// # Example -/// -/// ```rust -/// use tee_worker_pre_compute::api::worker_api::ExitMessage; -/// use tee_worker_pre_compute::compute::errors::ReplicateStatusCause; -/// -/// let exit_message = ExitMessage::from(&ReplicateStatusCause::PreComputeInvalidTeeSignature); -/// ``` -#[derive(Serialize, Debug)] -pub struct ExitMessage<'a> { - pub cause: &'a ReplicateStatusCause, -} - -impl<'a> From<&'a ReplicateStatusCause> for ExitMessage<'a> { - fn from(cause: &'a ReplicateStatusCause) -> Self { - Self { cause } - } -} /// Thin wrapper around a [`Client`] that knows how to reach the iExec worker API. /// @@ -93,21 +59,21 @@ impl WorkerApiClient { Self::new(&base_url) } - /// Sends an exit cause for a pre-compute operation to the Worker API. + /// Sends exit causes for a pre-compute operation to the Worker API. /// - /// This method reports the exit cause of a pre-compute operation to the Worker API, + /// This method reports the exit causes of a pre-compute operation to the Worker API, /// which can be used for tracking and debugging purposes. 
/// /// # Arguments /// /// * `authorization` - The authorization token to use for the API request - /// * `chain_task_id` - The chain task ID for which to report the exit cause - /// * `exit_cause` - The exit cause to report + /// * `chain_task_id` - The chain task ID for which to report the exit causes + /// * `exit_causes` - The list of exit causes to report /// /// # Returns /// - /// * `Ok(())` - If the exit cause was successfully reported - /// * `Err(Error)` - If the exit cause could not be reported due to an HTTP error + /// * `Ok(())` - If the exit causes were successfully reported + /// * `Err(Error)` - If the exit causes could not be reported due to an HTTP error /// /// # Errors /// @@ -117,33 +83,33 @@ impl WorkerApiClient { /// # Example /// /// ```rust - /// use tee_worker_pre_compute::api::worker_api::{ExitMessage, WorkerApiClient}; + /// use tee_worker_pre_compute::api::worker_api::WorkerApiClient; /// use tee_worker_pre_compute::compute::errors::ReplicateStatusCause; /// /// let client = WorkerApiClient::new("http://worker:13100"); - /// let exit_message = ExitMessage::from(&ReplicateStatusCause::PreComputeInvalidTeeSignature); + /// let exit_causes = vec![ReplicateStatusCause::PreComputeInvalidTeeSignature]; /// - /// match client.send_exit_cause_for_pre_compute_stage( + /// match client.send_exit_causes_for_pre_compute_stage( /// "authorization_token", /// "0x123456789abcdef", - /// &exit_message, + /// &exit_causes, /// ) { - /// Ok(()) => println!("Exit cause reported successfully"), - /// Err(error) => eprintln!("Failed to report exit cause: {error}"), + /// Ok(()) => println!("Exit causes reported successfully"), + /// Err(error) => eprintln!("Failed to report exit causes: {error}"), /// } /// ``` - pub fn send_exit_cause_for_pre_compute_stage( + pub fn send_exit_causes_for_pre_compute_stage( &self, authorization: &str, chain_task_id: &str, - exit_cause: &ExitMessage, + exit_causes: &Vec, ) -> Result<(), ReplicateStatusCause> { let url = 
format!("{}/compute/pre/{chain_task_id}/exit", self.base_url); match self .client .post(&url) .header(AUTHORIZATION, authorization) - .json(exit_cause) + .json(exit_causes) .send() { Ok(resp) => { @@ -152,12 +118,12 @@ impl WorkerApiClient { Ok(()) } else { let body = resp.text().unwrap_or_default(); - error!("Failed to send exit cause: [status:{status}, body:{body}]"); + error!("Failed to send exit causes: [status:{status}, body:{body}]"); Err(ReplicateStatusCause::PreComputeFailedUnknownIssue) } } Err(err) => { - error!("HTTP request failed when sending exit cause to {url}: {err:?}"); + error!("HTTP request failed when sending exit causes to {url}: {err:?}"); Err(ReplicateStatusCause::PreComputeFailedUnknownIssue) } } @@ -175,31 +141,45 @@ mod tests { matchers::{body_json, header, method, path}, }; - // region ExitMessage() + // region Serialization tests #[test] - fn should_serialize_exit_message() { - let causes = [ + fn should_serialize_replicate_status_cause() { + let causes = vec![ ( ReplicateStatusCause::PreComputeInvalidTeeSignature, - "PRE_COMPUTE_INVALID_TEE_SIGNATURE", + r#"{"cause":"PRE_COMPUTE_INVALID_TEE_SIGNATURE","message":"Invalid TEE signature"}"#, ), ( ReplicateStatusCause::PreComputeWorkerAddressMissing, - "PRE_COMPUTE_WORKER_ADDRESS_MISSING", + r#"{"cause":"PRE_COMPUTE_WORKER_ADDRESS_MISSING","message":"Worker address related environment variable is missing"}"#, ), ( - ReplicateStatusCause::PreComputeFailedUnknownIssue, - "PRE_COMPUTE_FAILED_UNKNOWN_ISSUE", + ReplicateStatusCause::PreComputeDatasetUrlMissing(2), + r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 2"}"#, + ), + ( + ReplicateStatusCause::PreComputeInvalidDatasetChecksum(1), + r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 1"}"#, ), ]; - for (cause, message) in causes { - let exit_message = ExitMessage::from(&cause); - let serialized = 
to_string(&exit_message).expect("Failed to serialize"); - let expected = format!("{{\"cause\":\"{message}\"}}"); - assert_eq!(serialized, expected); + for (cause, expected_json) in causes { + let serialized = to_string(&cause).expect("Failed to serialize"); + assert_eq!(serialized, expected_json); } } + + #[test] + fn should_serialize_vec_of_causes() { + let causes = vec![ + ReplicateStatusCause::PreComputeDatasetUrlMissing(0), + ReplicateStatusCause::PreComputeInvalidDatasetChecksum(1), + ]; + + let serialized = to_string(&causes).expect("Failed to serialize"); + let expected = r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 1"}]"#; + assert_eq!(serialized, expected); + } // endregion // region get_worker_api_client @@ -223,18 +203,21 @@ mod tests { } // endregion - // region send_exit_cause_for_pre_compute_stage() + // region send_exit_causes_for_pre_compute_stage() const CHALLENGE: &str = "challenge"; const CHAIN_TASK_ID: &str = "0x123456789abcdef"; #[tokio::test] - async fn should_send_exit_cause() { + async fn should_send_exit_causes() { let mock_server = MockServer::start().await; let server_url = mock_server.uri(); - let expected_body = json!({ - "cause": ReplicateStatusCause::PreComputeInvalidTeeSignature, - }); + let expected_body = json!([ + { + "cause": "PRE_COMPUTE_INVALID_TEE_SIGNATURE", + "message": "Invalid TEE signature" + } + ]); Mock::given(method("POST")) .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit"))) @@ -246,13 +229,12 @@ mod tests { .await; let result = tokio::task::spawn_blocking(move || { - let exit_message = - ExitMessage::from(&ReplicateStatusCause::PreComputeInvalidTeeSignature); + let exit_causes = vec![ReplicateStatusCause::PreComputeInvalidTeeSignature]; let worker_api_client = WorkerApiClient::new(&server_url); - 
worker_api_client.send_exit_cause_for_pre_compute_stage( + worker_api_client.send_exit_causes_for_pre_compute_stage( CHALLENGE, CHAIN_TASK_ID, - &exit_message, + &exit_causes, ) }) .await @@ -262,7 +244,7 @@ mod tests { } #[tokio::test] - async fn should_not_send_exit_cause() { + async fn should_not_send_exit_causes() { testing_logger::setup(); let mock_server = MockServer::start().await; let server_url = mock_server.uri(); @@ -275,13 +257,12 @@ mod tests { .await; let result = tokio::task::spawn_blocking(move || { - let exit_message = - ExitMessage::from(&ReplicateStatusCause::PreComputeFailedUnknownIssue); + let exit_causes = vec![ReplicateStatusCause::PreComputeFailedUnknownIssue]; let worker_api_client = WorkerApiClient::new(&server_url); - let response = worker_api_client.send_exit_cause_for_pre_compute_stage( + let response = worker_api_client.send_exit_causes_for_pre_compute_stage( CHALLENGE, CHAIN_TASK_ID, - &exit_message, + &exit_causes, ); testing_logger::validate(|captured_logs| { let logs = captured_logs @@ -292,7 +273,7 @@ mod tests { assert_eq!(logs.len(), 1); assert_eq!( logs[0].body, - "Failed to send exit cause: [status:503 Service Unavailable, body:Service Unavailable]" + "Failed to send exit causes: [status:503 Service Unavailable, body:Service Unavailable]" ); }); response @@ -308,14 +289,14 @@ mod tests { } #[test] - fn test_send_exit_cause_http_request_failure() { + fn test_send_exit_causes_http_request_failure() { testing_logger::setup(); - let exit_message = ExitMessage::from(&ReplicateStatusCause::PreComputeFailedUnknownIssue); + let exit_causes = vec![ReplicateStatusCause::PreComputeFailedUnknownIssue]; let worker_api_client = WorkerApiClient::new("wrong_url"); - let result = worker_api_client.send_exit_cause_for_pre_compute_stage( + let result = worker_api_client.send_exit_causes_for_pre_compute_stage( CHALLENGE, CHAIN_TASK_ID, - &exit_message, + &exit_causes, ); testing_logger::validate(|captured_logs| { let logs = captured_logs @@ 
-326,7 +307,7 @@ mod tests { assert_eq!(logs.len(), 1); assert_eq!( logs[0].body, - "HTTP request failed when sending exit cause to wrong_url/compute/pre/0x123456789abcdef/exit: reqwest::Error { kind: Builder, source: RelativeUrlWithoutBase }" + "HTTP request failed when sending exit causes to wrong_url/compute/pre/0x123456789abcdef/exit: reqwest::Error { kind: Builder, source: RelativeUrlWithoutBase }" ); }); assert!(result.is_err()); diff --git a/pre-compute/src/compute/app_runner.rs b/pre-compute/src/compute/app_runner.rs index 40a1586..08307b7 100644 --- a/pre-compute/src/compute/app_runner.rs +++ b/pre-compute/src/compute/app_runner.rs @@ -1,4 +1,4 @@ -use crate::api::worker_api::{ExitMessage, WorkerApiClient}; +use crate::api::worker_api::WorkerApiClient; use crate::compute::pre_compute_app::{PreComputeApp, PreComputeAppTrait}; use crate::compute::{ errors::ReplicateStatusCause, @@ -61,14 +61,12 @@ pub fn start_with_app( } }; - let exit_message = ExitMessage { - cause: &exit_cause.clone(), - }; + let exit_causes = vec![exit_cause.clone()]; - match WorkerApiClient::from_env().send_exit_cause_for_pre_compute_stage( + match WorkerApiClient::from_env().send_exit_causes_for_pre_compute_stage( &authorization, chain_task_id, - &exit_message, + &exit_causes, ) { Ok(_) => ExitMode::ReportedFailure, Err(_) => { @@ -231,10 +229,10 @@ mod pre_compute_start_with_app_tests { async fn start_succeeds_when_send_exit_cause_api_success() { let mock_server = MockServer::start().await; - let expected_cause_enum = ReplicateStatusCause::PreComputeOutputFolderNotFound; - let expected_exit_message_payload = json!({ - "cause": expected_cause_enum // Relies on ReplicateStatusCause's Serialize impl - }); + let expected_exit_message_payload = json!([{ + "cause": "PRE_COMPUTE_OUTPUT_FOLDER_NOT_FOUND", + "message": "Input files number related environment variable is missing" + }]); // Mock the worker API to return success Mock::given(method("POST")) diff --git 
a/pre-compute/src/compute/dataset.rs b/pre-compute/src/compute/dataset.rs index 33003d0..536ddf3 100644 --- a/pre-compute/src/compute/dataset.rs +++ b/pre-compute/src/compute/dataset.rs @@ -48,6 +48,7 @@ impl Dataset { /// # Arguments /// /// * `chain_task_id` - The chain task ID for logging + /// * `index` - The dataset index for error reporting /// /// # Returns /// @@ -57,6 +58,7 @@ impl Dataset { pub fn download_encrypted_dataset( &self, chain_task_id: &str, + index: usize, ) -> Result, ReplicateStatusCause> { info!( "Downloading encrypted dataset file [chainTaskId:{chain_task_id}, url:{}]", @@ -79,7 +81,7 @@ impl Dataset { } else { download_from_url(&self.url) } - .ok_or(ReplicateStatusCause::PreComputeDatasetDownloadFailed)?; + .ok_or(ReplicateStatusCause::PreComputeDatasetDownloadFailed(index))?; info!("Checking encrypted dataset checksum [chainTaskId:{chain_task_id}]"); let actual_checksum = sha256_from_bytes(&encrypted_content); @@ -89,7 +91,9 @@ impl Dataset { "Invalid dataset checksum [chainTaskId:{chain_task_id}, expected:{}, actual:{actual_checksum}]", self.checksum ); - return Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum); + return Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum( + index, + )); } info!("Dataset downloaded and verified successfully."); @@ -104,6 +108,7 @@ impl Dataset { /// # Arguments /// /// * `encrypted_content` - Full encrypted dataset, including the IV prefix. 
+ /// * `index` - The dataset index for error reporting /// /// # Returns /// @@ -112,13 +117,16 @@ impl Dataset { pub fn decrypt_dataset( &self, encrypted_content: &[u8], + index: usize, ) -> Result, ReplicateStatusCause> { let key = general_purpose::STANDARD .decode(&self.key) - .map_err(|_| ReplicateStatusCause::PreComputeDatasetDecryptionFailed)?; + .map_err(|_| ReplicateStatusCause::PreComputeDatasetDecryptionFailed(index))?; if encrypted_content.len() < AES_IV_LENGTH || key.len() != AES_KEY_LENGTH { - return Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed); + return Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed( + index, + )); } let key_slice = &key[..AES_KEY_LENGTH]; @@ -127,7 +135,7 @@ impl Dataset { Aes256CbcDec::new(key_slice.into(), iv_slice.into()) .decrypt_padded_vec_mut::(ciphertext) - .map_err(|_| ReplicateStatusCause::PreComputeDatasetDecryptionFailed) + .map_err(|_| ReplicateStatusCause::PreComputeDatasetDecryptionFailed(index)) } } @@ -160,7 +168,7 @@ mod tests { #[test] fn download_encrypted_dataset_success() { let dataset = get_test_dataset(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 0); assert!(actual_content.is_ok()); } @@ -168,10 +176,10 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_dataset_url() { let mut dataset = get_test_dataset(); dataset.url = "http://bad-url".to_string(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 10); assert_eq!( actual_content, - Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed) + Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed(10)) ); } @@ -181,7 +189,7 @@ mod tests { dataset.url = IPFS_DATASET_URL.to_string(); dataset.checksum = "0x323b1637c7999942fbebfe5d42fe15dbfe93737577663afa0181938d7ad4a2ac".to_string(); - let actual_content = 
dataset.download_encrypted_dataset(CHAIN_TASK_ID); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 0); let expected_content = Ok("hello world !\n".as_bytes().to_vec()); assert_eq!(actual_content, expected_content); } @@ -190,8 +198,8 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_gateway() { let mut dataset = get_test_dataset(); dataset.url = "/ipfs/INVALID_IPFS_DATASET_URL".to_string(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); - let expected_content = Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 1); + let expected_content = Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed(1)); assert_eq!(actual_content, expected_content); } @@ -199,8 +207,8 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_dataset_checksum() { let mut dataset = get_test_dataset(); dataset.checksum = "invalid_dataset_checksum".to_string(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); - let expected_content = Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 9999); + let expected_content = Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum(9999)); assert_eq!(actual_content, expected_content); } // endregion @@ -210,9 +218,11 @@ mod tests { fn decrypt_dataset_success_with_valid_dataset() { let dataset = get_test_dataset(); - let encrypted_data = dataset.download_encrypted_dataset(CHAIN_TASK_ID).unwrap(); + let encrypted_data = dataset + .download_encrypted_dataset(CHAIN_TASK_ID, 3) + .unwrap(); let expected_plain_data = Ok("Some very useful data.".as_bytes().to_vec()); - let actual_plain_data = dataset.decrypt_dataset(&encrypted_data); + let actual_plain_data = dataset.decrypt_dataset(&encrypted_data, 3); assert_eq!(actual_plain_data, expected_plain_data); } @@ -221,12 +231,14 @@ mod tests { fn 
decrypt_dataset_failure_with_bad_key() { let mut dataset = get_test_dataset(); dataset.key = "bad_key".to_string(); - let encrypted_data = dataset.download_encrypted_dataset(CHAIN_TASK_ID).unwrap(); - let actual_plain_data = dataset.decrypt_dataset(&encrypted_data); + let encrypted_data = dataset + .download_encrypted_dataset(CHAIN_TASK_ID, 55) + .unwrap(); + let actual_plain_data = dataset.decrypt_dataset(&encrypted_data, 55); assert_eq!( actual_plain_data, - Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed) + Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed(55)) ); } // endregion diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index 51ceace..ac6709a 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ -1,24 +1,24 @@ -use serde::{Deserialize, Serialize}; +use serde::Serializer; +use serde::ser::SerializeStruct; use thiserror::Error; -#[derive(Debug, PartialEq, Clone, Error, Serialize, Deserialize)] -#[serde(rename_all(serialize = "SCREAMING_SNAKE_CASE"))] +#[derive(Debug, PartialEq, Clone, Error)] #[allow(clippy::enum_variant_names)] pub enum ReplicateStatusCause { - #[error("At least one input file URL is missing")] - PreComputeAtLeastOneInputFileUrlMissing, - #[error("Dataset checksum related environment variable is missing")] - PreComputeDatasetChecksumMissing, - #[error("Failed to decrypt dataset")] - PreComputeDatasetDecryptionFailed, - #[error("Failed to download encrypted dataset file")] - PreComputeDatasetDownloadFailed, - #[error("Dataset filename related environment variable is missing")] - PreComputeDatasetFilenameMissing, - #[error("Dataset key related environment variable is missing")] - PreComputeDatasetKeyMissing, - #[error("Dataset URL related environment variable is missing")] - PreComputeDatasetUrlMissing, + #[error("input file URL {0} is missing")] + PreComputeAtLeastOneInputFileUrlMissing(usize), + #[error("Dataset checksum related environment variable 
is missing for dataset {0}")] + PreComputeDatasetChecksumMissing(usize), + #[error("Failed to decrypt dataset {0}")] + PreComputeDatasetDecryptionFailed(usize), + #[error("Failed to download encrypted dataset file for dataset {0}")] + PreComputeDatasetDownloadFailed(usize), + #[error("Dataset filename related environment variable is missing for dataset {0}")] + PreComputeDatasetFilenameMissing(usize), + #[error("Dataset key related environment variable is missing for dataset {0}")] + PreComputeDatasetKeyMissing(usize), + #[error("Dataset URL related environment variable is missing for dataset {0}")] + PreComputeDatasetUrlMissing(usize), #[error("Unexpected error occurred")] PreComputeFailedUnknownIssue, #[error("Invalid TEE signature")] @@ -29,8 +29,8 @@ pub enum ReplicateStatusCause { PreComputeInputFileDownloadFailed, #[error("Input files number related environment variable is missing")] PreComputeInputFilesNumberMissing, - #[error("Invalid dataset checksum")] - PreComputeInvalidDatasetChecksum, + #[error("Invalid dataset checksum for dataset {0}")] + PreComputeInvalidDatasetChecksum(usize), #[error("Input files number related environment variable is missing")] PreComputeOutputFolderNotFound, #[error("Output path related environment variable is missing")] @@ -44,3 +44,141 @@ pub enum ReplicateStatusCause { #[error("Worker address related environment variable is missing")] PreComputeWorkerAddressMissing, } + +impl serde::Serialize for ReplicateStatusCause { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut state = serializer.serialize_struct("ReplicateStatusCause", 2)?; + + let cause_name = match self { + ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing(_) => { + "PRE_COMPUTE_AT_LEAST_ONE_INPUT_FILE_URL_MISSING" + } + ReplicateStatusCause::PreComputeDatasetChecksumMissing(_) => { + "PRE_COMPUTE_DATASET_CHECKSUM_MISSING" + } + ReplicateStatusCause::PreComputeDatasetDecryptionFailed(_) => { + 
"PRE_COMPUTE_DATASET_DECRYPTION_FAILED" + } + ReplicateStatusCause::PreComputeDatasetDownloadFailed(_) => { + "PRE_COMPUTE_DATASET_DOWNLOAD_FAILED" + } + ReplicateStatusCause::PreComputeDatasetFilenameMissing(_) => { + "PRE_COMPUTE_DATASET_FILENAME_MISSING" + } + ReplicateStatusCause::PreComputeDatasetKeyMissing(_) => { + "PRE_COMPUTE_DATASET_KEY_MISSING" + } + ReplicateStatusCause::PreComputeDatasetUrlMissing(_) => { + "PRE_COMPUTE_DATASET_URL_MISSING" + } + ReplicateStatusCause::PreComputeFailedUnknownIssue => { + "PRE_COMPUTE_FAILED_UNKNOWN_ISSUE" + } + ReplicateStatusCause::PreComputeInvalidTeeSignature => { + "PRE_COMPUTE_INVALID_TEE_SIGNATURE" + } + ReplicateStatusCause::PreComputeIsDatasetRequiredMissing => { + "PRE_COMPUTE_IS_DATASET_REQUIRED_MISSING" + } + ReplicateStatusCause::PreComputeInputFileDownloadFailed => { + "PRE_COMPUTE_INPUT_FILE_DOWNLOAD_FAILED" + } + ReplicateStatusCause::PreComputeInputFilesNumberMissing => { + "PRE_COMPUTE_INPUT_FILES_NUMBER_MISSING" + } + ReplicateStatusCause::PreComputeInvalidDatasetChecksum(_) => { + "PRE_COMPUTE_INVALID_DATASET_CHECKSUM" + } + ReplicateStatusCause::PreComputeOutputFolderNotFound => { + "PRE_COMPUTE_OUTPUT_FOLDER_NOT_FOUND" + } + ReplicateStatusCause::PreComputeOutputPathMissing => "PRE_COMPUTE_OUTPUT_PATH_MISSING", + ReplicateStatusCause::PreComputeSavingPlainDatasetFailed => { + "PRE_COMPUTE_SAVING_PLAIN_DATASET_FAILED" + } + ReplicateStatusCause::PreComputeTaskIdMissing => "PRE_COMPUTE_TASK_ID_MISSING", + ReplicateStatusCause::PreComputeTeeChallengePrivateKeyMissing => { + "PRE_COMPUTE_TEE_CHALLENGE_PRIVATE_KEY_MISSING" + } + ReplicateStatusCause::PreComputeWorkerAddressMissing => { + "PRE_COMPUTE_WORKER_ADDRESS_MISSING" + } + }; + + state.serialize_field("cause", cause_name)?; + state.serialize_field("message", &self.to_string())?; + state.end() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::to_string; + + #[test] + fn test_serialize_dataset_error_with_index() { + let cause = 
ReplicateStatusCause::PreComputeDatasetUrlMissing(2); + let serialized = to_string(&cause).unwrap(); + assert_eq!( + serialized, + r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 2"}"# + ); + } + + #[test] + fn test_serialize_non_dataset_error() { + let cause = ReplicateStatusCause::PreComputeInvalidTeeSignature; + let serialized = to_string(&cause).unwrap(); + assert_eq!( + serialized, + r#"{"cause":"PRE_COMPUTE_INVALID_TEE_SIGNATURE","message":"Invalid TEE signature"}"# + ); + } + + #[test] + fn test_serialize_all_dataset_errors() { + let test_cases = vec![ + ( + ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing(1), + r#"{"cause":"PRE_COMPUTE_AT_LEAST_ONE_INPUT_FILE_URL_MISSING","message":"input file URL 1 is missing"}"#, + ), + ( + ReplicateStatusCause::PreComputeDatasetChecksumMissing(3), + r#"{"cause":"PRE_COMPUTE_DATASET_CHECKSUM_MISSING","message":"Dataset checksum related environment variable is missing for dataset 3"}"#, + ), + ( + ReplicateStatusCause::PreComputeDatasetDecryptionFailed(0), + r#"{"cause":"PRE_COMPUTE_DATASET_DECRYPTION_FAILED","message":"Failed to decrypt dataset 0"}"#, + ), + ( + ReplicateStatusCause::PreComputeDatasetDownloadFailed(5), + r#"{"cause":"PRE_COMPUTE_DATASET_DOWNLOAD_FAILED","message":"Failed to download encrypted dataset file for dataset 5"}"#, + ), + ( + ReplicateStatusCause::PreComputeInvalidDatasetChecksum(2), + r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 2"}"#, + ), + ]; + + for (cause, expected) in test_cases { + let serialized = to_string(&cause).unwrap(); + assert_eq!(serialized, expected); + } + } + + #[test] + fn test_serialize_vec_of_errors() { + let causes = vec![ + ReplicateStatusCause::PreComputeDatasetUrlMissing(5), + ReplicateStatusCause::PreComputeInvalidDatasetChecksum(99), + ]; + + let serialized = to_string(&causes).unwrap(); + let expected = 
r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 5"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 99"}]"#; + assert_eq!(serialized, expected); + } +} diff --git a/pre-compute/src/compute/pre_compute_app.rs b/pre-compute/src/compute/pre_compute_app.rs index ca12b10..aefa42a 100644 --- a/pre-compute/src/compute/pre_compute_app.rs +++ b/pre-compute/src/compute/pre_compute_app.rs @@ -59,9 +59,10 @@ impl PreComputeAppTrait for PreComputeApp { // TODO: Collect all errors instead of propagating immediately, and return the list of errors self.pre_compute_args = PreComputeArgs::read_args()?; self.check_output_folder()?; - for dataset in &self.pre_compute_args.datasets { - let encrypted_content = dataset.download_encrypted_dataset(&self.chain_task_id)?; - let plain_content = dataset.decrypt_dataset(&encrypted_content)?; + for (index, dataset) in self.pre_compute_args.datasets.iter().enumerate() { + let encrypted_content = + dataset.download_encrypted_dataset(&self.chain_task_id, index)?; + let plain_content = dataset.decrypt_dataset(&encrypted_content, index)?; self.save_plain_dataset_file(&plain_content, &dataset.filename)?; } self.download_input_files()?; diff --git a/pre-compute/src/compute/pre_compute_args.rs b/pre-compute/src/compute/pre_compute_args.rs index 1bd074c..ca4f36d 100644 --- a/pre-compute/src/compute/pre_compute_args.rs +++ b/pre-compute/src/compute/pre_compute_args.rs @@ -88,19 +88,19 @@ impl PreComputeArgs { for i in start_index..=iexec_bulk_slice_size { let url = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetUrl(i), - ReplicateStatusCause::PreComputeDatasetUrlMissing, // TODO: replace with a more specific error for bulk dataset + ReplicateStatusCause::PreComputeDatasetUrlMissing(i), )?; let checksum = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetChecksum(i), - 
ReplicateStatusCause::PreComputeDatasetChecksumMissing, // TODO: replace with a more specific error for bulk dataset + ReplicateStatusCause::PreComputeDatasetChecksumMissing(i), )?; let filename = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetFilename(i), - ReplicateStatusCause::PreComputeDatasetFilenameMissing, // TODO: replace with a more specific error for bulk dataset + ReplicateStatusCause::PreComputeDatasetFilenameMissing(i), )?; let key = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetKey(i), - ReplicateStatusCause::PreComputeDatasetKeyMissing, // TODO: replace with a more specific error for bulk dataset + ReplicateStatusCause::PreComputeDatasetKeyMissing(i), )?; datasets.push(Dataset::new(url, checksum, filename, key)); @@ -118,7 +118,7 @@ impl PreComputeArgs { for i in 1..=input_files_nb { let url = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecInputFileUrlPrefix(i), - ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing, + ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing(i), )?; input_files.push(url); } @@ -427,7 +427,7 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetUrlMissing + ReplicateStatusCause::PreComputeDatasetUrlMissing(1) ); }); } @@ -446,7 +446,7 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetChecksumMissing + ReplicateStatusCause::PreComputeDatasetChecksumMissing(2) ); }); } @@ -465,7 +465,7 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetFilenameMissing + ReplicateStatusCause::PreComputeDatasetFilenameMissing(2) ); }); } @@ -484,7 +484,7 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetKeyMissing + ReplicateStatusCause::PreComputeDatasetKeyMissing(1) ); }); } @@ -508,23 +508,23 @@ mod tests { ), ( 
IexecDatasetUrl(0), - ReplicateStatusCause::PreComputeDatasetUrlMissing, + ReplicateStatusCause::PreComputeDatasetUrlMissing(0), ), ( IexecDatasetKey(0), - ReplicateStatusCause::PreComputeDatasetKeyMissing, + ReplicateStatusCause::PreComputeDatasetKeyMissing(0), ), ( IexecDatasetChecksum(0), - ReplicateStatusCause::PreComputeDatasetChecksumMissing, + ReplicateStatusCause::PreComputeDatasetChecksumMissing(0), ), ( IexecDatasetFilename(0), - ReplicateStatusCause::PreComputeDatasetFilenameMissing, + ReplicateStatusCause::PreComputeDatasetFilenameMissing(0), ), ( IexecInputFileUrlPrefix(1), - ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing, + ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing(1), ), ]; for (env_var, error) in missing_env_var_causes { diff --git a/pre-compute/src/compute/utils/env_utils.rs b/pre-compute/src/compute/utils/env_utils.rs index 270f0d6..470b50c 100644 --- a/pre-compute/src/compute/utils/env_utils.rs +++ b/pre-compute/src/compute/utils/env_utils.rs @@ -202,7 +202,7 @@ mod tests { #[test] fn get_env_var_or_error_succeeds_when_indexed_variables() { let env_var = TeeSessionEnvironmentVariable::IexecDatasetChecksum(1); - let status_cause = ReplicateStatusCause::PreComputeDatasetChecksumMissing; + let status_cause = ReplicateStatusCause::PreComputeDatasetChecksumMissing(1); temp_env::with_var("IEXEC_DATASET_1_CHECKSUM", Some("abc123def456"), || { let result = get_env_var_or_error(env_var, status_cause.clone()); From cac78db9b9a7e6e7754ecde90aa53e937f9b6f2e Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Tue, 14 Oct 2025 14:53:05 +0200 Subject: [PATCH 02/12] style: clean up serde imports in errors.rs --- pre-compute/src/compute/errors.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index ac6709a..7623e0a 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ -1,5 +1,4 @@ -use serde::Serializer; 
-use serde::ser::SerializeStruct; +use serde::{Serializer, ser::SerializeStruct}; use thiserror::Error; #[derive(Debug, PartialEq, Clone, Error)] From 31565cf397972ce07f0042b4ca7df79cdbb0d4cb Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Wed, 15 Oct 2025 09:40:26 +0200 Subject: [PATCH 03/12] refactor: simplify serialize method implementation using strum crate --- Cargo.lock | 20 ++++++++ pre-compute/Cargo.toml | 2 + pre-compute/src/compute/app_runner.rs | 2 +- pre-compute/src/compute/errors.rs | 66 +++------------------------ 4 files changed, 29 insertions(+), 61 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 30de312..ad17538 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4039,6 +4039,24 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "subtle" version = "2.6.1" @@ -4175,6 +4193,8 @@ dependencies = [ "serde_json", "sha256", "sha3", + "strum", + "strum_macros", "temp-env", "tempfile", "testcontainers", diff --git a/pre-compute/Cargo.toml b/pre-compute/Cargo.toml index 8ecd4a2..c821816 100644 --- a/pre-compute/Cargo.toml +++ b/pre-compute/Cargo.toml @@ -16,6 +16,8 @@ reqwest = { version = "0.12.15", features = ["blocking", "json"] } serde = "1.0.219" sha256 = "1.6.0" sha3 = "0.10.8" +strum = "0.27.2" +strum_macros = "0.27.2" thiserror = "2.0.12" [dev-dependencies] diff --git a/pre-compute/src/compute/app_runner.rs b/pre-compute/src/compute/app_runner.rs index 08307b7..b2da7cb 100644 --- a/pre-compute/src/compute/app_runner.rs +++ b/pre-compute/src/compute/app_runner.rs @@ -231,7 
+231,7 @@ mod pre_compute_start_with_app_tests { let expected_exit_message_payload = json!([{ "cause": "PRE_COMPUTE_OUTPUT_FOLDER_NOT_FOUND", - "message": "Input files number related environment variable is missing" + "message": "Output folder related environment variable is missing" }]); // Mock the worker API to return success diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index 7623e0a..65e7c18 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ -1,7 +1,10 @@ use serde::{Serializer, ser::SerializeStruct}; use thiserror::Error; +use strum_macros::EnumDiscriminants; -#[derive(Debug, PartialEq, Clone, Error)] +#[derive(Debug, PartialEq, Clone, Error, EnumDiscriminants)] +#[strum_discriminants(derive(serde::Serialize))] +#[strum_discriminants(serde(rename_all = "SCREAMING_SNAKE_CASE"))] #[allow(clippy::enum_variant_names)] pub enum ReplicateStatusCause { #[error("input file URL {0} is missing")] @@ -30,7 +33,7 @@ pub enum ReplicateStatusCause { PreComputeInputFilesNumberMissing, #[error("Invalid dataset checksum for dataset {0}")] PreComputeInvalidDatasetChecksum(usize), - #[error("Input files number related environment variable is missing")] + #[error("Output folder related environment variable is missing")] PreComputeOutputFolderNotFound, #[error("Output path related environment variable is missing")] PreComputeOutputPathMissing, @@ -50,64 +53,7 @@ impl serde::Serialize for ReplicateStatusCause { S: Serializer, { let mut state = serializer.serialize_struct("ReplicateStatusCause", 2)?; - - let cause_name = match self { - ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing(_) => { - "PRE_COMPUTE_AT_LEAST_ONE_INPUT_FILE_URL_MISSING" - } - ReplicateStatusCause::PreComputeDatasetChecksumMissing(_) => { - "PRE_COMPUTE_DATASET_CHECKSUM_MISSING" - } - ReplicateStatusCause::PreComputeDatasetDecryptionFailed(_) => { - "PRE_COMPUTE_DATASET_DECRYPTION_FAILED" - } - 
ReplicateStatusCause::PreComputeDatasetDownloadFailed(_) => { - "PRE_COMPUTE_DATASET_DOWNLOAD_FAILED" - } - ReplicateStatusCause::PreComputeDatasetFilenameMissing(_) => { - "PRE_COMPUTE_DATASET_FILENAME_MISSING" - } - ReplicateStatusCause::PreComputeDatasetKeyMissing(_) => { - "PRE_COMPUTE_DATASET_KEY_MISSING" - } - ReplicateStatusCause::PreComputeDatasetUrlMissing(_) => { - "PRE_COMPUTE_DATASET_URL_MISSING" - } - ReplicateStatusCause::PreComputeFailedUnknownIssue => { - "PRE_COMPUTE_FAILED_UNKNOWN_ISSUE" - } - ReplicateStatusCause::PreComputeInvalidTeeSignature => { - "PRE_COMPUTE_INVALID_TEE_SIGNATURE" - } - ReplicateStatusCause::PreComputeIsDatasetRequiredMissing => { - "PRE_COMPUTE_IS_DATASET_REQUIRED_MISSING" - } - ReplicateStatusCause::PreComputeInputFileDownloadFailed => { - "PRE_COMPUTE_INPUT_FILE_DOWNLOAD_FAILED" - } - ReplicateStatusCause::PreComputeInputFilesNumberMissing => { - "PRE_COMPUTE_INPUT_FILES_NUMBER_MISSING" - } - ReplicateStatusCause::PreComputeInvalidDatasetChecksum(_) => { - "PRE_COMPUTE_INVALID_DATASET_CHECKSUM" - } - ReplicateStatusCause::PreComputeOutputFolderNotFound => { - "PRE_COMPUTE_OUTPUT_FOLDER_NOT_FOUND" - } - ReplicateStatusCause::PreComputeOutputPathMissing => "PRE_COMPUTE_OUTPUT_PATH_MISSING", - ReplicateStatusCause::PreComputeSavingPlainDatasetFailed => { - "PRE_COMPUTE_SAVING_PLAIN_DATASET_FAILED" - } - ReplicateStatusCause::PreComputeTaskIdMissing => "PRE_COMPUTE_TASK_ID_MISSING", - ReplicateStatusCause::PreComputeTeeChallengePrivateKeyMissing => { - "PRE_COMPUTE_TEE_CHALLENGE_PRIVATE_KEY_MISSING" - } - ReplicateStatusCause::PreComputeWorkerAddressMissing => { - "PRE_COMPUTE_WORKER_ADDRESS_MISSING" - } - }; - - state.serialize_field("cause", cause_name)?; + state.serialize_field("cause", &ReplicateStatusCauseDiscriminants::from(self))?; state.serialize_field("message", &self.to_string())?; state.end() } From d825047466453c7208622774c94104b5c7bcfcba Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Wed, 15 Oct 2025 
09:44:19 +0200 Subject: [PATCH 04/12] style: format the code --- pre-compute/src/compute/errors.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index 65e7c18..c7909a0 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ -1,6 +1,6 @@ use serde::{Serializer, ser::SerializeStruct}; -use thiserror::Error; use strum_macros::EnumDiscriminants; +use strum_macros::EnumDiscriminants; +use thiserror::Error; #[derive(Debug, PartialEq, Clone, Error, EnumDiscriminants)] #[strum_discriminants(derive(serde::Serialize))] From b92beb522a6e6932b6943631a8005ba42abdd3ad Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Thu, 16 Oct 2025 12:18:16 +0200 Subject: [PATCH 05/12] refactor: use slice instead of Vec reference for exit_causes --- pre-compute/src/api/worker_api.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pre-compute/src/api/worker_api.rs b/pre-compute/src/api/worker_api.rs index b9c4e17..ab5e0b7 100644 --- a/pre-compute/src/api/worker_api.rs +++ b/pre-compute/src/api/worker_api.rs @@ -102,7 +102,7 @@ impl WorkerApiClient { &self, authorization: &str, chain_task_id: &str, - exit_causes: &Vec, + exit_causes: &[ReplicateStatusCause], ) -> Result<(), ReplicateStatusCause> { let url = format!("{}/compute/pre/{chain_task_id}/exit", self.base_url); match self From 48897436636e3b8d82854f9e1a54a7afb3159319 Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Thu, 16 Oct 2025 17:27:13 +0200 Subject: [PATCH 06/12] fix: correct endpoint path to /exit-causes --- pre-compute/src/api/worker_api.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pre-compute/src/api/worker_api.rs b/pre-compute/src/api/worker_api.rs index ab5e0b7..e7a923f 100644 --- a/pre-compute/src/api/worker_api.rs +++ b/pre-compute/src/api/worker_api.rs @@ -104,7 +104,7 @@ impl WorkerApiClient { chain_task_id: &str, exit_causes: &[ReplicateStatusCause], ) -> Result<(), ReplicateStatusCause> {
- let url = format!("{}/compute/pre/{chain_task_id}/exit", self.base_url); + let url = format!("{}/compute/pre/{chain_task_id}/exit-causes", self.base_url); match self .client .post(&url) From b5fb947e7cd2309ffc2378307be0c66df4418561 Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Fri, 17 Oct 2025 09:46:37 +0200 Subject: [PATCH 07/12] fix: adapt worker_api tests --- post-compute/src/api/worker_api.rs | 6 +++--- pre-compute/src/api/worker_api.rs | 6 +++--- pre-compute/src/compute/app_runner.rs | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/post-compute/src/api/worker_api.rs b/post-compute/src/api/worker_api.rs index ff3943d..0bf3d3e 100644 --- a/post-compute/src/api/worker_api.rs +++ b/post-compute/src/api/worker_api.rs @@ -143,7 +143,7 @@ impl WorkerApiClient { chain_task_id: &str, exit_cause: &ExitMessage, ) -> Result<(), ReplicateStatusCause> { - let url = format!("{}/compute/post/{chain_task_id}/exit", self.base_url); + let url = format!("{}/compute/post/{chain_task_id}/exit-causes", self.base_url); match self .client .post(&url) @@ -325,7 +325,7 @@ mod tests { }); Mock::given(method("POST")) - .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit"))) + .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit-causes"))) .and(header("Authorization", CHALLENGE)) .and(body_json(&expected_body)) .respond_with(ResponseTemplate::new(200)) @@ -360,7 +360,7 @@ mod tests { let server_url = mock_server.uri(); Mock::given(method("POST")) - .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit"))) + .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit-causes"))) .respond_with(ResponseTemplate::new(404)) .expect(1) .mount(&mock_server) diff --git a/pre-compute/src/api/worker_api.rs b/pre-compute/src/api/worker_api.rs index e7a923f..5092fe6 100644 --- a/pre-compute/src/api/worker_api.rs +++ b/pre-compute/src/api/worker_api.rs @@ -220,7 +220,7 @@ mod tests { ]); Mock::given(method("POST")) - .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit"))) + 
.and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit-causes"))) .and(header("Authorization", CHALLENGE)) .and(body_json(&expected_body)) .respond_with(ResponseTemplate::new(200)) @@ -250,7 +250,7 @@ mod tests { let server_url = mock_server.uri(); Mock::given(method("POST")) - .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit"))) + .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit-causes"))) .respond_with(ResponseTemplate::new(503).set_body_string("Service Unavailable")) .expect(1) .mount(&mock_server) @@ -307,7 +307,7 @@ mod tests { assert_eq!(logs.len(), 1); assert_eq!( logs[0].body, - "HTTP request failed when sending exit causes to wrong_url/compute/pre/0x123456789abcdef/exit: reqwest::Error { kind: Builder, source: RelativeUrlWithoutBase }" + "HTTP request failed when sending exit causes to wrong_url/compute/pre/0x123456789abcdef/exit-causes: reqwest::Error { kind: Builder, source: RelativeUrlWithoutBase }" ); }); assert!(result.is_err()); diff --git a/pre-compute/src/compute/app_runner.rs b/pre-compute/src/compute/app_runner.rs index b2da7cb..093a47b 100644 --- a/pre-compute/src/compute/app_runner.rs +++ b/pre-compute/src/compute/app_runner.rs @@ -191,7 +191,7 @@ mod pre_compute_start_with_app_tests { let mock_server = MockServer::start().await; Mock::given(method("POST")) - .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit"))) + .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit-causes"))) .respond_with(ResponseTemplate::new(500)) .mount(&mock_server) .await; @@ -236,7 +236,7 @@ mod pre_compute_start_with_app_tests { // Mock the worker API to return success Mock::given(method("POST")) - .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit"))) + .and(path(format!("/compute/pre/{CHAIN_TASK_ID}/exit-causes"))) .and(body_json(expected_exit_message_payload)) .respond_with(ResponseTemplate::new(200)) .expect(1) From 0f59f599f496b6b92ae096a53674cc2674283777 Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Fri, 17 Oct 2025 16:18:27 +0200 Subject: [PATCH 
08/12] test: improve test naming for clarity and consistency --- pre-compute/src/api/worker_api.rs | 14 +++++++------- pre-compute/src/compute/errors.rs | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/pre-compute/src/api/worker_api.rs b/pre-compute/src/api/worker_api.rs index 5092fe6..eb80982 100644 --- a/pre-compute/src/api/worker_api.rs +++ b/pre-compute/src/api/worker_api.rs @@ -143,7 +143,7 @@ mod tests { // region Serialization tests #[test] - fn should_serialize_replicate_status_cause() { + fn serialize_replicate_status_cause_succeeds_when_single_cause() { let causes = vec![ ( ReplicateStatusCause::PreComputeInvalidTeeSignature, @@ -170,7 +170,7 @@ mod tests { } #[test] - fn should_serialize_vec_of_causes() { + fn serialize_vec_of_causes_succeeds_when_multiple_causes() { let causes = vec![ ReplicateStatusCause::PreComputeDatasetUrlMissing(0), ReplicateStatusCause::PreComputeInvalidDatasetChecksum(1), @@ -184,7 +184,7 @@ mod tests { // region get_worker_api_client #[test] - fn should_get_worker_api_client_with_env_var() { + fn from_env_creates_client_with_custom_host_when_env_var_set() { with_vars( vec![(WorkerHostEnvVar.name(), Some("custom-worker-host:9999"))], || { @@ -195,7 +195,7 @@ mod tests { } #[test] - fn should_get_worker_api_client_without_env_var() { + fn from_env_creates_client_with_default_host_when_env_var_unset() { temp_env::with_vars_unset(vec![WorkerHostEnvVar.name()], || { let client = WorkerApiClient::from_env(); assert_eq!(client.base_url, format!("http://{DEFAULT_WORKER_HOST}")); @@ -208,7 +208,7 @@ mod tests { const CHAIN_TASK_ID: &str = "0x123456789abcdef"; #[tokio::test] - async fn should_send_exit_causes() { + async fn send_exit_causes_succeeds_when_api_returns_success() { let mock_server = MockServer::start().await; let server_url = mock_server.uri(); @@ -244,7 +244,7 @@ mod tests { } #[tokio::test] - async fn should_not_send_exit_causes() { + async fn send_exit_causes_fails_when_api_returns_error() { 
testing_logger::setup(); let mock_server = MockServer::start().await; let server_url = mock_server.uri(); @@ -289,7 +289,7 @@ mod tests { } #[test] - fn test_send_exit_causes_http_request_failure() { + fn send_exit_causes_fails_when_http_request_invalid() { testing_logger::setup(); let exit_causes = vec![ReplicateStatusCause::PreComputeFailedUnknownIssue]; let worker_api_client = WorkerApiClient::new("wrong_url"); diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index c7909a0..2db37bf 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ -65,7 +65,7 @@ mod tests { use serde_json::to_string; #[test] - fn test_serialize_dataset_error_with_index() { + fn serialize_produces_correct_json_when_error_has_dataset_index() { let cause = ReplicateStatusCause::PreComputeDatasetUrlMissing(2); let serialized = to_string(&cause).unwrap(); assert_eq!( @@ -75,7 +75,7 @@ mod tests { } #[test] - fn test_serialize_non_dataset_error() { + fn serialize_produces_correct_json_when_error_has_no_index() { let cause = ReplicateStatusCause::PreComputeInvalidTeeSignature; let serialized = to_string(&cause).unwrap(); assert_eq!( @@ -85,7 +85,7 @@ mod tests { } #[test] - fn test_serialize_all_dataset_errors() { + fn serialize_produces_correct_json_when_multiple_dataset_errors_with_indices() { let test_cases = vec![ ( ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing(1), @@ -116,7 +116,7 @@ mod tests { } #[test] - fn test_serialize_vec_of_errors() { + fn serialize_produces_correct_json_when_vector_of_multiple_errors() { let causes = vec![ ReplicateStatusCause::PreComputeDatasetUrlMissing(5), ReplicateStatusCause::PreComputeInvalidDatasetChecksum(99), From c05a9a33657ffb279e711843f5c2e06b05a99732 Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Fri, 17 Oct 2025 16:23:19 +0200 Subject: [PATCH 09/12] fix: revert changes on post-compute --- post-compute/src/api/worker_api.rs | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/post-compute/src/api/worker_api.rs b/post-compute/src/api/worker_api.rs index 0bf3d3e..ff3943d 100644 --- a/post-compute/src/api/worker_api.rs +++ b/post-compute/src/api/worker_api.rs @@ -143,7 +143,7 @@ impl WorkerApiClient { chain_task_id: &str, exit_cause: &ExitMessage, ) -> Result<(), ReplicateStatusCause> { - let url = format!("{}/compute/post/{chain_task_id}/exit-causes", self.base_url); + let url = format!("{}/compute/post/{chain_task_id}/exit", self.base_url); match self .client .post(&url) @@ -325,7 +325,7 @@ mod tests { }); Mock::given(method("POST")) - .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit-causes"))) + .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit"))) .and(header("Authorization", CHALLENGE)) .and(body_json(&expected_body)) .respond_with(ResponseTemplate::new(200)) @@ -360,7 +360,7 @@ mod tests { let server_url = mock_server.uri(); Mock::given(method("POST")) - .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit-causes"))) + .and(path(format!("/compute/post/{CHAIN_TASK_ID}/exit"))) .respond_with(ResponseTemplate::new(404)) .expect(1) .mount(&mock_server) From 70d4c7b045d89f320679030a2e6e87adbe841cad Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Mon, 20 Oct 2025 15:53:07 +0200 Subject: [PATCH 10/12] feat: use dataset address for improved error handling --- pre-compute/src/api/worker_api.rs | 14 ++-- pre-compute/src/compute/dataset.rs | 75 +++++++++++++-------- pre-compute/src/compute/errors.rs | 44 ++++++------ pre-compute/src/compute/pre_compute_app.rs | 8 +-- pre-compute/src/compute/pre_compute_args.rs | 55 +++++++++++---- pre-compute/src/compute/utils/env_utils.rs | 11 ++- 6 files changed, 133 insertions(+), 74 deletions(-) diff --git a/pre-compute/src/api/worker_api.rs b/pre-compute/src/api/worker_api.rs index eb80982..d6e75f3 100644 --- a/pre-compute/src/api/worker_api.rs +++ b/pre-compute/src/api/worker_api.rs @@ -154,12 +154,12 @@ mod tests { 
r#"{"cause":"PRE_COMPUTE_WORKER_ADDRESS_MISSING","message":"Worker address related environment variable is missing"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetUrlMissing(2), - r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 2"}"#, + ReplicateStatusCause::PreComputeDatasetUrlMissing("0xDataset2".to_string()), + r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDataset2"}"#, ), ( - ReplicateStatusCause::PreComputeInvalidDatasetChecksum(1), - r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 1"}"#, + ReplicateStatusCause::PreComputeInvalidDatasetChecksum("0xDataset1".to_string()), + r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDataset1"}"#, ), ]; @@ -172,12 +172,12 @@ mod tests { #[test] fn serialize_vec_of_causes_succeeds_when_multiple_causes() { let causes = vec![ - ReplicateStatusCause::PreComputeDatasetUrlMissing(0), - ReplicateStatusCause::PreComputeInvalidDatasetChecksum(1), + ReplicateStatusCause::PreComputeDatasetUrlMissing("0xDatasetA".to_string()), + ReplicateStatusCause::PreComputeInvalidDatasetChecksum("0xDatasetB".to_string()), ]; let serialized = to_string(&causes).expect("Failed to serialize"); - let expected = r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 1"}]"#; + let expected = r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDatasetA"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDatasetB"}]"#; assert_eq!(serialized, expected); } // endregion diff --git a/pre-compute/src/compute/dataset.rs 
b/pre-compute/src/compute/dataset.rs index 536ddf3..ca6aa10 100644 --- a/pre-compute/src/compute/dataset.rs +++ b/pre-compute/src/compute/dataset.rs @@ -27,6 +27,7 @@ const AES_IV_LENGTH: usize = 16; #[cfg_attr(test, derive(Debug))] #[derive(Clone, Default)] pub struct Dataset { + pub address: String, pub url: String, pub checksum: String, pub filename: String, @@ -34,8 +35,15 @@ pub struct Dataset { } impl Dataset { - pub fn new(url: String, checksum: String, filename: String, key: String) -> Self { + pub fn new( + address: String, + url: String, + checksum: String, + filename: String, + key: String, + ) -> Self { Dataset { + address, url, checksum, filename, @@ -48,7 +56,6 @@ impl Dataset { /// # Arguments /// /// * `chain_task_id` - The chain task ID for logging - /// * `index` - The dataset index for error reporting /// /// # Returns /// @@ -58,7 +65,6 @@ impl Dataset { pub fn download_encrypted_dataset( &self, chain_task_id: &str, - index: usize, ) -> Result, ReplicateStatusCause> { info!( "Downloading encrypted dataset file [chainTaskId:{chain_task_id}, url:{}]", @@ -81,7 +87,9 @@ impl Dataset { } else { download_from_url(&self.url) } - .ok_or(ReplicateStatusCause::PreComputeDatasetDownloadFailed(index))?; + .ok_or(ReplicateStatusCause::PreComputeDatasetDownloadFailed( + self.address.clone(), + ))?; info!("Checking encrypted dataset checksum [chainTaskId:{chain_task_id}]"); let actual_checksum = sha256_from_bytes(&encrypted_content); @@ -92,7 +100,7 @@ impl Dataset { self.checksum ); return Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum( - index, + self.address.clone(), )); } @@ -108,7 +116,6 @@ impl Dataset { /// # Arguments /// /// * `encrypted_content` - Full encrypted dataset, including the IV prefix. 
- /// * `index` - The dataset index for error reporting /// /// # Returns /// @@ -117,15 +124,14 @@ impl Dataset { pub fn decrypt_dataset( &self, encrypted_content: &[u8], - index: usize, ) -> Result, ReplicateStatusCause> { - let key = general_purpose::STANDARD - .decode(&self.key) - .map_err(|_| ReplicateStatusCause::PreComputeDatasetDecryptionFailed(index))?; + let key = general_purpose::STANDARD.decode(&self.key).map_err(|_| { + ReplicateStatusCause::PreComputeDatasetDecryptionFailed(self.address.clone()) + })?; if encrypted_content.len() < AES_IV_LENGTH || key.len() != AES_KEY_LENGTH { return Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed( - index, + self.address.clone(), )); } @@ -135,7 +141,9 @@ impl Dataset { Aes256CbcDec::new(key_slice.into(), iv_slice.into()) .decrypt_padded_vec_mut::(ciphertext) - .map_err(|_| ReplicateStatusCause::PreComputeDatasetDecryptionFailed(index)) + .map_err(|_| { + ReplicateStatusCause::PreComputeDatasetDecryptionFailed(self.address.clone()) + }) } } @@ -157,6 +165,7 @@ mod tests { fn get_test_dataset() -> Dataset { Dataset::new( + "0xDatasetAddress".to_string(), HTTP_DATASET_URL.to_string(), DATASET_CHECKSUM.to_string(), PLAIN_DATA_FILE.to_string(), @@ -168,7 +177,7 @@ mod tests { #[test] fn download_encrypted_dataset_success() { let dataset = get_test_dataset(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 0); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); assert!(actual_content.is_ok()); } @@ -176,10 +185,13 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_dataset_url() { let mut dataset = get_test_dataset(); dataset.url = "http://bad-url".to_string(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 10); + dataset.address = "0xbaddataset".to_string(); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); assert_eq!( actual_content, - Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed(10)) + 
Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed( + "0xbaddataset".to_string() + )) ); } @@ -189,7 +201,7 @@ mod tests { dataset.url = IPFS_DATASET_URL.to_string(); dataset.checksum = "0x323b1637c7999942fbebfe5d42fe15dbfe93737577663afa0181938d7ad4a2ac".to_string(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 0); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); let expected_content = Ok("hello world !\n".as_bytes().to_vec()); assert_eq!(actual_content, expected_content); } @@ -198,8 +210,11 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_gateway() { let mut dataset = get_test_dataset(); dataset.url = "/ipfs/INVALID_IPFS_DATASET_URL".to_string(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 1); - let expected_content = Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed(1)); + dataset.address = "0xinvalidgateway".to_string(); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); + let expected_content = Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed( + "0xinvalidgateway".to_string(), + )); assert_eq!(actual_content, expected_content); } @@ -207,8 +222,11 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_dataset_checksum() { let mut dataset = get_test_dataset(); dataset.checksum = "invalid_dataset_checksum".to_string(); - let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID, 9999); - let expected_content = Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum(9999)); + dataset.address = "0xinvalidchecksum".to_string(); + let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); + let expected_content = Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum( + "0xinvalidchecksum".to_string(), + )); assert_eq!(actual_content, expected_content); } // endregion @@ -218,11 +236,9 @@ mod tests { fn decrypt_dataset_success_with_valid_dataset() { let dataset = 
get_test_dataset(); - let encrypted_data = dataset - .download_encrypted_dataset(CHAIN_TASK_ID, 3) - .unwrap(); + let encrypted_data = dataset.download_encrypted_dataset(CHAIN_TASK_ID).unwrap(); let expected_plain_data = Ok("Some very useful data.".as_bytes().to_vec()); - let actual_plain_data = dataset.decrypt_dataset(&encrypted_data, 3); + let actual_plain_data = dataset.decrypt_dataset(&encrypted_data); assert_eq!(actual_plain_data, expected_plain_data); } @@ -231,14 +247,15 @@ mod tests { fn decrypt_dataset_failure_with_bad_key() { let mut dataset = get_test_dataset(); dataset.key = "bad_key".to_string(); - let encrypted_data = dataset - .download_encrypted_dataset(CHAIN_TASK_ID, 55) - .unwrap(); - let actual_plain_data = dataset.decrypt_dataset(&encrypted_data, 55); + dataset.address = "0xbadkey".to_string(); + let encrypted_data = dataset.download_encrypted_dataset(CHAIN_TASK_ID).unwrap(); + let actual_plain_data = dataset.decrypt_dataset(&encrypted_data); assert_eq!( actual_plain_data, - Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed(55)) + Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed( + "0xbadkey".to_string() + )) ); } // endregion diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index 2db37bf..702723f 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ -10,17 +10,17 @@ pub enum ReplicateStatusCause { #[error("input file URL {0} is missing")] PreComputeAtLeastOneInputFileUrlMissing(usize), #[error("Dataset checksum related environment variable is missing for dataset {0}")] - PreComputeDatasetChecksumMissing(usize), + PreComputeDatasetChecksumMissing(String), #[error("Failed to decrypt dataset {0}")] - PreComputeDatasetDecryptionFailed(usize), + PreComputeDatasetDecryptionFailed(String), #[error("Failed to download encrypted dataset file for dataset {0}")] - PreComputeDatasetDownloadFailed(usize), + PreComputeDatasetDownloadFailed(String), #[error("Dataset 
filename related environment variable is missing for dataset {0}")] - PreComputeDatasetFilenameMissing(usize), + PreComputeDatasetFilenameMissing(String), #[error("Dataset key related environment variable is missing for dataset {0}")] - PreComputeDatasetKeyMissing(usize), + PreComputeDatasetKeyMissing(String), #[error("Dataset URL related environment variable is missing for dataset {0}")] - PreComputeDatasetUrlMissing(usize), + PreComputeDatasetUrlMissing(String), #[error("Unexpected error occurred")] PreComputeFailedUnknownIssue, #[error("Invalid TEE signature")] @@ -32,7 +32,7 @@ pub enum ReplicateStatusCause { #[error("Input files number related environment variable is missing")] PreComputeInputFilesNumberMissing, #[error("Invalid dataset checksum for dataset {0}")] - PreComputeInvalidDatasetChecksum(usize), + PreComputeInvalidDatasetChecksum(String), #[error("Output folder related environment variable is missing")] PreComputeOutputFolderNotFound, #[error("Output path related environment variable is missing")] @@ -64,13 +64,15 @@ mod tests { use super::*; use serde_json::to_string; + const DATASET_ADDRESS: &str = "0xDatasetAddress"; + #[test] fn serialize_produces_correct_json_when_error_has_dataset_index() { - let cause = ReplicateStatusCause::PreComputeDatasetUrlMissing(2); + let cause = ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_ADDRESS.to_string()); let serialized = to_string(&cause).unwrap(); assert_eq!( serialized, - r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 2"}"# + r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDatasetAddress"}"# ); } @@ -92,20 +94,22 @@ mod tests { r#"{"cause":"PRE_COMPUTE_AT_LEAST_ONE_INPUT_FILE_URL_MISSING","message":"input file URL 1 is missing"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetChecksumMissing(3), - 
r#"{"cause":"PRE_COMPUTE_DATASET_CHECKSUM_MISSING","message":"Dataset checksum related environment variable is missing for dataset 3"}"#, + ReplicateStatusCause::PreComputeDatasetChecksumMissing(DATASET_ADDRESS.to_string()), + r#"{"cause":"PRE_COMPUTE_DATASET_CHECKSUM_MISSING","message":"Dataset checksum related environment variable is missing for dataset 0xDatasetAddress"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetDecryptionFailed(0), - r#"{"cause":"PRE_COMPUTE_DATASET_DECRYPTION_FAILED","message":"Failed to decrypt dataset 0"}"#, + ReplicateStatusCause::PreComputeDatasetDecryptionFailed( + DATASET_ADDRESS.to_string(), + ), + r#"{"cause":"PRE_COMPUTE_DATASET_DECRYPTION_FAILED","message":"Failed to decrypt dataset 0xDatasetAddress"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetDownloadFailed(5), - r#"{"cause":"PRE_COMPUTE_DATASET_DOWNLOAD_FAILED","message":"Failed to download encrypted dataset file for dataset 5"}"#, + ReplicateStatusCause::PreComputeDatasetDownloadFailed(DATASET_ADDRESS.to_string()), + r#"{"cause":"PRE_COMPUTE_DATASET_DOWNLOAD_FAILED","message":"Failed to download encrypted dataset file for dataset 0xDatasetAddress"}"#, ), ( - ReplicateStatusCause::PreComputeInvalidDatasetChecksum(2), - r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 2"}"#, + ReplicateStatusCause::PreComputeInvalidDatasetChecksum(DATASET_ADDRESS.to_string()), + r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDatasetAddress"}"#, ), ]; @@ -118,12 +122,12 @@ mod tests { #[test] fn serialize_produces_correct_json_when_vector_of_multiple_errors() { let causes = vec![ - ReplicateStatusCause::PreComputeDatasetUrlMissing(5), - ReplicateStatusCause::PreComputeInvalidDatasetChecksum(99), + ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeInvalidDatasetChecksum("0xAnotherDataset".to_string()), ]; let serialized = 
to_string(&causes).unwrap(); - let expected = r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 5"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 99"}]"#; + let expected = r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDatasetAddress"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xAnotherDataset"}]"#; assert_eq!(serialized, expected); } } diff --git a/pre-compute/src/compute/pre_compute_app.rs b/pre-compute/src/compute/pre_compute_app.rs index aefa42a..1fd2961 100644 --- a/pre-compute/src/compute/pre_compute_app.rs +++ b/pre-compute/src/compute/pre_compute_app.rs @@ -59,10 +59,9 @@ impl PreComputeAppTrait for PreComputeApp { // TODO: Collect all errors instead of propagating immediately, and return the list of errors self.pre_compute_args = PreComputeArgs::read_args()?; self.check_output_folder()?; - for (index, dataset) in self.pre_compute_args.datasets.iter().enumerate() { - let encrypted_content = - dataset.download_encrypted_dataset(&self.chain_task_id, index)?; - let plain_content = dataset.decrypt_dataset(&encrypted_content, index)?; + for dataset in self.pre_compute_args.datasets.iter() { + let encrypted_content = dataset.download_encrypted_dataset(&self.chain_task_id)?; + let plain_content = dataset.decrypt_dataset(&encrypted_content)?; self.save_plain_dataset_file(&plain_content, &dataset.filename)?; } self.download_input_files()?; @@ -190,6 +189,7 @@ mod tests { is_dataset_required: true, iexec_bulk_slice_size: 0, datasets: vec![Dataset { + address: "0xDatasetAddress".to_string(), url: HTTP_DATASET_URL.to_string(), checksum: DATASET_CHECKSUM.to_string(), filename: PLAIN_DATA_FILE.to_string(), diff --git a/pre-compute/src/compute/pre_compute_args.rs b/pre-compute/src/compute/pre_compute_args.rs index 
ca4f36d..84ccdf1 100644 --- a/pre-compute/src/compute/pre_compute_args.rs +++ b/pre-compute/src/compute/pre_compute_args.rs @@ -86,24 +86,28 @@ impl PreComputeArgs { // Read datasets let start_index = if is_dataset_required { 0 } else { 1 }; for i in start_index..=iexec_bulk_slice_size { + let address = get_env_var_or_error( + TeeSessionEnvironmentVariable::IexecDatasetAddress(i), + ReplicateStatusCause::PreComputeFailedUnknownIssue, // TODO: add specific error + )?; let url = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetUrl(i), - ReplicateStatusCause::PreComputeDatasetUrlMissing(i), + ReplicateStatusCause::PreComputeDatasetUrlMissing(address.clone()), )?; let checksum = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetChecksum(i), - ReplicateStatusCause::PreComputeDatasetChecksumMissing(i), + ReplicateStatusCause::PreComputeDatasetChecksumMissing(address.clone()), )?; let filename = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetFilename(i), - ReplicateStatusCause::PreComputeDatasetFilenameMissing(i), + ReplicateStatusCause::PreComputeDatasetFilenameMissing(address.clone()), )?; let key = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetKey(i), - ReplicateStatusCause::PreComputeDatasetKeyMissing(i), + ReplicateStatusCause::PreComputeDatasetKeyMissing(address.clone()), )?; - datasets.push(Dataset::new(url, checksum, filename, key)); + datasets.push(Dataset::new(address.clone(), url, checksum, filename, key)); } let input_files_nb_str = get_env_var_or_error( @@ -145,6 +149,7 @@ mod tests { const DATASET_KEY: &str = "datasetKey123"; const DATASET_CHECKSUM: &str = "0x123checksum"; const DATASET_FILENAME: &str = "dataset.txt"; + const DATASET_ADDRESS: &str = "0xDataset123Address"; fn setup_basic_env_vars() -> HashMap { let mut vars = HashMap::new(); @@ -157,6 +162,7 @@ mod tests { fn setup_dataset_env_vars() -> HashMap { let mut vars = HashMap::new(); + vars.insert(IexecDatasetAddress(0).name(), 
DATASET_ADDRESS.to_string()); vars.insert(IexecDatasetUrl(0).name(), DATASET_URL.to_string()); vars.insert(IexecDatasetKey(0).name(), DATASET_KEY.to_string()); vars.insert(IexecDatasetChecksum(0).name(), DATASET_CHECKSUM.to_string()); @@ -183,6 +189,10 @@ mod tests { vars.insert(IexecBulkSliceSize.name(), count.to_string()); for i in 1..=count { + vars.insert( + IexecDatasetAddress(i).name(), + format!("0xBulkDataset{i}Address"), + ); vars.insert( IexecDatasetUrl(i).name(), format!("https://bulk-dataset-{i}.bin"), @@ -237,6 +247,7 @@ mod tests { assert_eq!(args.output_dir, OUTPUT_DIR); assert!(args.is_dataset_required); + assert_eq!(args.datasets[0].address, DATASET_ADDRESS.to_string()); assert_eq!(args.datasets[0].url, DATASET_URL.to_string()); assert_eq!(args.datasets[0].key, DATASET_KEY.to_string()); assert_eq!(args.datasets[0].checksum, DATASET_CHECKSUM.to_string()); @@ -346,18 +357,21 @@ mod tests { assert_eq!(args.input_files.len(), 0); // Check first bulk dataset + assert_eq!(args.datasets[0].address, "0xBulkDataset1Address"); assert_eq!(args.datasets[0].url, "https://bulk-dataset-1.bin"); assert_eq!(args.datasets[0].checksum, "0x123checksum"); assert_eq!(args.datasets[0].filename, "bulk-dataset-1.txt"); assert_eq!(args.datasets[0].key, "bulkKey123"); // Check second bulk dataset + assert_eq!(args.datasets[1].address, "0xBulkDataset2Address"); assert_eq!(args.datasets[1].url, "https://bulk-dataset-2.bin"); assert_eq!(args.datasets[1].checksum, "0x223checksum"); assert_eq!(args.datasets[1].filename, "bulk-dataset-2.txt"); assert_eq!(args.datasets[1].key, "bulkKey223"); // Check third bulk dataset + assert_eq!(args.datasets[2].address, "0xBulkDataset3Address"); assert_eq!(args.datasets[2].url, "https://bulk-dataset-3.bin"); assert_eq!(args.datasets[2].checksum, "0x323checksum"); assert_eq!(args.datasets[2].filename, "bulk-dataset-3.txt"); @@ -385,13 +399,16 @@ mod tests { assert_eq!(args.input_files.len(), 0); // Check regular dataset (first in list) + 
assert_eq!(args.datasets[0].address, DATASET_ADDRESS); assert_eq!(args.datasets[0].url, DATASET_URL); assert_eq!(args.datasets[0].checksum, DATASET_CHECKSUM); assert_eq!(args.datasets[0].filename, DATASET_FILENAME); assert_eq!(args.datasets[0].key, DATASET_KEY); // Check bulk datasets + assert_eq!(args.datasets[1].address, "0xBulkDataset1Address"); assert_eq!(args.datasets[1].url, "https://bulk-dataset-1.bin"); + assert_eq!(args.datasets[2].address, "0xBulkDataset2Address"); assert_eq!(args.datasets[2].url, "https://bulk-dataset-2.bin"); }); } @@ -427,7 +444,9 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetUrlMissing(1) + ReplicateStatusCause::PreComputeDatasetUrlMissing( + "0xBulkDataset1Address".to_string() + ) ); }); } @@ -446,7 +465,9 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetChecksumMissing(2) + ReplicateStatusCause::PreComputeDatasetChecksumMissing( + "0xBulkDataset2Address".to_string() + ) ); }); } @@ -465,7 +486,9 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetFilenameMissing(2) + ReplicateStatusCause::PreComputeDatasetFilenameMissing( + "0xBulkDataset2Address".to_string() + ) ); }); } @@ -484,7 +507,9 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetKeyMissing(1) + ReplicateStatusCause::PreComputeDatasetKeyMissing( + "0xBulkDataset1Address".to_string() + ) ); }); } @@ -506,21 +531,25 @@ mod tests { IexecInputFilesNumber, ReplicateStatusCause::PreComputeInputFilesNumberMissing, ), + ( + IexecDatasetAddress(0), + ReplicateStatusCause::PreComputeFailedUnknownIssue, + ), ( IexecDatasetUrl(0), - ReplicateStatusCause::PreComputeDatasetUrlMissing(0), + ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_ADDRESS.to_string()), ), ( IexecDatasetKey(0), - 
ReplicateStatusCause::PreComputeDatasetKeyMissing(0), + ReplicateStatusCause::PreComputeDatasetKeyMissing(DATASET_ADDRESS.to_string()), ), ( IexecDatasetChecksum(0), - ReplicateStatusCause::PreComputeDatasetChecksumMissing(0), + ReplicateStatusCause::PreComputeDatasetChecksumMissing(DATASET_ADDRESS.to_string()), ), ( IexecDatasetFilename(0), - ReplicateStatusCause::PreComputeDatasetFilenameMissing(0), + ReplicateStatusCause::PreComputeDatasetFilenameMissing(DATASET_ADDRESS.to_string()), ), ( IexecInputFileUrlPrefix(1), diff --git a/pre-compute/src/compute/utils/env_utils.rs b/pre-compute/src/compute/utils/env_utils.rs index 470b50c..78b8352 100644 --- a/pre-compute/src/compute/utils/env_utils.rs +++ b/pre-compute/src/compute/utils/env_utils.rs @@ -2,6 +2,7 @@ use crate::compute::errors::ReplicateStatusCause; use std::env; pub enum TeeSessionEnvironmentVariable { + IexecDatasetAddress(usize), IexecBulkSliceSize, IexecDatasetChecksum(usize), IexecDatasetFilename(usize), @@ -20,6 +21,11 @@ pub enum TeeSessionEnvironmentVariable { impl TeeSessionEnvironmentVariable { pub fn name(&self) -> String { match self { + Self::IexecDatasetAddress(0) => "IEXEC_DATASET_ADDRESS".to_string(), + Self::IexecDatasetAddress(index) => { + format!("IEXEC_DATASET_{index}_ADDRESS") + } + Self::IexecBulkSliceSize => "IEXEC_BULK_SLICE_SIZE".to_string(), Self::IexecDatasetChecksum(0) => "IEXEC_DATASET_CHECKSUM".to_string(), @@ -71,6 +77,8 @@ mod tests { use super::*; use temp_env; + const DATASET_ADDRESS: &str = "0xDatasetAddress"; + #[test] fn name_succeeds_when_simple_environment_variable_names() { assert_eq!( @@ -202,7 +210,8 @@ mod tests { #[test] fn get_env_var_or_error_succeeds_when_indexed_variables() { let env_var = TeeSessionEnvironmentVariable::IexecDatasetChecksum(1); - let status_cause = ReplicateStatusCause::PreComputeDatasetChecksumMissing(1); + let status_cause = + ReplicateStatusCause::PreComputeDatasetChecksumMissing(DATASET_ADDRESS.to_string()); 
temp_env::with_var("IEXEC_DATASET_1_CHECKSUM", Some("abc123def456"), || { let result = get_env_var_or_error(env_var, status_cause.clone()); From bb1b672b7cd78b086ee98cb8fa8069e04ff2ec40 Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Tue, 21 Oct 2025 11:55:46 +0200 Subject: [PATCH 11/12] feat: use dataset file name for improved error handling --- pre-compute/src/api/worker_api.rs | 16 +++--- pre-compute/src/compute/dataset.rs | 35 ++++-------- pre-compute/src/compute/errors.rs | 20 ++++--- pre-compute/src/compute/pre_compute_app.rs | 1 - pre-compute/src/compute/pre_compute_args.rs | 59 ++++++--------------- pre-compute/src/compute/utils/env_utils.rs | 6 --- 6 files changed, 46 insertions(+), 91 deletions(-) diff --git a/pre-compute/src/api/worker_api.rs b/pre-compute/src/api/worker_api.rs index d6e75f3..838e332 100644 --- a/pre-compute/src/api/worker_api.rs +++ b/pre-compute/src/api/worker_api.rs @@ -154,12 +154,14 @@ mod tests { r#"{"cause":"PRE_COMPUTE_WORKER_ADDRESS_MISSING","message":"Worker address related environment variable is missing"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetUrlMissing("0xDataset2".to_string()), - r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDataset2"}"#, + ReplicateStatusCause::PreComputeDatasetUrlMissing("0xDatasetAddress1".to_string()), + r#"{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDatasetAddress1"}"#, ), ( - ReplicateStatusCause::PreComputeInvalidDatasetChecksum("0xDataset1".to_string()), - r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDataset1"}"#, + ReplicateStatusCause::PreComputeInvalidDatasetChecksum( + "0xDatasetAddress2".to_string(), + ), + r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDatasetAddress2"}"#, ), ]; @@ -172,12 +174,12 @@ #[test] fn 
serialize_vec_of_causes_succeeds_when_multiple_causes() { let causes = vec![ - ReplicateStatusCause::PreComputeDatasetUrlMissing("0xDatasetA".to_string()), - ReplicateStatusCause::PreComputeInvalidDatasetChecksum("0xDatasetB".to_string()), + ReplicateStatusCause::PreComputeDatasetUrlMissing("0xDatasetAddress".to_string()), + ReplicateStatusCause::PreComputeInvalidDatasetChecksum("0xDatasetAddress".to_string()), ]; let serialized = to_string(&causes).expect("Failed to serialize"); - let expected = r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDatasetA"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDatasetB"}]"#; + let expected = r#"[{"cause":"PRE_COMPUTE_DATASET_URL_MISSING","message":"Dataset URL related environment variable is missing for dataset 0xDatasetAddress"},{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDatasetAddress"}]"#; assert_eq!(serialized, expected); } // endregion diff --git a/pre-compute/src/compute/dataset.rs b/pre-compute/src/compute/dataset.rs index ca6aa10..f65543f 100644 --- a/pre-compute/src/compute/dataset.rs +++ b/pre-compute/src/compute/dataset.rs @@ -27,7 +27,6 @@ const AES_IV_LENGTH: usize = 16; #[cfg_attr(test, derive(Debug))] #[derive(Clone, Default)] pub struct Dataset { - pub address: String, pub url: String, pub checksum: String, pub filename: String, @@ -35,15 +34,8 @@ } impl Dataset { - pub fn new( - address: String, - url: String, - checksum: String, - filename: String, - key: String, - ) -> Self { + pub fn new(url: String, checksum: String, filename: String, key: String) -> Self { Dataset { - address, url, checksum, filename, @@ -88,7 +80,7 @@ download_from_url(&self.url) } .ok_or(ReplicateStatusCause::PreComputeDatasetDownloadFailed( - self.address.clone(), + self.filename.clone(), ))?; info!("Checking encrypted 
dataset checksum [chainTaskId:{chain_task_id}]"); @@ -100,7 +92,7 @@ impl Dataset { self.checksum ); return Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum( - self.address.clone(), + self.filename.clone(), )); } @@ -126,12 +118,12 @@ impl Dataset { encrypted_content: &[u8], ) -> Result, ReplicateStatusCause> { let key = general_purpose::STANDARD.decode(&self.key).map_err(|_| { - ReplicateStatusCause::PreComputeDatasetDecryptionFailed(self.address.clone()) + ReplicateStatusCause::PreComputeDatasetDecryptionFailed(self.filename.clone()) })?; if encrypted_content.len() < AES_IV_LENGTH || key.len() != AES_KEY_LENGTH { return Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed( - self.address.clone(), + self.filename.clone(), )); } @@ -142,7 +134,7 @@ impl Dataset { Aes256CbcDec::new(key_slice.into(), iv_slice.into()) .decrypt_padded_vec_mut::(ciphertext) .map_err(|_| { - ReplicateStatusCause::PreComputeDatasetDecryptionFailed(self.address.clone()) + ReplicateStatusCause::PreComputeDatasetDecryptionFailed(self.filename.clone()) }) } } @@ -160,12 +152,11 @@ mod tests { "0x02a12ef127dcfbdb294a090c8f0b69a0ca30b7940fc36cabf971f488efd374d7"; const ENCRYPTED_DATASET_KEY: &str = "ubA6H9emVPJT91/flYAmnKHC0phSV3cfuqsLxQfgow0="; const HTTP_DATASET_URL: &str = "https://raw.githubusercontent.com/iExecBlockchainComputing/tee-worker-pre-compute-rust/main/src/tests_resources/encrypted-data.bin"; - const PLAIN_DATA_FILE: &str = "plain-data.txt"; + const PLAIN_DATA_FILE: &str = "0xDatasetAddress"; const IPFS_DATASET_URL: &str = "/ipfs/QmUVhChbLFiuzNK1g2GsWyWEiad7SXPqARnWzGumgziwEp"; fn get_test_dataset() -> Dataset { Dataset::new( - "0xDatasetAddress".to_string(), HTTP_DATASET_URL.to_string(), DATASET_CHECKSUM.to_string(), PLAIN_DATA_FILE.to_string(), @@ -185,12 +176,11 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_dataset_url() { let mut dataset = get_test_dataset(); dataset.url = "http://bad-url".to_string(); - dataset.address = 
"0xbaddataset".to_string(); let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); assert_eq!( actual_content, Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed( - "0xbaddataset".to_string() + PLAIN_DATA_FILE.to_string() )) ); } @@ -210,10 +200,9 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_gateway() { let mut dataset = get_test_dataset(); dataset.url = "/ipfs/INVALID_IPFS_DATASET_URL".to_string(); - dataset.address = "0xinvalidgateway".to_string(); let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); let expected_content = Err(ReplicateStatusCause::PreComputeDatasetDownloadFailed( - "0xinvalidgateway".to_string(), + PLAIN_DATA_FILE.to_string(), )); assert_eq!(actual_content, expected_content); } @@ -222,10 +211,9 @@ mod tests { fn download_encrypted_dataset_failure_with_invalid_dataset_checksum() { let mut dataset = get_test_dataset(); dataset.checksum = "invalid_dataset_checksum".to_string(); - dataset.address = "0xinvalidchecksum".to_string(); let actual_content = dataset.download_encrypted_dataset(CHAIN_TASK_ID); let expected_content = Err(ReplicateStatusCause::PreComputeInvalidDatasetChecksum( - "0xinvalidchecksum".to_string(), + PLAIN_DATA_FILE.to_string(), )); assert_eq!(actual_content, expected_content); } @@ -247,14 +235,13 @@ mod tests { fn decrypt_dataset_failure_with_bad_key() { let mut dataset = get_test_dataset(); dataset.key = "bad_key".to_string(); - dataset.address = "0xbadkey".to_string(); let encrypted_data = dataset.download_encrypted_dataset(CHAIN_TASK_ID).unwrap(); let actual_plain_data = dataset.decrypt_dataset(&encrypted_data); assert_eq!( actual_plain_data, Err(ReplicateStatusCause::PreComputeDatasetDecryptionFailed( - "0xbadkey".to_string() + PLAIN_DATA_FILE.to_string() )) ); } diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index 702723f..c880f0f 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ 
-64,11 +64,11 @@ mod tests { use super::*; use serde_json::to_string; - const DATASET_ADDRESS: &str = "0xDatasetAddress"; + const DATASET_FILENAME: &str = "0xDatasetAddress"; #[test] - fn serialize_produces_correct_json_when_error_has_dataset_index() { - let cause = ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_ADDRESS.to_string()); + fn serialize_produces_correct_json_when_error_has_dataset_filename() { + let cause = ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_FILENAME.to_string()); let serialized = to_string(&cause).unwrap(); assert_eq!( serialized, @@ -87,28 +87,26 @@ mod tests { } #[test] - fn serialize_produces_correct_json_when_multiple_dataset_errors_with_indices() { + fn serialize_produces_correct_json_when_multiple_dataset_errors_with_filenames() { let test_cases = vec![ ( ReplicateStatusCause::PreComputeAtLeastOneInputFileUrlMissing(1), r#"{"cause":"PRE_COMPUTE_AT_LEAST_ONE_INPUT_FILE_URL_MISSING","message":"input file URL 1 is missing"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetChecksumMissing(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeDatasetChecksumMissing(DATASET_FILENAME.to_string()), r#"{"cause":"PRE_COMPUTE_DATASET_CHECKSUM_MISSING","message":"Dataset checksum related environment variable is missing for dataset 0xDatasetAddress"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetDecryptionFailed( - DATASET_ADDRESS.to_string(), - ), + ReplicateStatusCause::PreComputeDatasetDecryptionFailed(DATASET_FILENAME.to_string()), r#"{"cause":"PRE_COMPUTE_DATASET_DECRYPTION_FAILED","message":"Failed to decrypt dataset 0xDatasetAddress"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetDownloadFailed(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeDatasetDownloadFailed(DATASET_FILENAME.to_string()), r#"{"cause":"PRE_COMPUTE_DATASET_DOWNLOAD_FAILED","message":"Failed to download encrypted dataset file for dataset 0xDatasetAddress"}"#, ), ( - 
ReplicateStatusCause::PreComputeInvalidDatasetChecksum(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeInvalidDatasetChecksum(DATASET_FILENAME.to_string()), r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDatasetAddress"}"#, ), ]; @@ -122,7 +120,7 @@ mod tests { #[test] fn serialize_produces_correct_json_when_vector_of_multiple_errors() { let causes = vec![ - ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_FILENAME.to_string()), ReplicateStatusCause::PreComputeInvalidDatasetChecksum("0xAnotherDataset".to_string()), ]; diff --git a/pre-compute/src/compute/pre_compute_app.rs b/pre-compute/src/compute/pre_compute_app.rs index 1fd2961..825f3ef 100644 --- a/pre-compute/src/compute/pre_compute_app.rs +++ b/pre-compute/src/compute/pre_compute_app.rs @@ -189,7 +189,6 @@ mod tests { is_dataset_required: true, iexec_bulk_slice_size: 0, datasets: vec![Dataset { - address: "0xDatasetAddress".to_string(), url: HTTP_DATASET_URL.to_string(), checksum: DATASET_CHECKSUM.to_string(), filename: PLAIN_DATA_FILE.to_string(), diff --git a/pre-compute/src/compute/pre_compute_args.rs b/pre-compute/src/compute/pre_compute_args.rs index 84ccdf1..e230afb 100644 --- a/pre-compute/src/compute/pre_compute_args.rs +++ b/pre-compute/src/compute/pre_compute_args.rs @@ -86,28 +86,24 @@ impl PreComputeArgs { // Read datasets let start_index = if is_dataset_required { 0 } else { 1 }; for i in start_index..=iexec_bulk_slice_size { - let address = get_env_var_or_error( - TeeSessionEnvironmentVariable::IexecDatasetAddress(i), - ReplicateStatusCause::PreComputeFailedUnknownIssue, // TODO: add specific error + let filename = get_env_var_or_error( + TeeSessionEnvironmentVariable::IexecDatasetFilename(i), + ReplicateStatusCause::PreComputeDatasetFilenameMissing(format!("dataset_{i}")), )?; let url = get_env_var_or_error( 
TeeSessionEnvironmentVariable::IexecDatasetUrl(i), - ReplicateStatusCause::PreComputeDatasetUrlMissing(address.clone()), + ReplicateStatusCause::PreComputeDatasetUrlMissing(filename.clone()), )?; let checksum = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetChecksum(i), - ReplicateStatusCause::PreComputeDatasetChecksumMissing(address.clone()), - )?; - let filename = get_env_var_or_error( - TeeSessionEnvironmentVariable::IexecDatasetFilename(i), - ReplicateStatusCause::PreComputeDatasetFilenameMissing(address.clone()), + ReplicateStatusCause::PreComputeDatasetChecksumMissing(filename.clone()), )?; let key = get_env_var_or_error( TeeSessionEnvironmentVariable::IexecDatasetKey(i), - ReplicateStatusCause::PreComputeDatasetKeyMissing(address.clone()), + ReplicateStatusCause::PreComputeDatasetKeyMissing(filename.clone()), )?; - datasets.push(Dataset::new(address.clone(), url, checksum, filename, key)); + datasets.push(Dataset::new(url, checksum, filename, key)); } let input_files_nb_str = get_env_var_or_error( @@ -149,7 +145,6 @@ mod tests { const DATASET_KEY: &str = "datasetKey123"; const DATASET_CHECKSUM: &str = "0x123checksum"; const DATASET_FILENAME: &str = "dataset.txt"; - const DATASET_ADDRESS: &str = "0xDataset123Address"; fn setup_basic_env_vars() -> HashMap { let mut vars = HashMap::new(); @@ -162,7 +157,6 @@ mod tests { fn setup_dataset_env_vars() -> HashMap { let mut vars = HashMap::new(); - vars.insert(IexecDatasetAddress(0).name(), DATASET_ADDRESS.to_string()); vars.insert(IexecDatasetUrl(0).name(), DATASET_URL.to_string()); vars.insert(IexecDatasetKey(0).name(), DATASET_KEY.to_string()); vars.insert(IexecDatasetChecksum(0).name(), DATASET_CHECKSUM.to_string()); @@ -189,10 +183,6 @@ mod tests { vars.insert(IexecBulkSliceSize.name(), count.to_string()); for i in 1..=count { - vars.insert( - IexecDatasetAddress(i).name(), - format!("0xBulkDataset{i}Address"), - ); vars.insert( IexecDatasetUrl(i).name(), 
format!("https://bulk-dataset-{i}.bin"), @@ -247,7 +237,6 @@ mod tests { assert_eq!(args.output_dir, OUTPUT_DIR); assert!(args.is_dataset_required); - assert_eq!(args.datasets[0].address, DATASET_ADDRESS.to_string()); assert_eq!(args.datasets[0].url, DATASET_URL.to_string()); assert_eq!(args.datasets[0].key, DATASET_KEY.to_string()); assert_eq!(args.datasets[0].checksum, DATASET_CHECKSUM.to_string()); @@ -357,21 +346,18 @@ mod tests { assert_eq!(args.input_files.len(), 0); // Check first bulk dataset - assert_eq!(args.datasets[0].address, "0xBulkDataset1Address"); assert_eq!(args.datasets[0].url, "https://bulk-dataset-1.bin"); assert_eq!(args.datasets[0].checksum, "0x123checksum"); assert_eq!(args.datasets[0].filename, "bulk-dataset-1.txt"); assert_eq!(args.datasets[0].key, "bulkKey123"); // Check second bulk dataset - assert_eq!(args.datasets[1].address, "0xBulkDataset2Address"); assert_eq!(args.datasets[1].url, "https://bulk-dataset-2.bin"); assert_eq!(args.datasets[1].checksum, "0x223checksum"); assert_eq!(args.datasets[1].filename, "bulk-dataset-2.txt"); assert_eq!(args.datasets[1].key, "bulkKey223"); // Check third bulk dataset - assert_eq!(args.datasets[2].address, "0xBulkDataset3Address"); assert_eq!(args.datasets[2].url, "https://bulk-dataset-3.bin"); assert_eq!(args.datasets[2].checksum, "0x323checksum"); assert_eq!(args.datasets[2].filename, "bulk-dataset-3.txt"); @@ -399,16 +385,13 @@ mod tests { assert_eq!(args.input_files.len(), 0); // Check regular dataset (first in list) - assert_eq!(args.datasets[0].address, DATASET_ADDRESS); assert_eq!(args.datasets[0].url, DATASET_URL); assert_eq!(args.datasets[0].checksum, DATASET_CHECKSUM); assert_eq!(args.datasets[0].filename, DATASET_FILENAME); assert_eq!(args.datasets[0].key, DATASET_KEY); // Check bulk datasets - assert_eq!(args.datasets[1].address, "0xBulkDataset1Address"); assert_eq!(args.datasets[1].url, "https://bulk-dataset-1.bin"); - assert_eq!(args.datasets[2].address, "0xBulkDataset2Address"); 
assert_eq!(args.datasets[2].url, "https://bulk-dataset-2.bin"); }); } @@ -444,9 +427,7 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetUrlMissing( - "0xBulkDataset1Address".to_string() - ) + ReplicateStatusCause::PreComputeDatasetUrlMissing("bulk-dataset-1.txt".to_string()) ); }); } @@ -466,7 +447,7 @@ mod tests { assert_eq!( result.unwrap_err(), ReplicateStatusCause::PreComputeDatasetChecksumMissing( - "0xBulkDataset2Address".to_string() + "bulk-dataset-2.txt".to_string() ) ); }); @@ -486,9 +467,7 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetFilenameMissing( - "0xBulkDataset2Address".to_string() - ) + ReplicateStatusCause::PreComputeDatasetFilenameMissing("dataset_2".to_string()) ); }); } @@ -507,9 +486,7 @@ mod tests { assert!(result.is_err()); assert_eq!( result.unwrap_err(), - ReplicateStatusCause::PreComputeDatasetKeyMissing( - "0xBulkDataset1Address".to_string() - ) + ReplicateStatusCause::PreComputeDatasetKeyMissing("bulk-dataset-1.txt".to_string()) ); }); } @@ -531,25 +508,23 @@ mod tests { IexecInputFilesNumber, ReplicateStatusCause::PreComputeInputFilesNumberMissing, ), - ( - IexecDatasetAddress(0), - ReplicateStatusCause::PreComputeFailedUnknownIssue, - ), ( IexecDatasetUrl(0), - ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeDatasetUrlMissing(DATASET_FILENAME.to_string()), ), ( IexecDatasetKey(0), - ReplicateStatusCause::PreComputeDatasetKeyMissing(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeDatasetKeyMissing(DATASET_FILENAME.to_string()), ), ( IexecDatasetChecksum(0), - ReplicateStatusCause::PreComputeDatasetChecksumMissing(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeDatasetChecksumMissing( + DATASET_FILENAME.to_string(), + ), ), ( IexecDatasetFilename(0), - 
ReplicateStatusCause::PreComputeDatasetFilenameMissing(DATASET_ADDRESS.to_string()), + ReplicateStatusCause::PreComputeDatasetFilenameMissing("dataset_0".to_string()), ), ( IexecInputFileUrlPrefix(1), diff --git a/pre-compute/src/compute/utils/env_utils.rs b/pre-compute/src/compute/utils/env_utils.rs index 78b8352..72598d5 100644 --- a/pre-compute/src/compute/utils/env_utils.rs +++ b/pre-compute/src/compute/utils/env_utils.rs @@ -2,7 +2,6 @@ use crate::compute::errors::ReplicateStatusCause; use std::env; pub enum TeeSessionEnvironmentVariable { - IexecDatasetAddress(usize), IexecBulkSliceSize, IexecDatasetChecksum(usize), IexecDatasetFilename(usize), @@ -21,11 +20,6 @@ pub enum TeeSessionEnvironmentVariable { impl TeeSessionEnvironmentVariable { pub fn name(&self) -> String { match self { - Self::IexecDatasetAddress(0) => "IEXEC_DATASET_ADDRESS".to_string(), - Self::IexecDatasetAddress(index) => { - format!("IEXEC_DATASET_{index}_ADDRESS") - } - Self::IexecBulkSliceSize => "IEXEC_BULK_SLICE_SIZE".to_string(), Self::IexecDatasetChecksum(0) => "IEXEC_DATASET_CHECKSUM".to_string(), From 0f9d086e1f153ffea47943e22f4f13973fcd2752 Mon Sep 17 00:00:00 2001 From: nabil-Tounarti Date: Tue, 21 Oct 2025 11:58:06 +0200 Subject: [PATCH 12/12] style: format the code --- pre-compute/src/compute/errors.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/pre-compute/src/compute/errors.rs b/pre-compute/src/compute/errors.rs index c880f0f..f9981de 100644 --- a/pre-compute/src/compute/errors.rs +++ b/pre-compute/src/compute/errors.rs @@ -94,11 +94,15 @@ mod tests { r#"{"cause":"PRE_COMPUTE_AT_LEAST_ONE_INPUT_FILE_URL_MISSING","message":"input file URL 1 is missing"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetChecksumMissing(DATASET_FILENAME.to_string()), + ReplicateStatusCause::PreComputeDatasetChecksumMissing( + DATASET_FILENAME.to_string(), + ), r#"{"cause":"PRE_COMPUTE_DATASET_CHECKSUM_MISSING","message":"Dataset checksum related environment 
variable is missing for dataset 0xDatasetAddress"}"#, ), ( - ReplicateStatusCause::PreComputeDatasetDecryptionFailed(DATASET_FILENAME.to_string()), + ReplicateStatusCause::PreComputeDatasetDecryptionFailed( + DATASET_FILENAME.to_string(), + ), r#"{"cause":"PRE_COMPUTE_DATASET_DECRYPTION_FAILED","message":"Failed to decrypt dataset 0xDatasetAddress"}"#, ), ( @@ -106,7 +110,9 @@ mod tests { r#"{"cause":"PRE_COMPUTE_DATASET_DOWNLOAD_FAILED","message":"Failed to download encrypted dataset file for dataset 0xDatasetAddress"}"#, ), ( - ReplicateStatusCause::PreComputeInvalidDatasetChecksum(DATASET_FILENAME.to_string()), + ReplicateStatusCause::PreComputeInvalidDatasetChecksum( + DATASET_FILENAME.to_string(), + ), r#"{"cause":"PRE_COMPUTE_INVALID_DATASET_CHECKSUM","message":"Invalid dataset checksum for dataset 0xDatasetAddress"}"#, ), ];