From 1ece5b1ef1b1ddd2ca5fcb172230e44b97f25c44 Mon Sep 17 00:00:00 2001
From: Christopher Patton
Date: Tue, 28 Nov 2023 15:54:31 -0800
Subject: [PATCH 1/3] taskprov: Clean up and update draft references

---
 README.md                             | 10 ++++++++--
 daphne/src/error/aborts.rs            |  3 +--
 daphne/src/lib.rs                     |  4 ++--
 daphne/src/messages/taskprov.rs       |  3 +--
 daphne/src/roles/helper.rs            |  2 +-
 daphne/src/taskprov.rs                |  5 +++--
 daphne_worker/src/config.rs           |  6 +++---
 daphne_worker/src/roles/aggregator.rs |  6 ------
 8 files changed, 19 insertions(+), 20 deletions(-)

diff --git a/README.md b/README.md
index e59b3f520..2b8f14214 100644
--- a/README.md
+++ b/README.md
@@ -4,8 +4,14 @@ Daphne is a Rust implementation of the Distributed Aggregation Protocol
 ([DAP](https://datatracker.ietf.org/doc/draft-ietf-ppm-dap/)) standard. DAP is
 under active development in the PPM working group of the IETF.

-Daphne currently implements draft-ietf-ppm-dap-02. The next draft,
-draft-ietf-ppm-dap-03, is a work-in-progress.
+Daphne currently implements:
+
+* draft-ietf-ppm-dap-02
+  * VDAF: draft-irtf-cfrg-vdaf-03
+  * Taskprov extension: draft-wang-ppm-dap-taskprov-02
+* draft-ietf-ppm-dap-09
+  * VDAF: draft-irtf-cfrg-vdaf-08
+  * Taskprov extension: draft-wang-ppm-dap-taskprov-06

 This software is intended to support experimental DAP deployments and is not
 yet suitable for use in production. Daphne will evolve along with the DAP
 draft:
diff --git a/daphne/src/error/aborts.rs b/daphne/src/error/aborts.rs
index 83b3f6dee..c5421a75d 100644
--- a/daphne/src/error/aborts.rs
+++ b/daphne/src/error/aborts.rs
@@ -40,8 +40,7 @@ pub enum DapAbort {
     #[error("invalidBatchSize")]
     InvalidBatchSize { detail: String, task_id: TaskId },

-    /// draft-wang-ppm-dap-taskprov-02: Invalid DAP task. Sent when a server opts out of a
-    /// taskprov task configuration.
+    /// taskprov: Invalid DAP task. Sent when a server opts out of a taskprov task configuration.
     #[error("invalidTask")]
     InvalidTask { detail: String, task_id: TaskId },

diff --git a/daphne/src/lib.rs b/daphne/src/lib.rs
index fa10d09a8..bb3151c77 100644
--- a/daphne/src/lib.rs
+++ b/daphne/src/lib.rs
@@ -170,7 +170,7 @@ pub struct DapGlobalConfig {
     /// receiver config.
     pub supported_hpke_kems: Vec<HpkeKemId>,

-    /// Indicates if the taskprov extension is enabled.
+    /// draft-wang-ppm-dap-taskprov: Indicates if the taskprov extension is enabled.
     #[serde(default)]
     pub allow_taskprov: bool,
 }
@@ -408,7 +408,7 @@ impl Extend<(DapBatchBucket, (ReportId, Time))> for DapAggregateSpan<()> {
 #[derive(Clone, Default, Deserialize, Serialize)]
 #[cfg_attr(test, derive(PartialEq, Debug))]
 pub enum DapTaskConfigMethod {
-    /// draft-wang-ppm-dap-taskprov-06
+    /// draft-wang-ppm-dap-taskprov
     Taskprov {
         /// `TaskConfig.task_info`. If not set, then the task info is unknown.
         info: Option<Vec<u8>>,
diff --git a/daphne/src/messages/taskprov.rs b/daphne/src/messages/taskprov.rs
index 19f264c88..6e66ff4a9 100644
--- a/daphne/src/messages/taskprov.rs
+++ b/daphne/src/messages/taskprov.rs
@@ -1,8 +1,7 @@
 // Copyright (c) 2022 Cloudflare, Inc. All rights reserved.
 // SPDX-License-Identifier: BSD-3-Clause

-//! Messages in the taskprov extension to the DAP protocol, as
-//! defined in draft-wang-ppm-dap-taskprov-02.
+//! draft-wang-ppm-dap-taskprov: Messages for the taskprov extension to DAP.
use crate::messages::{ decode_u16_bytes, decode_u16_item_for_version, encode_u16_bytes, encode_u16_item_for_version, diff --git a/daphne/src/roles/helper.rs b/daphne/src/roles/helper.rs index aa15db236..02aebee9d 100644 --- a/daphne/src/roles/helper.rs +++ b/daphne/src/roles/helper.rs @@ -60,7 +60,7 @@ pub trait DapHelper: DapAggregator { // taskprov: Resolve the task config to use for the request. if self.get_global_config().allow_taskprov { // draft02 compatibility: We also need to ensure that all of the reports include the task - // config in the report extensions. (See section 6 of draft-wang-ppm-dap-taskprov-02.) + // config in the report extensions. (See draft-wang-ppm-dap-taskprov-02, Section 6.) let first_metadata = if req.version == DapVersion::default() { let using_taskprov = agg_job_init_req .prep_inits diff --git a/daphne/src/taskprov.rs b/daphne/src/taskprov.rs index 28f29ab1e..a86fced7a 100644 --- a/daphne/src/taskprov.rs +++ b/daphne/src/taskprov.rs @@ -1,8 +1,9 @@ // Copyright (c) 2022 Cloudflare, Inc. All rights reserved. // SPDX-License-Identifier: BSD-3-Clause -//! Functions for implementing the taskprov extension. The extension's behavior depends on the -//! version of DAP, i.e., each version of taskprov implies a version of DAP. +//! draft-wang-ppm-dap-taskprov: Functions for implementing the taskprov extension. The extension's +//! behavior depends on the version of DAP, i.e., each version of taskprov implies a version of +//! DAP. use crate::{ fatal_error, diff --git a/daphne_worker/src/config.rs b/daphne_worker/src/config.rs index e2c48e13a..660fb3524 100644 --- a/daphne_worker/src/config.rs +++ b/daphne_worker/src/config.rs @@ -59,7 +59,7 @@ const DAP_BASE_URL: &str = "DAP_BASE_URL"; const INT_ERR_PEER_ABORT: &str = "request aborted by peer"; const INT_ERR_PEER_RESP_MISSING_MEDIA_TYPE: &str = "peer response is missing media type"; -/// Long-lived parameters for tasks using draft-wang-ppm-dap-taskprov-02 ("taskprov"). +/// draft-wang-ppm-dap-taskprov: Long-lived parameters for the taskprov extension. pub(crate) struct TaskprovConfig { /// HPKE collector configuration for all taskprov tasks. pub(crate) hpke_collector_config: HpkeConfig, @@ -110,8 +110,8 @@ pub(crate) struct DaphneWorkerConfig { /// this field is used for endpoint configuration for interop testing. base_url: Option, - /// Optional: draft-wang-ppm-dap-taskprov-02 configuration. If not configured, then taskprov - /// will be disabled. + /// draft-wang-ppm-dap-taskprov: Long-lived parameters for the taskprov extension. If not set, + /// then taskprov will be disabled. pub(crate) taskprov: Option, /// Default DAP version to use if not specified by the API URL diff --git a/daphne_worker/src/roles/aggregator.rs b/daphne_worker/src/roles/aggregator.rs index f566a9d9a..2b680908c 100644 --- a/daphne_worker/src/roles/aggregator.rs +++ b/daphne_worker/src/roles/aggregator.rs @@ -271,12 +271,6 @@ impl<'srv> DapAggregator for DaphneWorker<'srv> { .as_ref() .ok_or_else(|| fatal_error!(err = "taskprov configuration not found"))?; - // If `resolve_advertised_task_config()` returned a `TaskConfig` and `req.taskprov` is set, - // then the task was advertised in the HTTP "dap-taskprov" header. In this case we expect - // the peer to send the header in every request for this task. - // - // NOTE(cjpatton) This behavior is not specified in taskprov-02, but we expect it to be - // mandatory in a future draft. 
             if !self.config().is_leader && req.taskprov.is_some() {
                 // Store the task config in Worker memory, but don't write it through to KV.
                 let mut guarded_tasks = self

From dbd0d4fde7fa4e55a610d863618c69c2d0c2421a Mon Sep 17 00:00:00 2001
From: Christopher Patton
Date: Tue, 28 Nov 2023 16:20:28 -0800
Subject: [PATCH 2/3] taskprov: Use version to resolve VDAF config

In the future we want to prevent any VDAF from being used in draft02 except
`Prio2`. (This is to avoid confusing incompatible versions of standardized
VDAFs: draft-wang-ppm-dap-taskprov-02 implies draft-irtf-cfrg-vdaf-03; and
draft-wang-ppm-dap-taskprov-06 implies draft-irtf-cfrg-vdaf-08.) In preparation
for this, pass the DAP version to `VdafConfig::try_from_taskprov()`.
---
 daphne/src/taskprov.rs | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/daphne/src/taskprov.rs b/daphne/src/taskprov.rs
index a86fced7a..8692045f5 100644
--- a/daphne/src/taskprov.rs
+++ b/daphne/src/taskprov.rs
@@ -218,15 +218,19 @@ impl DapQueryConfig {
 }

 impl VdafConfig {
-    fn try_from_taskprov(task_id: &TaskId, var: VdafTypeVar) -> Result {
-        match var {
-            VdafTypeVar::Prio2 { dimension } => Ok(VdafConfig::Prio2 {
+    fn try_from_taskprov(
+        task_id: &TaskId,
+        version: DapVersion,
+        var: VdafTypeVar,
+    ) -> Result {
+        match (version, var) {
+            (.., VdafTypeVar::Prio2 { dimension }) => Ok(VdafConfig::Prio2 {
                 dimension: dimension.try_into().map_err(|_| DapAbort::InvalidTask {
                     detail: "dimension is larger than the system's word size".to_string(),
                     task_id: *task_id,
                 })?,
             }),
-            VdafTypeVar::NotImplemented { typ, .. } => Err(DapAbort::InvalidTask {
+            (.., VdafTypeVar::NotImplemented { typ, .. }) => Err(DapAbort::InvalidTask {
                 detail: format!("unimplemented VDAF type ({typ})"),
                 task_id: *task_id,
             }),
@@ -264,7 +268,7 @@ impl DapTaskConfig {
             });
         }

-        let vdaf = VdafConfig::try_from_taskprov(task_id, task_config.vdaf_config.var)?;
+        let vdaf = VdafConfig::try_from_taskprov(task_id, version, task_config.vdaf_config.var)?;
         let vdaf_verify_key =
             compute_vdaf_verify_key(version, vdaf_verify_key_init, task_id, &vdaf);

         Ok(DapTaskConfig {

From 4efc4b87aa132023830b2ad090df8a2977631c64 Mon Sep 17 00:00:00 2001
From: Christopher Patton
Date: Tue, 28 Nov 2023 16:44:04 -0800
Subject: [PATCH 3/3] Change draft07 -> draft09 and update domain separation tag

---
 daphne/src/constants.rs                      | 79 ++++++++--------
 daphne/src/lib.rs                            | 40 ++++----
 daphne/src/messages/mod.rs                   | 97 ++++++++++----------
 daphne/src/messages/taskprov.rs              | 36 ++++----
 daphne/src/roles/aggregator.rs               |  2 +-
 daphne/src/roles/helper.rs                   |  8 +-
 daphne/src/roles/leader.rs                   | 12 +--
 daphne/src/roles/mod.rs                      | 12 +--
 daphne/src/taskprov.rs                       |  4 +-
 daphne/src/testing.rs                        | 10 +-
 daphne/src/vdaf/mod.rs                       | 69 +++++++-------
 daphne_worker/src/config.rs                  |  2 +-
 daphne_worker/src/durable/mod.rs             |  2 +-
 daphne_worker/src/durable/reports_pending.rs |  2 +-
 daphne_worker_test/tests/e2e/e2e.rs          | 17 ++--
 daphne_worker_test/tests/e2e/test_runner.rs  | 12 +--
 daphne_worker_test/wrangler.toml             |  4 +-
 docker/wrangler.toml                         |  4 +-
 18 files changed, 204 insertions(+), 208 deletions(-)

diff --git a/daphne/src/constants.rs b/daphne/src/constants.rs
index 2d8bc9747..5e24262b5 100644
--- a/daphne/src/constants.rs
+++ b/daphne/src/constants.rs
@@ -68,31 +68,31 @@ impl DapMediaType {
     pub fn from_str_for_version(version: DapVersion, content_type: Option<&str>) -> Self {
         match (version, content_type) {
             (DapVersion::Draft02, Some(DRAFT02_MEDIA_TYPE_AGG_CONT_REQ))
-            | (DapVersion::Draft07, Some(MEDIA_TYPE_AGG_JOB_CONT_REQ)) => {
+            |
(DapVersion::Latest, Some(MEDIA_TYPE_AGG_JOB_CONT_REQ)) => { Self::AggregationJobContinueReq } (DapVersion::Draft02, Some(DRAFT02_MEDIA_TYPE_AGG_CONT_RESP)) => { Self::Draft02AggregateContinueResp } (DapVersion::Draft02, Some(DRAFT02_MEDIA_TYPE_AGG_INIT_REQ)) - | (DapVersion::Draft07, Some(MEDIA_TYPE_AGG_JOB_INIT_REQ)) => { + | (DapVersion::Latest, Some(MEDIA_TYPE_AGG_JOB_INIT_REQ)) => { Self::AggregationJobInitReq } (DapVersion::Draft02, Some(DRAFT02_MEDIA_TYPE_AGG_INIT_RESP)) - | (DapVersion::Draft07, Some(MEDIA_TYPE_AGG_JOB_RESP)) => Self::AggregationJobResp, + | (DapVersion::Latest, Some(MEDIA_TYPE_AGG_JOB_RESP)) => Self::AggregationJobResp, (DapVersion::Draft02, Some(DRAFT02_MEDIA_TYPE_AGG_SHARE_RESP)) - | (DapVersion::Draft07, Some(MEDIA_TYPE_AGG_SHARE)) => Self::AggregateShare, + | (DapVersion::Latest, Some(MEDIA_TYPE_AGG_SHARE)) => Self::AggregateShare, (DapVersion::Draft02, Some(DRAFT02_MEDIA_TYPE_COLLECT_RESP)) - | (DapVersion::Draft07, Some(MEDIA_TYPE_COLLECTION)) => Self::Collection, + | (DapVersion::Latest, Some(MEDIA_TYPE_COLLECTION)) => Self::Collection, (DapVersion::Draft02, Some(DRAFT02_MEDIA_TYPE_HPKE_CONFIG)) - | (DapVersion::Draft07, Some(MEDIA_TYPE_HPKE_CONFIG_LIST)) => Self::HpkeConfigList, - (DapVersion::Draft02 | DapVersion::Draft07, Some(MEDIA_TYPE_AGG_SHARE_REQ)) => { + | (DapVersion::Latest, Some(MEDIA_TYPE_HPKE_CONFIG_LIST)) => Self::HpkeConfigList, + (DapVersion::Draft02 | DapVersion::Latest, Some(MEDIA_TYPE_AGG_SHARE_REQ)) => { Self::AggregateShareReq } - (DapVersion::Draft02 | DapVersion::Draft07, Some(MEDIA_TYPE_COLLECT_REQ)) => { + (DapVersion::Draft02 | DapVersion::Latest, Some(MEDIA_TYPE_COLLECT_REQ)) => { Self::CollectReq } - (DapVersion::Draft02 | DapVersion::Draft07, Some(MEDIA_TYPE_REPORT)) => Self::Report, + (DapVersion::Draft02 | DapVersion::Latest, Some(MEDIA_TYPE_REPORT)) => Self::Report, (_, Some(content_type)) => Self::Invalid(content_type.to_string()), (_, None) => Self::Missing, } @@ -104,33 +104,33 @@ impl DapMediaType { (DapVersion::Draft02, Self::AggregationJobInitReq) => { Some(DRAFT02_MEDIA_TYPE_AGG_INIT_REQ) } - (DapVersion::Draft07, Self::AggregationJobInitReq) => Some(MEDIA_TYPE_AGG_JOB_INIT_REQ), + (DapVersion::Latest, Self::AggregationJobInitReq) => Some(MEDIA_TYPE_AGG_JOB_INIT_REQ), (DapVersion::Draft02, Self::AggregationJobResp) => { Some(DRAFT02_MEDIA_TYPE_AGG_INIT_RESP) } - (DapVersion::Draft07, Self::AggregationJobResp) => Some(MEDIA_TYPE_AGG_JOB_RESP), + (DapVersion::Latest, Self::AggregationJobResp) => Some(MEDIA_TYPE_AGG_JOB_RESP), (DapVersion::Draft02, Self::AggregationJobContinueReq) => { Some(DRAFT02_MEDIA_TYPE_AGG_CONT_REQ) } - (DapVersion::Draft07, Self::AggregationJobContinueReq) => { + (DapVersion::Latest, Self::AggregationJobContinueReq) => { Some(MEDIA_TYPE_AGG_JOB_CONT_REQ) } (DapVersion::Draft02, Self::Draft02AggregateContinueResp) => { Some(DRAFT02_MEDIA_TYPE_AGG_CONT_RESP) } - (DapVersion::Draft02 | DapVersion::Draft07, Self::AggregateShareReq) => { + (DapVersion::Draft02 | DapVersion::Latest, Self::AggregateShareReq) => { Some(MEDIA_TYPE_AGG_SHARE_REQ) } (DapVersion::Draft02, Self::AggregateShare) => Some(DRAFT02_MEDIA_TYPE_AGG_SHARE_RESP), - (DapVersion::Draft07, Self::AggregateShare) => Some(MEDIA_TYPE_AGG_SHARE), - (DapVersion::Draft02 | DapVersion::Draft07, Self::CollectReq) => { + (DapVersion::Latest, Self::AggregateShare) => Some(MEDIA_TYPE_AGG_SHARE), + (DapVersion::Draft02 | DapVersion::Latest, Self::CollectReq) => { Some(MEDIA_TYPE_COLLECT_REQ) } (DapVersion::Draft02, Self::Collection) => 
Some(DRAFT02_MEDIA_TYPE_COLLECT_RESP), - (DapVersion::Draft07, Self::Collection) => Some(MEDIA_TYPE_COLLECTION), + (DapVersion::Latest, Self::Collection) => Some(MEDIA_TYPE_COLLECTION), (DapVersion::Draft02, Self::HpkeConfigList) => Some(DRAFT02_MEDIA_TYPE_HPKE_CONFIG), - (DapVersion::Draft07, Self::HpkeConfigList) => Some(MEDIA_TYPE_HPKE_CONFIG_LIST), - (DapVersion::Draft02 | DapVersion::Draft07, Self::Report) => Some(MEDIA_TYPE_REPORT), + (DapVersion::Latest, Self::HpkeConfigList) => Some(MEDIA_TYPE_HPKE_CONFIG_LIST), + (DapVersion::Draft02 | DapVersion::Latest, Self::Report) => Some(MEDIA_TYPE_REPORT), (_, Self::Draft02AggregateContinueResp | Self::Missing) => None, (_, Self::Invalid(ref content_type)) => Some(content_type), } @@ -141,7 +141,7 @@ impl DapMediaType { pub(crate) fn agg_job_cont_resp_for_version(version: DapVersion) -> Self { match version { DapVersion::Draft02 => Self::Draft02AggregateContinueResp, - DapVersion::Draft07 => Self::AggregationJobResp, + DapVersion::Latest => Self::AggregationJobResp, } } } @@ -218,59 +218,58 @@ mod test { DapMediaType::Collection, ); - // draft07, Section 8.1 assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-hpke-config-list") ), DapMediaType::HpkeConfigList ); assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-aggregation-job-init-req") ), DapMediaType::AggregationJobInitReq, ); assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-aggregation-job-resp") ), DapMediaType::AggregationJobResp, ); assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-aggregation-job-continue-req") ), DapMediaType::AggregationJobContinueReq, ); assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-aggregate-share-req") ), DapMediaType::AggregateShareReq, ); assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-aggregate-share") ), DapMediaType::AggregateShare, ); assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-collect-req") ), DapMediaType::CollectReq, ); assert_eq!( DapMediaType::from_str_for_version( - DapVersion::Draft07, + DapVersion::Latest, Some("application/dap-collection") ), DapMediaType::Collection, @@ -278,13 +277,13 @@ mod test { // Invalid media type assert_eq!( - DapMediaType::from_str_for_version(DapVersion::Draft07, Some("blah-blah-blah")), + DapMediaType::from_str_for_version(DapVersion::Latest, Some("blah-blah-blah")), DapMediaType::Invalid("blah-blah-blah".into()), ); // Missing media type assert_eq!( - DapMediaType::from_str_for_version(DapVersion::Draft07, None), + DapMediaType::from_str_for_version(DapVersion::Latest, None), DapMediaType::Missing, ); } @@ -293,27 +292,27 @@ mod test { fn round_trip() { for (version, media_type) in [ (DapVersion::Draft02, DapMediaType::AggregationJobInitReq), - (DapVersion::Draft07, DapMediaType::AggregationJobInitReq), + (DapVersion::Latest, DapMediaType::AggregationJobInitReq), (DapVersion::Draft02, DapMediaType::AggregationJobResp), - (DapVersion::Draft07, DapMediaType::AggregationJobResp), + (DapVersion::Latest, DapMediaType::AggregationJobResp), (DapVersion::Draft02, DapMediaType::AggregationJobContinueReq), - (DapVersion::Draft07, 
DapMediaType::AggregationJobContinueReq), + (DapVersion::Latest, DapMediaType::AggregationJobContinueReq), ( DapVersion::Draft02, DapMediaType::Draft02AggregateContinueResp, ), (DapVersion::Draft02, DapMediaType::AggregateShareReq), - (DapVersion::Draft07, DapMediaType::AggregateShareReq), + (DapVersion::Latest, DapMediaType::AggregateShareReq), (DapVersion::Draft02, DapMediaType::AggregateShare), - (DapVersion::Draft07, DapMediaType::AggregateShare), + (DapVersion::Latest, DapMediaType::AggregateShare), (DapVersion::Draft02, DapMediaType::CollectReq), - (DapVersion::Draft07, DapMediaType::CollectReq), + (DapVersion::Latest, DapMediaType::CollectReq), (DapVersion::Draft02, DapMediaType::Collection), - (DapVersion::Draft07, DapMediaType::Collection), + (DapVersion::Latest, DapMediaType::Collection), (DapVersion::Draft02, DapMediaType::HpkeConfigList), - (DapVersion::Draft07, DapMediaType::HpkeConfigList), + (DapVersion::Latest, DapMediaType::HpkeConfigList), (DapVersion::Draft02, DapMediaType::Report), - (DapVersion::Draft07, DapMediaType::Report), + (DapVersion::Latest, DapMediaType::Report), ] { assert_eq!( DapMediaType::from_str_for_version(version, media_type.as_str_for_version(version)), @@ -334,7 +333,7 @@ mod test { assert_eq!( DapMediaType::AggregationJobResp, - DapMediaType::agg_job_cont_resp_for_version(DapVersion::Draft07) + DapMediaType::agg_job_cont_resp_for_version(DapVersion::Latest) ); } } diff --git a/daphne/src/lib.rs b/daphne/src/lib.rs index bb3151c77..1915a5957 100644 --- a/daphne/src/lib.rs +++ b/daphne/src/lib.rs @@ -24,9 +24,13 @@ //! PPM working group of the IETF. See [`VdafConfig`] for a listing of supported //! [VDAFs](https://github.com/cfrg/draft-irtf-cfrg-vdaf). //! -//! Daphne implements draft-ietf-ppm-dap-02 and draft-ietf-ppm-dap-07. -//! -//! **WARNING:** draft07 is a work-in-progress. We are not yet compatible. +//! Daphne implements: +//! * draft-ietf-ppm-dap-02 +//! * VDAF: draft-irtf-cfrg-vdaf-03 +//! * Taskprov extension: draft-wang-ppm-dap-taskprov-02 +//! * draft-ietf-ppm-dap-09 +//! * VDAF: draft-irtf-cfrg-vdaf-08 +//! * Taskprov extension: draft-wang-ppm-dap-taskprov-06 //! //! Daphne does not provide the complete, end-to-end functionality of any party in the protocol. //! Instead, it defines traits for the functionalities that a concrete instantiation of the @@ -102,9 +106,9 @@ pub enum DapVersion { #[serde(rename = "v02")] Draft02, - #[serde(rename = "v07")] + #[serde(rename = "v09")] #[default] - Draft07, + Latest, } impl FromStr for DapVersion { @@ -112,7 +116,7 @@ impl FromStr for DapVersion { fn from_str(version: &str) -> Result { match version { "v02" => Ok(DapVersion::Draft02), - "v07" => Ok(DapVersion::Draft07), + "v09" => Ok(DapVersion::Latest), _ => Err(DapAbort::version_unknown()), } } @@ -122,7 +126,7 @@ impl AsRef for DapVersion { fn as_ref(&self) -> &str { match self { DapVersion::Draft02 => "v02", - DapVersion::Draft07 => "v07", + DapVersion::Latest => "v09", } } } @@ -492,7 +496,7 @@ impl DapTaskParameters { .unwrap(); let (taskprov_advertisement, taskprov_report_extension_payload) = match self.version { - DapVersion::Draft07 => (Some(encode_base64url(&encoded_taskprov_config)), Vec::new()), + DapVersion::Latest => (Some(encode_base64url(&encoded_taskprov_config)), Vec::new()), // draft02 compatibility: The taskprov config is advertised in an HTTP header in // the latest draft. In draft02, it is carried by a report extension. 
DapVersion::Draft02 => (None, encoded_taskprov_config), @@ -1120,7 +1124,7 @@ pub struct DapRequest { impl Default for DapRequest { fn default() -> Self { Self { - version: DapVersion::Draft07, + version: DapVersion::Latest, media_type: Default::default(), task_id: Default::default(), resource: Default::default(), @@ -1208,13 +1212,13 @@ pub struct DapLeaderProcessTelemetry { } /// draft02 compatibility: A logical aggregation job ID. In the latest draft, this is a 32-byte -/// string included in the HTTP request payload; in draft07, this is a 16-byte string included in -/// the HTTP request path. This type unifies these into one type so that any protocol logic that -/// is agnostic to these details can use the same object. +/// string included in the HTTP request payload; in the latest draft, this is a 16-byte string +/// included in the HTTP request path. This type unifies these into one type so that any protocol +/// logic that is agnostic to these details can use the same object. #[derive(Clone, Debug)] pub enum MetaAggregationJobId { Draft02(Draft02AggregationJobId), - Draft07(AggregationJobId), + Latest(AggregationJobId), } impl MetaAggregationJobId { @@ -1223,7 +1227,7 @@ impl MetaAggregationJobId { let mut rng = thread_rng(); match version { DapVersion::Draft02 => Self::Draft02(Draft02AggregationJobId(rng.gen())), - DapVersion::Draft07 => Self::Draft07(AggregationJobId(rng.gen())), + DapVersion::Latest => Self::Latest(AggregationJobId(rng.gen())), } } @@ -1232,7 +1236,7 @@ impl MetaAggregationJobId { pub(crate) fn for_request_payload(&self) -> Option { match self { Self::Draft02(agg_job_id) => Some(*agg_job_id), - Self::Draft07(..) => None, + Self::Latest(..) => None, } } @@ -1242,7 +1246,7 @@ impl MetaAggregationJobId { match self { // In draft02, the aggregation job ID is not determined until the payload is parsed. Self::Draft02(..) => DapResource::Undefined, - Self::Draft07(agg_job_id) => DapResource::AggregationJob(*agg_job_id), + Self::Latest(agg_job_id) => DapResource::AggregationJob(*agg_job_id), } } @@ -1250,7 +1254,7 @@ impl MetaAggregationJobId { pub fn to_hex(&self) -> String { match self { Self::Draft02(agg_job_id) => agg_job_id.to_hex(), - Self::Draft07(agg_job_id) => agg_job_id.to_hex(), + Self::Latest(agg_job_id) => agg_job_id.to_hex(), } } @@ -1258,7 +1262,7 @@ impl MetaAggregationJobId { pub fn to_base64url(&self) -> String { match self { Self::Draft02(agg_job_id) => agg_job_id.to_base64url(), - Self::Draft07(agg_job_id) => agg_job_id.to_base64url(), + Self::Latest(agg_job_id) => agg_job_id.to_base64url(), } } } diff --git a/daphne/src/messages/mod.rs b/daphne/src/messages/mod.rs index 21c1b5187..37ccdf1c8 100644 --- a/daphne/src/messages/mod.rs +++ b/daphne/src/messages/mod.rs @@ -105,11 +105,11 @@ id_struct!(TaskId, 32, "Task ID"); impl TaskId { /// draft02 compatibility: Convert the task ID to the field that would be added to the DAP /// request for the given version. In draft02, the task ID is generally included in the HTTP - /// request payload; in draft07, the task ID is included in the HTTP request path. + /// request payload; in the latest draft, the task ID is included in the HTTP request path. 
pub fn for_request_payload(&self, version: &DapVersion) -> Option { match version { DapVersion::Draft02 => Some(*self), - DapVersion::Draft07 => None, + DapVersion::Latest => None, } } } @@ -155,7 +155,7 @@ impl ParameterizedEncode for Extension { Self::Taskprov { draft02_payload } => { EXTENSION_TASKPROV.encode(bytes); match (version, draft02_payload) { - (DapVersion::Draft07, None) => encode_u16_item(bytes, *version, &()), + (DapVersion::Latest, None) => encode_u16_item(bytes, *version, &()), (DapVersion::Draft02, Some(payload)) => encode_u16_bytes(bytes, payload), _ => unreachable!("unhandled version {version:?}"), } @@ -175,7 +175,7 @@ impl ParameterizedDecode for Extension { ) -> Result { let typ = u16::decode(bytes)?; match (version, typ) { - (DapVersion::Draft07, EXTENSION_TASKPROV) => { + (DapVersion::Latest, EXTENSION_TASKPROV) => { decode_u16_item::<()>(*version, bytes)?; Ok(Self::Taskprov { draft02_payload: None, @@ -208,7 +208,7 @@ impl ParameterizedEncode for ReportMetadata { self.id.encode(bytes); self.time.encode(bytes); match (version, &self.draft02_extensions) { - (DapVersion::Draft07, None) => (), + (DapVersion::Latest, None) => (), (DapVersion::Draft02, Some(extensions)) => encode_u16_items(bytes, version, extensions), _ => unreachable!("extensions should be set in (and only in) draft02"), } @@ -225,7 +225,7 @@ impl ParameterizedDecode for ReportMetadata { time: Time::decode(bytes)?, draft02_extensions: match version { DapVersion::Draft02 => Some(decode_u16_items(version, bytes)?), - DapVersion::Draft07 => None, + DapVersion::Latest => None, }, }; @@ -256,7 +256,7 @@ impl ParameterizedEncode for Report { encode_u32_bytes(bytes, &self.public_share); match version { DapVersion::Draft02 => encode_u32_items(bytes, &(), &self.encrypted_input_shares), - DapVersion::Draft07 => { + DapVersion::Latest => { self.encrypted_input_shares[0].encode(bytes); self.encrypted_input_shares[1].encode(bytes); } @@ -282,7 +282,7 @@ impl ParameterizedDecode for Report { DapVersion::Draft02 => decode_u32_items(&(), bytes)? 
.try_into() .map_err(|_| CodecError::UnexpectedValue)?, - DapVersion::Draft07 => [ + DapVersion::Latest => [ HpkeCiphertext::decode(bytes)?, HpkeCiphertext::decode(bytes)?, ], @@ -446,15 +446,15 @@ impl TryFrom for BatchSelector { #[derive(Clone, Debug, PartialEq, Eq)] pub struct PrepareInit { pub report_share: ReportShare, - pub draft07_payload: Option>, + pub draft09_payload: Option>, } impl ParameterizedEncode for PrepareInit { fn encode_with_param(&self, version: &DapVersion, bytes: &mut Vec) { self.report_share.encode_with_param(version, bytes); - match (version, &self.draft07_payload) { + match (version, &self.draft09_payload) { (DapVersion::Draft02, None) => (), - (DapVersion::Draft07, Some(payload)) => { + (DapVersion::Latest, Some(payload)) => { encode_u32_bytes(bytes, payload); } _ => unreachable!("unhandled version {version:?}"), @@ -468,14 +468,14 @@ impl ParameterizedDecode for PrepareInit { bytes: &mut Cursor<&[u8]>, ) -> Result { let report_share = ReportShare::decode_with_param(version, bytes)?; - let draft07_payload = match version { + let draft09_payload = match version { DapVersion::Draft02 => None, - DapVersion::Draft07 => Some(decode_u32_bytes(bytes)?), + DapVersion::Latest => Some(decode_u32_bytes(bytes)?), }; Ok(Self { report_share, - draft07_payload, + draft09_payload, }) } } @@ -504,7 +504,7 @@ impl ParameterizedEncode for AggregationJobInitReq { .encode(bytes); encode_u16_bytes(bytes, &self.agg_param); } - DapVersion::Draft07 => encode_u32_bytes(bytes, &self.agg_param), + DapVersion::Latest => encode_u32_bytes(bytes, &self.agg_param), }; self.part_batch_sel.encode(bytes); encode_u32_items(bytes, version, &self.prep_inits); @@ -522,7 +522,7 @@ impl ParameterizedDecode for AggregationJobInitReq { Some(Draft02AggregationJobId::decode(bytes)?), decode_u16_bytes(bytes)?, ), - DapVersion::Draft07 => (None, None, decode_u32_bytes(bytes)?), + DapVersion::Latest => (None, None, decode_u32_bytes(bytes)?), }; Ok(Self { @@ -557,11 +557,8 @@ impl ParameterizedEncode for AggregationJobContinueReq { .expect("draft02: missing aggregation job ID") .encode(bytes); } - DapVersion::Draft07 => { - self.round - .as_ref() - .expect("draft07: missing round") - .encode(bytes); + DapVersion::Latest => { + self.round.as_ref().expect("missing round").encode(bytes); } }; encode_u32_items(bytes, &(), &self.transitions); @@ -579,7 +576,7 @@ impl ParameterizedDecode for AggregationJobContinueReq { Some(Draft02AggregationJobId::decode(bytes)?), None, ), - DapVersion::Draft07 => (None, None, Some(u16::decode(bytes)?)), + DapVersion::Latest => (None, None, Some(u16::decode(bytes)?)), }; Ok(Self { draft02_task_id, @@ -592,8 +589,6 @@ impl ParameterizedDecode for AggregationJobContinueReq { /// Transition message. This conveyes a message sent from one Aggregator to another during the /// preparation phase of VDAF evaluation. -// -// TODO Consider renaming this to `PrepareStep` to align with draft07. 
#[derive(Clone, Debug, PartialEq, Eq)] #[cfg_attr(any(test, feature = "test-utils"), derive(deepsize::DeepSizeOf))] pub struct Transition { @@ -867,12 +862,12 @@ impl ParameterizedEncode for CollectionReq { .expect("draft02: missing task ID") .encode(bytes); } - DapVersion::Draft07 => {} + DapVersion::Latest => {} } self.query.encode_with_param(version, bytes); match version { DapVersion::Draft02 => encode_u16_bytes(bytes, &self.agg_param), - DapVersion::Draft07 => encode_u32_bytes(bytes, &self.agg_param), + DapVersion::Latest => encode_u32_bytes(bytes, &self.agg_param), }; } } @@ -884,14 +879,14 @@ impl ParameterizedDecode for CollectionReq { ) -> Result { let draft02_task_id = match version { DapVersion::Draft02 => Some(TaskId::decode(bytes)?), - DapVersion::Draft07 => None, + DapVersion::Latest => None, }; Ok(Self { draft02_task_id, query: Query::decode_with_param(version, bytes)?, agg_param: match version { DapVersion::Draft02 => decode_u16_bytes(bytes)?, - DapVersion::Draft07 => decode_u32_bytes(bytes)?, + DapVersion::Latest => decode_u32_bytes(bytes)?, }, }) } @@ -905,7 +900,7 @@ impl ParameterizedDecode for CollectionReq { pub struct Collection { pub part_batch_sel: PartialBatchSelector, pub report_count: u64, - pub draft07_interval: Option, // Not set in draft02 + pub draft09_interval: Option, // Not set in draft02 pub encrypted_agg_shares: [HpkeCiphertext; 2], } @@ -913,9 +908,9 @@ impl ParameterizedEncode for Collection { fn encode_with_param(&self, version: &DapVersion, bytes: &mut Vec) { self.part_batch_sel.encode(bytes); self.report_count.encode(bytes); - match (version, &self.draft07_interval) { + match (version, &self.draft09_interval) { (DapVersion::Draft02, None) => encode_u32_items(bytes, &(), &self.encrypted_agg_shares), - (DapVersion::Draft07, Some(interval)) => { + (DapVersion::Latest, Some(interval)) => { interval.encode(bytes); self.encrypted_agg_shares[0].encode(bytes); self.encrypted_agg_shares[1].encode(bytes); @@ -932,14 +927,14 @@ impl ParameterizedDecode for Collection { ) -> Result { let part_batch_sel = PartialBatchSelector::decode(bytes)?; let report_count = u64::decode(bytes)?; - let (draft07_interval, encrypted_agg_shares) = match version { + let (draft09_interval, encrypted_agg_shares) = match version { DapVersion::Draft02 => ( None, decode_u32_items(&(), bytes)? 
.try_into() .map_err(|_| CodecError::UnexpectedValue)?, ), - DapVersion::Draft07 => ( + DapVersion::Latest => ( Some(Interval::decode(bytes)?), [ HpkeCiphertext::decode(bytes)?, @@ -951,7 +946,7 @@ impl ParameterizedDecode for Collection { Ok(Self { part_batch_sel, report_count, - draft07_interval, + draft09_interval, encrypted_agg_shares, }) } @@ -980,7 +975,7 @@ impl ParameterizedEncode for AggregateShareReq { self.batch_sel.encode_with_param(version, bytes); encode_u16_bytes(bytes, &self.agg_param); } - DapVersion::Draft07 => { + DapVersion::Latest => { self.batch_sel.encode_with_param(version, bytes); encode_u32_bytes(bytes, &self.agg_param); } @@ -1001,7 +996,7 @@ impl ParameterizedDecode for AggregateShareReq { BatchSelector::decode_with_param(version, bytes)?, decode_u16_bytes(bytes)?, ), - DapVersion::Draft07 => ( + DapVersion::Latest => ( None, BatchSelector::decode_with_param(version, bytes)?, decode_u32_bytes(bytes)?, @@ -1290,7 +1285,7 @@ fn encode_u16_item_for_version>( item: &E, ) { match version { - DapVersion::Draft07 => encode_u16_item(bytes, version, item), + DapVersion::Latest => encode_u16_item(bytes, version, item), DapVersion::Draft02 => item.encode_with_param(&version, bytes), } } @@ -1300,7 +1295,7 @@ fn decode_u16_item_for_version>( bytes: &mut Cursor<&[u8]>, ) -> Result { match version { - DapVersion::Draft07 => decode_u16_item(version, bytes), + DapVersion::Latest => decode_u16_item(version, bytes), DapVersion::Draft02 => D::decode_with_param(&version, bytes), } } @@ -1330,7 +1325,7 @@ mod test { time: 1_637_364_244, draft02_extensions: match version { DapVersion::Draft02 => Some(Vec::new()), - DapVersion::Draft07 => None, + DapVersion::Latest => None, }, }, public_share: b"public share".to_vec(), @@ -1400,7 +1395,7 @@ mod test { payload: b"ciphertext".to_vec(), }, }, - draft07_payload: None, + draft09_payload: None, }, PrepareInit { report_share: ReportShare { @@ -1416,7 +1411,7 @@ mod test { payload: b"ciphertext".to_vec(), }, }, - draft07_payload: None, + draft09_payload: None, }, ], }, @@ -1447,7 +1442,7 @@ mod test { payload: b"ciphertext".to_vec(), }, }, - draft07_payload: None, + draft09_payload: None, }, PrepareInit { report_share: ReportShare { @@ -1463,7 +1458,7 @@ mod test { payload: b"ciphertext".to_vec(), }, }, - draft07_payload: None, + draft09_payload: None, }, ], }; @@ -1497,7 +1492,7 @@ mod test { payload: b"ciphertext".to_vec(), }, }, - draft07_payload: Some(b"prep share".to_vec()), + draft09_payload: Some(b"prep share".to_vec()), }, PrepareInit { report_share: ReportShare { @@ -1513,14 +1508,14 @@ mod test { payload: b"ciphertext".to_vec(), }, }, - draft07_payload: Some(b"prep share".to_vec()), + draft09_payload: Some(b"prep share".to_vec()), }, ], }; let got = AggregationJobInitReq::get_decoded_with_param( - &DapVersion::Draft07, - &want.get_encoded_with_param(&DapVersion::Draft07), + &DapVersion::Latest, + &want.get_encoded_with_param(&DapVersion::Latest), ) .unwrap(); assert_eq!(got, want); @@ -1584,8 +1579,8 @@ mod test { }; let got = AggregationJobContinueReq::get_decoded_with_param( - &DapVersion::Draft07, - &want.get_encoded_with_param(&DapVersion::Draft07), + &DapVersion::Latest, + &want.get_encoded_with_param(&DapVersion::Latest), ) .unwrap(); assert_eq!(got, want); @@ -1657,8 +1652,8 @@ mod test { checksum: [0; 32], }; let got = AggregateShareReq::get_decoded_with_param( - &DapVersion::Draft07, - &want.get_encoded_with_param(&DapVersion::Draft07), + &DapVersion::Latest, + &want.get_encoded_with_param(&DapVersion::Latest), ) .unwrap(); 
assert_eq!(got, want); diff --git a/daphne/src/messages/taskprov.rs b/daphne/src/messages/taskprov.rs index 6e66ff4a9..a52ca9b5d 100644 --- a/daphne/src/messages/taskprov.rs +++ b/daphne/src/messages/taskprov.rs @@ -38,7 +38,7 @@ impl ParameterizedEncode for VdafTypeVar { Self::NotImplemented { typ, param } => { typ.encode(bytes); match version { - DapVersion::Draft07 => encode_u16_bytes(bytes, param), + DapVersion::Latest => encode_u16_bytes(bytes, param), DapVersion::Draft02 => bytes.extend_from_slice(param), } } @@ -56,7 +56,7 @@ impl ParameterizedDecode for VdafTypeVar { (.., VDAF_TYPE_PRIO2) => Ok(Self::Prio2 { dimension: decode_u16_item_for_version(*version, bytes)?, }), - (DapVersion::Draft07, ..) => Ok(Self::NotImplemented { + (DapVersion::Latest, ..) => Ok(Self::NotImplemented { typ: vdaf_type, param: decode_u16_bytes(bytes)?, }), @@ -85,7 +85,7 @@ impl ParameterizedEncode for DpConfig { Self::NotImplemented { typ, param } => { typ.encode(bytes); match version { - DapVersion::Draft07 => encode_u16_bytes(bytes, param), + DapVersion::Latest => encode_u16_bytes(bytes, param), DapVersion::Draft02 => bytes.extend_from_slice(param), } } @@ -104,7 +104,7 @@ impl ParameterizedDecode for DpConfig { decode_u16_item_for_version::<()>(*version, bytes)?; Ok(Self::None) } - (DapVersion::Draft07, ..) => Ok(Self::NotImplemented { + (DapVersion::Latest, ..) => Ok(Self::NotImplemented { typ: dp_mechanism, param: decode_u16_bytes(bytes)?, }), @@ -216,7 +216,7 @@ impl ParameterizedEncode for QueryConfig { QueryConfigVar::NotImplemented { typ, param } => { typ.encode(bytes); match version { - DapVersion::Draft07 => encode_u16_bytes(bytes, param), + DapVersion::Latest => encode_u16_bytes(bytes, param), DapVersion::Draft02 => bytes.extend_from_slice(param), } } @@ -230,7 +230,7 @@ impl ParameterizedDecode for QueryConfig { bytes: &mut Cursor<&[u8]>, ) -> Result { let query_type = match version { - DapVersion::Draft07 => None, + DapVersion::Latest => None, DapVersion::Draft02 => Some(u8::decode(bytes)?), }; let time_precision = Duration::decode(bytes)?; @@ -245,7 +245,7 @@ impl ParameterizedDecode for QueryConfig { (.., QUERY_TYPE_FIXED_SIZE) => QueryConfigVar::FixedSize { max_batch_size: decode_u16_item_for_version(*version, bytes)?, }, - (DapVersion::Draft07, ..) => QueryConfigVar::NotImplemented { + (DapVersion::Latest, ..) => QueryConfigVar::NotImplemented { typ: query_type, param: decode_u16_bytes(bytes)?, }, @@ -283,7 +283,7 @@ impl ParameterizedEncode for TaskConfig { &(), &[self.leader_url.clone(), self.helper_url.clone()], ), - DapVersion::Draft07 => { + DapVersion::Latest => { self.leader_url.encode(bytes); self.helper_url.encode(bytes); } @@ -304,7 +304,7 @@ impl ParameterizedDecode for TaskConfig { DapVersion::Draft02 => decode_u16_items(&(), bytes)? .try_into() .map_err(|_| CodecError::UnexpectedValue)?, // Expect exactly two Aggregator endpoints. - DapVersion::Draft07 => [UrlBytes::decode(bytes)?, UrlBytes::decode(bytes)?], + DapVersion::Latest => [UrlBytes::decode(bytes)?, UrlBytes::decode(bytes)?], }; Ok(TaskConfig { @@ -361,7 +361,7 @@ mod tests { test_versions! 
{ roundtrip_query_config } #[test] - fn roundtrip_query_config_not_implemented_draft07() { + fn roundtrip_query_config_not_implemented_draft09() { let query_config = QueryConfig { time_precision: 12_345_678, max_batch_query_count: 1337, @@ -373,8 +373,8 @@ mod tests { }; assert_eq!( QueryConfig::get_decoded_with_param( - &DapVersion::Draft07, - &query_config.get_encoded_with_param(&DapVersion::Draft07) + &DapVersion::Latest, + &query_config.get_encoded_with_param(&DapVersion::Latest) ) .unwrap(), query_config @@ -413,15 +413,15 @@ mod tests { test_versions! { roundtrip_dp_config } #[test] - fn roundtrip_dp_config_not_implemented_draft07() { + fn roundtrip_dp_config_not_implemented_draft09() { let dp_config = DpConfig::NotImplemented { typ: 0, param: b"dp mechanism param".to_vec(), }; assert_eq!( DpConfig::get_decoded_with_param( - &DapVersion::Draft07, - &dp_config.get_encoded_with_param(&DapVersion::Draft07) + &DapVersion::Latest, + &dp_config.get_encoded_with_param(&DapVersion::Latest) ) .unwrap(), dp_config @@ -461,7 +461,7 @@ mod tests { test_versions! { roundtrip_vdaf_config } #[test] - fn roundtrip_vdaf_config_not_implemented_draft07() { + fn roundtrip_vdaf_config_not_implemented_draft09() { let vdaf_config = VdafConfig { dp_config: DpConfig::None, var: VdafTypeVar::NotImplemented { @@ -472,8 +472,8 @@ mod tests { assert_eq!( VdafConfig::get_decoded_with_param( - &DapVersion::Draft07, - &vdaf_config.get_encoded_with_param(&DapVersion::Draft07) + &DapVersion::Latest, + &vdaf_config.get_encoded_with_param(&DapVersion::Latest) ) .unwrap(), vdaf_config diff --git a/daphne/src/roles/aggregator.rs b/daphne/src/roles/aggregator.rs index 4c6f1a0ae..745953d26 100644 --- a/daphne/src/roles/aggregator.rs +++ b/daphne/src/roles/aggregator.rs @@ -177,7 +177,7 @@ pub trait DapAggregator: HpkeDecrypter + DapReportInitializer + Sized { let payload = match req.version { DapVersion::Draft02 => hpke_config.as_ref().get_encoded(), - DapVersion::Draft07 => { + DapVersion::Latest => { let hpke_config_list = HpkeConfigList { hpke_configs: vec![hpke_config.as_ref().clone()], }; diff --git a/daphne/src/roles/helper.rs b/daphne/src/roles/helper.rs index 02aebee9d..042687a4f 100644 --- a/daphne/src/roles/helper.rs +++ b/daphne/src/roles/helper.rs @@ -164,7 +164,7 @@ pub trait DapHelper: DapAggregator { agg_job_resp } - DapVersion::Draft07 => { + DapVersion::Latest => { let agg_job_resp = finish_agg_job_and_aggregate( self, task_id, @@ -458,10 +458,10 @@ fn resolve_agg_job_id<'id, S>( (DapVersion::Draft02, DapResource::Undefined, Some(agg_job_id)) => { Ok(MetaAggregationJobId::Draft02(*agg_job_id)) } - (DapVersion::Draft07, DapResource::AggregationJob(agg_job_id), None) => { - Ok(MetaAggregationJobId::Draft07(*agg_job_id)) + (DapVersion::Latest, DapResource::AggregationJob(agg_job_id), None) => { + Ok(MetaAggregationJobId::Latest(*agg_job_id)) } - (DapVersion::Draft07, DapResource::Undefined, None) => { + (DapVersion::Latest, DapResource::Undefined, None) => { Err(DapAbort::BadRequest("undefined resource".into())) } _ => unreachable!("unhandled resource {:?}", req.resource), diff --git a/daphne/src/roles/leader.rs b/daphne/src/roles/leader.rs index 1cb5e9530..37054a24d 100644 --- a/daphne/src/roles/leader.rs +++ b/daphne/src/roles/leader.rs @@ -283,10 +283,10 @@ pub trait DapLeader: DapAuthorizedSender + DapAggregator { // from the request path. 
let collect_job_id = match (req.version, &req.resource) { (DapVersion::Draft02, DapResource::Undefined) => None, - (DapVersion::Draft07, DapResource::CollectionJob(ref collect_job_id)) => { + (DapVersion::Latest, DapResource::CollectionJob(ref collect_job_id)) => { Some(*collect_job_id) } - (DapVersion::Draft07, DapResource::Undefined) => { + (DapVersion::Latest, DapResource::Undefined) => { return Err(DapAbort::BadRequest("undefined resource".into()).into()); } _ => unreachable!("unhandled resource {:?}", req.resource), @@ -531,11 +531,11 @@ pub trait DapLeader: DapAuthorizedSender + DapAggregator { .await?; let agg_share_resp = AggregateShare::get_decoded(&resp.payload) .map_err(|e| DapAbort::from_codec_error(e, *task_id))?; - // For draft07 and later, the Collection message includes the smallest quantized time + // In the latest draft, the Collection message includes the smallest quantized time // interval containing all reports in the batch. - let draft07_interval = match task_config.version { + let draft09_interval = match task_config.version { DapVersion::Draft02 => None, - DapVersion::Draft07 => { + DapVersion::Latest => { let low = task_config.quantized_time_lower_bound(leader_agg_share.min_time); let high = task_config.quantized_time_upper_bound(leader_agg_share.max_time); Some(Interval { @@ -554,7 +554,7 @@ pub trait DapLeader: DapAuthorizedSender + DapAggregator { let collection = Collection { part_batch_sel: batch_selector.into(), report_count: leader_agg_share.report_count, - draft07_interval, + draft09_interval, encrypted_agg_shares: [leader_enc_agg_share, agg_share_resp.encrypted_agg_share], }; self.finish_collect_job(task_id, collect_id, &collection) diff --git a/daphne/src/roles/mod.rs b/daphne/src/roles/mod.rs index 9feca88bf..8c1098149 100644 --- a/daphne/src/roles/mod.rs +++ b/daphne/src/roles/mod.rs @@ -197,7 +197,7 @@ mod test { fn empty_report_extensions_for_version(version: DapVersion) -> Option> { match version { DapVersion::Draft02 => Some(Vec::new()), - DapVersion::Draft07 => None, + DapVersion::Latest => None, } } @@ -1364,7 +1364,7 @@ mod test { let collect_resp = Collection { part_batch_sel: PartialBatchSelector::TimeInterval, report_count: 0, - draft07_interval: if version == DapVersion::Draft02 { + draft09_interval: if version == DapVersion::Draft02 { None } else { Some(Interval { @@ -1692,7 +1692,7 @@ mod test { let agg_job_req_count = match version { DapVersion::Draft02 => 2, - DapVersion::Draft07 => 1, + DapVersion::Latest => 1, }; assert_metrics_include!(t.helper_registry, { @@ -1733,7 +1733,7 @@ mod test { let agg_job_req_count = match version { DapVersion::Draft02 => 2, - DapVersion::Draft07 => 1, + DapVersion::Latest => 1, }; assert_metrics_include!(t.helper_registry, { @@ -1795,7 +1795,7 @@ mod test { DapMeasurement::U32Vec(vec![1; 10]), vec![Extension::Taskprov { draft02_payload: match version { - DapVersion::Draft07 => None, + DapVersion::Latest => None, DapVersion::Draft02 => Some(taskprov_report_extension_payload.clone()), }, }], @@ -1833,7 +1833,7 @@ mod test { let agg_job_req_count = match version { DapVersion::Draft02 => 2, - DapVersion::Draft07 => 1, + DapVersion::Latest => 1, }; assert_metrics_include!(t.helper_registry, { diff --git a/daphne/src/taskprov.rs b/daphne/src/taskprov.rs index 8692045f5..69c73f113 100644 --- a/daphne/src/taskprov.rs +++ b/daphne/src/taskprov.rs @@ -147,7 +147,7 @@ fn get_taskprov_task_config( ) })?) 
} else if let Some(metadata) = report_metadata_advertisement { - if req.version == DapVersion::Draft07 { + if req.version == DapVersion::Latest { return Ok(None); } let taskprovs: Vec<&Extension> = metadata @@ -621,7 +621,7 @@ mod test { (DapVersion::Draft02, Err(DapAbort::InvalidMessage { detail, .. })) => { assert_eq!(detail, "codec error: unexpected value"); } - (DapVersion::Draft07, Err(DapAbort::InvalidTask { detail, .. })) => { + (DapVersion::Latest, Err(DapAbort::InvalidTask { detail, .. })) => { assert_eq!(detail, "unimplemented VDAF type (1337)"); } (_, r) => panic!("unexpected result: {r:?} ({version})"), diff --git a/daphne/src/testing.rs b/daphne/src/testing.rs index 8d94e1d16..00e0ce50f 100644 --- a/daphne/src/testing.rs +++ b/daphne/src/testing.rs @@ -501,7 +501,7 @@ impl AggregationJobTest { // // and // -// something_draft07 +// something_draft09 // // that called something(version) with the appropriate version. // @@ -524,7 +524,7 @@ macro_rules! test_versions { ($($fname:ident),*) => { $( $crate::test_version! { $fname, Draft02 } - $crate::test_version! { $fname, Draft07 } + $crate::test_version! { $fname, Latest } )* }; } @@ -546,7 +546,7 @@ macro_rules! async_test_versions { ($($fname:ident),*) => { $( $crate::async_test_version! { $fname, Draft02 } - $crate::async_test_version! { $fname, Draft07 } + $crate::async_test_version! { $fname, Latest } )* }; } @@ -555,14 +555,14 @@ macro_rules! async_test_versions { #[cfg_attr(any(test, feature = "test-utils"), derive(deepsize::DeepSizeOf))] pub(crate) enum MetaAggregationJobIdOwned { Draft02(Draft02AggregationJobId), - Draft07(AggregationJobId), + Latest(AggregationJobId), } impl From<&MetaAggregationJobId> for MetaAggregationJobIdOwned { fn from(agg_job_id: &MetaAggregationJobId) -> Self { match agg_job_id { MetaAggregationJobId::Draft02(agg_job_id) => Self::Draft02(*agg_job_id), - MetaAggregationJobId::Draft07(agg_job_id) => Self::Draft07(*agg_job_id), + MetaAggregationJobId::Latest(agg_job_id) => Self::Latest(*agg_job_id), } } } diff --git a/daphne/src/vdaf/mod.rs b/daphne/src/vdaf/mod.rs index b7d44d8cb..679ab6e4f 100644 --- a/daphne/src/vdaf/mod.rs +++ b/daphne/src/vdaf/mod.rs @@ -58,9 +58,9 @@ use std::{ }; const CTX_INPUT_SHARE_DRAFT02: &[u8] = b"dap-02 input share"; -const CTX_INPUT_SHARE_DRAFT07: &[u8] = b"dap-07 input share"; +const CTX_INPUT_SHARE_DRAFT_LATEST: &[u8] = b"dap-09 input share"; const CTX_AGG_SHARE_DRAFT02: &[u8] = b"dap-02 aggregate share"; -const CTX_AGG_SHARE_DRAFT07: &[u8] = b"dap-07 aggregate share"; +const CTX_AGG_SHARE_DRAFT_LATEST: &[u8] = b"dap-09 aggregate share"; const CTX_ROLE_COLLECTOR: u8 = 0; const CTX_ROLE_CLIENT: u8 = 1; const CTX_ROLE_LEADER: u8 = 2; @@ -69,7 +69,7 @@ const CTX_ROLE_HELPER: u8 = 3; pub(crate) const VDAF_VERIFY_KEY_SIZE_PRIO3: usize = 16; pub(crate) const VDAF_VERIFY_KEY_SIZE_PRIO2: usize = 32; -// Ping-pong message framing as defined in draft-irtf-cfrg-vdaf-07, Section 5.8. We do not +// Ping-pong message framing as defined in draft-irtf-cfrg-vdaf-08, Section 5.8. We do not // implement the "continue" message type because we only support 1-round VDAFs. 
enum PingPongMessageType { Initialize = 0, @@ -190,7 +190,7 @@ impl<'req> EarlyReportStateConsumed<'req> { let input_share_text = match task_config.version { DapVersion::Draft02 => CTX_INPUT_SHARE_DRAFT02, - DapVersion::Draft07 => CTX_INPUT_SHARE_DRAFT07, + DapVersion::Latest => CTX_INPUT_SHARE_DRAFT_LATEST, }; let n: usize = input_share_text.len(); let mut info = Vec::with_capacity(n + 2); @@ -218,9 +218,9 @@ impl<'req> EarlyReportStateConsumed<'req> { // draft02 compatibility: The plaintext is passed to the VDAF directly. In the latest // draft, the plaintext also encodes the report extensions. - let (input_share, draft07_extensions) = match task_config.version { + let (input_share, draft09_extensions) = match task_config.version { DapVersion::Draft02 => (encoded_input_share, None), - DapVersion::Draft07 => { + DapVersion::Latest => { match PlaintextInputShare::get_decoded_with_param( &task_config.version, &encoded_input_share, @@ -239,7 +239,7 @@ impl<'req> EarlyReportStateConsumed<'req> { // Handle report extensions. { let extensions = match task_config.version { - DapVersion::Draft07 => draft07_extensions.as_ref().unwrap(), + DapVersion::Latest => draft09_extensions.as_ref().unwrap(), DapVersion::Draft02 => metadata.as_ref().draft02_extensions.as_ref().unwrap(), }; @@ -260,7 +260,7 @@ impl<'req> EarlyReportStateConsumed<'req> { } // Reject reports with unrecognized extensions. - (DapVersion::Draft07, ..) => { + (DapVersion::Latest, ..) => { return Ok(Self::Rejected { metadata, failure: TransitionFailure::InvalidMessage, @@ -693,8 +693,8 @@ impl VdafConfig { return Err(fatal_error!(err = "unexpected number of HPKE configs")); } - let (draft02_extensions, mut draft07_plaintext_input_share) = match version { - DapVersion::Draft07 => ( + let (draft02_extensions, mut draft09_plaintext_input_share) = match version { + DapVersion::Latest => ( None, Some(PlaintextInputShare { extensions, @@ -711,7 +711,7 @@ impl VdafConfig { }; let encoded_input_shares = input_shares.into_iter().map(|input_share| { - if let Some(ref mut plaintext_input_share) = draft07_plaintext_input_share { + if let Some(ref mut plaintext_input_share) = draft09_plaintext_input_share { plaintext_input_share.payload = input_share; plaintext_input_share.get_encoded_with_param(&version) } else { @@ -721,7 +721,7 @@ impl VdafConfig { let input_share_text = match version { DapVersion::Draft02 => CTX_INPUT_SHARE_DRAFT02, - DapVersion::Draft07 => CTX_INPUT_SHARE_DRAFT07, + DapVersion::Latest => CTX_INPUT_SHARE_DRAFT_LATEST, }; let n: usize = input_share_text.len(); let mut info = Vec::with_capacity(n + 2); @@ -878,9 +878,9 @@ impl VdafConfig { } => { // draft02 compatibility: In the latest version, the Leader sends the Helper // its initial prep share in the first request. - let (draft02_prep_share, draft07_payload) = match task_config.version { + let (draft02_prep_share, draft09_payload) = match task_config.version { DapVersion::Draft02 => (Some(prep_share), None), - DapVersion::Draft07 => { + DapVersion::Latest => { let mut outbound = Vec::with_capacity( prep_share .encoded_len_with_param(&task_config.version) @@ -888,7 +888,7 @@ impl VdafConfig { + 5, ); // Add the ping-pong "initialize" message framing - // (draft-irtf-cfrg-vdaf-07, Section 5.8). + // (draft-irtf-cfrg-vdaf-08, Section 5.8). 
outbound.push(PingPongMessageType::Initialize as u8); encode_u32_items(&mut outbound, &task_config.version, &[prep_share]); (None, Some(outbound)) @@ -907,7 +907,7 @@ impl VdafConfig { public_share: public_share.into_owned(), encrypted_input_share: helper_share, }, - draft07_payload, + draft09_payload, }); } @@ -1010,7 +1010,7 @@ impl VdafConfig { agg_job_init_req, metrics, )), - DapVersion::Draft07 => self.draft07_handle_agg_job_init_req( + DapVersion::Latest => self.draft09_handle_agg_job_init_req( task_id, task_config, report_status, @@ -1079,7 +1079,7 @@ impl VdafConfig { ) } - fn draft07_handle_agg_job_init_req( + fn draft09_handle_agg_job_init_req( &self, task_id: &TaskId, task_config: &DapTaskConfig, @@ -1106,7 +1106,7 @@ impl VdafConfig { state: helper_prep_state, message: helper_prep_share, } => { - let Some(ref leader_inbound) = prep_init.draft07_payload else { + let Some(ref leader_inbound) = prep_init.draft09_payload else { return Err(DapAbort::InvalidMessage { detail: "PrepareInit with missing payload".to_string(), task_id: Some(*task_id), @@ -1115,7 +1115,7 @@ impl VdafConfig { }; // Decode the ping-pong "initialize" message framing. - // (draft-irtf-cfrg-vdaf-07, Section 5.8). + // (draft-irtf-cfrg-vdaf-08, Section 5.8). let leader_prep_share = decode_ping_pong_framed( leader_inbound, PingPongMessageType::Initialize, @@ -1149,7 +1149,7 @@ impl VdafConfig { )?; let mut outbound = Vec::with_capacity(1 + prep_msg.len()); - // Add ping-pong "finish" message framing (draft-irtf-cfrg-vdaf-07, + // Add ping-pong "finish" message framing (draft-irtf-cfrg-vdaf-08, // Section 5.8). outbound.push(PingPongMessageType::Finish as u8); encode_u32_bytes(&mut outbound, &prep_msg); @@ -1207,8 +1207,8 @@ impl VdafConfig { metrics, ) .map_err(Into::into), - DapVersion::Draft07 => { - self.draft07_handle_agg_job_resp(task_id, task_config, state, agg_job_resp, metrics) + DapVersion::Latest => { + self.draft09_handle_agg_job_resp(task_id, task_config, state, agg_job_resp, metrics) } } } @@ -1326,7 +1326,7 @@ impl VdafConfig { )) } - fn draft07_handle_agg_job_resp( + fn draft09_handle_agg_job_resp( &self, task_id: &TaskId, task_config: &DapTaskConfig, @@ -1365,7 +1365,7 @@ impl VdafConfig { let prep_msg = match &helper.var { TransitionVar::Continued(inbound) => { - // Decode the ping-pong "finish" message frame (draft-irtf-cfrg-vdaf-07, + // Decode the ping-pong "finish" message frame (draft-irtf-cfrg-vdaf-08, // Section 5.8). Abort the aggregation job if not found. 
let Ok(prep_msg) = decode_ping_pong_framed(inbound, PingPongMessageType::Finish) @@ -1714,7 +1714,7 @@ impl VdafConfig { let agg_share_text = match version { DapVersion::Draft02 => CTX_AGG_SHARE_DRAFT02, - DapVersion::Draft07 => CTX_AGG_SHARE_DRAFT07, + DapVersion::Latest => CTX_AGG_SHARE_DRAFT_LATEST, }; let n: usize = agg_share_text.len(); let mut info = Vec::with_capacity(n + 2); @@ -1778,7 +1778,7 @@ fn produce_encrypted_agg_share( let agg_share_text = match version { DapVersion::Draft02 => CTX_AGG_SHARE_DRAFT02, - DapVersion::Draft07 => CTX_AGG_SHARE_DRAFT07, + DapVersion::Latest => CTX_AGG_SHARE_DRAFT_LATEST, }; let n: usize = agg_share_text.len(); let mut info = Vec::with_capacity(n + 2); @@ -2202,7 +2202,7 @@ mod test { public_share: report0.public_share, encrypted_input_share: report0.encrypted_input_shares[1].clone(), }, - draft07_payload: Some(b"malformed payload".to_vec()), + draft09_payload: Some(b"malformed payload".to_vec()), }, PrepareInit { report_share: ReportShare { @@ -2210,7 +2210,7 @@ mod test { public_share: report1.public_share, encrypted_input_share: report1.encrypted_input_shares[1].clone(), }, - draft07_payload: Some(b"malformed payload".to_vec()), + draft09_payload: Some(b"malformed payload".to_vec()), }, ], }; @@ -2440,13 +2440,12 @@ mod test { } #[tokio::test] - async fn agg_job_init_req_skip_vdaf_prep_error_draft07() { - let t = - AggregationJobTest::new(TEST_VDAF, HpkeKemId::X25519HkdfSha256, DapVersion::Draft07); + async fn agg_job_init_req_skip_vdaf_prep_error_draft09() { + let t = AggregationJobTest::new(TEST_VDAF, HpkeKemId::X25519HkdfSha256, DapVersion::Latest); let mut reports = t.produce_reports(vec![DapMeasurement::U64(1), DapMeasurement::U64(1)]); reports.insert( 1, - t.produce_invalid_report_vdaf_prep_failure(DapMeasurement::U64(1), DapVersion::Draft07), + t.produce_invalid_report_vdaf_prep_failure(DapMeasurement::U64(1), DapVersion::Latest), ); let (leader_state, agg_job_init_req) = @@ -2661,9 +2660,9 @@ mod test { let expect_ready = match version { // In draft02 we're meant to ignore extensions we don't recognize. DapVersion::Draft02 => true, - // In the latest versioin we're meant to reject reports containing unrecognized + // In the latest version we're meant to reject reports containing unrecognized // extensions. 
- DapVersion::Draft07 => false, + DapVersion::Latest => false, }; assert_eq!(consumed_report.is_ready(), expect_ready); } diff --git a/daphne_worker/src/config.rs b/daphne_worker/src/config.rs index 660fb3524..ff3a92531 100644 --- a/daphne_worker/src/config.rs +++ b/daphne_worker/src/config.rs @@ -1136,7 +1136,7 @@ impl<'srv> DaphneWorker<'srv> { let mut r = Cursor::new(payload.as_ref()); (TaskId::decode(&mut r).ok(), DapResource::Undefined) } - DapVersion::Draft07 => { + DapVersion::Latest => { let task_id = ctx.param("task_id").and_then(TaskId::try_from_base64url); let resource = match media_type { DapMediaType::AggregationJobInitReq diff --git a/daphne_worker/src/durable/mod.rs b/daphne_worker/src/durable/mod.rs index 8d6b06e75..968442e1f 100644 --- a/daphne_worker/src/durable/mod.rs +++ b/daphne_worker/src/durable/mod.rs @@ -728,7 +728,7 @@ mod test { time: rng.gen(), draft02_extensions: match version { DapVersion::Draft02 => Some(Vec::new()), - DapVersion::Draft07 => None, + DapVersion::Latest => None, }, }, public_share: Vec::default(), diff --git a/daphne_worker/src/durable/reports_pending.rs b/daphne_worker/src/durable/reports_pending.rs index a1a231e9d..6707193d8 100644 --- a/daphne_worker/src/durable/reports_pending.rs +++ b/daphne_worker/src/durable/reports_pending.rs @@ -51,7 +51,7 @@ impl PendingReport { pub(crate) fn report_id_hex(&self) -> Option<&str> { match self.version { DapVersion::Draft02 if self.report_hex.len() >= 96 => Some(&self.report_hex[64..96]), - DapVersion::Draft07 if self.report_hex.len() >= 32 => Some(&self.report_hex[..32]), + DapVersion::Latest if self.report_hex.len() >= 32 => Some(&self.report_hex[..32]), _ => None, } } diff --git a/daphne_worker_test/tests/e2e/e2e.rs b/daphne_worker_test/tests/e2e/e2e.rs index 940ad096a..ce4a3d6b0 100644 --- a/daphne_worker_test/tests/e2e/e2e.rs +++ b/daphne_worker_test/tests/e2e/e2e.rs @@ -73,7 +73,7 @@ async fn leader_endpoint_for_task(version: DapVersion, want_prefix: bool) { let expected = if want_prefix { format!("/{}/", version.as_ref()) } else { - String::from("/v07/") // Must match DAP_DEFAULT_VERSION + String::from("/v09/") // Must match DAP_DEFAULT_VERSION }; assert_eq!(res.endpoint.unwrap(), expected); } @@ -114,7 +114,7 @@ async fn helper_endpoint_for_task(version: DapVersion, want_prefix: bool) { let expected = if want_prefix { format!("/{}/", version.as_ref()) } else { - String::from("/v07/") // Must match DAP_DEFAULT_VERSION + String::from("/v09/") // Must match DAP_DEFAULT_VERSION }; assert_eq!(res.endpoint.unwrap(), expected); } @@ -301,7 +301,7 @@ async fn leader_upload(version: DapVersion) { ); let builder = match t.version { DapVersion::Draft02 => client.post(url.as_str()), - DapVersion::Draft07 => client.put(url.as_str()), + DapVersion::Latest => client.put(url.as_str()), }; let resp = builder .body( @@ -312,7 +312,7 @@ async fn leader_upload(version: DapVersion) { time: t.now, draft02_extensions: match version { DapVersion::Draft02 => Some(Vec::default()), - DapVersion::Draft07 => None, + DapVersion::Latest => None, }, }, public_share: b"public share".to_vec(), @@ -665,7 +665,7 @@ async fn leader_collect_ok(version: DapVersion) { if version != DapVersion::Draft02 { // Check that the time interval for the reports is correct. 
- let interval = collection.draft07_interval.as_ref().unwrap(); + let interval = collection.draft09_interval.as_ref().unwrap(); let low = t.task_config.quantized_time_lower_bound(time_min); let high = t.task_config.quantized_time_upper_bound(time_max); assert!(low < high); @@ -1079,9 +1079,8 @@ async_test_versions! { leader_collect_abort_overlapping_batch_interval } async fn fixed_size(version: DapVersion, use_current: bool) { if version == DapVersion::Draft02 && use_current { - // draft02 compatibility: The "current batch" isn't a feature in draft02, but we allow it - // and immediately return for testing flexibility, as this allows us to not have a test - // coverage regression if we add a draft07 in the future. + // draft02 compatibility: The "current batch" isn't a feature in draft02, but is in the + // latest version. return; } let t = TestRunner::fixed_size(version).await; @@ -1313,7 +1312,7 @@ async fn leader_collect_taskprov_ok(version: DapVersion) { for _ in 0..t.task_config.min_batch_size { let extensions = vec![Extension::Taskprov { draft02_payload: match version { - DapVersion::Draft07 => None, + DapVersion::Latest => None, DapVersion::Draft02 => Some(taskprov_report_extension_payload.clone()), }, }]; diff --git a/daphne_worker_test/tests/e2e/test_runner.rs b/daphne_worker_test/tests/e2e/test_runner.rs index c74d80984..4bb8d84e0 100644 --- a/daphne_worker_test/tests/e2e/test_runner.rs +++ b/daphne_worker_test/tests/e2e/test_runner.rs @@ -78,7 +78,7 @@ impl TestRunner { // aggregator URL with 127.0.0.1. let version_path = match version { DapVersion::Draft02 => "v02", - DapVersion::Draft07 => "v07", + DapVersion::Latest => "v09", }; let mut leader_url = Url::parse(&format!("http://leader:8787/{}/", version_path)).unwrap(); let mut helper_url = Url::parse(&format!("http://helper:8788/{}/", version_path)).unwrap(); @@ -603,7 +603,7 @@ impl TestRunner { client: &reqwest::Client, report_sel: &DaphneWorkerReportSelector, ) -> DapLeaderProcessTelemetry { - // Replace path "/v07" with "/internal/process". + // Replace path "/v09" with "/internal/process". let mut url = self.leader_url.clone(); url.set_path("internal/process"); @@ -634,7 +634,7 @@ impl TestRunner { } else { self.helper_url.clone() }; - url.set_path(path); // Overwrites the version path (i.e., "/v07") + url.set_path(path); // Overwrites the version path (i.e., "/v09") let resp = client .post(url.clone()) .json(data) @@ -693,14 +693,14 @@ impl TestRunner { pub fn upload_path_for_task(&self, id: &TaskId) -> String { match self.version { DapVersion::Draft02 => "upload".to_string(), - DapVersion::Draft07 => format!("tasks/{}/reports", id.to_base64url()), + DapVersion::Latest => format!("tasks/{}/reports", id.to_base64url()), } } pub fn collect_path_for_task(&self, task_id: &TaskId) -> String { match self.version { DapVersion::Draft02 => "collect".to_string(), - DapVersion::Draft07 => { + DapVersion::Latest => { let collection_job_id = CollectionJobId(thread_rng().gen()); format!( "tasks/{}/collection_jobs/{}", @@ -791,7 +791,7 @@ async fn post_internal_delete_all( base_url: &Url, batch_interval: &Interval, ) { - // Replace path "/v07" with "/internal/delete_all". + // Replace path "/v09" with "/internal/delete_all". 
let mut url = base_url.clone(); url.set_path("internal/delete_all"); diff --git a/daphne_worker_test/wrangler.toml b/daphne_worker_test/wrangler.toml index fb687926c..bb4b17dfc 100644 --- a/daphne_worker_test/wrangler.toml +++ b/daphne_worker_test/wrangler.toml @@ -54,7 +54,7 @@ DAP_TASKPROV_LEADER_AUTH = """{ DAP_TASKPROV_COLLECTOR_AUTH = """{ "bearer_token": "I am the collector!" }""" # SECRET -DAP_DEFAULT_VERSION = "v07" +DAP_DEFAULT_VERSION = "v09" DAP_TRACING = "debug" [env.leader.durable_objects] @@ -131,7 +131,7 @@ DAP_TASKPROV_VDAF_VERIFY_KEY_INIT = "b029a72fa327931a5cb643dcadcaafa098fcbfac07d DAP_TASKPROV_LEADER_AUTH = """{ "bearer_token": "I am the leader!" }""" # SECRET -DAP_DEFAULT_VERSION = "v07" +DAP_DEFAULT_VERSION = "v09" DAP_TRACING = "debug" [env.helper.durable_objects] diff --git a/docker/wrangler.toml b/docker/wrangler.toml index 728f4bf06..6f3c9e4e9 100644 --- a/docker/wrangler.toml +++ b/docker/wrangler.toml @@ -54,7 +54,7 @@ DAP_TASKPROV_LEADER_AUTH = """{ DAP_TASKPROV_COLLECTOR_AUTH = """{ "bearer_token": "I am the collector!" }""" # SECRET -DAP_DEFAULT_VERSION = "v07" +DAP_DEFAULT_VERSION = "v09" DAP_TRACING = "debug" [env.leader.durable_objects] @@ -131,7 +131,7 @@ DAP_TASKPROV_VDAF_VERIFY_KEY_INIT = "b029a72fa327931a5cb643dcadcaafa098fcbfac07d DAP_TASKPROV_LEADER_AUTH = """{ "bearer_token": "I am the leader!" }""" # SECRET -DAP_DEFAULT_VERSION = "v07" +DAP_DEFAULT_VERSION = "v09" DAP_TRACING = "debug" [env.helper.durable_objects]
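
The hunks above all follow the same pattern: behavior that differs between DAP drafts is selected by matching on `DapVersion`, and the draft-09 update only swaps the variant (`Draft07` -> `Latest`) together with the associated constant or URL path ("v07" -> "v09", `CTX_AGG_SHARE_DRAFT07` -> `CTX_AGG_SHARE_DRAFT_LATEST`). The sketch below is illustrative only and is not code from the crate: the simplified `DapVersion` enum and the `as_path_prefix` helper are assumptions for the example (in the crate the path string comes from `version.as_ref()`, and the default is pinned by `DAP_DEFAULT_VERSION` in wrangler.toml).

// Minimal, self-contained sketch of the version-gating pattern used in this
// patch. The enum and helper below are hypothetical, not the crate's API.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum DapVersion {
    Draft02,
    Latest, // draft-ietf-ppm-dap-09 in this patch
}

impl DapVersion {
    // Hypothetical helper: URL path prefix for this version, mirroring the
    // values used in test_runner.rs and DAP_DEFAULT_VERSION in wrangler.toml.
    fn as_path_prefix(self) -> &'static str {
        match self {
            DapVersion::Draft02 => "v02",
            DapVersion::Latest => "v09",
        }
    }
}

fn main() {
    // The default endpoint prefix must agree with DAP_DEFAULT_VERSION ("v09").
    assert_eq!(DapVersion::Latest.as_path_prefix(), "v09");
    println!("default endpoint: /{}/", DapVersion::Latest.as_path_prefix());
}

Keeping the draft-specific values behind a single match per call site is what makes a version bump like this one mostly mechanical: the URL prefix, the default version string, and the per-version HPKE context constants all change in lockstep.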