
Commit

feat(dashmate): configure proposer and tx limits (#2057)
shumkov authored Aug 21, 2024
1 parent 5f4055f commit 4f32c35
Showing 15 changed files with 153 additions and 49 deletions.
5 changes: 5 additions & 0 deletions packages/dashmate/configs/defaults/getBaseConfigFactory.js
@@ -298,6 +298,9 @@ export default function getBaseConfigFactory(homeDir) {
host: '127.0.0.1',
port: 8083,
},
proposer: {
txProcessingTimeLimit: null,
},
epochTime: 788400,
},
tenderdash: {
@@ -341,6 +344,8 @@ export default function getBaseConfigFactory(homeDir) {
txSendRateLimit: 10,
txRecvRateLimit: 12,
maxConcurrentCheckTx: 250,
ttlDuration: '0s',
ttlNumBlocks: 0,
},
consensus: {
createEmptyBlocks: true,
7 changes: 7 additions & 0 deletions packages/dashmate/configs/defaults/getMainnetConfigFactory.js
@@ -67,12 +67,19 @@ export default function getMainnetConfigFactory(homeDir, getBaseConfig) {
txEnqueueTimeout: '30ms',
txSendRateLimit: 100,
txRecvRateLimit: 120,
ttlDuration: '24h',
ttlNumBlocks: 0,
},
genesis: {
chain_id: 'dash-1',
validator_quorum_type: 4,
},
},
abci: {
proposer: {
txProcessingTimeLimit: 5000,
},
},
},
},
};
11 changes: 11 additions & 0 deletions packages/dashmate/configs/defaults/getTestnetConfigFactory.js
@@ -74,6 +74,9 @@ export default function getTestnetConfigFactory(homeDir, getBaseConfig) {
rotation: true,
},
},
proposer: {
txProcessingTimeLimit: 5000,
},
},
tenderdash: {
p2p: {
@@ -91,6 +94,14 @@
],
port: 36656,
},
mempool: {
timeoutCheckTx: '3s',
txEnqueueTimeout: '30ms',
txSendRateLimit: 100,
txRecvRateLimit: 120,
ttlDuration: '24h',
ttlNumBlocks: 0,
},
rpc: {
port: 36657,
timeoutBroadcastTx: '1s',
29 changes: 29 additions & 0 deletions packages/dashmate/configs/getConfigFileMigrationsFactory.js
@@ -790,6 +790,35 @@ export default function getConfigFileMigrationsFactory(homeDir, defaultConfigs)
});
return configFile;
},
'1.1.0-dev.2': (configFile) => {
Object.entries(configFile.configs)
.forEach(([name, options]) => {
if (options.network === NETWORK_TESTNET) {
options.platform.drive.abci.proposer = {
txProcessingTimeLimit: 5000,
};
options.platform.drive.tenderdash.mempool.timeoutCheckTx = '3s';
options.platform.drive.tenderdash.mempool.txEnqueueTimeout = '30ms';
options.platform.drive.tenderdash.mempool.txSendRateLimit = 100;
options.platform.drive.tenderdash.mempool.txRecvRateLimit = 120;
options.platform.drive.tenderdash.mempool.ttlDuration = '24h';
options.platform.drive.tenderdash.mempool.ttlNumBlocks = 0;
} else if (options.network === NETWORK_MAINNET && name !== 'base') {
options.platform.drive.abci.proposer = {
txProcessingTimeLimit: 5000,
};
options.platform.drive.tenderdash.mempool.ttlDuration = '24h';
options.platform.drive.tenderdash.mempool.ttlNumBlocks = 0;
} else {
options.platform.drive.tenderdash.mempool.ttlDuration = '0s';
options.platform.drive.tenderdash.mempool.ttlNumBlocks = 0;
options.platform.drive.abci.proposer = {
txProcessingTimeLimit: null,
};
}
});
return configFile;
},
};
}

1 change: 1 addition & 0 deletions packages/dashmate/docker-compose.yml
@@ -80,6 +80,7 @@ services:
- TOKIO_CONSOLE_RETENTION_SECS=${PLATFORM_DRIVE_ABCI_TOKIO_CONSOLE_RETENTION:?err}
- GROVEDB_VISUALIZER_ENABLED=${PLATFORM_DRIVE_ABCI_GROVEDB_VISUALIZER_ENABLED:?err}
- GROVEDB_VISUALIZER_ADDRESS=0.0.0.0:${PLATFORM_DRIVE_ABCI_GROVEDB_VISUALIZER_PORT:?err}
- PROPOSER_TX_PROCESSING_TIME_LIMIT=${PLATFORM_DRIVE_ABCI_PROPOSER_TX_PROCESSING_TIME_LIMIT}
- NETWORK=${NETWORK:?err}
stop_grace_period: 30s
expose:
22 changes: 20 additions & 2 deletions packages/dashmate/src/config/configJsonSchema.js
@@ -895,9 +895,20 @@ export default {
grovedbVisualizer: {
$ref: '#/definitions/enabledHostPort',
},
proposer: {
type: 'object',
properties: {
txProcessingTimeLimit: {
type: ['null', 'integer'],
minimum: 0,
},
},
required: ['txProcessingTimeLimit'],
additionalProperties: false,
},
},
additionalProperties: false,
required: ['docker', 'logs', 'tokioConsole', 'validatorSet', 'chainLock', 'epochTime', 'metrics', 'grovedbVisualizer'],
required: ['docker', 'logs', 'tokioConsole', 'validatorSet', 'chainLock', 'epochTime', 'metrics', 'grovedbVisualizer', 'proposer'],
},
tenderdash: {
type: 'object',
@@ -992,9 +1003,16 @@
type: 'integer',
minimum: 0,
},
ttlDuration: {
$ref: '#/definitions/duration',
},
ttlNumBlocks: {
type: 'integer',
minimum: 0,
},
},
additionalProperties: false,
required: ['size', 'maxTxsBytes', 'cacheSize', 'timeoutCheckTx', 'txEnqueueTimeout', 'txSendRateLimit', 'txRecvRateLimit', 'maxConcurrentCheckTx'],
required: ['size', 'maxTxsBytes', 'cacheSize', 'timeoutCheckTx', 'txEnqueueTimeout', 'txSendRateLimit', 'txRecvRateLimit', 'maxConcurrentCheckTx', 'ttlDuration', 'ttlNumBlocks'],
},
consensus: {
type: 'object',
Original file line number Diff line number Diff line change
@@ -395,15 +395,15 @@ max-batch-bytes = 0
# Note, if ttl-num-blocks is also defined, a transaction will be removed if it
# has existed in the mempool at least ttl-num-blocks number of blocks or if it's
# insertion time into the mempool is beyond ttl-duration.
ttl-duration = "0s"
ttl-duration = "{{=it.platform.drive.tenderdash.mempool.ttlDuration}}"

# ttl-num-blocks, if non-zero, defines the maximum number of blocks a transaction
# can exist for in the mempool.
#
# Note, if ttl-duration is also defined, a transaction will be removed if it
# has existed in the mempool at least ttl-num-blocks number of blocks or if
# it's insertion time into the mempool is beyond ttl-duration.
ttl-num-blocks = 0
ttl-num-blocks = {{=it.platform.drive.tenderdash.mempool.ttlNumBlocks}}

#######################################################
### State Sync Configuration Options ###
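The two mempool TTL options templated above (`ttl-duration` and `ttl-num-blocks`) combine as the comments describe: a transaction is dropped once either limit is exceeded, and a zero value reads as "not enforced", which is how the base config's `0s` / `0` defaults behave versus the `24h` now set for mainnet and testnet. A minimal Rust sketch of those documented semantics (not tenderdash's actual code):

```rust
use std::time::Duration;

/// Sketch of the documented eviction rule: evict once EITHER limit is exceeded;
/// a zero value means that particular limit is not applied.
fn should_evict(
    age: Duration,          // time since the tx was inserted into the mempool
    blocks_in_mempool: u64, // blocks the tx has lived through
    ttl_duration: Duration, // "24h" on mainnet/testnet, "0s" in the base config
    ttl_num_blocks: u64,    // 0 in every config in this commit
) -> bool {
    let expired_by_time = !ttl_duration.is_zero() && age > ttl_duration;
    let expired_by_blocks = ttl_num_blocks != 0 && blocks_in_mempool >= ttl_num_blocks;
    expired_by_time || expired_by_blocks
}

fn main() {
    let day = Duration::from_secs(24 * 60 * 60);
    // Older than 24h with ttl-duration = "24h": evicted even though ttl-num-blocks is 0.
    assert!(should_evict(day + Duration::from_secs(1), 10, day, 0));
    // Base config ("0s" / 0): TTL-based eviction is effectively disabled.
    assert!(!should_evict(day * 30, 1_000, Duration::ZERO, 0));
}
```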
2 changes: 2 additions & 0 deletions packages/rs-drive-abci/.env.mainnet
@@ -89,4 +89,6 @@ TOKIO_CONSOLE_RETENTION_SECS=180
GROVEDB_VISUALIZER_ENABLED=false
GROVEDB_VISUALIZER_ADDRESS=127.0.0.1:8083

PROPOSER_TX_PROCESSING_TIME_LIMIT=5000

NETWORK=mainnet
2 changes: 2 additions & 0 deletions packages/rs-drive-abci/.env.testnet
@@ -89,4 +89,6 @@ TOKIO_CONSOLE_RETENTION_SECS=180
GROVEDB_VISUALIZER_ENABLED=false
GROVEDB_VISUALIZER_ADDRESS=127.0.0.1:8083

PROPOSER_TX_PROCESSING_TIME_LIMIT=5000

NETWORK=testnet
13 changes: 5 additions & 8 deletions packages/rs-drive-abci/src/abci/config.rs
@@ -1,5 +1,6 @@
//! Configuration of ABCI Application server

use crate::utils::from_opt_str_or_number;
use dpp::prelude::TimestampMillis;

Check warning on line 4 in packages/rs-drive-abci/src/abci/config.rs (GitHub Actions / Rust packages (drive-abci) / Linting): unused import: `dpp::prelude::TimestampMillis`. note: `#[warn(unused_imports)]` on by default.
use serde::{Deserialize, Serialize};

@@ -34,9 +35,9 @@ pub struct AbciConfig {
#[serde(default)]
pub log: crate::logging::LogConfigs,

/// Maximum time limit (in ms) to process state transitions in block proposals
#[serde(default = "AbciConfig::default_tx_processing_time_limit")]
pub tx_processing_time_limit: TimestampMillis,
/// Maximum time limit (in ms) to process state transitions to prepare proposal
#[serde(default, deserialize_with = "from_opt_str_or_number")]
pub proposer_tx_processing_time_limit: Option<u16>,
}

impl AbciConfig {
@@ -47,10 +48,6 @@
pub(crate) fn default_genesis_core_height() -> u32 {
1
}

pub(crate) fn default_tx_processing_time_limit() -> TimestampMillis {
8000
}
}

impl Default for AbciConfig {
@@ -61,7 +58,7 @@
genesis_core_height: AbciConfig::default_genesis_core_height(),
chain_id: "chain_id".to_string(),
log: Default::default(),
tx_processing_time_limit: AbciConfig::default_tx_processing_time_limit(),
proposer_tx_processing_time_limit: Default::default(),
}
}
}
13 changes: 1 addition & 12 deletions packages/rs-drive-abci/src/config.rs
@@ -1,4 +1,5 @@
use crate::logging::LogConfigs;
use crate::utils::from_str_or_number;
use crate::{abci::config::AbciConfig, error::Error};
use bincode::{Decode, Encode};
use dashcore_rpc::json::QuorumType;
@@ -113,18 +114,6 @@ pub struct ExecutionConfig {
pub epoch_time_length_s: u64,
}

fn from_str_or_number<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + std::str::FromStr,
<T as std::str::FromStr>::Err: std::fmt::Display,
{
use serde::de::Error;

let s = String::deserialize(deserializer)?;
s.parse::<T>().map_err(Error::custom)
}

/// Configuration of Dash Platform.
///
/// All fields in this struct can be configured using environment variables.
Original file line number Diff line number Diff line change
@@ -19,7 +19,6 @@ use crate::platform_types::state_transitions_processing_result::{
NotExecutedReason, StateTransitionExecutionResult, StateTransitionsProcessingResult,
};
use dpp::fee::default_costs::CachedEpochIndexFeeVersions;
use dpp::prelude::TimestampMillis;
use dpp::util::hash::hash_single;
use dpp::validation::ConsensusValidationResult;
use dpp::version::PlatformVersion;
@@ -83,10 +82,16 @@ where
let mut processing_result = StateTransitionsProcessingResult::default();

for decoded_state_transition in state_transition_container.into_iter() {
// If we propose state transitions, we need to check if we have a time limit for processing
// set and if we have exceeded it.
let execution_result = if proposing_state_transitions
&& timer.map_or(false, |timer| {
timer.elapsed().as_millis() as TimestampMillis
> self.config.abci.tx_processing_time_limit
timer.elapsed().as_millis()
> self
.config
.abci
.proposer_tx_processing_time_limit
.unwrap_or(u16::MAX) as u128
}) {
StateTransitionExecutionResult::NotExecuted(NotExecutedReason::ProposerRanOutOfTime)
} else {
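The guard added above applies only while preparing a block proposal; when no limit is configured it falls back to `u16::MAX` milliseconds (about 65 s), and dashmate now sets the limit to 5000 ms on mainnet and testnet. A standalone sketch of the same check, using illustrative names rather than the real drive-abci types:

```rust
use std::{thread, time::{Duration, Instant}};

// Minimal sketch of the proposer time-limit guard: only relevant while proposing,
// only when a timer is running, and an unset limit defaults to u16::MAX milliseconds.
fn proposer_out_of_time(proposing: bool, timer: Option<&Instant>, limit_ms: Option<u16>) -> bool {
    proposing
        && timer.map_or(false, |timer| {
            timer.elapsed().as_millis() > limit_ms.unwrap_or(u16::MAX) as u128
        })
}

fn main() {
    let timer = Instant::now();
    // No limit configured: the guard would only trip after ~65.5 s.
    assert!(!proposer_out_of_time(true, Some(&timer), None));
    // A tiny limit trips as soon as it is exceeded.
    thread::sleep(Duration::from_millis(5));
    assert!(proposer_out_of_time(true, Some(&timer), Some(1)));
    // Never trips when this node is not proposing.
    assert!(!proposer_out_of_time(false, Some(&timer), Some(1)));
}
```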
Original file line number Diff line number Diff line change
@@ -233,23 +233,22 @@ impl DocumentsBatchTransitionInternalTransformerV0 for DocumentsBatchTransition
};

let validation_result = document_transitions
.iter()
.map(|(document_type_name, document_transitions)| {
Self::transform_document_transitions_within_document_type_v0(
platform,
block_info,
validate_against_state,
data_contract_fetch_info.clone(),
document_type_name,
owner_id,
document_transitions,
execution_context,
transaction,
platform_version,
)
})
.collect::<Result<Vec<ConsensusValidationResult<Vec<DocumentTransitionAction>>>, Error>>(
)?;
.iter()
.map(|(document_type_name, document_transitions)| {
Self::transform_document_transitions_within_document_type_v0(
platform,
block_info,
validate_against_state,
data_contract_fetch_info.clone(),
document_type_name,
owner_id,
document_transitions,
execution_context,
transaction,
platform_version,
)
})
.collect::<Result<Vec<ConsensusValidationResult<Vec<DocumentTransitionAction>>>, Error>>()?;
Ok(ConsensusValidationResult::flatten(validation_result))
}

@@ -327,7 +326,7 @@ impl DocumentsBatchTransitionInternalTransformerV0 for DocumentsBatchTransition
.map(|transition| {
// we validate every transition in this document type
Self::transform_transition_v0(
&platform.drive,
platform.drive,
transaction,
validate_against_state,
block_info,
@@ -376,7 +375,7 @@ impl DocumentsBatchTransitionInternalTransformerV0 for DocumentsBatchTransition
drive, transaction,
document_create_transition, block_info, |_identifier| {
Ok(data_contract_fetch_info.clone())
}, platform_version)?;
}, platform_version)?;

execution_context
.add_operation(ValidationOperation::PrecalculatedOperation(fee_result));
@@ -475,9 +474,9 @@ impl DocumentsBatchTransitionInternalTransformerV0 for DocumentsBatchTransition
}
}
DocumentTransition::Delete(document_delete_transition) => {
let action = DocumentDeleteTransitionAction::from_document_borrowed_create_transition_with_contract_lookup(document_delete_transition, |_identifier| {
Ok(data_contract_fetch_info.clone())
})?;
let action = DocumentDeleteTransitionAction::from_document_borrowed_create_transition_with_contract_lookup(document_delete_transition, |_identifier| {
Ok(data_contract_fetch_info.clone())
})?;
Ok(DocumentTransitionAction::DeleteAction(action).into())
}
DocumentTransition::Transfer(document_transfer_transition) => {
3 changes: 3 additions & 0 deletions packages/rs-drive-abci/src/utils/mod.rs
@@ -1,3 +1,6 @@
mod serialization;
mod spawn;

pub use serialization::from_opt_str_or_number;
pub use serialization::from_str_or_number;
pub use spawn::spawn_blocking_task_with_name_if_supported;
36 changes: 36 additions & 0 deletions packages/rs-drive-abci/src/utils/serialization.rs
@@ -0,0 +1,36 @@
use serde::Deserialize;

/// Deserialize a value from a string or a number.
pub fn from_str_or_number<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + std::str::FromStr,
<T as std::str::FromStr>::Err: std::fmt::Display,
{
use serde::de::Error;

let s = String::deserialize(deserializer)?;
s.parse::<T>().map_err(Error::custom)
}

/// Deserialize a value from an optional string or a number
pub fn from_opt_str_or_number<'de, D, T>(deserializer: D) -> Result<Option<T>, D::Error>
where
D: serde::Deserializer<'de>,
T: serde::Deserialize<'de> + std::str::FromStr,
<T as std::str::FromStr>::Err: std::fmt::Display,
{
use serde::de::Error;

let s = Option::<String>::deserialize(deserializer)?;
match s {
Some(s) => {
if s.is_empty() {
Ok(None)
} else {
s.parse::<T>().map(Some).map_err(Error::custom)
}
}
None => Ok(None),
}
}
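Because dashmate passes `PROPOSER_TX_PROCESSING_TIME_LIMIT` through docker-compose as an environment variable, the value reaches drive-abci as a string that may also be empty or absent. A small usage sketch of the new helper, assuming `from_opt_str_or_number` above is in scope and using `serde_json` only as a stand-in for the real env-based deserialization:

```rust
use serde::Deserialize;

// Cut-down stand-in for AbciConfig; assumes `from_opt_str_or_number` (defined above)
// has been imported into this module.
#[derive(Debug, Deserialize)]
struct ExampleAbciConfig {
    #[serde(default, deserialize_with = "from_opt_str_or_number")]
    proposer_tx_processing_time_limit: Option<u16>,
}

fn main() -> Result<(), serde_json::Error> {
    // Environment values arrive as strings: "5000" parses into Some(5000).
    let set: ExampleAbciConfig =
        serde_json::from_str(r#"{ "proposer_tx_processing_time_limit": "5000" }"#)?;
    assert_eq!(set.proposer_tx_processing_time_limit, Some(5000));

    // An empty string (variable defined but blank) maps to None: no time limit configured.
    let unset: ExampleAbciConfig =
        serde_json::from_str(r#"{ "proposer_tx_processing_time_limit": "" }"#)?;
    assert_eq!(unset.proposer_tx_processing_time_limit, None);

    Ok(())
}
```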
