From 24177fac262fd403cc198f107ac21ffb67b5d85e Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Mon, 4 Nov 2024 18:12:14 +0700 Subject: [PATCH 01/14] feat: hardcoded identity transfers in strategy tests --- .../tests/strategy_tests/main.rs | 7 +- .../tests/strategy_tests/strategy.rs | 14 ++- .../tests/strategy_tests/voting_tests.rs | 90 +++++++++++-------- packages/strategy-tests/src/lib.rs | 71 ++++++++++----- packages/strategy-tests/src/operations.rs | 19 +++- packages/strategy-tests/src/transitions.rs | 2 +- 6 files changed, 137 insertions(+), 66 deletions(-) diff --git a/packages/rs-drive-abci/tests/strategy_tests/main.rs b/packages/rs-drive-abci/tests/strategy_tests/main.rs index 2312241cc6..f2122d627e 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/main.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/main.rs @@ -2602,7 +2602,10 @@ mod tests { &simple_signer, &mut rng, platform_version, - ); + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let strategy = NetworkStrategy { strategy: Strategy { @@ -3910,7 +3913,7 @@ mod tests { strategy: Strategy { start_contracts: vec![], operations: vec![Operation { - op_type: OperationType::IdentityTransfer, + op_type: OperationType::IdentityTransfer(None), frequency: Frequency { times_per_block_range: 1..3, chance_per_block: None, diff --git a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs index 667b846868..4d1a7ccb62 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs @@ -41,7 +41,7 @@ use drive_abci::rpc::core::MockCoreRPCLike; use rand::prelude::{IteratorRandom, SliceRandom, StdRng}; use rand::Rng; use strategy_tests::Strategy; -use strategy_tests::transitions::{create_state_transitions_for_identities, create_state_transitions_for_identities_and_proofs, instant_asset_lock_proof_fixture, instant_asset_lock_proof_fixture_with_dynamic_range}; +use strategy_tests::transitions::{create_state_transitions_for_identities, create_state_transitions_for_identities_and_proofs, instant_asset_lock_proof_fixture_with_dynamic_range}; use std::borrow::Cow; use std::collections::{BTreeMap, HashMap, HashSet}; use std::ops::RangeInclusive; @@ -405,7 +405,15 @@ impl NetworkStrategy { state_transitions.append(&mut new_transitions); } if !self.strategy.start_identities.hard_coded.is_empty() { - state_transitions.extend(self.strategy.start_identities.hard_coded.clone()); + state_transitions.extend( + self.strategy.start_identities.hard_coded.iter().filter_map( + |(identity, transition)| { + transition.as_ref().map(|create_transition| { + (identity.clone(), create_transition.clone()) + }) + }, + ), + ); } } let frequency = &self.strategy.identity_inserts.frequency; @@ -1196,7 +1204,7 @@ impl NetworkStrategy { operations.push(state_transition); } } - OperationType::IdentityTransfer if current_identities.len() > 1 => { + OperationType::IdentityTransfer(_) if current_identities.len() > 1 => { let identities_clone = current_identities.clone(); // Sender is the first in the list, which should be loaded_identity diff --git a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs index e14f8d7b1b..2264a4cd5f 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs @@ -79,13 +79,17 @@ mod tests { 
simple_signer.add_keys(keys1); - let start_identities = create_state_transitions_for_identities( - vec![identity1], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -363,13 +367,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -635,13 +643,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -988,13 +1000,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive @@ -1353,13 +1369,17 @@ mod tests { simple_signer.add_keys(keys2); - let start_identities = create_state_transitions_for_identities( - vec![identity1, identity2], - &(dash_to_duffs!(1)..=dash_to_duffs!(1)), - &simple_signer, - &mut rng, - platform_version, - ); + let start_identities: Vec<(Identity, Option)> = + create_state_transitions_for_identities( + vec![identity1, identity2], + &(dash_to_duffs!(1)..=dash_to_duffs!(1)), + &simple_signer, + &mut rng, + platform_version, + ) + .iter() + .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .collect(); let dpns_contract = platform .drive diff --git a/packages/strategy-tests/src/lib.rs b/packages/strategy-tests/src/lib.rs index 61395d99f2..65e8a51f85 100644 --- a/packages/strategy-tests/src/lib.rs +++ b/packages/strategy-tests/src/lib.rs @@ -44,6 +44,7 @@ use platform_version::TryFromPlatformVersioned; use rand::prelude::StdRng; use rand::seq::{IteratorRandom, SliceRandom}; use rand::Rng; +use transitions::create_identity_credit_transfer_transition; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::ops::RangeInclusive; 
use bincode::{Decode, Encode}; @@ -146,7 +147,7 @@ pub struct StartIdentities { pub keys_per_identity: u8, pub starting_balances: u64, // starting balance in duffs pub extra_keys: KeyMaps, - pub hard_coded: Vec<(Identity, StateTransition)>, + pub hard_coded: Vec<(Identity, Option)>, } /// Identities to register on the first block of the strategy @@ -1287,38 +1288,66 @@ impl Strategy { } // Generate state transition for identity transfer operation - OperationType::IdentityTransfer if current_identities.len() > 1 => { + OperationType::IdentityTransfer(identity_transfer_info) => { for _ in 0..count { - let identities_count = current_identities.len(); - if identities_count == 0 { - break; - } + if let Some(transfer_info) = identity_transfer_info { + let sender = self + .start_identities + .hard_coded + .iter() + .find(|(identity, _)| identity.id() == transfer_info.from) + .expect( + "Expected to find sender identity in hardcoded start identities", + ); + let recipient = self + .start_identities + .hard_coded + .iter() + .find(|(identity, _)| identity.id() == transfer_info.to) + .expect( + "Expected to find recipient identity in hardcoded start identities", + ); + + let state_transition = create_identity_credit_transfer_transition( + &sender.0, + &recipient.0, + identity_nonce_counter, + signer, // Does this mean the loaded identity must be the sender since we're signing with it? + transfer_info.amount, + ); + operations.push(state_transition); + } else if current_identities.len() > 1 { + let identities_count = current_identities.len(); + if identities_count == 0 { + break; + } - // Select a random identity from the current_identities for the sender - let random_index_sender = rng.gen_range(0..identities_count); + // Select a random identity from the current_identities for the sender + let random_index_sender = rng.gen_range(0..identities_count); - // Clone current_identities to a Vec for manipulation - let mut unused_identities: Vec<_> = - current_identities.iter().cloned().collect(); - unused_identities.remove(random_index_sender); // Remove the sender - let unused_identities_count = unused_identities.len(); + // Clone current_identities to a Vec for manipulation + let mut unused_identities: Vec<_> = + current_identities.iter().cloned().collect(); + unused_identities.remove(random_index_sender); // Remove the sender + let unused_identities_count = unused_identities.len(); - // Select a random identity from the remaining ones for the recipient - let random_index_recipient = rng.gen_range(0..unused_identities_count); - let recipient = &unused_identities[random_index_recipient]; + // Select a random identity from the remaining ones for the recipient + let random_index_recipient = + rng.gen_range(0..unused_identities_count); + let recipient = &unused_identities[random_index_recipient]; - // Use the sender index on the original slice - let sender = &mut current_identities[random_index_sender]; + // Use the sender index on the original slice + let sender = &mut current_identities[random_index_sender]; - let state_transition = - crate::transitions::create_identity_credit_transfer_transition( + let state_transition = create_identity_credit_transfer_transition( sender, recipient, identity_nonce_counter, signer, 300000, ); - operations.push(state_transition); + operations.push(state_transition); + } } } diff --git a/packages/strategy-tests/src/operations.rs b/packages/strategy-tests/src/operations.rs index 675e996843..d35fc9f503 100644 --- a/packages/strategy-tests/src/operations.rs +++ 
b/packages/strategy-tests/src/operations.rs @@ -497,6 +497,13 @@ impl VoteAction { pub type AmountRange = RangeInclusive; +#[derive(Clone, Debug, PartialEq, Encode, Decode)] +pub struct IdentityTransferInfo { + pub from: Identifier, + pub to: Identifier, + pub amount: Credits, +} + #[derive(Clone, Debug, PartialEq)] pub enum OperationType { Document(DocumentOp), @@ -505,7 +512,7 @@ pub enum OperationType { IdentityWithdrawal(AmountRange), ContractCreate(RandomDocumentTypeParameters, DocumentTypeCount), ContractUpdate(DataContractUpdateOp), - IdentityTransfer, + IdentityTransfer(Option), ResourceVote(ResourceVoteOp), } @@ -517,7 +524,7 @@ enum OperationTypeInSerializationFormat { IdentityWithdrawal(AmountRange), ContractCreate(RandomDocumentTypeParameters, DocumentTypeCount), ContractUpdate(Vec), - IdentityTransfer, + IdentityTransfer(Option), ResourceVote(ResourceVoteOpSerializable), } @@ -563,7 +570,9 @@ impl PlatformSerializableWithPlatformVersion for OperationType { contract_op_in_serialization_format, ) } - OperationType::IdentityTransfer => OperationTypeInSerializationFormat::IdentityTransfer, + OperationType::IdentityTransfer(identity_transfer_info) => { + OperationTypeInSerializationFormat::IdentityTransfer(identity_transfer_info) + } OperationType::ResourceVote(resource_vote_op) => { let vote_op_in_serialization_format = resource_vote_op.try_into_platform_versioned(platform_version)?; @@ -626,7 +635,9 @@ impl PlatformDeserializableWithPotentialValidationFromVersionedStructure for Ope )?; OperationType::ContractUpdate(update_op) } - OperationTypeInSerializationFormat::IdentityTransfer => OperationType::IdentityTransfer, + OperationTypeInSerializationFormat::IdentityTransfer(identity_transfer_info) => { + OperationType::IdentityTransfer(identity_transfer_info) + } OperationTypeInSerializationFormat::ResourceVote(resource_vote_op) => { let vote_op = resource_vote_op.try_into_platform_versioned(platform_version)?; OperationType::ResourceVote(vote_op) diff --git a/packages/strategy-tests/src/transitions.rs b/packages/strategy-tests/src/transitions.rs index 85d03eb333..c77b51e290 100644 --- a/packages/strategy-tests/src/transitions.rs +++ b/packages/strategy-tests/src/transitions.rs @@ -802,7 +802,7 @@ pub fn create_identity_withdrawal_transition_with_output_address( /// - If the sender's identity does not have a suitable authentication key available for signing. /// - If there's an error during the signing process. 
pub fn create_identity_credit_transfer_transition( - identity: &mut Identity, + identity: &Identity, recipient: &Identity, identity_nonce_counter: &mut BTreeMap, signer: &mut SimpleSigner, From dacc6dbd04b6c4f9593c7c8ab128ac1f5a094e4f Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Mon, 4 Nov 2024 18:30:19 +0700 Subject: [PATCH 02/14] fix(drive): uncommitted state if db transaction fails (#2305) --- .../src/abci/handler/finalize_block.rs | 26 +++++++++- .../rs-drive-abci/src/abci/handler/info.rs | 52 ++++++++++++++++--- .../src/abci/handler/prepare_proposal.rs | 43 +++++++++++++++ .../src/abci/handler/process_proposal.rs | 43 +++++++++++++++ 4 files changed, 157 insertions(+), 7 deletions(-) diff --git a/packages/rs-drive-abci/src/abci/handler/finalize_block.rs b/packages/rs-drive-abci/src/abci/handler/finalize_block.rs index 9653391c7d..852f85cc6b 100644 --- a/packages/rs-drive-abci/src/abci/handler/finalize_block.rs +++ b/packages/rs-drive-abci/src/abci/handler/finalize_block.rs @@ -5,6 +5,7 @@ use crate::execution::types::block_execution_context::v0::BlockExecutionContextV use crate::platform_types::cleaned_abci_messages::finalized_block_cleaned_request::v0::FinalizeBlockCleanedRequest; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::rpc::core::CoreRPCLike; +use dpp::dashcore::Network; use std::sync::atomic::Ordering; use tenderdash_abci::proto::abci as proto; @@ -66,7 +67,30 @@ where )); } - app.commit_transaction(platform_version)?; + let result = app.commit_transaction(platform_version); + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + if app.platform().config.network == Network::Dash + && config.abci.chain_id == "evo1" + && block_height < 33000 + { + // Old behavior on mainnet below block 33000 + result?; + } else { + // In case if transaction commit failed we still have caches in memory that + // corresponds to the data that we weren't able to commit. + // The simplified solution is to restart the Drive, so all caches + // will be restored from the disk and try to process this block again. + // TODO: We need a better handling of the transaction is busy error with retry logic. 
+ result.expect("commit transaction"); + } app.platform() .committed_block_height_guard diff --git a/packages/rs-drive-abci/src/abci/handler/info.rs b/packages/rs-drive-abci/src/abci/handler/info.rs index dbb8501891..9ac9d31626 100644 --- a/packages/rs-drive-abci/src/abci/handler/info.rs +++ b/packages/rs-drive-abci/src/abci/handler/info.rs @@ -3,6 +3,7 @@ use crate::abci::AbciError; use crate::error::Error; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::rpc::core::CoreRPCLike; +use dpp::dashcore::Network; use dpp::version::DESIRED_PLATFORM_VERSION; use tenderdash_abci::proto::abci as proto; @@ -21,19 +22,58 @@ where let platform_state = app.platform().state.load(); - let state_app_hash = platform_state + let last_block_height = platform_state.last_committed_block_height() as i64; + + // Verify that Platform State corresponds to Drive commited state + let platform_state_app_hash = platform_state .last_committed_block_app_hash() - .map(|app_hash| app_hash.to_vec()) .unwrap_or_default(); + let grove_version = &platform_state + .current_platform_version()? + .drive + .grove_version; + + let drive_storage_root_hash = app + .platform() + .drive + .grove + .root_hash(None, grove_version) + .unwrap()?; + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + #[allow(clippy::collapsible_if)] + if !(config.network == Network::Dash + && config.abci.chain_id == "evo1" + && last_block_height < 33000) + { + // App hash in memory must be equal to app hash on disk + if drive_storage_root_hash != platform_state_app_hash { + // We panic because we can't recover from this situation. 
+ // Better to restart the Drive, so we might self-heal the node + // reloading state form the disk + panic!( + "drive and platform state app hash mismatch: drive_storage_root_hash: {:?}, platform_state_app_hash: {:?}", + drive_storage_root_hash, platform_state_app_hash + ); + } + } + let desired_protocol_version = DESIRED_PLATFORM_VERSION.protocol_version; let response = proto::ResponseInfo { data: "".to_string(), app_version: desired_protocol_version as u64, - last_block_height: platform_state.last_committed_block_height() as i64, + last_block_height, version: env!("CARGO_PKG_VERSION").to_string(), - last_block_app_hash: state_app_hash.clone(), + last_block_app_hash: platform_state_app_hash.to_vec(), }; tracing::debug!( @@ -41,8 +81,8 @@ where software_version = env!("CARGO_PKG_VERSION"), block_version = request.block_version, p2p_version = request.p2p_version, - app_hash = hex::encode(state_app_hash), - height = platform_state.last_committed_block_height(), + app_hash = hex::encode(platform_state_app_hash), + last_block_height, "Handshake with consensus engine", ); diff --git a/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs b/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs index 18252d0d45..61f58a0196 100644 --- a/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs +++ b/packages/rs-drive-abci/src/abci/handler/prepare_proposal.rs @@ -11,6 +11,7 @@ use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; use crate::rpc::core::CoreRPCLike; use dpp::dashcore::hashes::Hash; +use dpp::dashcore::Network; use dpp::version::TryIntoPlatformVersioned; use drive::grovedb_storage::Error::RocksDBError; use tenderdash_abci::proto::abci as proto; @@ -35,6 +36,48 @@ where let platform_state = app.platform().state.load(); + // Verify that Platform State corresponds to Drive commited state + let platform_state_app_hash = platform_state + .last_committed_block_app_hash() + .unwrap_or_default(); + + let grove_version = &platform_state + .current_platform_version()? + .drive + .grove_version; + + let drive_storage_root_hash = app + .platform() + .drive + .grove + .root_hash(None, grove_version) + .unwrap()?; + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + #[allow(clippy::collapsible_if)] + if !(config.network == Network::Dash + && config.abci.chain_id == "evo1" + && request.height < 33000) + { + // App hash in memory must be equal to app hash on disk + if drive_storage_root_hash != platform_state_app_hash { + // We panic because we can't recover from this situation. 
+ // Better to restart the Drive, so we might self-heal the node + // reloading state form the disk + panic!( + "drive and platform state app hash mismatch: drive_storage_root_hash: {:?}, platform_state_app_hash: {:?}", + drive_storage_root_hash, platform_state_app_hash + ); + } + } + let last_committed_core_height = platform_state.last_committed_core_height(); let starting_platform_version = platform_state.current_platform_version()?; diff --git a/packages/rs-drive-abci/src/abci/handler/process_proposal.rs b/packages/rs-drive-abci/src/abci/handler/process_proposal.rs index 5bf547e14a..d40567d3db 100644 --- a/packages/rs-drive-abci/src/abci/handler/process_proposal.rs +++ b/packages/rs-drive-abci/src/abci/handler/process_proposal.rs @@ -12,6 +12,7 @@ use crate::platform_types::block_execution_outcome; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::platform_types::state_transitions_processing_result::StateTransitionExecutionResult; use crate::rpc::core::CoreRPCLike; +use dpp::dashcore::Network; use dpp::version::TryIntoPlatformVersioned; use drive::grovedb_storage::Error::RocksDBError; use tenderdash_abci::proto::abci as proto; @@ -179,6 +180,48 @@ where let platform_state = app.platform().state.load(); + // Verify that Platform State corresponds to Drive commited state + let platform_state_app_hash = platform_state + .last_committed_block_app_hash() + .unwrap_or_default(); + + let grove_version = &platform_state + .current_platform_version()? + .drive + .grove_version; + + let drive_storage_root_hash = app + .platform() + .drive + .grove + .root_hash(None, grove_version) + .unwrap()?; + + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state and updating the cache. + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable these fixes at the block when we consider the state is consistent. + let config = &app.platform().config; + + #[allow(clippy::collapsible_if)] + if !(app.platform().config.network == Network::Dash + && config.abci.chain_id == "evo1" + && request.height < 33000) + { + // App hash in memory must be equal to app hash on disk + if drive_storage_root_hash != platform_state_app_hash { + // We panic because we can't recover from this situation. 
+ // Better to restart the Drive, so we might self-heal the node + // reloading state form the disk + panic!( + "drive and platform state app hash mismatch: drive_storage_root_hash: {:?}, platform_state_app_hash: {:?}", + drive_storage_root_hash, platform_state_app_hash + ); + } + } + let starting_platform_version = platform_state.current_platform_version()?; // Running the proposal executes all the state transitions for the block From 306b86cb35fce29bec2c94a660c3923bf13bedd6 Mon Sep 17 00:00:00 2001 From: QuantumExplorer Date: Mon, 4 Nov 2024 12:31:37 +0100 Subject: [PATCH 03/14] fix(drive): apply batch is not using transaction in `remove_all_votes_given_by_identities` (#2309) Co-authored-by: Ivan Shumkov --- .../mod.rs | 3 +++ .../v0/mod.rs | 5 ++++ .../voting/run_dao_platform_events/v0/mod.rs | 1 + .../state_transitions/masternode_vote/mod.rs | 1 + .../mod.rs | 8 +++++++ .../v0/mod.rs | 24 +++++++++++++++++-- 6 files changed, 40 insertions(+), 2 deletions(-) diff --git a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs index 8328eb0fc1..ffd97c4292 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/mod.rs @@ -3,6 +3,7 @@ use crate::error::Error; use crate::platform_types::platform::Platform; use crate::platform_types::platform_state::PlatformState; use crate::rpc::core::CoreRPCLike; +use dpp::block::block_info::BlockInfo; use dpp::version::PlatformVersion; use drive::grovedb::TransactionArg; @@ -14,6 +15,7 @@ where /// Removes the votes for removed masternodes pub(in crate::execution) fn remove_votes_for_removed_masternodes( &self, + block_info: &BlockInfo, last_committed_platform_state: &PlatformState, block_platform_state: &PlatformState, transaction: TransactionArg, @@ -26,6 +28,7 @@ where .remove_votes_for_removed_masternodes { 0 => self.remove_votes_for_removed_masternodes_v0( + block_info, last_committed_platform_state, block_platform_state, transaction, diff --git a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs index b0081570c7..04931a3928 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/voting/remove_votes_for_removed_masternodes/v0/mod.rs @@ -3,6 +3,7 @@ use crate::platform_types::platform::Platform; use crate::platform_types::platform_state::v0::PlatformStateV0Methods; use crate::platform_types::platform_state::PlatformState; use crate::rpc::core::CoreRPCLike; +use dpp::block::block_info::BlockInfo; use dpp::dashcore::hashes::Hash; use dpp::version::PlatformVersion; use drive::grovedb::TransactionArg; @@ -14,6 +15,7 @@ where /// Removes the votes for removed masternodes pub(super) fn remove_votes_for_removed_masternodes_v0( &self, + block_info: &BlockInfo, last_committed_platform_state: &PlatformState, block_platform_state: &PlatformState, transaction: TransactionArg, @@ -29,6 +31,9 @@ where .iter() .map(|pro_tx_hash| pro_tx_hash.as_byte_array().to_vec()) .collect(), + block_info.height, + self.config.network, + self.config.abci.chain_id.as_str(), transaction, 
platform_version, )?; diff --git a/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs b/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs index 2ea9357af1..57fbe635b2 100644 --- a/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs +++ b/packages/rs-drive-abci/src/execution/platform_events/voting/run_dao_platform_events/v0/mod.rs @@ -21,6 +21,7 @@ where // Remove any votes that self.remove_votes_for_removed_masternodes( + block_info, last_committed_platform_state, block_platform_state, transaction, diff --git a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs index fe7d8095b8..9d0394c442 100644 --- a/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs +++ b/packages/rs-drive-abci/src/execution/validation/state_transition/state_transitions/masternode_vote/mod.rs @@ -11287,6 +11287,7 @@ mod tests { platform .remove_votes_for_removed_masternodes( + &BlockInfo::default(), &platform_state_before_masternode_identity_removals, &block_platform_state, Some(&transaction), diff --git a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs index 0f5e0d9604..f93d92a424 100644 --- a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs +++ b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/mod.rs @@ -5,6 +5,8 @@ use crate::drive::Drive; use crate::error::drive::DriveError; use crate::error::Error; +use dpp::dashcore::Network; +use dpp::prelude::BlockHeight; use dpp::version::PlatformVersion; use grovedb::TransactionArg; @@ -14,6 +16,9 @@ impl Drive { pub fn remove_all_votes_given_by_identities( &self, identity_ids_as_byte_arrays: Vec>, + block_height: BlockHeight, + network: Network, + chain_id: &str, transaction: TransactionArg, platform_version: &PlatformVersion, ) -> Result<(), Error> { @@ -26,6 +31,9 @@ impl Drive { { 0 => self.remove_all_votes_given_by_identities_v0( identity_ids_as_byte_arrays, + block_height, + network, + chain_id, transaction, platform_version, ), diff --git a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs index 3c36b0ec64..81b3d0fab7 100644 --- a/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs +++ b/packages/rs-drive/src/drive/votes/cleanup/remove_all_votes_given_by_identities/v0/mod.rs @@ -11,7 +11,8 @@ use crate::drive::votes::paths::{ use crate::drive::votes::storage_form::contested_document_resource_reference_storage_form::ContestedDocumentResourceVoteReferenceStorageForm; use crate::query::QueryItem; use crate::util::grove_operations::BatchDeleteApplyType; -use dpp::prelude::Identifier; +use dpp::dashcore::Network; +use dpp::prelude::{BlockHeight, Identifier}; use dpp::version::PlatformVersion; use grovedb::query_result_type::QueryResultType::QueryPathKeyElementTrioResultType; use grovedb::{PathQuery, Query, SizedQuery, TransactionArg}; @@ -22,6 +23,9 @@ impl Drive { pub(super) fn remove_all_votes_given_by_identities_v0( &self, identity_ids_as_byte_arrays: Vec>, + block_height: 
BlockHeight, + network: Network, + chain_id: &str, transaction: TransactionArg, platform_version: &PlatformVersion, ) -> Result<(), Error> { @@ -112,9 +116,25 @@ impl Drive { } if !deletion_batch.is_empty() { + // We had a sequence of errors on the mainnet started since block 32326. + // We got RocksDB's "transaction is busy" error because of a bug (https://github.com/dashpay/platform/pull/2309). + // Due to another bug in Tenderdash (https://github.com/dashpay/tenderdash/pull/966), + // validators just proceeded to the next block partially committing the state + // and updating the cache (https://github.com/dashpay/platform/pull/2305). + // Full nodes are stuck and proceeded after re-sync. + // For the mainnet chain, we enable this fix at the block when we consider the state is consistent. + let transaction = + if network == Network::Dash && chain_id == "evo1" && block_height < 33000 { + // Old behaviour on mainnet + None + } else { + // We should use transaction + transaction + }; + self.apply_batch_low_level_drive_operations( None, - None, + transaction, deletion_batch, &mut vec![], &platform_version.drive, From 99fe5fa402ddcae329795bb29eaf73fd5044b4f2 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Mon, 4 Nov 2024 23:16:36 +0700 Subject: [PATCH 04/14] add comment --- packages/strategy-tests/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/strategy-tests/src/lib.rs b/packages/strategy-tests/src/lib.rs index 65e8a51f85..c2d81ea990 100644 --- a/packages/strategy-tests/src/lib.rs +++ b/packages/strategy-tests/src/lib.rs @@ -1312,7 +1312,7 @@ impl Strategy { &sender.0, &recipient.0, identity_nonce_counter, - signer, // Does this mean the loaded identity must be the sender since we're signing with it? + signer, // This means the TUI loaded identity must always be the sender since we're always signing with it for now transfer_info.amount, ); operations.push(state_transition); From 0d3e091a5591fde03dbfd8e501864b5ffa2e3602 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 13:07:49 +0700 Subject: [PATCH 05/14] comment --- packages/rs-drive-abci/tests/strategy_tests/strategy.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs index 4d1a7ccb62..bf3235ea78 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/strategy.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/strategy.rs @@ -404,6 +404,8 @@ impl NetworkStrategy { ); state_transitions.append(&mut new_transitions); } + // Extend the state transitions with the strategy's hard coded start identities + // Filtering out the ones that have no create transition if !self.strategy.start_identities.hard_coded.is_empty() { state_transitions.extend( self.strategy.start_identities.hard_coded.iter().filter_map( From e4215145ad7889300810472c5a91f15ae987c1eb Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 13:16:38 +0700 Subject: [PATCH 06/14] use into_iter instead of iter --- .../tests/strategy_tests/main.rs | 4 ++-- .../tests/strategy_tests/voting_tests.rs | 20 +++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/rs-drive-abci/tests/strategy_tests/main.rs b/packages/rs-drive-abci/tests/strategy_tests/main.rs index f2122d627e..03bb92bc1a 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/main.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/main.rs @@ -2603,8 +2603,8 @@ mod tests { &mut rng, platform_version, ) - 
.iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let strategy = NetworkStrategy { diff --git a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs index 2264a4cd5f..83834520c0 100644 --- a/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs +++ b/packages/rs-drive-abci/tests/strategy_tests/voting_tests.rs @@ -87,8 +87,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -375,8 +375,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -651,8 +651,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -1008,8 +1008,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform @@ -1377,8 +1377,8 @@ mod tests { &mut rng, platform_version, ) - .iter() - .map(|(identity, transition)| (identity.clone(), Some(transition.clone()))) + .into_iter() + .map(|(identity, transition)| (identity, Some(transition))) .collect(); let dpns_contract = platform From 3d941ec5ab7f947e53d41c68eb5758ff2e0bd074 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 13:31:07 +0700 Subject: [PATCH 07/14] use current identities instead of hardcoded start identities --- packages/strategy-tests/src/lib.rs | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/packages/strategy-tests/src/lib.rs b/packages/strategy-tests/src/lib.rs index c2d81ea990..efdb702a48 100644 --- a/packages/strategy-tests/src/lib.rs +++ b/packages/strategy-tests/src/lib.rs @@ -1290,33 +1290,32 @@ impl Strategy { // Generate state transition for identity transfer operation OperationType::IdentityTransfer(identity_transfer_info) => { for _ in 0..count { + // Handle the case where specific sender, recipient, and amount are provided if let Some(transfer_info) = identity_transfer_info { - let sender = self - .start_identities - .hard_coded + let sender = current_identities .iter() - .find(|(identity, _)| identity.id() == transfer_info.from) + .find(|identity| identity.id() == transfer_info.from) .expect( "Expected to find sender identity in hardcoded start identities", ); - let recipient = self - .start_identities - .hard_coded + let recipient = current_identities .iter() - .find(|(identity, _)| identity.id() == transfer_info.to) + .find(|identity| identity.id() == transfer_info.to) .expect( "Expected to find recipient identity in hardcoded start identities", ); let state_transition = create_identity_credit_transfer_transition( - &sender.0, - &recipient.0, + &sender, + &recipient, identity_nonce_counter, - signer, // This means the TUI loaded identity must always be the sender since we're always signing with it for now + 
signer, // This means in the TUI, the loaded identity must always be the sender since we're always signing with it for now transfer_info.amount, ); operations.push(state_transition); } else if current_identities.len() > 1 { + // Handle the case where no sender, recipient, and amount are provided + let identities_count = current_identities.len(); if identities_count == 0 { break; From 4bc0a653de328adacae4e8e85ab6971c42984786 Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 16:19:48 +0700 Subject: [PATCH 08/14] let transfer keys be any security level or key type --- packages/strategy-tests/src/transitions.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/strategy-tests/src/transitions.rs b/packages/strategy-tests/src/transitions.rs index c77b51e290..a9cb113d83 100644 --- a/packages/strategy-tests/src/transitions.rs +++ b/packages/strategy-tests/src/transitions.rs @@ -824,8 +824,8 @@ pub fn create_identity_credit_transfer_transition( let identity_public_key = identity .get_first_public_key_matching( Purpose::TRANSFER, - HashSet::from([SecurityLevel::CRITICAL]), - HashSet::from([KeyType::ECDSA_SECP256K1, KeyType::BLS12_381]), + SecurityLevel::full_range().into(), + KeyType::all_key_types().into(), false, ) .expect("expected to get a signing key"); From dc4882725f6ed6f7a6e4bc8835d84d95bd4ec41f Mon Sep 17 00:00:00 2001 From: pauldelucia Date: Tue, 5 Nov 2024 17:44:37 +0700 Subject: [PATCH 09/14] fix --- packages/strategy-tests/src/transitions.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/strategy-tests/src/transitions.rs b/packages/strategy-tests/src/transitions.rs index a9cb113d83..c77b51e290 100644 --- a/packages/strategy-tests/src/transitions.rs +++ b/packages/strategy-tests/src/transitions.rs @@ -824,8 +824,8 @@ pub fn create_identity_credit_transfer_transition( let identity_public_key = identity .get_first_public_key_matching( Purpose::TRANSFER, - SecurityLevel::full_range().into(), - KeyType::all_key_types().into(), + HashSet::from([SecurityLevel::CRITICAL]), + HashSet::from([KeyType::ECDSA_SECP256K1, KeyType::BLS12_381]), false, ) .expect("expected to get a signing key"); From ae97f47b72a9e5a1c914469107865cb971929878 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Wed, 6 Nov 2024 21:56:50 +0700 Subject: [PATCH 10/14] ci: run devcontainers workflow only on push to master (#2295) --- .github/workflows/prebuild-devcontainers.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/prebuild-devcontainers.yml b/.github/workflows/prebuild-devcontainers.yml index 794fa3d4a5..1825985c82 100644 --- a/.github/workflows/prebuild-devcontainers.yml +++ b/.github/workflows/prebuild-devcontainers.yml @@ -7,6 +7,8 @@ on: - '.github/workflows/prebuild-devcontainers.yml' - rust-toolchain.toml - Dockerfile + branches: + - master workflow_dispatch: concurrency: From 48cca1a319505ff7abda72c9a3bb87883d6e8a07 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Wed, 6 Nov 2024 21:57:07 +0700 Subject: [PATCH 11/14] ci: do not run test on push (#2308) --- .github/workflows/tests.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c823d0cd06..cca5f1c471 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -7,12 +7,6 @@ on: branches: - master - 'v[0-9]+\.[0-9]+-dev' - push: - branches: - - master - - 'v[0-9]+\.[0-9]+-dev' - schedule: - - cron: "30 4 * * *" concurrency: group: ${{ github.workflow }}-${{ github.ref }} From 
5c80069de3937c439b9646321df3951c57406e32 Mon Sep 17 00:00:00 2001 From: lklimek <842586+lklimek@users.noreply.github.com> Date: Fri, 8 Nov 2024 10:50:04 +0100 Subject: [PATCH 12/14] ci: use prebuilt librocksdb in github actions (#2316) --- .github/actions/librocksdb/action.yaml | 55 ++++++++++++++++++++++ .github/workflows/cached.yml | 23 +++++++++ .github/workflows/docs.yml | 2 +- .github/workflows/manage-runs.yml | 2 +- .github/workflows/pr.yml | 2 +- .github/workflows/release-docker-image.yml | 2 +- .github/workflows/release.yml | 6 +-- .github/workflows/tests-codeql.yml | 2 +- .github/workflows/tests-js-package.yml | 4 +- .github/workflows/tests-rs-package.yml | 37 +++++++++++++-- .github/workflows/tests.yml | 40 ++++++++++++---- 11 files changed, 152 insertions(+), 23 deletions(-) create mode 100644 .github/actions/librocksdb/action.yaml create mode 100644 .github/workflows/cached.yml diff --git a/.github/actions/librocksdb/action.yaml b/.github/actions/librocksdb/action.yaml new file mode 100644 index 0000000000..895185f43d --- /dev/null +++ b/.github/actions/librocksdb/action.yaml @@ -0,0 +1,55 @@ +--- +# This action builds and caches librocksdb. If we find that this solution consumes too much time, we can consider +# prebuilding librocksdb outside of the pipeline (eg. in the grovedb release process), publish as an artifact, and +# download it in the pipeline. +name: "librocksdb" +description: "Build and install librocksdb" +inputs: + version: + description: RocksDB version, eg. "8.10.2" + required: false + default: "8.10.2" + bucket: + description: S3 bucket to use for caching + required: false + default: multi-runner-cache-x1xibo9c + force: + description: Force rebuild + required: false + default: "false" + +runs: + using: composite + steps: + # Cache librocksdb using s3 bucket + - name: Restore cached librocksdb from S3 + id: librocksdb-cache + uses: strophy/actions-cache@opendal-update + with: + bucket: ${{ inputs.bucket }} + path: /opt/rocksdb + key: librocksdb/${{ inputs.version }}/${{ runner.os }}/${{ runner.arch }} + + - if: ${{ steps.librocksdb-cache.outputs.cache-hit != 'true' || inputs.force == 'true' }} + shell: bash + name: Build librocksdb + run: | + set -ex + WORKDIR=/tmp/rocksdb-build + mkdir -p ${WORKDIR}/rocksdb + mkdir -p /opt/rocksdb/usr/local/lib/ + pushd ${WORKDIR}/rocksdb + + # building rocksdb + git clone https://github.com/facebook/rocksdb.git -b v${{ inputs.version }} --depth 1 . + make -j$(nproc) static_lib + make DESTDIR=/opt/rocksdb install-static + set +x + + echo Done. 
+ echo Configuration: + echo + echo "ROCKSDB_STATIC='/opt/rocksdb/usr/local/lib/librocksdb.a'" + echo "ROCKSDB_LIB_DIR='/opt/rocksdb/usr/local/lib'" + + popd diff --git a/.github/workflows/cached.yml b/.github/workflows/cached.yml new file mode 100644 index 0000000000..12eed8dd3e --- /dev/null +++ b/.github/workflows/cached.yml @@ -0,0 +1,23 @@ +--- +name: Rebuild cached dependencies + +on: + workflow_dispatch: +jobs: + build-rust-deps: + name: Prebuild and cache some Rust dependencies + runs-on: ubuntu-24.04 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: ${{ secrets.AWS_REGION }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Precompile librocksdb + uses: ./.github/actions/librocksdb + with: + force: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 12f512b720..d8e24ef706 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -7,7 +7,7 @@ on: jobs: build: name: Deploy docs - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Checkout main uses: actions/checkout@v4 diff --git a/.github/workflows/manage-runs.yml b/.github/workflows/manage-runs.yml index 2c07af5b4f..29bc43aa53 100644 --- a/.github/workflows/manage-runs.yml +++ b/.github/workflows/manage-runs.yml @@ -7,7 +7,7 @@ on: jobs: cancel-merged-or-closed-pr-runs: name: Cancel runs for merged or closed PRs - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: octokit/request-action@v2.x id: get_active_workflows diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index d475a3eef8..e75151c841 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -10,7 +10,7 @@ on: jobs: pr-title: name: PR title - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 steps: - name: Validate conventional PR title uses: amannn/action-semantic-pull-request@v5 diff --git a/.github/workflows/release-docker-image.yml b/.github/workflows/release-docker-image.yml index 5c56ade272..54e4cb465a 100644 --- a/.github/workflows/release-docker-image.yml +++ b/.github/workflows/release-docker-image.yml @@ -89,7 +89,7 @@ jobs: publish-manifest: name: Publish image tags needs: build-image - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Download digests uses: actions/download-artifact@v3 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index dba5d592f5..4c43639d3a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -208,11 +208,11 @@ jobs: matrix: include: - package_type: tarballs - os: ubuntu-22.04 + os: ubuntu-24.04 - package_type: win - os: ubuntu-22.04 + os: ubuntu-24.04 - package_type: deb - os: ubuntu-22.04 + os: ubuntu-24.04 - package_type: macos os: macos-14 steps: diff --git a/.github/workflows/tests-codeql.yml b/.github/workflows/tests-codeql.yml index d00a66c8df..78a3f53aeb 100644 --- a/.github/workflows/tests-codeql.yml +++ b/.github/workflows/tests-codeql.yml @@ -4,7 +4,7 @@ on: jobs: codeql: name: Run Code QL - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: actions: read contents: read diff --git a/.github/workflows/tests-js-package.yml b/.github/workflows/tests-js-package.yml index ef508ec073..bdffc8cd41 100644 --- a/.github/workflows/tests-js-package.yml +++ b/.github/workflows/tests-js-package.yml @@ -17,7 +17,7 @@ on: jobs: lint: name: Linting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: id-token: write contents: read @@ -51,7 +51,7 
@@ jobs: test: name: Tests - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: id-token: write contents: read diff --git a/.github/workflows/tests-rs-package.yml b/.github/workflows/tests-rs-package.yml index e666491ebc..a05e68ee6c 100644 --- a/.github/workflows/tests-rs-package.yml +++ b/.github/workflows/tests-rs-package.yml @@ -12,7 +12,7 @@ on: lint-runner: description: Runner for linting. Must be JSON valid string. type: string - default: '"ubuntu-22.04"' + default: '"ubuntu-24.04"' check-each-feature: description: If true, try to build each individual feature for this crate type: boolean @@ -42,6 +42,9 @@ jobs: with: components: clippy + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - uses: clechasseur/rs-clippy-check@v3 with: args: --package ${{ inputs.package }} --all-features --locked -- --no-deps @@ -50,10 +53,12 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" formatting: name: Formatting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 timeout-minutes: 5 steps: - name: Check out repo @@ -65,12 +70,19 @@ jobs: components: rustfmt cache: false + # This step doesn't need librocksdb, so we don't install it + - name: Check formatting + env: + RUSTC_WRAPPER: sccache + SCCACHE_BUCKET: multi-runner-cache-x1xibo9c + SCCACHE_REGION: ${{ secrets.AWS_REGION }} + SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu run: cargo fmt --check --package=${{ inputs.package }} unused_deps: name: Unused dependencies - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 permissions: id-token: write contents: read @@ -89,6 +101,9 @@ jobs: - name: Setup Rust uses: ./.github/actions/rust + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Get crate ${{ inputs.package }} info id: crate_info uses: ./.github/actions/crate_info @@ -102,12 +117,14 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" with: args: ${{ steps.crate_info.outputs.cargo_manifest_dir }} detect_structure_changes: name: Detect immutable structure changes - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 # FIXME: as we use `gh pr view` below, this check can only # run on pull requests. We should find a way to run it # when manual triggers are used. 
@@ -184,6 +201,9 @@ jobs: - name: Setup Rust uses: ./.github/actions/rust + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Run tests run: cargo test --package=${{ inputs.package }} --all-features --locked env: @@ -191,6 +211,8 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" check_each_feature: name: Check each feature @@ -199,7 +221,7 @@ jobs: if: ${{ inputs.check-each-feature }} steps: - name: Check out repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Configure AWS credentials and bucket region uses: aws-actions/configure-aws-credentials@v4 @@ -211,6 +233,9 @@ jobs: - name: Setup Rust uses: ./.github/actions/rust + - name: Install librocksdb + uses: ./.github/actions/librocksdb + - name: Get crate ${{ runner.arch }} info id: crate_info uses: ./.github/actions/crate_info @@ -223,6 +248,8 @@ jobs: SCCACHE_BUCKET: multi-runner-cache-x1xibo9c SCCACHE_REGION: ${{ secrets.AWS_REGION }} SCCACHE_S3_KEY_PREFIX: ${{ runner.os }}/sccache/${{ runner.arch }}/linux-gnu + ROCKSDB_STATIC: "/opt/rocksdb/usr/local/lib/librocksdb.a" + ROCKSDB_LIB_DIR: "/opt/rocksdb/usr/local/lib" run: | echo Verify all features disabled set -ex diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index cca5f1c471..5f6cec2c08 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,6 +2,11 @@ name: Tests on: workflow_dispatch: + inputs: + rebuild-deps: + description: "Rebuild cached Rust dependencies" + required: false + default: "false" pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: @@ -16,13 +21,13 @@ jobs: changes: name: Determine changed packages if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 outputs: js-packages: ${{ steps.filter-js.outputs.changes }} rs-packages: ${{ steps.filter-rs.outputs.changes }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 @@ -36,6 +41,22 @@ jobs: with: filters: .github/package-filters/rs-packages.yml + build-rust-deps: + name: Prebuild and cache some Rust dependencies + runs-on: ubuntu-24.04 + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: ${{ secrets.AWS_REGION }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 1 + - name: Precompile librocksdb + uses: ./.github/actions/librocksdb + build-js: name: Build JS packages if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} @@ -69,6 +90,7 @@ jobs: name: Rust packages needs: - changes + - build-rust-deps secrets: inherit strategy: fail-fast: false @@ -77,17 +99,19 @@ jobs: uses: ./.github/workflows/tests-rs-package.yml with: package: ${{ matrix.rs-package }} - # lint-runner: ${{ contains(fromJSON('["drive-abci", "drive"]'), matrix.rs-package) && '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' || '"ubuntu-22.04"' }} + # lint-runner: ${{ contains(fromJSON('["drive-abci", "drive"]'), matrix.rs-package) && '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' || 
'"ubuntu-24.04"' }} # FIXME: Clippy fails on github hosted runners, most likely due to RAM usage. Using self-hosted runners for now. - lint-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + # lint-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + lint-runner: '["ubuntu-24.04"]' # Run drive tests on self-hosted 4x - test-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + # test-runner: '[ "self-hosted", "linux", "arm64", "ubuntu-platform" ]' + test-runner: '["ubuntu-24.04"]' check-each-feature: ${{ contains(fromJSON('["dash-sdk","rs-dapi-client","dapi-grpc","dpp","drive-abci"]'), matrix.rs-package) }} rs-crates-security: name: Rust crates security audit if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 @@ -116,7 +140,7 @@ jobs: js-deps-versions: name: JS dependency versions check if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 @@ -135,7 +159,7 @@ jobs: js-npm-security: name: JS NPM security audit if: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || !github.event.pull_request.draft }} - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Check out repo uses: actions/checkout@v4 From 48d54cbe695945d059c2d5105702c346a013ea68 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Fri, 8 Nov 2024 21:30:29 +0700 Subject: [PATCH 13/14] Revert "ci: do not run test on push (#2308)" This reverts commit 48cca1a319505ff7abda72c9a3bb87883d6e8a07. 
--- .github/workflows/tests.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5f6cec2c08..0bc10271f2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -12,6 +12,12 @@ on: branches: - master - 'v[0-9]+\.[0-9]+-dev' + push: + branches: + - master + - 'v[0-9]+\.[0-9]+-dev' + schedule: + - cron: "30 4 * * *" concurrency: group: ${{ github.workflow }}-${{ github.ref }} From 79bb78985b7b17ef14d24aca9b5059c1fd096283 Mon Sep 17 00:00:00 2001 From: Ivan Shumkov Date: Tue, 19 Nov 2024 19:58:57 +0700 Subject: [PATCH 14/14] build: update dapi grpc build --- .github/workflows/cached.yml | 23 ----------------------- .github/workflows/tests.yml | 6 ++++++ 2 files changed, 6 insertions(+), 23 deletions(-) delete mode 100644 .github/workflows/cached.yml diff --git a/.github/workflows/cached.yml b/.github/workflows/cached.yml deleted file mode 100644 index 12eed8dd3e..0000000000 --- a/.github/workflows/cached.yml +++ /dev/null @@ -1,23 +0,0 @@ ---- -name: Rebuild cached dependencies - -on: - workflow_dispatch: -jobs: - build-rust-deps: - name: Prebuild and cache some Rust dependencies - runs-on: ubuntu-24.04 - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_REGION: ${{ secrets.AWS_REGION }} - - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 1 - - name: Precompile librocksdb - uses: ./.github/actions/librocksdb - with: - force: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 5f6cec2c08..0bc10271f2 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -12,6 +12,12 @@ on: branches: - master - 'v[0-9]+\.[0-9]+-dev' + push: + branches: + - master + - 'v[0-9]+\.[0-9]+-dev' + schedule: + - cron: "30 4 * * *" concurrency: group: ${{ github.workflow }}-${{ github.ref }}
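Usage sketch for the strategy changes above (PATCH 01, 04 and 07): the snippet below shows roughly how a test strategy could drive the new `OperationType::IdentityTransfer(Option<IdentityTransferInfo>)` variant. Only the `IdentityTransferInfo { from, to, amount }` fields, the `Operation`/`Frequency` shape, and the `None` fallback (random sender and recipient with a fixed 300000-credit transfer) come from the patches; the import paths, the `sender_id`/`recipient_id` values, and the chosen frequency range are assumptions made purely for illustration.

use dpp::prelude::Identifier;
// Module paths below are assumed, not taken from the patches.
use strategy_tests::frequency::Frequency;
use strategy_tests::operations::{IdentityTransferInfo, Operation, OperationType};

// Build an operation that always transfers a fixed amount between two known
// identities. Passing `None` instead keeps the randomized behaviour: the
// strategy picks a random sender and recipient from the current identities
// and transfers 300000 credits.
fn hardcoded_transfer_op(sender_id: Identifier, recipient_id: Identifier) -> Operation {
    Operation {
        op_type: OperationType::IdentityTransfer(Some(IdentityTransferInfo {
            from: sender_id,
            to: recipient_id,
            amount: 300_000, // credits
        })),
        frequency: Frequency {
            times_per_block_range: 1..3, // 1 or 2 per block, matching the range used in the tests above
            chance_per_block: None,
        },
    }
}

As the comment added in PATCH 04 and refined in PATCH 07 points out, the signer passed to the strategy signs every transfer, so the hardcoded sender must be an identity whose transfer key that signer holds.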