From 4fa2b33beef8e1256fa167d5324bd6a76ec2cffa Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Wed, 6 Jan 2021 17:12:51 +0800 Subject: [PATCH 1/8] [config] config Optimization and synchronization --- Cargo.lock | 98 ++++++++++++++++++++- commons/system/Cargo.toml | 11 +++ commons/system/src/lib.rs | 14 +++ config/Cargo.toml | 2 + config/src/account_vault_config.rs | 5 +- config/src/lib.rs | 4 +- config/src/logger_config.rs | 33 ++++--- config/src/metrics_config.rs | 14 +-- config/src/miner_config.rs | 33 +++++-- config/src/network_config.rs | 2 +- config/src/rpc_config.rs | 33 ++++++- config/src/storage_config.rs | 19 +++- config/src/sync_config.rs | 55 ++++-------- config/src/txpool_config.rs | 20 ++++- miner/src/generate_block_event_pacemaker.rs | 4 +- miner/src/lib.rs | 5 +- miner/tests/miner_test.rs | 4 +- node/src/node.rs | 6 +- rpc/client/tests/client_server_test.rs | 8 +- sync/tests/full_sync_test.rs | 6 +- sync/tests/test_sync/mod.rs | 2 +- sync/tests/txn_sync_test.rs | 4 +- 22 files changed, 279 insertions(+), 103 deletions(-) create mode 100644 commons/system/Cargo.toml create mode 100644 commons/system/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 907b80778f..4a4858a416 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -732,7 +732,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" dependencies = [ "either", - "radium", + "radium 0.3.0", +] + +[[package]] +name = "bitvec" +version = "0.19.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7ba35e9565969edb811639dbebfe34edc0368e472c5018474c8eb2543397f81" +dependencies = [ + "funty", + "radium 0.5.3", + "tap", + "wyz", ] [[package]] @@ -959,6 +971,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad1f8e949d755f9d79112b5bb46938e0ef9d3804a0b16dfab13aafcaa5f0fa72" +[[package]] +name = "bytesize" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81a18687293a1546b67c246452202bbbf143d239cb43494cc163da14979082da" + [[package]] name = "cache-padded" version = "1.1.1" @@ -1001,7 +1019,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27" dependencies = [ - "nom", + "nom 5.1.2", ] [[package]] @@ -2431,6 +2449,12 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + [[package]] name = "futures" version = "0.1.30" @@ -3634,6 +3658,19 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" +[[package]] +name = "lexical-core" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db65c6da02e61f55dae90a0ae427b2a5f6b3e8db09f58d10efab23af92592616" +dependencies = [ + "arrayvec 0.5.2", + "bitflags", + "cfg-if 0.1.10", + "ryu", + "static_assertions", +] + [[package]] name = "libc" version = "0.2.81" @@ -4816,6 +4853,18 @@ dependencies = [ "version_check 0.9.2", ] +[[package]] +name = "nom" +version = "6.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "88034cfd6b4a0d54dd14f4a507eceee36c0b70e5a02236c4e4df571102be17f0" +dependencies = [ + "bitvec 0.19.4", + "lexical-core", + "memchr", + "version_check 0.9.2", +] + [[package]] name = "nonzero_ext" version = "0.2.0" @@ -5085,7 +5134,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c740e5fbcb6847058b40ac7e5574766c6388f585e184d769910fe0d3a2ca861" dependencies = [ "arrayvec 0.5.2", - "bitvec", + "bitvec 0.17.4", "byte-slice-cast", "parity-scale-codec-derive", "serde", @@ -5829,6 +5878,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + [[package]] name = "rand" version = "0.3.23" @@ -7299,6 +7354,7 @@ name = "starcoin-config" version = "0.9.4" dependencies = [ "anyhow", + "clap 2.33.3", "diem-temppath", "dirs 3.0.1", "git-version", @@ -7313,6 +7369,7 @@ dependencies = [ "serde_json", "starcoin-crypto", "starcoin-logger", + "starcoin-system", "starcoin-types", "starcoin-vm-types", "structopt 0.3.21", @@ -8381,6 +8438,14 @@ dependencies = [ "stream-task", ] +[[package]] +name = "starcoin-system" +version = "0.9.4" +dependencies = [ + "anyhow", + "systemstat", +] + [[package]] name = "starcoin-traits" version = "0.9.4" @@ -8860,6 +8925,27 @@ dependencies = [ "unicode-xid 0.2.1", ] +[[package]] +name = "systemstat" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1f726379f0c746ad672b7e932bd390b5faae073f502e37d314950ced761ac3e" +dependencies = [ + "bytesize", + "chrono", + "lazy_static", + "libc", + "nom 6.0.1", + "time", + "winapi 0.3.9", +] + +[[package]] +name = "tap" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36474e732d1affd3a6ed582781b3683df3d0563714c59c39591e8ff707cf078e" + [[package]] name = "target-spec" version = "0.5.0" @@ -10230,6 +10316,12 @@ dependencies = [ "winapi-build", ] +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + [[package]] name = "x" version = "0.1.0" diff --git a/commons/system/Cargo.toml b/commons/system/Cargo.toml new file mode 100644 index 0000000000..0855e84801 --- /dev/null +++ b/commons/system/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "starcoin-system" +version = "0.9.4" +authors = ["Starcoin Core Dev "] +license = "Apache-2.0" +publish = false +edition = "2018" + +[dependencies] +systemstat ="0.1.6" +anyhow = "1.0.37" \ No newline at end of file diff --git a/commons/system/src/lib.rs b/commons/system/src/lib.rs new file mode 100644 index 0000000000..8cc36cc43a --- /dev/null +++ b/commons/system/src/lib.rs @@ -0,0 +1,14 @@ +// Copyright (c) The Starcoin Core Contributors +// SPDX-License-Identifier: Apache-2.0 + +use anyhow::Result; +use systemstat::{Platform, System}; + +pub fn get_free_mem_size() -> Result { + let sys = System::new(); + let free = match sys.memory() { + Ok(mem) => mem.free.as_u64(), + Err(_x) => 0u64, + }; + Ok(free) +} diff --git a/config/Cargo.toml b/config/Cargo.toml index d23e534b06..3006f9f7c9 100644 --- a/config/Cargo.toml +++ b/config/Cargo.toml @@ -7,6 +7,7 @@ publish = false edition = 
"2018" [dependencies] +clap = "2.33.3" dirs = "3" anyhow = "1.0.37" thiserror = "1.0" @@ -27,3 +28,4 @@ starcoin-vm-types = { path = "../vm/types" } network-p2p-types = { path = "../network-p2p/types"} starcoin-logger = {path = "../commons/logger", package="starcoin-logger"} diem-temppath = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +starcoin-system = {path = "../commons/system", package="starcoin-system"} diff --git a/config/src/account_vault_config.rs b/config/src/account_vault_config.rs index f9ffea7f0c..2ae0b46dee 100644 --- a/config/src/account_vault_config.rs +++ b/config/src/account_vault_config.rs @@ -5,10 +5,13 @@ use crate::{BaseConfig, ConfigModule, StarcoinOpt}; use anyhow::Result; use serde::{Deserialize, Serialize}; use std::path::PathBuf; +use structopt::StructOpt; -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct AccountVaultConfig { + #[structopt(long = "vault-dir", parse(from_os_str), conflicts_with("vault-dir"))] + /// Account vault dir config. dir: PathBuf, #[serde(skip)] absolute_dir: Option, diff --git a/config/src/lib.rs b/config/src/lib.rs index ef33738f03..3ce9654b62 100644 --- a/config/src/lib.rs +++ b/config/src/lib.rs @@ -190,7 +190,7 @@ pub struct StarcoinOpt { /// Node network private key file, only work for first init. pub node_key_file: Option, - #[structopt(long = "sync-mode", short = "s")] + #[structopt(long = "sync-mode", short = "s", possible_values = &SyncMode::variants(), case_insensitive = false)] /// Sync mode. Included value(full, fast, light). pub sync_mode: Option, @@ -444,9 +444,7 @@ impl NodeConfig { } } }; - config.after_load(opt, &base)?; - save_config(&config, &config_file_path)?; Ok(config) } diff --git a/config/src/logger_config.rs b/config/src/logger_config.rs index fcb26e049b..7bb797489f 100644 --- a/config/src/logger_config.rs +++ b/config/src/logger_config.rs @@ -5,15 +5,20 @@ use crate::{BaseConfig, ConfigModule, StarcoinOpt}; use anyhow::Result; use serde::{Deserialize, Serialize}; use std::path::PathBuf; +use structopt::StructOpt; static LOGGER_FILE_NAME: &str = "starcoin.log"; -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct LoggerConfig { - pub enable_stderr: bool, - pub enable_file: bool, + #[structopt(name = "disable-stderr", long, help = "disable stderr logger")] + pub disable_stderr: bool, + #[structopt(name = "disable-file", long, help = "disable file logger")] + pub disable_file: bool, + #[structopt(name = "max-file-size", long, default_value = "1073741824")] pub max_file_size: u64, + #[structopt(name = "max-backup", long, default_value = "7")] pub max_backup: u32, #[serde(skip)] log_path: Option, @@ -28,35 +33,35 @@ impl LoggerConfig { } pub fn enable_file(&self) -> bool { - self.enable_file && self.log_path.is_some() + (!self.disable_file) && self.log_path.is_some() } } impl ConfigModule for LoggerConfig { fn default_with_opt(opt: &StarcoinOpt, base: &BaseConfig) -> Result { - let enable_stderr = !opt.disable_std_log; - let enable_file = !opt.disable_file_log; + let disable_stderr = opt.disable_std_log; + let disable_file = opt.disable_file_log; Ok(if base.net.is_test() { Self { - enable_stderr, - enable_file, + disable_stderr, + disable_file, max_file_size: 10 * 1024 * 1024, max_backup: 1, log_path: None, } } else if 
base.net.is_dev() { Self { - enable_stderr, - enable_file, + disable_stderr, + disable_file, max_file_size: 10 * 1024 * 1024, max_backup: 2, log_path: None, } } else { Self { - enable_stderr, - enable_file, + disable_stderr, + disable_file, max_file_size: 1024 * 1024 * 1024, max_backup: 7, log_path: None, @@ -66,8 +71,8 @@ impl ConfigModule for LoggerConfig { fn after_load(&mut self, opt: &StarcoinOpt, base: &BaseConfig) -> Result<()> { self.log_path = Some(base.data_dir.join(LOGGER_FILE_NAME)); - self.enable_stderr = !opt.disable_std_log; - self.enable_file = !opt.disable_file_log; + self.disable_stderr = opt.disable_std_log; + self.disable_file = opt.disable_file_log; Ok(()) } } diff --git a/config/src/metrics_config.rs b/config/src/metrics_config.rs index a3bdf01b6a..254cfeb6ec 100644 --- a/config/src/metrics_config.rs +++ b/config/src/metrics_config.rs @@ -6,12 +6,16 @@ use crate::{ }; use anyhow::Result; use serde::{Deserialize, Serialize}; +use structopt::StructOpt; -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct MetricsConfig { - pub enable_metrics: bool, + #[structopt(name = "disable-metrics", long, help = "disable metrics")] + pub disable_metrics: bool, + #[structopt(name = "address", long, help = "address", default_value = "0.0.0.0")] pub address: String, + #[structopt(name = "metrics-port", long, default_value = "9101")] pub port: u16, } @@ -27,16 +31,14 @@ impl ConfigModule for MetricsConfig { DEFAULT_METRIC_SERVER_PORT }; Ok(Self { - enable_metrics: !opt.disable_metrics, + disable_metrics: opt.disable_metrics, address: "0.0.0.0".to_string(), port, }) } fn after_load(&mut self, opt: &StarcoinOpt, _base: &BaseConfig) -> Result<()> { - if opt.disable_metrics { - self.enable_metrics = false; - } + self.disable_metrics = opt.disable_metrics; Ok(()) } } diff --git a/config/src/miner_config.rs b/config/src/miner_config.rs index 3d311ed58d..6556e6b6eb 100644 --- a/config/src/miner_config.rs +++ b/config/src/miner_config.rs @@ -4,22 +4,39 @@ use crate::{BaseConfig, ConfigModule, StarcoinOpt}; use anyhow::Result; use serde::{Deserialize, Serialize}; +use structopt::StructOpt; -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct MinerConfig { - pub enable_mint_empty_block: bool, + #[structopt(long = "disable-mint-empty-block")] + pub disable_mint_empty_block: Option, + #[structopt(long = "block-gas-limit")] pub block_gas_limit: Option, - pub enable_miner_client: bool, + #[structopt(long = "disable-miner-client")] + pub disable_miner_client: bool, + #[structopt(flatten)] pub client_config: MinerClientConfig, } -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +impl MinerConfig { + pub fn is_disable_mint_empty_block(&self) -> bool { + if let Some(disable) = self.disable_mint_empty_block { + disable + } else { + false + } + } +} + +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct MinerClientConfig { pub server: Option, pub plugin_path: Option, + #[structopt(long = "thread-num")] pub thread_num: u16, + #[structopt(long = "enable-stderr")] #[serde(skip)] pub enable_stderr: bool, } @@ -33,9 +50,9 @@ impl ConfigModule for MinerConfig { .cloned() .unwrap_or_else(|| base.net.is_dev()); Ok(Self { - enable_mint_empty_block: !disable_mint_empty_block, + disable_mint_empty_block: 
Some(disable_mint_empty_block), block_gas_limit: None, - enable_miner_client: !opt.disable_miner_client, + disable_miner_client: opt.disable_miner_client, client_config: MinerClientConfig { server: None, plugin_path: None, @@ -52,12 +69,12 @@ impl ConfigModule for MinerConfig { .as_ref() .cloned() .unwrap_or_else(|| base.net.is_dev()); - self.enable_mint_empty_block = !disable_mint_empty_block; + self.disable_mint_empty_block = Some(disable_mint_empty_block); if let Some(thread) = opt.miner_thread { self.client_config.thread_num = thread; } if opt.disable_miner_client { - self.enable_miner_client = false; + self.disable_miner_client = true; } Ok(()) } diff --git a/config/src/network_config.rs b/config/src/network_config.rs index 705726d28e..1480c029fa 100644 --- a/config/src/network_config.rs +++ b/config/src/network_config.rs @@ -85,7 +85,7 @@ impl Default for NetworkRpcQuotaConfiguration { #[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] #[serde(deny_unknown_fields)] pub struct NetworkConfig { - // The address that this node is listening on for new connections. + /// The address that this node is listening on for new connections. pub listen: Multiaddr, #[serde(default)] pub seeds: Vec, diff --git a/config/src/rpc_config.rs b/config/src/rpc_config.rs index 08cf8f421a..776393cf13 100644 --- a/config/src/rpc_config.rs +++ b/config/src/rpc_config.rs @@ -289,7 +289,6 @@ impl ConfigModule for RpcConfig { config.ws.port = ports[2]; } else if base.net.is_dev() { config.http.port = get_available_port_from(DEFAULT_HTTP_PORT); - config.tcp.port = get_available_port_from(DEFAULT_TCP_PORT); config.ws.port = get_available_port_from(DEFAULT_WEB_SOCKET_PORT); } @@ -302,12 +301,38 @@ impl ConfigModule for RpcConfig { } fn after_load(&mut self, opt: &StarcoinOpt, _base: &BaseConfig) -> Result<()> { - if self.http.ip_headers.is_none() { - self.http.ip_headers = opt.http.ip_headers.clone(); + if !opt.http.disable { + self.http.disable = false; + self.http.apis = opt.http.apis.clone(); + self.http.port = opt.http.port; + self.http.max_request_body_size = opt.http.max_request_body_size; + if opt.http.threads.is_some() { + self.http.threads = opt.http.threads; + } + if opt.http.ip_headers.is_some() { + self.http.ip_headers = opt.http.ip_headers.clone(); + } } - info!("Ipc file path: {:?}", self.ipc.ipc_file_path); info!("Http rpc address: {:?}", self.get_http_address()); + if !opt.tcp.disable { + self.tcp.disable = false; + self.tcp.apis = opt.tcp.apis.clone(); + self.tcp.port = opt.tcp.port; + } info!("TCP rpc address: {:?}", self.get_tcp_address()); + if !opt.ipc.disable { + self.ipc.apis = opt.ipc.apis.clone(); + if opt.ipc.ipc_file_path.is_some() { + self.ipc.ipc_file_path = opt.ipc.ipc_file_path.clone(); + } + } + info!("Ipc file path: {:?}", self.ipc.ipc_file_path); + if !opt.ws.disable { + self.ws.disable = false; + self.ws.apis = opt.ws.apis.clone(); + self.ws.port = opt.ws.port; + self.ws.max_request_body_size = opt.ws.max_request_body_size; + } info!("Websocket rpc address: {:?}", self.get_ws_address()); Ok(()) } diff --git a/config/src/storage_config.rs b/config/src/storage_config.rs index 677b4dc8dd..e0225b0e6c 100644 --- a/config/src/storage_config.rs +++ b/config/src/storage_config.rs @@ -5,14 +5,21 @@ use crate::{BaseConfig, ConfigModule, StarcoinOpt}; use anyhow::Result; use serde::{Deserialize, Serialize}; use std::path::PathBuf; +use structopt::StructOpt; /// Port selected RocksDB options for tuning underlying rocksdb instance of DiemDB. 
/// see https://github.com/facebook/rocksdb/blob/master/include/rocksdb/options.h /// for detailed explanations. -#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Copy, Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(default, deny_unknown_fields)] pub struct RocksdbConfig { + #[structopt(name = "rocksdb-max-open-files", long, help = "rocksdb max open files")] pub max_open_files: i32, + #[structopt( + name = "rocksdb-max-total-wal-sizes", + long, + help = "rocksdb max total WAL sizes" + )] pub max_total_wal_size: u64, } @@ -52,14 +59,20 @@ impl Default for RocksdbConfig { } } -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct StorageConfig { + #[structopt(long = "dir", parse(from_os_str), conflicts_with("dir"))] dir: PathBuf, + #[structopt( + long = "absolute-dir", + parse(from_os_str), + conflicts_with("absolute-dir") + )] #[serde(skip)] absolute_dir: Option, + #[structopt(flatten)] /// Rocksdb-specific configurations - #[serde(default)] pub rocksdb_config: RocksdbConfig, } diff --git a/config/src/sync_config.rs b/config/src/sync_config.rs index 8741476677..6140826cbd 100644 --- a/config/src/sync_config.rs +++ b/config/src/sync_config.rs @@ -2,15 +2,17 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{BaseConfig, ConfigModule, StarcoinOpt}; -use anyhow::{bail, format_err, Result}; +use anyhow::{bail, Result}; +use clap::arg_enum; use serde::{Deserialize, Serialize}; use starcoin_logger::prelude::*; -use std::fmt::{Display, Formatter}; -use std::str::FromStr; +use structopt::StructOpt; -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct SyncConfig { + #[structopt(long, possible_values = &SyncMode::variants(), case_insensitive = false)] + /// Sync mode: Light, Fast, Full, eg. sync_mode: SyncMode, } @@ -24,25 +26,25 @@ impl SyncConfig { } pub fn is_state_sync(&self) -> bool { - self.sync_mode == SyncMode::FAST + self.sync_mode == SyncMode::Fast } pub fn is_light(&self) -> bool { - self.sync_mode == SyncMode::LIGHT + self.sync_mode == SyncMode::Lignt } } impl ConfigModule for SyncConfig { fn default_with_opt(opt: &StarcoinOpt, _base: &BaseConfig) -> Result { - let sync_mode = opt.sync_mode.unwrap_or(SyncMode::FULL); + let sync_mode = opt.sync_mode.clone().unwrap_or(SyncMode::Full); Ok(SyncConfig { sync_mode }) } fn after_load(&mut self, opt: &StarcoinOpt, _base: &BaseConfig) -> Result<()> { - if let Some(sync_mode) = opt.sync_mode { + if let Some(sync_mode) = opt.sync_mode.clone() { self.sync_mode = sync_mode; } - if self.sync_mode == SyncMode::LIGHT || self.sync_mode == SyncMode::FAST { + if self.sync_mode == SyncMode::Lignt || self.sync_mode == SyncMode::Fast { bail!("{} is not supported yet.", self.sync_mode); } info!("Sync mode : {:?} : {:?}", opt.sync_mode, self.sync_mode); @@ -50,40 +52,17 @@ impl ConfigModule for SyncConfig { } } //TODO remove SyncMode. -#[allow(non_camel_case_types)] -#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)] -#[serde(tag = "type")] +arg_enum! 
{ +#[derive(Debug,Clone, Deserialize, PartialEq, Serialize)] pub enum SyncMode { - LIGHT, - FAST, - FULL, -} - -impl FromStr for SyncMode { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - match s { - "light" => Ok(SyncMode::LIGHT), - "fast" => Ok(SyncMode::FAST), - "full" => Ok(SyncMode::FULL), - _ => Err(format_err!("")), - } - } -} - -impl Display for SyncMode { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - SyncMode::LIGHT => write!(f, "light"), - SyncMode::FAST => write!(f, "fast"), - SyncMode::FULL => write!(f, "full"), - } + Lignt, + Fast, + Full, } } impl Default for SyncMode { fn default() -> Self { - SyncMode::FULL + SyncMode::Full } } diff --git a/config/src/txpool_config.rs b/config/src/txpool_config.rs index 8ba04afe67..74a756e28d 100644 --- a/config/src/txpool_config.rs +++ b/config/src/txpool_config.rs @@ -4,24 +4,40 @@ use crate::{BaseConfig, ConfigModule, StarcoinOpt}; use anyhow::Result; use serde::{Deserialize, Serialize}; +use starcoin_system::get_free_mem_size; +use structopt::StructOpt; +pub const DEFAULT_MEM_SIZE: u64 = 128 * 1024 * 1024; // 128M -#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)] +#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct TxPoolConfig { + #[structopt(name = "max-count", long, default_value = "4096")] /// Maximal number of transactions in the pool. pub max_count: u64, + #[structopt(name = "max-per-sender", long, default_value = "128")] /// Maximal number of transactions from single sender. pub max_per_sender: u64, + #[structopt(name = "max-mem-usage", long, default_value = "134217728")] /// Maximal memory usage. pub max_mem_usage: u64, } impl ConfigModule for TxPoolConfig { fn default_with_opt(_opt: &StarcoinOpt, _base: &BaseConfig) -> Result { + let free_mem = match get_free_mem_size() { + Ok(free) => { + if free > 0 { + free / 2 + } else { + DEFAULT_MEM_SIZE + } + } + Err(_) => DEFAULT_MEM_SIZE, + }; Ok(Self { max_count: 4096, max_per_sender: 128, - max_mem_usage: 128 * 1024 * 1024, // 128M + max_mem_usage: free_mem, }) } } diff --git a/miner/src/generate_block_event_pacemaker.rs b/miner/src/generate_block_event_pacemaker.rs index 14e35d0bf8..c9fe79e744 100644 --- a/miner/src/generate_block_event_pacemaker.rs +++ b/miner/src/generate_block_event_pacemaker.rs @@ -45,7 +45,7 @@ impl ActorService for GenerateBlockEventPacemaker { ctx.subscribe::(); ctx.subscribe::(); //if mint empty block is disabled, trigger mint event for on demand mint (Dev) - if !self.config.miner.enable_mint_empty_block { + if self.config.miner.is_disable_mint_empty_block() { ctx.subscribe::(); } Ok(()) @@ -54,7 +54,7 @@ impl ActorService for GenerateBlockEventPacemaker { fn stopped(&mut self, ctx: &mut ServiceContext) -> Result<()> { ctx.unsubscribe::(); ctx.unsubscribe::(); - if !self.config.miner.enable_mint_empty_block { + if self.config.miner.is_disable_mint_empty_block() { ctx.unsubscribe::(); } Ok(()) diff --git a/miner/src/lib.rs b/miner/src/lib.rs index 559656ccac..11256ca699 100644 --- a/miner/src/lib.rs +++ b/miner/src/lib.rs @@ -72,9 +72,10 @@ impl MinerService { .send(CreateBlockTemplateRequest) .await? 
})?; - if block_template.body.transactions.is_empty() && !self.config.miner.enable_mint_empty_block + if block_template.body.transactions.is_empty() + && self.config.miner.is_disable_mint_empty_block() { - debug!("The flag enable_mint_empty_block is false and no txn in pool, so skip mint empty block."); + debug!("The flag disable_mint_empty_block is true and no txn in pool, so skip mint empty block."); Ok(()) } else { debug!("Mint block template: {:?}", block_template); diff --git a/miner/tests/miner_test.rs b/miner/tests/miner_test.rs index cbf70d4230..5ba3d9d3db 100644 --- a/miner/tests/miner_test.rs +++ b/miner/tests/miner_test.rs @@ -22,7 +22,7 @@ use types::{ #[stest::test] fn test_miner() { let mut config = NodeConfig::random_for_test(); - config.miner.enable_miner_client = false; + config.miner.disable_miner_client = false; let config = Arc::new(config); let handle = test_helper::run_node_by_config(config.clone()).unwrap(); let bus = handle.bus().unwrap(); @@ -60,7 +60,7 @@ fn test_miner() { #[stest::test] async fn test_miner_service() { let mut config = NodeConfig::random_for_test(); - config.miner.enable_mint_empty_block = true; + config.miner.disable_mint_empty_block = Some(false); let registry = RegistryService::launch(); let node_config = Arc::new(config.clone()); registry.put_shared(node_config.clone()).await.unwrap(); diff --git a/node/src/node.rs b/node/src/node.rs index 54daedd1c8..d789abc5f8 100644 --- a/node/src/node.rs +++ b/node/src/node.rs @@ -129,14 +129,14 @@ impl NodeService { config.logger.max_backup, ); } - if config.logger.enable_stderr { + if config.logger.disable_stderr { logger_handle.enable_stderr(); } else { logger_handle.disable_stderr(); } // start metric server - if config.metrics.enable_metrics { + if !config.metrics.disable_metrics { starcoin_metrics::metric_server::start_server( config.metrics.address.clone(), config.metrics.port, @@ -246,7 +246,7 @@ impl NodeService { registry.register::().await?; registry.register::().await?; - if config.miner.enable_miner_client { + if !config.miner.disable_miner_client { let miner_client_config = config.miner.client_config.clone(); registry.put_shared(miner_client_config).await?; let job_client = JobBusClient::new(bus.clone(), config.net().time_service()); diff --git a/rpc/client/tests/client_server_test.rs b/rpc/client/tests/client_server_test.rs index cbd9ace363..a667ca0f92 100644 --- a/rpc/client/tests/client_server_test.rs +++ b/rpc/client/tests/client_server_test.rs @@ -18,8 +18,7 @@ async fn test_in_async() -> Result<()> { } fn do_client_test() -> Result<()> { - let mut node_config = NodeConfig::random_for_test(); - node_config.miner.enable_miner_client = false; + let node_config = NodeConfig::random_for_test(); let config = Arc::new(node_config); let ipc_file = config.rpc.get_ipc_file().to_path_buf(); let url = config.rpc.get_ws_address().unwrap(); @@ -60,7 +59,7 @@ fn test_multi_client() -> Result<()> { #[stest::test(timeout = 120)] fn test_client_reconnect() -> Result<()> { let mut node_config = NodeConfig::random_for_test(); - node_config.miner.enable_miner_client = false; + node_config.miner.disable_miner_client = false; let config = Arc::new(node_config); let url = config.rpc.get_ws_address().unwrap(); debug!("url:{}", url); @@ -92,8 +91,7 @@ fn test_client_reconnect() -> Result<()> { #[stest::test(timeout = 120)] fn test_client_reconnect_subscribe() -> Result<()> { - let mut node_config = NodeConfig::random_for_test(); - node_config.miner.enable_miner_client = true; + let node_config = 
NodeConfig::random_for_test(); let config = Arc::new(node_config); let url = config.rpc.get_ws_address().unwrap(); debug!("url:{}", url); diff --git a/sync/tests/full_sync_test.rs b/sync/tests/full_sync_test.rs index b2ef58a23f..c46cb15131 100644 --- a/sync/tests/full_sync_test.rs +++ b/sync/tests/full_sync_test.rs @@ -13,7 +13,7 @@ use traits::ChainAsyncService; #[stest::test(timeout = 120)] fn test_full_sync() { - test_sync::test_sync(SyncMode::FULL) + test_sync::test_sync(SyncMode::Full) } #[ignore] @@ -30,7 +30,7 @@ fn test_sync_by_notification() { let mut second_config = NodeConfig::random_for_test(); info!("second peer : {:?}", second_config.network.self_peer_id()); second_config.network.seeds = vec![first_config.network.self_address()]; - second_config.miner.enable_miner_client = false; + second_config.miner.disable_miner_client = false; let second_node = run_node_by_config(Arc::new(second_config)).unwrap(); // stop sync service and just use notification message to sync. @@ -88,7 +88,7 @@ fn test_broadcast_with_difficulty() { info!("second peer : {:?}", second_config.network.self_peer_id()); second_config.network.seeds = vec![first_config.network.self_address()]; //second_config.miner.enable_miner_client = false; - second_config.sync.set_mode(SyncMode::FULL); + second_config.sync.set_mode(SyncMode::Full); let second_node = run_node_by_config(Arc::new(second_config)).unwrap(); let second_chain = second_node.chain_service().unwrap(); diff --git a/sync/tests/test_sync/mod.rs b/sync/tests/test_sync/mod.rs index 283c5ba5bf..f0c2273ae1 100644 --- a/sync/tests/test_sync/mod.rs +++ b/sync/tests/test_sync/mod.rs @@ -25,7 +25,7 @@ pub fn test_sync(sync_mode: SyncMode) { let mut second_config = NodeConfig::random_for_test(); info!("second peer : {:?}", second_config.network.self_peer_id()); second_config.network.seeds = vec![first_config.network.self_address()]; - second_config.miner.enable_miner_client = false; + second_config.miner.disable_miner_client = false; second_config.sync.set_mode(sync_mode); let second_node = run_node_by_config(Arc::new(second_config)).unwrap(); diff --git a/sync/tests/txn_sync_test.rs b/sync/tests/txn_sync_test.rs index 1f787338ac..4cb177c41e 100644 --- a/sync/tests/txn_sync_test.rs +++ b/sync/tests/txn_sync_test.rs @@ -15,7 +15,7 @@ use txpool::TxPoolService; #[stest::test] fn test_txn_sync_actor() { let mut first_config = NodeConfig::random_for_test(); - first_config.miner.enable_miner_client = false; + first_config.miner.disable_miner_client = false; let first_network_address = first_config.network.self_address(); let first_config = Arc::new(first_config); let first_node = run_node_by_config(first_config.clone()).unwrap(); @@ -31,7 +31,7 @@ fn test_txn_sync_actor() { let mut second_config = NodeConfig::random_for_test(); second_config.network.seeds = vec![first_network_address]; - second_config.miner.enable_miner_client = false; + second_config.miner.disable_miner_client = false; let second_config = Arc::new(second_config); let second_node = run_node_by_config(second_config.clone()).unwrap(); From 9918bc7d696f381aea3186d0d60f849cb4830df3 Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Thu, 7 Jan 2021 11:10:25 +0800 Subject: [PATCH 2/8] [config] update structopt bool to option --- config/src/lib.rs | 16 ++++++++-------- config/src/logger_config.rs | 13 ++++++++----- config/src/metrics_config.rs | 7 +++++-- config/src/miner_config.rs | 7 ++++--- config/src/network_config.rs | 12 ++++++++---- network/src/worker.rs | 2 +- 6 files changed, 34 insertions(+), 23 
deletions(-) diff --git a/config/src/lib.rs b/config/src/lib.rs index 3ce9654b62..169f70524c 100644 --- a/config/src/lib.rs +++ b/config/src/lib.rs @@ -204,27 +204,27 @@ pub struct StarcoinOpt { #[structopt(long = "disable-std-log")] /// Disable std error log output. - pub disable_std_log: bool, + pub disable_std_log: Option, #[structopt(long = "disable-file-log")] /// Disable std error log output. - pub disable_file_log: bool, + pub disable_file_log: Option, #[structopt(long = "disable-metrics")] /// Disable metrics. - pub disable_metrics: bool, + pub disable_metrics: Option, #[structopt(long = "disable-miner-client")] /// Don't start a miner client in node. - pub disable_miner_client: bool, + pub disable_miner_client: Option, #[structopt(long = "disable-seed")] /// Do not connect to seed node, include builtin and config seed. - pub disable_seed: bool, + pub disable_seed: Option, - #[structopt(long = "enable-mdns")] - /// Enable p2p mdns discovery, for automatically discover the peer from the local network. - pub enable_mdns: bool, + #[structopt(long = "disable-mdns")] + /// Disable p2p mdns discovery, for automatically discover the peer from the local network. + pub disable_mdns: Option, #[structopt(long = "disable-mint-empty-block")] /// Do not mint empty block, default is true in Dev network. diff --git a/config/src/logger_config.rs b/config/src/logger_config.rs index 7bb797489f..9c8eff2ab9 100644 --- a/config/src/logger_config.rs +++ b/config/src/logger_config.rs @@ -39,9 +39,8 @@ impl LoggerConfig { impl ConfigModule for LoggerConfig { fn default_with_opt(opt: &StarcoinOpt, base: &BaseConfig) -> Result { - let disable_stderr = opt.disable_std_log; - let disable_file = opt.disable_file_log; - + let disable_stderr = opt.disable_std_log.unwrap_or(false); + let disable_file = opt.disable_file_log.unwrap_or(false); Ok(if base.net.is_test() { Self { disable_stderr, @@ -71,8 +70,12 @@ impl ConfigModule for LoggerConfig { fn after_load(&mut self, opt: &StarcoinOpt, base: &BaseConfig) -> Result<()> { self.log_path = Some(base.data_dir.join(LOGGER_FILE_NAME)); - self.disable_stderr = opt.disable_std_log; - self.disable_file = opt.disable_file_log; + if let Some(disable) = opt.disable_std_log { + self.disable_stderr = disable; + } + if let Some(disable) = opt.disable_file_log { + self.disable_file = disable; + } Ok(()) } } diff --git a/config/src/metrics_config.rs b/config/src/metrics_config.rs index 254cfeb6ec..61253f3756 100644 --- a/config/src/metrics_config.rs +++ b/config/src/metrics_config.rs @@ -30,15 +30,18 @@ impl ConfigModule for MetricsConfig { } else { DEFAULT_METRIC_SERVER_PORT }; + let disable_metrics = opt.disable_metrics.unwrap_or(false); Ok(Self { - disable_metrics: opt.disable_metrics, + disable_metrics, address: "0.0.0.0".to_string(), port, }) } fn after_load(&mut self, opt: &StarcoinOpt, _base: &BaseConfig) -> Result<()> { - self.disable_metrics = opt.disable_metrics; + if let Some(disable) = opt.disable_metrics { + self.disable_metrics = disable; + } Ok(()) } } diff --git a/config/src/miner_config.rs b/config/src/miner_config.rs index 6556e6b6eb..c0316f20ec 100644 --- a/config/src/miner_config.rs +++ b/config/src/miner_config.rs @@ -49,10 +49,11 @@ impl ConfigModule for MinerConfig { .as_ref() .cloned() .unwrap_or_else(|| base.net.is_dev()); + let disable_miner_client = opt.disable_miner_client.unwrap_or(false); Ok(Self { disable_mint_empty_block: Some(disable_mint_empty_block), block_gas_limit: None, - disable_miner_client: opt.disable_miner_client, + disable_miner_client, 
client_config: MinerClientConfig { server: None, plugin_path: None, @@ -73,8 +74,8 @@ impl ConfigModule for MinerConfig { if let Some(thread) = opt.miner_thread { self.client_config.thread_num = thread; } - if opt.disable_miner_client { - self.disable_miner_client = true; + if let Some(disable) = opt.disable_miner_client { + self.disable_miner_client = disable; } Ok(()) } diff --git a/config/src/network_config.rs b/config/src/network_config.rs index 1480c029fa..065cf1bc2a 100644 --- a/config/src/network_config.rs +++ b/config/src/network_config.rs @@ -90,7 +90,7 @@ pub struct NetworkConfig { #[serde(default)] pub seeds: Vec, #[serde(default)] - pub enable_mdns: bool, + pub disable_mdns: bool, //TODO skip this field, do not persistence this flag to config. this change will break network config. pub disable_seed: bool, #[serde(skip)] @@ -174,11 +174,13 @@ impl ConfigModule for NetworkConfig { .parse() .expect("Parse multi address fail.") }; + let disable_mdns = opt.disable_mdns.unwrap_or(false); + let disable_seed = opt.disable_seed.unwrap_or(false); Ok(Self { listen, seeds, - enable_mdns: opt.enable_mdns, - disable_seed: opt.disable_seed, + disable_mdns, + disable_seed, network_keypair: Some(Arc::new(Self::load_or_generate_keypair(opt, base)?)), self_peer_id: None, self_address: None, @@ -201,7 +203,9 @@ impl ConfigModule for NetworkConfig { } self.network_keypair = Some(Arc::new(Self::load_or_generate_keypair(opt, base)?)); - self.disable_seed = opt.disable_seed; + if let Some(disable) = opt.disable_seed { + self.disable_seed = disable; + } self.prepare_peer_id(); Ok(()) diff --git a/network/src/worker.rs b/network/src/worker.rs index 5faef97ec0..ad4d953ada 100644 --- a/network/src/worker.rs +++ b/network/src/worker.rs @@ -34,7 +34,7 @@ pub fn build_network_worker( TransportConfig::MemoryOnly } else { TransportConfig::Normal { - enable_mdns: node_config.network.enable_mdns, + enable_mdns: !node_config.network.disable_mdns, allow_private_ipv4: true, wasm_external_transport: None, } From f53af2b024485a31a2b0db68a20ee7554a2ba49c Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Thu, 7 Jan 2021 13:01:30 +0800 Subject: [PATCH 3/8] [config] fix sync_mode error and integration test --- config/src/lib.rs | 2 +- testsuite/tests/steps/node.rs | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/config/src/lib.rs b/config/src/lib.rs index 169f70524c..548dc14575 100644 --- a/config/src/lib.rs +++ b/config/src/lib.rs @@ -190,7 +190,7 @@ pub struct StarcoinOpt { /// Node network private key file, only work for first init. pub node_key_file: Option, - #[structopt(long = "sync-mode", short = "s", possible_values = &SyncMode::variants(), case_insensitive = false)] + #[structopt(long = "sync-mode", short = "s")] /// Sync mode. Included value(full, fast, light). 
pub sync_mode: Option, diff --git a/testsuite/tests/steps/node.rs b/testsuite/tests/steps/node.rs index 7a2b734762..606f32827c 100644 --- a/testsuite/tests/steps/node.rs +++ b/testsuite/tests/steps/node.rs @@ -14,8 +14,8 @@ pub fn steps() -> Steps { .given("a test node config", |world: &mut MyWorld, _step| { let mut opt = StarcoinOpt::default(); opt.net = Some(ChainNetworkID::TEST); - opt.disable_metrics = true; - opt.disable_seed = true; + opt.disable_metrics = Some(true); + opt.disable_seed = Some(true); let config = NodeConfig::load_with_opt(&opt).unwrap(); info!("config: {:?}", config); world.node_config = Some(config) @@ -23,15 +23,15 @@ pub fn steps() -> Steps { .given("a dev node config", |world: &mut MyWorld, _step| { let mut opt = StarcoinOpt::default(); opt.net = Some(ChainNetworkID::DEV); - opt.disable_metrics = true; - opt.disable_seed = true; + opt.disable_metrics = Some(true); + opt.disable_seed = Some(true); let config = NodeConfig::load_with_opt(&opt).unwrap(); world.node_config = Some(config) }) .given("halley node config", |world: &mut MyWorld, _step| { let mut opt = StarcoinOpt::default(); opt.net = Some(ChainNetworkID::HALLEY); - opt.disable_metrics = true; + opt.disable_metrics = Some(true); opt.data_dir = Some(PathBuf::from(starcoin_config::temp_path().as_ref())); let config = NodeConfig::load_with_opt(&opt).unwrap(); world.node_config = Some(config) From b31ad5d51a07e57bce2bb9947fb0a7a701c82ffb Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Fri, 8 Jan 2021 11:12:57 +0800 Subject: [PATCH 4/8] [config] update network logger miner config to flatten --- cmd/miner_client/src/cpu_solver.rs | 2 +- cmd/miner_client/src/main.rs | 2 +- cmd/starcoin/src/dev/sign_txn_helper.rs | 4 +- config/src/lib.rs | 41 ++++------------- config/src/logger_config.rs | 51 ++++++++++++++------- config/src/metrics_config.rs | 44 +++++++++++++----- config/src/miner_config.rs | 60 ++++++++++++++++++------- config/src/network_config.rs | 47 ++++++++++++++----- miner/tests/miner_test.rs | 2 +- network/src/worker.rs | 4 +- node/src/node.rs | 6 +-- node/tests/test_node_run.rs | 4 +- rpc/client/tests/client_server_test.rs | 2 +- sync/tests/full_sync_test.rs | 2 +- sync/tests/test_sync/mod.rs | 2 +- sync/tests/txn_sync_test.rs | 4 +- 16 files changed, 172 insertions(+), 105 deletions(-) diff --git a/cmd/miner_client/src/cpu_solver.rs b/cmd/miner_client/src/cpu_solver.rs index d8f9913862..d7f877ee51 100644 --- a/cmd/miner_client/src/cpu_solver.rs +++ b/cmd/miner_client/src/cpu_solver.rs @@ -54,7 +54,7 @@ impl Solver for CpuSolver { nonce_tx: mpsc::UnboundedSender<(Vec, u32)>, mut stop_rx: mpsc::UnboundedReceiver, ) { - let thread_num = self.config.thread_num; + let thread_num = self.config.miner_thread(); let worker_txs = (0..thread_num) .map(|i| { let worker_name = format!("starcoin-miner-cpu-worker-{}", i); diff --git a/cmd/miner_client/src/main.rs b/cmd/miner_client/src/main.rs index 27677de2e4..49c840cfb9 100644 --- a/cmd/miner_client/src/main.rs +++ b/cmd/miner_client/src/main.rs @@ -27,7 +27,7 @@ fn main() { MinerClientConfig { server: Some(opts.server.clone()), plugin_path: opts.plugin_path, - thread_num: opts.thread_num, + miner_thread: Some(opts.thread_num), enable_stderr: true, } }; diff --git a/cmd/starcoin/src/dev/sign_txn_helper.rs b/cmd/starcoin/src/dev/sign_txn_helper.rs index afe5d11c86..2a7f0b85af 100644 --- a/cmd/starcoin/src/dev/sign_txn_helper.rs +++ b/cmd/starcoin/src/dev/sign_txn_helper.rs @@ -210,7 +210,7 @@ mod tests { #[stest::test(timeout = 300)] fn test_upgrade_module() { let 
mut node_config = NodeConfig::random_for_test(); - node_config.network.disable_seed = true; + node_config.network.disable_seed = Some(true); let config = Arc::new(node_config); let node_handle = run_node_by_config(config.clone()).unwrap(); let rpc_service = node_handle.rpc_service().unwrap(); @@ -438,7 +438,7 @@ mod tests { #[stest::test(timeout = 300)] fn test_only_new_module() { let mut node_config = NodeConfig::random_for_test(); - node_config.network.disable_seed = true; + node_config.network.disable_seed = Some(true); let config = Arc::new(node_config); let node_handle = run_node_by_config(config.clone()).unwrap(); let rpc_service = node_handle.rpc_service().unwrap(); diff --git a/config/src/lib.rs b/config/src/lib.rs index 548dc14575..bd0c3f592f 100644 --- a/config/src/lib.rs +++ b/config/src/lib.rs @@ -198,37 +198,14 @@ pub struct StarcoinOpt { /// Rpc address, default is 127.0.0.1 pub rpc_address: Option, - #[structopt(long = "miner-thread")] - /// Miner thread number, not work for dev network, default is 1 - pub miner_thread: Option, - - #[structopt(long = "disable-std-log")] - /// Disable std error log output. - pub disable_std_log: Option, - - #[structopt(long = "disable-file-log")] - /// Disable std error log output. - pub disable_file_log: Option, - - #[structopt(long = "disable-metrics")] - /// Disable metrics. - pub disable_metrics: Option, - - #[structopt(long = "disable-miner-client")] - /// Don't start a miner client in node. - pub disable_miner_client: Option, - - #[structopt(long = "disable-seed")] - /// Do not connect to seed node, include builtin and config seed. - pub disable_seed: Option, - - #[structopt(long = "disable-mdns")] - /// Disable p2p mdns discovery, for automatically discover the peer from the local network. - pub disable_mdns: Option, - - #[structopt(long = "disable-mint-empty-block")] - /// Do not mint empty block, default is true in Dev network. 
- pub disable_mint_empty_block: Option, + #[structopt(flatten)] + pub logger: LoggerConfig, + #[structopt(flatten)] + pub metrics: MetricsConfig, + #[structopt(flatten)] + pub miner: MinerConfig, + #[structopt(flatten)] + pub network: NetworkConfig, #[structopt(long = "watch-timeout")] /// Watch timeout in seconds @@ -249,8 +226,6 @@ pub struct StarcoinOpt { pub ipc: IpcConfiguration, #[structopt(flatten)] pub api_quotas: ApiQuotaConfiguration, - #[structopt(flatten)] - pub network_rpc_quotas: NetworkRpcQuotaConfiguration, } #[derive(Clone, Debug, PartialEq)] diff --git a/config/src/logger_config.rs b/config/src/logger_config.rs index 9c8eff2ab9..05d5f5a009 100644 --- a/config/src/logger_config.rs +++ b/config/src/logger_config.rs @@ -9,21 +9,35 @@ use structopt::StructOpt; static LOGGER_FILE_NAME: &str = "starcoin.log"; +const DEFAULT_MAX_FILE_SIZE: u64 = 1024 * 1024 * 1024; +const MAX_FILE_SIZE_FOR_TEST: u64 = 10 * 1024 * 1024; +const DEFAULT_MAX_BACKUP: u32 = 7; + #[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct LoggerConfig { #[structopt(name = "disable-stderr", long, help = "disable stderr logger")] - pub disable_stderr: bool, + pub disable_stderr: Option, #[structopt(name = "disable-file", long, help = "disable file logger")] - pub disable_file: bool, - #[structopt(name = "max-file-size", long, default_value = "1073741824")] + pub disable_file: Option, + #[structopt(name = "max-file-size", long, default_value = "DEFAULT_MAX_FILE_SIZE")] pub max_file_size: u64, - #[structopt(name = "max-backup", long, default_value = "7")] + #[structopt(name = "max-backup", long, default_value = "DEFAULT_MAX_BACKUP")] pub max_backup: u32, #[serde(skip)] log_path: Option, } - +impl Default for LoggerConfig { + fn default() -> Self { + Self { + disable_stderr: None, + disable_file: None, + max_file_size: DEFAULT_MAX_FILE_SIZE, + max_backup: DEFAULT_MAX_BACKUP, + log_path: None, + } + } +} impl LoggerConfig { pub fn get_log_path(&self) -> PathBuf { self.log_path @@ -33,19 +47,24 @@ impl LoggerConfig { } pub fn enable_file(&self) -> bool { - (!self.disable_file) && self.log_path.is_some() + let disable = self.disable_file.unwrap_or(false); + (!disable) && self.log_path.is_some() + } + + pub fn disable_stderr(&self) -> bool { + self.disable_stderr.unwrap_or(false) } } impl ConfigModule for LoggerConfig { fn default_with_opt(opt: &StarcoinOpt, base: &BaseConfig) -> Result { - let disable_stderr = opt.disable_std_log.unwrap_or(false); - let disable_file = opt.disable_file_log.unwrap_or(false); + let disable_stderr = opt.logger.disable_stderr; + let disable_file = opt.logger.disable_file; Ok(if base.net.is_test() { Self { disable_stderr, disable_file, - max_file_size: 10 * 1024 * 1024, + max_file_size: MAX_FILE_SIZE_FOR_TEST, max_backup: 1, log_path: None, } @@ -53,7 +72,7 @@ impl ConfigModule for LoggerConfig { Self { disable_stderr, disable_file, - max_file_size: 10 * 1024 * 1024, + max_file_size: MAX_FILE_SIZE_FOR_TEST, max_backup: 2, log_path: None, } @@ -61,8 +80,8 @@ impl ConfigModule for LoggerConfig { Self { disable_stderr, disable_file, - max_file_size: 1024 * 1024 * 1024, - max_backup: 7, + max_file_size: DEFAULT_MAX_FILE_SIZE, + max_backup: DEFAULT_MAX_BACKUP, log_path: None, } }) @@ -70,11 +89,11 @@ impl ConfigModule for LoggerConfig { fn after_load(&mut self, opt: &StarcoinOpt, base: &BaseConfig) -> Result<()> { self.log_path = Some(base.data_dir.join(LOGGER_FILE_NAME)); - if let Some(disable) = opt.disable_std_log { - 
self.disable_stderr = disable; + if opt.logger.disable_stderr.is_some() { + self.disable_stderr = opt.logger.disable_stderr; } - if let Some(disable) = opt.disable_file_log { - self.disable_file = disable; + if opt.logger.disable_file.is_some() { + self.disable_file = opt.logger.disable_file; } Ok(()) } diff --git a/config/src/metrics_config.rs b/config/src/metrics_config.rs index 61253f3756..d521fc3d0e 100644 --- a/config/src/metrics_config.rs +++ b/config/src/metrics_config.rs @@ -8,19 +8,42 @@ use anyhow::Result; use serde::{Deserialize, Serialize}; use structopt::StructOpt; +pub static DEFAULT_METRIC_SERVER_ADDRESS: &str = "0.0.0.0"; +pub static DEFAULT_METRIC_SERVER_PORT: u16 = 9101; + #[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct MetricsConfig { #[structopt(name = "disable-metrics", long, help = "disable metrics")] - pub disable_metrics: bool, - #[structopt(name = "address", long, help = "address", default_value = "0.0.0.0")] + pub disable_metrics: Option, + #[structopt( + name = "address", + long, + help = "address", + default_value = "DEFAULT_METRIC_SERVER_ADDRESS" + )] pub address: String, - #[structopt(name = "metrics-port", long, default_value = "9101")] + #[structopt( + name = "metrics-port", + long, + default_value = "DEFAULT_METRIC_SERVER_PORT" + )] pub port: u16, } - -pub static DEFAULT_METRIC_SERVER_PORT: u16 = 9101; - +impl MetricsConfig { + pub fn disable_metrics(&self) -> bool { + self.disable_metrics.unwrap_or(false) + } +} +impl Default for MetricsConfig { + fn default() -> Self { + Self { + disable_metrics: None, + address: DEFAULT_METRIC_SERVER_ADDRESS.to_string(), + port: DEFAULT_METRIC_SERVER_PORT, + } + } +} impl ConfigModule for MetricsConfig { fn default_with_opt(opt: &StarcoinOpt, base: &BaseConfig) -> Result { let port = if base.net.is_test() { @@ -30,17 +53,16 @@ impl ConfigModule for MetricsConfig { } else { DEFAULT_METRIC_SERVER_PORT }; - let disable_metrics = opt.disable_metrics.unwrap_or(false); Ok(Self { - disable_metrics, - address: "0.0.0.0".to_string(), + disable_metrics: opt.metrics.disable_metrics, + address: DEFAULT_METRIC_SERVER_ADDRESS.to_string(), port, }) } fn after_load(&mut self, opt: &StarcoinOpt, _base: &BaseConfig) -> Result<()> { - if let Some(disable) = opt.disable_metrics { - self.disable_metrics = disable; + if opt.metrics.disable_metrics.is_some() { + self.disable_metrics = opt.metrics.disable_metrics; } Ok(()) } diff --git a/config/src/miner_config.rs b/config/src/miner_config.rs index c0316f20ec..622d0cedda 100644 --- a/config/src/miner_config.rs +++ b/config/src/miner_config.rs @@ -10,22 +10,32 @@ use structopt::StructOpt; #[serde(deny_unknown_fields)] pub struct MinerConfig { #[structopt(long = "disable-mint-empty-block")] + /// Do not mint empty block, default is true in Dev network. pub disable_mint_empty_block: Option, #[structopt(long = "block-gas-limit")] pub block_gas_limit: Option, #[structopt(long = "disable-miner-client")] - pub disable_miner_client: bool, + /// Don't start a miner client in node. 
+ pub disable_miner_client: Option, #[structopt(flatten)] pub client_config: MinerClientConfig, } - +impl Default for MinerConfig { + fn default() -> Self { + Self { + disable_mint_empty_block: None, + block_gas_limit: None, + disable_miner_client: None, + client_config: MinerClientConfig::default(), + } + } +} impl MinerConfig { + pub fn disable_miner_client(&self) -> bool { + self.disable_miner_client.unwrap_or(false) + } pub fn is_disable_mint_empty_block(&self) -> bool { - if let Some(disable) = self.disable_mint_empty_block { - disable - } else { - false - } + self.disable_mint_empty_block.unwrap_or(false) } } @@ -34,30 +44,45 @@ impl MinerConfig { pub struct MinerClientConfig { pub server: Option, pub plugin_path: Option, - #[structopt(long = "thread-num")] - pub thread_num: u16, + #[structopt(long = "miner-thread")] + /// Miner thread number, not work for dev network, default is 1 + pub miner_thread: Option, #[structopt(long = "enable-stderr")] #[serde(skip)] pub enable_stderr: bool, } - +impl MinerClientConfig { + pub fn miner_thread(&self) -> u16 { + self.miner_thread.unwrap_or(1) + } +} +impl Default for MinerClientConfig { + fn default() -> Self { + Self { + server: None, + plugin_path: None, + miner_thread: Some(1), + enable_stderr: false, + } + } +} impl ConfigModule for MinerConfig { fn default_with_opt(opt: &StarcoinOpt, base: &BaseConfig) -> Result { // only dev network is on demand mine at default. let disable_mint_empty_block = opt + .miner .disable_mint_empty_block .as_ref() .cloned() .unwrap_or_else(|| base.net.is_dev()); - let disable_miner_client = opt.disable_miner_client.unwrap_or(false); Ok(Self { disable_mint_empty_block: Some(disable_mint_empty_block), block_gas_limit: None, - disable_miner_client, + disable_miner_client: opt.miner.disable_miner_client, client_config: MinerClientConfig { server: None, plugin_path: None, - thread_num: opt.miner_thread.unwrap_or(1), + miner_thread: opt.miner.client_config.miner_thread, enable_stderr: false, }, }) @@ -66,16 +91,17 @@ impl ConfigModule for MinerConfig { fn after_load(&mut self, opt: &StarcoinOpt, base: &BaseConfig) -> Result<()> { // only dev network is on demand mine at default. let disable_mint_empty_block = opt + .miner .disable_mint_empty_block .as_ref() .cloned() .unwrap_or_else(|| base.net.is_dev()); self.disable_mint_empty_block = Some(disable_mint_empty_block); - if let Some(thread) = opt.miner_thread { - self.client_config.thread_num = thread; + if opt.miner.client_config.miner_thread.is_some() { + self.client_config.miner_thread = opt.miner.client_config.miner_thread; } - if let Some(disable) = opt.disable_miner_client { - self.disable_miner_client = disable; + if opt.miner.disable_miner_client.is_some() { + self.disable_miner_client = opt.miner.disable_miner_client; } Ok(()) } diff --git a/config/src/network_config.rs b/config/src/network_config.rs index 065cf1bc2a..cfa3b04681 100644 --- a/config/src/network_config.rs +++ b/config/src/network_config.rs @@ -82,7 +82,7 @@ impl Default for NetworkRpcQuotaConfiguration { } } -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] +#[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct NetworkConfig { /// The address that this node is listening on for new connections. @@ -90,18 +90,38 @@ pub struct NetworkConfig { #[serde(default)] pub seeds: Vec, #[serde(default)] - pub disable_mdns: bool, + /// Disable p2p mdns discovery, for automatically discover the peer from the local network. 
+ pub disable_mdns: Option, //TODO skip this field, do not persistence this flag to config. this change will break network config. - pub disable_seed: bool, + #[structopt(long = "disable-seed")] + /// Do not connect to seed node, include builtin and config seed. + pub disable_seed: Option, + #[structopt(skip)] #[serde(skip)] network_keypair: Option>>, #[serde(skip)] self_peer_id: Option, #[serde(skip)] self_address: Option, - #[serde(default)] + #[structopt(flatten)] pub network_rpc_quotas: NetworkRpcQuotaConfiguration, } +impl Default for NetworkConfig { + fn default() -> Self { + Self { + listen: format!("/ip4/0.0.0.0/tcp/{}", DEFAULT_NETWORK_PORT) + .parse() + .expect("Parse multi address fail."), + seeds: vec![], + disable_mdns: None, + disable_seed: None, + network_keypair: None, + self_peer_id: None, + self_address: None, + network_rpc_quotas: NetworkRpcQuotaConfiguration::default(), + } + } +} impl NetworkConfig { pub fn network_keypair(&self) -> Arc> { @@ -112,6 +132,13 @@ impl NetworkConfig { self.self_address.clone().expect("Config should init.") } + pub fn disable_mdns(&self) -> bool { + self.disable_mdns.unwrap_or(false) + } + pub fn disable_seed(&self) -> bool { + self.disable_seed.unwrap_or(false) + } + pub fn self_peer_id(&self) -> PeerId { self.self_peer_id.clone().expect("Config should init.") } @@ -174,17 +201,15 @@ impl ConfigModule for NetworkConfig { .parse() .expect("Parse multi address fail.") }; - let disable_mdns = opt.disable_mdns.unwrap_or(false); - let disable_seed = opt.disable_seed.unwrap_or(false); Ok(Self { listen, seeds, - disable_mdns, - disable_seed, + disable_mdns: opt.network.disable_mdns, + disable_seed: opt.network.disable_seed, network_keypair: Some(Arc::new(Self::load_or_generate_keypair(opt, base)?)), self_peer_id: None, self_address: None, - network_rpc_quotas: opt.network_rpc_quotas.clone(), + network_rpc_quotas: opt.network.network_rpc_quotas.clone(), }) } @@ -203,8 +228,8 @@ impl ConfigModule for NetworkConfig { } self.network_keypair = Some(Arc::new(Self::load_or_generate_keypair(opt, base)?)); - if let Some(disable) = opt.disable_seed { - self.disable_seed = disable; + if opt.network.disable_seed.is_some() { + self.disable_seed = opt.network.disable_seed; } self.prepare_peer_id(); diff --git a/miner/tests/miner_test.rs b/miner/tests/miner_test.rs index 5ba3d9d3db..723330dc77 100644 --- a/miner/tests/miner_test.rs +++ b/miner/tests/miner_test.rs @@ -22,7 +22,7 @@ use types::{ #[stest::test] fn test_miner() { let mut config = NodeConfig::random_for_test(); - config.miner.disable_miner_client = false; + config.miner.disable_miner_client = Some(false); let config = Arc::new(config); let handle = test_helper::run_node_by_config(config.clone()).unwrap(); let bus = handle.bus().unwrap(); diff --git a/network/src/worker.rs b/network/src/worker.rs index ad4d953ada..ffc2c0cefc 100644 --- a/network/src/worker.rs +++ b/network/src/worker.rs @@ -34,7 +34,7 @@ pub fn build_network_worker( TransportConfig::MemoryOnly } else { TransportConfig::Normal { - enable_mdns: !node_config.network.disable_mdns, + enable_mdns: !node_config.network.disable_mdns(), allow_private_ipv4: true, wasm_external_transport: None, } @@ -71,7 +71,7 @@ pub fn build_network_worker( None => vec![], }; let self_peer_id = node_config.network.self_peer_id(); - let boot_nodes = if node_config.network.disable_seed { + let boot_nodes = if node_config.network.disable_seed() { vec![] } else { let mut boot_nodes = node_config.network.seeds.clone(); diff --git a/node/src/node.rs 
b/node/src/node.rs index d789abc5f8..1bad16286c 100644 --- a/node/src/node.rs +++ b/node/src/node.rs @@ -129,14 +129,14 @@ impl NodeService { config.logger.max_backup, ); } - if config.logger.disable_stderr { + if config.logger.disable_stderr() { logger_handle.enable_stderr(); } else { logger_handle.disable_stderr(); } // start metric server - if !config.metrics.disable_metrics { + if !config.metrics.disable_metrics() { starcoin_metrics::metric_server::start_server( config.metrics.address.clone(), config.metrics.port, @@ -246,7 +246,7 @@ impl NodeService { registry.register::().await?; registry.register::().await?; - if !config.miner.disable_miner_client { + if !config.miner.disable_miner_client() { let miner_client_config = config.miner.client_config.clone(); registry.put_shared(miner_client_config).await?; let job_client = JobBusClient::new(bus.clone(), config.net().time_service()); diff --git a/node/tests/test_node_run.rs b/node/tests/test_node_run.rs index 04179130fc..3b8e005a16 100644 --- a/node/tests/test_node_run.rs +++ b/node/tests/test_node_run.rs @@ -13,7 +13,7 @@ use std::thread; #[stest::test] fn test_run_node() { let mut node_config = NodeConfig::random_for_test(); - node_config.network.disable_seed = true; + node_config.network.disable_seed = Some(true); let config = Arc::new(node_config); let handle = run_node(config).unwrap(); let services = handle.list_service().unwrap(); @@ -25,7 +25,7 @@ fn test_run_node() { #[stest::test] fn test_generate_block() { let mut node_config = NodeConfig::random_for_test(); - node_config.network.disable_seed = true; + node_config.network.disable_seed = Some(true); let config = Arc::new(node_config); let handle = run_node(config).unwrap(); let node_service = handle.node_service(); diff --git a/rpc/client/tests/client_server_test.rs b/rpc/client/tests/client_server_test.rs index a667ca0f92..b8af108331 100644 --- a/rpc/client/tests/client_server_test.rs +++ b/rpc/client/tests/client_server_test.rs @@ -59,7 +59,7 @@ fn test_multi_client() -> Result<()> { #[stest::test(timeout = 120)] fn test_client_reconnect() -> Result<()> { let mut node_config = NodeConfig::random_for_test(); - node_config.miner.disable_miner_client = false; + node_config.miner.disable_miner_client = Some(false); let config = Arc::new(node_config); let url = config.rpc.get_ws_address().unwrap(); debug!("url:{}", url); diff --git a/sync/tests/full_sync_test.rs b/sync/tests/full_sync_test.rs index c46cb15131..6d08e9e20d 100644 --- a/sync/tests/full_sync_test.rs +++ b/sync/tests/full_sync_test.rs @@ -30,7 +30,7 @@ fn test_sync_by_notification() { let mut second_config = NodeConfig::random_for_test(); info!("second peer : {:?}", second_config.network.self_peer_id()); second_config.network.seeds = vec![first_config.network.self_address()]; - second_config.miner.disable_miner_client = false; + second_config.miner.disable_miner_client = Some(false); let second_node = run_node_by_config(Arc::new(second_config)).unwrap(); // stop sync service and just use notification message to sync. 
diff --git a/sync/tests/test_sync/mod.rs b/sync/tests/test_sync/mod.rs index f0c2273ae1..91b1306698 100644 --- a/sync/tests/test_sync/mod.rs +++ b/sync/tests/test_sync/mod.rs @@ -25,7 +25,7 @@ pub fn test_sync(sync_mode: SyncMode) { let mut second_config = NodeConfig::random_for_test(); info!("second peer : {:?}", second_config.network.self_peer_id()); second_config.network.seeds = vec![first_config.network.self_address()]; - second_config.miner.disable_miner_client = false; + second_config.miner.disable_miner_client = Some(false); second_config.sync.set_mode(sync_mode); let second_node = run_node_by_config(Arc::new(second_config)).unwrap(); diff --git a/sync/tests/txn_sync_test.rs b/sync/tests/txn_sync_test.rs index 4cb177c41e..a4ae1609ab 100644 --- a/sync/tests/txn_sync_test.rs +++ b/sync/tests/txn_sync_test.rs @@ -15,7 +15,7 @@ use txpool::TxPoolService; #[stest::test] fn test_txn_sync_actor() { let mut first_config = NodeConfig::random_for_test(); - first_config.miner.disable_miner_client = false; + first_config.miner.disable_miner_client = Some(false); let first_network_address = first_config.network.self_address(); let first_config = Arc::new(first_config); let first_node = run_node_by_config(first_config.clone()).unwrap(); @@ -31,7 +31,7 @@ fn test_txn_sync_actor() { let mut second_config = NodeConfig::random_for_test(); second_config.network.seeds = vec![first_network_address]; - second_config.miner.disable_miner_client = false; + second_config.miner.disable_miner_client = Some(false); let second_config = Arc::new(second_config); let second_node = run_node_by_config(second_config.clone()).unwrap(); From dcf8736eebbddc4424504092f4c74bb9933f392b Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Fri, 8 Jan 2021 11:15:57 +0800 Subject: [PATCH 5/8] # Conflicts: # config/Cargo.toml --- Cargo.lock | 76 ++-- Cargo.toml | 90 ++++ account/lib/src/account_test.rs | 7 +- benchmarks/benches/bench_state_tree.rs | 11 +- cmd/starcoin/Cargo.toml | 2 +- cmd/starcoin/src/state/get_cmd.rs | 19 +- commons/crypto/Cargo.toml | 4 +- commons/proptest-helpers/Cargo.toml | 2 +- config/Cargo.toml | 2 +- core/forkable-jellyfish-merkle/Cargo.toml | 1 + .../benches/bench_merkle_tree.rs | 12 +- core/forkable-jellyfish-merkle/src/blob.rs | 9 +- .../src/iterator/mod.rs | 34 +- .../src/jellyfish_merkle_test.rs | 134 +++--- core/forkable-jellyfish-merkle/src/lib.rs | 234 +++++++---- .../src/mock_tree_store.rs | 29 +- .../src/node_type/mod.rs | 113 +++-- .../src/node_type/node_type_test.rs | 13 +- .../src/test_helper.rs | 4 +- .../src/tree_cache/mod.rs | 46 ++- .../src/tree_cache/tree_cache_test.rs | 11 +- core/genesis/generated/halley/genesis | Bin 46421 -> 46587 bytes core/genesis/generated/main/genesis | Bin 46421 -> 46587 bytes core/genesis/generated/proxima/genesis | Bin 46396 -> 46396 bytes dataformat-generator/build.rs | 3 +- devtools/x/Cargo.toml | 8 +- devtools/x/src/main.rs | 14 +- devtools/x/src/test.rs | 2 +- devtools/x/src/utils.rs | 15 + etc/starcoin_types.yml | 17 +- executor/src/account.rs | 5 +- kube/manifest/actions-runner-controller.yaml | 2 +- kube/manifest/runner.yaml | 2 +- rpc/api/src/types.rs | 4 +- rpc/server/src/module/contract_rpc.rs | 9 +- state/api/src/chain_state.rs | 20 +- state/state-store-api/src/lib.rs | 15 +- state/state-tree/Cargo.toml | 2 +- state/state-tree/src/lib.rs | 14 - state/state-tree/src/mock/mod.rs | 7 +- state/state-tree/src/state_tree.rs | 109 ++--- state/state-tree/src/state_tree_test.rs | 30 +- state/statedb/Cargo.toml | 2 +- state/statedb/src/lib.rs | 355 
++++++++-------- types/Cargo.toml | 1 + types/src/account_state.rs | 21 +- types/src/lib.rs | 4 +- types/src/state_set.rs | 39 +- vm/compiler/Cargo.toml | 4 +- vm/functional-tests/Cargo.toml | 4 +- vm/functional-tests/src/executor.rs | 5 +- .../cancel_upgrade_plan.move | 5 +- .../override_upgrade_plan.move | 13 +- .../package_txn_manager.move | 15 +- .../transaction_scripts/module_upgrade.move | 9 +- .../upgrade_module_dao_proposal/basic.move | 3 +- vm/move-coverage/Cargo.toml | 4 +- vm/move-explain/Cargo.toml | 6 +- vm/move-prover/Cargo.toml | 26 +- vm/resource-viewer/src/resolver.rs | 3 +- vm/stdlib/Cargo.toml | 4 +- .../latest/init_scripts/genesis_init.mv | Bin 1594 -> 1649 bytes .../latest/stdlib/16_PackageTxnManager.mv | Bin 0 -> 2662 bytes .../latest/stdlib/{18_Dao.mv => 17_Dao.mv} | Bin .../latest/stdlib/17_PackageTxnManager.mv | Bin 2510 -> 0 bytes .../stdlib/18_UpgradeModuleDaoProposal.mv | Bin 0 -> 833 bytes ...nfig.mv => 19_TransactionTimeoutConfig.mv} | Bin .../stdlib/19_UpgradeModuleDaoProposal.mv | Bin 874 -> 0 bytes ...tion.mv => 20_TransactionPublishOption.mv} | Bin ...{22_RewardConfig.mv => 21_RewardConfig.mv} | Bin ...ainConfigDao.mv => 22_OnChainConfigDao.mv} | Bin ...posal.mv => 23_ModifyDaoConfigProposal.mv} | Bin ...nsensusConfig.mv => 24_ConsensusConfig.mv} | Bin .../latest/stdlib/{26_STC.mv => 25_STC.mv} | Bin ...TransactionFee.mv => 26_TransactionFee.mv} | Bin .../latest/stdlib/{28_Hash.mv => 27_Hash.mv} | Bin ...9_Authenticator.mv => 28_Authenticator.mv} | Bin .../stdlib/{30_Account.mv => 29_Account.mv} | Bin .../stdlib/{16_Block.mv => 30_Block.mv} | Bin .../latest/stdlib/43_TransactionTimeout.mv | Bin 301 -> 301 bytes .../latest/stdlib/44_TransactionManager.mv | Bin 1278 -> 1278 bytes .../abi/update_module_upgrade_strategy.abi | Bin 372 -> 415 bytes .../update_module_upgrade_strategy.mv | Bin 326 -> 369 bytes vm/stdlib/init_scripts/genesis_init.move | 2 + vm/stdlib/modules/PackageTxnManager.move | 51 ++- .../modules/UpgradeModuleDaoProposal.move | 2 - vm/stdlib/modules/doc/PackageTxnManager.md | 127 ++++-- .../modules/doc/UpgradeModuleDaoProposal.md | 4 +- .../transaction_scripts/doc/genesis_init.md | 2 + .../doc/update_module_upgrade_strategy.md | 2 + .../update_module_upgrade_strategy.move | 2 + vm/transaction-builder-generator/Cargo.toml | 4 +- vm/types/Cargo.toml | 13 +- vm/types/src/access_path.rs | 385 +++++++++++++----- .../events/accept_token_payment.rs | 13 +- .../account_config/events/account_deposit.rs | 13 +- .../account_config/events/account_withdraw.rs | 13 +- vm/types/src/account_config/events/burn.rs | 2 +- vm/types/src/account_config/events/dao.rs | 2 +- vm/types/src/account_config/events/mint.rs | 2 +- .../src/account_config/resources/account.rs | 2 +- .../src/account_config/resources/balance.rs | 5 +- .../resources/key_rotation_capability.rs | 2 +- .../resources/module_upgrade_strategy.rs | 4 +- .../resources/withdraw_capability.rs | 2 +- vm/types/src/contract_event.rs | 2 +- vm/types/src/genesis_config.rs | 2 +- vm/types/src/lib.rs | 21 +- vm/types/src/move_resource.rs | 42 ++ vm/types/src/on_chain_config/mod.rs | 6 +- .../src/on_chain_resource/block_metadata.rs | 2 +- vm/types/src/on_chain_resource/epoch.rs | 12 +- vm/types/src/on_chain_resource/global_time.rs | 2 +- vm/types/src/token/token_info.rs | 15 +- vm/vm-runtime/Cargo.toml | 2 +- vm/vm-runtime/src/access_path_cache.rs | 45 +- vm/vm-runtime/src/lib.rs | 5 +- x.toml | 98 +++-- 118 files changed, 1553 insertions(+), 1009 deletions(-) create mode 100644 devtools/x/src/utils.rs create 
mode 100644 vm/stdlib/compiled/latest/stdlib/16_PackageTxnManager.mv rename vm/stdlib/compiled/latest/stdlib/{18_Dao.mv => 17_Dao.mv} (100%) delete mode 100644 vm/stdlib/compiled/latest/stdlib/17_PackageTxnManager.mv create mode 100644 vm/stdlib/compiled/latest/stdlib/18_UpgradeModuleDaoProposal.mv rename vm/stdlib/compiled/latest/stdlib/{20_TransactionTimeoutConfig.mv => 19_TransactionTimeoutConfig.mv} (100%) delete mode 100644 vm/stdlib/compiled/latest/stdlib/19_UpgradeModuleDaoProposal.mv rename vm/stdlib/compiled/latest/stdlib/{21_TransactionPublishOption.mv => 20_TransactionPublishOption.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{22_RewardConfig.mv => 21_RewardConfig.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{23_OnChainConfigDao.mv => 22_OnChainConfigDao.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{24_ModifyDaoConfigProposal.mv => 23_ModifyDaoConfigProposal.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{25_ConsensusConfig.mv => 24_ConsensusConfig.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{26_STC.mv => 25_STC.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{27_TransactionFee.mv => 26_TransactionFee.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{28_Hash.mv => 27_Hash.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{29_Authenticator.mv => 28_Authenticator.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{30_Account.mv => 29_Account.mv} (100%) rename vm/stdlib/compiled/latest/stdlib/{16_Block.mv => 30_Block.mv} (100%) create mode 100644 vm/types/src/move_resource.rs diff --git a/Cargo.lock b/Cargo.lock index 26f46a9132..319402cf37 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3,7 +3,7 @@ [[package]] name = "abigen" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bcs", @@ -841,7 +841,7 @@ dependencies = [ [[package]] name = "borrow-graph" version = "0.0.1" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "mirai-annotations", @@ -892,7 +892,7 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "bytecode" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "borrow-graph", "bytecode-verifier", @@ -910,7 +910,7 @@ dependencies = [ [[package]] name = "bytecode-source-map" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bcs", @@ -927,7 +927,7 @@ dependencies = [ [[package]] name = "bytecode-verifier" version = "0.1.0" -source = 
"git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "borrow-graph", @@ -1742,7 +1742,7 @@ checksum = "993a608597367c6377b258c25d7120740f00ed23a2252b729b1932dd7866f908" [[package]] name = "datatest-stable" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "regex", @@ -1817,7 +1817,7 @@ dependencies = [ [[package]] name = "diem-crypto" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "aes-gcm 0.8.0", "anyhow", @@ -1851,7 +1851,7 @@ dependencies = [ [[package]] name = "diem-crypto-derive" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "proc-macro2 1.0.24", @@ -1862,7 +1862,7 @@ dependencies = [ [[package]] name = "diem-infallible" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", ] @@ -1870,7 +1870,7 @@ dependencies = [ [[package]] name = "diem-log-derive" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "proc-macro2 1.0.24", @@ -1881,7 +1881,7 @@ dependencies = [ [[package]] name = "diem-logger" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "backtrace", "chrono", @@ -1899,7 +1899,7 @@ dependencies = [ [[package]] name = "diem-network-address" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "aes-gcm 0.8.0", "bcs", @@ -1915,7 +1915,7 @@ dependencies = [ [[package]] name = "diem-nibble" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = 
"git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "serde", @@ -1924,7 +1924,7 @@ dependencies = [ [[package]] name = "diem-proptest-helpers" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "crossbeam 0.8.0", "diem-workspace-hack", @@ -1935,7 +1935,7 @@ dependencies = [ [[package]] name = "diem-temppath" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "hex", @@ -1945,7 +1945,7 @@ dependencies = [ [[package]] name = "diem-types" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bcs", @@ -1972,7 +1972,7 @@ dependencies = [ [[package]] name = "diem-workspace-hack" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "byteorder", "bytes 0.5.6", @@ -2090,7 +2090,7 @@ dependencies = [ [[package]] name = "docgen" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bytecode", @@ -2246,7 +2246,7 @@ dependencies = [ [[package]] name = "errmapgen" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bcs", @@ -2395,6 +2395,7 @@ dependencies = [ "rand 0.7.3", "rand_core 0.6.1", "serde", + "serde_bytes", "starcoin-canonical-serialization", "starcoin-crypto", "thiserror", @@ -3371,7 +3372,7 @@ checksum = "47be2f14c678be2fdcab04ab1171db51b2762ce6f0a8ee87c8dd4a04ed216135" [[package]] name = "ir-to-bytecode" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bytecode-source-map", @@ -3390,7 +3391,7 @@ dependencies = [ [[package]] name = "ir-to-bytecode-syntax" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = 
"git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "codespan", @@ -4347,7 +4348,7 @@ checksum = "b48e78b8626927bd980dff38d8147006323f5d7634d7bc9e31c3a59e07da1b28" [[package]] name = "move-core-types" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bcs", @@ -4389,7 +4390,7 @@ dependencies = [ [[package]] name = "move-ir-types" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "codespan", @@ -4404,7 +4405,7 @@ dependencies = [ [[package]] name = "move-lang" version = "0.0.1" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bcs", @@ -4432,7 +4433,7 @@ dependencies = [ [[package]] name = "move-lang-test-utils" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "datatest-stable", "diem-workspace-hack", @@ -4441,7 +4442,7 @@ dependencies = [ [[package]] name = "move-model" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "bytecode-source-map", @@ -4505,7 +4506,7 @@ dependencies = [ [[package]] name = "move-prover-test-utils" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "diem-workspace-hack", @@ -4516,7 +4517,7 @@ dependencies = [ [[package]] name = "move-vm-natives" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-crypto", "diem-workspace-hack", @@ -4531,7 +4532,7 @@ dependencies = [ [[package]] name = "move-vm-runtime" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "bytecode-verifier", 
"diem-crypto", @@ -4550,7 +4551,7 @@ dependencies = [ [[package]] name = "move-vm-types" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "bcs", "diem-crypto", @@ -4962,7 +4963,7 @@ dependencies = [ [[package]] name = "num-variants" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "proc-macro2 1.0.24", @@ -6864,7 +6865,7 @@ checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" [[package]] name = "short-hex-str" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "diem-workspace-hack", "mirai-annotations", @@ -8304,6 +8305,7 @@ dependencies = [ "anyhow", "forkable-jellyfish-merkle", "serde", + "starcoin-canonical-serialization", "starcoin-crypto", "starcoin-state-store-api", "starcoin-types", @@ -8575,6 +8577,7 @@ dependencies = [ "anyhow", "byteorder", "bytes 0.5.6", + "forkable-jellyfish-merkle", "futures 0.3.8", "hex", "itertools 0.10.0", @@ -8641,6 +8644,7 @@ dependencies = [ "anyhow", "bytecode-verifier", "chrono", + "forkable-jellyfish-merkle", "hex", "log 0.4.11", "mirai-annotations", @@ -9989,7 +9993,7 @@ checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" [[package]] name = "vm" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "diem-crypto", @@ -10326,7 +10330,7 @@ checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" [[package]] name = "x" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "anyhow", "chrono", @@ -10352,7 +10356,7 @@ dependencies = [ [[package]] name = "x-core" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ "determinator", "diem-workspace-hack", @@ -10369,7 +10373,7 @@ dependencies = [ [[package]] name = "x-lint" version = "0.1.0" -source = "git+https://github.com/starcoinorg/diem?rev=a69729b2d54af44d2f779bcf167e3f6d681a9821#a69729b2d54af44d2f779bcf167e3f6d681a9821" +source = "git+https://github.com/starcoinorg/diem?rev=89223522186cb4cd39e21e44fb2da745f7a45c7a#89223522186cb4cd39e21e44fb2da745f7a45c7a" dependencies = [ 
"diem-workspace-hack", "guppy", diff --git a/Cargo.toml b/Cargo.toml index 96026d8bc0..641d4b36c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -90,6 +90,96 @@ members = [ "cmd/indexer" ] +default-members = [ + "benchmarks", + "commons/stest", + "commons/scs", + "commons/crypto", + "commons/decrypt", + "commons/logger", + "commons/scmd", + "commons/metrics", + "commons/utils", + "commons/proptest-helpers", + "commons/service-registry", + "commons/timeout-join-handler", + "commons/serde-helpers", + "commons/stream-task", + "commons/api-limiter", + "types", + "types/uint", + "core/traits", + "core/accumulator", + "core/forkable-jellyfish-merkle", + "core/genesis", + "state/api", + "state/state-tree", + "state/statedb", + "state/state-store-api", + "state/service", + "config", + "storage", + "consensus", + "consensus/cryptonight-rs", + "testsuite", + "txpool", + "txpool/api", + "txpool/mock-service", + "executor", + "executor/benchmark", + "chain", + "chain/api", + "chain/open-block", + "chain/mock", + "chain/chain-notify", + "chain/service", + "devtools/x", + "node/api", + "node", + "sync", + "sync/api", + "block-relayer", + "miner", + "node", + "network-p2p", + "network-p2p/types", + "network-p2p/peerset", + "network", + "network/api", + "network-rpc", + "network-rpc/derive", + "network-rpc/core", + "network-rpc/api", + "account/api", + "account/lib", + "account/service", + "rpc/api", + "rpc/middleware", + "rpc/client", + "rpc/server", + "vm/types", + "vm/functional-tests", + "vm/vm-runtime", + "vm/stdlib", + "vm/compiler", + "vm/move-prover", + "vm/transaction-builder", + "vm/transaction-builder-generator", + "vm/move-coverage", + "vm/resource-viewer", + "vm/dev", + "vm/move-explain", + "test-helper", + "cmd/starcoin", + "cmd/faucet", + "cmd/tx-factory", + "cmd/replay", + "cmd/miner_client", + "cmd/generator", + "dataformat-generator", + "cmd/indexer" +] + [profile.dev] panic = "unwind" diff --git a/account/lib/src/account_test.rs b/account/lib/src/account_test.rs index 0980875196..d63d9dfae6 100644 --- a/account/lib/src/account_test.rs +++ b/account/lib/src/account_test.rs @@ -112,7 +112,7 @@ pub fn test_wallet_unlock() -> Result<()> { } #[test] -pub fn test_diem_wallet() -> Result<()> { +pub fn test_wallet_account() -> Result<()> { use core::convert::{From, TryFrom}; use scs::SCSCodec; use starcoin_crypto::ed25519::{Ed25519PrivateKey, Ed25519PublicKey, Ed25519Signature}; @@ -182,14 +182,13 @@ pub fn test_diem_wallet() -> Result<()> { ]) ); - let path = StructTag { + let struct_tag = StructTag { address: CORE_CODE_ADDRESS, module: Identifier::from(IdentStr::new("Account")?), name: Identifier::from(IdentStr::new("Account")?), type_params: vec![], }; - println!("path hash is {:?}", path.hash()); - let access_path = AccessPath::new(address, path.access_vector()); + let access_path = AccessPath::resource_access_path(address, struct_tag); println!("access path is {:?}", access_path); let stxn_bytes = vec![ 125, 67, 213, 38, 157, 219, 137, 205, 183, 247, 184, 18, 104, 155, 241, 53, 7, 0, 0, 0, 0, diff --git a/benchmarks/benches/bench_state_tree.rs b/benchmarks/benches/bench_state_tree.rs index 9ef40c2b53..f94bf01f11 100644 --- a/benchmarks/benches/bench_state_tree.rs +++ b/benchmarks/benches/bench_state_tree.rs @@ -4,6 +4,7 @@ use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; use crypto::hash::*; use forkable_jellyfish_merkle::blob::Blob; +use forkable_jellyfish_merkle::HashValueKey; use rand::{rngs::StdRng, SeedableRng}; use starcoin_config::RocksdbConfig; use 
starcoin_state_store_api::StateNodeStore; @@ -75,7 +76,7 @@ fn bench_put_and_commit(c: &mut Criterion) { std::iter::repeat(0u8) .take(*n as usize) .map(|_| { - let key = HashValue::random_with_rng(&mut rng); + let key = HashValueKey(HashValue::random_with_rng(&mut rng)); let value = Blob::from(HashValue::random_with_rng(&mut rng).to_vec()); (key, value) @@ -100,14 +101,14 @@ fn bench_put_and_commit(c: &mut Criterion) { criterion_group!(benches, bench_get_with_proof, bench_put_and_commit); criterion_main!(benches); -fn gen_kv_from_seed(seed: &[u8], num_keys: usize) -> HashMap { +fn gen_kv_from_seed(seed: &[u8], num_keys: usize) -> HashMap { assert!(seed.len() < 32); let mut actual_seed = [0u8; 32]; actual_seed[..seed.len()].copy_from_slice(&seed); let mut rng: StdRng = StdRng::from_seed(actual_seed); let mut kvs = HashMap::new(); for _i in 0..num_keys { - let key = HashValue::random_with_rng(&mut rng); + let key = HashValueKey(HashValue::random_with_rng(&mut rng)); let value = Blob::from(HashValue::random_with_rng(&mut rng).to_vec()); kvs.insert(key, value); } @@ -124,10 +125,10 @@ fn new_empty_store + Clone>(p: P) -> Arc { } fn prepare_tree( - state_tree: &StateTree, + state_tree: &StateTree, seed: &[u8], num_keys: usize, -) -> (HashMap, HashValue) { +) -> (HashMap, HashValue) { let kvs = gen_kv_from_seed(seed, num_keys); for (k, v) in kvs.clone() { state_tree.put(k, v.into()); diff --git a/cmd/starcoin/Cargo.toml b/cmd/starcoin/Cargo.toml index d21f472754..9c92149ed3 100644 --- a/cmd/starcoin/Cargo.toml +++ b/cmd/starcoin/Cargo.toml @@ -44,7 +44,7 @@ starcoin-genesis = { path = "../../core/genesis" } starcoin-resource-viewer = { path = "../../vm/resource-viewer" } starcoin-service-registry = { path = "../../commons/service-registry" } starcoin-move-explain = { path = "../../vm/move-explain" } -errmapgen = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +errmapgen = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } [dev-dependencies] test-helper= {path = "../../test-helper"} diff --git a/cmd/starcoin/src/state/get_cmd.rs b/cmd/starcoin/src/state/get_cmd.rs index a0d42f11fd..6507e26896 100644 --- a/cmd/starcoin/src/state/get_cmd.rs +++ b/cmd/starcoin/src/state/get_cmd.rs @@ -3,25 +3,17 @@ use crate::cli_state::CliState; use crate::StarcoinOpt; -use anyhow::{bail, format_err, Result}; +use anyhow::{format_err, Result}; use scmd::{CommandAction, ExecContext}; use starcoin_resource_viewer::{AnnotatedMoveStruct, MoveValueAnnotator}; use starcoin_rpc_client::RemoteStateReader; use starcoin_types::access_path::AccessPath; use starcoin_vm_types::account_address::AccountAddress; use starcoin_vm_types::account_config::account_struct_tag; -use starcoin_vm_types::language_storage::{StructTag, TypeTag}; -use starcoin_vm_types::parser::parse_type_tag; +use starcoin_vm_types::language_storage::StructTag; +use starcoin_vm_types::parser::parse_struct_tag; use structopt::StructOpt; -fn parse_struct_tag(s: &str) -> Result { - let type_tag = parse_type_tag(s)?; - match type_tag { - TypeTag::Struct(st) => Ok(st), - t => bail!("expect a struct tag, found: {:?}", t), - } -} - //TODO support custom access_path. #[derive(Debug, StructOpt)] #[structopt(name = "get")] @@ -57,7 +49,10 @@ impl CommandAction for GetCommand { None => account_struct_tag(), }; let state = client - .state_get(AccessPath::new(account_addr, struct_tag.access_vector()))? 
+ .state_get(AccessPath::resource_access_path( + account_addr, + struct_tag.clone(), + ))? .ok_or_else(|| format_err!("Account with address {} state not exist.", account_addr))?; let chain_state_reader = RemoteStateReader::new(client)?; let viewer = MoveValueAnnotator::new(&chain_state_reader); diff --git a/commons/crypto/Cargo.toml b/commons/crypto/Cargo.toml index 8976aad4c5..901d2e1bc7 100644 --- a/commons/crypto/Cargo.toml +++ b/commons/crypto/Cargo.toml @@ -11,8 +11,8 @@ serde = { version = "1.0" } serde_bytes = "0.11.5" hex = "0.4.2" anyhow = "1.0" -diem-crypto = { package="diem-crypto", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821", features = ["fuzzing"] } -diem-crypto-derive = { package="diem-crypto-derive", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +diem-crypto = { package="diem-crypto", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a", features = ["fuzzing"] } +diem-crypto-derive = { package="diem-crypto-derive", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } scs = { package="starcoin-canonical-serialization", path = "../scs"} crypto-macro = { package="starcoin-crypto-macro", path = "./crypto-macro"} rand = "0.7.3" diff --git a/commons/proptest-helpers/Cargo.toml b/commons/proptest-helpers/Cargo.toml index b043d0304f..0fab5c888f 100644 --- a/commons/proptest-helpers/Cargo.toml +++ b/commons/proptest-helpers/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" [dependencies] crossbeam = "0.7.3" -diem-proptest-helpers = { package="diem-proptest-helpers", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +diem-proptest-helpers = { package="diem-proptest-helpers", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } proptest = "0.10.1" proptest-derive = "0.2.0" diff --git a/config/Cargo.toml b/config/Cargo.toml index 3006f9f7c9..aa868af8cf 100644 --- a/config/Cargo.toml +++ b/config/Cargo.toml @@ -27,5 +27,5 @@ starcoin-types = { path = "../types" } starcoin-vm-types = { path = "../vm/types" } network-p2p-types = { path = "../network-p2p/types"} starcoin-logger = {path = "../commons/logger", package="starcoin-logger"} -diem-temppath = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +diem-temppath = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } starcoin-system = {path = "../commons/system", package="starcoin-system"} diff --git a/core/forkable-jellyfish-merkle/Cargo.toml b/core/forkable-jellyfish-merkle/Cargo.toml index 68dddd6cb0..9805f366b4 100644 --- a/core/forkable-jellyfish-merkle/Cargo.toml +++ b/core/forkable-jellyfish-merkle/Cargo.toml @@ -17,6 +17,7 @@ num-traits = "0.2" proptest = { version = "0.10.1", optional = true } proptest-derive = { version = "0.2.0", optional = true } serde = { version = "1.0", features = ["derive"] } +serde_bytes = {version = "0.11"} starcoin-crypto = { path = "../../commons/crypto"} thiserror = "1.0" tiny-keccak = "1.5" diff --git a/core/forkable-jellyfish-merkle/benches/bench_merkle_tree.rs b/core/forkable-jellyfish-merkle/benches/bench_merkle_tree.rs index 65a3c8271b..65126b8a21 100644 --- a/core/forkable-jellyfish-merkle/benches/bench_merkle_tree.rs +++ b/core/forkable-jellyfish-merkle/benches/bench_merkle_tree.rs @@ -1,5 +1,7 @@ use criterion::{criterion_group, criterion_main, Criterion}; 
-use forkable_jellyfish_merkle::{blob::Blob, mock_tree_store::MockTreeStore, JellyfishMerkleTree}; +use forkable_jellyfish_merkle::{ + blob::Blob, mock_tree_store::MockTreeStore, HashValueKey, JellyfishMerkleTree, RawKey, +}; use rand::{rngs::StdRng, SeedableRng}; use starcoin_crypto::hash::*; use std::collections::HashMap; @@ -18,7 +20,7 @@ fn bench_get_with_proof(c: &mut Criterion) { k }, |k| { - let (value, _proof) = tree.get_with_proof(root, *k).unwrap(); + let (value, _proof) = tree.get_with_proof(root, k.key_hash()).unwrap(); assert_eq!(&value.unwrap(), kvs.get(k).unwrap()) }, ); @@ -35,14 +37,14 @@ fn bench_get_with_proof(c: &mut Criterion) { criterion_group!(benches, bench_get_with_proof); criterion_main!(benches); -fn gen_kv_from_seed(seed: &[u8], num_keys: usize) -> HashMap { +fn gen_kv_from_seed(seed: &[u8], num_keys: usize) -> HashMap { assert!(seed.len() < 32); let mut actual_seed = [0u8; 32]; actual_seed[..seed.len()].copy_from_slice(&seed); let mut rng: StdRng = StdRng::from_seed(actual_seed); let mut kvs = HashMap::new(); for _i in 0..num_keys { - let key = HashValue::random_with_rng(&mut rng); + let key = HashValueKey(HashValue::random_with_rng(&mut rng)); let value = Blob::from(HashValue::random_with_rng(&mut rng).to_vec()); kvs.insert(key, value); } @@ -53,7 +55,7 @@ fn gen_kv_from_seed(seed: &[u8], num_keys: usize) -> HashMap { fn prepare_tree( seed: &[u8], num_keys: usize, -) -> (HashMap, MockTreeStore, HashValue) { +) -> (HashMap, MockTreeStore, HashValue) { let kvs = gen_kv_from_seed(seed, num_keys); let db = MockTreeStore::default(); diff --git a/core/forkable-jellyfish-merkle/src/blob.rs b/core/forkable-jellyfish-merkle/src/blob.rs index a9bda721e6..42f895dddb 100644 --- a/core/forkable-jellyfish-merkle/src/blob.rs +++ b/core/forkable-jellyfish-merkle/src/blob.rs @@ -15,17 +15,12 @@ pub struct Blob { impl fmt::Debug for Blob { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // let decoded = bcs::from_bytes(&self.blob) - // .map(|account_state: AccountState| format!("{:#?}", account_state)) - // .unwrap_or_else(|_| String::from("[fail]")); - write!( f, "Blob {{ \n \ Raw: 0x{} \n \ }}", hex::encode(&self.blob), - // decoded, ) } } @@ -37,8 +32,8 @@ impl AsRef<[u8]> for Blob { } impl From for Vec { - fn from(account_state_blob: Blob) -> Vec { - account_state_blob.blob + fn from(blob: Blob) -> Vec { + blob.blob } } diff --git a/core/forkable-jellyfish-merkle/src/iterator/mod.rs b/core/forkable-jellyfish-merkle/src/iterator/mod.rs index 319f366eaf..6bff6821bd 100644 --- a/core/forkable-jellyfish-merkle/src/iterator/mod.rs +++ b/core/forkable-jellyfish-merkle/src/iterator/mod.rs @@ -17,12 +17,12 @@ use crate::{ nibble::Nibble, nibble_path::NibblePath, node_type::{InternalNode, Node, NodeKey}, - TreeReader, + RawKey, TreeReader, }; use anyhow::{format_err, Result}; use starcoin_crypto::HashValue; +use std::marker::PhantomData; -type Version = HashValue; /// `NodeVisitInfo` keeps track of the status of an internal node during the iteration process. It /// indicates which ones of its children have been visited. #[derive(Debug)] @@ -94,12 +94,12 @@ impl NodeVisitInfo { } /// The `JellyfishMerkleIterator` implementation. -pub struct JellyfishMerkleIterator<'a, R: 'a + TreeReader> { +pub struct JellyfishMerkleIterator<'a, K: RawKey, R: 'a + TreeReader> { /// The storage engine from which we can read nodes using node keys. reader: &'a R, - /// The version of the tree this iterator is running on. 
- state_root_hash: Version, + /// The root hash of the tree this iterator is running on. + state_root_hash: HashValue, /// The stack used for depth first traversal. parent_stack: Vec, @@ -108,16 +108,19 @@ pub struct JellyfishMerkleIterator<'a, R: 'a + TreeReader> { /// `self.parent_stack` is empty. But in case of a tree with a single leaf, we need this /// additional bit. done: bool, + + raw_key: PhantomData, } -impl<'a, R> JellyfishMerkleIterator<'a, R> +impl<'a, K, R> JellyfishMerkleIterator<'a, K, R> where - R: 'a + TreeReader, + R: 'a + TreeReader, + K: RawKey, { /// Constructs a new iterator. This puts the internal state in the correct position, so the /// following `next` call will yield the smallest key that is greater or equal to /// `starting_key`. - pub fn new(reader: &'a R, state_root_hash: Version, starting_key: HashValue) -> Result { + pub fn new(reader: &'a R, state_root_hash: HashValue, starting_key: HashValue) -> Result { let mut parent_stack = vec![]; let mut done = false; @@ -158,6 +161,7 @@ where state_root_hash, parent_stack, done, + raw_key: PhantomData, }); } } @@ -166,7 +170,7 @@ where match reader.get_node(¤t_node_key)? { Node::Internal(_) => unreachable!("Should have reached the bottom of the tree."), Node::Leaf(leaf_node) => { - if leaf_node.account_key() < starting_key { + if leaf_node.raw_key().key_hash() < starting_key { Self::cleanup_stack(&mut parent_stack); if parent_stack.is_empty() { done = true; @@ -181,6 +185,7 @@ where state_root_hash, parent_stack, done, + raw_key: PhantomData, }) } @@ -208,11 +213,12 @@ where } } -impl<'a, R> Iterator for JellyfishMerkleIterator<'a, R> +impl<'a, K, R> Iterator for JellyfishMerkleIterator<'a, K, R> where - R: 'a + TreeReader, + R: 'a + TreeReader, + K: RawKey, { - type Item = Result<(HashValue, Blob)>; + type Item = Result<(K, Blob)>; fn next(&mut self) -> Option { if self.done { @@ -228,7 +234,7 @@ where // true in `new`). Return the node and mark `self.done` so next time we return // None. 
self.done = true; - return Some(Ok((leaf_node.account_key(), leaf_node.blob().clone()))); + return Some(Ok((leaf_node.raw_key().clone(), leaf_node.blob().clone()))); } Ok(Node::Internal(_)) => { // This means `starting_key` is bigger than every key in this tree, or we have @@ -259,7 +265,7 @@ where self.parent_stack.push(visit_info); } Ok(Node::Leaf(leaf_node)) => { - let ret = (leaf_node.account_key(), leaf_node.blob().clone()); + let ret = (leaf_node.raw_key().clone(), leaf_node.blob().clone()); Self::cleanup_stack(&mut self.parent_stack); return Some(Ok(ret)); } diff --git a/core/forkable-jellyfish-merkle/src/jellyfish_merkle_test.rs b/core/forkable-jellyfish-merkle/src/jellyfish_merkle_test.rs index a8f89a7b46..6c4a711dff 100644 --- a/core/forkable-jellyfish-merkle/src/jellyfish_merkle_test.rs +++ b/core/forkable-jellyfish-merkle/src/jellyfish_merkle_test.rs @@ -40,7 +40,9 @@ fn test_insert_to_empty_tree() { let key = HashValue::random(); let value = Blob::from(vec![1u8, 2u8, 3u8, 4u8]); - let (_new_root_hash, batch) = tree.put_blob_set(None, vec![(key, value.clone())]).unwrap(); + let (_new_root_hash, batch) = tree + .put_blob_set(None, vec![(key.into(), value.clone())]) + .unwrap(); assert!(batch.stale_node_index_batch.is_empty()); db.write_tree_update_batch(batch).unwrap(); @@ -57,10 +59,10 @@ fn test_delete_from_tree() { let key = HashValue::new([0x00u8; HashValue::LENGTH]); let value = Blob::from(vec![1u8, 2u8, 3u8, 4u8]); - let (_new_root_hash, batch) = tree.put_blob_set(None, vec![(key, value)]).unwrap(); + let (_new_root_hash, batch) = tree.put_blob_set(None, vec![(key.into(), value)]).unwrap(); db.write_tree_update_batch(batch).unwrap(); - let (new_root, batch) = tree.delete(Some(_new_root_hash), key).unwrap(); + let (new_root, batch) = tree.delete(Some(_new_root_hash), key.into()).unwrap(); assert_eq!(new_root, *SPARSE_MERKLE_PLACEHOLDER_HASH); assert_eq!(batch.num_stale_leaves, 1); assert_eq!(batch.stale_node_index_batch.len(), 1); @@ -71,12 +73,12 @@ fn test_delete_from_tree() { let value2 = Blob::from(vec![3u8, 4u8]); let (_root1_hash, batch) = tree - .put_blob_set(Some(_new_root_hash), vec![(key2, value2)]) + .put_blob_set(Some(_new_root_hash), vec![(key2.into(), value2)]) .unwrap(); assert_eq!(batch.stale_node_index_batch.len(), 0); db.write_tree_update_batch(batch).unwrap(); - let (new_root, batch) = tree.delete(Some(_root1_hash), key2).unwrap(); + let (new_root, batch) = tree.delete(Some(_root1_hash), key2.into()).unwrap(); assert_eq!(new_root, _new_root_hash); assert_eq!(batch.num_stale_leaves, 1); assert_eq!(batch.stale_node_index_batch.len(), 2); @@ -89,7 +91,7 @@ fn test_insert_at_leaf_with_internal_created() { let db = MockTreeStore::default(); let tree = JellyfishMerkleTree::new(&db); - let key1 = HashValue::new([0x00u8; HashValue::LENGTH]); + let key1 = HashValueKey(HashValue::new([0x00u8; HashValue::LENGTH])); let value1 = Blob::from(vec![1u8, 2u8]); let (_root0_hash, batch) = tree @@ -98,11 +100,14 @@ fn test_insert_at_leaf_with_internal_created() { assert!(batch.stale_node_index_batch.is_empty()); db.write_tree_update_batch(batch).unwrap(); - assert_eq!(tree.get(_root0_hash, key1).unwrap().unwrap(), value1); + assert_eq!( + tree.get(_root0_hash, key1.key_hash()).unwrap().unwrap(), + value1 + ); assert_eq!(db.num_nodes(), 1); // Insert at the previous leaf node. Should generate an internal node at the root. // Change the 1st nibble to 15. 
- let key2 = update_nibble(&key1, 0, 15); + let key2 = HashValueKey(update_nibble(&key1.key_hash(), 0, 15)); let value2 = Blob::from(vec![3u8, 4u8]); let (_root1_hash, batch) = tree @@ -111,9 +116,15 @@ fn test_insert_at_leaf_with_internal_created() { assert_eq!(batch.stale_node_index_batch.len(), 0); db.write_tree_update_batch(batch).unwrap(); - assert_eq!(tree.get(_root1_hash, key1).unwrap().unwrap(), value1); - assert!(tree.get(_root0_hash, key2).unwrap().is_none()); - assert_eq!(tree.get(_root1_hash, key2).unwrap().unwrap(), value2); + assert_eq!( + tree.get(_root1_hash, key1.key_hash()).unwrap().unwrap(), + value1 + ); + assert!(tree.get(_root0_hash, key2.key_hash()).unwrap().is_none()); + assert_eq!( + tree.get(_root1_hash, key2.key_hash()).unwrap().unwrap(), + value2 + ); // get # of nodes assert_eq!(db.num_nodes(), 3); @@ -145,7 +156,7 @@ fn test_insert_at_leaf_with_multiple_internals_created() { let value1 = Blob::from(vec![1u8, 2u8]); let (_root0_hash, batch) = tree - .put_blob_set(None, vec![(key1, value1.clone())]) + .put_blob_set(None, vec![(key1.into(), value1.clone())]) .unwrap(); db.write_tree_update_batch(batch).unwrap(); assert_eq!(tree.get(_root0_hash, key1).unwrap().unwrap(), value1); @@ -156,7 +167,7 @@ fn test_insert_at_leaf_with_multiple_internals_created() { let value2 = Blob::from(vec![3u8, 4u8]); let (_root1_hash, batch) = tree - .put_blob_set(Some(_root0_hash), vec![(key2, value2.clone())]) + .put_blob_set(Some(_root0_hash), vec![(key2.into(), value2.clone())]) .unwrap(); db.write_tree_update_batch(batch).unwrap(); assert_eq!(tree.get(_root0_hash, key1,).unwrap().unwrap(), value1); @@ -167,8 +178,8 @@ fn test_insert_at_leaf_with_multiple_internals_created() { assert_eq!(db.num_nodes(), 4); tree.print_tree(_root1_hash, key1).unwrap(); - let leaf1 = Node::new_leaf(key1, value1); - let leaf2 = Node::new_leaf(key2, value2.clone()); + let leaf1: Node = Node::new_leaf(key1.into(), value1); + let leaf2: Node = Node::new_leaf(key2.into(), value2.clone()); let internal = { let mut children = HashMap::new(); children.insert( @@ -194,7 +205,10 @@ fn test_insert_at_leaf_with_multiple_internals_created() { // 3. Update leaf2 with new value let value2_update = Blob::from(vec![5u8, 6u8]); let (_root2_hash, batch) = tree - .put_blob_set(Some(_root1_hash), vec![(key2, value2_update.clone())]) + .put_blob_set( + Some(_root1_hash), + vec![(key2.into(), value2_update.clone())], + ) .unwrap(); db.write_tree_update_batch(batch).unwrap(); assert!(tree.get(_root0_hash, key2,).unwrap().is_none()); @@ -255,24 +269,24 @@ fn test_batch_insertion() { let key6 = update_nibble(&key1, 3, 6); let value6 = Blob::from(vec![6u8]); - let batches = vec![ - vec![(key1, value1)], - vec![(key2, value2)], - vec![(key3, value3)], - vec![(key4, value4)], - vec![(key5, value5)], - vec![(key6, value6)], - vec![(key2, value2_update)], + let batches: Vec> = vec![ + vec![(key1.into(), value1)], + vec![(key2.into(), value2)], + vec![(key3.into(), value3)], + vec![(key4.into(), value4)], + vec![(key5.into(), value5)], + vec![(key6.into(), value6)], + vec![(key2.into(), value2_update)], ]; let one_batch = batches.iter().flatten().cloned().collect::>(); let mut to_verify = one_batch.clone(); // key2 was updated so we remove it. 
to_verify.remove(1); - let verify_fn = |tree: &JellyfishMerkleTree, root: HashValue| { + let verify_fn = |tree: &JellyfishMerkleTree, root: HashValue| { to_verify .iter() - .for_each(|(k, v)| assert_eq!(tree.get(root, *k).unwrap().unwrap(), *v)) + .for_each(|(k, v)| assert_eq!(tree.get(root, k.0).unwrap().unwrap(), *v)) }; // Insert as one batch. @@ -427,9 +441,9 @@ fn test_non_existence() { .put_blob_set( None, vec![ - (key1, value1.clone()), - (key2, value2.clone()), - (key3, value3.clone()), + (key1.into(), value1.clone()), + (key2.into(), value2.clone()), + (key3.into(), value3.clone()), ], ) .unwrap(); @@ -485,7 +499,8 @@ fn test_put_blob_sets() { for _version in 0..10 { let mut keyed_blob_set = vec![]; for _ in 0..2 { - keyed_blob_set.push(iter.next().unwrap()); + let next = iter.next().unwrap(); + keyed_blob_set.push((next.0.into(), next.1)); } let (root, batch) = tree.put_blob_set(temp_root, keyed_blob_set).unwrap(); db.write_tree_update_batch(batch.clone()).unwrap(); @@ -508,7 +523,7 @@ fn test_put_blob_sets() { let mut keyed_blob_set = vec![]; for _ in 0..2 { let val = iter.next().unwrap(); - keyed_blob_set.push((val.0, Some(val.1))); + keyed_blob_set.push((val.0.into(), Some(val.1))); } blob_sets.push(keyed_blob_set); } @@ -531,16 +546,16 @@ fn many_keys_get_proof_and_verify_tree_root(seed: &[u8], num_keys: usize) { for _i in 0..num_keys { let key = HashValue::random_with_rng(&mut rng); let value = Blob::from(HashValue::random_with_rng(&mut rng).to_vec()); - kvs.push((key, value)); + kvs.push((HashValueKey(key), value)); } let (root, batch) = tree.put_blob_set(None, kvs.clone()).unwrap(); db.write_tree_update_batch(batch).unwrap(); for (k, v) in &kvs { - let (value, proof) = tree.get_with_proof(root, *k).unwrap(); + let (value, proof) = tree.get_with_proof(root, k.key_hash()).unwrap(); assert_eq!(value.unwrap(), *v); - assert!(proof.verify(root, *k, Some(v)).is_ok()); + assert!(proof.verify(root, k.key_hash(), Some(v)).is_ok()); } } @@ -572,7 +587,7 @@ fn many_versions_get_proof_and_verify_tree_root(seed: &[u8], num_versions: usize let mut current_root = None; for (_idx, kvs) in kvs.iter().enumerate() { let (root, batch) = tree - .put_blob_set(current_root, vec![(kvs.0, kvs.1.clone())]) + .put_blob_set(current_root, vec![(kvs.0.into(), kvs.1.clone())]) .unwrap(); roots.push(root); db.write_tree_update_batch(batch).unwrap(); @@ -582,7 +597,7 @@ fn many_versions_get_proof_and_verify_tree_root(seed: &[u8], num_versions: usize // Update value of all keys for (_idx, kvs) in kvs.iter().enumerate() { let (root, batch) = tree - .put_blob_set(current_root, vec![(kvs.0, kvs.2.clone())]) + .put_blob_set(current_root, vec![(kvs.0.into(), kvs.2.clone())]) .unwrap(); roots.push(root); db.write_tree_update_batch(batch).unwrap(); @@ -618,7 +633,7 @@ proptest! { #[test] fn test_get_with_proof1( (existent_kvs, nonexistent_keys) in hash_map( - any::(), + any::(), any::(), 1..1000, ) @@ -627,7 +642,7 @@ proptest! { ( Just(kvs), vec( - any::().prop_filter( + any::().prop_filter( "Make sure these keys do not exist in the tree.", move |key| !kvs_clone.contains_key(key), ), @@ -645,14 +660,14 @@ proptest! 
{ #[test] fn test_get_with_proof2( - key1 in any::() + key1 in any::() .prop_filter( "Can't be 0xffffff...", - |key| *key != HashValue::new([0xff; HashValue::LENGTH]), + |key| *key != HashValue::new([0xff; HashValue::LENGTH]).into(), ), accounts in vec(any::(), 2), ) { - let key2 = plus_one(key1); + let key2 = HashValueKey(plus_one(key1.0)); let mut kvs = HashMap::new(); kvs.insert(key1, accounts[0].clone()); @@ -666,7 +681,7 @@ proptest! { #[test] fn test_get_range_proof( - (btree, n) in btree_map(any::(), any::(), 1..50) + (btree, n) in btree_map(any::(), any::(), 1..50) .prop_flat_map(|btree| { let len = btree.len(); (Just(btree), 0..len) @@ -676,7 +691,7 @@ proptest! { let tree = JellyfishMerkleTree::new(&db); let root_hash = root_hash_option.unwrap(); let nth_key = *btree.keys().nth(n).unwrap(); - let proof = tree.get_range_proof(root_hash, nth_key).unwrap(); + let proof = tree.get_range_proof(root_hash, nth_key.key_hash()).unwrap(); verify_range_proof( root_hash, btree.into_iter().take(n + 1).collect(), @@ -686,33 +701,37 @@ proptest! { } fn test_existent_keys_impl<'a>( - tree: &JellyfishMerkleTree<'a, MockTreeStore>, + tree: &JellyfishMerkleTree<'a, HashValueKey, MockTreeStore>, root_hash: HashValue, - existent_kvs: &HashMap, + existent_kvs: &HashMap, ) { for (key, value) in existent_kvs { - let (account, proof) = tree.get_with_proof(root_hash, *key).unwrap(); - assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok()); + let (account, proof) = tree.get_with_proof(root_hash, key.key_hash()).unwrap(); + assert!(proof + .verify(root_hash, key.key_hash(), account.as_ref()) + .is_ok()); assert_eq!(account.unwrap(), *value); } } fn test_nonexistent_keys_impl<'a>( - tree: &JellyfishMerkleTree<'a, MockTreeStore>, + tree: &JellyfishMerkleTree<'a, HashValueKey, MockTreeStore>, root_hash: HashValue, - nonexistent_keys: &[HashValue], + nonexistent_keys: &[HashValueKey], ) { for key in nonexistent_keys { - let (account, proof) = tree.get_with_proof(root_hash, *key).unwrap(); - assert!(proof.verify(root_hash, *key, account.as_ref()).is_ok()); + let (account, proof) = tree.get_with_proof(root_hash, key.key_hash()).unwrap(); + assert!(proof + .verify(root_hash, key.key_hash(), account.as_ref()) + .is_ok()); assert!(account.is_none()); } } /// Checks if we can construct the expected root hash using the entries in the btree and the proof. -fn verify_range_proof( +fn verify_range_proof( expected_root_hash: HashValue, - btree: BTreeMap, + btree: BTreeMap, proof: SparseMerkleRangeProof, ) { // For example, given the following sparse Merkle tree: @@ -749,16 +768,17 @@ fn verify_range_proof( // that would cause `X` to end up in the above position. let mut btree1 = BTreeMap::new(); for (key, blob) in &btree { - let leaf = LeafNode::new(*key, blob.clone()); - btree1.insert(*key, leaf.hash()); + let leaf = LeafNode::new(key.clone(), blob.clone()); + btree1.insert(key.key_hash(), leaf.crypto_hash()); } // Using the above example, `last_proven_key` is `e`. We look at the path from root to `e`. // For each 0-bit, there should be a sibling in the proof. And we use the path from root to // this position, plus a `1` as the key. 
- let last_proven_key = *btree + let last_proven_key = btree .keys() .last() - .expect("We are proving at least one key."); + .expect("We are proving at least one key.") + .key_hash(); for (i, sibling) in last_proven_key .iter_bits() .enumerate() diff --git a/core/forkable-jellyfish-merkle/src/lib.rs b/core/forkable-jellyfish-merkle/src/lib.rs index e2cd09611d..01d39a051b 100644 --- a/core/forkable-jellyfish-merkle/src/lib.rs +++ b/core/forkable-jellyfish-merkle/src/lib.rs @@ -6,6 +6,8 @@ #![forbid(unsafe_code)] #![allow(dead_code)] +//TODO fix +#![allow(clippy::unit_arg)] //! This module implements [`JellyfishMerkleTree`] backed by storage module. The tree itself doesn't //! persist anything, but realizes the logic of R/W only. The write path will produce all the //! intermediate results in a batch for storage layer to commit and the read path will return @@ -91,8 +93,12 @@ use blob::Blob; use nibble_path::{skip_common_prefix, NibbleIterator, NibblePath}; use node_type::{Child, Children, InternalNode, LeafNode, Node, NodeKey}; use proof::{SparseMerkleProof, SparseMerkleRangeProof}; +#[cfg(any(test, feature = "fuzzing"))] +use proptest_derive::Arbitrary; +use serde::{de::DeserializeOwned, Serialize}; use starcoin_crypto::{hash::PlainCryptoHash, HashValue}; use std::collections::{BTreeMap, BTreeSet}; +use std::marker::PhantomData; use tree_cache::TreeCache; fn create_literal_hash(word: &str) -> HashValue { @@ -108,30 +114,31 @@ pub const ROOT_NIBBLE_HEIGHT: usize = HashValue::LENGTH * 2; /// `TreeReader` defines the interface between /// [`JellyfishMerkleTree`](struct.JellyfishMerkleTree.html) /// and underlying storage holding nodes. -pub trait TreeReader { +pub trait TreeReader { /// Gets node given a node key. Returns error if the node does not exist. - fn get_node(&self, node_key: &NodeKey) -> Result { + fn get_node(&self, node_key: &NodeKey) -> Result> { self.get_node_option(node_key)? .ok_or_else(|| format_err!("Missing node at {:?}.", node_key)) } /// Gets node given a node key. Returns `None` if the node does not exist. - fn get_node_option(&self, node_key: &NodeKey) -> Result>; + fn get_node_option(&self, node_key: &NodeKey) -> Result>>; /// Gets the rightmost leaf. Note that this assumes we are in the process of restoring the tree /// and all nodes are at the same version. - fn get_rightmost_leaf(&self) -> Result> { - Ok(None) + fn get_rightmost_leaf(&self) -> Result)>> { + //TODO + unimplemented!() } } -pub trait TreeWriter { +pub trait TreeWriter { /// Writes a node batch into storage. - fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()>; + fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()>; } /// Node batch that will be written into db atomically with other batches. -pub type NodeBatch = BTreeMap; +pub type NodeBatch = BTreeMap>; /// [`StaleNodeIndex`](struct.StaleNodeIndex.html) batch that will be written into db atomically /// with other batches. pub type StaleNodeIndexBatch = BTreeSet; @@ -150,34 +157,109 @@ pub struct StaleNodeIndex { /// [`StaleNodeIndexBatch`](type.StaleNodeIndexBatch.html) and some stats of nodes that represents /// the incremental updates of a tree and pruning indices after applying a write set, /// which is a vector of `hashed_account_address` and `new_account_state_blob` pairs. 
-#[derive(Clone, Debug, Default, Eq, PartialEq)]
-pub struct TreeUpdateBatch {
-    pub node_batch: NodeBatch,
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct TreeUpdateBatch<K: RawKey> {
+    pub node_batch: NodeBatch<K>,
     pub stale_node_index_batch: StaleNodeIndexBatch,
     pub num_new_leaves: usize,
     pub num_stale_leaves: usize,
 }
 
+impl<K> Default for TreeUpdateBatch<K>
+where
+    K: RawKey,
+{
+    fn default() -> Self {
+        Self {
+            node_batch: NodeBatch::default(),
+            stale_node_index_batch: StaleNodeIndexBatch::default(),
+            num_new_leaves: 0,
+            num_stale_leaves: 0,
+        }
+    }
+}
+
+pub trait RawKey: Clone + Ord {
+    /// Raw key's hash, which will be used as the tree's nibble path.
+    /// Directly uses the sha3_256 hash of the original bytes; does not use CryptoHash to add a salt.
+    fn key_hash(&self) -> HashValue {
+        HashValue::sha3_256_of(
+            self.encode_key()
+                .expect("Serialize key failed when hash.")
+                .as_slice(),
+        )
+    }
+
+    /// Encode the raw key; the encoded bytes will be stored in the leaf node.
+    fn encode_key(&self) -> Result<Vec<u8>>;
+
+    fn decode_key(bytes: &[u8]) -> Result<Self>;
+}
+
+impl<T> RawKey for T
+where
+    T: Clone + Ord + Serialize + DeserializeOwned,
+{
+    fn encode_key(&self) -> Result<Vec<u8>> {
+        scs::to_bytes(self)
+    }
+
+    fn decode_key(bytes: &[u8]) -> Result<Self> {
+        scs::from_bytes(bytes)
+    }
+}
+
+//FIXME
+#[allow(clippy::unit_arg)]
+#[derive(Clone, Debug, Copy, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))]
+pub struct HashValueKey(pub HashValue);
+
+impl RawKey for HashValueKey {
+    fn key_hash(&self) -> HashValue {
+        self.0
+    }
+
+    fn encode_key(&self) -> Result<Vec<u8>> {
+        Ok(self.0.to_vec())
+    }
+
+    fn decode_key(bytes: &[u8]) -> Result<Self> {
+        Ok(HashValueKey(HashValue::from_slice(bytes)?))
+    }
+}
+
+impl From<HashValue> for HashValueKey {
+    fn from(hash: HashValue) -> Self {
+        HashValueKey(hash)
+    }
+}
+
 /// The Jellyfish Merkle tree data structure. See [`crate`] for description.
-pub struct JellyfishMerkleTree<'a, R: 'a + TreeReader> {
+pub struct JellyfishMerkleTree<'a, K: RawKey, R: 'a + TreeReader<K>> {
     reader: &'a R,
+    raw_key: PhantomData<K>,
 }
 
-impl<'a, R> JellyfishMerkleTree<'a, R>
+impl<'a, K, R> JellyfishMerkleTree<'a, K, R>
 where
-    R: 'a + TreeReader,
+    K: RawKey,
+    R: 'a + TreeReader<K>,
 {
     /// Creates a `JellyfishMerkleTree` backed by the given [`TreeReader`](trait.TreeReader.html).
pub fn new(reader: &'a R) -> Self { - Self { reader } + Self { + reader, + raw_key: PhantomData, + } } #[cfg(test)] pub fn put_blob_set( &self, state_root_hash: Option, - blob_set: Vec<(HashValue, Blob)>, - ) -> Result<(HashValue, TreeUpdateBatch)> { + blob_set: Vec<(K, Blob)>, + ) -> Result<(HashValue, TreeUpdateBatch)> { let blob_set = blob_set .into_iter() .map(|(k, v)| (k, Some(v))) @@ -196,8 +278,8 @@ where pub fn delete( &self, state_root_hash: Option, - key: HashValue, - ) -> Result<(HashValue, TreeUpdateBatch)> { + key: K, + ) -> Result<(HashValue, TreeUpdateBatch)> { self.updates(state_root_hash, vec![(key, None)]) } @@ -205,8 +287,8 @@ where pub fn insert_all( &self, state_root_hash: Option, - blob_set: Vec<(HashValue, Blob)>, - ) -> Result<(HashValue, TreeUpdateBatch)> { + blob_set: Vec<(K, Blob)>, + ) -> Result<(HashValue, TreeUpdateBatch)> { let blob_set = blob_set .into_iter() .map(|(k, v)| (k, Some(v))) @@ -217,8 +299,8 @@ where pub fn updates( &self, state_root_hash: Option, - blob_set: Vec<(HashValue, Option)>, - ) -> Result<(HashValue, TreeUpdateBatch)> { + blob_set: Vec<(K, Option)>, + ) -> Result<(HashValue, TreeUpdateBatch)> { let (root_hashes, tree_update_batch) = self.puts(state_root_hash, vec![blob_set])?; assert_eq!( root_hashes.len(), @@ -272,8 +354,8 @@ where fn puts( &self, state_root_hash: Option, - blob_sets: Vec)>>, - ) -> Result<(Vec, TreeUpdateBatch)> { + blob_sets: Vec)>>, + ) -> Result<(Vec, TreeUpdateBatch)> { let mut tree_cache = TreeCache::new(self.reader, state_root_hash); for (_idx, blob_set) in blob_sets.into_iter().enumerate() { assert!( @@ -291,13 +373,9 @@ where Ok(tree_cache.into()) } - fn put( - key: HashValue, - blob: Option, - // version: Version, - tree_cache: &mut TreeCache, - ) -> Result<()> { - let nibble_path = NibblePath::new(key.to_vec()); + fn put(key: K, blob: Option, tree_cache: &mut TreeCache) -> Result<()> { + let key_hash = key.key_hash(); + let nibble_path = NibblePath::new(key_hash.to_vec()); // Get the root node. If this is the first operation, it would get the root node from the // underlying db. Otherwise it most likely would come from `cache`. @@ -305,13 +383,8 @@ where let mut nibble_iter = nibble_path.nibbles(); // Start insertion from the root node. - let (new_root_node_key, _) = Self::insert_at( - *root_node_key, - // version, - &mut nibble_iter, - blob, - tree_cache, - )?; + let (new_root_node_key, _) = + Self::insert_at(*root_node_key, &mut nibble_iter, key, blob, tree_cache)?; tree_cache.set_root_node_key(new_root_node_key); Ok(()) @@ -323,29 +396,24 @@ where /// for this tree is the length of the hash of account addresses. 
fn insert_at( node_key: NodeKey, - // version: Version, nibble_iter: &mut NibbleIterator, + key: K, blob: Option, - tree_cache: &mut TreeCache, - ) -> Result<(NodeKey, Node)> { + tree_cache: &mut TreeCache, + ) -> Result<(NodeKey, Node)> { let node = tree_cache.get_node(&node_key)?; match node { Node::Internal(internal_node) => Self::insert_at_internal_node( node_key, internal_node, - // version, - nibble_iter, - blob, - tree_cache, - ), - Node::Leaf(leaf_node) => Self::insert_at_leaf_node( - node_key, - leaf_node, - // version, nibble_iter, + key, blob, tree_cache, ), + Node::Leaf(leaf_node) => { + Self::insert_at_leaf_node(node_key, leaf_node, nibble_iter, key, blob, tree_cache) + } Node::Null => { if blob.is_none() { return Ok((node_key, node)); @@ -353,7 +421,7 @@ where let blob = blob.unwrap(); tree_cache.delete_node(&node_key, false); - Self::create_leaf_node(&nibble_iter, blob, tree_cache) + Self::create_leaf_node(key, blob, tree_cache) } } } @@ -364,11 +432,11 @@ where fn insert_at_internal_node( node_key: NodeKey, internal_node: InternalNode, - // version: Version, nibble_iter: &mut NibbleIterator, + key: K, blob: Option, - tree_cache: &mut TreeCache, - ) -> Result<(NodeKey, Node)> { + tree_cache: &mut TreeCache, + ) -> Result<(NodeKey, Node)> { // Find the next node to visit following the next nibble as index. let child_index = nibble_iter.next().expect("Ran out of nibbles"); @@ -378,12 +446,12 @@ where Some(child) => { // let child_node_key = node_key.gen_child_node_key(child.version, child_index); let child_node_key = child.hash; - Self::insert_at(child_node_key, nibble_iter, blob, tree_cache)? + Self::insert_at(child_node_key, nibble_iter, key, blob, tree_cache)? } None if blob.is_some() => { let blob = blob.unwrap(); // let new_child_node_key = node_key.gen_child_node_key(version, child_index); - Self::create_leaf_node(nibble_iter, blob, tree_cache)? + Self::create_leaf_node(key, blob, tree_cache)? } _ => return Ok((node_key, Node::from(internal_node))), }; @@ -424,7 +492,7 @@ where let leaf_node = tree_cache.get_node(&leaf.hash)?; Ok((leaf.hash, leaf_node)) } else { - let new_internal_node: Node = InternalNode::new(children).into(); + let new_internal_node: Node = InternalNode::new(children).into(); // Cache this new internal node. tree_cache.put_node(new_internal_node.hash(), new_internal_node.clone())?; Ok((new_internal_node.hash(), new_internal_node)) @@ -436,12 +504,12 @@ where /// [`NodeKey`](node_type/struct.NodeKey.html). fn insert_at_leaf_node( node_key: NodeKey, - existing_leaf_node: LeafNode, - // version: Version, + existing_leaf_node: LeafNode, nibble_iter: &mut NibbleIterator, + key: K, blob: Option, - tree_cache: &mut TreeCache, - ) -> Result<(NodeKey, Node)> { + tree_cache: &mut TreeCache, + ) -> Result<(NodeKey, Node)> { // We are on a leaf node but trying to insert another node, so we may diverge. // We always delete the existing leaf node here because it will not be referenced anyway // since this version. @@ -450,7 +518,8 @@ where // visited part of the nibble iter of the incoming key and advances the existing leaf // nibble iterator by the length of that prefix. 
let mut visited_nibble_iter = nibble_iter.visited_nibbles(); - let existing_leaf_nibble_path = NibblePath::new(existing_leaf_node.account_key().to_vec()); + let existing_leaf_nibble_path = + NibblePath::new(existing_leaf_node.raw_key().key_hash().to_vec()); let mut existing_leaf_nibble_iter = existing_leaf_nibble_path.nibbles(); skip_common_prefix(&mut visited_nibble_iter, &mut existing_leaf_nibble_iter); @@ -486,12 +555,7 @@ where } else { // Else create the new leaf node with the same address but new blob content. tree_cache.delete_node(&node_key, true /* is_leaf */); - return Ok(Self::create_leaf_node( - // node_key, - nibble_iter, - blob, - tree_cache, - )?); + return Ok(Self::create_leaf_node(key, blob, tree_cache)?); } } @@ -520,12 +584,7 @@ where Child::new(existing_leaf_node.crypto_hash(), true /* is_leaf */), ); - let (_, new_leaf_node) = Self::create_leaf_node( - // node_key.gen_child_node_key(version, new_leaf_index), - nibble_iter, - blob, - tree_cache, - )?; + let (_, new_leaf_node) = Self::create_leaf_node(key, blob, tree_cache)?; children.insert( new_leaf_index, Child::new(new_leaf_node.hash(), true /* is_leaf */), @@ -533,7 +592,7 @@ where let internal_node = InternalNode::new(children); let mut next_internal_node = internal_node.clone(); - let internal_node: Node = internal_node.into(); + let internal_node: Node = internal_node.into(); tree_cache.put_node(internal_node.hash(), internal_node)?; for _i in 0..num_common_nibbles_below_internal { @@ -547,28 +606,23 @@ where ); let internal_node = InternalNode::new(children); next_internal_node = internal_node.clone(); - let internal_node: Node = internal_node.into(); + let internal_node: Node = internal_node.into(); tree_cache.put_node(internal_node.hash(), internal_node)?; } - let next_internal_node: Node = next_internal_node.into(); + let next_internal_node: Node = next_internal_node.into(); Ok((next_internal_node.hash(), next_internal_node)) } /// Helper function for creating leaf nodes. Returns the newly created leaf node. fn create_leaf_node( - // node_key: NodeKey, - nibble_iter: &NibbleIterator, + key: K, blob: Blob, - tree_cache: &mut TreeCache, - ) -> Result<(NodeKey, Node)> { + tree_cache: &mut TreeCache, + ) -> Result<(NodeKey, Node)> { // Get the underlying bytes of nibble_iter which must be a key, i.e., hashed account address // with `HashValue::LENGTH` bytes. - let new_leaf_node = Node::new_leaf( - HashValue::from_slice(nibble_iter.get_nibble_path().bytes()) - .expect("LeafNode must have full nibble path."), - blob, - ); + let new_leaf_node = Node::new_leaf(key, blob); let node_key = new_leaf_node.hash(); tree_cache.put_node(node_key, new_leaf_node.clone())?; Ok((node_key, new_leaf_node)) @@ -578,7 +632,7 @@ where pub fn get_with_proof( &self, state_root_hash: HashValue, - key: HashValue, // TODO should use &HashValue at here? + key: HashValue, ) -> Result<(Option, SparseMerkleProof)> { // Empty tree just returns proof with no sibling hash. 
// let mut next_node_key = NodeKey::new_empty_path(version); @@ -613,13 +667,13 @@ where } Node::Leaf(leaf_node) => { return Ok(( - if leaf_node.account_key() == key { + if leaf_node.raw_key().key_hash() == key { Some(leaf_node.blob().clone()) } else { None }, SparseMerkleProof::new( - Some((leaf_node.account_key(), leaf_node.blob_hash())), + Some((leaf_node.raw_key().key_hash(), leaf_node.blob_hash())), { siblings.reverse(); siblings diff --git a/core/forkable-jellyfish-merkle/src/mock_tree_store.rs b/core/forkable-jellyfish-merkle/src/mock_tree_store.rs index 5df63b8175..b567e23aa4 100644 --- a/core/forkable-jellyfish-merkle/src/mock_tree_store.rs +++ b/core/forkable-jellyfish-merkle/src/mock_tree_store.rs @@ -6,7 +6,7 @@ use crate::{ node_type::{LeafNode, Node, NodeKey}, - NodeBatch, StaleNodeIndex, TreeReader, TreeUpdateBatch, TreeWriter, + HashValueKey, NodeBatch, StaleNodeIndex, TreeReader, TreeUpdateBatch, TreeWriter, }; use anyhow::{bail, ensure, Result}; use starcoin_crypto::HashValue; @@ -16,21 +16,26 @@ use std::{ }; #[derive(Default)] -pub struct MockTreeStore(RwLock<(HashMap, BTreeSet)>); - -impl TreeReader for MockTreeStore { - fn get_node_option(&self, node_key: &NodeKey) -> Result> { +pub struct MockTreeStore( + RwLock<( + HashMap>, + BTreeSet, + )>, +); + +impl TreeReader for MockTreeStore { + fn get_node_option(&self, node_key: &NodeKey) -> Result>> { Ok(self.0.read().unwrap().0.get(node_key).cloned()) } - fn get_rightmost_leaf(&self) -> Result> { + fn get_rightmost_leaf(&self) -> Result)>> { let locked = self.0.read().unwrap(); - let mut node_key_and_node: Option<(NodeKey, LeafNode)> = None; + let mut node_key_and_node: Option<(NodeKey, LeafNode)> = None; for (key, value) in locked.0.iter() { if let Node::Leaf(leaf_node) = value { if node_key_and_node.is_none() - || leaf_node.account_key() > node_key_and_node.as_ref().unwrap().1.account_key() + || leaf_node.raw_key() > node_key_and_node.as_ref().unwrap().1.raw_key() { node_key_and_node.replace((*key, leaf_node.clone())); } @@ -41,8 +46,8 @@ impl TreeReader for MockTreeStore { } } -impl TreeWriter for MockTreeStore { - fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()> { +impl TreeWriter for MockTreeStore { + fn write_node_batch(&self, node_batch: &NodeBatch) -> Result<()> { let mut locked = self.0.write().unwrap(); for (node_key, node) in node_batch.clone() { assert_eq!(locked.0.insert(node_key, node), None); @@ -52,7 +57,7 @@ impl TreeWriter for MockTreeStore { } impl MockTreeStore { - pub fn put_node(&self, node_key: NodeKey, node: Node) -> Result<()> { + pub fn put_node(&self, node_key: NodeKey, node: Node) -> Result<()> { match self.0.write().unwrap().0.entry(node_key) { Entry::Occupied(o) => bail!("Key {:?} exists.", o.key()), Entry::Vacant(v) => { @@ -68,7 +73,7 @@ impl MockTreeStore { Ok(()) } - pub fn write_tree_update_batch(&self, batch: TreeUpdateBatch) -> Result<()> { + pub fn write_tree_update_batch(&self, batch: TreeUpdateBatch) -> Result<()> { batch .node_batch .into_iter() diff --git a/core/forkable-jellyfish-merkle/src/node_type/mod.rs b/core/forkable-jellyfish-merkle/src/node_type/mod.rs index cb5443d25a..a02a7fd366 100644 --- a/core/forkable-jellyfish-merkle/src/node_type/mod.rs +++ b/core/forkable-jellyfish-merkle/src/node_type/mod.rs @@ -16,7 +16,7 @@ #[cfg(test)] mod node_type_test; -use crate::{blob::Blob, nibble::Nibble}; +use crate::{blob::Blob, nibble::Nibble, RawKey}; use anyhow::{ensure, Context, Result}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use 
num_derive::{FromPrimitive, ToPrimitive}; @@ -25,7 +25,7 @@ use num_traits::cast::FromPrimitive; use proptest::{collection::hash_map, prelude::*}; #[cfg(any(test, feature = "fuzzing"))] use proptest_derive::Arbitrary; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; use starcoin_crypto::hash::*; use std::cell::Cell; use std::{ @@ -409,25 +409,51 @@ pub(crate) fn get_child_and_sibling_half_start(n: Nibble, height: u8) -> (u8, u8 (child_half_start, sibling_half_start) } +//TODO use serde helper's serialize_binary +pub fn serialize_raw_key(key: &K, s: S) -> std::result::Result +where + K: RawKey, + S: Serializer, +{ + use serde::ser::Error; + s.serialize_bytes(key.encode_key().map_err(S::Error::custom)?.as_slice()) +} + +pub fn deserialize_raw_key<'de, K, D>(d: D) -> std::result::Result +where + K: RawKey, + D: Deserializer<'de>, +{ + use serde::de::Error; + let bytes = serde_bytes::ByteBuf::deserialize(d)?; + K::decode_key(bytes.as_ref()).map_err(D::Error::custom) +} /// Represents an account. -#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, CryptoHasher)] -pub struct LeafNode { - // The hashed account address associated with this leaf node. - account_key: HashValue, - // The hash of the account state blob. +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub struct LeafNode { + /// The origin key associated with this leaf node's Blob. + #[serde( + deserialize_with = "deserialize_raw_key", + serialize_with = "serialize_raw_key" + )] + raw_key: K, + /// The hash of the blob. blob_hash: HashValue, - // The account blob associated with `account_key`. + /// The blob associated with `raw_key`. blob: Blob, #[serde(skip)] cached_hash: Cell>, } -impl LeafNode { +impl LeafNode +where + K: RawKey, +{ /// Creates a new leaf node. - pub fn new(account_key: HashValue, blob: Blob) -> Self { + pub fn new(raw_key: K, blob: Blob) -> Self { let blob_hash = blob.crypto_hash(); Self { - account_key, + raw_key, blob_hash, blob, cached_hash: Cell::new(None), @@ -445,20 +471,9 @@ impl LeafNode { } } - pub fn serialize(&self, out: &mut Vec) -> Result<()> { - //FIXME #1893 - Ok(bincode::serialize_into(out, self)?) - } - - pub fn deserialize(data: &[u8]) -> Result { - //FIXME custom serialize and deserialize, do not use bincode $1893 - let node: LeafNode = bincode::deserialize(data)?; - Ok(node) - } - - /// Gets the account key, the hashed account address. - pub fn account_key(&self) -> HashValue { - self.account_key + /// Gets the raw key + pub fn raw_key(&self) -> &K { + &self.raw_key } /// Gets the hash of associated blob. @@ -470,13 +485,24 @@ impl LeafNode { pub fn blob(&self) -> &Blob { &self.blob } + + pub fn serialize(&self, binary: &mut Vec) -> Result<()> { + binary.extend(scs::to_bytes(self)?); + Ok(()) + } + + pub fn deserialize(data: &[u8]) -> Result { + scs::from_bytes(data) + } } /// Computes the hash of a [`LeafNode`]. -impl CryptoHash for LeafNode { - type Hasher = LeafNodeHasher; - fn hash(&self) -> HashValue { - SparseMerkleLeafNode::new(self.account_key, self.blob_hash).crypto_hash() +impl PlainCryptoHash for LeafNode +where + K: RawKey, +{ + fn crypto_hash(&self) -> HashValue { + SparseMerkleLeafNode::new(self.raw_key.key_hash(), self.blob_hash).crypto_hash() } } @@ -490,16 +516,19 @@ enum NodeTag { /// The concrete node type of [`JellyfishMerkleTree`](crate::JellyfishMerkleTree). #[derive(Clone, Debug, Eq, PartialEq)] -pub enum Node { +pub enum Node { /// Represents `null`. Null, /// A wrapper of [`InternalNode`]. 
Internal(InternalNode), /// A wrapper of [`LeafNode`]. - Leaf(LeafNode), + Leaf(LeafNode), } -impl From for Node { +impl From for Node +where + K: RawKey, +{ fn from(node: InternalNode) -> Self { Node::Internal(node) } @@ -511,13 +540,19 @@ impl From for Children { } } -impl From for Node { - fn from(node: LeafNode) -> Self { +impl From> for Node +where + K: RawKey, +{ + fn from(node: LeafNode) -> Self { Node::Leaf(node) } } -impl Node { +impl Node +where + K: RawKey, +{ /// Creates the [`Null`](Node::Null) variant. pub fn new_null() -> Self { Node::Null @@ -529,8 +564,8 @@ impl Node { } /// Creates the [`Leaf`](Node::Leaf) variant. - pub fn new_leaf(account_key: HashValue, blob: Blob) -> Self { - Node::Leaf(LeafNode::new(account_key, blob)) + pub fn new_leaf(raw_key: K, blob: Blob) -> Self { + Node::Leaf(LeafNode::new(raw_key, blob)) } /// Returns `true` if the node is a leaf node. @@ -547,7 +582,7 @@ impl Node { } Node::Internal(internal_node) => { out.push(NodeTag::Internal as u8); - internal_node.serialize(&mut out)? + internal_node.serialize(&mut out)?; } Node::Leaf(leaf_node) => { out.push(NodeTag::Leaf as u8); @@ -567,7 +602,7 @@ impl Node { } /// Recovers from serialized bytes in physical storage. - pub fn decode(val: &[u8]) -> Result { + pub fn decode(val: &[u8]) -> Result> { if val.is_empty() { return Err(NodeDecodeError::EmptyInput.into()); } diff --git a/core/forkable-jellyfish-merkle/src/node_type/node_type_test.rs b/core/forkable-jellyfish-merkle/src/node_type/node_type_test.rs index 4f00a4934b..474ac6c7b5 100644 --- a/core/forkable-jellyfish-merkle/src/node_type/node_type_test.rs +++ b/core/forkable-jellyfish-merkle/src/node_type/node_type_test.rs @@ -6,6 +6,7 @@ use super::*; use crate::nibble_path::NibblePath; +use crate::HashValueKey; use proptest::prelude::*; use starcoin_crypto::{ hash::{CryptoHash, SPARSE_MERKLE_PLACEHOLDER_HASH}, @@ -44,15 +45,15 @@ fn test_encode_decode() { // let nibble_path = NibblePath::new(vec![]); let leaf1_keys = gen_leaf_keys(&nibble_path, Nibble::from(1)); - let leaf1_node = Node::new_leaf(leaf1_keys, Blob::from(vec![0x00])); + let leaf1_node: Node = Node::new_leaf(leaf1_keys.into(), Blob::from(vec![0x00])); let leaf2_keys = gen_leaf_keys(&nibble_path, Nibble::from(2)); - let leaf2_node = Node::new_leaf(leaf2_keys, Blob::from(vec![0x01])); + let leaf2_node: Node = Node::new_leaf(leaf2_keys.into(), Blob::from(vec![0x01])); let mut children = Children::default(); children.insert(Nibble::from(1), Child::new(leaf1_node.hash(), true)); children.insert(Nibble::from(2), Child::new(leaf2_node.hash(), true)); - let account_key = HashValue::random(); + let account_key = HashValueKey(HashValue::random()); let nodes = vec![ Node::new_internal(children), Node::new_leaf(account_key, Blob::from(vec![0x02])), @@ -62,13 +63,13 @@ fn test_encode_decode() { assert_eq!(*n, Node::decode(&v).unwrap()); } // Error cases - if let Err(e) = Node::decode(&[]) { + if let Err(e) = Node::::decode(&[]) { assert_eq!( e.downcast::().unwrap(), NodeDecodeError::EmptyInput ); } - if let Err(e) = Node::decode(&[100]) { + if let Err(e) = Node::::decode(&[100]) { assert_eq!( e.downcast::().unwrap(), NodeDecodeError::UnknownTag { unknown_tag: 100 } @@ -118,7 +119,7 @@ fn test_leaf_hash() { let blob = Blob::from(vec![0x02]); let value_hash = blob.hash(); let hash = hash_leaf(address, value_hash); - let leaf_node = Node::new_leaf(address, blob); + let leaf_node = Node::new_leaf(HashValueKey(address), blob); assert_eq!(leaf_node.hash(), hash); } } diff --git 
a/core/forkable-jellyfish-merkle/src/test_helper.rs b/core/forkable-jellyfish-merkle/src/test_helper.rs index f258bd85c8..4f20196259 100644 --- a/core/forkable-jellyfish-merkle/src/test_helper.rs +++ b/core/forkable-jellyfish-merkle/src/test_helper.rs @@ -4,7 +4,7 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::{blob::Blob, mock_tree_store::MockTreeStore, JellyfishMerkleTree}; +use crate::{blob::Blob, mock_tree_store::MockTreeStore, HashValueKey, JellyfishMerkleTree}; use starcoin_crypto::HashValue; use std::collections::HashMap; @@ -26,7 +26,7 @@ pub fn plus_one(key: HashValue) -> HashValue { /// Initializes a DB with a set of key-value pairs by inserting one key at each version. #[allow(clippy::all)] -pub fn init_mock_db(kvs: &HashMap) -> (MockTreeStore, Option) { +pub fn init_mock_db(kvs: &HashMap) -> (MockTreeStore, Option) { assert!(!kvs.is_empty()); let db = MockTreeStore::default(); diff --git a/core/forkable-jellyfish-merkle/src/tree_cache/mod.rs b/core/forkable-jellyfish-merkle/src/tree_cache/mod.rs index b4a6b6c983..d0addd6b01 100644 --- a/core/forkable-jellyfish-merkle/src/tree_cache/mod.rs +++ b/core/forkable-jellyfish-merkle/src/tree_cache/mod.rs @@ -74,7 +74,7 @@ mod tree_cache_test; use crate::{ node_type::{Node, NodeKey}, - StaleNodeIndex, TreeReader, TreeUpdateBatch, + RawKey, StaleNodeIndex, TreeReader, TreeUpdateBatch, }; use anyhow::{bail, Result}; use starcoin_crypto::{hash::SPARSE_MERKLE_PLACEHOLDER_HASH, HashValue}; @@ -87,10 +87,9 @@ use std::{ /// are generated by earlier transactions so they have to be immutable. The motivation of /// `FrozenTreeCache` is to let `TreeCache` freeze intermediate results from each transaction to /// help commit more than one transaction in a row atomically. -#[derive(Default)] -struct FrozenTreeCache { +struct FrozenTreeCache { /// Immutable node_cache. - node_cache: BTreeMap, + node_cache: BTreeMap>, /// # of leaves in the `node_cache`, num_new_leaves: usize, @@ -105,14 +104,29 @@ struct FrozenTreeCache { root_hashes: Vec, } +impl Default for FrozenTreeCache +where + K: RawKey, +{ + fn default() -> Self { + Self { + node_cache: BTreeMap::default(), + num_new_leaves: 0, + stale_node_index_cache: BTreeSet::default(), + num_stale_leaves: 0, + root_hashes: vec![], + } + } +} + /// `TreeCache` is a in-memory cache for per-transaction updates of sparse Merkle nodes and value /// blobs. -pub struct TreeCache<'a, R: 'a + TreeReader> { +pub struct TreeCache<'a, R: 'a + TreeReader, K: RawKey> { /// `NodeKey` of the current root node in cache. root_node_key: HashValue, /// Intermediate nodes keyed by node hash - node_cache: HashMap, + node_cache: HashMap>, /// # of leaves in the `node_cache`, num_new_leaves: usize, @@ -124,15 +138,16 @@ pub struct TreeCache<'a, R: 'a + TreeReader> { num_stale_leaves: usize, /// The immutable part of this cache, which will be committed to the underlying storage. - frozen_cache: FrozenTreeCache, + frozen_cache: FrozenTreeCache, /// The underlying persistent storage. reader: &'a R, } -impl<'a, R> TreeCache<'a, R> +impl<'a, R, K> TreeCache<'a, R, K> where - R: 'a + TreeReader, + R: 'a + TreeReader, + K: RawKey, { /// Constructs a new `TreeCache` instance. pub fn new(reader: &'a R, state_root_hash: Option) -> Self { @@ -156,7 +171,7 @@ where } /// Gets a node with given node key. If it doesn't exist in node cache, read from `reader`. 
- pub fn get_node(&self, node_key: &NodeKey) -> Result { + pub fn get_node(&self, node_key: &NodeKey) -> Result> { if node_key == &*SPARSE_MERKLE_PLACEHOLDER_HASH { return Ok(Node::Null); } @@ -180,7 +195,7 @@ where } #[cfg(test)] - pub fn get_root_node(&self) -> Result { + pub fn get_root_node(&self) -> Result> { self.get_node(&self.root_node_key) } @@ -190,7 +205,7 @@ where } /// Puts the node with given hash as key into node_cache. - pub fn put_node(&mut self, node_key: NodeKey, new_node: Node) -> Result<()> { + pub fn put_node(&mut self, node_key: NodeKey, new_node: Node) -> Result<()> { match self.node_cache.entry(node_key) { Entry::Vacant(o) => { if new_node.is_leaf() { @@ -246,11 +261,12 @@ where } } -impl<'a, R> Into<(Vec, TreeUpdateBatch)> for TreeCache<'a, R> +impl<'a, R, K> Into<(Vec, TreeUpdateBatch)> for TreeCache<'a, R, K> where - R: 'a + TreeReader, + R: 'a + TreeReader, + K: RawKey, { - fn into(self) -> (Vec, TreeUpdateBatch) { + fn into(self) -> (Vec, TreeUpdateBatch) { ( self.frozen_cache.root_hashes, TreeUpdateBatch { diff --git a/core/forkable-jellyfish-merkle/src/tree_cache/tree_cache_test.rs b/core/forkable-jellyfish-merkle/src/tree_cache/tree_cache_test.rs index a077ec2825..b7147706d7 100644 --- a/core/forkable-jellyfish-merkle/src/tree_cache/tree_cache_test.rs +++ b/core/forkable-jellyfish-merkle/src/tree_cache/tree_cache_test.rs @@ -5,13 +5,14 @@ // SPDX-License-Identifier: Apache-2.0 use super::*; -use crate::{blob::Blob, mock_tree_store::MockTreeStore, node_type::Node, NodeKey}; +use crate::{blob::Blob, mock_tree_store::MockTreeStore, node_type::Node, HashValueKey, NodeKey}; use starcoin_crypto::HashValue; -fn random_leaf_with_key() -> (Node, NodeKey) { - let address = HashValue::random(); - let node = Node::new_leaf(address, Blob::from(HashValue::random().to_vec())); - (node, address) +fn random_leaf_with_key() -> (Node, NodeKey) { + let hash_value = HashValue::random(); + let node = Node::new_leaf(hash_value.into(), Blob::from(HashValue::random().to_vec())); + let node_key = node.hash(); + (node, node_key) } #[test] diff --git a/core/genesis/generated/halley/genesis b/core/genesis/generated/halley/genesis index d7c4acc17f9a1748293a2a170b81a1d9c669547f..ae32455094d28e8d14765b2e51d4f80f8fbfee3a 100644 GIT binary patch delta 2836 zcmZ8jYiwLc6`nJ5XYRc-vv=>^r|q@9yMAqAukAQ#;+v#tD1j<1gxFG|6veGs?2ThJ ze&9F>KLSfggsLWLNd{EHL$qlg1lpvn=`X4jDkP);KN8BLDyV`;K@f;UfggQ9IrpZa z(5u~VUf;}|Ip@qd^UFWew{OrdZz>J#%)EN;&?|2h|Iq8c{MO4G8qZ%nT`YZdBz3X= zc<+@ve|UVNqjd7e-nl|yKX=C#$T)9^~{s4HxwWrAw^St zY{N@ny#qOZf?fgp8vCZ>ysLcim>!ci^m=_Z@lTQXMkZsPoynN~A-_Qh@qkzv0LnQ9 zfHtf-4kav!i!vWU1V9N*0V8&r5n^R62Gq*&2=id!#3o?_@Mg!wdW(=)FEEAJ>LNnB zMkW+NBbE|acgn;gq$7!YH#%`y(v?K$_7QplgbDuquymM-wBmpbAbQ>4~B)c%Dn_A}=O9U?4Pb0!n~OsNIN5mw-?< zc7VXnTUBh}58zh6a$4s(Ig^ojWi- zGrece^xpc!-0Y-zan;7apQrs#S9M=(=sda6NfVsUzZeZ017vAz&bMg#xvhX*8M zJv{0%&#l?sSWVH();M%HR#OjUnHAv{=Xbu{rX&U_+TDoIaG!Qh~s%-OsXcjS;ubBDVum`p|Hr&*oiP)|8 zN)j|{OQMO&%;7HIj8#=;9$$|m=;|H|iEW!J!>vHg&EedH)or6cYpGr3vUN&U;wiGs zKE&^it8SUy!#CTkLbJDoPNPfZa5vtTx-IhuMN2AHkx=C{k3QONrozsuA2LakLJ|o+ z%_#7BCdEQ41VxjOrA@dtwFzwo3>4-TP{C&UY+I4WOMB-@9L@)6Nt&`kuI%Voo$(&!BWS+JBAh#pcDcru zD#sd6@|2?`&tZ=&YpEn_=BH_qxSbmkKs40}*1XZWU+@P<>k+M@bZ z>pUdBD5!TtU}TQgA&gjrlc4(rE~ z1TID`l9CwpNm?K>ZF}PVFf1Gll@68EG;Q5s_HBLC+`lVlR*t;xXLwf9T$DNT4h8L) z@@Q|$MJW=kA=yB*P-2(_mU&=wS(I@gSvE&Uo53EryYzZLWlLiR_je11 zS|VV6^W>Q8Pwrc&El$tPn(C>tdGOTQLL3zY9x49H%;gC*5$&P@nCDM@t>$2^BTim0 z;x(Zuy=KQIi57^2Dd5QfS`{K~(~Pk81nR8FJR(X-pCrq~#!MF_wa!$eiLNxF%&MHu 
zVLXph5f$2e|EU4=YR;B^_tZf!^Jk9abMp&x6ZO4&$3MMiZsMWwnfhXFvbI<=hZWBqRj5&JE9xsxq)jgoy%ALrf>U!4P<~0l-MUO zfpv^#_!IOd*pFJTapzC2k57VD)ASv`L2`a{_5ET5K0+>s4$uQ6k)TAW=#hX?hp?uo zX5|?WI|2ulPn;G6ufQ#m%nKmNRyVw|?L_Q6f9`t$xp$0=svJdd8_`YJ(`~WV-b)VNKnY1t}4R}@67usI08VDWF z4&4$v$d4>tduGVKJHNK~TJyk`k6!*eS$W}8*S}P{_R}3=NA>D`{~CMY%`d#$y{s|! m?}HyZlRNO*<0lUvJG||kH%?f;IQ08B2d@l2aJ2j|C;tJ2orm%O delta 2649 zcmZ8jTZ~m#8D9Uo?X}lhXJ5{B&YUwd=L|EPxid3#V1t6Rw$NHygwoVD7#xOy0xb|a zKwD^zZD^`cwXk1kBEce1Y{iIENwuxj=tI?3nAvIr&GZdewTZpdc^=Q@FJju#lVQjaRyZ6g~I}Hyi=4OfGkOc*v5RsvPPs8gEX`X2C>89Ty>HRfiC9s z3Me6Ue2;Vo2+ai{Vo!w73lSj;Db5-+CZvxc^g9@EFz8^2W(gV2A<0M{VNn5Lw1_ZP zVnnt3?OuB(BRX}v&B#vPX)?e-S`Y+`0H08=HTVRik+=rwHwggDFK}(Vc`7&HQYny% zQ=us+)Ys7|#-$Yk7^0>)*OcLw;%=nUQYvsEkpP&7c2UfiinT6Mw+D>j8O6QUDV+!e za=?B3m~vt&ic%=mg7}M*Q@bX&Pj7s5_g#~_ak5X`^3a~CUFpg%SJS@He7bscBt1I1 zH#K7`(+9?mrFR}`Pd^+xJ^P(;4e6!DUFn|}_vUhwQ!_gbOmCjtHZ#3%^X~l*Z<$VC z7|x~RH{H`5r$E`_mV7t0SWdJ=pp-2UOEpbUDTd=RmM|2vCL$n=8xoN`bwL5V%{L^b z7BmcUOAU=N05N2s+}omZ2XV>Mb&toyGDpE8!x|a}z)HPVMvxFs#$bizhO-)qVnicB zTZd4fIrmiM4o%{c<(hk5ohjjq+i|MvED)c;ld5F_bC0B7ny5Ab*(mE7j}062m}(dr zv3wl%dIV3j^^MP#8{KHV;n&$JadT^`>TI>R-J=bfpI0%59xot!Ud`Un@c$JFTZ#;E zTEr;oSfI%Or2m-c>>1{(+)bnV$H7d+M zxAtkJHEK*DfNc82l1j6~86u8Q5`6l)G0*~9gqpDoB8`i*(-AS*B0^qqj&{xa$hI^< z1EPE<#Uc-IpeoUepXctKcP>3Yj4p~xz$L=kuVx>ZK8vZyNSiyH2zx^s^w&{6Y( z49*LpNfSeiURNA6@A0r2SYx%(Y^9?y4NdzAy9Hf|x($%fD~Pqu{#i1N=z?Xv_=+Zp zv;l6@EFb2$0uvaLcv%<*u?63m53B9su%<&`kwFe@#)e?}Gl?=`;v*N%4z|v@WroFh z$_YA)xCCaHOJw%8k;Y z6)!{PWl4jJ63ZjLh((uhB8Vg&ZeSq+9v~kd51H!^=tD}RMV0^=EPy=Z7^;zAgvQdF zp!j7KLnJvLWg~fwWtbEsBC{&+0xUW6l~F1zV+xJS((`NYlh+H(Cu_fn#iX1byX)hq zC`y{P;Y}zjiat-DyL%w39ph?3R-mtpTuNe*zI}IJ&ER&}?JS?roXbQ^>lP8ytVNVx z*Ufk=XP2%!F);hv!v}h$E6;?bYtC#+UpZ5bqpkb*P0sAtvwQO+(^GqPZ+#?v=ghJS zxI!XP@zz&ozQ{N~_RyH7_3xf+(juy^N)n;XVJK6?H=km;Q-t_CkX@#4eapSV!YD{;S8o~*Mki2Gbx27av5 z;R~h4PwkDE@c=vBP=&2|75Jx+702nHz`MqNFVq`;ly>Z@rq5i6nqh~tb*F<6p-{j; z=>=jf<}?;0!b&bUc##3|9H=(nM3oUT?Shk_C^0lE;E|vzfP`Hx$ikYB)(Y#!B6f=m zKwQMQ%R1UzF~m4p5Mj?R^u*b1*sKL!Aq}3xZk>12eo?@7ox{H(g9U`4B8pQiVKlq@ zV%Yt~@QcVy7P0baH!~wpCT0{RrxGNaKrc=RLcg*9w9l$k-vkAeST3p9f zkM7kAbt7&Mh7!Qtj$k0cx{<i zedUiHyQB2hdGEE2C!_A2-#`EEPoDi%^uhWEXKx+f`O9+$Ck7ts|Mq7Oe+cBi_nc0n diff --git a/core/genesis/generated/main/genesis b/core/genesis/generated/main/genesis index 0007b9cfb602f10ae45ea2ba65740ded5b1db4bd..7047c32332f7f9ea90b6855aca4e838ab74b6925 100644 GIT binary patch delta 2835 zcmZ8jYm8OZ72a#V&N+MUd+s^+IWu==?!EJbnLEQ{Waa=x6s-}9!XyyTQl{5=F_D>} z!@y9aOdmvy7%iNpDScQT3dN#T#>O^j#s09SHvXWgwFDd55^XJkq^7S#lXjhh)Yg-G zzy135-fOS5_FDUmztH!u(68QBI`GroPcBI3K6&oJ-R++qanF~o_Oc&Jx8wYe){lzd#>_7j(h8^&p(uW;_E^0A!u(?1iw=D z<<9qh{Ev(I27n4d z0iXeEwoM63;-bt$kUmgCQ^1IoW`yu8XFz#QM3@JT6KkXeAZl#~>vd9Ly}&eLy@Lpi z2ANORtG(|`!L#Ra16+w3d z3uu~vvDw zU7f(nRCs=%KJ5OS4E>cgVX>z>mt8+Ixo&H9$MA;jqf^zP;oBxBM#e_NyL+~K-o)h0 z!1&m4BNC>8%4p!z0z5+or?!dk%)*z3FJ7E*kl;6Q@lK zOplEZ4{RG7ADfHyM_m4VJqIWu)_|$-cPJ!m2ewwDyuc59rW<>*jt)B>ax z`qhf~wT3G+QJLAo5qDx$mzgV;;|RLC%K~Cq;rYILpy9Q?+#qkY(4UPmj&|5Gt=i)$ zs?2T?x5ibc%gwF-Z2%F;qO)Tm#aZXj4Ql zHD6ag4t{h(yg*u@@@2bX9bhbfmMhD%JtvdH`JlL>DIN%IMaOE6_bBg2^F~YFQYi&uik8Lb%u}+Pl!EQ%WQWFbhFr3XQ z*xI*nC+>@tyqk5Hl`b|#F z1%4V5UzXI}FEKJl>kvjb5hQA2fCpx<&&DC@AaE7AsK&qqEWwfM3N&|8S`=@lXR|IW ziSJ<4A}NJYpQI%sGnOkq3d7RIP-#;|!($Le_oVvAXyF%_SXUrpXzS{G51>k;f}dWU6jpj-n*++ zGSm`*@VCb{JKoUFsp|CDTO?5jkuU{389=>8qz#4<)|fz@6`4z9Dd~}9nOKg*2z0ShRvtO!440ds?Av9US(Ud%^cOuAUH|pC6Hu2SR~mXfOsX$c;J;Cw3XMS4PvuM0HpHx{76UK zOg;%D+v3fltL0jK%2v48IJGmETSn^xUb`+9jKar?pFzw!Nt#%}rf2U~7_bLNpBt-5c+bGKi* rIC)^rD;wXQ_{05wgO}^xU32fJzSw)=>@P>npZ>XUV$-GEHA(&h+Np}9 
delta 2648 zcmZ8j35-=&89x8n?z!jOcbB)$TW03HVTQ-NSs6Mr7Y%4z3q>}mCPg9{hG9wx0u5WM zrFlvksY7Cbn<$}i0lH8cY;Zu-q-xZ}+G?#0CC0cmhC~oFHX4)K;(y*iOuWha&i0@4 z@Bjbay-we{MvvTEeYmst^dr}kj7V&%ya`-Z-JgU+rC%e)s%8ZuHOo>@$?buV+qHt{cEh2x*$Gv&9#H z-+-+6Jbf9Q@3U7O^^^(aar1z_W)^v0^{&a(p-h+*2D_U;L>hofNfArfGkLb*v4GMqDG`;gEX`P2C?14x#}P(0-en2;;8e9U>NL+(-n*;#n7PvOf9F-k#sT4@Xsn8S@ z>TBr~)*OcLw;&!Ccqg3ERA^|W5?V^|~6>D9jb`KcCGm3kyQ#$Gk$Be&(U6>9)S3KYQBNi_Ikk7&{^^OyhxShIp4ho>$L8teqje1KsL&AjKc;DdQ8;~4Oun{ zdK`i$+H;M|K45gc^@dwx9~O7FrmDu4iTfQ|r`b6bGwAUgvgeib9S#3qk)WkWKc{(& zqV{>3G$46rw4>XPnWll%L_LICOHiLr9C-5MeaQ4)OmwlUBGS_GQ06e5lCw8IiH*&;$-QHFNT`N(T&fCfY# z2_p<>s##Q}RePb>i}PT9%RU&@MzEB@RFL&K2jZ=$(7fZpIn%bIT{TA!y7&|dlxDFo zuGP(EHt^0TG+18=q40w6aHVT4rvxIcJSFnDAG3?SNIStmAW5=+m= zZJOo-99Lj`Lt-Zl10XizJ9A;RRUFW?A3S7`0WakRV0u%rGC}Mj7tS7SopX;F7G)_X z=qzFrm|-rF+56&)ndUWlrzEF<_|#9a@<;fD6w~XRZ^H~zKjfi&gLmkgJT&if>U~bU z2&oq(_0LHxkN6@MUBZbVk~p}5g#@^dd|W(awm+Z`DUlws1V~{4r+ml9k~3EsrNT0%(6}slZqb8mu7RzANBoaET^JHS15rkGXYlUg~l#y%<@v}x(PkxmBA z6zbpe?!=4(*y;MqZ25P9--5I_N`D8=P4-KnUUtKzeS0N2bS7*D?bg;E76OD^4g;kJ zi05HWV?iRUV1t9_84$;UY6DJG5g}C)ocMW(p;-op_!R*p=(Ir=R9&=IP%|E4mq-Cb zd5pWXrOo66jH7uW_WWFTlzs@CwZJQ+&NJApvv%4ma@ejj_*bMahtQu#aqW_w=@ z*uNNV9+}BJRz7WKW)OA|G^XB=--x6c!eY+s$%3t7AJTKL&+o&c$7A6Q z`u(OamVUzw#*XjAs>(hq4$gk@n+u)0*DS8&pEhq_{LrO|)idv%f9dediEXcZ?b&br z=ik9!j$FU_z}tU&>&TUT!{7bF%#E>My)k~}wXummpWHF@n@j6wu5B9oibMVj!5vsm diff --git a/core/genesis/generated/proxima/genesis b/core/genesis/generated/proxima/genesis index fb3c6ec04b57cd05755efa3753b80afabf5fef18..ebafaac88c9e41c59c3b645f1d4c23d92559bcd9 100644 GIT binary patch delta 190 zcmV;v073t}>H@s#0+2%>F{waO^{~U89aYN+D~x%hW?kCi04T3kz*nt zj;x=TetRi}@g3@NzTYslku`ACS(8r001D?z|+-Yb!*Ye zh^}&8a8m)HNzB78LaBN0F@2_re}pKp@(Hv-mOgF66C-AfrvoP2wgILx5wrCK?yPA8 s1=f&c+^{A7h1qtAU4MAoJn8eF#MTBu_(F3+W*uD{Y+!$%kjZBX0CSjKb^rhX delta 190 zcmV;v073t}>H@s#0+2%>{~4Ntp3P`uI|}W6Ear7LuL|iavA&lDR+dbPqD~|rkz*nt z^o_Wa&>TNgNOX4DT=4IA<9}Lo!gs$=GWCUiUHT`^lku`ACS(8r001Bdr+0Z#s&o7I z{{oZkTVi$=K|U9YiaH*p@b_^<&tI#t@(Hv-gJup;xz{mcBa8o%`+!HIe#IJ* sgW6W9*h@5;#g?`m^(8%tKnEGRSjKY7-Lf~qO=#9E-Or_W%F@ diff --git a/dataformat-generator/build.rs b/dataformat-generator/build.rs index bb7474e0e4..62877b7b53 100644 --- a/dataformat-generator/build.rs +++ b/dataformat-generator/build.rs @@ -4,7 +4,7 @@ use starcoin_crypto::ed25519::Ed25519PrivateKey; use starcoin_crypto::multi_ed25519::MultiEd25519PrivateKey; use starcoin_crypto::{HashValue, PrivateKey, SigningKey, Uniform}; // use starcoin_rpc_api::types::pubsub::Kind; -use starcoin_types::access_path::{AccessPath, DataType}; +use starcoin_types::access_path::{AccessPath, DataPath, DataType}; use starcoin_types::account_address::AccountAddress; use starcoin_types::account_config::AccountResource; use starcoin_types::block_metadata::BlockMetadata; @@ -25,6 +25,7 @@ fn main() { fn generate() -> Result<(), Error> { let mut tracer = Tracer::new(TracerConfig::default()); let mut samples = Samples::new(); + tracer.trace_type::(&samples)?; tracer.trace_type::(&samples)?; tracer.trace_value(&mut samples, &HashValue::zero())?; { diff --git a/devtools/x/Cargo.toml b/devtools/x/Cargo.toml index 661f3e141a..3702a96c57 100644 --- a/devtools/x/Cargo.toml +++ b/devtools/x/Cargo.toml @@ -23,7 +23,7 @@ globset = "0.4.6" regex = "1.4.2" rayon = "1.5.0" indexmap = "1.6.1" -x-core = { package="x-core", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -x-lint = { package="x-lint", git = "https://github.com/starcoinorg/diem", 
rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -diem-workspace-hack = { package="diem-workspace-hack", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -diem-x = { package="x", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +x-core = { package="x-core", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +x-lint = { package="x-lint", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +diem-workspace-hack = { package="diem-workspace-hack", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +diem-x = { package="x", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } diff --git a/devtools/x/src/main.rs b/devtools/x/src/main.rs index e4cb48ec08..48fcf26c86 100644 --- a/devtools/x/src/main.rs +++ b/devtools/x/src/main.rs @@ -9,7 +9,6 @@ use chrono::Local; use env_logger::{self, fmt::Color}; use log::Level; -use std::path::Path; use std::{boxed::Box, io::Write}; use structopt::StructOpt; @@ -65,16 +64,7 @@ pub mod tools { pub use diem_x::tools::*; } -pub mod utils { - pub use diem_x::utils::*; -} - -pub fn project_root() -> &'static Path { - Path::new(&env!("CARGO_MANIFEST_DIR")) - .ancestors() - .nth(2) - .unwrap() -} +pub mod utils; type Result = anyhow::Result; @@ -154,7 +144,7 @@ fn main() -> Result<()> { .init(); let args = Args::from_args(); - let xctx = context::XContext::new()?; + let xctx = context::XContext::with_project_root(utils::project_root())?; match args.cmd { Command::Tools(args) => tools::run(args, xctx), Command::Test(args) => test::run(args, xctx), diff --git a/devtools/x/src/test.rs b/devtools/x/src/test.rs index c6d677afa8..fb80916b23 100644 --- a/devtools/x/src/test.rs +++ b/devtools/x/src/test.rs @@ -102,7 +102,7 @@ pub fn run(mut args: Args, xctx: XContext) -> Result<()> { env: &env_vars, }; - let cmd_result = cmd.run_on_packages(&packages); + let cmd_result = cmd.run_on_packages(&packages, &xctx); if !args.no_fail_fast && cmd_result.is_err() { return cmd_result; diff --git a/devtools/x/src/utils.rs b/devtools/x/src/utils.rs new file mode 100644 index 0000000000..66c6a83d8b --- /dev/null +++ b/devtools/x/src/utils.rs @@ -0,0 +1,15 @@ +// Copyright (c) The Diem Core Contributors +// SPDX-License-Identifier: Apache-2.0 + +use std::path::Path; + +/// The number of directories between the project root and the root of this crate. +pub const X_DEPTH: usize = 2; + +/// Returns the project root. 
TODO: switch uses to XCoreContext::project_root instead) +pub fn project_root() -> &'static Path { + Path::new(&env!("CARGO_MANIFEST_DIR")) + .ancestors() + .nth(X_DEPTH) + .unwrap() +} diff --git a/etc/starcoin_types.yml b/etc/starcoin_types.yml index 50f3739bb7..83a82b385f 100644 --- a/etc/starcoin_types.yml +++ b/etc/starcoin_types.yml @@ -1,9 +1,8 @@ --- AccessPath: - STRUCT: - - address: - TYPENAME: AccountAddress - - path: BYTES + TUPLESTRUCT: + - TYPENAME: AccountAddress + - TYPENAME: DataPath AccountAddress: NEWTYPESTRUCT: TUPLEARRAY: @@ -65,6 +64,16 @@ ContractEventV0: - type_tag: TYPENAME: TypeTag - event_data: BYTES +DataPath: + ENUM: + 0: + Code: + NEWTYPE: + TYPENAME: Identifier + 1: + Resource: + NEWTYPE: + TYPENAME: StructTag DataType: ENUM: 0: diff --git a/executor/src/account.rs b/executor/src/account.rs index 3ebed1089c..b7657432d8 100644 --- a/executor/src/account.rs +++ b/executor/src/account.rs @@ -26,7 +26,7 @@ use starcoin_vm_types::value::{MoveStructLayout, MoveTypeLayout}; use starcoin_vm_types::{ account_config::stc_type_tag, account_config::{self, AccountResource, BalanceResource}, - language_storage::{ResourceKey, StructTag, TypeTag}, + language_storage::{StructTag, TypeTag}, move_resource::MoveResource, values::{Struct, Value}, }; @@ -144,8 +144,7 @@ impl Account { fn make_access_path(&self, tag: StructTag) -> AccessPath { // TODO: we need a way to get the type (FatStructType) of the Account in place - let resource_tag = ResourceKey::new(self.addr, tag); - AccessPath::resource_access_path(&resource_tag) + AccessPath::resource_access_path(self.addr, tag) } /// Changes the keys for this account to the provided ones. diff --git a/kube/manifest/actions-runner-controller.yaml b/kube/manifest/actions-runner-controller.yaml index 48334d08f1..91f3e82ef9 100644 --- a/kube/manifest/actions-runner-controller.yaml +++ b/kube/manifest/actions-runner-controller.yaml @@ -5142,7 +5142,7 @@ spec: - args: - --metrics-addr=127.0.0.1:8080 - --enable-leader-election - - --sync-period=2m + - --sync-period=5m command: - /manager env: diff --git a/kube/manifest/runner.yaml b/kube/manifest/runner.yaml index d4ceee9c2a..09e2c8503e 100644 --- a/kube/manifest/runner.yaml +++ b/kube/manifest/runner.yaml @@ -35,7 +35,7 @@ spec: name: starcoin-runner-deployment minReplicas: 1 maxReplicas: 6 - scaleDownDelaySecondsAfterScaleOut: 120 +# scaleDownDelaySecondsAfterScaleOut: 120 metrics: - type: TotalNumberOfQueuedAndInProgressWorkflowRuns repositoryNames: diff --git a/rpc/api/src/types.rs b/rpc/api/src/types.rs index 296e781a49..fb42b10c4d 100644 --- a/rpc/api/src/types.rs +++ b/rpc/api/src/types.rs @@ -670,14 +670,14 @@ impl From for WriteOpView { #[derive(Clone, Debug, Serialize, Deserialize)] pub struct AccessPathView { pub address: AccountAddress, - pub path: StrView>, + pub path: String, } impl From for AccessPathView { fn from(ap: AccessPath) -> Self { Self { address: ap.address, - path: StrView(ap.path), + path: ap.path.to_string(), } } } diff --git a/rpc/server/src/module/contract_rpc.rs b/rpc/server/src/module/contract_rpc.rs index 6c052dc791..b94348ca86 100644 --- a/rpc/server/src/module/contract_rpc.rs +++ b/rpc/server/src/module/contract_rpc.rs @@ -21,7 +21,6 @@ use starcoin_types::account_address::AccountAddress; use starcoin_types::language_storage::{ModuleId, StructTag}; use starcoin_types::transaction::DryRunTransaction; use starcoin_vm_types::access_path::AccessPath; -use starcoin_vm_types::language_storage::ResourceKey; use std::sync::Arc; pub struct ContractRpcImpl { @@ -78,9 
+77,7 @@ where fn get_code(&self, module_id: StrView) -> FutureResult>>> { let service = self.chain_state.clone(); let f = async move { - let code = service - .get(AccessPath::code_access_path(&module_id.0)) - .await?; + let code = service.get(AccessPath::from(&module_id.0)).await?; Ok(code.map(StrView)) }; Box::new(f.map_err(map_err).boxed().compat()) @@ -96,10 +93,10 @@ where let f = async move { let state_root = service.clone().state_root().await?; let data = service - .get(AccessPath::resource_access_path(&ResourceKey::new( + .get(AccessPath::resource_access_path( addr, resource_type.0.clone(), - ))) + )) .await?; match data { None => Ok(None), diff --git a/state/api/src/chain_state.rs b/state/api/src/chain_state.rs index 439019ed4d..1d0b949036 100644 --- a/state/api/src/chain_state.rs +++ b/state/api/src/chain_state.rs @@ -2,13 +2,13 @@ // SPDX-License-Identifier: Apache-2.0 use anyhow::{ensure, format_err, Result}; -use merkle_tree::{blob::Blob, proof::SparseMerkleProof}; +use merkle_tree::{blob::Blob, proof::SparseMerkleProof, RawKey}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; -use starcoin_crypto::{hash::PlainCryptoHash, HashValue}; +use starcoin_crypto::HashValue; use starcoin_types::write_set::WriteSet; use starcoin_types::{ - access_path::{self, AccessPath}, + access_path::AccessPath, account_address::AccountAddress, account_config::{AccountResource, BalanceResource}, account_state::AccountState, @@ -54,7 +54,7 @@ impl StateProof { access_path: AccessPath, access_resource_blob: Option<&[u8]>, ) -> Result<()> { - let (account_address, data_type, ap_hash) = access_path::into_inner(access_path)?; + let (account_address, data_path) = access_path.into_inner(); match self.account_state.as_ref() { None => { ensure!( @@ -64,7 +64,7 @@ impl StateProof { } Some(s) => { let account_state = AccountState::try_from(s.as_ref())?; - match account_state.storage_roots()[data_type.storage_index()] { + match account_state.storage_roots()[data_path.data_type().storage_index()] { None => { ensure!( access_resource_blob.is_none(), @@ -73,16 +73,18 @@ impl StateProof { } Some(expected_hash) => { let blob = access_resource_blob.map(|data| Blob::from(data.to_vec())); - self.account_state_proof - .verify(expected_hash, ap_hash, blob.as_ref())?; + self.account_state_proof.verify( + expected_hash, + data_path.key_hash(), + blob.as_ref(), + )?; } } } } - let address_hash = account_address.crypto_hash(); self.account_proof.verify( expected_root_hash, - address_hash, + account_address.key_hash(), self.account_state.as_ref(), ) } diff --git a/state/state-store-api/src/lib.rs b/state/state-store-api/src/lib.rs index 76f1062cf6..dc236fcc9d 100644 --- a/state/state-store-api/src/lib.rs +++ b/state/state-store-api/src/lib.rs @@ -1,5 +1,6 @@ use anyhow::Result; use forkable_jellyfish_merkle::node_type::Node; +use forkable_jellyfish_merkle::RawKey; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use starcoin_crypto::hash::HashValue; use std::collections::BTreeMap; @@ -8,18 +9,24 @@ use std::convert::{TryFrom, TryInto}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct StateNode(pub Vec); -impl TryFrom for StateNode { +impl TryFrom> for StateNode +where + K: RawKey, +{ type Error = anyhow::Error; - fn try_from(n: Node) -> Result { + fn try_from(n: Node) -> Result { Ok(StateNode(n.encode()?)) } } -impl TryInto for StateNode { +impl TryInto> for StateNode +where + K: RawKey, +{ type Error = anyhow::Error; - fn try_into(self) -> Result { + fn try_into(self) -> Result, 
Self::Error> { Node::decode(self.0.as_slice()) } } diff --git a/state/state-tree/Cargo.toml b/state/state-tree/Cargo.toml index 30c4374475..476028e5ab 100644 --- a/state/state-tree/Cargo.toml +++ b/state/state-tree/Cargo.toml @@ -14,4 +14,4 @@ starcoin-crypto = { path = "../../commons/crypto"} serde = { version = "1.0" } forkable-jellyfish-merkle = { path = "../../core/forkable-jellyfish-merkle"} starcoin-state-store-api = {path = "../state-store-api"} - +scs = { package="starcoin-canonical-serialization", path = "../../commons/scs"} diff --git a/state/state-tree/src/lib.rs b/state/state-tree/src/lib.rs index b83f0271fb..1474770d69 100644 --- a/state/state-tree/src/lib.rs +++ b/state/state-tree/src/lib.rs @@ -9,17 +9,3 @@ mod state_tree_test; pub use starcoin_state_store_api::{StateNode, StateNodeStore}; pub use state_tree::StateTree; - -use starcoin_crypto::HashValue; - -/// change the `n`th nibble to `nibble` -pub fn update_nibble(original_key: &HashValue, n: usize, nibble: u8) -> HashValue { - assert!(nibble < 16); - let mut key = original_key.to_vec(); - key[n / 2] = if n % 2 == 0 { - key[n / 2] & 0x0f | nibble << 4 - } else { - key[n / 2] & 0xf0 | nibble - }; - HashValue::from_slice(&key).unwrap() -} diff --git a/state/state-tree/src/mock/mod.rs b/state/state-tree/src/mock/mod.rs index 248b44c900..f6ee789f6b 100644 --- a/state/state-tree/src/mock/mod.rs +++ b/state/state-tree/src/mock/mod.rs @@ -2,11 +2,11 @@ // SPDX-License-Identifier: Apache-2.0 use anyhow::{Error, Result}; -use starcoin_state_store_api::{StateNode, StateNodeStore}; - use starcoin_crypto::HashValue; +use starcoin_state_store_api::{StateNode, StateNodeStore}; use std::collections::{BTreeMap, HashMap}; use std::sync::RwLock; + #[derive(Default)] pub struct MockStateNodeStore { nodes: RwLock>, @@ -39,9 +39,6 @@ impl StateNodeStore for MockStateNodeStore { fn write_nodes(&self, nodes: BTreeMap) -> Result<(), Error> { let mut store_nodes = self.nodes.write().unwrap(); store_nodes.extend(nodes.into_iter()); - // for (node_key, node) in nodes.iter() { - // self.put(*node_key, node.clone()).unwrap(); - // } Ok(()) } } diff --git a/state/state-tree/src/state_tree.rs b/state/state-tree/src/state_tree.rs index 04ceeb8419..a6de02235e 100644 --- a/state/state-tree/src/state_tree.rs +++ b/state/state-tree/src/state_tree.rs @@ -3,28 +3,39 @@ use forkable_jellyfish_merkle::blob::Blob; use forkable_jellyfish_merkle::iterator::JellyfishMerkleIterator; use forkable_jellyfish_merkle::node_type::{Node, NodeKey}; use forkable_jellyfish_merkle::proof::SparseMerkleProof; -use forkable_jellyfish_merkle::{JellyfishMerkleTree, StaleNodeIndex, TreeReader, TreeUpdateBatch}; +use forkable_jellyfish_merkle::{ + JellyfishMerkleTree, RawKey, StaleNodeIndex, TreeReader, TreeUpdateBatch, +}; use starcoin_crypto::hash::*; use starcoin_state_store_api::*; use starcoin_types::state_set::StateSet; use std::collections::BTreeMap; -use std::ops::DerefMut; - use std::convert::TryInto; +use std::ops::DerefMut; use std::sync::{Arc, Mutex, RwLock}; -pub struct StateCache { +pub struct StateCache { root_hash: HashValue, - change_set: TreeUpdateBatch, + change_set: TreeUpdateBatch, } -impl StateCache { +impl StateCache +where + K: RawKey, +{ + pub fn new(initial_root: HashValue) -> Self { + Self { + root_hash: initial_root, + change_set: TreeUpdateBatch::default(), + } + } + fn reset(&mut self, root_hash: HashValue) { self.root_hash = root_hash; self.change_set = TreeUpdateBatch::default(); } - fn add_changeset(&mut self, root_hash: HashValue, cs: 
TreeUpdateBatch) { + fn add_changeset(&mut self, root_hash: HashValue, cs: TreeUpdateBatch) { let cur_change_set = &mut self.change_set; let mut cs_num_stale_leaves = cs.num_stale_leaves; for stale_node in cs.stale_node_index_batch.iter() { @@ -57,24 +68,18 @@ impl StateCache { } } -impl StateCache { - pub fn new(initial_root: HashValue) -> Self { - Self { - root_hash: initial_root, - change_set: TreeUpdateBatch::default(), - } - } -} - //TODO remove the Lock. -pub struct StateTree { +pub struct StateTree { storage: Arc, storage_root_hash: RwLock, - updates: RwLock>>, - cache: Mutex, + updates: RwLock>>, + cache: Mutex>, } -impl Clone for StateTree { +impl Clone for StateTree +where + K: RawKey, +{ fn clone(&self) -> Self { StateTree::new( self.storage.clone(), @@ -83,7 +88,10 @@ impl Clone for StateTree { } } -impl StateTree { +impl StateTree +where + K: RawKey, +{ /// Construct a new state_db from provided `state_root_hash` with underline `state_storage` pub fn new(state_storage: Arc, state_root_hash: Option) -> Self { let state_root_hash = state_root_hash.unwrap_or(*SPARSE_MERKLE_PLACEHOLDER_HASH); @@ -107,41 +115,38 @@ impl StateTree { /// and use it as the `key_hash`. /// this will not compute new root hash, /// Use `commit` to recompute the root hash. - pub fn put(&self, key_hash: HashValue, value: Vec) { + pub fn put(&self, key: K, value: Vec) { self.updates .write() .unwrap() - .insert(key_hash, Some(value.into())); + .insert(key, Some(value.into())); } /// Remove key_hash's data. /// this will not compute new root hash, /// Use `commit` to recompute the root hash. - pub fn remove(&self, key_hash: &HashValue) { - self.updates.write().unwrap().insert(*key_hash, None); + pub fn remove(&self, key: &K) { + self.updates.write().unwrap().insert(key.clone(), None); } /// use a key's hash `key_hash` to read a value. /// This will also read un-committed modification. - pub fn get(&self, key_hash: &HashValue) -> Result>> { + pub fn get(&self, key: &K) -> Result>> { let updates_guard = self.updates.read().unwrap(); - if let Some(uncomputed) = updates_guard.get(key_hash).cloned() { + if let Some(uncomputed) = updates_guard.get(key).cloned() { return Ok(uncomputed.map(|b| b.into())); } - Ok(self.get_with_proof(key_hash)?.0) + Ok(self.get_with_proof(key)?.0) } - pub fn contains(&self, key_hash: &HashValue) -> Result { - self.get(key_hash).map(|result| result.is_some()) + pub fn contains(&self, key: &K) -> Result { + self.get(key).map(|result| result.is_some()) } /// return value with it proof. /// NOTICE: this will only read from state tree. /// Any un-committed modification will not visible to the method. 
- pub fn get_with_proof( - &self, - key_hash: &HashValue, - ) -> Result<(Option>, SparseMerkleProof)> { + pub fn get_with_proof(&self, key: &K) -> Result<(Option>, SparseMerkleProof)> { let mut cache_guard = self.cache.lock().unwrap(); let cache = cache_guard.deref_mut(); let cur_root_hash = cache.root_hash; @@ -150,7 +155,7 @@ impl StateTree { cache, }; let tree = JellyfishMerkleTree::new(&reader); - let (data, proof) = tree.get_with_proof(cur_root_hash, *key_hash)?; + let (data, proof) = tree.get_with_proof(cur_root_hash, key.key_hash())?; match data { Some(b) => Ok((Some(b.into()), proof)), None => Ok((None, proof)), @@ -165,7 +170,7 @@ impl StateTree { let mut guard = self.updates.write().unwrap(); let updates = guard .iter() - .map(|(k, v)| (*k, v.clone())) + .map(|(k, v)| (k.clone(), v.clone())) .collect::>(); let new_root_hash = self.updates(updates)?; guard.clear(); @@ -179,18 +184,18 @@ impl StateTree { /// Write state_set to state tree. pub fn apply(&self, state_set: StateSet) -> Result<()> { - let inner: Vec<(HashValue, Vec)> = state_set.into(); + let inner: Vec<(Vec, Vec)> = state_set.into(); let updates = inner .into_iter() - .map(|(k, v)| (k, Some(v.into()))) - .collect::>(); - self.updates(updates)?; + .map(|(k, v)| Ok((K::decode_key(k.as_slice())?, Some(v.into())))) + .collect::>>(); + self.updates(updates?)?; Ok(()) } /// commit the state change into underline storage. pub fn flush(&self) -> Result<()> { - let (root_hash, change_sets) = self.get_change_sets(); + let (root_hash, change_sets) = self.change_sets(); let mut node_map = BTreeMap::new(); for (nk, n) in change_sets.node_batch.into_iter() { @@ -216,13 +221,13 @@ impl StateTree { let mut states = vec![]; for item in iterator { let item = item?; - states.push((item.0, item.1.into())); + states.push((item.0.encode_key()?, item.1.into())); } Ok(StateSet::new(states)) } /// passing None value with a key means delete the key - fn updates(&self, updates: Vec<(HashValue, Option)>) -> Result { + fn updates(&self, updates: Vec<(K, Option)>) -> Result { let cur_root_hash = self.root_hash(); //TODO should throw a error? if updates.is_empty() { @@ -272,13 +277,8 @@ impl StateTree { // Ok(()) // } - #[cfg(test)] - pub fn change_sets(&self) -> (HashValue, TreeUpdateBatch) { - self.get_change_sets() - } - /// get all changes so far based on initial root_hash. 
- fn get_change_sets(&self) -> (HashValue, TreeUpdateBatch) { + pub fn change_sets(&self) -> (HashValue, TreeUpdateBatch) { let cache_guard = self.cache.lock().unwrap(); (cache_guard.root_hash, cache_guard.change_set.clone()) } @@ -296,13 +296,16 @@ impl StateTree { } } -struct CachedTreeReader<'a> { +struct CachedTreeReader<'a, K: RawKey> { store: &'a dyn StateNodeStore, - cache: &'a StateCache, + cache: &'a StateCache, } -impl<'a> TreeReader for CachedTreeReader<'a> { - fn get_node_option(&self, node_key: &NodeKey) -> Result> { +impl<'a, K> TreeReader for CachedTreeReader<'a, K> +where + K: RawKey, +{ + fn get_node_option(&self, node_key: &NodeKey) -> Result>> { if node_key == &*SPARSE_MERKLE_PLACEHOLDER_HASH { return Ok(Some(Node::new_null())); } diff --git a/state/state-tree/src/state_tree_test.rs b/state/state-tree/src/state_tree_test.rs index e124f20602..e611edf323 100644 --- a/state/state-tree/src/state_tree_test.rs +++ b/state/state-tree/src/state_tree_test.rs @@ -1,16 +1,29 @@ use super::*; use crate::mock::MockStateNodeStore; use anyhow::Result; +use forkable_jellyfish_merkle::{HashValueKey, RawKey}; use starcoin_crypto::hash::*; use std::sync::Arc; +/// change the `n`th nibble to `nibble` +pub fn update_nibble(original_key: &HashValueKey, n: usize, nibble: u8) -> HashValueKey { + assert!(nibble < 16); + let mut key = original_key.key_hash().to_vec(); + key[n / 2] = if n % 2 == 0 { + key[n / 2] & 0x0f | nibble << 4 + } else { + key[n / 2] & 0xf0 | nibble + }; + HashValueKey(HashValue::from_slice(&key).unwrap()) +} + #[test] pub fn test_put_blob() -> Result<()> { let s = MockStateNodeStore::new(); - let state = StateTree::new(Arc::new(s), None); + let state = StateTree::::new(Arc::new(s), None); assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH); - let hash_value = HashValue::random(); + let hash_value = HashValue::random().into(); let account1 = update_nibble(&hash_value, 0, 1); let account1 = update_nibble(&account1, 2, 2); @@ -85,7 +98,7 @@ pub fn test_state_proof() -> Result<()> { let state = StateTree::new(Arc::new(s), None); assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH); - let hash_value = HashValue::random(); + let hash_value = HashValue::random().into(); let account1 = update_nibble(&hash_value, 0, 1); // re-update to make sure account2 never equal to account1 @@ -102,25 +115,24 @@ pub fn test_state_proof() -> Result<()> { assert!(value.is_some()); assert_eq!(value.unwrap(), vec![0, 0, 0]); let expected_value = Some(vec![0u8, 0, 0].into()); - proof.verify(new_root_hash, account1, expected_value.as_ref())?; + proof.verify(new_root_hash, account1.key_hash(), expected_value.as_ref())?; state.remove(&account1); let new_root_hash = state.commit()?; let (value, proof) = state.get_with_proof(&account1)?; assert!(value.is_none()); - proof.verify(new_root_hash, account1, None)?; + proof.verify(new_root_hash, account1.key_hash(), None)?; Ok(()) } #[test] pub fn test_state_commit() -> Result<()> { - // TODO: once storage support batch put, finish this. 
let s = MockStateNodeStore::new(); let state = StateTree::new(Arc::new(s), None); assert_eq!(state.root_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH); - let hash_value = HashValue::random(); + let hash_value = HashValue::random().into(); let account1 = update_nibble(&hash_value, 0, 1); let account1 = update_nibble(&account1, 2, 2); @@ -145,7 +157,7 @@ pub fn test_state_commit() -> Result<()> { pub fn test_state_dump() -> Result<()> { let s = MockStateNodeStore::new(); let state = StateTree::new(Arc::new(s), None); - let hash_value = HashValue::random(); + let hash_value = HashValueKey(HashValue::random()); let value = vec![1u8, 2u8]; state.put(hash_value, value); state.commit()?; @@ -158,7 +170,7 @@ pub fn test_state_dump() -> Result<()> { pub fn test_repeat_commit() -> Result<()> { let s = MockStateNodeStore::new(); let state = StateTree::new(Arc::new(s), None); - let hash_value = HashValue::random(); + let hash_value = HashValueKey(HashValue::random()); let value = vec![1u8, 2u8]; state.put(hash_value, value.clone()); state.commit()?; diff --git a/state/statedb/Cargo.toml b/state/statedb/Cargo.toml index c2192e520f..b4bf1aa9e8 100644 --- a/state/statedb/Cargo.toml +++ b/state/statedb/Cargo.toml @@ -20,4 +20,4 @@ starcoin-logger = {path = "../../commons/logger"} starcoin-state-tree = {path = "../state-tree"} scs = { package="starcoin-canonical-serialization", path = "../../commons/scs"} serde = { version = "1.0" } -merkle-tree = {package = "forkable-jellyfish-merkle", path = "../../core/forkable-jellyfish-merkle"} +forkable-jellyfish-merkle = {path = "../../core/forkable-jellyfish-merkle"} diff --git a/state/statedb/src/lib.rs b/state/statedb/src/lib.rs index cbae312af7..8a5530f5a1 100644 --- a/state/statedb/src/lib.rs +++ b/state/statedb/src/lib.rs @@ -3,31 +3,33 @@ use crate::StateError::AccountNotExist; use anyhow::{bail, ensure, Result}; +use forkable_jellyfish_merkle::proof::SparseMerkleProof; +use forkable_jellyfish_merkle::RawKey; use lru::LruCache; -use merkle_tree::proof::SparseMerkleProof; -use parking_lot::{Mutex, MutexGuard, RwLock}; +use parking_lot::{Mutex, RwLock}; use scs::SCSCodec; -use starcoin_crypto::{hash::PlainCryptoHash, HashValue}; +use starcoin_crypto::HashValue; use starcoin_logger::prelude::*; +pub use starcoin_state_api::{ + ChainState, ChainStateReader, ChainStateWriter, StateProof, StateWithProof, +}; use starcoin_state_tree::mock::MockStateNodeStore; use starcoin_state_tree::{StateNodeStore, StateTree}; +use starcoin_types::write_set::{WriteOp, WriteSet, WriteSetMut}; use starcoin_types::{ - access_path::{self, AccessPath, DataType}, + access_path::{AccessPath, DataType}, account_address::AccountAddress, account_state::AccountState, state_set::{AccountStateSet, ChainStateSet}, }; +use starcoin_vm_types::access_path::{DataPath, ModuleName}; +use starcoin_vm_types::language_storage::StructTag; use starcoin_vm_types::state_view::StateView; +use std::collections::HashSet; use std::convert::TryInto; use std::sync::Arc; use thiserror::Error; -pub use starcoin_state_api::{ - ChainState, ChainStateReader, ChainStateWriter, StateProof, StateWithProof, -}; -use starcoin_types::write_set::{WriteOp, WriteSet, WriteSetMut}; -use std::collections::HashSet; - #[derive(Error, Debug)] pub enum StateError { #[error("the Account for key `{0}` is not exist")] @@ -54,50 +56,48 @@ impl CacheItem { /// represent AccountState in runtime memory. 
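// AccountStateObject now keeps two trees instead of a DataType-indexed vector: a code tree
// keyed by module name and a resource tree keyed by struct tag; DataPath decides which tree
// a read or write targets.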
struct AccountStateObject { - address: AccountAddress, //TODO if use RefCell at here, compile error for ActorRef async interface // the trait `std::marker::Sync` is not implemented for AccountStateObject // refactor AccountStateObject to a readonly object. - trees: Mutex>>, + code_tree: Mutex>>, + resource_tree: Mutex>, store: Arc, } impl AccountStateObject { - pub fn new( - address: AccountAddress, - account_state: AccountState, - store: Arc, - ) -> Self { - let trees = account_state - .storage_roots() - .iter() - .map(|root| match root { - Some(root) => Some(StateTree::new(store.clone(), Some(*root))), - None => None, - }) - .collect(); + pub fn new(account_state: AccountState, store: Arc) -> Self { + let code_tree = account_state + .code_root() + .map(|root| StateTree::::new(store.clone(), Some(root))); + let resource_tree = + StateTree::::new(store.clone(), Some(account_state.resource_root())); + Self { - address, - trees: Mutex::new(trees), + code_tree: Mutex::new(code_tree), + resource_tree: Mutex::new(resource_tree), store, } } - pub fn new_account(address: AccountAddress, store: Arc) -> Self { - let mut trees = vec![None; DataType::LENGTH]; - trees[0] = Some(StateTree::new(store.clone(), None)); + pub fn empty_account(store: Arc) -> Self { + let resource_tree = StateTree::::new(store.clone(), None); Self { - address, - trees: Mutex::new(trees), + code_tree: Mutex::new(None), + resource_tree: Mutex::new(resource_tree), store, } } - pub fn get(&self, data_type: DataType, key_hash: &HashValue) -> Result>> { - let trees = self.trees.lock(); - match trees[data_type.storage_index()].as_ref() { - Some(tree) => tree.get(key_hash), - None => Ok(None), + pub fn get(&self, data_path: &DataPath) -> Result>> { + match data_path { + DataPath::Code(module_name) => Ok(self + .code_tree + .lock() + .as_ref() + .map(|tree| tree.get(module_name)) + .transpose()? + .flatten()), + DataPath::Resource(struct_tag) => self.resource_tree.lock().get(struct_tag), } } @@ -105,102 +105,101 @@ impl AccountStateObject { /// NOTICE: Any un-committed modification will not visible to the method. pub fn get_with_proof( &self, - data_type: DataType, - key_hash: &HashValue, + data_path: &DataPath, ) -> Result<(Option>, SparseMerkleProof)> { - let trees = self.trees.lock(); - match trees[data_type.storage_index()].as_ref() { - Some(tree) => tree.get_with_proof(key_hash), - None => Ok((None, SparseMerkleProof::new(None, vec![]))), + match data_path { + DataPath::Code(module_name) => Ok(self + .code_tree + .lock() + .as_ref() + .map(|tree| tree.get_with_proof(module_name)) + .transpose()? 
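// When the account has no code tree yet, report the module as absent with an empty sparse Merkle proof.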
+ .unwrap_or((None, SparseMerkleProof::new(None, vec![])))), + DataPath::Resource(struct_tag) => self.resource_tree.lock().get_with_proof(struct_tag), } } - pub fn set(&self, data_type: DataType, key_hash: HashValue, value: Vec) { - let mut trees = self.trees.lock(); - if trees[data_type.storage_index()].as_ref().is_none() { - trees[data_type.storage_index()] = Some(StateTree::new(self.store.clone(), None)); + pub fn set(&self, data_path: DataPath, value: Vec) { + match data_path { + DataPath::Code(module_name) => { + if self.code_tree.lock().is_none() { + *self.code_tree.lock() = + Some(StateTree::::new(self.store.clone(), None)); + } + self.code_tree + .lock() + .as_ref() + .expect("state tree must exist after set.") + .put(module_name, value); + } + DataPath::Resource(struct_tag) => { + self.resource_tree.lock().put(struct_tag, value); + } } - let tree = trees[data_type.storage_index()] - .as_ref() - .expect("state tree must exist after set."); - tree.put(key_hash, value); } - pub fn remove(&self, data_type: DataType, key_hash: &HashValue) -> Result<()> { - if data_type.is_code() { + pub fn remove(&self, data_path: &DataPath) -> Result<()> { + if data_path.is_code() { bail!("Not supported remove code currently."); } - let trees = self.trees.lock(); - let tree = trees[data_type.storage_index()].as_ref(); - match tree { - Some(tree) => tree.remove(key_hash), - None => bail!( - "Can not find storage root fro data_type {:?} at: {:?}", - data_type, - self.address - ), - } + let struct_tag = data_path + .as_struct_tag() + .expect("DataPath must been struct tag at here."); + self.resource_tree.lock().remove(struct_tag); Ok(()) } pub fn is_dirty(&self) -> bool { - let trees = self.trees.lock(); - for tree in trees.iter() { - if let Some(tree) = tree { - if tree.is_dirty() { - return true; - } + if self.resource_tree.lock().is_dirty() { + return true; + } + if let Some(code_tree) = self.code_tree.lock().as_ref() { + if code_tree.is_dirty() { + return true; } } false } pub fn commit(&self) -> Result { - let trees = self.trees.lock(); - for tree in trees.iter() { - if let Some(tree) = tree { - if tree.is_dirty() { - tree.commit()?; + { + let code_tree = self.code_tree.lock(); + if let Some(code_tree) = code_tree.as_ref() { + if code_tree.is_dirty() { + code_tree.commit()?; } } } - - Ok(Self::build_state(trees)) - } - - pub fn flush(&self) -> Result<()> { - let trees = self.trees.lock(); - for tree in trees.iter() { - if let Some(tree) = tree { - tree.flush()?; + { + let resource_tree = self.resource_tree.lock(); + if resource_tree.is_dirty() { + resource_tree.commit()?; } } - Ok(()) + Ok(self.to_state()) } - fn build_state(trees: MutexGuard>>) -> AccountState { - let storage_roots = trees - .iter() - .map(|tree| match tree { - Some(tree) => Some(tree.root_hash()), - None => None, - }) - .collect(); + pub fn flush(&self) -> Result<()> { + self.resource_tree.lock().flush()?; + if let Some(code_tree) = self.code_tree.lock().as_ref() { + code_tree.flush()?; + } - AccountState::new(storage_roots) + Ok(()) } fn to_state(&self) -> AccountState { - let trees = self.trees.lock(); - Self::build_state(trees) + let code_root = self.code_tree.lock().as_ref().map(|tree| tree.root_hash()); + let resource_root = self.resource_tree.lock().root_hash(); + AccountState::new(code_root, resource_root) } } pub struct ChainStateDB { store: Arc, ///global state tree. 
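// The global tree and the LRU cache are now keyed directly by AccountAddress, so callers no
// longer hash the address themselves before lookups.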
- state_tree: StateTree, - cache: Mutex>, + state_tree: StateTree, + cache: Mutex>, updates: RwLock>, } @@ -235,7 +234,7 @@ impl ChainStateDB { } } - fn new_state_tree(&self, root_hash: HashValue) -> StateTree { + fn new_state_tree(&self, root_hash: HashValue) -> StateTree { StateTree::new(self.store.clone(), Some(root_hash)) } @@ -249,13 +248,13 @@ impl ChainStateDB { Some(account_state_object) => Ok(account_state_object), None => { if create { - let account_state_object = Arc::new(AccountStateObject::new_account( - *account_address, - self.store.clone(), - )); - let address_hash = account_address.crypto_hash(); + let account_state_object = + Arc::new(AccountStateObject::empty_account(self.store.clone())); let mut cache = self.cache.lock(); - cache.put(address_hash, CacheItem::new(account_state_object.clone())); + cache.put( + *account_address, + CacheItem::new(account_state_object.clone()), + ); Ok(account_state_object) } else { Err(AccountNotExist(*account_address).into()) @@ -268,35 +267,30 @@ impl ChainStateDB { &self, account_address: &AccountAddress, ) -> Result>> { - let address_hash = account_address.crypto_hash(); let mut cache = self.cache.lock(); - let item = cache.get(&address_hash); + let item = cache.get(account_address); let object = match item { Some(item) => item.as_object(), None => { let object = self - .get_account_state_by_hash(&address_hash)? + .get_account_state(account_address)? .map(|account_state| { - Arc::new(AccountStateObject::new( - *account_address, - account_state, - self.store.clone(), - )) + Arc::new(AccountStateObject::new(account_state, self.store.clone())) }); let cache_item = match &object { Some(object) => CacheItem::new(object.clone()), None => CacheItem::AccountNotExist(), }; - cache.put(address_hash, cache_item); + cache.put(*account_address, cache_item); object } }; Ok(object) } - fn get_account_state_by_hash(&self, address_hash: &HashValue) -> Result> { + fn get_account_state(&self, account_address: &AccountAddress) -> Result> { self.state_tree - .get(address_hash) + .get(account_address) .and_then(|value| match value { Some(v) => Ok(Some(AccountState::decode(v.as_slice())?)), None => Ok(None), @@ -308,10 +302,11 @@ impl ChainState for ChainStateDB {} impl StateView for ChainStateDB { fn get(&self, access_path: &AccessPath) -> Result>> { - let (account_address, data_type, hash) = access_path::into_inner(access_path.clone())?; + let account_address = &access_path.address; + let data_path = &access_path.path; self.get_account_state_object_option(&account_address) .and_then(|account_state| match account_state { - Some(account_state) => account_state.get(data_type, &hash), + Some(account_state) => account_state.get(&data_path), None => Ok(None), }) } @@ -331,9 +326,9 @@ impl StateView for ChainStateDB { impl ChainStateReader for ChainStateDB { fn get_with_proof(&self, access_path: &AccessPath) -> Result { - let (account_address, data_type, hash) = access_path::into_inner(access_path.clone())?; - let address_hash = account_address.crypto_hash(); - let (account_state, account_proof) = self.state_tree.get_with_proof(&address_hash)?; + let account_address = &access_path.address; + let data_path = &access_path.path; + let (account_state, account_proof) = self.state_tree.get_with_proof(account_address)?; let account_state = account_state .map(|v| AccountState::decode(v.as_slice())) .transpose()?; @@ -343,8 +338,7 @@ impl ChainStateReader for ChainStateDB { StateProof::new(None, account_proof, SparseMerkleProof::default()), ), Some(account_state) => { - 
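// A proof must be built from committed state, so bail out below if this account still has
// uncommitted in-memory modifications.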
let account_state_object = - self.get_account_state_object(&account_address, false)?; + let account_state_object = self.get_account_state_object(account_address, false)?; ensure!( !account_state_object.is_dirty(), "account {} has uncommitted modification", @@ -358,7 +352,7 @@ impl ChainStateReader for ChainStateDB { ); let (resource_value, resource_proof) = - account_state_object.get_with_proof(data_type, &hash)?; + account_state_object.get_with_proof(data_path)?; StateWithProof::new( resource_value, StateProof::new(Some(account_state.encode()?), account_proof, resource_proof), @@ -383,13 +377,23 @@ impl ChainStateReader for ChainStateDB { //TODO performance optimize. let global_states = self.state_tree.dump()?; let mut account_states = vec![]; - for (address_hash, account_state_bytes) in global_states.iter() { + for (address_bytes, account_state_bytes) in global_states.iter() { let account_state: AccountState = account_state_bytes.as_slice().try_into()?; let mut state_sets = vec![]; - for storage_root in account_state.storage_roots().iter() { + for (idx, storage_root) in account_state.storage_roots().iter().enumerate() { let state_set = match storage_root { - Some(storage_root) => Some(self.new_state_tree(*storage_root).dump()?), + Some(storage_root) => { + let data_type = DataType::from_index(idx as u8)?; + match data_type { + DataType::CODE => { + Some(self.new_state_tree::(*storage_root).dump()?) + } + DataType::RESOURCE => { + Some(self.new_state_tree::(*storage_root).dump()?) + } + } + } None => None, }; @@ -397,7 +401,10 @@ impl ChainStateReader for ChainStateDB { } let account_state_set = AccountStateSet::new(state_sets); - account_states.push((*address_hash, account_state_set)); + account_states.push(( + AccountAddress::decode_key(address_bytes.as_slice())?, + account_state_set, + )); } Ok(ChainStateSet::new(account_states)) } @@ -421,38 +428,49 @@ impl ChainStateWriter for ChainStateDB { } fn apply(&self, chain_state_set: ChainStateSet) -> Result<()> { - for (address_hash, account_state_set) in chain_state_set.state_sets() { - let account_state = self - .get_account_state_by_hash(address_hash)? - .unwrap_or_default(); - let mut new_storage_roots = vec![]; - for (storage_root, state_set) in account_state - .storage_roots() - .iter() - .zip(account_state_set.into_iter()) - { - let new_storage_root = match (storage_root, state_set) { - (Some(storage_root), Some(state_set)) => { - let state_tree = self.new_state_tree(*storage_root); - state_tree.apply(state_set.clone())?; - state_tree.flush()?; - Some(state_tree.root_hash()) - } - (Some(storage_root), None) => Some(*storage_root), - (None, Some(state_set)) => { - let state_tree = StateTree::new(self.store.clone(), None); - state_tree.apply(state_set.clone())?; - state_tree.flush()?; - Some(state_tree.root_hash()) - } - (None, None) => None, - }; - new_storage_roots.push(new_storage_root); - } - - let new_account_state = AccountState::new(new_storage_roots); - self.state_tree - .put(*address_hash, new_account_state.try_into()?); + for (address, account_state_set) in chain_state_set.state_sets() { + let (code_root, resource_root) = match self.get_account_state(address)? 
{ + Some(account_state) => ( + account_state.code_root(), + Some(account_state.resource_root()), + ), + None => (None, None), + }; + let code_root = match (code_root, account_state_set.code_set()) { + (Some(storage_root), Some(state_set)) => { + let state_tree = self.new_state_tree::(storage_root); + state_tree.apply(state_set.clone())?; + state_tree.flush()?; + Some(state_tree.root_hash()) + } + (Some(storage_root), None) => Some(storage_root), + (None, Some(state_set)) => { + let state_tree = StateTree::::new(self.store.clone(), None); + state_tree.apply(state_set.clone())?; + state_tree.flush()?; + Some(state_tree.root_hash()) + } + (None, None) => None, + }; + + let resource_root = match (resource_root, account_state_set.resource_set()) { + (Some(storage_root), Some(state_set)) => { + let state_tree = self.new_state_tree::(storage_root); + state_tree.apply(state_set.clone())?; + state_tree.flush()?; + state_tree.root_hash() + } + (Some(storage_root), None) => storage_root, + (None, Some(state_set)) => { + let state_tree = StateTree::::new(self.store.clone(), None); + state_tree.apply(state_set.clone())?; + state_tree.flush()?; + state_tree.root_hash() + } + (None, None) => unreachable!("this should never happened"), + }; + let new_account_state = AccountState::new(code_root, resource_root); + self.state_tree.put(*address, new_account_state.try_into()?); } self.state_tree.commit()?; self.state_tree.flush()?; @@ -464,18 +482,17 @@ impl ChainStateWriter for ChainStateDB { for (access_path, write_op) in write_set { //update self updates record locks.insert(access_path.address); - let (account_address, data_type, key_hash) = - access_path::into_inner(access_path.clone())?; + let (account_address, data_path) = access_path.into_inner(); match write_op { WriteOp::Value(value) => { let account_state_object = self.get_account_state_object(&account_address, true)?; - account_state_object.set(data_type, key_hash, value.clone()); + account_state_object.set(data_path, value); } WriteOp::Deletion => { let account_state_object = self.get_account_state_object(&account_address, false)?; - account_state_object.remove(data_type, &key_hash)?; + account_state_object.remove(&data_path)?; } } } @@ -485,10 +502,9 @@ impl ChainStateWriter for ChainStateDB { fn commit(&self) -> Result { // cache commit for address in self.updates.read().iter() { - let address_hash = address.crypto_hash(); let account_state_object = self.get_account_state_object(address, false)?; let state = account_state_object.commit()?; - self.state_tree.put(address_hash, state.try_into()?); + self.state_tree.put(*address, state.try_into()?); } self.state_tree.commit() } @@ -529,7 +545,7 @@ mod tests { fn test_state_proof() -> Result<()> { let storage = MockStateNodeStore::new(); let chain_state_db = ChainStateDB::new(Arc::new(storage), None); - let access_path = access_path::random_resource(); + let access_path = AccessPath::random_resource(); let state0 = random_bytes(); chain_state_db.apply_write_set(to_write_set(access_path.clone(), state0.clone()))?; @@ -537,7 +553,10 @@ mod tests { let state1 = chain_state_db.get(&access_path)?; assert!(state1.is_some()); assert_eq!(state0, state1.unwrap()); + println!("{}", access_path.address.key_hash()); + println!("{}", access_path.key_hash()); let state_with_proof = chain_state_db.get_with_proof(&access_path)?; + println!("{:?}", state_with_proof); state_with_proof.proof.verify( state_root, access_path, @@ -550,7 +569,7 @@ mod tests { fn test_state_db() -> Result<()> { let storage = 
MockStateNodeStore::new(); let chain_state_db = ChainStateDB::new(Arc::new(storage), None); - let access_path = access_path::random_resource(); + let access_path = AccessPath::random_resource(); let state0 = random_bytes(); chain_state_db.apply_write_set(to_write_set(access_path.clone(), state0))?; @@ -567,7 +586,7 @@ mod tests { fn test_state_db_dump_and_apply() -> Result<()> { let storage = MockStateNodeStore::new(); let chain_state_db = ChainStateDB::new(Arc::new(storage), None); - let access_path = access_path::random_resource(); + let access_path = AccessPath::random_resource(); let state0 = random_bytes(); chain_state_db.apply_write_set(to_write_set(access_path, state0))?; chain_state_db.commit()?; diff --git a/types/Cargo.toml b/types/Cargo.toml index 302e7db374..34f5732c1f 100644 --- a/types/Cargo.toml +++ b/types/Cargo.toml @@ -32,6 +32,7 @@ network-p2p-types = { path = "../network-p2p/types"} starcoin-vm-types = { path = "../vm/types"} futures = "0.3" starcoin-accumulator = {path = "../core/accumulator"} +forkable-jellyfish-merkle = { path = "../core/forkable-jellyfish-merkle"} [features] default = [] diff --git a/types/src/account_state.rs b/types/src/account_state.rs index d8cd763d65..c16e76780b 100644 --- a/types/src/account_state.rs +++ b/types/src/account_state.rs @@ -17,15 +17,10 @@ pub struct AccountState { } impl AccountState { - pub fn new(mut storage_roots: Vec>) -> AccountState { - if storage_roots.len() < DataType::LENGTH { - storage_roots.extend(vec![None; DataType::LENGTH - storage_roots.len()]); - } - assert_eq!( - storage_roots.len(), - DataType::LENGTH, - "Storage root length must equals DataType length" - ); + pub fn new(code_root: Option, resource_root: HashValue) -> AccountState { + let mut storage_roots = vec![None; DataType::LENGTH]; + storage_roots[DataType::CODE.storage_index()] = code_root; + storage_roots[DataType::RESOURCE.storage_index()] = Some(resource_root); Self { storage_roots } } @@ -43,14 +38,6 @@ impl AccountState { } } -impl Default for AccountState { - fn default() -> Self { - Self { - storage_roots: vec![None; DataType::LENGTH], - } - } -} - impl Into>> for AccountState { fn into(self) -> Vec> { self.storage_roots diff --git a/types/src/lib.rs b/types/src/lib.rs index df9780408d..82051f8fee 100644 --- a/types/src/lib.rs +++ b/types/src/lib.rs @@ -6,9 +6,7 @@ mod event_info; pub mod access_path { - pub use starcoin_vm_types::access_path::{ - into_inner, random_code, random_resource, AccessPath, DataType, - }; + pub use starcoin_vm_types::access_path::{AccessPath, DataPath, DataType}; } pub mod account_address; diff --git a/types/src/state_set.rs b/types/src/state_set.rs index 703329da9d..26375061aa 100644 --- a/types/src/state_set.rs +++ b/types/src/state_set.rs @@ -2,16 +2,15 @@ // SPDX-License-Identifier: Apache-2.0 use crate::access_path::DataType; - use serde::{Deserialize, Serialize}; -use starcoin_crypto::HashValue; +use starcoin_vm_types::account_address::AccountAddress; /// StateSet is represent a single state-tree or sub state-tree dump result. 
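// Keys here are the raw bytes produced by RawKey::encode_key (no longer key hashes), which is
// what lets StateTree::dump and StateTree::apply round-trip a tree for any key type.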
#[derive(Debug, Default, Clone, Eq, Hash, PartialEq, Serialize, Deserialize)] -pub struct StateSet(Vec<(HashValue, Vec)>); +pub struct StateSet(Vec<(Vec, Vec)>); impl StateSet { - pub fn new(states: Vec<(HashValue, Vec)>) -> Self { + pub fn new(states: Vec<(Vec, Vec)>) -> Self { Self(states) } @@ -23,18 +22,17 @@ impl StateSet { self.0.is_empty() } - pub fn iter(&self) -> ::std::slice::Iter<(HashValue, Vec)> { + pub fn iter(&self) -> ::std::slice::Iter<(Vec, Vec)> { self.into_iter() } - fn push(&mut self, hash: HashValue, blob: Vec) { - //TODO check repeat value ? - self.0.push((hash, blob)) + fn push(&mut self, key: Vec, blob: Vec) { + self.0.push((key, blob)) } } -impl ::std::iter::FromIterator<(HashValue, Vec)> for StateSet { - fn from_iter)>>(iter: I) -> Self { +impl ::std::iter::FromIterator<(Vec, Vec)> for StateSet { + fn from_iter, Vec)>>(iter: I) -> Self { let mut s = StateSet::default(); for write in iter { s.push(write.0, write.1); @@ -44,16 +42,16 @@ impl ::std::iter::FromIterator<(HashValue, Vec)> for StateSet { } impl<'a> IntoIterator for &'a StateSet { - type Item = &'a (HashValue, Vec); - type IntoIter = ::std::slice::Iter<'a, (HashValue, Vec)>; + type Item = &'a (Vec, Vec); + type IntoIter = ::std::slice::Iter<'a, (Vec, Vec)>; fn into_iter(self) -> Self::IntoIter { self.0.iter() } } -impl Into)>> for StateSet { - fn into(self) -> Vec<(HashValue, Vec)> { +impl Into, Vec)>> for StateSet { + fn into(self) -> Vec<(Vec, Vec)> { self.0 } } @@ -92,22 +90,21 @@ impl<'a> IntoIterator for &'a AccountStateSet { /// ChainStateSet is represent ChainState dump result. #[derive(Clone, Debug, Hash, Eq, PartialEq, Serialize, Deserialize)] pub struct ChainStateSet { - /// AccountAddress hash to StateSet - state_sets: Vec<(HashValue, AccountStateSet)>, + state_sets: Vec<(AccountAddress, AccountStateSet)>, //TODO should include events? 
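// Illustrative sketch only (not part of this change): consuming a dump keyed by plain
// addresses, assuming a chain_state_db: ChainStateDB as in the statedb tests earlier in
// this patch:
//     let dump = chain_state_db.dump()?;
//     for (address, account_state_set) in dump.state_sets() {
//         // address: &AccountAddress, account_state_set: &AccountStateSet
//     }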
//events: Vec, } impl ChainStateSet { - pub fn new(state_sets: Vec<(HashValue, AccountStateSet)>) -> Self { + pub fn new(state_sets: Vec<(AccountAddress, AccountStateSet)>) -> Self { Self { state_sets } } - pub fn into_inner(self) -> Vec<(HashValue, AccountStateSet)> { + pub fn into_inner(self) -> Vec<(AccountAddress, AccountStateSet)> { self.state_sets } - pub fn state_sets(&self) -> &[(HashValue, AccountStateSet)] { + pub fn state_sets(&self) -> &[(AccountAddress, AccountStateSet)] { &self.state_sets } @@ -121,8 +118,8 @@ impl ChainStateSet { } impl<'a> IntoIterator for &'a ChainStateSet { - type Item = &'a (HashValue, AccountStateSet); - type IntoIter = ::std::slice::Iter<'a, (HashValue, AccountStateSet)>; + type Item = &'a (AccountAddress, AccountStateSet); + type IntoIter = ::std::slice::Iter<'a, (AccountAddress, AccountStateSet)>; fn into_iter(self) -> Self::IntoIter { self.state_sets.iter() diff --git a/vm/compiler/Cargo.toml b/vm/compiler/Cargo.toml index 991eb49cd6..70c1d62fc2 100644 --- a/vm/compiler/Cargo.toml +++ b/vm/compiler/Cargo.toml @@ -11,8 +11,8 @@ anyhow = "1.0.37" once_cell = "1.5.2" tempfile = "3.1.0" regex = { version = "1.4.2", default-features = false, features = ["std", "perf"] } -move-lang = { package="move-lang", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -move-lang-test-utils = { package="move-lang-test-utils", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +move-lang = { package="move-lang", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +move-lang-test-utils = { package="move-lang-test-utils", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } starcoin-crypto = { path = "../../commons/crypto"} starcoin-vm-types = { path = "../types"} starcoin-logger = { path = "../../commons/logger"} diff --git a/vm/functional-tests/Cargo.toml b/vm/functional-tests/Cargo.toml index 22d520bd2a..5d55c8e22a 100644 --- a/vm/functional-tests/Cargo.toml +++ b/vm/functional-tests/Cargo.toml @@ -9,7 +9,7 @@ edition = "2018" [dependencies] anyhow = "1.0.37" tempfile = "3.1.0" -datatest-stable = {git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +datatest-stable = {git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } stdlib = { package="stdlib", path = "../stdlib"} once_cell = "1.5.2" regex = { version = "1.4.2", default-features = false, features = ["std", "perf"] } @@ -30,7 +30,7 @@ executor = { package="starcoin-executor", path = "../../executor"} starcoin-genesis = { path = "../../core/genesis" } starcoin-consensus = { path = "../../consensus" } starcoin-account-api = { path = "../../account/api" } -move-lang = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +move-lang = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } [dev-dependencies] starcoin-vm-types = { path = "../types"} diff --git a/vm/functional-tests/src/executor.rs b/vm/functional-tests/src/executor.rs index 7896fef01e..3c058192f5 100644 --- a/vm/functional-tests/src/executor.rs +++ b/vm/functional-tests/src/executor.rs @@ -8,7 +8,6 @@ use starcoin_config::ChainNetwork; use starcoin_crypto::HashValue; use starcoin_genesis::Genesis; use starcoin_statedb::{ChainStateDB, ChainStateWriter}; -use starcoin_types::language_storage::ResourceKey; use 
starcoin_types::write_set::{WriteOp, WriteSetMut}; use starcoin_types::{ access_path::AccessPath, @@ -108,8 +107,8 @@ impl FakeExecutor { } pub fn read_timestamp(&self) -> u64 { - let resource_key = ResourceKey::new(genesis_address(), GlobalTimeOnChain::struct_tag()); - let ap = AccessPath::resource_access_path(&resource_key); + let ap = + AccessPath::resource_access_path(genesis_address(), GlobalTimeOnChain::struct_tag()); let data_blob = self .data_store .get(&ap) diff --git a/vm/functional-tests/tests/testsuite/package_txn_manager/cancel_upgrade_plan.move b/vm/functional-tests/tests/testsuite/package_txn_manager/cancel_upgrade_plan.move index 068378a68f..4d45d540a0 100644 --- a/vm/functional-tests/tests/testsuite/package_txn_manager/cancel_upgrade_plan.move +++ b/vm/functional-tests/tests/testsuite/package_txn_manager/cancel_upgrade_plan.move @@ -20,9 +20,10 @@ script { use 0x1::Config; use 0x1::Version; use 0x1::PackageTxnManager; +use 0x1::Option; fun main(account: &signer) { Config::publish_new_config(account, Version::new_version(1)); - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase(), Option::some(0)); } } @@ -48,7 +49,7 @@ script { use 0x1::PackageTxnManager; fun main(account: &signer) { let hash = x"1111111111111111"; - PackageTxnManager::submit_upgrade_plan(account, copy hash, 1, 1); + PackageTxnManager::submit_upgrade_plan(account, copy hash, 1); } } diff --git a/vm/functional-tests/tests/testsuite/package_txn_manager/override_upgrade_plan.move b/vm/functional-tests/tests/testsuite/package_txn_manager/override_upgrade_plan.move index fc65e26795..e3d9e71965 100644 --- a/vm/functional-tests/tests/testsuite/package_txn_manager/override_upgrade_plan.move +++ b/vm/functional-tests/tests/testsuite/package_txn_manager/override_upgrade_plan.move @@ -20,9 +20,10 @@ script { use 0x1::Config; use 0x1::Version; use 0x1::PackageTxnManager; +use 0x1::Option; fun main(account: &signer) { Config::publish_new_config(account, Version::new_version(1)); - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase(), Option::some(3)); } } @@ -34,7 +35,7 @@ script { use 0x1::PackageTxnManager; fun main(account: &signer) { let hash = x"1111111111111111"; - PackageTxnManager::submit_upgrade_plan(account, copy hash, 1, 2); + PackageTxnManager::submit_upgrade_plan(account, copy hash, 1); } } @@ -42,7 +43,7 @@ fun main(account: &signer) { //! block-prologue //! author: bob -//! block-time: 100000000000 +//! block-time: 1 //! block-number: 1 //! new-transaction @@ -51,7 +52,7 @@ script { use 0x1::PackageTxnManager; fun main(account: &signer) { let hash = x"2222222222222222"; - PackageTxnManager::submit_upgrade_plan(account, copy hash, 2, 3); + PackageTxnManager::submit_upgrade_plan(account, copy hash, 2); } } @@ -59,7 +60,7 @@ fun main(account: &signer) { //! block-prologue //! author: bob -//! block-time: 200000000000 +//! block-time: 2 //! block-number: 2 //! new-transaction @@ -77,7 +78,7 @@ fun main(account: &signer) { //! block-prologue //! author: bob -//! block-time: 300000000000 +//! block-time: 4 //! block-number: 3 //! 
new-transaction diff --git a/vm/functional-tests/tests/testsuite/package_txn_manager/package_txn_manager.move b/vm/functional-tests/tests/testsuite/package_txn_manager/package_txn_manager.move index 0c9522973c..e65dbeb8b2 100644 --- a/vm/functional-tests/tests/testsuite/package_txn_manager/package_txn_manager.move +++ b/vm/functional-tests/tests/testsuite/package_txn_manager/package_txn_manager.move @@ -21,9 +21,10 @@ script { use 0x1::Config; use 0x1::Version; use 0x1::PackageTxnManager; +use 0x1::Option; fun main(account: &signer) { Config::publish_new_config(account, Version::new_version(1)); - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase(), Option::some(2)); } } @@ -49,13 +50,12 @@ script { use 0x1::PackageTxnManager; fun main(account: &signer) { let hash = x"1111111111111111"; - PackageTxnManager::submit_upgrade_plan(account, copy hash, 1, 1); + PackageTxnManager::submit_upgrade_plan(account, copy hash, 1); } } // check: EXECUTED - // package txn must wait after plan's active_after_number //! new-transaction //! sender: alice @@ -116,8 +116,9 @@ script { //! sender: alice script { use 0x1::PackageTxnManager; + use 0x1::Option; fun main(account: &signer) { - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_arbitrary()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_arbitrary(), Option::some(0)); } } @@ -127,8 +128,9 @@ script { //! sender: alice script { use 0x1::PackageTxnManager; + use 0x1::Option; fun main(account: &signer) { - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_new_module()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_new_module(), Option::some(0)); } } @@ -139,8 +141,9 @@ script { //! sender: alice script { use 0x1::PackageTxnManager; + use 0x1::Option; fun main(account: &signer) { - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_freeze()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_freeze(), Option::some(0)); } } // check: EXECUTED diff --git a/vm/functional-tests/tests/testsuite/transaction_scripts/module_upgrade.move b/vm/functional-tests/tests/testsuite/transaction_scripts/module_upgrade.move index 4b61e23699..5190fcd599 100644 --- a/vm/functional-tests/tests/testsuite/transaction_scripts/module_upgrade.move +++ b/vm/functional-tests/tests/testsuite/transaction_scripts/module_upgrade.move @@ -49,16 +49,17 @@ script { use 0x1::Config; use 0x1::Version; use 0x1::PackageTxnManager; + use 0x1::Option; fun update_module_upgrade_strategy(account: &signer) { Config::publish_new_config(account, Version::new_version(1)); - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase(), Option::some(1)); } } // check: EXECUTED // check: gas_used -// check: 807 - +// check: 840 +// //! new-transaction //! 
sender: alice script { @@ -172,4 +173,4 @@ script { } // check: EXECUTED // check: gas_used -// check: 200 +// check: 186 diff --git a/vm/functional-tests/tests/testsuite/upgrade_module_dao_proposal/basic.move b/vm/functional-tests/tests/testsuite/upgrade_module_dao_proposal/basic.move index 611d1fef00..7d23fd7281 100644 --- a/vm/functional-tests/tests/testsuite/upgrade_module_dao_proposal/basic.move +++ b/vm/functional-tests/tests/testsuite/upgrade_module_dao_proposal/basic.move @@ -80,9 +80,10 @@ script { use 0x1::Config; use 0x1::Version; use 0x1::PackageTxnManager; + use 0x1::Option; fun main(account: &signer) { Config::publish_new_config(account, Version::new_version(1)); - PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase()); + PackageTxnManager::update_module_upgrade_strategy(account, PackageTxnManager::get_strategy_two_phase(), Option::some(0)); } } // check: EXECUTED diff --git a/vm/move-coverage/Cargo.toml b/vm/move-coverage/Cargo.toml index c67dbf7b60..8226e5aa30 100644 --- a/vm/move-coverage/Cargo.toml +++ b/vm/move-coverage/Cargo.toml @@ -18,8 +18,8 @@ colored = "2.0.0" scs = { package="starcoin-canonical-serialization", path = "../../commons/scs"} starcoin-types = { path = "../../types"} starcoin-vm-types = { path = "../types"} -bytecode-source-map = { package = "bytecode-source-map", version = "0.1.0", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -bytecode-verifier = { package = "bytecode-verifier", version = "0.1.0", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +bytecode-source-map = { package = "bytecode-source-map", version = "0.1.0", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +bytecode-verifier = { package = "bytecode-verifier", version = "0.1.0", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } [features] default = [] diff --git a/vm/move-explain/Cargo.toml b/vm/move-explain/Cargo.toml index 26342d0a4c..ca0637c5ea 100644 --- a/vm/move-explain/Cargo.toml +++ b/vm/move-explain/Cargo.toml @@ -12,9 +12,9 @@ edition = "2018" [dependencies] structopt = "0.3.21" stdlib = { package="stdlib", path = "../stdlib"} -diem-workspace-hack = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -errmapgen = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -move-core-types = { git = "https://github.com/starcoinorg/diem", rev = "a69729b2d54af44d2f779bcf167e3f6d681a9821" } +diem-workspace-hack = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +errmapgen = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +move-core-types = { git = "https://github.com/starcoinorg/diem", rev = "89223522186cb4cd39e21e44fb2da745f7a45c7a" } scs = { package="starcoin-canonical-serialization", path = "../../commons/scs"} [features] diff --git a/vm/move-prover/Cargo.toml b/vm/move-prover/Cargo.toml index b484ef3c83..4a002517a1 100644 --- a/vm/move-prover/Cargo.toml +++ b/vm/move-prover/Cargo.toml @@ -8,17 +8,17 @@ license = "Apache-2.0" [dependencies] # diem dependencies -move-lang = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -move-model = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -docgen 
= { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -errmapgen = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -bytecode = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -vm = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -diem-temppath = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -diem-workspace-hack = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -bytecode-source-map = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -move-ir-types = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -abigen = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +move-lang = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +move-model = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +docgen = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +errmapgen = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +bytecode = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +vm = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +diem-temppath = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +diem-workspace-hack = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +bytecode-source-map = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +move-ir-types = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +abigen = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } # external dependencies async-trait = "0.1.42" @@ -43,8 +43,8 @@ tokio = { version = "0.2", features = ["full"] } toml = "0.5.8" [dev-dependencies] -datatest-stable = {git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -move-prover-test-utils = {git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +datatest-stable = {git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +move-prover-test-utils = {git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } shell-words = "1.0.0" [[test]] diff --git a/vm/resource-viewer/src/resolver.rs b/vm/resource-viewer/src/resolver.rs index d4e242628b..7bb32d4f78 100644 --- a/vm/resource-viewer/src/resolver.rs +++ b/vm/resource-viewer/src/resolver.rs @@ -48,9 +48,10 @@ impl<'a> Resolver<'a> { if let Some(module) = self.cache.get(&module_id) { return Ok(module); } + let access_path = AccessPath::from(&module_id); let blob = self .state - .get(&AccessPath::code_access_path(&module_id))? + .get(&access_path)? 
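// AccessPath::from(&module_id), built once above, replaces the removed code_access_path helper here.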
.ok_or_else(|| anyhow!("Module {:?} can't be found", module_id))?;
let compiled_module = CompiledModule::deserialize(&blob).map_err(|status| {
anyhow!(
diff --git a/vm/stdlib/Cargo.toml b/vm/stdlib/Cargo.toml
index 85083dda05..5b92481d11 100644
--- a/vm/stdlib/Cargo.toml
+++ b/vm/stdlib/Cargo.toml
@@ -8,8 +8,8 @@ edition = "2018"
[dependencies]
anyhow = "1.0.37"
-bytecode-verifier = { package="bytecode-verifier", git = "https://github.com/starcoinorg/diem", version = "0.1.0",rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" }
-datatest-stable = { package="datatest-stable", git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" }
+bytecode-verifier = { package="bytecode-verifier", git = "https://github.com/starcoinorg/diem", version = "0.1.0",rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" }
+datatest-stable = { package="datatest-stable", git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" }
starcoin-crypto = { path = "../../commons/crypto"}
starcoin-vm-types = { path = "../types"}
scs = { package="starcoin-canonical-serialization", path = "../../commons/scs"}
diff --git a/vm/stdlib/compiled/latest/init_scripts/genesis_init.mv b/vm/stdlib/compiled/latest/init_scripts/genesis_init.mv
index 3769d15d8b938c5ee4b928632ae72a93d17995ab..1f86ca4ad42c59c419c2d5de03a8a41b2feb2c29 100644
GIT binary patch
diff --git a/vm/stdlib/compiled/latest/stdlib/18_UpgradeModuleDaoProposal.mv b/vm/stdlib/compiled/latest/stdlib/18_UpgradeModuleDaoProposal.mv
new file mode 100644
index 0000000000000000000000000000000000000000..c9f345b49e5589563233e2a58076a01787a8080a
GIT binary patch
diff --git a/vm/stdlib/compiled/latest/stdlib/20_TransactionTimeoutConfig.mv b/vm/stdlib/compiled/latest/stdlib/19_TransactionTimeoutConfig.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/20_TransactionTimeoutConfig.mv
rename to vm/stdlib/compiled/latest/stdlib/19_TransactionTimeoutConfig.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/19_UpgradeModuleDaoProposal.mv
deleted file mode 100644
index 61998043fbfab739273e137412b87239ccfcb2a4..0000000000000000000000000000000000000000
GIT binary patch
diff --git a/vm/stdlib/compiled/latest/stdlib/21_TransactionPublishOption.mv b/vm/stdlib/compiled/latest/stdlib/20_TransactionPublishOption.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/21_TransactionPublishOption.mv
rename to vm/stdlib/compiled/latest/stdlib/20_TransactionPublishOption.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/22_RewardConfig.mv b/vm/stdlib/compiled/latest/stdlib/21_RewardConfig.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/22_RewardConfig.mv
rename to vm/stdlib/compiled/latest/stdlib/21_RewardConfig.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/23_OnChainConfigDao.mv b/vm/stdlib/compiled/latest/stdlib/22_OnChainConfigDao.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/23_OnChainConfigDao.mv
rename to vm/stdlib/compiled/latest/stdlib/22_OnChainConfigDao.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/24_ModifyDaoConfigProposal.mv b/vm/stdlib/compiled/latest/stdlib/23_ModifyDaoConfigProposal.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/24_ModifyDaoConfigProposal.mv
rename to vm/stdlib/compiled/latest/stdlib/23_ModifyDaoConfigProposal.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/25_ConsensusConfig.mv b/vm/stdlib/compiled/latest/stdlib/24_ConsensusConfig.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/25_ConsensusConfig.mv
rename to vm/stdlib/compiled/latest/stdlib/24_ConsensusConfig.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/26_STC.mv b/vm/stdlib/compiled/latest/stdlib/25_STC.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/26_STC.mv
rename to vm/stdlib/compiled/latest/stdlib/25_STC.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/27_TransactionFee.mv b/vm/stdlib/compiled/latest/stdlib/26_TransactionFee.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/27_TransactionFee.mv
rename to vm/stdlib/compiled/latest/stdlib/26_TransactionFee.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/28_Hash.mv b/vm/stdlib/compiled/latest/stdlib/27_Hash.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/28_Hash.mv
rename to vm/stdlib/compiled/latest/stdlib/27_Hash.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/29_Authenticator.mv b/vm/stdlib/compiled/latest/stdlib/28_Authenticator.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/29_Authenticator.mv
rename to vm/stdlib/compiled/latest/stdlib/28_Authenticator.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/30_Account.mv b/vm/stdlib/compiled/latest/stdlib/29_Account.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/30_Account.mv
rename to vm/stdlib/compiled/latest/stdlib/29_Account.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/16_Block.mv b/vm/stdlib/compiled/latest/stdlib/30_Block.mv
similarity index 100%
rename from vm/stdlib/compiled/latest/stdlib/16_Block.mv
rename to vm/stdlib/compiled/latest/stdlib/30_Block.mv
diff --git a/vm/stdlib/compiled/latest/stdlib/43_TransactionTimeout.mv b/vm/stdlib/compiled/latest/stdlib/43_TransactionTimeout.mv
index dbcfac9f141d7a0da224b2b3a6d1399597c7f2e8..1cc46864df2e1e14c0683b66b0cbac73cdb9535a 100644
GIT binary patch
diff --git a/vm/stdlib/compiled/latest/stdlib/44_TransactionManager.mv b/vm/stdlib/compiled/latest/stdlib/44_TransactionManager.mv
index c36386ffc1354b0bd8500dbd4852bd146c7fbea7..4f29f0ac4c11c3e707d873f86e028101ae445a0e 100644
GIT binary patch
diff --git a/vm/stdlib/compiled/latest/transaction_scripts/abi/update_module_upgrade_strategy.abi b/vm/stdlib/compiled/latest/transaction_scripts/abi/update_module_upgrade_strategy.abi
index a392d12410421cec22cc2a92fefe83e8fe73808c..a291adf02e62551e84b0efc60cceaa874e0368b4 100644
GIT binary patch
uiYNfBEb^UsX|r>4IM&rV?M|uQZ!gU%HK}f&oAx&k(fS=!?zY5OE6+bd_Y{5r diff --git a/vm/stdlib/init_scripts/genesis_init.move b/vm/stdlib/init_scripts/genesis_init.move index 1865698bd1..8cfa79e148 100644 --- a/vm/stdlib/init_scripts/genesis_init.move +++ b/vm/stdlib/init_scripts/genesis_init.move @@ -21,6 +21,7 @@ script { use 0x1::Epoch; use 0x1::Version; use 0x1::Config; + use 0x1::Option; fun genesis_init( stdlib_version: u64, @@ -134,6 +135,7 @@ script { PackageTxnManager::update_module_upgrade_strategy( &genesis_account, PackageTxnManager::get_strategy_two_phase(), + Option::some(0), ); // stc should be initialized after genesis_account's module upgrade strategy set. { diff --git a/vm/stdlib/modules/PackageTxnManager.move b/vm/stdlib/modules/PackageTxnManager.move index 78082f9a18..1c0b9b0a6b 100644 --- a/vm/stdlib/modules/PackageTxnManager.move +++ b/vm/stdlib/modules/PackageTxnManager.move @@ -3,11 +3,11 @@ address 0x1 { use 0x1::Option::{Self,Option}; use 0x1::Signer; use 0x1::CoreAddresses; - use 0x1::Block; use 0x1::Errors; use 0x1::Version; use 0x1::Event; use 0x1::Config; + use 0x1::Timestamp; spec module { pragma verify = true; @@ -16,7 +16,7 @@ address 0x1 { struct UpgradePlan { package_hash: vector, - active_after_number: u64, + active_after_time: u64, version: u64, } @@ -29,6 +29,7 @@ address 0x1 { const STRATEGY_TWO_PHASE: u8 = 1; const STRATEGY_NEW_MODULE: u8 = 2; const STRATEGY_FREEZE: u8 = 3; + const DEFAULT_MIN_TIME_LIMIT: u64 = 86400000;// one day public fun get_strategy_arbitrary(): u8 { STRATEGY_ARBITRARY } @@ -38,6 +39,8 @@ address 0x1 { public fun get_strategy_freeze(): u8 { STRATEGY_FREEZE } + public fun get_default_min_time_limit(): u64 { DEFAULT_MIN_TIME_LIMIT } + const EUPGRADE_PLAN_IS_NONE: u64 = 102; const EPACKAGE_HASH_INCORRECT: u64 = 103; const EACTIVE_TIME_INCORRECT: u64 = 104; @@ -55,11 +58,16 @@ address 0x1 { } resource struct TwoPhaseUpgrade { + config: TwoPhaseUpgradeConfig, plan: Option, version_cap: Config::ModifyConfigCapability, upgrade_event: Event::EventHandle, } + struct TwoPhaseUpgradeConfig { + min_time_limit: u64, + } + struct UpgradeEvent { package_address: address, package_hash: vector, @@ -67,7 +75,7 @@ address 0x1 { } // Update account's ModuleUpgradeStrategy - public fun update_module_upgrade_strategy(account: &signer, strategy: u8) acquires ModuleUpgradeStrategy, TwoPhaseUpgrade, UpgradePlanCapability{ + public fun update_module_upgrade_strategy(account: &signer, strategy: u8, min_time: Option) acquires ModuleUpgradeStrategy, TwoPhaseUpgrade, UpgradePlanCapability{ assert(strategy == STRATEGY_ARBITRARY || strategy == STRATEGY_TWO_PHASE || strategy == STRATEGY_NEW_MODULE || strategy == STRATEGY_FREEZE, Errors::invalid_argument(EUNKNOWN_STRATEGY)); let account_address = Signer::address_of(account); let previous_strategy = get_module_upgrade_strategy(account_address); @@ -79,15 +87,19 @@ address 0x1 { }; if (strategy == STRATEGY_TWO_PHASE){ let version_cap = Config::extract_modify_config_capability(account); + let min_time_limit = Option::get_with_default(&min_time, DEFAULT_MIN_TIME_LIMIT); move_to(account, UpgradePlanCapability{ account_address: account_address}); - move_to(account, TwoPhaseUpgrade{plan: Option::none(), + move_to(account, TwoPhaseUpgrade{ + config: TwoPhaseUpgradeConfig{min_time_limit: min_time_limit}, + plan: Option::none(), version_cap: version_cap, - upgrade_event: Event::new_event_handle(account)}); + upgrade_event: Event::new_event_handle(account)} + ); }; //clean two phase upgrade resource if (previous_strategy == 
STRATEGY_TWO_PHASE){ let tpu = move_from(account_address); - let TwoPhaseUpgrade{plan:_, version_cap, upgrade_event} = tpu; + let TwoPhaseUpgrade{plan:_, version_cap, upgrade_event, config: _} = tpu; Event::destroy_handle(upgrade_event); Config::destroy_modify_config_capability(version_cap); // UpgradePlanCapability may be extracted @@ -138,10 +150,10 @@ address 0x1 { } // upgrade plan can override - public fun submit_upgrade_plan(account: &signer, package_hash: vector, version:u64, active_after_number: u64) acquires TwoPhaseUpgrade,UpgradePlanCapability,ModuleUpgradeStrategy{ + public fun submit_upgrade_plan(account: &signer, package_hash: vector, version:u64) acquires TwoPhaseUpgrade,UpgradePlanCapability,ModuleUpgradeStrategy{ let account_address = Signer::address_of(account); let cap = borrow_global(account_address); - submit_upgrade_plan_with_cap(cap, package_hash, version, active_after_number); + submit_upgrade_plan_with_cap(cap, package_hash, version); } spec fun submit_upgrade_plan { @@ -150,27 +162,26 @@ address 0x1 { ensures Option::spec_is_some(global(global(Signer::address_of(account)).account_address).plan); } - public fun submit_upgrade_plan_with_cap(cap: &UpgradePlanCapability, package_hash: vector, version: u64, active_after_number: u64) acquires TwoPhaseUpgrade,ModuleUpgradeStrategy{ - assert(active_after_number >= Block::get_current_block_number(), Errors::invalid_argument(EACTIVE_TIME_INCORRECT)); + public fun submit_upgrade_plan_with_cap(cap: &UpgradePlanCapability, package_hash: vector, version: u64) acquires TwoPhaseUpgrade,ModuleUpgradeStrategy{ let account_address = cap.account_address; assert(get_module_upgrade_strategy(account_address) == STRATEGY_TWO_PHASE, Errors::invalid_argument(ESTRATEGY_NOT_TWO_PHASE)); let tpu = borrow_global_mut(account_address); - tpu.plan = Option::some(UpgradePlan{ package_hash, active_after_number, version}); + let active_after_time = Timestamp::now_milliseconds() + tpu.config.min_time_limit; + tpu.plan = Option::some(UpgradePlan{ package_hash, active_after_time, version}); } spec fun submit_upgrade_plan_with_cap { - include SubmitUpgradePlanWithCapAbortsIf{account: cap.account_address, active_after_number}; + include SubmitUpgradePlanWithCapAbortsIf{account: cap.account_address}; ensures Option::spec_is_some(global(cap.account_address).plan); } spec schema SubmitUpgradePlanWithCapAbortsIf { account: address; - active_after_number: u64; - aborts_if !exists(CoreAddresses::GENESIS_ADDRESS()); - aborts_if active_after_number < global(CoreAddresses::GENESIS_ADDRESS()).number; aborts_if !exists(account); aborts_if global(account).strategy != 1; aborts_if !exists(account); + aborts_if !exists(CoreAddresses::GENESIS_ADDRESS()); + aborts_if Timestamp::now_milliseconds() + global(account).config.min_time_limit > max_u64(); } public fun cancel_upgrade_plan(account: &signer) acquires TwoPhaseUpgrade,UpgradePlanCapability,ModuleUpgradeStrategy{ @@ -255,7 +266,7 @@ address 0x1 { assert(Option::is_some(&plan_opt), Errors::invalid_argument(EUPGRADE_PLAN_IS_NONE)); let plan = Option::borrow(&plan_opt); assert(*&plan.package_hash == package_hash, Errors::invalid_argument(EPACKAGE_HASH_INCORRECT)); - assert(plan.active_after_number <= Block::get_current_block_number(), Errors::invalid_argument(EACTIVE_TIME_INCORRECT)); + assert(plan.active_after_time <= Timestamp::now_milliseconds(), Errors::invalid_argument(EACTIVE_TIME_INCORRECT)); }else if(strategy == STRATEGY_NEW_MODULE){ //do check at VM runtime. 
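// Timing note: with the default TwoPhaseUpgradeConfig, min_time_limit is
// DEFAULT_MIN_TIME_LIMIT = 86400000 ms (24 * 60 * 60 * 1000, one day), so a plan recorded by
// submit_upgrade_plan only passes the two-phase check above one day after submission.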
}else if(strategy == STRATEGY_FREEZE){ @@ -273,8 +284,8 @@ address 0x1 { aborts_if spec_get_module_upgrade_strategy(package_address) == 3; aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_is_none(spec_get_upgrade_plan(package_address)); aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).package_hash != package_hash; - aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && !exists(CoreAddresses::GENESIS_ADDRESS()); - aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_number > global(CoreAddresses::GENESIS_ADDRESS()).number; + aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && !exists(CoreAddresses::GENESIS_ADDRESS()); + aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_time > Timestamp::now_milliseconds(); } spec schema CheckPackageTxnAbortsIfWithType { @@ -285,8 +296,8 @@ address 0x1 { aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 3; aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_is_none(spec_get_upgrade_plan(package_address)); aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).package_hash != package_hash; - aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && !exists(CoreAddresses::GENESIS_ADDRESS()); - aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_number > global(CoreAddresses::GENESIS_ADDRESS()).number; + aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && !exists(CoreAddresses::GENESIS_ADDRESS()); + aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_time > Timestamp::now_milliseconds(); } fun finish_upgrade_plan(package_address: address) acquires TwoPhaseUpgrade { diff --git a/vm/stdlib/modules/UpgradeModuleDaoProposal.move b/vm/stdlib/modules/UpgradeModuleDaoProposal.move index ff6fded0c2..b44c60a190 100644 --- a/vm/stdlib/modules/UpgradeModuleDaoProposal.move +++ b/vm/stdlib/modules/UpgradeModuleDaoProposal.move @@ -5,7 +5,6 @@ module UpgradeModuleDaoProposal { use 0x1::Signer; use 0x1::Option; use 0x1::Dao; - use 0x1::Block; use 0x1::Errors; spec module { @@ -93,7 +92,6 @@ module UpgradeModuleDaoProposal { &cap.cap, package_hash, version, - Block::get_current_block_number(), ); } spec fun submit_module_upgrade_plan { diff --git a/vm/stdlib/modules/doc/PackageTxnManager.md b/vm/stdlib/modules/doc/PackageTxnManager.md index 02cb856ad0..fd9a674e72 100644 --- a/vm/stdlib/modules/doc/PackageTxnManager.md +++ b/vm/stdlib/modules/doc/PackageTxnManager.md @@ -9,12 +9,14 @@ - [Resource `UpgradePlanCapability`](#0x1_PackageTxnManager_UpgradePlanCapability) - [Resource `ModuleUpgradeStrategy`](#0x1_PackageTxnManager_ModuleUpgradeStrategy) - [Resource `TwoPhaseUpgrade`](#0x1_PackageTxnManager_TwoPhaseUpgrade) +- [Struct `TwoPhaseUpgradeConfig`](#0x1_PackageTxnManager_TwoPhaseUpgradeConfig) - [Struct `UpgradeEvent`](#0x1_PackageTxnManager_UpgradeEvent) - [Constants](#@Constants_0) - [Function `get_strategy_arbitrary`](#0x1_PackageTxnManager_get_strategy_arbitrary) 
- [Function `get_strategy_two_phase`](#0x1_PackageTxnManager_get_strategy_two_phase) - [Function `get_strategy_new_module`](#0x1_PackageTxnManager_get_strategy_new_module) - [Function `get_strategy_freeze`](#0x1_PackageTxnManager_get_strategy_freeze) +- [Function `get_default_min_time_limit`](#0x1_PackageTxnManager_get_default_min_time_limit) - [Function `update_module_upgrade_strategy`](#0x1_PackageTxnManager_update_module_upgrade_strategy) - [Function `account_address`](#0x1_PackageTxnManager_account_address) - [Function `destroy_upgrade_plan_cap`](#0x1_PackageTxnManager_destroy_upgrade_plan_cap) @@ -45,13 +47,13 @@ - [Function `package_txn_epilogue`](#@Specification_1_package_txn_epilogue) -
-use 0x1::Block;
 use 0x1::Config;
 use 0x1::CoreAddresses;
 use 0x1::Errors;
 use 0x1::Event;
 use 0x1::Option;
 use 0x1::Signer;
+use 0x1::Timestamp;
 use 0x1::Version;
 
@@ -80,7 +82,7 @@
-active_after_number: u64
+active_after_time: u64
@@ -167,6 +169,12 @@
+config: PackageTxnManager::TwoPhaseUpgradeConfig +
+
+ +
+
plan: Option::Option<PackageTxnManager::UpgradePlan>
@@ -187,6 +195,33 @@
+ + + + +## Struct `TwoPhaseUpgradeConfig` + + + +
struct TwoPhaseUpgradeConfig
+
+ + + +
+Fields + + +
+
+min_time_limit: u64 +
+
+ +
+
+ +
@@ -233,6 +268,15 @@ ## Constants + + + + +
const DEFAULT_MIN_TIME_LIMIT: u64 = 86400000;
+
+ + + @@ -418,6 +462,28 @@ + + + + +## Function `get_default_min_time_limit` + + + +
public fun get_default_min_time_limit(): u64
+
+ + + +
+Implementation + + +
public fun get_default_min_time_limit(): u64 { DEFAULT_MIN_TIME_LIMIT }
+
+ + +
@@ -426,7 +492,7 @@ -
public fun update_module_upgrade_strategy(account: &signer, strategy: u8)
+public fun update_module_upgrade_strategy(account: &signer, strategy: u8, min_time: Option::Option<u64>)
 
@@ -435,7 +501,7 @@ Implementation -
public fun update_module_upgrade_strategy(account: &signer, strategy: u8) acquires ModuleUpgradeStrategy, TwoPhaseUpgrade, UpgradePlanCapability{
+public fun update_module_upgrade_strategy(account: &signer, strategy: u8, min_time: Option<u64>) acquires ModuleUpgradeStrategy, TwoPhaseUpgrade, UpgradePlanCapability{
     assert(strategy == STRATEGY_ARBITRARY || strategy == STRATEGY_TWO_PHASE || strategy == STRATEGY_NEW_MODULE || strategy == STRATEGY_FREEZE, Errors::invalid_argument(EUNKNOWN_STRATEGY));
     let account_address = Signer::address_of(account);
     let previous_strategy = get_module_upgrade_strategy(account_address);
@@ -447,15 +513,19 @@
     };
     if (strategy == STRATEGY_TWO_PHASE){
         let version_cap = Config::extract_modify_config_capability<Version::Version>(account);
+        let min_time_limit = Option::get_with_default(&min_time, DEFAULT_MIN_TIME_LIMIT);
         move_to(account, UpgradePlanCapability{ account_address: account_address});
-        move_to(account, TwoPhaseUpgrade{plan: Option::none<UpgradePlan>(),
+        move_to(account, TwoPhaseUpgrade{
+            config: TwoPhaseUpgradeConfig{min_time_limit: min_time_limit},
+            plan: Option::none<UpgradePlan>(),
             version_cap: version_cap,
-            upgrade_event: Event::new_event_handle<Self::UpgradeEvent>(account)});
+            upgrade_event: Event::new_event_handle<Self::UpgradeEvent>(account)}
+        );
     };
     //clean two phase upgrade resource
     if (previous_strategy == STRATEGY_TWO_PHASE){
         let tpu = move_from<TwoPhaseUpgrade>(account_address);
-        let TwoPhaseUpgrade{plan:_, version_cap, upgrade_event} = tpu;
+        let TwoPhaseUpgrade{plan:_, version_cap, upgrade_event, config: _} = tpu;
         Event::destroy_handle<Self::UpgradeEvent>(upgrade_event);
         Config::destroy_modify_config_capability<Version::Version>(version_cap);
         // UpgradePlanCapability may be extracted
@@ -551,7 +621,7 @@
 
 
 
-public fun submit_upgrade_plan(account: &signer, package_hash: vector<u8>, version: u64, active_after_number: u64)
+public fun submit_upgrade_plan(account: &signer, package_hash: vector<u8>, version: u64)
 
@@ -560,10 +630,10 @@ Implementation -
public fun submit_upgrade_plan(account: &signer, package_hash: vector<u8>, version:u64, active_after_number: u64) acquires TwoPhaseUpgrade,UpgradePlanCapability,ModuleUpgradeStrategy{
+public fun submit_upgrade_plan(account: &signer, package_hash: vector<u8>, version:u64) acquires TwoPhaseUpgrade,UpgradePlanCapability,ModuleUpgradeStrategy{
     let account_address = Signer::address_of(account);
     let cap = borrow_global<UpgradePlanCapability>(account_address);
-    submit_upgrade_plan_with_cap(cap, package_hash, version, active_after_number);
+    submit_upgrade_plan_with_cap(cap, package_hash, version);
 }
 
@@ -577,7 +647,7 @@ -
public fun submit_upgrade_plan_with_cap(cap: &PackageTxnManager::UpgradePlanCapability, package_hash: vector<u8>, version: u64, active_after_number: u64)
+public fun submit_upgrade_plan_with_cap(cap: &PackageTxnManager::UpgradePlanCapability, package_hash: vector<u8>, version: u64)
 
@@ -586,12 +656,12 @@ Implementation -
public fun submit_upgrade_plan_with_cap(cap: &UpgradePlanCapability, package_hash: vector<u8>, version: u64, active_after_number: u64) acquires TwoPhaseUpgrade,ModuleUpgradeStrategy{
-    assert(active_after_number >= Block::get_current_block_number(), Errors::invalid_argument(EACTIVE_TIME_INCORRECT));
+public fun submit_upgrade_plan_with_cap(cap: &UpgradePlanCapability, package_hash: vector<u8>, version: u64) acquires TwoPhaseUpgrade,ModuleUpgradeStrategy{
     let account_address = cap.account_address;
     assert(get_module_upgrade_strategy(account_address) == STRATEGY_TWO_PHASE, Errors::invalid_argument(ESTRATEGY_NOT_TWO_PHASE));
     let tpu = borrow_global_mut<TwoPhaseUpgrade>(account_address);
-    tpu.plan = Option::some(UpgradePlan{ package_hash, active_after_number, version});
+    let active_after_time = Timestamp::now_milliseconds() + tpu.config.min_time_limit;
+    tpu.plan = Option::some(UpgradePlan{ package_hash, active_after_time, version});
 }
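Taken together, the two hunks above change who decides when a plan activates: the caller no longer supplies `active_after_number`; instead `active_after_time` is derived from the on-chain clock plus the account's configured `min_time_limit`, and `check_package_txn` later requires that time to have passed. A small Rust sketch of the rule (illustrative only; the authoritative logic is the Move code above):

```rust
// Illustrative mirror of the new timing rule; values are in milliseconds.
struct UpgradePlan {
    package_hash: Vec<u8>,
    active_after_time: u64,
    version: u64,
}

// submit_upgrade_plan_with_cap: active_after_time = now + min_time_limit.
fn submit_plan(now_ms: u64, min_time_limit: u64, package_hash: Vec<u8>, version: u64) -> UpgradePlan {
    UpgradePlan {
        package_hash,
        active_after_time: now_ms + min_time_limit,
        version,
    }
}

// check_package_txn: plan.active_after_time <= Timestamp::now_milliseconds().
fn can_apply(plan: &UpgradePlan, now_ms: u64) -> bool {
    plan.active_after_time <= now_ms
}

fn main() {
    let plan = submit_plan(1_000, 86_400_000, vec![0xAB], 2);
    assert!(!can_apply(&plan, 1_000)); // submitted just now: too early
    assert!(can_apply(&plan, 1_000 + 86_400_000)); // one day later: package can be applied
    let _ = (&plan.package_hash, plan.version);
}
```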
 
@@ -733,7 +803,7 @@ assert(Option::is_some(&plan_opt), Errors::invalid_argument(EUPGRADE_PLAN_IS_NONE)); let plan = Option::borrow(&plan_opt); assert(*&plan.package_hash == package_hash, Errors::invalid_argument(EPACKAGE_HASH_INCORRECT)); - assert(plan.active_after_number <= Block::get_current_block_number(), Errors::invalid_argument(EACTIVE_TIME_INCORRECT)); + assert(plan.active_after_time <= Timestamp::now_milliseconds(), Errors::invalid_argument(EACTIVE_TIME_INCORRECT)); }else if(strategy == STRATEGY_NEW_MODULE){ //do check at VM runtime. }else if(strategy == STRATEGY_FREEZE){ @@ -854,7 +924,7 @@ Package txn finished, and clean UpgradePlan ### Function `update_module_upgrade_strategy` -
public fun update_module_upgrade_strategy(account: &signer, strategy: u8)
+public fun update_module_upgrade_strategy(account: &signer, strategy: u8, min_time: Option::Option<u64>)
 
@@ -865,7 +935,7 @@ Package txn finished, and clean UpgradePlan aborts_if !exists<ModuleUpgradeStrategy>(Signer::address_of(account)) && strategy == 0; aborts_if strategy == 1 && exists<UpgradePlanCapability>(Signer::address_of(account)); aborts_if strategy == 1 && !exists<Config::ModifyConfigCapabilityHolder<Version::Version>>(Signer::address_of(account)); - + let holder = global<Config::ModifyConfigCapabilityHolder<Version::Version>>(Signer::address_of(account)); aborts_if strategy == 1 && Option::spec_is_none<Config::ModifyConfigCapability<Version::Version>>(holder.cap); aborts_if strategy == 1 && exists<TwoPhaseUpgrade>(Signer::address_of(account)); @@ -914,7 +984,7 @@ Package txn finished, and clean UpgradePlan ### Function `submit_upgrade_plan` -
public fun submit_upgrade_plan(account: &signer, package_hash: vector<u8>, version: u64, active_after_number: u64)
+public fun submit_upgrade_plan(account: &signer, package_hash: vector<u8>, version: u64)
 
@@ -932,13 +1002,13 @@ Package txn finished, and clean UpgradePlan ### Function `submit_upgrade_plan_with_cap` -
public fun submit_upgrade_plan_with_cap(cap: &PackageTxnManager::UpgradePlanCapability, package_hash: vector<u8>, version: u64, active_after_number: u64)
+public fun submit_upgrade_plan_with_cap(cap: &PackageTxnManager::UpgradePlanCapability, package_hash: vector<u8>, version: u64)
 
-include SubmitUpgradePlanWithCapAbortsIf{account: cap.account_address, active_after_number};
+include SubmitUpgradePlanWithCapAbortsIf{account: cap.account_address};
 ensures Option::spec_is_some(global<TwoPhaseUpgrade>(cap.account_address).plan);
 
@@ -950,12 +1020,11 @@ Package txn finished, and clean UpgradePlan
schema SubmitUpgradePlanWithCapAbortsIf {
     account: address;
-    active_after_number: u64;
-    aborts_if !exists<Block::BlockMetadata>(CoreAddresses::GENESIS_ADDRESS());
-    aborts_if active_after_number < global<Block::BlockMetadata>(CoreAddresses::GENESIS_ADDRESS()).number;
     aborts_if !exists<ModuleUpgradeStrategy>(account);
     aborts_if global<ModuleUpgradeStrategy>(account).strategy != 1;
     aborts_if !exists<TwoPhaseUpgrade>(account);
+    aborts_if !exists<Timestamp::CurrentTimeMilliseconds>(CoreAddresses::GENESIS_ADDRESS());
+    aborts_if Timestamp::now_milliseconds() + global<TwoPhaseUpgrade>(account).config.min_time_limit > max_u64();
 }
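The final `aborts_if` in the schema above guards the `u64` addition `Timestamp::now_milliseconds() + min_time_limit` against overflow. An equivalent guard in Rust would be a `checked_add` (illustrative sketch, with `None` standing in for the Move abort):

```rust
// Illustrative only: the Move spec aborts when the sum would exceed max_u64();
// in Rust the same condition is expressed with checked_add.
fn active_after_time(now_ms: u64, min_time_limit: u64) -> Option<u64> {
    now_ms.checked_add(min_time_limit)
}

fn main() {
    assert_eq!(active_after_time(10, 5), Some(15));
    assert_eq!(active_after_time(u64::MAX, 1), None); // would abort on-chain
}
```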
 
@@ -1099,8 +1168,8 @@ Package txn finished, and clean UpgradePlan aborts_if spec_get_module_upgrade_strategy(package_address) == 3; aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_is_none(spec_get_upgrade_plan(package_address)); aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).package_hash != package_hash; - aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && !exists<Block::BlockMetadata>(CoreAddresses::GENESIS_ADDRESS()); - aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_number > global<Block::BlockMetadata>(CoreAddresses::GENESIS_ADDRESS()).number; + aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && !exists<Timestamp::CurrentTimeMilliseconds>(CoreAddresses::GENESIS_ADDRESS()); + aborts_if spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_time > Timestamp::now_milliseconds(); }
@@ -1118,8 +1187,8 @@ Package txn finished, and clean UpgradePlan aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 3; aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_is_none(spec_get_upgrade_plan(package_address)); aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).package_hash != package_hash; - aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && !exists<Block::BlockMetadata>(CoreAddresses::GENESIS_ADDRESS()); - aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_number > global<Block::BlockMetadata>(CoreAddresses::GENESIS_ADDRESS()).number; + aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && !exists<Timestamp::CurrentTimeMilliseconds>(CoreAddresses::GENESIS_ADDRESS()); + aborts_if is_package && spec_get_module_upgrade_strategy(package_address) == 1 && Option::spec_get(spec_get_upgrade_plan(package_address)).active_after_time > Timestamp::now_milliseconds(); }
@@ -1137,7 +1206,7 @@ Package txn finished, and clean UpgradePlan
aborts_if !exists<TwoPhaseUpgrade>(package_address);
-
+
 let tpu = global<TwoPhaseUpgrade>(package_address);
 aborts_if Option::spec_is_some(tpu.plan) && !exists<Config::Config<Version::Version>>(tpu.version_cap.account_address);
 
diff --git a/vm/stdlib/modules/doc/UpgradeModuleDaoProposal.md b/vm/stdlib/modules/doc/UpgradeModuleDaoProposal.md index adbb3f6659..ecfd2a1504 100644 --- a/vm/stdlib/modules/doc/UpgradeModuleDaoProposal.md +++ b/vm/stdlib/modules/doc/UpgradeModuleDaoProposal.md @@ -17,8 +17,7 @@ - [Function `submit_module_upgrade_plan`](#@Specification_1_submit_module_upgrade_plan) -
-use 0x1::Block;
 use 0x1::Dao;
 use 0x1::Errors;
 use 0x1::PackageTxnManager;
 use 0x1::Signer;
@@ -223,7 +222,6 @@ propose a module upgrade, called by proposer.
         &cap.cap,
         package_hash,
         version,
-        Block::get_current_block_number(),
     );
 }
 
diff --git a/vm/stdlib/transaction_scripts/doc/genesis_init.md b/vm/stdlib/transaction_scripts/doc/genesis_init.md index 52f9b28de2..dfeacdaad0 100644 --- a/vm/stdlib/transaction_scripts/doc/genesis_init.md +++ b/vm/stdlib/transaction_scripts/doc/genesis_init.md @@ -18,6 +18,7 @@ use 0x1::CoreAddresses; use 0x1::DummyToken; use 0x1::Epoch; +use 0x1::Option; use 0x1::PackageTxnManager; use 0x1::STC; use 0x1::Signer; @@ -155,6 +156,7 @@ PackageTxnManager::update_module_upgrade_strategy( &genesis_account, PackageTxnManager::get_strategy_two_phase(), + Option::some(0), ); // stc should be initialized after genesis_account's module upgrade strategy set. { diff --git a/vm/stdlib/transaction_scripts/doc/update_module_upgrade_strategy.md b/vm/stdlib/transaction_scripts/doc/update_module_upgrade_strategy.md index 9a79853b38..874ce4c2f8 100644 --- a/vm/stdlib/transaction_scripts/doc/update_module_upgrade_strategy.md +++ b/vm/stdlib/transaction_scripts/doc/update_module_upgrade_strategy.md @@ -8,6 +8,7 @@
use 0x1::Config;
+use 0x1::Option;
 use 0x1::PackageTxnManager;
 use 0x1::Signer;
 use 0x1::Version;
@@ -40,6 +41,7 @@
     PackageTxnManager::update_module_upgrade_strategy(
         signer,
         strategy,
+        Option::none<u64>(),
     );
 }
 
diff --git a/vm/stdlib/transaction_scripts/update_module_upgrade_strategy.move b/vm/stdlib/transaction_scripts/update_module_upgrade_strategy.move index 6863ea04d3..af61f195a0 100644 --- a/vm/stdlib/transaction_scripts/update_module_upgrade_strategy.move +++ b/vm/stdlib/transaction_scripts/update_module_upgrade_strategy.move @@ -3,6 +3,7 @@ script { use 0x1::Config; use 0x1::Signer; use 0x1::Version; + use 0x1::Option; fun update_module_upgrade_strategy( signer: &signer, @@ -19,6 +20,7 @@ script { PackageTxnManager::update_module_upgrade_strategy( signer, strategy, + Option::none(), ); } } diff --git a/vm/transaction-builder-generator/Cargo.toml b/vm/transaction-builder-generator/Cargo.toml index 6d2d3196ca..6e3c32723f 100644 --- a/vm/transaction-builder-generator/Cargo.toml +++ b/vm/transaction-builder-generator/Cargo.toml @@ -17,8 +17,8 @@ serde-generate = {git="https://github.com/starcoinorg/serde-reflection" , rev="1 serde-reflection = {git="https://github.com/starcoinorg/serde-reflection" , rev="128903725d0e057f1c8675b413995cf2e4bdf26d"} starcoin-vm-types = { path = "../types"} -diem-workspace-hack = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } -move-core-types = {git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +diem-workspace-hack = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } +move-core-types = {git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } bcs = "0.1.2" [dev-dependencies] diff --git a/vm/types/Cargo.toml b/vm/types/Cargo.toml index e4084d1da8..9f9a8b6671 100644 --- a/vm/types/Cargo.toml +++ b/vm/types/Cargo.toml @@ -22,11 +22,11 @@ log = "0.4.11" proptest = { version = "0.10.1", default-features = false, optional = true } proptest-derive = { version = "0.2.0", default-features = false, optional = true } -move-core-types = { git = "https://github.com/starcoinorg/diem", rev = "a69729b2d54af44d2f779bcf167e3f6d681a9821" } -move-vm-types = { git = "https://github.com/starcoinorg/diem", rev = "a69729b2d54af44d2f779bcf167e3f6d681a9821" } -bytecode-verifier = { git = "https://github.com/starcoinorg/diem", rev = "a69729b2d54af44d2f779bcf167e3f6d681a9821" } -vm = { git = "https://github.com/starcoinorg/diem", rev = "a69729b2d54af44d2f779bcf167e3f6d681a9821"} -move-ir-types = { git = "https://github.com/starcoinorg/diem", rev = "a69729b2d54af44d2f779bcf167e3f6d681a9821" } +move-core-types = { git = "https://github.com/starcoinorg/diem", rev = "89223522186cb4cd39e21e44fb2da745f7a45c7a" } +move-vm-types = { git = "https://github.com/starcoinorg/diem", rev = "89223522186cb4cd39e21e44fb2da745f7a45c7a" } +bytecode-verifier = { git = "https://github.com/starcoinorg/diem", rev = "89223522186cb4cd39e21e44fb2da745f7a45c7a" } +vm = { git = "https://github.com/starcoinorg/diem", rev = "89223522186cb4cd39e21e44fb2da745f7a45c7a"} +move-ir-types = { git = "https://github.com/starcoinorg/diem", rev = "89223522186cb4cd39e21e44fb2da745f7a45c7a" } scs = { package = "starcoin-canonical-serialization", path = "../../commons/scs" } starcoin-proptest-helpers = { path = "../../commons/proptest-helpers", optional = true } @@ -34,11 +34,12 @@ starcoin-crypto = { path = "../../commons/crypto" } starcoin-uint = { path = "../../types/uint" } starcoin-accumulator = { path = "../../core/accumulator"} network-p2p-types = { path = "../../network-p2p/types"} +forkable-jellyfish-merkle = { path = 
"../../core/forkable-jellyfish-merkle"} [dev-dependencies] proptest = "0.10.1" proptest-derive = "0.2.0" -vm = { git = "https://github.com/starcoinorg/diem", rev = "a69729b2d54af44d2f779bcf167e3f6d681a9821", features = ["fuzzing"]} +vm = { git = "https://github.com/starcoinorg/diem", rev = "89223522186cb4cd39e21e44fb2da745f7a45c7a", features = ["fuzzing"]} starcoin-crypto = { path = "../../commons/crypto", features = ["fuzzing"] } starcoin-proptest-helpers = { path = "../../commons/proptest-helpers"} diff --git a/vm/types/src/access_path.rs b/vm/types/src/access_path.rs index 27ff86b6d9..9390f0c358 100644 --- a/vm/types/src/access_path.rs +++ b/vm/types/src/access_path.rs @@ -39,106 +39,151 @@ //! `path` will be set to "/a" and use the `get_prefix()` method from statedb use crate::account_address::AccountAddress; -use anyhow::Result; -use move_core_types::language_storage::{ModuleId, ResourceKey, StructTag, CODE_TAG, RESOURCE_TAG}; +use crate::identifier::Identifier; +use crate::parser::parse_struct_tag; +use anyhow::{bail, Result}; +use forkable_jellyfish_merkle::RawKey; +use move_core_types::language_storage::{ModuleId, ResourceKey, StructTag, TypeTag}; use num_enum::{IntoPrimitive, TryFromPrimitive}; #[cfg(any(test, feature = "fuzzing"))] +use proptest::{collection::vec, prelude::*}; +#[cfg(any(test, feature = "fuzzing"))] use proptest_derive::Arbitrary; -use serde::{Deserialize, Serialize}; -use serde_helpers::{deserialize_binary, serialize_binary}; -use starcoin_crypto::hash::{CryptoHash, CryptoHasher, HashValue}; -use std::convert::TryFrom; +use rand::prelude::{Distribution, SliceRandom}; +use rand::rngs::OsRng; +use rand::Rng; +use serde::de::Error; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use starcoin_crypto::hash::HashValue; use std::fmt; -#[derive( - Clone, Eq, PartialEq, Hash, Serialize, Deserialize, Ord, PartialOrd, CryptoHasher, CryptoHash, -)] +use std::str::FromStr; + +#[derive(Clone, Eq, PartialEq, Hash, Ord, PartialOrd)] #[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))] pub struct AccessPath { pub address: AccountAddress, - #[serde( - deserialize_with = "deserialize_binary", - serialize_with = "serialize_binary" - )] - pub path: Vec, + pub path: DataPath, } impl AccessPath { - pub const CODE_TAG: u8 = 0; - pub const RESOURCE_TAG: u8 = 1; - - pub fn new(address: AccountAddress, path: Vec) -> Self { + pub fn new(address: AccountAddress, path: DataPath) -> Self { AccessPath { address, path } } - pub fn resource_access_vec(tag: &StructTag) -> Vec { - tag.access_vector() + pub fn resource_access_path(address: AccountAddress, struct_tag: StructTag) -> Self { + Self::new(address, Self::resource_data_path(struct_tag)) } - /// Convert Accesses into a byte offset which would be used by the storage layer to resolve - /// where fields are stored. 
- pub fn resource_access_path(key: &ResourceKey) -> AccessPath { - let path = AccessPath::resource_access_vec(&key.type_()); - AccessPath { - address: key.address().to_owned(), - path, - } + pub fn code_access_path(address: AccountAddress, module_name: Identifier) -> AccessPath { + AccessPath::new(address, Self::code_data_path(module_name)) + } + + pub fn resource_data_path(tag: StructTag) -> DataPath { + DataPath::Resource(tag) + } + + pub fn code_data_path(module_name: ModuleName) -> DataPath { + DataPath::Code(module_name) } - fn code_access_path_vec(key: &ModuleId) -> Vec { - key.access_vector() + pub fn into_inner(self) -> (AccountAddress, DataPath) { + let address = self.address; + let path = self.path; + (address, path) } - pub fn code_access_path(key: &ModuleId) -> AccessPath { - let path = AccessPath::code_access_path_vec(key); - AccessPath { - address: *key.address(), - path, + pub fn random_code() -> AccessPath { + AccessPath::new(AccountAddress::random(), DataPath::Code(random_identity())) + } + + pub fn random_resource() -> AccessPath { + let struct_tag = StructTag { + address: AccountAddress::random(), + module: random_identity(), + name: random_identity(), + type_params: vec![], + }; + AccessPath::new(AccountAddress::random(), DataPath::Resource(struct_tag)) + } +} + +impl Serialize for AccessPath { + fn serialize(&self, serializer: S) -> Result<::Ok, ::Error> + where + S: Serializer, + { + if serializer.is_human_readable() { + serializer.serialize_str(self.to_string().as_str()) + } else { + serializer.serialize_newtype_struct("AccessPath", &(self.address, self.path.clone())) } } } +impl<'de> Deserialize<'de> for AccessPath { + fn deserialize(deserializer: D) -> std::result::Result + where + D: Deserializer<'de>, + { + if deserializer.is_human_readable() { + let s = ::deserialize(deserializer)?; + AccessPath::from_str(&s).map_err(D::Error::custom) + } else { + // In order to preserve the Serde data model and help analysis tools, + // make sure to wrap our value in a container with the same name + // as the original type. 
+ #[derive(::serde::Deserialize)] + #[serde(rename = "AccessPath")] + struct Value(AccountAddress, DataPath); + let value = Value::deserialize(deserializer)?; + Ok(AccessPath::new(value.0, value.1)) + } + } +} + +//TODO move to a suitable mod +struct IdentifierSymbols; + +impl Distribution for IdentifierSymbols { + fn sample(&self, rng: &mut R) -> char { + //TODO add more valid identity char + *b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + .choose(rng) + .unwrap() as char + } +} + +fn random_identity() -> Identifier { + let rng = OsRng; + let id: String = rng.sample_iter(&IdentifierSymbols).take(7).collect(); + Identifier::new(id).unwrap() +} + impl fmt::Debug for AccessPath { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result { write!( f, "AccessPath {{ address: {:x}, path: {} }}", - self.address, - hex::encode(&self.path) + self.address, self.path ) } } impl fmt::Display for AccessPath { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if self.path.len() < 1 + HashValue::LENGTH { - write!(f, "{:?}", self) - } else { - write!(f, "AccessPath {{ address: {:x}, ", self.address)?; - match self.path[0] { - RESOURCE_TAG => write!(f, "type: Resource, ")?, - CODE_TAG => write!(f, "type: Module, ")?, - tag => write!(f, "type: {:?}, ", tag)?, - }; - write!( - f, - "hash: {:?}, ", - hex::encode(&self.path[1..=HashValue::LENGTH]) - )?; - write!( - f, - "suffix: {:?} }} ", - String::from_utf8_lossy(&self.path[1 + HashValue::LENGTH..]) - ) - } + write!(f, "{}/{}", self.address, self.path) } } impl From<&ModuleId> for AccessPath { fn from(id: &ModuleId) -> AccessPath { - AccessPath { - address: *id.address(), - path: id.access_vector(), - } + AccessPath::code_access_path(*id.address(), id.name().to_owned()) + } +} + +impl From<&ResourceKey> for AccessPath { + fn from(key: &ResourceKey) -> AccessPath { + AccessPath::resource_access_path(key.address(), key.type_().clone()) } } @@ -182,59 +227,201 @@ impl DataType { pub fn storage_index(self) -> usize { self.type_index() as usize } + + pub fn from_index(idx: u8) -> Result { + Ok(Self::try_from_primitive(idx)?) 
+ } +} + +#[cfg(any(test, feature = "fuzzing"))] +impl Arbitrary for DataType { + type Parameters = (); + fn arbitrary_with(_args: ()) -> Self::Strategy { + prop_oneof![Just(DataType::CODE), Just(DataType::RESOURCE),].boxed() + } + + type Strategy = BoxedStrategy; } -pub fn into_inner(access_path: AccessPath) -> Result<(AccountAddress, DataType, HashValue)> { - let address = access_path.address; - let path = &access_path.path; - let data_type = DataType::try_from(path[0])?; - let hash = access_path.hash(); - Ok((address, data_type, hash)) +pub type ModuleName = Identifier; + +#[derive(Clone, Eq, PartialEq, Hash, Serialize, Deserialize, Ord, PartialOrd, Debug)] +pub enum DataPath { + Code(ModuleName), + Resource(StructTag), } -pub fn new(address: AccountAddress, data_type: DataType, hash: HashValue) -> AccessPath { - let mut path = vec![data_type.into()]; - path.extend(hash.to_vec()); - AccessPath::new(address, path) +#[cfg(any(test, feature = "fuzzing"))] +impl Arbitrary for DataPath { + type Parameters = (); + fn arbitrary_with(_args: ()) -> Self::Strategy { + prop_oneof![ + (any::()).prop_map(DataPath::Code), + ( + any::(), + any::(), + any::(), + vec(any::(), 0..4), + ) + .prop_map(|(address, module, name, type_params)| DataPath::Resource( + StructTag { + address, + module, + name, + type_params, + } + )), + ] + .boxed() + } + + type Strategy = BoxedStrategy; } -pub fn random_code() -> AccessPath { - new( - AccountAddress::random(), - DataType::CODE, - HashValue::random(), - ) +impl DataPath { + pub fn is_code(&self) -> bool { + matches!(self, DataPath::Code(_)) + } + pub fn is_resource(&self) -> bool { + matches!(self, DataPath::Resource(_)) + } + pub fn as_struct_tag(&self) -> Option<&StructTag> { + match self { + DataPath::Resource(struct_tag) => Some(struct_tag), + _ => None, + } + } + pub fn data_type(&self) -> DataType { + match self { + DataPath::Code(_) => DataType::CODE, + DataPath::Resource(_) => DataType::RESOURCE, + } + } + + pub fn key_hash(&self) -> HashValue { + match self { + DataPath::Resource(struct_tag) => struct_tag.key_hash(), + DataPath::Code(module_name) => module_name.key_hash(), + } + } } -pub fn random_resource() -> AccessPath { - new( - AccountAddress::random(), - DataType::RESOURCE, - HashValue::random(), - ) +impl fmt::Display for DataPath { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let storage_index = self.data_type().storage_index(); + match self { + DataPath::Resource(struct_tag) => { + write!(f, "{}/{}", storage_index, struct_tag) + } + DataPath::Code(module_name) => { + write!(f, "{}/{}", storage_index, module_name) + } + } + } +} + +impl FromStr for AccessPath { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let parts = s.split('/').collect::>(); + if parts.len() != 3 { + bail!("Invalid access_path string: {}", s); + } + let address = AccountAddress::from_str(parts[0])?; + let data_type = DataType::from_index(parts[1].parse()?)?; + let data_path = match data_type { + DataType::CODE => AccessPath::code_data_path(Identifier::new(parts[2])?), + DataType::RESOURCE => AccessPath::resource_data_path(parse_struct_tag(parts[2])?), + }; + Ok(AccessPath::new(address, data_path)) + } } #[cfg(test)] mod tests { use super::*; - use crate::account_config::AccountResource; - use crate::language_storage::ModuleId; - use crate::move_resource::MoveResource; - use move_core_types::identifier::Identifier; #[test] fn test_data_type() { - let (_address, data_type, _hash) = into_inner(AccessPath::new( - AccountAddress::random(), - 
AccountResource::resource_path(), - )) - .unwrap(); - assert_eq!(data_type, DataType::RESOURCE); - - let (_address, data_type, _hash) = into_inner(AccessPath::code_access_path( - &ModuleId::new(AccountAddress::random(), Identifier::new("Test").unwrap()), - )) - .unwrap(); - assert_eq!(data_type, DataType::CODE); + let (_address, data_path) = AccessPath::random_resource().into_inner(); + assert_eq!(data_path.data_type(), DataType::RESOURCE); + + let (_address, data_path) = AccessPath::random_code().into_inner(); + assert_eq!(data_path.data_type(), DataType::CODE); + } + + #[test] + fn test_access_path_str_valid() { + let r1 = format!( + "{}/1/0x00000000000000000000000000000001::Account::Account", + AccountAddress::random() + ); + let test_cases = vec!["0x00000000000000000000000000000000/0/Account", + "0x00000000000000000000000000000001/0/Account", + "0x00000000000000000000000000000001/1/0x00000000000000000000000000000001::Account::Account", + "0x00000000000000000000000000000001/1/0x00000000000000000000000000000001::Account::Balance<0x00000000000000000000000000000001::STC::STC>", + r1.as_str()]; + for case in test_cases { + let access_path = AccessPath::from_str(case).unwrap(); + assert_eq!(case.to_owned(), access_path.to_string()) + } + } + + #[test] + fn test_access_path_str_invalid() { + //invalid address + let r1 = format!( + "{}00/1/0x00000000000000000000000000000001::Account::Account", + AccountAddress::random() + ); + let test_cases = vec![ + // invalid struct tag + "0x00000000000000000000000000000001/1/Account", + // invalid module name + "0x00000000000000000000000000000001/0/0x00000000000000000000000000000001::Account::Account", + //invalid data type + "0x00000000000000000000000000000001/3/Account", + //too many `/` + "0x00000000000000000000000000000001/0/Account/xxx", + "0x00000000000000000000000000000001/0//Account", + //too less '`' + "0x00000000000000000000000000000001/1", + r1.as_str()]; + for case in test_cases { + let access_path = AccessPath::from_str(case); + assert!( + access_path.is_err(), + "expect err in access_path case: {}, but got ok", + case + ); + } + } + + #[test] + fn test_bad_case_from_protest() { + //The struct name contains '_' will will encounter parse error + //This may be the parser error, or the identity's arbitrary error + let access_path_str = + "0x00000000000000000000000000000001/1/0x00000000000000000000000000000001::a::A_"; + let access_path = AccessPath::from_str(access_path_str); + assert!(access_path.is_err()); + } + + proptest! { + //TODO enable this test, when test_bad_case_from_protest is fixed. 
+ #[ignore] + #[test] + fn test_access_path(access_path in any::()){ + let bytes = scs::to_bytes(&access_path).expect("access_path serialize should ok."); + let access_path2 = scs::from_bytes::(bytes.as_slice()).expect("access_path deserialize should ok."); + prop_assert_eq!(&access_path, &access_path2); + let access_path_str = access_path.to_string(); + let access_path3 = AccessPath::from_str(access_path_str.as_str()).expect("access_path from str should ok"); + prop_assert_eq!(&access_path, &access_path3); + let json_str = serde_json::to_string(&access_path).expect("access_path to json str should ok"); + let access_path4 = serde_json::from_str::(json_str.as_str()).expect("access_path from json str should ok"); + prop_assert_eq!(&access_path, &access_path4); + } } } diff --git a/vm/types/src/account_config/events/accept_token_payment.rs b/vm/types/src/account_config/events/accept_token_payment.rs index 19ddb327ca..1b668c50f5 100644 --- a/vm/types/src/account_config/events/accept_token_payment.rs +++ b/vm/types/src/account_config/events/accept_token_payment.rs @@ -1,24 +1,15 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::account_config::{constants::ACCOUNT_MODULE_NAME, resources::AccountResource}; +use crate::account_config::constants::ACCOUNT_MODULE_NAME; use crate::contract_event::ContractEvent; use crate::language_storage::TypeTag; +use crate::move_resource::MoveResource; use crate::token::token_code::TokenCode; use anyhow::{Error, Result}; -use move_core_types::move_resource::MoveResource; -use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; use std::convert::TryFrom; -/// The path to the accept token event counter for an Account resource. -/// It can be used to query the event DB for the given event. -pub static ACCEPT_TOKEN_EVENT_PATH: Lazy> = Lazy::new(|| { - let mut path = AccountResource::resource_path(); - path.extend_from_slice(b"/accept_token_events_count/"); - path -}); - /// Struct that represents a AcceptTokenEvent. #[derive(Debug, Serialize, Deserialize)] pub struct AcceptTokenEvent { diff --git a/vm/types/src/account_config/events/account_deposit.rs b/vm/types/src/account_config/events/account_deposit.rs index 9987c77ebf..d949055bad 100644 --- a/vm/types/src/account_config/events/account_deposit.rs +++ b/vm/types/src/account_config/events/account_deposit.rs @@ -1,21 +1,12 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::account_config::{constants::ACCOUNT_MODULE_NAME, resources::AccountResource}; +use crate::account_config::constants::ACCOUNT_MODULE_NAME; +use crate::move_resource::MoveResource; use crate::token::token_code::TokenCode; use anyhow::Result; -use move_core_types::move_resource::MoveResource; -use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; -/// Returns the path to the deposit for an Account resource. -/// It can be used to query the event DB for the given event. -pub static ACCOUNT_DEPOSIT_EVENT_PATH: Lazy> = Lazy::new(|| { - let mut path = AccountResource::resource_path(); - path.extend_from_slice(b"/deposit_events_count/"); - path -}); - /// Struct that represents a ReceivedPaymentEvent. 
#[derive(Debug, Serialize, Deserialize)] pub struct DepositEvent { diff --git a/vm/types/src/account_config/events/account_withdraw.rs b/vm/types/src/account_config/events/account_withdraw.rs index f3637bf60e..5c69686eaa 100644 --- a/vm/types/src/account_config/events/account_withdraw.rs +++ b/vm/types/src/account_config/events/account_withdraw.rs @@ -1,21 +1,12 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::account_config::{constants::ACCOUNT_MODULE_NAME, resources::AccountResource}; +use crate::account_config::constants::ACCOUNT_MODULE_NAME; +use crate::move_resource::MoveResource; use crate::token::token_code::TokenCode; use anyhow::Result; -use move_core_types::move_resource::MoveResource; -use once_cell::sync::Lazy; use serde::{Deserialize, Serialize}; -/// The path to the withdraw event counter for an Account resource. -/// It can be used to query the event DB for the given event. -pub static ACCOUNT_WITHDRAW_EVENT_PATH: Lazy> = Lazy::new(|| { - let mut path = AccountResource::resource_path(); - path.extend_from_slice(b"/withdraw_events_count/"); - path -}); - /// Struct that represents a SentPaymentEvent. #[derive(Debug, Serialize, Deserialize)] pub struct WithdrawEvent { diff --git a/vm/types/src/account_config/events/burn.rs b/vm/types/src/account_config/events/burn.rs index c6b4232e26..b84df432f0 100644 --- a/vm/types/src/account_config/events/burn.rs +++ b/vm/types/src/account_config/events/burn.rs @@ -2,9 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 use crate::account_config::TOKEN_MODULE_NAME; +use crate::move_resource::MoveResource; use crate::token::token_code::TokenCode; use anyhow::Result; -use move_core_types::move_resource::MoveResource; use serde::{Deserialize, Serialize}; /// Struct that represents a BurnEvent. diff --git a/vm/types/src/account_config/events/dao.rs b/vm/types/src/account_config/events/dao.rs index 4834a1707f..6c019a3a0e 100644 --- a/vm/types/src/account_config/events/dao.rs +++ b/vm/types/src/account_config/events/dao.rs @@ -1,6 +1,6 @@ +use crate::move_resource::MoveResource; use anyhow::Result; use move_core_types::account_address::AccountAddress; -use move_core_types::move_resource::MoveResource; use serde::{Deserialize, Serialize}; #[derive(Clone, Copy, Serialize, Deserialize)] diff --git a/vm/types/src/account_config/events/mint.rs b/vm/types/src/account_config/events/mint.rs index cd50c70f70..6474d6caaf 100644 --- a/vm/types/src/account_config/events/mint.rs +++ b/vm/types/src/account_config/events/mint.rs @@ -2,9 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 use crate::account_config::TOKEN_MODULE_NAME; +use crate::move_resource::MoveResource; use crate::token::token_code::TokenCode; use anyhow::Result; -use move_core_types::move_resource::MoveResource; use serde::{Deserialize, Serialize}; /// Struct that represents a MintEvent. 
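Stepping back from the individual hunks: the `access_path.rs` rewrite above replaces the opaque `Vec<u8>` path with a structured `DataPath` (`Code(ModuleName)` or `Resource(StructTag)`) plus a canonical `address/storage_index/name` string form, which is what the new `Display`/`FromStr` tests exercise; the `*_EVENT_PATH` byte-path constants deleted in the event modules were only meaningful for the old byte-vector layout. A simplified sketch of the string layout (illustrative only; plain `String`s stand in for the real `AccountAddress`, `Identifier` and `StructTag` types):

```rust
use std::fmt;

// Simplified stand-ins; the real code uses AccountAddress, Identifier and StructTag.
enum DataPath {
    Code(String),     // module name, storage index 0
    Resource(String), // canonical struct tag such as "0x1::Account::Account", storage index 1
}

impl DataPath {
    fn storage_index(&self) -> u8 {
        match self {
            DataPath::Code(_) => 0,
            DataPath::Resource(_) => 1,
        }
    }
}

struct AccessPath {
    address: String, // hex address string standing in for AccountAddress
    path: DataPath,
}

impl fmt::Display for AccessPath {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match &self.path {
            DataPath::Code(module) => module,
            DataPath::Resource(tag) => tag,
        };
        write!(f, "{}/{}/{}", self.address, self.path.storage_index(), name)
    }
}

fn main() {
    let ap = AccessPath {
        address: "0x00000000000000000000000000000001".to_string(),
        path: DataPath::Resource(
            "0x00000000000000000000000000000001::Account::Account".to_string(),
        ),
    };
    // Matches one of the valid cases in test_access_path_str_valid above.
    assert_eq!(
        ap.to_string(),
        "0x00000000000000000000000000000001/1/0x00000000000000000000000000000001::Account::Account"
    );
}
```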
diff --git a/vm/types/src/account_config/resources/account.rs b/vm/types/src/account_config/resources/account.rs index 1c6e75372f..c1066c9634 100644 --- a/vm/types/src/account_config/resources/account.rs +++ b/vm/types/src/account_config/resources/account.rs @@ -1,13 +1,13 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 +use crate::move_resource::MoveResource; use crate::{ account_config::{ constants::ACCOUNT_MODULE_NAME, KeyRotationCapabilityResource, WithdrawCapabilityResource, }, event::EventHandle, }; -use move_core_types::move_resource::MoveResource; use serde::{Deserialize, Serialize}; /// A Rust representation of an Account resource. diff --git a/vm/types/src/account_config/resources/balance.rs b/vm/types/src/account_config/resources/balance.rs index eba8237799..84506ecdc5 100644 --- a/vm/types/src/account_config/resources/balance.rs +++ b/vm/types/src/account_config/resources/balance.rs @@ -1,6 +1,7 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 +use crate::access_path::DataPath; use crate::token::token_code::TokenCode; use crate::{ access_path::AccessPath, @@ -42,8 +43,8 @@ impl BalanceResource { } // TODO: remove this once the MoveResource trait allows type arguments to `resource_path`. - pub fn access_path_for(token_type_tag: TypeTag) -> Vec { - AccessPath::resource_access_vec(&BalanceResource::struct_tag_for_token(token_type_tag)) + pub fn access_path_for(token_type_tag: TypeTag) -> DataPath { + AccessPath::resource_data_path(BalanceResource::struct_tag_for_token(token_type_tag)) } } diff --git a/vm/types/src/account_config/resources/key_rotation_capability.rs b/vm/types/src/account_config/resources/key_rotation_capability.rs index 2d340d6a28..7678c7cbca 100644 --- a/vm/types/src/account_config/resources/key_rotation_capability.rs +++ b/vm/types/src/account_config/resources/key_rotation_capability.rs @@ -1,8 +1,8 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 +use crate::move_resource::MoveResource; use crate::{account_address::AccountAddress, account_config::constants::ACCOUNT_MODULE_NAME}; -use move_core_types::move_resource::MoveResource; use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize)] diff --git a/vm/types/src/account_config/resources/module_upgrade_strategy.rs b/vm/types/src/account_config/resources/module_upgrade_strategy.rs index 2c9db83061..3692a8a415 100644 --- a/vm/types/src/account_config/resources/module_upgrade_strategy.rs +++ b/vm/types/src/account_config/resources/module_upgrade_strategy.rs @@ -3,7 +3,7 @@ use crate::access_path::AccessPath; use crate::account_address::AccountAddress; -use move_core_types::move_resource::MoveResource; +use crate::move_resource::MoveResource; use serde::{Deserialize, Serialize}; pub const _STRATEGY_ARBITRARY: u8 = 0; @@ -32,5 +32,5 @@ impl MoveResource for ModuleUpgradeStrategy { } pub fn access_path_for_module_upgrade_strategy(address: AccountAddress) -> AccessPath { - AccessPath::new(address, ModuleUpgradeStrategy::resource_path()) + AccessPath::resource_access_path(address, ModuleUpgradeStrategy::struct_tag()) } diff --git a/vm/types/src/account_config/resources/withdraw_capability.rs b/vm/types/src/account_config/resources/withdraw_capability.rs index c54b21277b..7aedf300b0 100644 --- a/vm/types/src/account_config/resources/withdraw_capability.rs +++ b/vm/types/src/account_config/resources/withdraw_capability.rs @@ -1,8 +1,8 @@ // Copyright (c) The Diem Core Contributors // 
SPDX-License-Identifier: Apache-2.0 +use crate::move_resource::MoveResource; use crate::{account_address::AccountAddress, account_config::constants::ACCOUNT_MODULE_NAME}; -use move_core_types::move_resource::MoveResource; use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize)] diff --git a/vm/types/src/contract_event.rs b/vm/types/src/contract_event.rs index be4b26b188..1ac239798e 100644 --- a/vm/types/src/contract_event.rs +++ b/vm/types/src/contract_event.rs @@ -8,8 +8,8 @@ use crate::{ account_config::{BurnEvent, DepositEvent, MintEvent, WithdrawEvent}, event::EventKey, }; +use crate::{language_storage::TypeTag, move_resource::MoveResource}; use anyhow::{Error, Result}; -use move_core_types::{language_storage::TypeTag, move_resource::MoveResource}; use serde::{Deserialize, Serialize}; use starcoin_crypto::hash::{CryptoHash, CryptoHasher}; use std::{convert::TryFrom, ops::Deref}; diff --git a/vm/types/src/genesis_config.rs b/vm/types/src/genesis_config.rs index 1751249421..0b13ae2c32 100644 --- a/vm/types/src/genesis_config.rs +++ b/vm/types/src/genesis_config.rs @@ -6,6 +6,7 @@ use crate::event::EventHandle; use crate::gas_schedule::{ AbstractMemorySize, GasAlgebra, GasCarrier, GasConstants, GasPrice, GasUnits, }; +use crate::move_resource::MoveResource; use crate::on_chain_config::DaoConfig; use crate::on_chain_config::{ ConsensusConfig, VMConfig, VMPublishingOption, Version, INITIAL_GAS_SCHEDULE, @@ -16,7 +17,6 @@ use crate::token::stc::STCUnit; use crate::token::token_value::TokenValue; use crate::transaction::{RawUserTransaction, SignedUserTransaction}; use anyhow::{bail, ensure, format_err, Result}; -use move_core_types::move_resource::MoveResource; use network_p2p_types::MultiaddrWithPeerId; use num_enum::{IntoPrimitive, TryFromPrimitive}; use once_cell::sync::Lazy; diff --git a/vm/types/src/lib.rs b/vm/types/src/lib.rs index 02c8194f5d..081abdd50e 100644 --- a/vm/types/src/lib.rs +++ b/vm/types/src/lib.rs @@ -43,9 +43,7 @@ pub mod language_storage { }; } -pub mod move_resource { - pub use move_core_types::move_resource::MoveResource; -} +pub mod move_resource; pub mod transaction_argument { pub use move_core_types::transaction_argument::*; @@ -53,13 +51,18 @@ pub mod transaction_argument { pub mod parser { use crate::language_storage::TypeTag; - use anyhow::{format_err, Result}; - pub use move_core_types::parser::{parse_transaction_argument, parse_type_tags}; + use anyhow::{bail, Result}; + use move_core_types::language_storage::StructTag; + pub use move_core_types::parser::{ + parse_transaction_argument, parse_type_tag, parse_type_tags, + }; - pub fn parse_type_tag(s: &str) -> Result { - parse_type_tags(s)? 
- .pop() - .ok_or_else(|| format_err!("parse type fail from {}", s)) + pub fn parse_struct_tag(s: &str) -> Result { + let type_tag = parse_type_tag(s)?; + match type_tag { + TypeTag::Struct(st) => Ok(st), + t => bail!("expect a struct tag, found: {:?}", t), + } } } diff --git a/vm/types/src/move_resource.rs b/vm/types/src/move_resource.rs new file mode 100644 index 0000000000..c7181166da --- /dev/null +++ b/vm/types/src/move_resource.rs @@ -0,0 +1,42 @@ +// Copyright (c) The Starcoin Core Contributors +// SPDX-License-Identifier: Apache-2.0 + +use crate::access_path::DataPath; +use crate::{ + identifier::{IdentStr, Identifier}, + language_storage::{StructTag, TypeTag}, +}; + +pub trait MoveResource { + const MODULE_NAME: &'static str; + const STRUCT_NAME: &'static str; + + fn module_identifier() -> Identifier { + IdentStr::new(Self::MODULE_NAME) + .expect("failed to get IdentStr for Move module") + .to_owned() + } + + fn struct_identifier() -> Identifier { + IdentStr::new(Self::STRUCT_NAME) + .expect("failed to get IdentStr for Move struct") + .to_owned() + } + + fn type_params() -> Vec { + vec![] + } + + fn struct_tag() -> StructTag { + StructTag { + address: crate::language_storage::CORE_CODE_ADDRESS, + name: Self::struct_identifier(), + module: Self::module_identifier(), + type_params: Self::type_params(), + } + } + + fn resource_path() -> DataPath { + DataPath::Resource(Self::struct_tag()) + } +} diff --git a/vm/types/src/on_chain_config/mod.rs b/vm/types/src/on_chain_config/mod.rs index 90889f118c..66f9a52a0d 100644 --- a/vm/types/src/on_chain_config/mod.rs +++ b/vm/types/src/on_chain_config/mod.rs @@ -151,9 +151,9 @@ pub fn access_path_for_config( config_name: Identifier, params: Vec, ) -> AccessPath { - AccessPath::new( + AccessPath::resource_access_path( address, - AccessPath::resource_access_vec(&StructTag { + StructTag { address: CORE_CODE_ADDRESS, module: Identifier::new("Config").unwrap(), name: Identifier::new("Config").unwrap(), @@ -163,6 +163,6 @@ pub fn access_path_for_config( name: config_name, type_params: params, })], - }), + }, ) } diff --git a/vm/types/src/on_chain_resource/block_metadata.rs b/vm/types/src/on_chain_resource/block_metadata.rs index 02ed2387ec..69bd01c3d7 100644 --- a/vm/types/src/on_chain_resource/block_metadata.rs +++ b/vm/types/src/on_chain_resource/block_metadata.rs @@ -2,8 +2,8 @@ // SPDX-License-Identifier: Apache-2.0 use crate::event::EventHandle; +use crate::move_resource::MoveResource; use move_core_types::account_address::AccountAddress; -use move_core_types::move_resource::MoveResource; use serde::{Deserialize, Serialize}; use starcoin_crypto::HashValue; diff --git a/vm/types/src/on_chain_resource/epoch.rs b/vm/types/src/on_chain_resource/epoch.rs index 326a298ebb..a4d36f5065 100644 --- a/vm/types/src/on_chain_resource/epoch.rs +++ b/vm/types/src/on_chain_resource/epoch.rs @@ -1,10 +1,10 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 -use crate::access_path::AccessPath; +use crate::access_path::{AccessPath, DataPath}; use crate::event::EventHandle; use crate::genesis_config::ConsensusStrategy; +use crate::move_resource::MoveResource; use move_core_types::language_storage::{StructTag, CORE_CODE_ADDRESS}; -use move_core_types::move_resource::MoveResource; use serde::export::TryFrom; use serde::{Deserialize, Serialize}; @@ -113,8 +113,8 @@ impl Epoch { } // TODO: remove this once the MoveResource trait allows type arguments to `resource_path`. 
- pub fn access_path_for() -> Vec { - AccessPath::resource_access_vec(&Epoch::struct_tag_for_epoch()) + pub fn data_path_for() -> DataPath { + AccessPath::resource_data_path(Epoch::struct_tag_for_epoch()) } } @@ -212,7 +212,7 @@ impl EpochData { } // TODO: remove this once the MoveResource trait allows type arguments to `resource_path`. - pub fn access_path_for() -> Vec { - AccessPath::resource_access_vec(&EpochData::struct_tag_for_epoch()) + pub fn data_path_for() -> DataPath { + AccessPath::resource_data_path(EpochData::struct_tag_for_epoch()) } } diff --git a/vm/types/src/on_chain_resource/global_time.rs b/vm/types/src/on_chain_resource/global_time.rs index a1e5ace8e6..17fa2924f6 100644 --- a/vm/types/src/on_chain_resource/global_time.rs +++ b/vm/types/src/on_chain_resource/global_time.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; -use move_core_types::move_resource::MoveResource; +use crate::move_resource::MoveResource; const TIMESTAMP_MODULE_NAME: &str = "Timestamp"; diff --git a/vm/types/src/token/token_info.rs b/vm/types/src/token/token_info.rs index a1dc7cd897..691363adbc 100644 --- a/vm/types/src/token/token_info.rs +++ b/vm/types/src/token/token_info.rs @@ -1,15 +1,13 @@ // Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 +use crate::access_path::DataPath; use crate::token::token_code::TokenCode; use crate::{ access_path::AccessPath, account_config::constants::CORE_CODE_ADDRESS, event::EventHandle, }; +use crate::{language_storage::StructTag, move_resource::MoveResource}; use anyhow::Result; -use move_core_types::{ - language_storage::{ResourceKey, StructTag}, - move_resource::MoveResource, -}; use serde::{Deserialize, Serialize}; /// Struct that represents a TokenInfo resource @@ -41,15 +39,14 @@ impl TokenInfoResource { } pub fn resource_path_for(token_code: TokenCode) -> AccessPath { - let resource_key = ResourceKey::new( + AccessPath::resource_access_path( token_code.address, TokenInfoResource::struct_tag_for(token_code), - ); - AccessPath::resource_access_path(&resource_key) + ) } - pub fn access_path_for(token_code: TokenCode) -> Vec { - AccessPath::resource_access_vec(&TokenInfoResource::struct_tag_for(token_code)) + pub fn data_path_for(token_code: TokenCode) -> DataPath { + AccessPath::resource_data_path(TokenInfoResource::struct_tag_for(token_code)) } pub fn try_from_bytes(bytes: &[u8]) -> Result { diff --git a/vm/vm-runtime/Cargo.toml b/vm/vm-runtime/Cargo.toml index 237f75398a..515c37d983 100644 --- a/vm/vm-runtime/Cargo.toml +++ b/vm/vm-runtime/Cargo.toml @@ -11,7 +11,7 @@ anyhow = "1.0.37" once_cell = "1.5.2" prometheus = "0.10" starcoin-types = { path = "../../types"} -move-vm-runtime = { git = "https://github.com/starcoinorg/diem", rev="a69729b2d54af44d2f779bcf167e3f6d681a9821" } +move-vm-runtime = { git = "https://github.com/starcoinorg/diem", rev="89223522186cb4cd39e21e44fb2da745f7a45c7a" } starcoin-state-api = {path="../../state/api"} starcoin-config = { path = "../../config"} starcoin-logger = {path = "../../commons/logger"} diff --git a/vm/vm-runtime/src/access_path_cache.rs b/vm/vm-runtime/src/access_path_cache.rs index fa8c484992..799eb0bf38 100644 --- a/vm/vm-runtime/src/access_path_cache.rs +++ b/vm/vm-runtime/src/access_path_cache.rs @@ -6,8 +6,8 @@ use starcoin_vm_types::{ account_address::AccountAddress, language_storage::{ModuleId, StructTag}, }; -use std::collections::btree_map::{self, BTreeMap}; +//TODO should remove this trait? 
pub trait AccessPathCache { fn get_module_path(&mut self, module_id: ModuleId) -> AccessPath; fn get_resource_path(&mut self, address: AccountAddress, struct_tag: StructTag) -> AccessPath; @@ -19,47 +19,6 @@ impl AccessPathCache for () { } fn get_resource_path(&mut self, address: AccountAddress, struct_tag: StructTag) -> AccessPath { - AccessPath::new(address, struct_tag.access_vector()) - } -} - -#[derive(Clone)] -pub struct BTreeAccessPathCache { - modules: BTreeMap>, - resources: BTreeMap>, -} - -impl AccessPathCache for BTreeAccessPathCache { - fn get_module_path(&mut self, module_id: ModuleId) -> AccessPath { - let addr = *module_id.address(); - let access_vec = match self.modules.entry(module_id) { - btree_map::Entry::Vacant(entry) => { - let v = entry.key().access_vector(); - entry.insert(v).clone() - } - btree_map::Entry::Occupied(entry) => entry.get().clone(), - }; - AccessPath::new(addr, access_vec) - } - - fn get_resource_path(&mut self, address: AccountAddress, struct_tag: StructTag) -> AccessPath { - let access_vec = match self.resources.entry(struct_tag) { - btree_map::Entry::Vacant(entry) => { - let v = entry.key().access_vector(); - entry.insert(v).clone() - } - btree_map::Entry::Occupied(entry) => entry.get().clone(), - }; - AccessPath::new(address, access_vec) - } -} - -impl BTreeAccessPathCache { - #[allow(dead_code)] - pub fn new() -> Self { - Self { - modules: BTreeMap::new(), - resources: BTreeMap::new(), - } + AccessPath::resource_access_path(address, struct_tag) } } diff --git a/vm/vm-runtime/src/lib.rs b/vm/vm-runtime/src/lib.rs index fd682fbc8e..f748e3192b 100644 --- a/vm/vm-runtime/src/lib.rs +++ b/vm/vm-runtime/src/lib.rs @@ -12,10 +12,9 @@ mod errors; use starcoin_vm_types::access_path::AccessPath; use starcoin_vm_types::account_address::AccountAddress; -use starcoin_vm_types::language_storage::{ResourceKey, StructTag}; +use starcoin_vm_types::language_storage::StructTag; /// Get the AccessPath to a resource stored under `address` with type name `tag` fn create_access_path(address: AccountAddress, tag: StructTag) -> AccessPath { - let resource_tag = ResourceKey::new(address, tag); - AccessPath::resource_access_path(&resource_tag) + AccessPath::resource_access_path(address, tag) } diff --git a/x.toml b/x.toml index 4933c955b0..5e5b700e77 100644 --- a/x.toml +++ b/x.toml @@ -1,7 +1,6 @@ -tools = [ - ["sccache", "0.2.13"], - ["grcov", "0.5.15"], -] +[grcov.installer] +version = "0.5.15" + [system-tests] transaction-builder-generator = { path = "vm/transaction-builder-generator" } @@ -12,6 +11,26 @@ benchmarks = { path = "benchmarks" } toolchain = "nightly" flags = "-Zfeatures=all" +[cargo.sccache] +bucket = "ci-artifacts.starcoin.org" +prefix = "sccache/starcoin/" +public = true +region = "us-west-2" +endpoint = "https://s3-us-west-2.amazonaws.com" +required-cargo-home = "/opt/cargo" +required-git-home = "/opt/git/starcoin" +envs = [ + #To debug sccache uncomment the two lines below. + #["SCCACHE_ERROR_LOG","/tmp/sccache_log"], + #["SCCACHE_LOG", "sccache::compiler::compiler=trace,rusoto_core::request=trace"], +] + +[cargo.sccache.installer] +version = "0.2.14-alpha.0" +git = "https://github.com/rexhoffman/sccache.git" +git-rev = "549babdd3866aa60dae01668c42ee00bf1e8c763" +features = [ "s3" ] + [fix] [clippy] @@ -47,8 +66,19 @@ include-dev = true proc-macros-on-target = false # Don't set target or host platforms, or omitted packages, for the full set. +[workspace] + +# Regex for allowed characters in paths. Regex must have ^ and $ anchors. 
+allowed-paths = "^([a-zA-Z0-9._\\-/@:]|-)+$" + +whitespace-exceptions = [ + ".github/actions/*/dist/*", + "**/*.exp", + "**/*.errmap", +] + [workspace.enforced-attributes] -authors = ["Diem Association "] +authors = ["Starcoin Association"] license = "Apache-2.0" [workspace.banned-deps.direct] @@ -61,18 +91,6 @@ proptest = "proptest is only for testing and fuzzing" [workspace.overlay] features = ["fuzzing"] -# This is a list of test-only members. These are workspace members that do not form part of the main -# Diem production codebase, and are only used to verify correctness and/or performance. -# -# *** IMPORTANT *** -# -# Published developer tools (e.g. Move compiler) ARE part of the production Diem codebase. -# They should be listed in the root Cargo.toml's default-members, not here! -# -# Before adding a new crate to this list, ensure that it is *actually* test-only. If not, add it -# (or a crate that depends on it) to the root Cargo.toml's default-members list! -# -# For more, see the "Conditional compilation for tests" section in documentation/coding_guidelines.md. [workspace.test-only] members = [ "common/proptest-helpers", @@ -84,17 +102,45 @@ members = [ # Interesting subsets of the workspace, These are used for generating and # checking dependency summaries. -[workspace.subsets.lsr] -# The Diem safety rules TCB. -members = [ +[subsets.lsr] +# The Starcoin safety rules TCB. +root-members = [ ] -[workspace.subsets.lec] -# The Diem execution correctness TCB. -members = [ +[subsets.lec] +# The Starcoin execution correctness TCB. +root-members = [ ] -[workspace.subsets.release] -# The Diem release binaries -members = [ +[subsets.release] +# The Starcoin release binaries +root-members = [ ] + +# --- +# Determinator rules +# --- + +[[determinator.path-rule]] +globs = [".github/**/*", ".circleci/**/*", "codecov.yml"] +mark-changed = "all" + +# Core devtools files. +[[determinator.path-rule]] +globs = ["cargo-toolchain", "cargo-flags", "scripts/dev_setup.sh", "x.toml"] +mark-changed = "all" + +[[determinator.path-rule]] +# Ignore website and other ancillary files, and scripts not listed above. +globs = [] +mark-changed = [] + +[[determinator.path-rule]] +# A bunch of images that should be ignored, I guess. +globs = [] +mark-changed = [] + +[[determinator.package-rule]] +# x controls the build process, so if it changes, build everything. 
+on-affected = ["x"] +mark-changed = "all" From de3eddfa551238f1c252e3d6904b29de34844074 Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Fri, 8 Jan 2021 13:57:10 +0800 Subject: [PATCH 6/8] [config] fix clap panic error --- config/src/logger_config.rs | 5 ++-- config/src/metrics_config.rs | 6 +---- config/src/miner_config.rs | 2 ++ config/src/network_config.rs | 33 +++++++++++++++++++-------- network/src/worker.rs | 4 ++-- network/tests/network_service_test.rs | 4 ++-- 6 files changed, 33 insertions(+), 21 deletions(-) diff --git a/config/src/logger_config.rs b/config/src/logger_config.rs index 05d5f5a009..4fbe27033f 100644 --- a/config/src/logger_config.rs +++ b/config/src/logger_config.rs @@ -20,10 +20,11 @@ pub struct LoggerConfig { pub disable_stderr: Option, #[structopt(name = "disable-file", long, help = "disable file logger")] pub disable_file: Option, - #[structopt(name = "max-file-size", long, default_value = "DEFAULT_MAX_FILE_SIZE")] + #[structopt(name = "max-file-size", long, default_value = "1073741824")] pub max_file_size: u64, - #[structopt(name = "max-backup", long, default_value = "DEFAULT_MAX_BACKUP")] + #[structopt(name = "max-backup", long, default_value = "7")] pub max_backup: u32, + #[structopt(skip)] #[serde(skip)] log_path: Option, } diff --git a/config/src/metrics_config.rs b/config/src/metrics_config.rs index d521fc3d0e..e2c3e978db 100644 --- a/config/src/metrics_config.rs +++ b/config/src/metrics_config.rs @@ -23,11 +23,7 @@ pub struct MetricsConfig { default_value = "DEFAULT_METRIC_SERVER_ADDRESS" )] pub address: String, - #[structopt( - name = "metrics-port", - long, - default_value = "DEFAULT_METRIC_SERVER_PORT" - )] + #[structopt(name = "metrics-port", long, default_value = "9101")] pub port: u16, } impl MetricsConfig { diff --git a/config/src/miner_config.rs b/config/src/miner_config.rs index 622d0cedda..d5317804eb 100644 --- a/config/src/miner_config.rs +++ b/config/src/miner_config.rs @@ -42,7 +42,9 @@ impl MinerConfig { #[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct MinerClientConfig { + #[structopt(skip)] pub server: Option, + #[structopt(skip)] pub plugin_path: Option, #[structopt(long = "miner-thread")] /// Miner thread number, not work for dev network, default is 1 diff --git a/config/src/network_config.rs b/config/src/network_config.rs index cfa3b04681..f20b1d593e 100644 --- a/config/src/network_config.rs +++ b/config/src/network_config.rs @@ -85,8 +85,10 @@ impl Default for NetworkRpcQuotaConfiguration { #[derive(Clone, Debug, Deserialize, PartialEq, Serialize, StructOpt)] #[serde(deny_unknown_fields)] pub struct NetworkConfig { + #[structopt(skip)] /// The address that this node is listening on for new connections. 
- pub listen: Multiaddr, + pub listen: Option, + #[structopt(skip)] #[serde(default)] pub seeds: Vec, #[serde(default)] @@ -99,8 +101,10 @@ pub struct NetworkConfig { #[structopt(skip)] #[serde(skip)] network_keypair: Option>>, + #[structopt(skip)] #[serde(skip)] self_peer_id: Option, + #[structopt(skip)] #[serde(skip)] self_address: Option, #[structopt(flatten)] @@ -109,9 +113,11 @@ pub struct NetworkConfig { impl Default for NetworkConfig { fn default() -> Self { Self { - listen: format!("/ip4/0.0.0.0/tcp/{}", DEFAULT_NETWORK_PORT) - .parse() - .expect("Parse multi address fail."), + listen: Some( + format!("/ip4/0.0.0.0/tcp/{}", DEFAULT_NETWORK_PORT) + .parse() + .expect("Parse multi address fail."), + ), seeds: vec![], disable_mdns: None, disable_seed: None, @@ -124,6 +130,13 @@ impl Default for NetworkConfig { } impl NetworkConfig { + pub fn listen(&self) -> Multiaddr { + self.listen.as_ref().cloned().unwrap_or( + format!("/ip4/0.0.0.0/tcp/{}", DEFAULT_NETWORK_PORT) + .parse() + .expect("Parse multi address fail."), + ) + } pub fn network_keypair(&self) -> Arc> { self.network_keypair.clone().expect("Config should init.") } @@ -145,11 +158,11 @@ impl NetworkConfig { fn prepare_peer_id(&mut self) { let peer_id = PeerId::from_ed25519_public_key(self.network_keypair().public_key.clone()); - let host = if is_memory_addr(&self.listen) { - self.listen.clone() + let addr = self.listen(); + let host = if is_memory_addr(&addr) { + addr } else { - self.listen - .clone() + addr.clone() .replace(0, |_p| Some(Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1)))) .expect("Replace multi address fail.") }; @@ -197,12 +210,12 @@ impl ConfigModule for NetworkConfig { let listen = if base.net.is_test() { memory_addr(port as u64) } else { - format!("/ip4/0.0.0.0/tcp/{}", port) + format!("/ip4/0.0.0.0/tcp/{}", DEFAULT_NETWORK_PORT) .parse() .expect("Parse multi address fail.") }; Ok(Self { - listen, + listen: Some(listen), seeds, disable_mdns: opt.network.disable_mdns, disable_seed: opt.network.disable_seed, diff --git a/network/src/worker.rs b/network/src/worker.rs index ffc2c0cefc..620363c547 100644 --- a/network/src/worker.rs +++ b/network/src/worker.rs @@ -30,7 +30,7 @@ pub fn build_network_worker( rpc_service: Option<(RpcInfo, ServiceRef)>, ) -> Result { let node_name = node_config.node_name(); - let transport_config = if is_memory_addr(&node_config.network.listen) { + let transport_config = if is_memory_addr(&node_config.network.listen()) { TransportConfig::MemoryOnly } else { TransportConfig::Normal { @@ -87,7 +87,7 @@ pub fn build_network_worker( boot_nodes }; let config = NetworkConfiguration { - listen_addresses: vec![node_config.network.listen.clone()], + listen_addresses: vec![node_config.network.listen()], boot_nodes, node_key: { let secret = identity::ed25519::SecretKey::from_bytes( diff --git a/network/tests/network_service_test.rs b/network/tests/network_service_test.rs index 4f112c1c3b..63fda8079e 100644 --- a/network/tests/network_service_test.rs +++ b/network/tests/network_service_test.rs @@ -48,7 +48,7 @@ fn build_test_network_services(num: usize) -> Vec { } let mut node_config = NodeConfig::random_for_test(); - node_config.network.listen = random_memory_addr(); + node_config.network.listen = Some(random_memory_addr()); node_config.network.seeds = boot_nodes; info!( @@ -56,7 +56,7 @@ fn build_test_network_services(num: usize) -> Vec { node_config.network.listen, node_config.network.seeds ); if first_addr.is_none() { - first_addr = Some(node_config.network.listen.clone()); + first_addr = 
node_config.network.listen.clone(); } let mut protocols = NotificationMessage::protocols(); protocols.push(TEST_NOTIF_PROTOCOL_NAME.into()); From 0e344742c8b502e7959c5bc3600f8666f87d447c Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Fri, 8 Jan 2021 14:39:55 +0800 Subject: [PATCH 7/8] [config] fix clippy check and scripts --- config/src/metrics_config.rs | 2 +- config/src/miner_config.rs | 2 +- config/src/network_config.rs | 10 +++++----- kube/manifest/centauri.yaml | 2 +- kube/manifest/starcoin.yaml | 2 +- kube/manifest/txfactory.yaml | 2 +- scripts/starcoin_ci_docker.sh | 2 +- 7 files changed, 11 insertions(+), 11 deletions(-) diff --git a/config/src/metrics_config.rs b/config/src/metrics_config.rs index e2c3e978db..61b363ab5b 100644 --- a/config/src/metrics_config.rs +++ b/config/src/metrics_config.rs @@ -20,7 +20,7 @@ pub struct MetricsConfig { name = "address", long, help = "address", - default_value = "DEFAULT_METRIC_SERVER_ADDRESS" + default_value = DEFAULT_METRIC_SERVER_ADDRESS )] pub address: String, #[structopt(name = "metrics-port", long, default_value = "9101")] diff --git a/config/src/miner_config.rs b/config/src/miner_config.rs index d5317804eb..21437a2962 100644 --- a/config/src/miner_config.rs +++ b/config/src/miner_config.rs @@ -49,7 +49,7 @@ pub struct MinerClientConfig { #[structopt(long = "miner-thread")] /// Miner thread number, not work for dev network, default is 1 pub miner_thread: Option, - #[structopt(long = "enable-stderr")] + #[structopt(skip)] #[serde(skip)] pub enable_stderr: bool, } diff --git a/config/src/network_config.rs b/config/src/network_config.rs index f20b1d593e..c25d9cb964 100644 --- a/config/src/network_config.rs +++ b/config/src/network_config.rs @@ -91,6 +91,7 @@ pub struct NetworkConfig { #[structopt(skip)] #[serde(default)] pub seeds: Vec, + #[structopt(long = "disable-mdns")] #[serde(default)] /// Disable p2p mdns discovery, for automatically discover the peer from the local network. 
pub disable_mdns: Option, @@ -131,11 +132,11 @@ impl Default for NetworkConfig { impl NetworkConfig { pub fn listen(&self) -> Multiaddr { - self.listen.as_ref().cloned().unwrap_or( + self.listen.as_ref().cloned().unwrap_or_else(|| { format!("/ip4/0.0.0.0/tcp/{}", DEFAULT_NETWORK_PORT) .parse() - .expect("Parse multi address fail."), - ) + .expect("Parse multi address fail.") + }) } pub fn network_keypair(&self) -> Arc> { self.network_keypair.clone().expect("Config should init.") @@ -162,8 +163,7 @@ impl NetworkConfig { let host = if is_memory_addr(&addr) { addr } else { - addr.clone() - .replace(0, |_p| Some(Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1)))) + addr.replace(0, |_p| Some(Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1)))) .expect("Replace multi address fail.") }; self.self_address = Some(MultiaddrWithPeerId::new(host, peer_id.clone().into())); diff --git a/kube/manifest/centauri.yaml b/kube/manifest/centauri.yaml index 2a5147936d..5604a779a9 100644 --- a/kube/manifest/centauri.yaml +++ b/kube/manifest/centauri.yaml @@ -55,7 +55,7 @@ spec: node_key_flag="--node-key ${node_key}"; fi; if [ $POD_NAME = "starcoin-stress-0" ] && [ $DISABLE_SEED = "true" ]; then - /starcoin/starcoin -n centauri:1 --genesis-config proxima -d /sc-data --disable-seed $node_key_flag --rpc-address 0.0.0.0; + /starcoin/starcoin -n centauri:1 --genesis-config proxima -d /sc-data --disable-seed=true $node_key_flag --rpc-address 0.0.0.0; sleep 5; else /starcoin/starcoin -n centauri:1 --genesis-config proxima -d /sc-data --seed $(SEED) $node_key_flag --rpc-address 0.0.0.0; diff --git a/kube/manifest/starcoin.yaml b/kube/manifest/starcoin.yaml index e8380d3cd5..a9f04c3250 100644 --- a/kube/manifest/starcoin.yaml +++ b/kube/manifest/starcoin.yaml @@ -49,7 +49,7 @@ spec: if [ ! -z $node_key ]; then node_key_flag="--node-key ${node_key}"; fi; - /starcoin/starcoin -n proxima -d /sc-data --enable-mdns $node_key_flag --rpc-address 0.0.0.0; + /starcoin/starcoin -n proxima -d /sc-data --disable-mdns=true $node_key_flag --rpc-address 0.0.0.0; ports: - containerPort: 9840 hostPort: 9840 diff --git a/kube/manifest/txfactory.yaml b/kube/manifest/txfactory.yaml index 92e0c6bf47..5bd9fca975 100644 --- a/kube/manifest/txfactory.yaml +++ b/kube/manifest/txfactory.yaml @@ -55,7 +55,7 @@ spec: node_key_flag="--node-key ${node_key}"; fi; if [ $POD_NAME = "starcoin-stress-0" ] && [ $DISABLE_SEED = "true" ]; then - /starcoin/starcoin -n centauri:1 --genesis-config proxima -d /sc-data --disable-seed $node_key_flag --rpc-address 0.0.0.0; + /starcoin/starcoin -n centauri:1 --genesis-config proxima -d /sc-data --disable-seed=true $node_key_flag --rpc-address 0.0.0.0; sleep 5; else /starcoin/starcoin -n centauri:1 --genesis-config proxima -d /sc-data --seed $(SEED) $node_key_flag --rpc-address 0.0.0.0; diff --git a/scripts/starcoin_ci_docker.sh b/scripts/starcoin_ci_docker.sh index c40907dd51..6478ad1992 100755 --- a/scripts/starcoin_ci_docker.sh +++ b/scripts/starcoin_ci_docker.sh @@ -63,7 +63,7 @@ function start_cluster() { rpc_address="127.0.0.1" fi - start_starcoin $cluster_name-0 starcoin-0 9840 9101 $net --node-key ${node_keys[0]} --rpc-address 0.0.0.0 --disable-seed + start_starcoin $cluster_name-0 starcoin-0 9840 9101 $net --node-key ${node_keys[0]} --rpc-address 0.0.0.0 --disable-seed=true sleep 5 seed_peer_id=$(docker-machine ssh $cluster_name-0 grep 'Local\ node\ identity\ is:\ ' $cfg_root/starcoin-0/$net/starcoin.log | awk '{print $8}' | tac | head -n 1) seed=/ip4/$seed_host/tcp/9840/p2p/$seed_peer_id From 
d4e3effe585a4d8d45cba9025e18c49f2d8e7bc7 Mon Sep 17 00:00:00 2001 From: chengsuoyuan Date: Fri, 8 Jan 2021 15:38:48 +0800 Subject: [PATCH 8/8] [config] fix integration test --- testsuite/tests/steps/node.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/testsuite/tests/steps/node.rs b/testsuite/tests/steps/node.rs index 606f32827c..5f1ebb74d4 100644 --- a/testsuite/tests/steps/node.rs +++ b/testsuite/tests/steps/node.rs @@ -14,8 +14,8 @@ pub fn steps() -> Steps { .given("a test node config", |world: &mut MyWorld, _step| { let mut opt = StarcoinOpt::default(); opt.net = Some(ChainNetworkID::TEST); - opt.disable_metrics = Some(true); - opt.disable_seed = Some(true); + opt.metrics.disable_metrics = Some(true); + opt.network.disable_seed = Some(true); let config = NodeConfig::load_with_opt(&opt).unwrap(); info!("config: {:?}", config); world.node_config = Some(config) @@ -23,15 +23,15 @@ pub fn steps() -> Steps { .given("a dev node config", |world: &mut MyWorld, _step| { let mut opt = StarcoinOpt::default(); opt.net = Some(ChainNetworkID::DEV); - opt.disable_metrics = Some(true); - opt.disable_seed = Some(true); + opt.metrics.disable_metrics = Some(true); + opt.network.disable_seed = Some(true); let config = NodeConfig::load_with_opt(&opt).unwrap(); world.node_config = Some(config) }) .given("halley node config", |world: &mut MyWorld, _step| { let mut opt = StarcoinOpt::default(); opt.net = Some(ChainNetworkID::HALLEY); - opt.disable_metrics = Some(true); + opt.metrics.disable_metrics = Some(true); opt.data_dir = Some(PathBuf::from(starcoin_config::temp_path().as_ref())); let config = NodeConfig::load_with_opt(&opt).unwrap(); world.node_config = Some(config)
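
A note on the theme of PATCH 6/8 through PATCH 8/8 above: per-module options (metrics, network, miner, logger) are grouped into sub-structs that the top-level options struct flattens with StructOpt, so Rust callers now reach flags as opt.metrics.disable_metrics or opt.network.disable_seed rather than as top-level fields, and each default_value has to be something clap can parse into the field's type at runtime, either a literal such as "9101" or an unquoted constant whose value (not its name) is substituted. The following is a minimal, self-contained sketch of that pattern under those assumptions; MetricsOpt, NetworkOpt, StarcoinOptSketch, and DEFAULT_METRIC_ADDRESS are illustrative names and field sets, not the actual Starcoin definitions.

use structopt::StructOpt;

const DEFAULT_METRIC_ADDRESS: &str = "0.0.0.0";

#[derive(Debug, Default, StructOpt)]
pub struct MetricsOpt {
    // Quoting a constant name, e.g. default_value = "DEFAULT_METRIC_SERVER_PORT",
    // makes clap try to parse that string as a u16 and panic at runtime; numeric
    // defaults should be spelled out as literals.
    #[structopt(long = "metrics-port", default_value = "9101")]
    pub port: u16,
    // For &str constants, the unquoted form passes the constant's value through.
    #[structopt(long = "metrics-address", default_value = DEFAULT_METRIC_ADDRESS)]
    pub address: String,
    #[structopt(long = "disable-metrics")]
    pub disable_metrics: Option<bool>,
}

#[derive(Debug, Default, StructOpt)]
pub struct NetworkOpt {
    // Option<bool> flags take an explicit value, e.g. --disable-seed true.
    #[structopt(long = "disable-seed")]
    pub disable_seed: Option<bool>,
    #[structopt(long = "disable-mdns")]
    pub disable_mdns: Option<bool>,
}

#[derive(Debug, Default, StructOpt)]
pub struct StarcoinOptSketch {
    // Flattening keeps the CLI surface unchanged; only the Rust access path
    // moves onto the sub-structs.
    #[structopt(flatten)]
    pub metrics: MetricsOpt,
    #[structopt(flatten)]
    pub network: NetworkOpt,
}

fn main() {
    // Parsed from the command line, e.g.:
    //   sketch --metrics-port 9101 --disable-seed true
    let from_cli = StarcoinOptSketch::from_args();
    println!("{:?}", from_cli);

    // Built programmatically, the way the integration-test steps in
    // PATCH 8/8 populate the options.
    let mut opt = StarcoinOptSketch::default();
    opt.metrics.disable_metrics = Some(true);
    opt.network.disable_seed = Some(true);
    println!("{:?}", opt);
}

Because an Option<bool> flag takes an explicit value in this sketch, a bare --disable-seed would not parse, which is consistent with the kube manifests and starcoin_ci_docker.sh switching to --disable-seed=true in PATCH 7/8.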