Skip to content

Commit

Permalink
Merge pull request #1323 from ainblockchain/release/v1.5.0
Browse files Browse the repository at this point in the history
Release/v1.5.0
  • Loading branch information
platfowner authored Sep 11, 2024
2 parents 143fa4d + da7b9b7 commit 288c7a3
Show file tree
Hide file tree
Showing 30 changed files with 1,171 additions and 374 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ You can override default port numbering system by setting `PORT` and `P2P_PORT`
```
gcloud init
# For genesis deploy
bash deploy_blockchain_genesis_gcp.sh [dev|staging|sandbox|exp|spring|summer|mainnet] <# of Shards> <Parent Node Index Begin> <Parent Node Index End> [--setup] [--keystore|--mnemonic|--private-key] [--keep-code|--no-keep-code] [--keep-data|--no-keep-data] [--full-sync|--fast-sync] [--chown-data|--no-chown-data] [--kill-only|--skip-kill]
bash deploy_blockchain_genesis_gcp.sh [dev|staging|sandbox|exp|spring|summer|mainnet] <# of Shards> <Parent Node Index Begin> <Parent Node Index End> [--setup] [--keystore|--mnemonic|--private-key] [--keep-code|--no-keep-code] [--keep-data|--no-keep-data] [--full-sync|--fast-sync] [--chown-data|--no-chown-data] [--kill-job|--kill-only]
# For incremental deploy
bash deploy_blockchain_incremental_gcp.sh [dev|staging|sandbox|exp|spring|summer|mainnet] <# of Shards> <Parent Node Index Begin> <Parent Node Index End> [--setup] [--keystore|--mnemonic|--private-key] [--keep-code|--no-keep-code] [--keep-data|--no-keep-data] [--full-sync|--fast-sync] [--chown-data|--no-chown-data]
```
Expand Down Expand Up @@ -130,7 +130,7 @@ BLOCKCHAIN_CONFIGS_DIR=blockchain-configs/afan-shard MIN_NUM_VALIDATORS=1 DEBUG=
```
gcloud init
# For genesis deploy
bash deploy_blockchain_genesis_gcp.sh [dev|staging|sandbox|exp|spring|summer|mainnet] <# of Shards> <Parent Node Index Begin> <Parent Node Index End> [--setup] [--keystore|--mnemonic|--private-key] [--keep-code|--no-keep-code] [--keep-data|--no-keep-data] [--full-sync|--fast-sync] [--chown-data|--no-chown-data] [--kill-only|--skip-kill]
bash deploy_blockchain_genesis_gcp.sh [dev|staging|sandbox|exp|spring|summer|mainnet] <# of Shards> <Parent Node Index Begin> <Parent Node Index End> [--setup] [--keystore|--mnemonic|--private-key] [--keep-code|--no-keep-code] [--keep-data|--no-keep-data] [--full-sync|--fast-sync] [--chown-data|--no-chown-data] [--kill-job|--kill-only]
# For incremental deploy
bash deploy_blockchain_incremental_gcp.sh [dev|staging|sandbox|exp|spring|summer|mainnet] <# of Shards> <Parent Node Index Begin> <Parent Node Index End> [--setup] [--keystore|--mnemonic|--private-key] [--keep-code|--no-keep-code] [--keep-data|--no-keep-data] [--full-sync|--fast-sync] [--chown-data|--no-chown-data]
```
Expand Down
2 changes: 1 addition & 1 deletion blockchain-configs/afan-shard/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 3,
Expand Down
2 changes: 1 addition & 1 deletion blockchain-configs/base/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand Down
2 changes: 1 addition & 1 deletion blockchain-configs/he-shard/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand Down
4 changes: 2 additions & 2 deletions blockchain-configs/mainnet-prod/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand All @@ -56,7 +56,7 @@
"ON_MEMORY_CHAIN_LENGTH": 10,
"P2P_HEARTBEAT_INTERVAL_MS": 15000,
"P2P_MESSAGE_TIMEOUT_MS": 600000,
"P2P_PORT": 5000,
"P2P_PORT": 4997,
"P2P_WAIT_FOR_ADDRESS_TIMEOUT_MS": 10000,
"PEER_CANDIDATE_JSON_RPC_URL": "https://mainnet-api.ainetwork.ai/json-rpc",
"PEER_CANDIDATES_CONNECTION_INTERVAL_MS": 20000,
Expand Down
2 changes: 1 addition & 1 deletion blockchain-configs/sim-shard/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand Down
2 changes: 1 addition & 1 deletion blockchain-configs/testnet-dev/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand Down
2 changes: 1 addition & 1 deletion blockchain-configs/testnet-exp/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand Down
4 changes: 2 additions & 2 deletions blockchain-configs/testnet-prod/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand All @@ -57,7 +57,7 @@
"ON_MEMORY_CHAIN_LENGTH": 10,
"P2P_HEARTBEAT_INTERVAL_MS": 15000,
"P2P_MESSAGE_TIMEOUT_MS": 600000,
"P2P_PORT": 5000,
"P2P_PORT": 4998,
"P2P_WAIT_FOR_ADDRESS_TIMEOUT_MS": 10000,
"PEER_CANDIDATE_JSON_RPC_URL": "https://testnet-api.ainetwork.ai/json-rpc",
"PEER_CANDIDATES_CONNECTION_INTERVAL_MS": 20000,
Expand Down
2 changes: 1 addition & 1 deletion blockchain-configs/testnet-sandbox/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand Down
4 changes: 2 additions & 2 deletions blockchain-configs/testnet-staging/node_params.json
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
"MAX_FINALIZED_BLOCK_INFO_ON_MEM": 1000,
"MAX_JSON_RPC_API_READ_RATE_LIMIT": 10,
"MAX_JSON_RPC_API_WRITE_RATE_LIMIT": 1,
"MAX_NUM_EVENT_CHANNELS": 20,
"MAX_NUM_EVENT_CHANNELS": 30,
"MAX_NUM_EVENT_FILTERS": 40,
"MAX_NUM_EVENT_FILTERS_PER_CHANNEL": 5,
"MAX_NUM_INBOUND_CONNECTION": 6,
Expand All @@ -57,7 +57,7 @@
"ON_MEMORY_CHAIN_LENGTH": 10,
"P2P_HEARTBEAT_INTERVAL_MS": 15000,
"P2P_MESSAGE_TIMEOUT_MS": 600000,
"P2P_PORT": 5000,
"P2P_PORT": 4999,
"P2P_WAIT_FOR_ADDRESS_TIMEOUT_MS": 10000,
"PEER_CANDIDATE_JSON_RPC_URL": "https://staging-api.ainetwork.ai/json-rpc",
"PEER_CANDIDATES_CONNECTION_INTERVAL_MS": 20000,
Expand Down
2 changes: 1 addition & 1 deletion client/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ app.get('/metrics', async (req, res, next) => {
.end();
});

// Used in wait_until_node_sync_gcp.sh
// Used in wait_until_node_sync.sh
app.get('/last_block_number', (req, res, next) => {
const beginTime = Date.now();
const result = node.bc.lastBlockNumber();
Expand Down
3 changes: 3 additions & 0 deletions client/protocol_versions.json
Original file line number Diff line number Diff line change
Expand Up @@ -155,5 +155,8 @@
},
"1.4.2": {
"min": "1.0.0"
},
"1.5.0": {
"min": "1.0.0"
}
}
12 changes: 6 additions & 6 deletions common/file-util.js
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ class FileUtil {
});
});
} catch (err) {
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err}`);
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err.stack}`);
return false;
}
}
Expand Down Expand Up @@ -288,7 +288,7 @@ class FileUtil {
});
});
} catch (err) {
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err}`);
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err.stack}`);
return null;
}
}
Expand All @@ -299,7 +299,7 @@ class FileUtil {
const zippedFs = fs.readFileSync(filePath);
return FileUtil.buildObjectFromChunks(JSON.parse(zlib.gunzipSync(zippedFs).toString()).docs);
} catch (err) {
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err}`);
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err.stack}`);
return null;
}
}
Expand All @@ -314,7 +314,7 @@ class FileUtil {
const zippedFs = fs.readFileSync(filePath);
return JSON.parse(zlib.gunzipSync(zippedFs).toString());
} catch (err) {
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err}`);
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err.stack}`);
return null;
}
}
Expand All @@ -325,7 +325,7 @@ class FileUtil {
const fileStr = fs.readFileSync(filePath);
return JSON.parse(fileStr);
} catch (err) {
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err}`);
logger.error(`[${LOG_HEADER}] Error while reading ${filePath}: ${err.stack}`);
return null;
}
}
Expand Down Expand Up @@ -399,7 +399,7 @@ class FileUtil {
try {
return Number(fs.readFileSync(h2nPath).toString());
} catch (err) {
logger.error(`[${LOG_HEADER}] Error while reading ${h2nPath}: ${err}`);
logger.error(`[${LOG_HEADER}] Error while reading ${h2nPath}: ${err.stack}`);
return -1;
}
}
Expand Down
5 changes: 3 additions & 2 deletions copy_blockchain_data_gcp.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@

function usage() {
printf "Usage: bash copy_blockchain_data_gcp.sh [dev|staging|sandbox|exp|spring|summer|mainnet] <Node Index> [download|upload]\n"
printf "Example: bash copy_blockchain_data_gcp.sh spring 5 download\n"
printf "Example: bash copy_blockchain_data_gcp.sh spring 0 download\n"
printf "Example: bash copy_blockchain_data_gcp.sh spring 1 upload\n"
printf "\n"
exit
}
Expand Down Expand Up @@ -146,7 +147,7 @@ function upload_data() {

# 2. Extract tgz file for node
printf "\n\n<<< Extracting tgz file for node $node_index >>>\n\n"
TGZ_CMD="gcloud compute ssh $node_target_addr --command 'cd /home; sudo mkdir -p ain_blockchain_data; sudo chown runner:runner ain_blockchain_data; sudo chmod 777 ain_blockchain_data; cd ain_blockchain_data; gzip -dc ~/ain_blockchain_data.tar.gz | tar xvf -' --project $PROJECT_ID --zone $node_zone"
TGZ_CMD="gcloud compute ssh $node_target_addr --command 'cd /home; sudo mkdir -p ain_blockchain_data; sudo chown $GCP_USER:$GCP_USER ain_blockchain_data; sudo chmod 777 ain_blockchain_data; cd ain_blockchain_data; sudo rm -rf chains snapshots; gzip -dc ~/ain_blockchain_data.tar.gz | tar xvf -' --project $PROJECT_ID --zone $node_zone"
printf "TGZ_CMD=$TGZ_CMD\n\n"
eval $TGZ_CMD

Expand Down
134 changes: 134 additions & 0 deletions copy_blockchain_data_onprem.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
#!/bin/bash

# Print the command-line usage help for this script and terminate.
usage() {
  printf "Usage: bash copy_blockchain_data_onprem.sh [staging|spring|mainnet] <Node Index> [download|upload]\n"
  printf "Example: bash copy_blockchain_data_onprem.sh staging 0 download\n"
  printf "Example: bash copy_blockchain_data_onprem.sh staging 1 upload\n"
  printf "\n"
  exit
}

# Exactly three positional arguments are required: <Season> <Node Index> <Command>.
if [[ $# -lt 3 ]] || [[ $# -gt 3 ]]; then
usage
fi

# $1: target season/network. Only a subset of the GCP script's seasons is
# supported on-prem (staging|spring|mainnet).
if [[ "$1" = 'staging' ]] || [[ "$1" = 'spring' ]] || [[ "$1" = 'mainnet' ]]; then
SEASON="$1"
else
printf "Invalid <Project/Season> argument: $1\n"
exit
fi
printf "\n"
printf "SEASON=$SEASON\n"

# Login user on the on-prem machines (fixed, unlike GCP_USER in the GCP variant).
ONPREM_USER="nvidia"
printf "ONPREM_USER=$ONPREM_USER\n"

# $2: node index — must be a non-negative integer in [0, 9].
number_re='^[0-9]+$'
if ! [[ $2 =~ $number_re ]] ; then
printf "\n"
printf "Invalid <Node Index> argument: $2\n"
exit
fi
NODE_INDEX=$2
if [[ $NODE_INDEX -lt 0 ]] || [[ $NODE_INDEX -gt 9 ]]; then
printf "\n"
printf "Out-of-range <Node Index> argument: $NODE_INDEX\n"
exit
fi
printf "NODE_INDEX=$NODE_INDEX\n"

# $3: direction of the copy (download from node / upload to node).
if [[ "$3" = 'download' ]] || [[ "$3" = 'upload' ]]; then
COMMAND="$3"
else
printf "\n"
printf "Invalid <Command> argument: $3\n"
printf "\n"
usage
fi
printf "COMMAND=$COMMAND\n"

# Get confirmation. Mainnet requires typing the literal word "mainnet";
# other seasons accept a single y/Y keypress.
if [[ "$SEASON" = "mainnet" ]]; then
printf "\n"
printf "Do you want to proceed for $SEASON? Enter [mainnet]: "
read CONFIRM
printf "\n\n"
if [[ ! $CONFIRM = "mainnet" ]]
then
[[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell
fi
else
printf "\n"
read -p "Do you want to proceed for $SEASON? [y/N]: " -n 1 -r
printf "\n\n"
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
[[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell
fi
fi

# Read node ip addresses and passwords, one entry per line, into parallel
# arrays indexed by node index.
# NOTE(review): assumes both files exist and have matching line counts for
# all node indices 0..9 — no existence check is performed; verify before use.
IFS=$'\n' read -d '' -r -a NODE_IP_LIST < ./ip_addresses/${SEASON}_onprem_ip.txt
IFS=$'\n' read -d '' -r -a NODE_PW_LIST < ./ip_addresses/${SEASON}_onprem_pw.txt

#######################################
# Download blockchain data (chains, snapshots) from an on-prem node to the
# current directory as ain_blockchain_data.tar.gz.
# Globals:
#   ONPREM_USER, SEASON, NODE_IP_LIST, NODE_PW_LIST (read)
# Arguments:
#   $1 - node index into NODE_IP_LIST / NODE_PW_LIST
#######################################
function download_data() {
local node_index="$1"
local node_target_addr="${ONPREM_USER}@${NODE_IP_LIST[${node_index}]}"
local node_login_pw="${NODE_PW_LIST[${node_index}]}"

printf "\n* >> Downloading data from node $node_index ($node_target_addr) *********************************************************\n\n"

printf "node_target_addr='$node_target_addr'\n"

# 1. Create tgz file for node
# sshpass -f <(printf ...) supplies the SSH login password; the leading
# `echo ${node_login_pw} |` feeds the same password to the remote
# `sudo -S` prompt via ssh's stdin.
# NOTE(review): the password appears in process arguments and in the
# printed *_CMD lines — confirm this is acceptable for these environments.
printf "\n\n<<< Creating tgz file for node $node_index >>>\n\n"
TGZ_CMD="ssh $node_target_addr 'sudo -S ls -la; cd /home/${SEASON}/ain_blockchain_data; tar cvf - chains snapshots | gzip -c > ~/ain_blockchain_data.tar.gz'"
printf "TGZ_CMD=$TGZ_CMD\n\n"
eval "echo ${node_login_pw} | sshpass -f <(printf '%s\n' ${node_login_pw}) ${TGZ_CMD}"

# 2. Copy tgz file from node
printf "\n\n<<< Copying tgz file from node $node_index >>>\n\n"
SCP_CMD="scp -r $node_target_addr:~/ain_blockchain_data.tar.gz ."
printf "SCP_CMD=$SCP_CMD\n\n"
eval "sshpass -f <(printf '%s\n' ${node_login_pw}) ${SCP_CMD}"

# 3. Clean up tgz file for node
printf "\n\n<<< Cleaning up tgz file for node $node_index >>>\n\n"
CLEANUP_CMD="ssh $node_target_addr 'rm ~/ain_blockchain_data.tar.gz'"
printf "CLEANUP_CMD=$CLEANUP_CMD\n\n"
eval "sshpass -f <(printf '%s\n' ${node_login_pw}) ${CLEANUP_CMD}"
}

#######################################
# Upload local blockchain data (chains, snapshots) from the current
# directory's ain_blockchain_data.tar.gz to an on-prem node, replacing the
# node's existing chains/ and snapshots/ directories.
# Globals:
#   ONPREM_USER, SEASON, NODE_IP_LIST, NODE_PW_LIST (read)
# Arguments:
#   $1 - node index into NODE_IP_LIST / NODE_PW_LIST
#######################################
function upload_data() {
  local node_index="$1"
  local node_target_addr="${ONPREM_USER}@${NODE_IP_LIST[${node_index}]}"
  local node_login_pw="${NODE_PW_LIST[${node_index}]}"

  # Fixed log message: data flows *to* the node here (the previous text said
  # "from node", copied from download_data).
  printf "\n* >> Uploading data to node $node_index ($node_target_addr) *********************************************************\n\n"

  printf "node_target_addr='$node_target_addr'\n"

  # 1. Copy tgz file to node
  printf "\n\n<<< Copying tgz file to node $node_index >>>\n\n"
  SCP_CMD="scp -r ./ain_blockchain_data.tar.gz $node_target_addr:~"
  printf "SCP_CMD=$SCP_CMD\n\n"
  eval "sshpass -f <(printf '%s\n' ${node_login_pw}) ${SCP_CMD}"

  # 2. Extract tgz file for node
  # `sudo -S` consumes the password piped in via `echo ${node_login_pw} |`;
  # sshpass handles the SSH login password separately. Old chains/snapshots
  # are removed first so stale blocks don't survive the extraction.
  printf "\n\n<<< Extracting tgz file for node $node_index >>>\n\n"
  TGZ_CMD="ssh $node_target_addr 'sudo -S ls -la; cd /home; sudo mkdir -p ${SEASON}/ain_blockchain_data; sudo chown $ONPREM_USER:$ONPREM_USER ${SEASON} ${SEASON}/ain_blockchain_data; sudo chmod 777 ${SEASON} ${SEASON}/ain_blockchain_data; cd ${SEASON}/ain_blockchain_data; sudo rm -rf chains snapshots; gzip -dc ~/ain_blockchain_data.tar.gz | tar xvf -'"
  printf "TGZ_CMD=$TGZ_CMD\n\n"
  eval "echo ${node_login_pw} | sshpass -f <(printf '%s\n' ${node_login_pw}) ${TGZ_CMD}"

  # 3. Clean up tgz file for node
  printf "\n\n<<< Cleaning up tgz file for node $node_index >>>\n\n"
  CLEANUP_CMD="ssh $node_target_addr 'rm ~/ain_blockchain_data.tar.gz'"
  printf "CLEANUP_CMD=$CLEANUP_CMD\n\n"
  eval "sshpass -f <(printf '%s\n' ${node_login_pw}) ${CLEANUP_CMD}"
}

# Dispatch to the requested operation. COMMAND was validated earlier to be
# exactly 'download' or 'upload', so the else-branch can only be 'download'.
if [[ "$COMMAND" = 'upload' ]]; then
upload_data "$NODE_INDEX"
else
download_data "$NODE_INDEX"
fi
Loading

0 comments on commit 288c7a3

Please sign in to comment.