Skip to content

Commit

Permalink
add tool to generate a blockchain with full blocks, as a benchmark (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
arvidn authored Apr 15, 2022
1 parent 2fbe062 commit 511c13e
Show file tree
Hide file tree
Showing 4 changed files with 178 additions and 7 deletions.
2 changes: 1 addition & 1 deletion tests/tools/test_full_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,4 @@
def test_full_sync_test():
    """Run the full-sync benchmark against the checked-in test blockchain DB."""
    # The test database lives next to this test module.
    test_dir = Path(os.path.realpath(__file__)).parent
    db_file = test_dir / "test-blockchain-db.sqlite"
    asyncio.run(run_sync_test(db_file, db_version=2, profile=False, single_thread=False, test_constants=False))
142 changes: 142 additions & 0 deletions tools/generate_chain.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
import cProfile
import random
import sqlite3
import time
from contextlib import closing, contextmanager
from pathlib import Path
from typing import Iterator, List

import zstd

from chia.types.blockchain_format.coin import Coin
from chia.types.spend_bundle import SpendBundle
from chia.util.chia_logging import initialize_logging
from chia.util.ints import uint64
from chia.util.path import mkdir
from tests.block_tools import create_block_tools
from tests.util.keyring import TempKeyring
from tools.test_constants import test_constants


@contextmanager
def enable_profiler(profile: bool) -> Iterator[None]:
    """Optionally run the enclosed block under cProfile.

    When ``profile`` is true, the wrapped code executes under a profiler and
    the collected stats are dumped to "generate-chain.profile"; otherwise the
    block runs unchanged.
    """
    if profile:
        profiler = cProfile.Profile()
        with profiler:
            yield
        profiler.create_stats()
        profiler.dump_stats("generate-chain.profile")
    else:
        yield


# Benchmark chain generator: builds "stress-test-blockchain.sqlite", a v2
# block-store database filled with transaction blocks packed close to the
# block cost limit, for use as a full-sync benchmark fixture.
root_path = Path("./test-chain").resolve()
mkdir(root_path)
with TempKeyring() as keychain:

    bt = create_block_tools(constants=test_constants, root_path=root_path, keychain=keychain)
    initialize_logging(
        "generate_chain", {"log_level": "DEBUG", "log_stdout": False, "log_syslog": False}, root_path=root_path
    )

    with closing(sqlite3.connect("stress-test-blockchain.sqlite")) as db:

        print("initializing v2 block store")
        db.execute(
            "CREATE TABLE full_blocks("
            "header_hash blob PRIMARY KEY,"
            "prev_hash blob,"
            "height bigint,"
            "in_main_chain tinyint,"
            "block blob)"
        )

        wallet = bt.get_farmer_wallet_tool()
        coinbase_puzzlehash = wallet.get_new_puzzlehash()

        # Seed the chain with a few transaction blocks so there are farmer
        # rewards available to spend in the blocks generated below.
        blocks = bt.get_consecutive_blocks(
            3,
            farmer_reward_puzzle_hash=coinbase_puzzlehash,
            pool_reward_puzzle_hash=coinbase_puzzlehash,
            guarantee_transaction_block=True,
            genesis_timestamp=uint64(1234567890),
            time_per_block=30,
        )

        # Coins we control and have not spent yet.
        unspent_coins: List[Coin] = []

        for b in blocks:
            for coin in b.get_included_reward_coins():
                if coin.puzzle_hash == coinbase_puzzlehash:
                    unspent_coins.append(coin)
            db.execute(
                "INSERT INTO full_blocks VALUES(?, ?, ?, ?, ?)",
                (
                    b.header_hash,
                    b.prev_header_hash,
                    b.height,
                    1,  # in_main_chain
                    zstd.compress(bytes(b)),
                ),
            )
        db.commit()

        # build 2000 transaction blocks
        with enable_profiler(False):
            for k in range(2000):

                start_time = time.monotonic()

                print(f"block: {len(blocks)} unspent: {len(unspent_coins)}")
                new_coins: List[Coin] = []
                spend_bundles: List[SpendBundle] = []
                # Spend up to 1010 randomly-chosen unspent coins, each into a
                # fresh puzzle hash, to fill the next block close to capacity.
                for i in range(1010):
                    if not unspent_coins:
                        break
                    c = unspent_coins.pop(random.randrange(len(unspent_coins)))
                    receiver = wallet.get_new_puzzlehash()
                    bundle = wallet.generate_signed_transaction(uint64(c.amount // 2), receiver, c)
                    new_coins.extend(bundle.additions())
                    spend_bundles.append(bundle)

                coinbase_puzzlehash = wallet.get_new_puzzlehash()
                blocks = bt.get_consecutive_blocks(
                    1,
                    blocks,
                    farmer_reward_puzzle_hash=coinbase_puzzlehash,
                    pool_reward_puzzle_hash=coinbase_puzzlehash,
                    guarantee_transaction_block=True,
                    transaction_data=SpendBundle.aggregate(spend_bundles),
                    time_per_block=30,
                )

                b = blocks[-1]
                # Reclaim the new reward coins and the coins created by this
                # block's spends as inputs for subsequent blocks.
                for coin in b.get_included_reward_coins():
                    if coin.puzzle_hash == coinbase_puzzlehash:
                        unspent_coins.append(coin)
                unspent_coins.extend(new_coins)

                if b.transactions_info:
                    fill_rate = b.transactions_info.cost / test_constants.MAX_BLOCK_COST_CLVM
                else:
                    fill_rate = 0

                end_time = time.monotonic()

                # Report len(spend_bundles), not the loop index: the index is
                # off by one (ends at 1009) when the loop runs to completion.
                print(
                    f"included {len(spend_bundles)} spend bundles. fill_rate: {fill_rate*100:.1f}% "
                    f"new coins: {len(new_coins)} time: {end_time - start_time:0.2f}s"
                )

                db.execute(
                    "INSERT INTO full_blocks VALUES(?, ?, ?, ?, ?)",
                    (
                        b.header_hash,
                        b.prev_header_hash,
                        b.height,
                        1,  # in_main_chain
                        zstd.compress(bytes(b)),
                    ),
                )
                db.commit()
18 changes: 18 additions & 0 deletions tools/test_constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from chia.consensus.default_constants import DEFAULT_CONSTANTS

# Consensus constants tuned for fast test-chain generation: tiny plots,
# cheap VDFs, and short epochs/sub-epochs so blocks can be farmed quickly.
test_constants = DEFAULT_CONSTANTS.replace(
    MIN_PLOT_SIZE=20,
    MIN_BLOCKS_PER_CHALLENGE_BLOCK=12,
    DISCRIMINANT_SIZE_BITS=16,
    SUB_EPOCH_BLOCKS=170,
    WEIGHT_PROOF_THRESHOLD=2,
    WEIGHT_PROOF_RECENT_BLOCKS=380,
    DIFFICULTY_CONSTANT_FACTOR=33554432,
    NUM_SPS_SUB_SLOT=16,  # Must be a power of 2
    MAX_SUB_SLOT_BLOCKS=50,
    EPOCH_BLOCKS=340,
    SUB_SLOT_ITERS_STARTING=2 ** 10,  # Must be a multiple of 64
    NUMBER_ZERO_BITS_PLOT_FILTER=1,  # H(plot signature of the challenge) must start with these many zeroes
)
23 changes: 17 additions & 6 deletions tools/test_full_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from chia.full_node.full_node import FullNode
from chia.types.full_block import FullBlock
from chia.util.config import load_config
from tools.test_constants import test_constants as TEST_CONSTANTS


class ExitOnError(logging.Handler):
Expand Down Expand Up @@ -46,7 +47,7 @@ def enable_profiler(profile: bool, counter: int) -> Iterator[None]:
pr.dump_stats(f"slow-batch-{counter:05d}.profile")


async def run_sync_test(file: Path, db_version, profile: bool, single_thread: bool) -> None:
async def run_sync_test(file: Path, db_version, profile: bool, single_thread: bool, test_constants: bool) -> None:

logger = logging.getLogger()
logger.setLevel(logging.WARNING)
Expand All @@ -67,8 +68,11 @@ async def run_sync_test(file: Path, db_version, profile: bool, single_thread: bo
chia_init(root_path, should_check_keys=False, v1_db=(db_version == 1))
config = load_config(root_path, "config.yaml")

overrides = config["network_overrides"]["constants"][config["selected_network"]]
constants = DEFAULT_CONSTANTS.replace_str_to_bytes(**overrides)
if test_constants:
constants = TEST_CONSTANTS
else:
overrides = config["network_overrides"]["constants"][config["selected_network"]]
constants = DEFAULT_CONSTANTS.replace_str_to_bytes(**overrides)
if single_thread:
config["full_node"]["single_threaded"] = True
config["full_node"]["db_sync"] = "off"
Expand All @@ -85,7 +89,7 @@ async def run_sync_test(file: Path, db_version, profile: bool, single_thread: bo
counter = 0
height = 0
async with aiosqlite.connect(file) as in_db:

await in_db.execute("pragma query_only")
rows = await in_db.execute(
"SELECT header_hash, height, block FROM full_blocks WHERE in_main_chain=1 ORDER BY height"
)
Expand Down Expand Up @@ -135,18 +139,25 @@ def main() -> None:
@click.argument("file", type=click.Path(), required=True)
@click.option("--db-version", type=int, required=False, default=2, help="the DB version to use in simulated node")
@click.option("--profile", is_flag=True, required=False, default=False, help="dump CPU profiles for slow batches")
@click.option(
"--test-constants",
is_flag=True,
required=False,
default=False,
help="expect the blockchain database to be blocks using the test constants",
)
@click.option(
"--single-thread",
is_flag=True,
required=False,
default=False,
help="run node in a single process, to include validation in profiles",
)
def run(file: Path, db_version: int, profile: bool, single_thread: bool) -> None:
def run(file: Path, db_version: int, profile: bool, single_thread: bool, test_constants: bool) -> None:
"""
The FILE parameter should point to an existing blockchain database file (in v2 format)
"""
asyncio.run(run_sync_test(Path(file), db_version, profile, single_thread))
asyncio.run(run_sync_test(Path(file), db_version, profile, single_thread, test_constants))


@main.command("analyze", short_help="generate call stacks for all profiles dumped to current directory")
Expand Down

0 comments on commit 511c13e

Please sign in to comment.