Skip to content

Commit

Permalink
Merge branch 'master' into check_off_chain_drep_data
Browse files Browse the repository at this point in the history
  • Loading branch information
mkoura authored Aug 14, 2024
2 parents 0123e4c + 945c368 commit 9a07eaf
Show file tree
Hide file tree
Showing 12 changed files with 259 additions and 45 deletions.
25 changes: 25 additions & 0 deletions cardano_node_tests/tests/reqs_conway.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,3 +221,28 @@ def __r(id: str) -> requirements.Req:
gr027 = __r("GR027")
gr028 = __r("GR028")
gr029 = __r("GR029")

# DB Sync Conway related tables
# https://github.com/IntersectMBO/cardano-db-sync/blob/master/doc/schema.md
db001 = __r("drep_hash")
db002 = __r("committee_hash")
db003 = __r("delegation_vote")
db004 = __r("committee_registration")
db005 = __r("committee_de_registration")
db006 = __r("drep_registration")
db007 = __r("voting_anchor")
db008 = __r("gov_action_proposal")
db009 = __r("treasury_withdrawal")
db010 = __r("committee")
db011 = __r("committee_member")
db012 = __r("constitution")
db013 = __r("voting_procedure")
db014 = __r("drep_distr")
db015 = __r("off_chain_vote_data")
db016 = __r("off_chain_vote_drep_data")
db017 = __r("off_chain_vote_author")
db018 = __r("off_chain_vote_reference")
# NOTE(review): duplicates db015's id "off_chain_vote_data" — the db-sync schema
# also has a distinct "off_chain_vote_gov_action_data" table, which this was
# presumably meant to reference. Confirm against doc/schema.md before relying
# on db019 as a separate requirement.
db019 = __r("off_chain_vote_data")
db020 = __r("off_chain_vote_external_update")
db021 = __r("off_chain_vote_fetch_error")
db022 = __r("reward_rest")
9 changes: 5 additions & 4 deletions cardano_node_tests/tests/test_dbsync.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,12 +225,13 @@ def test_blocks(self, cluster: clusterlib.ClusterLib): # noqa: C901
def test_cost_model(self, cluster: clusterlib.ClusterLib):
"""Check expected values in the `cost_model` table in db-sync."""
common.get_test_id(cluster)
curr_epoch = cluster.g_query.get_epoch()

db_cost_models = dbsync_queries.query_cost_model()
# wait till next epoch if the cost models are not yet available
db_cost_models = dbsync_queries.query_cost_model(epoch_no=curr_epoch)
# Wait till next epoch if the cost models are not yet available
if not db_cost_models:
cluster.wait_for_new_epoch(padding_seconds=5)
db_cost_models = dbsync_queries.query_cost_model()
curr_epoch = cluster.wait_for_new_epoch(padding_seconds=5)
db_cost_models = dbsync_queries.query_cost_model(epoch_no=curr_epoch)

protocol_params = cluster.g_query.get_protocol_params()

Expand Down
14 changes: 3 additions & 11 deletions cardano_node_tests/tests/test_pools.py
Original file line number Diff line number Diff line change
Expand Up @@ -649,22 +649,14 @@ def test_stake_pool_metadata(
temp_template = f"{common.get_test_id(cluster)}_{rand_str}"

pool_name = "test_stake_pool_metadata"
pool_metadata = {
"name": "test_stake_pool_metadata",
"description": "cardano-node-tests E2E tests",
"ticker": "IOG1",
"homepage": "https://github.com/input-output-hk/cardano-node-tests",
}
pool_metadata_file = helpers.write_json(
out_file=f"{pool_name}_registration_metadata.json", content=pool_metadata
)
pool_metadata_file = DATA_DIR / "pool_metadata.json"

pool_data = clusterlib.PoolData(
pool_name=pool_name,
pool_pledge=1_000,
pool_cost=cluster.g_query.get_protocol_params().get("minPoolCost", 500),
pool_margin=0.2,
pool_metadata_url="https://bit.ly/3HvWQAy",
pool_metadata_url="https://tinyurl.com/yvkfs7pr",
pool_metadata_hash=cluster.g_stake_pool.gen_pool_metadata_hash(pool_metadata_file),
)

Expand Down Expand Up @@ -699,7 +691,7 @@ def test_stake_pool_metadata(

# check dbsync `PoolOfflineData` table
if configuration.HAS_DBSYNC:
pool_params = cluster.g_query.get_pool_params(
pool_params = cluster.g_query.get_pool_state(
stake_pool_id=pool_creation_out.stake_pool_id
).pool_params

Expand Down
18 changes: 12 additions & 6 deletions cardano_node_tests/tests/test_staking_rewards.py
Original file line number Diff line number Diff line change
Expand Up @@ -1001,13 +1001,19 @@ def _mir_tx(fund_src: str) -> clusterlib.TxRawOutput:

if repoch <= init_epoch + 1:
assert rtypes_set == {"leader"}
if repoch == init_epoch + 2:
assert rtypes_set == {"reserves", "leader", "member"}
if repoch == init_epoch + 3:
assert rtypes_set == {"treasury", "leader", "member"}
if init_epoch + 4 <= repoch <= 6:
elif repoch == init_epoch + 2:
expected_set = (
{"reserves", "leader", "member"} if mir_reward else {"leader", "member"}
)
assert rtypes_set == expected_set
elif repoch == init_epoch + 3:
expected_set = (
{"treasury", "leader", "member"} if mir_reward else {"leader", "member"}
)
assert rtypes_set == expected_set
elif init_epoch + 4 <= repoch <= 6:
assert rtypes_set == {"leader", "member"}
if repoch > init_epoch + 6:
elif repoch > init_epoch + 6:
assert rtypes_set == {"leader"}

@allure.link(helpers.get_vcs_link())
Expand Down
13 changes: 10 additions & 3 deletions cardano_node_tests/tests/tests_conway/test_committee.py
Original file line number Diff line number Diff line change
Expand Up @@ -259,7 +259,10 @@ def test_update_committee_action(
reqc.cip007.success()

# Check dbsync
_url = helpers.get_vcs_link()
[r.start(url=_url) for r in (reqc.db010, reqc.db011)]
dbsync_utils.check_committee_info(gov_state=gov_state, txid=txid)
[r.success() for r in (reqc.db010, reqc.db011)]

@allure.link(helpers.get_vcs_link())
@pytest.mark.long
Expand Down Expand Up @@ -369,16 +372,19 @@ def test_add_rm_committee_members( # noqa: C901
epoch=cc_member1_expire,
cold_vkey_file=cc_auth_record1.cold_key_pair.vkey_file,
cold_skey_file=cc_auth_record1.cold_key_pair.skey_file,
cold_vkey_hash=cc_auth_record1.key_hash,
),
clusterlib.CCMember(
epoch=cluster.g_query.get_epoch() + 5,
cold_vkey_file=cc_auth_record2.cold_key_pair.vkey_file,
cold_skey_file=cc_auth_record2.cold_key_pair.skey_file,
cold_vkey_hash=cc_auth_record2.key_hash,
),
clusterlib.CCMember(
epoch=cluster.g_query.get_epoch() + 5,
cold_vkey_file=cc_auth_record3.cold_key_pair.vkey_file,
cold_skey_file=cc_auth_record3.cold_key_pair.skey_file,
cold_vkey_hash=cc_auth_record3.key_hash,
),
]

Expand Down Expand Up @@ -682,10 +688,8 @@ def _check_add_state(gov_state: tp.Dict[str, tp.Any]):
assert cc_member_val == cc_members[i].epoch

def _check_resign_dbsync(res_member: clusterlib.CCMember) -> None:
auth_committee_state = cluster.g_conway_governance.query.committee_state()
dbsync_utils.check_committee_member_registration(
cc_member_cold_key=res_member.cold_vkey_hash,
committee_state=auth_committee_state,
cc_member_cold_key=res_member.cold_vkey_hash
)
dbsync_utils.check_committee_member_deregistration(
cc_member_cold_key=res_member.cold_vkey_hash
Expand Down Expand Up @@ -971,7 +975,10 @@ def _check_resign_dbsync(res_member: clusterlib.CCMember) -> None:
_resign_member(res_member=cc_members[2])
dbsync_resign_err = ""
try:
_url = helpers.get_vcs_link()
[r.start(url=_url) for r in (reqc.db002, reqc.db004, reqc.db005)]
_check_resign_dbsync(res_member=cc_members[2])
[r.success() for r in (reqc.db002, reqc.db004, reqc.db005)]
except Exception as excp:
dbsync_resign_err = str(excp)

Expand Down
4 changes: 4 additions & 0 deletions cardano_node_tests/tests/tests_conway/test_drep.py
Original file line number Diff line number Diff line change
Expand Up @@ -383,16 +383,20 @@ def test_register_and_retire_drep(
), "Unexpected metadata hash"
assert metadata_anchor["url"] == drep_metadata_url, "Unexpected metadata url"
try:
_url = helpers.get_vcs_link()
[r.start(url=_url) for r in (reqc.db001, reqc.db006)]
drep_data = dbsync_utils.check_drep_registration(
drep=reg_drep, drep_state=reg_drep_state
)
[r.success() for r in (reqc.db002, reqc.db006)]

def _query_func():
dbsync_utils.check_off_chain_drep_registration(
drep_data=drep_data, metadata=drep_metadata_content
)

dbsync_utils.retry_query(query_func=_query_func, timeout=300)

except AssertionError as exc:
str_exc = str(exc)
errors_final.append(f"DB-Sync unexpected DRep registration error: {str_exc}")
Expand Down
2 changes: 2 additions & 0 deletions cardano_node_tests/tests/tests_conway/test_info.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,9 @@ def test_info(
reqc.cli022.success()

# Check dbsync
reqc.db013.start(url=helpers.get_vcs_link())
dbsync_utils.check_votes(
votes=governance_utils.VotedVotes(cc=votes_cc, drep=votes_drep, spo=votes_spo),
txhash=vote_txid,
)
reqc.db013.success()
2 changes: 2 additions & 0 deletions cardano_node_tests/tests/tests_conway/test_pparam_update.py
Original file line number Diff line number Diff line change
Expand Up @@ -734,11 +734,13 @@ def _check_proposed_pparams(

# db-sync check
try:
reqc.db008.start(url=helpers.get_vcs_link())
dbsync_utils.check_conway_gov_action_proposal_description(
update_proposal=net_nodrep_prop_rec.future_pparams,
txhash=net_nodrep_prop_rec.action_txid,
)
dbsync_utils.check_conway_param_update_proposal(net_nodrep_prop_rec.future_pparams)
reqc.db008.success()
except AssertionError as exc:
db_errors_final.append(f"db-sync network params update error: {exc}")

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -356,7 +356,8 @@ def _cast_vote(
governance_utils.check_vote_view(cluster_obj=cluster, vote_data=voted_votes.cc[0])
governance_utils.check_vote_view(cluster_obj=cluster, vote_data=voted_votes.drep[0])

reqc.cip084.start(url=helpers.get_vcs_link())
_url = helpers.get_vcs_link()
[r.start(url=_url) for r in (reqc.cip084, reqc.db009, reqc.db022)]
# Check dbsync
dbsync_utils.check_treasury_withdrawal(
actions_num=actions_num,
Expand All @@ -370,7 +371,7 @@ def _cast_vote(
transfer_amt=transfer_amt,
)

reqc.cip084.success()
[r.success() for r in (reqc.cip084, reqc.db009, reqc.db022)]

if xfail_ledger_3979_msgs:
ledger_3979 = issues.ledger_3979.copy()
Expand Down
22 changes: 16 additions & 6 deletions cardano_node_tests/utils/dbsync_queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -1121,19 +1121,29 @@ def query_datum(datum_hash: str) -> tp.Generator[DatumDBRow, None, None]:
yield DatumDBRow(*result)


def query_cost_model(model_id: int = -1) -> tp.Dict[str, tp.Dict[str, tp.Any]]:
"""Query last cost model record (if id not specified) in db-sync."""
query = "SELECT * FROM cost_model ORDER BY ID DESC LIMIT 1"
def query_cost_model(model_id: int = -1, epoch_no: int = -1) -> tp.Dict[str, tp.Dict[str, tp.Any]]:
"""Query cost model record in db-sync.
If `model_id` is specified, query the cost model that corresponds to the given id.
If `epoch_no` is specified, query the cost model used in the given epoch.
Otherwise query the latest cost model.
"""
query_var: tp.Union[int, str]

if model_id != -1:
id_query = "WHERE id = %s "
subquery = "WHERE cm.id = %s "
query_var = model_id
elif epoch_no != -1:
subquery = (
"INNER JOIN epoch_param ON epoch_param.cost_model_id = cm.id "
"WHERE epoch_param.epoch_no = %s "
)
query_var = epoch_no
else:
id_query = ""
subquery = ""
query_var = ""

query = f"SELECT * FROM cost_model {id_query} ORDER BY ID DESC LIMIT 1"
query = f"SELECT * FROM cost_model AS cm {subquery} ORDER BY cm.id DESC LIMIT 1"

with execute(query=query, vars=(query_var,)) as cur:
results = cur.fetchone()
Expand Down
28 changes: 15 additions & 13 deletions cardano_node_tests/utils/dbsync_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -955,7 +955,7 @@ def check_conway_param_update_proposal(

# Get cost models
if param_proposal_db.cost_model_id:
db_cost_model = dbsync_queries.query_cost_model(param_proposal_db.cost_model_id)
db_cost_model = dbsync_queries.query_cost_model(model_id=param_proposal_db.cost_model_id)
pp_cost_model = param_proposal_ledger.get("costModels")
if db_cost_model != pp_cost_model:
failures.append(f"Cost model mismatch for {db_cost_model}. Expected: {pp_cost_model}")
Expand Down Expand Up @@ -1010,26 +1010,30 @@ def check_proposal_refunds(stake_address: str, refunds_num: int) -> None:


def check_conway_gov_action_proposal_description(
update_proposal: dict, txhash: str = ""
update_proposal: dict, txhash: str = "", action_ix: int = 0
) -> tp.Optional[dbsync_queries.GovActionProposalDBRow]:
"""Check expected values in the param proposal table in db-sync."""
"""Check expected values in the gov_action_proposal table in db-sync."""
if not configuration.HAS_DBSYNC:
return None

db_gov_action = get_gov_action_proposals(txhash=txhash).pop()
db_gov_prop_desc = db_gov_action.description["contents"][1]
gov_actions_all = get_gov_action_proposals(txhash=txhash)
assert gov_actions_all, "No data returned from db-sync for gov action proposal"
assert len(gov_actions_all) >= action_ix + 1, "Unexpected number of gov actions"

gov_action = gov_actions_all[action_ix]
db_gov_prop_desc = gov_action.description["contents"][1]

if db_gov_prop_desc != update_proposal:
msg = f"Comparison {db_gov_prop_desc} failed in db-sync:\n" f"Expected {update_proposal}"
raise AssertionError(msg)
return db_gov_action
return gov_action


def get_gov_action_proposals(
txhash: str = "", type: str = ""
) -> tp.List[dbsync_queries.GovActionProposalDBRow]:
"""Get goverment action proposal from db-sync."""
gov_action_proposals = list(dbsync_queries.query_gov_action_proposal(txhash, type))
"""Get government action proposal from db-sync."""
gov_action_proposals = list(dbsync_queries.query_gov_action_proposal(txhash=txhash, type=type))
return gov_action_proposals


Expand All @@ -1053,19 +1057,17 @@ def get_committee_member(cold_key: str) -> tp.Optional[dbsync_types.CommitteeReg


def check_committee_member_registration(
cc_member_cold_key: str, committee_state: tp.Dict[str, tp.Any]
cc_member_cold_key: str,
) -> tp.Optional[dbsync_types.CommitteeRegistrationRecord]:
"""Check committee member registration in db-sync."""
if not configuration.HAS_DBSYNC:
return None

cc_member_data = get_committee_member(cold_key=cc_member_cold_key)
member_key = f"keyHash-{cc_member_cold_key}"

assert cc_member_data, f"No data returned from db-sync for CC Member {member_key}"
assert cc_member_data, "No data returned from db-sync"
assert (
committee_state["committee"][member_key]["hotCredsAuthStatus"]["contents"]["keyHash"]
== cc_member_data.hot_key
cc_member_data.cold_key == cc_member_cold_key
), "CC Member not present in registration table in db-sync"

return cc_member_data
Expand Down
Loading

0 comments on commit 9a07eaf

Please sign in to comment.