pytest: migrate away from using WASM blobs (#4418)
To reduce reliance on blobs stored in the repository, convert all tests
which use the hello.wasm blob to use test-contract-rs instead (which is
built from source).

With that change, remove the add.wasm, hello.wasm and singlepass_crash.wasm
files, which are no longer used.

Fixes: #4408
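
The migration is the same at every call site: `load_binary_file('testdata/hello.wasm')` becomes `load_test_contract()`, and calls to the blob's JSON-based `setKeyValue` method become calls to the Rust test contract's `write_key_value`, which takes two little-endian u64 values. Below is a minimal sketch of the migrated pattern, condensed from the diffs that follow (helper names, gas amounts and the 42/10 key-value pair are taken from the new test code; this is an illustration, not the exact test source):

```python
import base58
import struct
import sys

sys.path.append('lib')
from cluster import start_cluster
from utils import load_test_contract
import transaction

# Spin up a small cluster, as the tests below do.
nodes = start_cluster(
    2, 0, 1, None,
    [["epoch_length", 1000], ["block_producer_kickout_threshold", 80]], {})

latest_block_hash = nodes[0].get_status()['sync_info']['latest_block_hash']
block_hash = base58.b58decode(latest_block_hash.encode('utf8'))

# Before: contract = load_binary_file('testdata/hello.wasm')
contract = load_test_contract()  # test-contract-rs, built from source

deploy_tx = transaction.sign_deploy_contract_tx(
    nodes[0].signer_key, contract, 10, block_hash)
assert 'error' not in nodes[0].send_tx_and_wait(deploy_tx, 15)

# Before: 'setKeyValue' with JSON args {"key": "my_key", "value": "my_value"}.
# After: 'write_key_value' with two little-endian u64s (key=42, value=10).
call_tx = transaction.sign_function_call_tx(
    nodes[0].signer_key, nodes[0].signer_key.account_id, 'write_key_value',
    struct.pack('<QQ', 42, 10), 300000000000000, 100000000000, 20, block_hash)
assert 'error' not in nodes[0].send_tx_and_wait(call_tx, 15)
```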
mina86 authored Jun 28, 2021
1 parent cfed276 commit 7e0fe60
Showing 6 changed files with 118 additions and 107 deletions.
Binary file removed integration-tests/tests/add.wasm
Binary file removed integration-tests/tests/singlepass_crash.wasm
Binary file removed pytest/testdata/hello.wasm
52 changes: 23 additions & 29 deletions pytest/tests/sanity/rpc_light_client_execution_outcome_proof.py
@@ -3,14 +3,15 @@
# the transaction and receipts execution outcome proof for
# light client works

import sys
import base58, base64
import json
import hashlib
import json
import struct
import sys

sys.path.append('lib')
from cluster import start_cluster, Key
from utils import load_binary_file, compute_merkle_root_from_path
from utils import load_test_contract, compute_merkle_root_from_path
from serializer import BinarySerializer
import transaction
import time
@@ -113,36 +114,15 @@ def serialize_execution_outcome_with_id(outcome, id):
return borsh_res


nodes = start_cluster(
2, 0, 1, None,
[["epoch_length", 1000], ["block_producer_kickout_threshold", 80]], {}
)

# deploy a smart contract for testing
contract_key = nodes[0].signer_key
hello_smart_contract = load_binary_file('testdata/hello.wasm')

status = nodes[0].get_status()
latest_block_hash = status['sync_info']['latest_block_hash']
deploy_contract_tx = transaction.sign_deploy_contract_tx(
contract_key, hello_smart_contract, 10,
base58.b58decode(latest_block_hash.encode('utf8')))
deploy_contract_response = nodes[0].send_tx_and_wait(deploy_contract_tx, 15)
assert 'error' not in deploy_contract_response, deploy_contract_response


def check_transaction_outcome_proof(should_succeed, nonce):
def check_transaction_outcome_proof(nodes, should_succeed, nonce):
status = nodes[1].get_status()
latest_block_hash = status['sync_info']['latest_block_hash']
function_caller_key = nodes[0].signer_key
gas = 300000000000000 if should_succeed else 1000

function_call_1_tx = transaction.sign_function_call_tx(
function_caller_key, contract_key.account_id, 'setKeyValue',
json.dumps({
"key": "my_key",
"value": "my_value"
}).encode('utf-8'), gas, 100000000000, nonce,
function_caller_key, nodes[0].signer_key.account_id, 'write_key_value',
struct.pack('<QQ', 42, 10), gas, 100000000000, nonce,
base58.b58decode(latest_block_hash.encode('utf8')))
function_call_result = nodes[1].send_tx_and_wait(function_call_1_tx, 15)
assert 'error' not in function_call_result
@@ -204,5 +184,19 @@ def check_transaction_outcome_proof(should_succeed, nonce):
'block_merkle_root']) == block_merkle_root, f'expected block merkle root {light_client_block["inner_lite"]["block_merkle_root"]} actual {base58.b58encode(block_merkle_root)}'


check_transaction_outcome_proof(True, 20)
check_transaction_outcome_proof(False, 30)
def test_outcome_proof():
nodes = start_cluster(
2, 0, 1, None,
[["epoch_length", 1000], ["block_producer_kickout_threshold", 80]], {}
)

status = nodes[0].get_status()
latest_block_hash = status['sync_info']['latest_block_hash']
deploy_contract_tx = transaction.sign_deploy_contract_tx(
nodes[0].signer_key, load_test_contract(), 10,
base58.b58decode(latest_block_hash.encode('utf8')))
deploy_contract_response = nodes[0].send_tx_and_wait(deploy_contract_tx, 15)
assert 'error' not in deploy_contract_response, deploy_contract_response

check_transaction_outcome_proof(nodes, True, 20)
check_transaction_outcome_proof(nodes, False, 30)
92 changes: 45 additions & 47 deletions pytest/tests/sanity/rpc_state_changes.py
@@ -2,17 +2,18 @@
# and call various scenarios to trigger store changes.
# Check that the key changes are observable via `changes` RPC call.

import sys
import base58, base64
import json
import struct
import sys
import threading

import deepdiff

sys.path.append('lib')
from cluster import start_cluster
from key import Key
from utils import load_binary_file
from utils import load_test_contract
import transaction

nodes = start_cluster(
@@ -325,18 +326,18 @@ def test_key_value_changes():
1. Deploy a contract.
2. Observe the code changes in the block where the transaction outcome "lands".
3. Send two transactions to be included into the same block setting and overriding the value of
the same key (`my_key`).
the same key.
4. Observe the changes in the block where the transaction outcome "lands".
"""

contract_key = nodes[0].signer_key
hello_smart_contract = load_binary_file('testdata/hello.wasm')
contract_blob = load_test_contract()

# Step 1
status = nodes[0].get_status()
latest_block_hash = status['sync_info']['latest_block_hash']
deploy_contract_tx = transaction.sign_deploy_contract_tx(
contract_key, hello_smart_contract, 10,
contract_key, contract_blob, 10,
base58.b58decode(latest_block_hash.encode('utf8')))
deploy_contract_response = nodes[0].send_tx_and_wait(deploy_contract_tx, 10)

@@ -401,7 +402,7 @@ def test_key_value_changes():
"account_id":
contract_key.account_id,
"code_base64":
base64.b64encode(hello_smart_contract).decode('utf-8'),
base64.b64encode(contract_blob).decode('utf-8'),
}
},]
}
@@ -429,40 +430,37 @@ def test_key_value_changes():
latest_block_hash = status['sync_info']['latest_block_hash']
function_caller_key = nodes[0].signer_key

def set_value_1():
function_call_1_tx = transaction.sign_function_call_tx(
function_caller_key, contract_key.account_id, 'setKeyValue',
json.dumps({
"key": "my_key",
"value": "my_value_1"
}).encode('utf-8'), 300000000000000, 100000000000, 20,
base58.b58decode(latest_block_hash.encode('utf8')))
nodes[1].send_tx_and_wait(function_call_1_tx, 10)

function_call_1_thread = threading.Thread(target=set_value_1)
function_call_1_thread.start()

function_call_2_tx = transaction.sign_function_call_tx(
function_caller_key, contract_key.account_id, 'setKeyValue',
json.dumps({
"key": "my_key",
"value": "my_value_2"
}).encode('utf-8'), 300000000000000, 100000000000, 30,
base58.b58decode(latest_block_hash.encode('utf8')))
function_call_2_response = nodes[1].send_tx_and_wait(function_call_2_tx, 10)
assert function_call_2_response['result']['receipts_outcome'][0]['outcome']['status'] == {'SuccessValue': ''}, \
"Expected successful execution, but the output was: %s" % function_call_2_response
function_call_1_thread.join()
key = struct.pack('<Q', 42)
key_base64 = base64.b64encode(key).decode('ascii')

tx_block_hash = function_call_2_response['result']['transaction_outcome'][
'block_hash']
def set_value(value, *, nonce):
args = key + struct.pack('<Q', value)
tx = transaction.sign_function_call_tx(
function_caller_key, contract_key.account_id,
'write_key_value', args, 300000000000000, 100000000000, nonce,
base58.b58decode(latest_block_hash.encode('utf8')))
response = nodes[1].send_tx_and_wait(tx, 10)
try:
status = response['result']['receipts_outcome'][0]['outcome'][
'status']
except (KeyError, IndexError):
status = ()
assert 'SuccessValue' in status, (
"Expected successful execution, but the output was: %s" % response)
return response

thread = threading.Thread(target=lambda: set_value(10, nonce=20))
thread.start()
response = set_value(20, nonce=30)
thread.join()

tx_block_hash = response['result']['transaction_outcome']['block_hash']

# Step 4
assert_changes_in_block_response(
request={"block_id": tx_block_hash},
expected_response={
"block_hash":
tx_block_hash,
"block_hash": tx_block_hash,
"changes": [
{
"type": "account_touched",
@@ -482,7 +480,7 @@ def set_value_1():
base_request = {
"block_id": block_hash,
"changes_type": "data_changes",
"key_prefix_base64": base64.b64encode(b"my_key").decode('utf-8'),
"key_prefix_base64": key_base64,
}
for request in [
# Test empty account_ids
@@ -502,7 +500,7 @@ def set_value_1():
**base_request,
"account_ids": [contract_key.account_id],
"key_prefix_base64":
base64.b64encode(b"my_key_with_extra").decode('utf-8'),
base64.b64encode(struct.pack('<Q', 24)).decode('ascii'),
},
]:
assert_changes_response(request=request,
@@ -513,39 +511,39 @@ def set_value_1():

# Test happy-path
expected_response = {
"block_hash":
tx_block_hash,
"block_hash": tx_block_hash,
"changes": [{
"cause": {
"type": "receipt_processing",
},
"type": "data_update",
"change": {
"account_id": contract_key.account_id,
"key_base64": base64.b64encode(b"my_key").decode('utf-8'),
"value_base64": base64.b64encode(b"my_value_1").decode('utf-8'),
"key_base64": key_base64,
"value_base64": base64.b64encode(
struct.pack('<Q', 10)).decode('ascii'),
}
}, {
"cause": {
"type":
"receipt_processing",
"receipt_hash":
function_call_2_response["result"]["receipts_outcome"][0]
["id"],
response["result"]["receipts_outcome"][0]["id"],
},
"type": "data_update",
"change": {
"account_id": contract_key.account_id,
"key_base64": base64.b64encode(b"my_key").decode('utf-8'),
"value_base64": base64.b64encode(b"my_value_2").decode('utf-8'),
"key_base64": key_base64,
"value_base64": base64.b64encode(
struct.pack('<Q', 20)).decode('ascii'),
}
}]
}

base_request = {
"block_id": tx_block_hash,
"changes_type": "data_changes",
"key_prefix_base64": base64.b64encode(b"my_key").decode('utf-8'),
"key_prefix_base64": key_base64,
}
for request in [
{
@@ -561,12 +559,12 @@ def set_value_1():
{
**base_request,
"account_ids": [contract_key.account_id],
"key_prefix_base64": base64.b64encode(b"").decode('utf-8'),
"key_prefix_base64": '',
},
{
**base_request,
"account_ids": [contract_key.account_id],
"key_prefix_base64": base64.b64encode(b"my_ke").decode('utf-8'),
"key_prefix_base64": base64.b64encode(key[:3]).decode('ascii'),
},
]:
assert_changes_response(
81 changes: 50 additions & 31 deletions pytest/tests/sanity/rpc_tx_status.py
@@ -1,46 +1,65 @@
import sys
import base58
import json
import struct
import sys

sys.path.append('lib')
from cluster import start_cluster, Key
from utils import load_binary_file
from utils import load_test_contract
import transaction


def submit_tx_and_check(node, tx):
def submit_tx_and_check(nodes, node_index, tx):
node = nodes[node_index]
res = node.send_tx_and_wait(tx, timeout=20)
assert 'error' not in res, res

tx_hash = res['result']['transaction']['hash']
query_res = nodes[0].json_rpc('EXPERIMENTAL_tx_status', [tx_hash, 'test0'])
assert 'error' not in query_res, query_res
receipt_id_from_outcomes = set(map(lambda x: x['id'], query_res['result']['receipts_outcome']))
receipt_id_from_receipts = set(map(lambda x: x['receipt_id'], query_res['result']['receipts']))
is_local_receipt = res['result']['transaction']['signer_id'] == res['result']['transaction']['receiver_id']

receipt_id_from_outcomes = set(
outcome['id'] for outcome in query_res['result']['receipts_outcome'])
receipt_id_from_receipts = set(
rec['receipt_id'] for rec in query_res['result']['receipts'])

is_local_receipt = (res['result']['transaction']['signer_id'] ==
res['result']['transaction']['receiver_id'])
if is_local_receipt:
receipt_id_from_outcomes.remove(res['result']['transaction_outcome']['outcome']['receipt_ids'][0])
assert receipt_id_from_outcomes == receipt_id_from_receipts, f'receipt id from outcomes {receipt_id_from_outcomes}, receipt id from receipts {receipt_id_from_receipts} '


nodes = start_cluster(
4, 0, 1, None,
[["epoch_length", 1000], ["block_producer_kickout_threshold", 80], ["transaction_validity_period", 10000]], {})

status = nodes[0].get_status()
block_hash = status['sync_info']['latest_block_hash']
print("1")
payment_tx = transaction.sign_payment_tx(nodes[0].signer_key, 'test1', 100, 1,
base58.b58decode(block_hash.encode('utf8')))
submit_tx_and_check(nodes[0], payment_tx)

print("2")
deploy_contract_tx = transaction.sign_deploy_contract_tx(nodes[0].signer_key, load_binary_file('testdata/hello.wasm'), 2, base58.b58decode(block_hash.encode('utf8')))
submit_tx_and_check(nodes[0], deploy_contract_tx)

print("3")
function_call_tx = transaction.sign_function_call_tx(nodes[0].signer_key, nodes[0].signer_key.account_id, 'setKeyValue', json.dumps({
"key": "my_key",
"value": "my_value_1"
}).encode('utf-8'), 300000000000000, 0, 3, base58.b58decode(block_hash.encode('utf8')))
submit_tx_and_check(nodes[0], deploy_contract_tx)
receipt_id_from_outcomes.remove(
res['result']['transaction_outcome']['outcome']['receipt_ids'][0])

assert receipt_id_from_outcomes == receipt_id_from_receipts, (
f'receipt id from outcomes {receipt_id_from_outcomes}, '
f'receipt id from receipts {receipt_id_from_receipts}')


def test_tx_status():
nodes = start_cluster(4, 0, 1, None,
[["epoch_length", 1000],
["block_producer_kickout_threshold", 80],
["transaction_validity_period", 10000]],
{})

signer_key = nodes[0].signer_key
status = nodes[0].get_status()
block_hash = status['sync_info']['latest_block_hash']
encoded_block_hash = base58.b58decode(block_hash.encode('ascii'))

payment_tx = transaction.sign_payment_tx(
signer_key, 'test1', 100, 1, encoded_block_hash)
submit_tx_and_check(nodes, 0, payment_tx)

deploy_contract_tx = transaction.sign_deploy_contract_tx(
signer_key, load_test_contract(), 2, encoded_block_hash)
submit_tx_and_check(nodes, 0, deploy_contract_tx)

function_call_tx = transaction.sign_function_call_tx(
signer_key, signer_key.account_id,
'write_key_value', struct.pack('<QQ', 42, 24),
300000000000000, 0, 3, encoded_block_hash)
submit_tx_and_check(nodes, 0, function_call_tx)


if __name__ == '__main__':
test_tx_status()
