[Protocol Change 2] New Transaction Format #2189

Draft · wants to merge 16 commits into base: develop
5 changes: 3 additions & 2 deletions counterparty-core/counterpartycore/lib/api/api_server.py
@@ -322,8 +322,9 @@
 except Exception as e:
     capture_exception(e)
     logger.error("Error in API: %s", e)
-    # import traceback
-    # print(traceback.format_exc()) # for debugging
+    import traceback

Check warning (Code scanning / pylint): Import outside toplevel (traceback).

+    print(traceback.format_exc()) # for debugging
     return return_result(
         503, error="Unknown error", start_time=start_time, query_args=query_args
     )
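Reviewer note on the hunk above: the change reinstates the previously commented-out traceback dump, and the pylint warning fires because the import now sits inside the except handler. A minimal sketch of the conventional alternative, assuming api_server.py has no circular-import reason to defer the import (the helper name here is hypothetical):

```python
import logging
import traceback  # hoisted to module top level; silences the pylint warning

logger = logging.getLogger(__name__)

def log_api_error(e: Exception) -> None:
    # Hypothetical helper mirroring the except-block above; call it from
    # inside the handler so format_exc() sees the active exception.
    logger.error("Error in API: %s", e)
    logger.error(traceback.format_exc())  # full stack trace, for debugging
```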
2 changes: 1 addition & 1 deletion counterparty-core/counterpartycore/lib/api/api_v1.py
@@ -955,7 +955,7 @@ def get_tx_info(tx_hex, block_index=None):
 @dispatcher.add_method
 def unpack(data_hex):
     data = binascii.unhexlify(data_hex)
-    message_type_id, message = message_type.unpack(data)
+    message_type_id, message = message_type.unpack(data)[0]
 
     # TODO: Enabled only for `send`.
     if message_type_id == send.ID:
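For context on the new `[0]` indexing: under this change `message_type.unpack` evidently returns a list of `(message_type_id, message)` pairs rather than a single pair, so single-message callers take the first element. A minimal sketch of that shape, with a hypothetical one-byte-ID decoder standing in for the real `message_type` module:

```python
import struct

# Hypothetical stand-in for message_type.unpack under the new format:
# it returns a list of (message_type_id, message) pairs so that one data
# payload can carry several messages.
def unpack(data: bytes) -> list[tuple[int, bytes]]:
    if len(data) < 1:
        raise struct.error("no message type")
    (message_type_id,) = struct.unpack(">B", data[:1])
    return [(message_type_id, data[1:])]  # one message in the simple case

# Single-message callers (as in api_v1.unpack above) index the first pair:
message_type_id, message = unpack(b"\x02payload")[0]
assert message_type_id == 2 and message == b"payload"
```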
155 changes: 78 additions & 77 deletions counterparty-core/counterpartycore/lib/blocks.py
@@ -125,88 +125,89 @@ def parse_tx(db, tx):

 if len(tx["data"]) > 1:
     try:
-        message_type_id, message = message_type.unpack(tx["data"], tx["block_index"])
-    except struct.error:  # Deterministically raised.
-        message_type_id = None
-        message = None
+        messages = message_type.unpack(tx["data"], tx["block_index"])
+    except (struct.error, TypeError):  # Deterministically raised.
+        messages = [(None, None)]
 else:
-    message_type_id = None
-    message = None
+    messages = [(None, None)]
 
 # Protocol change.
 rps_enabled = tx["block_index"] >= 308500 or config.TESTNET or config.REGTEST
 
-supported = True
 
-if message_type_id == send.ID:
-    send.parse(db, tx, message)
-elif message_type_id == enhanced_send.ID and util.enabled(
-    "enhanced_sends", block_index=tx["block_index"]
-):
-    enhanced_send.parse(db, tx, message)
-elif message_type_id == mpma.ID and util.enabled(
-    "mpma_sends", block_index=tx["block_index"]
-):
-    mpma.parse(db, tx, message)
-elif message_type_id == order.ID:
-    order.parse(db, tx, message)
-elif message_type_id == btcpay.ID:
-    btcpay.parse(db, tx, message)
-elif message_type_id == issuance.ID or (
-    util.enabled("issuance_backwards_compatibility", block_index=tx["block_index"])
-    and message_type_id == issuance.LR_ISSUANCE_ID
-):
-    issuance.parse(db, tx, message, message_type_id)
-elif (
-    message_type_id == issuance.SUBASSET_ID
-    and util.enabled("subassets", block_index=tx["block_index"])
-) or (
-    util.enabled("issuance_backwards_compatibility", block_index=tx["block_index"])
-    and message_type_id == issuance.LR_SUBASSET_ID
-):
-    issuance.parse(db, tx, message, message_type_id)
-elif message_type_id == broadcast.ID:
-    broadcast.parse(db, tx, message)
-elif message_type_id == bet.ID:
-    bet.parse(db, tx, message)
-elif message_type_id == dividend.ID:
-    dividend.parse(db, tx, message)
-elif message_type_id == cancel.ID:
-    cancel.parse(db, tx, message)
-elif message_type_id == rps.ID and rps_enabled:
-    rps.parse(db, tx, message)
-elif message_type_id == rpsresolve.ID and rps_enabled:
-    rpsresolve.parse(db, tx, message)
-elif message_type_id == destroy.ID and util.enabled(
-    "destroy_reactivated", block_index=tx["block_index"]
-):
-    destroy.parse(db, tx, message)
-elif message_type_id == sweep.ID and util.enabled(
-    "sweep_send", block_index=tx["block_index"]
-):
-    sweep.parse(db, tx, message)
-elif message_type_id == dispenser.ID and util.enabled(
-    "dispensers", block_index=tx["block_index"]
-):
-    dispenser.parse(db, tx, message)
-elif message_type_id == dispenser.DISPENSE_ID and util.enabled(
-    "dispensers", block_index=tx["block_index"]
-):
-    dispense.parse(db, tx)
-elif message_type_id == fairminter.ID and util.enabled(
-    "fairminter", block_index=tx["block_index"]
-):
-    fairminter.parse(db, tx, message)
-elif message_type_id == fairmint.ID and util.enabled(
-    "fairminter", block_index=tx["block_index"]
-):
-    fairmint.parse(db, tx, message)
-elif message_type_id == utxo.ID and util.enabled(
-    "utxo_support", block_index=tx["block_index"]
-):
-    utxo.parse(db, tx, message)
-else:
-    supported = False
+supported = []
+
+for message_type_id, message in messages:
+    supported.append(True)
+    if message_type_id == send.ID:
+        send.parse(db, tx, message)
+    elif message_type_id == enhanced_send.ID and util.enabled(
+        "enhanced_sends", block_index=tx["block_index"]
+    ):
+        enhanced_send.parse(db, tx, message)
+    elif message_type_id == mpma.ID and util.enabled(
+        "mpma_sends", block_index=tx["block_index"]
+    ):
+        mpma.parse(db, tx, message)
+    elif message_type_id == order.ID:
+        order.parse(db, tx, message)
+    elif message_type_id == btcpay.ID:
+        btcpay.parse(db, tx, message)
+    elif message_type_id == issuance.ID or (
+        util.enabled("issuance_backwards_compatibility", block_index=tx["block_index"])
+        and message_type_id == issuance.LR_ISSUANCE_ID
+    ):
+        issuance.parse(db, tx, message, message_type_id)
+    elif (
+        message_type_id == issuance.SUBASSET_ID
+        and util.enabled("subassets", block_index=tx["block_index"])
+    ) or (
+        util.enabled("issuance_backwards_compatibility", block_index=tx["block_index"])
+        and message_type_id == issuance.LR_SUBASSET_ID
+    ):
+        issuance.parse(db, tx, message, message_type_id)
+    elif message_type_id == broadcast.ID:
+        broadcast.parse(db, tx, message)
+    elif message_type_id == bet.ID:
+        bet.parse(db, tx, message)
+    elif message_type_id == dividend.ID:
+        dividend.parse(db, tx, message)
+    elif message_type_id == cancel.ID:
+        cancel.parse(db, tx, message)
+    elif message_type_id == rps.ID and rps_enabled:
+        rps.parse(db, tx, message)
+    elif message_type_id == rpsresolve.ID and rps_enabled:
+        rpsresolve.parse(db, tx, message)
+    elif message_type_id == destroy.ID and util.enabled(
+        "destroy_reactivated", block_index=tx["block_index"]
+    ):
+        destroy.parse(db, tx, message)
+    elif message_type_id == sweep.ID and util.enabled(
+        "sweep_send", block_index=tx["block_index"]
+    ):
+        sweep.parse(db, tx, message)
+    elif message_type_id == dispenser.ID and util.enabled(
+        "dispensers", block_index=tx["block_index"]
+    ):
+        dispenser.parse(db, tx, message)
+    elif message_type_id == dispenser.DISPENSE_ID and util.enabled(
+        "dispensers", block_index=tx["block_index"]
+    ):
+        dispense.parse(db, tx)
+    elif message_type_id == fairminter.ID and util.enabled(
+        "fairminter", block_index=tx["block_index"]
+    ):
+        fairminter.parse(db, tx, message)
+    elif message_type_id == fairmint.ID and util.enabled(
+        "fairminter", block_index=tx["block_index"]
+    ):
+        fairmint.parse(db, tx, message)
+    elif message_type_id == utxo.ID and util.enabled(
+        "utxo_support", block_index=tx["block_index"]
+    ):
+        utxo.parse(db, tx, message)
+    else:
+        supported[-1] = False
+supported = any(supported)

 ledger.add_to_journal(
     db,
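The net effect of the rewrite above: `parse_tx` now walks every `(message_type_id, message)` pair instead of handling exactly one, and a transaction counts as supported if any of its messages dispatched. A reduced sketch of that aggregation logic (the dispatch table here is illustrative, not the real one):

```python
# Reduced model of the new supported-flag aggregation in parse_tx.
SEND_ID, ORDER_ID = 0, 10  # illustrative IDs only

def parse_messages(messages):
    supported = []
    for message_type_id, message in messages:
        supported.append(True)
        if message_type_id == SEND_ID:
            pass  # send.parse(db, tx, message) in the real code
        elif message_type_id == ORDER_ID:
            pass  # order.parse(db, tx, message)
        else:
            supported[-1] = False  # this message did not dispatch
    # A transaction is "supported" if at least one message parsed.
    return any(supported)

assert parse_messages([(SEND_ID, b""), (99, b"")]) is True
assert parse_messages([(None, None)]) is False  # the unpack-failure fallback
assert parse_messages([]) is False
```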
56 changes: 29 additions & 27 deletions counterparty-core/counterpartycore/lib/gettxinfo.py
@@ -30,25 +30,25 @@
     raise DecodeError("invalid OP_RETURN")
 
 
-def decode_opreturn(asm, decoded_tx):
+def decode_opreturn(asm, decoded_tx, block_index):
     chunk = get_opreturn(asm)
     chunk = arc4_decrypt(chunk, decoded_tx)
-    if chunk[: len(config.PREFIX)] == config.PREFIX:  # Data
-        destination, data = None, chunk[len(config.PREFIX) :]
+    if chunk[: len(util.prefix(block_index))] == util.prefix(block_index):  # Data
+        destination, data = None, chunk[len(util.prefix(block_index)) :]
     else:
         raise DecodeError("unrecognised OP_RETURN output")
 
     return destination, data
 
 
-def decode_checksig(asm, decoded_tx):
+def decode_checksig(asm, decoded_tx, block_index):
     pubkeyhash = script.get_checksig(asm)
     chunk = arc4_decrypt(pubkeyhash, decoded_tx)  # TODO: This is slow!
-    if chunk[1 : len(config.PREFIX) + 1] == config.PREFIX:  # Data
+    if chunk[1 : len(util.prefix(block_index)) + 1] == util.prefix(block_index):  # Data
         # Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data.
         chunk_length = chunk[0]
         chunk = chunk[1 : chunk_length + 1]
-        destination, data = None, chunk[len(config.PREFIX) :]
+        destination, data = None, chunk[len(util.prefix(block_index)) :]
     else:  # Destination
         pubkeyhash = binascii.hexlify(pubkeyhash).decode("utf-8")
         destination, data = script.base58_check_encode(pubkeyhash, config.ADDRESSVERSION), None
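The recurring substitution throughout this file replaces the constant `config.PREFIX` with `util.prefix(block_index)`: the data prefix becomes a block-height-dependent value rather than a fixed constant. The helper itself is not shown in this diff; a plausible sketch, where the new prefix bytes and the activation wiring are assumptions (only the `new_prefix_xcp1` flag name appears later in this file):

```python
# Sketch of a block-height-dependent prefix helper. The helper body and the
# new prefix bytes are assumptions; only "new_prefix_xcp1" comes from the diff.
OLD_PREFIX = b"CNTRPRTY"  # the historical 8-byte config.PREFIX

def prefix(block_index: int, activation_block: int = 999_999) -> bytes:
    """Return the data prefix in force at block_index."""
    if block_index >= activation_block:  # util.enabled("new_prefix_xcp1") in spirit
        return b"XCP1"  # hypothetical shorter prefix for the new format
    return OLD_PREFIX

# Callers slice by the prefix actually in force, never by a fixed width:
chunk = OLD_PREFIX + b"\x00payload"
p = prefix(100_000)
assert chunk[: len(p)] == p and chunk[len(p):] == b"\x00payload"
```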
@@ -63,17 +63,17 @@
     return destination, None
 
 
-def decode_checkmultisig(asm, decoded_tx):
+def decode_checkmultisig(asm, decoded_tx, block_index):
     pubkeys, signatures_required = script.get_checkmultisig(asm)
     chunk = b""
     for pubkey in pubkeys[:-1]:  # (No data in last pubkey.)
         chunk += pubkey[1:-1]  # Skip sign byte and nonce byte.
     chunk = arc4_decrypt(chunk, decoded_tx)
-    if chunk[1 : len(config.PREFIX) + 1] == config.PREFIX:  # Data
+    if chunk[1 : len(util.prefix(block_index)) + 1] == util.prefix(block_index):  # Data
         # Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data.
         chunk_length = chunk[0]
         chunk = chunk[1 : chunk_length + 1]
-        destination, data = None, chunk[len(config.PREFIX) :]
+        destination, data = None, chunk[len(util.prefix(block_index)) :]
     else:  # Destination
         pubkeyhashes = [script.pubkey_to_pubkeyhash(pubkey) for pubkey in pubkeys]
         destination, data = (
@@ -155,7 +155,7 @@
     return vout["value"], vout["script_pub_key"], is_segwit
 
 
-def get_transaction_sources(decoded_tx):
+def get_transaction_sources(decoded_tx, block_index):
     sources = []
     outputs_value = 0
 
@@ -167,11 +167,11 @@
         asm = script.script_to_asm(script_pubkey)
 
         if asm[-1] == OP_CHECKSIG:  # noqa: F405
-            new_source, new_data = decode_checksig(asm, decoded_tx)
+            new_source, new_data = decode_checksig(asm, decoded_tx, block_index)
             if new_data or not new_source:
                 raise DecodeError("data in source")
         elif asm[-1] == OP_CHECKMULTISIG:  # noqa: F405
-            new_source, new_data = decode_checkmultisig(asm, decoded_tx)
+            new_source, new_data = decode_checkmultisig(asm, decoded_tx, block_index)
             if new_data or not new_source:
                 raise DecodeError("data in source")
         elif asm[0] == OP_HASH160 and asm[-1] == OP_EQUAL and len(asm) == 3:  # noqa: F405
@@ -195,7 +195,7 @@
     return "-".join(sources), outputs_value
 
 
-def get_transaction_source_from_p2sh(decoded_tx, p2sh_is_segwit):
+def get_transaction_source_from_p2sh(decoded_tx, p2sh_is_segwit, block_index):
     p2sh_encoding_source = None
     data = b""
     outputs_value = 0
@@ -214,7 +214,7 @@
         asm = script.script_to_asm(vin["script_sig"])
 
         new_source, new_destination, new_data = p2sh_encoding.decode_p2sh_input(
-            asm, p2sh_is_segwit=prevout_is_segwit
+            asm, block_index, p2sh_is_segwit=prevout_is_segwit
         )
         # this could be a p2sh source address with no encoded data
         if new_data is None:
@@ -259,6 +259,8 @@
         fee = 0
         data = struct.pack(config.SHORT_TXTYPE_FORMAT, dispenser.DISPENSE_ID)
         data += b"\x00"
+        if util.enabled("new_prefix_xcp1"):
+            data = b"\x00\x02" + data
 
         if util.enabled("multiple_dispenses"):
             outs.append({"destination": out[0], "btc_amount": out[1], "out_index": out_index})
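One behavioral note on the hunk above: when `new_prefix_xcp1` is active, the synthesized dispense payload gains two extra leading bytes, `b"\x00\x02"`, ahead of the short-type-ID bytes. The diff does not say what those bytes mean in the new format; treating them as an opaque envelope, the resulting byte layout is:

```python
import struct

SHORT_TXTYPE_FORMAT = ">B"  # one-byte message type id; format assumed for the sketch
DISPENSE_ID = 13            # dispenser.DISPENSE_ID; value assumed for the sketch

# Old layout: [type id][0x00]
data = struct.pack(SHORT_TXTYPE_FORMAT, DISPENSE_ID) + b"\x00"

# New layout under "new_prefix_xcp1": [0x00 0x02][type id][0x00].
# Whether 0x00 0x02 is a version marker or a count is not shown in this diff.
new_prefix_enabled = True  # stands in for util.enabled("new_prefix_xcp1")
if new_prefix_enabled:
    data = b"\x00\x02" + data

assert data == b"\x00\x02\x0d\x00"
```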
@@ -270,7 +272,7 @@
     return source, destination, btc_amount, fee, data, outs
 
 
-def parse_transaction_vouts(decoded_tx):
+def parse_transaction_vouts(decoded_tx, block_index):

Check warning (Code scanning / pylint): Too many branches (15/12).

     # Get destinations and data outputs.
     destinations, btc_amount, fee, data, potential_dispensers = [], 0, 0, b"", []
 
@@ -285,13 +287,13 @@
         # Ignore transactions with invalid script.
         asm = script.script_to_asm(script_pub_key)
         if asm[0] == OP_RETURN:  # noqa: F405
-            new_destination, new_data = decode_opreturn(asm, decoded_tx)
+            new_destination, new_data = decode_opreturn(asm, decoded_tx, block_index)
         elif asm[-1] == OP_CHECKSIG:  # noqa: F405
-            new_destination, new_data = decode_checksig(asm, decoded_tx)
+            new_destination, new_data = decode_checksig(asm, decoded_tx, block_index)
             potential_dispensers[-1] = (new_destination, output_value)
         elif asm[-1] == OP_CHECKMULTISIG:  # noqa: F405
             try:
-                new_destination, new_data = decode_checkmultisig(asm, decoded_tx)
+                new_destination, new_data = decode_checkmultisig(asm, decoded_tx, block_index)
                 potential_dispensers[-1] = (new_destination, output_value)
             except script.MultiSigAddressError:
                 raise DecodeError("invalid OP_CHECKMULTISIG")  # noqa: B904
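As the hunk above shows, `parse_transaction_vouts` classifies each output by its script shape and then threads `block_index` into the matching decoder. A reduced sketch of that dispatch, using the standard Bitcoin opcode values (the function name is illustrative):

```python
# Reduced model of the output classifier in parse_transaction_vouts.
OP_RETURN, OP_CHECKSIG, OP_CHECKMULTISIG = 0x6A, 0xAC, 0xAE  # Bitcoin opcodes

def classify(asm):
    if asm[0] == OP_RETURN:
        return "opreturn"       # data-only output, no destination
    if asm[-1] == OP_CHECKSIG:
        return "checksig"       # P2PK/P2PKH-style, possible dispenser
    if asm[-1] == OP_CHECKMULTISIG:
        return "checkmultisig"  # bare-multisig data encoding
    return "other"

assert classify([OP_RETURN, b"\x00"]) == "opreturn"
assert classify([b"\x00" * 20, OP_CHECKSIG]) == "checksig"
```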
@@ -361,7 +363,7 @@
     else:
         logger.trace("parsed_vouts not in decoded_tx")
         destinations, btc_amount, fee, data, potential_dispensers = parse_transaction_vouts(
-            decoded_tx
+            decoded_tx, block_index
         )
 
     # source can be determined by parsing the p2sh_data transaction
@@ -372,7 +374,7 @@
     p2sh_encoding_source = None
     if util.enabled("p2sh_encoding") and data == b"P2SH":
         p2sh_encoding_source, data, outputs_value = get_transaction_source_from_p2sh(
-            decoded_tx, p2sh_is_segwit
+            decoded_tx, p2sh_is_segwit, block_index
         )
         fee += outputs_value
         fee_added = True
@@ -396,7 +398,7 @@
     # Collect all (unique) source addresses.
     # if we haven't found them yet
     if p2sh_encoding_source is None:
-        sources, outputs_value = get_transaction_sources(decoded_tx)
+        sources, outputs_value = get_transaction_sources(decoded_tx, block_index)
         if not fee_added:
             fee += outputs_value
     else:  # use the source from the p2sh data source
@@ -414,8 +416,8 @@
     destinations = "-".join(destinations)
 
     try:
-        message_type_id, _ = message_type.unpack(data, block_index)
-    except struct.error:  # Deterministically raised.
+        message_type_id, _ = message_type.unpack(data, block_index)[0]
+    except (struct.error, IndexError):  # Deterministically raised.
         message_type_id = None
 
     if message_type_id == dispenser.DISPENSE_ID and util.enabled(
@@ -475,11 +477,11 @@
             continue
 
         data_pubkey = arc4_decrypt(pubkeyhash, decoded_tx)
-        if data_pubkey[1:9] == config.PREFIX or pubkeyhash_encoding:
+        if data_pubkey[1:9] == util.prefix(block_index) or pubkeyhash_encoding:
             pubkeyhash_encoding = True
             data_chunk_length = data_pubkey[0]  # No ord() necessary.
             data_chunk = data_pubkey[1 : data_chunk_length + 1]
-            if data_chunk[-8:] == config.PREFIX:
+            if data_chunk[-8:] == util.prefix(block_index):

Check warning (Code scanning / pylint): Unnecessary "else" after "break", remove the "else" and de-indent the code inside it.

                 data += data_chunk[:-8]
                 break
             else:
@@ -495,8 +497,8 @@
     # Check for, and strip away, prefix (except for burns).
     if destination == config.UNSPENDABLE:
         pass
-    elif data[: len(config.PREFIX)] == config.PREFIX:
-        data = data[len(config.PREFIX) :]
+    elif data[: len(util.prefix(block_index))] == util.prefix(block_index):
+        data = data[len(util.prefix(block_index)) :]
     else:
         raise DecodeError("no prefix")
 