[v11.1.0] New Transaction Format #2189

Draft · wants to merge 16 commits into base: develop
52 changes: 27 additions & 25 deletions counterparty-core/counterpartycore/lib/gettxinfo.py
@@ -30,25 +30,25 @@
raise DecodeError("invalid OP_RETURN")


def decode_opreturn(asm, decoded_tx):
def decode_opreturn(asm, decoded_tx, block_index):
chunk = get_opreturn(asm)
chunk = arc4_decrypt(chunk, decoded_tx)
if chunk[: len(config.PREFIX)] == config.PREFIX: # Data
destination, data = None, chunk[len(config.PREFIX) :]
if chunk[: len(util.prefix(block_index))] == util.prefix(block_index): # Data
destination, data = None, chunk[len(util.prefix(block_index)) :]
else:
raise DecodeError("unrecognised OP_RETURN output")

return destination, data


def decode_checksig(asm, decoded_tx):
def decode_checksig(asm, decoded_tx, block_index):
pubkeyhash = script.get_checksig(asm)
chunk = arc4_decrypt(pubkeyhash, decoded_tx) # TODO: This is slow!
if chunk[1 : len(config.PREFIX) + 1] == config.PREFIX: # Data
if chunk[1 : len(util.prefix(block_index)) + 1] == util.prefix(block_index): # Data
# Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data.
chunk_length = chunk[0]
chunk = chunk[1 : chunk_length + 1]
destination, data = None, chunk[len(config.PREFIX) :]
destination, data = None, chunk[len(util.prefix(block_index)) :]
else: # Destination
pubkeyhash = binascii.hexlify(pubkeyhash).decode("utf-8")
destination, data = script.base58_check_encode(pubkeyhash, config.ADDRESSVERSION), None
@@ -63,17 +63,17 @@
return destination, None


def decode_checkmultisig(asm, decoded_tx):
def decode_checkmultisig(asm, decoded_tx, block_index):
pubkeys, signatures_required = script.get_checkmultisig(asm)
chunk = b""
for pubkey in pubkeys[:-1]: # (No data in last pubkey.)
chunk += pubkey[1:-1] # Skip sign byte and nonce byte.
chunk = arc4_decrypt(chunk, decoded_tx)
if chunk[1 : len(config.PREFIX) + 1] == config.PREFIX: # Data
if chunk[1 : len(util.prefix(block_index)) + 1] == util.prefix(block_index): # Data
# Padding byte in each output (instead of just in the last one) so that encoding methods may be mixed. Also, it’s just not very much data.
chunk_length = chunk[0]
chunk = chunk[1 : chunk_length + 1]
destination, data = None, chunk[len(config.PREFIX) :]
destination, data = None, chunk[len(util.prefix(block_index)) :]
else: # Destination
pubkeyhashes = [script.pubkey_to_pubkeyhash(pubkey) for pubkey in pubkeys]
destination, data = (
@@ -155,7 +155,7 @@
return vout["value"], vout["script_pub_key"], is_segwit


def get_transaction_sources(decoded_tx):
def get_transaction_sources(decoded_tx, block_index):
sources = []
outputs_value = 0

@@ -167,11 +167,11 @@
asm = script.script_to_asm(script_pubkey)

if asm[-1] == OP_CHECKSIG: # noqa: F405
new_source, new_data = decode_checksig(asm, decoded_tx)
new_source, new_data = decode_checksig(asm, decoded_tx, block_index)
if new_data or not new_source:
raise DecodeError("data in source")
elif asm[-1] == OP_CHECKMULTISIG: # noqa: F405
new_source, new_data = decode_checkmultisig(asm, decoded_tx)
new_source, new_data = decode_checkmultisig(asm, decoded_tx, block_index)
if new_data or not new_source:
raise DecodeError("data in source")
elif asm[0] == OP_HASH160 and asm[-1] == OP_EQUAL and len(asm) == 3: # noqa: F405
@@ -195,7 +195,7 @@
return "-".join(sources), outputs_value


def get_transaction_source_from_p2sh(decoded_tx, p2sh_is_segwit):
def get_transaction_source_from_p2sh(decoded_tx, p2sh_is_segwit, block_index):
p2sh_encoding_source = None
data = b""
outputs_value = 0
@@ -214,7 +214,7 @@
asm = script.script_to_asm(vin["script_sig"])

new_source, new_destination, new_data = p2sh_encoding.decode_p2sh_input(
asm, p2sh_is_segwit=prevout_is_segwit
asm, block_index, p2sh_is_segwit=prevout_is_segwit
)
# this could be a p2sh source address with no encoded data
if new_data is None:
@@ -259,6 +259,8 @@
fee = 0
data = struct.pack(config.SHORT_TXTYPE_FORMAT, dispenser.DISPENSE_ID)
data += b"\x00"
if util.enabled("new_prefix_xcp1"):
data = b"\x00\x02" + data

if util.enabled("multiple_dispenses"):
outs.append({"destination": out[0], "btc_amount": out[1], "out_index": out_index})
@@ -270,7 +272,7 @@
return source, destination, btc_amount, fee, data, outs


def parse_transaction_vouts(decoded_tx):
def parse_transaction_vouts(decoded_tx, block_index):

Code scanning / pylint warning: Too many branches (15/12).
# Get destinations and data outputs.
destinations, btc_amount, fee, data, potential_dispensers = [], 0, 0, b"", []

@@ -285,13 +287,13 @@
# Ignore transactions with invalid script.
asm = script.script_to_asm(script_pub_key)
if asm[0] == OP_RETURN: # noqa: F405
new_destination, new_data = decode_opreturn(asm, decoded_tx)
new_destination, new_data = decode_opreturn(asm, decoded_tx, block_index)
elif asm[-1] == OP_CHECKSIG: # noqa: F405
new_destination, new_data = decode_checksig(asm, decoded_tx)
new_destination, new_data = decode_checksig(asm, decoded_tx, block_index)
potential_dispensers[-1] = (new_destination, output_value)
elif asm[-1] == OP_CHECKMULTISIG: # noqa: F405
try:
new_destination, new_data = decode_checkmultisig(asm, decoded_tx)
new_destination, new_data = decode_checkmultisig(asm, decoded_tx, block_index)
potential_dispensers[-1] = (new_destination, output_value)
except script.MultiSigAddressError:
raise DecodeError("invalid OP_CHECKMULTISIG") # noqa: B904
@@ -361,7 +363,7 @@
else:
logger.trace("parsed_vouts not in decoded_tx")
destinations, btc_amount, fee, data, potential_dispensers = parse_transaction_vouts(
decoded_tx
decoded_tx, block_index
)

# source can be determined by parsing the p2sh_data transaction
@@ -372,7 +374,7 @@
p2sh_encoding_source = None
if util.enabled("p2sh_encoding") and data == b"P2SH":
p2sh_encoding_source, data, outputs_value = get_transaction_source_from_p2sh(
decoded_tx, p2sh_is_segwit
decoded_tx, p2sh_is_segwit, block_index
)
fee += outputs_value
fee_added = True
@@ -396,7 +398,7 @@
# Collect all (unique) source addresses.
# if we haven't found them yet
if p2sh_encoding_source is None:
sources, outputs_value = get_transaction_sources(decoded_tx)
sources, outputs_value = get_transaction_sources(decoded_tx, block_index)
if not fee_added:
fee += outputs_value
else: # use the source from the p2sh data source
@@ -475,11 +477,11 @@
continue

data_pubkey = arc4_decrypt(pubkeyhash, decoded_tx)
if data_pubkey[1:9] == config.PREFIX or pubkeyhash_encoding:
if data_pubkey[1:9] == util.prefix(block_index) or pubkeyhash_encoding:
pubkeyhash_encoding = True
data_chunk_length = data_pubkey[0] # No ord() necessary.
data_chunk = data_pubkey[1 : data_chunk_length + 1]
if data_chunk[-8:] == config.PREFIX:
if data_chunk[-8:] == util.prefix(block_index):

Code scanning / pylint warning: Unnecessary "else" after "break", remove the "else" and de-indent the code inside it.
data += data_chunk[:-8]
break
else:
@@ -495,8 +497,8 @@
# Check for, and strip away, prefix (except for burns).
if destination == config.UNSPENDABLE:
pass
elif data[: len(config.PREFIX)] == config.PREFIX:
data = data[len(config.PREFIX) :]
elif data[: len(util.prefix(block_index))] == util.prefix(block_index):
data = data[len(util.prefix(block_index)) :]
else:
raise DecodeError("no prefix")

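Every config.PREFIX lookup in this file now goes through util.prefix(block_index), but the helper itself is outside the hunks shown here. Below is a minimal sketch of what a block-height-gated prefix helper could look like; the new prefix value and the activation height are placeholder assumptions, not values taken from this PR.

```python
LEGACY_PREFIX = b"CNTRPRTY"      # config.PREFIX in current releases
NEW_PREFIX = b"XCP1"             # hypothetical shorter prefix; its real value is not in this diff
NEW_PREFIX_XCP1_BLOCK = 900_000  # hypothetical activation height for "new_prefix_xcp1"


def prefix(block_index: int) -> bytes:
    """Return the protocol prefix active at the given block height (sketch only)."""
    if block_index >= NEW_PREFIX_XCP1_BLOCK:
        return NEW_PREFIX
    return LEGACY_PREFIX
```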
18 changes: 12 additions & 6 deletions counterparty-core/counterpartycore/lib/message_type.py
@@ -24,17 +24,23 @@
message_type_id = None
message_remainder = None

if len(packed_data) > 1:
if util.enabled("new_prefix_xcp1"):
message_length = struct.unpack(">H", packed_data[:2])[0] # noqa: F841
message_data = packed_data[2:]
else:
message_data = packed_data

if len(message_data) > 1:
# try to read 1 byte first
if util.enabled("short_tx_type_id", block_index):
message_type_id = struct.unpack(config.SHORT_TXTYPE_FORMAT, packed_data[:1])[0]
message_type_id = struct.unpack(config.SHORT_TXTYPE_FORMAT, message_data[:1])[0]
if message_type_id > 0:
message_remainder = packed_data[1:]
message_remainder = message_data[1:]
return (message_type_id, message_remainder)

# First message byte was 0. We will read 4 bytes
if len(packed_data) > 4:
message_type_id = struct.unpack(config.TXTYPE_FORMAT, packed_data[:4])[0]
message_remainder = packed_data[4:]
if len(message_data) > 4:
message_type_id = struct.unpack(config.TXTYPE_FORMAT, message_data[:4])[0]
message_remainder = message_data[4:]

return (message_type_id, message_remainder)
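With new_prefix_xcp1 enabled, unpack() now strips a 2-byte big-endian length header before reading the type id. A self-contained sketch of the resulting wire layout follows; the format strings are assumed to match config.SHORT_TXTYPE_FORMAT ("B") and config.TXTYPE_FORMAT (">I").

```python
import struct

SHORT_TXTYPE_FORMAT = "B"  # assumed to match config.SHORT_TXTYPE_FORMAT
TXTYPE_FORMAT = ">I"       # assumed to match config.TXTYPE_FORMAT


def unpack_new_format(packed_data: bytes):
    """Mirror of unpack() above with both flags active: strip the 2-byte
    big-endian length header, then read the short (or legacy 4-byte) type id."""
    message_length = struct.unpack(">H", packed_data[:2])[0]  # informational, as in the diff
    message_data = packed_data[2:]
    message_type_id = struct.unpack(SHORT_TXTYPE_FORMAT, message_data[:1])[0]
    if message_type_id > 0:
        return message_type_id, message_data[1:]
    # first byte was 0: fall back to the legacy 4-byte type id
    message_type_id = struct.unpack(TXTYPE_FORMAT, message_data[:4])[0]
    return message_type_id, message_data[4:]


# e.g. a 2-byte message with a 1-byte type id:
print(unpack_new_format(b"\x00\x02" + b"\x0d\x00"))  # -> (13, b'\x00')
```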
17 changes: 15 additions & 2 deletions counterparty-core/counterpartycore/lib/transaction.py
@@ -10,6 +10,7 @@
import inspect
import io
import logging
import struct
import sys
import threading

@@ -70,7 +71,7 @@ def initialise(force=False):

TRANSACTION_SERVICE_SINGLETON = TransactionService(
backend=backend,
prefix=config.PREFIX,
prefix=util.prefix(util.CURRENT_BLOCK_INDEX),
ps2h_dust_return_pubkey=config.P2SH_DUST_RETURN_PUBKEY,
utxo_locks_max_age=config.UTXO_LOCKS_MAX_AGE,
utxo_locks_max_addresses=config.UTXO_LOCKS_MAX_ADDRESSES,
@@ -710,7 +711,9 @@ def construct(
if dust_return_pubkey is not None:
pubkeylength = len(dust_return_pubkey)

chunk_size = p2sh_encoding.maximum_data_chunk_size(pubkeylength)
chunk_size = p2sh_encoding.maximum_data_chunk_size(
pubkeylength, util.CURRENT_BLOCK_INDEX
)
elif encoding == "opreturn":
chunk_size = config.OP_RETURN_MAX_SIZE
if len(data) + len(self.prefix) > chunk_size:
@@ -829,6 +832,7 @@ def construct(
source=source_address,
source_value=source_value,
data_output=data_output,
block_index=util.CURRENT_BLOCK_INDEX,
change_output=change_output,
pubkey=dust_return_pubkey,
multisig_pubkeys=p2sh_source_multisig_pubkeys,
@@ -867,6 +871,7 @@ def construct(
source_input=source_input,
destination_outputs=destination_outputs,
data_output=data_output,
block_index=util.CURRENT_BLOCK_INDEX,
pubkey=dust_return_pubkey,
multisig_pubkeys=p2sh_source_multisig_pubkeys,
multisig_pubkeys_required=p2sh_source_multisig_pubkeys_required,
@@ -886,6 +891,7 @@ def construct(
encoding,
inputs,
destination_outputs,
util.CURRENT_BLOCK_INDEX,
data_output,
change_output,
dust_return_pubkey=dust_return_pubkey,
@@ -1240,6 +1246,13 @@ def compose_transaction(

tx_info = compose_method(db, **params)

if util.enabled("new_prefix_xcp1"):
# add message length to the message
if tx_info[2]:
message_length = len(tx_info[2])
message_length_bytes = struct.pack(">H", message_length) # 2 bytes
tx_info = (tx_info[0], tx_info[1], message_length_bytes + tx_info[2])

raw_transaction = construct(
db,
tx_info,
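compose_transaction() adds that same length header on the construction side. A stand-alone illustration of the new branch, assuming tx_info is the (source, destinations, data) tuple returned by the compose_* methods:

```python
import struct


def add_length_header(tx_info):
    """Stand-alone version of the new_prefix_xcp1 branch in compose_transaction():
    prepend the 2-byte big-endian payload length to tx_info[2]."""
    source, destinations, data = tx_info
    if data:
        data = struct.pack(">H", len(data)) + data
    return (source, destinations, data)


# A 2-byte payload gains the b"\x00\x02" header, matching the bytes hard-coded
# for dispenses in gettxinfo.py above.
print(add_length_header(("source_address", [], b"\x0d\x00")))
```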
@@ -9,19 +9,22 @@
import bitcoin as bitcoinlib
from bitcoin.core.script import CScript

from counterpartycore.lib import config, exceptions, script
from counterpartycore.lib import config, exceptions, script, util

logger = logging.getLogger(config.LOGGER_NAME)


def maximum_data_chunk_size(pubkeylength):
def maximum_data_chunk_size(pubkeylength, block_index):
if pubkeylength >= 0:
return (
bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE - len(config.PREFIX) - pubkeylength - 12
bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE
- len(util.prefix(block_index))
- pubkeylength
- 12
) # Two bytes are for unique offset. This will work for a little more than 1000 outputs
else:
return (
bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE - len(config.PREFIX) - 44
bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE - len(util.prefix(block_index)) - 44
) # Redeemscript size for p2pkh addresses, multisig won't work here


@@ -63,7 +66,7 @@ def calculate_outputs(destination_outputs, data_array, fee_per_kb, exact_fee=Non
return size_for_fee, datatx_necessary_fee, data_value, data_btc_out


def decode_p2sh_input(asm, p2sh_is_segwit=False):
def decode_p2sh_input(asm, block_index, p2sh_is_segwit=False):
"""Looks at the scriptSig for the input of the p2sh-encoded data transaction
[signature] [data] [OP_HASH160 ... OP_EQUAL]
"""
@@ -86,8 +89,8 @@ def decode_p2sh_input(asm, p2sh_is_segwit=False):
datachunk, redeem_script, _substitute_script = asm

data = datachunk
if data[: len(config.PREFIX)] == config.PREFIX:
data = data[len(config.PREFIX) :]
if data[: len(util.prefix(block_index))] == util.prefix(block_index):
data = data[len(util.prefix(block_index)) :]
else:
if data == b"":
return source, None, None
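The only change to maximum_data_chunk_size() is that the per-output budget now depends on the active prefix length. A worked example of the same arithmetic, assuming the consensus MAX_SCRIPT_ELEMENT_SIZE of 520 bytes and a 33-byte compressed pubkey:

```python
MAX_SCRIPT_ELEMENT_SIZE = 520  # bitcoinlib.core.script.MAX_SCRIPT_ELEMENT_SIZE


def maximum_data_chunk_size(prefix_length: int, pubkeylength: int) -> int:
    """Same arithmetic as above, with the active prefix length passed in directly."""
    if pubkeylength >= 0:
        # 12 bytes of overhead; per the original comment, two of them are the unique offset.
        return MAX_SCRIPT_ELEMENT_SIZE - prefix_length - pubkeylength - 12
    return MAX_SCRIPT_ELEMENT_SIZE - prefix_length - 44


# Legacy 8-byte prefix and a 33-byte compressed pubkey: 520 - 8 - 33 - 12 = 467 bytes
# of data per chunk; a shorter post-activation prefix buys back the difference.
print(maximum_data_chunk_size(8, 33))
```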
@@ -12,7 +12,7 @@
import bitcoin as bitcoinlib
from bitcoin.bech32 import CBech32Data

from counterpartycore.lib import arc4, backend, config, exceptions, script # noqa: F401
from counterpartycore.lib import arc4, backend, config, exceptions, script, util # noqa: F401

Code scanning / pylint warning: Unused util imported from counterpartycore.lib.
from counterpartycore.lib.transaction_helper import p2sh_encoding

logger = logging.getLogger(config.LOGGER_NAME)
@@ -187,6 +187,7 @@
encoding,
inputs,
destination_outputs,
block_index,
data_output=None,
change_output=None,
dust_return_pubkey=None,
@@ -261,7 +262,7 @@
data_array, value = data_output
s += value.to_bytes(8, byteorder="little") # Value

data_chunk = config.PREFIX + data_chunk # noqa: PLW2901
data_chunk = util.prefix(block_index) + data_chunk # noqa: PLW2901

# Initialise encryption key (once per output).
assert isinstance(inputs[0]["txid"], str)
@@ -347,6 +348,7 @@
source,
source_value,
data_output,
block_index,
change_output=None,
pubkey=None,
multisig_pubkeys=None,
@@ -382,7 +384,7 @@

# P2SH for data encoded inputs
for n, data_chunk in enumerate(data_array):
data_chunk = config.PREFIX + data_chunk # prefix the data_chunk # noqa: PLW2901
data_chunk = util.prefix(block_index) + data_chunk # prefix the data_chunk # noqa: PLW2901

# get the scripts
script_sig, redeem_script, output_script = p2sh_encoding.make_p2sh_encoding_redeemscript(
@@ -417,6 +419,7 @@
source_input,
destination_outputs,
data_output,
block_index,
pubkey=None,
multisig_pubkeys=None,
multisig_pubkeys_required=None,
@@ -450,7 +453,7 @@

# list of inputs
for n, data_chunk in enumerate(data_array):
data_chunk = config.PREFIX + data_chunk # prefix the data_chunk # noqa: PLW2901
data_chunk = util.prefix(block_index) + data_chunk # prefix the data_chunk # noqa: PLW2901

# get the scripts
script_sig, redeem_script, output_script = p2sh_encoding.make_p2sh_encoding_redeemscript(
@@ -487,7 +490,7 @@

# opreturn to signal P2SH encoding
key = arc4.init_arc4(txid)
data_chunk = config.PREFIX + b"P2SH"
data_chunk = util.prefix(block_index) + b"P2SH"
data_chunk = key.encrypt(data_chunk)
tx_script = OP_RETURN # OP_RETURN
tx_script += op_push(len(data_chunk)) # Push bytes of data chunk
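The serializer still advertises P2SH encoding with an ARC4-encrypted OP_RETURN marker, now built from util.prefix(block_index). A self-contained round-trip sketch of that signal, assuming init_arc4() keys RC4 on the unhexlified first-input txid and substituting pycryptodome for counterpartycore.lib.arc4:

```python
from binascii import unhexlify

from Crypto.Cipher import ARC4  # pip install pycryptodome

PREFIX = b"CNTRPRTY"          # legacy prefix; post-activation this would be util.prefix(block_index)
first_input_txid = "aa" * 32  # placeholder txid of the transaction's first input

key = unhexlify(first_input_txid)
op_return_payload = ARC4.new(key).encrypt(PREFIX + b"P2SH")

# Parsing side (gettxinfo.get_tx_info): decrypt with the same key, strip the prefix,
# and the remaining b"P2SH" marker routes decoding to get_transaction_source_from_p2sh().
decrypted = ARC4.new(key).decrypt(op_return_payload)
assert decrypted[: len(PREFIX)] == PREFIX
assert decrypted[len(PREFIX) :] == b"P2SH"
```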