refactor!: clean up #testnet reset TODOs
sdbondi committed Jan 5, 2022
1 parent 834eff9 commit 8e83731
Showing 12 changed files with 24 additions and 101 deletions.
2 changes: 1 addition & 1 deletion base_layer/core/src/blocks/block.rs
@@ -249,7 +249,7 @@ impl BlockBuilder {
header: self.header,
body: AggregateBody::new(self.inputs, self.outputs, self.kernels),
};
- block.body.sort(block.header.version);
+ block.body.sort();
block
}
}
4 changes: 2 additions & 2 deletions base_layer/core/src/blocks/genesis_block.rs
@@ -112,7 +112,7 @@ fn get_igor_genesis_block_raw() -> Block {
excess_sig: sig,
}],
);
- body.sort(1);
+ body.sort();
// set genesis timestamp
let genesis = DateTime::parse_from_rfc2822("31 Oct 2021 06:00:00 +0200").unwrap();
let timestamp = genesis.timestamp() as u64;
@@ -195,7 +195,7 @@ fn get_dibbler_genesis_block_raw() -> Block {
excess_sig: sig,
}],
);
- body.sort(2);
+ body.sort();
// set genesis timestamp
let genesis = DateTime::parse_from_rfc2822("31 Oct 2021 00:00:00 +0200").unwrap();
let timestamp = genesis.timestamp() as u64;
9 changes: 2 additions & 7 deletions base_layer/core/src/chain_storage/blockchain_database.rs
@@ -716,7 +716,7 @@ where B: BlockchainBackend
});
}

- body.sort(header.version);
+ body.sort();
let mut header = BlockHeader::from(header);
// If someone advanced the median timestamp such that the local time is less than the median timestamp, we need
// to increase the timestamp to be greater than the median timestamp
@@ -1191,12 +1191,7 @@ pub fn calculate_mmr_roots<T: BlockchainBackend>(db: &T, block: &Block) -> Resul

output_mmr.compress();

- // TODO: #testnet_reset clean up this code
- let input_mr = if header.version == 1 {
-     MutableMmr::<HashDigest, _>::new(input_mmr.get_pruned_hash_set()?, Bitmap::create())?.get_merkle_root()?
- } else {
-     input_mmr.get_merkle_root()?
- };
+ let input_mr = input_mmr.get_merkle_root()?;

let mmr_roots = MmrRoots {
kernel_mr: kernel_mmr.get_merkle_root()?,
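
The version-1 branch that rebuilt a `MutableMmr` from the pruned hash set is gone; the input Merkle root is now read directly off the input MMR. For orientation only, here is a plain binary Merkle root over pre-hashed leaves; this is a sketch of the concept, not the `tari_mmr` API, and `Sha256` stands in for the node hash:

```rust
use sha2::{Digest, Sha256};

/// Illustration only: a plain binary Merkle root over already-hashed leaves.
/// The production code simply asks the input MMR for its root via `get_merkle_root()`.
fn merkle_root(mut layer: Vec<[u8; 32]>) -> [u8; 32] {
    if layer.is_empty() {
        return [0u8; 32];
    }
    while layer.len() > 1 {
        let next: Vec<[u8; 32]> = layer
            .chunks(2)
            .map(|pair| {
                let mut h = Sha256::new();
                h.update(&pair[0]);
                // An odd node is paired with itself; real MMRs handle this differently.
                h.update(pair.get(1).unwrap_or(&pair[0]));
                h.finalize().into()
            })
            .collect();
        layer = next;
    }
    layer[0]
}

fn main() {
    let leaves: Vec<[u8; 32]> = vec![Sha256::digest(b"input-1").into(), Sha256::digest(b"input-2").into()];
    println!("{:02x?}", merkle_root(leaves));
}
```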
9 changes: 2 additions & 7 deletions base_layer/core/src/transactions/aggregated_body.rs
@@ -220,18 +220,13 @@ impl AggregateBody {
}

/// Sort the component lists of the aggregate body
- pub fn sort(&mut self, version: u16) {
+ pub fn sort(&mut self) {
if self.sorted {
return;
}
self.inputs.sort();
self.outputs.sort();
- // TODO: #testnet_reset clean up this code
- if version <= 1 {
-     self.kernels.sort_by(|a, b| a.deprecated_cmp(b));
- } else {
-     self.kernels.sort();
- }
+ self.kernels.sort();
self.sorted = true;
}

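
With the version parameter dropped, `sort` relies on the kernels' own `Ord` and still short-circuits via the `sorted` flag. A minimal sketch of that sort-once pattern, using plain integers as stand-ins for the real input/output/kernel types:

```rust
/// Minimal sketch of the sort-once pattern behind `AggregateBody::sort`.
/// Plain integers stand in for the real TransactionInput/Output/Kernel types.
#[derive(Default)]
struct Body {
    inputs: Vec<u64>,
    outputs: Vec<u64>,
    kernels: Vec<u64>,
    sorted: bool,
}

impl Body {
    fn sort(&mut self) {
        if self.sorted {
            return; // already canonical; skip the work on repeat calls
        }
        self.inputs.sort();
        self.outputs.sort();
        self.kernels.sort(); // natural Ord, no version-dependent comparator
        self.sorted = true;
    }
}

fn main() {
    let mut body = Body { kernels: vec![3, 1, 2], ..Default::default() };
    body.sort();
    assert_eq!(body.kernels, vec![1, 2, 3]);
}
```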
3 changes: 1 addition & 2 deletions base_layer/core/src/transactions/transaction/mod.rs
@@ -84,8 +84,7 @@ pub const MAX_TRANSACTION_RECIPIENTS: usize = 15;
/// c) TransactionInputs will now have the same hash as UTXOs, which makes locating STXOs easier when doing reorgs
pub fn hash_output(features: &OutputFeatures, commitment: &Commitment, script: &TariScript) -> Vec<u8> {
HashDigest::new()
- // TODO: use consensus encoding #testnet_reset
- .chain(features.to_v1_bytes())
+ .chain(features.to_consensus_bytes())
.chain(commitment.as_bytes())
// .chain(range proof) // See docs as to why we exclude this
.chain(script.as_bytes())
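
`hash_output` now chains the consensus-encoded feature bytes instead of the old bincode form. The chained-digest pattern itself looks roughly like the sketch below; `Sha256` and the placeholder byte slices are assumptions, not the hasher or encodings used in the codebase:

```rust
use sha2::{Digest, Sha256};

// Sketch of the chained hashing in hash_output. The byte slices stand in for
// features.to_consensus_bytes(), commitment.as_bytes() and script.as_bytes();
// the range proof is deliberately left out, as in the original.
fn hash_output(features: &[u8], commitment: &[u8], script: &[u8]) -> Vec<u8> {
    let mut hasher = Sha256::new();
    hasher.update(features);
    hasher.update(commitment);
    hasher.update(script);
    hasher.finalize().to_vec()
}

fn main() {
    let hash = hash_output(b"features", b"commitment", b"script");
    assert_eq!(hash.len(), 32);
}
```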
10 changes: 0 additions & 10 deletions base_layer/core/src/transactions/transaction/output_features.rs
@@ -64,16 +64,6 @@ impl OutputFeatures {
/// The version number to use in consensus encoding. In future, this value could be dynamic.
const CONSENSUS_ENCODING_VERSION: u8 = 0;

- /// Encodes output features using deprecated bincode encoding
- pub fn to_v1_bytes(&self) -> Vec<u8> {
-     // unreachable panic: serialized_size is infallible because it uses DefaultOptions
-     let encode_size = bincode::serialized_size(self).expect("unreachable");
-     let mut buf = Vec::with_capacity(encode_size as usize);
-     // unreachable panic: Vec's Write impl is infallible
-     bincode::serialize_into(&mut buf, self).expect("unreachable");
-     buf
- }

/// Encodes output features using consensus encoding
pub fn to_consensus_bytes(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(self.consensus_encode_exact_size());
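
Only the consensus-encoding path remains: size the buffer from `consensus_encode_exact_size()` and write into it. A toy version of that trait shape (names, fields and encoding invented for illustration, not Tari's actual `ConsensusEncoding` trait):

```rust
use std::io::{self, Write};

// Toy consensus-encoding trait: a type knows its exact encoded size and writes
// itself into any Write-r. The shape mirrors the pattern above; details are illustrative.
trait ConsensusEncoding {
    fn consensus_encode_exact_size(&self) -> usize;
    fn consensus_encode<W: Write>(&self, writer: &mut W) -> io::Result<()>;

    fn to_consensus_bytes(&self) -> Vec<u8> {
        let mut buf = Vec::with_capacity(self.consensus_encode_exact_size());
        // Writing into a Vec cannot fail, mirroring the "unreachable" expects upstream.
        self.consensus_encode(&mut buf).expect("writing to a Vec is infallible");
        buf
    }
}

struct ToyFeatures {
    version: u8,
    maturity: u64,
}

impl ConsensusEncoding for ToyFeatures {
    fn consensus_encode_exact_size(&self) -> usize {
        1 + 8
    }

    fn consensus_encode<W: Write>(&self, writer: &mut W) -> io::Result<()> {
        writer.write_all(&[self.version])?;
        writer.write_all(&self.maturity.to_le_bytes())?;
        Ok(())
    }
}

fn main() {
    let f = ToyFeatures { version: 0, maturity: 42 };
    assert_eq!(f.to_consensus_bytes().len(), f.consensus_encode_exact_size());
}
```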
4 changes: 2 additions & 2 deletions base_layer/core/src/transactions/transaction/transaction_input.rs
@@ -263,7 +263,7 @@ impl TransactionInput {
ref features,
..
} => HashDigest::new()
- .chain(features.to_v1_bytes())
+ .chain(features.to_consensus_bytes())
.chain(commitment.as_bytes())
.chain(script.as_bytes())
.finalize()
@@ -285,7 +285,7 @@ impl TransactionInput {
ref script,
ref sender_offset_public_key,
} => Ok(HashDigest::new()
- .chain(features.to_v1_bytes())
+ .chain(features.to_consensus_bytes())
.chain(commitment.as_bytes())
.chain(script.as_bytes())
.chain(sender_offset_public_key.as_bytes())
10 changes: 0 additions & 10 deletions base_layer/core/src/transactions/transaction/transaction_kernel.rs
@@ -82,16 +82,6 @@ impl TransactionKernel {
))
}
}

- /// This method was used to sort kernels. It has been replaced, and will be removed in future
- pub fn deprecated_cmp(&self, other: &Self) -> Ordering {
-     self.features
-         .cmp(&other.features)
-         .then(self.fee.cmp(&other.fee))
-         .then(self.lock_height.cmp(&other.lock_height))
-         .then(self.excess.cmp(&other.excess))
-         .then(self.excess_sig.cmp(&other.excess_sig))
- }
}

impl Hashable for TransactionKernel {
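
`deprecated_cmp` spelled out a field-by-field ordering for the old sort; kernels now rely on their own `Ord`. As a general illustration (with made-up fields, not the kernel's real ones), a derived `Ord` compares fields in declaration order, which is what a manual `.cmp(..).then(..)` chain writes out:

```rust
use std::cmp::Ordering;

// Hypothetical fields, only to show that #[derive(Ord)] compares in declaration order.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
struct ToyKernel {
    features: u8,
    fee: u64,
    lock_height: u64,
}

// The equivalent hand-written chain, in the style of the removed deprecated_cmp.
fn manual_cmp(a: &ToyKernel, b: &ToyKernel) -> Ordering {
    a.features
        .cmp(&b.features)
        .then(a.fee.cmp(&b.fee))
        .then(a.lock_height.cmp(&b.lock_height))
}

fn main() {
    let a = ToyKernel { features: 0, fee: 10, lock_height: 5 };
    let b = ToyKernel { features: 0, fee: 10, lock_height: 7 };
    assert_eq!(a.cmp(&b), manual_cmp(&a, &b)); // both Ordering::Less
}
```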
3 changes: 1 addition & 2 deletions base_layer/core/src/transactions/transaction/transaction_output.rs
@@ -215,8 +215,7 @@ impl TransactionOutput {
Challenge::new()
.chain(public_commitment_nonce.as_bytes())
.chain(script.as_bytes())
- // TODO: Use consensus encoded bytes #testnet_reset
- .chain(features.to_v1_bytes())
+ .chain(features.to_consensus_bytes())
.chain(sender_offset_public_key.as_bytes())
.chain(commitment.as_bytes())
.finalize()
16 changes: 2 additions & 14 deletions base_layer/core/src/validation/block_validators/async_validator.rs
@@ -19,7 +19,7 @@
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- use std::{cmp, cmp::Ordering, convert::TryInto, thread, time::Instant};
+ use std::{cmp, convert::TryInto, thread, time::Instant};

use async_trait::async_trait;
use futures::{stream::FuturesUnordered, StreamExt};
@@ -165,13 +165,6 @@ impl<B: BlockchainBackend + 'static> BlockValidator<B> {
) -> AbortOnDropJoinHandle<Result<KernelValidationData, ValidationError>> {
let height = header.height;
let block_version = header.version;
- let kernel_comparer = move |a: &TransactionKernel, b: &TransactionKernel| -> Ordering {
-     if block_version == 1 {
-         a.deprecated_cmp(b)
-     } else {
-         a.cmp(b)
-     }
- };

let total_kernel_offset = header.total_kernel_offset.clone();
let total_reward = self.rules.calculate_coinbase_and_fees(height, &kernels);
@@ -190,12 +183,7 @@ impl<B: BlockchainBackend + 'static> BlockValidator<B> {
let mut coinbase_index = None;
let mut max_kernel_timelock = 0;
for (i, kernel) in kernels.iter().enumerate() {
- if i > 0 &&
-     matches!(
-         kernel_comparer(kernel, &kernels[i - 1]),
-         Ordering::Equal | Ordering::Less
-     )
- {
+ if i > 0 && kernel <= &kernels[i - 1] {
return Err(ValidationError::UnsortedOrDuplicateKernel);
}

34 changes: 2 additions & 32 deletions base_layer/core/src/validation/helpers.rs
@@ -19,7 +19,6 @@
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- use std::cmp::Ordering;

use log::*;
use tari_common_types::types::{Commitment, CommitmentFactory, PublicKey};
@@ -305,47 +304,18 @@ pub fn check_sorting_and_duplicates(block: &Block) -> Result<(), ValidationError
if !is_all_unique_and_sorted(body.inputs()) {
return Err(ValidationError::UnsortedOrDuplicateInput);
}

if !is_all_unique_and_sorted(body.outputs()) {
return Err(ValidationError::UnsortedOrDuplicateOutput);
}

- if block.version() == 1 {
-     // TODO: #testnetreset clean up
-     let wrapped = body
-         .kernels()
-         .iter()
-         .map(KernelDeprecatedOrdWrapper::new)
-         .collect::<Vec<_>>();
-     if !is_all_unique_and_sorted(&wrapped) {
-         return Err(ValidationError::UnsortedOrDuplicateKernel);
-     }
- } else if !is_all_unique_and_sorted(body.kernels()) {
+ if !is_all_unique_and_sorted(body.kernels()) {
return Err(ValidationError::UnsortedOrDuplicateKernel);
}

Ok(())
}

- #[derive(PartialEq, Eq)]
- struct KernelDeprecatedOrdWrapper<'a> {
-     kernel: &'a TransactionKernel,
- }
- impl<'a> KernelDeprecatedOrdWrapper<'a> {
-     pub fn new(kernel: &'a TransactionKernel) -> Self {
-         Self { kernel }
-     }
- }
- impl PartialOrd for KernelDeprecatedOrdWrapper<'_> {
-     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-         Some(self.kernel.deprecated_cmp(other.kernel))
-     }
- }
- impl Ord for KernelDeprecatedOrdWrapper<'_> {
-     fn cmp(&self, other: &Self) -> Ordering {
-         self.kernel.deprecated_cmp(other.kernel)
-     }
- }

/// This function checks that all inputs in the blocks are valid UTXO's to be spent
pub fn check_inputs_are_utxos<B: BlockchainBackend>(db: &B, body: &AggregateBody) -> Result<(), ValidationError> {
let mut not_found_inputs = Vec::new();
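
With the wrapper gone, inputs, outputs and kernels all go through the same `is_all_unique_and_sorted` check. One common way to write such a helper (an assumption about its shape, not the crate's actual implementation) is a strict pairwise comparison:

```rust
/// Returns true when the slice is strictly increasing, i.e. sorted with no duplicates.
/// A sketch of how a helper like `is_all_unique_and_sorted` is typically written.
fn is_all_unique_and_sorted<T: Ord>(items: &[T]) -> bool {
    items.windows(2).all(|w| w[0] < w[1])
}

fn main() {
    assert!(is_all_unique_and_sorted(&[1, 2, 3]));
    assert!(!is_all_unique_and_sorted(&[1, 2, 2])); // duplicate
    assert!(!is_all_unique_and_sorted(&[2, 1, 3])); // unsorted
}
```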
21 changes: 9 additions & 12 deletions comms/dht/src/crypt.rs
@@ -107,18 +107,15 @@ pub fn create_origin_mac_challenge_parts(
body: &[u8],
) -> Challenge {
let mut mac_challenge = Challenge::new();
- // TODO: #testnet_reset remove conditional
- if protocol_version.as_major() > 1 {
-     mac_challenge.update(&protocol_version.to_bytes());
-     mac_challenge.update(destination.to_inner_bytes().as_slice());
-     mac_challenge.update(&(*message_type as i32).to_le_bytes());
-     mac_challenge.update(&flags.bits().to_le_bytes());
-     if let Some(t) = expires {
-         mac_challenge.update(&t.as_u64().to_le_bytes());
-     }
-     if let Some(e_pk) = ephemeral_public_key.as_ref() {
-         mac_challenge.update(e_pk.as_bytes());
-     }
+ mac_challenge.update(&protocol_version.to_bytes());
+ mac_challenge.update(destination.to_inner_bytes().as_slice());
+ mac_challenge.update(&(*message_type as i32).to_le_bytes());
+ mac_challenge.update(&flags.bits().to_le_bytes());
+ if let Some(t) = expires {
+     mac_challenge.update(&t.as_u64().to_le_bytes());
+ }
+ if let Some(e_pk) = ephemeral_public_key.as_ref() {
+     mac_challenge.update(e_pk.as_bytes());
}
mac_challenge.update(&body);
mac_challenge
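
Every header field is now folded into the origin MAC challenge unconditionally. A stripped-down sketch of the pattern, using `Sha256` and invented parameter types in place of the DHT's `Challenge` and real field encodings:

```rust
use sha2::{Digest, Sha256};

// Sketch of building an origin-MAC challenge: hash every header field plus the body.
// Parameter names and encodings are illustrative, not the DHT wire format.
fn origin_mac_challenge(
    protocol_version: &[u8],
    destination: &[u8],
    message_type: i32,
    flags: u32,
    expires: Option<u64>,
    ephemeral_public_key: Option<&[u8]>,
    body: &[u8],
) -> [u8; 32] {
    let mut challenge = Sha256::new();
    challenge.update(protocol_version);
    challenge.update(destination);
    challenge.update(message_type.to_le_bytes());
    challenge.update(flags.to_le_bytes());
    if let Some(t) = expires {
        challenge.update(t.to_le_bytes());
    }
    if let Some(e_pk) = ephemeral_public_key {
        challenge.update(e_pk);
    }
    challenge.update(body);
    challenge.finalize().into()
}

fn main() {
    let mac = origin_mac_challenge(b"\x02\x00", b"dest", 1, 0, Some(1_700_000_000), None, b"payload");
    assert_eq!(mac.len(), 32);
}
```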
