From 55bbdf2481bb7522ede5cc3e37ca8cdeb323b4f7 Mon Sep 17 00:00:00 2001
From: SW van Heerden
Date: Mon, 13 Nov 2023 07:58:49 +0200
Subject: [PATCH] feat!: fix difficulty overflow (#5935)

Description
---
Increases the per-algorithm accumulated difficulty from u64 to u128.
Increases the total accumulated difficulty from u128 to U256.

Motivation and Context
---
With block difficulty stored as u64, an accumulated difficulty that is also a u64 will eventually overflow.
See: https://github.com/tari-project/tari/issues/5851

How Has This Been Tested?
---
Unit tests

Fixes: https://github.com/tari-project/tari/issues/5851

---------

Co-authored-by: Aaron Feickert <66188213+AaronFeickert@users.noreply.github.com>
---
 Cargo.lock | 115 +++++++++-
 .../src/conversions/chain_metadata.rs | 5 +-
 .../src/commands/command/header_stats.rs | 4 +-
 base_layer/common_types/Cargo.toml | 1 +
 base_layer/common_types/src/chain_metadata.rs | 9 +-
 base_layer/core/Cargo.toml | 2 +-
 .../chain_metadata_service/service.rs | 11 +-
 .../src/base_node/proto/chain_metadata.rs | 23 +-
 .../state_machine_service/states/listening.rs | 16 +-
 .../base_node/sync/block_sync/synchronizer.rs | 4 +-
 .../src/base_node/sync/header_sync/error.rs | 7 +-
 .../sync/header_sync/synchronizer.rs | 3 +-
 .../base_node/sync/header_sync/validator.rs | 3 +-
 .../core/src/blocks/accumulated_data.rs | 18 +-
 base_layer/core/src/blocks/genesis_block.rs | 26 +--
 base_layer/core/src/chain_storage/async_db.rs | 3 +-
 .../src/chain_storage/blockchain_database.rs | 39 ++--
 .../core/src/chain_storage/db_transaction.rs | 9 +-
 .../core/src/chain_storage/lmdb_db/lmdb_db.rs | 5 +-
 base_layer/core/src/chain_storage/mod.rs | 3 +-
 base_layer/core/src/lib.rs | 17 --
 .../proof_of_work/accumulated_difficulty.rs | 212 ++++++++++++++++++
 .../core/src/proof_of_work/difficulty.rs | 18 +-
 base_layer/core/src/proof_of_work/error.rs | 2 +
 base_layer/core/src/proof_of_work/mod.rs | 4 +
 base_layer/core/src/proto/block.proto | 4 +-
 base_layer/core/src/proto/block.rs | 62 ++++-
 .../side_chain/validator_node_registration.rs | 2 +-
 .../core/tests/helpers/block_builders.rs | 14 +-
 .../core/tests/tests/node_state_machine.rs | 2 +-
 .../tests/support/base_node_service_mock.rs | 4 +-
 .../transaction_service_tests/service.rs | 2 +-
 .../wallet_ffi/src/callback_handler_tests.rs | 9 +-
 33 files changed, 513 insertions(+), 145 deletions(-)
 create mode 100644 base_layer/core/src/proof_of_work/accumulated_difficulty.rs

diff --git a/Cargo.lock b/Cargo.lock
index c6563074e3..20c1fe776b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -458,6 +458,18 @@ version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "22a39c0db600cfe77ef1b6d9ea71173bdccf190722877969d526d380519b6ecc"
 
+[[package]]
+name = "bitvec"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
+dependencies = [
+ "funty",
+ "radium",
+ "tap",
+ "wyz",
+]
+
 [[package]]
 name = "blake2"
 version = "0.10.6"
@@ -587,6 +599,12 @@ version = "3.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec"
 
+[[package]]
+name = "byte-slice-cast"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c"
+
 [[package]]
 name = "bytecount"
 version = "0.6.7"
@@ -1999,6 +2017,12 @@ version = "1.3.0"
 source = 
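The widening described in the commit message can be seen in isolation: block difficulty itself stays a u64, but a running total of u64 values can wrap once enough blocks have been accumulated. A minimal sketch in plain Rust (no Tari types, values illustrative) of the failure mode and of why widening the accumulator removes it:

```rust
fn main() {
    // A u64 accumulator that is almost saturated can no longer absorb another
    // block's difficulty: checked_add reports the overflow instead of wrapping.
    let almost_full: u64 = u64::MAX - 10;
    assert!(almost_full.checked_add(100).is_none());

    // Widened to u128 (as this patch does per PoW algorithm), the same
    // accumulation succeeds with enormous headroom.
    let total: u128 = u128::from(almost_full) + 100;
    assert_eq!(total, u128::from(u64::MAX) + 90);
    println!("accumulated difficulty: {total}");
}
```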
"registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + [[package]] name = "futures" version = "0.1.31" @@ -2569,6 +2593,35 @@ dependencies = [ "num-traits", ] +[[package]] +name = "impl-codec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-serde" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "indexmap" version = "1.9.3" @@ -3817,6 +3870,32 @@ dependencies = [ "sha2 0.10.8", ] +[[package]] +name = "parity-scale-codec" +version = "3.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dec8a8073036902368c2cdc0387e85ff9a37054d7e7c98e592145e0c92cd4fb" +dependencies = [ + "arrayvec", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "312270ee71e1cd70289dacf597cab7b207aa107d2f28191c2ae45b2ece18a260" +dependencies = [ + "proc-macro-crate 1.1.3", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "parking_lot" version = "0.11.2" @@ -4215,6 +4294,18 @@ dependencies = [ "elliptic-curve", ] +[[package]] +name = "primitive-types" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-serde", + "uint", +] + [[package]] name = "proc-macro-crate" version = "0.1.5" @@ -4424,6 +4515,12 @@ dependencies = [ "scheduled-thread-pool", ] +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + [[package]] name = "radix_trie" version = "0.2.1" @@ -5430,6 +5527,12 @@ dependencies = [ "libc", ] +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + [[package]] name = "tari-curve25519-dalek" version = "4.0.3" @@ -5547,6 +5650,7 @@ dependencies = [ "digest 0.10.7", "lazy_static", "newtype-ops", + "primitive-types", "rand", "serde", "tari_common", @@ -5730,6 +5834,7 @@ dependencies = [ "num-format", "num-traits", "once_cell", + "primitive-types", "prost 0.9.0", "rand", "randomx-rs", @@ -5761,7 +5866,6 @@ dependencies = [ "thiserror", "tokio", "tracing", - "uint", "zeroize", ] @@ -7168,6 +7272,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = 
"wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + [[package]] name = "x25519-dalek" version = "2.0.0" diff --git a/applications/minotari_app_grpc/src/conversions/chain_metadata.rs b/applications/minotari_app_grpc/src/conversions/chain_metadata.rs index 5a60eec672..3a5c3e7e69 100644 --- a/applications/minotari_app_grpc/src/conversions/chain_metadata.rs +++ b/applications/minotari_app_grpc/src/conversions/chain_metadata.rs @@ -26,12 +26,13 @@ use crate::tari_rpc as grpc; impl From for grpc::MetaData { fn from(meta: ChainMetadata) -> Self { - let diff = meta.accumulated_difficulty(); + let mut diff = [0u8; 32]; + meta.accumulated_difficulty().to_big_endian(&mut diff); Self { height_of_longest_chain: meta.height_of_longest_chain(), best_block: meta.best_block().to_vec(), pruned_height: meta.pruned_height(), - accumulated_difficulty: diff.to_be_bytes().to_vec(), + accumulated_difficulty: diff.to_vec(), } } } diff --git a/applications/minotari_node/src/commands/command/header_stats.rs b/applications/minotari_node/src/commands/command/header_stats.rs index 3a00ed16d2..4b9e01192f 100644 --- a/applications/minotari_node/src/commands/command/header_stats.rs +++ b/applications/minotari_node/src/commands/command/header_stats.rs @@ -145,8 +145,8 @@ impl CommandContext { Utc ), target_diff.get(pow_algo).len(), - acc_monero.as_u64(), - acc_sha3.as_u64(), + acc_monero, + acc_sha3, )?; output.write_all(&buff).await?; diff --git a/base_layer/common_types/Cargo.toml b/base_layer/common_types/Cargo.toml index 47e3d557de..ac526be069 100644 --- a/base_layer/common_types/Cargo.toml +++ b/base_layer/common_types/Cargo.toml @@ -23,3 +23,4 @@ thiserror = "1.0.29" tokio = { version = "1.23", features = ["time", "sync"] } base64 = "0.21.0" blake2 = "0.10" +primitive-types = { version = "0.12", features = ["serde"] } diff --git a/base_layer/common_types/src/chain_metadata.rs b/base_layer/common_types/src/chain_metadata.rs index 8a566508ce..ca08d44753 100644 --- a/base_layer/common_types/src/chain_metadata.rs +++ b/base_layer/common_types/src/chain_metadata.rs @@ -22,6 +22,7 @@ use std::fmt::{Display, Error, Formatter}; +use primitive_types::U256; use serde::{Deserialize, Serialize}; use tari_utilities::hex::Hex; @@ -43,7 +44,7 @@ pub struct ChainMetadata { /// provided. Archival nodes wil always have an `pruned_height` of zero. 
pruned_height: u64, /// The total accumulated proof of work of the longest chain - accumulated_difficulty: u128, + accumulated_difficulty: U256, /// Timestamp of the tip block in the longest valid chain timestamp: u64, } @@ -54,7 +55,7 @@ impl ChainMetadata { hash: BlockHash, pruning_horizon: u64, pruned_height: u64, - accumulated_difficulty: u128, + accumulated_difficulty: U256, timestamp: u64, ) -> ChainMetadata { ChainMetadata { @@ -73,7 +74,7 @@ impl ChainMetadata { best_block: FixedHash::zero(), pruning_horizon: 0, pruned_height: 0, - accumulated_difficulty: 0, + accumulated_difficulty: 0.into(), timestamp: 0, } } @@ -128,7 +129,7 @@ impl ChainMetadata { self.pruned_height } - pub fn accumulated_difficulty(&self) -> u128 { + pub fn accumulated_difficulty(&self) -> U256 { self.accumulated_difficulty } diff --git a/base_layer/core/Cargo.toml b/base_layer/core/Cargo.toml index 6b32f655e3..4beb7bb763 100644 --- a/base_layer/core/Cargo.toml +++ b/base_layer/core/Cargo.toml @@ -72,8 +72,8 @@ strum_macros = "0.22" thiserror = "1.0.26" tokio = { version = "1.23", features = ["time", "sync", "macros"] } tracing = "0.1.26" -uint = { version = "0.9", default-features = false } zeroize = "1" +primitive-types = { version = "0.12", features = ["serde"] } [dev-dependencies] criterion = { version = "0.4.0" } diff --git a/base_layer/core/src/base_node/chain_metadata_service/service.rs b/base_layer/core/src/base_node/chain_metadata_service/service.rs index fbd89604e9..1253c167e4 100644 --- a/base_layer/core/src/base_node/chain_metadata_service/service.rs +++ b/base_layer/core/src/base_node/chain_metadata_service/service.rs @@ -23,7 +23,6 @@ use std::{convert::TryFrom, sync::Arc}; use log::*; -use num_format::{Locale, ToFormattedString}; use prost::Message; use tari_common::log_if_error; use tari_common_types::chain_metadata::ChainMetadata; @@ -204,7 +203,7 @@ impl ChainMetadataService { "Received chain metadata from NodeId '{}' #{}, Acc_diff {}", event.node_id, chain_metadata.height_of_longest_chain(), - chain_metadata.accumulated_difficulty().to_formatted_string(&Locale::en), + chain_metadata.accumulated_difficulty(), ); let peer_chain_metadata = PeerChainMetadata::new(event.node_id.clone(), chain_metadata, event.latency); @@ -225,6 +224,7 @@ mod test { use std::convert::TryInto; use futures::StreamExt; + use primitive_types::U256; use tari_comms::{peer_manager::NodeId, test_utils::mocks::create_connectivity_mock}; use tari_p2p::services::liveness::{ mock::{create_p2p_liveness_mock, LivenessMockState}, @@ -253,7 +253,9 @@ mod test { } fn create_sample_proto_chain_metadata() -> proto::ChainMetadata { - let diff: u128 = 1; + let diff: U256 = 1.into(); + let mut bytes = [0u8; 32]; + diff.to_big_endian(&mut bytes); proto::ChainMetadata { height_of_longest_chain: 1, best_block: vec![ @@ -261,7 +263,7 @@ mod test { 28, 29, 30, 31, ], pruned_height: 0, - accumulated_difficulty: diff.to_be_bytes().to_vec(), + accumulated_difficulty: bytes.to_vec(), timestamp: EpochTime::now().as_u64(), } } @@ -303,7 +305,6 @@ mod test { }); service.update_liveness_chain_metadata().await.unwrap(); - assert_eq!(liveness_mock_state.call_count(), 1); let last_call = liveness_mock_state.take_calls().remove(0); diff --git a/base_layer/core/src/base_node/proto/chain_metadata.rs b/base_layer/core/src/base_node/proto/chain_metadata.rs index 33a19dcee7..702141d288 100644 --- a/base_layer/core/src/base_node/proto/chain_metadata.rs +++ b/base_layer/core/src/base_node/proto/chain_metadata.rs @@ -20,31 +20,27 @@ // WHETHER IN CONTRACT, 
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -use std::{ - convert::{TryFrom, TryInto}, - mem, -}; +use std::convert::{TryFrom, TryInto}; +use primitive_types::U256; use tari_common_types::{chain_metadata::ChainMetadata, types::FixedHash}; use crate::proto::base_node as proto; +const ACCUMULATED_DIFFICULTY_BYTE_SIZE: usize = 32; impl TryFrom for ChainMetadata { type Error = String; fn try_from(metadata: proto::ChainMetadata) -> Result { - const ACC_DIFFICULTY_ARRAY_LEN: usize = mem::size_of::(); - if metadata.accumulated_difficulty.len() != ACC_DIFFICULTY_ARRAY_LEN { + if metadata.accumulated_difficulty.len() != ACCUMULATED_DIFFICULTY_BYTE_SIZE { return Err(format!( "Invalid accumulated difficulty byte length. {} was expected but the actual length was {}", - ACC_DIFFICULTY_ARRAY_LEN, + ACCUMULATED_DIFFICULTY_BYTE_SIZE, metadata.accumulated_difficulty.len() )); } - let mut acc_diff = [0; ACC_DIFFICULTY_ARRAY_LEN]; - acc_diff.copy_from_slice(&metadata.accumulated_difficulty[0..ACC_DIFFICULTY_ARRAY_LEN]); - let accumulated_difficulty = u128::from_be_bytes(acc_diff); + let accumulated_difficulty = U256::from_big_endian(&metadata.accumulated_difficulty); let height_of_longest_chain = metadata.height_of_longest_chain; let pruning_horizon = if metadata.pruned_height == 0 { @@ -73,12 +69,15 @@ impl TryFrom for ChainMetadata { impl From for proto::ChainMetadata { fn from(metadata: ChainMetadata) -> Self { - let accumulated_difficulty = metadata.accumulated_difficulty().to_be_bytes().to_vec(); + let mut accumulated_difficulty = [0u8; ACCUMULATED_DIFFICULTY_BYTE_SIZE]; + metadata + .accumulated_difficulty() + .to_big_endian(&mut accumulated_difficulty); Self { height_of_longest_chain: metadata.height_of_longest_chain(), best_block: metadata.best_block().to_vec(), pruned_height: metadata.pruned_height(), - accumulated_difficulty, + accumulated_difficulty: accumulated_difficulty.to_vec(), timestamp: metadata.timestamp(), } } diff --git a/base_layer/core/src/base_node/state_machine_service/states/listening.rs b/base_layer/core/src/base_node/state_machine_service/states/listening.rs index 50aeaf8621..9e030714fc 100644 --- a/base_layer/core/src/base_node/state_machine_service/states/listening.rs +++ b/base_layer/core/src/base_node/state_machine_service/states/listening.rs @@ -28,7 +28,6 @@ use std::{ }; use log::*; -use num_format::{Locale, ToFormattedString}; use serde::{Deserialize, Serialize}; use tari_common_types::chain_metadata::ChainMetadata; use tari_utilities::epoch_time::EpochTime; @@ -339,9 +338,9 @@ fn determine_sync_mode( with an accumulated difficulty of {}, and the network chain tip is at #{} with an accumulated difficulty \ of {}", local_tip_height, - local_tip_accum_difficulty.to_formatted_string(&Locale::en), + local_tip_accum_difficulty, network_tip_height, - network_tip_accum_difficulty.to_formatted_string(&Locale::en), + network_tip_accum_difficulty, ); // If both the local and remote are pruned mode, we need to ensure that the remote pruning horizon is @@ -423,9 +422,9 @@ fn determine_sync_mode( "Our blockchain is up-to-date." 
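The two `ChainMetadata` conversions above settle on a fixed 32-byte big-endian encoding for the `U256` accumulated difficulty, and the receiving side rejects any other length. A minimal sketch of that round trip, assuming only the `primitive-types` crate this patch adds; the `encode`/`decode` helpers are illustrative names, not functions from the codebase:

```rust
use primitive_types::U256;

const ACCUMULATED_DIFFICULTY_BYTE_SIZE: usize = 32;

// Serialize a U256 as exactly 32 big-endian bytes, as the gRPC and p2p conversions do.
fn encode(value: U256) -> Vec<u8> {
    let mut buf = [0u8; ACCUMULATED_DIFFICULTY_BYTE_SIZE];
    value.to_big_endian(&mut buf); // panics only if the buffer is not 32 bytes
    buf.to_vec()
}

// Deserialize, rejecting any payload that is not exactly 32 bytes long.
fn decode(bytes: &[u8]) -> Result<U256, String> {
    if bytes.len() != ACCUMULATED_DIFFICULTY_BYTE_SIZE {
        return Err(format!(
            "Invalid accumulated difficulty byte length. {} was expected but the actual length was {}",
            ACCUMULATED_DIFFICULTY_BYTE_SIZE,
            bytes.len()
        ));
    }
    Ok(U256::from_big_endian(bytes))
}

fn main() {
    let diff = U256::from(123_456_789u64);
    let bytes = encode(diff);
    assert_eq!(bytes.len(), ACCUMULATED_DIFFICULTY_BYTE_SIZE);
    assert_eq!(decode(&bytes).unwrap(), diff);
    assert!(decode(&bytes[1..]).is_err()); // truncated payloads are rejected
}
```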
}, local.height_of_longest_chain(), - local_tip_accum_difficulty.to_formatted_string(&Locale::en), + local_tip_accum_difficulty, network.claimed_chain_metadata().height_of_longest_chain(), - network_tip_accum_difficulty.to_formatted_string(&Locale::en), + network_tip_accum_difficulty, ); UpToDate } @@ -433,6 +432,7 @@ fn determine_sync_mode( #[cfg(test)] mod test { + use primitive_types::U256; use rand::rngs::OsRng; use tari_common_types::types::FixedHash; use tari_comms::{peer_manager::NodeId, types::CommsPublicKey}; @@ -452,11 +452,11 @@ mod test { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, ]); - const ACCUMULATED_DIFFICULTY: u128 = 10000; + let accumulated_difficulty = U256::from(10000); let archival_node = PeerChainMetadata::new( random_node_id(), - ChainMetadata::new(NETWORK_TIP_HEIGHT, block_hash, 0, 0, ACCUMULATED_DIFFICULTY, 0), + ChainMetadata::new(NETWORK_TIP_HEIGHT, block_hash, 0, 0, accumulated_difficulty, 0), None, ); @@ -467,7 +467,7 @@ mod test { block_hash, 0, 0, - ACCUMULATED_DIFFICULTY - 1000, + accumulated_difficulty - U256::from(1000), 0, ), None, diff --git a/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs b/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs index 7ff60f6b09..a77664ef74 100644 --- a/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs +++ b/base_layer/core/src/base_node/sync/block_sync/synchronizer.rs @@ -28,7 +28,6 @@ use std::{ use futures::StreamExt; use log::*; -use num_format::{Locale, ToFormattedString}; use tari_comms::{connectivity::ConnectivityRequester, peer_manager::NodeId, protocol::rpc::RpcClient, PeerConnection}; use tari_utilities::hex::Hex; use tokio::task; @@ -393,8 +392,7 @@ impl<'a, B: BlockchainBackend + 'static> BlockSynchronizer<'a, B> { timer.elapsed(), block .accumulated_data() - .total_accumulated_difficulty - .to_formatted_string(&Locale::en), + .total_accumulated_difficulty, block.accumulated_data().accumulated_randomx_difficulty, block.accumulated_data().accumulated_sha3x_difficulty, latency diff --git a/base_layer/core/src/base_node/sync/header_sync/error.rs b/base_layer/core/src/base_node/sync/header_sync/error.rs index e83ee7c05a..e286609049 100644 --- a/base_layer/core/src/base_node/sync/header_sync/error.rs +++ b/base_layer/core/src/base_node/sync/header_sync/error.rs @@ -22,6 +22,7 @@ use std::time::Duration; +use primitive_types::U256; use tari_comms::{ connectivity::ConnectivityError, peer_manager::NodeId, @@ -77,9 +78,9 @@ pub enum BlockHeaderSyncError { {local}" )] PeerSentInaccurateChainMetadata { - claimed: u128, - actual: Option, - local: u128, + claimed: U256, + actual: Option, + local: U256, }, #[error("This peer sent too many headers ({0}) in response to a chain split request")] PeerSentTooManyHeaders(usize), diff --git a/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs b/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs index 9a3bf2edcc..bce5ada1a3 100644 --- a/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs +++ b/base_layer/core/src/base_node/sync/header_sync/synchronizer.rs @@ -27,6 +27,7 @@ use std::{ use futures::StreamExt; use log::*; +use primitive_types::U256; use tari_common_types::{chain_metadata::ChainMetadata, types::HashOutput}; use tari_comms::{ connectivity::ConnectivityRequester, @@ -631,7 +632,7 @@ impl<'a, B: BlockchainBackend + 'static> HeaderSynchronizer<'a, B> { let mut last_sync_timer = Instant::now(); - let mut 
last_total_accumulated_difficulty = 0; + let mut last_total_accumulated_difficulty = U256::zero(); let mut avg_latency = RollingAverageTime::new(20); let mut prev_height: Option = None; while let Some(header) = header_stream.next().await { diff --git a/base_layer/core/src/base_node/sync/header_sync/validator.rs b/base_layer/core/src/base_node/sync/header_sync/validator.rs index 416c711011..3e49e0b1a2 100644 --- a/base_layer/core/src/base_node/sync/header_sync/validator.rs +++ b/base_layer/core/src/base_node/sync/header_sync/validator.rs @@ -22,6 +22,7 @@ use std::cmp::Ordering; use log::*; +use primitive_types::U256; use tari_common_types::types::HashOutput; use tari_utilities::{epoch_time::EpochTime, hex::Hex}; @@ -109,7 +110,7 @@ impl BlockHeaderSyncValidator { self.valid_headers().last() } - pub async fn validate(&mut self, header: BlockHeader) -> Result { + pub async fn validate(&mut self, header: BlockHeader) -> Result { let state = self.state(); let constants = self.consensus_rules.consensus_constants(header.height); diff --git a/base_layer/core/src/blocks/accumulated_data.rs b/base_layer/core/src/blocks/accumulated_data.rs index bde0a81db6..1227a419d1 100644 --- a/base_layer/core/src/blocks/accumulated_data.rs +++ b/base_layer/core/src/blocks/accumulated_data.rs @@ -26,7 +26,7 @@ use std::{ }; use log::*; -use num_format::{Locale, ToFormattedString}; +use primitive_types::U256; use serde::{Deserialize, Serialize}; use tari_common_types::types::{Commitment, HashOutput, PrivateKey}; use tari_mmr::{pruned_hashset::PrunedHashSet, ArrayLike}; @@ -34,7 +34,7 @@ use tari_utilities::hex::Hex; use crate::{ blocks::{error::BlockError, Block, BlockHeader}, - proof_of_work::{difficulty::CheckedAdd, AchievedTargetDifficulty, Difficulty, PowAlgorithm}, + proof_of_work::{AccumulatedDifficulty, AchievedTargetDifficulty, Difficulty, PowAlgorithm}, transactions::aggregated_body::AggregateBody, }; @@ -128,7 +128,7 @@ impl BlockHeaderAccumulatedDataBuilder<'_> { PowAlgorithm::RandomX => ( previous_accum .accumulated_randomx_difficulty - .checked_add(achieved_target.achieved()) + .checked_add_difficulty(achieved_target.achieved()) .ok_or(BlockError::DifficultyOverflow)?, previous_accum.accumulated_sha3x_difficulty, ), @@ -136,7 +136,7 @@ impl BlockHeaderAccumulatedDataBuilder<'_> { previous_accum.accumulated_randomx_difficulty, previous_accum .accumulated_sha3x_difficulty - .checked_add(achieved_target.achieved()) + .checked_add_difficulty(achieved_target.achieved()) .ok_or(BlockError::DifficultyOverflow)?, ), }; @@ -152,7 +152,7 @@ impl BlockHeaderAccumulatedDataBuilder<'_> { hash, total_kernel_offset, achieved_difficulty: achieved_target.achieved(), - total_accumulated_difficulty: u128::from(randomx_diff.as_u64()) * u128::from(sha3x_diff.as_u64()), + total_accumulated_difficulty: U256::from(randomx_diff.as_u128()) * U256::from(sha3x_diff.as_u128()), accumulated_randomx_difficulty: randomx_diff, accumulated_sha3x_difficulty: sha3x_diff, target_difficulty: achieved_target.target(), @@ -160,7 +160,7 @@ impl BlockHeaderAccumulatedDataBuilder<'_> { trace!( target: LOG_TARGET, "Calculated: Tot_acc_diff {}, RandomX {}, SHA3 {}", - result.total_accumulated_difficulty.to_formatted_string(&Locale::en), + result.total_accumulated_difficulty, result.accumulated_randomx_difficulty, result.accumulated_sha3x_difficulty, ); @@ -179,13 +179,13 @@ pub struct BlockHeaderAccumulatedData { pub achieved_difficulty: Difficulty, /// The total accumulated difficulty for all blocks since Genesis, but not including this block, 
tracked /// separately. - pub total_accumulated_difficulty: u128, + pub total_accumulated_difficulty: U256, /// The total accumulated difficulty for RandomX proof of work for all blocks since Genesis, /// but not including this block, tracked separately. - pub accumulated_randomx_difficulty: Difficulty, + pub accumulated_randomx_difficulty: AccumulatedDifficulty, /// The total accumulated difficulty for SHA3 proof of work for all blocks since Genesis, /// but not including this block, tracked separately. - pub accumulated_sha3x_difficulty: Difficulty, + pub accumulated_sha3x_difficulty: AccumulatedDifficulty, /// The target difficulty for solving the current block using the specified proof of work algorithm. pub target_difficulty: Difficulty, } diff --git a/base_layer/core/src/blocks/genesis_block.rs b/base_layer/core/src/blocks/genesis_block.rs index a2d86f91bc..a0ef9f5a13 100644 --- a/base_layer/core/src/blocks/genesis_block.rs +++ b/base_layer/core/src/blocks/genesis_block.rs @@ -31,7 +31,7 @@ use tari_utilities::ByteArray; use crate::{ blocks::{block::Block, BlockHeader, BlockHeaderAccumulatedData, ChainBlock}, - proof_of_work::{Difficulty, PowAlgorithm, ProofOfWork}, + proof_of_work::{AccumulatedDifficulty, Difficulty, PowAlgorithm, ProofOfWork}, transactions::{aggregated_body::AggregateBody, transaction_components::TransactionOutput}, OutputSmt, }; @@ -131,9 +131,9 @@ pub fn get_stagenet_genesis_block() -> ChainBlock { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), achieved_difficulty: Difficulty::min(), - total_accumulated_difficulty: 1, - accumulated_randomx_difficulty: Difficulty::min(), - accumulated_sha3x_difficulty: Difficulty::min(), + total_accumulated_difficulty: 1.into(), + accumulated_randomx_difficulty: AccumulatedDifficulty::min(), + accumulated_sha3x_difficulty: AccumulatedDifficulty::min(), target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() @@ -184,9 +184,9 @@ pub fn get_nextnet_genesis_block() -> ChainBlock { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), achieved_difficulty: Difficulty::min(), - total_accumulated_difficulty: 1, - accumulated_randomx_difficulty: Difficulty::min(), - accumulated_sha3x_difficulty: Difficulty::min(), + total_accumulated_difficulty: 1.into(), + accumulated_randomx_difficulty: AccumulatedDifficulty::min(), + accumulated_sha3x_difficulty: AccumulatedDifficulty::min(), target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() @@ -242,9 +242,9 @@ pub fn get_igor_genesis_block() -> ChainBlock { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), achieved_difficulty: Difficulty::min(), - total_accumulated_difficulty: 1, - accumulated_randomx_difficulty: Difficulty::min(), - accumulated_sha3x_difficulty: Difficulty::min(), + total_accumulated_difficulty: 1.into(), + accumulated_randomx_difficulty: AccumulatedDifficulty::min(), + accumulated_sha3x_difficulty: AccumulatedDifficulty::min(), target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() @@ -296,9 +296,9 @@ pub fn get_esmeralda_genesis_block() -> ChainBlock { hash: block.hash(), total_kernel_offset: block.header.total_kernel_offset.clone(), achieved_difficulty: Difficulty::min(), - total_accumulated_difficulty: 1, - accumulated_randomx_difficulty: Difficulty::min(), - accumulated_sha3x_difficulty: Difficulty::min(), + 
total_accumulated_difficulty: 1.into(), + accumulated_randomx_difficulty: AccumulatedDifficulty::min(), + accumulated_sha3x_difficulty: AccumulatedDifficulty::min(), target_difficulty: Difficulty::min(), }; ChainBlock::try_construct(Arc::new(block), accumulated_data).unwrap() diff --git a/base_layer/core/src/chain_storage/async_db.rs b/base_layer/core/src/chain_storage/async_db.rs index 76b68b338c..b24cd9bc72 100644 --- a/base_layer/core/src/chain_storage/async_db.rs +++ b/base_layer/core/src/chain_storage/async_db.rs @@ -22,6 +22,7 @@ use std::{mem, ops::RangeBounds, sync::Arc, time::Instant}; use log::*; +use primitive_types::U256; use rand::{rngs::OsRng, RngCore}; use tari_common_types::{ chain_metadata::ChainMetadata, @@ -301,7 +302,7 @@ impl<'a, B: BlockchainBackend + 'static> AsyncDbTransaction<'a, B> { &mut self, height: u64, hash: HashOutput, - accumulated_difficulty: u128, + accumulated_difficulty: U256, expected_prev_best_block: HashOutput, timestamp: u64, ) -> &mut Self { diff --git a/base_layer/core/src/chain_storage/blockchain_database.rs b/base_layer/core/src/chain_storage/blockchain_database.rs index b7261ef388..3a6c7881e3 100644 --- a/base_layer/core/src/chain_storage/blockchain_database.rs +++ b/base_layer/core/src/chain_storage/blockchain_database.rs @@ -34,6 +34,7 @@ use std::{ use blake2::Blake2b; use digest::consts::U32; use log::*; +use primitive_types::U256; use serde::{Deserialize, Serialize}; use tari_common_types::{ chain_metadata::ChainMetadata, @@ -371,7 +372,7 @@ where B: BlockchainBackend /// Return the accumulated proof of work of the longest chain. /// The proof of work is returned as the product of total difficulties of all PoW algorithms - pub fn get_accumulated_difficulty(&self) -> Result { + pub fn get_accumulated_difficulty(&self) -> Result { let db = self.db_read_access()?; Ok(db.fetch_chain_metadata()?.accumulated_difficulty()) } @@ -2631,7 +2632,7 @@ mod test { .unwrap(); let fork_tip = access.fetch_orphan_chain_tip_by_hash(block.hash()).unwrap().unwrap(); assert_eq!(fork_tip, block.to_chain_header()); - assert_eq!(fork_tip.accumulated_data().total_accumulated_difficulty, 3); + assert_eq!(fork_tip.accumulated_data().total_accumulated_difficulty, 3.into()); let strongest_tips = access.fetch_strongest_orphan_chain_tips().unwrap().len(); assert_eq!(strongest_tips, 1); @@ -2689,7 +2690,7 @@ mod test { let fork_tip_1 = access.fetch_orphan_chain_tip_by_hash(block.hash()).unwrap().unwrap(); assert_eq!(fork_tip_1, block.to_chain_header()); - assert_eq!(fork_tip_1.accumulated_data().total_accumulated_difficulty, 5); + assert_eq!(fork_tip_1.accumulated_data().total_accumulated_difficulty, 5.into()); // Fork 2 (add 1 block) let block = orphan_chain_2.get("B3").unwrap().clone(); @@ -2698,7 +2699,7 @@ mod test { let fork_tip_2 = access.fetch_orphan_chain_tip_by_hash(block.hash()).unwrap().unwrap(); assert_eq!(fork_tip_2, block.to_chain_header()); - assert_eq!(fork_tip_2.accumulated_data().total_accumulated_difficulty, 2); + assert_eq!(fork_tip_2.accumulated_data().total_accumulated_difficulty, 2.into()); // Fork 3 (add 1 block) let block = orphan_chain_3.get("B4").unwrap().clone(); @@ -2707,7 +2708,7 @@ mod test { let fork_tip_3 = access.fetch_orphan_chain_tip_by_hash(block.hash()).unwrap().unwrap(); assert_eq!(fork_tip_3, block.to_chain_header()); - assert_eq!(fork_tip_3.accumulated_data().total_accumulated_difficulty, 5); + assert_eq!(fork_tip_3.accumulated_data().total_accumulated_difficulty, 5.into()); assert_ne!(fork_tip_1, fork_tip_2); 
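The accumulated-data builder earlier in this patch computes the chain's total as `U256::from(randomx_diff.as_u128()) * U256::from(sha3x_diff.as_u128())`, which is why the genesis blocks and the tests above now compare `total_accumulated_difficulty` against `1.into()`, `3.into()` and so on. A standalone sketch (helper name illustrative, not from the codebase) of why that product needs a 256-bit type even in the worst case:

```rust
use primitive_types::U256;

// Total accumulated difficulty is the product of the two per-algorithm accumulators.
// Each accumulator is a u128, so the product needs up to 256 bits.
fn total_accumulated_difficulty(randomx: u128, sha3x: u128) -> U256 {
    // Promote both operands first; the U256 multiplication then cannot overflow.
    U256::from(randomx) * U256::from(sha3x)
}

fn main() {
    // Small sanity check.
    assert_eq!(total_accumulated_difficulty(7, 6), U256::from(42u64));

    // Worst case: both accumulators saturated. (2^128 - 1)^2 still fits in a U256.
    let worst = total_accumulated_difficulty(u128::MAX, u128::MAX);
    assert!(worst < U256::MAX);
    println!("worst-case total accumulated difficulty: {worst}");
}
```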
assert_ne!(fork_tip_1, fork_tip_3); @@ -3035,16 +3036,16 @@ mod test { result[8].assert_reorg(1, 1); assert_added_hashes_eq(&result[5], vec!["B2"], &blocks); - assert_difficulty_eq(&result[5], vec![7]); + assert_difficulty_eq(&result[5], vec![7.into()]); assert_added_hashes_eq(&result[6], vec!["B", "C", "D2"], &blocks); - assert_difficulty_eq(&result[6], vec![3, 4, 10]); + assert_difficulty_eq(&result[6], vec![3.into(), 4.into(), 10.into()]); assert_added_hashes_eq(&result[7], vec!["D3"], &blocks); - assert_difficulty_eq(&result[7], vec![11]); + assert_difficulty_eq(&result[7], vec![11.into()]); assert_added_hashes_eq(&result[8], vec!["D4"], &blocks); - assert_difficulty_eq(&result[8], vec![12]); + assert_difficulty_eq(&result[8], vec![12.into()]); } #[tokio::test] @@ -3291,17 +3292,17 @@ mod test { result[1].assert_added(); result[2].assert_added(); - assert_difficulty_eq(&result[0], vec![2]); - assert_difficulty_eq(&result[1], vec![3]); - assert_difficulty_eq(&result[2], vec![4]); + assert_difficulty_eq(&result[0], vec![2.into()]); + assert_difficulty_eq(&result[1], vec![3.into()]); + assert_difficulty_eq(&result[2], vec![4.into()]); result[3].assert_added(); result[4].assert_added(); result[5].assert_added(); - assert_difficulty_eq(&result[3], vec![6]); - assert_difficulty_eq(&result[4], vec![8]); - assert_difficulty_eq(&result[5], vec![10]); + assert_difficulty_eq(&result[3], vec![6.into()]); + assert_difficulty_eq(&result[4], vec![8.into()]); + assert_difficulty_eq(&result[5], vec![10.into()]); result[6].assert_orphaned(); result[7].assert_orphaned(); @@ -3309,14 +3310,14 @@ mod test { // ("D2->C2", 1, 120), // Chain 2 at 11 result[9].assert_reorg(4, 3); - assert_difficulty_eq(&result[9], vec![6, 8, 10, 11]); + assert_difficulty_eq(&result[9], vec![6.into(), 8.into(), 10.into(), 11.into()]); // ("D1->C1", 1, 120), // Chain 1 at 11 result[10].assert_orphaned(); // ("E1->D1", 1, 120), // Chain 1 at 12 result[11].assert_reorg(5, 4); - assert_difficulty_eq(&result[11], vec![6, 8, 10, 11, 12]); + assert_difficulty_eq(&result[11], vec![6.into(), 8.into(), 10.into(), 11.into(), 12.into()]); // ("E2->D2", 1, 120), // Chain 2 at 12 result[12].assert_orphaned(); @@ -3346,8 +3347,8 @@ mod test { ); } - fn assert_difficulty_eq(result: &BlockAddResult, values: Vec) { - let accum_difficulty: Vec = result + fn assert_difficulty_eq(result: &BlockAddResult, values: Vec) { + let accum_difficulty: Vec = result .added_blocks() .iter() .map(|cb| cb.accumulated_data().total_accumulated_difficulty) diff --git a/base_layer/core/src/chain_storage/db_transaction.rs b/base_layer/core/src/chain_storage/db_transaction.rs index ffe6a8d9c7..1b05b8e2ce 100644 --- a/base_layer/core/src/chain_storage/db_transaction.rs +++ b/base_layer/core/src/chain_storage/db_transaction.rs @@ -26,6 +26,7 @@ use std::{ sync::Arc, }; +use primitive_types::U256; use tari_common_types::types::{BlockHash, Commitment, HashOutput}; use tari_utilities::hex::Hex; @@ -186,7 +187,7 @@ impl DbTransaction { } /// Add an orphan to the orphan tip set - pub fn insert_orphan_chain_tip(&mut self, hash: HashOutput, total_accumulated_difficulty: u128) -> &mut Self { + pub fn insert_orphan_chain_tip(&mut self, hash: HashOutput, total_accumulated_difficulty: U256) -> &mut Self { self.operations .push(WriteOperation::InsertOrphanChainTip(hash, total_accumulated_difficulty)); self @@ -205,7 +206,7 @@ impl DbTransaction { &mut self, height: u64, hash: HashOutput, - accumulated_difficulty: u128, + accumulated_difficulty: U256, expected_prev_best_block: 
HashOutput, timestamp: u64, ) -> &mut Self { @@ -294,7 +295,7 @@ pub enum WriteOperation { DeleteOrphan(HashOutput), DeleteTipBlock(HashOutput), DeleteOrphanChainTip(HashOutput), - InsertOrphanChainTip(HashOutput, u128), + InsertOrphanChainTip(HashOutput, U256), InsertMoneroSeedHeight(Vec, u64), UpdateBlockAccumulatedData { header_hash: HashOutput, @@ -310,7 +311,7 @@ pub enum WriteOperation { SetBestBlock { height: u64, hash: HashOutput, - accumulated_difficulty: u128, + accumulated_difficulty: U256, expected_prev_best_block: HashOutput, timestamp: u64, }, diff --git a/base_layer/core/src/chain_storage/lmdb_db/lmdb_db.rs b/base_layer/core/src/chain_storage/lmdb_db/lmdb_db.rs index 3eab3c2041..28e561f1bb 100644 --- a/base_layer/core/src/chain_storage/lmdb_db/lmdb_db.rs +++ b/base_layer/core/src/chain_storage/lmdb_db/lmdb_db.rs @@ -25,6 +25,7 @@ use std::{convert::TryFrom, fmt, fs, fs::File, ops::Deref, path::Path, sync::Arc use fs2::FileExt; use lmdb_zero::{open, ConstTransaction, Database, Environment, ReadTransaction, WriteTransaction}; use log::*; +use primitive_types::U256; use serde::{Deserialize, Serialize}; use tari_common_types::{ chain_metadata::ChainMetadata, @@ -2391,7 +2392,7 @@ fn fetch_best_block_timestamp(txn: &ConstTransaction<'_>, db: &Database) -> Resu } // Fetches the accumulated work from the provided metadata db. -fn fetch_accumulated_work(txn: &ConstTransaction<'_>, db: &Database) -> Result { +fn fetch_accumulated_work(txn: &ConstTransaction<'_>, db: &Database) -> Result { let k = MetadataKey::AccumulatedWork; let val: Option = lmdb_get(txn, db, &k.as_u32())?; match val { @@ -2462,7 +2463,7 @@ impl fmt::Display for MetadataKey { enum MetadataValue { ChainHeight(u64), BestBlock(BlockHash), - AccumulatedWork(u128), + AccumulatedWork(U256), PruningHorizon(u64), PrunedHeight(u64), HorizonData(HorizonData), diff --git a/base_layer/core/src/chain_storage/mod.rs b/base_layer/core/src/chain_storage/mod.rs index de617c1799..156e9fcb25 100644 --- a/base_layer/core/src/chain_storage/mod.rs +++ b/base_layer/core/src/chain_storage/mod.rs @@ -34,6 +34,7 @@ pub mod async_db; mod block_add_result; pub use block_add_result::BlockAddResult; +use primitive_types::U256; use serde::{Deserialize, Serialize}; mod blockchain_database; @@ -90,5 +91,5 @@ pub use template_registation::TemplateRegistrationEntry; #[derive(Debug, Serialize, Deserialize, Default, Clone, PartialEq, Eq)] pub struct ChainTipData { pub hash: HashOutput, - pub total_accumulated_difficulty: u128, + pub total_accumulated_difficulty: U256, } diff --git a/base_layer/core/src/lib.rs b/base_layer/core/src/lib.rs index cf4ca9672a..95033dee32 100644 --- a/base_layer/core/src/lib.rs +++ b/base_layer/core/src/lib.rs @@ -52,23 +52,6 @@ pub mod transactions; mod common; pub use common::{borsh, one_sided, ConfidentialOutputHasher}; -#[allow(clippy::ptr_offset_with_cast)] -#[allow(clippy::assign_op_pattern)] -#[allow(clippy::manual_range_contains)] -#[allow(clippy::range_plus_one)] -pub mod large_ints { - uint::construct_uint! { - /// 256-bit unsigned integer. - pub struct U256(4); - } - - uint::construct_uint! { - /// 512-bit unsigned integer. 
- pub struct U512(8); - } -} - -pub use large_ints::{U256, U512}; #[cfg(feature = "base_node")] mod domain_hashing { use blake2::Blake2b; diff --git a/base_layer/core/src/proof_of_work/accumulated_difficulty.rs b/base_layer/core/src/proof_of_work/accumulated_difficulty.rs new file mode 100644 index 0000000000..cd5646590b --- /dev/null +++ b/base_layer/core/src/proof_of_work/accumulated_difficulty.rs @@ -0,0 +1,212 @@ +// Copyright 2019. The Tari Project +// +// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the +// following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following +// disclaimer. +// +// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the +// following disclaimer in the documentation and/or other materials provided with the distribution. +// +// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote +// products derived from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +use std::fmt; + +use serde::{Deserialize, Serialize}; +use tari_utilities::ByteArray; + +use crate::proof_of_work::{difficulty::MIN_DIFFICULTY, error::DifficultyError, Difficulty}; + +/// The difficulty is defined as the maximum target divided by the block hash. 
+#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Deserialize, Serialize)] +pub struct AccumulatedDifficulty(u128); + +impl AccumulatedDifficulty { + /// A const constructor for Difficulty + pub fn from_u128(d: u128) -> Result { + if d < u128::from(MIN_DIFFICULTY) { + return Err(DifficultyError::InvalidDifficulty); + } + Ok(Self(d)) + } + + /// Return the difficulty as a `u128` + pub fn as_u128(self) -> u128 { + self.0 + } + + /// Difficulty of MIN_DIFFICULTY + pub fn min() -> AccumulatedDifficulty { + AccumulatedDifficulty(MIN_DIFFICULTY.into()) + } + + /// Maximum Difficulty + pub fn max() -> AccumulatedDifficulty { + AccumulatedDifficulty(u128::MAX) + } + + pub fn checked_add_difficulty(&self, d: Difficulty) -> Option { + self.0.checked_add(u128::from(d.as_u64())).map(AccumulatedDifficulty) + } + + pub fn to_be_bytes(&self) -> Vec { + self.0.to_be_bytes().to_vec() + } +} + +impl Default for AccumulatedDifficulty { + fn default() -> Self { + AccumulatedDifficulty::min() + } +} + +// impl From for u64 { +// fn from(value: Difficulty) -> Self { +// value.0 +// } +// } +// +impl fmt::Display for AccumulatedDifficulty { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let formatted = self.0; + write!(f, "{}", formatted) + } +} + +// #[cfg(test)] +// mod test { +// use crate::{ +// proof_of_work::{ +// difficulty::{CheckedAdd, CheckedSub, MIN_DIFFICULTY}, +// Difficulty, +// }, +// U256, +// }; +// +// #[test] +// fn add_difficulty() { +// assert_eq!( +// Difficulty::from_u64(1_000).unwrap().checked_add(8_000).unwrap(), +// Difficulty::from_u64(9_000).unwrap() +// ); +// assert_eq!( +// Difficulty::default().checked_add(42).unwrap(), +// Difficulty::from_u64(MIN_DIFFICULTY + 42).unwrap() +// ); +// assert_eq!( +// Difficulty::from_u64(15).unwrap().checked_add(5).unwrap(), +// Difficulty::from_u64(20).unwrap() +// ); +// } +// +// #[test] +// fn test_format() { +// let d = Difficulty::from_u64(1_000_000).unwrap(); +// assert_eq!("1,000,000", format!("{}", d)); +// } +// +// #[test] +// fn difficulty_converts_correctly_at_its_limits() { +// for d in 0..=MIN_DIFFICULTY + 1 { +// if d < MIN_DIFFICULTY { +// assert!(Difficulty::from_u64(d).is_err()); +// } else { +// assert!(Difficulty::from_u64(d).is_ok()); +// } +// } +// assert_eq!(Difficulty::min().as_u64(), MIN_DIFFICULTY); +// assert_eq!(Difficulty::max().as_u64(), u64::MAX); +// } +// +// #[test] +// fn addition_does_not_overflow() { +// let d1 = Difficulty::from_u64(100).unwrap(); +// assert!(d1.checked_add(1).is_some()); +// let d2 = Difficulty::max(); +// assert!(d2.checked_add(1).is_none()); +// } +// +// #[test] +// fn subtraction_does_not_underflow() { +// let d1 = Difficulty::from_u64(100).unwrap(); +// assert!(d1.checked_sub(1).is_some()); +// let d2 = Difficulty::max(); +// assert!(d1.checked_sub(d2).is_none()); +// } +// +// #[test] +// fn be_high_target() { +// let target: &[u8] = &[ +// 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +// 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +// ]; +// let expected = Difficulty::min(); +// assert_eq!(Difficulty::big_endian_difficulty(target).unwrap(), expected); +// } +// +// #[test] +// fn be_max_difficulty() { +// let target = U256::MAX / U256::from(u64::MAX); +// let mut bytes = [0u8; 32]; +// target.to_big_endian(&mut bytes); +// assert_eq!(Difficulty::big_endian_difficulty(&bytes).unwrap(), Difficulty::max()); +// } +// +// #[test] +// fn be_stop_overflow() { +// let target: u64 = 64; +// 
let expected = u64::MAX; +// assert_eq!( +// Difficulty::big_endian_difficulty(&target.to_be_bytes()).unwrap(), +// Difficulty::from_u64(expected).unwrap() +// ); +// } +// +// #[test] +// fn le_high_target() { +// let target: &[u8] = &[ +// 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, +// 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, +// ]; +// let expected = Difficulty::min(); +// assert_eq!(Difficulty::little_endian_difficulty(target).unwrap(), expected); +// } +// +// #[test] +// fn le_max_difficulty() { +// let target = U256::MAX / U256::from(u64::MAX); +// let mut bytes = [0u8; 32]; +// target.to_little_endian(&mut bytes); +// assert_eq!(Difficulty::little_endian_difficulty(&bytes).unwrap(), Difficulty::max()); +// } +// +// #[test] +// fn le_stop_overflow() { +// let target: u64 = 64; +// let expected = u64::MAX; +// assert_eq!( +// Difficulty::little_endian_difficulty(&target.to_be_bytes()).unwrap(), +// Difficulty::from_u64(expected).unwrap() +// ); +// } +// +// #[test] +// fn u256_scalar_to_difficulty_division_by_zero() { +// let bytes = []; +// assert!(Difficulty::little_endian_difficulty(&bytes).is_err()); +// assert!(Difficulty::big_endian_difficulty(&bytes).is_err()); +// let bytes = [0u8; 32]; +// assert!(Difficulty::little_endian_difficulty(&bytes).is_err()); +// assert!(Difficulty::big_endian_difficulty(&bytes).is_err()); +// } +// } diff --git a/base_layer/core/src/proof_of_work/difficulty.rs b/base_layer/core/src/proof_of_work/difficulty.rs index 2d7a38a600..13e05d34a1 100644 --- a/base_layer/core/src/proof_of_work/difficulty.rs +++ b/base_layer/core/src/proof_of_work/difficulty.rs @@ -23,13 +23,11 @@ use std::fmt; use num_format::{Locale, ToFormattedString}; +use primitive_types::U256; use serde::{Deserialize, Serialize}; use tari_utilities::epoch_time::EpochTime; -use crate::{ - proof_of_work::{error::DifficultyError, DifficultyAdjustmentError}, - U256, -}; +use crate::proof_of_work::{error::DifficultyError, DifficultyAdjustmentError}; /// Minimum difficulty, enforced in diff retargeting /// avoids getting stuck when trying to increase difficulty subject to dampening @@ -187,14 +185,12 @@ pub trait DifficultyAdjustment { #[cfg(test)] mod test { - use crate::{ - proof_of_work::{ - difficulty::{CheckedAdd, CheckedSub, MIN_DIFFICULTY}, - Difficulty, - }, - U256, - }; + use primitive_types::U256; + use crate::proof_of_work::{ + difficulty::{CheckedAdd, CheckedSub, MIN_DIFFICULTY}, + Difficulty, + }; #[test] fn add_difficulty() { assert_eq!( diff --git a/base_layer/core/src/proof_of_work/error.rs b/base_layer/core/src/proof_of_work/error.rs index f5a2f8f3be..8ddfa7d80b 100644 --- a/base_layer/core/src/proof_of_work/error.rs +++ b/base_layer/core/src/proof_of_work/error.rs @@ -62,4 +62,6 @@ pub enum DifficultyError { MaxBlockTimeOverflow, #[error("Divide by zero")] DivideByZero, + #[error("Overflow")] + Overflow, } diff --git a/base_layer/core/src/proof_of_work/mod.rs b/base_layer/core/src/proof_of_work/mod.rs index afe317d240..80ce8855ba 100644 --- a/base_layer/core/src/proof_of_work/mod.rs +++ b/base_layer/core/src/proof_of_work/mod.rs @@ -25,6 +25,10 @@ pub(crate) mod difficulty; #[cfg(any(feature = "base_node", feature = "transactions"))] pub use difficulty::{Difficulty, DifficultyAdjustment}; +#[cfg(any(feature = "base_node", feature = "transactions"))] +pub(crate) mod accumulated_difficulty; +#[cfg(any(feature = "base_node", feature = "transactions"))] +pub use 
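With the new module re-exported from `proof_of_work`, callers work with `AccumulatedDifficulty` through the small API added above: `min`/`max`, `from_u128`, `checked_add_difficulty` and `to_be_bytes`. A usage sketch, assuming a crate that depends on `tari_core` at this commit with the `base_node` feature enabled (the values are illustrative):

```rust
use tari_core::proof_of_work::{AccumulatedDifficulty, Difficulty};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Start from the genesis value and add one block's achieved difficulty.
    let achieved = Difficulty::from_u64(50_000)?;
    let total = AccumulatedDifficulty::min()
        .checked_add_difficulty(achieved)
        .expect("a u128 accumulator cannot overflow from a single u64 difficulty");

    // A saturated accumulator refuses to add further instead of silently wrapping.
    assert!(AccumulatedDifficulty::max()
        .checked_add_difficulty(achieved)
        .is_none());

    // 16 big-endian bytes: the encoding used by the protobuf conversion later in the patch.
    assert_eq!(total.to_be_bytes().len(), 16);
    Ok(())
}
```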
accumulated_difficulty::AccumulatedDifficulty; /// Crates for proof of work error #[cfg(any(feature = "base_node", feature = "transactions"))] diff --git a/base_layer/core/src/proto/block.proto b/base_layer/core/src/proto/block.proto index 6da78324c5..db062262bb 100644 --- a/base_layer/core/src/proto/block.proto +++ b/base_layer/core/src/proto/block.proto @@ -98,10 +98,10 @@ message BlockHeaderAccumulatedData { uint64 achieved_difficulty = 1; // The total accumulated difficulty for RandomX proof of work for all blocks since Genesis, // but not including this block, tracked separately. - uint64 accumulated_randomx_difficulty = 2; + bytes accumulated_randomx_difficulty = 2; // The total accumulated difficulty for SHA3 proof of work for all blocks since Genesis, // but not including this block, tracked separately. - uint64 accumulated_sha3x_difficulty = 3; + bytes accumulated_sha3x_difficulty = 3; // The target difficulty for solving the current block using the specified proof of work algorithm. uint64 target_difficulty = 4; // The total accumulated offset for all kernels in the block. diff --git a/base_layer/core/src/proto/block.rs b/base_layer/core/src/proto/block.rs index 769685be0b..8118d1f083 100644 --- a/base_layer/core/src/proto/block.rs +++ b/base_layer/core/src/proto/block.rs @@ -20,15 +20,19 @@ // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -use std::convert::{TryFrom, TryInto}; +use std::{ + convert::{TryFrom, TryInto}, + mem, +}; +use primitive_types::U256; use tari_common_types::types::PrivateKey; use tari_utilities::ByteArray; use super::core as proto; use crate::{ blocks::{Block, BlockHeaderAccumulatedData, HistoricalBlock, NewBlock}, - proof_of_work::Difficulty, + proof_of_work::{AccumulatedDifficulty, Difficulty}, }; //---------------------------------- Block --------------------------------------------// @@ -101,14 +105,20 @@ impl TryFrom for proto::HistoricalBlock { impl From for proto::BlockHeaderAccumulatedData { fn from(source: BlockHeaderAccumulatedData) -> Self { + let accumulated_randomx_difficulty = source.accumulated_randomx_difficulty.to_be_bytes(); + let accumulated_sha3x_difficulty = source.accumulated_sha3x_difficulty.to_be_bytes(); + let mut total_accumulated_difficulty = [0u8; 32]; + source + .total_accumulated_difficulty + .to_big_endian(&mut total_accumulated_difficulty); Self { achieved_difficulty: source.achieved_difficulty.into(), - accumulated_randomx_difficulty: source.accumulated_randomx_difficulty.into(), - accumulated_sha3x_difficulty: source.accumulated_sha3x_difficulty.into(), + accumulated_randomx_difficulty, + accumulated_sha3x_difficulty, target_difficulty: source.target_difficulty.into(), total_kernel_offset: source.total_kernel_offset.to_vec(), hash: source.hash.to_vec(), - total_accumulated_difficulty: Vec::from(source.total_accumulated_difficulty.to_le_bytes()), + total_accumulated_difficulty: total_accumulated_difficulty.to_vec(), } } } @@ -117,17 +127,49 @@ impl TryFrom for BlockHeaderAccumulatedData { type Error = String; fn try_from(source: proto::BlockHeaderAccumulatedData) -> Result { - let mut acc_diff = [0; 16]; - acc_diff.copy_from_slice(&source.total_accumulated_difficulty[0..16]); - let accumulated_difficulty = u128::from_le_bytes(acc_diff); + const TOTAL_ACC_DIFFICULTY_ARRAY_LEN: usize = 32; + if source.total_accumulated_difficulty.len() != TOTAL_ACC_DIFFICULTY_ARRAY_LEN { + return 
Err(format!( + "Invalid accumulated difficulty byte length. {} was expected but the actual length was {}", + TOTAL_ACC_DIFFICULTY_ARRAY_LEN, + source.total_accumulated_difficulty.len() + )); + } + let mut acc_diff = [0u8; TOTAL_ACC_DIFFICULTY_ARRAY_LEN]; + acc_diff.copy_from_slice(&source.total_accumulated_difficulty[0..TOTAL_ACC_DIFFICULTY_ARRAY_LEN]); + let accumulated_difficulty = U256::from_big_endian(&acc_diff); + + const SINGLE_ACC_DIFFICULTY_ARRAY_LEN: usize = mem::size_of::(); + if source.accumulated_sha3x_difficulty.len() != SINGLE_ACC_DIFFICULTY_ARRAY_LEN { + return Err(format!( + "Invalid accumulated Sha3x difficulty byte length. {} was expected but the actual length was {}", + SINGLE_ACC_DIFFICULTY_ARRAY_LEN, + source.accumulated_sha3x_difficulty.len() + )); + } + let mut acc_diff = [0; SINGLE_ACC_DIFFICULTY_ARRAY_LEN]; + acc_diff.copy_from_slice(&source.accumulated_randomx_difficulty[0..SINGLE_ACC_DIFFICULTY_ARRAY_LEN]); + let accumulated_sha3x_difficulty = u128::from_be_bytes(acc_diff); + + if source.accumulated_randomx_difficulty.len() != SINGLE_ACC_DIFFICULTY_ARRAY_LEN { + return Err(format!( + "Invalid accumulated RandomX difficulty byte length. {} was expected but the actual length was {}", + SINGLE_ACC_DIFFICULTY_ARRAY_LEN, + source.accumulated_randomx_difficulty.len() + )); + } + let mut acc_diff = [0; SINGLE_ACC_DIFFICULTY_ARRAY_LEN]; + acc_diff.copy_from_slice(&source.accumulated_randomx_difficulty[0..SINGLE_ACC_DIFFICULTY_ARRAY_LEN]); + let accumulated_randomx_difficulty = u128::from_be_bytes(acc_diff); + let hash = source.hash.try_into().map_err(|_| "Malformed hash".to_string())?; Ok(Self { hash, achieved_difficulty: Difficulty::from_u64(source.achieved_difficulty).map_err(|e| e.to_string())?, total_accumulated_difficulty: accumulated_difficulty, - accumulated_randomx_difficulty: Difficulty::from_u64(source.accumulated_randomx_difficulty) + accumulated_randomx_difficulty: AccumulatedDifficulty::from_u128(accumulated_randomx_difficulty) .map_err(|e| e.to_string())?, - accumulated_sha3x_difficulty: Difficulty::from_u64(source.accumulated_sha3x_difficulty) + accumulated_sha3x_difficulty: AccumulatedDifficulty::from_u128(accumulated_sha3x_difficulty) .map_err(|e| e.to_string())?, target_difficulty: Difficulty::from_u64(source.target_difficulty).map_err(|e| e.to_string())?, total_kernel_offset: PrivateKey::from_canonical_bytes(source.total_kernel_offset.as_slice()) diff --git a/base_layer/core/src/transactions/transaction_components/side_chain/validator_node_registration.rs b/base_layer/core/src/transactions/transaction_components/side_chain/validator_node_registration.rs index 17db00a145..650807ad56 100644 --- a/base_layer/core/src/transactions/transaction_components/side_chain/validator_node_registration.rs +++ b/base_layer/core/src/transactions/transaction_components/side_chain/validator_node_registration.rs @@ -23,6 +23,7 @@ use blake2::Blake2b; use borsh::{BorshDeserialize, BorshSerialize}; use digest::consts::U32; +use primitive_types::U256; use serde::{Deserialize, Serialize}; use tari_common_types::{ epoch::VnEpoch, @@ -33,7 +34,6 @@ use tari_utilities::ByteArray; use crate::{ consensus::DomainSeparatedConsensusHasher, transactions::{transaction_components::ValidatorNodeSignature, TransactionHashDomain}, - U256, }; #[derive(Debug, Clone, Hash, PartialEq, Eq, Deserialize, Serialize, BorshSerialize, BorshDeserialize)] diff --git a/base_layer/core/tests/helpers/block_builders.rs b/base_layer/core/tests/helpers/block_builders.rs index 3f497d7378..3a7407e747 100644 
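In the `BlockHeaderAccumulatedData` conversion above, each per-algorithm accumulator crosses the wire as 16 big-endian bytes (a u128) while the total uses 32 bytes (a U256), and both directions validate the length. (Note that the SHA3x branch above appears to read from `accumulated_randomx_difficulty` after checking the SHA3x length, which looks like a copy-paste slip.) A self-contained sketch of the 16-byte round trip; the `encode_u128`/`decode_u128` helpers are illustrative names only:

```rust
use std::convert::TryInto;

// A per-algorithm accumulated difficulty is serialized as exactly 16 big-endian bytes.
fn encode_u128(value: u128) -> Vec<u8> {
    value.to_be_bytes().to_vec()
}

// Deserialization rejects any payload that is not exactly 16 bytes long.
fn decode_u128(bytes: &[u8]) -> Result<u128, String> {
    let array: [u8; 16] = bytes.try_into().map_err(|_| {
        format!(
            "Invalid accumulated difficulty byte length. 16 was expected but the actual length was {}",
            bytes.len()
        )
    })?;
    Ok(u128::from_be_bytes(array))
}

fn main() {
    let accumulated: u128 = 1 << 70; // larger than any u64 accumulator could hold
    let bytes = encode_u128(accumulated);
    assert_eq!(bytes.len(), 16);
    assert_eq!(decode_u128(&bytes).unwrap(), accumulated);
    assert!(decode_u128(&bytes[..8]).is_err()); // truncated payloads are rejected
}
```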
--- a/base_layer/core/tests/helpers/block_builders.rs +++ b/base_layer/core/tests/helpers/block_builders.rs @@ -33,7 +33,7 @@ use tari_core::{ ChainStorageError, }, consensus::{emission::Emission, ConsensusConstants, ConsensusManager}, - proof_of_work::{sha3x_difficulty, AchievedTargetDifficulty, Difficulty}, + proof_of_work::{sha3x_difficulty, AccumulatedDifficulty, AchievedTargetDifficulty, Difficulty}, transactions::{ key_manager::{TransactionKeyManagerBranch, TransactionKeyManagerInterface, TxoStage}, tari_amount::MicroMinotari, @@ -210,9 +210,9 @@ pub async fn create_genesis_block_with_coinbase_value( hash, total_kernel_offset: Default::default(), achieved_difficulty: Difficulty::min(), - total_accumulated_difficulty: 1, - accumulated_randomx_difficulty: Difficulty::min(), - accumulated_sha3x_difficulty: Difficulty::min(), + total_accumulated_difficulty: 1.into(), + accumulated_randomx_difficulty: AccumulatedDifficulty::min(), + accumulated_sha3x_difficulty: AccumulatedDifficulty::min(), target_difficulty: Difficulty::min(), }) .unwrap(), @@ -251,9 +251,9 @@ pub async fn create_genesis_block_with_utxos( hash, total_kernel_offset: Default::default(), achieved_difficulty: Difficulty::min(), - total_accumulated_difficulty: 1, - accumulated_randomx_difficulty: Difficulty::min(), - accumulated_sha3x_difficulty: Difficulty::min(), + total_accumulated_difficulty: 1.into(), + accumulated_randomx_difficulty: AccumulatedDifficulty::min(), + accumulated_sha3x_difficulty: AccumulatedDifficulty::min(), target_difficulty: Difficulty::min(), }) .unwrap(), diff --git a/base_layer/core/tests/tests/node_state_machine.rs b/base_layer/core/tests/tests/node_state_machine.rs index 81aa942f8d..db52c208e7 100644 --- a/base_layer/core/tests/tests/node_state_machine.rs +++ b/base_layer/core/tests/tests/node_state_machine.rs @@ -283,7 +283,7 @@ async fn test_event_channel() { let node_identity = random_node_identity(); let block_hash = Blake2b::::digest(node_identity.node_id().as_bytes()).into(); - let metadata = ChainMetadata::new(10, block_hash, 2800, 0, 5000, 0); + let metadata = ChainMetadata::new(10, block_hash, 2800, 0, 5000.into(), 0); node.comms .peer_manager() diff --git a/base_layer/wallet/tests/support/base_node_service_mock.rs b/base_layer/wallet/tests/support/base_node_service_mock.rs index 1d77e5ed47..46f755d12d 100644 --- a/base_layer/wallet/tests/support/base_node_service_mock.rs +++ b/base_layer/wallet/tests/support/base_node_service_mock.rs @@ -79,7 +79,7 @@ impl MockBaseNodeService { pub fn set_base_node_state(&mut self, height: Option) { let (chain_metadata, is_synced) = match height { Some(height) => { - let metadata = ChainMetadata::new(height, FixedHash::zero(), 0, 0, 0, 0); + let metadata = ChainMetadata::new(height, FixedHash::zero(), 0, 0, 0.into(), 0); (Some(metadata), Some(true)) }, None => (None, None), @@ -95,7 +95,7 @@ impl MockBaseNodeService { } pub fn set_default_base_node_state(&mut self) { - let metadata = ChainMetadata::new(i64::MAX as u64, FixedHash::zero(), 0, 0, 0, 0); + let metadata = ChainMetadata::new(i64::MAX as u64, FixedHash::zero(), 0, 0, 0.into(), 0); self.state = BaseNodeState { node_id: None, chain_metadata: Some(metadata), diff --git a/base_layer/wallet/tests/transaction_service_tests/service.rs b/base_layer/wallet/tests/transaction_service_tests/service.rs index 5f1bd51ae2..4326665cac 100644 --- a/base_layer/wallet/tests/transaction_service_tests/service.rs +++ b/base_layer/wallet/tests/transaction_service_tests/service.rs @@ -204,7 +204,7 @@ async fn 
setup_transaction_service>( let passphrase = SafePassword::from("My lovely secret passphrase"); let db = WalletDatabase::new(WalletSqliteDatabase::new(db_connection.clone(), passphrase).unwrap()); - let metadata = ChainMetadata::new(std::i64::MAX as u64, FixedHash::zero(), 0, 0, 0, 0); + let metadata = ChainMetadata::new(std::i64::MAX as u64, FixedHash::zero(), 0, 0, 0.into(), 0); db.set_chain_metadata(metadata).unwrap(); diff --git a/base_layer/wallet_ffi/src/callback_handler_tests.rs b/base_layer/wallet_ffi/src/callback_handler_tests.rs index 4851e23061..94e52ffd7c 100644 --- a/base_layer/wallet_ffi/src/callback_handler_tests.rs +++ b/base_layer/wallet_ffi/src/callback_handler_tests.rs @@ -501,7 +501,14 @@ mod test { ) .unwrap(); - let chain_metadata = ChainMetadata::new(1, Default::default(), 0, 0, 123, ts_now.timestamp_millis() as u64); + let chain_metadata = ChainMetadata::new( + 1, + Default::default(), + 0, + 0, + 123.into(), + ts_now.timestamp_millis() as u64, + ); base_node_event_sender .send(Arc::new(BaseNodeEvent::BaseNodeStateChanged(BaseNodeState {
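The remaining hunks are mechanical call-site updates: every `ChainMetadata::new` call in the wallet and node tests now passes the accumulated difficulty as a `U256` (typically written `0.into()` or `123.into()`). A sketch of the updated call shape, assuming the `tari_common_types` crate at this commit together with the `primitive-types` dependency it now carries:

```rust
use primitive_types::U256;
use tari_common_types::{chain_metadata::ChainMetadata, types::FixedHash};

fn main() {
    // The fifth argument is the accumulated difficulty; `123.into()` in the tests
    // above is the same value written here explicitly as a U256.
    let accumulated_difficulty = U256::from(123u64);
    let metadata = ChainMetadata::new(1, FixedHash::zero(), 0, 0, accumulated_difficulty, 0);
    assert_eq!(metadata.accumulated_difficulty(), U256::from(123u64));
}
```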