diff --git a/Cargo.lock b/Cargo.lock
index 08a58d5382..24bba7a5b5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5752,6 +5752,7 @@ version = "0.48.0-pre.1"
 dependencies = [
  "bincode",
  "blake2 0.9.2",
+ "borsh",
  "criterion 0.2.11",
  "croaring",
  "digest 0.9.0",
diff --git a/base_layer/mmr/Cargo.toml b/base_layer/mmr/Cargo.toml
index 5500595c33..3602e453db 100644
--- a/base_layer/mmr/Cargo.toml
+++ b/base_layer/mmr/Cargo.toml
@@ -17,6 +17,7 @@ tari_utilities = { git = "https://github.com/tari-project/tari_utilities.git", t
 tari_crypto = { git = "https://github.com/tari-project/tari-crypto.git", tag = "v0.16.8" }
 tari_common = {path = "../../common"}
 thiserror = "1.0.26"
+borsh = "0.9.3"
 digest = "0.9.0"
 log = "0.4"
 serde = { version = "1.0.97", features = ["derive"] }
diff --git a/base_layer/mmr/src/balanced_binary_merkle_proof.rs b/base_layer/mmr/src/balanced_binary_merkle_proof.rs
index 4d5feaf42d..3db26431b8 100644
--- a/base_layer/mmr/src/balanced_binary_merkle_proof.rs
+++ b/base_layer/mmr/src/balanced_binary_merkle_proof.rs
@@ -20,19 +20,29 @@
 // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-use std::{collections::HashMap, convert::TryInto, marker::PhantomData};
+use std::{
+    collections::HashMap,
+    convert::{TryFrom, TryInto},
+    marker::PhantomData,
+};
 
+use borsh::{BorshDeserialize, BorshSerialize};
 use digest::Digest;
+use serde::{Deserialize, Serialize};
 use tari_common::DomainDigest;
 use tari_utilities::ByteArray;
 use thiserror::Error;
 
 use crate::{common::hash_together, BalancedBinaryMerkleTree, Hash};
 
-#[derive(Debug)]
+pub(crate) fn cast_to_u32(value: usize) -> Result<u32, BalancedBinaryMerkleProofError> {
+    u32::try_from(value).map_err(|_| BalancedBinaryMerkleProofError::MathOverflow)
+}
+
+#[derive(BorshDeserialize, BorshSerialize, Deserialize, Serialize, Clone, Debug, Default, PartialEq, Eq)]
 pub struct BalancedBinaryMerkleProof<D> {
     pub path: Vec<Hash>,
-    pub node_index: usize,
+    pub node_index: u32,
     _phantom: PhantomData<D>,
 }
 
@@ -55,7 +65,10 @@ where D: Digest + DomainDigest
         &computed_root == root
     }
 
-    pub fn generate_proof(tree: &BalancedBinaryMerkleTree<D>, leaf_index: usize) -> Self {
+    pub fn generate_proof(
+        tree: &BalancedBinaryMerkleTree<D>,
+        leaf_index: usize,
+    ) -> Result<Self, BalancedBinaryMerkleProofError> {
         let mut node_index = tree.get_node_index(leaf_index);
         let mut proof = Vec::new();
         while node_index > 0 {
@@ -67,20 +80,22 @@ where D: Digest + DomainDigest
             // Traverse to parent
            node_index = parent;
         }
-        Self {
+        Ok(Self {
             path: proof,
-            node_index: tree.get_node_index(leaf_index),
+            node_index: cast_to_u32(tree.get_node_index(leaf_index))?,
             _phantom: PhantomData,
-        }
+        })
     }
 }
 
 #[derive(Debug, Error)]
-pub enum MergedBalancedBinaryMerkleProofError {
+pub enum BalancedBinaryMerkleProofError {
     #[error("Can't merge zero proofs.")]
     CantMergeZeroProofs,
     #[error("Bad proof semantics")]
     BadProofSemantics,
+    #[error("Math overflow")]
+    MathOverflow,
 }
 
 /// Flag to indicate if proof data represents an index or a node hash
@@ -94,8 +109,8 @@ pub enum MergedBalancedBinaryMerkleDataType {
 
 #[derive(Debug)]
 pub struct MergedBalancedBinaryMerkleProof<D> {
     pub paths: Vec<Vec<(MergedBalancedBinaryMerkleDataType, Vec<u8>)>>, // these tuples can contain indexes or hashes!
-    pub node_indices: Vec<usize>,
-    pub heights: Vec<usize>,
+    pub node_indices: Vec<u32>,
+    pub heights: Vec<u32>,
     _phantom: PhantomData<D>,
 }
 
@@ -104,12 +119,15 @@ where D: Digest + DomainDigest
 {
     pub fn create_from_proofs(
         proofs: Vec<BalancedBinaryMerkleProof<D>>,
-    ) -> Result<Self, MergedBalancedBinaryMerkleProofError> {
-        let heights = proofs.iter().map(|proof| proof.path.len()).collect::<Vec<_>>();
+    ) -> Result<Self, BalancedBinaryMerkleProofError> {
+        let heights = proofs
+            .iter()
+            .map(|proof| cast_to_u32(proof.path.len()))
+            .collect::<Result<Vec<_>, _>>()?;
         let max_height = heights
             .iter()
             .max()
-            .ok_or(MergedBalancedBinaryMerkleProofError::CantMergeZeroProofs)?;
+            .ok_or(BalancedBinaryMerkleProofError::CantMergeZeroProofs)?;
         let mut indices = proofs.iter().map(|proof| proof.node_index).collect::<Vec<_>>();
         let mut paths = vec![Vec::new(); proofs.len()];
         let mut join_indices = vec![None; proofs.len()];
@@ -117,7 +135,7 @@ where D: Digest + DomainDigest
             let mut hash_map = HashMap::new();
             for (index, proof) in proofs.iter().enumerate() {
                 // If this path was already joined ignore it.
-                if join_indices[index].is_none() && proof.path.len() > height {
+                if join_indices[index].is_none() && proof.path.len() > height as usize {
                     let parent = (indices[index] - 1) >> 1;
                     if let Some(other_proof) = hash_map.insert(parent, index) {
                         join_indices[index] = Some(other_proof);
@@ -129,7 +147,7 @@ where D: Digest + DomainDigest
                         0,
                         (
                             MergedBalancedBinaryMerkleDataType::Hash,
-                            proof.path[proof.path.len() - 1 - height].clone(),
+                            proof.path[proof.path.len() - 1 - height as usize].clone(),
                         ),
                     );
                 }
@@ -149,11 +167,11 @@ where D: Digest + DomainDigest
         mut self,
         root: &Hash,
         leaves_hashes: Vec<Hash>,
-    ) -> Result<bool, MergedBalancedBinaryMerkleProofError> {
+    ) -> Result<bool, BalancedBinaryMerkleProofError> {
         // Check that the proof and verifier data match
         let n = self.node_indices.len(); // number of merged proofs
         if self.paths.len() != n || leaves_hashes.len() != n {
-            return Err(MergedBalancedBinaryMerkleProofError::BadProofSemantics);
+            return Err(BalancedBinaryMerkleProofError::BadProofSemantics);
         }
 
         let mut computed_hashes = leaves_hashes;
@@ -161,7 +179,7 @@ where D: Digest + DomainDigest
             .heights
             .iter()
             .max()
-            .ok_or(MergedBalancedBinaryMerkleProofError::CantMergeZeroProofs)?;
+            .ok_or(BalancedBinaryMerkleProofError::CantMergeZeroProofs)?;
 
         // We need to compute the hashes row by row to be sure they are processed correctly.
         for height in (0..*max_height).rev() {
@@ -177,14 +195,14 @@ where D: Digest + DomainDigest
                            hash_or_index
                                .1
                                .as_bytes()
                                .try_into()
-                                .map_err(|_| MergedBalancedBinaryMerkleProofError::BadProofSemantics)?,
+                                .map_err(|_| BalancedBinaryMerkleProofError::BadProofSemantics)?,
                         );
                         // The index must also point to one of the proofs
                         if index < hashes.len() {
                             &hashes[index]
                         } else {
-                            return Err(MergedBalancedBinaryMerkleProofError::BadProofSemantics);
+                            return Err(BalancedBinaryMerkleProofError::BadProofSemantics);
                         }
                     },
                     MergedBalancedBinaryMerkleDataType::Hash => &hash_or_index.1,
@@ -223,11 +241,11 @@ mod test {
             let hash_last = leaves[n - 1].clone();
             let bmt = BalancedBinaryMerkleTree::<DomainSeparatedHasher<Blake256, TestDomain>>::create(leaves);
             let root = bmt.get_merkle_root();
-            let proof = BalancedBinaryMerkleProof::generate_proof(&bmt, 0);
+            let proof = BalancedBinaryMerkleProof::generate_proof(&bmt, 0).unwrap();
             assert!(proof.verify(&root, hash_0));
-            let proof = BalancedBinaryMerkleProof::generate_proof(&bmt, n / 2);
+            let proof = BalancedBinaryMerkleProof::generate_proof(&bmt, n / 2).unwrap();
             assert!(proof.verify(&root, hash_n_half));
-            let proof = BalancedBinaryMerkleProof::generate_proof(&bmt, n - 1);
+            let proof = BalancedBinaryMerkleProof::generate_proof(&bmt, n - 1).unwrap();
             assert!(proof.verify(&root, hash_last));
         }
     }
@@ -241,7 +259,8 @@ mod test {
         let proofs = indices
             .iter()
             .map(|i| BalancedBinaryMerkleProof::generate_proof(&bmt, *i))
-            .collect::<Vec<_>>();
+            .collect::<Result<Vec<_>, _>>()
+            .unwrap();
         let merged_proof = MergedBalancedBinaryMerkleProof::create_from_proofs(proofs).unwrap();
         assert!(merged_proof
             .verify_consume(&root, indices.iter().map(|i| leaves[*i].clone()).collect::<Vec<_>>())
@@ -255,7 +274,8 @@ mod test {
         let root = bmt.get_merkle_root();
         let proofs = (0..255)
             .map(|i| BalancedBinaryMerkleProof::generate_proof(&bmt, i))
-            .collect::<Vec<_>>();
+            .collect::<Result<Vec<_>, _>>()
+            .unwrap();
         let merged_proof = MergedBalancedBinaryMerkleProof::create_from_proofs(proofs).unwrap();
         assert!(merged_proof.verify_consume(&root, leaves).unwrap());
     }
diff --git a/base_layer/mmr/src/balanced_binary_merkle_tree.rs b/base_layer/mmr/src/balanced_binary_merkle_tree.rs
index 08770ea653..488ad22397 100644
--- a/base_layer/mmr/src/balanced_binary_merkle_tree.rs
+++ b/base_layer/mmr/src/balanced_binary_merkle_tree.rs
@@ -20,7 +20,7 @@
 // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
 // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-use std::marker::PhantomData;
+use std::{convert::TryFrom, marker::PhantomData};
 
 use digest::Digest;
 use tari_common::DomainDigest;
@@ -28,10 +28,16 @@ use thiserror::Error;
 
 use crate::{common::hash_together, ArrayLike, Hash};
 
+pub(crate) fn cast_to_u32(value: usize) -> Result<u32, BalancedBinaryMerkleTreeError> {
+    u32::try_from(value).map_err(|_| BalancedBinaryMerkleTreeError::MathOverFlow)
+}
+
 #[derive(Clone, Debug, PartialEq, Eq, Error)]
 pub enum BalancedBinaryMerkleTreeError {
     #[error("There is no leaf with the hash provided.")]
     LeafNotFound,
+    #[error("Math overflow")]
+    MathOverFlow,
 }
 
 // The hashes are perfectly balanced binary tree, so parent at index `i` (0-based) has children at positions `2*i+1` and
@@ -92,7 +98,7 @@ where D: Digest + DomainDigest
         leaf_index + (self.hashes.len() >> 1)
     }
 
-    pub fn find_leaf_index_for_hash(&self, hash: &Hash) -> Result<usize, BalancedBinaryMerkleTreeError> {
+    pub fn find_leaf_index_for_hash(&self, hash: &Hash) -> Result<u32, BalancedBinaryMerkleTreeError> {
         let pos = self
             .hashes
             .position(hash)
@@ -102,7 +108,7 @@ where D: Digest + DomainDigest
             // The hash provided was not for leaf, but for node.
             Err(BalancedBinaryMerkleTreeError::LeafNotFound)
         } else {
-            Ok(pos - (self.hashes.len() >> 1))
+            Ok(cast_to_u32(pos - (self.hashes.len() >> 1))?)
         }
     }
 }
diff --git a/base_layer/mmr/src/lib.rs b/base_layer/mmr/src/lib.rs
index 107fc83e5f..6588712897 100644
--- a/base_layer/mmr/src/lib.rs
+++ b/base_layer/mmr/src/lib.rs
@@ -154,8 +154,8 @@ pub mod pruned_hashset;
 pub use backend::{ArrayLike, ArrayLikeExt};
 pub use balanced_binary_merkle_proof::{
     BalancedBinaryMerkleProof,
+    BalancedBinaryMerkleProofError,
     MergedBalancedBinaryMerkleProof,
-    MergedBalancedBinaryMerkleProofError,
 };
 pub use balanced_binary_merkle_tree::{BalancedBinaryMerkleTree, BalancedBinaryMerkleTreeError};
 /// MemBackendVec is a shareable, memory only, vector that can be be used with MmrCache to store checkpoints.
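
For reviewers, a minimal sketch of how the reworked API reads after this change. The `tari_mmr` calls (`create`, `get_merkle_root`, the now-fallible `generate_proof`, `create_from_proofs`, `verify_consume`) come straight from this diff; the domain-separated hasher setup (`hash_domain!`, `DomainSeparatedHasher<Blake256, ExampleDomain>` from `tari_crypto`) and the `ExampleDomain`/`ExampleHasher` names are assumptions that mirror the crate's test module, not part of the patch.

```rust
use tari_crypto::{hash::blake2::Blake256, hash_domain, hashing::DomainSeparatedHasher};
use tari_mmr::{
    BalancedBinaryMerkleProof,
    BalancedBinaryMerkleProofError,
    BalancedBinaryMerkleTree,
    MergedBalancedBinaryMerkleProof,
};

// Hypothetical domain separation for this example; real callers define their own domain.
hash_domain!(ExampleDomain, "com.example.bbmt", 1);
type ExampleHasher = DomainSeparatedHasher<Blake256, ExampleDomain>;

fn main() -> Result<(), BalancedBinaryMerkleProofError> {
    // Build a small balanced tree over raw byte-vector leaves.
    let leaves: Vec<Vec<u8>> = (0u8..8).map(|i| vec![i; 32]).collect();
    let bmt = BalancedBinaryMerkleTree::<ExampleHasher>::create(leaves.clone());
    let root = bmt.get_merkle_root();

    // generate_proof is now fallible: node_index is stored as u32, so an
    // oversized usize index surfaces as BalancedBinaryMerkleProofError::MathOverflow.
    let proof = BalancedBinaryMerkleProof::generate_proof(&bmt, 3)?;
    assert!(proof.verify(&root, leaves[3].clone()));

    // Merged proofs share the renamed BalancedBinaryMerkleProofError type.
    let proofs = (0..leaves.len())
        .map(|i| BalancedBinaryMerkleProof::generate_proof(&bmt, i))
        .collect::<Result<Vec<_>, _>>()?;
    let merged = MergedBalancedBinaryMerkleProof::create_from_proofs(proofs)?;
    assert!(merged.verify_consume(&root, leaves)?);
    Ok(())
}
```

The new `BorshDeserialize`/`BorshSerialize` and serde derives on `BalancedBinaryMerkleProof` are what make the proof transferable across serialization boundaries; the sketch above only exercises the fallible construction, merge, and verification paths.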