From 404b73b4061cfd2a168e2621ea56af443105ace1 Mon Sep 17 00:00:00 2001 From: Valentin <77051586+vkgnosis@users.noreply.github.com> Date: Fri, 26 Feb 2021 18:09:36 +0100 Subject: [PATCH] Support abi v2 We use a fork of ethabi that has patches applied so that it in turn supports abi v2. Once the mainline ethabi gets updated we can switch back to it. We move tokenization related traits from rust-web3 into ethcontract because the rust-web3 version does not support tokenizing tuples and because this gives us more control over the traits. This commit is technically a breaking change because we touch some parts of the public interface like the error enum but for most users nothing should change. Abi v2 functions are tested in the `abi` example. Some events there had to be changed to give names to the parameters because of a yet-unfixed bug in ethabi where it addresses the parameters based on a map of their name which leads it to use a copy of one of the tokens for all of the parameters. This bug is already present in the current version of ethabi but now throws errors in the example because of stricter verification of the size of integer parameters. 
--- ethcontract-common/Cargo.toml | 2 +- ethcontract-common/src/truffle.rs | 1 + ethcontract-derive/src/lib.rs | 2 + ethcontract-generate/src/contract/events.rs | 30 +- ethcontract-generate/src/contract/methods.rs | 11 +- ethcontract-generate/src/contract/types.rs | 5 +- ethcontract/Cargo.toml | 1 + ethcontract/src/contract.rs | 25 +- ethcontract/src/contract/deploy.rs | 4 +- ethcontract/src/contract/event.rs | 14 +- ethcontract/src/contract/event/data.rs | 6 +- ethcontract/src/contract/method.rs | 88 +-- ethcontract/src/errors.rs | 13 +- ethcontract/src/errors/revert.rs | 2 + ethcontract/src/int.rs | 31 +- ethcontract/src/lib.rs | 1 + ethcontract/src/log.rs | 16 +- ethcontract/src/tokens.rs | 532 +++++++++++++++++++ examples/examples/abi.rs | 20 + examples/examples/batch.rs | 10 +- examples/examples/sources.rs | 3 + examples/truffle/contracts/AbiTypes.sol | 27 +- 22 files changed, 692 insertions(+), 152 deletions(-) create mode 100644 ethcontract/src/tokens.rs diff --git a/ethcontract-common/Cargo.toml b/ethcontract-common/Cargo.toml index 412106f7..c861bcfe 100644 --- a/ethcontract-common/Cargo.toml +++ b/ethcontract-common/Cargo.toml @@ -12,7 +12,7 @@ Common types for ethcontract-rs runtime and proc macro. 
""" [dependencies] -ethabi = "13.0" +ethabi = { git = "https://github.com/vkgnosis/ethabi.git", rev ="f23954bb5b86687f0d780935b2eacc503393dc44" } hex = "0.4" serde = "1.0" serde_derive = "1.0" diff --git a/ethcontract-common/src/truffle.rs b/ethcontract-common/src/truffle.rs index cef1eb48..923d4344 100644 --- a/ethcontract-common/src/truffle.rs +++ b/ethcontract-common/src/truffle.rs @@ -39,6 +39,7 @@ impl Artifact { functions: HashMap::new(), events: HashMap::new(), fallback: false, + receive: false, }, bytecode: Default::default(), networks: HashMap::new(), diff --git a/ethcontract-derive/src/lib.rs b/ethcontract-derive/src/lib.rs index 6d0e404e..cbf85405 100644 --- a/ethcontract-derive/src/lib.rs +++ b/ethcontract-derive/src/lib.rs @@ -342,6 +342,7 @@ impl Parse for Method { }) .collect::>>()?; + #[allow(deprecated)] Function { name, inputs, @@ -350,6 +351,7 @@ impl Parse for Method { // affect its signature. outputs: vec![], constant: false, + state_mutability: Default::default(), } }; let signature = function.abi_signature(); diff --git a/ethcontract-generate/src/contract/events.rs b/ethcontract-generate/src/contract/events.rs index 8aac41e5..d0b86977 100644 --- a/ethcontract-generate/src/contract/events.rs +++ b/ethcontract-generate/src/contract/events.rs @@ -60,6 +60,7 @@ fn expand_data_type(event: &Event, event_derives: &[Path]) -> Result Result - ::from_token(tokens.next().unwrap())?; - } - }) - .collect::>(); - let derives = expand_derives(event_derives); Ok(quote! 
{ @@ -102,23 +92,11 @@ fn expand_data_type(event: &Event, event_derives: &[Path]) -> Result, - ) -> Result { - if tokens.len() != #params_len { - return Err(self::ethcontract::web3::contract::Error::InvalidOutputType(format!( - "Expected {} tokens, got {}: {:?}", - #params_len, - tokens.len(), - tokens - ))); - } - - #[allow(unused_mut)] - let mut tokens = tokens.into_iter(); - #( #read_param_token )* - + ) -> Result { + let (#(#param_names,)*) = self::ethcontract::tokens::MultiDetokenize::from_tokens(tokens)?; Ok(#data_type_construction) } } diff --git a/ethcontract-generate/src/contract/methods.rs b/ethcontract-generate/src/contract/methods.rs index d1a037c6..bafffa89 100644 --- a/ethcontract-generate/src/contract/methods.rs +++ b/ethcontract-generate/src/contract/methods.rs @@ -1,7 +1,7 @@ use crate::contract::{types, Context}; use crate::util; use anyhow::{anyhow, Context as _, Result}; -use ethcontract_common::abi::{Function, Param}; +use ethcontract_common::abi::{Function, Param, StateMutability}; use ethcontract_common::abiext::FunctionExt; use ethcontract_common::hash::H32; use inflector::Inflector; @@ -104,10 +104,11 @@ fn expand_function(cx: &Context, function: &Function, alias: Option) -> R let input = expand_inputs(&function.inputs)?; let outputs = expand_fn_outputs(&function.outputs)?; - let (method, result_type_name) = if function.constant { - (quote! { view_method }, quote! { DynViewMethodBuilder }) - } else { - (quote! { method }, quote! { DynMethodBuilder }) + let (method, result_type_name) = match function.state_mutability { + StateMutability::Pure | StateMutability::View => { + (quote! { view_method }, quote! { DynViewMethodBuilder }) + } + _ => (quote! { method }, quote! { DynMethodBuilder }), }; let result = quote! 
{ self::ethcontract::dyns::#result_type_name<#outputs> }; let arg = expand_inputs_call_arg(&function.inputs); diff --git a/ethcontract-generate/src/contract/types.rs b/ethcontract-generate/src/contract/types.rs index 176e3970..7dcf0b38 100644 --- a/ethcontract-generate/src/contract/types.rs +++ b/ethcontract-generate/src/contract/types.rs @@ -43,6 +43,9 @@ pub(crate) fn expand(kind: &ParamType) -> Result { let size = Literal::usize_unsuffixed(*n); Ok(quote! { [#inner; #size] }) } - ParamType::Tuple(_) => Err(anyhow!("ABIEncoderV2 is currently not supported")), + ParamType::Tuple(t) => { + let inner = t.iter().map(expand).collect::>>()?; + Ok(quote! { (#(#inner),*) }) + } } } diff --git a/ethcontract/Cargo.toml b/ethcontract/Cargo.toml index c2871d76..f76ae2ed 100644 --- a/ethcontract/Cargo.toml +++ b/ethcontract/Cargo.toml @@ -25,6 +25,7 @@ ws-tokio = ["web3/ws-tokio"] ws-tls-tokio = ["web3/ws-tls-tokio"] [dependencies] +arrayvec = "0.5" ethcontract-common = { version = "0.11.2", path = "../ethcontract-common" } ethcontract-derive = { version = "0.11.2", path = "../ethcontract-derive", optional = true} futures = "0.3" diff --git a/ethcontract/src/contract.rs b/ethcontract/src/contract.rs index 77d480b5..606d5fc8 100644 --- a/ethcontract/src/contract.rs +++ b/ethcontract/src/contract.rs @@ -6,7 +6,11 @@ mod deploy; mod event; mod method; -use crate::errors::{DeployError, LinkError}; +use crate::tokens::MultiTokenize; +use crate::{ + errors::{DeployError, LinkError}, + tokens::MultiDetokenize, +}; use ethcontract_common::abi::{Error as AbiError, Result as AbiResult}; use ethcontract_common::abiext::FunctionExt; use ethcontract_common::hash::H32; @@ -14,7 +18,6 @@ use ethcontract_common::{Abi, Artifact, Bytecode}; use std::collections::HashMap; use std::hash::Hash; use web3::api::Web3; -use web3::contract::tokens::{Detokenize, Tokenize}; use web3::types::{Address, Bytes, H256}; use web3::Transport; @@ -23,7 +26,7 @@ pub use self::event::{ AllEventsBuilder, Event, 
EventBuilder, EventMetadata, EventStatus, ParseLog, RawLog, StreamEvent, Topic, }; -pub use self::method::{Detokenizable, MethodBuilder, MethodDefaults, ViewMethodBuilder, Void}; +pub use self::method::{MethodBuilder, MethodDefaults, Return, ViewMethodBuilder, Void}; /// Represents a contract instance at an address. Provides methods for /// contract interaction. @@ -112,7 +115,7 @@ impl Instance { params: P, ) -> Result, DeployError> where - P: Tokenize, + P: MultiTokenize, { Linker::new(artifact).deploy(web3, params) } @@ -126,7 +129,7 @@ impl Instance { libraries: I, ) -> Result, DeployError> where - P: Tokenize, + P: MultiTokenize, I: Iterator, { let mut linker = Linker::new(artifact); @@ -163,8 +166,8 @@ impl Instance { /// actually commit anything to the block chain. pub fn method(&self, signature: H32, params: P) -> AbiResult> where - P: Tokenize, - R: Detokenizable, + P: MultiTokenize, + R: Return, { let signature = signature.as_ref(); let function = self @@ -191,8 +194,8 @@ impl Instance { /// state. pub fn view_method(&self, signature: H32, params: P) -> AbiResult> where - P: Tokenize, - R: Detokenizable, + P: MultiTokenize, + R: Return, { Ok(self.method(signature, params)?.view()) } @@ -221,7 +224,7 @@ impl Instance { /// that emits events for the specified Solidity event by name. pub fn event(&self, signature: H256) -> AbiResult> where - E: Detokenize, + E: MultiDetokenize, { let event = self .events @@ -286,7 +289,7 @@ impl Linker { ) -> Result>, DeployError> where T: Transport, - P: Tokenize, + P: MultiTokenize, { DeployBuilder::new(web3, self, params) } diff --git a/ethcontract/src/contract/deploy.rs b/ethcontract/src/contract/deploy.rs index e1c2dadd..64ddf6c7 100644 --- a/ethcontract/src/contract/deploy.rs +++ b/ethcontract/src/contract/deploy.rs @@ -2,12 +2,12 @@ //! new contracts. 
use crate::errors::{DeployError, ExecutionError}; +use crate::tokens::MultiTokenize; use crate::transaction::{Account, GasPrice, TransactionBuilder, TransactionResult}; use ethcontract_common::abi::Error as AbiError; use ethcontract_common::{Abi, Bytecode}; use std::marker::PhantomData; use web3::api::Web3; -use web3::contract::tokens::Tokenize; use web3::types::{Address, Bytes, H256, U256}; use web3::Transport; @@ -62,7 +62,7 @@ where /// deployment (constructor) parameters. pub fn new

(web3: Web3, context: I::Context, params: P) -> Result where - P: Tokenize, + P: MultiTokenize, { // NOTE(nlordell): unfortunately here we have to re-implement some // `rust-web3` code so that we can add things like signing support; diff --git a/ethcontract/src/contract/event.rs b/ethcontract/src/contract/event.rs index d206327b..10e39db8 100644 --- a/ethcontract/src/contract/event.rs +++ b/ethcontract/src/contract/event.rs @@ -6,6 +6,7 @@ mod data; pub use self::data::{Event, EventMetadata, EventStatus, ParseLog, RawLog, StreamEvent}; use crate::errors::{EventError, ExecutionError}; use crate::log::LogFilterBuilder; +use crate::tokens::{MultiDetokenize, SingleTokenize}; pub use ethcontract_common::abi::Topic; use ethcontract_common::abi::{Event as AbiEvent, RawTopicFilter, Token}; use futures::future::{self, TryFutureExt as _}; @@ -14,14 +15,13 @@ use std::cmp; use std::marker::PhantomData; use std::time::Duration; use web3::api::Web3; -use web3::contract::tokens::{Detokenize, Tokenizable}; use web3::types::{Address, BlockNumber, H256}; use web3::Transport; /// A builder for creating a filtered stream of contract events that are #[derive(Debug)] #[must_use = "event builders do nothing unless you stream them"] -pub struct EventBuilder { +pub struct EventBuilder { /// The underlying web3 instance. web3: Web3, /// The event ABI data for encoding topic filters and decoding logs. @@ -33,7 +33,7 @@ pub struct EventBuilder { _event: PhantomData, } -impl EventBuilder { +impl EventBuilder { /// Creates a new event builder from a web3 provider and a contract event /// and address. pub fn new(web3: Web3, event: AbiEvent, address: Address) -> Self { @@ -71,7 +71,7 @@ impl EventBuilder { /// actually `topic[1]`. pub fn topic0

(mut self, topic: Topic

) -> Self where - P: Tokenizable, + P: SingleTokenize, { self.topics.topic0 = tokenize_topic(topic); self @@ -80,7 +80,7 @@ impl EventBuilder { /// Adds a filter for the second indexed topic. pub fn topic1

(mut self, topic: Topic

) -> Self where - P: Tokenizable, + P: SingleTokenize, { self.topics.topic1 = tokenize_topic(topic); self @@ -89,7 +89,7 @@ impl EventBuilder { /// Adds a filter for the third indexed topic. pub fn topic2

(mut self, topic: Topic

) -> Self where - P: Tokenizable, + P: SingleTokenize, { self.topics.topic2 = tokenize_topic(topic); self @@ -158,7 +158,7 @@ impl EventBuilder { /// Converts a tokenizable topic into a raw topic for filtering. fn tokenize_topic

(topic: Topic

) -> Topic where - P: Tokenizable, + P: SingleTokenize, { topic.map(|parameter| parameter.into_token()) } diff --git a/ethcontract/src/contract/event/data.rs b/ethcontract/src/contract/event/data.rs index 7936f5f9..1089a9f1 100644 --- a/ethcontract/src/contract/event/data.rs +++ b/ethcontract/src/contract/event/data.rs @@ -1,8 +1,6 @@ //! Module contains code for parsing and manipulating event data. - -use crate::errors::ExecutionError; +use crate::{errors::ExecutionError, tokens::MultiDetokenize}; use ethcontract_common::abi::{Event as AbiEvent, RawLog as AbiRawLog}; -use web3::contract::tokens::Detokenize; use web3::types::{Log, H256}; /// A contract event @@ -172,7 +170,7 @@ impl RawLog { /// Decode raw log data into a tokenizable for a matching event ABI entry. pub fn decode(self, event: &AbiEvent) -> Result where - D: Detokenize, + D: MultiDetokenize, { let event_log = event.parse_log(AbiRawLog { topics: self.topics, diff --git a/ethcontract/src/contract/method.rs b/ethcontract/src/contract/method.rs index c4ad73cc..0bee70a6 100644 --- a/ethcontract/src/contract/method.rs +++ b/ethcontract/src/contract/method.rs @@ -3,63 +3,60 @@ //! [Instance::method](ethcontract::contract::Instance::method). use crate::transaction::{Account, GasPrice, TransactionBuilder, TransactionResult}; +use crate::{batch::CallBatch, errors::MethodError}; use crate::{ - batch::CallBatch, - errors::{revert, ExecutionError, MethodError}, + errors::{revert, ExecutionError}, + tokens::{Error as TokenError, SingleTokenize}, }; use ethcontract_common::abi::{Function, Token}; use std::marker::PhantomData; -use web3::contract::tokens::Detokenize; -use web3::contract::Error as Web3ContractError; use web3::types::{Address, BlockId, Bytes, CallRequest, U256}; use web3::Transport; use web3::{api::Web3, BatchTransport}; -/// A void type to represent methods with empty return types. -/// -/// This is used to work around the fact that `(): !Detokenize`. 
-pub struct Void(()); - -/// Represents a type can detokenize a result. -pub trait Detokenizable { - /// The output that this type detokenizes into. - type Output; +/// A solidity return type. We need a separate trait for this so that we can represent functions +/// that return nothing. +pub trait Return { + /// The output type. + type Output: SingleTokenize; + /// Is this the void return type? + fn is_void() -> bool; + /// Convert tokens into self. + fn from_tokens(tokens: Vec) -> Result; +} - /// Create an instance of `Output` by decoding tokens. - fn from_tokens(tokens: Vec) -> Result; +/// The return type of a solidity function that returns nothing. +pub struct Void; +impl Return for Void { + type Output = (); - /// Returns true if this is an empty type. fn is_void() -> bool { - false + true } -} -impl Detokenizable for Void { - type Output = (); - - fn from_tokens(tokens: Vec) -> Result { + fn from_tokens(tokens: Vec) -> Result { if !tokens.is_empty() { - return Err(Web3ContractError::InvalidOutputType(format!( - "Expected no elements, got tokens: {:?}", - tokens - )) - .into()); + return Err(TokenError::MultiDetokenizeLengthMismatch); } - Ok(()) } +} + +impl Return for T +where + T: SingleTokenize, +{ + type Output = T; fn is_void() -> bool { - true + false } -} -impl Detokenizable for T { - type Output = Self; - - fn from_tokens(tokens: Vec) -> Result { - let result = ::from_tokens(tokens)?; - Ok(result) + fn from_tokens(tokens: Vec) -> Result { + if tokens.len() != 1 { + return Err(TokenError::MultiDetokenizeLengthMismatch); + } + Self::Output::from_token(tokens.into_iter().next().unwrap()) } } @@ -79,7 +76,7 @@ pub struct MethodDefaults { /// transactions. This is useful when dealing with view functions. 
#[derive(Debug, Clone)] #[must_use = "methods do nothing unless you `.call()` or `.send()` them"] -pub struct MethodBuilder { +pub struct MethodBuilder { web3: Web3, function: Function, /// transaction parameters @@ -92,17 +89,20 @@ impl MethodBuilder { pub fn fallback(web3: Web3, address: Address, data: Bytes) -> Self { // NOTE: We create a fake `Function` entry for the fallback method. This // is OK since it is only ever used for error formatting purposes. + + #[allow(deprecated)] let function = Function { name: "fallback".into(), inputs: vec![], outputs: vec![], constant: false, + state_mutability: Default::default(), }; MethodBuilder::new(web3, function, address, data) } } -impl MethodBuilder { +impl MethodBuilder { /// Creates a new builder for a transaction. pub fn new(web3: Web3, function: Function, address: Address, data: Bytes) -> Self { MethodBuilder { @@ -197,14 +197,14 @@ impl MethodBuilder { /// directly send transactions and is for read only method calls. #[derive(Debug, Clone)] #[must_use = "view methods do nothing unless you `.call()` them"] -pub struct ViewMethodBuilder { +pub struct ViewMethodBuilder { /// method parameters pub m: MethodBuilder, /// optional block number pub block: Option, } -impl ViewMethodBuilder { +impl ViewMethodBuilder { /// Create a new `ViewMethodBuilder` by demoting a `MethodBuilder`. pub fn from_method(method: MethodBuilder) -> Self { ViewMethodBuilder { @@ -254,7 +254,7 @@ impl ViewMethodBuilder { } } -impl ViewMethodBuilder { +impl ViewMethodBuilder { /// Call a contract method. Contract calls do not modify the blockchain and /// as such do not require gas or signing. pub async fn call(self) -> Result { @@ -294,7 +294,7 @@ impl ViewMethodBuilder { async fn convert_response< F: std::future::Future>, - R: Detokenizable, + R: Return, >( future: F, function: Function, @@ -316,7 +316,7 @@ async fn convert_response< /// encode this information in a JSON RPC error. 
On a revert or invalid opcode, /// the result is `0x` (empty data), while on a revert with message, it is an /// ABI encoded `Error(string)` function call data. -fn decode_geth_call_result( +fn decode_geth_call_result( function: &Function, bytes: Vec, ) -> Result { @@ -345,6 +345,7 @@ mod tests { use ethcontract_common::abi::{Param, ParamType}; fn test_abi_function() -> (Function, Bytes) { + #[allow(deprecated)] let function = Function { name: "test".to_owned(), inputs: Vec::new(), @@ -353,6 +354,7 @@ mod tests { kind: ParamType::Uint(256), }], constant: false, + state_mutability: Default::default(), }; let data = function .encode_input(&[]) diff --git a/ethcontract/src/errors.rs b/ethcontract/src/errors.rs index c044fb35..e85c9496 100644 --- a/ethcontract/src/errors.rs +++ b/ethcontract/src/errors.rs @@ -12,7 +12,6 @@ use secp256k1::Error as Secp256k1Error; use std::num::ParseIntError; use thiserror::Error; use uint::FromDecStrErr; -use web3::contract::Error as Web3ContractError; use web3::error::Error as Web3Error; use web3::types::{Log, TransactionReceipt, H256}; @@ -61,7 +60,7 @@ pub enum ExecutionError { /// An error occured while ABI decoding the result of a contract method /// call. #[error("abi decode error: {0}")] - AbiDecode(#[from] Web3ContractError), + AbiDecode(#[from] AbiError), /// An error occured while parsing chain ID received from a Web3 call. #[error("parse chain ID error: {0}")] @@ -104,6 +103,10 @@ pub enum ExecutionError { /// A stream ended unexpectedly. #[error("log stream ended unexpectedly")] StreamEndedUnexpectedly, + + /// A tokenization related error. + #[error("tokenization error: {0}")] + Tokenization(#[from] crate::tokens::Error), } impl From for ExecutionError { @@ -121,12 +124,6 @@ impl From for ExecutionError { } } -impl From for ExecutionError { - fn from(err: AbiError) -> Self { - ExecutionError::AbiDecode(err.into()) - } -} - /// Error that can occur while executing a contract call or transaction. 
#[derive(Debug, Error)] #[error("method '{signature}' failure: {inner}")] diff --git a/ethcontract/src/errors/revert.rs b/ethcontract/src/errors/revert.rs index 4f2c2fef..0cb02362 100644 --- a/ethcontract/src/errors/revert.rs +++ b/ethcontract/src/errors/revert.rs @@ -40,6 +40,7 @@ mod tests { use ethcontract_common::abi::{Function, Param, Token}; pub fn encode_reason(reason: &str) -> Vec { + #[allow(deprecated)] let revert = Function { name: "Error".into(), inputs: vec![Param { @@ -48,6 +49,7 @@ mod tests { }], outputs: Vec::new(), constant: true, + state_mutability: Default::default(), }; revert .encode_input(&[Token::String(reason.into())]) diff --git a/ethcontract/src/int.rs b/ethcontract/src/int.rs index 1ff01ad4..e3824b49 100644 --- a/ethcontract/src/int.rs +++ b/ethcontract/src/int.rs @@ -1,6 +1,5 @@ //! This module contains an 256-bit signed integer implementation. -use crate::common::abi::Token; use crate::errors::{ParseI256Error, TryFromBigIntError}; use serde::{Deserialize, Serialize}; use std::cmp; @@ -10,7 +9,6 @@ use std::iter; use std::ops; use std::str; use std::{i128, i64, u64}; -use web3::contract::{self, tokens}; use web3::types::U256; /// Compute the two's complement of a U256. @@ -1213,26 +1211,11 @@ impl iter::Product for I256 { } } -impl tokens::Tokenizable for I256 { - fn from_token(token: Token) -> Result { - // NOTE: U256 accepts both `Int` and `Uint` kind tokens. In fact, all - // integer types are expected to accept both. - Ok(I256(U256::from_token(token)?)) - } - - fn into_token(self) -> Token { - Token::Int(self.0) - } -} - -impl tokens::TokenizableItem for I256 {} - #[cfg(test)] mod tests { use super::*; use lazy_static::lazy_static; use serde_json::json; - use web3::contract::tokens::Tokenizable; lazy_static! 
{ static ref MIN_ABS: U256 = U256::from(1) << 255; @@ -1696,20 +1679,8 @@ mod tests { } #[test] - fn tokenization() { + fn json() { assert_eq!(json!(I256::from(42)), json!("0x2a")); assert_eq!(json!(I256::minus_one()), json!(U256::MAX)); - - assert_eq!(I256::from(42).into_token(), 42i32.into_token()); - assert_eq!(I256::minus_one().into_token(), Token::Int(U256::MAX),); - - assert_eq!( - I256::from_token(42i32.into_token()).unwrap(), - I256::from(42), - ); - assert_eq!( - I256::from_token(U256::MAX.into_token()).unwrap(), - I256::minus_one(), - ); } } diff --git a/ethcontract/src/lib.rs b/ethcontract/src/lib.rs index 35e038a4..39b5a139 100644 --- a/ethcontract/src/lib.rs +++ b/ethcontract/src/lib.rs @@ -99,6 +99,7 @@ pub mod errors; mod int; pub mod log; pub mod secret; +pub mod tokens; pub mod transaction; pub mod transport; diff --git a/ethcontract/src/log.rs b/ethcontract/src/log.rs index 88809b32..2f7491d8 100644 --- a/ethcontract/src/log.rs +++ b/ethcontract/src/log.rs @@ -158,7 +158,12 @@ impl LogFilterBuilder { filter = filter.address(self.address); } if self.topics != TopicFilter::default() { - filter = filter.topic_filter(self.topics) + filter = filter.topics( + topic_to_option(self.topics.topic0), + topic_to_option(self.topics.topic1), + topic_to_option(self.topics.topic2), + topic_to_option(self.topics.topic3), + ); } if let Some(limit) = self.limit { filter = filter.limit(limit) @@ -211,6 +216,15 @@ impl LogFilterBuilder { } } +/// Converts a `Topic` to an equivalent `Option>`, suitable for `FilterBuilder::topics` +fn topic_to_option(topic: Topic) -> Option> { + match topic { + Topic::Any => None, + Topic::OneOf(v) => Some(v), + Topic::This(t) => Some(vec![t]), + } +} + /// Internal unfold context for creating a `past_logs` `Stream`. 
enum PastLogsStream { Init(LogFilterBuilder), diff --git a/ethcontract/src/tokens.rs b/ethcontract/src/tokens.rs new file mode 100644 index 00000000..8b906493 --- /dev/null +++ b/ethcontract/src/tokens.rs @@ -0,0 +1,532 @@ +//! Tokenization related functionality allowing rust types to be mapped to solidity types. + +// This file is based on https://github.com/tomusdrw/rust-web3/blob/e6d044a28458be9a3ee31108475d787e0440ce8b/src/contract/tokens.rs . +// Generated contract bindings should operate on native rust types for ease of use. To encode them +// with ethabi we need to map them to ethabi tokens. SingleTokenize does this for base types like +// u32 and compounds of other SingleTokenize in the form of vectors, arrays and tuples. +// +// This is complicated by `Vec` representing `Token::Bytes` (and `[u8; n]` `Token::FixedBytes`) +// preventing us from having a generic `impl for Vec` as this would lead to +// conflicting implementations. As a workaround we use an intermediate trait `SingleTokenizeArray` +// that is implemented for all types that implement `SingleTokenize` except `Vec` and +// `[u8; n]` and then only implement `SingleTokenize` for vectors and arrays of +// `SingleTokenizeArray`. +// +// The drawback is that if a solidity function actually used an array of u8 instead of bytes then we +// would not be able to interact with it. An alternative solution could be to use a Bytes new type +// but this makes calling those functions slightly more annoying. +// +// In some cases like when passing arguments to `MethodBuilder` or decoding events we need to be +// able to pack multiple types into a single generic parameter. This is accomplished by +// `MultiTokenize` and `MultiDetokenize` which work on vectors of tokens instead of a single token. +// This is split into two traits instead of one like for `SingleTokenize` to avoid more work when +// only one half of the functionality is needed. 
For example events are only ever decoded so there +// is no reason to implement encoding. +// +// A completely different approach could be to avoid using the trait system and instead encode all +// rust types into tokens directly in the ethcontract generated bindings. + +use crate::I256; +use arrayvec::ArrayVec; +use ethcontract_common::{abi::Token, TransactionHash}; +use std::convert::TryInto; +use web3::types::{Address, U256}; + +/// A tokenization related error. +#[derive(Debug, thiserror::Error)] +pub enum Error { + /// SingleTokenize::from_token token type doesn't match the rust type. + #[error("expected a different token type")] + TypeMismatch, + /// SingleTokenize::from_token is called with integer that doesn't fit in the rust type. + #[error("abi integer is does not fit rust integer")] + IntegerMismatch, + /// SingleTokenize::from_token token is fixed bytes with wrong length. + #[error("expected a different number of fixed bytes")] + FixedBytesLengthsMismatch, + /// SingleTokenize::from_token token is fixed array with wrong length. + #[error("expected a different number of tokens in fixed array")] + FixedArrayLengthsMismatch, + /// SingleTokenize::from_token token is tuple with wrong length. + #[error("expected a different number of tokens in tuple")] + TupleLengthMismatch, + /// MultiDetokenize::from_tokens has wrong number of tokens. + #[error("expected a different number of tokens when detokenizing multiple tokens")] + MultiDetokenizeLengthMismatch, +} + +/// Rust type and single token conversion. +pub trait SingleTokenize { + /// Convert token into Self. + fn from_token(token: Token) -> Result + where + Self: Sized; + + /// Convert self into token. 
+ fn into_token(self) -> Token; +} + +impl SingleTokenize for Vec { + fn from_token(token: Token) -> Result + where + Self: Sized, + { + match token { + Token::Bytes(bytes) => Ok(bytes), + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::Bytes(self) + } +} + +impl SingleTokenize for String { + fn from_token(token: Token) -> Result { + match token { + Token::String(s) => Ok(s), + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::String(self) + } +} + +impl SingleTokenize for Address { + fn from_token(token: Token) -> Result { + match token { + Token::Address(data) => Ok(data), + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::Address(self) + } +} + +impl SingleTokenize for U256 { + fn from_token(token: Token) -> Result { + match token { + Token::Uint(u256) => Ok(u256), + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::Uint(self) + } +} + +impl SingleTokenize for I256 { + fn from_token(token: Token) -> Result { + match token { + Token::Int(u256) => Ok(Self::from_raw(u256)), + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::Int(self.into_raw()) + } +} + +impl SingleTokenize for TransactionHash { + fn from_token(token: Token) -> Result + where + Self: Sized, + { + <[u8; 32]>::from_token(token).map(Self) + } + + fn into_token(self) -> Token { + self.0.into_token() + } +} + +macro_rules! uint_tokenize { + ($int: ident, $token: ident) => { + impl SingleTokenize for $int { + fn from_token(token: Token) -> Result { + let u256 = match token { + Token::Uint(u256) => u256, + _ => return Err(Error::TypeMismatch), + }; + u256.try_into().map_err(|_| Error::IntegerMismatch) + } + + fn into_token(self) -> Token { + Token::Uint(self.into()) + } + } + }; +} + +macro_rules! 
int_tokenize { + ($int: ident, $token: ident) => { + impl SingleTokenize for $int { + fn from_token(token: Token) -> Result { + let u256 = match token { + Token::Int(u256) => u256, + _ => return Err(Error::TypeMismatch), + }; + let i256 = I256::from_raw(u256); + i256.try_into().map_err(|_| Error::IntegerMismatch) + } + + fn into_token(self) -> Token { + Token::Int(I256::from(self).into_raw()) + } + } + }; +} + +int_tokenize!(i8, Int); +int_tokenize!(i16, Int); +int_tokenize!(i32, Int); +int_tokenize!(i64, Int); +int_tokenize!(i128, Int); +uint_tokenize!(u8, Uint); +uint_tokenize!(u16, Uint); +uint_tokenize!(u32, Uint); +uint_tokenize!(u64, Uint); +uint_tokenize!(u128, Uint); + +impl SingleTokenize for bool { + fn from_token(token: Token) -> Result { + match token { + Token::Bool(data) => Ok(data), + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::Bool(self) + } +} + +/// Marker trait for `Tokenize` types that are can tokenized to and from a `Token::Array` and +/// `Token:FixedArray`. This is everything except `u8` because `Vec` and `[u8; n]` directly +/// implement `SingleTokenize`. +pub trait SingleTokenizeArray: SingleTokenize {} + +macro_rules! single_tokenize_array { + ($($type: ty,)*) => { + $( + impl SingleTokenizeArray for $type {} + )* + }; +} + +single_tokenize_array! { + String, Address, U256, I256, Vec, bool, + i8, i16, i32, i64, i128, u16, u32, u64, u128, +} + +impl SingleTokenize for Vec { + fn from_token(token: Token) -> Result { + match token { + Token::FixedArray(tokens) | Token::Array(tokens) => { + tokens.into_iter().map(SingleTokenize::from_token).collect() + } + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::Array(self.into_iter().map(SingleTokenize::into_token).collect()) + } +} + +impl SingleTokenizeArray for Vec {} + +macro_rules! 
impl_fixed_types { + ($num: expr) => { + impl SingleTokenize for [T; $num] + where + T: SingleTokenizeArray, + { + fn from_token(token: Token) -> Result + where + Self: Sized, + { + let tokens = match token { + Token::FixedArray(tokens) => tokens, + _ => return Err(Error::TypeMismatch), + }; + let arr_vec = tokens + .into_iter() + .map(T::from_token) + .collect::, Error>>()?; + arr_vec + .into_inner() + .map_err(|_| Error::FixedArrayLengthsMismatch) + } + + fn into_token(self) -> Token { + Token::FixedArray( + ArrayVec::from(self) + .into_iter() + .map(T::into_token) + .collect(), + ) + } + } + + impl SingleTokenizeArray for [T; $num] where T: SingleTokenizeArray {} + + impl SingleTokenize for [u8; $num] { + fn from_token(token: Token) -> Result { + match token { + Token::FixedBytes(bytes) => { + if bytes.len() != $num { + return Err(Error::TypeMismatch); + } + + let mut arr = [0; $num]; + arr.copy_from_slice(&bytes); + Ok(arr) + } + _ => Err(Error::TypeMismatch), + } + } + + fn into_token(self) -> Token { + Token::FixedBytes(self.to_vec()) + } + } + + impl SingleTokenizeArray for [u8; $num] {} + }; +} + +impl_fixed_types!(1); +impl_fixed_types!(2); +impl_fixed_types!(3); +impl_fixed_types!(4); +impl_fixed_types!(5); +impl_fixed_types!(6); +impl_fixed_types!(7); +impl_fixed_types!(8); +impl_fixed_types!(9); +impl_fixed_types!(10); +impl_fixed_types!(11); +impl_fixed_types!(12); +impl_fixed_types!(13); +impl_fixed_types!(14); +impl_fixed_types!(15); +impl_fixed_types!(16); +impl_fixed_types!(32); +impl_fixed_types!(64); +impl_fixed_types!(128); +impl_fixed_types!(256); +impl_fixed_types!(512); +impl_fixed_types!(1024); + +macro_rules! 
impl_single_tokenize_for_tuple {
    ($count: expr, $( $ty: ident : $no: tt, )*) => {
        impl<$($ty, )*> SingleTokenizeArray for ($($ty,)*)
        where
            $($ty: SingleTokenize,)*
        {}

        // Tuples map to `Token::Tuple`, the ABI-v2 struct encoding.
        impl<$($ty, )*> SingleTokenize for ($($ty,)*)
        where
            $($ty: SingleTokenize,)*
        {
            fn from_token(token: Token) -> Result<Self, Error> {
                let tokens = match token {
                    Token::Tuple(tokens) => tokens,
                    _ => return Err(Error::TypeMismatch),
                };
                if tokens.len() != $count {
                    return Err(Error::TupleLengthMismatch);
                }
                // The allows silence warnings for the 0-tuple expansion,
                // where `drain` is never used.
                #[allow(unused_variables)]
                #[allow(unused_mut)]
                let mut drain = tokens.into_iter();
                Ok(($($ty::from_token(drain.next().unwrap())?,)*))
            }

            fn into_token(self) -> Token {
                Token::Tuple(vec![$(self.$no.into_token(),)*])
            }
        }
    }
}

impl_single_tokenize_for_tuple!(0,);
impl_single_tokenize_for_tuple!(1, A:0, );
impl_single_tokenize_for_tuple!(2, A:0, B:1, );
impl_single_tokenize_for_tuple!(3, A:0, B:1, C:2, );
impl_single_tokenize_for_tuple!(4, A:0, B:1, C:2, D:3, );
impl_single_tokenize_for_tuple!(5, A:0, B:1, C:2, D:3, E:4, );
impl_single_tokenize_for_tuple!(6, A:0, B:1, C:2, D:3, E:4, F:5, );
impl_single_tokenize_for_tuple!(7, A:0, B:1, C:2, D:3, E:4, F:5, G:6, );
impl_single_tokenize_for_tuple!(8, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, );
impl_single_tokenize_for_tuple!(9, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, );
impl_single_tokenize_for_tuple!(10, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, );
impl_single_tokenize_for_tuple!(11, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, );
impl_single_tokenize_for_tuple!(12, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, );
impl_single_tokenize_for_tuple!(13, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, );
impl_single_tokenize_for_tuple!(14, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_single_tokenize_for_tuple!(15, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
+impl_single_tokenize_for_tuple!(16, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, ); + +/// Rust type and multiple tokens conversion. +pub trait MultiTokenize { + /// Convert self into tokens. + fn into_tokens(self) -> Vec; +} + +/// Rust type and multiple token conversion. +pub trait MultiDetokenize { + /// Convert tokens into Self. + fn from_tokens(tokens: Vec) -> Result + where + Self: Sized; +} + +macro_rules! impl_multi_for_tuple { + ($count: expr, $( $ty: ident : $no: tt, )*) => { + impl<$($ty, )*> MultiTokenize for ($($ty,)*) where + $( + $ty: SingleTokenize, + )* + { + fn into_tokens(self) -> Vec { + vec![ + $( self.$no.into_token(), )* + ] + } + } + + impl<$($ty, )*> MultiDetokenize for ($($ty,)*) where + $( + $ty: SingleTokenize, + )* + { + fn from_tokens(tokens: Vec) -> Result { + if tokens.len() != $count { + return Err(Error::MultiDetokenizeLengthMismatch); + } + #[allow(unused_variables)] + #[allow(unused_mut)] + let mut drain = tokens.into_iter(); + Ok(($( + $ty::from_token(drain.next().unwrap())?, + )*)) + } + } + } +} + +impl_multi_for_tuple!(0,); +impl_multi_for_tuple!(1, A:0, ); +impl_multi_for_tuple!(2, A:0, B:1, ); +impl_multi_for_tuple!(3, A:0, B:1, C:2, ); +impl_multi_for_tuple!(4, A:0, B:1, C:2, D:3, ); +impl_multi_for_tuple!(5, A:0, B:1, C:2, D:3, E:4, ); +impl_multi_for_tuple!(6, A:0, B:1, C:2, D:3, E:4, F:5, ); +impl_multi_for_tuple!(7, A:0, B:1, C:2, D:3, E:4, F:5, G:6, ); +impl_multi_for_tuple!(8, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, ); +impl_multi_for_tuple!(9, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, ); +impl_multi_for_tuple!(10, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, ); +impl_multi_for_tuple!(11, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, ); +impl_multi_for_tuple!(12, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, ); +impl_multi_for_tuple!(13, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, ); +impl_multi_for_tuple!(14, A:0, 
B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, );
impl_multi_for_tuple!(15, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, );
impl_multi_for_tuple!(16, A:0, B:1, C:2, D:3, E:4, F:5, G:6, H:7, I:8, J:9, K:10, L:11, M:12, N:13, O:14, P:15, );

#[cfg(test)]
mod tests {
    use super::*;

    /// Asserts that `from_token(into_token(value))` round-trips back to
    /// `value`.
    fn assert_single_tokenize_roundtrip<T>(value: T)
    where
        T: SingleTokenize + Clone + std::fmt::Debug + Eq,
    {
        assert_eq!(value, T::from_token(value.clone().into_token()).unwrap());
    }

    #[test]
    fn single_tokenize_roundtrip() {
        assert_single_tokenize_roundtrip(u8::MIN);
        assert_single_tokenize_roundtrip(u8::MAX);
        assert_single_tokenize_roundtrip(i8::MIN);
        assert_single_tokenize_roundtrip(i8::MAX);
        assert_single_tokenize_roundtrip(u16::MIN);
        assert_single_tokenize_roundtrip(i16::MAX);
        assert_single_tokenize_roundtrip(u32::MIN);
        assert_single_tokenize_roundtrip(i32::MAX);
        assert_single_tokenize_roundtrip(u64::MIN);
        assert_single_tokenize_roundtrip(i64::MAX);
        assert_single_tokenize_roundtrip(u128::MIN);
        assert_single_tokenize_roundtrip(i128::MAX);
        assert_single_tokenize_roundtrip(U256::zero());
        assert_single_tokenize_roundtrip(U256::MAX);
        assert_single_tokenize_roundtrip(I256::MIN);
        assert_single_tokenize_roundtrip(I256::MAX);
        assert_single_tokenize_roundtrip(false);
        assert_single_tokenize_roundtrip(true);
        assert_single_tokenize_roundtrip("abcd".to_string());
        assert_single_tokenize_roundtrip(vec![0u8, 1u8, 2u8]);
        assert_single_tokenize_roundtrip([0u8, 1u8, 2u8]);
        assert_single_tokenize_roundtrip(Address::from_low_u64_be(42));
        assert_single_tokenize_roundtrip(TransactionHash::from_low_u64_be(42));
        assert_single_tokenize_roundtrip(());
        assert_single_tokenize_roundtrip((-1i8, 1i8));
        assert_single_tokenize_roundtrip([-1i8, 1i8]);
    }

    // `[u8; N]` tokenizes to FixedBytes while `Vec<u8>` tokenizes to Bytes.
    #[test]
    fn tokenize_bytes() {
        assert!(matches!([0u8].into_token(), Token::FixedBytes(_)));
        assert!(matches!(vec![0u8].into_token(),
Token::Bytes(_)));
    }

    // Nested containers and tuples: (Vec<[(u8, i8); 1]>, bool) should encode
    // as Tuple(Array(FixedArray(Tuple(...))), Bool).
    #[test]
    fn complex() {
        let rust = (vec![[(0u8, 1i8)]], false);
        let token = Token::Tuple(vec![
            Token::Array(vec![Token::FixedArray(vec![Token::Tuple(vec![
                Token::Uint(0.into()),
                Token::Int(1.into()),
            ])])]),
            Token::Bool(false),
        ]);
        assert_eq!(rust.clone().into_token(), token);
        assert_single_tokenize_roundtrip(rust);
    }

    #[test]
    fn multi_tokenize() {
        let rust = (0u8, 1i8, 2u16);
        let tokens = rust.into_tokens();
        assert!(matches!(
            tokens.as_slice(),
            [Token::Uint(_), Token::Int(_), Token::Uint(_)]
        ));
        assert_eq!(rust, MultiDetokenize::from_tokens(tokens).unwrap());
    }

    // I256 is tokenized via its raw two's-complement U256 representation.
    #[test]
    fn i256_tokenization() {
        assert_eq!(I256::from(42).into_token(), 42i32.into_token());
        assert_eq!(I256::minus_one().into_token(), Token::Int(U256::MAX));
        assert_eq!(
            I256::from_token(Token::Int(U256::MAX)).unwrap(),
            I256::minus_one()
        );

        assert_eq!(
            I256::from_token(42i32.into_token()).unwrap(),
            I256::from(42),
        );
    }
}
diff --git a/examples/examples/abi.rs b/examples/examples/abi.rs
index d6ef9e4c..734c9c45 100644
--- a/examples/examples/abi.rs
+++ b/examples/examples/abi.rs
@@ -60,6 +60,26 @@ async fn calls(instance: &AbiTypes) {
     debug_call!(instance.get_array());
     debug_call!(instance.get_fixed_array());
+
+    let value = (4, 2);
+    let result = instance.abiv_2_struct(value).call().await.unwrap();
+    assert_eq!(result, value);
+
+    let value = vec![(4, 2), (5, 3)];
+    let result = instance
+        .abiv_2_array_of_struct(value.clone())
+        .call()
+        .await
+        .unwrap();
+    assert_eq!(result, value);
+
+    let value = [vec![(4, 2)], vec![(5, 3), (6, 4)], vec![]];
+    let result = instance
+        .abiv_2_array_of_array_of_struct(value.clone())
+        .call()
+        .await
+        .unwrap();
+    assert_eq!(result, value);
 }
 
 async fn events(instance: &AbiTypes) {
diff --git a/examples/examples/batch.rs b/examples/examples/batch.rs
index dfef79ed..27a11a47 100644
--- a/examples/examples/batch.rs
+++ b/examples/examples/batch.rs
@@ -30,14 +30,8 @@
async fn main() {
     let mut batch = CallBatch::new(web3.transport());
     let calls = vec![
-        instance
-            .balance_of(accounts[1])
-            .view()
-            .batch_call(&mut batch),
-        instance
-            .balance_of(accounts[2])
-            .view()
-            .batch_call(&mut batch),
+        instance.balance_of(accounts[1]).batch_call(&mut batch),
+        instance.balance_of(accounts[2]).batch_call(&mut batch),
     ];
     batch.execute_all(usize::MAX).await;
     for (id, call) in calls.into_iter().enumerate() {
diff --git a/examples/examples/sources.rs b/examples/examples/sources.rs
index 2fd0989e..da2dd938 100644
--- a/examples/examples/sources.rs
+++ b/examples/examples/sources.rs
@@ -1,3 +1,4 @@
+/*
 use ethcontract::prelude::*;
 
 use std::env;
@@ -31,3 +32,5 @@ async fn main() {
     println!("npmjs ERC20 token {}", symbol);
 }
+*/
+fn main() {}
diff --git a/examples/truffle/contracts/AbiTypes.sol b/examples/truffle/contracts/AbiTypes.sol
index b3bf181d..1d358d94 100644
--- a/examples/truffle/contracts/AbiTypes.sol
+++ b/examples/truffle/contracts/AbiTypes.sol
@@ -87,15 +87,15 @@ contract AbiTypes {
         return buf;
     }
 
-    event ValueUint(uint8, uint16, uint32, uint64, uint128, uint256 indexed value);
-    event ValueInt(int8, int16, int32, int64, int128, int256 indexed value);
+    event ValueUint(uint8 a, uint16 b, uint32 c, uint64 d, uint128 e, uint256 indexed value);
+    event ValueInt(int8 a, int16 b, int32 c, int64 d, int128 e, int256 indexed value);
 
     event ValueBool(bool);
-    event ValueBytes(string id, bytes, bytes6, address whoami);
-    event ValueArray(uint64[], int32[3]);
+    event ValueBytes(string id, bytes a, bytes6 b, address whoami);
+    event ValueArray(uint64[] a, int32[3] b);
 
-    event ValueIndexed(string indexed, uint64[] indexed);
+    event ValueIndexed(string indexed a, uint64[] indexed b);
 
     event Values(bytes32 indexed block, address sender) anonymous;
 
@@ -108,4 +108,21 @@ contract AbiTypes {
         emit ValueIndexed(getString(), getArray());
         emit Values(blockhash(block.number - 1), msg.sender);
     }
+
+    // Abi v2
+
+    struct S {
+        uint8 u0;
+        uint16 u1;
+    }
+
+    // Echo functions exercising ABI-v2 struct encoding. They read no
+    // contract state, so all three are `pure` (the original marked the two
+    // array variants `view`, which solc flags as restrictable to `pure`).
+    function abiv2Struct(S calldata s) public pure returns (S calldata) {
+        return s;
+    }
+    function abiv2ArrayOfStruct(S[] calldata s) public pure returns (S[] calldata) {
+        return s;
+    }
+    function abiv2ArrayOfArrayOfStruct(S[][3] calldata s) public pure returns (S[][3] calldata) {
+        return s;
+    }
 }