Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Merged by Bors] - VC: accept unknown fields in chain spec #2277

Closed
wants to merge 4 commits into from
Closed
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,11 @@ test-full: cargo-fmt test-release test-debug test-ef
# Lints the code for bad style and potentially unsafe arithmetic using Clippy.
# Clippy lints are opt-in per-crate for now. By default, everything is allowed except for performance and correctness lints.
lint:
cargo clippy --all --tests -- -D warnings
cargo clippy --all --tests -- \
-D warnings \
-A clippy::from-over-into \
-A clippy::upper-case-acronyms \
-A clippy::vec-init-then-push
Comment on lines +126 to +128
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There were a large number of places where we violated these lints, so I think we should leave them for another time. Particularly so we don't introduce unnecessary noise to conflict with other larger refactors that are ongoing (e.g. #2279).


# Runs the makefile in the `ef_tests` repo.
#
Expand Down
6 changes: 3 additions & 3 deletions account_manager/src/validator/exit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use eth2_keystore::Keystore;
use eth2_network_config::Eth2NetworkConfig;
use safe_arith::SafeArith;
use slot_clock::{SlotClock, SystemTimeSlotClock};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::time::Duration;
use tokio::time::sleep;
use types::{ChainSpec, Epoch, EthSpec, Fork, VoluntaryExit};
Expand Down Expand Up @@ -91,7 +91,7 @@ pub fn cli_run<E: EthSpec>(matches: &ArgMatches, env: Environment<E>) -> Result<

/// Gets the keypair and validator_index for every validator and calls `publish_voluntary_exit` on it.
async fn publish_voluntary_exit<E: EthSpec>(
keystore_path: &PathBuf,
keystore_path: &Path,
password_file_path: Option<&PathBuf>,
client: &BeaconNodeHttpClient,
spec: &ChainSpec,
Expand Down Expand Up @@ -310,7 +310,7 @@ fn get_current_epoch<E: EthSpec>(genesis_time: u64, spec: &ChainSpec) -> Option<
/// If the `password_file_path` is Some, unlock keystore using password in given file
/// otherwise, prompts user for a password to unlock the keystore.
fn load_voting_keypair(
voting_keystore_path: &PathBuf,
voting_keystore_path: &Path,
password_file_path: Option<&PathBuf>,
stdin_inputs: bool,
) -> Result<Keypair, String> {
Expand Down
2 changes: 1 addition & 1 deletion beacon_node/eth1/src/inner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ impl Inner {
pub fn from_bytes(bytes: &[u8], config: Config, spec: ChainSpec) -> Result<Self, String> {
let ssz_cache = SszEth1Cache::from_ssz_bytes(bytes)
.map_err(|e| format!("Ssz decoding error: {:?}", e))?;
Ok(ssz_cache.to_inner(config, spec)?)
ssz_cache.to_inner(config, spec)
}

/// Returns a reference to the specification.
Expand Down
4 changes: 2 additions & 2 deletions beacon_node/eth2_libp2p/src/behaviour/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ use ssz::Encode;
use std::collections::HashSet;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::{
collections::VecDeque,
marker::PhantomData,
Expand Down Expand Up @@ -1336,7 +1336,7 @@ impl<TSpec: EthSpec> std::convert::From<Response<TSpec>> for RPCCodedResponse<TS
}

/// Persist metadata to disk
pub fn save_metadata_to_disk<E: EthSpec>(dir: &PathBuf, metadata: MetaData<E>, log: &slog::Logger) {
pub fn save_metadata_to_disk<E: EthSpec>(dir: &Path, metadata: MetaData<E>, log: &slog::Logger) {
let _ = std::fs::create_dir_all(&dir);
match File::create(dir.join(METADATA_FILENAME))
.and_then(|mut f| f.write_all(&metadata.as_ssz_bytes()))
Expand Down
2 changes: 1 addition & 1 deletion beacon_node/eth2_libp2p/src/service.rs
Original file line number Diff line number Diff line change
Expand Up @@ -474,7 +474,7 @@ fn strip_peer_id(addr: &mut Multiaddr) {

/// Load metadata from persisted file. Return default metadata if loading fails.
fn load_or_build_metadata<E: EthSpec>(
network_dir: &std::path::PathBuf,
network_dir: &std::path::Path,
log: &slog::Logger,
) -> MetaData<E> {
// Default metadata
Expand Down
4 changes: 2 additions & 2 deletions beacon_node/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use std::cmp::max;
use std::fs;
use std::net::{IpAddr, Ipv4Addr, ToSocketAddrs};
use std::net::{TcpListener, UdpSocket};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use types::{ChainSpec, Checkpoint, Epoch, EthSpec, Hash256, PublicKeyBytes, GRAFFITI_BYTES_LEN};

Expand Down Expand Up @@ -422,7 +422,7 @@ pub fn get_config<E: EthSpec>(
pub fn set_network_config(
config: &mut NetworkConfig,
cli_args: &ArgMatches,
data_dir: &PathBuf,
data_dir: &Path,
log: &Logger,
use_listening_port_as_enr_port_by_default: bool,
) -> Result<(), String> {
Expand Down
2 changes: 1 addition & 1 deletion beacon_node/store/src/hot_cold_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -660,7 +660,7 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
partial_state.load_historical_roots(&self.cold_db, &self.spec)?;
partial_state.load_randao_mixes(&self.cold_db, &self.spec)?;

Ok(partial_state.try_into()?)
partial_state.try_into()
}

/// Load a restore point state by its `restore_point_index`.
Expand Down
2 changes: 1 addition & 1 deletion common/deposit_contract/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ fn testnet_url() -> String {
fn main() {
match get_all_contracts() {
Ok(()) => (),
Err(e) => panic!(e),
Err(e) => panic!("{}", e),
}
}

Expand Down
2 changes: 1 addition & 1 deletion common/deposit_contract/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ pub fn decode_eth1_tx_data(
)
.map_err(DecodeError::SszDecodeError)?
};
};
}

let root = decode_token!(Hash256, to_fixed_bytes);

Expand Down
6 changes: 5 additions & 1 deletion common/lighthouse_version/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,10 @@ mod test {
fn version_formatting() {
let re = Regex::new(r"^Lighthouse/v[0-9]+\.[0-9]+\.[0-9]+(-rc.[0-9])?-[[:xdigit:]]{7}\+?$")
.unwrap();
assert!(re.is_match(VERSION), VERSION);
assert!(
re.is_match(VERSION),
"version doesn't match regex: {}",
VERSION
);
}
}
2 changes: 1 addition & 1 deletion common/remote_signer_consumer/tests/post.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ mod post {
let r = run_testcase(u).unwrap_err();

for msg in msgs.iter() {
assert!(r.contains(msg), format!("{:?} should contain {:?}", r, msg));
assert!(r.contains(msg), "{:?} should contain {:?}", r, msg);
}
};

Expand Down
10 changes: 5 additions & 5 deletions common/validator_dir/src/validator_dir.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ impl ValidatorDir {
}

/// Returns the `dir` provided to `Self::open`.
pub fn dir(&self) -> &PathBuf {
pub fn dir(&self) -> &Path {
&self.dir
}

Expand Down Expand Up @@ -204,7 +204,7 @@ impl ValidatorDir {

/// Attempts to load and decrypt a Keypair given path to the keystore.
pub fn unlock_keypair<P: AsRef<Path>>(
keystore_path: &PathBuf,
keystore_path: &Path,
password_dir: P,
) -> Result<Keypair, Error> {
let keystore = Keystore::from_json_reader(
Expand All @@ -229,8 +229,8 @@ pub fn unlock_keypair<P: AsRef<Path>>(

/// Attempts to load and decrypt a Keypair given path to the keystore and the password file.
pub fn unlock_keypair_from_password_path(
keystore_path: &PathBuf,
password_path: &PathBuf,
keystore_path: &Path,
password_path: &Path,
) -> Result<Keypair, Error> {
let keystore = Keystore::from_json_reader(
&mut OpenOptions::new()
Expand All @@ -242,7 +242,7 @@ pub fn unlock_keypair_from_password_path(
.map_err(Error::UnableToReadKeystore)?;

let password: PlainText = read(password_path)
.map_err(|_| Error::UnableToReadPassword(password_path.clone()))?
.map_err(|_| Error::UnableToReadPassword(password_path.into()))?
.into();
keystore
.decrypt_keypair(password.as_bytes())
Expand Down
4 changes: 2 additions & 2 deletions consensus/serde_utils/src/hex.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ impl<'de> Visitor<'de> for HexVisitor {
where
E: de::Error,
{
Ok(hex::decode(value.trim_start_matches("0x"))
.map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))?)
hex::decode(value.trim_start_matches("0x"))
.map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
}
}

Expand Down
2 changes: 1 addition & 1 deletion consensus/ssz/src/decode/impls.rs
Original file line number Diff line number Diff line change
Expand Up @@ -353,7 +353,7 @@ macro_rules! impl_decodable_for_u8_array {
Err(DecodeError::InvalidByteLength { len, expected })
} else {
let mut array: [u8; $len] = [0; $len];
array.copy_from_slice(&bytes[..]);
array.copy_from_slice(bytes);

Ok(array)
}
Expand Down
6 changes: 3 additions & 3 deletions consensus/ssz_types/src/fixed_vector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -108,9 +108,9 @@ impl<T: Default, N: Unsigned> From<Vec<T>> for FixedVector<T, N> {
}
}

impl<T, N: Unsigned> Into<Vec<T>> for FixedVector<T, N> {
fn into(self) -> Vec<T> {
self.vec
impl<T, N: Unsigned> From<FixedVector<T, N>> for Vec<T> {
    /// Consumes the fixed-length vector and yields its backing `Vec` without copying.
    fn from(fixed: FixedVector<T, N>) -> Self {
        fixed.vec
    }
}

Expand Down
6 changes: 3 additions & 3 deletions consensus/ssz_types/src/variable_list.rs
Original file line number Diff line number Diff line change
Expand Up @@ -120,9 +120,9 @@ impl<T, N: Unsigned> From<Vec<T>> for VariableList<T, N> {
}
}

impl<T, N: Unsigned> Into<Vec<T>> for VariableList<T, N> {
fn into(self) -> Vec<T> {
self.vec
impl<T, N: Unsigned> From<VariableList<T, N>> for Vec<T> {
    /// Consumes the list and yields its backing `Vec` without copying.
    fn from(items: VariableList<T, N>) -> Self {
        items.vec
    }
}

Expand Down
36 changes: 33 additions & 3 deletions consensus/types/src/chain_spec.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use crate::*;
use int_to_bytes::int_to_bytes4;
use serde_derive::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fs::File;
use std::path::Path;
use tree_hash::TreeHash;
Expand Down Expand Up @@ -449,10 +450,8 @@ mod tests {
}

/// YAML config file as defined by the spec.
///
/// Spec v0.12.3
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
#[serde(rename_all = "UPPERCASE", deny_unknown_fields)]
#[serde(rename_all = "UPPERCASE")]
pub struct YamlConfig {
pub config_name: String,
// ChainSpec
Expand Down Expand Up @@ -576,6 +575,10 @@ pub struct YamlConfig {
#[serde(with = "serde_utils::quoted_u64")]
deposit_network_id: u64,
deposit_contract_address: Address,

// Extra fields (could be from a future hard-fork that we don't yet know).
#[serde(flatten)]
pub extra_fields: HashMap<String, String>,
}

impl Default for YamlConfig {
Expand Down Expand Up @@ -671,6 +674,8 @@ impl YamlConfig {
deposit_chain_id: spec.deposit_chain_id,
deposit_network_id: spec.deposit_network_id,
deposit_contract_address: spec.deposit_contract_address,

extra_fields: HashMap::new(),
}
}

Expand Down Expand Up @@ -849,6 +854,31 @@ mod yaml_tests {
assert_eq!(from, yamlconfig);
}

#[test]
fn extra_fields_round_trip() {
    // Keys the struct doesn't know about (e.g. from a future hard-fork) must
    // survive a serialize -> deserialize cycle via the flattened `extra_fields` map.
    let tmp_file = NamedTempFile::new().expect("failed to create temp file");
    let writer = OpenOptions::new()
        .read(false)
        .write(true)
        .open(tmp_file.as_ref())
        .expect("error opening file");

    // Start from the mainnet spec and bolt on two unknown keys.
    let mainnet_spec = ChainSpec::mainnet();
    let mut yamlconfig = YamlConfig::from_spec::<MainnetEthSpec>(&mainnet_spec);
    for (key, value) in [
        ("SAMPLE_HARDFORK_KEY1", "123456789"),
        ("SAMPLE_HARDFORK_KEY2", "987654321"),
    ]
    .iter()
    {
        yamlconfig.extra_fields.insert((*key).into(), (*value).into());
    }
    serde_yaml::to_writer(writer, &yamlconfig).expect("failed to write or serialize");

    // Read the file back; the extra fields should round-trip intact.
    let reader = OpenOptions::new()
        .read(true)
        .write(false)
        .open(tmp_file.as_ref())
        .expect("error while opening the file");
    let from: YamlConfig = serde_yaml::from_reader(reader).expect("error while deserializing");
    assert_eq!(from, yamlconfig);
}

#[test]
fn apply_to_spec() {
let mut spec = ChainSpec::minimal();
Expand Down
16 changes: 8 additions & 8 deletions crypto/bls/src/generic_signature_set.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,35 +19,35 @@ where
aggregate: Cow<'a, GenericAggregateSignature<Pub, AggPub, Sig, AggSig>>,
}

impl<'a, Pub, AggPub, Sig, AggSig> Into<WrappedSignature<'a, Pub, AggPub, Sig, AggSig>>
for &'a GenericSignature<Pub, Sig>
impl<'a, Pub, AggPub, Sig, AggSig> From<&'a GenericSignature<Pub, Sig>>
for WrappedSignature<'a, Pub, AggPub, Sig, AggSig>
where
Pub: TPublicKey + Clone,
AggPub: Clone,
Sig: TSignature<Pub> + Clone,
AggSig: TAggregateSignature<Pub, AggPub, Sig> + Clone,
{
fn into(self) -> WrappedSignature<'a, Pub, AggPub, Sig, AggSig> {
fn from(sig: &'a GenericSignature<Pub, Sig>) -> Self {
let mut aggregate: GenericAggregateSignature<Pub, AggPub, Sig, AggSig> =
GenericAggregateSignature::infinity();
aggregate.add_assign(self);
aggregate.add_assign(sig);
WrappedSignature {
aggregate: Cow::Owned(aggregate),
}
}
}

impl<'a, Pub, AggPub, Sig, AggSig> Into<WrappedSignature<'a, Pub, AggPub, Sig, AggSig>>
for &'a GenericAggregateSignature<Pub, AggPub, Sig, AggSig>
impl<'a, Pub, AggPub, Sig, AggSig> From<&'a GenericAggregateSignature<Pub, AggPub, Sig, AggSig>>
for WrappedSignature<'a, Pub, AggPub, Sig, AggSig>
where
Pub: TPublicKey + Clone,
AggPub: Clone,
Sig: Clone,
AggSig: Clone,
{
fn into(self) -> WrappedSignature<'a, Pub, AggPub, Sig, AggSig> {
fn from(aggregate: &'a GenericAggregateSignature<Pub, AggPub, Sig, AggSig>) -> Self {
WrappedSignature {
aggregate: Cow::Borrowed(self),
aggregate: Cow::Borrowed(aggregate),
}
}
}
Expand Down
6 changes: 6 additions & 0 deletions testing/ef_tests/tests/tests.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
#![cfg(feature = "ef_tests")]

use ef_tests::*;
use std::collections::HashMap;
use std::path::PathBuf;
use types::*;

Expand All @@ -17,6 +18,11 @@ fn config_test<E: EthSpec + TypeName>() {
let yaml_from_spec = YamlConfig::from_spec::<E>(&spec);
assert_eq!(yaml_config.apply_to_chain_spec::<E>(&spec), Some(spec));
assert_eq!(yaml_from_spec, yaml_config);
assert_eq!(
yaml_config.extra_fields,
HashMap::new(),
"not all config fields read"
);
}

#[test]
Expand Down
6 changes: 3 additions & 3 deletions testing/state_transition_vectors/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use state_processing::test_utils::BlockProcessingBuilder;
use std::env;
use std::fs::{self, File};
use std::io::Write;
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::process::exit;
use types::MainnetEthSpec;
use types::{BeaconState, ChainSpec, EthSpec, SignedBeaconBlock};
Expand Down Expand Up @@ -91,12 +91,12 @@ fn write_vectors_to_file(title: &str, vectors: &[TestVector]) -> Result<(), Stri
}

/// Write some SSZ object to file.
fn write_to_ssz_file<T: Encode>(path: &PathBuf, item: &T) -> Result<(), String> {
/// Write some SSZ object to file.
fn write_to_ssz_file<T: Encode>(path: &Path, item: &T) -> Result<(), String> {
    // Serialize first, then delegate the actual file write.
    let bytes = item.as_ssz_bytes();
    write_to_file(path, &bytes)
}

/// Write some bytes to file.
fn write_to_file(path: &PathBuf, item: &[u8]) -> Result<(), String> {
fn write_to_file(path: &Path, item: &[u8]) -> Result<(), String> {
File::create(path)
.map_err(|e| format!("Unable to create {:?}: {:?}", path, e))
.and_then(|mut file| {
Expand Down
Loading