From fac2e3c04baf090c3bfe228d74bf1b5abfb1c07d Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 10 Jun 2024 04:29:23 +0300 Subject: [PATCH 1/9] feat: track and cache context of each compiler invocation --- src/artifact_output/mod.rs | 26 +++++-- src/artifacts/mod.rs | 21 +++++- src/buildinfo.rs | 128 +++++++++++++++++++++++--------- src/cache.rs | 112 +++++++++++++++++++++------- src/compile/output/contracts.rs | 1 + src/compile/output/mod.rs | 95 ++++++++++++++---------- src/compile/output/sources.rs | 1 + src/compile/project.rs | 32 ++++---- src/compilers/mod.rs | 13 +++- src/compilers/multi.rs | 18 ++++- src/compilers/solc.rs | 16 ++-- src/compilers/vyper/error.rs | 4 +- src/compilers/vyper/input.rs | 13 +++- src/compilers/vyper/mod.rs | 25 ++++++- src/flatten.rs | 2 +- src/lib.rs | 12 +-- src/project_util/mod.rs | 2 +- tests/project.rs | 15 ++-- 18 files changed, 377 insertions(+), 159 deletions(-) diff --git a/src/artifact_output/mod.rs b/src/artifact_output/mod.rs index 95163b9a..474f4c58 100644 --- a/src/artifact_output/mod.rs +++ b/src/artifact_output/mod.rs @@ -6,6 +6,7 @@ use crate::{ BytecodeObject, CompactBytecode, CompactContractBytecodeCow, CompactDeployedBytecode, FileToContractsMap, SourceFile, }, + cache::CachedArtifact, compile::output::{contracts::VersionedContracts, sources::VersionedSourceFiles}, error::Result, sourcemap::{SourceMap, SyntaxError}, @@ -40,6 +41,8 @@ pub struct ArtifactId { pub source: PathBuf, /// `solc` version that produced this artifact pub version: Version, + /// `solc` build id + pub build_id: String, } impl ArtifactId { @@ -68,7 +71,7 @@ impl ArtifactId { /// Returns a `:` slug that uniquely identifies an artifact pub fn identifier(&self) -> String { - format!("{}:{}", self.source.to_string_lossy(), self.name) + format!("{}:{}", self.source.display(), self.name) } /// Returns a `:` slug that identifies an artifact @@ -93,6 +96,7 @@ pub struct ArtifactFile { pub file: PathBuf, /// `solc` version that produced this artifact pub version: Version, + pub build_id: String, } impl ArtifactFile { @@ -274,6 +278,7 @@ impl Artifacts { name, source: source.clone(), version: artifact.version.clone(), + build_id: artifact.build_id.clone(), } .with_slashed_paths(), &artifact.artifact, @@ -299,6 +304,7 @@ impl Artifacts { name, source: source.clone(), version: artifact.version, + build_id: artifact.build_id.clone(), } .with_slashed_paths(), artifact.artifact, @@ -821,9 +827,9 @@ pub trait ArtifactOutput { // we reuse the path, this will make sure that even if there are conflicting // files (files for witch `T::output_file()` would return the same path) we use // consistent output paths - if let Some(existing_artifact) = ctx.existing_artifact(file, name, version).cloned() { + if let Some(existing_artifact) = ctx.existing_artifact(file, name, version) { trace!("use existing artifact file {:?}", existing_artifact,); - existing_artifact + existing_artifact.to_path_buf() } else { let path = if versioned { Self::output_file_versioned(file, name, version) @@ -863,7 +869,7 @@ pub trait ArtifactOutput { .existing_artifacts .values() .flat_map(|artifacts| artifacts.values().flat_map(|artifacts| artifacts.values())) - .map(|p| p.to_slash_lossy().to_lowercase()) + .map(|a| a.path.to_slash_lossy().to_lowercase()) .collect::>(); let mut files = contracts.keys().collect::>(); @@ -911,6 +917,7 @@ pub trait ArtifactOutput { artifact, file: artifact_path, version: contract.version.clone(), + build_id: contract.build_id.clone(), }; artifacts @@ -969,6 +976,7 @@ pub trait 
ArtifactOutput { artifact, file: artifact_path, version: source.version.clone(), + build_id: source.build_id.clone(), }); } } @@ -1021,7 +1029,8 @@ pub struct OutputContext<'a> { /// └── inner /// └── a.sol /// ``` - pub existing_artifacts: BTreeMap<&'a Path, &'a BTreeMap>>, + pub existing_artifacts: + BTreeMap<&'a Path, &'a BTreeMap>>, } // === impl OutputContext @@ -1047,9 +1056,12 @@ impl<'a> OutputContext<'a> { file: impl AsRef, contract: &str, version: &Version, - ) -> Option<&PathBuf> { + ) -> Option<&Path> { self.existing_artifacts.get(file.as_ref()).and_then(|contracts| { - contracts.get(contract).and_then(|versions| versions.get(version)) + contracts + .get(contract) + .and_then(|versions| versions.get(version)) + .map(|a| a.path.as_path()) }) } } diff --git a/src/artifacts/mod.rs b/src/artifacts/mod.rs index 83656795..60ea635e 100644 --- a/src/artifacts/mod.rs +++ b/src/artifacts/mod.rs @@ -1981,7 +1981,14 @@ impl SourceFiles { #[cfg(test)] mod tests { use super::*; - use crate::AggregatedCompilerOutput; + use crate::{ + buildinfo::RawBuildInfo, + compilers::{ + solc::{SolcCompiler, SolcVersionedInput}, + CompilerInput, + }, + AggregatedCompilerOutput, + }; use alloy_primitives::Address; #[test] @@ -2014,8 +2021,16 @@ mod tests { sources: Default::default(), }; - let mut aggregated = AggregatedCompilerOutput::default(); - aggregated.extend("0.8.12".parse().unwrap(), out_converted); + let v: Version = "0.8.12".parse().unwrap(); + let input = SolcVersionedInput::build( + Default::default(), + Default::default(), + SolcLanguage::Solidity, + v.clone(), + ); + let build_info = RawBuildInfo::new(&input, &out_converted).unwrap(); + let mut aggregated = AggregatedCompilerOutput::::default(); + aggregated.extend(v, build_info, out_converted); assert!(!aggregated.is_unchanged()); } diff --git a/src/buildinfo.rs b/src/buildinfo.rs index c06561b7..37ed5d9d 100644 --- a/src/buildinfo.rs +++ b/src/buildinfo.rs @@ -1,11 +1,20 @@ //! Represents an entire build -use crate::{utils, SolcError}; +use crate::{ + compilers::{CompilationError, CompilerInput, CompilerOutput, Language}, + error::Result, + utils, +}; use alloy_primitives::hex; use md5::Digest; use semver::Version; -use serde::{de::DeserializeOwned, ser::SerializeStruct, Deserialize, Serialize, Serializer}; -use std::{cell::RefCell, path::Path, rc::Rc}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use std::{ + cell::RefCell, + collections::{BTreeMap, HashMap, HashSet}, + path::{Path, PathBuf}, + rc::Rc, +}; pub const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-build-info-1"; @@ -24,59 +33,97 @@ pub struct BuildInfo { impl BuildInfo { /// Deserializes the `BuildInfo` object from the given file - pub fn read(path: impl AsRef) -> Result { + pub fn read(path: impl AsRef) -> Result { utils::read_json_file(path) } } +/// Additional context we cache for each compiler run. 
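// A standalone sketch of the id-to-path map the `BuildContext` struct below
// caches: the compiler assigns each source file a numeric id in its output, and
// we invert that into `source id -> path` for the files that were actually part
// of the input set. `SourceFile` and the map shapes here are simplified
// stand-ins for the real `CompilerOutput` types, not the patch's definitions.
use std::collections::{BTreeMap, HashSet};
use std::path::PathBuf;

struct SourceFile {
    id: u32,
}

fn source_id_map(
    output_sources: &BTreeMap<PathBuf, SourceFile>,
    input_paths: &HashSet<PathBuf>,
) -> BTreeMap<u32, PathBuf> {
    output_sources
        .iter()
        // Imported sources the compiler reports but that were not passed as
        // inputs are skipped, mirroring the `input_sources.contains` check below.
        .filter(|(path, _)| input_paths.contains(*path))
        .map(|(path, file)| (file.id, path.clone()))
        .collect()
}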
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] +pub struct BuildContext { + pub source_id_to_path: HashMap, + pub language: L, +} + +impl BuildContext { + pub fn new(input: &I, output: &CompilerOutput) -> Result + where + I: CompilerInput, + { + let mut source_id_to_path = HashMap::new(); + + let input_sources = input.sources().map(|(path, _)| path).collect::>(); + for (path, source) in output.sources.iter() { + if input_sources.contains(path.as_path()) { + source_id_to_path.insert(source.id, path.to_path_buf()); + } + } + + Ok(Self { source_id_to_path, language: input.language() }) + } + + pub fn join_all(&mut self, root: impl AsRef) { + self.source_id_to_path.values_mut().for_each(|path| { + *path = root.as_ref().join(path.as_path()); + }); + } +} + /// Represents `BuildInfo` object #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] -pub struct RawBuildInfo { +pub struct RawBuildInfo { /// The hash that identifies the BuildInfo pub id: String, + #[serde(flatten)] + pub build_context: BuildContext, /// serialized `BuildInfo` json - pub build_info: String, + #[serde(flatten)] + pub build_info: BTreeMap, } // === impl RawBuildInfo === -impl RawBuildInfo { +impl RawBuildInfo { /// Serializes a `BuildInfo` object - pub fn new( + pub fn new, E: CompilationError>( input: &I, - output: &O, - version: &Version, - ) -> serde_json::Result { + output: &CompilerOutput, + ) -> Result> { + let version = input.version().clone(); + let build_context = BuildContext::new(input, output)?; + let mut hasher = md5::Md5::new(); - let w = BuildInfoWriter { buf: Rc::new(RefCell::new(Vec::with_capacity(128))) }; - let mut buf = w.clone(); - let mut serializer = serde_json::Serializer::pretty(&mut buf); - let mut s = serializer.serialize_struct("BuildInfo", 6)?; - s.serialize_field("_format", ÐERS_FORMAT_VERSION)?; + let mut build_info = BTreeMap::new(); + + build_info.insert("_format".to_string(), serde_json::to_value(ÐERS_FORMAT_VERSION)?); + hasher.update(ETHERS_FORMAT_VERSION); + let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch); - s.serialize_field("solcVersion", &solc_short)?; - s.serialize_field("solcLongVersion", &version)?; - s.serialize_field("input", input)?; + build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?); + hasher.update(&solc_short); + + build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?); + hasher.update(&version.to_string()); + + let input = serde_json::to_value(input)?; + hasher.update(&serde_json::to_string(&input)?); + build_info.insert("input".to_string(), input); // create the hash for `{_format,solcVersion,solcLongVersion,input}` // N.B. this is not exactly the same as hashing the json representation of these values but // the must efficient one - hasher.update(&*w.buf.borrow()); let result = hasher.finalize(); let id = hex::encode(result); - s.serialize_field("id", &id)?; - s.serialize_field("output", output)?; - s.end()?; + build_info.insert("output".to_string(), serde_json::to_value(&output)?); - drop(buf); - - let build_info = unsafe { - // serde_json does not emit non UTF8 - String::from_utf8_unchecked(w.buf.take()) - }; + Ok(RawBuildInfo { id, build_info, build_context }) + } - Ok(RawBuildInfo { id, build_info }) + // We only join [BuildContext] paths here because input and output are kept in the same format + // as compiler seen/produced them. 
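// The `join_all` helpers rebase the cached, root-relative source paths onto the
// project root. A quick sketch of the `Path::join` semantics this relies on
// (Unix-style paths assumed): joining a relative path appends it to the base,
// while joining an already-absolute path replaces the base entirely, so
// absolute entries pass through unchanged.
use std::path::{Path, PathBuf};

fn main() {
    let root = Path::new("/project");
    assert_eq!(
        root.join("src/Counter.sol"),
        PathBuf::from("/project/src/Counter.sol")
    );
    // Already-absolute paths are returned as-is by `join`:
    assert_eq!(
        root.join("/lib/forge-std/src/Test.sol"),
        PathBuf::from("/lib/forge-std/src/Test.sol")
    );
}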
+ pub fn join_all(&mut self, root: impl AsRef) { + self.build_context.join_all(root); } } @@ -98,19 +145,28 @@ impl std::io::Write for BuildInfoWriter { #[cfg(test)] mod tests { use super::*; - use crate::{artifacts::Error, compilers::CompilerOutput, SolcInput, Source}; + use crate::{ + artifacts::Error, + compilers::{ + solc::{SolcLanguage, SolcVersionedInput}, + CompilerOutput, + }, + Source, + }; use std::{collections::BTreeMap, path::PathBuf}; #[test] fn build_info_serde() { - let inputs = SolcInput::resolve_and_build( + let v: Version = "0.8.4+commit.c7e474f2".parse().unwrap(); + let input = SolcVersionedInput::build( BTreeMap::from([(PathBuf::from("input.sol"), Source::new(""))]), Default::default(), + SolcLanguage::Solidity, + v, ); let output = CompilerOutput::::default(); - let v: Version = "0.8.4+commit.c7e474f2".parse().unwrap(); - let raw_info = RawBuildInfo::new(&inputs[0], &output, &v).unwrap(); - let _info: BuildInfo> = - serde_json::from_str(&raw_info.build_info).unwrap(); + let raw_info = RawBuildInfo::new(&input, &output).unwrap(); + let _info: BuildInfo> = + serde_json::from_str(&serde_json::to_string(&raw_info).unwrap()).unwrap(); } } diff --git a/src/cache.rs b/src/cache.rs index baeda828..187272fe 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -2,7 +2,8 @@ use crate::{ artifacts::{Settings, Sources}, - compilers::{Compiler, CompilerSettings}, + buildinfo::{BuildContext, RawBuildInfo}, + compilers::{Compiler, CompilerSettings, Language}, config::ProjectPaths, error::{Result, SolcError}, filter::{FilteredSources, SourceCompilationKind}, @@ -37,11 +38,12 @@ pub struct CompilerCache { /// contains all directories used for the project pub paths: ProjectPaths, pub files: BTreeMap>, + pub builds: BTreeSet, } impl CompilerCache { pub fn new(format: String, paths: ProjectPaths) -> Self { - CompilerCache { format, paths, files: Default::default() } + CompilerCache { format, paths, files: Default::default(), builds: Default::default() } } } @@ -308,6 +310,25 @@ impl CompilerCache { .collect::>>()?; Ok(Artifacts(artifacts)) } + + /// Reads all cached [BuildContext]s from disk. [BuildContext] is inlined into [RawBuildInfo] + /// objects, so we are basically just partially deserializing build infos here. + pub fn read_builds( + &self, + build_info_dir: impl AsRef, + ) -> Result>> { + use rayon::prelude::*; + + let build_info_dir = build_info_dir.as_ref(); + + self.builds + .par_iter() + .map(|build_id| { + utils::read_json_file(build_info_dir.join(build_id).with_extension("json")) + .map(|b| (build_id.clone(), b)) + }) + .collect() + } } #[cfg(feature = "async")] @@ -342,6 +363,7 @@ impl Default for CompilerCache { fn default() -> Self { CompilerCache { format: ETHERS_FORMAT_VERSION.to_string(), + builds: Default::default(), files: Default::default(), paths: Default::default(), } @@ -355,6 +377,12 @@ impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache } } +#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] +pub struct CachedArtifact { + pub path: PathBuf, + pub build_id: String, +} + /// A `CacheEntry` in the cache file represents a solidity file /// /// A solidity file can contain several contracts, for every contract a separate `Artifact` is @@ -386,7 +414,7 @@ pub struct CacheEntry { /// /// This map tracks the artifacts by `name -> (Version -> PathBuf)`. /// This mimics the default artifacts directory structure - pub artifacts: BTreeMap>, + pub artifacts: BTreeMap>, /// Whether this file was compiled at least once. 
/// /// If this is true and `artifacts` are empty, it means that given version of the file does @@ -418,7 +446,7 @@ impl CacheEntry { /// # } /// ``` pub fn find_artifact_path(&self, contract_name: impl AsRef) -> Option<&Path> { - self.artifacts.get(contract_name.as_ref())?.iter().next().map(|(_, p)| p.as_path()) + self.artifacts.get(contract_name.as_ref())?.iter().next().map(|(_, p)| p.path.as_path()) } /// Reads the last modification date from the file's metadata @@ -443,9 +471,14 @@ impl CacheEntry { let mut artifacts = BTreeMap::new(); for (artifact_name, versioned_files) in self.artifacts.iter() { let mut files = Vec::with_capacity(versioned_files.len()); - for (version, file) in versioned_files { - let artifact: Artifact = utils::read_json_file(file)?; - files.push(ArtifactFile { artifact, file: file.clone(), version: version.clone() }); + for (version, cached_artifact) in versioned_files { + let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?; + files.push(ArtifactFile { + artifact, + file: cached_artifact.path.clone(), + version: version.clone(), + build_id: cached_artifact.build_id.clone(), + }); } artifacts.insert(artifact_name.clone(), files); } @@ -459,10 +492,13 @@ impl CacheEntry { { for (name, artifacts) in artifacts.into_iter() { for artifact in artifacts { - self.artifacts - .entry(name.clone()) - .or_default() - .insert(artifact.version.clone(), artifact.file.clone()); + self.artifacts.entry(name.clone()).or_default().insert( + artifact.version.clone(), + CachedArtifact { + build_id: artifact.build_id.clone(), + path: artifact.file.clone(), + }, + ); } } } @@ -473,12 +509,12 @@ impl CacheEntry { } /// Iterator that yields all artifact files and their version - pub fn artifacts_versions(&self) -> impl Iterator { + pub fn artifacts_versions(&self) -> impl Iterator { self.artifacts.values().flatten() } /// Returns the artifact file for the contract and version pair - pub fn find_artifact(&self, contract: &str, version: &Version) -> Option<&PathBuf> { + pub fn find_artifact(&self, contract: &str, version: &Version) -> Option<&CachedArtifact> { self.artifacts.get(contract).and_then(|files| files.get(version)) } @@ -486,37 +522,37 @@ impl CacheEntry { pub fn artifacts_for_version<'a>( &'a self, version: &'a Version, - ) -> impl Iterator + 'a { + ) -> impl Iterator + 'a { self.artifacts_versions().filter_map(move |(ver, file)| (ver == version).then_some(file)) } /// Iterator that yields all artifact files - pub fn artifacts(&self) -> impl Iterator { + pub fn artifacts(&self) -> impl Iterator { self.artifacts.values().flat_map(BTreeMap::values) } /// Mutable iterator over all artifact files - pub fn artifacts_mut(&mut self) -> impl Iterator { + pub fn artifacts_mut(&mut self) -> impl Iterator { self.artifacts.values_mut().flat_map(BTreeMap::values_mut) } /// Checks if all artifact files exist pub fn all_artifacts_exist(&self) -> bool { - self.artifacts().all(|p| p.exists()) + self.artifacts().all(|a| a.path.exists()) } /// Sets the artifact's paths to `base` adjoined to the artifact's `path`. 
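// `join_artifacts_files` and `strip_artifact_files_prefixes` below round-trip
// artifact paths between the relative form stored in the cache file and the
// absolute form used at runtime. A minimal sketch of that round-trip with a
// simplified stand-in for `CachedArtifact` (Unix-style paths, made-up values):
use std::path::{Path, PathBuf};

struct CachedArtifact {
    path: PathBuf,
    build_id: String,
}

fn main() {
    let base = Path::new("/project/out");
    let mut artifact = CachedArtifact {
        path: PathBuf::from("Counter.sol/Counter.json"),
        build_id: "3a5b".to_string(), // stands in for a content-hash build id
    };

    // Cache entry -> on-disk location.
    artifact.path = base.join(&artifact.path);
    assert_eq!(artifact.path, Path::new("/project/out/Counter.sol/Counter.json"));

    // On-disk location -> cache entry; paths outside `base` are left untouched.
    if let Ok(rel) = artifact.path.strip_prefix(base) {
        artifact.path = rel.to_path_buf();
    }
    assert_eq!(artifact.path, Path::new("Counter.sol/Counter.json"));
    assert_eq!(artifact.build_id, "3a5b");
}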
pub fn join_artifacts_files(&mut self, base: impl AsRef) { let base = base.as_ref(); - self.artifacts_mut().for_each(|p| *p = base.join(&*p)) + self.artifacts_mut().for_each(|a| a.path = base.join(&a.path)) } /// Removes `base` from the artifact's path pub fn strip_artifact_files_prefixes(&mut self, base: impl AsRef) { let base = base.as_ref(); - self.artifacts_mut().for_each(|p| { - if let Ok(rem) = p.strip_prefix(base) { - *p = rem.to_path_buf(); + self.artifacts_mut().for_each(|a| { + if let Ok(rem) = a.path.strip_prefix(base) { + a.path = rem.to_path_buf(); } }) } @@ -557,6 +593,9 @@ pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput, C: Compiler> { /// All already existing artifacts. pub cached_artifacts: Artifacts, + /// All already existing build infos. + pub cached_builds: BTreeMap>, + /// Relationship between all the files. pub edges: GraphEdges, @@ -677,10 +716,10 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCacheInner<'a, T, C> { return true; } - if entry.artifacts_for_version(version).any(|artifact_path| { - let missing_artifact = !self.cached_artifacts.has_artifact(artifact_path); + if entry.artifacts_for_version(version).any(|artifact| { + let missing_artifact = !self.cached_artifacts.has_artifact(&artifact.path); if missing_artifact { - trace!("missing artifact \"{}\"", artifact_path.display()); + trace!("missing artifact \"{}\"", artifact.path.display()); } missing_artifact }) { @@ -847,7 +886,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { cache.remove_missing_files(); // read all artifacts - let cached_artifacts = if project.paths.artifacts.exists() { + let mut cached_artifacts = if project.paths.artifacts.exists() { trace!("reading artifacts from cache..."); // if we failed to read the whole set of artifacts we use an empty set let artifacts = cache.read_artifacts::().unwrap_or_default(); @@ -857,9 +896,22 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { Default::default() }; + trace!("reading build infos from cache..."); + let cached_builds = cache.read_builds(&project.paths.build_infos).unwrap_or_default(); + + // Remove artifacts for which we are missing a build info. 
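// The retain cascade in the hunk below prunes bottom-up: artifacts whose build
// info is gone are dropped first, then contract entries left empty, then file
// entries left empty. The same idiom, sketched on simplified map shapes
// (`file -> contract -> build ids`):
use std::collections::{BTreeMap, BTreeSet};

fn prune(
    artifacts: &mut BTreeMap<String, BTreeMap<String, Vec<String>>>,
    known_builds: &BTreeSet<String>,
) {
    artifacts.retain(|_, contracts| {
        contracts.retain(|_, build_ids| {
            build_ids.retain(|id| known_builds.contains(id));
            // Drop the contract entry once none of its artifacts survived.
            !build_ids.is_empty()
        });
        // Drop the file entry once it has no contracts left.
        !contracts.is_empty()
    });
}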
+ cached_artifacts.0.retain(|_, artifacts| { + artifacts.retain(|_, artifacts| { + artifacts.retain(|artifact| cached_builds.contains_key(&artifact.build_id)); + !artifacts.is_empty() + }); + !artifacts.is_empty() + }); + let cache = ArtifactsCacheInner { cache, cached_artifacts, + cached_builds, edges, project, dirty_sources: Default::default(), @@ -933,8 +985,9 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { pub fn consume( self, written_artifacts: &Artifacts, + written_build_infos: &Vec>, write_to_disk: bool, - ) -> Result> { + ) -> Result<(Artifacts, BTreeMap>)> { let ArtifactsCache::Cached(cache) = self else { trace!("no cache configured, ephemeral"); return Ok(Default::default()); @@ -943,6 +996,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { let ArtifactsCacheInner { mut cache, mut cached_artifacts, + cached_builds, dirty_sources, sources_in_scope, project, @@ -983,6 +1037,10 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { } } + for build_info in written_build_infos { + cache.builds.insert(build_info.id.clone()); + } + // write to disk if write_to_disk { // make all `CacheEntry` paths relative to the project root and all artifact @@ -993,7 +1051,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { cache.write(project.cache_path())?; } - Ok(cached_artifacts) + Ok((cached_artifacts, cached_builds)) } /// Marks the cached entry as seen by the compiler, if it's cached. diff --git a/src/compile/output/contracts.rs b/src/compile/output/contracts.rs index a0ba08c8..626fd038 100644 --- a/src/compile/output/contracts.rs +++ b/src/compile/output/contracts.rs @@ -284,6 +284,7 @@ impl IntoIterator for VersionedContracts { pub struct VersionedContract { pub contract: Contract, pub version: Version, + pub build_id: String, } /// A mapping of `ArtifactId` and their `CompactContractBytecode` diff --git a/src/compile/output/mod.rs b/src/compile/output/mod.rs index 864f29a4..1c7b6849 100644 --- a/src/compile/output/mod.rs +++ b/src/compile/output/mod.rs @@ -5,15 +5,16 @@ use crate::{ contract::{CompactContractBytecode, CompactContractRef, Contract}, Severity, }, - buildinfo::RawBuildInfo, - compilers::{multi::MultiCompilerError, CompilationError, CompilerOutput}, + buildinfo::{BuildContext, RawBuildInfo}, + compilers::{multi::MultiCompiler, CompilationError, Compiler, CompilerOutput}, + error::SolcError, info::ContractInfoRef, sources::{VersionedSourceFile, VersionedSourceFiles}, Artifact, ArtifactId, ArtifactOutput, Artifacts, ConfigurableArtifacts, SolcIoError, }; use contracts::{VersionedContract, VersionedContracts}; use semver::Version; -use serde::{Deserialize, Serialize}; +use serde::Serialize; use std::{ borrow::Cow, collections::BTreeMap, @@ -29,9 +30,12 @@ pub mod sources; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. 
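// `ProjectCompileOutput` below gains two type parameters with defaults, so
// existing code that names the type without arguments keeps compiling. A tiny
// illustration of how default type parameters resolve (all names here are
// illustrative, not the real definitions):
struct DefaultCompiler;
struct DefaultArtifacts;

struct Output<C = DefaultCompiler, T = DefaultArtifacts> {
    compiler: C,
    artifacts: T,
}

// Naming `Output` bare is the same type as spelling the defaults out:
type Plain = Output; // == Output<DefaultCompiler, DefaultArtifacts>
// Overriding only the first parameter keeps the second default:
type Custom<C> = Output<C>;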
#[derive(Debug, Clone, PartialEq, Default)] -pub struct ProjectCompileOutput { +pub struct ProjectCompileOutput< + C: Compiler = MultiCompiler, + T: ArtifactOutput = ConfigurableArtifacts, +> { /// contains the aggregated `CompilerOutput` - pub(crate) compiler_output: AggregatedCompilerOutput, + pub(crate) compiler_output: AggregatedCompilerOutput, /// all artifact files from `output` that were freshly compiled and written pub(crate) compiled_artifacts: Artifacts, /// All artifacts that were read from cache @@ -42,9 +46,11 @@ pub struct ProjectCompileOutput, /// set minimum level of severity that is treated as an error pub(crate) compiler_severity_filter: Severity, + /// all build infos that were just compiled + pub(crate) builds: BTreeMap>, } -impl ProjectCompileOutput { +impl ProjectCompileOutput { /// Converts all `\\` separators in _all_ paths to `/` pub fn slash_paths(&mut self) { self.compiler_output.slash_paths(); @@ -227,17 +233,17 @@ impl ProjectCompileOutput { /// project.compile()?.into_output().contracts_into_iter().collect(); /// # Ok::<_, Box>(()) /// ``` - pub fn output(&self) -> &AggregatedCompilerOutput { + pub fn output(&self) -> &AggregatedCompilerOutput { &self.compiler_output } /// Returns a mutable reference to the (merged) solc compiler output. - pub fn output_mut(&mut self) -> &mut AggregatedCompilerOutput { + pub fn output_mut(&mut self) -> &mut AggregatedCompilerOutput { &mut self.compiler_output } /// Consumes the output and returns the (merged) solc compiler output. - pub fn into_output(self) -> AggregatedCompilerOutput { + pub fn into_output(self) -> AggregatedCompilerOutput { self.compiler_output } @@ -438,9 +444,13 @@ impl ProjectCompileOutput { self.into_artifacts() .map(|(artifact_id, artifact)| (artifact_id, artifact.into_contract_bytecode())) } + + pub fn builds(&self) -> impl Iterator)> { + self.builds.iter() + } } -impl ProjectCompileOutput { +impl ProjectCompileOutput { /// Returns whether any errors were emitted by the compiler. pub fn has_compiler_errors(&self) -> bool { self.compiler_output.has_error( @@ -470,7 +480,7 @@ impl ProjectCompileOutput { } } -impl fmt::Display for ProjectCompileOutput { +impl fmt::Display for ProjectCompileOutput { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { if self.compiler_output.is_unchanged() { f.write_str("Nothing to compile") @@ -489,19 +499,19 @@ impl fmt::Display for ProjectCompileOutp /// The aggregated output of (multiple) compile jobs /// /// This is effectively a solc version aware `CompilerOutput` -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -pub struct AggregatedCompilerOutput { +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AggregatedCompilerOutput { /// all errors from all `CompilerOutput` - pub errors: Vec, + pub errors: Vec, /// All source files combined with the solc version used to compile them pub sources: VersionedSourceFiles, /// All compiled contracts combined with the solc version used to compile them pub contracts: VersionedContracts, // All the `BuildInfo`s of solc invocations. 
- pub build_infos: BTreeMap, + pub build_infos: Vec>, } -impl Default for AggregatedCompilerOutput { +impl Default for AggregatedCompilerOutput { fn default() -> Self { Self { errors: Vec::new(), @@ -552,7 +562,7 @@ impl<'a> From<&'a [u64]> for ErrorFilter<'a> { } } -impl AggregatedCompilerOutput { +impl AggregatedCompilerOutput { /// Converts all `\\` separators in _all_ paths to `/` pub fn slash_paths(&mut self) { self.sources.slash_paths(); @@ -564,7 +574,7 @@ impl AggregatedCompilerOutput { ignored_error_codes: &'a [u64], ignored_file_paths: &'a [PathBuf], compiler_severity_filter: Severity, - ) -> OutputDiagnostics<'a, E> { + ) -> OutputDiagnostics<'a, C> { OutputDiagnostics { compiler_output: self, ignored_error_codes, @@ -581,30 +591,37 @@ impl AggregatedCompilerOutput { self.contracts.is_empty() && self.errors.is_empty() } - pub fn extend_all(&mut self, out: I) - where - I: IntoIterator)>, - { - for (v, o) in out { - self.extend(v, o) - } - } - /// adds a new `CompilerOutput` to the aggregated output - pub fn extend(&mut self, version: Version, output: CompilerOutput) { + pub fn extend( + &mut self, + version: Version, + build_info: RawBuildInfo, + output: CompilerOutput, + ) { + let build_id = build_info.id.clone(); + self.build_infos.push(build_info); + let CompilerOutput { errors, sources, contracts } = output; self.errors.extend(errors); for (path, source_file) in sources { let sources = self.sources.as_mut().entry(path).or_default(); - sources.push(VersionedSourceFile { source_file, version: version.clone() }); + sources.push(VersionedSourceFile { + source_file, + version: version.clone(), + build_id: build_id.clone(), + }); } for (file_name, new_contracts) in contracts { let contracts = self.contracts.as_mut().entry(file_name).or_default(); for (contract_name, contract) in new_contracts { let versioned = contracts.entry(contract_name).or_default(); - versioned.push(VersionedContract { contract, version: version.clone() }); + versioned.push(VersionedContract { + contract, + version: version.clone(), + build_id: build_id.clone(), + }); } } } @@ -615,18 +632,18 @@ impl AggregatedCompilerOutput { /// /// The created files have the md5 hash `{_format,solcVersion,solcLongVersion,input}` as their /// file name - pub fn write_build_infos(&self, build_info_dir: impl AsRef) -> Result<(), SolcIoError> { + pub fn write_build_infos(&self, build_info_dir: impl AsRef) -> Result<(), SolcError> { if self.build_infos.is_empty() { return Ok(()); } let build_info_dir = build_info_dir.as_ref(); std::fs::create_dir_all(build_info_dir) .map_err(|err| SolcIoError::new(err, build_info_dir))?; - for (version, build_info) in &self.build_infos { - trace!("writing build info file for solc {}", version); + for build_info in &self.build_infos { + trace!("writing build info file {}", build_info.id); let file_name = format!("{}.json", build_info.id); let file = build_info_dir.join(file_name); - std::fs::write(&file, &build_info.build_info) + std::fs::write(&file, &serde_json::to_string(build_info)?) 
.map_err(|err| SolcIoError::new(err, file))?; } Ok(()) @@ -829,7 +846,7 @@ impl AggregatedCompilerOutput { } } -impl AggregatedCompilerOutput { +impl AggregatedCompilerOutput { /// Whether the output contains a compiler error /// /// This adheres to the given `compiler_severity_filter` and also considers [CompilationError] @@ -885,9 +902,9 @@ impl AggregatedCompilerOutput { /// Helper type to implement display for solc errors #[derive(Clone, Debug)] -pub struct OutputDiagnostics<'a, E> { +pub struct OutputDiagnostics<'a, C: Compiler> { /// output of the compiled project - compiler_output: &'a AggregatedCompilerOutput, + compiler_output: &'a AggregatedCompilerOutput, /// the error codes to ignore ignored_error_codes: &'a [u64], /// the file paths to ignore @@ -896,7 +913,7 @@ pub struct OutputDiagnostics<'a, E> { compiler_severity_filter: Severity, } -impl<'a, E: CompilationError> OutputDiagnostics<'a, E> { +impl<'a, C: Compiler> OutputDiagnostics<'a, C> { /// Returns true if there is at least one error of high severity pub fn has_error(&self) -> bool { self.compiler_output.has_error( @@ -924,7 +941,7 @@ impl<'a, E: CompilationError> OutputDiagnostics<'a, E> { } } -impl<'a, E: CompilationError> fmt::Display for OutputDiagnostics<'a, E> { +impl<'a, C: Compiler> fmt::Display for OutputDiagnostics<'a, C> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("Compiler run ")?; if self.has_error() { diff --git a/src/compile/output/sources.rs b/src/compile/output/sources.rs index 60275908..4191e1ea 100644 --- a/src/compile/output/sources.rs +++ b/src/compile/output/sources.rs @@ -240,4 +240,5 @@ impl IntoIterator for VersionedSourceFiles { pub struct VersionedSourceFile { pub source_file: SourceFile, pub version: Version, + pub build_id: String, } diff --git a/src/compile/project.rs b/src/compile/project.rs index c9b2e73e..5649a08f 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -178,7 +178,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ProjectCompiler<'a, T, C> { /// let output = project.compile()?; /// # Ok::<(), Box>(()) /// ``` - pub fn compile(self) -> Result::CompilationError, T>> { + pub fn compile(self) -> Result> { let slash_paths = self.project.slash_paths; // drive the compiler statemachine to completion @@ -245,7 +245,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> PreprocessedState<'a, T, C> { /// Represents the state after `solc` was successfully invoked #[derive(Debug)] struct CompiledState<'a, T: ArtifactOutput, C: Compiler> { - output: AggregatedCompilerOutput, + output: AggregatedCompilerOutput, cache: ArtifactsCache<'a, T, C>, } @@ -308,7 +308,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> CompiledState<'a, T, C> { /// Represents the state after all artifacts were written to disk #[derive(Debug)] struct ArtifactsState<'a, T: ArtifactOutput, C: Compiler> { - output: AggregatedCompilerOutput, + output: AggregatedCompilerOutput, cache: ArtifactsCache<'a, T, C>, compiled_artifacts: Artifacts, } @@ -317,7 +317,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { /// Writes the cache file /// /// this concludes the [`Project::compile()`] statemachine - fn write_cache(self) -> Result> { + fn write_cache(self) -> Result> { let ArtifactsState { output, cache, compiled_artifacts } = self; let project = cache.project(); let ignored_error_codes = project.ignored_error_codes.clone(); @@ -328,10 +328,18 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { let skip_write_to_disk = project.no_artifacts || has_error; 
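// Just below, freshly produced build contexts are chained with the cached ones
// and collected into a single map keyed by build id. A sketch of the merge
// semantics this relies on: collecting into a `BTreeMap` inserts entries left
// to right, so on a duplicate key the later entry wins; that is harmless here
// because build ids are content hashes, and equal ids imply equal contexts.
use std::collections::BTreeMap;

fn main() {
    let fresh = vec![("id1".to_string(), "fresh"), ("id2".to_string(), "fresh")];
    let cached = vec![("id2".to_string(), "cached"), ("id3".to_string(), "cached")];

    let merged: BTreeMap<String, &str> = fresh.into_iter().chain(cached).collect();
    assert_eq!(merged.len(), 3);
    assert_eq!(merged["id2"], "cached"); // the later (cached) entry overwrote
}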
trace!(has_error, project.no_artifacts, skip_write_to_disk, cache_path=?project.cache_path(),"prepare writing cache file"); - let cached_artifacts = cache.consume(&compiled_artifacts, !skip_write_to_disk)?; + let (cached_artifacts, cached_builds) = + cache.consume(&compiled_artifacts, &output.build_infos, !skip_write_to_disk)?; project.artifacts_handler().handle_cached_artifacts(&cached_artifacts)?; + let builds = output + .build_infos + .iter() + .map(|build_info| (build_info.id.clone(), build_info.build_context.clone())) + .chain(cached_builds) + .collect(); + Ok(ProjectCompileOutput { compiler_output: output, compiled_artifacts, @@ -339,6 +347,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { ignored_error_codes, ignored_file_paths, compiler_severity_filter, + builds, }) } } @@ -443,7 +452,7 @@ impl FilteredCompilerSources { fn compile, T: ArtifactOutput>( self, cache: &mut ArtifactsCache<'_, T, C>, - ) -> Result> { + ) -> Result> { let project = cache.project(); let graph = cache.graph(); @@ -509,21 +518,18 @@ impl FilteredCompilerSources { cache.compiler_seen(file); } + let mut build_info = RawBuildInfo::new(&input, &output)?; + output.retain_files( actually_dirty .iter() .map(|f| f.strip_prefix(project.paths.root.as_path()).unwrap_or(f)), ); - // if configured also create the build info - if project.build_info { - let build_info = RawBuildInfo::new(&input, &output, version)?; - aggregated.build_infos.insert(version.clone(), build_info); - } - + build_info.join_all(project.paths.root.as_path()); output.join_all(project.paths.root.as_path()); - aggregated.extend(version.clone(), output); + aggregated.extend(version.clone(), build_info, output); } Ok(aggregated) diff --git a/src/compilers/mod.rs b/src/compilers/mod.rs index b9081457..1b195a61 100644 --- a/src/compilers/mod.rs +++ b/src/compilers/mod.rs @@ -1,6 +1,7 @@ use crate::{ artifacts::{ - output_selection::OutputSelection, Contract, FileToContractsMap, SourceFile, Sources, + output_selection::OutputSelection, Contract, FileToContractsMap, Source, SourceFile, + Sources, }, error::Result, remappings::Remapping, @@ -93,6 +94,8 @@ pub trait CompilerInput: Serialize + Send + Sync + Sized + Debug { /// Returns compiler version for which this input is intended. fn version(&self) -> &Version; + fn sources(&self) -> impl Iterator; + /// Returns compiler name used by reporters to display output during compilation. fn compiler_name(&self) -> Cow<'static, str>; @@ -156,7 +159,9 @@ pub trait ParsedSource: Debug + Sized + Send + Clone { } /// Error returned by compiler. Might also represent a warning or informational message. -pub trait CompilationError: Serialize + Send + Sync + Display + Debug + Clone + 'static { +pub trait CompilationError: + Serialize + Send + Sync + Display + Debug + Clone + PartialEq + Eq + 'static +{ fn is_warning(&self) -> bool; fn is_error(&self) -> bool; fn source_location(&self) -> Option; @@ -227,7 +232,9 @@ impl Default for CompilerOutput { } /// Keeps a set of languages recognized by the compiler. -pub trait Language: Hash + Eq + Clone + Debug + Display + 'static { +pub trait Language: + Hash + Eq + Copy + Clone + Debug + Display + Send + Sync + Serialize + DeserializeOwned + 'static +{ /// Extensions of source files recognized by the language set. 
const FILE_EXTENSIONS: &'static [&'static str]; } diff --git a/src/compilers/multi.rs b/src/compilers/multi.rs index 9cc259de..18f30b45 100644 --- a/src/compilers/multi.rs +++ b/src/compilers/multi.rs @@ -8,7 +8,9 @@ use super::{ Language, ParsedSource, }; use crate::{ - artifacts::{output_selection::OutputSelection, Error, Settings as SolcSettings, Sources}, + artifacts::{ + output_selection::OutputSelection, Error, Settings as SolcSettings, Source, Sources, + }, error::{Result, SolcError}, remappings::Remapping, resolver::parse::SolData, @@ -47,7 +49,8 @@ impl MultiCompiler { } /// Languages supported by the [MultiCompiler]. -#[derive(Debug, Clone, Hash, Eq, PartialEq)] +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] pub enum MultiCompilerLanguage { Solc(SolcLanguage), Vyper(VyperLanguage), @@ -102,7 +105,7 @@ impl MultiCompilerParsedSource { } /// Compilation error which may occur when compiling Solidity or Vyper sources. -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, PartialEq, Eq)] #[serde(untagged)] pub enum MultiCompilerError { Solc(Error), @@ -231,6 +234,15 @@ impl CompilerInput for MultiCompilerInput { Self::Vyper(input) => Self::Vyper(input.with_remappings(remappings)), } } + + fn sources(&self) -> impl Iterator { + let ret: Box> = match self { + Self::Solc(input) => Box::new(input.sources()), + Self::Vyper(input) => Box::new(input.sources()), + }; + + ret + } } impl Compiler for MultiCompiler { diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index c40b6179..766e5774 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -6,7 +6,8 @@ use super::{ }; use crate::{ artifacts::{ - output_selection::OutputSelection, Error, Settings as SolcSettings, SolcInput, Sources, + output_selection::OutputSelection, Error, Settings as SolcSettings, SolcInput, Source, + Sources, }, error::Result, remappings::Remapping, @@ -33,7 +34,7 @@ pub enum SolcCompiler { } /// Languages supported by the Solc compiler. 
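// `MultiCompilerLanguage` above is declared `#[serde(untagged)]`, so a language
// value round-trips as its bare representation and deserialization simply tries
// each variant in order. A self-contained sketch of that dispatch, using
// stand-in enums rather than the real definitions (requires serde/serde_json):
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum SolcLang {
    Solidity,
    Yul,
}

#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
enum VyperLang {
    Vyper,
}

#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
enum MultiLang {
    Solc(SolcLang),
    Vyper(VyperLang),
}

fn main() {
    // "Solidity" matches the first variant that accepts it:
    let solidity: MultiLang = serde_json::from_str("\"Solidity\"").unwrap();
    assert_eq!(solidity, MultiLang::Solc(SolcLang::Solidity));

    // "vyper" fails the Solc variant and falls through to Vyper:
    let vyper: MultiLang = serde_json::from_str("\"vyper\"").unwrap();
    assert_eq!(vyper, MultiLang::Vyper(VyperLang::Vyper));

    // Serialization emits the inner representation with no enum tag:
    assert_eq!(serde_json::to_string(&vyper).unwrap(), "\"vyper\"");
}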
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[non_exhaustive] pub enum SolcLanguage { Solidity, @@ -116,17 +117,14 @@ impl Compiler for SolcCompiler { } } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] pub struct SolcVersionedInput { - #[serde(skip)] pub version: Version, #[serde(flatten)] pub input: SolcInput, - #[serde(skip)] pub allow_paths: BTreeSet, - #[serde(skip)] pub base_path: Option, - #[serde(skip)] pub include_paths: BTreeSet, } @@ -163,6 +161,10 @@ impl CompilerInput for SolcVersionedInput { &self.version } + fn sources(&self) -> impl Iterator { + self.input.sources.iter().map(|(path, source)| (path.as_path(), source)) + } + fn with_remappings(mut self, remappings: Vec) -> Self { self.input = self.input.with_remappings(remappings); diff --git a/src/compilers/vyper/error.rs b/src/compilers/vyper/error.rs index 17f7f00f..8afd87ec 100644 --- a/src/compilers/vyper/error.rs +++ b/src/compilers/vyper/error.rs @@ -7,7 +7,7 @@ use crate::{ }; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] pub struct VyperSourceLocation { file: PathBuf, #[serde(rename = "lineno")] @@ -16,7 +16,7 @@ pub struct VyperSourceLocation { offset: Option, } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct VyperCompilationError { pub message: String, diff --git a/src/compilers/vyper/input.rs b/src/compilers/vyper/input.rs index 0e9a6566..03e7ff02 100644 --- a/src/compilers/vyper/input.rs +++ b/src/compilers/vyper/input.rs @@ -1,7 +1,10 @@ use std::{borrow::Cow, path::Path}; use super::{settings::VyperSettings, VyperLanguage, VYPER_INTERFACE_EXTENSION}; -use crate::{artifacts::Sources, compilers::CompilerInput}; +use crate::{ + artifacts::{Source, Sources}, + compilers::CompilerInput, +}; use semver::Version; use serde::{Deserialize, Serialize}; @@ -83,4 +86,12 @@ impl CompilerInput for VyperVersionedInput { fn version(&self) -> &Version { &self.version } + + fn sources(&self) -> impl Iterator { + self.input + .sources + .iter() + .chain(self.input.interfaces.iter()) + .map(|(path, source)| (path.as_path(), source)) + } } diff --git a/src/compilers/vyper/mod.rs b/src/compilers/vyper/mod.rs index 6db7c7f0..75524063 100644 --- a/src/compilers/vyper/mod.rs +++ b/src/compilers/vyper/mod.rs @@ -32,10 +32,33 @@ pub const VYPER_EXTENSIONS: &[&str] = &["vy", "vyi"]; pub const VYPER_INTERFACE_EXTENSION: &str = "vyi"; /// Vyper language, used as [Compiler::Language] for the Vyper compiler. 
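// The hand-written serde impls for `VyperLanguage` just below pin it to the
// string "vyper". A derived impl on a unit struct would serialize to JSON
// `null` instead, which an untagged `MultiCompilerLanguage` could not
// distinguish reliably. A quick demonstration of the derived behavior:
#[derive(serde::Serialize)]
struct Derived;

fn main() {
    // A unit struct with a derived impl serializes as `null`, not a string.
    assert_eq!(serde_json::to_string(&Derived).unwrap(), "null");
}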
-#[derive(Debug, Clone, Hash, Eq, PartialEq)] +#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)] #[non_exhaustive] pub struct VyperLanguage; +impl serde::Serialize for VyperLanguage { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + serializer.serialize_str("vyper") + } +} + +impl<'de> serde::Deserialize<'de> for VyperLanguage { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + let res = String::deserialize(deserializer)?; + if res != "vyper" { + Err(serde::de::Error::custom(format!("Invalid Vyper language: {}", res))) + } else { + Ok(VyperLanguage) + } + } +} + impl Language for VyperLanguage { const FILE_EXTENSIONS: &'static [&'static str] = VYPER_EXTENSIONS; } diff --git a/src/flatten.rs b/src/flatten.rs index 317cc142..d6e3fb35 100644 --- a/src/flatten.rs +++ b/src/flatten.rs @@ -178,7 +178,7 @@ impl Flattener { /// into this function. pub fn new( project: &Project, - output: &ProjectCompileOutput, + output: &ProjectCompileOutput, target: &Path, ) -> Result where diff --git a/src/lib.rs b/src/lib.rs index 5d0257af..9461b6f6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -280,7 +280,7 @@ impl Project { println!("cargo:rerun-if-changed={}", self.paths.sources.display()) } - pub fn compile(&self) -> Result> { + pub fn compile(&self) -> Result> { project::ProjectCompiler::new(self)?.compile() } @@ -295,10 +295,7 @@ impl Project { /// let output = project.compile_file("example/Greeter.sol")?; /// # Ok::<(), Box>(()) /// ``` - pub fn compile_file( - &self, - file: impl Into, - ) -> Result> { + pub fn compile_file(&self, file: impl Into) -> Result> { let file = file.into(); let source = Source::read(&file)?; project::ProjectCompiler::with_sources(self, Sources::from([(file, source)]))?.compile() @@ -316,10 +313,7 @@ impl Project { /// let output = project.compile_files(["examples/Foo.sol", "examples/Bar.sol"])?; /// # Ok::<(), Box>(()) /// ``` - pub fn compile_files( - &self, - files: I, - ) -> Result> + pub fn compile_files(&self, files: I) -> Result> where I: IntoIterator, P: Into, diff --git a/src/project_util/mod.rs b/src/project_util/mod.rs index b1252581..1bb75970 100644 --- a/src/project_util/mod.rs +++ b/src/project_util/mod.rs @@ -346,7 +346,7 @@ contract {} {{}} self.project().paths.root.as_path() } - pub fn compile(&self) -> Result> { + pub fn compile(&self) -> Result> { self.project().compile() } diff --git a/tests/project.rs b/tests/project.rs index ae02d2c6..43b97067 100644 --- a/tests/project.rs +++ b/tests/project.rs @@ -11,8 +11,7 @@ use foundry_compilers::{ cache::{CompilerCache, SOLIDITY_FILES_CACHE_FILENAME}, compilers::{ multi::{ - MultiCompiler, MultiCompilerError, MultiCompilerLanguage, MultiCompilerParsedSource, - MultiCompilerSettings, + MultiCompiler, MultiCompilerLanguage, MultiCompilerParsedSource, MultiCompilerSettings, }, solc::{SolcCompiler, SolcLanguage}, vyper::{Vyper, VyperLanguage, VyperSettings}, @@ -327,7 +326,7 @@ fn can_compile_dapp_detect_changes_in_sources() { let cache = CompilerCache::::read(&project.paths().cache).unwrap(); assert_eq!(cache.files.len(), 2); - let mut artifacts = compiled.into_artifacts().collect::>(); + let artifacts = compiled.into_artifacts().collect::>(); // overwrite import let _ = project @@ -356,8 +355,12 @@ fn can_compile_dapp_detect_changes_in_sources() { // and all recompiled artifacts are different for (p, artifact) in compiled.into_artifacts() { - let other = artifacts.remove(&p).unwrap(); - assert_ne!(artifact, other); + 
let other = artifacts + .iter() + .find(|(id, _)| id.name == p.name && id.version == p.version && id.source == p.source) + .unwrap() + .1; + assert_ne!(artifact, *other); } } @@ -2787,7 +2790,7 @@ fn compile_project_with_options( severity_filter: Option, ignore_paths: Option>, ignore_error_code: Option, -) -> ProjectCompileOutput { +) -> ProjectCompileOutput { let mut builder = Project::builder().no_artifacts().paths(gen_test_data_licensing_warning()).ephemeral(); From 88e3dd935b4d3e5dfef439d3f7209fb64e3356aa Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 10 Jun 2024 04:51:04 +0300 Subject: [PATCH 2/9] full_build_info --- src/artifacts/mod.rs | 2 +- src/buildinfo.rs | 19 +++++++++++-------- src/compile/project.rs | 2 +- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/artifacts/mod.rs b/src/artifacts/mod.rs index 60ea635e..faec1435 100644 --- a/src/artifacts/mod.rs +++ b/src/artifacts/mod.rs @@ -2028,7 +2028,7 @@ mod tests { SolcLanguage::Solidity, v.clone(), ); - let build_info = RawBuildInfo::new(&input, &out_converted).unwrap(); + let build_info = RawBuildInfo::new(&input, &out_converted, true).unwrap(); let mut aggregated = AggregatedCompilerOutput::::default(); aggregated.extend(v, build_info, out_converted); assert!(!aggregated.is_unchanged()); diff --git a/src/buildinfo.rs b/src/buildinfo.rs index 37ed5d9d..88573f08 100644 --- a/src/buildinfo.rs +++ b/src/buildinfo.rs @@ -88,26 +88,21 @@ impl RawBuildInfo { pub fn new, E: CompilationError>( input: &I, output: &CompilerOutput, + full_build_info: bool, ) -> Result> { let version = input.version().clone(); let build_context = BuildContext::new(input, output)?; let mut hasher = md5::Md5::new(); - let mut build_info = BTreeMap::new(); - build_info.insert("_format".to_string(), serde_json::to_value(ÐERS_FORMAT_VERSION)?); hasher.update(ETHERS_FORMAT_VERSION); let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch); - build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?); hasher.update(&solc_short); - - build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?); hasher.update(&version.to_string()); let input = serde_json::to_value(input)?; hasher.update(&serde_json::to_string(&input)?); - build_info.insert("input".to_string(), input); // create the hash for `{_format,solcVersion,solcLongVersion,input}` // N.B. 
this is not exactly the same as hashing the json representation of these values but @@ -115,7 +110,15 @@ impl RawBuildInfo { let result = hasher.finalize(); let id = hex::encode(result); - build_info.insert("output".to_string(), serde_json::to_value(&output)?); + let mut build_info = BTreeMap::new(); + + if full_build_info { + build_info.insert("_format".to_string(), serde_json::to_value(ÐERS_FORMAT_VERSION)?); + build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?); + build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?); + build_info.insert("input".to_string(), input); + build_info.insert("output".to_string(), serde_json::to_value(&output)?); + } Ok(RawBuildInfo { id, build_info, build_context }) } @@ -165,7 +168,7 @@ mod tests { v, ); let output = CompilerOutput::::default(); - let raw_info = RawBuildInfo::new(&input, &output).unwrap(); + let raw_info = RawBuildInfo::new(&input, &output, true).unwrap(); let _info: BuildInfo> = serde_json::from_str(&serde_json::to_string(&raw_info).unwrap()).unwrap(); } diff --git a/src/compile/project.rs b/src/compile/project.rs index 5649a08f..09132b15 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -518,7 +518,7 @@ impl FilteredCompilerSources { cache.compiler_seen(file); } - let mut build_info = RawBuildInfo::new(&input, &output)?; + let mut build_info = RawBuildInfo::new(&input, &output, project.build_info)?; output.retain_files( actually_dirty From 84e751735236ced8911b9f1569521dc2e60fda86 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 10 Jun 2024 04:55:04 +0300 Subject: [PATCH 3/9] docs --- src/buildinfo.rs | 2 ++ src/cache.rs | 3 +++ 2 files changed, 5 insertions(+) diff --git a/src/buildinfo.rs b/src/buildinfo.rs index 88573f08..c79283ba 100644 --- a/src/buildinfo.rs +++ b/src/buildinfo.rs @@ -41,7 +41,9 @@ impl BuildInfo { /// Additional context we cache for each compiler run. #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] pub struct BuildContext { + /// Mapping from internal compiler source id to path of the source file. pub source_id_to_path: HashMap, + /// Language of the compiler. pub language: L, } diff --git a/src/cache.rs b/src/cache.rs index 187272fe..947da5ac 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -377,9 +377,12 @@ impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache } } +/// Cached artifact data. #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct CachedArtifact { + /// Path to the artifact file. pub path: PathBuf, + /// Build id which produced the given artifact. 
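// A sketch of how one such entry serializes inside the cache file: the artifact
// path stored alongside the id of the build that produced it, keyed by contract
// name and compiler version (the concrete values here are made up):
//
//     "Counter": {
//         "0.8.12": { "path": "Counter.sol/Counter.json", "build_id": "ab93..." }
//     }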
pub build_id: String, } From 406f9af35be02e5e3e527beea64aa1420eb6ea78 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 10 Jun 2024 05:30:15 +0300 Subject: [PATCH 4/9] clippy --- src/buildinfo.rs | 23 +++-------------------- src/cache.rs | 19 ++++++++++--------- src/compile/output/mod.rs | 4 +++- src/compile/project.rs | 11 +++++------ src/compilers/solc.rs | 2 +- 5 files changed, 22 insertions(+), 37 deletions(-) diff --git a/src/buildinfo.rs b/src/buildinfo.rs index c79283ba..5d8fdcdc 100644 --- a/src/buildinfo.rs +++ b/src/buildinfo.rs @@ -10,10 +10,8 @@ use md5::Digest; use semver::Version; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ - cell::RefCell, collections::{BTreeMap, HashMap, HashSet}, path::{Path, PathBuf}, - rc::Rc, }; pub const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-build-info-1"; @@ -101,7 +99,7 @@ impl RawBuildInfo { let solc_short = format!("{}.{}.{}", version.major, version.minor, version.patch); hasher.update(&solc_short); - hasher.update(&version.to_string()); + hasher.update(version.to_string()); let input = serde_json::to_value(input)?; hasher.update(&serde_json::to_string(&input)?); @@ -115,11 +113,11 @@ impl RawBuildInfo { let mut build_info = BTreeMap::new(); if full_build_info { - build_info.insert("_format".to_string(), serde_json::to_value(ÐERS_FORMAT_VERSION)?); + build_info.insert("_format".to_string(), serde_json::to_value(ETHERS_FORMAT_VERSION)?); build_info.insert("solcVersion".to_string(), serde_json::to_value(&solc_short)?); build_info.insert("solcLongVersion".to_string(), serde_json::to_value(&version)?); build_info.insert("input".to_string(), input); - build_info.insert("output".to_string(), serde_json::to_value(&output)?); + build_info.insert("output".to_string(), serde_json::to_value(output)?); } Ok(RawBuildInfo { id, build_info, build_context }) @@ -132,21 +130,6 @@ impl RawBuildInfo { } } -#[derive(Clone)] -struct BuildInfoWriter { - buf: Rc>>, -} - -impl std::io::Write for BuildInfoWriter { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - self.buf.borrow_mut().write(buf) - } - - fn flush(&mut self) -> std::io::Result<()> { - self.buf.borrow_mut().flush() - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/src/cache.rs b/src/cache.rs index 947da5ac..34891d59 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -2,11 +2,12 @@ use crate::{ artifacts::{Settings, Sources}, - buildinfo::{BuildContext, RawBuildInfo}, + buildinfo::RawBuildInfo, compilers::{Compiler, CompilerSettings, Language}, config::ProjectPaths, error::{Result, SolcError}, filter::{FilteredSources, SourceCompilationKind}, + output::Builds, resolver::GraphEdges, utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project, ProjectPathsConfig, Source, @@ -313,10 +314,7 @@ impl CompilerCache { /// Reads all cached [BuildContext]s from disk. [BuildContext] is inlined into [RawBuildInfo] /// objects, so we are basically just partially deserializing build infos here. - pub fn read_builds( - &self, - build_info_dir: impl AsRef, - ) -> Result>> { + pub fn read_builds(&self, build_info_dir: impl AsRef) -> Result> { use rayon::prelude::*; let build_info_dir = build_info_dir.as_ref(); @@ -597,7 +595,7 @@ pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput, C: Compiler> { pub cached_artifacts: Artifacts, /// All already existing build infos. - pub cached_builds: BTreeMap>, + pub cached_builds: Builds, /// Relationship between all the files. 
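// `read_builds` above deserializes every cached build info in parallel and
// collects into a single `Result`, short-circuiting on the first failure. A
// standalone sketch of that rayon idiom, with a stand-in for the JSON read
// (requires the `rayon` crate):
use rayon::prelude::*;
use std::collections::BTreeMap;

fn read_all(ids: &[String]) -> Result<BTreeMap<String, usize>, String> {
    ids.par_iter()
        .map(|id| {
            // Stands in for reading and deserializing `{id}.json` from disk.
            if id.is_empty() {
                Err("empty build id".to_string())
            } else {
                Ok((id.clone(), id.len()))
            }
        })
        .collect() // Result<BTreeMap<_, _>, _>: Err as soon as any item fails
}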
pub edges: GraphEdges, @@ -985,12 +983,15 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsCache<'a, T, C> { /// compiled and written to disk `written_artifacts`. /// /// Returns all the _cached_ artifacts. - pub fn consume( + pub fn consume( self, - written_artifacts: &Artifacts, + written_artifacts: &Artifacts, written_build_infos: &Vec>, write_to_disk: bool, - ) -> Result<(Artifacts, BTreeMap>)> { + ) -> Result<(Artifacts, Builds)> + where + T: ArtifactOutput, + { let ArtifactsCache::Cached(cache) = self else { trace!("no cache configured, ephemeral"); return Ok(Default::default()); diff --git a/src/compile/output/mod.rs b/src/compile/output/mod.rs index 1c7b6849..438ceda5 100644 --- a/src/compile/output/mod.rs +++ b/src/compile/output/mod.rs @@ -27,6 +27,8 @@ pub mod contracts; pub mod info; pub mod sources; +pub type Builds = BTreeMap>; + /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. #[derive(Debug, Clone, PartialEq, Default)] @@ -47,7 +49,7 @@ pub struct ProjectCompileOutput< /// set minimum level of severity that is treated as an error pub(crate) compiler_severity_filter: Severity, /// all build infos that were just compiled - pub(crate) builds: BTreeMap>, + pub(crate) builds: Builds, } impl ProjectCompileOutput { diff --git a/src/compile/project.rs b/src/compile/project.rs index 09132b15..22f74957 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -489,12 +489,11 @@ impl FilteredCompilerSources { trace!("calling {} with {} sources {:?}", version, sources.len(), sources.keys()); - let mut input = - C::Input::build(sources, opt_settings, language.clone(), version.clone()) - .with_base_path(project.paths.root.clone()) - .with_allow_paths(project.paths.allowed_paths.clone()) - .with_include_paths(include_paths.clone()) - .with_remappings(project.paths.remappings.clone()); + let mut input = C::Input::build(sources, opt_settings, language, version.clone()) + .with_base_path(project.paths.root.clone()) + .with_allow_paths(project.paths.allowed_paths.clone()) + .with_include_paths(include_paths.clone()) + .with_remappings(project.paths.remappings.clone()); input.strip_prefix(project.paths.root.as_path()); diff --git a/src/compilers/solc.rs b/src/compilers/solc.rs index 766e5774..da969591 100644 --- a/src/compilers/solc.rs +++ b/src/compilers/solc.rs @@ -154,7 +154,7 @@ impl CompilerInput for SolcVersionedInput { } fn language(&self) -> Self::Language { - self.input.language.clone() + self.input.language } fn version(&self) -> &Version { From 3a90945da0063d4e3b2e7ad1d23a6c679f19b456 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 10 Jun 2024 05:30:36 +0300 Subject: [PATCH 5/9] clippy --- src/compile/output/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/compile/output/mod.rs b/src/compile/output/mod.rs index 438ceda5..6ef19ace 100644 --- a/src/compile/output/mod.rs +++ b/src/compile/output/mod.rs @@ -27,6 +27,7 @@ pub mod contracts; pub mod info; pub mod sources; +/// A mapping from build_id to [BuildContext]. 
pub type Builds = BTreeMap>; /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still From ad6ac9868c994280f94b0ef236848728c61f1361 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 10 Jun 2024 08:47:24 +0300 Subject: [PATCH 6/9] fix doc --- src/cache.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/cache.rs b/src/cache.rs index 34891d59..89809b95 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -314,6 +314,8 @@ impl CompilerCache { /// Reads all cached [BuildContext]s from disk. [BuildContext] is inlined into [RawBuildInfo] /// objects, so we are basically just partially deserializing build infos here. + /// + /// [BuildContext]: crate::buildinfo::BuildContext pub fn read_builds(&self, build_info_dir: impl AsRef) -> Result> { use rayon::prelude::*; From e6e02290435bfc9341b1bf520be47119a255e58c Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Mon, 10 Jun 2024 22:17:05 +0300 Subject: [PATCH 7/9] review fixes --- src/buildinfo.rs | 6 +++--- src/cache.rs | 4 ++-- src/compile/output/mod.rs | 36 ++++++++++++++++++++++++++++++++++-- src/compile/project.rs | 16 +++++++++------- 4 files changed, 48 insertions(+), 14 deletions(-) diff --git a/src/buildinfo.rs b/src/buildinfo.rs index 5d8fdcdc..f400b96d 100644 --- a/src/buildinfo.rs +++ b/src/buildinfo.rs @@ -10,7 +10,7 @@ use md5::Digest; use semver::Version; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use std::{ - collections::{BTreeMap, HashMap, HashSet}, + collections::{BTreeMap, HashSet}, path::{Path, PathBuf}, }; @@ -40,7 +40,7 @@ impl BuildInfo { #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] pub struct BuildContext { /// Mapping from internal compiler source id to path of the source file. - pub source_id_to_path: HashMap, + pub source_id_to_path: BTreeMap, /// Language of the compiler. pub language: L, } @@ -50,7 +50,7 @@ impl BuildContext { where I: CompilerInput, { - let mut source_id_to_path = HashMap::new(); + let mut source_id_to_path = BTreeMap::new(); let input_sources = input.sources().map(|(path, _)| path).collect::>(); for (path, source) in output.sources.iter() { diff --git a/src/cache.rs b/src/cache.rs index 89809b95..eca71e2c 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -320,14 +320,14 @@ impl CompilerCache { use rayon::prelude::*; let build_info_dir = build_info_dir.as_ref(); - self.builds .par_iter() .map(|build_id| { utils::read_json_file(build_info_dir.join(build_id).with_extension("json")) .map(|b| (build_id.clone(), b)) }) - .collect() + .collect::>() + .map(|b| Builds(b)) } } diff --git a/src/compile/output/mod.rs b/src/compile/output/mod.rs index 6ef19ace..cd24f915 100644 --- a/src/compile/output/mod.rs +++ b/src/compile/output/mod.rs @@ -14,11 +14,12 @@ use crate::{ }; use contracts::{VersionedContract, VersionedContracts}; use semver::Version; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use std::{ borrow::Cow, collections::BTreeMap, fmt, + ops::{Deref, DerefMut}, path::{Path, PathBuf}, }; use yansi::Paint; @@ -28,7 +29,38 @@ pub mod info; pub mod sources; /// A mapping from build_id to [BuildContext]. 
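// `Builds` below becomes a thin newtype over the map rather than a bare alias.
// The hand-written `Default` impl that follows exists because
// `#[derive(Default)]` would also demand `L: Default`, even though an empty map
// is constructible for any `L`; `Deref` then exposes the map's read API
// directly. A minimal sketch of the pattern:
use std::collections::BTreeMap;
use std::ops::Deref;

struct Wrapper<L>(BTreeMap<String, L>);

// Hand-written so `L` needs no `Default` bound of its own.
impl<L> Default for Wrapper<L> {
    fn default() -> Self {
        Self(BTreeMap::new())
    }
}

// Callers can use map methods on the wrapper, e.g. `wrapper.get("id")`.
impl<L> Deref for Wrapper<L> {
    type Target = BTreeMap<String, L>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}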
-pub type Builds = BTreeMap>; +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(transparent)] +pub struct Builds(pub BTreeMap>); + +impl Default for Builds { + fn default() -> Self { + Self(Default::default()) + } +} + +impl Deref for Builds { + type Target = BTreeMap>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Builds { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl IntoIterator for Builds { + type Item = (String, BuildContext); + type IntoIter = std::collections::btree_map::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} /// Contains a mixture of already compiled/cached artifacts and the input set of sources that still /// need to be compiled. diff --git a/src/compile/project.rs b/src/compile/project.rs index 22f74957..a58f02f1 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -108,7 +108,7 @@ use crate::{ compilers::{Compiler, CompilerInput, CompilerOutput, Language}, error::Result, filter::SparseOutputFilter, - output::AggregatedCompilerOutput, + output::{AggregatedCompilerOutput, Builds}, report, resolver::GraphEdges, ArtifactOutput, Graph, Project, ProjectCompileOutput, Sources, @@ -333,12 +333,14 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { project.artifacts_handler().handle_cached_artifacts(&cached_artifacts)?; - let builds = output - .build_infos - .iter() - .map(|build_info| (build_info.id.clone(), build_info.build_context.clone())) - .chain(cached_builds) - .collect(); + let builds = Builds( + output + .build_infos + .iter() + .map(|build_info| (build_info.id.clone(), build_info.build_context.clone())) + .chain(cached_builds) + .collect(), + ); Ok(ProjectCompileOutput { compiler_output: output, From 60109185c77946175abf9c3935bbaef668f78346 Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 11 Jun 2024 01:41:37 +0300 Subject: [PATCH 8/9] join later --- src/buildinfo.rs | 11 +++++------ src/compile/project.rs | 5 ++--- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/buildinfo.rs b/src/buildinfo.rs index f400b96d..31562f88 100644 --- a/src/buildinfo.rs +++ b/src/buildinfo.rs @@ -67,6 +67,11 @@ impl BuildContext { *path = root.as_ref().join(path.as_path()); }); } + + pub fn with_joined_paths(mut self, root: impl AsRef) -> Self { + self.join_all(root); + self + } } /// Represents `BuildInfo` object @@ -122,12 +127,6 @@ impl RawBuildInfo { Ok(RawBuildInfo { id, build_info, build_context }) } - - // We only join [BuildContext] paths here because input and output are kept in the same format - // as compiler seen/produced them. 
- pub fn join_all(&mut self, root: impl AsRef) { - self.build_context.join_all(root); - } } #[cfg(test)] diff --git a/src/compile/project.rs b/src/compile/project.rs index a58f02f1..7f1def72 100644 --- a/src/compile/project.rs +++ b/src/compile/project.rs @@ -339,6 +339,7 @@ impl<'a, T: ArtifactOutput, C: Compiler> ArtifactsState<'a, T, C> { .iter() .map(|build_info| (build_info.id.clone(), build_info.build_context.clone())) .chain(cached_builds) + .map(|(id, context)| (id, context.with_joined_paths(project.paths.root.as_path()))) .collect(), ); @@ -519,15 +520,13 @@ impl FilteredCompilerSources { cache.compiler_seen(file); } - let mut build_info = RawBuildInfo::new(&input, &output, project.build_info)?; + let build_info = RawBuildInfo::new(&input, &output, project.build_info)?; output.retain_files( actually_dirty .iter() .map(|f| f.strip_prefix(project.paths.root.as_path()).unwrap_or(f)), ); - - build_info.join_all(project.paths.root.as_path()); output.join_all(project.paths.root.as_path()); aggregated.extend(version.clone(), build_info, output); From 5eeb13c57799adf3fc66604aa24dec4d75b8012b Mon Sep 17 00:00:00 2001 From: Arsenii Kulikov Date: Tue, 11 Jun 2024 15:39:44 +0300 Subject: [PATCH 9/9] fix deny.toml --- deny.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/deny.toml b/deny.toml index 6eafb1e1..263e1d12 100644 --- a/deny.toml +++ b/deny.toml @@ -55,6 +55,7 @@ allow = [ # https://github.com/briansmith/webpki/issues/148 "LicenseRef-webpki", "BSL-1.0", + "Unicode-3.0", ] # Allow 1 or more licenses on a per-crate basis, so that particular licenses