refactor: improve container workspace instantiation #311

Merged
22 changes: 10 additions & 12 deletions crates/pop-cli/src/commands/build/spec.rs
@@ -370,13 +370,13 @@ async fn guide_user_to_generate_spec(args: BuildSpecCommand) -> anyhow::Result<B
 	}
 	// Prompt relays chains based on the chain type
 	match chain_type {
-		ChainType::Live => {
+		ChainType::Live =>
 			for relay in RelayChain::VARIANTS {
 				if !matches!(
 					relay,
-					RelayChain::Westend
-						| RelayChain::Paseo | RelayChain::Kusama
-						| RelayChain::Polkadot
+					RelayChain::Westend |
+						RelayChain::Paseo | RelayChain::Kusama |
+						RelayChain::Polkadot
 				) {
 					continue;
 				} else {
@@ -386,15 +386,14 @@ async fn guide_user_to_generate_spec(args: BuildSpecCommand) -> anyhow::Result<B
 						relay.get_detailed_message().unwrap_or_default(),
 					);
 				}
-			}
-		},
-		_ => {
+			},
+		_ =>
 			for relay in RelayChain::VARIANTS {
 				if matches!(
 					relay,
-					RelayChain::Westend
-						| RelayChain::Paseo | RelayChain::Kusama
-						| RelayChain::Polkadot
+					RelayChain::Westend |
+						RelayChain::Paseo | RelayChain::Kusama |
+						RelayChain::Polkadot
 				) {
 					continue;
 				} else {
@@ -404,8 +403,7 @@ async fn guide_user_to_generate_spec(args: BuildSpecCommand) -> anyhow::Result<B
 					relay.get_detailed_message().unwrap_or_default(),
 				);
 			}
-		}
-	},
+			},
 	}
 
 	let relay_chain = prompt.interact()?.clone();
5 changes: 2 additions & 3 deletions crates/pop-cli/src/commands/install/mod.rs
@@ -299,12 +299,11 @@ async fn install_rustup() -> anyhow::Result<()> {
 async fn install_homebrew() -> anyhow::Result<()> {
 	match cmd("which", vec!["brew"]).read() {
 		Ok(output) => log::info(format!("ℹ️ Homebrew installed already at {}.", output))?,
-		Err(_) => {
+		Err(_) =>
 			run_external_script(
 				"https://raw.githubusercontent.com/Homebrew/install/master/install.sh",
 			)
-			.await?
-		},
+			.await?,
 	}
 	Ok(())
 }
4 changes: 2 additions & 2 deletions crates/pop-cli/src/commands/new/parachain.rs
@@ -241,8 +241,8 @@ fn get_customization_value(
 	decimals: Option<u8>,
 	initial_endowment: Option<String>,
 ) -> Result<Config> {
-	if !matches!(template, Parachain::Standard)
-		&& (symbol.is_some() || decimals.is_some() || initial_endowment.is_some())
+	if !matches!(template, Parachain::Standard) &&
+		(symbol.is_some() || decimals.is_some() || initial_endowment.is_some())
 	{
 		log::warning("Customization options are not available for this template")?;
 		sleep(Duration::from_secs(3))
4 changes: 2 additions & 2 deletions crates/pop-cli/src/commands/up/contract.rs
@@ -326,8 +326,8 @@ pub fn has_contract_been_built(path: Option<&Path>) -> bool {
 		Err(_) => return false,
 	};
 	let contract_name = manifest.package().name();
-	project_path.join("target/ink").exists()
-		&& project_path.join(format!("target/ink/{}.contract", contract_name)).exists()
+	project_path.join("target/ink").exists() &&
+		project_path.join(format!("target/ink/{}.contract", contract_name)).exists()
 }
 
 #[cfg(test)]
5 changes: 2 additions & 3 deletions crates/pop-cli/src/commands/up/parachain.rs
@@ -71,7 +71,7 @@ impl ZombienetCommand {
 			.await
 		{
 			Ok(n) => n,
-			Err(e) => {
+			Err(e) =>
 				return match e {
 					Error::Config(message) => {
 						outro_cancel(format!("🚫 A configuration error occurred: `{message}`"))?;
@@ -82,8 +82,7 @@ impl ZombienetCommand {
 						Ok(())
 					},
 					_ => Err(e.into()),
-				}
-			},
+				},
 		};

 		// Source any missing/stale binaries
2 changes: 2 additions & 0 deletions crates/pop-common/src/errors.rs
@@ -27,4 +27,6 @@ pub enum Error {
 	UnsupportedCommand(String),
 	#[error("Unsupported platform: {arch} {os}")]
 	UnsupportedPlatform { arch: &'static str, os: &'static str },
+	#[error("Toml error: {0}")]
+	TomlError(#[from] toml_edit::TomlError),
 }
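The new variant wires `toml_edit` parse failures into the crate's error type via `#[from]`. A minimal sketch of what that enables; the helper below is hypothetical, written as if it lived inside pop-common:

use crate::Error;
use toml_edit::DocumentMut;

// Hypothetical helper: thiserror's `#[from]` generates a `From<toml_edit::TomlError>`
// impl, so `?` converts a failed parse straight into `Error::TomlError`.
fn parse_manifest(contents: &str) -> Result<DocumentMut, Error> {
	Ok(contents.parse::<DocumentMut>()?)
}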
3 changes: 2 additions & 1 deletion crates/pop-common/src/git.rs
@@ -124,7 +124,8 @@ impl Git {
 		Self::parse_latest_tag(tags.iter().flatten().collect::<Vec<_>>())
 	}
 
-	/// Parses a list of tags to identify the latest one, prioritizing tags in the stable format first.
+	/// Parses a list of tags to identify the latest one, prioritizing tags in the stable format
+	/// first.
 	fn parse_latest_tag(tags: Vec<&str>) -> Option<String> {
 		match Self::parse_stable_format(tags.clone()) {
 			Some(last_stable_tag) => Some(last_stable_tag),
10 changes: 4 additions & 6 deletions crates/pop-common/src/lib.rs
@@ -34,18 +34,16 @@ pub fn target() -> Result<&'static str, Error> {
 	}
 
 	match ARCH {
-		"aarch64" => {
+		"aarch64" =>
 			return match OS {
 				"macos" => Ok("aarch64-apple-darwin"),
 				_ => Ok("aarch64-unknown-linux-gnu"),
-			}
-		},
-		"x86_64" | "x86" => {
+			},
+		"x86_64" | "x86" =>
 			return match OS {
 				"macos" => Ok("x86_64-apple-darwin"),
 				_ => Ok("x86_64-unknown-linux-gnu"),
-			}
-		},
+			},
 		&_ => {},
 	}
 	Err(Error::UnsupportedPlatform { arch: ARCH, os: OS })
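For context, a small usage sketch of `target()`; the artifact-naming scheme is illustrative and the re-export path of `Error` at the crate root is an assumption:

use pop_common::{target, Error};

// Hypothetical: derive a platform-specific artifact name from the host triple
// resolved by `target()` (e.g. "aarch64-apple-darwin" on Apple Silicon macOS).
fn artifact_name(binary: &str) -> Result<String, Error> {
	Ok(format!("{binary}-{}.tar.gz", target()?))
}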
87 changes: 87 additions & 0 deletions crates/pop-common/src/manifest.rs
@@ -4,6 +4,7 @@ use crate::Error;
 use anyhow;
 pub use cargo_toml::{Dependency, Manifest};
 use std::{
+	collections::HashSet,
 	fs::{read_to_string, write},
 	path::{Path, PathBuf},
 };
@@ -91,6 +92,92 @@ pub fn add_crate_to_workspace(workspace_toml: &Path, crate_path: &Path) -> anyho
 	Ok(())
 }
 
+/// Collects the dependencies of the given `Cargo.toml` manifests in a HashMap.
+///
+/// # Arguments
+/// * `manifests`: Paths of the manifests to collect dependencies from.
+pub fn collect_manifest_dependencies(manifests: Vec<&Path>) -> anyhow::Result<HashSet<String>> {
+	let mut dependencies = HashSet::new();
+	for m in manifests {
+		let cargo = &std::fs::read_to_string(m)?.parse::<DocumentMut>()?;
+		for d in cargo["dependencies"].as_table().unwrap().into_iter() {
+			dependencies.insert(d.0.into());
+		}
+		for d in cargo["build-dependencies"].as_table().unwrap().into_iter() {
+			dependencies.insert(d.0.into());
+		}
+	}
+	Ok(dependencies)
+}
+
+/// Extends `target` cargo manifest with `dependencies` using versions from `source`.
+///
+/// It is useful when the list of dependencies is compiled from some external crates,
+/// but there is interest on using a separate manifest as the source of truth for their versions.
+///
+/// # Arguments
+/// * `target`: Manifest to be modified.
+/// * `source`: Manifest used as reference for the dependency versions.
+/// * `tag`: Version to use when transforming local dependencies from `source`.
+/// * `dependencies`: List to extend `target` with.
+/// * `local_exceptions`: List of dependencies that need to be reference a local path.
+pub fn extend_dependencies_with_version_source<I>(
+	target: &mut Manifest,
+	source: Manifest,
+	tag: Option<String>,
+	dependencies: I,
+	local_exceptions: Option<Vec<(String, String)>>,
+) where
+	I: IntoIterator<Item = String>,
+{
+	let mut target_manifest_workspace = target.workspace.clone().unwrap();
+	let source_manifest_workspace = source.workspace.clone().unwrap();
+
+	let updated_dependencies: Vec<_> = dependencies
+		.into_iter()
+		.filter_map(|k| {
+			if let Some(dependency) = source_manifest_workspace.dependencies.get(&k) {
+				if let Some(d) = dependency.detail() {
+					let mut detail = d.clone();
+					if d.path.is_some() {
+						detail.path = None;
+						detail.git = source_manifest_workspace.package.clone().unwrap().repository;
+						if let Some(tag) = &tag {
+							match tag.as_str() {
+								"master" => detail.branch = Some("master".to_string()),
+								_ => detail.tag = Some(tag.to_string()),
+							}
+						};
+					}
+					Some((k, Dependency::Detailed(Box::from(detail))))
+				} else {
+					Some((k, dependency.clone()))
+				}
+			} else {
+				None
+			}
+		})
+		.collect();
+
+	target_manifest_workspace.dependencies.extend(updated_dependencies);
+
+	// Address local exceptions.
+	if let Some(local_exceptions) = local_exceptions {
+		for (dependency, path) in local_exceptions {
+			let d = target_manifest_workspace
+				.dependencies
+				.get_mut(&dependency)
+				.unwrap()
+				.detail_mut();
+			d.git = None;
+			d.tag = None;
+			d.branch = None;
+			d.path = Some(path);
+		}
+	}
+	target.workspace = Some(target_manifest_workspace);
+}
+
 #[cfg(test)]
 mod tests {
 	use super::*;
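These two helpers are the main functional addition: dependencies can be collected from a set of crate manifests and pinned to the versions declared by a separate workspace manifest, which the doc comments above describe as using one manifest as the source of truth for versions. A hedged sketch of how a caller might combine them; the paths, the tag, and the `pop_common::manifest` module path are assumptions, not taken from the PR:

use pop_common::manifest::{
	collect_manifest_dependencies, extend_dependencies_with_version_source, Manifest,
};
use std::path::Path;

// Hypothetical caller. Both workspace manifests must contain a `[workspace]` table and
// the collected manifests must contain `[dependencies]` and `[build-dependencies]`
// tables, since the helpers unwrap them.
fn pin_container_dependencies() -> anyhow::Result<()> {
	// Gather every dependency named by the generated crates (illustrative paths).
	let deps = collect_manifest_dependencies(vec![
		Path::new("container/node/Cargo.toml"),
		Path::new("container/runtime/Cargo.toml"),
	])?;

	let mut target = Manifest::from_path("container/Cargo.toml")?;
	let source = Manifest::from_path("upstream/Cargo.toml")?;

	// Local path dependencies in `source` are rewritten to git references at the tag;
	// everything else keeps whatever version `source` declares.
	extend_dependencies_with_version_source(
		&mut target,
		source,
		Some("v1.0.0".to_string()), // illustrative tag
		deps,
		None,
	);

	// Writing `target` back to disk is left to the caller.
	Ok(())
}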
20 changes: 8 additions & 12 deletions crates/pop-common/src/sourcing/binary.rs
@@ -45,13 +45,12 @@ impl Binary {
 	pub fn latest(&self) -> Option<&str> {
 		match self {
 			Self::Local { .. } => None,
-			Self::Source { source, .. } => {
+			Self::Source { source, .. } =>
 				if let GitHub(ReleaseArchive { latest, .. }) = source {
 					latest.as_deref()
 				} else {
 					None
-				}
-			},
+				},
 		}
 	}
 
@@ -135,18 +134,15 @@
 	) -> Result<(), Error> {
 		match self {
 			Self::Local { name, path, manifest, .. } => match manifest {
-				None => {
+				None =>
 					return Err(Error::MissingBinary(format!(
 						"The {path:?} binary cannot be sourced automatically."
-					)))
-				},
-				Some(manifest) => {
-					from_local_package(manifest, name, release, status, verbose).await
-				},
+					))),
+				Some(manifest) =>
+					from_local_package(manifest, name, release, status, verbose).await,
 			},
-			Self::Source { source, cache, .. } => {
-				source.source(cache, release, status, verbose).await
-			},
+			Self::Source { source, cache, .. } =>
+				source.source(cache, release, status, verbose).await,
 		}
 	}
10 changes: 4 additions & 6 deletions crates/pop-common/src/sourcing/mod.rs
@@ -210,9 +210,8 @@ impl GitHub {
 		let artifacts: Vec<_> = artifacts
 			.iter()
 			.map(|name| match reference {
-				Some(reference) => {
-					(name.as_str(), cache.join(&format!("{name}-{reference}")))
-				},
+				Some(reference) =>
+					(name.as_str(), cache.join(&format!("{name}-{reference}"))),
 				None => (name.as_str(), cache.join(&name)),
 			})
 			.collect();
@@ -379,11 +378,10 @@ async fn from_github_archive(
 	// Prepare archive contents for build
 	let entries: Vec<_> = read_dir(&working_dir)?.take(2).filter_map(|x| x.ok()).collect();
 	match entries.len() {
-		0 => {
+		0 =>
 			return Err(Error::ArchiveError(
 				"The downloaded archive does not contain any entries.".into(),
-			))
-		},
+			)),
 		1 => working_dir = entries[0].path(), // Automatically switch to top level directory
 		_ => {}, /* Assume that downloaded archive does not have a
 		          * top level directory */
16 changes: 10 additions & 6 deletions crates/pop-common/src/templates/extractor.rs
@@ -1,4 +1,4 @@
-// SPDX-License-Identifier: GPL-3.0
+// SPDX-License-Identifier: GPL-3.0
 
 use std::{fs, io, path::Path};
 
@@ -10,8 +10,8 @@ use anyhow::Result;
 /// * `template_file` - The name of the template to extract.
 /// * `repo_directory` - The path to the repository directory containing the template.
 /// * `target_directory` - The destination path where the template files should be copied.
-/// * `ignore_directories` - A vector of directory names to ignore during the extraction. If empty, no
-///   directories are ignored.
+/// * `ignore_directories` - A vector of directory names to ignore during the extraction. If empty,
+///   no directories are ignored.
 pub fn extract_template_files(
 	template_file: String,
 	repo_directory: &Path,
@@ -21,7 +21,11 @@ pub fn extract_template_files(
 	let template_directory = repo_directory.join(&template_file);
 	if template_directory.is_dir() {
 		// Recursively copy all directories and files within. Ignores the specified ones.
-		copy_dir_all(&template_directory, target_directory, &ignore_directories.unwrap_or_else(|| vec![]))?;
+		copy_dir_all(
+			&template_directory,
+			target_directory,
+			&ignore_directories.unwrap_or_else(|| vec![]),
+		)?;
 		return Ok(());
 	} else {
 		// If not a dir, just copy the file.
@@ -49,8 +53,8 @@ fn copy_dir_all(
 	for entry in fs::read_dir(src)? {
 		let entry = entry?;
 		let ty = entry.file_type()?;
-		if ty.is_dir()
-			&& ignore_directories.contains(&entry.file_name().to_string_lossy().to_string())
+		if ty.is_dir() &&
+			ignore_directories.contains(&entry.file_name().to_string_lossy().to_string())
 		{
 			continue;
 		} else if ty.is_dir() {
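For reference, a usage sketch of the extractor with its now-wrapped ignore list. The template name, the repository layout, and the exact types of the last two parameters (inferred from the call to `copy_dir_all` above) are assumptions:

use std::path::Path;

// Hypothetical call site: copy the `parachain` template out of a cloned repository,
// skipping VCS and CI metadata directories.
fn copy_parachain_template(repo: &Path, destination: &Path) -> anyhow::Result<()> {
	extract_template_files(
		"parachain".to_string(),
		repo,
		destination,
		Some(vec![".git".to_string(), ".github".to_string()]),
	)?;
	Ok(())
}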
4 changes: 2 additions & 2 deletions crates/pop-parachains/src/build.rs
@@ -49,8 +49,8 @@ pub fn is_supported(path: Option<&Path>) -> Result<bool, Error> {
 	const DEPENDENCIES: [&str; 4] =
 		["cumulus-client-collator", "cumulus-primitives-core", "parachains-common", "polkadot-sdk"];
 	Ok(DEPENDENCIES.into_iter().any(|d| {
-		manifest.dependencies.contains_key(d)
-			|| manifest.workspace.as_ref().map_or(false, |w| w.dependencies.contains_key(d))
+		manifest.dependencies.contains_key(d) ||
+			manifest.workspace.as_ref().map_or(false, |w| w.dependencies.contains_key(d))
 	}))
 }
9 changes: 1 addition & 8 deletions crates/pop-parachains/src/generator/container.rs
@@ -10,11 +10,4 @@ pub(crate) struct Network {
 
 #[derive(Template)]
 #[template(path = "container/Cargo.templ", escape = "none")]
-pub(crate) struct Cargo {
-	pub(crate) template: String,
-	pub(crate) template_tag: String,
-	pub(crate) dancekit_branch: String,
-	pub(crate) moonkit_branch: String,
-	pub(crate) sdk_branch: String,
-	pub(crate) frontier_branch: String,
-}
+pub(crate) struct Cargo {}
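With the branch and tag fields removed, the `Cargo` template struct carries no data, presumably because dependency versions are now handled via the manifest helpers above rather than template placeholders. A minimal sketch of rendering such a struct, assuming the `Template` derive comes from `askama`, as the attribute syntax suggests:

use askama::Template; // brings `render()` into scope

// Hypothetical call site inside the generator: the workspace Cargo.toml template
// is rendered without any per-branch parameters.
fn render_workspace_manifest() -> Result<String, askama::Error> {
	Cargo {}.render()
}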