From 2dff1ed6109f4b2d5952f16f5fc60d75114d6ac7 Mon Sep 17 00:00:00 2001
From: Alex Crichton
Date: Thu, 11 Sep 2014 11:50:57 -0700
Subject: [PATCH] Implement a `cargo fetch` command

This command is used to download all dependencies of a package ahead of time
to ensure that no more network communication will be necessary as part of a
build.

cc #358
---
 src/bin/cargo.rs                         |   1 +
 src/bin/fetch.rs                         |  38 +++++++++
 src/cargo/ops/cargo_compile.rs           |  89 +++-----------------
 src/cargo/ops/cargo_fetch.rs             | 100 +++++++++++++++++++++++
 src/cargo/ops/cargo_generate_lockfile.rs |  11 +--
 src/cargo/ops/mod.rs                     |   2 +
 tests/test_cargo_compile_git_deps.rs     |  29 +++++++
 tests/test_cargo_fetch.rs                |  21 +++++
 tests/tests.rs                           |   1 +
 9 files changed, 205 insertions(+), 87 deletions(-)
 create mode 100644 src/bin/fetch.rs
 create mode 100644 src/cargo/ops/cargo_fetch.rs
 create mode 100644 tests/test_cargo_fetch.rs

diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs
index 4fe2f3e0176..017abb33a25 100644
--- a/src/bin/cargo.rs
+++ b/src/bin/cargo.rs
@@ -55,6 +55,7 @@ macro_rules! each_subcommand( ($macro:ident) => ({
     $macro!(config_for_key)
     $macro!(config_list)
     $macro!(doc)
+    $macro!(fetch)
     $macro!(generate_lockfile)
     $macro!(git_checkout)
     $macro!(locate_project)
diff --git a/src/bin/fetch.rs b/src/bin/fetch.rs
new file mode 100644
index 00000000000..0c025389976
--- /dev/null
+++ b/src/bin/fetch.rs
@@ -0,0 +1,38 @@
+use docopt;
+
+use cargo::ops;
+use cargo::core::{MultiShell};
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_root_manifest_for_cwd;
+
+docopt!(Options, "
+Fetch dependencies of a package from the network.
+
+Usage:
+    cargo fetch [options]
+
+Options:
+    -h, --help               Print this message
+    --manifest-path PATH     Path to the manifest to fetch dependencies for
+    -v, --verbose            Use verbose output
+
+If a lockfile is available, this command will ensure that all of the git
+dependencies and/or registry dependencies are downloaded and locally
+available. The network is never touched after a `cargo fetch` unless
+the lockfile changes.
+
+If the lockfile is not available, then this is the equivalent of
+`cargo generate-lockfile`: a lockfile is generated and all dependencies are
+updated as well.
+", flag_manifest_path: Option<String>)
+
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+    shell.set_verbose(options.flag_verbose);
+    let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
+    try!(ops::fetch(&root, shell).map_err(|e| {
+        CliError::from_boxed(e, 101)
+    }));
+    Ok(None)
+}
+
+
diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs
index 0c437b77735..f9cce0b7169 100644
--- a/src/cargo/ops/cargo_compile.rs
+++ b/src/cargo/ops/cargo_compile.rs
@@ -23,11 +23,11 @@
 //!
 
 use std::os;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
 
 use core::registry::PackageRegistry;
 use core::{MultiShell, Source, SourceId, PackageSet, Target, PackageId};
-use core::{Package, Summary, Resolve, resolver};
+use core::resolver;
 use ops;
 use sources::{PathSource};
 use util::config::{Config, ConfigValue};
@@ -72,26 +72,21 @@
                                               manifest_path.dir_path()));
 
     let (packages, resolve_with_overrides, sources) = {
-        let _p = profile::start("resolving...");
-        let lockfile = manifest_path.dir_path().join("Cargo.lock");
-        let source_id = package.get_package_id().get_source_id();
-
         let mut config = try!(Config::new(*shell, jobs, target.clone()));
         let mut registry = PackageRegistry::new(&mut config);
 
-        let dependencies = package.get_dependencies().iter().filter(|dep| {
-            dep.is_transitive() || dev_deps
-        }).map(|d| d.clone()).collect::<Vec<_>>();
-
-        match try!(ops::load_lockfile(&lockfile, source_id)) {
-            Some(r) => try!(add_lockfile_sources(&mut registry, &package, &r)),
-            None => try!(registry.add_sources(package.get_source_ids())),
-        }
+        // First, resolve the package's *listed* dependencies, as well as
+        // downloading and updating all remotes and such.
+        try!(ops::resolve_and_fetch(&mut registry, &package));
 
-        let resolved = try!(resolver::resolve(package.get_package_id(),
-                                              package.get_dependencies(),
-                                              &mut registry));
-        try!(ops::write_resolve(&package, &resolved));
+        // Second, resolve with precisely what we're doing. Filter out
+        // transitive dependencies if necessary, specify features, handle
+        // overrides, etc.
+        let _p = profile::start("resolving w/ overrides...");
+        let dependencies = package.get_dependencies().iter().filter(|dep| {
+            dep.is_transitive() || dev_deps
+        }).map(|d| d.clone()).collect::<Vec<_>>();
 
         try!(registry.add_overrides(override_ids));
         let resolved_with_overrides =
                 try!(resolver::resolve(package.get_package_id(),
@@ -196,63 +191,3 @@ fn scrape_target_config(config: &mut Config,
 
     Ok(())
 }
-
-/// When a lockfile is present, we want to keep as many dependencies at their
-/// original revision as possible. We need to account, however, for
-/// modifications to the manifest in terms of modifying, adding, or deleting
-/// dependencies.
-///
-/// This method will add any appropriate sources from the lockfile into the
-/// registry, and add all other sources from the root package to the registry.
-/// Any dependency which has not been modified has its source added to the
-/// registry (to retain the precise field if possible). Any dependency which
-/// *has* changed has its source id listed in the manifest added and all of its
-/// transitive dependencies are blacklisted to not be added from the lockfile.
-///
-/// TODO: this won't work too well for registry-based packages, but we don't
-/// have many of those anyway so we should be ok for now.
-fn add_lockfile_sources(registry: &mut PackageRegistry,
-                        root: &Package,
-                        resolve: &Resolve) -> CargoResult<()> {
-    let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
-        deps.map(|d| (d.get_name(), d))
-    }).collect::<HashMap<_, _>>();
-
-    let mut sources = vec![root.get_package_id().get_source_id().clone()];
-    let mut to_avoid = HashSet::new();
-    let mut to_add = HashSet::new();
-    for dep in root.get_dependencies().iter() {
-        match deps.find(&dep.get_name()) {
-            Some(&lockfile_dep) => {
-                let summary = Summary::new(lockfile_dep, []);
-                if dep.matches(&summary) {
-                    fill_with_deps(resolve, lockfile_dep, &mut to_add);
-                } else {
-                    fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
-                    sources.push(dep.get_source_id().clone());
-                }
-            }
-            None => sources.push(dep.get_source_id().clone()),
-        }
-    }
-
-    // Only afterward once we know the entire blacklist are the lockfile
-    // sources added.
-    for addition in to_add.iter() {
-        if !to_avoid.contains(addition) {
-            sources.push(addition.get_source_id().clone());
-        }
-    }
-
-    return registry.add_sources(sources);
-
-    fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
-                          set: &mut HashSet<&'a PackageId>) {
-        if !set.insert(dep) { return }
-        for mut deps in resolve.deps(dep).move_iter() {
-            for dep in deps {
-                fill_with_deps(resolve, dep, set);
-            }
-        }
-    }
-}
diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs
new file mode 100644
index 00000000000..3cdb92fbe1b
--- /dev/null
+++ b/src/cargo/ops/cargo_fetch.rs
@@ -0,0 +1,100 @@
+use std::collections::{HashSet, HashMap};
+
+use core::{MultiShell, Package, PackageId, Summary};
+use core::registry::PackageRegistry;
+use core::resolver::{mod, Resolve};
+use core::source::Source;
+use ops;
+use sources::PathSource;
+use util::{CargoResult, Config};
+use util::profile;
+
+pub fn fetch(manifest_path: &Path,
+             shell: &mut MultiShell) -> CargoResult<()> {
+    let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
+    try!(source.update());
+    let package = try!(source.get_root_package());
+
+    let mut config = try!(Config::new(shell, None, None));
+    let mut registry = PackageRegistry::new(&mut config);
+    try!(resolve_and_fetch(&mut registry, &package));
+    Ok(())
+}
+
+pub fn resolve_and_fetch(registry: &mut PackageRegistry, package: &Package)
+                         -> CargoResult<Resolve> {
+    let _p = profile::start("resolve and fetch...");
+
+    let lockfile = package.get_manifest_path().dir_path().join("Cargo.lock");
+    let source_id = package.get_package_id().get_source_id();
+    match try!(ops::load_lockfile(&lockfile, source_id)) {
+        Some(r) => try!(add_lockfile_sources(registry, package, &r)),
+        None => try!(registry.add_sources(package.get_source_ids())),
+    }
+
+    let resolved = try!(resolver::resolve(package.get_package_id(),
+                                          package.get_dependencies(),
+                                          registry));
+    try!(ops::write_resolve(package, &resolved));
+    Ok(resolved)
+}
+
+/// When a lockfile is present, we want to keep as many dependencies at their
+/// original revision as possible. We need to account, however, for
+/// modifications to the manifest in terms of modifying, adding, or deleting
+/// dependencies.
+///
+/// This method will add any appropriate sources from the lockfile into the
+/// registry, and add all other sources from the root package to the registry.
+/// Any dependency which has not been modified has its source added to the
+/// registry (to retain the precise field if possible). Any dependency which
+/// *has* changed has its source id listed in the manifest added and all of its
+/// transitive dependencies are blacklisted to not be added from the lockfile.
+///
+/// TODO: this won't work too well for registry-based packages, but we don't
+/// have many of those anyway so we should be ok for now.
+fn add_lockfile_sources(registry: &mut PackageRegistry,
+                        root: &Package,
+                        resolve: &Resolve) -> CargoResult<()> {
+    let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
+        deps.map(|d| (d.get_name(), d))
+    }).collect::<HashMap<_, _>>();
+
+    let mut sources = vec![root.get_package_id().get_source_id().clone()];
+    let mut to_avoid = HashSet::new();
+    let mut to_add = HashSet::new();
+    for dep in root.get_dependencies().iter() {
+        match deps.find(&dep.get_name()) {
+            Some(&lockfile_dep) => {
+                let summary = Summary::new(lockfile_dep, []);
+                if dep.matches(&summary) {
+                    fill_with_deps(resolve, lockfile_dep, &mut to_add);
+                } else {
+                    fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
+                    sources.push(dep.get_source_id().clone());
+                }
+            }
+            None => sources.push(dep.get_source_id().clone()),
+        }
+    }
+
+    // Only afterward once we know the entire blacklist are the lockfile
+    // sources added.
+    for addition in to_add.iter() {
+        if !to_avoid.contains(addition) {
+            sources.push(addition.get_source_id().clone());
+        }
+    }
+
+    return registry.add_sources(sources);
+
+    fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
+                          set: &mut HashSet<&'a PackageId>) {
+        if !set.insert(dep) { return }
+        for mut deps in resolve.deps(dep).move_iter() {
+            for dep in deps {
+                fill_with_deps(resolve, dep, set);
+            }
+        }
+    }
+}
diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs
index 4875034d1b1..3ec6560b78c 100644
--- a/src/cargo/ops/cargo_generate_lockfile.rs
+++ b/src/cargo/ops/cargo_generate_lockfile.rs
@@ -16,28 +16,19 @@ use util::toml as cargo_toml;
 
 pub fn generate_lockfile(manifest_path: &Path,
                          shell: &mut MultiShell) -> CargoResult<()> {
-
-    log!(4, "compile; manifest-path={}", manifest_path.display());
-
     let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
     try!(source.update());
-
-    // TODO: Move this into PathSource
     let package = try!(source.get_root_package());
-    debug!("loaded package; package={}", package);
-
     let source_ids = package.get_source_ids();
+    let mut config = try!(Config::new(shell, None, None));
     let resolve = {
-        let mut config = try!(Config::new(shell, None, None));
-
         let mut registry = PackageRegistry::new(&mut config);
         try!(registry.add_sources(source_ids));
         try!(resolver::resolve(package.get_package_id(),
                                package.get_dependencies(),
                                &mut registry))
     };
-
     try!(write_resolve(&package, &resolve));
     Ok(())
 }
diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs
index ba772460fa4..7ea99eb1dd0 100644
--- a/src/cargo/ops/mod.rs
+++ b/src/cargo/ops/mod.rs
@@ -11,6 +11,7 @@ pub use self::cargo_test::{run_tests, run_benches, TestOptions};
 pub use self::cargo_package::package;
 pub use self::cargo_upload::{upload, upload_configuration, UploadConfig};
 pub use self::cargo_upload::{upload_login, http_proxy, http_handle};
+pub use self::cargo_fetch::{fetch, resolve_and_fetch};
 
 mod cargo_clean;
 mod cargo_compile;
@@ -23,3 +24,4 @@ mod cargo_generate_lockfile;
 mod cargo_test;
 mod cargo_package;
 mod cargo_upload;
+mod cargo_fetch;
diff --git a/tests/test_cargo_compile_git_deps.rs b/tests/test_cargo_compile_git_deps.rs
index c1501678603..9c4bf20c63f 100644
--- a/tests/test_cargo_compile_git_deps.rs
+++ b/tests/test_cargo_compile_git_deps.rs
@@ -1244,3 +1244,32 @@ test!(git_dep_build_cmd {
                 execs().with_stdout("1\n"));
 })
 
+test!(fetch_downloads {
+    let bar = git_repo("bar", |project| {
+        project.file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+        "#)
+        .file("src/lib.rs", "pub fn bar() -> int { 1 }")
+    }).assert();
+
+    let p = project("p1")
+        .file("Cargo.toml", format!(r#"
+            [project]
+            name = "p1"
+            version = "0.5.0"
+            authors = []
+            [dependencies.bar]
+            git = '{}'
+        "#, bar.url()).as_slice())
+        .file("src/main.rs", "fn main() {}");
+    assert_that(p.cargo_process("fetch"),
+                execs().with_status(0).with_stdout(format!("\
+{updating} git repository `{url}`
+", updating = UPDATING, url = bar.url())));
+
+    assert_that(p.process(cargo_dir().join("cargo")).arg("fetch"),
+                execs().with_status(0).with_stdout(""));
+})
diff --git a/tests/test_cargo_fetch.rs b/tests/test_cargo_fetch.rs
new file mode 100644
index 00000000000..7ebd1cde52c
--- /dev/null
+++ b/tests/test_cargo_fetch.rs
@@ -0,0 +1,21 @@
+use support::{project, execs};
+use hamcrest::assert_that;
+
+fn setup() {}
+
+test!(no_deps {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+        "#)
+        .file("src/main.rs", r#"
+            mod a; fn main() {}
+        "#)
+        .file("src/a.rs", "");
+
+    assert_that(p.cargo_process("fetch"),
+                execs().with_status(0).with_stdout(""));
+})
diff --git a/tests/tests.rs b/tests/tests.rs
index 7ece4ff5f20..1c7e95c40a8 100644
--- a/tests/tests.rs
+++ b/tests/tests.rs
@@ -46,3 +46,4 @@ mod test_cargo_package;
 mod test_cargo_build_auth;
 mod test_cargo_registry;
 mod test_cargo_upload;
+mod test_cargo_fetch;
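
Note (editorial illustration, not part of the patch): the whitelist/blacklist rule documented on `add_lockfile_sources` can be modeled in isolation. The standalone sketch below uses hypothetical stand-in types (`PkgId`, `LockedGraph`, `locked_sources_to_keep`) and modern Rust rather than Cargo's actual `Resolve`/`PackageRegistry` API and the 2014 dialect above. The rule it demonstrates: a dependency whose manifest requirement still matches its locked entry keeps its pinned source along with its transitive dependencies, while anything reachable from a changed dependency loses its lockfile source and is re-resolved.

use std::collections::{HashMap, HashSet};

// Hypothetical stand-ins for Cargo's PackageId and the locked dependency
// graph; names are illustrative only.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct PkgId(&'static str);

struct LockedGraph {
    // Locked package -> the packages it depended on in the lockfile.
    deps: HashMap<PkgId, Vec<PkgId>>,
}

impl LockedGraph {
    // Collect `pkg` and everything reachable from it in the locked graph,
    // mirroring the patch's recursive `fill_with_deps` helper.
    fn fill_with_deps(&self, pkg: &PkgId, set: &mut HashSet<PkgId>) {
        if !set.insert(pkg.clone()) {
            return;
        }
        if let Some(children) = self.deps.get(pkg) {
            for child in children {
                self.fill_with_deps(child, set);
            }
        }
    }
}

// Decide which locked packages may keep their pinned sources: an unchanged
// manifest dependency whitelists its whole subtree, while a changed one
// blacklists its subtree so those packages get re-resolved.
fn locked_sources_to_keep(
    manifest_deps: &[(PkgId, bool)], // (dependency, still matches the lockfile?)
    lock: &LockedGraph,
) -> HashSet<PkgId> {
    let mut to_add = HashSet::new();
    let mut to_avoid = HashSet::new();
    for (dep, still_matches) in manifest_deps {
        if *still_matches {
            lock.fill_with_deps(dep, &mut to_add);
        } else {
            lock.fill_with_deps(dep, &mut to_avoid);
        }
    }
    // The blacklist wins: only packages not reachable from any changed
    // dependency keep their locked source.
    to_add.difference(&to_avoid).cloned().collect()
}

fn main() {
    let lock = LockedGraph {
        deps: HashMap::from([
            (PkgId("a"), vec![PkgId("shared")]),
            (PkgId("b"), vec![PkgId("shared")]),
            (PkgId("shared"), vec![]),
        ]),
    };
    // `a` is unchanged in the manifest; `b`'s requirement was edited.
    let keep = locked_sources_to_keep(&[(PkgId("a"), true), (PkgId("b"), false)], &lock);
    assert!(keep.contains(&PkgId("a")));
    // `shared` is reachable from the edited dependency `b`, so it is
    // re-resolved even though it also sits under the unchanged `a`.
    assert!(!keep.contains(&PkgId("shared")));
    assert!(!keep.contains(&PkgId("b")));
    println!("locked sources kept for: {:?}", keep);
}

Running the sketch keeps only `a`'s locked source, which is the blacklisting behavior the TODO in the patch notes may need revisiting once registry-based packages become common.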