Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

More parallel tweaks #68218

Closed
wants to merge 43 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
43 commits
Select commit Hold shift + click to select a range
c5d42b1
Ensure all iterations in Rayon iterators run in the presence of panics
Zoxc Jan 13, 2020
69276ba
Update tests
Zoxc Jan 13, 2020
934abf2
Use $crate
Zoxc Jan 16, 2020
c621cd6
Parallelize type collecting and item-types checking
Zoxc Jan 2, 2020
8c07291
Tweak misc checking 1
Zoxc Jan 2, 2020
bef0c61
Run item-types checking and item-bodies checking in parallel
Zoxc Jan 3, 2020
337d647
Handle panics with `join`
Zoxc Jan 3, 2020
6b67617
Make typeck_item_bodies eval_always
Zoxc Jan 3, 2020
6283565
Make coherence checking parallel
Zoxc Jan 3, 2020
efe44ff
Move privacy_access_levels out of misc checking 3 and run it in parallel
Zoxc Jan 3, 2020
ff125ff
Move some other passes into a parallel block
Zoxc Jan 3, 2020
7a6895f
Make liveness checking more parallel
Zoxc Jan 3, 2020
db0bd38
Prefetch upstream_monomorphizations
Zoxc Jan 3, 2020
171b1bb
Add a new misc checking 3 block
Zoxc Jan 3, 2020
133190b
Prefetch lint_levels and visible_parent_map
Zoxc Jan 4, 2020
f68672b
Fix duplicate test fallout
Zoxc Jan 11, 2020
c7aabbd
Drop `ensure` for privacy_access_levels. Add some comments.
Zoxc Jan 13, 2020
477ad98
Calculate accessor_map in parallel earlier
Zoxc Jan 11, 2020
85fa6a8
Add par_partition
Zoxc Jan 14, 2020
e478795
Parallelize place_root_mono_items
Zoxc Jan 11, 2020
730689e
Estimate CGU cost in place_root_mono_items
Zoxc Jan 12, 2020
1a37f05
Parallelize assert_symbols_are_distinct
Zoxc Jan 12, 2020
20fd50d
Make impl WF inference more parallel
Zoxc Jan 12, 2020
0fdcfe1
Check `Copy` impls in parallel
Zoxc Jan 12, 2020
98d0f7d
Prefetch mir_keys
Zoxc Jan 12, 2020
2bc0d3f
Use a parallel block for coherence checking
Zoxc Jan 12, 2020
9bfdcde
Ensure type checking each function is stealable by other threads
Zoxc Jan 13, 2020
c010632
Tune misc_checking_1
Zoxc Jan 13, 2020
36cbf0e
Prefetch queries used by the metadata encoder
Zoxc Jan 4, 2020
d739ca9
Encode exported symbols last
Zoxc Jan 11, 2020
9f257bb
Prefetch exported symbols
Zoxc Jan 11, 2020
8ea0206
Make the timer more verbose
Zoxc Jan 11, 2020
4cea70f
Make metadata prefetching more accurate
Zoxc Jan 13, 2020
6674519
Run HIR indexing and loading of query results in parallel
Zoxc Jan 9, 2020
fbb9396
Move check_for_entry_fn and check_unused to a later stage
Zoxc Jan 14, 2020
13910d4
Add sync::spawn
Zoxc Jan 9, 2020
9972b3e
Add a Future type
Zoxc Jan 7, 2020
1e02626
Drop the AST in the background
Zoxc Jan 9, 2020
62b7273
Run link_binary_remove_temps in the background
Zoxc Jan 10, 2020
64f2bbd
Make copy_cgu_workproducts_to_incr_comp_cache_dir parallel
Zoxc Jan 10, 2020
0a36b4d
Do incremental setup in the background
Zoxc Jan 11, 2020
3e2090c
Check Session reference count
Zoxc Jan 14, 2020
a6ca4ff
Run early lint checks in the background
Zoxc Jan 7, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/librustc/hir/map/hir_id_validator.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use crate::hir::map::Map;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::{par_iter, Lock, ParallelIterator};
use rustc_data_structures::sync::{par_for_each, Lock};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX};
use rustc_hir::intravisit;
Expand All @@ -12,7 +12,7 @@ pub fn check_crate(hir_map: &Map<'_>) {

let errors = Lock::new(Vec::new());

par_iter(&hir_map.krate().modules).for_each(|(module_id, _)| {
par_for_each(&hir_map.krate().modules, |(module_id, _)| {
let local_def_id = hir_map.local_def_id(*module_id);
hir_map.visit_item_likes_in_module(
local_def_id,
Expand Down
38 changes: 37 additions & 1 deletion src/librustc/hir/map/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,12 @@ use crate::middle::cstore::CrateStoreDyn;
use crate::ty::query::Providers;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::svh::Svh;

use rustc_data_structures::sync::{self, par_for_each};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX};
use rustc_hir::intravisit;
use rustc_hir::itemlikevisit::ItemLikeVisitor;
use rustc_hir::itemlikevisit::{ItemLikeVisitor, ParItemLikeVisitor};
use rustc_hir::print::Nested;
use rustc_hir::*;
use rustc_index::vec::IndexVec;
Expand Down Expand Up @@ -582,6 +584,40 @@ impl<'hir> Map<'hir> {
}
}

/// A parallel version of `visit_item_likes_in_module`.
///
/// Visits the items, trait items, and impl items registered for `module`.
/// The three categories are dispatched as separate branches of `parallel!`,
/// and each branch iterates its ids with `par_for_each`, so the visitor
/// may be invoked concurrently and must be `Sync`.
pub fn par_visit_item_likes_in_module<V>(&self, module: DefId, visitor: &V)
where
    V: ParItemLikeVisitor<'hir> + sync::Sync,
{
    // `module` must be local; panics otherwise.
    let hir_id = self.as_local_hir_id(module).unwrap();

    // Read the module so we'll be re-executed if new items
    // appear immediately under the module. If some new item appears
    // in some nested item in the module, we'll be re-executed due to reads
    // in the expect_* calls in the loops below.
    self.read(hir_id);

    let module = &self.forest.krate.modules[&hir_id];

    parallel!(
        {
            // Ordinary items.
            par_for_each(&module.items, |id| {
                visitor.visit_item(self.expect_item(*id));
            });
        },
        {
            // Trait items (methods, associated consts/types in traits).
            par_for_each(&module.trait_items, |id| {
                visitor.visit_trait_item(self.expect_trait_item(id.hir_id));
            });
        },
        {
            // Impl items (items inside `impl` blocks).
            par_for_each(&module.impl_items, |id| {
                visitor.visit_impl_item(self.expect_impl_item(id.hir_id));
            });
        }
    );
}

/// Retrieves the `Node` corresponding to `id`, panicking if it cannot be found.
pub fn get(&self, id: HirId) -> Node<'hir> {
// read recorded by `find`
Expand Down
14 changes: 5 additions & 9 deletions src/librustc/mir/mono.rs
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ pub struct CodegenUnit<'tcx> {
/// as well as the crate name and disambiguator.
name: Symbol,
items: FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)>,
size_estimate: Option<usize>,
size_estimate: usize,
}

#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable, HashStable)]
Expand All @@ -261,7 +261,7 @@ pub enum Visibility {

impl<'tcx> CodegenUnit<'tcx> {
pub fn new(name: Symbol) -> CodegenUnit<'tcx> {
CodegenUnit { name: name, items: Default::default(), size_estimate: None }
CodegenUnit { name: name, items: Default::default(), size_estimate: 0 }
}

pub fn name(&self) -> Symbol {
Expand Down Expand Up @@ -293,19 +293,15 @@ impl<'tcx> CodegenUnit<'tcx> {
pub fn estimate_size(&mut self, tcx: TyCtxt<'tcx>) {
// Estimate the size of a codegen unit as (approximately) the number of MIR
// statements it corresponds to.
self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum());
self.size_estimate = self.items.keys().map(|mi| mi.size_estimate(tcx)).sum();
}

pub fn size_estimate(&self) -> usize {
// Should only be called if `estimate_size` has previously been called.
self.size_estimate.expect("estimate_size must be called before getting a size_estimate")
self.size_estimate
}

pub fn modify_size_estimate(&mut self, delta: usize) {
assert!(self.size_estimate.is_some());
if let Some(size_estimate) = self.size_estimate {
self.size_estimate = Some(size_estimate + delta);
}
self.size_estimate += delta;
}

pub fn contains_item(&self, item: &MonoItem<'tcx>) -> bool {
Expand Down
1 change: 1 addition & 0 deletions src/librustc/query/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -393,6 +393,7 @@ rustc_queries! {

TypeChecking {
query typeck_item_bodies(_: CrateNum) -> () {
eval_always
desc { "type-checking all item bodies" }
}

Expand Down
2 changes: 1 addition & 1 deletion src/librustc/ty/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1329,7 +1329,7 @@ impl<'tcx> TyCtxt<'tcx> {
}

pub fn encode_metadata(self) -> EncodedMetadata {
let _prof_timer = self.prof.generic_activity("generate_crate_metadata");
let _prof_timer = self.prof.verbose_generic_activity("generate_crate_metadata");
self.cstore.encode_metadata(self)
}

Expand Down
7 changes: 4 additions & 3 deletions src/librustc/ty/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{self, par_iter, Lrc, ParallelIterator};
use rustc_data_structures::sync::{self, balance_par_for_each, Lrc};
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE};
Expand Down Expand Up @@ -2642,8 +2642,9 @@ impl<'tcx> TyCtxt<'tcx> {
}

pub fn par_body_owners<F: Fn(DefId) + sync::Sync + sync::Send>(self, f: F) {
par_iter(&self.hir().krate().body_ids)
.for_each(|&body_id| f(self.hir().body_owner_def_id(body_id)));
balance_par_for_each(&self.hir().krate().body_ids, |&body_id| {
f(self.hir().body_owner_def_id(body_id))
});
}

pub fn provided_trait_methods(self, id: DefId) -> Vec<AssocItem> {
Expand Down
4 changes: 4 additions & 0 deletions src/librustc/ty/steal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,4 +41,8 @@ impl<T> Steal<T> {
let value = value_ref.take();
value.expect("attempt to read from stolen value")
}

/// Consumes the `Steal`, returning the wrapped value, or `None` if the
/// value was already stolen (taken via `steal`).
pub fn into_inner(self) -> Option<T> {
    self.value.into_inner()
}
}
12 changes: 9 additions & 3 deletions src/librustc_codegen_llvm/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ use rustc::ty::{self, TyCtxt};
use rustc::util::common::ErrorReported;
use rustc_codegen_ssa::ModuleCodegen;
use rustc_codegen_utils::codegen_backend::CodegenBackend;
use rustc_data_structures::sync::Lrc;

mod back {
pub mod archive;
Expand Down Expand Up @@ -271,7 +272,7 @@ impl CodegenBackend for LlvmCodegenBackend {
fn join_codegen_and_link(
&self,
ongoing_codegen: Box<dyn Any>,
sess: &Session,
sess: &Lrc<Session>,
dep_graph: &DepGraph,
outputs: &OutputFilenames,
) -> Result<(), ErrorReported> {
Expand All @@ -298,9 +299,11 @@ impl CodegenBackend for LlvmCodegenBackend {
return Ok(());
}

let codegen_results = Lrc::new(codegen_results);

// Run the linker on any artifacts that resulted from the LLVM run.
// This should produce either a finished executable or library.
sess.time("link_crate", || {
let cleanup_future = sess.time("link_crate", || {
use crate::back::archive::LlvmArchiveBuilder;
use rustc_codegen_ssa::back::link::link_binary;

Expand All @@ -311,13 +314,16 @@ impl CodegenBackend for LlvmCodegenBackend {
outputs,
&codegen_results.crate_name.as_str(),
target_cpu,
);
)
});

// Now that we won't touch anything in the incremental compilation directory
// any more, we can finalize it (which involves renaming it)
rustc_incremental::finalize_session_directory(sess, codegen_results.crate_hash);

// Join the cleanup future so it drops the `Lrc<Session>` it holds.
cleanup_future.join();

Ok(())
}
}
Expand Down
58 changes: 33 additions & 25 deletions src/librustc_codegen_ssa/back/link.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ use rustc::session::search_paths::PathKind;
/// need out of the shared crate context before we get rid of it.
use rustc::session::{filesearch, Session};
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::future::Future;
use rustc_data_structures::sync::Lrc;
use rustc_fs_util::fix_windows_verbatim_for_gcc;
use rustc_hir::def_id::CrateNum;
use rustc_span::symbol::Symbol;
Expand Down Expand Up @@ -47,12 +49,12 @@ pub fn remove(sess: &Session, path: &Path) {
/// Performs the linkage portion of the compilation phase. This will generate all
/// of the requested outputs for this compilation session.
pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
sess: &'a Session,
codegen_results: &CodegenResults,
sess: &'a Lrc<Session>,
codegen_results: &Lrc<CodegenResults>,
outputs: &OutputFilenames,
crate_name: &str,
target_cpu: &str,
) {
) -> Future<'static, ()> {
let _timer = sess.timer("link_binary");
let output_metadata = sess.opts.output_types.contains_key(&OutputType::Metadata);
for &crate_type in sess.crate_types.borrow().iter() {
Expand Down Expand Up @@ -122,34 +124,40 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
}

// Remove the temporary object file and metadata if we aren't saving temps
sess.time("link_binary_remove_temps", || {
if !sess.opts.cg.save_temps {
if sess.opts.output_types.should_codegen()
&& !preserve_objects_for_their_debuginfo(sess)
{
for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) {
remove(sess, obj);
let sess = sess.clone();
let codegen_results = codegen_results.clone();
Future::spawn(move || {
let sess = &*sess;
sess.time("link_binary_remove_temps", || {
if !sess.opts.cg.save_temps {
if sess.opts.output_types.should_codegen()
&& !preserve_objects_for_their_debuginfo(sess)
{
for obj in codegen_results.modules.iter().filter_map(|m| m.object.as_ref()) {
remove(sess, obj);
}
}
}
for obj in codegen_results.modules.iter().filter_map(|m| m.bytecode_compressed.as_ref())
{
remove(sess, obj);
}
if let Some(ref metadata_module) = codegen_results.metadata_module {
if let Some(ref obj) = metadata_module.object {
for obj in
codegen_results.modules.iter().filter_map(|m| m.bytecode_compressed.as_ref())
{
remove(sess, obj);
}
}
if let Some(ref allocator_module) = codegen_results.allocator_module {
if let Some(ref obj) = allocator_module.object {
remove(sess, obj);
if let Some(ref metadata_module) = codegen_results.metadata_module {
if let Some(ref obj) = metadata_module.object {
remove(sess, obj);
}
}
if let Some(ref bc) = allocator_module.bytecode_compressed {
remove(sess, bc);
if let Some(ref allocator_module) = codegen_results.allocator_module {
if let Some(ref obj) = allocator_module.object {
remove(sess, obj);
}
if let Some(ref bc) = allocator_module.bytecode_compressed {
remove(sess, bc);
}
}
}
}
});
});
})
}

// The third parameter is for env vars, used on windows to set up the
Expand Down
61 changes: 38 additions & 23 deletions src/librustc_codegen_ssa/back/write.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::profiling::VerboseTimingGuard;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::sync::{par_partition, Lrc};
use rustc_errors::emitter::Emitter;
use rustc_errors::{DiagnosticId, FatalError, Handler, Level};
use rustc_fs_util::link_or_copy;
Expand Down Expand Up @@ -239,7 +239,7 @@ pub struct CodegenContext<B: WriteBackendMethods> {
pub worker: usize,
// The incremental compilation session directory, or None if we are not
// compiling incrementally
pub incr_comp_session_dir: Option<PathBuf>,
pub incr_comp_session_dir: Option<Arc<PathBuf>>,
// Used to update CGU re-use information during the thinlto phase.
pub cgu_reuse_tracker: CguReuseTracker,
// Channel back to the main control thread to send messages to
Expand Down Expand Up @@ -473,32 +473,47 @@ fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
sess: &Session,
compiled_modules: &CompiledModules,
) -> FxHashMap<WorkProductId, WorkProduct> {
let mut work_products = FxHashMap::default();

if sess.opts.incremental.is_none() {
return work_products;
return FxHashMap::default();
}

let _timer = sess.timer("incr_comp_copy_cgu_workproducts");
let _timer = sess.timer("incr_comp_copy_all_cgu_workproducts");

for module in compiled_modules.modules.iter().filter(|m| m.kind == ModuleKind::Regular) {
let mut files = vec![];
let session_dir = sess.incr_comp_session_dir();

if let Some(ref path) = module.object {
files.push((WorkProductFileKind::Object, path.clone()));
}
if let Some(ref path) = module.bytecode {
files.push((WorkProductFileKind::Bytecode, path.clone()));
}
if let Some(ref path) = module.bytecode_compressed {
files.push((WorkProductFileKind::BytecodeCompressed, path.clone()));
}
// Split the modules into 3 parts, which limits usage to 3 threads.
// That seems to be all Windows' file system can handle.
let work_product_chunks = par_partition(&compiled_modules.modules, 3, |chunk| {
chunk
.iter()
.filter(|m| m.kind == ModuleKind::Regular)
.filter_map(|module| {
let mut files = vec![];

if let Some((id, product)) =
copy_cgu_workproducts_to_incr_comp_cache_dir(sess, &module.name, &files)
{
work_products.insert(id, product);
}
if let Some(ref path) = module.object {
files.push((WorkProductFileKind::Object, path.clone()));
}
if let Some(ref path) = module.bytecode {
files.push((WorkProductFileKind::Bytecode, path.clone()));
}
if let Some(ref path) = module.bytecode_compressed {
files.push((WorkProductFileKind::BytecodeCompressed, path.clone()));
}

copy_cgu_workproducts_to_incr_comp_cache_dir(
sess,
&session_dir,
&module.name,
&files,
)
})
.collect::<Vec<_>>()
});

let mut work_products = FxHashMap::default();

for (id, product) in work_product_chunks.into_iter().flat_map(|chunk| chunk.into_iter()) {
work_products.insert(id, product);
}

work_products
Expand Down Expand Up @@ -1029,7 +1044,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
exported_symbols,
remark: sess.opts.cg.remark.clone(),
worker: 0,
incr_comp_session_dir: sess.incr_comp_session_dir_opt().map(|r| r.clone()),
incr_comp_session_dir: sess.incr_comp_session_dir_opt(),
cgu_reuse_tracker: sess.cgu_reuse_tracker.clone(),
coordinator_send,
diag_emitter: shared_emitter.clone(),
Expand Down
Loading