From 6bfaf3a9cb7b601e3c4ed2e661ed213b8bc4d639 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Tue, 2 Mar 2021 22:38:49 +0100
Subject: [PATCH 01/11] Stream the dep-graph to a file.

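Rather than accumulating the whole dependency graph in memory (the
`DepNodeData` / `CompressedHybridIndex` machinery) and serializing it
at the end of the session, the graph is now streamed to disk as it is
built. `build_dep_graph` opens a staging file (`dep-graph.part.bin`),
writes the file header and the dep-tracking hash, and hands the
`FileEncoder` to `DepGraph::new`. In `save_dep_graph`, the encoder is
finalized and the staging file is renamed to `dep-graph.bin`, so a
failed or interrupted write never clobbers the previous graph.

`DepGraph::query` is replaced by `with_query`, backed by a
`DepGraphQuery` that the encoder records when `-Z query-dep-graph` is
enabled. `assert_dep_graph` is now invoked from `save_dep_graph`
(before encoding) rather than from `start_codegen`, alongside
`check_dirty_clean_annotations`, and the final `serialize_dep_graph`
step is driven from `rustc_interface::Queries`.

The sketch below shows the staging-then-rename pattern in isolation
(plain Rust with illustrative file names and a toy record format, not
rustc's actual encoder):

    use std::fs::{self, File};
    use std::io::{BufWriter, Result, Write};

    fn main() -> Result<()> {
        // Illustrative names; rustc keeps these in the incremental
        // compilation session directory.
        let staging = "dep-graph.part.bin";
        let final_path = "dep-graph.bin";

        // Stream records to the staging file as they are produced,
        // instead of building everything up in memory first.
        let mut out = BufWriter::new(File::create(staging)?);
        for node in 0u32..3 {
            out.write_all(&node.to_le_bytes())?;
        }
        out.flush()?;
        drop(out);

        // Rename only once the stream is complete, so an interrupted
        // write never overwrites the previous, still-valid graph.
        fs::rename(staging, final_path)?;
        Ok(())
    }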
---
 .../rustc_incremental/src/assert_dep_graph.rs |  43 +-
 compiler/rustc_incremental/src/lib.rs         |   4 +-
 .../src/persist/dirty_clean.rs                |  24 +-
 compiler/rustc_incremental/src/persist/fs.rs  |   4 +
 .../rustc_incremental/src/persist/load.rs     |   4 +-
 compiler/rustc_incremental/src/persist/mod.rs |   1 +
 .../rustc_incremental/src/persist/save.rs     | 107 +-
 compiler/rustc_interface/src/passes.rs        |   3 -
 compiler/rustc_interface/src/queries.rs       |  11 +-
 compiler/rustc_middle/src/dep_graph/mod.rs    |   5 +-
 compiler/rustc_query_impl/src/plumbing.rs     |   5 +-
 .../rustc_query_system/src/dep_graph/debug.rs |  14 +-
 .../rustc_query_system/src/dep_graph/graph.rs | 922 ++++--------------
 .../rustc_query_system/src/dep_graph/mod.rs   |   3 +-
 .../rustc_query_system/src/dep_graph/query.rs |  35 +-
 .../src/dep_graph/serialized.rs               | 426 ++++++--
 compiler/rustc_query_system/src/lib.rs        |   1 +
 .../rustc_query_system/src/query/plumbing.rs  |  18 +-
 18 files changed, 711 insertions(+), 919 deletions(-)

diff --git a/compiler/rustc_incremental/src/assert_dep_graph.rs b/compiler/rustc_incremental/src/assert_dep_graph.rs
index a080b0ce3395c..b5680beae142d 100644
--- a/compiler/rustc_incremental/src/assert_dep_graph.rs
+++ b/compiler/rustc_incremental/src/assert_dep_graph.rs
@@ -40,8 +40,9 @@ use rustc_graphviz as dot;
 use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
-use rustc_middle::dep_graph::debug::{DepNodeFilter, EdgeFilter};
-use rustc_middle::dep_graph::{DepGraphQuery, DepKind, DepNode, DepNodeExt};
+use rustc_middle::dep_graph::{
+    DepGraphQuery, DepKind, DepNode, DepNodeExt, DepNodeFilter, EdgeFilter,
+};
 use rustc_middle::hir::map::Map;
 use rustc_middle::ty::TyCtxt;
 use rustc_span::symbol::{sym, Symbol};
@@ -54,7 +55,7 @@ use std::io::{BufWriter, Write};
 pub fn assert_dep_graph(tcx: TyCtxt<'_>) {
     tcx.dep_graph.with_ignore(|| {
         if tcx.sess.opts.debugging_opts.dump_dep_graph {
-            dump_graph(tcx);
+            tcx.dep_graph.with_query(dump_graph);
         }
 
         if !tcx.sess.opts.debugging_opts.query_dep_graph {
@@ -200,29 +201,29 @@ fn check_paths<'tcx>(tcx: TyCtxt<'tcx>, if_this_changed: &Sources, then_this_wou
         }
         return;
     }
-    let query = tcx.dep_graph.query();
-    for &(_, source_def_id, ref source_dep_node) in if_this_changed {
-        let dependents = query.transitive_predecessors(source_dep_node);
-        for &(target_span, ref target_pass, _, ref target_dep_node) in then_this_would_need {
-            if !dependents.contains(&target_dep_node) {
-                tcx.sess.span_err(
-                    target_span,
-                    &format!(
-                        "no path from `{}` to `{}`",
-                        tcx.def_path_str(source_def_id),
-                        target_pass
-                    ),
-                );
-            } else {
-                tcx.sess.span_err(target_span, "OK");
+    tcx.dep_graph.with_query(|query| {
+        for &(_, source_def_id, ref source_dep_node) in if_this_changed {
+            let dependents = query.transitive_predecessors(source_dep_node);
+            for &(target_span, ref target_pass, _, ref target_dep_node) in then_this_would_need {
+                if !dependents.contains(&target_dep_node) {
+                    tcx.sess.span_err(
+                        target_span,
+                        &format!(
+                            "no path from `{}` to `{}`",
+                            tcx.def_path_str(source_def_id),
+                            target_pass
+                        ),
+                    );
+                } else {
+                    tcx.sess.span_err(target_span, "OK");
+                }
             }
         }
-    }
+    });
 }
 
-fn dump_graph(tcx: TyCtxt<'_>) {
+fn dump_graph(query: &DepGraphQuery) {
     let path: String = env::var("RUST_DEP_GRAPH").unwrap_or_else(|_| "dep_graph".to_string());
-    let query = tcx.dep_graph.query();
 
     let nodes = match env::var("RUST_DEP_GRAPH_FILTER") {
         Ok(string) => {
diff --git a/compiler/rustc_incremental/src/lib.rs b/compiler/rustc_incremental/src/lib.rs
index 95456c07b10aa..f089cbcfca6e5 100644
--- a/compiler/rustc_incremental/src/lib.rs
+++ b/compiler/rustc_incremental/src/lib.rs
@@ -14,7 +14,7 @@ mod assert_dep_graph;
 pub mod assert_module_sources;
 mod persist;
 
-pub use assert_dep_graph::assert_dep_graph;
+use assert_dep_graph::assert_dep_graph;
 pub use persist::copy_cgu_workproduct_to_incr_comp_cache_dir;
 pub use persist::delete_workproduct_files;
 pub use persist::finalize_session_directory;
@@ -26,4 +26,4 @@ pub use persist::prepare_session_directory;
 pub use persist::save_dep_graph;
 pub use persist::save_work_product_index;
 pub use persist::LoadResult;
-pub use persist::{load_dep_graph, DepGraphFuture};
+pub use persist::{build_dep_graph, load_dep_graph, DepGraphFuture};
diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs
index 91b7221f20552..145c168f8c443 100644
--- a/compiler/rustc_incremental/src/persist/dirty_clean.rs
+++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs
@@ -14,7 +14,6 @@
 //! the required condition is not met.
 
 use rustc_ast::{self as ast, Attribute, NestedMetaItem};
-use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_hir as hir;
 use rustc_hir::def_id::{DefId, LocalDefId};
@@ -381,10 +380,7 @@ impl DirtyCleanVisitor<'tcx> {
     fn assert_dirty(&self, item_span: Span, dep_node: DepNode) {
         debug!("assert_dirty({:?})", dep_node);
 
-        let current_fingerprint = self.get_fingerprint(&dep_node);
-        let prev_fingerprint = self.tcx.dep_graph.prev_fingerprint_of(&dep_node);
-
-        if current_fingerprint == prev_fingerprint {
+        if self.tcx.dep_graph.is_green(&dep_node) {
             let dep_node_str = self.dep_node_str(&dep_node);
             self.tcx
                 .sess
@@ -392,28 +388,12 @@ impl DirtyCleanVisitor<'tcx> {
         }
     }
 
-    fn get_fingerprint(&self, dep_node: &DepNode) -> Option<Fingerprint> {
-        if self.tcx.dep_graph.dep_node_exists(dep_node) {
-            let dep_node_index = self.tcx.dep_graph.dep_node_index_of(dep_node);
-            Some(self.tcx.dep_graph.fingerprint_of(dep_node_index))
-        } else {
-            None
-        }
-    }
-
     fn assert_clean(&self, item_span: Span, dep_node: DepNode) {
         debug!("assert_clean({:?})", dep_node);
 
-        let current_fingerprint = self.get_fingerprint(&dep_node);
-        let prev_fingerprint = self.tcx.dep_graph.prev_fingerprint_of(&dep_node);
-
         // if the node wasn't previously evaluated and now is (or vice versa),
         // then the node isn't actually clean or dirty.
-        if (current_fingerprint == None) ^ (prev_fingerprint == None) {
-            return;
-        }
-
-        if current_fingerprint != prev_fingerprint {
+        if self.tcx.dep_graph.is_red(&dep_node) {
             let dep_node_str = self.dep_node_str(&dep_node);
             self.tcx
                 .sess
diff --git a/compiler/rustc_incremental/src/persist/fs.rs b/compiler/rustc_incremental/src/persist/fs.rs
index c7a6c1195c503..30c6c408bc7c0 100644
--- a/compiler/rustc_incremental/src/persist/fs.rs
+++ b/compiler/rustc_incremental/src/persist/fs.rs
@@ -122,6 +122,7 @@ mod tests;
 
 const LOCK_FILE_EXT: &str = ".lock";
 const DEP_GRAPH_FILENAME: &str = "dep-graph.bin";
+const STAGING_DEP_GRAPH_FILENAME: &str = "dep-graph.part.bin";
 const WORK_PRODUCTS_FILENAME: &str = "work-products.bin";
 const QUERY_CACHE_FILENAME: &str = "query-cache.bin";
 
@@ -134,6 +135,9 @@ const INT_ENCODE_BASE: usize = base_n::CASE_INSENSITIVE;
 pub fn dep_graph_path(sess: &Session) -> PathBuf {
     in_incr_comp_dir_sess(sess, DEP_GRAPH_FILENAME)
 }
+pub fn staging_dep_graph_path(sess: &Session) -> PathBuf {
+    in_incr_comp_dir_sess(sess, STAGING_DEP_GRAPH_FILENAME)
+}
 pub fn dep_graph_path_from(incr_comp_session_dir: &Path) -> PathBuf {
     in_incr_comp_dir(incr_comp_session_dir, DEP_GRAPH_FILENAME)
 }
diff --git a/compiler/rustc_incremental/src/persist/load.rs b/compiler/rustc_incremental/src/persist/load.rs
index 2b5649bb0594f..259e540c6125e 100644
--- a/compiler/rustc_incremental/src/persist/load.rs
+++ b/compiler/rustc_incremental/src/persist/load.rs
@@ -5,7 +5,7 @@ use rustc_hir::definitions::Definitions;
 use rustc_middle::dep_graph::{PreviousDepGraph, SerializedDepGraph, WorkProduct, WorkProductId};
 use rustc_middle::ty::query::OnDiskCache;
 use rustc_serialize::opaque::Decoder;
-use rustc_serialize::Decodable as RustcDecodable;
+use rustc_serialize::Decodable;
 use rustc_session::Session;
 use std::path::Path;
 
@@ -120,7 +120,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
             // Decode the list of work_products
             let mut work_product_decoder = Decoder::new(&work_products_data[..], start_pos);
             let work_products: Vec<SerializedWorkProduct> =
-                RustcDecodable::decode(&mut work_product_decoder).unwrap_or_else(|e| {
+                Decodable::decode(&mut work_product_decoder).unwrap_or_else(|e| {
                     let msg = format!(
                         "Error decoding `work-products` from incremental \
                                     compilation session directory: {}",
diff --git a/compiler/rustc_incremental/src/persist/mod.rs b/compiler/rustc_incremental/src/persist/mod.rs
index 8821b34b50212..1336189bc0d26 100644
--- a/compiler/rustc_incremental/src/persist/mod.rs
+++ b/compiler/rustc_incremental/src/persist/mod.rs
@@ -18,6 +18,7 @@ pub use fs::prepare_session_directory;
 pub use load::load_query_result_cache;
 pub use load::LoadResult;
 pub use load::{load_dep_graph, DepGraphFuture};
+pub use save::build_dep_graph;
 pub use save::save_dep_graph;
 pub use save::save_work_product_index;
 pub use work_product::copy_cgu_workproduct_to_incr_comp_cache_dir;
diff --git a/compiler/rustc_incremental/src/persist/save.rs b/compiler/rustc_incremental/src/persist/save.rs
index 45d474b89b8df..d80397970ac6a 100644
--- a/compiler/rustc_incremental/src/persist/save.rs
+++ b/compiler/rustc_incremental/src/persist/save.rs
@@ -1,6 +1,6 @@
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::join;
-use rustc_middle::dep_graph::{DepGraph, WorkProduct, WorkProductId};
+use rustc_middle::dep_graph::{DepGraph, PreviousDepGraph, WorkProduct, WorkProductId};
 use rustc_middle::ty::TyCtxt;
 use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
 use rustc_serialize::Encodable as RustcEncodable;
@@ -15,6 +15,9 @@ use super::file_format;
 use super::fs::*;
 use super::work_product;
 
+/// Save and dump the DepGraph.
+///
+/// No queries must be invoked after this function has run.
 pub fn save_dep_graph(tcx: TyCtxt<'_>) {
     debug!("save_dep_graph()");
     tcx.dep_graph.with_ignore(|| {
@@ -29,6 +32,16 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
 
         let query_cache_path = query_cache_path(sess);
         let dep_graph_path = dep_graph_path(sess);
+        let staging_dep_graph_path = staging_dep_graph_path(sess);
+
+        join(
+            || sess.time("assert_dep_graph", || crate::assert_dep_graph(tcx)),
+            || sess.time("check_dirty_clean", || dirty_clean::check_dirty_clean_annotations(tcx)),
+        );
+
+        if sess.opts.debugging_opts.incremental_info {
+            tcx.dep_graph.print_incremental_info()
+        }
 
         join(
             move || {
@@ -36,16 +49,26 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
                     save_in(sess, query_cache_path, "query cache", |e| encode_query_cache(tcx, e));
                 });
             },
-            || {
+            move || {
                 sess.time("incr_comp_persist_dep_graph", || {
-                    save_in(sess, dep_graph_path, "dependency graph", |e| {
-                        sess.time("incr_comp_encode_dep_graph", || encode_dep_graph(tcx, e))
-                    });
+                    if let Err(err) = tcx.dep_graph.encode() {
+                        sess.err(&format!(
+                            "failed to write dependency graph to `{}`: {}",
+                            staging_dep_graph_path.display(),
+                            err
+                        ));
+                    }
+                    if let Err(err) = fs::rename(&staging_dep_graph_path, &dep_graph_path) {
+                        sess.err(&format!(
+                            "failed to move dependency graph from `{}` to `{}`: {}",
+                            staging_dep_graph_path.display(),
+                            dep_graph_path.display(),
+                            err
+                        ));
+                    }
                 });
             },
         );
-
-        dirty_clean::check_dirty_clean_annotations(tcx);
     })
 }
 
@@ -92,7 +115,7 @@ pub fn save_work_product_index(
     });
 }
 
-fn save_in<F>(sess: &Session, path_buf: PathBuf, name: &str, encode: F)
+pub(crate) fn save_in<F>(sess: &Session, path_buf: PathBuf, name: &str, encode: F)
 where
     F: FnOnce(&mut FileEncoder) -> FileEncodeResult,
 {
@@ -144,21 +167,6 @@ where
     debug!("save: data written to disk successfully");
 }
 
-fn encode_dep_graph(tcx: TyCtxt<'_>, encoder: &mut FileEncoder) -> FileEncodeResult {
-    // First encode the commandline arguments hash
-    tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
-
-    if tcx.sess.opts.debugging_opts.incremental_info {
-        tcx.dep_graph.print_incremental_info();
-    }
-
-    // There is a tiny window between printing the incremental info above and encoding the dep
-    // graph below in which the dep graph could change, thus making the printed incremental info
-    // slightly out of date. If this matters to you, please feel free to submit a patch. :)
-
-    tcx.sess.time("incr_comp_encode_serialized_dep_graph", || tcx.dep_graph.encode(encoder))
-}
-
 fn encode_work_product_index(
     work_products: &FxHashMap<WorkProductId, WorkProduct>,
     encoder: &mut FileEncoder,
@@ -177,3 +185,56 @@ fn encode_work_product_index(
 fn encode_query_cache(tcx: TyCtxt<'_>, encoder: &mut FileEncoder) -> FileEncodeResult {
     tcx.sess.time("incr_comp_serialize_result_cache", || tcx.serialize_query_result_cache(encoder))
 }
+
+pub fn build_dep_graph(
+    sess: &Session,
+    prev_graph: PreviousDepGraph,
+    prev_work_products: FxHashMap<WorkProductId, WorkProduct>,
+) -> Option<DepGraph> {
+    if sess.opts.incremental.is_none() {
+        // No incremental compilation.
+        return None;
+    }
+
+    // Stream the dep-graph to an alternate file, to avoid overwriting anything in case of errors.
+    let path_buf = staging_dep_graph_path(sess);
+
+    let mut encoder = match FileEncoder::new(&path_buf) {
+        Ok(encoder) => encoder,
+        Err(err) => {
+            sess.err(&format!(
+                "failed to create dependency graph at `{}`: {}",
+                path_buf.display(),
+                err
+            ));
+            return None;
+        }
+    };
+
+    if let Err(err) = file_format::write_file_header(&mut encoder, sess.is_nightly_build()) {
+        sess.err(&format!(
+            "failed to write dependency graph header to `{}`: {}",
+            path_buf.display(),
+            err
+        ));
+        return None;
+    }
+
+    // First encode the commandline arguments hash
+    if let Err(err) = sess.opts.dep_tracking_hash().encode(&mut encoder) {
+        sess.err(&format!(
+            "failed to write dependency graph hash `{}`: {}",
+            path_buf.display(),
+            err
+        ));
+        return None;
+    }
+
+    Some(DepGraph::new(
+        prev_graph,
+        prev_work_products,
+        encoder,
+        sess.opts.debugging_opts.query_dep_graph,
+        sess.opts.debugging_opts.incremental_info,
+    ))
+}
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs
index 87d002878289f..c693155994f56 100644
--- a/compiler/rustc_interface/src/passes.rs
+++ b/compiler/rustc_interface/src/passes.rs
@@ -1021,9 +1021,6 @@ pub fn start_codegen<'tcx>(
         rustc_symbol_mangling::test::report_symbol_names(tcx);
     }
 
-    tcx.sess.time("assert_dep_graph", || rustc_incremental::assert_dep_graph(tcx));
-    tcx.sess.time("serialize_dep_graph", || rustc_incremental::save_dep_graph(tcx));
-
     info!("Post-codegen\n{:?}", tcx.debug_stats());
 
     if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs
index 9c38d2b91ab31..01853eab530da 100644
--- a/compiler/rustc_interface/src/queries.rs
+++ b/compiler/rustc_interface/src/queries.rs
@@ -207,7 +207,13 @@ impl<'tcx> Queries<'tcx> {
                                 })
                                 .open(self.session())
                         });
-                    DepGraph::new(prev_graph, prev_work_products)
+
+                    rustc_incremental::build_dep_graph(
+                        self.session(),
+                        prev_graph,
+                        prev_work_products,
+                    )
+                    .unwrap_or_else(DepGraph::new_disabled)
                 }
             })
         })
@@ -435,6 +441,9 @@ impl Compiler {
             if self.session().opts.debugging_opts.query_stats {
                 gcx.enter(rustc_query_impl::print_stats);
             }
+
+            self.session()
+                .time("serialize_dep_graph", || gcx.enter(rustc_incremental::save_dep_graph));
         }
 
         _timer = Some(self.session().timer("free_global_ctxt"));
diff --git a/compiler/rustc_middle/src/dep_graph/mod.rs b/compiler/rustc_middle/src/dep_graph/mod.rs
index c688b23be1d02..d2fe9af34fb62 100644
--- a/compiler/rustc_middle/src/dep_graph/mod.rs
+++ b/compiler/rustc_middle/src/dep_graph/mod.rs
@@ -8,8 +8,8 @@ use rustc_session::Session;
 mod dep_node;
 
 pub use rustc_query_system::dep_graph::{
-    debug, hash_result, DepContext, DepNodeColor, DepNodeIndex, SerializedDepNodeIndex,
-    WorkProduct, WorkProductId,
+    debug::DepNodeFilter, hash_result, DepContext, DepNodeColor, DepNodeIndex,
+    SerializedDepNodeIndex, WorkProduct, WorkProductId,
 };
 
 crate use dep_node::make_compile_codegen_unit;
@@ -20,6 +20,7 @@ pub type TaskDeps = rustc_query_system::dep_graph::TaskDeps<DepKind>;
 pub type DepGraphQuery = rustc_query_system::dep_graph::DepGraphQuery<DepKind>;
 pub type PreviousDepGraph = rustc_query_system::dep_graph::PreviousDepGraph<DepKind>;
 pub type SerializedDepGraph = rustc_query_system::dep_graph::SerializedDepGraph<DepKind>;
+pub type EdgeFilter = rustc_query_system::dep_graph::debug::EdgeFilter<DepKind>;
 
 impl rustc_query_system::dep_graph::DepKind for DepKind {
     const NULL: Self = DepKind::Null;
diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs
index d958b3c18cdcd..4194b28dc7d68 100644
--- a/compiler/rustc_query_impl/src/plumbing.rs
+++ b/compiler/rustc_query_impl/src/plumbing.rs
@@ -477,10 +477,7 @@ macro_rules! define_queries {
                         return
                     }
 
-                    debug_assert!(tcx.dep_graph
-                                     .node_color(dep_node)
-                                     .map(|c| c.is_green())
-                                     .unwrap_or(false));
+                    debug_assert!(tcx.dep_graph.is_green(dep_node));
 
                     let key = recover(*tcx, dep_node).unwrap_or_else(|| panic!("Failed to recover key for {:?} with hash {}", dep_node, dep_node.hash));
                     if queries::$name::cache_on_disk(tcx, &key, None) {
diff --git a/compiler/rustc_query_system/src/dep_graph/debug.rs b/compiler/rustc_query_system/src/dep_graph/debug.rs
index 43429cd11a2b7..a544ac2c343ae 100644
--- a/compiler/rustc_query_system/src/dep_graph/debug.rs
+++ b/compiler/rustc_query_system/src/dep_graph/debug.rs
@@ -1,6 +1,8 @@
 //! Code for debugging the dep-graph.
 
-use super::{DepKind, DepNode};
+use super::{DepKind, DepNode, DepNodeIndex};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lock;
 use std::error::Error;
 
 /// A dep-node filter goes from a user-defined string to a query over
@@ -34,13 +36,14 @@ impl DepNodeFilter {
 
 /// A filter like `F -> G` where `F` and `G` are valid dep-node
 /// filters. This can be used to test the source/target independently.
-pub struct EdgeFilter {
+pub struct EdgeFilter<K: DepKind> {
     pub source: DepNodeFilter,
     pub target: DepNodeFilter,
+    pub index_to_node: Lock<FxHashMap<DepNodeIndex, DepNode<K>>>,
 }
 
-impl EdgeFilter {
-    pub fn new(test: &str) -> Result<EdgeFilter, Box<dyn Error>> {
+impl<K: DepKind> EdgeFilter<K> {
+    pub fn new(test: &str) -> Result<EdgeFilter<K>, Box<dyn Error>> {
         let parts: Vec<_> = test.split("->").collect();
         if parts.len() != 2 {
             Err(format!("expected a filter like `a&b -> c&d`, not `{}`", test).into())
@@ -48,12 +51,13 @@ impl EdgeFilter {
             Ok(EdgeFilter {
                 source: DepNodeFilter::new(parts[0]),
                 target: DepNodeFilter::new(parts[1]),
+                index_to_node: Lock::new(FxHashMap::default()),
             })
         }
     }
 
     #[cfg(debug_assertions)]
-    pub fn test<K: DepKind>(&self, source: &DepNode<K>, target: &DepNode<K>) -> bool {
+    pub fn test(&self, source: &DepNode<K>, target: &DepNode<K>) -> bool {
         self.source.test(source) && self.target.test(target)
     }
 }
diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs
index 9fe2497a57bda..295b2a97e4cf4 100644
--- a/compiler/rustc_query_system/src/dep_graph/graph.rs
+++ b/compiler/rustc_query_system/src/dep_graph/graph.rs
@@ -3,29 +3,30 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::profiling::QueryInvocationId;
 use rustc_data_structures::sharded::{self, Sharded};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_data_structures::sync::{AtomicU32, AtomicU64, Lock, LockGuard, Lrc, Ordering};
+use rustc_data_structures::steal::Steal;
+use rustc_data_structures::sync::{AtomicU32, AtomicU64, Lock, Lrc, Ordering};
 use rustc_data_structures::unlikely;
 use rustc_errors::Diagnostic;
-use rustc_index::vec::{Idx, IndexVec};
-use rustc_serialize::{Encodable, Encoder};
+use rustc_index::vec::IndexVec;
+use rustc_serialize::opaque::{FileEncodeResult, FileEncoder};
 
 use parking_lot::{Condvar, Mutex};
 use smallvec::{smallvec, SmallVec};
 use std::collections::hash_map::Entry;
-use std::env;
 use std::hash::Hash;
 use std::marker::PhantomData;
 use std::mem;
-use std::ops::Range;
 use std::sync::atomic::Ordering::Relaxed;
 
-use super::debug::EdgeFilter;
 use super::prev::PreviousDepGraph;
 use super::query::DepGraphQuery;
-use super::serialized::SerializedDepNodeIndex;
+use super::serialized::{GraphEncoder, SerializedDepNodeIndex};
 use super::{DepContext, DepKind, DepNode, HasDepContext, WorkProductId};
 use crate::query::QueryContext;
 
+#[cfg(debug_assertions)]
+use {super::debug::EdgeFilter, std::env};
+
 #[derive(Clone)]
 pub struct DepGraph<K: DepKind> {
     data: Option<Lrc<DepGraphData<K>>>,
@@ -109,6 +110,9 @@ impl<K: DepKind> DepGraph<K> {
     pub fn new(
         prev_graph: PreviousDepGraph<K>,
         prev_work_products: FxHashMap<WorkProductId, WorkProduct>,
+        encoder: FileEncoder,
+        record_graph: bool,
+        record_stats: bool,
     ) -> DepGraph<K> {
         let prev_graph_node_count = prev_graph.node_count();
 
@@ -116,7 +120,12 @@ impl<K: DepKind> DepGraph<K> {
             data: Some(Lrc::new(DepGraphData {
                 previous_work_products: prev_work_products,
                 dep_node_debug: Default::default(),
-                current: CurrentDepGraph::new(prev_graph_node_count),
+                current: CurrentDepGraph::new(
+                    prev_graph_node_count,
+                    encoder,
+                    record_graph,
+                    record_stats,
+                ),
                 emitting_diagnostics: Default::default(),
                 emitting_diagnostics_cond_var: Condvar::new(),
                 previous: prev_graph,
@@ -136,62 +145,10 @@ impl<K: DepKind> DepGraph<K> {
         self.data.is_some()
     }
 
-    pub fn query(&self) -> DepGraphQuery<K> {
-        let data = self.data.as_ref().unwrap();
-        let previous = &data.previous;
-
-        // Note locking order: `prev_index_to_index`, then `data`.
-        let prev_index_to_index = data.current.prev_index_to_index.lock();
-        let data = data.current.data.lock();
-        let node_count = data.hybrid_indices.len();
-        let edge_count = self.edge_count(&data);
-
-        let mut nodes = Vec::with_capacity(node_count);
-        let mut edge_list_indices = Vec::with_capacity(node_count);
-        let mut edge_list_data = Vec::with_capacity(edge_count);
-
-        // See `DepGraph`'s `Encodable` implementation for notes on the approach used here.
-
-        edge_list_data.extend(data.unshared_edges.iter().map(|i| i.index()));
-
-        for &hybrid_index in data.hybrid_indices.iter() {
-            match hybrid_index.into() {
-                HybridIndex::New(new_index) => {
-                    nodes.push(data.new.nodes[new_index]);
-                    let edges = &data.new.edges[new_index];
-                    edge_list_indices.push((edges.start.index(), edges.end.index()));
-                }
-                HybridIndex::Red(red_index) => {
-                    nodes.push(previous.index_to_node(data.red.node_indices[red_index]));
-                    let edges = &data.red.edges[red_index];
-                    edge_list_indices.push((edges.start.index(), edges.end.index()));
-                }
-                HybridIndex::LightGreen(lg_index) => {
-                    nodes.push(previous.index_to_node(data.light_green.node_indices[lg_index]));
-                    let edges = &data.light_green.edges[lg_index];
-                    edge_list_indices.push((edges.start.index(), edges.end.index()));
-                }
-                HybridIndex::DarkGreen(prev_index) => {
-                    nodes.push(previous.index_to_node(prev_index));
-
-                    let edges_iter = previous
-                        .edge_targets_from(prev_index)
-                        .iter()
-                        .map(|&dst| prev_index_to_index[dst].unwrap().index());
-
-                    let start = edge_list_data.len();
-                    edge_list_data.extend(edges_iter);
-                    let end = edge_list_data.len();
-                    edge_list_indices.push((start, end));
-                }
-            }
+    pub fn with_query(&self, f: impl Fn(&DepGraphQuery<K>)) {
+        if let Some(data) = &self.data {
+            data.current.encoder.borrow().with_query(f)
         }
-
-        debug_assert_eq!(nodes.len(), node_count);
-        debug_assert_eq!(edge_list_indices.len(), node_count);
-        debug_assert_eq!(edge_list_data.len(), edge_count);
-
-        DepGraphQuery::new(&nodes[..], &edge_list_indices[..], &edge_list_data[..])
     }
 
     pub fn assert_ignored(&self) {
@@ -283,56 +240,15 @@ impl<K: DepKind> DepGraph<K> {
             let print_status = cfg!(debug_assertions) && dcx.sess().opts.debugging_opts.dep_tasks;
 
             // Intern the new `DepNode`.
-            let dep_node_index = if let Some(prev_index) = data.previous.node_to_index_opt(&key) {
-                // Determine the color and index of the new `DepNode`.
-                let (color, dep_node_index) = if let Some(current_fingerprint) = current_fingerprint
-                {
-                    if current_fingerprint == data.previous.fingerprint_by_index(prev_index) {
-                        if print_status {
-                            eprintln!("[task::green] {:?}", key);
-                        }
-
-                        // This is a light green node: it existed in the previous compilation,
-                        // its query was re-executed, and it has the same result as before.
-                        let dep_node_index =
-                            data.current.intern_light_green_node(&data.previous, prev_index, edges);
-
-                        (DepNodeColor::Green(dep_node_index), dep_node_index)
-                    } else {
-                        if print_status {
-                            eprintln!("[task::red] {:?}", key);
-                        }
-
-                        // This is a red node: it existed in the previous compilation, its query
-                        // was re-executed, but it has a different result from before.
-                        let dep_node_index = data.current.intern_red_node(
-                            &data.previous,
-                            prev_index,
-                            edges,
-                            current_fingerprint,
-                        );
-
-                        (DepNodeColor::Red, dep_node_index)
-                    }
-                } else {
-                    if print_status {
-                        eprintln!("[task::unknown] {:?}", key);
-                    }
-
-                    // This is a red node, effectively: it existed in the previous compilation
-                    // session, its query was re-executed, but it doesn't compute a result hash
-                    // (i.e. it represents a `no_hash` query), so we have no way of determining
-                    // whether or not the result was the same as before.
-                    let dep_node_index = data.current.intern_red_node(
-                        &data.previous,
-                        prev_index,
-                        edges,
-                        Fingerprint::ZERO,
-                    );
-
-                    (DepNodeColor::Red, dep_node_index)
-                };
+            let (dep_node_index, prev_and_color) = data.current.intern_node(
+                &data.previous,
+                key,
+                edges,
+                current_fingerprint,
+                print_status,
+            );
 
+            if let Some((prev_index, color)) = prev_and_color {
                 debug_assert!(
                     data.colors.get(prev_index).is_none(),
                     "DepGraph::with_task() - Duplicate DepNodeColor \
@@ -341,20 +257,7 @@ impl<K: DepKind> DepGraph<K> {
                 );
 
                 data.colors.insert(prev_index, color);
-                dep_node_index
-            } else {
-                if print_status {
-                    eprintln!("[task::new] {:?}", key);
-                }
-
-                // This is a new node: it didn't exist in the previous compilation session.
-                data.current.intern_new_node(
-                    &data.previous,
-                    key,
-                    edges,
-                    current_fingerprint.unwrap_or(Fingerprint::ZERO),
-                )
-            };
+            }
 
             (result, dep_node_index)
         } else {
@@ -395,12 +298,8 @@ impl<K: DepKind> DepGraph<K> {
                 hash: data.current.anon_id_seed.combine(hasher.finish()).into(),
             };
 
-            let dep_node_index = data.current.intern_new_node(
-                &data.previous,
-                target_dep_node,
-                task_deps.reads,
-                Fingerprint::ZERO,
-            );
+            let dep_node_index =
+                data.current.intern_new_node(target_dep_node, task_deps.reads, Fingerprint::ZERO);
 
             (result, dep_node_index)
         } else {
@@ -451,7 +350,7 @@ impl<K: DepKind> DepGraph<K> {
                         {
                             if let Some(target) = task_deps.node {
                                 if let Some(ref forbidden_edge) = data.current.forbidden_edge {
-                                    let src = self.dep_node_of(dep_node_index);
+                                    let src = forbidden_edge.index_to_node.lock()[&dep_node_index];
                                     if forbidden_edge.test(&src, &target) {
                                         panic!("forbidden edge {:?} -> {:?} created", src, target)
                                     }
@@ -488,38 +387,6 @@ impl<K: DepKind> DepGraph<K> {
         self.data.is_some() && self.dep_node_index_of_opt(dep_node).is_some()
     }
 
-    #[cfg(debug_assertions)]
-    fn dep_node_of(&self, dep_node_index: DepNodeIndex) -> DepNode<K> {
-        let data = self.data.as_ref().unwrap();
-        let previous = &data.previous;
-        let data = data.current.data.lock();
-
-        match data.hybrid_indices[dep_node_index].into() {
-            HybridIndex::New(new_index) => data.new.nodes[new_index],
-            HybridIndex::Red(red_index) => previous.index_to_node(data.red.node_indices[red_index]),
-            HybridIndex::LightGreen(light_green_index) => {
-                previous.index_to_node(data.light_green.node_indices[light_green_index])
-            }
-            HybridIndex::DarkGreen(prev_index) => previous.index_to_node(prev_index),
-        }
-    }
-
-    #[inline]
-    pub fn fingerprint_of(&self, dep_node_index: DepNodeIndex) -> Fingerprint {
-        let data = self.data.as_ref().unwrap();
-        let previous = &data.previous;
-        let data = data.current.data.lock();
-
-        match data.hybrid_indices[dep_node_index].into() {
-            HybridIndex::New(new_index) => data.new.fingerprints[new_index],
-            HybridIndex::Red(red_index) => data.red.fingerprints[red_index],
-            HybridIndex::LightGreen(light_green_index) => {
-                previous.fingerprint_by_index(data.light_green.node_indices[light_green_index])
-            }
-            HybridIndex::DarkGreen(prev_index) => previous.fingerprint_by_index(prev_index),
-        }
-    }
-
     pub fn prev_fingerprint_of(&self, dep_node: &DepNode<K>) -> Option<Fingerprint> {
         self.data.as_ref().unwrap().previous.fingerprint_of(dep_node)
     }
@@ -554,29 +421,13 @@ impl<K: DepKind> DepGraph<K> {
         self.data.as_ref()?.dep_node_debug.borrow().get(&dep_node).cloned()
     }
 
-    fn edge_count(&self, node_data: &LockGuard<'_, DepNodeData<K>>) -> usize {
-        let data = self.data.as_ref().unwrap();
-        let previous = &data.previous;
-
-        let mut edge_count = node_data.unshared_edges.len();
-
-        for &hybrid_index in node_data.hybrid_indices.iter() {
-            if let HybridIndex::DarkGreen(prev_index) = hybrid_index.into() {
-                edge_count += previous.edge_targets_from(prev_index).len()
-            }
-        }
-
-        edge_count
-    }
-
-    pub fn node_color(&self, dep_node: &DepNode<K>) -> Option<DepNodeColor> {
+    fn node_color(&self, dep_node: &DepNode<K>) -> Option<DepNodeColor> {
         if let Some(ref data) = self.data {
             if let Some(prev_index) = data.previous.node_to_index_opt(dep_node) {
                 return data.colors.get(prev_index);
             } else {
-                // This is a node that did not exist in the previous compilation
-                // session, so we consider it to be red.
-                return Some(DepNodeColor::Red);
+                // This is a node that did not exist in the previous compilation session.
+                return None;
             }
         }
 
@@ -862,6 +713,12 @@ impl<K: DepKind> DepGraph<K> {
         }
     }
 
+    // Returns true if the given node has been marked as red during the
+    // current compilation session. Used in various assertions
+    pub fn is_red(&self, dep_node: &DepNode<K>) -> bool {
+        self.node_color(dep_node) == Some(DepNodeColor::Red)
+    }
+
     // Returns true if the given node has been marked as green during the
     // current compilation session. Used in various assertions
     pub fn is_green(&self, dep_node: &DepNode<K>) -> bool {
@@ -911,106 +768,16 @@ impl<K: DepKind> DepGraph<K> {
     }
 
     pub fn print_incremental_info(&self) {
-        #[derive(Clone)]
-        struct Stat<Kind: DepKind> {
-            kind: Kind,
-            node_counter: u64,
-            edge_counter: u64,
-        }
-
-        let data = self.data.as_ref().unwrap();
-        let prev = &data.previous;
-        let current = &data.current;
-        let data = current.data.lock();
-
-        let mut stats: FxHashMap<_, Stat<K>> = FxHashMap::with_hasher(Default::default());
-
-        for &hybrid_index in data.hybrid_indices.iter() {
-            let (kind, edge_count) = match hybrid_index.into() {
-                HybridIndex::New(new_index) => {
-                    let kind = data.new.nodes[new_index].kind;
-                    let edge_range = &data.new.edges[new_index];
-                    (kind, edge_range.end.as_usize() - edge_range.start.as_usize())
-                }
-                HybridIndex::Red(red_index) => {
-                    let kind = prev.index_to_node(data.red.node_indices[red_index]).kind;
-                    let edge_range = &data.red.edges[red_index];
-                    (kind, edge_range.end.as_usize() - edge_range.start.as_usize())
-                }
-                HybridIndex::LightGreen(lg_index) => {
-                    let kind = prev.index_to_node(data.light_green.node_indices[lg_index]).kind;
-                    let edge_range = &data.light_green.edges[lg_index];
-                    (kind, edge_range.end.as_usize() - edge_range.start.as_usize())
-                }
-                HybridIndex::DarkGreen(prev_index) => {
-                    let kind = prev.index_to_node(prev_index).kind;
-                    let edge_count = prev.edge_targets_from(prev_index).len();
-                    (kind, edge_count)
-                }
-            };
-
-            let stat = stats.entry(kind).or_insert(Stat { kind, node_counter: 0, edge_counter: 0 });
-            stat.node_counter += 1;
-            stat.edge_counter += edge_count as u64;
-        }
-
-        let total_node_count = data.hybrid_indices.len();
-        let total_edge_count = self.edge_count(&data);
-
-        // Drop the lock guard.
-        std::mem::drop(data);
-
-        let mut stats: Vec<_> = stats.values().cloned().collect();
-        stats.sort_by_key(|s| -(s.node_counter as i64));
-
-        const SEPARATOR: &str = "[incremental] --------------------------------\
-                                 ----------------------------------------------\
-                                 ------------";
-
-        eprintln!("[incremental]");
-        eprintln!("[incremental] DepGraph Statistics");
-        eprintln!("{}", SEPARATOR);
-        eprintln!("[incremental]");
-        eprintln!("[incremental] Total Node Count: {}", total_node_count);
-        eprintln!("[incremental] Total Edge Count: {}", total_edge_count);
-
-        if cfg!(debug_assertions) {
-            let total_edge_reads = current.total_read_count.load(Relaxed);
-            let total_duplicate_edge_reads = current.total_duplicate_read_count.load(Relaxed);
-
-            eprintln!("[incremental] Total Edge Reads: {}", total_edge_reads);
-            eprintln!("[incremental] Total Duplicate Edge Reads: {}", total_duplicate_edge_reads);
-        }
-
-        eprintln!("[incremental]");
-
-        eprintln!(
-            "[incremental]  {:<36}| {:<17}| {:<12}| {:<17}|",
-            "Node Kind", "Node Frequency", "Node Count", "Avg. Edge Count"
-        );
-
-        eprintln!(
-            "[incremental] -------------------------------------\
-                  |------------------\
-                  |-------------\
-                  |------------------|"
-        );
-
-        for stat in stats {
-            let node_kind_ratio = (100.0 * (stat.node_counter as f64)) / (total_node_count as f64);
-            let node_kind_avg_edges = (stat.edge_counter as f64) / (stat.node_counter as f64);
-
-            eprintln!(
-                "[incremental]  {:<36}|{:>16.1}% |{:>12} |{:>17.1} |",
-                format!("{:?}", stat.kind),
-                node_kind_ratio,
-                stat.node_counter,
-                node_kind_avg_edges,
-            );
+        if let Some(data) = &self.data {
+            data.current.encoder.borrow().print_incremental_info(
+                data.current.total_read_count.load(Relaxed),
+                data.current.total_duplicate_read_count.load(Relaxed),
+            )
         }
+    }
 
-        eprintln!("{}", SEPARATOR);
-        eprintln!("[incremental]");
+    pub fn encode(&self) -> FileEncodeResult {
+        if let Some(data) = &self.data { data.current.encoder.steal().finish() } else { Ok(()) }
     }
 
     fn next_virtual_depnode_index(&self) -> DepNodeIndex {
@@ -1019,142 +786,6 @@ impl<K: DepKind> DepGraph<K> {
     }
 }
 
-impl<E: Encoder, K: DepKind + Encodable<E>> Encodable<E> for DepGraph<K> {
-    fn encode(&self, e: &mut E) -> Result<(), E::Error> {
-        // We used to serialize the dep graph by creating and serializing a `SerializedDepGraph`
-        // using data copied from the `DepGraph`. But copying created a large memory spike, so we
-        // now serialize directly from the `DepGraph` as if it's a `SerializedDepGraph`. Because we
-        // deserialize that data into a `SerializedDepGraph` in the next compilation session, we
-        // need `DepGraph`'s `Encodable` and `SerializedDepGraph`'s `Decodable` implementations to
-        // be in sync. If you update this encoding, be sure to update the decoding, and vice-versa.
-
-        let data = self.data.as_ref().unwrap();
-        let prev = &data.previous;
-
-        // Note locking order: `prev_index_to_index`, then `data`.
-        let prev_index_to_index = data.current.prev_index_to_index.lock();
-        let data = data.current.data.lock();
-        let new = &data.new;
-        let red = &data.red;
-        let lg = &data.light_green;
-
-        let node_count = data.hybrid_indices.len();
-        let edge_count = self.edge_count(&data);
-
-        // `rustc_middle::ty::query::OnDiskCache` expects nodes to be encoded in `DepNodeIndex`
-        // order. The edges in `edge_list_data` don't need to be in a particular order, as long as
-        // each node references its edges as a contiguous range within it. Therefore, we can encode
-        // `edge_list_data` directly from `unshared_edges`. It meets the above requirements, as
-        // each non-dark-green node already knows the range of edges to reference within it, which
-        // they'll encode in `edge_list_indices`. Dark green nodes, however, don't have their edges
-        // in `unshared_edges`, so need to add them to `edge_list_data`.
-
-        use HybridIndex::*;
-
-        // Encoded values (nodes, etc.) are explicitly typed below to avoid inadvertently
-        // serializing data in the wrong format (i.e. one incompatible with `SerializedDepGraph`).
-        e.emit_struct("SerializedDepGraph", 4, |e| {
-            e.emit_struct_field("nodes", 0, |e| {
-                // `SerializedDepGraph` expects this to be encoded as a sequence of `DepNode`s.
-                e.emit_seq(node_count, |e| {
-                    for (seq_index, &hybrid_index) in data.hybrid_indices.iter().enumerate() {
-                        let node: DepNode<K> = match hybrid_index.into() {
-                            New(i) => new.nodes[i],
-                            Red(i) => prev.index_to_node(red.node_indices[i]),
-                            LightGreen(i) => prev.index_to_node(lg.node_indices[i]),
-                            DarkGreen(prev_index) => prev.index_to_node(prev_index),
-                        };
-
-                        e.emit_seq_elt(seq_index, |e| node.encode(e))?;
-                    }
-
-                    Ok(())
-                })
-            })?;
-
-            e.emit_struct_field("fingerprints", 1, |e| {
-                // `SerializedDepGraph` expects this to be encoded as a sequence of `Fingerprints`s.
-                e.emit_seq(node_count, |e| {
-                    for (seq_index, &hybrid_index) in data.hybrid_indices.iter().enumerate() {
-                        let fingerprint: Fingerprint = match hybrid_index.into() {
-                            New(i) => new.fingerprints[i],
-                            Red(i) => red.fingerprints[i],
-                            LightGreen(i) => prev.fingerprint_by_index(lg.node_indices[i]),
-                            DarkGreen(prev_index) => prev.fingerprint_by_index(prev_index),
-                        };
-
-                        e.emit_seq_elt(seq_index, |e| fingerprint.encode(e))?;
-                    }
-
-                    Ok(())
-                })
-            })?;
-
-            e.emit_struct_field("edge_list_indices", 2, |e| {
-                // `SerializedDepGraph` expects this to be encoded as a sequence of `(u32, u32)`s.
-                e.emit_seq(node_count, |e| {
-                    // Dark green node edges start after the unshared (all other nodes') edges.
-                    let mut dark_green_edge_index = data.unshared_edges.len();
-
-                    for (seq_index, &hybrid_index) in data.hybrid_indices.iter().enumerate() {
-                        let edge_indices: (u32, u32) = match hybrid_index.into() {
-                            New(i) => (new.edges[i].start.as_u32(), new.edges[i].end.as_u32()),
-                            Red(i) => (red.edges[i].start.as_u32(), red.edges[i].end.as_u32()),
-                            LightGreen(i) => (lg.edges[i].start.as_u32(), lg.edges[i].end.as_u32()),
-                            DarkGreen(prev_index) => {
-                                let edge_count = prev.edge_targets_from(prev_index).len();
-                                let start = dark_green_edge_index as u32;
-                                dark_green_edge_index += edge_count;
-                                let end = dark_green_edge_index as u32;
-                                (start, end)
-                            }
-                        };
-
-                        e.emit_seq_elt(seq_index, |e| edge_indices.encode(e))?;
-                    }
-
-                    assert_eq!(dark_green_edge_index, edge_count);
-
-                    Ok(())
-                })
-            })?;
-
-            e.emit_struct_field("edge_list_data", 3, |e| {
-                // `SerializedDepGraph` expects this to be encoded as a sequence of
-                // `SerializedDepNodeIndex`.
-                e.emit_seq(edge_count, |e| {
-                    for (seq_index, &edge) in data.unshared_edges.iter().enumerate() {
-                        let serialized_edge = SerializedDepNodeIndex::new(edge.index());
-                        e.emit_seq_elt(seq_index, |e| serialized_edge.encode(e))?;
-                    }
-
-                    let mut seq_index = data.unshared_edges.len();
-
-                    for &hybrid_index in data.hybrid_indices.iter() {
-                        if let DarkGreen(prev_index) = hybrid_index.into() {
-                            for &edge in prev.edge_targets_from(prev_index) {
-                                // Dark green node edges are stored in the previous graph
-                                // and must be converted to edges in the current graph,
-                                // and then serialized as `SerializedDepNodeIndex`.
-                                let serialized_edge = SerializedDepNodeIndex::new(
-                                    prev_index_to_index[edge].as_ref().unwrap().index(),
-                                );
-
-                                e.emit_seq_elt(seq_index, |e| serialized_edge.encode(e))?;
-                                seq_index += 1;
-                            }
-                        }
-                    }
-
-                    assert_eq!(seq_index, edge_count);
-
-                    Ok(())
-                })
-            })
-        })
-    }
-}
-
 /// A "work product" is an intermediate result that we save into the
 /// incremental directory for later re-use. The primary example are
 /// the object files that we save for each partition at code
@@ -1193,201 +824,11 @@ pub struct WorkProduct {
     pub saved_file: Option<String>,
 }
 
-// The maximum value of the follow index types leaves the upper two bits unused
-// so that we can store multiple index types in `CompressedHybridIndex`, and use
-// those bits to encode which index type it contains.
-
-// Index type for `NewDepNodeData`.
-rustc_index::newtype_index! {
-    struct NewDepNodeIndex {
-        MAX = 0x7FFF_FFFF
-    }
-}
-
-// Index type for `RedDepNodeData`.
-rustc_index::newtype_index! {
-    struct RedDepNodeIndex {
-        MAX = 0x7FFF_FFFF
-    }
-}
-
-// Index type for `LightGreenDepNodeData`.
-rustc_index::newtype_index! {
-    struct LightGreenDepNodeIndex {
-        MAX = 0x7FFF_FFFF
-    }
-}
-
-/// Compressed representation of `HybridIndex` enum. Bits unused by the
-/// contained index types are used to encode which index type it contains.
-#[derive(Copy, Clone)]
-struct CompressedHybridIndex(u32);
-
-impl CompressedHybridIndex {
-    const NEW_TAG: u32 = 0b0000_0000_0000_0000_0000_0000_0000_0000;
-    const RED_TAG: u32 = 0b0100_0000_0000_0000_0000_0000_0000_0000;
-    const LIGHT_GREEN_TAG: u32 = 0b1000_0000_0000_0000_0000_0000_0000_0000;
-    const DARK_GREEN_TAG: u32 = 0b1100_0000_0000_0000_0000_0000_0000_0000;
-
-    const TAG_MASK: u32 = 0b1100_0000_0000_0000_0000_0000_0000_0000;
-    const INDEX_MASK: u32 = !Self::TAG_MASK;
-}
-
-impl From<NewDepNodeIndex> for CompressedHybridIndex {
-    #[inline]
-    fn from(index: NewDepNodeIndex) -> Self {
-        CompressedHybridIndex(Self::NEW_TAG | index.as_u32())
-    }
-}
-
-impl From<RedDepNodeIndex> for CompressedHybridIndex {
-    #[inline]
-    fn from(index: RedDepNodeIndex) -> Self {
-        CompressedHybridIndex(Self::RED_TAG | index.as_u32())
-    }
-}
-
-impl From<LightGreenDepNodeIndex> for CompressedHybridIndex {
-    #[inline]
-    fn from(index: LightGreenDepNodeIndex) -> Self {
-        CompressedHybridIndex(Self::LIGHT_GREEN_TAG | index.as_u32())
-    }
-}
-
-impl From<SerializedDepNodeIndex> for CompressedHybridIndex {
-    #[inline]
-    fn from(index: SerializedDepNodeIndex) -> Self {
-        CompressedHybridIndex(Self::DARK_GREEN_TAG | index.as_u32())
-    }
-}
-
-/// Contains an index into one of several node data collections. Elsewhere, we
-/// store `CompressedHyridIndex` instead of this to save space, but convert to
-/// this type during processing to take advantage of the enum match ergonomics.
-enum HybridIndex {
-    New(NewDepNodeIndex),
-    Red(RedDepNodeIndex),
-    LightGreen(LightGreenDepNodeIndex),
-    DarkGreen(SerializedDepNodeIndex),
-}
-
-impl From<CompressedHybridIndex> for HybridIndex {
-    #[inline]
-    fn from(hybrid_index: CompressedHybridIndex) -> Self {
-        let index = hybrid_index.0 & CompressedHybridIndex::INDEX_MASK;
-
-        match hybrid_index.0 & CompressedHybridIndex::TAG_MASK {
-            CompressedHybridIndex::NEW_TAG => HybridIndex::New(NewDepNodeIndex::from_u32(index)),
-            CompressedHybridIndex::RED_TAG => HybridIndex::Red(RedDepNodeIndex::from_u32(index)),
-            CompressedHybridIndex::LIGHT_GREEN_TAG => {
-                HybridIndex::LightGreen(LightGreenDepNodeIndex::from_u32(index))
-            }
-            CompressedHybridIndex::DARK_GREEN_TAG => {
-                HybridIndex::DarkGreen(SerializedDepNodeIndex::from_u32(index))
-            }
-            _ => unreachable!(),
-        }
-    }
-}
-
 // Index type for `DepNodeData`'s edges.
 rustc_index::newtype_index! {
     struct EdgeIndex { .. }
 }
 
-/// Data for nodes in the current graph, divided into different collections
-/// based on their presence in the previous graph, and if present, their color.
-/// We divide nodes this way because different types of nodes are able to share
-/// more or less data with the previous graph.
-///
-/// To enable more sharing, we distinguish between two kinds of green nodes.
-/// Light green nodes are nodes in the previous graph that have been marked
-/// green because we re-executed their queries and the results were the same as
-/// in the previous session. Dark green nodes are nodes in the previous graph
-/// that have been marked green because we were able to mark all of their
-/// dependencies green.
-///
-/// Both light and dark green nodes can share the dep node and fingerprint with
-/// the previous graph, but for light green nodes, we can't be sure that the
-/// edges may be shared without comparing them against the previous edges, so we
-/// store them directly (an approach in which we compare edges with the previous
-/// edges to see if they can be shared was evaluated, but was not found to be
-/// very profitable).
-///
-/// For dark green nodes, we can share everything with the previous graph, which
-/// is why the `HybridIndex::DarkGreen` enum variant contains the index of the
-/// node in the previous graph, and why we don't have a separate collection for
-/// dark green node data--the collection is the `PreviousDepGraph` itself.
-///
-/// (Note that for dark green nodes, the edges in the previous graph
-/// (`SerializedDepNodeIndex`s) must be converted to edges in the current graph
-/// (`DepNodeIndex`s). `CurrentDepGraph` contains `prev_index_to_index`, which
-/// can perform this conversion. It should always be possible, as by definition,
-/// a dark green node is one whose dependencies from the previous session have
-/// all been marked green--which means `prev_index_to_index` contains them.)
-///
-/// Node data is stored in parallel vectors to eliminate the padding between
-/// elements that would be needed to satisfy alignment requirements of the
-/// structure that would contain all of a node's data. We could group tightly
-/// packing subsets of node data together and use fewer vectors, but for
-/// consistency's sake, we use separate vectors for each piece of data.
-struct DepNodeData<K> {
-    /// Data for nodes not in previous graph.
-    new: NewDepNodeData<K>,
-
-    /// Data for nodes in previous graph that have been marked red.
-    red: RedDepNodeData,
-
-    /// Data for nodes in previous graph that have been marked light green.
-    light_green: LightGreenDepNodeData,
-
-    // Edges for all nodes other than dark-green ones. Edges for each node
-    // occupy a contiguous region of this collection, which a node can reference
-    // using two indices. Storing edges this way rather than using an `EdgesVec`
-    // for each node reduces memory consumption by a not insignificant amount
-    // when compiling large crates. The downside is that we have to copy into
-    // this collection the edges from the `EdgesVec`s that are built up during
-    // query execution. But this is mostly balanced out by the more efficient
-    // implementation of `DepGraph::serialize` enabled by this representation.
-    unshared_edges: IndexVec<EdgeIndex, DepNodeIndex>,
-
-    /// Mapping from `DepNodeIndex` to an index into a collection above.
-    /// Indicates which of the above collections contains a node's data.
-    ///
-    /// This collection is wasteful in time and space during incr-full builds,
-    /// because for those, all nodes are new. However, the waste is relatively
-    /// small, and the maintenance cost of avoiding using this for incr-full
-    /// builds is somewhat high and prone to bugginess. It does not seem worth
-    /// it at the time of this writing, but we may want to revisit the idea.
-    hybrid_indices: IndexVec<DepNodeIndex, CompressedHybridIndex>,
-}
-
-/// Data for nodes not in previous graph. Since we cannot share any data with
-/// the previous graph, so we must store all of such a node's data here.
-struct NewDepNodeData<K> {
-    nodes: IndexVec<NewDepNodeIndex, DepNode<K>>,
-    edges: IndexVec<NewDepNodeIndex, Range<EdgeIndex>>,
-    fingerprints: IndexVec<NewDepNodeIndex, Fingerprint>,
-}
-
-/// Data for nodes in previous graph that have been marked red. We can share the
-/// dep node with the previous graph, but the edges may be different, and the
-/// fingerprint is known to be different, so we store the latter two directly.
-struct RedDepNodeData {
-    node_indices: IndexVec<RedDepNodeIndex, SerializedDepNodeIndex>,
-    edges: IndexVec<RedDepNodeIndex, Range<EdgeIndex>>,
-    fingerprints: IndexVec<RedDepNodeIndex, Fingerprint>,
-}
-
-/// Data for nodes in previous graph that have been marked green because we
-/// re-executed their queries and the results were the same as in the previous
-/// session. We can share the dep node and the fingerprint with the previous
-/// graph, but the edges may be different, so we store them directly.
-struct LightGreenDepNodeData {
-    node_indices: IndexVec<LightGreenDepNodeIndex, SerializedDepNodeIndex>,
-    edges: IndexVec<LightGreenDepNodeIndex, Range<EdgeIndex>>,
-}
-
 /// `CurrentDepGraph` stores the dependency graph for the current session. It
 /// will be populated as we run queries or tasks. We never remove nodes from the
 /// graph: they are only added.
@@ -1417,15 +858,15 @@ struct LightGreenDepNodeData {
 /// `new_node_to_index` and `data`, or `prev_index_to_index` and `data`. When
 /// manipulating both, we acquire `new_node_to_index` or `prev_index_to_index`
 /// first, and `data` second.
-pub(super) struct CurrentDepGraph<K> {
-    data: Lock<DepNodeData<K>>,
+pub(super) struct CurrentDepGraph<K: DepKind> {
+    encoder: Steal<GraphEncoder<K>>,
     new_node_to_index: Sharded<FxHashMap<DepNode<K>, DepNodeIndex>>,
     prev_index_to_index: Lock<IndexVec<SerializedDepNodeIndex, Option<DepNodeIndex>>>,
 
     /// Used to trap when a specific edge is added to the graph.
     /// This is used for debug purposes and is only active with `debug_assertions`.
-    #[allow(dead_code)]
-    forbidden_edge: Option<EdgeFilter>,
+    #[cfg(debug_assertions)]
+    forbidden_edge: Option<EdgeFilter<K>>,
 
     /// Anonymous `DepNode`s are nodes whose IDs we compute from the list of
     /// their edges. This has the beneficial side-effect that multiple anonymous
@@ -1447,7 +888,12 @@ pub(super) struct CurrentDepGraph<K> {
 }
 
 impl<K: DepKind> CurrentDepGraph<K> {
-    fn new(prev_graph_node_count: usize) -> CurrentDepGraph<K> {
+    fn new(
+        prev_graph_node_count: usize,
+        encoder: FileEncoder,
+        record_graph: bool,
+        record_stats: bool,
+    ) -> CurrentDepGraph<K> {
         use std::time::{SystemTime, UNIX_EPOCH};
 
         let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
@@ -1455,70 +901,29 @@ impl<K: DepKind> CurrentDepGraph<K> {
         let mut stable_hasher = StableHasher::new();
         nanos.hash(&mut stable_hasher);
 
-        let forbidden_edge = if cfg!(debug_assertions) {
-            match env::var("RUST_FORBID_DEP_GRAPH_EDGE") {
-                Ok(s) => match EdgeFilter::new(&s) {
-                    Ok(f) => Some(f),
-                    Err(err) => panic!("RUST_FORBID_DEP_GRAPH_EDGE invalid: {}", err),
-                },
-                Err(_) => None,
-            }
-        } else {
-            None
+        #[cfg(debug_assertions)]
+        let forbidden_edge = match env::var("RUST_FORBID_DEP_GRAPH_EDGE") {
+            Ok(s) => match EdgeFilter::new(&s) {
+                Ok(f) => Some(f),
+                Err(err) => panic!("RUST_FORBID_DEP_GRAPH_EDGE invalid: {}", err),
+            },
+            Err(_) => None,
         };
 
-        // Pre-allocate the dep node structures. We over-allocate a little so
-        // that we hopefully don't have to re-allocate during this compilation
-        // session. The over-allocation for new nodes is 2% plus a small
-        // constant to account for the fact that in very small crates 2% might
-        // not be enough. The allocation for red and green node data doesn't
-        // include a constant, as we don't want to allocate anything for these
-        // structures during full incremental builds, where they aren't used.
-        //
-        // These estimates are based on the distribution of node and edge counts
-        // seen in rustc-perf benchmarks, adjusted somewhat to account for the
-        // fact that these benchmarks aren't perfectly representative.
-        //
-        // FIXME Use a collection type that doesn't copy node and edge data and
-        // grow multiplicatively on reallocation. Without such a collection or
-        // solution having the same effect, there is a performance hazard here
-        // in both time and space, as growing these collections means copying a
-        // large amount of data and doubling already large buffer capacities. A
-        // solution for this will also mean that it's less important to get
-        // these estimates right.
-        let new_node_count_estimate = (prev_graph_node_count * 2) / 100 + 200;
-        let red_node_count_estimate = (prev_graph_node_count * 3) / 100;
-        let light_green_node_count_estimate = (prev_graph_node_count * 25) / 100;
-        let total_node_count_estimate = prev_graph_node_count + new_node_count_estimate;
-
-        let average_edges_per_node_estimate = 6;
-        let unshared_edge_count_estimate = average_edges_per_node_estimate
-            * (new_node_count_estimate + red_node_count_estimate + light_green_node_count_estimate);
-
         // We store a large collection of these in `prev_index_to_index` during
         // non-full incremental builds, and want to ensure that the element size
         // doesn't inadvertently increase.
         static_assert_size!(Option<DepNodeIndex>, 4);
 
+        let new_node_count_estimate = 102 * prev_graph_node_count / 100 + 200;
+
         CurrentDepGraph {
-            data: Lock::new(DepNodeData {
-                new: NewDepNodeData {
-                    nodes: IndexVec::with_capacity(new_node_count_estimate),
-                    edges: IndexVec::with_capacity(new_node_count_estimate),
-                    fingerprints: IndexVec::with_capacity(new_node_count_estimate),
-                },
-                red: RedDepNodeData {
-                    node_indices: IndexVec::with_capacity(red_node_count_estimate),
-                    edges: IndexVec::with_capacity(red_node_count_estimate),
-                    fingerprints: IndexVec::with_capacity(red_node_count_estimate),
-                },
-                light_green: LightGreenDepNodeData {
-                    node_indices: IndexVec::with_capacity(light_green_node_count_estimate),
-                    edges: IndexVec::with_capacity(light_green_node_count_estimate),
-                },
-                unshared_edges: IndexVec::with_capacity(unshared_edge_count_estimate),
-                hybrid_indices: IndexVec::with_capacity(total_node_count_estimate),
-            }),
+            encoder: Steal::new(GraphEncoder::new(
+                encoder,
+                prev_graph_node_count,
+                record_graph,
+                record_stats,
+            )),
             new_node_to_index: Sharded::new(|| {
                 FxHashMap::with_capacity_and_hasher(
                     new_node_count_estimate / sharded::SHARDS,
@@ -1527,6 +932,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
             }),
             prev_index_to_index: Lock::new(IndexVec::from_elem_n(None, prev_graph_node_count)),
             anon_id_seed: stable_hasher.finish(),
+            #[cfg(debug_assertions)]
             forbidden_edge,
             total_read_count: AtomicU64::new(0),
             total_duplicate_read_count: AtomicU64::new(0),
@@ -1535,76 +941,124 @@ impl<K: DepKind> CurrentDepGraph<K> {
 
     fn intern_new_node(
         &self,
-        prev_graph: &PreviousDepGraph<K>,
-        dep_node: DepNode<K>,
+        key: DepNode<K>,
         edges: EdgesVec,
-        fingerprint: Fingerprint,
+        current_fingerprint: Fingerprint,
     ) -> DepNodeIndex {
-        debug_assert!(
-            prev_graph.node_to_index_opt(&dep_node).is_none(),
-            "node in previous graph should be interned using one \
-            of `intern_red_node`, `intern_light_green_node`, etc."
-        );
-
-        match self.new_node_to_index.get_shard_by_value(&dep_node).lock().entry(dep_node) {
+        match self.new_node_to_index.get_shard_by_value(&key).lock().entry(key) {
             Entry::Occupied(entry) => *entry.get(),
             Entry::Vacant(entry) => {
-                let data = &mut *self.data.lock();
-                let new_index = data.new.nodes.push(dep_node);
-                add_edges(&mut data.unshared_edges, &mut data.new.edges, edges);
-                data.new.fingerprints.push(fingerprint);
-                let dep_node_index = data.hybrid_indices.push(new_index.into());
+                let dep_node_index = self.encoder.borrow().send(key, current_fingerprint, edges);
                 entry.insert(dep_node_index);
+                #[cfg(debug_assertions)]
+                if let Some(forbidden_edge) = &self.forbidden_edge {
+                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
+                }
                 dep_node_index
             }
         }
     }
 
-    fn intern_red_node(
+    fn intern_node(
         &self,
         prev_graph: &PreviousDepGraph<K>,
-        prev_index: SerializedDepNodeIndex,
+        key: DepNode<K>,
         edges: EdgesVec,
-        fingerprint: Fingerprint,
-    ) -> DepNodeIndex {
-        self.debug_assert_not_in_new_nodes(prev_graph, prev_index);
+        current_fingerprint: Option<Fingerprint>,
+        print_status: bool,
+    ) -> (DepNodeIndex, Option<(SerializedDepNodeIndex, DepNodeColor)>) {
+        let print_status = cfg!(debug_assertions) && print_status;
+
+        if let Some(prev_index) = prev_graph.node_to_index_opt(&key) {
+            // Determine the color and index of the new `DepNode`.
+            if let Some(current_fingerprint) = current_fingerprint {
+                if current_fingerprint == prev_graph.fingerprint_by_index(prev_index) {
+                    if print_status {
+                        eprintln!("[task::green] {:?}", key);
+                    }
 
-        let mut prev_index_to_index = self.prev_index_to_index.lock();
+                    // This is a light green node: it existed in the previous compilation,
+                    // its query was re-executed, and it has the same result as before.
+                    let mut prev_index_to_index = self.prev_index_to_index.lock();
+
+                    let dep_node_index = match prev_index_to_index[prev_index] {
+                        Some(dep_node_index) => dep_node_index,
+                        None => {
+                            let dep_node_index =
+                                self.encoder.borrow().send(key, current_fingerprint, edges);
+                            prev_index_to_index[prev_index] = Some(dep_node_index);
+                            dep_node_index
+                        }
+                    };
 
-        match prev_index_to_index[prev_index] {
-            Some(dep_node_index) => dep_node_index,
-            None => {
-                let data = &mut *self.data.lock();
-                let red_index = data.red.node_indices.push(prev_index);
-                add_edges(&mut data.unshared_edges, &mut data.red.edges, edges);
-                data.red.fingerprints.push(fingerprint);
-                let dep_node_index = data.hybrid_indices.push(red_index.into());
-                prev_index_to_index[prev_index] = Some(dep_node_index);
-                dep_node_index
-            }
-        }
-    }
+                    #[cfg(debug_assertions)]
+                    if let Some(forbidden_edge) = &self.forbidden_edge {
+                        forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
+                    }
+                    (dep_node_index, Some((prev_index, DepNodeColor::Green(dep_node_index))))
+                } else {
+                    if print_status {
+                        eprintln!("[task::red] {:?}", key);
+                    }
 
-    fn intern_light_green_node(
-        &self,
-        prev_graph: &PreviousDepGraph<K>,
-        prev_index: SerializedDepNodeIndex,
-        edges: EdgesVec,
-    ) -> DepNodeIndex {
-        self.debug_assert_not_in_new_nodes(prev_graph, prev_index);
+                    // This is a red node: it existed in the previous compilation, its query
+                    // was re-executed, but it has a different result from before.
+                    let mut prev_index_to_index = self.prev_index_to_index.lock();
+
+                    let dep_node_index = match prev_index_to_index[prev_index] {
+                        Some(dep_node_index) => dep_node_index,
+                        None => {
+                            let dep_node_index =
+                                self.encoder.borrow().send(key, current_fingerprint, edges);
+                            prev_index_to_index[prev_index] = Some(dep_node_index);
+                            dep_node_index
+                        }
+                    };
 
-        let mut prev_index_to_index = self.prev_index_to_index.lock();
+                    #[cfg(debug_assertions)]
+                    if let Some(forbidden_edge) = &self.forbidden_edge {
+                        forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
+                    }
+                    (dep_node_index, Some((prev_index, DepNodeColor::Red)))
+                }
+            } else {
+                if print_status {
+                    eprintln!("[task::unknown] {:?}", key);
+                }
 
-        match prev_index_to_index[prev_index] {
-            Some(dep_node_index) => dep_node_index,
-            None => {
-                let data = &mut *self.data.lock();
-                let light_green_index = data.light_green.node_indices.push(prev_index);
-                add_edges(&mut data.unshared_edges, &mut data.light_green.edges, edges);
-                let dep_node_index = data.hybrid_indices.push(light_green_index.into());
-                prev_index_to_index[prev_index] = Some(dep_node_index);
-                dep_node_index
+                // This is a red node, effectively: it existed in the previous compilation
+                // session, its query was re-executed, but it doesn't compute a result hash
+                // (i.e. it represents a `no_hash` query), so we have no way of determining
+                // whether or not the result was the same as before.
+                let mut prev_index_to_index = self.prev_index_to_index.lock();
+
+                let dep_node_index = match prev_index_to_index[prev_index] {
+                    Some(dep_node_index) => dep_node_index,
+                    None => {
+                        let dep_node_index =
+                            self.encoder.borrow().send(key, Fingerprint::ZERO, edges);
+                        prev_index_to_index[prev_index] = Some(dep_node_index);
+                        dep_node_index
+                    }
+                };
+
+                #[cfg(debug_assertions)]
+                if let Some(forbidden_edge) = &self.forbidden_edge {
+                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
+                }
+                (dep_node_index, Some((prev_index, DepNodeColor::Red)))
             }
+        } else {
+            if print_status {
+                eprintln!("[task::new] {:?}", key);
+            }
+
+            let current_fingerprint = current_fingerprint.unwrap_or(Fingerprint::ZERO);
+
+            // This is a new node: it didn't exist in the previous compilation session.
+            let dep_node_index = self.intern_new_node(key, edges, current_fingerprint);
+
+            (dep_node_index, None)
         }
     }
 
@@ -1620,9 +1074,21 @@ impl<K: DepKind> CurrentDepGraph<K> {
         match prev_index_to_index[prev_index] {
             Some(dep_node_index) => dep_node_index,
             None => {
-                let mut data = self.data.lock();
-                let dep_node_index = data.hybrid_indices.push(prev_index.into());
+                let key = prev_graph.index_to_node(prev_index);
+                let dep_node_index = self.encoder.borrow().send(
+                    key,
+                    prev_graph.fingerprint_by_index(prev_index),
+                    prev_graph
+                        .edge_targets_from(prev_index)
+                        .iter()
+                        .map(|i| prev_index_to_index[*i].unwrap())
+                        .collect(),
+                );
                 prev_index_to_index[prev_index] = Some(dep_node_index);
+                #[cfg(debug_assertions)]
+                if let Some(forbidden_edge) = &self.forbidden_edge {
+                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
+                }
                 dep_node_index
             }
         }
@@ -1642,18 +1108,6 @@ impl<K: DepKind> CurrentDepGraph<K> {
     }
 }
 
-#[inline]
-fn add_edges<I: Idx>(
-    edges: &mut IndexVec<EdgeIndex, DepNodeIndex>,
-    edge_indices: &mut IndexVec<I, Range<EdgeIndex>>,
-    new_edges: EdgesVec,
-) {
-    let start = edges.next_index();
-    edges.extend(new_edges);
-    let end = edges.next_index();
-    edge_indices.push(start..end);
-}
-
 /// The capacity of the `reads` field `SmallVec`
 const TASK_DEPS_READS_CAP: usize = 8;
 type EdgesVec = SmallVec<[DepNodeIndex; TASK_DEPS_READS_CAP]>;
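
The new `intern_node` above folds the former `intern_red_node`/`intern_light_green_node` paths into a single color decision: green when the re-computed fingerprint matches the previous session's, red when it differs or the query is `no_hash`, and new when the node has no previous counterpart. A minimal standalone sketch of that decision, with plain `u64`s standing in for `Fingerprint` and a helper name (`color_for`) that is illustrative, not rustc's:

// Sketch only: simplified stand-in for the color decision in `intern_node`.
#[derive(Debug, PartialEq)]
enum Color {
    Green,
    Red,
    New,
}

fn color_for(prev_fingerprint: Option<u64>, current_fingerprint: Option<u64>) -> Color {
    match (prev_fingerprint, current_fingerprint) {
        // Node existed before and the re-executed query produced the same hash.
        (Some(prev), Some(cur)) if prev == cur => Color::Green,
        // Node existed before but the hash differs, or the query is `no_hash`
        // (no current fingerprint), so we must assume the result changed.
        (Some(_), _) => Color::Red,
        // Node did not exist in the previous session.
        (None, _) => Color::New,
    }
}

fn main() {
    assert_eq!(color_for(Some(1), Some(1)), Color::Green);
    assert_eq!(color_for(Some(1), Some(2)), Color::Red);
    assert_eq!(color_for(Some(1), None), Color::Red); // `no_hash` query: treated as red
    assert_eq!(color_for(None, Some(7)), Color::New);
}
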
diff --git a/compiler/rustc_query_system/src/dep_graph/mod.rs b/compiler/rustc_query_system/src/dep_graph/mod.rs
index e8fb71be3e08f..1b6ecf3e637f3 100644
--- a/compiler/rustc_query_system/src/dep_graph/mod.rs
+++ b/compiler/rustc_query_system/src/dep_graph/mod.rs
@@ -13,6 +13,7 @@ pub use serialized::{SerializedDepGraph, SerializedDepNodeIndex};
 
 use rustc_data_structures::profiling::SelfProfilerRef;
 use rustc_data_structures::sync::Lock;
+use rustc_serialize::{opaque::FileEncoder, Encodable};
 use rustc_session::Session;
 
 use std::fmt;
@@ -59,7 +60,7 @@ impl<T: DepContext> HasDepContext for T {
 }
 
 /// Describe the different families of dependency nodes.
-pub trait DepKind: Copy + fmt::Debug + Eq + Hash {
+pub trait DepKind: Copy + fmt::Debug + Eq + Hash + Send + Encodable<FileEncoder> + 'static {
     const NULL: Self;
 
     /// Return whether this kind always require evaluation.
diff --git a/compiler/rustc_query_system/src/dep_graph/query.rs b/compiler/rustc_query_system/src/dep_graph/query.rs
index e678a16249b10..9c85cdd59d9d4 100644
--- a/compiler/rustc_query_system/src/dep_graph/query.rs
+++ b/compiler/rustc_query_system/src/dep_graph/query.rs
@@ -1,7 +1,7 @@
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph::implementation::{Direction, Graph, NodeIndex, INCOMING};
 
-use super::{DepKind, DepNode};
+use super::{DepKind, DepNode, DepNodeIndex};
 
 pub struct DepGraphQuery<K> {
     pub graph: Graph<DepNode<K>, ()>,
@@ -9,28 +9,27 @@ pub struct DepGraphQuery<K> {
 }
 
 impl<K: DepKind> DepGraphQuery<K> {
-    pub fn new(
-        nodes: &[DepNode<K>],
-        edge_list_indices: &[(usize, usize)],
-        edge_list_data: &[usize],
-    ) -> DepGraphQuery<K> {
-        let mut graph = Graph::with_capacity(nodes.len(), edge_list_data.len());
-        let mut indices = FxHashMap::default();
-        for node in nodes {
-            indices.insert(*node, graph.add_node(*node));
-        }
+    pub fn new(prev_node_count: usize) -> DepGraphQuery<K> {
+        let node_count = prev_node_count + prev_node_count / 4;
+        let edge_count = 6 * node_count;
 
-        for (source, &(start, end)) in edge_list_indices.iter().enumerate() {
-            for &target in &edge_list_data[start..end] {
-                let source = indices[&nodes[source]];
-                let target = indices[&nodes[target]];
-                graph.add_edge(source, target, ());
-            }
-        }
+        let graph = Graph::with_capacity(node_count, edge_count);
+        let indices = FxHashMap::default();
 
         DepGraphQuery { graph, indices }
     }
 
+    pub fn push(&mut self, index: DepNodeIndex, node: DepNode<K>, edges: &[DepNodeIndex]) {
+        let source = self.graph.add_node(node);
+        debug_assert_eq!(index.index(), source.0);
+        self.indices.insert(node, source);
+
+        for &target in edges.iter() {
+            let target = NodeIndex(target.index());
+            self.graph.add_edge(source, target, ());
+        }
+    }
+
     pub fn nodes(&self) -> Vec<&DepNode<K>> {
         self.graph.all_nodes().iter().map(|n| &n.data).collect()
     }
diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 9bb922b0a9008..a76100cc2281f 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -1,9 +1,18 @@
 //! The data that we will serialize and deserialize.
 
-use super::{DepKind, DepNode};
+use super::query::DepGraphQuery;
+use super::{DepKind, DepNode, DepNodeIndex};
 use rustc_data_structures::fingerprint::Fingerprint;
-use rustc_index::vec::IndexVec;
-use rustc_serialize::{Decodable, Decoder};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::{AtomicU32, Lock, Lrc, Ordering};
+use rustc_index::vec::{Idx, IndexVec};
+use rustc_serialize::opaque::{self, FileEncodeResult, FileEncoder, IntEncodedWithFixedSize};
+use rustc_serialize::{Decodable, Encodable};
+use smallvec::SmallVec;
+use std::convert::TryInto;
+
+#[cfg(parallel_compiler)]
+use {rustc_data_structures::sync::WorkerLocal, std::sync::mpsc, std::thread};
 
 // The maximum value of `SerializedDepNodeIndex` leaves the upper two bits
 // unused so that we can store multiple index types in `CompressedHybridIndex`,
@@ -50,78 +59,347 @@ impl<K: DepKind> SerializedDepGraph<K> {
     }
 }
 
-impl<D: Decoder, K: DepKind + Decodable<D>> Decodable<D> for SerializedDepGraph<K> {
-    fn decode(d: &mut D) -> Result<SerializedDepGraph<K>, D::Error> {
-        // We used to serialize the dep graph by creating and serializing a `SerializedDepGraph`
-        // using data copied from the `DepGraph`. But copying created a large memory spike, so we
-        // now serialize directly from the `DepGraph` as if it's a `SerializedDepGraph`. Because we
-        // deserialize that data into a `SerializedDepGraph` in the next compilation session, we
-        // need `DepGraph`'s `Encodable` and `SerializedDepGraph`'s `Decodable` implementations to
-        // be in sync. If you update this decoding, be sure to update the encoding, and vice-versa.
-        //
-        // We mimic the sequence of `Encode` and `Encodable` method calls used by the `DepGraph`'s
-        // `Encodable` implementation with the corresponding sequence of `Decode` and `Decodable`
-        // method calls. E.g. `Decode::read_struct` pairs with `Encode::emit_struct`, `DepNode`'s
-        // `decode` pairs with `DepNode`'s `encode`, and so on. Any decoding methods not associated
-        // with corresponding encoding methods called in `DepGraph`'s `Encodable` implementation
-        // are off limits, because we'd be relying on their implementation details.
-        //
-        // For example, because we know it happens to do the right thing, its tempting to just use
-        // `IndexVec`'s `Decodable` implementation to decode into some of the collections below,
-        // even though `DepGraph` doesn't use its `Encodable` implementation. But the `IndexVec`
-        // implementation could change, and we'd have a bug.
-        //
-        // Variables below are explicitly typed so that anyone who changes the `SerializedDepGraph`
-        // representation without updating this function will encounter a compilation error, and
-        // know to update this and possibly the `DepGraph` `Encodable` implementation accordingly
-        // (the latter should serialize data in a format compatible with our representation).
-
-        d.read_struct("SerializedDepGraph", 4, |d| {
-            let nodes: IndexVec<SerializedDepNodeIndex, DepNode<K>> =
-                d.read_struct_field("nodes", 0, |d| {
-                    d.read_seq(|d, len| {
-                        let mut v = IndexVec::with_capacity(len);
-                        for i in 0..len {
-                            v.push(d.read_seq_elt(i, |d| Decodable::decode(d))?);
-                        }
-                        Ok(v)
-                    })
-                })?;
-
-            let fingerprints: IndexVec<SerializedDepNodeIndex, Fingerprint> =
-                d.read_struct_field("fingerprints", 1, |d| {
-                    d.read_seq(|d, len| {
-                        let mut v = IndexVec::with_capacity(len);
-                        for i in 0..len {
-                            v.push(d.read_seq_elt(i, |d| Decodable::decode(d))?);
-                        }
-                        Ok(v)
-                    })
-                })?;
-
-            let edge_list_indices: IndexVec<SerializedDepNodeIndex, (u32, u32)> = d
-                .read_struct_field("edge_list_indices", 2, |d| {
-                    d.read_seq(|d, len| {
-                        let mut v = IndexVec::with_capacity(len);
-                        for i in 0..len {
-                            v.push(d.read_seq_elt(i, |d| Decodable::decode(d))?);
-                        }
-                        Ok(v)
-                    })
-                })?;
-
-            let edge_list_data: Vec<SerializedDepNodeIndex> =
-                d.read_struct_field("edge_list_data", 3, |d| {
-                    d.read_seq(|d, len| {
-                        let mut v = Vec::with_capacity(len);
-                        for i in 0..len {
-                            v.push(d.read_seq_elt(i, |d| Decodable::decode(d))?);
-                        }
-                        Ok(v)
+impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<'a>>
+    for SerializedDepGraph<K>
+{
+    #[instrument(skip(d))]
+    fn decode(d: &mut opaque::Decoder<'a>) -> Result<SerializedDepGraph<K>, String> {
+        let position = d.position();
+
+        // The last 16 bytes are the node count and edge count.
+        debug!("position: {:?}", d.position());
+        d.set_position(d.data.len() - 2 * IntEncodedWithFixedSize::ENCODED_SIZE);
+        debug!("position: {:?}", d.position());
+
+        let node_count = IntEncodedWithFixedSize::decode(d)?.0 as usize;
+        let edge_count = IntEncodedWithFixedSize::decode(d)?.0 as usize;
+        debug!(?node_count, ?edge_count);
+
+        debug!("position: {:?}", d.position());
+        d.set_position(position);
+        debug!("position: {:?}", d.position());
+
+        let mut nodes = IndexVec::with_capacity(node_count);
+        let mut fingerprints = IndexVec::with_capacity(node_count);
+        let mut edge_list_indices = IndexVec::with_capacity(node_count);
+        let mut edge_list_data = Vec::with_capacity(edge_count);
+
+        for _index in 0..node_count {
+            let node = NodeInfo::<K, SerializedDepNodeIndex>::decode(d)?;
+            debug!(?_index, ?node);
+
+            let _i: SerializedDepNodeIndex = nodes.push(node.node);
+            debug_assert_eq!(_i.index(), _index);
+            let _i: SerializedDepNodeIndex = fingerprints.push(node.fingerprint);
+            debug_assert_eq!(_i.index(), _index);
+
+            let start = edge_list_data.len().try_into().unwrap();
+            edge_list_data.extend(node.edges.into_iter());
+            let end = edge_list_data.len().try_into().unwrap();
+
+            let _i: SerializedDepNodeIndex = edge_list_indices.push((start, end));
+            debug_assert_eq!(_i.index(), _index);
+        }
+
+        Ok(SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data })
+    }
+}
+
+#[derive(Debug, Encodable, Decodable)]
+pub struct NodeInfo<K: DepKind, I: Idx> {
+    node: DepNode<K>,
+    fingerprint: Fingerprint,
+    edges: SmallVec<[I; 8]>,
+}
+
+struct Stat<K: DepKind> {
+    kind: K,
+    node_counter: u64,
+    edge_counter: u64,
+}
+
+struct Stats<K: DepKind> {
+    stats: FxHashMap<K, Stat<K>>,
+    total_node_count: usize,
+    total_edge_count: usize,
+}
+
+#[instrument(skip(encoder, _record_graph, record_stats))]
+fn encode_node<K: DepKind>(
+    encoder: &mut FileEncoder,
+    _index: DepNodeIndex,
+    node: &NodeInfo<K, DepNodeIndex>,
+    _record_graph: &Option<Lrc<Lock<DepGraphQuery<K>>>>,
+    record_stats: &Option<Lrc<Lock<Stats<K>>>>,
+) -> FileEncodeResult {
+    #[cfg(debug_assertions)]
+    if let Some(record_graph) = &_record_graph {
+        record_graph.lock().push(_index, node.node, &node.edges);
+    }
+
+    if let Some(record_stats) = &record_stats {
+        let mut stats = record_stats.lock();
+        let kind = node.node.kind;
+        let edge_count = node.edges.len();
+
+        let stat =
+            stats.stats.entry(kind).or_insert(Stat { kind, node_counter: 0, edge_counter: 0 });
+        stat.node_counter += 1;
+        stat.edge_counter += edge_count as u64;
+        stats.total_node_count += 1;
+        stats.total_edge_count += edge_count;
+    }
+
+    debug!(?_index, ?node);
+    node.encode(encoder)
+}
+
+fn encode_counts(
+    mut encoder: FileEncoder,
+    node_count: usize,
+    edge_count: usize,
+) -> FileEncodeResult {
+    let node_count = node_count.try_into().unwrap();
+    let edge_count = edge_count.try_into().unwrap();
+
+    debug!(?node_count, ?edge_count);
+    debug!("position: {:?}", encoder.position());
+    IntEncodedWithFixedSize(node_count).encode(&mut encoder)?;
+    IntEncodedWithFixedSize(edge_count).encode(&mut encoder)?;
+    debug!("position: {:?}", encoder.position());
+    // The encoder was moved into this function, so flushing it here and letting it
+    // drop ensures nothing is written after the counts.
+    encoder.flush()
+}
+
+#[cfg(not(parallel_compiler))]
+pub struct GraphEncoder<K: DepKind> {
+    status: Lock<(FileEncoder, usize, FileEncodeResult)>,
+    counter: AtomicU32,
+    record_graph: Option<Lrc<Lock<DepGraphQuery<K>>>>,
+    record_stats: Option<Lrc<Lock<Stats<K>>>>,
+}
+
+#[cfg(parallel_compiler)]
+pub struct GraphEncoder<K: DepKind> {
+    send: WorkerLocal<mpsc::Sender<(DepNodeIndex, NodeInfo<K, DepNodeIndex>)>>,
+    thread: thread::JoinHandle<FileEncodeResult>,
+    counter: AtomicU32,
+    record_graph: Option<Lrc<Lock<DepGraphQuery<K>>>>,
+    record_stats: Option<Lrc<Lock<Stats<K>>>>,
+}
+
+impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
+    pub fn new(
+        encoder: FileEncoder,
+        prev_node_count: usize,
+        record_graph: bool,
+        record_stats: bool,
+    ) -> Self {
+        let record_graph = if cfg!(debug_assertions) && record_graph {
+            Some(Lrc::new(Lock::new(DepGraphQuery::new(prev_node_count))))
+        } else {
+            None
+        };
+        let record_stats = if record_stats {
+            Some(Lrc::new(Lock::new(Stats {
+                stats: FxHashMap::default(),
+                total_node_count: 0,
+                total_edge_count: 0,
+            })))
+        } else {
+            None
+        };
+        let counter = AtomicU32::new(0);
+
+        #[cfg(not(parallel_compiler))]
+        {
+            let status = Lock::new((encoder, 0, Ok(())));
+            GraphEncoder { status, counter, record_graph, record_stats }
+        }
+        #[cfg(parallel_compiler)]
+        {
+            let (send, recv) = mpsc::channel();
+            let thread = {
+                let record_graph = record_graph.clone();
+                let record_stats = record_stats.clone();
+                thread::spawn(move || {
+                    encode_graph(encoder, recv, |encoder, index, node| {
+                        encode_node(encoder, index, node, &record_graph, &record_stats)
                     })
-                })?;
+                })
+            };
+            let send = WorkerLocal::new(move |_| send.clone());
 
-            Ok(SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data })
-        })
+            GraphEncoder { send, thread, counter, record_graph, record_stats }
+        }
+    }
+
+    pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery<K>)) {
+        if let Some(record_graph) = &self.record_graph {
+            f(&record_graph.lock())
+        }
+    }
+
+    pub(crate) fn print_incremental_info(
+        &self,
+        total_read_count: u64,
+        total_duplicate_read_count: u64,
+    ) {
+        if let Some(record_stats) = &self.record_stats {
+            let record_stats = record_stats.lock();
+
+            let mut stats: Vec<_> = record_stats.stats.values().collect();
+            stats.sort_by_key(|s| -(s.node_counter as i64));
+
+            const SEPARATOR: &str = "[incremental] --------------------------------\
+                                     ----------------------------------------------\
+                                     ------------";
+
+            eprintln!("[incremental]");
+            eprintln!("[incremental] DepGraph Statistics");
+            eprintln!("{}", SEPARATOR);
+            eprintln!("[incremental]");
+            eprintln!("[incremental] Total Node Count: {}", record_stats.total_node_count);
+            eprintln!("[incremental] Total Edge Count: {}", record_stats.total_edge_count);
+
+            if cfg!(debug_assertions) {
+                eprintln!("[incremental] Total Edge Reads: {}", total_read_count);
+                eprintln!(
+                    "[incremental] Total Duplicate Edge Reads: {}",
+                    total_duplicate_read_count
+                );
+            }
+
+            eprintln!("[incremental]");
+            eprintln!(
+                "[incremental]  {:<36}| {:<17}| {:<12}| {:<17}|",
+                "Node Kind", "Node Frequency", "Node Count", "Avg. Edge Count"
+            );
+            eprintln!("{}", SEPARATOR);
+
+            for stat in stats {
+                let node_kind_ratio =
+                    (100.0 * (stat.node_counter as f64)) / (record_stats.total_node_count as f64);
+                let node_kind_avg_edges = (stat.edge_counter as f64) / (stat.node_counter as f64);
+
+                eprintln!(
+                    "[incremental]  {:<36}|{:>16.1}% |{:>12} |{:>17.1} |",
+                    format!("{:?}", stat.kind),
+                    node_kind_ratio,
+                    stat.node_counter,
+                    node_kind_avg_edges,
+                );
+            }
+
+            eprintln!("{}", SEPARATOR);
+            eprintln!("[incremental]");
+        }
+    }
+}
+
+#[cfg(not(parallel_compiler))]
+impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
+    pub(crate) fn send(
+        &self,
+        node: DepNode<K>,
+        fingerprint: Fingerprint,
+        edges: SmallVec<[DepNodeIndex; 8]>,
+    ) -> DepNodeIndex {
+        let index = self.counter.fetch_add(1, Ordering::SeqCst);
+        let index = DepNodeIndex::from_u32(index);
+        let &mut (ref mut encoder, ref mut edge_count, ref mut result) = &mut *self.status.lock();
+        *edge_count += edges.len();
+        *result = std::mem::replace(result, Ok(())).and_then(|()| {
+            let node = NodeInfo { node, fingerprint, edges };
+            encode_node(encoder, index, &node, &self.record_graph, &self.record_stats)
+        });
+        index
+    }
+
+    pub fn finish(self) -> FileEncodeResult {
+        let (encoder, edge_count, result) = self.status.into_inner();
+        let () = result?;
+        let node_count = self.counter.into_inner() as usize;
+
+        encode_counts(encoder, node_count, edge_count)
+    }
+}
+
+#[cfg(parallel_compiler)]
+impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
+    pub(crate) fn send(
+        &self,
+        node: DepNode<K>,
+        fingerprint: Fingerprint,
+        edges: SmallVec<[DepNodeIndex; 8]>,
+    ) -> DepNodeIndex {
+        let node = NodeInfo { node, fingerprint, edges };
+        let index = self.counter.fetch_add(1, Ordering::SeqCst);
+        let index = DepNodeIndex::from_u32(index);
+        self.send.send((index, node)).unwrap();
+        index
     }
+
+    pub fn finish(self) -> FileEncodeResult {
+        std::mem::drop(self.send);
+        self.thread.join().unwrap()
+    }
+}
+
+#[cfg(parallel_compiler)]
+#[instrument(skip(encoder, recv, process))]
+fn encode_graph<K: DepKind + Encodable<FileEncoder>>(
+    mut encoder: FileEncoder,
+    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K, DepNodeIndex>)>,
+    process: impl Fn(&mut FileEncoder, DepNodeIndex, &NodeInfo<K, DepNodeIndex>) -> FileEncodeResult,
+) -> FileEncodeResult {
+    let mut edge_count: usize = 0;
+    let node_count: usize = ordered_recv(recv, |index, node| {
+        edge_count += node.edges.len();
+        process(&mut encoder, index, node)
+    })?;
+
+    encode_counts(encoder, node_count, edge_count)
+}
+
+/// Since there are multiple producers assigning the DepNodeIndex using an atomic,
+/// the messages may not arrive in order. This function sorts them as they come.
+#[cfg(parallel_compiler)]
+fn ordered_recv<K: DepKind + Encodable<opaque::FileEncoder>>(
+    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K, DepNodeIndex>)>,
+    mut f: impl FnMut(DepNodeIndex, &NodeInfo<K, DepNodeIndex>) -> FileEncodeResult,
+) -> Result<usize, std::io::Error> {
+    let mut pending = Vec::<(DepNodeIndex, _)>::new();
+    let mut expected = DepNodeIndex::new(0);
+
+    // INVARIANT: No message can arrive with an index less than `expected`.
+    'outer: loop {
+        pending.sort_by_key(|n| n.0);
+        for (index, node) in pending.drain_filter(|(index, _)| {
+            if *index == expected {
+                expected.increment_by(1);
+                true
+            } else {
+                false
+            }
+        }) {
+            f(index, &node)?;
+        }
+
+        while let Ok((index, node)) = recv.recv() {
+            if index > expected {
+                pending.push((index, node));
+            } else if index == expected {
+                f(index, &node)?;
+                expected.increment_by(1);
+                continue 'outer;
+            } else {
+                panic!("Unexpected index {:?} while waiting for {:?}", index, expected);
+            }
+        }
+
+        break;
+    }
+
+    Ok(expected.as_u32() as usize)
 }
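
`ordered_recv` above re-sequences messages whose `DepNodeIndex`es were handed out by an atomic counter across worker threads, so that nodes reach the file encoder in index order. A minimal standalone sketch of the same idea, using a `BTreeMap` as the pending buffer instead of the `Vec`/`drain_filter` loop, and assuming indices are assigned contiguously from 0 (function names are illustrative):

// Sketch only: buffer out-of-order (index, item) messages and process them in order.
use std::collections::BTreeMap;
use std::sync::mpsc;

fn ordered_consume<T>(recv: mpsc::Receiver<(u32, T)>, mut f: impl FnMut(u32, T)) -> u32 {
    let mut pending = BTreeMap::new();
    let mut expected = 0u32;
    while let Ok((index, item)) = recv.recv() {
        // Indices are unique and contiguous, so nothing below `expected` can arrive.
        assert!(index >= expected, "index {} already processed", index);
        pending.insert(index, item);
        // Flush the contiguous prefix starting at `expected`.
        while let Some(item) = pending.remove(&expected) {
            f(expected, item);
            expected += 1;
        }
    }
    expected // number of items processed
}

fn main() {
    let (send, recv) = mpsc::channel();
    for i in [2u32, 0, 1] {
        send.send((i, format!("node {}", i))).unwrap();
    }
    drop(send);
    let count = ordered_consume(recv, |i, s| println!("{} -> {}", i, s));
    assert_eq!(count, 3);
}
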
diff --git a/compiler/rustc_query_system/src/lib.rs b/compiler/rustc_query_system/src/lib.rs
index 3db57c0ab3a4c..071144f38e702 100644
--- a/compiler/rustc_query_system/src/lib.rs
+++ b/compiler/rustc_query_system/src/lib.rs
@@ -2,6 +2,7 @@
 #![feature(const_fn)]
 #![feature(const_panic)]
 #![feature(core_intrinsics)]
+#![feature(drain_filter)]
 #![feature(hash_raw_entry)]
 #![feature(iter_zip)]
 #![feature(min_specialization)]
diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs
index 77267489a7526..b58802474757e 100644
--- a/compiler/rustc_query_system/src/query/plumbing.rs
+++ b/compiler/rustc_query_system/src/query/plumbing.rs
@@ -537,7 +537,7 @@ where
         // If `-Zincremental-verify-ich` is specified, re-hash results from
         // the cache and make sure that they have the expected fingerprint.
         if unlikely!(tcx.dep_context().sess().opts.debugging_opts.incremental_verify_ich) {
-            incremental_verify_ich(*tcx.dep_context(), &result, dep_node, dep_node_index, query);
+            incremental_verify_ich(*tcx.dep_context(), &result, dep_node, query);
         }
 
         result
@@ -560,7 +560,7 @@ where
         //
         // See issue #82920 for an example of a miscompilation that would get turned into
         // an ICE by this check
-        incremental_verify_ich(*tcx.dep_context(), &result, dep_node, dep_node_index, query);
+        incremental_verify_ich(*tcx.dep_context(), &result, dep_node, query);
 
         result
     }
@@ -570,14 +570,12 @@ fn incremental_verify_ich<CTX, K, V: Debug>(
     tcx: CTX::DepContext,
     result: &V,
     dep_node: &DepNode<CTX::DepKind>,
-    dep_node_index: DepNodeIndex,
     query: &QueryVtable<CTX, K, V>,
 ) where
     CTX: QueryContext,
 {
     assert!(
-        Some(tcx.dep_graph().fingerprint_of(dep_node_index))
-            == tcx.dep_graph().prev_fingerprint_of(dep_node),
+        tcx.dep_graph().is_green(dep_node),
         "fingerprint for green query instance not loaded from cache: {:?}",
         dep_node,
     );
@@ -588,9 +586,15 @@ fn incremental_verify_ich<CTX, K, V: Debug>(
     let new_hash = query.hash_result(&mut hcx, result).unwrap_or(Fingerprint::ZERO);
     debug!("END verify_ich({:?})", dep_node);
 
-    let old_hash = tcx.dep_graph().fingerprint_of(dep_node_index);
+    let old_hash = tcx.dep_graph().prev_fingerprint_of(dep_node);
 
-    assert!(new_hash == old_hash, "found unstable fingerprints for {:?}: {:?}", dep_node, result);
+    assert_eq!(
+        Some(new_hash),
+        old_hash,
+        "found unstable fingerprints for {:?}: {:?}",
+        dep_node,
+        result
+    );
 }
 
 fn force_query_with_job<C, CTX>(
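
One design point of this patch worth calling out: `encode_counts` appends the node and edge counts after all node records, and the decoder recovers them by seeking to the last 16 bytes before decoding from the start. A minimal sketch of that footer layout, using a toy byte format rather than rustc's `IntEncodedWithFixedSize` encoding (names are illustrative):

// Sketch only: stream records first, append fixed-width counts as a footer.
fn write_with_footer(records: &[(u8, u8)]) -> Vec<u8> {
    let mut buf = Vec::new();
    let mut edge_count: u64 = 0;
    for &(node, edges) in records {
        buf.push(node);
        buf.push(edges);
        edge_count += u64::from(edges);
    }
    // Two little-endian u64s at the very end: node count, then edge count.
    buf.extend_from_slice(&(records.len() as u64).to_le_bytes());
    buf.extend_from_slice(&edge_count.to_le_bytes());
    buf
}

fn read_u64_le(bytes: &[u8]) -> u64 {
    let mut raw = [0u8; 8];
    raw.copy_from_slice(&bytes[..8]);
    u64::from_le_bytes(raw)
}

fn read_footer(buf: &[u8]) -> (u64, u64) {
    // Seek to `len - 16`, read the counts, then decoding can pre-allocate.
    let footer = &buf[buf.len() - 16..];
    (read_u64_le(&footer[..8]), read_u64_le(&footer[8..]))
}

fn main() {
    let buf = write_with_footer(&[(1, 2), (2, 3)]);
    assert_eq!(read_footer(&buf), (2, 5));
}

The point of the footer is that the encoder never has to buffer the whole graph: records stream out as they are produced, and the counts are written exactly once, when they are finally known.
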

From 39b306a53db513c754d5e8d60fc2691829209131 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Sat, 6 Mar 2021 11:17:56 +0100
Subject: [PATCH 02/11] Do not allocate in decoder.

---
 .../src/dep_graph/serialized.rs               | 58 +++++++++++--------
 1 file changed, 34 insertions(+), 24 deletions(-)

diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index a76100cc2281f..3067da9436d86 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -5,9 +5,9 @@ use super::{DepKind, DepNode, DepNodeIndex};
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::{AtomicU32, Lock, Lrc, Ordering};
-use rustc_index::vec::{Idx, IndexVec};
+use rustc_index::vec::IndexVec;
 use rustc_serialize::opaque::{self, FileEncodeResult, FileEncoder, IntEncodedWithFixedSize};
-use rustc_serialize::{Decodable, Encodable};
+use rustc_serialize::{Decodable, Decoder, Encodable};
 use smallvec::SmallVec;
 use std::convert::TryInto;
 
@@ -85,20 +85,30 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
         let mut edge_list_data = Vec::with_capacity(edge_count);
 
         for _index in 0..node_count {
-            let node = NodeInfo::<K, SerializedDepNodeIndex>::decode(d)?;
-            debug!(?_index, ?node);
-
-            let _i: SerializedDepNodeIndex = nodes.push(node.node);
-            debug_assert_eq!(_i.index(), _index);
-            let _i: SerializedDepNodeIndex = fingerprints.push(node.fingerprint);
-            debug_assert_eq!(_i.index(), _index);
-
-            let start = edge_list_data.len().try_into().unwrap();
-            edge_list_data.extend(node.edges.into_iter());
-            let end = edge_list_data.len().try_into().unwrap();
-
-            let _i: SerializedDepNodeIndex = edge_list_indices.push((start, end));
-            debug_assert_eq!(_i.index(), _index);
+            d.read_struct("NodeInfo", 3, |d| {
+                let dep_node: DepNode<K> = d.read_struct_field("node", 0, Decodable::decode)?;
+                let _i: SerializedDepNodeIndex = nodes.push(dep_node);
+                debug_assert_eq!(_i.index(), _index);
+
+                let fingerprint: Fingerprint =
+                    d.read_struct_field("fingerprint", 1, Decodable::decode)?;
+                let _i: SerializedDepNodeIndex = fingerprints.push(fingerprint);
+                debug_assert_eq!(_i.index(), _index);
+
+                d.read_struct_field("edges", 2, |d| {
+                    d.read_seq(|d, len| {
+                        let start = edge_list_data.len().try_into().unwrap();
+                        for e in 0..len {
+                            let edge = d.read_seq_elt(e, Decodable::decode)?;
+                            edge_list_data.push(edge);
+                        }
+                        let end = edge_list_data.len().try_into().unwrap();
+                        let _i: SerializedDepNodeIndex = edge_list_indices.push((start, end));
+                        debug_assert_eq!(_i.index(), _index);
+                        Ok(())
+                    })
+                })
+            })?;
         }
 
         Ok(SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data })
@@ -106,10 +116,10 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
 }
 
 #[derive(Debug, Encodable, Decodable)]
-pub struct NodeInfo<K: DepKind, I: Idx> {
+pub struct NodeInfo<K: DepKind> {
     node: DepNode<K>,
     fingerprint: Fingerprint,
-    edges: SmallVec<[I; 8]>,
+    edges: SmallVec<[DepNodeIndex; 8]>,
 }
 
 struct Stat<K: DepKind> {
@@ -128,7 +138,7 @@ struct Stats<K: DepKind> {
 fn encode_node<K: DepKind>(
     encoder: &mut FileEncoder,
     _index: DepNodeIndex,
-    node: &NodeInfo<K, DepNodeIndex>,
+    node: &NodeInfo<K>,
     _record_graph: &Option<Lrc<Lock<DepGraphQuery<K>>>>,
     record_stats: &Option<Lrc<Lock<Stats<K>>>>,
 ) -> FileEncodeResult {
@@ -181,7 +191,7 @@ pub struct GraphEncoder<K: DepKind> {
 
 #[cfg(parallel_compiler)]
 pub struct GraphEncoder<K: DepKind> {
-    send: WorkerLocal<mpsc::Sender<(DepNodeIndex, NodeInfo<K, DepNodeIndex>)>>,
+    send: WorkerLocal<mpsc::Sender<(DepNodeIndex, NodeInfo<K>)>>,
     thread: thread::JoinHandle<FileEncodeResult>,
     counter: AtomicU32,
     record_graph: Option<Lrc<Lock<DepGraphQuery<K>>>>,
@@ -350,8 +360,8 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
 #[instrument(skip(encoder, recv, process))]
 fn encode_graph<K: DepKind + Encodable<FileEncoder>>(
     mut encoder: FileEncoder,
-    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K, DepNodeIndex>)>,
-    process: impl Fn(&mut FileEncoder, DepNodeIndex, &NodeInfo<K, DepNodeIndex>) -> FileEncodeResult,
+    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K>)>,
+    process: impl Fn(&mut FileEncoder, DepNodeIndex, &NodeInfo<K>) -> FileEncodeResult,
 ) -> FileEncodeResult {
     let mut edge_count: usize = 0;
     let node_count: usize = ordered_recv(recv, |index, node| {
@@ -366,8 +376,8 @@ fn encode_graph<K: DepKind + Encodable<FileEncoder>>(
 /// the messages may not arrive in order. This function sorts them as they come.
 #[cfg(parallel_compiler)]
 fn ordered_recv<K: DepKind + Encodable<opaque::FileEncoder>>(
-    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K, DepNodeIndex>)>,
-    mut f: impl FnMut(DepNodeIndex, &NodeInfo<K, DepNodeIndex>) -> FileEncodeResult,
+    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K>)>,
+    mut f: impl FnMut(DepNodeIndex, &NodeInfo<K>) -> FileEncodeResult,
 ) -> Result<usize, std::io::Error> {
     let mut pending = Vec::<(DepNodeIndex, _)>::new();
     let mut expected = DepNodeIndex::new(0);
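
The decoder change above writes each node's edges straight into the shared `edge_list_data` buffer and records only a `(start, end)` range per node, instead of first materializing a `NodeInfo` and its `SmallVec`. A minimal sketch of that flat adjacency layout with simplified `u32` indices (type and method names are illustrative):

// Sketch only: one flat edge buffer plus a (start, end) range per node.
struct FlatEdges {
    edge_list_indices: Vec<(u32, u32)>,
    edge_list_data: Vec<u32>,
}

impl FlatEdges {
    fn push_node<I: IntoIterator<Item = u32>>(&mut self, edges: I) {
        let start = self.edge_list_data.len() as u32;
        self.edge_list_data.extend(edges);
        let end = self.edge_list_data.len() as u32;
        self.edge_list_indices.push((start, end));
    }

    fn edges_of(&self, node: usize) -> &[u32] {
        let (start, end) = self.edge_list_indices[node];
        &self.edge_list_data[start as usize..end as usize]
    }
}

fn main() {
    let mut g = FlatEdges { edge_list_indices: Vec::new(), edge_list_data: Vec::new() };
    g.push_node([1, 2]);
    g.push_node([0]);
    assert_eq!(g.edges_of(0), &[1, 2]);
    assert_eq!(g.edges_of(1), &[0]);
}
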

From cfe786e5e00316fb70b48fc6e324b72acf069df4 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Sat, 6 Mar 2021 13:55:20 +0100
Subject: [PATCH 03/11] Fix tests.

Avoid invoking queries inside `check_paths`, since we are holding a lock
on the reconstructed graph.
---
 .../rustc_query_system/src/dep_graph/query.rs   | 17 +++++++++++++----
 .../src/dep_graph/serialized.rs                 |  9 ++++++++-
 src/test/ui/async-await/issues/issue-64964.rs   |  2 +-
 .../dep-graph/dep-graph-assoc-type-codegen.rs   |  2 +-
 .../ui/dep-graph/dep-graph-caller-callee.rs     |  2 +-
 .../ui/dep-graph/dep-graph-struct-signature.rs  |  2 +-
 ...p-graph-trait-impl-two-traits-same-method.rs |  2 +-
 .../dep-graph-trait-impl-two-traits.rs          |  2 +-
 src/test/ui/dep-graph/dep-graph-trait-impl.rs   |  2 +-
 src/test/ui/dep-graph/dep-graph-type-alias.rs   |  2 +-
 .../ui/dep-graph/dep-graph-variance-alias.rs    |  2 +-
 11 files changed, 30 insertions(+), 14 deletions(-)

diff --git a/compiler/rustc_query_system/src/dep_graph/query.rs b/compiler/rustc_query_system/src/dep_graph/query.rs
index 9c85cdd59d9d4..0fe3748e38630 100644
--- a/compiler/rustc_query_system/src/dep_graph/query.rs
+++ b/compiler/rustc_query_system/src/dep_graph/query.rs
@@ -1,11 +1,13 @@
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph::implementation::{Direction, Graph, NodeIndex, INCOMING};
+use rustc_index::vec::IndexVec;
 
 use super::{DepKind, DepNode, DepNodeIndex};
 
 pub struct DepGraphQuery<K> {
     pub graph: Graph<DepNode<K>, ()>,
     pub indices: FxHashMap<DepNode<K>, NodeIndex>,
+    pub dep_index_to_index: IndexVec<DepNodeIndex, Option<NodeIndex>>,
 }
 
 impl<K: DepKind> DepGraphQuery<K> {
@@ -15,18 +17,25 @@ impl<K: DepKind> DepGraphQuery<K> {
 
         let graph = Graph::with_capacity(node_count, edge_count);
         let indices = FxHashMap::default();
+        let dep_index_to_index = IndexVec::new();
 
-        DepGraphQuery { graph, indices }
+        DepGraphQuery { graph, indices, dep_index_to_index }
     }
 
     pub fn push(&mut self, index: DepNodeIndex, node: DepNode<K>, edges: &[DepNodeIndex]) {
         let source = self.graph.add_node(node);
-        debug_assert_eq!(index.index(), source.0);
+        if index.index() >= self.dep_index_to_index.len() {
+            self.dep_index_to_index.resize(index.index() + 1, None);
+        }
+        self.dep_index_to_index[index] = Some(source);
         self.indices.insert(node, source);
 
         for &target in edges.iter() {
-            let target = NodeIndex(target.index());
-            self.graph.add_edge(source, target, ());
+            let target = self.dep_index_to_index[target];
+            // Skip missing edges.
+            if let Some(target) = target {
+                self.graph.add_edge(source, target, ());
+            }
         }
     }
 
diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 3067da9436d86..6a3cc215a0b3c 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -144,7 +144,14 @@ fn encode_node<K: DepKind>(
 ) -> FileEncodeResult {
     #[cfg(debug_assertions)]
     if let Some(record_graph) = &_record_graph {
-        record_graph.lock().push(_index, node.node, &node.edges);
+        if let Some(record_graph) = &mut if cfg!(parallel_compiler) {
+            Some(record_graph.lock())
+        } else {
+            // Do not ICE when a query is called from within `with_query`.
+            record_graph.try_lock()
+        } {
+            record_graph.push(_index, node.node, &node.edges);
+        }
     }
 
     if let Some(record_stats) = &record_stats {
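
The switch to `try_lock` above avoids re-entrancy trouble: if recording a node happens while `with_query` already holds the lock on the reconstructed graph (because a query ran inside the closure), recording is skipped instead of ICEing on the re-borrow. A small standalone illustration of that pattern, with `std::sync::Mutex` standing in for rustc's `Lock` (names are illustrative):

// Sketch only: skip recording instead of blocking when the lock is already held.
use std::sync::Mutex;

fn record(graph: &Mutex<Vec<u32>>, value: u32) {
    // `try_lock` fails without blocking if the lock is held further up the stack.
    if let Ok(mut graph) = graph.try_lock() {
        graph.push(value);
    }
}

fn main() {
    let graph = Mutex::new(Vec::new());
    record(&graph, 1);
    let held = graph.lock().unwrap();
    // With the lock held on this thread, recording is silently skipped.
    record(&graph, 2);
    drop(held);
    assert_eq!(*graph.lock().unwrap(), vec![1]);
}
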
diff --git a/src/test/ui/async-await/issues/issue-64964.rs b/src/test/ui/async-await/issues/issue-64964.rs
index 11f6cb6af9cc6..5313d1715c483 100644
--- a/src/test/ui/async-await/issues/issue-64964.rs
+++ b/src/test/ui/async-await/issues/issue-64964.rs
@@ -1,5 +1,5 @@
 // check-pass
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/issue-64964
 // edition:2018
 
 // Regression test for ICE related to `await`ing in a method + incr. comp. (#64964)
diff --git a/src/test/ui/dep-graph/dep-graph-assoc-type-codegen.rs b/src/test/ui/dep-graph/dep-graph-assoc-type-codegen.rs
index 0d11d933af04e..a0ee3ad31e697 100644
--- a/src/test/ui/dep-graph/dep-graph-assoc-type-codegen.rs
+++ b/src/test/ui/dep-graph/dep-graph-assoc-type-codegen.rs
@@ -1,7 +1,7 @@
 // Test that when a trait impl changes, fns whose body uses that trait
 // must also be recompiled.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-assoc-type-codegen
 
 #![feature(rustc_attrs)]
 #![allow(warnings)]
diff --git a/src/test/ui/dep-graph/dep-graph-caller-callee.rs b/src/test/ui/dep-graph/dep-graph-caller-callee.rs
index b12c635d2e733..c95ea53650b47 100644
--- a/src/test/ui/dep-graph/dep-graph-caller-callee.rs
+++ b/src/test/ui/dep-graph/dep-graph-caller-callee.rs
@@ -1,7 +1,7 @@
 // Test that immediate callers have to change when callee changes, but
 // not callers' callers.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-caller-callee
 
 #![feature(rustc_attrs)]
 #![allow(dead_code)]
diff --git a/src/test/ui/dep-graph/dep-graph-struct-signature.rs b/src/test/ui/dep-graph/dep-graph-struct-signature.rs
index 7ef6fac48c3a6..50a670b877238 100644
--- a/src/test/ui/dep-graph/dep-graph-struct-signature.rs
+++ b/src/test/ui/dep-graph/dep-graph-struct-signature.rs
@@ -1,7 +1,7 @@
 // Test cases where a changing struct appears in the signature of fns
 // and methods.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-struct-signature
 
 #![feature(rustc_attrs)]
 #![allow(dead_code)]
diff --git a/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits-same-method.rs b/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits-same-method.rs
index 1b3bf5a3933fe..c0a6617316b8d 100644
--- a/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits-same-method.rs
+++ b/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits-same-method.rs
@@ -1,7 +1,7 @@
 // Test that adding an impl to a trait `Foo` DOES affect functions
 // that only use `Bar` if they have methods in common.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-trait-impl-two-traits-same-method
 
 #![feature(rustc_attrs)]
 #![allow(dead_code)]
diff --git a/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits.rs b/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits.rs
index ebfe8ccc3dfaf..56e9762ddb26c 100644
--- a/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits.rs
+++ b/src/test/ui/dep-graph/dep-graph-trait-impl-two-traits.rs
@@ -1,7 +1,7 @@
 // Test that adding an impl to a trait `Foo` does not affect functions
 // that only use `Bar`, so long as they do not have methods in common.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-trait-impl-two-traits
 
 #![feature(rustc_attrs)]
 #![allow(warnings)]
diff --git a/src/test/ui/dep-graph/dep-graph-trait-impl.rs b/src/test/ui/dep-graph/dep-graph-trait-impl.rs
index 9dd201e2a1fbc..3bbe3e745ca69 100644
--- a/src/test/ui/dep-graph/dep-graph-trait-impl.rs
+++ b/src/test/ui/dep-graph/dep-graph-trait-impl.rs
@@ -1,7 +1,7 @@
 // Test that when a trait impl changes, fns whose body uses that trait
 // must also be recompiled.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-trait-impl
 
 #![feature(rustc_attrs)]
 #![allow(warnings)]
diff --git a/src/test/ui/dep-graph/dep-graph-type-alias.rs b/src/test/ui/dep-graph/dep-graph-type-alias.rs
index c9151ce79c5f6..5c5e24693a4f5 100644
--- a/src/test/ui/dep-graph/dep-graph-type-alias.rs
+++ b/src/test/ui/dep-graph/dep-graph-type-alias.rs
@@ -1,6 +1,6 @@
 // Test that changing what a `type` points to does not go unnoticed.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-type-alias
 
 #![feature(rustc_attrs)]
 #![allow(dead_code)]
diff --git a/src/test/ui/dep-graph/dep-graph-variance-alias.rs b/src/test/ui/dep-graph/dep-graph-variance-alias.rs
index 927ea5597783a..6cc1f44104a09 100644
--- a/src/test/ui/dep-graph/dep-graph-variance-alias.rs
+++ b/src/test/ui/dep-graph/dep-graph-variance-alias.rs
@@ -1,7 +1,7 @@
 // Test that changing what a `type` points to does not go unnoticed
 // by the variance analysis.
 
-// compile-flags: -Z query-dep-graph
+// compile-flags: -Z query-dep-graph -C incremental=tmp/dep-graph-variance-alias
 
 #![feature(rustc_attrs)]
 #![allow(dead_code)]

From 8208872fa28550068e9e30075af09da1a8144fb4 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Sat, 6 Mar 2021 15:58:32 +0100
Subject: [PATCH 04/11] Fix parallel compiler.

---
 compiler/rustc_query_system/src/dep_graph/serialized.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 6a3cc215a0b3c..7b6ee721eba9f 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -12,7 +12,9 @@ use smallvec::SmallVec;
 use std::convert::TryInto;
 
 #[cfg(parallel_compiler)]
-use {rustc_data_structures::sync::WorkerLocal, std::sync::mpsc, std::thread};
+use {
+    rustc_data_structures::sync::WorkerLocal, rustc_index::vec::Idx, std::sync::mpsc, std::thread,
+};
 
 // The maximum value of `SerializedDepNodeIndex` leaves the upper two bits
 // unused so that we can store multiple index types in `CompressedHybridIndex`,

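The `CompressedHybridIndex` comment above describes reserving the upper two bits of an index as a tag saying which index type the remaining bits refer to. A rough, self-contained sketch of that idea, with illustrative constants and function names that are not the compiler's own:

// Pack a 2-bit tag into the top of a u32 index (illustrative values only).
const TAG_BITS: u32 = 2;
const TAG_SHIFT: u32 = 32 - TAG_BITS;
const INDEX_MASK: u32 = (1 << TAG_SHIFT) - 1;

fn pack(tag: u32, index: u32) -> u32 {
    assert!(tag < (1 << TAG_BITS) && index <= INDEX_MASK);
    (tag << TAG_SHIFT) | index
}

fn unpack(packed: u32) -> (u32, u32) {
    // Recover the tag from the upper bits and the index from the rest.
    (packed >> TAG_SHIFT, packed & INDEX_MASK)
}

fn main() {
    let packed = pack(0b10, 12_345);
    assert_eq!(unpack(packed), (0b10, 12_345));
}
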
From e1c99e5fccdd97e7b78e364c3e408f535994e23c Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Tue, 16 Mar 2021 19:32:00 +0100
Subject: [PATCH 05/11] Remove the parallel version.

---
 .../src/dep_graph/serialized.rs               | 148 ++----------------
 1 file changed, 13 insertions(+), 135 deletions(-)

diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 7b6ee721eba9f..6cb728bf0a855 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -4,18 +4,13 @@ use super::query::DepGraphQuery;
 use super::{DepKind, DepNode, DepNodeIndex};
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::{AtomicU32, Lock, Lrc, Ordering};
-use rustc_index::vec::IndexVec;
+use rustc_data_structures::sync::{Lock, Lrc};
+use rustc_index::vec::{Idx, IndexVec};
 use rustc_serialize::opaque::{self, FileEncodeResult, FileEncoder, IntEncodedWithFixedSize};
 use rustc_serialize::{Decodable, Decoder, Encodable};
 use smallvec::SmallVec;
 use std::convert::TryInto;
 
-#[cfg(parallel_compiler)]
-use {
-    rustc_data_structures::sync::WorkerLocal, rustc_index::vec::Idx, std::sync::mpsc, std::thread,
-};
-
 // The maximum value of `SerializedDepNodeIndex` leaves the upper two bits
 // unused so that we can store multiple index types in `CompressedHybridIndex`,
 // and use those bits to encode which index type it contains.
@@ -146,12 +141,8 @@ fn encode_node<K: DepKind>(
 ) -> FileEncodeResult {
     #[cfg(debug_assertions)]
     if let Some(record_graph) = &_record_graph {
-        if let Some(record_graph) = &mut if cfg!(parallel_compiler) {
-            Some(record_graph.lock())
-        } else {
-            // Do not ICE when a query is called from within `with_query`.
-            record_graph.try_lock()
-        } {
+        // Do not ICE when a query is called from within `with_query`.
+        if let Some(record_graph) = &mut record_graph.try_lock() {
             record_graph.push(_index, node.node, &node.edges);
         }
     }
@@ -190,19 +181,8 @@ fn encode_counts(
     encoder.flush()
 }
 
-#[cfg(not(parallel_compiler))]
 pub struct GraphEncoder<K: DepKind> {
-    status: Lock<(FileEncoder, usize, FileEncodeResult)>,
-    counter: AtomicU32,
-    record_graph: Option<Lrc<Lock<DepGraphQuery<K>>>>,
-    record_stats: Option<Lrc<Lock<Stats<K>>>>,
-}
-
-#[cfg(parallel_compiler)]
-pub struct GraphEncoder<K: DepKind> {
-    send: WorkerLocal<mpsc::Sender<(DepNodeIndex, NodeInfo<K>)>>,
-    thread: thread::JoinHandle<FileEncodeResult>,
-    counter: AtomicU32,
+    status: Lock<(FileEncoder, DepNodeIndex, usize, FileEncodeResult)>,
     record_graph: Option<Lrc<Lock<DepGraphQuery<K>>>>,
     record_stats: Option<Lrc<Lock<Stats<K>>>>,
 }
@@ -228,29 +208,8 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         } else {
             None
         };
-        let counter = AtomicU32::new(0);
-
-        #[cfg(not(parallel_compiler))]
-        {
-            let status = Lock::new((encoder, 0, Ok(())));
-            GraphEncoder { status, counter, record_graph, record_stats }
-        }
-        #[cfg(parallel_compiler)]
-        {
-            let (send, recv) = mpsc::channel();
-            let thread = {
-                let record_graph = record_graph.clone();
-                let record_stats = record_stats.clone();
-                thread::spawn(move || {
-                    encode_graph(encoder, recv, |encoder, index, node| {
-                        encode_node(encoder, index, node, &record_graph, &record_stats)
-                    })
-                })
-            };
-            let send = WorkerLocal::new(move |_| send.clone());
-
-            GraphEncoder { send, thread, counter, record_graph, record_stats }
-        }
+        let status = Lock::new((encoder, DepNodeIndex::new(0), 0, Ok(())));
+        GraphEncoder { status, record_graph, record_stats }
     }
 
     pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery<K>)) {
@@ -314,19 +273,17 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
             eprintln!("[incremental]");
         }
     }
-}
 
-#[cfg(not(parallel_compiler))]
-impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
     pub(crate) fn send(
         &self,
         node: DepNode<K>,
         fingerprint: Fingerprint,
         edges: SmallVec<[DepNodeIndex; 8]>,
     ) -> DepNodeIndex {
-        let index = self.counter.fetch_add(1, Ordering::SeqCst);
-        let index = DepNodeIndex::from_u32(index);
-        let &mut (ref mut encoder, ref mut edge_count, ref mut result) = &mut *self.status.lock();
+        let &mut (ref mut encoder, ref mut next_index, ref mut edge_count, ref mut result) =
+            &mut *self.status.lock();
+        let index = next_index.clone();
+        next_index.increment_by(1);
         *edge_count += edges.len();
         *result = std::mem::replace(result, Ok(())).and_then(|()| {
             let node = NodeInfo { node, fingerprint, edges };
@@ -336,89 +293,10 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
     }
 
     pub fn finish(self) -> FileEncodeResult {
-        let (encoder, edge_count, result) = self.status.into_inner();
+        let (encoder, node_count, edge_count, result) = self.status.into_inner();
         let () = result?;
-        let node_count = self.counter.into_inner() as usize;
+        let node_count = node_count.index();
 
         encode_counts(encoder, node_count, edge_count)
     }
 }
-
-#[cfg(parallel_compiler)]
-impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
-    pub(crate) fn send(
-        &self,
-        node: DepNode<K>,
-        fingerprint: Fingerprint,
-        edges: SmallVec<[DepNodeIndex; 8]>,
-    ) -> DepNodeIndex {
-        let node = NodeInfo { node, fingerprint, edges };
-        let index = self.counter.fetch_add(1, Ordering::SeqCst);
-        let index = DepNodeIndex::from_u32(index);
-        self.send.send((index, node)).unwrap();
-        index
-    }
-
-    pub fn finish(self) -> FileEncodeResult {
-        std::mem::drop(self.send);
-        self.thread.join().unwrap()
-    }
-}
-
-#[cfg(parallel_compiler)]
-#[instrument(skip(encoder, recv, process))]
-fn encode_graph<K: DepKind + Encodable<FileEncoder>>(
-    mut encoder: FileEncoder,
-    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K>)>,
-    process: impl Fn(&mut FileEncoder, DepNodeIndex, &NodeInfo<K>) -> FileEncodeResult,
-) -> FileEncodeResult {
-    let mut edge_count: usize = 0;
-    let node_count: usize = ordered_recv(recv, |index, node| {
-        edge_count += node.edges.len();
-        process(&mut encoder, index, node)
-    })?;
-
-    encode_counts(encoder, node_count, edge_count)
-}
-
-/// Since there are multiple producers assigning the DepNodeIndex using an atomic,
-/// the messages may not arrive in order. This function sorts them as they come.
-#[cfg(parallel_compiler)]
-fn ordered_recv<K: DepKind + Encodable<opaque::FileEncoder>>(
-    recv: mpsc::Receiver<(DepNodeIndex, NodeInfo<K>)>,
-    mut f: impl FnMut(DepNodeIndex, &NodeInfo<K>) -> FileEncodeResult,
-) -> Result<usize, std::io::Error> {
-    let mut pending = Vec::<(DepNodeIndex, _)>::new();
-    let mut expected = DepNodeIndex::new(0);
-
-    // INVARIANT: No message can arrive with an index less than `expected`.
-    'outer: loop {
-        pending.sort_by_key(|n| n.0);
-        for (index, node) in pending.drain_filter(|(index, _)| {
-            if *index == expected {
-                expected.increment_by(1);
-                true
-            } else {
-                false
-            }
-        }) {
-            f(index, &node)?;
-        }
-
-        while let Ok((index, node)) = recv.recv() {
-            if index > expected {
-                pending.push((index, node));
-            } else if index == expected {
-                f(index, &node)?;
-                expected.increment_by(1);
-                continue 'outer;
-            } else {
-                panic!("Unexpected index {:?} while waiting for {:?}", index, expected);
-            }
-        }
-
-        break;
-    }
-
-    Ok(expected.as_u32() as usize)
-}

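The "Do not ICE when a query is called from within `with_query`" comment relies on `try_lock` failing for a re-entrant access instead of panicking. A tiny standalone sketch of that pattern, using `RefCell` in place of rustc's non-parallel `Lock`; the `Recorder` type and its methods are made up for illustration:

use std::cell::RefCell;

struct Recorder {
    nodes: RefCell<Vec<&'static str>>,
}

impl Recorder {
    // Called for every new node. If the graph is currently borrowed
    // (i.e. we are inside `with_query`), skip recording instead of
    // panicking on a double borrow.
    fn record(&self, name: &'static str) {
        if let Ok(mut nodes) = self.nodes.try_borrow_mut() {
            nodes.push(name);
        }
    }

    // Hands out read access to the recorded nodes; any `record` call
    // made from inside `f` hits the `try_borrow_mut` failure path.
    fn with_query(&self, f: impl Fn(&[&'static str])) {
        f(&self.nodes.borrow());
    }
}

fn main() {
    let r = Recorder { nodes: RefCell::new(Vec::new()) };
    r.record("a");
    r.with_query(|nodes| {
        // Re-entrant recording is silently dropped rather than aborting.
        r.record("b");
        assert_eq!(nodes, ["a"]);
    });
    assert_eq!(r.nodes.borrow().len(), 1);
}
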
From c5c935af9218c64f5960bd2af58a086e580b35e9 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Tue, 16 Mar 2021 20:52:28 +0100
Subject: [PATCH 06/11] Simplify tracking the encoder state.

---
 .../src/dep_graph/serialized.rs               | 152 +++++++++---------
 1 file changed, 72 insertions(+), 80 deletions(-)

diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 6cb728bf0a855..ab97222e19aba 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -4,7 +4,7 @@ use super::query::DepGraphQuery;
 use super::{DepKind, DepNode, DepNodeIndex};
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::{Lock, Lrc};
+use rustc_data_structures::sync::Lock;
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_serialize::opaque::{self, FileEncodeResult, FileEncoder, IntEncodedWithFixedSize};
 use rustc_serialize::{Decodable, Decoder, Encodable};
@@ -125,66 +125,80 @@ struct Stat<K: DepKind> {
     edge_counter: u64,
 }
 
-struct Stats<K: DepKind> {
-    stats: FxHashMap<K, Stat<K>>,
+struct EncodingStatus<K: DepKind> {
+    encoder: FileEncoder,
     total_node_count: usize,
     total_edge_count: usize,
+    result: FileEncodeResult,
+    stats: Option<FxHashMap<K, Stat<K>>>,
 }
 
-#[instrument(skip(encoder, _record_graph, record_stats))]
-fn encode_node<K: DepKind>(
-    encoder: &mut FileEncoder,
-    _index: DepNodeIndex,
-    node: &NodeInfo<K>,
-    _record_graph: &Option<Lrc<Lock<DepGraphQuery<K>>>>,
-    record_stats: &Option<Lrc<Lock<Stats<K>>>>,
-) -> FileEncodeResult {
-    #[cfg(debug_assertions)]
-    if let Some(record_graph) = &_record_graph {
-        // Do not ICE when a query is called from within `with_query`.
-        if let Some(record_graph) = &mut record_graph.try_lock() {
-            record_graph.push(_index, node.node, &node.edges);
+impl<K: DepKind> EncodingStatus<K> {
+    fn new(encoder: FileEncoder, record_stats: bool) -> Self {
+        Self {
+            encoder,
+            total_edge_count: 0,
+            total_node_count: 0,
+            result: Ok(()),
+            stats: if record_stats { Some(FxHashMap::default()) } else { None },
         }
     }
 
-    if let Some(record_stats) = &record_stats {
-        let mut stats = record_stats.lock();
-        let kind = node.node.kind;
+    #[instrument(skip(self, _record_graph))]
+    fn encode_node(
+        &mut self,
+        node: &NodeInfo<K>,
+        _record_graph: &Option<Lock<DepGraphQuery<K>>>,
+    ) -> DepNodeIndex {
+        let index = DepNodeIndex::new(self.total_node_count);
+        self.total_node_count += 1;
+
         let edge_count = node.edges.len();
+        self.total_edge_count += edge_count;
+
+        #[cfg(debug_assertions)]
+        if let Some(record_graph) = &_record_graph {
+            // Do not ICE when a query is called from within `with_query`.
+            if let Some(record_graph) = &mut record_graph.try_lock() {
+                record_graph.push(index, node.node, &node.edges);
+            }
+        }
+
+        if let Some(stats) = &mut self.stats {
+            let kind = node.node.kind;
 
-        let stat =
-            stats.stats.entry(kind).or_insert(Stat { kind, node_counter: 0, edge_counter: 0 });
-        stat.node_counter += 1;
-        stat.edge_counter += edge_count as u64;
-        stats.total_node_count += 1;
-        stats.total_edge_count += edge_count;
+            let stat = stats.entry(kind).or_insert(Stat { kind, node_counter: 0, edge_counter: 0 });
+            stat.node_counter += 1;
+            stat.edge_counter += edge_count as u64;
+        }
+
+        debug!(?index, ?node);
+        let encoder = &mut self.encoder;
+        self.result =
+            std::mem::replace(&mut self.result, Ok(())).and_then(|()| node.encode(encoder));
+        index
     }
 
-    debug!(?_index, ?node);
-    node.encode(encoder)
-}
+    fn finish(self) -> FileEncodeResult {
+        let Self { mut encoder, total_node_count, total_edge_count, result, stats: _ } = self;
+        let () = result?;
 
-fn encode_counts(
-    mut encoder: FileEncoder,
-    node_count: usize,
-    edge_count: usize,
-) -> FileEncodeResult {
-    let node_count = node_count.try_into().unwrap();
-    let edge_count = edge_count.try_into().unwrap();
-
-    debug!(?node_count, ?edge_count);
-    debug!("position: {:?}", encoder.position());
-    IntEncodedWithFixedSize(node_count).encode(&mut encoder)?;
-    IntEncodedWithFixedSize(edge_count).encode(&mut encoder)?;
-    debug!("position: {:?}", encoder.position());
-    // Drop the encoder so that nothing is written after the counts.
-    encoder.flush()
+        let node_count = total_node_count.try_into().unwrap();
+        let edge_count = total_edge_count.try_into().unwrap();
+
+        debug!(?node_count, ?edge_count);
+        debug!("position: {:?}", encoder.position());
+        IntEncodedWithFixedSize(node_count).encode(&mut encoder)?;
+        IntEncodedWithFixedSize(edge_count).encode(&mut encoder)?;
+        debug!("position: {:?}", encoder.position());
+        // Drop the encoder so that nothing is written after the counts.
+        encoder.flush()
+    }
 }
 
 pub struct GraphEncoder<K: DepKind> {
-    status: Lock<(FileEncoder, DepNodeIndex, usize, FileEncodeResult)>,
-    record_graph: Option<Lrc<Lock<DepGraphQuery<K>>>>,
-    record_stats: Option<Lrc<Lock<Stats<K>>>>,
+    status: Lock<EncodingStatus<K>>,
+    record_graph: Option<Lock<DepGraphQuery<K>>>,
 }
 
 impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
@@ -195,21 +209,12 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         record_stats: bool,
     ) -> Self {
         let record_graph = if cfg!(debug_assertions) && record_graph {
-            Some(Lrc::new(Lock::new(DepGraphQuery::new(prev_node_count))))
-        } else {
-            None
-        };
-        let record_stats = if record_stats {
-            Some(Lrc::new(Lock::new(Stats {
-                stats: FxHashMap::default(),
-                total_node_count: 0,
-                total_edge_count: 0,
-            })))
+            Some(Lock::new(DepGraphQuery::new(prev_node_count)))
         } else {
             None
         };
-        let status = Lock::new((encoder, DepNodeIndex::new(0), 0, Ok(())));
-        GraphEncoder { status, record_graph, record_stats }
+        let status = Lock::new(EncodingStatus::new(encoder, record_stats));
+        GraphEncoder { status, record_graph }
     }
 
     pub(crate) fn with_query(&self, f: impl Fn(&DepGraphQuery<K>)) {
@@ -223,10 +228,9 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         total_read_count: u64,
         total_duplicate_read_count: u64,
     ) {
-        if let Some(record_stats) = &self.record_stats {
-            let record_stats = record_stats.lock();
-
-            let mut stats: Vec<_> = record_stats.stats.values().collect();
+        let status = self.status.lock();
+        if let Some(record_stats) = &status.stats {
+            let mut stats: Vec<_> = record_stats.values().collect();
             stats.sort_by_key(|s| -(s.node_counter as i64));
 
             const SEPARATOR: &str = "[incremental] --------------------------------\
@@ -237,8 +241,8 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
             eprintln!("[incremental] DepGraph Statistics");
             eprintln!("{}", SEPARATOR);
             eprintln!("[incremental]");
-            eprintln!("[incremental] Total Node Count: {}", record_stats.total_node_count);
-            eprintln!("[incremental] Total Edge Count: {}", record_stats.total_edge_count);
+            eprintln!("[incremental] Total Node Count: {}", status.total_node_count);
+            eprintln!("[incremental] Total Edge Count: {}", status.total_edge_count);
 
             if cfg!(debug_assertions) {
                 eprintln!("[incremental] Total Edge Reads: {}", total_read_count);
@@ -257,7 +261,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
 
             for stat in stats {
                 let node_kind_ratio =
-                    (100.0 * (stat.node_counter as f64)) / (record_stats.total_node_count as f64);
+                    (100.0 * (stat.node_counter as f64)) / (status.total_node_count as f64);
                 let node_kind_avg_edges = (stat.edge_counter as f64) / (stat.node_counter as f64);
 
                 eprintln!(
@@ -280,23 +284,11 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         fingerprint: Fingerprint,
         edges: SmallVec<[DepNodeIndex; 8]>,
     ) -> DepNodeIndex {
-        let &mut (ref mut encoder, ref mut next_index, ref mut edge_count, ref mut result) =
-            &mut *self.status.lock();
-        let index = next_index.clone();
-        next_index.increment_by(1);
-        *edge_count += edges.len();
-        *result = std::mem::replace(result, Ok(())).and_then(|()| {
-            let node = NodeInfo { node, fingerprint, edges };
-            encode_node(encoder, index, &node, &self.record_graph, &self.record_stats)
-        });
-        index
+        let node = NodeInfo { node, fingerprint, edges };
+        self.status.lock().encode_node(&node, &self.record_graph)
     }
 
     pub fn finish(self) -> FileEncodeResult {
-        let (encoder, node_count, edge_count, result) = self.status.into_inner();
-        let () = result?;
-        let node_count = node_count.index();
-
-        encode_counts(encoder, node_count, edge_count)
+        self.status.into_inner().finish()
     }
 }

From 65a8681a1701fa01e62a9fc9698e682df465f2ec Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Wed, 17 Mar 2021 19:23:17 +0100
Subject: [PATCH 07/11] Add documentation.

---
 .../rustc_query_system/src/dep_graph/serialized.rs   | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index ab97222e19aba..663113543fcd8 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -1,4 +1,16 @@
 //! The data that we will serialize and deserialize.
+//!
+//! The dep-graph is serialized as a sequence of NodeInfo, with the dependencies
+//! specified inline.  The total numbers of nodes and edges are stored as the last
+//! 16 bytes of the file, so we can find them easily at decoding time.
+//!
+//! The serialisation is performed on-demand when each node is emitted. Using this
+//! scheme, we do not need to keep the current graph in memory.
+//!
+//! The deserialisation is performed manually, in order to convert from the stored
+//! sequence of NodeInfos to the different arrays in SerializedDepGraph.  Since the
+//! node and edge count are stored at the end of the file, all the arrays can be
+//! pre-allocated with the right length.
 
 use super::query::DepGraphQuery;
 use super::{DepKind, DepNode, DepNodeIndex};

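A rough standalone sketch of the layout these new module docs describe: node records streamed one after another, with the node and edge counts written as the last 16 bytes so the decoder can read them first and pre-allocate. The `ToyNode`/`write_graph`/`read_graph` names and the byte encoding below are simplified stand-ins, not rustc's actual `NodeInfo`/`FileEncoder` format:

use std::convert::TryInto;
use std::io::{Cursor, Read, Write};

struct ToyNode {
    kind: u32,
    edges: Vec<u32>, // edge targets, stored inline after the node header
}

fn write_graph(nodes: &[ToyNode]) -> Vec<u8> {
    let mut out = Vec::new();
    let mut edge_count: u64 = 0;
    for node in nodes {
        out.write_all(&node.kind.to_le_bytes()).unwrap();
        out.write_all(&(node.edges.len() as u32).to_le_bytes()).unwrap();
        for &e in &node.edges {
            out.write_all(&e.to_le_bytes()).unwrap();
        }
        edge_count += node.edges.len() as u64;
    }
    // The counts go last, with a fixed size, so the reader can find them
    // at `len - 16` without scanning the stream.
    out.write_all(&(nodes.len() as u64).to_le_bytes()).unwrap();
    out.write_all(&edge_count.to_le_bytes()).unwrap();
    out
}

fn read_graph(bytes: &[u8]) -> Vec<ToyNode> {
    let tail = bytes.len() - 16;
    let node_count = u64::from_le_bytes(bytes[tail..tail + 8].try_into().unwrap()) as usize;
    let _edge_count = u64::from_le_bytes(bytes[tail + 8..].try_into().unwrap()) as usize;

    // Knowing the counts up front lets us pre-allocate, as the real decoder does.
    let mut nodes = Vec::with_capacity(node_count);
    let mut cursor = Cursor::new(&bytes[..tail]);
    let mut buf4 = [0u8; 4];
    for _ in 0..node_count {
        cursor.read_exact(&mut buf4).unwrap();
        let kind = u32::from_le_bytes(buf4);
        cursor.read_exact(&mut buf4).unwrap();
        let n_edges = u32::from_le_bytes(buf4) as usize;
        let mut edges = Vec::with_capacity(n_edges);
        for _ in 0..n_edges {
            cursor.read_exact(&mut buf4).unwrap();
            edges.push(u32::from_le_bytes(buf4));
        }
        nodes.push(ToyNode { kind, edges });
    }
    nodes
}

fn main() {
    let nodes = vec![ToyNode { kind: 0, edges: vec![] }, ToyNode { kind: 1, edges: vec![0] }];
    let bytes = write_graph(&nodes);
    assert_eq!(read_graph(&bytes).len(), 2);
}
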
From fe89f3236c08abd8fd2c81cdd2f41ff2066f13ac Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Thu, 18 Mar 2021 19:26:08 +0100
Subject: [PATCH 08/11] Address review.

---
 .../src/persist/dirty_clean.rs                |  2 -
 .../rustc_incremental/src/persist/save.rs     |  6 +-
 .../rustc_query_system/src/dep_graph/graph.rs | 72 +++++++++----------
 .../rustc_query_system/src/dep_graph/query.rs |  3 +-
 .../src/dep_graph/serialized.rs               | 17 ++---
 5 files changed, 45 insertions(+), 55 deletions(-)

diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs
index 145c168f8c443..e7bd488af8ebf 100644
--- a/compiler/rustc_incremental/src/persist/dirty_clean.rs
+++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs
@@ -391,8 +391,6 @@ impl DirtyCleanVisitor<'tcx> {
     fn assert_clean(&self, item_span: Span, dep_node: DepNode) {
         debug!("assert_clean({:?})", dep_node);
 
-        // if the node wasn't previously evaluated and now is (or vice versa),
-        // then the node isn't actually clean or dirty.
         if self.tcx.dep_graph.is_red(&dep_node) {
             let dep_node_str = self.dep_node_str(&dep_node);
             self.tcx
diff --git a/compiler/rustc_incremental/src/persist/save.rs b/compiler/rustc_incremental/src/persist/save.rs
index d80397970ac6a..23bd63a37d637 100644
--- a/compiler/rustc_incremental/src/persist/save.rs
+++ b/compiler/rustc_incremental/src/persist/save.rs
@@ -34,10 +34,8 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
         let dep_graph_path = dep_graph_path(sess);
         let staging_dep_graph_path = staging_dep_graph_path(sess);
 
-        join(
-            || sess.time("assert_dep_graph", || crate::assert_dep_graph(tcx)),
-            || sess.time("check_dirty_clean", || dirty_clean::check_dirty_clean_annotations(tcx)),
-        );
+        sess.time("assert_dep_graph", || crate::assert_dep_graph(tcx));
+        sess.time("check_dirty_clean", || dirty_clean::check_dirty_clean_annotations(tcx));
 
         if sess.opts.debugging_opts.incremental_info {
             tcx.dep_graph.print_incremental_info()
diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs
index 295b2a97e4cf4..04def90913185 100644
--- a/compiler/rustc_query_system/src/dep_graph/graph.rs
+++ b/compiler/rustc_query_system/src/dep_graph/graph.rs
@@ -626,11 +626,10 @@ impl<K: DepKind> DepGraph<K> {
 
         // There may be multiple threads trying to mark the same dep node green concurrently
 
-        let dep_node_index = {
-            // We allocating an entry for the node in the current dependency graph and
-            // adding all the appropriate edges imported from the previous graph
-            data.current.intern_dark_green_node(&data.previous, prev_dep_node_index)
-        };
+        // We are allocating an entry for the node in the current dependency graph and
+        // adding all the appropriate edges imported from the previous graph
+        let dep_node_index =
+            data.current.promote_node_and_deps_to_current(&data.previous, prev_dep_node_index);
 
         // ... emitting any stored diagnostic ...
 
@@ -713,7 +712,7 @@ impl<K: DepKind> DepGraph<K> {
         }
     }
 
-    // Returns true if the given node has been marked as green during the
+    // Returns true if the given node has been marked as red during the
     // current compilation session. Used in various assertions
     pub fn is_red(&self, dep_node: &DepNode<K>) -> bool {
         self.node_color(dep_node) == Some(DepNodeColor::Red)
@@ -833,17 +832,11 @@ rustc_index::newtype_index! {
 /// will be populated as we run queries or tasks. We never remove nodes from the
 /// graph: they are only added.
 ///
-/// The nodes in it are identified by a `DepNodeIndex`. Internally, this maps to
-/// a `HybridIndex`, which identifies which collection in the `data` field
-/// contains a node's data. Which collection is used for a node depends on
-/// whether the node was present in the `PreviousDepGraph`, and if so, the color
-/// of the node. Each type of node can share more or less data with the previous
-/// graph. When possible, we can store just the index of the node in the
-/// previous graph, rather than duplicating its data in our own collections.
-/// This is important, because these graph structures are some of the largest in
-/// the compiler.
+/// The nodes in it are identified by a `DepNodeIndex`. We avoid keeping the nodes
+/// in memory.  This is important, because these graph structures are some of the
+/// largest in the compiler.
 ///
-/// For the same reason, we also avoid storing `DepNode`s more than once as map
+/// For this reason, we avoid storing `DepNode`s more than once as map
 /// keys. The `new_node_to_index` map only contains nodes not in the previous
 /// graph, and we map nodes in the previous graph to indices via a two-step
 /// mapping. `PreviousDepGraph` maps from `DepNode` to `SerializedDepNodeIndex`,
@@ -939,6 +932,15 @@ impl<K: DepKind> CurrentDepGraph<K> {
         }
     }
 
+    #[cfg(debug_assertions)]
+    fn record_edge(&self, dep_node_index: DepNodeIndex, key: DepNode<K>) {
+        if let Some(forbidden_edge) = &self.forbidden_edge {
+            forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
+        }
+    }
+
+    /// Writes the node to the current dep-graph and allocates a `DepNodeIndex` for it.
+    /// Assumes that this is a node that has no equivalent in the previous dep-graph.
     fn intern_new_node(
         &self,
         key: DepNode<K>,
@@ -951,9 +953,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 let dep_node_index = self.encoder.borrow().send(key, current_fingerprint, edges);
                 entry.insert(dep_node_index);
                 #[cfg(debug_assertions)]
-                if let Some(forbidden_edge) = &self.forbidden_edge {
-                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                }
+                self.record_edge(dep_node_index, key);
                 dep_node_index
             }
         }
@@ -964,20 +964,20 @@ impl<K: DepKind> CurrentDepGraph<K> {
         prev_graph: &PreviousDepGraph<K>,
         key: DepNode<K>,
         edges: EdgesVec,
-        current_fingerprint: Option<Fingerprint>,
+        fingerprint: Option<Fingerprint>,
         print_status: bool,
     ) -> (DepNodeIndex, Option<(SerializedDepNodeIndex, DepNodeColor)>) {
         let print_status = cfg!(debug_assertions) && print_status;
 
         if let Some(prev_index) = prev_graph.node_to_index_opt(&key) {
             // Determine the color and index of the new `DepNode`.
-            if let Some(current_fingerprint) = current_fingerprint {
-                if current_fingerprint == prev_graph.fingerprint_by_index(prev_index) {
+            if let Some(fingerprint) = fingerprint {
+                if fingerprint == prev_graph.fingerprint_by_index(prev_index) {
                     if print_status {
                         eprintln!("[task::green] {:?}", key);
                     }
 
-                    // This is a light green node: it existed in the previous compilation,
+                    // This is a green node: it existed in the previous compilation,
                     // its query was re-executed, and it has the same result as before.
                     let mut prev_index_to_index = self.prev_index_to_index.lock();
 
@@ -985,16 +985,14 @@ impl<K: DepKind> CurrentDepGraph<K> {
                         Some(dep_node_index) => dep_node_index,
                         None => {
                             let dep_node_index =
-                                self.encoder.borrow().send(key, current_fingerprint, edges);
+                                self.encoder.borrow().send(key, fingerprint, edges);
                             prev_index_to_index[prev_index] = Some(dep_node_index);
                             dep_node_index
                         }
                     };
 
                     #[cfg(debug_assertions)]
-                    if let Some(forbidden_edge) = &self.forbidden_edge {
-                        forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                    }
+                    self.record_edge(dep_node_index, key);
                     (dep_node_index, Some((prev_index, DepNodeColor::Green(dep_node_index))))
                 } else {
                     if print_status {
@@ -1009,16 +1007,14 @@ impl<K: DepKind> CurrentDepGraph<K> {
                         Some(dep_node_index) => dep_node_index,
                         None => {
                             let dep_node_index =
-                                self.encoder.borrow().send(key, current_fingerprint, edges);
+                                self.encoder.borrow().send(key, fingerprint, edges);
                             prev_index_to_index[prev_index] = Some(dep_node_index);
                             dep_node_index
                         }
                     };
 
                     #[cfg(debug_assertions)]
-                    if let Some(forbidden_edge) = &self.forbidden_edge {
-                        forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                    }
+                    self.record_edge(dep_node_index, key);
                     (dep_node_index, Some((prev_index, DepNodeColor::Red)))
                 }
             } else {
@@ -1043,9 +1039,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 };
 
                 #[cfg(debug_assertions)]
-                if let Some(forbidden_edge) = &self.forbidden_edge {
-                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                }
+                self.record_edge(dep_node_index, key);
                 (dep_node_index, Some((prev_index, DepNodeColor::Red)))
             }
         } else {
@@ -1053,16 +1047,16 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 eprintln!("[task::new] {:?}", key);
             }
 
-            let current_fingerprint = current_fingerprint.unwrap_or(Fingerprint::ZERO);
+            let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO);
 
             // This is a new node: it didn't exist in the previous compilation session.
-            let dep_node_index = self.intern_new_node(key, edges, current_fingerprint);
+            let dep_node_index = self.intern_new_node(key, edges, fingerprint);
 
             (dep_node_index, None)
         }
     }
 
-    fn intern_dark_green_node(
+    fn promote_node_and_deps_to_current(
         &self,
         prev_graph: &PreviousDepGraph<K>,
         prev_index: SerializedDepNodeIndex,
@@ -1086,9 +1080,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                 );
                 prev_index_to_index[prev_index] = Some(dep_node_index);
                 #[cfg(debug_assertions)]
-                if let Some(forbidden_edge) = &self.forbidden_edge {
-                    forbidden_edge.index_to_node.lock().insert(dep_node_index, key);
-                }
+                self.record_edge(dep_node_index, key);
                 dep_node_index
             }
         }
diff --git a/compiler/rustc_query_system/src/dep_graph/query.rs b/compiler/rustc_query_system/src/dep_graph/query.rs
index 0fe3748e38630..27b3b5e13667e 100644
--- a/compiler/rustc_query_system/src/dep_graph/query.rs
+++ b/compiler/rustc_query_system/src/dep_graph/query.rs
@@ -32,7 +32,8 @@ impl<K: DepKind> DepGraphQuery<K> {
 
         for &target in edges.iter() {
             let target = self.dep_index_to_index[target];
-            // Skip missing edges.
+            // We may miss the edges that are pushed while the `DepGraphQuery` is being accessed.
+            // Skip them to avoid issues.
             if let Some(target) = target {
                 self.graph.add_edge(source, target, ());
             }
diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 663113543fcd8..aeb0e2b0da1a6 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -73,7 +73,7 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
 {
     #[instrument(skip(d))]
     fn decode(d: &mut opaque::Decoder<'a>) -> Result<SerializedDepGraph<K>, String> {
-        let position = d.position();
+        let start_position = d.position();
 
         // The last 16 bytes are the node count and edge count.
         debug!("position: {:?}", d.position());
@@ -85,7 +85,7 @@ impl<'a, K: DepKind + Decodable<opaque::Decoder<'a>>> Decodable<opaque::Decoder<
         debug!(?node_count, ?edge_count);
 
         debug!("position: {:?}", d.position());
-        d.set_position(position);
+        d.set_position(start_position);
         debug!("position: {:?}", d.position());
 
         let mut nodes = IndexVec::with_capacity(node_count);
@@ -137,7 +137,7 @@ struct Stat<K: DepKind> {
     edge_counter: u64,
 }
 
-struct EncodingStatus<K: DepKind> {
+struct EncoderState<K: DepKind> {
     encoder: FileEncoder,
     total_node_count: usize,
     total_edge_count: usize,
@@ -145,7 +145,7 @@ struct EncodingStatus<K: DepKind> {
     stats: Option<FxHashMap<K, Stat<K>>>,
 }
 
-impl<K: DepKind> EncodingStatus<K> {
+impl<K: DepKind> EncoderState<K> {
     fn new(encoder: FileEncoder, record_stats: bool) -> Self {
         Self {
             encoder,
@@ -186,8 +186,9 @@ impl<K: DepKind> EncodingStatus<K> {
 
         debug!(?index, ?node);
         let encoder = &mut self.encoder;
-        self.result =
-            std::mem::replace(&mut self.result, Ok(())).and_then(|()| node.encode(encoder));
+        if self.result.is_ok() {
+            self.result = node.encode(encoder);
+        }
         index
     }
 
@@ -209,7 +210,7 @@ impl<K: DepKind> EncodingStatus<K> {
 }
 
 pub struct GraphEncoder<K: DepKind> {
-    status: Lock<EncodingStatus<K>>,
+    status: Lock<EncoderState<K>>,
     record_graph: Option<Lock<DepGraphQuery<K>>>,
 }
 
@@ -225,7 +226,7 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         } else {
             None
         };
-        let status = Lock::new(EncodingStatus::new(encoder, record_stats));
+        let status = Lock::new(EncoderState::new(encoder, record_stats));
         GraphEncoder { status, record_graph }
     }
 

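The updated `CurrentDepGraph` docs in this patch describe keeping `DepNode` keys only for nodes that are new this session, and reaching previous-session nodes through a two-step index mapping. A small sketch of that arrangement, with simplified stand-in types (`PrevGraph`, `CurrentGraph` and `lookup` are illustrative, not the compiler's API):

use std::collections::HashMap;

type DepNode = &'static str;
type SerializedIndex = usize; // index into the previous graph
type CurrentIndex = u32; // index in the graph being built

struct PrevGraph {
    node_to_index: HashMap<DepNode, SerializedIndex>,
}

struct CurrentGraph {
    // Only nodes that did *not* exist previously are keyed by DepNode here.
    new_node_to_index: HashMap<DepNode, CurrentIndex>,
    // Nodes from the previous graph are reached through their old index,
    // so their DepNode key is never duplicated in a second map.
    prev_index_to_index: Vec<Option<CurrentIndex>>,
}

impl CurrentGraph {
    fn lookup(&self, prev: &PrevGraph, node: DepNode) -> Option<CurrentIndex> {
        match prev.node_to_index.get(&node) {
            Some(&prev_index) => self.prev_index_to_index[prev_index],
            None => self.new_node_to_index.get(&node).copied(),
        }
    }
}

fn main() {
    let prev = PrevGraph { node_to_index: HashMap::from([("old", 0)]) };
    let current = CurrentGraph {
        new_node_to_index: HashMap::from([("new", 7)]),
        prev_index_to_index: vec![Some(3)],
    };
    assert_eq!(current.lookup(&prev, "old"), Some(3));
    assert_eq!(current.lookup(&prev, "new"), Some(7));
}
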
From df24315ddf0103a5f9ecd8d3cd15e069e3571a53 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Thu, 18 Mar 2021 19:38:50 +0100
Subject: [PATCH 09/11] Adjust profiling.

---
 .../rustc_query_system/src/dep_graph/graph.rs | 39 ++++++++++++++-----
 .../src/dep_graph/serialized.rs               |  3 ++
 .../rustc_query_system/src/query/plumbing.rs  |  8 ++--
 .../src/traits/select/mod.rs                  |  2 +-
 4 files changed, 38 insertions(+), 14 deletions(-)

diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs
index 04def90913185..f92ee85f62e69 100644
--- a/compiler/rustc_query_system/src/dep_graph/graph.rs
+++ b/compiler/rustc_query_system/src/dep_graph/graph.rs
@@ -1,6 +1,7 @@
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::profiling::QueryInvocationId;
+use rustc_data_structures::profiling::SelfProfilerRef;
 use rustc_data_structures::sharded::{self, Sharded};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::steal::Steal;
@@ -241,6 +242,7 @@ impl<K: DepKind> DepGraph<K> {
 
             // Intern the new `DepNode`.
             let (dep_node_index, prev_and_color) = data.current.intern_node(
+                dcx.profiler(),
                 &data.previous,
                 key,
                 edges,
@@ -271,7 +273,12 @@ impl<K: DepKind> DepGraph<K> {
 
     /// Executes something within an "anonymous" task, that is, a task the
     /// `DepNode` of which is determined by the list of inputs it read from.
-    pub fn with_anon_task<OP, R>(&self, dep_kind: K, op: OP) -> (R, DepNodeIndex)
+    pub fn with_anon_task<Ctxt: DepContext<DepKind = K>, OP, R>(
+        &self,
+        cx: Ctxt,
+        dep_kind: K,
+        op: OP,
+    ) -> (R, DepNodeIndex)
     where
         OP: FnOnce() -> R,
     {
@@ -298,8 +305,12 @@ impl<K: DepKind> DepGraph<K> {
                 hash: data.current.anon_id_seed.combine(hasher.finish()).into(),
             };
 
-            let dep_node_index =
-                data.current.intern_new_node(target_dep_node, task_deps.reads, Fingerprint::ZERO);
+            let dep_node_index = data.current.intern_new_node(
+                cx.profiler(),
+                target_dep_node,
+                task_deps.reads,
+                Fingerprint::ZERO,
+            );
 
             (result, dep_node_index)
         } else {
@@ -628,8 +639,11 @@ impl<K: DepKind> DepGraph<K> {
 
         // We are allocating an entry for the node in the current dependency graph and
         // adding all the appropriate edges imported from the previous graph
-        let dep_node_index =
-            data.current.promote_node_and_deps_to_current(&data.previous, prev_dep_node_index);
+        let dep_node_index = data.current.promote_node_and_deps_to_current(
+            tcx.dep_context().profiler(),
+            &data.previous,
+            prev_dep_node_index,
+        );
 
         // ... emitting any stored diagnostic ...
 
@@ -943,6 +957,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
     /// Assumes that this is a node that has no equivalent in the previous dep-graph.
     fn intern_new_node(
         &self,
+        profiler: &SelfProfilerRef,
         key: DepNode<K>,
         edges: EdgesVec,
         current_fingerprint: Fingerprint,
@@ -950,7 +965,8 @@ impl<K: DepKind> CurrentDepGraph<K> {
         match self.new_node_to_index.get_shard_by_value(&key).lock().entry(key) {
             Entry::Occupied(entry) => *entry.get(),
             Entry::Vacant(entry) => {
-                let dep_node_index = self.encoder.borrow().send(key, current_fingerprint, edges);
+                let dep_node_index =
+                    self.encoder.borrow().send(profiler, key, current_fingerprint, edges);
                 entry.insert(dep_node_index);
                 #[cfg(debug_assertions)]
                 self.record_edge(dep_node_index, key);
@@ -961,6 +977,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
 
     fn intern_node(
         &self,
+        profiler: &SelfProfilerRef,
         prev_graph: &PreviousDepGraph<K>,
         key: DepNode<K>,
         edges: EdgesVec,
@@ -985,7 +1002,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                         Some(dep_node_index) => dep_node_index,
                         None => {
                             let dep_node_index =
-                                self.encoder.borrow().send(key, fingerprint, edges);
+                                self.encoder.borrow().send(profiler, key, fingerprint, edges);
                             prev_index_to_index[prev_index] = Some(dep_node_index);
                             dep_node_index
                         }
@@ -1007,7 +1024,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                         Some(dep_node_index) => dep_node_index,
                         None => {
                             let dep_node_index =
-                                self.encoder.borrow().send(key, fingerprint, edges);
+                                self.encoder.borrow().send(profiler, key, fingerprint, edges);
                             prev_index_to_index[prev_index] = Some(dep_node_index);
                             dep_node_index
                         }
@@ -1032,7 +1049,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
                     Some(dep_node_index) => dep_node_index,
                     None => {
                         let dep_node_index =
-                            self.encoder.borrow().send(key, Fingerprint::ZERO, edges);
+                            self.encoder.borrow().send(profiler, key, Fingerprint::ZERO, edges);
                         prev_index_to_index[prev_index] = Some(dep_node_index);
                         dep_node_index
                     }
@@ -1050,7 +1067,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
             let fingerprint = fingerprint.unwrap_or(Fingerprint::ZERO);
 
             // This is a new node: it didn't exist in the previous compilation session.
-            let dep_node_index = self.intern_new_node(key, edges, fingerprint);
+            let dep_node_index = self.intern_new_node(profiler, key, edges, fingerprint);
 
             (dep_node_index, None)
         }
@@ -1058,6 +1075,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
 
     fn promote_node_and_deps_to_current(
         &self,
+        profiler: &SelfProfilerRef,
         prev_graph: &PreviousDepGraph<K>,
         prev_index: SerializedDepNodeIndex,
     ) -> DepNodeIndex {
@@ -1070,6 +1088,7 @@ impl<K: DepKind> CurrentDepGraph<K> {
             None => {
                 let key = prev_graph.index_to_node(prev_index);
                 let dep_node_index = self.encoder.borrow().send(
+                    profiler,
                     key,
                     prev_graph.fingerprint_by_index(prev_index),
                     prev_graph
diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index aeb0e2b0da1a6..27f7e5730a7a2 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -16,6 +16,7 @@ use super::query::DepGraphQuery;
 use super::{DepKind, DepNode, DepNodeIndex};
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::profiling::SelfProfilerRef;
 use rustc_data_structures::sync::Lock;
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_serialize::opaque::{self, FileEncodeResult, FileEncoder, IntEncodedWithFixedSize};
@@ -293,10 +294,12 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
 
     pub(crate) fn send(
         &self,
+        profiler: &SelfProfilerRef,
         node: DepNode<K>,
         fingerprint: Fingerprint,
         edges: SmallVec<[DepNodeIndex; 8]>,
     ) -> DepNodeIndex {
+        let _prof_timer = profiler.generic_activity("incr_comp_encode_dep_graph");
         let node = NodeInfo { node, fingerprint, edges };
         self.status.lock().encode_node(&node, &self.record_graph)
     }
diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs
index b58802474757e..fb8a53048faba 100644
--- a/compiler/rustc_query_system/src/query/plumbing.rs
+++ b/compiler/rustc_query_system/src/query/plumbing.rs
@@ -449,9 +449,11 @@ where
 
         let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
             tcx.start_query(job.id, diagnostics, || {
-                tcx.dep_context()
-                    .dep_graph()
-                    .with_anon_task(query.dep_kind, || query.compute(tcx, key))
+                tcx.dep_context().dep_graph().with_anon_task(
+                    *tcx.dep_context(),
+                    query.dep_kind,
+                    || query.compute(tcx, key),
+                )
             })
         });
 
diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs
index 45680c90cdc17..0a15ca87d16e0 100644
--- a/compiler/rustc_trait_selection/src/traits/select/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs
@@ -981,7 +981,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         OP: FnOnce(&mut Self) -> R,
     {
         let (result, dep_node) =
-            self.tcx().dep_graph.with_anon_task(DepKind::TraitSelect, || op(self));
+            self.tcx().dep_graph.with_anon_task(self.tcx(), DepKind::TraitSelect, || op(self));
         self.tcx().dep_graph.read_index(dep_node);
         (result, dep_node)
     }

From 8ee9322c1041bcbaee408961727c4418bd792979 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Tue, 23 Mar 2021 13:19:42 +0100
Subject: [PATCH 10/11] Also profile finishing the encoding.

---
 compiler/rustc_incremental/src/persist/save.rs          | 2 +-
 compiler/rustc_query_system/src/dep_graph/graph.rs      | 8 ++++++--
 compiler/rustc_query_system/src/dep_graph/serialized.rs | 3 ++-
 3 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/compiler/rustc_incremental/src/persist/save.rs b/compiler/rustc_incremental/src/persist/save.rs
index 23bd63a37d637..d558af3c1d558 100644
--- a/compiler/rustc_incremental/src/persist/save.rs
+++ b/compiler/rustc_incremental/src/persist/save.rs
@@ -49,7 +49,7 @@ pub fn save_dep_graph(tcx: TyCtxt<'_>) {
             },
             move || {
                 sess.time("incr_comp_persist_dep_graph", || {
-                    if let Err(err) = tcx.dep_graph.encode() {
+                    if let Err(err) = tcx.dep_graph.encode(&tcx.sess.prof) {
                         sess.err(&format!(
                             "failed to write dependency graph to `{}`: {}",
                             staging_dep_graph_path.display(),
diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs
index f92ee85f62e69..7a0fc320663f7 100644
--- a/compiler/rustc_query_system/src/dep_graph/graph.rs
+++ b/compiler/rustc_query_system/src/dep_graph/graph.rs
@@ -789,8 +789,12 @@ impl<K: DepKind> DepGraph<K> {
         }
     }
 
-    pub fn encode(&self) -> FileEncodeResult {
-        if let Some(data) = &self.data { data.current.encoder.steal().finish() } else { Ok(()) }
+    pub fn encode(&self, profiler: &SelfProfilerRef) -> FileEncodeResult {
+        if let Some(data) = &self.data {
+            data.current.encoder.steal().finish(profiler)
+        } else {
+            Ok(())
+        }
     }
 
     fn next_virtual_depnode_index(&self) -> DepNodeIndex {
diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 27f7e5730a7a2..1e34b14d9060f 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -304,7 +304,8 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         self.status.lock().encode_node(&node, &self.record_graph)
     }
 
-    pub fn finish(self) -> FileEncodeResult {
+    pub fn finish(self, profiler: &SelfProfilerRef) -> FileEncodeResult {
+        let _prof_timer = profiler.generic_activity("incr_comp_encode_dep_graph");
         self.status.into_inner().finish()
     }
 }

From f3dde45d2a963c32994a78f3ea0119a2da973c14 Mon Sep 17 00:00:00 2001
From: Camille GILLOT <gillot.camille@gmail.com>
Date: Wed, 31 Mar 2021 17:12:03 +0200
Subject: [PATCH 11/11] Enable debugging the dep-graph without
 debug-assertions.

Debugging the dep-graph may also be useful in builds without debug-assertions,
and some CI configurations test without debug assertions.
---
 .../rustc_query_system/src/dep_graph/serialized.rs | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)

diff --git a/compiler/rustc_query_system/src/dep_graph/serialized.rs b/compiler/rustc_query_system/src/dep_graph/serialized.rs
index 1e34b14d9060f..6f3d1fb71994e 100644
--- a/compiler/rustc_query_system/src/dep_graph/serialized.rs
+++ b/compiler/rustc_query_system/src/dep_graph/serialized.rs
@@ -157,11 +157,11 @@ impl<K: DepKind> EncoderState<K> {
         }
     }
 
-    #[instrument(skip(self, _record_graph))]
+    #[instrument(skip(self, record_graph))]
     fn encode_node(
         &mut self,
         node: &NodeInfo<K>,
-        _record_graph: &Option<Lock<DepGraphQuery<K>>>,
+        record_graph: &Option<Lock<DepGraphQuery<K>>>,
     ) -> DepNodeIndex {
         let index = DepNodeIndex::new(self.total_node_count);
         self.total_node_count += 1;
@@ -169,8 +169,7 @@ impl<K: DepKind> EncoderState<K> {
         let edge_count = node.edges.len();
         self.total_edge_count += edge_count;
 
-        #[cfg(debug_assertions)]
-        if let Some(record_graph) = &_record_graph {
+        if let Some(record_graph) = &record_graph {
             // Do not ICE when a query is called from within `with_query`.
             if let Some(record_graph) = &mut record_graph.try_lock() {
                 record_graph.push(index, node.node, &node.edges);
@@ -222,11 +221,8 @@ impl<K: DepKind + Encodable<FileEncoder>> GraphEncoder<K> {
         record_graph: bool,
         record_stats: bool,
     ) -> Self {
-        let record_graph = if cfg!(debug_assertions) && record_graph {
-            Some(Lock::new(DepGraphQuery::new(prev_node_count)))
-        } else {
-            None
-        };
+        let record_graph =
+            if record_graph { Some(Lock::new(DepGraphQuery::new(prev_node_count))) } else { None };
         let status = Lock::new(EncoderState::new(encoder, record_stats));
         GraphEncoder { status, record_graph }
     }