From 141ef0fc4f1da76d925ebd5d0e9d775ac4e46f6b Mon Sep 17 00:00:00 2001
From: Jordan Eldredge
Date: Wed, 25 May 2022 09:51:32 -0700
Subject: [PATCH] Avoid bug where watch mode can get stuck in invalid state (#3922)

Summary:
Steps to reproduce:
1. Start the Relay compiler in watch mode
2. Find a file with an existing fragment and duplicate it (cmd+c, cmd+v in the VSCode file tree)
3. Observe a compiler error in your terminal due to duplicate fragment names
4. Delete the `... copy.js` file

Expected: Compiler compiles with no errors

Actual: Compiler still complains about duplicate fragments despite the `... copy.js` file not existing any more

When merging new sets of pending files, we ignore empty (in this case, deleted) files that are missing from the processed set of files. But we fail to consider that the file might already exist in the pending set. If a file is already in the pending set, always update it; otherwise we can end up with stale data in the compiler.

Pull Request resolved: https://github.com/facebook/relay/pull/3922

Reviewed By: voideanvalue

Differential Revision: D36629649

Pulled By: captbaritone

fbshipit-source-id: a92eb6cc92696a5af782950beed2dbbe340cda13
---
 .../relay-compiler/src/compiler_state.rs      | 106 ++++++++++++++----
 1 file changed, 83 insertions(+), 23 deletions(-)

diff --git a/compiler/crates/relay-compiler/src/compiler_state.rs b/compiler/crates/relay-compiler/src/compiler_state.rs
index 16aa681e51aa9..f857a6309c2e2 100644
--- a/compiler/crates/relay-compiler/src/compiler_state.rs
+++ b/compiler/crates/relay-compiler/src/compiler_state.rs
@@ -24,6 +24,7 @@ use relay_transforms::DependencyMap;
 use schema::SDLSchema;
 use schema_diff::{definitions::SchemaChange, detect_changes};
 use serde::{Deserialize, Serialize};
+use std::collections::hash_map::Entry;
 use std::{
     env, fmt,
     fs::File as FsFile,
@@ -148,29 +149,18 @@ impl<V: Source> IncrementalSources<V> {
     /// Docblock, .etc). We need to keep the empty source only when there is a
     /// corresponding source in `processed`, and compiler needs to do the work to remove it.
     fn merge_pending_sources(&mut self, additional_pending_sources: &IncrementalSourceSet<V>) {
-        if self.processed.is_empty() {
-            self.pending.extend(
-                additional_pending_sources
-                    .iter()
-                    .filter(|&(_, value)| !value.is_empty())
-                    .map(|(k, v)| (k.clone(), v.clone())),
-            );
-        } else {
-            self.pending.extend(
-                additional_pending_sources
-                    .iter()
-                    .filter(|&(key, value)| {
-                        if value.is_empty() {
-                            match self.processed.get(key) {
-                                Some(v) => !v.is_empty(),
-                                None => false,
-                            }
-                        } else {
-                            true
-                        }
-                    })
-                    .map(|(k, v)| (k.clone(), v.clone())),
-            );
+        for (key, value) in additional_pending_sources.iter() {
+            match self.pending.entry(key.to_path_buf()) {
+                Entry::Occupied(mut entry) => {
+                    entry.insert(value.clone());
+                }
+                Entry::Vacant(vacant) => {
+                    if !value.is_empty() || self.processed.get(key).map_or(false, |v| !v.is_empty())
+                    {
+                        vacant.insert(value.clone());
+                    }
+                }
+            }
         }
     }
 
@@ -796,3 +786,73 @@ mod clock_json_string {
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    // Note this useful idiom: importing names from outer (for mod tests) scope.
+    use super::*;
+
+    impl Source for Vec<u8> {
+        fn is_empty(&self) -> bool {
+            self.is_empty()
+        }
+    }
+
+    #[test]
+    fn empty_pending_incremental_source_overwrites_existing_pending_source() {
+        let mut incremental_source: IncrementalSources<Vec<u8>> = IncrementalSources::default();
+
+        let a = PathBuf::new();
+
+        let mut initial = FnvHashMap::default();
+        initial.insert(a.clone(), vec![1, 2, 3]);
+
+        // Starting with a pending source of a => [1, 2, 3]
+        incremental_source.merge_pending_sources(&initial);
+
+        assert_eq!(incremental_source.pending.get(&a), Some(&vec![1, 2, 3]));
+
+        let mut update: FnvHashMap<PathBuf, Vec<u8>> = FnvHashMap::default();
+        update.insert(a.clone(), Vec::new());
+
+        // Merge in a pending source of a => []
+        incremental_source.merge_pending_sources(&update);
+
+        // Pending for a should now be empty
+        assert_eq!(incremental_source.pending.get(&a), Some(&vec![]));
+    }
+
+    #[test]
+    fn empty_pending_incremental_is_ignored_if_no_processed_source_exists() {
+        let mut incremental_source: IncrementalSources<Vec<u8>> = IncrementalSources::default();
+
+        let a = PathBuf::new();
+
+        let mut update: FnvHashMap<PathBuf, Vec<u8>> = FnvHashMap::default();
+        update.insert(a.clone(), Vec::new());
+
+        // Merge in a pending source of a => []
+        incremental_source.merge_pending_sources(&update);
+
+        // Pending for a should not be populated
+        assert_eq!(incremental_source.pending.get(&a), None);
+    }
+
+    #[test]
+    fn empty_pending_incremental_is_ignored_if_processed_source_is_empty() {
+        let mut incremental_source: IncrementalSources<Vec<u8>> = IncrementalSources::default();
+
+        let a = PathBuf::new();
+
+        incremental_source.processed.insert(a.clone(), Vec::new());
+
+        let mut update: FnvHashMap<PathBuf, Vec<u8>> = FnvHashMap::default();
+        update.insert(a.clone(), Vec::new());
+
+        // Merge in a pending source of a => []
+        incremental_source.merge_pending_sources(&update);
+
+        // Pending for a should not be populated
+        assert_eq!(incremental_source.pending.get(&a), None);
+    }
+}
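
For reference, below is a minimal, self-contained sketch of the merge rule this patch implements, using std's HashMap and hypothetical names (PendingState, merge_pending) rather than Relay's actual IncrementalSources types: a path that is already pending is always overwritten with the newest content, while an empty incoming source is only recorded when the processed set still holds non-empty content for that path.

use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::path::PathBuf;

// Hypothetical stand-in for the compiler's pending/processed bookkeeping.
#[derive(Default)]
struct PendingState {
    pending: HashMap<PathBuf, Vec<u8>>,
    processed: HashMap<PathBuf, Vec<u8>>,
}

impl PendingState {
    fn merge_pending(&mut self, additional: &HashMap<PathBuf, Vec<u8>>) {
        for (key, value) in additional {
            match self.pending.entry(key.clone()) {
                // Already pending: always take the newest content, even if it
                // is now empty (the file was deleted).
                Entry::Occupied(mut occupied) => {
                    occupied.insert(value.clone());
                }
                // Not pending yet: only record an empty source if the
                // processed set still has non-empty content to invalidate.
                Entry::Vacant(vacant) => {
                    if !value.is_empty()
                        || self.processed.get(key).map_or(false, |v| !v.is_empty())
                    {
                        vacant.insert(value.clone());
                    }
                }
            }
        }
    }
}

fn main() {
    let mut state = PendingState::default();
    let path = PathBuf::from("FooFragment.js");

    // A new fragment shows up, then the file is deleted before a build runs.
    state.merge_pending(&HashMap::from([(path.clone(), b"fragment".to_vec())]));
    state.merge_pending(&HashMap::from([(path.clone(), Vec::new())]));

    // The pending entry is overwritten with the empty source instead of
    // keeping the stale fragment (the bug this patch fixes).
    assert_eq!(state.pending.get(&path), Some(&Vec::new()));
    println!("pending entry for {:?} is now empty", path);
}

The Entry-based formulation makes the "already pending" case explicit, which the previous extend/filter version silently dropped.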