diff --git a/Cargo.lock b/Cargo.lock
index f893ff620f029..d46aa5e99c676 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1131,6 +1131,7 @@ dependencies = [
  "ropey",
  "serde",
  "serde_json",
+ "sha1_smol",
  "slotmap",
  "smallvec",
  "smartstring",
@@ -1267,7 +1268,6 @@ dependencies = [
  "chardetng",
  "clipboard-win",
  "crossterm",
- "either",
  "futures-util",
  "helix-core",
  "helix-dap",
@@ -1286,6 +1286,7 @@ dependencies = [
  "tokio-stream",
  "toml",
  "url",
+ "walkdir",
  "which",
  "winapi",
 ]
diff --git a/helix-core/Cargo.toml b/helix-core/Cargo.toml
index 6605f7cedbe35..024275561c45c 100644
--- a/helix-core/Cargo.toml
+++ b/helix-core/Cargo.toml
@@ -32,6 +32,7 @@ regex = "1"
 bitflags = "1.3"
 ahash = "0.8.2"
 hashbrown = { version = "0.13.2", features = ["raw"] }
+sha1_smol = "1.0"
 
 log = "0.4"
 serde = { version = "1.0", features = ["derive"] }
diff --git a/helix-core/src/history.rs b/helix-core/src/history.rs
index 633ff3c0e736b..6003a13b4475e 100644
--- a/helix-core/src/history.rs
+++ b/helix-core/src/history.rs
@@ -5,7 +5,6 @@ use regex::Regex;
 use std::io::{Read, Write};
 use std::num::NonZeroUsize;
 use std::time::{Duration, Instant};
-
 #[derive(Debug, Clone)]
 pub struct State {
     pub doc: Rope,
@@ -69,62 +68,20 @@ struct Revision {
 
 const HEADER_TAG: &str = "Helix Undofile 1\n";
 
-pub fn serialize_history<W: Write>(
-    writer: &mut W,
-    history: &History,
-    mtime: u64,
-    hash: [u8; 20],
-) -> std::io::Result<()> {
-    write_string(writer, HEADER_TAG)?;
-    write_usize(writer, history.current)?;
-    write_u64(writer, mtime)?;
-    writer.write_all(&hash)?;
-    write_vec(writer, &history.revisions, serialize_revision)?;
-    Ok(())
-}
+fn get_hash<R: Read>(reader: &mut R) -> std::io::Result<[u8; 20]> {
+    const BUF_SIZE: usize = 8192;
 
-pub fn deserialize_history<R: Read>(reader: &mut R) -> std::io::Result<History> {
-    let header = read_string(reader)?;
-    if HEADER_TAG != header {
-        Err(std::io::Error::new(
-            std::io::ErrorKind::Other,
-            format!("missing undofile header"),
-        ))
-    } else {
-        let timestamp = Instant::now();
-        let current = read_usize(reader)?;
-        let mtime = read_u64(reader)?;
-        let mut hash = [0u8; 20];
-        reader.read_exact(&mut hash)?;
-        let revisions = read_vec(reader, |reader| deserialize_revision(reader, timestamp))?;
-        Ok(History { current, revisions })
-    }
-}
-
-fn serialize_revision<W: Write>(writer: &mut W, revision: &Revision) -> std::io::Result<()> {
-    write_usize(writer, revision.parent)?;
-    write_usize(writer, revision.last_child.map(|n| n.get()).unwrap_or(0))?;
-    crate::transaction::serialize_transaction(writer, &revision.transaction)?;
-    crate::transaction::serialize_transaction(writer, &revision.inversion)?;
-
-    Ok(())
-}
+    let mut buf = [0u8; BUF_SIZE];
+    let mut hash = sha1_smol::Sha1::new();
+    loop {
+        let total_read = reader.read(&mut buf)?;
+        if total_read == 0 {
+            break;
+        }
 
-fn deserialize_revision<R: Read>(reader: &mut R, timestamp: Instant) -> std::io::Result<Revision> {
-    let parent = read_usize(reader)?;
-    let last_child = match read_usize(reader)? {
-        0 => None,
-        n => Some(unsafe { NonZeroUsize::new_unchecked(n) }),
-    };
-    let transaction = crate::transaction::deserialize_transaction(reader)?;
-    let inversion = crate::transaction::deserialize_transaction(reader)?;
-    Ok(Revision {
-        parent,
-        last_child,
-        transaction,
-        inversion,
-        timestamp,
-    })
+        hash.update(&buf[0..total_read]);
+    }
+    Ok(hash.digest().bytes())
 }
 
 impl Default for History {
@@ -143,7 +100,82 @@ impl Default for History {
     }
 }
 
+impl Revision {
+    fn serialize<W: Write>(&self, writer: &mut W) -> std::io::Result<()> {
+        write_usize(writer, self.parent)?;
+        write_usize(writer, self.last_child.map(|n| n.get()).unwrap_or(0))?;
+        crate::transaction::serialize_transaction(writer, &self.transaction)?;
+        crate::transaction::serialize_transaction(writer, &self.inversion)?;
+
+        Ok(())
+    }
+
+    fn deserialize<R: Read>(reader: &mut R, timestamp: Instant) -> std::io::Result<Self> {
+        let parent = read_usize(reader)?;
+        let last_child = match read_usize(reader)? {
+            0 => None,
+            n => Some(unsafe { NonZeroUsize::new_unchecked(n) }),
+        };
+        let transaction = crate::transaction::deserialize_transaction(reader)?;
+        let inversion = crate::transaction::deserialize_transaction(reader)?;
+        Ok(Revision {
+            parent,
+            last_child,
+            transaction,
+            inversion,
+            timestamp,
+        })
+    }
+}
+
 impl History {
+    pub fn serialize<W: Write, R: Read>(
+        &self,
+        writer: &mut W,
+        text: &mut R,
+        last_saved_revision: usize,
+        last_mtime: u64,
+    ) -> std::io::Result<()> {
+        write_string(writer, HEADER_TAG)?;
+        write_usize(writer, self.current)?;
+        write_usize(writer, last_saved_revision)?;
+        write_u64(writer, last_mtime)?;
+        writer.write_all(&get_hash(text)?)?;
+        write_vec(writer, &self.revisions, |writer, rev| rev.serialize(writer))?;
+        Ok(())
+    }
+
+    pub fn deserialize<R: Read>(
+        reader: &mut R,
+        text: &mut R,
+        last_mtime: u64,
+    ) -> std::io::Result<(usize, Self)> {
+        let header = read_string(reader)?;
+        if HEADER_TAG != header {
+            Err(std::io::Error::new(
+                std::io::ErrorKind::Other,
+                "missing undofile header",
+            ))
+        } else {
+            let timestamp = Instant::now();
+            let current = read_usize(reader)?;
+            let last_saved_revision = read_usize(reader)?;
+            let mtime = read_u64(reader)?;
+            let mut hash = [0u8; 20];
+            reader.read_exact(&mut hash)?;
+
+            if mtime != last_mtime && hash != get_hash(text)? {
+                return Err(std::io::Error::new(
+                    std::io::ErrorKind::Other,
+                    "outdated undo file",
+                ));
+            }
+
+            let revisions = read_vec(reader, |reader| Revision::deserialize(reader, timestamp))?;
+            Ok((last_saved_revision, History { current, revisions }))
+        }
+    }
+
     pub fn commit_revision(&mut self, transaction: &Transaction, original: &State) {
         self.commit_revision_at_timestamp(transaction, original, Instant::now());
     }
@@ -708,8 +740,12 @@ mod test {
             selection: Selection::point(0),
         };
         history.commit_revision(&transaction, &state);
-        serialize_history(&mut buf, &history).unwrap();
-        deserialize_history(&mut buf.as_slice()).unwrap();
+
+        let text = Vec::new();
+        history
+            .serialize(&mut buf, &mut text.as_slice(), 0, 0)
+            .unwrap();
+        History::deserialize(&mut buf.as_slice(), &mut text.as_slice(), 0).unwrap();
         true
     }
 );
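The undo file written by `History::serialize` is tied to the text it was generated from: it records the document's mtime and a SHA-1 of its contents, and `History::deserialize` rejects the file as "outdated undo file" only when both values disagree with the current text. A minimal sketch of that staleness check outside the editor, assuming only `std` and `sha1_smol` (the `is_stale` helper is illustrative and not part of this patch):

```rust
use std::fs::File;
use std::io::Read;

/// Illustrative only: recomputes the content hash the same way `get_hash`
/// does and applies the same rule as `History::deserialize`.
fn is_stale(path: &str, stored_mtime: u64, stored_hash: [u8; 20]) -> std::io::Result<bool> {
    let mtime = std::fs::metadata(path)?
        .modified()?
        .duration_since(std::time::UNIX_EPOCH)
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?
        .as_secs();

    // Stream the file through SHA-1 in fixed-size chunks, as `get_hash` does.
    let mut hasher = sha1_smol::Sha1::new();
    let mut buf = [0u8; 8192];
    let mut file = File::open(path)?;
    loop {
        let n = file.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }

    // Only a changed mtime *and* a changed hash mark the undo file as stale.
    Ok(mtime != stored_mtime && hasher.digest().bytes() != stored_hash)
}
```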
diff --git a/helix-core/src/path.rs b/helix-core/src/path.rs
index d59a6baad604d..c4d86ed821d19 100644
--- a/helix-core/src/path.rs
+++ b/helix-core/src/path.rs
@@ -1,5 +1,9 @@
 use etcetera::home_dir;
-use std::path::{Component, Path, PathBuf};
+use std::{
+    ffi::OsString,
+    path::{Component, Path, PathBuf},
+    str::Utf8Error,
+};
 
 /// Replaces users home directory from `path` with tilde `~` if the directory
 /// is available, otherwise returns the path unchanged.
@@ -141,3 +145,23 @@ pub fn get_truncated_path<P: AsRef<Path>>(path: P) -> PathBuf {
     ret.push(file);
     ret
 }
+
+pub fn path_as_bytes<P: AsRef<Path>>(path: P) -> Vec<u8> {
+    let path = path.as_ref();
+
+    #[cfg(windows)]
+    return path.to_str().unwrap().into();
+
+    #[cfg(unix)]
+    return std::os::unix::ffi::OsStrExt::as_bytes(path.as_os_str()).into();
+}
+
+pub fn path_from_bytes(slice: &[u8]) -> Result<PathBuf, Utf8Error> {
+    #[cfg(windows)]
+    return Ok(PathBuf::from(std::str::from_utf8(slice)?));
+
+    #[cfg(unix)]
+    return Ok(PathBuf::from(
+        <std::ffi::OsStr as std::os::unix::ffi::OsStrExt>::from_bytes(slice),
+    ));
+}
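`path_as_bytes` and `path_from_bytes` exist so document paths can be stored inside the binary undo index. A quick round-trip check, assuming the helpers are exported from `helix_core::path` exactly as above (the standalone `main` is only for illustration):

```rust
use std::path::PathBuf;

use helix_core::path::{path_as_bytes, path_from_bytes};

fn main() {
    let original = PathBuf::from("src/main.rs");

    // Paths are flattened to raw bytes (UTF-8 on Windows, OS bytes on Unix)...
    let bytes = path_as_bytes(&original);

    // ...and recovered on the way back in; only invalid UTF-8 on Windows can fail.
    let restored = path_from_bytes(&bytes).expect("path bytes were not valid UTF-8");
    assert_eq!(original, restored);
}
```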
diff --git a/helix-term/src/commands/typed.rs b/helix-term/src/commands/typed.rs
index ec7100a637405..befb816984043 100644
--- a/helix-term/src/commands/typed.rs
+++ b/helix-term/src/commands/typed.rs
@@ -1822,6 +1822,30 @@ fn run_shell_command(
     Ok(())
 }
 
+fn save_workspace(
+    cx: &mut compositor::Context,
+    _args: &[Cow<str>],
+    event: PromptEvent,
+) -> anyhow::Result<()> {
+    if event != PromptEvent::Validate {
+        return Ok(());
+    }
+
+    cx.editor.save_workspace()
+}
+
+fn open_workspace(
+    cx: &mut compositor::Context,
+    _args: &[Cow<str>],
+    event: PromptEvent,
+) -> anyhow::Result<()> {
+    if event != PromptEvent::Validate {
+        return Ok(());
+    }
+
+    cx.editor.open_workspace()
+}
+
 pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
     TypableCommand {
         name: "quit",
@@ -2337,6 +2361,20 @@ pub const TYPABLE_COMMAND_LIST: &[TypableCommand] = &[
         fun: run_shell_command,
         completer: Some(completers::filename),
     },
+    TypableCommand {
+        name: "save-workspace",
+        aliases: &["sw"],
+        doc: "Save open document undo history",
+        fun: save_workspace,
+        completer: None,
+    },
+    TypableCommand {
+        name: "open-workspace",
+        aliases: &["ow"],
+        doc: "Open document undo history",
+        fun: open_workspace,
+        completer: None,
+    },
 ];
 
 pub static TYPABLE_COMMAND_MAP: Lazy<HashMap<&'static str, &'static TypableCommand>> =
diff --git a/helix-view/Cargo.toml b/helix-view/Cargo.toml
index 775ccc93bd9a0..23195dc8faf15 100644
--- a/helix-view/Cargo.toml
+++ b/helix-view/Cargo.toml
@@ -25,7 +25,7 @@ helix-vcs = { version = "0.6", path = "../helix-vcs" }
 winapi = "0.3"
 sha1_smol = "1.0"
-either = "1.8"
+walkdir = "2.3"
 
 # Conversion traits
 once_cell = "1.17"
diff --git a/helix-view/src/editor.rs b/helix-view/src/editor.rs
index b8b7cea66ef0a..4383fd3e1d6a7 100644
--- a/helix-view/src/editor.rs
+++ b/helix-view/src/editor.rs
@@ -1,5 +1,5 @@
 use crate::{
-    align_view,
+    align_view, apply_transaction,
     clipboard::{get_clipboard_provider, ClipboardProvider},
     document::{DocumentSavedEventFuture, DocumentSavedEventResult, Mode},
     graphics::{CursorKind, Rect},
@@ -7,7 +7,7 @@ use crate::{
     input::KeyEvent,
     theme::{self, Theme},
     tree::{self, Tree},
-    workspace::Workspace,
+    workspace::{undo::UndoIndex, Workspace},
     Align, Document, DocumentId, View, ViewId,
 };
 use helix_vcs::DiffProviderRegistry;
@@ -20,6 +20,7 @@ use tokio_stream::wrappers::UnboundedReceiverStream;
 use std::{
     borrow::Cow,
     collections::{BTreeMap, HashMap},
+    fs::File,
     io::stdin,
     num::NonZeroUsize,
     path::{Path, PathBuf},
@@ -35,7 +36,7 @@ use tokio::{
     time::{sleep, Duration, Instant, Sleep},
 };
 
-use anyhow::{anyhow, bail, Error};
+use anyhow::{anyhow, bail, Context, Error};
 
 pub use helix_core::diagnostic::Severity;
 pub use helix_core::register::Registers;
@@ -914,13 +915,117 @@ impl Editor {
         }
     }
 
-    pub fn save_workspace(&self) -> anyhow::Result<()> {
-        let mut workspace = Workspace::new(std::env::current_dir()?)?;
+    // TODO: Async?
+    pub fn save_workspace(&mut self) -> anyhow::Result<()> {
+        let mut workspace = Workspace::new()?;
+        let mut index_file = workspace.get_mut("index.undo")?;
+        let index = {
+            let mut current_index =
+                UndoIndex::deserialize(&mut index_file).unwrap_or(UndoIndex::default());
+            let new_files = self.documents().filter_map(|doc| {
+                doc.path().filter(|path| {
+                    !current_index
+                        .0
+                        .iter()
+                        .any(|(_, indexed_path)| indexed_path == *path)
+                })
+            });
+            let mut last_id = current_index.0.last().map(|(id, _)| *id).unwrap_or(0);
+            current_index.0.append(
+                &mut new_files
+                    .map(|path| {
+                        let current_id = last_id;
+                        last_id += 1;
+                        (current_id, path.clone())
+                    })
+                    .collect(),
+            );
+            current_index
+        };
+        log::debug!("Saving undo index: {:?}", index);
+
+        index
+            .serialize(&mut index_file)
+            .context("failed to serialize index")?;
+        for doc in self.documents_mut().filter(|doc| doc.path().is_some()) {
+            let history = doc.history.take();
+            let last_saved_revision = doc.get_last_saved_revision();
+            let path = doc.path().unwrap();
+            let mtime = std::fs::metadata(path.clone())?
+                .modified()?
+                .duration_since(std::time::UNIX_EPOCH)?
+                .as_secs();
+            let id = index.find_id(path).unwrap();
+            let mut undo_file = workspace.get_mut(&id.to_string())?;
+
+            history
+                .serialize(
+                    &mut undo_file,
+                    &mut File::open(path)?,
+                    last_saved_revision,
+                    mtime,
+                )
+                .context(format!(
+                    "failed to save history for {}",
+                    path.to_string_lossy()
+                ))?;
+            doc.history.set(history);
+        }
         Ok(())
     }
 
-    pub fn session(&self) -> anyhow::Result<Workspace> {
-        Workspace::new(std::env::current_dir()?)
+    pub fn open_workspace(&mut self) -> anyhow::Result<()> {
+        let mut workspace = Workspace::new()?;
+        let index = UndoIndex::deserialize(&mut workspace.get("index.undo")?)
+            .context("failed to load index")?;
+
+        let scrolloff = self.config().scrolloff;
+        for (id, path) in index.0 {
+            if !path.exists() {
+                continue;
+            }
+            let current_view_id = view!(&self).id;
+
+            let mut undo_file = workspace.get(&id.to_string())?;
+            let last_mtime = std::fs::metadata(path.clone())?
+                .modified()?
+                .duration_since(std::time::UNIX_EPOCH)?
+                .as_secs();
+            let id = self.open(path.as_path(), Action::Load)?;
+            let doc = doc_mut!(self, &id);
+            let (last_saved_revision, history) = helix_core::history::History::deserialize(
+                &mut undo_file,
+                &mut File::open(path)?,
+                last_mtime,
+            )
+            .context("failed to load history")?;
+
+            if history.current_revision() != last_saved_revision {
+                let selections = doc.selections();
+                let view_id = if selections.contains_key(&current_view_id) {
+                    // use current if possible
+                    current_view_id
+                } else {
+                    // Hack: we take the first available view_id
+                    selections
+                        .keys()
+                        .next()
+                        .copied()
+                        .expect("No view_id available")
+                };
+                let view = view_mut!(self, view_id);
+                apply_transaction(
+                    &history.changes_since(last_saved_revision).unwrap(),
+                    doc,
+                    &view,
+                );
+                view.ensure_cursor_in_view(&doc, scrolloff);
+            }
+            doc.history.set(history);
+            doc.set_last_saved_revision(last_saved_revision);
+        }
+
+        Ok(())
     }
 
     /// Current editing mode for the [`Editor`].
diff --git a/helix-view/src/workspace/mod.rs b/helix-view/src/workspace/mod.rs
index cdc5648014992..9ec543d84e68a 100644
--- a/helix-view/src/workspace/mod.rs
+++ b/helix-view/src/workspace/mod.rs
@@ -6,6 +6,7 @@ use std::{
 };
 
 use anyhow::{Context, Result};
+use helix_core::path::path_as_bytes;
 use sha1_smol::Sha1;
 
 pub struct Workspace {
@@ -13,17 +14,10 @@ pub struct Workspace {
     lock: Option<FileLock>,
 }
 
-fn path_as_bytes(path: PathBuf) -> Vec<u8> {
-    #[cfg(windows)]
-    return path.to_str().unwrap().into();
-
-    #[cfg(unix)]
-    return std::os::unix::ffi::OsStrExt::as_bytes(path.as_os_str()).into();
-}
-
 impl Workspace {
     // TODO: Allow custom session names to be passed.
-    pub fn new(path: PathBuf) -> Result<Self> {
+    pub fn new() -> Result<Self> {
+        let path = std::env::current_dir()?;
         let bytes = path_as_bytes(path);
         let hash = Sha1::from(bytes).digest().to_string();
         let path = helix_loader::cache_dir().join("workspaces").join(hash);
@@ -34,33 +28,35 @@ impl Workspace {
         self.path.clone()
     }
 
-    pub fn get(&mut self, filename: &str) -> Result<File> {
+    pub fn get(&mut self, path: &str) -> Result<File> {
         if self.lock.is_none() {
             let lock = FileLock::shared(self.path.join(".helix.lock"))?;
             lock.lock()?;
             self.lock = Some(lock);
         }
+        let path = self.path.join(path);
 
         OpenOptions::new()
             .read(true)
-            .open(self.path.join(filename))
+            .open(path)
            .context("failed to open file")
     }
 
-    pub fn get_mut(&mut self, filename: &str) -> Result<File> {
+    pub fn get_mut(&mut self, path: &str) -> Result<File> {
         if self.lock.is_none() {
             let lock = FileLock::exclusive(self.path.join(".helix.lock"))?;
             lock.lock()?;
             self.lock = Some(lock);
         }
+        let path = self.path.join(path);
 
         OpenOptions::new()
             .read(true)
             .write(true)
             .create(true)
-            .open(self.path.join(filename))
+            .open(path)
             .context("failed to open file")
     }
 }
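Workspace state is keyed by the working directory rather than by a session name: `Workspace::new` hashes the cwd and stores everything under the cache directory, so two different checkouts never share undo files. A sketch of that derivation, with `cache_dir` standing in for `helix_loader::cache_dir()` (illustrative only, not part of the patch):

```rust
use std::path::{Path, PathBuf};

use helix_core::path::path_as_bytes;
use sha1_smol::Sha1;

/// Illustrative: mirrors how `Workspace::new` maps a working directory to its
/// on-disk location, e.g. <cache_dir>/workspaces/<sha1-of-cwd>.
fn workspace_dir(cwd: &Path, cache_dir: &Path) -> PathBuf {
    let hash = Sha1::from(path_as_bytes(cwd)).digest().to_string();
    cache_dir.join("workspaces").join(hash)
}
```

`Workspace::get` and `Workspace::get_mut` then resolve names such as `index.undo` or a numeric undo-file id inside that directory, taking the shared or exclusive `.helix.lock` first.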
diff --git a/helix-view/src/workspace/undo.rs b/helix-view/src/workspace/undo.rs
index 55d9f8bef3326..8ed1cefcae887 100644
--- a/helix-view/src/workspace/undo.rs
+++ b/helix-view/src/workspace/undo.rs
@@ -1,70 +1,42 @@
-// use std::fs::File;
-// use std::io::BufReader;
-// use std::io::BufWriter;
-// use std::path::PathBuf;
-
-// #[cfg(unix)]
-// use std::os::unix::prelude::OsStrExt;
-
-// use anyhow::Context;
-// use anyhow::Result;
-// use helix_core::history::deserialize_history;
-// use helix_core::history::serialize_history;
-// use helix_core::parse::*;
-
-// use crate::Editor;
-
-// use super::Session;
-
-// // TODO: Check if serialized files already exist, and use them.
-// // TODO: Maybe have a way to verify that the histories match, and overwrite if they don't.
-
-// pub fn serialize(session: &mut Session, editor: &mut Editor) -> Result<()> {
-//     let cwd = std::env::current_dir()?;
-//     for doc in editor.documents_mut().filter(|doc| doc.path().is_some()) {
-
-//     }
-//     // Handle existing index file to merge.
-//     let mut index_file = session.get_mut("undo/index")?;
-//     let mut index = deserialize_index(&index_file).context("failed to parse undo index")?;
-//     for path in editor.documents().filter_map(|doc| doc.path().cloned()) {
-//         if !index.iter().any(|(_, value)| *value == path) {
-//             let key = index.last().map(|(key, _)| key + 1).unwrap_or(0);
-//             index.push((key, path));
-//         }
-//     }
-//     serialize_index(&mut index_file, &index)?;
-
-//     for (filename, doc_path) in index {
-//         let doc = match editor
-//             .documents_mut()
-//             .find(|doc| doc.path() == Some(&doc_path))
-//         {
-//             Some(doc) => doc,
-//             None => continue,
-//         };
-//         let filename = format!("undo/{filename}");
-//         let file = session.get_mut(&filename)?;
-//         let history = doc.history.take();
-//         serialize_history(file, &history)?;
-//         doc.history.set(history);
-//     }
-
-//     Ok(())
-// }
-
-// pub fn deserialize(session: &mut Session, editor: &mut Editor) -> Result<()> {
-//     let index = session
-//         .get("undo/index")
-//         .and_then(|file| deserialize_index(&file))
-//         .context("failed to parse index file")?;
-
-//     for (filename, doc_path) in index {
-//         let id = editor.open(&doc_path, crate::editor::Action::Load)?;
-//         let doc = editor.document_mut(id).unwrap();
-//         let filename = format!("undo/{filename}");
-//         let file = session.get(&filename)?;
-//         doc.history = std::cell::Cell::new(deserialize_history(file)?);
-//     }
-
-//     Ok(())
-// }
+use anyhow::Result;
+use std::{
+    io::{Error, ErrorKind, Read, Write},
+    path::PathBuf,
+};
+
+use helix_core::{
+    parse::*,
+    path::{path_as_bytes, path_from_bytes},
+};
+
+#[derive(Default, Debug)]
+pub struct UndoIndex(pub Vec<(usize, PathBuf)>);
+
+impl UndoIndex {
+    pub fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {
+        write_vec(writer, &self.0, |writer, (id, path)| {
+            write_usize(writer, *id)?;
+            write_vec(writer, &path_as_bytes(path), |writer, byte| {
+                write_byte(writer, *byte)
+            })?;
+            Ok(())
+        })?;
+        Ok(())
+    }
+
+    pub fn deserialize<R: Read>(reader: &mut R) -> Result<Self> {
+        let res = read_vec(reader, |reader| {
+            let id = read_usize(reader)?;
+            let path = path_from_bytes(&read_vec(reader, read_byte)?)
+                .map_err(|e| Error::new(ErrorKind::InvalidData, e))?;
+            Ok((id, path))
+        })?;
+        Ok(Self(res))
+    }
+
+    pub fn find_id(&self, path: &PathBuf) -> Option<usize> {
+        self.0
+            .iter()
+            .find_map(|(id, index_path)| (index_path == path).then_some(*id))
+    }
+}
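`UndoIndex` maps a stable numeric id to each document path so `:open-workspace` can find the matching undo file again; the ids double as file names inside the workspace directory. A round-trip sketch of its encoding, assuming the type stays reachable as `helix_view::workspace::undo::UndoIndex` (module visibility may differ):

```rust
use std::path::PathBuf;

// Module path assumed from the diff above; adjust if visibility changes.
use helix_view::workspace::undo::UndoIndex;

fn main() -> anyhow::Result<()> {
    // Two tracked documents; their ids name the per-document undo files.
    let index = UndoIndex(vec![
        (0, PathBuf::from("src/main.rs")),
        (1, PathBuf::from("README.md")),
    ]);

    // Serialize into memory instead of `<workspace>/index.undo`.
    let mut buf = Vec::new();
    index.serialize(&mut buf)?;

    // Reading it back restores the same id -> path mapping.
    let restored = UndoIndex::deserialize(&mut buf.as_slice())?;
    assert_eq!(restored.find_id(&PathBuf::from("README.md")), Some(1));
    Ok(())
}
```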