Immediately parse all files on LSP startup
VonTum committed Feb 6, 2024
1 parent 9c82ec9 commit 58d20c4
Showing 4 changed files with 75 additions and 42 deletions.
74 changes: 53 additions & 21 deletions src/dev_aid/lsp.rs
@@ -1,5 +1,5 @@

-use std::{error::Error, net::SocketAddr};
+use std::{error::Error, ffi::OsStr, fs::read_dir, net::SocketAddr};
 use lsp_types::{notification::*, request::Request, *};
 
 use lsp_server::{Connection, Message, Response};
@@ -24,7 +24,7 @@ impl LoadedFileCache {
             .find(|(_uuid, uri_found)| **uri_found == *uri)
             .map(|(uuid, _uri_found)| uuid)
     }
-    fn update_text(&mut self, uri : Url, new_file_text : String) -> FileUUID {
+    fn update_text(&mut self, uri : Url, new_file_text : String) {
         let found_opt = self.find_uri(&uri);
         let found_opt_was_none = found_opt.is_none();
         let file_uuid : FileUUID = found_opt.unwrap_or_else(|| self.linker.reserve_file());
@@ -37,8 +37,6 @@ impl LoadedFileCache {
             self.linker.relink(file_uuid, full_parse);
         }
         self.linker.recompile_all();
-
-        file_uuid
     }
     fn ensure_contains_file(&mut self, uri : &Url) -> FileUUID {
         if let Some(found) = self.find_uri(uri) {
@@ -215,28 +213,31 @@ fn cvt_span_to_lsp_range(ch_sp : Span, tokens : &TokenizeResult) -> lsp_types::R
 }
 
 // Requires that token_positions.len() == tokens.len() + 1 to include EOF token
-fn convert_diagnostic(err : CompileError, tokens : &TokenizeResult, uris : &ArenaVector<Url, FileUUIDMarker>) -> Diagnostic {
-    let error_pos = cvt_span_to_lsp_range(err.position, tokens);
+fn convert_diagnostic(err : CompileError, main_tokens : &TokenizeResult, linker : &Linker, uris : &ArenaVector<Url, FileUUIDMarker>) -> Diagnostic {
+    assert!(err.position.1 < main_tokens.token_types.len(), "bad error: {}", err.reason);
+    let error_pos = cvt_span_to_lsp_range(err.position, main_tokens);
 
     let severity = match err.level {
         ErrorLevel::Error => DiagnosticSeverity::ERROR,
         ErrorLevel::Warning => DiagnosticSeverity::WARNING,
     };
     let mut related_info = Vec::new();
     for info in err.infos {
-        let info_pos = cvt_span_to_lsp_range(info.position, tokens);
+        let info_tokens = &linker.files[info.file].tokens;
+        assert!(info.position.1 < info_tokens.token_types.len(), "bad info: {}; in err: {}", info.info, err.reason);
+        let info_pos = cvt_span_to_lsp_range(info.position, info_tokens);
         let location = Location{uri : uris[info.file].clone(), range : info_pos};
         related_info.push(DiagnosticRelatedInformation { location, message: info.info });
     }
     Diagnostic::new(error_pos, Some(severity), None, None, err.reason, Some(related_info), None)
 }
 
 // Requires that token_positions.len() == tokens.len() + 1 to include EOF token
-fn send_errors_warnings(connection: &Connection, errors : ErrorCollector, token_boundaries : &TokenizeResult, uris : &ArenaVector<Url, FileUUIDMarker>) -> Result<(), Box<dyn Error + Sync + Send>> {
+fn send_errors_warnings(connection: &Connection, errors : ErrorCollector, main_tokens : &TokenizeResult, linker : &Linker, uris : &ArenaVector<Url, FileUUIDMarker>) -> Result<(), Box<dyn Error + Sync + Send>> {
     let mut diag_vec : Vec<Diagnostic> = Vec::new();
     let (err_vec, file) = errors.get();
     for err in err_vec {
-        diag_vec.push(convert_diagnostic(err, token_boundaries, uris));
+        diag_vec.push(convert_diagnostic(err, main_tokens, linker, uris));
     }
 
     let params = &PublishDiagnosticsParams{
Expand Down Expand Up @@ -268,11 +269,48 @@ fn get_hover_info<'l>(file_cache : &'l LoadedFileCache, text_pos : &lsp_types::T
Some((info, Some(to_position_range(char_line_range))))
}

fn push_all_errors(connection: &Connection, file_cache : &LoadedFileCache) -> Result<(), Box<dyn Error + Sync + Send>> {
for (uuid, file_data) in &file_cache.linker.files {
let errors = file_cache.linker.get_all_errors_in_file(uuid);

// println!("Errors: {:?}", &errors);
send_errors_warnings(&connection, errors, &file_data.tokens, &file_cache.linker, &file_cache.uris)?;
}
Ok(())
}

fn initialize_all_files(init_params : &InitializeParams) -> LoadedFileCache {
let mut linker = Linker::new();
let mut uris = ArenaVector::new();

if let Some(workspace_folder) = &init_params.workspace_folders {
for folder in workspace_folder {
let Ok(path) = folder.uri.to_file_path() else {continue};

for file in std::fs::read_dir(path).unwrap() {
let file_path = file.unwrap().path();
if file_path.is_file() && file_path.extension() == Some(OsStr::new("sus")) {
let file_uuid = linker.reserve_file();
let file_text = std::fs::read_to_string(&file_path).unwrap();
let full_parse = perform_full_semantic_parse(file_text, file_uuid);
linker.add_reserved_file(file_uuid, full_parse);
uris.insert(file_uuid, Url::from_file_path(&file_path).unwrap());
}
}
}
}
let mut result = LoadedFileCache::new(linker, uris);
result.linker.recompile_all();
result
}

fn main_loop(connection: Connection, params: serde_json::Value, debug : bool) -> Result<(), Box<dyn Error + Sync + Send>> {
let params: InitializeParams = serde_json::from_value(params).unwrap();

let mut file_cache = LoadedFileCache::new(Linker::new(), ArenaVector::new());
let mut file_cache = initialize_all_files(&params);

push_all_errors(&connection, &file_cache)?;

let _params: InitializeParams = serde_json::from_value(params).unwrap();
println!("starting LSP main loop");
for msg in &connection.receiver {
println!("got msg: {msg:?}");
Expand Down Expand Up @@ -398,18 +436,12 @@ fn main_loop(connection: Connection, params: serde_json::Value, debug : bool) ->
match not.method.as_str() {
notification::DidChangeTextDocument::METHOD => {
let params : DidChangeTextDocumentParams = serde_json::from_value(not.params).expect("JSON Encoding Error while parsing params");
let uuid = file_cache.update_text(params.text_document.uri, params.content_changes.into_iter().next().unwrap().text);

// println!("Flattening...");
file_cache.linker.recompile_all();
file_cache.update_text(params.text_document.uri, params.content_changes.into_iter().next().unwrap().text);

let file_data = &file_cache.linker.files[uuid]; // Have to grab it again because previous line mutates

let mut errors = file_data.parsing_errors.clone();
file_cache.linker.get_all_errors_in_file(uuid, &mut errors);
push_all_errors(&connection, &file_cache)?;
}
notification::DidDeleteFiles::METHOD => {

// println!("Errors: {:?}", &errors);
send_errors_warnings(&connection, errors, &file_data.tokens, &file_cache.uris)?;
}
other => {
println!("got notification: {other:?}");
Expand Down
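Aside on the scan above: `initialize_all_files` calls `unwrap()` on every directory entry, file read, and URL conversion, so a single unreadable entry aborts the server during startup. Below is a minimal sketch of a more defensive scan. It reuses only names that appear in this diff (`Linker`, `ArenaVector`, `perform_full_semantic_parse`); the helper itself (`scan_folder`) is hypothetical and not part of this commit.

```rust
use std::{ffi::OsStr, path::Path};

// Hypothetical helper (not in this commit): scan one workspace folder,
// skipping unreadable entries instead of panicking.
fn scan_folder(linker : &mut Linker, uris : &mut ArenaVector<Url, FileUUIDMarker>, path : &Path) {
    let Ok(dir) = std::fs::read_dir(path) else { return }; // unreadable folder: skip it
    for entry in dir.flatten() {
        let file_path = entry.path();
        if file_path.is_file() && file_path.extension() == Some(OsStr::new("sus")) {
            // Skip files that fail to read or whose path makes no valid URL.
            let Ok(file_text) = std::fs::read_to_string(&file_path) else { continue };
            let Ok(uri) = Url::from_file_path(&file_path) else { continue };
            let file_uuid = linker.reserve_file();
            linker.add_reserved_file(file_uuid, perform_full_semantic_parse(file_text, file_uuid));
            uris.insert(file_uuid, uri);
        }
    }
}
```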
3 changes: 1 addition & 2 deletions src/dev_aid/syntax_highlighting.rs
@@ -253,8 +253,7 @@ pub fn print_all_errors(linker : &Linker, paths_arena : &ArenaVector<PathBuf, Fi
     for (file_uuid, f) in &linker.files {
         let token_offsets = generate_character_offsets(&f.file_text, &f.tokens);
 
-        let mut errors = f.parsing_errors.clone();
-        linker.get_all_errors_in_file(file_uuid, &mut errors);
+        let errors = linker.get_all_errors_in_file(file_uuid);
 
         for err in errors.get().0 {
             err.pretty_print_error(f.parsing_errors.file, &token_offsets, &paths_arena, &mut file_cache);
8 changes: 5 additions & 3 deletions src/linker.rs
@@ -282,9 +282,11 @@ impl Linker {
         }
     }
 
-    pub fn get_all_errors_in_file(&self, file_uuid : FileUUID, errors : &ErrorCollector) {
-        self.get_duplicate_declaration_errors(file_uuid, errors);
-        self.get_flattening_errors(file_uuid, errors);
+    pub fn get_all_errors_in_file(&self, file_uuid : FileUUID) -> ErrorCollector {
+        let errors = self.files[file_uuid].parsing_errors.clone();
+        self.get_duplicate_declaration_errors(file_uuid, &errors);
+        self.get_flattening_errors(file_uuid, &errors);
+        errors
     }
 
     pub fn remove_file_datas(&mut self, files : &[FileUUID]) {
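With the new signature, `get_all_errors_in_file` seeds a fresh `ErrorCollector` with the file's parsing errors and returns it, instead of mutating a collector the caller passes in. A sketch of the resulting call pattern, matching both call sites in this diff:

```rust
// Both lsp.rs and syntax_highlighting.rs now follow this shape:
let errors = linker.get_all_errors_in_file(file_uuid);
// ErrorCollector::get() yields the error list and the file it belongs to.
let (err_vec, _file) = errors.get();
for err in err_vec {
    // convert each error to an LSP Diagnostic, or pretty-print it on the CLI
}
```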
32 changes: 16 additions & 16 deletions valid_syntax.sus
@@ -1,7 +1,7 @@



-module multiply_add :
+module multiply_add_valid :
     int a,
     int b,
     int c
@@ -11,7 +11,7 @@ module multiply_add :
     total = tmp + c;
 }
 
-module fibonnaci : -> int num {
+module fibonnaci_valid : -> int num {
     state int current = 1;
     state int current_prev = 0;
 
@@ -21,7 +21,7 @@ module fibonnaci : -> int num {
 }
 
 //timeline (v, true -> /) .. (v, false -> v)*
-module blur2 :
+module blur2_valid :
     int data,
     bool first
     -> int blurred {
@@ -46,7 +46,7 @@ module blur2 :
 }
 
 
-module Tree_Multiply : int[4] values -> int total {
+module Tree_Multiply_valid : int[4] values -> int total {
     int a = values[0] * values[1];
     int b = values[2] * values[3];
     total = a * b;
@@ -56,7 +56,7 @@ module Tree_Multiply : int[4] values -> int total {
 
 
 //timeline (X, false -> /)* .. (X, true -> T)
-module Accumulator : int term, bool done -> int total {
+module Accumulator_valid : int term, bool done -> int total {
     state int tot;
     initial tot = 0;
 
@@ -71,7 +71,7 @@ module Accumulator : int term, bool done -> int total {
 
 
 //timeline (a, true -> /) | (a, false -> /) .. (a, false -> r)* .. (a, true -> r)
-module blur : int a, bool done -> int result {
+module blur_valid : int a, bool done -> int result {
     state bool working;
     initial working = false;
     state int prev;
@@ -86,7 +86,7 @@ module blur : int a, bool done -> int result {
 
 
 //timeline (X -> X) .. (/ -> X) .. (/ -> X) .. (/ -> X)
-module Unpack4 : int[4] packed -> int out_stream {
+module Unpack4_valid : int[4] packed -> int out_stream {
     state int st;
     initial st = 0;
     state int[3] stored_packed;
@@ -110,7 +110,7 @@ module Unpack4 : int[4] packed -> int out_stream {
     }
 }
 
-module generative : int i -> int o, int o2 {
+module generative_valid : int i -> int o, int o2 {
     gen int x = 5;
     gen int[x] ys;
 
@@ -135,7 +135,7 @@ module generative : int i -> int o, int o2 {
     o2 = a[a[0]];
 }
 
-module add_stuff_to_indices : int[10] values -> int[10] added_values {
+module add_stuff_to_indices_valid : int[10] values -> int[10] added_values {
     for int i in 0..10 {
         int sum = values[i] + i;
         added_values[i] = sum;
@@ -144,7 +144,7 @@ module add_stuff_to_indices : int[10] values -> int[10] added_values {
 
 
 //timeline (bs -> /, true) | (bs -> v, false)
-module first_bit_idx_6 : bool[6] bits -> int first, bool all_zeros {
+module first_bit_idx_6_valid : bool[6] bits -> int first, bool all_zeros {
     if bits[0] {
         first = 0;
         all_zeros = false;
@@ -177,36 +177,36 @@ module first_bit_idx_6 : bool[6] bits -> int first, bool all_zeros {
 }
 
 
-module disjoint_ports : int a, int b, int c -> int result {
+module disjoint_ports_valid : int a, int b, int c -> int result {
     reg result = a + b;
     // don't touch c
 }
 
-module undeteriminable_input_latency : int a, int b -> int x, int y {
+module undeteriminable_input_latency_valid : int a, int b -> int x, int y {
     reg int a_d = a;
     reg int t = a_d + b;
     reg reg reg int a_ddd = a;
     x = t + a_ddd;
     y = t;
 }
 
-module determinable_input_latency : int a, int b -> int x, int y {
+module determinable_input_latency_valid : int a, int b -> int x, int y {
     reg int a_d = a;
     reg int t = a_d + b;
     reg reg int a_ddd = a;
     x = t + a_ddd;
     y = t;
 }
 
-module specified_input_latency : int a'0, int b'1 -> int x, int y {
+module specified_input_latency_valid : int a'0, int b'1 -> int x, int y {
     reg int a_d = a;
     reg int t = a_d + b;
     reg reg reg int a_ddd = a;
     x = t + a_ddd;
     y = t;
 }
 
-module bad_cycle : int a -> int r {
+module bad_cycle_valid : int a -> int r {
     state int test;
     initial test = 0;
 
@@ -216,7 +216,7 @@ module bad_cycle : int a -> int r {
     r = new_test;
 }
 
-module good_cycle : int a -> int r {
+module good_cycle_valid : int a -> int r {
     state int test;
     initial test = 0;
 
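The blanket `_valid` renames in this test file are presumably a consequence of the main change: now that every `.sus` file in the workspace is parsed into a single `Linker` at startup, the modules here would otherwise collide with identically named modules in other test files and trip the duplicate-declaration check.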
