Add file to error messages #1737

Merged · 13 commits · Nov 22, 2023

Changes from all commits
1 change: 1 addition & 0 deletions src/assignment_resolver.rs
@@ -42,6 +42,7 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> {
         column: 0,
         length: 0,
         kind: TokenKind::Unspecified,
+        path: "".as_ref(),
       };
       return Err(CompileError::new(token, Internal { message }));
     }
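
One detail worth calling out: `"".as_ref()` works because the standard library implements `AsRef<Path>` for `str`, so the empty literal coerces to the `&Path` the new field expects. A minimal, self-contained check of that assumption:

use std::path::Path;

fn main() {
  // `str` implements `AsRef<Path>`, so an empty string literal can
  // stand in for a path on a synthetic token.
  let path: &Path = "".as_ref();
  assert_eq!(path, Path::new(""));
  assert!(path.as_os_str().is_empty());
}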
4 changes: 4 additions & 0 deletions src/color.rs
@@ -60,6 +60,10 @@ impl Color {
     self.redirect(Stream::Stdout)
   }
 
+  pub(crate) fn context(self) -> Self {
+    self.restyle(Style::new().fg(Blue).bold())
+  }
+
   pub(crate) fn doc(self) -> Self {
     self.restyle(Style::new().fg(Blue))
   }
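
The new `context` style is what paints the `-->` and `|` gutter markers in the token display change further down. A standalone sketch of the same styling, assuming `Style` and `Blue` come from the `ansi_term` crate the surrounding code appears to use:

use ansi_term::{Colour::Blue, Style};

fn main() {
  // Bold blue, matching `Color::context()` above.
  let context = Style::new().fg(Blue).bold();
  println!("{}", context.paint("--> justfile:1:1"));
}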
8 changes: 4 additions & 4 deletions src/compiler.rs
@@ -15,8 +15,8 @@ impl Compiler {
     paths.push(root.into());
 
     while let Some(current) = paths.pop() {
-      let src = loader.load(&current)?;
-      let tokens = Lexer::lex(src)?;
+      let (relative, src) = loader.load(root, &current)?;
+      let tokens = Lexer::lex(relative, src)?;
       let mut ast = Parser::parse(&tokens)?;
 
       srcs.insert(current.clone(), src);
@@ -56,9 +56,9 @@ impl Compiler {
 
   #[cfg(test)]
   pub(crate) fn test_compile(src: &str) -> CompileResult<Justfile> {
-    let tokens = Lexer::lex(src)?;
+    let tokens = Lexer::test_lex(src)?;
     let ast = Parser::parse(&tokens)?;
-    let root = PathBuf::from("<ROOT>");
+    let root = PathBuf::from("justfile");
     let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
     asts.insert(root.clone(), ast);
     Analyzer::analyze(&asts, &root)
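
The compile loop itself is unchanged; what's new is that `load` now hands back a display path alongside the source, and `Lexer::lex` threads that path into every token it produces. Renaming the test root from `<ROOT>` to `justfile` keeps error output in tests identical to what users see for a default justfile.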
68 changes: 43 additions & 25 deletions src/lexer.rs
@@ -9,38 +9,45 @@ use {super::*, CompileErrorKind::*, TokenKind::*};
 /// slight against regular expressions, the lexer was just idiosyncratically
 /// bad.
 pub(crate) struct Lexer<'src> {
-  /// Source text
-  src: &'src str,
   /// Char iterator
   chars: Chars<'src>,
-  /// Tokens
-  tokens: Vec<Token<'src>>,
-  /// Current token start
-  token_start: Position,
-  /// Current token end
-  token_end: Position,
-  /// Next character to be lexed
-  next: Option<char>,
-  /// Next indent will start a recipe body
-  recipe_body_pending: bool,
-  /// Inside recipe body
-  recipe_body: bool,
   /// Indentation stack
   indentation: Vec<&'src str>,
   /// Interpolation token start stack
   interpolation_stack: Vec<Token<'src>>,
+  /// Next character to be lexed
+  next: Option<char>,
   /// Current open delimiters
   open_delimiters: Vec<(Delimiter, usize)>,
+  /// Path to source file
+  path: &'src Path,
+  /// Inside recipe body
+  recipe_body: bool,
+  /// Next indent will start a recipe body
+  recipe_body_pending: bool,
+  /// Source text
+  src: &'src str,
+  /// Tokens
+  tokens: Vec<Token<'src>>,
+  /// Current token end
+  token_end: Position,
+  /// Current token start
+  token_start: Position,
 }
 
 impl<'src> Lexer<'src> {
-  /// Lex `text`
-  pub(crate) fn lex(src: &'src str) -> CompileResult<Vec<Token<'src>>> {
-    Lexer::new(src).tokenize()
+  /// Lex `src`
+  pub(crate) fn lex(path: &'src Path, src: &'src str) -> CompileResult<'src, Vec<Token<'src>>> {
+    Lexer::new(path, src).tokenize()
+  }
+
+  #[cfg(test)]
+  pub(crate) fn test_lex(src: &'src str) -> CompileResult<'src, Vec<Token<'src>>> {
+    Lexer::new("justfile".as_ref(), src).tokenize()
   }
 
-  /// Create a new Lexer to lex `text`
-  fn new(src: &'src str) -> Lexer<'src> {
+  /// Create a new Lexer to lex `src`
+  fn new(path: &'src Path, src: &'src str) -> Lexer<'src> {
     let mut chars = src.chars();
     let next = chars.next();
 
@@ -62,6 +69,7 @@
       chars,
       next,
       src,
+      path,
     }
   }
 
@@ -189,6 +197,7 @@ impl<'src> Lexer<'src> {
       src: self.src,
       length: self.token_end.offset - self.token_start.offset,
       kind,
+      path: self.path,
     });
 
     // Set `token_start` to point after the lexed token
@@ -205,6 +214,7 @@ impl<'src> Lexer<'src> {
       column: self.token_end.column,
      length: 0,
       kind: Unspecified,
+      path: self.path,
     };
     CompileError::new(
       token,
@@ -240,6 +250,7 @@ impl<'src> Lexer<'src> {
       line: self.token_start.line,
       column: self.token_start.column,
       length,
+      path: self.path,
     };
 
     CompileError::new(token, kind)
@@ -920,7 +931,7 @@ mod tests {
       text.to_owned()
     };
 
-    let have = Lexer::lex(&text).unwrap();
+    let have = Lexer::test_lex(&text).unwrap();
 
     let have_kinds = have
       .iter()
@@ -1028,7 +1039,7 @@ mod tests {
     length: usize,
     kind: CompileErrorKind,
   ) {
-    match Lexer::lex(src) {
+    match Lexer::test_lex(src) {
       Ok(_) => panic!("Lexing succeeded but expected"),
       Err(have) => {
         let want = CompileError {
@@ -1039,6 +1050,7 @@
             line,
             column,
             length,
+            path: "justfile".as_ref(),
           },
           kind: Box::new(kind),
         };
@@ -2321,7 +2333,9 @@
 
   #[test]
   fn presume_error() {
-    let compile_error = Lexer::new("!").presume('-').unwrap_err();
+    let compile_error = Lexer::new("justfile".as_ref(), "!")
+      .presume('-')
+      .unwrap_err();
     assert_matches!(
       compile_error.token,
       Token {
@@ -2331,6 +2345,7 @@
         length: 0,
         src: "!",
         kind: Unspecified,
+        path: _,
       }
     );
     assert_matches!(&*compile_error.kind,
@@ -2342,9 +2357,12 @@
       Error::Compile { compile_error }
         .color_display(Color::never())
         .to_string(),
-      "error: Internal error, this may indicate a bug in just: \
-       Lexer presumed character `-`\nconsider filing an issue: \
-       https://github.com/casey/just/issues/new\n |\n1 | !\n | ^"
+      "error: Internal error, this may indicate a bug in just: Lexer presumed character `-`
+consider filing an issue: https://github.com/casey/just/issues/new
+ --> justfile:1:1
+  |
+1 | !
+  | ^"
     );
   }
 }
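
A side note on the rewritten expectation at the end: the old value was a single logical line using `\` continuations and embedded `\n` escapes, while the new value is a plain multiline literal whose line breaks are part of the string. A quick illustration of the difference between the two forms:

fn main() {
  // A `\` at the end of a line elides the newline and any leading
  // whitespace on the following line...
  let continued = "a \
                   b";
  assert_eq!(continued, "a b");

  // ...while a bare multiline literal keeps the newline verbatim.
  let multiline = "a
b";
  assert_eq!(multiline, "a\nb");
}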
20 changes: 16 additions & 4 deletions src/loader.rs
@@ -1,22 +1,34 @@
 use super::*;
 
 pub(crate) struct Loader {
-  arena: Arena<String>,
+  srcs: Arena<String>,
+  paths: Arena<PathBuf>,
 }
 
 impl Loader {
   pub(crate) fn new() -> Self {
     Loader {
-      arena: Arena::new(),
+      srcs: Arena::new(),
+      paths: Arena::new(),
     }
   }
 
-  pub(crate) fn load<'src>(&'src self, path: &Path) -> RunResult<&'src str> {
+  pub(crate) fn load<'src>(
+    &'src self,
+    root: &Path,
+    path: &Path,
+  ) -> RunResult<(&'src Path, &'src str)> {
     let src = fs::read_to_string(path).map_err(|io_error| Error::Load {
       path: path.to_owned(),
       io_error,
     })?;
 
-    Ok(self.arena.alloc(src))
+    let relative = if let Ok(path) = path.strip_prefix(root.parent().unwrap()) {
+      path
+    } else {
+      path
+    };
+
+    Ok((self.paths.alloc(relative.into()), self.srcs.alloc(src)))
   }
 }
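
The `relative` computation is what shortens paths in error output: when the loaded file lives under the directory containing the root justfile, that prefix is stripped; otherwise the full path is kept (the `if let` shadows `path` with the stripped version, and the `else` arm falls back to the original). A standalone sketch of that behavior, with illustrative paths:

use std::path::Path;

fn relative<'a>(root: &Path, path: &'a Path) -> &'a Path {
  // Strip the root justfile's parent directory when possible,
  // falling back to the full path.
  match root.parent().and_then(|dir| path.strip_prefix(dir).ok()) {
    Some(stripped) => stripped,
    None => path,
  }
}

fn main() {
  let root = Path::new("/project/justfile");
  assert_eq!(
    relative(root, Path::new("/project/sub/justfile")),
    Path::new("sub/justfile")
  );
  assert_eq!(
    relative(root, Path::new("/elsewhere/justfile")),
    Path::new("/elsewhere/justfile")
  );
}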
15 changes: 9 additions & 6 deletions src/name.rs
@@ -4,10 +4,11 @@ use super::*;
 /// it its own type for clarity.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
 pub(crate) struct Name<'src> {
-  pub(crate) offset: usize,
+  pub(crate) column: usize,
   pub(crate) length: usize,
   pub(crate) line: usize,
-  pub(crate) column: usize,
+  pub(crate) offset: usize,
+  pub(crate) path: &'src Path,
   pub(crate) src: &'src str,
 }
 
@@ -20,22 +21,24 @@ impl<'src> Name<'src> {
   /// Turn this name back into a token
   pub(crate) fn token(&self) -> Token<'src> {
     Token {
+      column: self.column,
       kind: TokenKind::Identifier,
-      offset: self.offset,
       length: self.length,
       line: self.line,
-      column: self.column,
+      offset: self.offset,
+      path: self.path,
       src: self.src,
     }
   }
 
   pub(crate) fn from_identifier(token: Token<'src>) -> Name {
     assert_eq!(token.kind, TokenKind::Identifier);
     Name {
-      offset: token.offset,
+      column: token.column,
       length: token.length,
       line: token.line,
-      column: token.column,
+      offset: token.offset,
+      path: token.path,
       src: token.src,
     }
   }
 
5 changes: 3 additions & 2 deletions src/parser.rs
@@ -927,7 +927,7 @@ mod tests {
 
   fn test(text: &str, want: Tree) {
     let unindented = unindent(text);
-    let tokens = Lexer::lex(&unindented).expect("lexing failed");
+    let tokens = Lexer::test_lex(&unindented).expect("lexing failed");
     let justfile = Parser::parse(&tokens).expect("parsing failed");
     let have = justfile.tree();
     if have != want {
@@ -964,7 +964,7 @@
     length: usize,
     kind: CompileErrorKind,
   ) {
-    let tokens = Lexer::lex(src).expect("Lexing failed in parse test...");
+    let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
 
     match Parser::parse(&tokens) {
       Ok(_) => panic!("Parsing unexpectedly succeeded"),
@@ -977,6 +977,7 @@
             line,
             column,
             length,
+            path: "justfile".as_ref(),
          },
          kind: Box::new(kind),
        };
5 changes: 3 additions & 2 deletions src/testing.rs
@@ -57,11 +57,11 @@ pub(crate) fn analysis_error(
   length: usize,
   kind: CompileErrorKind,
 ) {
-  let tokens = Lexer::lex(src).expect("Lexing failed in parse test...");
+  let tokens = Lexer::test_lex(src).expect("Lexing failed in parse test...");
 
   let ast = Parser::parse(&tokens).expect("Parsing failed in analysis test...");
 
-  let root = PathBuf::from("<ROOT>");
+  let root = PathBuf::from("justfile");
   let mut asts: HashMap<PathBuf, Ast> = HashMap::new();
   asts.insert(root.clone(), ast);
 
@@ -76,6 +76,7 @@ pub(crate) fn analysis_error(
       line,
       column,
       length,
+      path: "justfile".as_ref(),
     },
     kind: Box::new(kind),
   };
39 changes: 33 additions & 6 deletions src/token.rs
@@ -2,12 +2,13 @@ use super::*;
 
 #[derive(Debug, PartialEq, Clone, Copy)]
 pub(crate) struct Token<'src> {
-  pub(crate) offset: usize,
+  pub(crate) column: usize,
+  pub(crate) kind: TokenKind,
   pub(crate) length: usize,
   pub(crate) line: usize,
-  pub(crate) column: usize,
+  pub(crate) offset: usize,
+  pub(crate) path: &'src Path,
   pub(crate) src: &'src str,
-  pub(crate) kind: TokenKind,
 }
 
 impl<'src> Token<'src> {
@@ -52,9 +53,35 @@ impl<'src> ColorDisplay for Token<'src> {
       i += c.len_utf8();
     }
     let line_number_width = line_number.to_string().len();
-    writeln!(f, "{0:1$} |", "", line_number_width)?;
-    writeln!(f, "{line_number} | {space_line}")?;
-    write!(f, "{0:1$} |", "", line_number_width)?;
+    writeln!(
+      f,
+      "{:width$}{} {}:{}:{}",
+      "",
+      color.context().paint("-->"),
+      self.path.display(),
+      line_number,
+      self.column.ordinal(),
+      width = line_number_width
+    )?;
+    writeln!(
+      f,
+      "{:width$} {}",
+      "",
+      color.context().paint("|"),
+      width = line_number_width
+    )?;
+    writeln!(
+      f,
+      "{} {space_line}",
+      color.context().paint(&format!("{line_number} |"))
+    )?;
+    write!(
+      f,
+      "{:width$} {}",
+      "",
+      color.context().paint("|"),
+      width = line_number_width
+    )?;
     write!(
       f,
       " {0:1$}{2}{3:^<4$}{5}",
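
Taken together, the context line, gutter, and caret now render in the rustc-style layout asserted by the presume_error test above. A minimal sketch of the layout arithmetic, using a hypothetical token at line 3, column 7 of dir/justfile (1-indexed for display):

fn main() {
  // Hypothetical location; the real values come from the Token itself.
  let (path, line, column) = ("dir/justfile", 3usize, 7usize);
  let text = "foo bar";
  let width = line.to_string().len();

  // ` --> path:line:column`, an aligned `|` gutter around the offending
  // source line, then a caret under the column.
  println!("{:width$}{} {}:{}:{}", "", "-->", path, line, column);
  println!("{:width$} |", "");
  println!("{line} | {text}");
  println!("{:width$} | {caret:>column$}", "", caret = "^");

  // Prints:
  //  --> dir/justfile:3:7
  //   |
  // 3 | foo bar
  //   |       ^
}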