Skip to content

Commit

Permalink
Move Span to its own file
Browse files Browse the repository at this point in the history
  • Loading branch information
VonTum committed Feb 20, 2024
1 parent 731d1f9 commit 610dc1b
Show file tree
Hide file tree
Showing 12 changed files with 123 additions and 109 deletions.
98 changes: 1 addition & 97 deletions src/ast.rs
Original file line number Diff line number Diff line change
@@ -1,105 +1,9 @@


use crate::{errors::ErrorCollector, flattening::FlattenedModule, instantiation::InstantiationList, linker::FileUUID, tokenizer::{get_token_type_name, TokenTypeIdx, TokenizeResult}, value::Value};
use crate::{errors::ErrorCollector, file_position::{BracketSpan, Span}, flattening::FlattenedModule, instantiation::InstantiationList, linker::FileUUID, tokenizer::{get_token_type_name, TokenTypeIdx}, value::Value};
use core::ops::Range;
use std::fmt::Display;


// Token span. Indices are INCLUSIVE: Span(a, b) covers tokens a..=b.
#[derive(Clone,Copy,Debug,PartialEq,Eq,Hash)]
pub struct Span(pub usize, pub usize);

impl Span {
    /// Projects this token span onto the per-token ranges in `tokens`
    /// (e.g. byte offsets), producing one range from the start of the first
    /// token to the end of the last. Panics if either index is out of bounds.
    pub fn to_range<T : Clone>(&self, tokens : &[Range<T>]) -> Range<T> {
        let min = tokens[self.0].start.clone();
        let max = tokens[self.1].end.clone();
        min..max
    }
    /// Smallest span covering both `left` and `right`.
    /// Requires `left` to start and end no later than `right` does (asserted).
    pub fn new_overarching(left : Span, right : Span) -> Span {
        assert!(left.0 <= right.0);
        assert!(left.1 <= right.1);
        Span(left.0, right.1)
    }
    /// Span covering exactly one token.
    pub fn new_single_token(tok_idx : usize) -> Span {
        Span(tok_idx, tok_idx)
    }
    /// Extends `left` rightwards so it also covers `tok_idx`.
    /// `tok_idx` must not lie before `left` (asserted via new_overarching).
    pub fn new_extend_to_include_token(left : Span, tok_idx : usize) -> Span {
        Span::new_overarching(left, Span::new_single_token(tok_idx))
    }
    /// Currently the identity — a placeholder that lets call sites express
    /// intent now; presumably becomes a real truncation once spans move to
    /// exclusive end indices (see the "temporary" notes in difference_*).
    pub fn dont_include_last_token(self) -> Span {
        self
    }
    /// Single-token span of just this span's final token.
    pub fn only_last_token(self) -> Span {
        Span(self.1, self.1)
    }
    /// Extends `right` leftwards so it also covers `tok_idx`.
    pub fn new_extend_before(tok_idx : usize, right : Span) -> Span {
        Span::new_overarching(Span::new_single_token(tok_idx), right)
    }
    /// Span from `start_tok` to `end_tok`, both inclusive.
    pub fn new_across_tokens(start_tok : usize, end_tok : usize) -> Span {
        assert!(start_tok <= end_tok);
        Span(start_tok, end_tok)
    }
    /// Span covering the whole tokenized file.
    /// NOTE(review): the end index is token_types.len() — one PAST the last
    /// token under inclusive indexing. Presumably deliberate to include the
    /// EOF entry (lsp.rs states token_positions.len() == tokens.len() + 1),
    /// but confirm before using this with `to_range` over token_types-sized
    /// slices.
    pub fn whole_file_span(tokens : &TokenizeResult) -> Span {
        Span(0, tokens.token_types.len())
    }
    /// True when `token_idx` lies within this span (endpoints inclusive).
    pub fn contains_token(&self, token_idx : usize) -> bool {
        token_idx >= self.0 && token_idx <= self.1
    }
    // Not really a useful quantity. Should only be used comparatively, find which is the nested-most span
    // (with inclusive endpoints this is one less than the covered token count)
    pub fn size(&self) -> usize {
        self.1 - self.0
    }
    /// Asserts this span covers exactly one token and returns its index.
    #[track_caller]
    pub fn assert_is_single_token(&self) -> usize {
        assert!(self.1 == self.0, "Span is not singleton! {}..{}", self.0, self.1);
        self.0
    }
    /// The token index if this span covers exactly one token, else None.
    pub fn is_single_token(&self) -> Option<usize> {
        if self.0 == self.1 {
            Some(self.0)
        } else {
            None
        }
    }
    /// The part of `outer` strictly before `inner`; `inner` must be contained
    /// in `outer` (asserted).
    /// NOTE(review): when `inner` starts at the same token as `outer`,
    /// `inner.0 - 1` underflows in debug builds — confirm callers only pass
    /// strictly-nested spans.
    pub fn difference_left(outer : Span, inner : Span) -> Span {
        assert!(outer.0 <= inner.0);
        assert!(outer.1 >= inner.1);

        Span(outer.0, inner.0 - 1) // temporary, because right now spans are still inclusive.
        // Span(outer.0, inner.0)
    }
    /// The part of `outer` strictly after `inner`; `inner` must be contained
    /// in `outer` (asserted).
    /// NOTE(review): mirror caveat of difference_left — `inner.1 + 1` can
    /// exceed `outer.1` when the two spans share an end token.
    pub fn difference_right(outer : Span, inner : Span) -> Span {
        assert!(outer.0 <= inner.0);
        assert!(outer.1 >= inner.1);

        Span(inner.1 + 1, outer.1) // temporary, because right now spans are still inclusive.
        // Span(inner.1, outer.1)
    }
}

impl IntoIterator for Span {
type Item = usize;

type IntoIter = <std::ops::Range<usize> as IntoIterator>::IntoIter;

fn into_iter(self) -> Self::IntoIter {
Range{start : self.0, end : self.1 + 1}.into_iter()
}
}

/// A span whose first and last tokens are a matched pair of brackets.
#[derive(Clone,Copy,Debug,PartialEq,Eq,Hash)]
pub struct BracketSpan(Span);

impl BracketSpan {
    /// Wraps `span`, whose endpoints must be the opening and closing bracket
    /// tokens (not checked here — callers guarantee it).
    pub fn from_outer(span : Span) -> Self {Self(span)}
    /// The span strictly between the two brackets.
    /// NOTE(review): for an empty pair `()` at tokens (a, a+1) this yields
    /// Span(a+1, a), an inverted span — confirm downstream code tolerates that.
    pub fn inner_span(&self) -> Span {
        Span(self.0.0 + 1, self.0.1 - 1)
    }
    /// The full span, bracket tokens included.
    pub fn outer_span(&self) -> Span {
        self.0
    }
}

#[derive(Debug,Clone,Copy,PartialEq,Eq)]
pub enum IdentifierType {
Input,
Expand Down
7 changes: 4 additions & 3 deletions src/dev_aid/lsp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ use lsp_types::notification::Notification;

use crate::{
arena_alloc::ArenaVector,
ast::{IdentifierType, Module, Span},
file_position::Span,
ast::{IdentifierType, Module},
dev_aid::syntax_highlighting::create_token_ide_info,
errors::{CompileError, ErrorCollector, ErrorLevel},
flattening::FlatID,
Expand Down Expand Up @@ -223,7 +224,7 @@ fn cvt_span_to_lsp_range(ch_sp : Span, tokens : &TokenizeResult) -> lsp_types::R

// Requires that token_positions.len() == tokens.len() + 1 to include EOF token
fn convert_diagnostic(err : CompileError, main_tokens : &TokenizeResult, linker : &Linker, uris : &ArenaVector<Url, FileUUIDMarker>) -> Diagnostic {
assert!(err.position.1 < main_tokens.token_types.len(), "bad error: {}", err.reason);
assert!(main_tokens.is_span_valid(err.position), "bad error: {}", err.reason);
let error_pos = cvt_span_to_lsp_range(err.position, main_tokens);

let severity = match err.level {
Expand All @@ -233,7 +234,7 @@ fn convert_diagnostic(err : CompileError, main_tokens : &TokenizeResult, linker
let mut related_info = Vec::new();
for info in err.infos {
let info_tokens = &linker.files[info.file].tokens;
assert!(info.position.1 < info_tokens.token_types.len(), "bad info: {}; in err: {}", info.info, err.reason);
assert!(info_tokens.is_span_valid(info.position), "bad info: {}; in err: {}", info.info, err.reason);
let info_pos = cvt_span_to_lsp_range(info.position, info_tokens);
let location = Location{uri : uris[info.file].clone(), range : info_pos};
related_info.push(DiagnosticRelatedInformation { location, message: info.info });
Expand Down
2 changes: 1 addition & 1 deletion src/dev_aid/syntax_highlighting.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@

use std::{ops::Range, path::PathBuf};

use crate::{ast::*, tokenizer::*, parser::*, linker::{FileData, Linker, FileUUIDMarker, FileUUID, NameElem}, arena_alloc::ArenaVector, flattening::{Instruction, WireSource}};
use crate::{arena_alloc::ArenaVector, ast::*, file_position::Span, flattening::{Instruction, WireSource}, linker::{FileData, FileUUID, FileUUIDMarker, Linker, NameElem}, parser::*, tokenizer::*};

use ariadne::FileCache;
use console::Style;
Expand Down
2 changes: 1 addition & 1 deletion src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

use std::{ops::Range, path::{Path, PathBuf}, cell::{RefCell, Cell}};

use crate::{ast::Span, linker::{FileUUID, FileUUIDMarker}, arena_alloc::ArenaVector};
use crate::{linker::{FileUUID, FileUUIDMarker}, arena_alloc::ArenaVector, file_position::Span};
use ariadne::*;

use crate::tokenizer::{TokenTypeIdx, get_token_type_name};
Expand Down
104 changes: 104 additions & 0 deletions src/file_position.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
use std::ops::Range;

use crate::tokenizer::TokenizeResult;



// Token span. Indices are INCLUSIVE: Span(a, b) covers tokens a..=b.
#[derive(Clone,Copy,Debug,PartialEq,Eq,Hash)]
pub struct Span(pub usize, pub usize);

impl Span {
    /// Sentinel "covers everything" span, used only comparatively (e.g. as
    /// the initial best_span in linker.rs). Do NOT iterate it or feed it to
    /// difference_right: an end index of usize::MAX would overflow there.
    pub const MAX_POSSIBLE_SPAN : Span = Span(0, usize::MAX);

    /// Projects this token span onto the per-token ranges in `tokens`
    /// (e.g. byte offsets), producing one range from the start of the first
    /// token to the end of the last. Panics if either index is out of bounds.
    pub fn to_range<T : Clone>(&self, tokens : &[Range<T>]) -> Range<T> {
        let min = tokens[self.0].start.clone();
        let max = tokens[self.1].end.clone();
        min..max
    }
    /// Smallest span covering both `left` and `right`.
    /// Requires `left` to start and end no later than `right` does (asserted).
    pub fn new_overarching(left : Span, right : Span) -> Span {
        assert!(left.0 <= right.0);
        assert!(left.1 <= right.1);
        Span(left.0, right.1)
    }
    /// Span covering exactly one token.
    pub fn new_single_token(tok_idx : usize) -> Span {
        Span(tok_idx, tok_idx)
    }
    /// Extends `left` rightwards so it also covers `tok_idx`.
    /// `tok_idx` must not lie before `left` (asserted via new_overarching).
    pub fn new_extend_to_include_token(left : Span, tok_idx : usize) -> Span {
        Span::new_overarching(left, Span::new_single_token(tok_idx))
    }
    /// Currently the identity — a placeholder that lets call sites express
    /// intent now; presumably becomes a real truncation once spans move to
    /// exclusive end indices (see the "temporary" notes in difference_*).
    pub fn dont_include_last_token(self) -> Span {
        self
    }
    /// Single-token span of just this span's final token.
    pub fn only_last_token(self) -> Span {
        Span(self.1, self.1)
    }
    /// Extends `right` leftwards so it also covers `tok_idx`.
    pub fn new_extend_before(tok_idx : usize, right : Span) -> Span {
        Span::new_overarching(Span::new_single_token(tok_idx), right)
    }
    /// Span from `start_tok` to `end_tok`, both inclusive.
    pub fn new_across_tokens(start_tok : usize, end_tok : usize) -> Span {
        assert!(start_tok <= end_tok);
        Span(start_tok, end_tok)
    }
    /// Span covering the whole tokenized file.
    /// NOTE(review): the end index is token_types.len() — one PAST the last
    /// token under inclusive indexing. Presumably deliberate to include the
    /// EOF entry (lsp.rs states token_positions.len() == tokens.len() + 1),
    /// yet such a span fails TokenizeResult::is_span_valid — confirm which
    /// behavior is intended.
    pub fn whole_file_span(tokens : &TokenizeResult) -> Span {
        Span(0, tokens.token_types.len())
    }
    /// True when `token_idx` lies within this span (endpoints inclusive).
    pub fn contains_token(&self, token_idx : usize) -> bool {
        token_idx >= self.0 && token_idx <= self.1
    }
    // Not really a useful quantity. Should only be used comparatively, find which is the nested-most span
    // (with inclusive endpoints this is one less than the covered token count)
    pub fn size(&self) -> usize {
        self.1 - self.0
    }
    /// Asserts this span covers exactly one token and returns its index.
    #[track_caller]
    pub fn assert_is_single_token(&self) -> usize {
        assert!(self.1 == self.0, "Span is not singleton! {}..{}", self.0, self.1);
        self.0
    }
    /// The token index if this span covers exactly one token, else None.
    pub fn is_single_token(&self) -> Option<usize> {
        if self.0 == self.1 {
            Some(self.0)
        } else {
            None
        }
    }
    /// The part of `outer` strictly before `inner`; `inner` must be contained
    /// in `outer` (asserted).
    /// NOTE(review): when `inner` starts at the same token as `outer`,
    /// `inner.0 - 1` underflows in debug builds — confirm callers only pass
    /// strictly-nested spans.
    pub fn difference_left(outer : Span, inner : Span) -> Span {
        assert!(outer.0 <= inner.0);
        assert!(outer.1 >= inner.1);

        Span(outer.0, inner.0 - 1) // temporary, because right now spans are still inclusive.
        // Span(outer.0, inner.0)
    }
    /// The part of `outer` strictly after `inner`; `inner` must be contained
    /// in `outer` (asserted).
    /// NOTE(review): mirror caveat of difference_left — `inner.1 + 1` can
    /// exceed `outer.1` when the two spans share an end token.
    pub fn difference_right(outer : Span, inner : Span) -> Span {
        assert!(outer.0 <= inner.0);
        assert!(outer.1 >= inner.1);

        Span(inner.1 + 1, outer.1) // temporary, because right now spans are still inclusive.
        // Span(inner.1, outer.1)
    }
}

impl IntoIterator for Span {
type Item = usize;

type IntoIter = <std::ops::Range<usize> as IntoIterator>::IntoIter;

fn into_iter(self) -> Self::IntoIter {
Range{start : self.0, end : self.1 + 1}.into_iter()
}
}

/// A span whose first and last tokens are a matched pair of brackets.
#[derive(Clone,Copy,Debug,PartialEq,Eq,Hash)]
pub struct BracketSpan(Span);

impl BracketSpan {
    /// Wraps `span`, whose endpoints must be the opening and closing bracket
    /// tokens (not checked here — callers guarantee it).
    pub fn from_outer(span : Span) -> Self {Self(span)}
    /// The span strictly between the two brackets.
    /// NOTE(review): for an empty pair `()` at tokens (a, a+1) this yields
    /// Span(a+1, a), an inverted span — confirm downstream code tolerates that.
    pub fn inner_span(&self) -> Span {
        Span(self.0.0 + 1, self.0.1 - 1)
    }
    /// The full span, bracket tokens included.
    pub fn outer_span(&self) -> Span {
        self.0
    }
}


2 changes: 1 addition & 1 deletion src/flattening/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ pub mod name_context;
use std::{ops::Deref, iter::zip};

use crate::{
arena_alloc::{ArenaAllocator, FlatAlloc, UUIDMarker, UUIDRange, UUID}, ast::{AssignableExpressionModifiers, BracketSpan, CodeBlock, Expression, Identifier, IdentifierType, InterfacePorts, LeftExpression, Module, Operator, SignalDeclaration, Span, SpanExpression, SpanTypeExpression, Statement, TypeExpression}, errors::{error_info, ErrorCollector, ErrorInfo}, linker::{ConstantUUID, FileUUID, GlobalResolver, Linker, ModuleUUID, NameElem, NamedConstant, NamedType, ResolvedGlobals, ResolvedNameElem, TypeUUIDMarker}, tokenizer::TOKEN_IDENTIFIER, typing::{get_binary_operator_types, typecheck, typecheck_is_array_indexer, typecheck_unary_operator, Type, WrittenType, BOOL_TYPE, INT_TYPE}, value::Value
arena_alloc::{ArenaAllocator, FlatAlloc, UUIDMarker, UUIDRange, UUID}, ast::{AssignableExpressionModifiers, CodeBlock, Expression, Identifier, IdentifierType, InterfacePorts, LeftExpression, Module, Operator, SignalDeclaration, SpanExpression, SpanTypeExpression, Statement, TypeExpression}, errors::{error_info, ErrorCollector, ErrorInfo}, file_position::{BracketSpan, Span}, linker::{ConstantUUID, FileUUID, GlobalResolver, Linker, ModuleUUID, NameElem, NamedConstant, NamedType, ResolvedGlobals, ResolvedNameElem, TypeUUIDMarker}, tokenizer::TOKEN_IDENTIFIER, typing::{get_binary_operator_types, typecheck, typecheck_is_array_indexer, typecheck_unary_operator, Type, WrittenType, BOOL_TYPE, INT_TYPE}, value::Value
};

use self::name_context::LocalVariableContext;
Expand Down
2 changes: 1 addition & 1 deletion src/instantiation/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use std::{cell::RefCell, cmp::max, iter::zip, ops::Deref, rc::Rc};

use num::BigInt;

use crate::{arena_alloc::{FlatAlloc, UUIDMarker, UUIDRange, UUID}, ast::{IdentifierType, InterfacePorts, Operator, Span}, errors::ErrorCollector, flattening::{ConnectionWritePathElement, ConnectionWritePathElementComputed, FlatID, FlatIDMarker, FlatIDRange, FlattenedModule, Instruction, WireInstance, WireSource, Write, WriteType}, instantiation::latency_algorithm::{convert_fanin_to_fanout, solve_latencies, FanInOut, LatencyCountingError}, linker::{Linker, NamedConstant}, list_of_lists::ListOfLists, tokenizer::kw, typing::{ConcreteType, Type, BOOL_CONCRETE_TYPE, INT_CONCRETE_TYPE}, value::{compute_binary_op, compute_unary_op, Value}};
use crate::{arena_alloc::{FlatAlloc, UUIDMarker, UUIDRange, UUID}, ast::{IdentifierType, InterfacePorts, Operator}, errors::ErrorCollector, file_position::Span, flattening::{ConnectionWritePathElement, ConnectionWritePathElementComputed, FlatID, FlatIDMarker, FlatIDRange, FlattenedModule, Instruction, WireInstance, WireSource, Write, WriteType}, instantiation::latency_algorithm::{convert_fanin_to_fanout, solve_latencies, FanInOut, LatencyCountingError}, linker::{Linker, NamedConstant}, list_of_lists::ListOfLists, tokenizer::kw, typing::{ConcreteType, Type, BOOL_CONCRETE_TYPE, INT_CONCRETE_TYPE}, value::{compute_binary_op, compute_unary_op, Value}};

use self::latency_algorithm::SpecifiedLatency;

Expand Down
4 changes: 2 additions & 2 deletions src/linker.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::{collections::{HashMap, HashSet}, rc::Rc, cell::RefCell};

use crate::{arena_alloc::{ArenaAllocator, UUIDMarker, UUID}, ast::{LinkInfo, Module, Span}, errors::{error_info, ErrorCollector}, flattening::{FlatID, FlattenedModule, Instruction, WireInstance, WireSource}, instantiation::InstantiatedModule, parser::{FullParseResult, TokenTreeNode}, tokenizer::TokenizeResult, typing::{Type, WrittenType}, util::{const_str_position, const_str_position_in_tuples}, value::Value};
use crate::{arena_alloc::{ArenaAllocator, UUIDMarker, UUID}, ast::{LinkInfo, Module}, errors::{error_info, ErrorCollector}, file_position::Span, flattening::{FlatID, FlattenedModule, Instruction, WireInstance, WireSource}, instantiation::InstantiatedModule, parser::{FullParseResult, TokenTreeNode}, tokenizer::TokenizeResult, typing::{Type, WrittenType}, util::{const_str_position, const_str_position_in_tuples}, value::Value};

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ModuleUUIDMarker;
Expand Down Expand Up @@ -464,7 +464,7 @@ impl<'linker> LocationInfoBuilder<'linker> {
fn new(token_idx : usize) -> Self {
Self{
best_instruction : None,
best_span : Span(0, usize::MAX),
best_span : Span::MAX_POSSIBLE_SPAN,
token_idx
}
}
Expand Down
1 change: 1 addition & 0 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ mod block_vector;
mod arena_alloc;
mod list_of_lists;

mod file_position;
mod tokenizer;
mod parser;
mod errors;
Expand Down
2 changes: 1 addition & 1 deletion src/parser.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@

use num::BigInt;

use crate::{tokenizer::*, errors::*, ast::*, linker::FileUUID, flattening::FlattenedModule, instantiation::InstantiationList, value::Value};
use crate::{ast::*, errors::*, file_position::{BracketSpan, Span}, flattening::FlattenedModule, instantiation::InstantiationList, linker::FileUUID, tokenizer::*, value::Value};

use std::{iter::Peekable, ops::Range, str::FromStr};
use core::slice::Iter;
Expand Down
6 changes: 5 additions & 1 deletion src/tokenizer.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
use std::ops::Range;
use std::str::CharIndices;

use crate::ast::Span;
use crate::errors::ErrorCollector;
use crate::file_position::Span;
use crate::util::const_str_position_in_tuples;

pub type TokenTypeIdx = u8;
Expand Down Expand Up @@ -282,6 +282,10 @@ impl TokenizeResult {
}
}
}

/// True when `span` fits within this file's tokens, i.e. its (inclusive)
/// end index refers to an existing token.
/// NOTE(review): only the end index is checked (start <= end is an invariant
/// upheld by Span's constructors). Span::whole_file_span — whose end index is
/// token_types.len() — is rejected by this check; confirm that's intended.
pub fn is_span_valid(&self, span : Span) -> bool {
    span.1 < self.token_types.len()
}
}

pub fn tokenize<'txt>(file_text : &'txt str, errors : &ErrorCollector) -> TokenizeResult {
Expand Down
2 changes: 1 addition & 1 deletion src/typing.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::ops::Deref;

use crate::{ast::{Operator, Span}, linker::{get_builtin_type, TypeUUID, Linker, Linkable, NamedType, TypeUUIDMarker}, tokenizer::kw, flattening::FlatID, errors::ErrorCollector, value::Value, arena_alloc::ArenaAllocator};
use crate::{arena_alloc::ArenaAllocator, ast::Operator, errors::ErrorCollector, file_position::Span, flattening::FlatID, linker::{get_builtin_type, Linkable, Linker, NamedType, TypeUUID, TypeUUIDMarker}, tokenizer::kw, value::Value};

// These are
#[derive(Debug, Clone)]
Expand Down

0 comments on commit 610dc1b

Please sign in to comment.