Properly integrated Linker into everything now
VonTum committed Sep 24, 2023
1 parent 74f18c3 commit da75785
Showing 9 changed files with 247 additions and 164 deletions.
6 changes: 6 additions & 0 deletions multiply_add.sus
@@ -152,6 +152,8 @@ module exists : hello a -> int result {

}

module hello : int a -> int b {}

module test_exists : -> int result {
int x = exists(5);
int b = doesnt_exist(3);
@@ -160,3 +162,7 @@ module test_exists : -> int result {
module exists : duplicate a {
// Should be a duplicate of previous exists
}

module use_other_file : int a -> int r {
r = hello_from_the_other_side(a);
}
7 changes: 6 additions & 1 deletion resetNormalizer.sus
@@ -1,4 +1,4 @@

/*
clocked module resetNormalizer :
bool resetn -> bool reg rst, bool reg isInitialized;

@@ -15,3 +15,8 @@ if(!resetn) {
}

endmodule
*/

module hello_from_the_other_side : int a -> int result {
result = a;
}
60 changes: 50 additions & 10 deletions src/arena_alloc.rs
@@ -46,6 +46,10 @@ impl<T, IndexMarker> ArenaAllocator<T, IndexMarker> {
l
}, PhantomData)
}
pub fn revert_to_reservation(&mut self, UUID(uuid, _) : UUID<IndexMarker>) {
assert!(self.data[uuid].is_some());
self.data[uuid] = None;
}
pub fn alloc_reservation(&mut self, UUID(uuid, _) : UUID<IndexMarker>, v : T) {
assert!(self.data[uuid].is_none());
self.data[uuid] = Some(v);
@@ -54,6 +58,12 @@ impl<T, IndexMarker> ArenaAllocator<T, IndexMarker> {
self.free_slots.push(uuid);
std::mem::replace(&mut self.data[uuid], None).unwrap()
}
pub fn iter<'a>(&'a self) -> ArenaIterator<'a, T, IndexMarker> {
self.into_iter()
}
pub fn iter_mut<'a>(&'a mut self) -> ArenaIteratorMut<'a, T, IndexMarker> {
self.into_iter()
}
}

impl<T, IndexMarker> Index<UUID<IndexMarker>> for ArenaAllocator<T, IndexMarker> {
@@ -138,33 +148,63 @@ impl<'a, T, IndexMarker> IntoIterator for &'a mut ArenaAllocator<T, IndexMarker>
}
}

pub struct ArenaVector<T : Default, IndexMarker> {
data : Vec<T>,
pub struct ArenaVector<T, IndexMarker> {
data : Vec<Option<T>>,
_ph : PhantomData<IndexMarker>
}

impl<T : Default, IndexMarker> ArenaVector<T, IndexMarker> {
impl<T, IndexMarker> ArenaVector<T, IndexMarker> {
pub fn new() -> Self {
Self{data : Vec::new(), _ph : PhantomData}
}
pub fn insert(&mut self, UUID(uuid, _) : UUID<IndexMarker>, value : T) {
if uuid >= self.data.len() {
self.data.resize_with(uuid+1, Default::default);
self.data.resize_with(uuid+1, || None);
}
self.data[uuid] = value;
assert!(self.data[uuid].is_none());
self.data[uuid] = Some(value);
}
pub fn remove(&mut self, UUID(uuid, _) : UUID<IndexMarker>) {
self.data[uuid] = Default::default();
self.data[uuid] = None;
}
pub fn iter<'a>(&'a self) -> ArenaIterator<'a, T, IndexMarker> {
self.into_iter()
}
pub fn iter_mut<'a>(&'a mut self) -> ArenaIteratorMut<'a, T, IndexMarker> {
self.into_iter()
}
}

impl<T : Default, IndexMarker> Index<UUID<IndexMarker>> for ArenaVector<T, IndexMarker> {
impl<T, IndexMarker> Index<UUID<IndexMarker>> for ArenaVector<T, IndexMarker> {
type Output = T;

fn index(&self, UUID(uuid, _): UUID<IndexMarker>) -> &Self::Output {
&self.data[uuid]
self.data[uuid].as_ref().unwrap()
}
}

impl<T : Default, IndexMarker> IndexMut<UUID<IndexMarker>> for ArenaVector<T, IndexMarker> {
impl<T, IndexMarker> IndexMut<UUID<IndexMarker>> for ArenaVector<T, IndexMarker> {
fn index_mut(&mut self, UUID(uuid, _): UUID<IndexMarker>) -> &mut Self::Output {
&mut self.data[uuid]
self.data[uuid].as_mut().unwrap()
}
}

impl<'a, T, IndexMarker> IntoIterator for &'a ArenaVector<T, IndexMarker> {
type Item = (UUID<IndexMarker>, &'a T);

type IntoIter = ArenaIterator<'a, T, IndexMarker>;

fn into_iter(self) -> Self::IntoIter {
ArenaIterator{it : self.data.iter().enumerate(), _ph : PhantomData}
}
}

impl<'a, T, IndexMarker> IntoIterator for &'a mut ArenaVector<T, IndexMarker> {
type Item = (UUID<IndexMarker>, &'a mut T);

type IntoIter = ArenaIteratorMut<'a, T, IndexMarker>;

fn into_iter(self) -> Self::IntoIter {
ArenaIteratorMut{it : self.data.iter_mut().enumerate(), _ph : PhantomData}
}
}
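
For context on the `ArenaVector` change above: dropping the `T : Default` bound in favor of `Vec<Option<T>>` makes empty slots explicit, so `insert` can assert that a slot is actually free and iteration can skip holes instead of yielding default values. A minimal self-contained sketch of that pattern, using a hypothetical `SparseVec` rather than the real `ArenaVector`:

```rust
// A minimal sketch of the Option<T>-backed slot pattern; `SparseVec` is a
// hypothetical stand-in for ArenaVector, not the actual type.
struct SparseVec<T> {
    data: Vec<Option<T>>,
}

impl<T> SparseVec<T> {
    fn new() -> Self {
        Self { data: Vec::new() }
    }

    // Grow with explicit empty slots, then fill one; the assert catches
    // accidental double-insertion, which the old Default-based version
    // could not distinguish from a legitimate value.
    fn insert(&mut self, idx: usize, value: T) {
        if idx >= self.data.len() {
            self.data.resize_with(idx + 1, || None);
        }
        assert!(self.data[idx].is_none());
        self.data[idx] = Some(value);
    }

    fn remove(&mut self, idx: usize) -> T {
        self.data[idx].take().unwrap()
    }

    // Iterate occupied slots only, yielding (index, &T).
    fn iter(&self) -> impl Iterator<Item = (usize, &T)> + '_ {
        self.data
            .iter()
            .enumerate()
            .filter_map(|(i, slot)| slot.as_ref().map(|v| (i, v)))
    }
}

fn main() {
    let mut v = SparseVec::new();
    v.insert(3, "c");
    v.insert(0, "a");
    let _removed = v.remove(3);
    for (i, s) in v.iter() {
        println!("{i}: {s}"); // prints "0: a"
    }
}
```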
3 changes: 1 addition & 2 deletions src/ast.rs
@@ -102,8 +102,7 @@ pub struct LinkInfo {
pub file : FileUUID,
pub name_token : usize,
pub span : Span,
pub global_references : Vec<GlobalReference>,
pub resolved_globals : Vec<ValueUUID>
pub global_references : Vec<(GlobalReference, ValueUUID)>
}

#[derive(Debug)]
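The `LinkInfo` change merges the two parallel vectors `global_references` and `resolved_globals` into a single `Vec<(GlobalReference, ValueUUID)>`, so each reference is structurally tied to its resolution and the two can no longer drift out of sync by index. A hedged sketch of the consuming side, with both types stubbed out (the real definitions live elsewhere in src/ast.rs and src/linker.rs):

```rust
// Hypothetical stubs; only the pairing of reference and resolution matters here.
type GlobalReference = Vec<usize>; // e.g. the name-path tokens of the reference
type ValueUUID = usize;

struct LinkInfo {
    global_references: Vec<(GlobalReference, ValueUUID)>,
}

fn dump_resolutions(info: &LinkInfo) {
    // One loop, no parallel-index bookkeeping between two vectors.
    for (reference, resolved) in &info.global_references {
        println!("{}-token reference resolved to value #{resolved}", reference.len());
    }
}

fn main() {
    let info = LinkInfo { global_references: vec![(vec![12, 14], 3)] };
    dump_resolutions(&info);
}
```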
111 changes: 48 additions & 63 deletions src/dev_aid/lsp.rs
@@ -1,16 +1,13 @@

use std::error::Error;
use std::fs::File;
use std::collections::HashMap;
use std::path::PathBuf;
use std::rc::Rc;
use lsp_types::{*, request::Request, notification::*};

use lsp_server::{Response, Message, Connection};

use lsp_types::notification::Notification;

use crate::{parser::{perform_full_semantic_parse, FullParseResult}, dev_aid::syntax_highlighting::create_token_ide_info, ast::{IdentifierType, Span}, errors::{ErrorCollector, ParsingError}, linker::{Linker, PreLinker}};
use crate::{parser::perform_full_semantic_parse, dev_aid::syntax_highlighting::create_token_ide_info, ast::{IdentifierType, Span}, errors::{ErrorCollector, ParsingError}, linker::{PreLinker, FileUUIDMarker, Linker, FileUUID, FileData, Links}, arena_alloc::ArenaVector};

use super::syntax_highlighting::{IDETokenType, IDEIdentifierType, IDEToken};

@@ -37,42 +34,39 @@ macro_rules! println {
}};
}*/

struct LoadedFile {
file_text : String
}
struct LoadedFileCache {
loaded_files : HashMap<PathBuf, Rc<LoadedFile>>
linker : Linker,
uris : ArenaVector<Url, FileUUIDMarker>
}

impl LoadedFileCache {
fn new() -> LoadedFileCache {
LoadedFileCache{loaded_files : HashMap::new()}
}
fn get(&mut self, path : &PathBuf) -> Rc<LoadedFile> {
if let Some(found) = self.loaded_files.get(path) {
found.clone()
} else {
self.update_from_disk(path.clone())
}
fn new(linker : Linker, uris : ArenaVector<Url, FileUUIDMarker>) -> Self {
Self{linker, uris}
}
fn update_text(&mut self, path : PathBuf, new_text : String) -> Rc<LoadedFile> {
//let tokens = tokenize(file_data)
let result = Rc::new(LoadedFile{
file_text: new_text
});
self.update(path, result.clone());
result
fn find_uri(&self, uri : &Url) -> Option<FileUUID> {
self.uris.iter()
.find(|(_uuid, uri_found)| **uri_found == *uri)
.map(|(uuid, _uri_found)| uuid)
}
fn update_from_disk(&mut self, path : PathBuf) -> Rc<LoadedFile> {
let file_text = std::fs::read_to_string(&path).expect("Could not load file");
self.update_text(path, file_text)
fn update_text(&mut self, uri : Url, new_file_text : String) {
let file_uuid = self.find_uri(&uri).unwrap();
let (full_parse, parsing_errors) = perform_full_semantic_parse(&new_file_text, file_uuid);
self.linker.relink(file_uuid, new_file_text, full_parse, parsing_errors);
}
fn update(&mut self, path : PathBuf, new_val : Rc<LoadedFile>) {
self.loaded_files.insert(path, new_val);
fn ensure_contains_file(&mut self, uri : &Url) -> FileUUID {
if let Some(found) = self.find_uri(uri) {
found
} else {
let file_uuid = self.linker.reserve_file();
let file_text = std::fs::read_to_string(uri.to_file_path().unwrap()).unwrap();
let (full_parse, parsing_errors) = perform_full_semantic_parse(&file_text, file_uuid);
self.linker.add_reserved_file(file_uuid, file_text, full_parse, parsing_errors);
self.uris.insert(file_uuid, uri.clone());
file_uuid
}
}
}


pub fn lsp_main() -> Result<(), Box<dyn Error + Sync + Send>> {
// Note that we must have our logging only write out to stderr.
//println!("starting generic LSP server");
@@ -109,7 +103,8 @@ pub fn lsp_main() -> Result<(), Box<dyn Error + Sync + Send>> {
SemanticTokenType::TYPE,
SemanticTokenType::NUMBER,
SemanticTokenType::FUNCTION,
SemanticTokenType::EVENT
SemanticTokenType::EVENT,
SemanticTokenType::ENUM_MEMBER,
],
token_modifiers: vec![
SemanticTokenModifier::ASYNC, // repurpose ASYNC for "State"
@@ -152,9 +147,10 @@ fn get_semantic_token_type_from_ide_token(tok : &IDEToken) -> u32 {
IDETokenType::Identifier(IDEIdentifierType::Value(IdentifierType::Output)) => 4,
IDETokenType::Identifier(IDEIdentifierType::Value(IdentifierType::State)) => 3,
IDETokenType::Identifier(IDEIdentifierType::Value(IdentifierType::Local)) => 3,
IDETokenType::Identifier(IDEIdentifierType::Constant) => 9, // make it 'OPERATOR'?
IDETokenType::Identifier(IDEIdentifierType::Unknown) => 2, // make it 'OPERATOR'?
IDETokenType::Identifier(IDEIdentifierType::Interface) => 7, // FUNCTION
IDETokenType::Identifier(_) => 5, // All others are 'TYPE'
IDETokenType::Identifier(IDEIdentifierType::Type) => 5, // All others are 'TYPE'
IDETokenType::Number => 6,
IDETokenType::Invalid => 2, // make it 'OPERATOR'?
IDETokenType::InvalidBracket => 2, // make it 'OPERATOR'?
@@ -198,22 +194,22 @@ impl SemanticTokensDeltaAccumulator {
}
}

fn do_syntax_highlight(file_data : &LoadedFile, full_parse : &FullParseResult) -> (SemanticTokensResult, Vec<std::ops::Range<Position>>) {
fn do_syntax_highlight(file_data : &FileData, links : &Links) -> (SemanticTokensResult, Vec<std::ops::Range<Position>>) {
let file_text = &file_data.file_text;
let ide_tokens = create_token_ide_info(&full_parse);
let ide_tokens = create_token_ide_info(&file_data, links);

let mut semantic_tokens_acc = SemanticTokensDeltaAccumulator{prev : Position {line : 0, character : 0}, semantic_tokens : Vec::new()};
semantic_tokens_acc.semantic_tokens.reserve(full_parse.tokens.len());
semantic_tokens_acc.semantic_tokens.reserve(file_data.tokens.len());
let mut positions : Vec<std::ops::Range<Position>> = Vec::new();
positions.reserve(full_parse.tokens.len());
positions.reserve(file_data.tokens.len());

let mut cur_whitespace_start = 0;
let mut cur_position = Position{line : 0, character : 0};
for (tok_idx, ide_tok) in ide_tokens.iter().enumerate() {
let typ = get_semantic_token_type_from_ide_token(ide_tok);
let mod_bits = get_modifiers_for_token(ide_tok);

let tok_range = full_parse.tokens[tok_idx].get_range();
let tok_range = file_data.tokens[tok_idx].get_range();
let whitespace_text = &file_text[cur_whitespace_start..tok_range.start];
cur_whitespace_start = tok_range.end;
let token_text = &file_text[tok_range];
@@ -270,27 +266,27 @@ fn cvt_span_to_lsp_range(ch_sp : Span, token_positions : &[std::ops::Range<Posit
}

// Requires that token_positions.len() == tokens.len() + 1 to include EOF token
fn convert_diagnostic(err : ParsingError, severity : DiagnosticSeverity, token_positions : &[std::ops::Range<Position>], linker : &Linker) -> Diagnostic {
fn convert_diagnostic(err : ParsingError, severity : DiagnosticSeverity, token_positions : &[std::ops::Range<Position>], uris : &ArenaVector<Url, FileUUIDMarker>) -> Diagnostic {
let error_pos = cvt_span_to_lsp_range(err.position, token_positions);

let mut related_info = Vec::new();
for info in err.infos {
let info_pos = cvt_span_to_lsp_range(info.position, token_positions);
let location = Location{uri : Url::from_file_path(&linker.files[info.file].file_path).unwrap(), range : info_pos};
let location = Location{uri : uris[info.file].clone(), range : info_pos};
related_info.push(DiagnosticRelatedInformation { location, message: info.info });
}
Diagnostic::new(error_pos, Some(severity), None, None, err.reason, Some(related_info), None)
}

// Requires that token_positions.len() == tokens.len() + 1 to include EOF token
fn send_errors_warnings(connection: &Connection, errors : ErrorCollector, uri : Url, token_positions : &[std::ops::Range<Position>], linker : &Linker) -> Result<(), Box<dyn Error + Sync + Send>> {
fn send_errors_warnings(connection: &Connection, errors : ErrorCollector, token_positions : &[std::ops::Range<Position>], uris : &ArenaVector<Url, FileUUIDMarker>) -> Result<(), Box<dyn Error + Sync + Send>> {
let mut diag_vec : Vec<Diagnostic> = Vec::new();
for err in errors.errors {
diag_vec.push(convert_diagnostic(err, DiagnosticSeverity::ERROR, token_positions, linker));
diag_vec.push(convert_diagnostic(err, DiagnosticSeverity::ERROR, token_positions, uris));
}

let params = &PublishDiagnosticsParams{
uri: uri,
uri: uris[errors.file].clone(),
diagnostics: diag_vec,
version: None
};
@@ -309,10 +305,11 @@ fn main_loop(
params: serde_json::Value,
) -> Result<(), Box<dyn Error + Sync + Send>> {

let mut file_cache = LoadedFileCache::new();
let prelinker = PreLinker::new();
let mut file_cache = LoadedFileCache::new(prelinker.link(), ArenaVector::new());

let _params: InitializeParams = serde_json::from_value(params).unwrap();
println!("starting example main loop");
println!("starting LSP main loop");
for msg in &connection.receiver {
println!("got msg: {msg:?}");
match msg {
@@ -336,30 +333,21 @@

println!("got fullSemanticTokens request: {params:?}");

let path : PathBuf = params.text_document.uri.to_file_path().unwrap();
let file_data : Rc<LoadedFile> = file_cache.get(&path);
let uuid = file_cache.ensure_contains_file(&params.text_document.uri);

let file_data = &file_cache.linker.files[uuid];

let mut prelink = PreLinker::new();
let uuid = prelink.reserve_file();

let (full_parse, parsing_errors) = perform_full_semantic_parse(&file_data.file_text, uuid);

let (syntax_highlight, token_positions) = do_syntax_highlight(&file_data, &full_parse);
let (syntax_highlight, token_positions) = do_syntax_highlight(file_data, &file_cache.linker.links);

let result = serde_json::to_value(&syntax_highlight).unwrap();
connection.sender.send(Message::Response(Response{
id: req.id, result: Some(result), error: None
}))?;

prelink.add_reserved_file(uuid, path, file_data.file_text.clone(), full_parse, parsing_errors);

let linker = prelink.link();

let mut errors = linker.files[uuid].parsing_errors.clone();
linker.get_linking_errors(uuid, &mut errors);
let mut errors = file_cache.linker.files[uuid].parsing_errors.clone();
file_cache.linker.get_linking_errors(uuid, &mut errors);

send_errors_warnings(&connection, errors, params.text_document.uri, &token_positions, &linker)?;
send_errors_warnings(&connection, errors, &token_positions, &file_cache.uris)?;
},
// TODO ...
req => {
@@ -374,10 +362,7 @@
match not.method.as_str() {
notification::DidChangeTextDocument::METHOD => {
let params : DidChangeTextDocumentParams = serde_json::from_value(not.params).expect("JSON Encoding Error while parsing params");
let path_to_update = params.text_document.uri.to_file_path().unwrap();
//let original_file_text = file_cache.get(&path_to_update).file_text;
let new_file_text = params.content_changes[0].text.clone();
file_cache.update_text(path_to_update, new_file_text);
file_cache.update_text(params.text_document.uri, params.content_changes.into_iter().next().unwrap().text);
},
other => {
println!("got notification: {other:?}");
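
The LSP server now keeps one long-lived `Linker` across requests instead of building a throwaway `PreLinker` per request. The flow the diff implements is: reserve a `FileUUID`, parse the file, commit the parse under that UUID with `add_reserved_file`, and on `didChange` swap in the new text via `relink`. A self-contained sketch of that lifecycle with all types stubbed out (the real `Linker`/`PreLinker` live in src/linker.rs and do full semantic linking, not just text storage):

```rust
use std::collections::HashMap;

// Hypothetical stubs modelling only the reserve -> commit -> relink lifecycle.
type FileUUID = usize;

#[derive(Default)]
struct Linker {
    // Stub: the real FileData also holds tokens, parse results, and errors.
    files: HashMap<FileUUID, String>,
    next_uuid: FileUUID,
}

impl Linker {
    // Hand out a UUID up front so other files can refer to this one
    // before its contents are even parsed.
    fn reserve_file(&mut self) -> FileUUID {
        let uuid = self.next_uuid;
        self.next_uuid += 1;
        uuid
    }

    // Commit a freshly parsed file into its reserved slot.
    fn add_reserved_file(&mut self, uuid: FileUUID, file_text: String) {
        assert!(self.files.insert(uuid, file_text).is_none());
    }

    // didChange: replace the contents under the same UUID, keeping every
    // cross-file reference to this file valid.
    fn relink(&mut self, uuid: FileUUID, new_file_text: String) {
        assert!(self.files.insert(uuid, new_file_text).is_some());
    }
}

fn main() {
    let mut linker = Linker::default();
    let uuid = linker.reserve_file();
    linker.add_reserved_file(uuid, "module hello : int a -> int b {}".to_string());
    linker.relink(uuid, "module hello : int a -> int result {}".to_string());
    println!("tracking {} file(s)", linker.files.len());
}
```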
(Diffs for the remaining four changed files were not loaded on this page.)
