From 4208d59364be345cb743a071e3c7fd62054c737e Mon Sep 17 00:00:00 2001
From: Lennart Van Hirtum
Date: Thu, 19 Oct 2023 16:04:32 +0200
Subject: [PATCH] Added parsing of if-else statements. Flattening is broken for the moment

---
 multiply_add.sus                   | 24 ++++++++++++++
 resetNormalizer.sus                | 22 ++-----------
 src/arena_alloc.rs                 | 11 +++++--
 src/ast.rs                         | 52 +++++++++++++++++-------------
 src/codegen.rs                     |  9 ++++++
 src/dev_aid/lsp.rs                 | 11 ++++---
 src/dev_aid/syntax_highlighting.rs |  4 +--
 src/errors.rs                      |  2 +-
 src/flattening.rs                  | 19 +++++++++++
 src/linker.rs                      | 16 ++++-----
 src/main.rs                        |  3 +-
 src/parser.rs                      | 47 +++++++++++++++++++++++----
 src/tokenizer.rs                   |  5 +--
 13 files changed, 155 insertions(+), 70 deletions(-)
 create mode 100644 src/codegen.rs

diff --git a/multiply_add.sus b/multiply_add.sus
index 1204fb5..b59d6eb 100644
--- a/multiply_add.sus
+++ b/multiply_add.sus
@@ -170,3 +170,27 @@ module exists : duplicate a {
 module use_other_file : int a -> int r {
     r = hello_from_the_other_side(a);
 }
+
+//timeline (X -> X) .. (/ -> X) .. (/ -> X) .. (/ -> X)
+module Unpack4 : int[4] packed -> int out_stream {
+    state int st = 0; // Initial value, not a real assignment
+    state int[3] stored_packed;
+
+    if st == 0 {
+        out_stream = packed[0];
+        stored_packed[0] = packed[1]; // Shorthand notation is possible here "stored_packed[0:2] = packed[1:3];"
+        stored_packed[1] = packed[2];
+        stored_packed[2] = packed[3];
+        st = 1;
+    } else if st == 1 {
+        out_stream = stored_packed[0];
+        st = 2;
+    } else if st == 2 {
+        out_stream = stored_packed[1];
+        st = 3;
+    } else if st == 3 {
+        out_stream = stored_packed[2];
+        st = 0;
+        finish; // packet is hereby finished.
+    }
+}
diff --git a/resetNormalizer.sus b/resetNormalizer.sus
index 863c5e8..1859a57 100644
--- a/resetNormalizer.sus
+++ b/resetNormalizer.sus
@@ -1,22 +1,6 @@
-/*
-clocked module resetNormalizer :
-    bool resetn -> bool reg rst, bool reg isInitialized;
-
-int reg cyclesSinceReset;
-
-if(!resetn) {
-    cyclesSinceReset = 0;
-    rst = 1;
-    isInitialized = 0;
-} else {
-    if(cyclesSinceReset > 30) rst = 0;
-    if(cyclesSinceReset > 512) isInitialized = 1;
-    cyclesSinceReset = cyclesSinceReset + 1;
-}
-
-endmodule
-*/
 module hello_from_the_other_side : int a -> int result {
-    result = a;
+    if true {
+        result = a;
+    }
 }
 
diff --git a/src/arena_alloc.rs b/src/arena_alloc.rs
index 169be5f..012bcc1 100644
--- a/src/arena_alloc.rs
+++ b/src/arena_alloc.rs
@@ -1,8 +1,15 @@
-use std::{ops::{IndexMut, Index}, marker::PhantomData, iter::Enumerate};
+use std::{ops::{IndexMut, Index}, marker::PhantomData, iter::Enumerate, fmt};
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
 pub struct UUID(usize, PhantomData);
 
+impl fmt::Debug for UUID {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("id_")?;
+        self.0.fmt(f)
+    }
+}
+
 impl Default for UUID {
     fn default() -> Self {
         Self::INVALID
diff --git a/src/ast.rs b/src/ast.rs
index 639d8da..8827696 100644
--- a/src/ast.rs
+++ b/src/ast.rs
@@ -1,7 +1,7 @@
 use num_bigint::BigUint;
 
-use crate::{tokenizer::TokenTypeIdx, linker::{ValueUUID, FileUUID}};
+use crate::{tokenizer::TokenTypeIdx, linker::{ValueUUID, FileUUID}, flattening::FlattenedModule};
 
 use core::ops::Range;
 use std::ops::Deref;
 
@@ -127,6 +127,7 @@ pub struct CodeBlock {
 pub enum Statement {
     Declaration{local_id : usize},
     Assign{to : Vec, eq_sign_position : Option, expr : SpanExpression}, // num_regs v = expr;
+    If{condition : SpanExpression, then : CodeBlock, els : Option},
     Block(CodeBlock),
     TimelineStage(usize)
 }
@@ -145,7 +146,9 @@
 pub struct Module {
     pub link_info : LinkInfo,
     pub declarations : Vec,
-    pub code : CodeBlock
+    pub code : CodeBlock,
+
+    pub flattened : Option
 }
 
 impl Module {
@@ -272,16 +275,29 @@ impl IterIdentifiers for SpanTypeExpression {
     }
 }
 
-pub fn for_each_assign_in_block(block : &Vec, func : &mut F) where F: FnMut(&Vec, &SpanExpression) {
-    for (stmt, _span) in block {
-        match stmt {
-            Statement::Assign{to, eq_sign_position : _, expr} => {
-                func(to, expr);
-            },
-            Statement::Block(b) => {
-                for_each_assign_in_block(&b.statements, func);
-            },
-            _other => {}
+impl IterIdentifiers for CodeBlock {
+    fn for_each_value(&self, func : &mut F) where F : FnMut(LocalOrGlobal, usize) -> () {
+        for (stmt, _span) in &self.statements {
+            match stmt {
+                Statement::Assign{to, eq_sign_position : _, expr} => {
+                    for assign_to in to {
+                        assign_to.expr.for_each_value(func);
+                    }
+                    expr.for_each_value(func);
+                },
+                Statement::Block(b) => {
+                    b.for_each_value(func);
+                },
+                Statement::Declaration { local_id : _ } => {}
+                Statement::If { condition, then, els } => {
+                    condition.for_each_value(func);
+                    then.for_each_value(func);
+                    if let Some(e) = &els {
+                        e.for_each_value(func);
+                    }
+                }
+                Statement::TimelineStage(_) => {}
+            }
         }
     }
 }
@@ -291,16 +307,6 @@ impl IterIdentifiers for Module {
         for (pos, decl) in self.declarations.iter().enumerate() {
             func(LocalOrGlobal::Local(pos), decl.span.1);
         }
-        for_each_assign_in_block(&self.code.statements, &mut |to, v| {
-            for assign_to in to {
-                assign_to.expr.for_each_value(func);
-            }
-            v.for_each_value(func);
-        });
+        self.code.for_each_value(func);
     }
 }
-
-
-
-
-
diff --git a/src/codegen.rs b/src/codegen.rs
new file mode 100644
index 0000000..bb0c238
--- /dev/null
+++ b/src/codegen.rs
@@ -0,0 +1,9 @@
+use crate::linker::Linker;
+
+
+
+
+pub fn gen_code(linker : &mut Linker, ) {
+
+}
+
diff --git a/src/dev_aid/lsp.rs b/src/dev_aid/lsp.rs
index 7b817ea..79dec2c 100644
--- a/src/dev_aid/lsp.rs
+++ b/src/dev_aid/lsp.rs
@@ -50,8 +50,8 @@ impl LoadedFileCache {
     }
     fn update_text(&mut self, uri : Url, new_file_text : String) {
         let file_uuid = self.find_uri(&uri).unwrap();
-        let (full_parse, parsing_errors) = perform_full_semantic_parse(&new_file_text, file_uuid);
-        self.linker.relink(file_uuid, new_file_text, full_parse, parsing_errors);
+        let (full_parse, parsing_errors) = perform_full_semantic_parse(new_file_text, file_uuid);
+        self.linker.relink(file_uuid, full_parse, parsing_errors);
     }
     fn ensure_contains_file(&mut self, uri : &Url) -> FileUUID {
         if let Some(found) = self.find_uri(uri) {
@@ -59,8 +59,8 @@ impl LoadedFileCache {
         } else {
             let file_uuid = self.linker.reserve_file();
             let file_text = std::fs::read_to_string(uri.to_file_path().unwrap()).unwrap();
-            let (full_parse, parsing_errors) = perform_full_semantic_parse(&file_text, file_uuid);
-            self.linker.add_reserved_file(file_uuid, file_text, full_parse, parsing_errors);
+            let (full_parse, parsing_errors) = perform_full_semantic_parse(file_text, file_uuid);
+            self.linker.add_reserved_file(file_uuid, full_parse, parsing_errors);
             self.uris.insert(file_uuid, uri.clone());
             file_uuid
         }
@@ -347,8 +347,9 @@ fn main_loop(
                     let mut errors = file_cache.linker.files[uuid].parsing_errors.clone();
                     file_cache.linker.get_linking_errors(uuid, &mut errors);
 
-                    file_cache.linker.flatten_all_modules_in_file(uuid, &mut errors);
+                    //file_cache.linker.flatten_all_modules_in_file(uuid, &mut errors);
 
+                    println!("Errors: {:?}", &errors);
                     send_errors_warnings(&connection, errors, &token_positions, &file_cache.uris)?;
                 },
                 // TODO ...
diff --git a/src/dev_aid/syntax_highlighting.rs b/src/dev_aid/syntax_highlighting.rs
index e3e6a12..486cbb3 100644
--- a/src/dev_aid/syntax_highlighting.rs
+++ b/src/dev_aid/syntax_highlighting.rs
@@ -216,11 +216,11 @@ pub fn syntax_highlight_file(file_paths : Vec) {
             }
         };
 
-        let (full_parse, errors) = perform_full_semantic_parse(&file_text, uuid);
+        let (full_parse, errors) = perform_full_semantic_parse(file_text, uuid);
 
         println!("{:?}", full_parse.ast);
 
-        prelinker.add_reserved_file(uuid, file_text, full_parse, errors);
+        prelinker.add_reserved_file(uuid, full_parse, errors);
 
         paths_arena.insert(uuid, file_path);
     }
diff --git a/src/errors.rs b/src/errors.rs
index a25dd3e..c95261f 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -94,7 +94,7 @@ pub fn join_expected_list(expected : &[TokenTypeIdx]) -> String {
 }
 
 // Class that collects and manages errors and warnings
-#[derive(Clone)]
+#[derive(Debug,Clone)]
 pub struct ErrorCollector {
     pub errors : Vec,
     pub file : FileUUID
diff --git a/src/flattening.rs b/src/flattening.rs
index fb6b46f..a71759d 100644
--- a/src/flattening.rs
+++ b/src/flattening.rs
@@ -261,6 +261,9 @@ impl<'l, 'm, 'e> FlatteningContext<'l, 'm, 'e> {
             Statement::Declaration{local_id} => {
                 // TODO
             }
+            Statement::If { condition, then, els } => {
+                //todo!()
+            }
             Statement::Assign{to, expr : (Expression::FuncCall(func_and_args), func_span), eq_sign_position} => {
                 let Some((md, instantiation_idx, output_range)) = self.desugar_func_call(&func_and_args, func_span.1) else {return;};
 
@@ -318,3 +321,19 @@ pub struct FlattenedModule {
     instantiations : ListAllocator,
     connections : Vec
 }
+
+
+
+#[derive(Debug)]
+struct InstantiatedWire {
+    typ : TypeExpression,
+    latency : i64
+}
+
+#[derive(Debug)]
+pub struct InstantiatedModule {
+    wires : ListAllocator,
+    instantiations : ListAllocator,
+    connections : Vec
+}
+
diff --git a/src/linker.rs b/src/linker.rs
index 4c3d0f3..6870143 100644
--- a/src/linker.rs
+++ b/src/linker.rs
@@ -246,15 +246,15 @@ impl PreLinker {
     pub fn reserve_file(&mut self) -> FileUUID {
         self.files.reserve()
     }
-    pub fn add_reserved_file(&mut self, file : FileUUID, file_text : String, parse_result : FullParseResult, parsing_errors : ErrorCollector) {
+    pub fn add_reserved_file(&mut self, file : FileUUID, parse_result : FullParseResult, parsing_errors : ErrorCollector) {
         let mut associated_values = Vec::new();
         for md in parse_result.ast.modules {
-            let module_name = &file_text[parse_result.tokens[md.link_info.name_token].get_range()];
+            let module_name = &parse_result.file_text[parse_result.tokens[md.link_info.name_token].get_range()];
             let new_module_uuid = self.links.globals.alloc(Named::Module(md));
             associated_values.push(new_module_uuid);
             self.links.add_name(module_name, new_module_uuid);
         }
-        self.files.alloc_reservation(file, FileData { file_text, tokens: parse_result.tokens, token_hierarchy: parse_result.token_hierarchy, parsing_errors, associated_values});
+        self.files.alloc_reservation(file, FileData{file_text : parse_result.file_text, tokens: parse_result.tokens, token_hierarchy: parse_result.token_hierarchy, parsing_errors, associated_values});
     }
 
     // This should be called once all modules have been added. Adds errors for globals it couldn't match
@@ -395,15 +395,15 @@ impl Linker {
         self.files.reserve()
     }
 
-    pub fn add_reserved_file(&mut self, file : FileUUID, file_text : String, parse_result : FullParseResult, parsing_errors : ErrorCollector) {
+    pub fn add_reserved_file(&mut self, file : FileUUID, parse_result : FullParseResult, parsing_errors : ErrorCollector) {
         let mut associated_values = Vec::new();
         for md in parse_result.ast.modules {
-            let module_name = &file_text[parse_result.tokens[md.link_info.name_token].get_range()];
+            let module_name = &parse_result.file_text[parse_result.tokens[md.link_info.name_token].get_range()];
             let new_module_uuid = self.links.globals.alloc(Named::Module(md));
             associated_values.push(new_module_uuid);
             self.links.add_name(module_name, new_module_uuid);
         }
-        self.files.alloc_reservation(file, FileData { file_text, tokens: parse_result.tokens, token_hierarchy: parse_result.token_hierarchy, parsing_errors, associated_values});
+        self.files.alloc_reservation(file, FileData { file_text : parse_result.file_text, tokens: parse_result.tokens, token_hierarchy: parse_result.token_hierarchy, parsing_errors, associated_values});
 
         for (_uuid, val_in_file) in &mut self.links.globals {
             if let Some(link_info) = val_in_file.get_link_info_mut() {
@@ -415,10 +415,10 @@ impl Linker {
         }
     }
 
-    pub fn relink(&mut self, file : FileUUID, file_text : String, parse_result : FullParseResult, parsing_errors : ErrorCollector) {
+    pub fn relink(&mut self, file : FileUUID, parse_result : FullParseResult, parsing_errors : ErrorCollector) {
         self.remove_file_datas(&[file]);
         self.files.revert_to_reservation(file);
-        self.add_reserved_file(file, file_text, parse_result, parsing_errors);
+        self.add_reserved_file(file, parse_result, parsing_errors);
     }
 
     pub fn get_constant(&self, GlobalReference(identifier_span, uuid) : GlobalReference, errors : &mut ErrorCollector) -> Option {
diff --git a/src/main.rs b/src/main.rs
index 494ea29..ce9d5a2 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,10 +1,11 @@
+mod arena_alloc;
 mod tokenizer;
 mod parser;
 mod errors;
 mod ast;
 mod flattening;
-mod arena_alloc;
+mod codegen;
 
 mod dev_aid;
 mod linker;
 
diff --git a/src/parser.rs b/src/parser.rs
index 6e162c2..1557fc6 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -497,7 +497,8 @@ impl<'g, 'file> ASTParserContext<'g, 'file> {
             code_block.statements.push((Statement::TimelineStage(token.position), Span::from(token.position)));
             return Some(());
         }
-        
+
+
         let mut left_expressions : Vec<(SpanExpression, u32)> = Vec::new();
         let mut all_decls = true;
         loop { // Loop over a number of declarations possibly
@@ -543,7 +544,7 @@ impl<'g, 'file> ASTParserContext<'g, 'file> {
                     return self.parse_statement_handle_end(left_expressions, all_decls, &mut code_block.statements);
                 }
                 other => {
-                    self.error_unexpected_tree_node(&[kw(";"), kw("="), kw(",")], other, token_stream.unexpected_eof_token, "statement");
+                    self.error_unexpected_tree_node(&[kw(";"), kw("="), kw(","), kw("if")], other, token_stream.unexpected_eof_token, "statement");
                     return None
                 }
             }
@@ -612,6 +613,30 @@ impl<'g, 'file> ASTParserContext<'g, 'file> {
             return None;
         }
     }
+    fn parse_if_statement(&mut self, token_stream : &mut TokenStream, if_token : &TokenContent, declarations : &mut Vec, scope : &LocalVariableContext<'_, 'file>) -> Option<(Statement, Span)> {
+        let condition = self.parse_expression(token_stream, &scope)?;
+
+        let (then_block, then_block_span) = self.eat_block(token_stream, kw("{"), "Then block of if statement")?;
+        let then_content = self.parse_code_block(then_block, then_block_span, declarations, &scope);
+
+        let (else_content, span_end) = if let Some(_else_tok) = token_stream.eat_is_plain(kw("else")) {
+            if let Some(continuation_if) = token_stream.eat_is_plain(kw("if")) {
+                if let Some(stmt) = self.parse_if_statement(token_stream, &continuation_if, declarations, scope) {
+                    let end = stmt.1.1;
+                    (Some(CodeBlock{statements : vec![stmt]}), end)
+                } else {
+                    (Some(CodeBlock{statements : Vec::new()}), continuation_if.position)
+                }
+            } else {
+                let (else_block, else_block_span) = self.eat_block(token_stream, kw("{"), "Else block of if statement")?;
+                (Some(self.parse_code_block(else_block, else_block_span, declarations, &scope)), else_block_span.1)
+            }
+        } else {
+            (None, then_block_span.1)
+        };
+
+        Some((Statement::If{condition, then: then_content, els: else_content }, Span(if_token.position, span_end)))
+    }
     fn parse_code_block(&mut self, block_tokens : &[TokenTreeNode], span : Span, declarations : &mut Vec, outer_scope : &LocalVariableContext<'_, 'file>) -> CodeBlock {
         let mut token_stream = TokenStream::new(block_tokens, span.0, span.1);
 
@@ -632,6 +657,12 @@ impl<'g, 'file> ASTParserContext<'g, 'file> {
                     continue; // Can't add condition to if let, so have to do some weird control flow here
                 }
             }
+
+            // If statements
+            if let Some(if_token) = token_stream.eat_is_plain(kw("if")) {
+                let Some(if_stmt) = self.parse_if_statement(&mut token_stream, &if_token, declarations, &mut inner_scope) else {continue;};
+                code_block.statements.push(if_stmt);
+            }
 
             if self.parse_statement(&mut token_stream, declarations, &mut inner_scope, &mut code_block).is_none() {
                 // Error recovery. Find end of statement
@@ -658,7 +689,7 @@ impl<'g, 'file> ASTParserContext<'g, 'file> {
         let code = self.parse_code_block(block_tokens, block_span, &mut declarations, &scope);
 
         let span = Span(declaration_start_idx, token_stream.last_idx);
-        
+
         let link_info = LinkInfo{
             file : self.errors.file,
             name_token : name.position,
@@ -666,7 +697,7 @@ impl<'g, 'file> ASTParserContext<'g, 'file> {
             global_references : replace(&mut self.global_references, Vec::new()),
             is_fully_linked : false
        };
-        Some(Module{declarations, code, link_info})
+        Some(Module{declarations, code, link_info, flattened : None})
     }
 
     fn parse_ast(mut self, outer_token_iter : &mut TokenStream) -> ASTRoot {
@@ -700,21 +731,23 @@ pub fn parse<'nums, 'g, 'file>(token_hierarchy : &Vec, file_text
 
 
 pub struct FullParseResult {
+    pub file_text : String,
     pub tokens : Vec,
     pub token_hierarchy : Vec,
     pub ast : ASTRoot
 }
 
-pub fn perform_full_semantic_parse<'txt>(file_text : &'txt str, file : FileUUID) -> (FullParseResult, ErrorCollector) {
+pub fn perform_full_semantic_parse(file_text : String, file : FileUUID) -> (FullParseResult, ErrorCollector) {
     let mut errors = ErrorCollector::new(file);
 
-    let tokens = tokenize(file_text, &mut errors);
+    let tokens = tokenize(&file_text, &mut errors);
 
     let token_hierarchy = to_token_hierarchy(&tokens, &mut errors);
 
-    let ast = parse(&token_hierarchy, file_text, tokens.len(), &mut errors);
+    let ast = parse(&token_hierarchy, &file_text, tokens.len(), &mut errors);
 
     (FullParseResult{
+        file_text,
         tokens,
         token_hierarchy,
         ast,
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 72af382..ba74bcf 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -25,7 +25,7 @@ impl Token {
     }
 }
 
-pub const ALL_KEYWORDS : [(&'static str, u8); 15] = [
+pub const ALL_KEYWORDS : [(&'static str, u8); 16] = [
     ("template", 0),
     ("module", 0),
     ("pipeline", 0),
@@ -40,7 +40,8 @@ pub const ALL_KEYWORDS : [(&'static str, u8); 15] = [
     ("for", 0),
     ("struct", 0),
     ("enum", 0),
-    ("reg", 0)
+    ("reg", 0),
+    ("finish", 0)
 ];
 
 // Extra data is opreator prescedence. Lower number is higher prescedence of operators