diff --git a/Cargo.toml b/Cargo.toml index 4b6e49f..a5f6f82 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,6 @@ bit-set = "=0.5.*" once_cell = "^1.2.*" backtrace = "=0.3" typed-arena = "^2.0.*" -git2 = "0.12.*" [lib] diff --git a/README.md b/README.md index 1b4fad6..2cfa67e 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ and [tests/my_tests.rs](tests/my_test.rs) for actual usage examples ### Implementation status Everything is implemented, "business" logic is quite stable and well tested, but user facing -API is not very robust yet an very likely will have some changes. +API is not very robust yet and very likely will have some changes. For now development is going on in this repository but eventually it will be merged to main ANTLR4 repo @@ -46,7 +46,7 @@ Can be done after merge: ### Usage -You use the ANTLR4 "tool" to generate a parser, that will use the ANTLR +You should use the ANTLR4 "tool" to generate a parser that will use the ANTLR runtime, located here. You can run it with the following command: ```bash java -jar <path to ANTLR4 tool jar> -Dlanguage=Rust MyGrammar.g4 @@ -94,15 +94,19 @@ there are quite some differences because Rust is not an OOP language and is much otherwise `ParseTreeWalker` should be used. - In embedded actions to access parser you should use `recog` variable instead of `self`/`this`. This is because predicates have to be inserted into two syntactically different places in generated parser - - `InputStream`s have different index behavior for unicode characters. + - String `InputStream` has different index behavior when there are unicode characters. If you need exactly the same behavior, use `[u32]` based `InputStream`, or implement custom `CharStream`. - - In actions you have to escape `'` in rust lifetimes with `\ ` because ANTLR considers them as strings: `Struct<\'lifetime>` + - In actions you have to escape `'` in rust lifetimes with `\ ` because ANTLR considers them as strings, e.g. `Struct<\'lifetime>` - For custom tokens you should use `@tokenfactory` custom action, instead of usual `TokenLabelType` parser option - All rule context variables (rule argument or rule return) should implement `Default + Clone`.
### Unsafe Currently unsafe is used only to cast from trait object back to original type and to update data inside Rc via `get_mut_unchecked`(returned mutable reference is used immediately and not stored anywhere) + +### Versioning +In addition to usual Rust semantic versioning, +patch version changes of the crate should not require updating the generator part ## Licence diff --git a/build.rs b/build.rs index 2e4a09f..60efa3a 100644 --- a/build.rs +++ b/build.rs @@ -2,7 +2,7 @@ use std::convert::TryInto; use std::env; use std::env::VarError; use std::error::Error; -use std::fs::{DirEntry, File, read_dir}; +use std::fs::{read_dir, DirEntry, File}; use std::io::Write; use std::path::Path; use std::process::Command; diff --git a/src/atn.rs b/src/atn.rs index 74ac34c..c77115b 100644 --- a/src/atn.rs +++ b/src/atn.rs @@ -126,8 +126,7 @@ impl ATN { pub fn get_expected_tokens( &self, state_number: isize, - states_stack:impl Iterator<Item = isize> - // _ctx: &Rc, + states_stack: impl Iterator<Item = isize>, // _ctx: &Rc, ) -> IntervalSet { let s = self.states[state_number as usize].as_ref(); let mut following = self.next_tokens(s); diff --git a/src/atn_config.rs b/src/atn_config.rs index 08f187a..f7d7d83 100644 --- a/src/atn_config.rs +++ b/src/atn_config.rs @@ -33,7 +33,7 @@ impl PartialEq for ATNConfig { self.get_state() == other.get_state() && self.get_alt() == other.get_alt() && (Arc::ptr_eq(self.get_context().unwrap(), other.get_context().unwrap()) - || self.get_context() == other.get_context()) + || self.get_context() == other.get_context()) && self.get_type() == other.get_type() && self.semantic_context == other.semantic_context && self.precedence_filter_suppressed == other.precedence_filter_suppressed diff --git a/src/atn_config_set.rs b/src/atn_config_set.rs index 9fa930c..ddbb507 100644 --- a/src/atn_config_set.rs +++ b/src/atn_config_set.rs @@ -178,7 +178,7 @@ impl ATNConfigSet { pub fn add(&mut self, config: Box<ATNConfig>) -> bool { self.add_cached(config, None) } - pub fn get_items(&self) -> impl Iterator<Item=&ATNConfig> { + pub fn get_items(&self) -> impl Iterator<Item = &ATNConfig> { self.configs.iter().map(|c| c.as_ref()) } diff --git a/src/atn_deserialization_options.rs b/src/atn_deserialization_options.rs index d850e2f..4295520 100644 --- a/src/atn_deserialization_options.rs +++ b/src/atn_deserialization_options.rs @@ -10,9 +10,7 @@ impl ATNDeserializationOptions { ) -> ATNDeserializationOptions { unimplemented!() } - pub fn is_verify(&self) -> bool { - self.verify_atn - } + pub fn is_verify(&self) -> bool { self.verify_atn } } impl Default for ATNDeserializationOptions { diff --git a/src/atn_deserializer.rs b/src/atn_deserializer.rs index 66d4d95..92c6ce6 100644 --- a/src/atn_deserializer.rs +++ b/src/atn_deserializer.rs @@ -7,20 +7,20 @@ use uuid::Uuid; use crate::atn::ATN; use crate::atn_deserialization_options::ATNDeserializationOptions; -use crate::atn_state::*; use crate::atn_state::ATNBlockStart; use crate::atn_state::ATNDecisionState; use crate::atn_state::ATNState; use crate::atn_state::ATNStateType; use crate::atn_state::BaseATNState; +use crate::atn_state::*; use crate::atn_type::ATNType; use crate::int_stream::EOF; use crate::interval_set::IntervalSet; -use crate::lexer_action::*; use crate::lexer_action::LexerAction::*; +use crate::lexer_action::*; use crate::rule_context::CustomRuleContext; -use crate::transition::*; use crate::transition::Transition; +use crate::transition::*; lazy_static!
{ static ref BASE_SERIALIZED_UUID: Uuid = @@ -122,7 +122,7 @@ impl ATNDeserializer { } } - fn check_uuid(&self, data: &mut dyn Iterator<Item=isize>) -> Uuid { + fn check_uuid(&self, data: &mut dyn Iterator<Item = isize>) -> Uuid { //rust uses UTF-8 encoding so we need explicitly convert unicode //codepoint numbers to bytes let mut bytes = Vec::new(); @@ -138,7 +138,7 @@ impl ATNDeserializer { uuid } - fn read_atn(&self, data: &mut dyn Iterator<Item=isize>) -> ATN { + fn read_atn(&self, data: &mut dyn Iterator<Item = isize>) -> ATN { let atn = ATN::new_atn( match data.next() { Some(0) => ATNType::LEXER, @@ -151,7 +151,7 @@ impl ATNDeserializer { atn } - fn read_states(&self, atn: &mut ATN, data: &mut dyn Iterator<Item=isize>) { + fn read_states(&self, atn: &mut ATN, data: &mut dyn Iterator<Item = isize>) { // let loop_back_states = Vec::<(BaseATNState,isize)>::new(); // let end_states = Vec::<(BaseATNState,isize)>::new(); let states_count = data.next().unwrap() as usize; @@ -187,7 +187,7 @@ impl ATNDeserializer { for _ in 0..num_non_greedy { let st = data.next().unwrap() as usize; if let ATNStateType::DecisionState { nongreedy: ng, .. } = - atn.states[st].get_state_type_mut() + atn.states[st].get_state_type_mut() { *ng = true } @@ -209,7 +209,7 @@ impl ATNDeserializer { } } - fn read_rules(&self, atn: &mut ATN, data: &mut dyn Iterator<Item=isize>) { + fn read_rules(&self, atn: &mut ATN, data: &mut dyn Iterator<Item = isize>) { let nrules = data.next().unwrap() as usize; // if atn.grammar_type == ATNType::LEXER { // atn.rule_to_token_type.resize(nrules, 0) @@ -248,14 +248,14 @@ impl ATNDeserializer { } } - fn read_modes(&self, atn: &mut ATN, data: &mut dyn Iterator<Item=isize>) { + fn read_modes(&self, atn: &mut ATN, data: &mut dyn Iterator<Item = isize>) { let nmodes = data.next().unwrap(); for _i in 0..nmodes { atn.mode_to_start_state.push(data.next().unwrap() as usize); } } - fn read_sets<T: Iterator<Item=isize>>( + fn read_sets<T: Iterator<Item = isize>>( &self, _atn: &mut ATN, data: &mut T, @@ -285,7 +285,7 @@ impl ATNDeserializer { fn read_edges( &self, atn: &mut ATN, - data: &mut dyn Iterator<Item=isize>, + data: &mut dyn Iterator<Item = isize>, sets: &Vec<IntervalSet>, ) { let nedges = data.next().unwrap(); @@ -352,10 +352,10 @@ impl ATNDeserializer { match atn_state.get_state_type() { ATNStateType::DecisionState { state: - ATNDecisionState::BlockStartState { - end_state: _, - en: _, - }, + ATNDecisionState::BlockStartState { + end_state: _, + en: _, + }, ..
} => { @@ -379,7 +379,7 @@ impl ATNDeserializer { } } - fn read_decisions(&self, atn: &mut ATN, _data: &mut dyn Iterator<Item=isize>) { + fn read_decisions(&self, atn: &mut ATN, _data: &mut dyn Iterator<Item = isize>) { let ndecisions = _data.next().unwrap(); for i in 0..ndecisions { let s = _data.next().unwrap() as usize; @@ -391,7 +391,7 @@ impl ATNDeserializer { } } - fn read_lexer_actions(&self, atn: &mut ATN, _data: &mut dyn Iterator<Item=isize>) { + fn read_lexer_actions(&self, atn: &mut ATN, _data: &mut dyn Iterator<Item = isize>) { //lexer actions are always supported here let nactions = _data.next().unwrap() as usize; @@ -416,7 +416,7 @@ impl ATNDeserializer { fn generate_rule_bypass_transitions( &self, _atn: &mut ATN, - _data: &mut dyn Iterator<Item=isize>, + _data: &mut dyn Iterator<Item = isize>, ) { unimplemented!() } @@ -424,7 +424,7 @@ impl ATNDeserializer { fn generate_rule_bypass_transition( &self, _atn: &mut ATN, - _data: &mut dyn Iterator<Item=isize>, + _data: &mut dyn Iterator<Item = isize>, _idx: isize, ) { unimplemented!() @@ -434,15 +434,15 @@ impl ATNDeserializer { unimplemented!() } - fn mark_precedence_decisions(&self, _atn: &mut ATN, _data: &mut dyn Iterator<Item=isize>) { + fn mark_precedence_decisions(&self, _atn: &mut ATN, _data: &mut dyn Iterator<Item = isize>) { let mut precedence_states = Vec::new(); for state in _atn.states.iter() { if let ATNStateType::DecisionState { state: - ATNDecisionState::StarLoopEntry { - loop_back_state, - is_precedence, - }, + ATNDecisionState::StarLoopEntry { + loop_back_state, + is_precedence, + }, .. } = state.get_state_type() { @@ -450,7 +450,7 @@ impl ATNDeserializer { is_left_recursive: true, .. } = - _atn.states[_atn.rule_to_start_state[state.get_rule_index()]].get_state_type() + _atn.states[_atn.rule_to_start_state[state.get_rule_index()]].get_state_type() { let maybe_loop_end = state.get_transitions().iter().last().unwrap().get_target(); @@ -459,7 +459,7 @@ impl ATNDeserializer { if maybe_loop_end.has_epsilon_only_transitions() { if let ATNStateType::RuleStopState = _atn.states [maybe_loop_end.get_transitions()[0].get_target()] - .get_state_type() + .get_state_type() { precedence_states.push(state.get_state_number()) } @@ -471,10 +471,10 @@ impl ATNDeserializer { for st in precedence_states { if let ATNStateType::DecisionState { state: - ATNDecisionState::StarLoopEntry { - loop_back_state, - is_precedence, - }, + ATNDecisionState::StarLoopEntry { + loop_back_state, + is_precedence, + }, ..
} = _atn.states[st].get_state_type_mut() { @@ -483,7 +483,7 @@ impl ATNDeserializer { } } - fn verify_atn(&self, _atn: &mut ATN, _data: &mut dyn Iterator<Item=isize>) { + fn verify_atn(&self, _atn: &mut ATN, _data: &mut dyn Iterator<Item = isize>) { //TODO } diff --git a/src/atn_simulator.rs b/src/atn_simulator.rs index 9b6733f..cba58f5 100644 --- a/src/atn_simulator.rs +++ b/src/atn_simulator.rs @@ -13,7 +13,6 @@ pub trait IATNSimulator { fn decision_to_dfa(&self) -> &Vec<DFA>; } - pub struct BaseATNSimulator { pub atn: Arc<ATN>, pub shared_context_cache: Arc<PredictionContextCache>, @@ -45,15 +44,9 @@ impl BaseATNSimulator { } impl IATNSimulator for BaseATNSimulator { - fn shared_context_cache(&self) -> &PredictionContextCache { - self.shared_context_cache.deref() - } + fn shared_context_cache(&self) -> &PredictionContextCache { self.shared_context_cache.deref() } - fn atn(&self) -> &ATN { - self.atn.as_ref() - } + fn atn(&self) -> &ATN { self.atn.as_ref() } - fn decision_to_dfa(&self) -> &Vec<DFA> { - self.decision_to_dfa.as_ref() - } + fn decision_to_dfa(&self) -> &Vec<DFA> { self.decision_to_dfa.as_ref() } } diff --git a/src/char_stream.rs b/src/char_stream.rs index 4f9c0ec..c60aae7 100644 --- a/src/char_stream.rs +++ b/src/char_stream.rs @@ -21,10 +21,7 @@ pub trait CharStream<Data>: IntStream { /// Trait for input that can be accepted by `InputStream` to be able to provide lexer with data. /// Is sealed for now just in case. pub trait InputData: -Index<Range<usize>, Output=Self> -+ Index<RangeFrom<usize>, Output=Self> -+ ToOwned -+ 'static + Index<Range<usize>, Output = Self> + Index<RangeFrom<usize>, Output = Self> + ToOwned + 'static { // fn to_indexed_vec(&self) -> Vec<(u32, u32)>; @@ -39,7 +36,10 @@ Index<Range<usize>, Output=Self> fn to_display(&self) -> String; } -impl<T: Into<u32> + From<u8> + TryFrom<u32> + Copy + Debug + 'static> InputData for [T] where <T as TryFrom<u32>>::Error:Debug { +impl<T: Into<u32> + From<u8> + TryFrom<u32> + Copy + Debug + 'static> InputData for [T] +where + <T as TryFrom<u32>>::Error: Debug, +{ // fn to_indexed_vec(&self) -> Vec<(u32, u32)> { // self.into_iter() // .enumerate() @@ -69,7 +69,11 @@ impl<T: Into<u32> + From<u8> + TryFrom<u32> + Copy + Debug + 'static> InputData fn len(&self) -> usize { self.len() } #[inline] - fn from_text(text: &str) -> Self::Owned { text.chars().map(|it| T::try_from(it as u32).unwrap()).collect() } + fn from_text(text: &str) -> Self::Owned { + text.chars() + .map(|it| T::try_from(it as u32).unwrap()) + .collect() + } #[inline] // default @@ -135,9 +139,7 @@ impl InputData for str { #[inline] fn len(&self) -> usize { self.len() } - fn from_text(text: &str) -> Self::Owned { - text.to_owned() - } + fn from_text(text: &str) -> Self::Owned { text.to_owned() } // #[inline] // fn from_text(text: &str) -> Self::Owned { text.to_owned() } diff --git a/src/common_token_stream.rs b/src/common_token_stream.rs index c619825..7cf268c 100644 --- a/src/common_token_stream.rs +++ b/src/common_token_stream.rs @@ -2,7 +2,7 @@ use std::borrow::Borrow; use std::ops::Deref; use crate::errors::ANTLRError; -use crate::int_stream::{EOF, IntStream, IterWrapper}; +use crate::int_stream::{IntStream, IterWrapper, EOF}; use crate::token::{OwningToken, Token, TOKEN_DEFAULT_CHANNEL, TOKEN_INVALID_TYPE}; use crate::token_factory::TokenFactory; use crate::token_source::TokenSource; @@ -55,7 +55,7 @@ impl<'input, T: TokenSource<'input>> TokenStream<'input> for CommonTokenStream<' } let mut i = self.base.p; let mut n = 1; // we know tokens[p] is a good one - // find k good tokens + // find k good tokens while n < k { // skip off-channel tokens, but make sure to not look past EOF if self.sync(i + 1) { @@ -73,7 +73,7 @@ impl<'input, T: TokenSource<'input>> TokenStream<'input> for CommonTokenStream<'
self.base.get_inner(index) } - fn get_token_source(&self) -> &dyn TokenSource<'input, TF=Self::TF> { + fn get_token_source(&self) -> &dyn TokenSource<'input, TF = Self::TF> { self.base.get_token_source() } diff --git a/src/dfa.rs b/src/dfa.rs index e363df5..2c9c7d6 100644 --- a/src/dfa.rs +++ b/src/dfa.rs @@ -12,12 +12,8 @@ use crate::vocabulary::Vocabulary; ///Helper trait for scope management and temporary values not living long enough pub(crate) trait ScopeExt: Sized { - fn convert_with<T, F: FnOnce(Self) -> T>(self, f: F) -> T { - f(self) - } - fn run<T, F: Fn(&Self) -> T>(&self, f: F) -> T { - f(self) - } + fn convert_with<T, F: FnOnce(Self) -> T>(self, f: F) -> T { f(self) } + fn run<T, F: Fn(&Self) -> T>(&self, f: F) -> T { f(self) } //apply fn modify_with<F: FnOnce(&mut Self)>(mut self, f: F) -> Self { @@ -35,7 +31,6 @@ impl<Any: Sized> ScopeExt for Any {} - pub struct DFA { /// ATN state from which this DFA creation was started from pub atn_start_state: ATNStateRef, @@ -73,7 +68,11 @@ impl DFA { Box::new(ATNConfigSet::new_base_atnconfig_set(true)), )); if let ATNStateType::DecisionState { - state: ATNDecisionState::StarLoopEntry { is_precedence: true, .. }, + state: + ATNDecisionState::StarLoopEntry { + is_precedence: true, + .. + }, .. } = atn.states[atn_start_state].get_state_type() { @@ -97,19 +96,16 @@ impl DFA { panic!("dfa is supposed to be precedence here"); } - self.s0.read().unwrap() - .and_then(|s0| { - self.states - .read().unwrap()[s0] - .edges - .get(_precedence as usize) - .copied() - .and_then(|it| match it { - 0 => None, - x => Some(x) - }) - } - ) + self.s0.read().unwrap().and_then(|s0| { + self.states.read().unwrap()[s0] + .edges + .get(_precedence as usize) + .copied() + .and_then(|it| match it { + 0 => None, + x => Some(x), + }) + }) } pub fn set_precedence_start_state(&self, precedence: isize, _start_state: DFAStateRef) { @@ -123,40 +119,40 @@ impl DFA { let precedence = precedence as usize; if let Some(x) = self.s0.write().unwrap().deref() { - self.states - .write().unwrap()[*x] - .edges - .apply(|edges| { - if edges.len() <= precedence { - edges.resize(precedence + 1, 0); - } - edges[precedence] = _start_state; - }); + self.states.write().unwrap()[*x].edges.apply(|edges| { + if edges.len() <= precedence { + edges.resize(precedence + 1, 0); + } + edges[precedence] = _start_state; + }); } } - pub fn is_precedence_dfa(&self) -> bool { - self.is_precedence_dfa - } + pub fn is_precedence_dfa(&self) -> bool { self.is_precedence_dfa } pub fn set_precedence_dfa(&mut self, precedence_dfa: bool) { self.is_precedence_dfa = precedence_dfa } - fn num_states(&self) -> isize { - unimplemented!() - } + fn num_states(&self) -> isize { unimplemented!() } pub fn to_string(&self, vocabulary: &dyn Vocabulary) -> String { - if self.s0.read().unwrap().is_none() { return String::new(); } + if self.s0.read().unwrap().is_none() { + return String::new(); + } - return format!("{}", DFASerializer::new(self, &|x| - vocabulary.get_display_name(x as isize - 1).into_owned(), - )); + return format!( + "{}", + DFASerializer::new(self, &|x| vocabulary + .get_display_name(x as isize - 1) + .into_owned(),) + ); } pub fn to_lexer_string(&self) -> String { - if self.s0.read().unwrap().is_none() { return String::new(); } + if self.s0.read().unwrap().is_none() { + return String::new(); + } format!( "{}", DFASerializer::new(self, &|x| format!( diff --git a/src/dfa_serializer.rs b/src/dfa_serializer.rs index 91b25b5..1e8effe 100644 --- a/src/dfa_serializer.rs +++ b/src/dfa_serializer.rs @@ -16,10 +16,11 @@ impl Display for DFASerializer<'_, '_> { for (i, edge) in
source.edges.iter().copied().enumerate() { if edge != 0 && edge != ERROR_DFA_STATE_REF { let target = &dfa[edge]; - f.write_fmt(format_args!("{}-{}->{}\n", - self.get_state_string(source), - (self.get_edge_label)(i), - self.get_state_string(target) + f.write_fmt(format_args!( + "{}-{}->{}\n", + self.get_state_string(source), + (self.get_edge_label)(i), + self.get_state_string(target) ))?; } } @@ -29,7 +30,10 @@ impl Display for DFASerializer<'_, '_> { } impl DFASerializer<'_, '_> { - pub fn new<'a, 'b>(dfa: &'a DFA, get_edge_label: &'b dyn Fn(usize) -> String) -> DFASerializer<'a, 'b> { + pub fn new<'a, 'b>( + dfa: &'a DFA, + get_edge_label: &'b dyn Fn(usize) -> String, + ) -> DFASerializer<'a, 'b> { DFASerializer { dfa, get_edge_label, @@ -37,15 +41,16 @@ impl DFASerializer<'_, '_> { } fn get_state_string(&self, state: &DFAState) -> String { - let mut base_str = format!("{}s{}{}", - if state.is_accept_state { ":" } else { "" }, - state.state_number - 1, - if state.requires_full_context { "^" } else { "" }, + let mut base_str = format!( + "{}s{}{}", + if state.is_accept_state { ":" } else { "" }, + state.state_number - 1, + if state.requires_full_context { "^" } else { "" }, ); if state.is_accept_state { base_str = if !state.predicates.is_empty() { unimplemented!() -// format!("{}=>{:?}", base_str, state.predicates) + // format!("{}=>{:?}", base_str, state.predicates) } else { format!("{}=>{}", base_str, state.prediction) }; diff --git a/src/dfa_state.rs b/src/dfa_state.rs index beb0c1d..d596a5c 100644 --- a/src/dfa_state.rs +++ b/src/dfa_state.rs @@ -40,15 +40,11 @@ pub struct DFAState { } impl PartialEq for DFAState { - fn eq(&self, other: &Self) -> bool { - self.configs == other.configs - } + fn eq(&self, other: &Self) -> bool { self.configs == other.configs } } impl Hash for DFAState { - fn hash<H: Hasher>(&self, state: &mut H) { - self.configs.hash(state); - } + fn hash<H: Hasher>(&self, state: &mut H) { self.configs.hash(state); } } impl DFAState { @@ -62,7 +58,7 @@ impl DFAState { DFAState { state_number: stateNumber, configs, -// edges: Vec::with_capacity((MAX_DFA_EDGE - MIN_DFA_EDGE + 1) as usize), + // edges: Vec::with_capacity((MAX_DFA_EDGE - MIN_DFA_EDGE + 1) as usize), edges: Vec::new(), is_accept_state: false, prediction: 0, @@ -74,7 +70,5 @@ impl DFAState { // fn get_alt_set(&self) -> &Set { unimplemented!() } - fn set_prediction(&self, _v: isize) { - unimplemented!() - } + fn set_prediction(&self, _v: isize) { unimplemented!() } } diff --git a/src/error_listener.rs b/src/error_listener.rs index 4044004..d34ea2f 100644 --- a/src/error_listener.rs +++ b/src/error_listener.rs @@ -14,63 +14,156 @@ use crate::token::Token; use crate::token_factory::TokenFactory; pub trait ErrorListener<'a, T: Recognizer<'a>> { - fn syntax_error(&self, _recognizer: &T, _offending_symbol: Option<&<T::TF as TokenFactory<'a>>::Inner>, - _line: isize, _column: isize, _msg: &str, _e: Option<&ANTLRError>, ) {} + fn syntax_error( + &self, + _recognizer: &T, + _offending_symbol: Option<&<T::TF as TokenFactory<'a>>::Inner>, + _line: isize, + _column: isize, + _msg: &str, + _e: Option<&ANTLRError>, + ) { + } - fn report_ambiguity(&self, _recognizer: &T, _dfa: &DFA, _start_index: isize, _stop_index: isize, - _exact: bool, _ambig_alts: &BitSet, _configs: &ATNConfigSet) {} + fn report_ambiguity( + &self, + _recognizer: &T, + _dfa: &DFA, + _start_index: isize, + _stop_index: isize, + _exact: bool, + _ambig_alts: &BitSet, + _configs: &ATNConfigSet, + ) { + } - fn report_attempting_full_context(&self, _recognizer: &T, _dfa: &DFA, _start_index: isize, _stop_index: isize, -
_conflicting_alts: &BitSet, _configs: &ATNConfigSet) {} + fn report_attempting_full_context( + &self, + _recognizer: &T, + _dfa: &DFA, + _start_index: isize, + _stop_index: isize, + _conflicting_alts: &BitSet, + _configs: &ATNConfigSet, + ) { + } - fn report_context_sensitivity(&self, _recognizer: &T, _dfa: &DFA, _start_index: isize, - _stop_index: isize, _prediction: isize, _configs: &ATNConfigSet) {} + fn report_context_sensitivity( + &self, + _recognizer: &T, + _dfa: &DFA, + _start_index: isize, + _stop_index: isize, + _prediction: isize, + _configs: &ATNConfigSet, + ) { + } } #[derive(Debug)] pub struct ConsoleErrorListener {} impl<'a, T: Recognizer<'a>> ErrorListener<'a, T> for ConsoleErrorListener { - fn syntax_error(&self, _recognizer: &T, _offending_symbol: Option<&<T::TF as TokenFactory<'a>>::Inner>, - line: isize, column: isize, msg: &str, _e: Option<&ANTLRError>) { + fn syntax_error( + &self, + _recognizer: &T, + _offending_symbol: Option<&<T::TF as TokenFactory<'a>>::Inner>, + line: isize, + column: isize, + msg: &str, + _e: Option<&ANTLRError>, + ) { eprintln!("line {}:{} {}", line, column, msg); } } pub(crate) struct ProxyErrorListener<'b, 'a, T> { - pub delegates: Ref<'b, Vec<Box<dyn ErrorListener<'a, T>>>> + pub delegates: Ref<'b, Vec<Box<dyn ErrorListener<'a, T>>>>, } impl<'b, 'a, T: Recognizer<'a>> ErrorListener<'a, T> for ProxyErrorListener<'b, 'a, T> { - fn syntax_error(&self, _recognizer: &T, offending_symbol: Option<&<T::TF as TokenFactory<'a>>::Inner>, line: isize, column: isize, msg: &str, e: Option<&ANTLRError>) { + fn syntax_error( + &self, + _recognizer: &T, + offending_symbol: Option<&<T::TF as TokenFactory<'a>>::Inner>, + line: isize, + column: isize, + msg: &str, + e: Option<&ANTLRError>, + ) { for listener in self.delegates.deref() { listener.syntax_error(_recognizer, offending_symbol, line, column, msg, e) } } - fn report_ambiguity(&self, recognizer: &T, dfa: &DFA, start_index: isize, stop_index: isize, exact: bool, ambig_alts: &BitSet, configs: &ATNConfigSet) { + fn report_ambiguity( + &self, + recognizer: &T, + dfa: &DFA, + start_index: isize, + stop_index: isize, + exact: bool, + ambig_alts: &BitSet, + configs: &ATNConfigSet, + ) { for listener in self.delegates.deref() { - listener.report_ambiguity(recognizer, dfa, start_index, stop_index, exact, ambig_alts, configs) + listener.report_ambiguity( + recognizer, + dfa, + start_index, + stop_index, + exact, + ambig_alts, + configs, + ) } } - fn report_attempting_full_context(&self, recognizer: &T, dfa: &DFA, start_index: isize, stop_index: isize, - conflicting_alts: &BitSet, configs: &ATNConfigSet) { + fn report_attempting_full_context( + &self, + recognizer: &T, + dfa: &DFA, + start_index: isize, + stop_index: isize, + conflicting_alts: &BitSet, + configs: &ATNConfigSet, + ) { for listener in self.delegates.deref() { - listener.report_attempting_full_context(recognizer, dfa, start_index, stop_index, - conflicting_alts, configs) + listener.report_attempting_full_context( + recognizer, + dfa, + start_index, + stop_index, + conflicting_alts, + configs, + ) } } - fn report_context_sensitivity(&self, recognizer: &T, dfa: &DFA, start_index: isize, stop_index: isize, prediction: isize, configs: &ATNConfigSet) { + fn report_context_sensitivity( + &self, + recognizer: &T, + dfa: &DFA, + start_index: isize, + stop_index: isize, + prediction: isize, + configs: &ATNConfigSet, + ) { for listener in self.delegates.deref() { - listener.report_context_sensitivity(recognizer, dfa, start_index, stop_index, prediction, configs) + listener.report_context_sensitivity( + recognizer, + dfa, + start_index, + stop_index, + prediction, + configs, + ) } } } pub struct DiagnosticErrorListener
{ - exact_only: bool + exact_only: bool, } impl DiagnosticErrorListener { @@ -95,29 +188,64 @@ impl DiagnosticErrorListener { } impl<'a, T: Parser<'a>> ErrorListener<'a, T> for DiagnosticErrorListener { - fn report_ambiguity(&self, recognizer: &T, dfa: &DFA, start_index: isize, stop_index: isize, exact: bool, ambig_alts: &BitSet, _configs: &ATNConfigSet) { - if self.exact_only && !exact { return } - let msg = format!("reportAmbiguity d={}: ambigAlts={:?}, input='{}'", - self.get_decision_description(recognizer, dfa), - ambig_alts, - recognizer.get_input_stream().get_text_from_interval(start_index, stop_index) + fn report_ambiguity( + &self, + recognizer: &T, + dfa: &DFA, + start_index: isize, + stop_index: isize, + exact: bool, + ambig_alts: &BitSet, + _configs: &ATNConfigSet, + ) { + if self.exact_only && !exact { + return; + } + let msg = format!( + "reportAmbiguity d={}: ambigAlts={:?}, input='{}'", + self.get_decision_description(recognizer, dfa), + ambig_alts, + recognizer + .get_input_stream() + .get_text_from_interval(start_index, stop_index) ); recognizer.notify_error_listeners(msg, None, None); } - fn report_attempting_full_context(&self, recognizer: &T, dfa: &DFA, start_index: isize, stop_index: isize, - _conflicting_alts: &BitSet, _configs: &ATNConfigSet) { - let msg = format!("reportAttemptingFullContext d={}, input='{}'", - self.get_decision_description(recognizer, dfa), - recognizer.get_input_stream().get_text_from_interval(start_index, stop_index) + fn report_attempting_full_context( + &self, + recognizer: &T, + dfa: &DFA, + start_index: isize, + stop_index: isize, + _conflicting_alts: &BitSet, + _configs: &ATNConfigSet, + ) { + let msg = format!( + "reportAttemptingFullContext d={}, input='{}'", + self.get_decision_description(recognizer, dfa), + recognizer + .get_input_stream() + .get_text_from_interval(start_index, stop_index) ); recognizer.notify_error_listeners(msg, None, None); } - fn report_context_sensitivity(&self, recognizer: &T, dfa: &DFA, start_index: isize, stop_index: isize, _prediction: isize, _configs: &ATNConfigSet) { - let msg = format!("reportContextSensitivity d={}, input='{}'", - self.get_decision_description(recognizer, dfa), - recognizer.get_input_stream().get_text_from_interval(start_index, stop_index) + fn report_context_sensitivity( + &self, + recognizer: &T, + dfa: &DFA, + start_index: isize, + stop_index: isize, + _prediction: isize, + _configs: &ATNConfigSet, + ) { + let msg = format!( + "reportContextSensitivity d={}, input='{}'", + self.get_decision_description(recognizer, dfa), + recognizer + .get_input_stream() + .get_text_from_interval(start_index, stop_index) ); recognizer.notify_error_listeners(msg, None, None); } diff --git a/src/error_strategy.rs b/src/error_strategy.rs index 80d5f59..c42a366 100644 --- a/src/error_strategy.rs +++ b/src/error_strategy.rs @@ -69,14 +69,14 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { } } - fn begin_error_condition<T: Parser<'input, Node=Ctx, TF=Ctx::TF>>( + fn begin_error_condition<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>( &mut self, _recognizer: &T, ) { self.error_recovery_mode = true; } - fn end_error_condition<T: Parser<'input, Node=Ctx, TF=Ctx::TF>>( + fn end_error_condition<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>( &mut self, _recognizer: &T, ) { @@ -85,7 +85,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { self.last_error_states = None; } - fn report_no_viable_alternative<T: Parser<'input, Node=Ctx, TF=Ctx::TF>>( + fn report_no_viable_alternative<T: Parser<'input, Node = Ctx, TF = Ctx::TF>>( &self, recognizer: &mut T, e: &NoViableAltError, @@ -102,7 +102,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { format!("no viable alternative at
input '{}'", input) } - fn report_input_mismatch>( + fn report_input_mismatch>( &self, recognizer: &T, e: &InputMisMatchError, @@ -116,7 +116,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { ) } - fn report_failed_predicate>( + fn report_failed_predicate>( &self, recognizer: &T, e: &FailedPredicateError, @@ -128,7 +128,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { ) } - fn report_unwanted_token>( + fn report_unwanted_token>( &mut self, recognizer: &mut T, ) { @@ -146,7 +146,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { recognizer.notify_error_listeners(msg, Some(t), None); } - fn report_missing_token>( + fn report_missing_token>( &mut self, recognizer: &mut T, ) { @@ -168,7 +168,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { recognizer.notify_error_listeners(msg, Some(t), None); } - fn single_token_insertion>( + fn single_token_insertion>( &mut self, recognizer: &mut T, ) -> bool { @@ -192,7 +192,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { false } - fn single_token_deletion<'a, T: Parser<'input, Node=Ctx, TF=Ctx::TF>>( + fn single_token_deletion<'a, T: Parser<'input, Node = Ctx, TF = Ctx::TF>>( &mut self, recognizer: &'a mut T, ) -> Option<&'a >::Tok> { @@ -209,7 +209,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { None } - fn get_missing_symbol>( + fn get_missing_symbol>( &self, recognizer: &mut T, ) -> >::Tok { @@ -247,7 +247,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { // .modify_with(|it| it.text = token_text) } - fn get_expected_tokens>( + fn get_expected_tokens>( &self, recognizer: &T, ) -> IntervalSet { @@ -263,7 +263,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { format!("'{}'", escape_whitespaces(s, false)) } - fn get_error_recovery_set>( + fn get_error_recovery_set>( &self, recognizer: &T, ) -> IntervalSet { @@ -286,7 +286,7 @@ impl<'input, Ctx: ParserNodeType<'input>> DefaultErrorStrategy<'input, Ctx> { return recover_set; } - fn consume_until>( + fn consume_until>( &mut self, recognizer: &mut T, set: &IntervalSet, @@ -338,10 +338,10 @@ impl<'a, T: Parser<'a>> ErrorStrategy<'a, T> for DefaultErrorStrategy<'a, T::Nod if self.last_error_index == recognizer.get_input_stream_mut().index() && self.last_error_states.is_some() && self - .last_error_states - .as_ref() - .unwrap() - .contains(recognizer.get_state()) + .last_error_states + .as_ref() + .unwrap() + .contains(recognizer.get_state()) { recognizer.consume(self) } @@ -464,7 +464,7 @@ pub struct BailErrorStrategy<'input, Ctx: ParserNodeType<'input>>( impl<'input, Ctx: ParserNodeType<'input>> BailErrorStrategy<'input, Ctx> { pub fn new() -> Self { Self(DefaultErrorStrategy::new()) } - fn process_error>( + fn process_error>( &self, recognizer: &mut T, e: &ANTLRError, diff --git a/src/errors.rs b/src/errors.rs index d318f8f..e2fdb81 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -1,8 +1,8 @@ use std::borrow::Borrow; use std::error::Error; -use std::fmt::{Debug, Display}; use std::fmt; use std::fmt::Formatter; +use std::fmt::{Debug, Display}; use std::ops::Deref; use std::rc::Rc; @@ -56,7 +56,9 @@ pub enum ANTLRError { impl Clone for ANTLRError { fn clone(&self) -> Self { match self { - ANTLRError::LexerNoAltError { start_index } => ANTLRError::LexerNoAltError { start_index: *start_index }, + ANTLRError::LexerNoAltError { start_index } => 
ANTLRError::LexerNoAltError { + start_index: *start_index, + }, ANTLRError::NoAltError(e) => ANTLRError::NoAltError(e.clone()), ANTLRError::InputMismatchError(e) => ANTLRError::InputMismatchError(e.clone()), ANTLRError::PredicateError(e) => ANTLRError::PredicateError(e.clone()), @@ -68,9 +70,7 @@ impl Clone for ANTLRError { } impl Display for ANTLRError { - fn fmt(&self, _f: &mut Formatter) -> fmt::Result { - <Self as Debug>::fmt(self, _f) - } + fn fmt(&self, _f: &mut Formatter) -> fmt::Result { <Self as Debug>::fmt(self, _f) } } impl Error for ANTLRError { @@ -78,7 +78,7 @@ impl Error for ANTLRError { match self { ANTLRError::FallThrough(x) => Some(x.as_ref()), ANTLRError::OtherError(x) => Some(x.as_ref()), - _ => None + _ => None, } } } @@ -89,7 +89,7 @@ impl RecognitionError for ANTLRError { ANTLRError::NoAltError(e) => &e.base.offending_token, ANTLRError::InputMismatchError(e) => &e.base.offending_token, ANTLRError::PredicateError(e) => &e.base.offending_token, - _ => return None + _ => return None, }) } } @@ -104,7 +104,7 @@ impl RecognitionError for ANTLRError { pub trait RecognitionError: Error { fn get_offending_token(&self) -> Option<&OwningToken>; fn get_message(&self) -> String { self.to_string() } -// fn get_input_stream(&self) -> &IntStream; + // fn get_input_stream(&self) -> &IntStream; } #[derive(Debug, Clone)] @@ -113,14 +113,15 @@ pub struct BaseRecognitionError { // recognizer: Box, pub offending_token: OwningToken, pub offending_state: isize, - states_stack:Vec<isize> - // ctx: Rc - // input: Box, + states_stack: Vec<isize>, // ctx: Rc + // input: Box } impl BaseRecognitionError { pub fn get_expected_tokens<'a, T: Parser<'a>>(&self, recognizer: &T) -> IntervalSet { - recognizer.get_interpreter().atn() + recognizer + .get_interpreter() + .atn() .get_expected_tokens(self.offending_state, self.states_stack.iter().copied()) } @@ -130,7 +131,7 @@ impl BaseRecognitionError { offending_token: recog.get_current_token().borrow().to_owned(), offending_state: recog.get_state(), // ctx: recog.get_parser_rule_context().clone(), - states_stack: states_stack(recog.get_parser_rule_context().clone()).collect() + states_stack: states_stack(recog.get_parser_rule_context().clone()).collect(), } } } @@ -146,7 +147,7 @@ pub struct LexerNoViableAltError { pub struct NoViableAltError { pub base: BaseRecognitionError, pub start_token: OwningToken, -// ctx: Rc, + // ctx: Rc, // dead_end_configs: BaseATNConfigSet, } @@ -158,28 +159,30 @@ impl NoViableAltError { offending_token: recog.get_current_token().borrow().to_owned(), offending_state: recog.get_state(), // ctx: recog.get_parser_rule_context().clone(), - states_stack: states_stack(recog.get_parser_rule_context().clone()).collect() + states_stack: states_stack(recog.get_parser_rule_context().clone()).collect(), }, start_token: recog.get_current_token().borrow().to_owned(), -// ctx: recog.get_parser_rule_context().clone() + // ctx: recog.get_parser_rule_context().clone() } } - pub fn new_full<'a, T: Parser<'a>>(recog: &mut T, start_token: OwningToken, offending_token: OwningToken) -> NoViableAltError { + pub fn new_full<'a, T: Parser<'a>>( + recog: &mut T, + start_token: OwningToken, + offending_token: OwningToken, + ) -> NoViableAltError { Self { base: BaseRecognitionError { message: "".to_string(), offending_token, offending_state: recog.get_state(), - states_stack: states_stack(recog.get_parser_rule_context().clone()).collect() - // ctx: recog.get_parser_rule_context().clone(), + states_stack: states_stack(recog.get_parser_rule_context().clone()).collect(), // ctx:
recog.get_parser_rule_context().clone(), }, start_token, -// ctx + // ctx } } } - #[derive(Debug, Clone)] pub struct InputMisMatchError { pub(crate) base: BaseRecognitionError, @@ -192,7 +195,11 @@ impl InputMisMatchError { } } - pub fn with_state<'a, T: Parser<'a>>(recognizer: &mut T, offending_state: isize, ctx: Rc<<T::Node as ParserNodeType<'a>>::Type>) -> InputMisMatchError { + pub fn with_state<'a, T: Parser<'a>>( + recognizer: &mut T, + offending_state: isize, + ctx: Rc<<T::Node as ParserNodeType<'a>>::Type>, + ) -> InputMisMatchError { let mut a = Self::new(recognizer); // a.base.ctx = ctx; a.base.offending_state = offending_state; @@ -212,10 +219,15 @@ pub struct FailedPredicateError { } impl FailedPredicateError { - pub fn new<'a, T: Parser<'a>>(recog: &mut T, predicate: Option<String>, msg: Option<String>) -> ANTLRError { + pub fn new<'a, T: Parser<'a>>( + recog: &mut T, + predicate: Option<String>, + msg: Option<String>, + ) -> ANTLRError { let tr = recog.get_interpreter().atn() - .states[recog.get_state() as usize] - .get_transitions().first().unwrap(); + let tr = recog.get_interpreter().atn().states[recog.get_state() as usize] + .get_transitions() + .first() + .unwrap(); let (rule_index, predicate_index) = if tr.get_serialization_type() == TRANSITION_PREDICATE { let pr = tr.deref().cast::<PredicateTransition>(); (pr.rule_index, pr.pred_index) } else { (0, 0) }; ANTLRError::PredicateError(FailedPredicateError { base: BaseRecognitionError { - message: msg.unwrap_or_else(|| format!("failed predicate: {}", predicate.as_deref().unwrap_or("None"))), + message: msg.unwrap_or_else(|| { + format!( + "failed predicate: {}", + predicate.as_deref().unwrap_or("None") + ) + }), offending_token: recog.get_current_token().borrow().to_owned(), offending_state: recog.get_state(), - states_stack: states_stack(recog.get_parser_rule_context().clone()).collect() - // ctx: recog.get_parser_rule_context().clone() + states_stack: states_stack(recog.get_parser_rule_context().clone()).collect(), // ctx: recog.get_parser_rule_context().clone() }, rule_index, predicate_index, diff --git a/src/input_stream.rs b/src/input_stream.rs index bde0801..9e6c908 100644 --- a/src/input_stream.rs +++ b/src/input_stream.rs @@ -12,7 +12,7 @@ use std::str::{CharIndices, Chars}; use crate::char_stream::{CharStream, InputData}; use crate::errors::ANTLRError; -use crate::int_stream::{EOF, IntStream}; +use crate::int_stream::{IntStream, EOF}; use crate::interval_set::Interval; use crate::token::Token; @@ -36,10 +36,10 @@ impl<'a, T: From<&'a str>> CharStream<T> for InputStream<'a, str> { fn get_text(&self, start: isize, stop: isize) -> T { self.get_text_inner(start, stop).into() } } -pub type ByteCharStream<'a> = InputStream<'a,[u8]>; -pub type CodePoint8BitCharStream<'a> = InputStream<'a,[u8]>; -pub type CodePoint16BitCharStream<'a> = InputStream<'a,[u16]>; -pub type CodePoint32BitCharStream<'a> = InputStream<'a,[u32]>; +pub type ByteCharStream<'a> = InputStream<'a, [u8]>; +pub type CodePoint8BitCharStream<'a> = InputStream<'a, [u8]>; +pub type CodePoint16BitCharStream<'a> = InputStream<'a, [u16]>; +pub type CodePoint32BitCharStream<'a> = InputStream<'a, [u32]>; impl<'a, T> CharStream<&'a [T]> for InputStream<'a, [T]> where @@ -49,24 +49,21 @@ impl<'a, T> CharStream<String> for InputStream<'a, [T]> - where - [T]: InputData, +where + [T]: InputData, { - fn get_text(&self, a: isize, b: isize) -> String { - self.get_text_inner(a, b).to_display() - } + fn get_text(&self, a: isize, b: isize) -> String { self.get_text_inner(a, b).to_display() } } -impl<'a,'b, T> CharStream<Cow<'b,str>> for InputStream<'a, [T]> - where -
[T]: InputData, +impl<'a, 'b, T> CharStream<Cow<'b, str>> for InputStream<'a, [T]> +where + [T]: InputData, { - fn get_text(&self, a: isize, b: isize) -> Cow<'b,str> { + fn get_text(&self, a: isize, b: isize) -> Cow<'b, str> { self.get_text_inner(a, b).to_display().into() } } - impl<'a, Data> InputStream<'a, Data> where Data: InputData + ?Sized, @@ -153,7 +150,7 @@ mod test { use std::ops::Deref; use crate::char_stream::CharStream; - use crate::int_stream::{EOF, IntStream}; + use crate::int_stream::{IntStream, EOF}; use super::InputStream; diff --git a/src/int_stream.rs b/src/int_stream.rs index 7ca1031..29398bc 100644 --- a/src/int_stream.rs +++ b/src/int_stream.rs @@ -24,7 +24,7 @@ impl<'a, T: IntStream> Iterator for IterWrapper<'a, T> { self.0.consume(); match result { EOF => None, - x => Some(x) + x => Some(x), } } } diff --git a/src/interval_set.rs b/src/interval_set.rs index 3ad4f8c..dd5e773 100644 --- a/src/interval_set.rs +++ b/src/interval_set.rs @@ -3,7 +3,7 @@ use std::borrow::Cow::Borrowed; use std::cmp::{max, min, Ordering}; use crate::token::{TOKEN_EOF, TOKEN_EPSILON}; -use crate::vocabulary::{DUMMY_VOCAB, Vocabulary}; +use crate::vocabulary::{Vocabulary, DUMMY_VOCAB}; #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub struct Interval { @@ -15,17 +15,11 @@ pub const INVALID: Interval = Interval { a: -1, b: -2 }; impl Interval { /* stop is not included! */ - fn new(a: isize, b: isize) -> Interval { - Interval { a, b } - } + fn new(a: isize, b: isize) -> Interval { Interval { a, b } } - fn contains(&self, _item: isize) -> bool { - unimplemented!() - } + fn contains(&self, _item: isize) -> bool { unimplemented!() } - fn length(&self) -> isize { - self.b - self.a - } + fn length(&self) -> isize { self.b - self.a } fn union(&self, another: &Interval) -> Interval { Interval { @@ -45,14 +39,10 @@ impl Interval { } /** Does self.a start after other.b? May or may not be disjoint */ - pub fn startsAfter(&self, other: &Interval) -> bool { - return self.a > other.a; - } + pub fn startsAfter(&self, other: &Interval) -> bool { return self.a > other.a; } /** Does self start completely after other? Disjoint */ - pub fn startsAfterDisjoint(&self, other: &Interval) -> bool { - return self.a > other.b; - } + pub fn startsAfterDisjoint(&self, other: &Interval) -> bool { return self.a > other.b; } /** Does self start after other?
NonDisjoint */ pub fn startsAfterNonDisjoint(&self, other: &Interval) -> bool { @@ -98,17 +88,11 @@ impl IntervalSet { } } - pub fn get_min(&self) -> Option<isize> { - self.intervals.first().map(|x| x.a) - } + pub fn get_min(&self) -> Option<isize> { self.intervals.first().map(|x| x.a) } - pub fn add_one(&mut self, _v: isize) { - self.add_range(_v, _v) - } + pub fn add_one(&mut self, _v: isize) { self.add_range(_v, _v) } - pub fn add_range(&mut self, l: isize, h: isize) { - self.add_interval(Interval { a: l, b: h }) - } + pub fn add_range(&mut self, l: isize, h: isize) { self.add_interval(Interval { a: l, b: h }) } pub fn add_interval(&mut self, added: Interval) { if added.length() < 0 { @@ -171,20 +155,24 @@ impl IntervalSet { if right_interval.b < result_interval.a { right_i += 1; - continue + continue; } if right_interval.a > result_interval.b { result_i += 1; - continue + continue; } let before_curr = if right_interval.a > result_interval.a { Some(Interval::new(result_interval.a, right_interval.a - 1)) - } else { None }; + } else { + None + }; let after_curr = if right_interval.b < result_interval.b { Some(Interval::new(right_interval.b + 1, result_interval.b)) - } else { None }; + } else { + None + }; match (before_curr, after_curr) { (Some(before_curr), Some(after_curr)) => { @@ -201,11 +189,13 @@ impl IntervalSet { result.intervals[result_i] = after_curr; right_i += 1; } - (None, None) => { result.intervals.remove(result_i); } + (None, None) => { + result.intervals.remove(result_i); + } } } -// return result; + // return result; } pub fn complement(&self, start: isize, stop: isize) -> IntervalSet { @@ -216,27 +206,37 @@ impl IntervalSet { } pub fn contains(&self, _item: isize) -> bool { - self.intervals.binary_search_by(|x| { - if _item < x.a { return Ordering::Greater; } - if _item > x.b { return Ordering::Less; } - Ordering::Equal - }).is_ok() + self.intervals + .binary_search_by(|x| { + if _item < x.a { + return Ordering::Greater; + } + if _item > x.b { + return Ordering::Less; + } + Ordering::Equal + }) + .is_ok() } fn length(&self) -> isize { - self.intervals.iter().fold(0, |acc, it| acc + it.b - it.a + 1) + self.intervals + .iter() + .fold(0, |acc, it| acc + it.b - it.a + 1) } - fn remove_range(&self, _v: &Interval) { - unimplemented!() - } + fn remove_range(&self, _v: &Interval) { unimplemented!() } pub fn remove_one(&mut self, el: isize) { - if self.read_only { panic!("can't alter readonly IntervalSet") } + if self.read_only { + panic!("can't alter readonly IntervalSet") + } for i in 0..self.intervals.len() { let int = &mut self.intervals[i]; - if el < int.a { break } + if el < int.a { + break; + } if el == int.a && el == int.b { self.intervals.remove(i); @@ -245,12 +245,12 @@ impl IntervalSet { if el == int.a { int.a += 1; - break + break; } if el == int.b { int.b -= 1; - break + break; } if el > int.a && el < int.b { @@ -261,26 +261,24 @@ impl IntervalSet { } } -// fn String(&self) -> String { -// unimplemented!() -// } -// -// fn String_verbose( -// &self, -// _literalNames: Vec<String>, -// _symbolicNames: Vec<String>, -// _elemsAreChar: bool, -// ) -> String { -// unimplemented!() -// } -// -// fn to_char_String(&self) -> String { -// unimplemented!() -// } -// - pub fn to_index_string(&self) -> String { - self.to_token_string(&DUMMY_VOCAB) - } + // fn String(&self) -> String { + // unimplemented!() + // } + // + // fn String_verbose( + // &self, + // _literalNames: Vec<String>, + // _symbolicNames: Vec<String>, + // _elemsAreChar: bool, + // ) -> String { + // unimplemented!() + // } + // + // fn
to_char_String(&self) -> String { + // unimplemented!() + // } + // + pub fn to_index_string(&self) -> String { self.to_token_string(&DUMMY_VOCAB) } pub fn to_token_string(&self, vocabulary: &dyn Vocabulary) -> String { if self.intervals.is_empty() { @@ -296,7 +294,9 @@ impl IntervalSet { buf += self.element_name(vocabulary, int.a).as_ref(); } else { for i in int.a..(int.b + 1) { - if i > int.a { buf += ", "; } + if i > int.a { + buf += ", "; + } buf += self.element_name(vocabulary, i).as_ref(); } } @@ -312,10 +312,7 @@ impl IntervalSet { return buf; } - fn element_name<'a>(&self, - vocabulary: &'a dyn Vocabulary, - a: isize, - ) -> Cow<'a, str> { + fn element_name<'a>(&self, vocabulary: &'a dyn Vocabulary, a: isize) -> Cow<'a, str> { if a == TOKEN_EOF { Borrowed("") } else if a == TOKEN_EPSILON { @@ -346,7 +343,10 @@ mod test { let mut set = IntervalSet::new(); set.add_range(1, 3); set.add_range(5, 6); - assert_eq!(&set.intervals, &[Interval { a: 1, b: 3 }, Interval { a: 5, b: 6 }]); + assert_eq!( + &set.intervals, + &[Interval { a: 1, b: 3 }, Interval { a: 5, b: 6 }] + ); set.add_range(3, 4); assert_eq!(&set.intervals, &[Interval { a: 1, b: 6 }]); } @@ -356,7 +356,10 @@ mod test { let mut set = IntervalSet::new(); set.add_range(1, 5); set.remove_one(3); - assert_eq!(&set.intervals, &[Interval { a: 1, b: 2 }, Interval { a: 4, b: 5 }]); + assert_eq!( + &set.intervals, + &[Interval { a: 1, b: 2 }, Interval { a: 4, b: 5 }] + ); } #[test] @@ -367,6 +370,9 @@ mod test { let mut set2 = IntervalSet::new(); set2.add_range(2, 4); set1.substract(&set2); - assert_eq!(&set1.intervals, &[Interval { a: 1, b: 1 }, Interval { a: 5, b: 5 }]); + assert_eq!( + &set1.intervals, + &[Interval { a: 1, b: 1 }, Interval { a: 5, b: 5 }] + ); } } diff --git a/src/lexer.rs b/src/lexer.rs index 2a4ed80..ea66da9 100644 --- a/src/lexer.rs +++ b/src/lexer.rs @@ -1,5 +1,5 @@ -use std::borrow::{Borrow, Cow}; use std::borrow::Cow::{Borrowed, Owned}; +use std::borrow::{Borrow, Cow}; use std::cell::{Cell, RefCell}; use std::ops::DerefMut; use std::rc::Rc; @@ -18,8 +18,8 @@ use crate::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use crate::token_source::TokenSource; pub trait Lexer<'input>: -TokenSource<'input> -+ Recognizer<'input, Node=EmptyContextType<'input, <Self as TokenAware<'input>>::TF>> + TokenSource<'input> + + Recognizer<'input, Node = EmptyContextType<'input, <Self as TokenAware<'input>>::TF>> { /// Sets channel where current token will be pushed /// @@ -95,10 +95,10 @@ pub(crate) struct LexerPosition { } impl<'input, T, Input, TF> Recognizer<'input> for BaseLexer<'input, T, Input, TF> - where - T: LexerRecog<'input, Self> + 'static, - Input: CharStream<TF::From>, - TF: TokenFactory<'input>, +where + T: LexerRecog<'input, Self> + 'static, + Input: CharStream<TF::From>, + TF: TokenFactory<'input>, { type Node = EmptyContextType<'input, TF>; @@ -131,10 +131,10 @@ pub const LEXER_MIN_CHAR_VALUE: isize = 0x0000; pub const LEXER_MAX_CHAR_VALUE: isize = 0x10FFFF; impl<'input, T, Input, TF> BaseLexer<'input, T, Input, TF> - where - T: LexerRecog<'input, Self> + 'static, - Input: CharStream<TF::From>, - TF: TokenFactory<'input>, +where + T: LexerRecog<'input, Self> + 'static, + Input: CharStream<TF::From>, + TF: TokenFactory<'input>, { fn emit_token(&mut self, token: TF::Tok) { self.token = Some(token); } @@ -173,12 +173,21 @@ impl<'input, T, Input, TF> BaseLexer<'input, T, Input, TF> pub fn get_char_index(&self) -> isize { self.input.as_ref().unwrap().index() } /// Current token text - pub fn get_text<'a>(&'a self) -> Cow<'a,TF::Data> where 'input:'a { - self.text.as_ref().map(|it|
Borrowed(it.borrow())) + pub fn get_text<'a>(&'a self) -> Cow<'a, TF::Data> + where + 'input: 'a, + { + self.text + .as_ref() + .map(|it| Borrowed(it.borrow())) // .unwrap_or("") - .unwrap_or_else( - || self.input.as_ref().unwrap().get_text(self.token_start_char_index, self.get_char_index() - 1).into() - ) + .unwrap_or_else(|| { + self.input + .as_ref() + .unwrap() + .get_text(self.token_start_char_index, self.get_char_index() - 1) + .into() + }) } /// Used from lexer actions to override text of the token that will be emitted next @@ -230,19 +239,19 @@ impl<'input, T, Input, TF> BaseLexer<'input, T, Input, TF> } impl<'input, T, Input, TF> TokenAware<'input> for BaseLexer<'input, T, Input, TF> - where - T: LexerRecog<'input, Self> + 'static, - Input: CharStream<TF::From>, - TF: TokenFactory<'input>, +where + T: LexerRecog<'input, Self> + 'static, + Input: CharStream<TF::From>, + TF: TokenFactory<'input>, { type TF = TF; } impl<'input, T, Input, TF> TokenSource<'input> for BaseLexer<'input, T, Input, TF> - where - T: LexerRecog<'input, Self> + 'static, - Input: CharStream<TF::From>, - TF: TokenFactory<'input>, +where + T: LexerRecog<'input, Self> + 'static, + Input: CharStream<TF::From>, + TF: TokenFactory<'input>, { #[inline] #[allow(unused_labels)] @@ -377,10 +386,10 @@ fn notify_listeners<'input, T, Input, TF>( } impl<'input, T, Input, TF> Lexer<'input> for BaseLexer<'input, T, Input, TF> - where - T: LexerRecog<'input, Self> + 'static, - Input: CharStream<TF::From>, - TF: TokenFactory<'input>, +where + T: LexerRecog<'input, Self> + 'static, + Input: CharStream<TF::From>, + TF: TokenFactory<'input>, { fn set_channel(&mut self, v: isize) { self.channel = v; } diff --git a/src/lexer_action.rs b/src/lexer_action.rs index 191521d..f0a5460 100644 --- a/src/lexer_action.rs +++ b/src/lexer_action.rs @@ -33,25 +33,30 @@ pub(crate) enum LexerAction { impl LexerAction { // fn get_action_type(&self) -> isize { -// unimplemented!() -//// unsafe {discriminant_value(self)} as isize -// } + // unimplemented!() + //// unsafe {discriminant_value(self)} as isize + // } pub fn is_position_dependent(&self) -> bool { match self { - LexerAction::LexerCustomAction { .. } | - LexerAction::LexerIndexedCustomAction { .. } => true, - _ => false + LexerAction::LexerCustomAction { .. } + | LexerAction::LexerIndexedCustomAction { ..
} => true, + _ => false, } } pub(crate) fn execute<'input, T: Lexer<'input>>(&self, lexer: &mut T) { match self { &LexerAction::LexerChannelAction(channel) => lexer.set_channel(channel), - &LexerAction::LexerCustomAction { rule_index, action_index } => { + &LexerAction::LexerCustomAction { + rule_index, + action_index, + } => { lexer.action(&*empty_ctx::<T::TF>(), rule_index, action_index); - }, + } &LexerAction::LexerModeAction(mode) => lexer.set_mode(mode as usize), &LexerAction::LexerMoreAction => lexer.more(), - &LexerAction::LexerPopModeAction => { lexer.pop_mode(); }, + &LexerAction::LexerPopModeAction => { + lexer.pop_mode(); + } &LexerAction::LexerPushModeAction(mode) => lexer.push_mode(mode as usize), &LexerAction::LexerSkipAction => lexer.skip(), &LexerAction::LexerTypeAction(ty) => lexer.set_type(ty), @@ -59,4 +64,3 @@ impl LexerAction { } } } - diff --git a/src/lexer_action_executor.rs b/src/lexer_action_executor.rs index 81af255..f124efc 100644 --- a/src/lexer_action_executor.rs +++ b/src/lexer_action_executor.rs @@ -14,29 +14,32 @@ pub(crate) struct LexerActionExecutor { } impl Hash for LexerActionExecutor { - fn hash<H: Hasher>(&self, state: &mut H) { - state.write_u64(self.cached_hash) - } + fn hash<H: Hasher>(&self, state: &mut H) { state.write_u64(self.cached_hash) } } impl LexerActionExecutor { pub(crate) fn new(lexer_actions: Vec<LexerAction>) -> LexerActionExecutor { -// let mut hasher = ; - let cached_hash = lexer_actions.iter().fold( - MurmurHasher::default(), - |mut acc, x| { + // let mut hasher = ; + let cached_hash = lexer_actions + .iter() + .fold(MurmurHasher::default(), |mut acc, x| { x.hash(&mut acc); acc - }, - ).finish(); + }) + .finish(); LexerActionExecutor { lexer_actions, cached_hash, } } - pub(crate) fn new_copy_append(old: Option<&Self>, lexer_action: LexerAction) -> LexerActionExecutor { - let mut new = old.cloned().unwrap_or_else(|| LexerActionExecutor::new(Vec::new())); + pub(crate) fn new_copy_append( + old: Option<&Self>, + lexer_action: LexerAction, + ) -> LexerActionExecutor { + let mut new = old + .cloned() + .unwrap_or_else(|| LexerActionExecutor::new(Vec::new())); new.lexer_actions.push(lexer_action); new } @@ -44,9 +47,17 @@ impl LexerActionExecutor { pub fn fix_offset_before_match(mut self, offset: isize) -> LexerActionExecutor { for action in self.lexer_actions.iter_mut() { match action { - LexerAction::LexerIndexedCustomAction { .. } => {}, - _ => if action.is_position_dependent() { - mem::replace(action, LexerIndexedCustomAction { offset, action: Box::new(action.clone()) }); + LexerAction::LexerIndexedCustomAction { ..
} => {} + _ => { + if action.is_position_dependent() { + mem::replace( + action, + LexerIndexedCustomAction { + offset, + action: Box::new(action.clone()), + }, + ); + } } } } @@ -72,7 +83,5 @@ impl LexerActionExecutor { } } -// fn hash(&self) -> int { unimplemented!() } - + // fn hash(&self) -> int { unimplemented!() } } - \ No newline at end of file diff --git a/src/lexer_atn_simulator.rs b/src/lexer_atn_simulator.rs index 9df033a..5bfd9b9 100644 --- a/src/lexer_atn_simulator.rs +++ b/src/lexer_atn_simulator.rs @@ -10,34 +10,38 @@ use crate::atn::ATN; use crate::atn_config::{ATNConfig, ATNConfigType}; use crate::atn_config_set::ATNConfigSet; use crate::atn_simulator::{BaseATNSimulator, IATNSimulator}; -use crate::atn_state::{ATNState, ATNStateType}; use crate::atn_state::ATNStateType::RuleStopState; +use crate::atn_state::{ATNState, ATNStateType}; use crate::char_stream::CharStream; use crate::dfa::DFA; use crate::dfa_state::{DFAState, DFAStateRef}; use crate::errors::ANTLRError; use crate::errors::ANTLRError::LexerNoAltError; -use crate::int_stream::{EOF, IntStream}; -use crate::lexer::{BaseLexer, Lexer, LEXER_MAX_CHAR_VALUE, LEXER_MIN_CHAR_VALUE, LexerPosition, LexerRecog}; +use crate::int_stream::{IntStream, EOF}; +use crate::lexer::{ + BaseLexer, Lexer, LexerPosition, LexerRecog, LEXER_MAX_CHAR_VALUE, LEXER_MIN_CHAR_VALUE, +}; use crate::lexer_action_executor::LexerActionExecutor; use crate::parser_rule_context::empty_ctx; -use crate::prediction_context::{PREDICTION_CONTEXT_EMPTY_RETURN_STATE, PredictionContext, - PredictionContextCache}; use crate::prediction_context::EMPTY_PREDICTION_CONTEXT; +use crate::prediction_context::{ + PredictionContext, PredictionContextCache, PREDICTION_CONTEXT_EMPTY_RETURN_STATE, +}; use crate::recognizer::Recognizer; use crate::token::TOKEN_EOF; use crate::token_source::TokenSource; -use crate::transition::{ActionTransition, PredicateTransition, RuleTransition, Transition, TransitionType}; +use crate::transition::{ + ActionTransition, PredicateTransition, RuleTransition, Transition, TransitionType, +}; pub const ERROR_DFA_STATE_REF: DFAStateRef = usize::MAX; pub trait ILexerATNSimulator: IATNSimulator { - fn reset(&mut self); fn match_token<'input>( &mut self, mode: usize, -// input:&mut dyn CharStream, + // input:&mut dyn CharStream, lexer: &mut impl Lexer<'input>, ) -> Result<isize, ANTLRError>; fn get_char_position_in_line(&self) -> isize; @@ -64,24 +68,23 @@ pub struct LexerATNSimulator { } impl ILexerATNSimulator for LexerATNSimulator { - fn reset(&mut self) { - self.prev_accept.reset() - } + fn reset(&mut self) { self.prev_accept.reset() } fn match_token<'input>( &mut self, mode: usize, -// input:&mut dyn CharStream, + // input:&mut dyn CharStream, lexer: &mut impl Lexer<'input>, ) -> Result<isize, ANTLRError> { self.mode = mode; let mark = lexer.get_input_stream().unwrap().mark(); -// println!("start matching on mode {}",mode); + // println!("start matching on mode {}",mode); let result = (|| { self.start_index = lexer.get_input_stream().unwrap().index(); self.prev_accept.reset(); let temp = self.decision_to_dfa(); - let dfa = temp.get(mode) + let dfa = temp + .get(mode) .ok_or_else(|| ANTLRError::IllegalStateError("invalid mode".into()))?; let s0 = dfa.s0.read().unwrap().as_ref().copied(); @@ -95,21 +98,15 @@ impl ILexerATNSimulator for LexerATNSimulator { result } - fn get_char_position_in_line(&self) -> isize { - self.current_pos.char_position_in_line.get() - } + fn get_char_position_in_line(&self) -> isize { self.current_pos.char_position_in_line.get() } fn
set_char_position_in_line(&mut self, column: isize) { self.current_pos.char_position_in_line.set(column) } - fn get_line(&self) -> isize { - self.current_pos.line.get() - } + fn get_line(&self) -> isize { self.current_pos.line.get() } - fn set_line(&mut self, line: isize) { - self.current_pos.char_position_in_line.set(line) - } + fn set_line(&mut self, line: isize) { self.current_pos.char_position_in_line.set(line) } fn consume(&self, _input: &mut dyn IntStream) { let ch = _input.la(1); @@ -122,23 +119,17 @@ impl ILexerATNSimulator for LexerATNSimulator { _input.consume(); } -// fn get_recog(&self) -> Rc>>{ -// Rc::clone(&self.recog) -// } + // fn get_recog(&self) -> Rc>>{ + // Rc::clone(&self.recog) + // } } impl IATNSimulator for LexerATNSimulator { - fn shared_context_cache(&self) -> &PredictionContextCache { - self.base.shared_context_cache() - } + fn shared_context_cache(&self) -> &PredictionContextCache { self.base.shared_context_cache() } - fn atn(&self) -> &ATN { - self.base.atn() - } + fn atn(&self) -> &ATN { self.base.atn() } - fn decision_to_dfa(&self) -> &Vec<DFA> { - self.base.decision_to_dfa() - } + fn decision_to_dfa(&self) -> &Vec<DFA> { self.base.decision_to_dfa() } } pub const MIN_DFA_EDGE: isize = 0; @@ -151,23 +142,31 @@ impl LexerATNSimulator { shared_context_cache: Arc<PredictionContextCache>, ) -> LexerATNSimulator { LexerATNSimulator { - base: BaseATNSimulator::new_base_atnsimulator(atn, decision_to_dfa, shared_context_cache), + base: BaseATNSimulator::new_base_atnsimulator( + atn, + decision_to_dfa, + shared_context_cache, + ), start_index: 0, - current_pos: Rc::new(LexerPosition { line: Cell::new(0), char_position_in_line: Cell::new(0) }), + current_pos: Rc::new(LexerPosition { + line: Cell::new(0), + char_position_in_line: Cell::new(0), + }), mode: 0, prev_accept: SimState::new(), lexer_action_executor: None, } } -// fn copy_state(&self, _simulator: &mut LexerATNSimulator) { -// unimplemented!() -// } + // fn copy_state(&self, _simulator: &mut LexerATNSimulator) { + // unimplemented!() + // } fn match_atn<'input>(&mut self, lexer: &mut impl Lexer<'input>) -> Result<isize, ANTLRError> { // let start_state = self.atn().mode_to_start_state.get(self.mode as usize).ok_or(ANTLRError::IllegalStateError("invalid mode".into()))?; let atn = self.atn(); - let start_state = *atn.mode_to_start_state + let start_state = *atn + .mode_to_start_state .get(self.mode) .ok_or_else(|| ANTLRError::IllegalStateError("invalid mode".into()))?; @@ -186,7 +185,7 @@ impl LexerATNSimulator { fn exec_atn<'input>( &mut self, -// input: &'a mut dyn CharStream, + // input: &'a mut dyn CharStream, ds0: DFAStateRef, lexer: &mut impl Lexer<'input>, ) -> Result<isize, ANTLRError> { @@ -201,11 +200,10 @@ impl LexerATNSimulator { let target = target.unwrap_or_else(|| self.compute_target_state(s, symbol, lexer)); // let target = dfastates.deref().get(s).unwrap() ;x - if target == ERROR_DFA_STATE_REF { break; } -// println!(" --- target computed {:?}", self.get_dfa().states.read().unwrap()[target].configs.configs.iter().map(|it|it.get_state()).collect::<Vec<_>>()); + // println!(" --- target computed {:?}", self.get_dfa().states.read().unwrap()[target].configs.configs.iter().map(|it|it.get_state()).collect::<Vec<_>>()); if symbol != EOF { self.consume(lexer.get_input_stream().unwrap()) @@ -233,8 +231,10 @@ impl LexerATNSimulator { self.get_dfa() .states - .read().unwrap() - .get(_s).unwrap() + .read() + .unwrap() + .get(_s) + .unwrap() .edges .get((t - MIN_DFA_EDGE) as usize) .and_then(|x| match x { @@ -244,20 +244,24 @@ impl LexerATNSimulator { .copied() } - fn
compute_target_state<'input>(&self, _s: DFAStateRef, _t: isize, lexer: &mut impl Lexer<'input>) -> DFAStateRef { + fn compute_target_state<'input>( + &self, + _s: DFAStateRef, + _t: isize, + lexer: &mut impl Lexer<'input>, + ) -> DFAStateRef { let states = self.get_dfa().states.read().unwrap(); let mut reach = ATNConfigSet::new_ordered(); self.get_reachable_config_set( &states, -// _input, + // _input, &states.get(_s).unwrap().configs, &mut reach, _t, lexer, ); -// println!(" --- target computed {:?}", reach.configs.iter().map(|it|it.get_state()).collect::>()); - + // println!(" --- target computed {:?}", reach.configs.iter().map(|it|it.get_state()).collect::>()); drop(states); let mut states = self.get_dfa().states.write().unwrap(); @@ -275,7 +279,7 @@ impl LexerATNSimulator { let from = states.get_mut(_s).unwrap(); self.add_dfaedge(from, _t, to); } -// println!("target state computed from {:?} to {:?} on symbol {}", _s, to, char::try_from(_t as u32).unwrap()); + // println!("target state computed from {:?} to {:?} on symbol {}", _s, to, char::try_from(_t as u32).unwrap()); to // states.get(to).unwrap() @@ -284,21 +288,22 @@ impl LexerATNSimulator { fn get_reachable_config_set<'input, V>( &self, _states: &V, -// _input: &mut dyn CharStream, + // _input: &mut dyn CharStream, _closure: &ATNConfigSet, _reach: &mut ATNConfigSet, _t: isize, lexer: &mut impl Lexer<'input>, ) where - V: Deref>, + V: Deref>, { let mut skip_alt = 0; -// println!(" --- source {:?}", _closure.configs.iter().map(|it|it.get_state()).collect::>()); + // println!(" --- source {:?}", _closure.configs.iter().map(|it|it.get_state()).collect::>()); for config in _closure.get_items() { let current_alt_reached_accept_state = config.get_alt() == skip_alt; if current_alt_reached_accept_state { if let ATNConfigType::LexerATNConfig { - passed_through_non_greedy_decision: true, .. + passed_through_non_greedy_decision: true, + .. 
} = config.get_type() { continue; @@ -307,8 +312,11 @@ impl LexerATNSimulator { let atn_state = self.atn().states[config.get_state()].as_ref(); for tr in atn_state.get_transitions() { if let Some(target) = tr.get_reachable_target(_t) { - let exec = config.get_lexer_executor() - .map(|x| x.clone().fix_offset_before_match(lexer.get_input_stream().unwrap().index() - self.start_index)); + let exec = config.get_lexer_executor().map(|x| { + x.clone().fix_offset_before_match( + lexer.get_input_stream().unwrap().index() - self.start_index, + ) + }); let new = config.cloned_with_new_exec(self.atn().states[target].as_ref(), exec); if self.closure( @@ -327,25 +335,26 @@ impl LexerATNSimulator { } } -// fn get_reachable_target(&self, states: &T, _trans: &Transition, _t: isize) -> &ATNState -// where -// T: Deref>, -// { -// unimplemented!() -// } + // fn get_reachable_target(&self, states: &T, _trans: &Transition, _t: isize) -> &ATNState + // where + // T: Deref>, + // { + // unimplemented!() + // } - fn fail_or_accept<'input>(&mut self, _t: isize, lexer: &mut impl Lexer<'input>) -> Result { -// println!("fail_or_accept"); + fn fail_or_accept<'input>( + &mut self, + _t: isize, + lexer: &mut impl Lexer<'input>, + ) -> Result { + // println!("fail_or_accept"); if let Some(state) = self.prev_accept.dfa_state { -// let lexer_action_executor; + // let lexer_action_executor; let prediction = { - let dfa_state_prediction = &mut self.get_dfa() - .states - .write().unwrap() - [state]; -// println!("accepted, prediction = {}, on dfastate {}", dfa_state_prediction.prediction, dfa_state_prediction.state_number); -// lexer_action_executor = dfa_state_prediction.lexer_action_executor.clone(); -// let recog = self.recog.clone(); + let dfa_state_prediction = &mut self.get_dfa().states.write().unwrap()[state]; + // println!("accepted, prediction = {}, on dfastate {}", dfa_state_prediction.prediction, dfa_state_prediction.state_number); + // lexer_action_executor = dfa_state_prediction.lexer_action_executor.clone(); + // let recog = self.recog.clone(); if let Some(x) = dfa_state_prediction.lexer_action_executor.as_ref() { x.execute(lexer, self.start_index) } @@ -353,7 +362,7 @@ impl LexerATNSimulator { dfa_state_prediction.prediction }; self.accept(lexer.get_input_stream().unwrap()); -// self.lexer_action_executor = lexer_action_executor; + // self.lexer_action_executor = lexer_action_executor; Ok(prediction) } else { if _t == EOF && lexer.get_input_stream().unwrap().index() == self.start_index { @@ -368,10 +377,16 @@ impl LexerATNSimulator { fn accept<'input>(&mut self, input: &mut dyn IntStream) { input.seek(self.prev_accept.index); self.current_pos.line.set(self.prev_accept.line); - self.current_pos.char_position_in_line.set(self.prev_accept.column); + self.current_pos + .char_position_in_line + .set(self.prev_accept.column); } - fn compute_start_state<'input>(&self, _p: &dyn ATNState, lexer: &mut impl Lexer<'input>) -> Box { + fn compute_start_state<'input>( + &self, + _p: &dyn ATNState, + lexer: &mut impl Lexer<'input>, + ) -> Box { // let initial_context = &EMPTY_PREDICTION_CONTEXT; let mut config_set = ATNConfigSet::new_ordered(); @@ -382,14 +397,7 @@ impl LexerATNSimulator { (i + 1) as isize, EMPTY_PREDICTION_CONTEXT.clone(), ); - self.closure( - atn_config, - &mut config_set, - false, - false, - false, - lexer, - ); + self.closure(atn_config, &mut config_set, false, false, false, lexer); } Box::new(config_set) @@ -397,7 +405,7 @@ impl LexerATNSimulator { fn closure<'input>( &self, -// _input: &mut dyn 
CharStream, + // _input: &mut dyn CharStream, mut config: ATNConfig, _configs: &mut ATNConfigSet, mut _current_alt_reached_accept_state: bool, @@ -408,19 +416,18 @@ impl LexerATNSimulator { // let config = &config; let atn = self.atn(); let state = atn.states[config.get_state()].as_ref(); -// println!("closure called on state {} {:?}", state.get_state_number(), state.get_state_type()); + // println!("closure called on state {} {:?}", state.get_state_number(), state.get_state_type()); if let ATNStateType::RuleStopState {} = state.get_state_type() { -// println!("reached rulestopstate {}",state.get_state_number()); + // println!("reached rulestopstate {}",state.get_state_number()); if config.get_context().map(|x| x.has_empty_path()) != Some(false) { if config.get_context().map(|x| x.is_empty()) != Some(false) { _configs.add(Box::new(config)); return true; } else { - _configs.add(Box::new(config.cloned_with_new_ctx( - state, - Some(EMPTY_PREDICTION_CONTEXT.clone()), - ))); + _configs.add(Box::new( + config.cloned_with_new_ctx(state, Some(EMPTY_PREDICTION_CONTEXT.clone())), + )); _current_alt_reached_accept_state = true } } @@ -430,7 +437,8 @@ impl LexerATNSimulator { for i in 0..ctx.length() { if ctx.get_return_state(i) != PREDICTION_CONTEXT_EMPTY_RETURN_STATE { let new_ctx = ctx.get_parent(i).cloned(); - let return_state = self.atn().states[ctx.get_return_state(i) as usize].as_ref(); + let return_state = + self.atn().states[ctx.get_return_state(i) as usize].as_ref(); let next_config = config.cloned_with_new_ctx(return_state, new_ctx); _current_alt_reached_accept_state = self.closure( next_config, @@ -448,7 +456,11 @@ impl LexerATNSimulator { } if !state.has_epsilon_only_transitions() { - if let ATNConfigType::LexerATNConfig { passed_through_non_greedy_decision, .. } = config.config_type { + if let ATNConfigType::LexerATNConfig { + passed_through_non_greedy_decision, + .. + } = config.config_type + { if !_current_alt_reached_accept_state || !passed_through_non_greedy_decision { _configs.add(Box::new(config.clone())); } @@ -482,10 +494,9 @@ impl LexerATNSimulator { _current_alt_reached_accept_state } - fn get_epsilon_target<'input>( &self, -// _input: &mut dyn CharStream, + // _input: &mut dyn CharStream, _config: &mut ATNConfig, _trans: &dyn Transition, _configs: &mut ATNConfigSet, @@ -495,7 +506,7 @@ impl LexerATNSimulator { ) -> Option { let mut result = None; let target = self.atn().states.get(_trans.get_target()).unwrap().as_ref(); -// println!("epsilon target for {:?} is {:?}", _trans, target.get_state_type()); + // println!("epsilon target for {:?} is {:?}", _trans, target.get_state_type()); match _trans.get_serialization_type() { TransitionType::TRANSITION_EPSILON => { result = Some(_config.cloned(target)); @@ -519,26 +530,36 @@ impl LexerATNSimulator { TransitionType::TRANSITION_ACTION => { //println!("action transition"); if _config.get_context().map(|x| x.has_empty_path()) != Some(false) { - if let ATNConfigType::LexerATNConfig { lexer_action_executor, .. } = _config.get_type() { + if let ATNConfigType::LexerATNConfig { + lexer_action_executor, + .. 
+ } = _config.get_type() + { let tr = _trans.cast::(); - let lexer_action = self.atn().lexer_actions[tr.action_index as usize].clone(); + let lexer_action = + self.atn().lexer_actions[tr.action_index as usize].clone(); //dbg!(&lexer_action); - let lexer_action_executor = LexerActionExecutor::new_copy_append(lexer_action_executor.as_deref(), lexer_action); - result = Some(_config.cloned_with_new_exec(target, Some(lexer_action_executor))) + let lexer_action_executor = LexerActionExecutor::new_copy_append( + lexer_action_executor.as_deref(), + lexer_action, + ); + result = + Some(_config.cloned_with_new_exec(target, Some(lexer_action_executor))) } } else { result = Some(_config.cloned(target)); } } - TransitionType::TRANSITION_RANGE | - TransitionType::TRANSITION_SET | - TransitionType::TRANSITION_ATOM => + TransitionType::TRANSITION_RANGE + | TransitionType::TRANSITION_SET + | TransitionType::TRANSITION_ATOM => { if _treat_eofas_epsilon { if _trans.matches(EOF, LEXER_MIN_CHAR_VALUE, LEXER_MAX_CHAR_VALUE) { let target = self.atn().states[_trans.get_target()].as_ref(); result = Some(_config.cloned(target)); } - }, + } + } TransitionType::TRANSITION_WILDCARD => {} TransitionType::TRANSITION_NOTSET => {} TransitionType::TRANSITION_PRECEDENCE => { @@ -551,7 +572,7 @@ impl LexerATNSimulator { fn evaluate_predicate<'input, T: Lexer<'input>>( &self, -// input: &mut dyn CharStream, + // input: &mut dyn CharStream, rule_index: isize, pred_index: isize, speculative: bool, @@ -577,10 +598,13 @@ impl LexerATNSimulator { } fn capture_sim_state(&mut self, input: &dyn IntStream, dfa_state: DFAStateRef) -> bool { - if self.get_dfa() + if self + .get_dfa() .states - .read().unwrap() - .get(dfa_state).unwrap() + .read() + .unwrap() + .get(dfa_state) + .unwrap() .is_accept_state { self.prev_accept = SimState { @@ -600,29 +624,34 @@ impl LexerATNSimulator { } if _from.edges.len() < (MAX_DFA_EDGE - MIN_DFA_EDGE + 1) as usize { - _from.edges + _from + .edges .resize((MAX_DFA_EDGE - MIN_DFA_EDGE + 1) as usize, 0); } _from.edges[(t - MIN_DFA_EDGE) as usize] = _to; } fn add_dfastate(&self, states: &mut V, _configs: Box) -> DFAStateRef - where - V: DerefMut>, + where + V: DerefMut>, { assert!(!_configs.has_semantic_context()); let mut dfastate = DFAState::new_dfastate(usize::MAX, _configs); - let rule_index = dfastate.configs//_configs + let rule_index = dfastate + .configs //_configs .get_items() - .find(|c| - RuleStopState == *self.atn().states[c.get_state()].get_state_type() - ).map(|c| { - let rule_index = self.atn().states[c.get_state()].get_rule_index(); - - //println!("accepted rule {} on state {}",rule_index,c.get_state()); - (self.atn().rule_to_token_type[rule_index], - c.get_lexer_executor().map(LexerActionExecutor::clone).map(Box::new)) - }); + .find(|c| RuleStopState == *self.atn().states[c.get_state()].get_state_type()) + .map(|c| { + let rule_index = self.atn().states[c.get_state()].get_rule_index(); + + //println!("accepted rule {} on state {}",rule_index,c.get_state()); + ( + self.atn().rule_to_token_type[rule_index], + c.get_lexer_executor() + .map(LexerActionExecutor::clone) + .map(Box::new), + ) + }); if let Some((prediction, exec)) = rule_index { dfastate.prediction = prediction; @@ -632,7 +661,10 @@ impl LexerATNSimulator { let dfa = self.get_dfa(); let key = dfastate.default_hash(); - let dfastate_index = *dfa.states_map.write().unwrap() + let dfastate_index = *dfa + .states_map + .write() + .unwrap() .entry(key) .or_insert_with(|| { dfastate.state_number = states.deref().len(); @@ -642,7 +674,8 
@@ impl LexerATNSimulator { states.deref_mut().push(dfastate); vec![i] }) - .first().unwrap(); + .first() + .unwrap(); //println!("new DFA state {}", dfastate_index); @@ -650,21 +683,13 @@ impl LexerATNSimulator { dfastate_index } - pub fn get_dfa(&self) -> &DFA { - &self.decision_to_dfa()[self.mode] - } + pub fn get_dfa(&self) -> &DFA { &self.decision_to_dfa()[self.mode] } - pub fn get_dfa_for_mode(&self, mode: usize) -> &DFA { - &self.decision_to_dfa()[mode] - } + pub fn get_dfa_for_mode(&self, mode: usize) -> &DFA { &self.decision_to_dfa()[mode] } - fn get_token_name(&self, _tt: isize) -> String { - unimplemented!() - } + fn get_token_name(&self, _tt: isize) -> String { unimplemented!() } - fn reset_sim_state(_sim: &mut SimState) { - unimplemented!() - } + fn reset_sim_state(_sim: &mut SimState) { unimplemented!() } } pub struct SimState { diff --git a/src/lib.rs b/src/lib.rs index 5799815..8d80847 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -56,57 +56,57 @@ pub use parser::{BaseParser, ListenerId, Parser}; #[doc(inline)] pub use prediction_context::PredictionContextCache; -mod ll1_analyzer; -pub mod token_factory; -pub mod recognizer; +pub mod atn_config; +pub mod atn_simulator; pub mod int_stream; mod lexer_action; -pub mod atn_simulator; -pub mod atn_config; +mod ll1_analyzer; +pub mod recognizer; +pub mod token_factory; //pub mod tokenstream_rewriter; #[doc(hidden)] -pub mod semantic_context; -#[doc(hidden)] -pub mod dfa_state; +pub mod atn_deserialization_options; #[doc(hidden)] pub mod atn_state; +pub mod char_stream; +#[doc(hidden)] +pub mod dfa_state; +pub mod interval_set; pub mod parser_rule_context; mod prediction_context; -pub mod interval_set; -pub mod token_source; #[doc(hidden)] -pub mod atn_deserialization_options; +pub mod semantic_context; +pub mod token_source; pub mod token_stream; -pub mod char_stream; //pub mod trace_listener; #[doc(hidden)] +pub mod dfa; +#[doc(hidden)] pub mod transition; pub mod tree; -#[doc(hidden)] -pub mod dfa; //pub mod file_stream; #[doc(hidden)] -pub mod atn_deserializer; -pub mod token; -mod utils; -pub mod trees; +pub mod atn; #[doc(hidden)] pub mod atn_config_set; -pub mod error_listener; -pub mod prediction_mode; -mod input_stream; +#[doc(hidden)] +pub mod atn_deserializer; pub mod common_token_stream; -pub mod lexer; mod dfa_serializer; -pub mod lexer_atn_simulator; -#[doc(hidden)] -pub mod atn; -pub mod errors; +pub mod error_listener; pub mod error_strategy; +pub mod errors; +mod input_stream; +pub mod lexer; #[doc(hidden)] pub mod lexer_action_executor; +pub mod lexer_atn_simulator; pub mod parser; pub mod parser_atn_simulator; +pub mod prediction_mode; +pub mod token; +pub mod trees; +mod utils; //pub mod tokenstream_rewriter_test; #[doc(hidden)] pub mod atn_type; @@ -118,15 +118,15 @@ pub mod vocabulary; macro_rules! 
type_id { ($struct: tt) => { unsafe impl antlr_rust::rule_context::Tid for $struct<'_> { - fn self_id(&self) -> TypeId{ + fn self_id(&self) -> TypeId { core::any::TypeId::of::<$struct<'static>>() } + fn id() -> TypeId + where + Self: Sized, + { core::any::TypeId::of::<$struct<'static>>() } - fn id() -> TypeId where Self:Sized{ - core::any::TypeId::of::<$struct<'static>>() - } - } - } + }; } //#[cfg(test)] diff --git a/src/ll1_analyzer.rs b/src/ll1_analyzer.rs index b7d5f41..a4e783a 100644 --- a/src/ll1_analyzer.rs +++ b/src/ll1_analyzer.rs @@ -10,12 +10,12 @@ use crate::atn_state::{ATNState, ATNStateType}; use crate::interval_set::IntervalSet; use crate::parser::ParserNodeType; use crate::parser_rule_context::ParserRuleContext; -use crate::prediction_context::EMPTY_PREDICTION_CONTEXT; use crate::prediction_context::PredictionContext; +use crate::prediction_context::EMPTY_PREDICTION_CONTEXT; use crate::token::{TOKEN_EOF, TOKEN_EPSILON, TOKEN_INVALID_TYPE, TOKEN_MIN_USER_TOKEN_TYPE}; use crate::token_factory::TokenFactory; -use crate::transition::{RuleTransition, TransitionType}; use crate::transition::TransitionType::TRANSITION_NOTSET; +use crate::transition::{RuleTransition, TransitionType}; pub struct LL1Analyzer<'a> { atn: &'a ATN, diff --git a/src/parser.rs b/src/parser.rs index 49136e8..83f98f0 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -19,7 +19,7 @@ use crate::interval_set::IntervalSet; use crate::parser_atn_simulator::ParserATNSimulator; use crate::parser_rule_context::{BaseParserRuleContext, ParserRuleContext}; use crate::recognizer::{Actions, Recognizer}; -use crate::rule_context::{CustomRuleContext, RuleContext, states_stack}; +use crate::rule_context::{states_stack, CustomRuleContext, RuleContext}; use crate::token::{OwningToken, Token, TOKEN_EOF}; use crate::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use crate::token_source::TokenSource; diff --git a/src/parser_atn_simulator.rs b/src/parser_atn_simulator.rs index 3f310c0..0f06e9b 100644 --- a/src/parser_atn_simulator.rs +++ b/src/parser_atn_simulator.rs @@ -1,4 +1,3 @@ -use std::{ptr, usize}; use std::borrow::{Borrow, BorrowMut}; use std::cell::Cell; use std::collections::{HashMap, HashSet}; @@ -7,6 +6,7 @@ use std::marker::PhantomData; use std::ops::Deref; use std::rc::Rc; use std::sync::Arc; +use std::{ptr, usize}; use bit_set::BitSet; use typed_arena::Arena; @@ -15,9 +15,9 @@ use crate::atn::{ATN, INVALID_ALT}; use crate::atn_config::ATNConfig; use crate::atn_config_set::ATNConfigSet; use crate::atn_simulator::{BaseATNSimulator, IATNSimulator}; -use crate::atn_state::{ATNDecisionState, ATNState, ATNSTATE_BLOCK_END, ATNStateRef, ATNStateType}; use crate::atn_state::ATNStateType::RuleStopState; -use crate::dfa::{DFA, ScopeExt}; +use crate::atn_state::{ATNDecisionState, ATNState, ATNStateRef, ATNStateType, ATNSTATE_BLOCK_END}; +use crate::dfa::{ScopeExt, DFA}; use crate::dfa_state::{DFAState, DFAStateRef, PredPrediction}; use crate::errors::{ANTLRError, NoViableAltError}; use crate::int_stream::EOF; @@ -25,14 +25,24 @@ use crate::interval_set::IntervalSet; use crate::lexer_atn_simulator::ERROR_DFA_STATE_REF; use crate::parser::{Parser, ParserNodeType}; use crate::parser_rule_context::{empty_ctx, ParserRuleContext}; -use crate::prediction_context::{EMPTY_PREDICTION_CONTEXT, MurmurHasherBuilder, PREDICTION_CONTEXT_EMPTY_RETURN_STATE, PredictionContext, PredictionContextCache}; -use crate::prediction_mode::{all_subsets_conflict, all_subsets_equal, get_alts, get_conflicting_alt_subsets, 
get_single_viable_alt, has_sll_conflict_terminating_prediction, PredictionMode, resolves_to_just_one_viable_alt}; +use crate::prediction_context::{ + MurmurHasherBuilder, PredictionContext, PredictionContextCache, EMPTY_PREDICTION_CONTEXT, + PREDICTION_CONTEXT_EMPTY_RETURN_STATE, +}; +use crate::prediction_mode::{ + all_subsets_conflict, all_subsets_equal, get_alts, get_conflicting_alt_subsets, + get_single_viable_alt, has_sll_conflict_terminating_prediction, + resolves_to_just_one_viable_alt, PredictionMode, +}; use crate::rule_context::RuleContext; use crate::semantic_context::SemanticContext; use crate::token::{Token, TOKEN_EOF, TOKEN_EPSILON}; use crate::token_factory::CommonTokenFactory; use crate::token_stream::TokenStream; -use crate::transition::{ActionTransition, EpsilonTransition, PrecedencePredicateTransition, PredicateTransition, RuleTransition, Transition, TransitionType}; +use crate::transition::{ + ActionTransition, EpsilonTransition, PrecedencePredicateTransition, PredicateTransition, + RuleTransition, Transition, TransitionType, +}; /// ### The embodiment of the adaptive LL(*), ALL(*), parsing strategy. /// @@ -87,21 +97,37 @@ struct Local<'a, 'input, T: Parser<'input> + 'a> { merge_cache: &'a mut MergeCache, precedence: isize, parser: &'a mut T, - pd: PhantomData>>, + pd: PhantomData>>, } impl<'a, 'input, T: Parser<'input> + 'a> Local<'a, 'input, T> { - fn input(&mut self) -> &mut dyn TokenStream<'input, TF=T::TF> { self.parser.get_input_stream_mut() } + fn input(&mut self) -> &mut dyn TokenStream<'input, TF = T::TF> { + self.parser.get_input_stream_mut() + } fn seek(&mut self, i: isize) { self.input().seek(i) } - fn outer_context(&self) -> &<T::Node as ParserNodeType<'input>>::Type { self.outer_context.deref() } + fn outer_context(&self) -> &<T::Node as ParserNodeType<'input>>::Type { + self.outer_context.deref() + } } -pub type MergeCache = HashMap<(Arc<PredictionContext>, Arc<PredictionContext>), Arc<PredictionContext>, MurmurHasherBuilder>; +pub type MergeCache = HashMap< + (Arc<PredictionContext>, Arc<PredictionContext>), + Arc<PredictionContext>, + MurmurHasherBuilder, +>; impl ParserATNSimulator { - pub fn new(atn: Arc<ATN>, decision_to_dfa: Arc<Vec<DFA>>, shared_context_cache: Arc<PredictionContextCache>) -> ParserATNSimulator { + pub fn new( + atn: Arc<ATN>, + decision_to_dfa: Arc<Vec<DFA>>, + shared_context_cache: Arc<PredictionContextCache>, + ) -> ParserATNSimulator { ParserATNSimulator { - base: BaseATNSimulator::new_base_atnsimulator(atn, decision_to_dfa, shared_context_cache), + base: BaseATNSimulator::new_base_atnsimulator( + atn, + decision_to_dfa, + shared_context_cache, + ), prediction_mode: Cell::new(PredictionMode::LL), start_index: Cell::new(0), } @@ -113,9 +139,10 @@ impl ParserATNSimulator { fn reset(&self) { unimplemented!() } - pub fn adaptive_predict<'a, T: Parser<'a>>(&self, - decision: isize, - parser: &mut T, + pub fn adaptive_predict<'a, T: Parser<'a>>( + &self, + decision: isize, + parser: &mut T, ) -> Result<isize, ANTLRError> { self.start_index.set(parser.get_input_stream_mut().index()); let mut merge_cache: MergeCache = HashMap::with_hasher(MurmurHasherBuilder {}); @@ -125,15 +152,17 @@ impl ParserATNSimulator { merge_cache: &mut merge_cache, precedence: parser.get_precedence(), parser, - pd: PhantomData + pd: PhantomData, }; -// 4!("adaptive_predict decision {}, is_prec {}",decision,local.dfa.is_precedence_dfa()); + // 4!("adaptive_predict decision {}, is_prec {}",decision,local.dfa.is_precedence_dfa()); let m = local.input().mark(); let result = { let s0 = if local.dfa.is_precedence_dfa() { - local.dfa.get_precedence_start_state(local.precedence/*parser.get_precedence()*/) + local + .dfa + .get_precedence_start_state(local.precedence /*parser.get_precedence()*/) } else {
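// Non-precedence decisions share the single start state cached in `dfa.s0`;
// precedence DFAs (generated for left-recursive rules) instead keep one start
// state per precedence level, looked up above via the parser's current precedence.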
local.dfa.s0.read().unwrap().as_ref().copied() }; @@ -151,42 +180,54 @@ impl ParserATNSimulator { let s0_closure_updated = self.apply_precedence_filter(&s0_closure, &mut local); local.dfa.states.write().unwrap()[s0].configs = Box::new(s0_closure); - s0 = self.add_dfastate(&local.dfa, DFAState::new_dfastate(0, Box::new(s0_closure_updated))); + s0 = self.add_dfastate( + &local.dfa, + DFAState::new_dfastate(0, Box::new(s0_closure_updated)), + ); local.dfa.set_precedence_start_state(local.precedence, s0); s0 } else { - let s0 = self.add_dfastate(&local.dfa, DFAState::new_dfastate(0, Box::new(s0_closure))); + let s0 = self + .add_dfastate(&local.dfa, DFAState::new_dfastate(0, Box::new(s0_closure))); local.dfa.s0.write().unwrap().replace(s0); s0 } }); - self.exec_atn(&mut local, s0)? }; local.input().seek(self.start_index.get()); local.input().release(m); -// println!("result = {}", result); + // println!("result = {}", result); Ok(result) } #[allow(non_snake_case)] - fn exec_atn<'a, T: Parser<'a>>(&self, local: &mut Local<'_, 'a, T>, s0: DFAStateRef) -> Result { + fn exec_atn<'a, T: Parser<'a>>( + &self, + local: &mut Local<'_, 'a, T>, + s0: DFAStateRef, + ) -> Result { let mut previousD = s0; let mut token = local.input().la(1); loop { -// println!("exec atn loop previous D {}",previousD as isize -1); - let D = self.get_existing_target_state(local.dfa, previousD, token) + // println!("exec atn loop previous D {}",previousD as isize -1); + let D = self + .get_existing_target_state(local.dfa, previousD, token) .unwrap_or_else(|| self.compute_target_state(local.dfa, previousD, token, local)); assert!(D > 0); let states = local.dfa.states.read().unwrap(); if D == ERROR_DFA_STATE_REF { let previousDstate = &states[previousD]; - let err = self.no_viable_alt(local, previousDstate.configs.as_ref(), self.start_index.get()); + let err = self.no_viable_alt( + local, + previousDstate.configs.as_ref(), + self.start_index.get(), + ); local.input().seek(self.start_index.get()); let alt = self.get_syn_valid_or_sem_invalid_alt_that_finished_decision_entry_rule( previousDstate.configs.as_ref(), @@ -200,7 +241,7 @@ impl ParserATNSimulator { let Dstate = &states[D]; if Dstate.requires_full_context && self.prediction_mode.get() != PredictionMode::SLL { - let mut conflicting_alts = Dstate.configs.conflicting_alts.clone();//todo get rid of clone? + let mut conflicting_alts = Dstate.configs.conflicting_alts.clone(); //todo get rid of clone? 
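// This DFA state was marked during SLL construction as requiring the full
// outer context. Predicates, if any, are evaluated first since they may
// already narrow the decision to a single alternative; otherwise the input
// is rewound and prediction is re-run below in full-context (LL) mode, the
// second stage of the ALL(*) strategy.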
if !Dstate.predicates.is_empty() { let conflict_index = local.input().index(); if conflict_index != self.start_index.get() { @@ -208,7 +249,7 @@ impl ParserATNSimulator { } conflicting_alts = self.eval_semantic_context(local, &Dstate.predicates, true); -// println!("conflicting_alts {:?}",&conflicting_alts); + // println!("conflicting_alts {:?}",&conflicting_alts); if conflicting_alts.len() == 1 { return Ok(conflicting_alts.iter().next().unwrap() as isize); } @@ -220,7 +261,10 @@ impl ParserATNSimulator { let s0_closure = self.compute_start_state( local.dfa.atn_start_state, - PredictionContext::from_rule_context::<'a, T::Node>(self.atn(), local.outer_context()), + PredictionContext::from_rule_context::<'a, T::Node>( + self.atn(), + local.outer_context(), + ), true, local, ); @@ -231,7 +275,7 @@ impl ParserATNSimulator { Dstate.configs.as_ref(), self.start_index.get(), local.input().index(), - local.parser + local.parser, ); return self.exec_atn_with_full_context(local, &Dstate, s0_closure); @@ -239,7 +283,7 @@ impl ParserATNSimulator { if Dstate.is_accept_state { if Dstate.predicates.is_empty() { -// println!("prediction !!{}",Dstate.prediction); + // println!("prediction !!{}",Dstate.prediction); return Ok(Dstate.prediction); } @@ -248,7 +292,13 @@ impl ParserATNSimulator { let alts = self.eval_semantic_context(local, &Dstate.predicates, true); match alts.len() { - 0 => return Err(self.no_viable_alt(local, Dstate.configs.as_ref(), self.start_index.get())), + 0 => { + return Err(self.no_viable_alt( + local, + Dstate.configs.as_ref(), + self.start_index.get(), + )) + } 1 => return Ok(alts.iter().next().unwrap() as isize), _ => { self.report_ambiguity( @@ -274,19 +324,30 @@ impl ParserATNSimulator { } #[allow(non_snake_case)] - fn get_existing_target_state(&self, dfa: &DFA, previousD: DFAStateRef, t: isize) -> Option { + fn get_existing_target_state( + &self, + dfa: &DFA, + previousD: DFAStateRef, + t: isize, + ) -> Option { dfa.states.read().unwrap()[previousD] .edges .get((t + 1) as usize) .and_then(|x| match *x { 0 => None, - x => Some(x) + x => Some(x), }) } #[allow(non_snake_case)] - fn compute_target_state<'a, T: Parser<'a>>(&self, dfa: &DFA, previousD: DFAStateRef, t: isize, local: &mut Local<'_, 'a, T>) -> DFAStateRef { -// println!("source config {:?}",dfa.states.read().unwrap()[previousD].configs.as_ref()); + fn compute_target_state<'a, T: Parser<'a>>( + &self, + dfa: &DFA, + previousD: DFAStateRef, + t: isize, + local: &mut Local<'_, 'a, T>, + ) -> DFAStateRef { + // println!("source config {:?}",dfa.states.read().unwrap()[previousD].configs.as_ref()); let reach = self.compute_reach_set( dfa.states.read().unwrap()[previousD].configs.as_ref(), t, @@ -295,14 +356,18 @@ impl ParserATNSimulator { ); let reach = match reach { None => { - self.add_dfaedge(dfa.states.write().unwrap()[previousD].borrow_mut(), t, ERROR_DFA_STATE_REF); + self.add_dfaedge( + dfa.states.write().unwrap()[previousD].borrow_mut(), + t, + ERROR_DFA_STATE_REF, + ); return ERROR_DFA_STATE_REF; } Some(x) => x, }; let predicted_alt = self.get_unique_alt(&reach); -// println!("predicted_alt {}",predicted_alt); + // println!("predicted_alt {}",predicted_alt); let mut D = DFAState::new_dfastate(0, reach.into()); let reach = D.configs.as_ref(); @@ -311,8 +376,9 @@ impl ParserATNSimulator { D.is_accept_state = true; D.configs.set_unique_alt(predicted_alt); D.prediction = predicted_alt - } else if self.all_configs_in_rule_stop_state(reach) || - has_sll_conflict_terminating_prediction(self.prediction_mode.get(), reach) { + 
} else if self.all_configs_in_rule_stop_state(reach) + || has_sll_conflict_terminating_prediction(self.prediction_mode.get(), reach) + { let alts = self.get_conflicting_alts(reach); D.prediction = alts.iter().next().unwrap() as isize; D.configs.conflicting_alts = alts; @@ -320,11 +386,11 @@ impl ParserATNSimulator { D.is_accept_state = true; } -// println!("target config {:?}",&D.configs); + // println!("target config {:?}",&D.configs); if D.is_accept_state && D.configs.has_semantic_context() { let decision_state = self.atn().decision_to_state[dfa.decision as usize]; self.predicate_dfa_state(&mut D, self.atn().states[decision_state].deref()); -// println!("predicates compute target {:?}",&D.predicates); + // println!("predicates compute target {:?}",&D.predicates); if !D.predicates.is_empty() { D.prediction = INVALID_ALT } @@ -337,22 +403,32 @@ impl ParserATNSimulator { fn predicate_dfa_state(&self, dfa_state: &mut DFAState, decision_state: &dyn ATNState) { let nalts = decision_state.get_transitions().len(); - let alts_to_collect_preds_from = self.get_conflicting_alts_or_unique_alt(dfa_state.configs.as_ref()); + let alts_to_collect_preds_from = + self.get_conflicting_alts_or_unique_alt(dfa_state.configs.as_ref()); let alt_to_pred = self.get_preds_for_ambig_alts( &alts_to_collect_preds_from, dfa_state.configs.as_ref(), nalts, ); if let Some(alt_to_pred) = alt_to_pred { - dfa_state.predicates = self.get_predicate_predictions(&alts_to_collect_preds_from, alt_to_pred); + dfa_state.predicates = + self.get_predicate_predictions(&alts_to_collect_preds_from, alt_to_pred); dfa_state.prediction = INVALID_ALT; } else { - dfa_state.prediction = alts_to_collect_preds_from.iter() - .next().unwrap_or(0/*in java it is -1 but looks like 0 is good enough*/) as isize; + dfa_state.prediction = alts_to_collect_preds_from + .iter() + .next() + .unwrap_or(0 /*in java it is -1 but looks like 0 is good enough*/) + as isize; } } - fn exec_atn_with_full_context<'a, T: Parser<'a>>(&self, local: &mut Local<'_, 'a, T>, _D: &DFAState, s0: ATNConfigSet) -> Result { + fn exec_atn_with_full_context<'a, T: Parser<'a>>( + &self, + local: &mut Local<'_, 'a, T>, + _D: &DFAState, + s0: ATNConfigSet, + ) -> Result { //println!("exec_atn_with_full_context"); let full_ctx = true; let mut found_exact_ambig = false; @@ -361,12 +437,15 @@ impl ParserATNSimulator { let mut t = local.input().la(1); let mut predicted_alt = 0; loop { -// println!("full_ctx loop"); + // println!("full_ctx loop"); let reach = self.compute_reach_set(&prev, t, full_ctx, local); prev = match reach { None => { local.input().seek(self.start_index.get()); - let alt = self.get_syn_valid_or_sem_invalid_alt_that_finished_decision_entry_rule(&prev, local); + let alt = self + .get_syn_valid_or_sem_invalid_alt_that_finished_decision_entry_rule( + &prev, local, + ); if alt != INVALID_ALT { return Ok(alt); } @@ -386,14 +465,12 @@ impl ParserATNSimulator { if predicted_alt != INVALID_ALT { break; } - } else if all_subsets_conflict(&alt_sub_sets) - && all_subsets_equal(&alt_sub_sets) { + } else if all_subsets_conflict(&alt_sub_sets) && all_subsets_equal(&alt_sub_sets) { found_exact_ambig = true; predicted_alt = get_single_viable_alt(&alt_sub_sets); break; } - if t != TOKEN_EOF { local.input().consume(); t = local.input().la(1); @@ -401,7 +478,14 @@ impl ParserATNSimulator { } if prev.get_unique_alt() != INVALID_ALT { - self.report_context_sensitivity(local.dfa, predicted_alt, &prev, self.start_index.get(), local.input().index(), local.parser); + 
self.report_context_sensitivity( + local.dfa, + predicted_alt, + &prev, + self.start_index.get(), + local.input().index(), + local.parser, + ); return Ok(predicted_alt); } self.report_ambiguity( @@ -418,8 +502,14 @@ impl ParserATNSimulator { } // ATNConfigSet is pretty big so should be boxed to move it cheaper - fn compute_reach_set<'a, T: Parser<'a>>(&self, closure: &ATNConfigSet, t: isize, full_ctx: bool, local: &mut Local<'_, 'a, T>) -> Option { -// println!("in computeReachSet, starting closure: {:?}",closure); + fn compute_reach_set<'a, T: Parser<'a>>( + &self, + closure: &ATNConfigSet, + t: isize, + full_ctx: bool, + local: &mut Local<'_, 'a, T>, + ) -> Option { + // println!("in computeReachSet, starting closure: {:?}",closure); let mut intermediate = ATNConfigSet::new_base_atnconfig_set(full_ctx); let mut skipped_stop_states = Vec::<&ATNConfig>::new(); @@ -435,47 +525,63 @@ impl ParserATNSimulator { } for tr in state.get_transitions() { - self.get_reachable_target(tr.as_ref(), t) - .map(|target| { - let added = Box::new(c.cloned(self.atn().states[target].as_ref())); - intermediate.add_cached(added, Some(local.merge_cache)) - }); + self.get_reachable_target(tr.as_ref(), t).map(|target| { + let added = Box::new(c.cloned(self.atn().states[target].as_ref())); + intermediate.add_cached(added, Some(local.merge_cache)) + }); } } -// println!("intermediate {:?}",intermediate); - + // println!("intermediate {:?}",intermediate); let mut look_to_end_of_rule = false; - let mut reach = if skipped_stop_states.is_empty() && t != TOKEN_EOF - && (intermediate.length() == 1 || self.get_unique_alt(&intermediate) != INVALID_ALT) { + let mut reach = if skipped_stop_states.is_empty() + && t != TOKEN_EOF + && (intermediate.length() == 1 || self.get_unique_alt(&intermediate) != INVALID_ALT) + { look_to_end_of_rule = true; intermediate } else { let mut reach = ATNConfigSet::new_base_atnconfig_set(full_ctx); let mut closure_busy = HashSet::new(); -// println!("calc reach {:?}",intermediate.length()); + // println!("calc reach {:?}",intermediate.length()); for c in intermediate.configs { let treat_eofas_epsilon = t == TOKEN_EOF; - self.closure(*c, &mut reach, &mut closure_busy, false, full_ctx, treat_eofas_epsilon, local); + self.closure( + *c, + &mut reach, + &mut closure_busy, + false, + full_ctx, + treat_eofas_epsilon, + local, + ); } -// println!("calc reach {:?}",reach); + // println!("calc reach {:?}",reach); reach }; if t == TOKEN_EOF { - reach = self.remove_all_configs_not_in_rule_stop_state(reach, look_to_end_of_rule, local.merge_cache); + reach = self.remove_all_configs_not_in_rule_stop_state( + reach, + look_to_end_of_rule, + local.merge_cache, + ); } - if !skipped_stop_states.is_empty() && (!full_ctx || !self.has_config_in_rule_stop_state(&reach)) { + if !skipped_stop_states.is_empty() + && (!full_ctx || !self.has_config_in_rule_stop_state(&reach)) + { for c in skipped_stop_states { reach.add_cached(c.clone().into(), Some(local.merge_cache)); } } -// println!("result?"); - if reach.is_empty() { return None; } + // println!("result?"); + if reach.is_empty() { + return None; + } -// println!("result {:?}",&reach); + // println!("result {:?}",&reach); return Some(reach); } @@ -490,13 +596,20 @@ impl ParserATNSimulator { fn all_configs_in_rule_stop_state(&self, configs: &ATNConfigSet) -> bool { for c in configs.get_items() { - if let RuleStopState = self.atn().states[c.get_state()].get_state_type() {} else { return false; } + if let RuleStopState = self.atn().states[c.get_state()].get_state_type() { 
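+ // this configuration already sits in its rule stop state, so it does
+ // not disqualify the set; keep checking the remaining configurations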
+ } else { + return false; + } } return true; } - - fn remove_all_configs_not_in_rule_stop_state(&self, configs: ATNConfigSet, look_to_end_of_rule: bool, merge_cache: &mut MergeCache) -> ATNConfigSet { + fn remove_all_configs_not_in_rule_stop_state( + &self, + configs: ATNConfigSet, + look_to_end_of_rule: bool, + merge_cache: &mut MergeCache, + ) -> ATNConfigSet { if self.all_configs_in_rule_stop_state(&configs) { return configs; } @@ -515,7 +628,11 @@ impl ParserATNSimulator { let next_tokens = self.atn().next_tokens(state); if next_tokens.contains(TOKEN_EPSILON) { let end_of_rule_state = self.atn().rule_to_stop_state[state.get_rule_index()]; - result.add_cached(c.cloned(self.atn().states[end_of_rule_state].as_ref()).into(), Some(merge_cache)); + result.add_cached( + c.cloned(self.atn().states[end_of_rule_state].as_ref()) + .into(), + Some(merge_cache), + ); } } } @@ -523,17 +640,17 @@ impl ParserATNSimulator { result } - fn compute_start_state<'a, T: Parser<'a>>(&self, - a: ATNStateRef, - initial_ctx: Arc, - full_ctx: bool, - local: &mut Local<'_, 'a, T>, + fn compute_start_state<'a, T: Parser<'a>>( + &self, + a: ATNStateRef, + initial_ctx: Arc, + full_ctx: bool, + local: &mut Local<'_, 'a, T>, ) -> ATNConfigSet { -// let initial_ctx = PredictionContext::prediction_context_from_rule_context(self.atn(),ctx); + // let initial_ctx = PredictionContext::prediction_context_from_rule_context(self.atn(),ctx); let mut configs = ATNConfigSet::new_base_atnconfig_set(full_ctx); -// println!("initial {:?}",initial_ctx); -// println!("initial state {:?}",a); - + // println!("initial {:?}",initial_ctx); + // println!("initial state {:?}",a); let atn_states = &self.atn().states; for (i, tr) in atn_states[a].get_transitions().iter().enumerate() { @@ -544,14 +661,26 @@ impl ParserATNSimulator { Some(initial_ctx.clone()), ); let mut closure_busy = HashSet::new(); - self.closure(c, &mut configs, &mut closure_busy, true, full_ctx, false, local); + self.closure( + c, + &mut configs, + &mut closure_busy, + true, + full_ctx, + false, + local, + ); } -// println!("start state {:?}",configs); + // println!("start state {:?}",configs); configs } - fn apply_precedence_filter<'a, T: Parser<'a>>(&self, configs: &ATNConfigSet, local: &mut Local<'_, 'a, T>) -> ATNConfigSet { + fn apply_precedence_filter<'a, T: Parser<'a>>( + &self, + configs: &ATNConfigSet, + local: &mut Local<'_, 'a, T>, + ) -> ATNConfigSet { //println!("apply_precedence_filter"); let mut states_from_alt1 = HashMap::new(); let mut config_set = ATNConfigSet::new_base_atnconfig_set(configs.full_context()); @@ -561,19 +690,23 @@ impl ParserATNSimulator { continue; } - let updated_sem_ctx = config.semantic_context + let updated_sem_ctx = config + .semantic_context .eval_precedence(local.parser, local.outer_context()); if let Some(updated_sem_ctx) = updated_sem_ctx.as_deref() { states_from_alt1.insert(config.get_state(), config.get_context()); if *updated_sem_ctx != *config.semantic_context { - config_set.add_cached(Box::new(ATNConfig::new_with_semantic( - config.get_state(), - config.get_alt(), - config.get_context().cloned(), - Box::new(updated_sem_ctx.clone()), - )), Some(local.merge_cache)); + config_set.add_cached( + Box::new(ATNConfig::new_with_semantic( + config.get_state(), + config.get_alt(), + config.get_context().cloned(), + Box::new(updated_sem_ctx.clone()), + )), + Some(local.merge_cache), + ); } else { config_set.add_cached(Box::new(config.clone()), Some(local.merge_cache)); } @@ -604,24 +737,34 @@ impl ParserATNSimulator { None } - fn 
get_preds_for_ambig_alts(&self, ambig_alts: &BitSet, configs: &ATNConfigSet, nalts: usize) -> Option> { + fn get_preds_for_ambig_alts( + &self, + ambig_alts: &BitSet, + configs: &ATNConfigSet, + nalts: usize, + ) -> Option> { let mut alt_to_pred = Vec::with_capacity(nalts + 1); alt_to_pred.resize_with(nalts + 1, || None); for c in configs.configs.iter() { let alt = c.get_alt() as usize; if ambig_alts.contains(alt) { - alt_to_pred[alt] = Some(SemanticContext::or(alt_to_pred[alt].as_ref(), Some(&*c.semantic_context))); + alt_to_pred[alt] = Some(SemanticContext::or( + alt_to_pred[alt].as_ref(), + Some(&*c.semantic_context), + )); } } - let alt_to_pred: Vec = alt_to_pred.into_iter() - .map(|it| + let alt_to_pred: Vec = alt_to_pred + .into_iter() + .map(|it| { if let Some(inner) = it { inner } else { SemanticContext::NONE } - ).collect(); + }) + .collect(); let npred_alts = alt_to_pred .iter() @@ -634,46 +777,71 @@ impl ParserATNSimulator { return Some(alt_to_pred); } - fn get_predicate_predictions(&self, ambig_alts: &BitSet, alt_to_pred: Vec) -> Vec { + fn get_predicate_predictions( + &self, + ambig_alts: &BitSet, + alt_to_pred: Vec, + ) -> Vec { let mut pairs = vec![]; let mut contains_predicate = false; for (i, pred) in alt_to_pred.into_iter().enumerate().skip(1) { - if pred != SemanticContext::NONE { contains_predicate = true } + if pred != SemanticContext::NONE { + contains_predicate = true + } if ambig_alts.contains(i) { - pairs.push(PredPrediction { alt: i as isize, pred }) + pairs.push(PredPrediction { + alt: i as isize, + pred, + }) } } - if !contains_predicate { return Vec::new(); } + if !contains_predicate { + return Vec::new(); + } pairs } - fn get_syn_valid_or_sem_invalid_alt_that_finished_decision_entry_rule<'a, T: Parser<'a>>(&self, - configs: &ATNConfigSet, - local: &mut Local<'_, 'a, T>, + fn get_syn_valid_or_sem_invalid_alt_that_finished_decision_entry_rule<'a, T: Parser<'a>>( + &self, + configs: &ATNConfigSet, + local: &mut Local<'_, 'a, T>, ) -> isize { let (sem_valid_configs, sem_invalid_configs) = self.split_according_to_semantic_validity(configs, local); let alt = self.get_alt_that_finished_decision_entry_rule(&sem_valid_configs); - if alt != INVALID_ALT { return alt; } + if alt != INVALID_ALT { + return alt; + } if !sem_invalid_configs.is_empty() { let alt = self.get_alt_that_finished_decision_entry_rule(&sem_invalid_configs); - if alt != INVALID_ALT { return alt; } + if alt != INVALID_ALT { + return alt; + } } INVALID_ALT } - fn split_according_to_semantic_validity<'a, T: Parser<'a>>(&self, configs: &ATNConfigSet, local: &mut Local<'_, 'a, T>) -> (ATNConfigSet, ATNConfigSet) { + fn split_according_to_semantic_validity<'a, T: Parser<'a>>( + &self, + configs: &ATNConfigSet, + local: &mut Local<'_, 'a, T>, + ) -> (ATNConfigSet, ATNConfigSet) { let mut succeeded = ATNConfigSet::new_base_atnconfig_set(configs.full_context()); let mut failed = ATNConfigSet::new_base_atnconfig_set(configs.full_context()); for c in configs.get_items() { let clone = Box::new(c.clone()); if *c.semantic_context != SemanticContext::NONE { - let predicate_eval_result = self.eval_predicate(local, &*c.semantic_context, c.get_alt(), configs.full_context()); + let predicate_eval_result = self.eval_predicate( + local, + &*c.semantic_context, + c.get_alt(), + configs.full_context(), + ); if predicate_eval_result { succeeded.add(clone); } else { @@ -699,43 +867,60 @@ impl ParserATNSimulator { return alts.get_min().unwrap_or(INVALID_ALT); } - fn eval_semantic_context<'a, T: Parser<'a>>(&self, local: &mut 
Local<'_, 'a, T>, pred_predictions: &Vec, complete: bool) -> BitSet { + fn eval_semantic_context<'a, T: Parser<'a>>( + &self, + local: &mut Local<'_, 'a, T>, + pred_predictions: &Vec, + complete: bool, + ) -> BitSet { let mut predictions = BitSet::new(); for pred in pred_predictions { if pred.pred == SemanticContext::NONE { predictions.insert(pred.alt as usize); - if !complete { break; } + if !complete { + break; + } continue; } let full_ctx = false; - let predicate_evaluation_result = self.eval_predicate(local, &pred.pred, pred.alt, full_ctx); + let predicate_evaluation_result = + self.eval_predicate(local, &pred.pred, pred.alt, full_ctx); if predicate_evaluation_result { predictions.insert(pred.alt as usize); - if !complete { break; } + if !complete { + break; + } } } predictions } - fn eval_predicate<'a, T: Parser<'a>>(&self, local: &mut Local<'_, 'a, T>, pred: impl Borrow, _alt: isize, _full_ctx: bool) -> bool { + fn eval_predicate<'a, T: Parser<'a>>( + &self, + local: &mut Local<'_, 'a, T>, + pred: impl Borrow, + _alt: isize, + _full_ctx: bool, + ) -> bool { pred.borrow().evaluate(local.parser, &*local.outer_context) } - fn closure<'a, T: Parser<'a>>(&self, - config: ATNConfig, - configs: &mut ATNConfigSet, - closure_busy: &mut HashSet, - collect_predicates: bool, - full_ctx: bool, - treat_eofas_epsilon: bool, - local: &mut Local<'_, 'a, T>, + fn closure<'a, T: Parser<'a>>( + &self, + config: ATNConfig, + configs: &mut ATNConfigSet, + closure_busy: &mut HashSet, + collect_predicates: bool, + full_ctx: bool, + treat_eofas_epsilon: bool, + local: &mut Local<'_, 'a, T>, ) { -// println!("cl{}", config.get_state()); + // println!("cl{}", config.get_state()); let initial_depth = 0; -// local.merge_cache.clear(); + // local.merge_cache.clear(); self.closure_checking_stop_state( config, @@ -750,21 +935,24 @@ impl ParserATNSimulator { assert!(!full_ctx || !configs.get_dips_into_outer_context()) } - fn closure_checking_stop_state<'a, T: Parser<'a>>(&self, - mut config: ATNConfig, - configs: &mut ATNConfigSet, - closure_busy: &mut HashSet, - collect_predicates: bool, - full_ctx: bool, - depth: isize, - treat_eofas_epsilon: bool, - local: &mut Local<'_, 'a, T>, + fn closure_checking_stop_state<'a, T: Parser<'a>>( + &self, + mut config: ATNConfig, + configs: &mut ATNConfigSet, + closure_busy: &mut HashSet, + collect_predicates: bool, + full_ctx: bool, + depth: isize, + treat_eofas_epsilon: bool, + local: &mut Local<'_, 'a, T>, ) { -// println!("closure({:?})",config); + // println!("closure({:?})",config); if let RuleStopState = self.atn().states[config.get_state()].get_state_type() { if !config.get_context().unwrap().is_empty() { config.get_context().unwrap().run(|temp| { - if temp.get_return_state(temp.length() - 1) == PREDICTION_CONTEXT_EMPTY_RETURN_STATE { + if temp.get_return_state(temp.length() - 1) + == PREDICTION_CONTEXT_EMPTY_RETURN_STATE + { if full_ctx { let new_config = config.cloned_with_new_ctx( self.atn().states[config.get_state()].as_ref(), @@ -788,11 +976,13 @@ impl ParserATNSimulator { let mut context = config.take_context(); for i in 0..context.length() { if context.get_return_state(i) == PREDICTION_CONTEXT_EMPTY_RETURN_STATE { - if i != context.length() - 1 { panic!("EMPTY_RETURN_STATE is not last for some reason, please report error") } + if i != context.length() - 1 { + panic!("EMPTY_RETURN_STATE is not last for some reason, please report error") + } continue; } let return_state = context.get_return_state(i) as ATNStateRef; -// let new_ctx = context.take_parent(i).unwrap(); 
+ // let new_ctx = context.take_parent(i).unwrap(); let new_ctx = context.get_parent(i).cloned(); let mut c = ATNConfig::new_with_semantic( return_state, @@ -817,7 +1007,8 @@ impl ParserATNSimulator { } else if full_ctx { configs.add_cached(Box::new(config), Some(local.merge_cache)); return; - } else {} + } else { + } } self.closure_work( config, @@ -831,28 +1022,32 @@ impl ParserATNSimulator { ) } - fn closure_work<'a, T: Parser<'a>>(&self, - config: ATNConfig, - configs: &mut ATNConfigSet, - closure_busy: &mut HashSet, - collect_predicates: bool, - full_ctx: bool, - depth: isize, - treat_eofas_epsilon: bool, - local: &mut Local<'_, 'a, T>, + fn closure_work<'a, T: Parser<'a>>( + &self, + config: ATNConfig, + configs: &mut ATNConfigSet, + closure_busy: &mut HashSet, + collect_predicates: bool, + full_ctx: bool, + depth: isize, + treat_eofas_epsilon: bool, + local: &mut Local<'_, 'a, T>, ) { //println!("depth {}",depth); -// println!("closure_work started {:?}",config); + // println!("closure_work started {:?}",config); let p = self.atn().states[config.get_state()].as_ref(); if !p.has_epsilon_only_transitions() { configs.add_cached(Box::new(config.clone()), Some(local.merge_cache)); } for (i, tr) in p.get_transitions().iter().enumerate() { - if i == 0 && self.can_drop_loop_entry_edge_in_left_recursive_rule(&config) { continue; } + if i == 0 && self.can_drop_loop_entry_edge_in_left_recursive_rule(&config) { + continue; + } - let continue_collecting = - tr.get_serialization_type() != TransitionType::TRANSITION_ACTION && collect_predicates; + let continue_collecting = tr.get_serialization_type() + != TransitionType::TRANSITION_ACTION + && collect_predicates; let c = self.get_epsilon_target( &config, tr.as_ref(), @@ -868,10 +1063,13 @@ impl ParserATNSimulator { assert!(!full_ctx); if local.dfa.is_precedence_dfa() { - let outermost_precedence_return = - tr.as_ref().cast::().outermost_precedence_return; + let outermost_precedence_return = tr + .as_ref() + .cast::() + .outermost_precedence_return; let atn_start_state = self.atn().states[local.dfa.atn_start_state].as_ref(); - if outermost_precedence_return == atn_start_state.get_rule_index() as isize { + if outermost_precedence_return == atn_start_state.get_rule_index() as isize + { c.set_precedence_filter_suppressed(true); } } @@ -907,18 +1105,25 @@ impl ParserATNSimulator { ) }; } -// println!("closure_work ended {:?}",config); + // println!("closure_work ended {:?}",config); } fn can_drop_loop_entry_edge_in_left_recursive_rule(&self, _config: &ATNConfig) -> bool { -// if std::env::var("TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT").ok() -// .and_then(|it|str::parse::(&it).ok()) == Some(true) -// { return false } + // if std::env::var("TURN_OFF_LR_LOOP_ENTRY_BRANCH_OPT").ok() + // .and_then(|it|str::parse::(&it).ok()) == Some(true) + // { return false } let state = self.atn().states[_config.get_state()].as_ref(); - if let ATNStateType::DecisionState { state: ATNDecisionState::StarLoopEntry { is_precedence, .. }, .. } = state.get_state_type() { - if !*is_precedence || _config.get_context().unwrap().is_empty() || _config.get_context().unwrap().has_empty_path() { + if let ATNStateType::DecisionState { + state: ATNDecisionState::StarLoopEntry { is_precedence, .. }, + .. 
+ } = state.get_state_type() + { + if !*is_precedence + || _config.get_context().unwrap().is_empty() + || _config.get_context().unwrap().has_empty_path() + { return false; } } else { @@ -929,24 +1134,36 @@ impl ParserATNSimulator { let ctx_len = pred_ctx.length(); for i in 0..ctx_len { let return_state = self.atn().states[pred_ctx.get_return_state(i) as usize].as_ref(); - if return_state.get_rule_index() != state.get_rule_index() { return false } + if return_state.get_rule_index() != state.get_rule_index() { + return false; + } } let decision_start_state = state.get_transitions()[0].get_target(); let decision_start_state = self.atn().states[decision_start_state].as_ref(); - let block_end_state_num = if let ATNStateType::DecisionState { state: ATNDecisionState::BlockStartState { end_state, .. }, .. } = decision_start_state.get_state_type() { + let block_end_state_num = if let ATNStateType::DecisionState { + state: ATNDecisionState::BlockStartState { end_state, .. }, + .. + } = decision_start_state.get_state_type() + { *end_state - } else { unreachable!("cast error") }; + } else { + unreachable!("cast error") + }; for i in 0..ctx_len { let return_state = self.atn().states[pred_ctx.get_return_state(i) as usize].as_ref(); - if return_state.get_transitions().len() != 1 || !return_state.get_transitions()[0].is_epsilon() { -// println!("test1"); + if return_state.get_transitions().len() != 1 + || !return_state.get_transitions()[0].is_epsilon() + { + // println!("test1"); return false; } - let return_state_target = self.atn().states[return_state.get_transitions()[0].get_target()].as_ref(); + let return_state_target = + self.atn().states[return_state.get_transitions()[0].get_target()].as_ref(); if return_state.get_state_type_id() == ATNSTATE_BLOCK_END - && ptr::eq(return_state_target, state) { + && ptr::eq(return_state_target, state) + { continue; } if return_state.get_state_number() == block_end_state_num { @@ -959,60 +1176,66 @@ impl ParserATNSimulator { if return_state_target.get_state_type_id() == ATNSTATE_BLOCK_END && return_state_target.get_transitions().len() == 1 && return_state_target.get_transitions()[0].is_epsilon() - && return_state_target.get_transitions()[0].get_target() == state.get_state_number() { + && return_state_target.get_transitions()[0].get_target() == state.get_state_number() + { continue; } -// println!("test2"); + // println!("test2"); return false; } -// println!("dropping on state {} ", state.get_state_number()); + // println!("dropping on state {} ", state.get_state_number()); return true; } -// -// fn get_rule_name(&self, index: isize) -> String { unimplemented!() } - - fn get_epsilon_target<'a, T: Parser<'a>>(&self, - config: &ATNConfig, - t: &dyn Transition, - collect_predicates: bool, - in_context: bool, - full_ctx: bool, - treat_eofas_epsilon: bool, - local: &mut Local<'_, 'a, T>, + // + // fn get_rule_name(&self, index: isize) -> String { unimplemented!() } + + fn get_epsilon_target<'a, T: Parser<'a>>( + &self, + config: &ATNConfig, + t: &dyn Transition, + collect_predicates: bool, + in_context: bool, + full_ctx: bool, + treat_eofas_epsilon: bool, + local: &mut Local<'_, 'a, T>, ) -> Option<ATNConfig> { match t.get_serialization_type() { - TransitionType::TRANSITION_EPSILON => Some(config.cloned(self.atn().states[t.get_target()].as_ref())), - TransitionType::TRANSITION_RULE => Some(self.rule_transition(config, t.cast::<RuleTransition>())), - TransitionType::TRANSITION_PREDICATE => - self.pred_transition( - config, - t.cast::<PredicateTransition>(), - collect_predicates, - in_context, - full_ctx, - local, - ),
TransitionType::TRANSITION_ACTION => Some(self.action_transition(config, t.cast::<ActionTransition>())), - TransitionType::TRANSITION_PRECEDENCE => - self.precedence_transition( - config, - t.cast::<PrecedencePredicateTransition>(), - collect_predicates, - in_context, - full_ctx, - local, - ), - TransitionType::TRANSITION_ATOM | - TransitionType::TRANSITION_SET | - TransitionType::TRANSITION_RANGE => + TransitionType::TRANSITION_EPSILON => { + Some(config.cloned(self.atn().states[t.get_target()].as_ref())) + } + TransitionType::TRANSITION_RULE => { + Some(self.rule_transition(config, t.cast::<RuleTransition>())) + } + TransitionType::TRANSITION_PREDICATE => self.pred_transition( + config, + t.cast::<PredicateTransition>(), + collect_predicates, + in_context, + full_ctx, + local, + ), + TransitionType::TRANSITION_ACTION => { + Some(self.action_transition(config, t.cast::<ActionTransition>())) + } + TransitionType::TRANSITION_PRECEDENCE => self.precedence_transition( + config, + t.cast::<PrecedencePredicateTransition>(), + collect_predicates, + in_context, + full_ctx, + local, + ), + TransitionType::TRANSITION_ATOM + | TransitionType::TRANSITION_SET + | TransitionType::TRANSITION_RANGE => { if treat_eofas_epsilon && t.matches(TOKEN_EOF, 0, 1) { Some(config.cloned(self.atn().states[t.get_target()].as_ref())) } else { None - }, - TransitionType::TRANSITION_NOTSET | - TransitionType::TRANSITION_WILDCARD => None, + } + } + TransitionType::TRANSITION_NOTSET | TransitionType::TRANSITION_WILDCARD => None, } } @@ -1020,26 +1243,33 @@ impl ParserATNSimulator { config.cloned(self.atn().states[t.target].as_ref()) } - fn precedence_transition<'a, T: Parser<'a>>(&self, - config: &ATNConfig, - pt: &PrecedencePredicateTransition, - collect_predicates: bool, - in_context: bool, - full_ctx: bool, - local: &mut Local<'_, 'a, T>, + fn precedence_transition<'a, T: Parser<'a>>( + &self, + config: &ATNConfig, + pt: &PrecedencePredicateTransition, + collect_predicates: bool, + in_context: bool, + full_ctx: bool, + local: &mut Local<'_, 'a, T>, ) -> Option<ATNConfig> { let target = self.atn().states[pt.target].deref(); if collect_predicates && in_context { if full_ctx { let curr_pos = local.input().index(); local.input().seek(self.start_index.get()); - let prec_succeeds = self.eval_predicate(local, pt.get_predicate().unwrap(), config.get_alt(), full_ctx); + let prec_succeeds = self.eval_predicate( + local, + pt.get_predicate().unwrap(), + config.get_alt(), + full_ctx, + ); local.input().seek(curr_pos); if prec_succeeds { return Some(config.cloned(target)); } } else { - let new_sem_ctx = SemanticContext::and(Some(&*config.semantic_context), pt.get_predicate()); + let new_sem_ctx = + SemanticContext::and(Some(&*config.semantic_context), pt.get_predicate()); return Some(config.cloned_with_new_semantic(target, Box::new(new_sem_ctx))); } } else { @@ -1049,28 +1279,33 @@ impl ParserATNSimulator { fn pred_transition<'a, T: Parser<'a>>(&self, - config: &ATNConfig, - pt: &PredicateTransition, - collect_predicates: bool, - in_context: bool, - full_ctx: bool, - local: &mut Local<'_, 'a, T>, + fn pred_transition<'a, T: Parser<'a>>( + &self, + config: &ATNConfig, + pt: &PredicateTransition, + collect_predicates: bool, + in_context: bool, + full_ctx: bool, + local: &mut Local<'_, 'a, T>, ) -> Option<ATNConfig> { let target = self.atn().states[pt.target].deref(); - if collect_predicates && - (!pt.is_ctx_dependent || (pt.is_ctx_dependent && in_context)) - { + if collect_predicates && (!pt.is_ctx_dependent || (pt.is_ctx_dependent && in_context)) { if full_ctx { let curr_pos = local.input().index(); local.input().seek(self.start_index.get()); - let prec_succeeds =
self.eval_predicate(local, pt.get_predicate().unwrap(), config.get_alt(), full_ctx); + let prec_succeeds = self.eval_predicate( + local, + pt.get_predicate().unwrap(), + config.get_alt(), + full_ctx, + ); local.input().seek(curr_pos); if prec_succeeds { return Some(config.cloned(target)); } } else { - let new_sem_ctx = SemanticContext::and(Some(&*config.semantic_context), pt.get_predicate()); + let new_sem_ctx = + SemanticContext::and(Some(&*config.semantic_context), pt.get_predicate()); return Some(config.cloned_with_new_semantic(target, Box::new(new_sem_ctx))); } } else { @@ -1097,20 +1332,26 @@ impl ParserATNSimulator { //todo can return Cow fn get_conflicting_alts_or_unique_alt(&self, configs: &ATNConfigSet) -> BitSet { return if configs.get_unique_alt() != INVALID_ALT { - BitSet::new().modify_with(|it| { it.insert(configs.get_unique_alt() as usize); }) + BitSet::new().modify_with(|it| { + it.insert(configs.get_unique_alt() as usize); + }) } else { configs.conflicting_alts.clone() }; } // -// fn get_token_name(&self, t: isize) -> String { unimplemented!() } -// -// fn get_lookahead_name(&self, input: TokenStream) -> String { unimplemented!() } -// -// fn dump_dead_end_configs(&self, nvae: * NoViableAltError) { unimplemented!() } -// - fn no_viable_alt<'a, T: Parser<'a>>(&self, local: &mut Local<'_, 'a, T>, _configs: &ATNConfigSet, start_index: isize) - -> ANTLRError { + // fn get_token_name(&self, t: isize) -> String { unimplemented!() } + // + // fn get_lookahead_name(&self, input: TokenStream) -> String { unimplemented!() } + // + // fn dump_dead_end_configs(&self, nvae: * NoViableAltError) { unimplemented!() } + // + fn no_viable_alt<'a, T: Parser<'a>>( + &self, + local: &mut Local<'_, 'a, T>, + _configs: &ATNConfigSet, + start_index: isize, + ) -> ANTLRError { let start_token = local.parser.get_input_stream().get(start_index).borrow(); let start_token = Token::to_owned(start_token); let offending_token = local.input().lt(1).unwrap().borrow(); @@ -1136,7 +1377,9 @@ impl ParserATNSimulator { } fn add_dfaedge(&self, from: &mut DFAState, t: isize, to: DFAStateRef) -> DFAStateRef { - if t < -1 || t > self.atn().max_token_type { return to; } + if t < -1 || t > self.atn().max_token_type { + return to; + } if from.edges.is_empty() { from.edges.resize(self.atn().max_token_type as usize + 2, 0); } @@ -1170,53 +1413,79 @@ impl ParserATNSimulator { states.push(dfastate); - -// if key != new_hash { - dfa.states_map.write().unwrap() + // if key != new_hash { + dfa.states_map + .write() + .unwrap() .entry(key) .or_insert(Vec::new()) .push(a); -// } + // } a } - fn report_attempting_full_context<'a, T: Parser<'a>>(&self, - dfa: &DFA, - conflicting_alts: &BitSet, - configs: &ATNConfigSet, - start_index: isize, - stop_index: isize, - parser: &mut T, + fn report_attempting_full_context<'a, T: Parser<'a>>( + &self, + dfa: &DFA, + conflicting_alts: &BitSet, + configs: &ATNConfigSet, + start_index: isize, + stop_index: isize, + parser: &mut T, ) { -// let ambig_index = parser.get_current_token().get_token_index(); - parser.get_error_lister_dispatch() - .report_attempting_full_context(parser, dfa, start_index, stop_index, - conflicting_alts, configs) + // let ambig_index = parser.get_current_token().get_token_index(); + parser + .get_error_lister_dispatch() + .report_attempting_full_context( + parser, + dfa, + start_index, + stop_index, + conflicting_alts, + configs, + ) } - fn report_context_sensitivity<'a, T: Parser<'a>>(&self, dfa: &DFA, prediction: isize, configs: &ATNConfigSet, - start_index: isize, 
stop_index: isize, parser: &mut T) { - parser.get_error_lister_dispatch() + fn report_context_sensitivity<'a, T: Parser<'a>>( + &self, + dfa: &DFA, + prediction: isize, + configs: &ATNConfigSet, + start_index: isize, + stop_index: isize, + parser: &mut T, + ) { + parser + .get_error_lister_dispatch() .report_context_sensitivity(parser, dfa, start_index, stop_index, prediction, configs) } - fn report_ambiguity<'a, T: Parser<'a>>(&self, dfa: &DFA, start_index: isize, stop_index: isize, exact: bool, - ambig_alts: &BitSet, configs: &ATNConfigSet, parser: &mut T) { - parser.get_error_lister_dispatch() - .report_ambiguity(parser, dfa, start_index, stop_index, exact, ambig_alts, configs) + fn report_ambiguity<'a, T: Parser<'a>>( + &self, + dfa: &DFA, + start_index: isize, + stop_index: isize, + exact: bool, + ambig_alts: &BitSet, + configs: &ATNConfigSet, + parser: &mut T, + ) { + parser.get_error_lister_dispatch().report_ambiguity( + parser, + dfa, + start_index, + stop_index, + exact, + ambig_alts, + configs, + ) } } impl IATNSimulator for ParserATNSimulator { - fn shared_context_cache(&self) -> &PredictionContextCache { - self.base.shared_context_cache() - } + fn shared_context_cache(&self) -> &PredictionContextCache { self.base.shared_context_cache() } - fn atn(&self) -> &ATN { - self.base.atn() - } + fn atn(&self) -> &ATN { self.base.atn() } - fn decision_to_dfa(&self) -> &Vec { - self.base.decision_to_dfa() - } -} \ No newline at end of file + fn decision_to_dfa(&self) -> &Vec { self.base.decision_to_dfa() } +} diff --git a/src/parser_rule_context.rs b/src/parser_rule_context.rs index 297a44f..169a6fb 100644 --- a/src/parser_rule_context.rs +++ b/src/parser_rule_context.rs @@ -1,4 +1,4 @@ -use std::any::{Any, type_name, TypeId}; +use std::any::{type_name, Any, TypeId}; use std::borrow::{Borrow, BorrowMut}; use std::cell::{Ref, RefCell, RefMut}; use std::convert::identity; @@ -10,15 +10,18 @@ use std::rc::Rc; use crate::errors::ANTLRError; use crate::interval_set::Interval; use crate::parser::ParserNodeType; -use crate::rule_context::{BaseRuleContext, CustomRuleContext, EmptyContextType, EmptyCustomRuleContext, RuleContext, Tid}; +use crate::rule_context::{ + BaseRuleContext, CustomRuleContext, EmptyContextType, EmptyCustomRuleContext, RuleContext, Tid, +}; use crate::token::{OwningToken, Token}; use crate::token_factory::{CommonTokenFactory, TokenFactory}; -use crate::tree::{ErrorNode, ParseTree, ParseTreeListener, ParseTreeVisitor, TerminalNode, Tree, Visitable}; +use crate::tree::{ + ErrorNode, ParseTree, ParseTreeListener, ParseTreeVisitor, TerminalNode, Tree, Visitable, +}; // use crate::utils::IndexIter; -pub trait ParserRuleContext<'input>: ParseTree<'input> + RuleContext<'input> + Debug -{ +pub trait ParserRuleContext<'input>: ParseTree<'input> + RuleContext<'input> + Debug { fn set_exception(&self, e: ANTLRError) {} fn set_start(&self, t: Option<>::Tok>) {} @@ -27,8 +30,18 @@ pub trait ParserRuleContext<'input>: ParseTree<'input> + RuleContext<'input> + D /// Note that the range from start to stop is inclusive, so for rules that do not consume anything /// (for example, zero length or error productions) this token may exceed stop. 
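> Reviewer note on the inclusive start/stop convention documented above: for a rule that consumes nothing, the start token can sit one past the stop token. A minimal, self-contained illustration (the types here are hypothetical stand-ins, not the runtime's):

```rust
/// Illustrative model: why `start` may exceed `stop` for an empty production.
#[derive(Debug)]
struct RuleSpan {
    start: isize, // index of the first token the rule would consume
    stop: isize,  // index of the last token actually consumed
}

fn zero_length_rule_at(next_token_index: isize) -> RuleSpan {
    // An empty production consumes no tokens: `start` points at the upcoming
    // token, while `stop` stays at the previous one, so start == stop + 1.
    RuleSpan { start: next_token_index, stop: next_token_index - 1 }
}

fn main() {
    let span = zero_length_rule_at(5);
    assert!(span.start > span.stop); // the inclusive range is empty
    println!("{:?}", span);
}
```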
/// - fn start<'a>(&'a self) -> Ref<'a, >::Inner> where 'input: 'a { unimplemented!() } - fn start_mut<'a>(&'a self) -> RefMut<'a, >::Tok> where 'input: 'a { unimplemented!() } + fn start<'a>(&'a self) -> Ref<'a, >::Inner> + where + 'input: 'a, + { + unimplemented!() + } + fn start_mut<'a>(&'a self) -> RefMut<'a, >::Tok> + where + 'input: 'a, + { + unimplemented!() + } fn set_stop(&self, t: Option<>::Tok>) {} /// @@ -36,8 +49,18 @@ pub trait ParserRuleContext<'input>: ParseTree<'input> + RuleContext<'input> + D /// Note that the range from start to stop is inclusive, so for rules that do not consume anything /// (for example, zero length or error productions) this token may precede start. /// - fn stop<'a>(&'a self) -> Ref<'a, >::Inner> where 'input: 'a { unimplemented!() } - fn stop_mut<'a>(&'a self) -> RefMut<'a, >::Tok> where 'input: 'a { unimplemented!() } + fn stop<'a>(&'a self) -> Ref<'a, >::Inner> + where + 'input: 'a, + { + unimplemented!() + } + fn stop_mut<'a>(&'a self) -> RefMut<'a, >::Tok> + where + 'input: 'a, + { + unimplemented!() + } // fn add_token_node(&self, token: TerminalNode<'input, Self::TF>) { } // fn add_error_node(&self, bad_token: ErrorNode<'input, Self::TF>) { } @@ -48,8 +71,15 @@ pub trait ParserRuleContext<'input>: ParseTree<'input> + RuleContext<'input> + D // fn enter_rule(&self, listener: &mut dyn Any); // fn exit_rule(&self, listener: &mut dyn Any); - fn child_of_type + 'input>(&self, pos: usize) -> Option> where Self: Sized { - let result = self.get_children() + fn child_of_type + 'input>( + &self, + pos: usize, + ) -> Option> + where + Self: Sized, + { + let result = self + .get_children() // .iter() .filter(|it| it.self_id() == T::id()) .nth(pos); @@ -59,7 +89,12 @@ pub trait ParserRuleContext<'input>: ParseTree<'input> + RuleContext<'input> + D } // todo, return iterator - fn children_of_type + 'input>(&self) -> Vec> where Self: Sized { + fn children_of_type + 'input>( + &self, + ) -> Vec> + where + Self: Sized, + { self.get_children() // .iter() // might not be fully sound until `non_static_type_id` is implemented @@ -92,30 +127,36 @@ pub trait ParserRuleContext<'input>: ParseTree<'input> + RuleContext<'input> + D // allows to implement generic functions on trait object as well pub trait RuleContextExt<'input>: ParserRuleContext<'input> { fn to_string(self: &Rc, rule_names: Option<&[&str]>, stop: Option>) -> String - where Z: ParserRuleContext<'input, Ctx=Self::Ctx, TF=Self::TF> + ?Sized + 'input, - Self::Ctx: ParserNodeType<'input, Type=Z>, - Rc: CoerceUnsized>; + where + Z: ParserRuleContext<'input, Ctx = Self::Ctx, TF = Self::TF> + ?Sized + 'input, + Self::Ctx: ParserNodeType<'input, Type = Z>, + Rc: CoerceUnsized>; fn accept_children(&self, visitor: &mut V) - where V: ParseTreeVisitor<'input, Self::Ctx> + ?Sized, - >::Type: Visitable; + where + V: ParseTreeVisitor<'input, Self::Ctx> + ?Sized, + >::Type: Visitable; } impl<'input, T: ParserRuleContext<'input> + ?Sized + 'input> RuleContextExt<'input> for T { fn to_string(self: &Rc, rule_names: Option<&[&str]>, stop: Option>) -> String - where Z: ParserRuleContext<'input, Ctx=T::Ctx, TF=T::TF> + ?Sized + 'input, - T::Ctx: ParserNodeType<'input, Type=Z>, - Rc: CoerceUnsized> + where + Z: ParserRuleContext<'input, Ctx = T::Ctx, TF = T::TF> + ?Sized + 'input, + T::Ctx: ParserNodeType<'input, Type = Z>, + Rc: CoerceUnsized>, { let mut result = String::from("["); let mut next: Option> = Some(self.clone() as Rc); while let Some(ref p) = next { - if stop.is_some() && (stop.is_none() || Rc::ptr_eq(p, 
stop.as_ref().unwrap())) { break } - + if stop.is_some() && (stop.is_none() || Rc::ptr_eq(p, stop.as_ref().unwrap())) { + break; + } if let Some(rule_names) = rule_names { let rule_index = p.get_rule_index(); - let rule_name = rule_names.get(rule_index).map(|&it| it.to_owned()) + let rule_name = rule_names + .get(rule_index) + .map(|&it| it.to_owned()) .unwrap_or_else(|| rule_index.to_string()); result.extend(rule_name.chars()); result.push(' '); @@ -134,30 +175,33 @@ impl<'input, T: ParserRuleContext<'input> + ?Sized + 'input> RuleContextExt<'inp } result.push(']'); - return result + return result; } fn accept_children(&self, visitor: &mut V) - where V: ParseTreeVisitor<'input, Self::Ctx> + ?Sized, - >::Type: Visitable + where + V: ParseTreeVisitor<'input, Self::Ctx> + ?Sized, + >::Type: Visitable, { - self.get_children() - .for_each(|child| child.accept(visitor)) + self.get_children().for_each(|child| child.accept(visitor)) } } - //requires ParserRuleContext to be Sync //lazy_static! { // pub static ref EMPTY_CTX: Box = // Box::new(BaseParserRuleContext::new_parser_ctx(None,-1,CustomRuleContextInternal)); //} - //todo do not calc this every time, maybe threadlocal? or it might be ok as it is because it is inlined #[inline] -pub(crate) fn empty_ctx<'a, TF: TokenFactory<'a>>() -> Box>> { - Box::new(BaseRuleContext::new_ctx(None, -1, EmptyCustomRuleContext(PhantomData))) +pub(crate) fn empty_ctx<'a, TF: TokenFactory<'a>>( +) -> Box>> { + Box::new(BaseRuleContext::new_ctx( + None, + -1, + EmptyCustomRuleContext(PhantomData), + )) } #[inline] @@ -176,10 +220,12 @@ pub fn cast<'a, T: ParserRuleContext<'a> + 'a + ?Sized, Result: 'a>(ctx: &T) -> /// should be called from generated parser only #[inline] #[doc(hidden)] -pub fn cast_mut<'a, T: ParserRuleContext<'a> + 'a + ?Sized, Result: 'a>(ctx: &mut Rc) -> &mut Result { -// if Rc::strong_count(ctx) != 1 { panic!("cant mutate Rc with multiple strong ref count"); } -// is it safe because parser does not save/move mutable references anywhere. -// they are only used to write data immediately in the corresponding expression +pub fn cast_mut<'a, T: ParserRuleContext<'a> + 'a + ?Sized, Result: 'a>( + ctx: &mut Rc, +) -> &mut Result { + // if Rc::strong_count(ctx) != 1 { panic!("cant mutate Rc with multiple strong ref count"); } + // is it safe because parser does not save/move mutable references anywhere. 
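> Reviewer note on the cast above: the runtime hand-rolls this trait-object-to-concrete-type cast with its `Tid` machinery because contexts borrow from `'input`, while `std::any::Any` and `TypeId::of` require `'static` (hence the "not fully sound until `non_static_type_id`" caveat earlier). For comparison, with `'static` types the safe standard-library pattern looks like this sketch (illustrative only, not the runtime's code):

```rust
use std::any::Any;

// A stand-in for a rule-context trait object.
trait Node: Any {
    // Expose `Any` so callers can recover the concrete type.
    fn as_any(&self) -> &dyn Any;
}

struct ExprContext {
    value: i32,
}

impl Node for ExprContext {
    fn as_any(&self) -> &dyn Any { self }
}

fn main() {
    let node: Box<dyn Node> = Box::new(ExprContext { value: 42 });
    // Checked downcast: returns None if the concrete type does not match.
    let expr = node.as_any().downcast_ref::<ExprContext>().unwrap();
    assert_eq!(expr.value, 42);
}
```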
+ // they are only used to write data immediately in the corresponding expression unsafe { &mut *(Rc::get_mut_unchecked(ctx) as *mut T as *mut Result) } } @@ -215,14 +261,12 @@ impl<'input, Ctx: CustomRuleContext<'input>> Debug for BaseParserRuleContext<'in } } -impl<'input, Ctx: CustomRuleContext<'input>> RuleContext<'input> for BaseParserRuleContext<'input, Ctx> { - fn get_invoking_state(&self) -> isize { - self.base.get_invoking_state() - } +impl<'input, Ctx: CustomRuleContext<'input>> RuleContext<'input> + for BaseParserRuleContext<'input, Ctx> +{ + fn get_invoking_state(&self) -> isize { self.base.get_invoking_state() } - fn set_invoking_state(&self, t: isize) { - self.base.set_invoking_state(t) - } + fn set_invoking_state(&self, t: isize) { self.base.set_invoking_state(t) } fn get_parent_ctx(&self) -> Option>::Type>> { self.base.get_parent_ctx() @@ -233,7 +277,9 @@ impl<'input, Ctx: CustomRuleContext<'input>> RuleContext<'input> for BaseParserR } } -impl<'input, Ctx: CustomRuleContext<'input>> CustomRuleContext<'input> for BaseParserRuleContext<'input, Ctx> { +impl<'input, Ctx: CustomRuleContext<'input>> CustomRuleContext<'input> + for BaseParserRuleContext<'input, Ctx> +{ type TF = Ctx::TF; type Ctx = Ctx::Ctx; @@ -241,11 +287,12 @@ impl<'input, Ctx: CustomRuleContext<'input>> CustomRuleContext<'input> for BaseP } unsafe impl<'input, Ctx: CustomRuleContext<'input>> Tid for BaseParserRuleContext<'input, Ctx> { - fn self_id(&self) -> TypeId { - self.base.ext.self_id() - } + fn self_id(&self) -> TypeId { self.base.ext.self_id() } - fn id() -> TypeId where Self: Sized { + fn id() -> TypeId + where + Self: Sized, + { Ctx::id() } } @@ -253,45 +300,44 @@ unsafe impl<'input, Ctx: CustomRuleContext<'input>> Tid for BaseParserRuleContex impl<'input, Ctx: CustomRuleContext<'input>> Deref for BaseParserRuleContext<'input, Ctx> { type Target = Ctx; - fn deref(&self) -> &Self::Target { - &self.base.ext - } + fn deref(&self) -> &Self::Target { &self.base.ext } } impl<'input, Ctx: CustomRuleContext<'input>> DerefMut for BaseParserRuleContext<'input, Ctx> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.base.ext - } + fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base.ext } } impl<'input, Ctx: CustomRuleContext<'input>> Borrow for BaseParserRuleContext<'input, Ctx> { - fn borrow(&self) -> &Ctx { - &self.base.ext - } + fn borrow(&self) -> &Ctx { &self.base.ext } } impl<'input, Ctx: CustomRuleContext<'input>> BorrowMut for BaseParserRuleContext<'input, Ctx> { - fn borrow_mut(&mut self) -> &mut Ctx { - &mut self.base.ext - } + fn borrow_mut(&mut self) -> &mut Ctx { &mut self.base.ext } } - -impl<'input, Ctx: CustomRuleContext<'input>> ParserRuleContext<'input> for BaseParserRuleContext<'input, Ctx> { +impl<'input, Ctx: CustomRuleContext<'input>> ParserRuleContext<'input> + for BaseParserRuleContext<'input, Ctx> +{ fn set_exception(&self, _e: ANTLRError) { unimplemented!() -// self.exception = Some(Box::new(e)); + // self.exception = Some(Box::new(e)); } fn set_start(&self, t: Option<>::Tok>) { *self.start.borrow_mut() = t.unwrap_or(Ctx::TF::create_invalid().clone()); } - fn start<'a>(&'a self) -> Ref<'a, >::Inner> where 'input: 'a { + fn start<'a>(&'a self) -> Ref<'a, >::Inner> + where + 'input: 'a, + { Ref::map(self.start.borrow(), |t| t.borrow()) } - fn start_mut<'a>(&'a self) -> RefMut<'a, >::Tok> where 'input: 'a { + fn start_mut<'a>(&'a self) -> RefMut<'a, >::Tok> + where + 'input: 'a, + { self.start.borrow_mut() } @@ -299,36 +345,40 @@ impl<'input, Ctx: 
CustomRuleContext<'input>> ParserRuleContext<'input> for BaseP *self.stop.borrow_mut() = t.unwrap_or(Ctx::TF::create_invalid().clone()); } - fn stop<'a>(&'a self) -> Ref<'a, >::Inner> where 'input: 'a { + fn stop<'a>(&'a self) -> Ref<'a, >::Inner> + where + 'input: 'a, + { Ref::map(self.stop.borrow(), |t| t.borrow()) } - fn stop_mut<'a>(&'a self) -> RefMut<'a, >::Tok> where 'input: 'a { + fn stop_mut<'a>(&'a self) -> RefMut<'a, >::Tok> + where + 'input: 'a, + { self.stop.borrow_mut() } -// fn add_token_node(&self, token: TerminalNode<'input, Ctx::TF>) -> ParserRuleContextType<'input, Ctx::TF> { -// let node: ParserRuleContextType<'input, Ctx::TF> = Rc::new(token); -// self.children.borrow_mut().push(node.clone()); -// node -// } -// -// fn add_error_node(&self, bad_token: ErrorNode<'input, Ctx::TF>) -> ParserRuleContextType<'input, Ctx::TF> { -// // bad_token.base.parent_ctx = -// let node: ParserRuleContextType<'input, Ctx::TF> = Rc::new(bad_token); -// // Backtrace::new().frames()[0].symbols()[0]; -// -// self.children.borrow_mut().push(node.clone()); -// node -// } + // fn add_token_node(&self, token: TerminalNode<'input, Ctx::TF>) -> ParserRuleContextType<'input, Ctx::TF> { + // let node: ParserRuleContextType<'input, Ctx::TF> = Rc::new(token); + // self.children.borrow_mut().push(node.clone()); + // node + // } + // + // fn add_error_node(&self, bad_token: ErrorNode<'input, Ctx::TF>) -> ParserRuleContextType<'input, Ctx::TF> { + // // bad_token.base.parent_ctx = + // let node: ParserRuleContextType<'input, Ctx::TF> = Rc::new(bad_token); + // // Backtrace::new().frames()[0].symbols()[0]; + // + // self.children.borrow_mut().push(node.clone()); + // node + // } fn add_child(&self, child: Rc<>::Type>) { self.children.borrow_mut().push(child); } - fn remove_last_child(&self) { - self.children.borrow_mut().pop(); - } + fn remove_last_child(&self) { self.children.borrow_mut().pop(); } // fn enter_rule(&self, listener: &mut dyn Any) { // Ctx::enter(self, listener) @@ -348,21 +398,15 @@ impl<'input, Ctx: CustomRuleContext<'input>> Tree<'input> for BaseParserRuleCont self.get_parent_ctx() } - fn has_parent(&self) -> bool { - self.base.parent_ctx.borrow().is_some() - } + fn has_parent(&self) -> bool { self.base.parent_ctx.borrow().is_some() } - fn get_payload(&self) -> Box { - unimplemented!() - } + fn get_payload(&self) -> Box { unimplemented!() } fn get_child(&self, i: usize) -> Option>::Type>> { self.children.borrow().get(i).cloned() } - fn get_child_count(&self) -> usize { - self.children.borrow().len() - } + fn get_child_count(&self) -> usize { self.children.borrow().len() } // fn get_children<'a>(&'a self) -> Box>::Type>> + 'a> where 'input:'a{ // let len = self.children.borrow().len(); @@ -375,9 +419,14 @@ impl<'input, Ctx: CustomRuleContext<'input>> Tree<'input> for BaseParserRuleCont // } } -impl<'input, Ctx: CustomRuleContext<'input>> ParseTree<'input> for BaseParserRuleContext<'input, Ctx> { +impl<'input, Ctx: CustomRuleContext<'input>> ParseTree<'input> + for BaseParserRuleContext<'input, Ctx> +{ fn get_source_interval(&self) -> Interval { - Interval { a: self.start().get_token_index(), b: self.stop().get_token_index() } + Interval { + a: self.start().get_token_index(), + b: self.stop().get_token_index(), + } } default fn get_text(&self) -> String { @@ -390,11 +439,14 @@ impl<'input, Ctx: CustomRuleContext<'input>> ParseTree<'input> for BaseParserRul result } - } impl<'input, Ctx: CustomRuleContext<'input> + 'input> BaseParserRuleContext<'input, Ctx> { - pub fn 
new_parser_ctx(parent_ctx: Option>::Type>>, invoking_state: isize, ext: Ctx) -> Self { + pub fn new_parser_ctx( + parent_ctx: Option>::Type>>, + invoking_state: isize, + ext: Ctx, + ) -> Self { Self { base: BaseRuleContext::new_ctx(parent_ctx, invoking_state, ext), start: RefCell::new(Ctx::TF::create_invalid()), @@ -403,7 +455,10 @@ impl<'input, Ctx: CustomRuleContext<'input> + 'input> BaseParserRuleContext<'inp children: RefCell::new(vec![]), } } - pub fn copy_from + ?Sized>(ctx: &T, ext: Ctx) -> Self { + pub fn copy_from + ?Sized>( + ctx: &T, + ext: Ctx, + ) -> Self { Self { base: BaseRuleContext::new_ctx(ctx.get_parent_ctx(), ctx.get_invoking_state(), ext), start: RefCell::new(ctx.start_mut().clone()), @@ -418,7 +473,6 @@ impl<'input, Ctx: CustomRuleContext<'input> + 'input> BaseParserRuleContext<'inp // } } - /////////////////////////////////////////////// // Needed to significantly reduce boilerplate in the generated code, // because there is no simple way to implement trait for enum @@ -428,22 +482,53 @@ impl<'input, Ctx: CustomRuleContext<'input> + 'input> BaseParserRuleContext<'inp #[doc(hidden)] pub trait DerefSeal: Deref {} -impl<'input, T: DerefSeal + 'input + Debug + Tid, I: ParserRuleContext<'input> + 'input + ?Sized> ParserRuleContext<'input> for T { +impl< + 'input, + T: DerefSeal + 'input + Debug + Tid, + I: ParserRuleContext<'input> + 'input + ?Sized, + > ParserRuleContext<'input> for T +{ fn set_exception(&self, e: ANTLRError) { self.deref().set_exception(e) } - fn set_start(&self, t: Option<>::Tok>) { self.deref().set_start(t) } + fn set_start(&self, t: Option<>::Tok>) { + self.deref().set_start(t) + } - fn start<'a>(&'a self) -> Ref<'a, >::Inner> where 'input: 'a { self.deref().start() } + fn start<'a>(&'a self) -> Ref<'a, >::Inner> + where + 'input: 'a, + { + self.deref().start() + } - fn start_mut<'a>(&'a self) -> RefMut<'a, >::Tok> where 'input: 'a { self.deref().start_mut() } + fn start_mut<'a>(&'a self) -> RefMut<'a, >::Tok> + where + 'input: 'a, + { + self.deref().start_mut() + } - fn set_stop(&self, t: Option<>::Tok>) { self.deref().set_stop(t) } + fn set_stop(&self, t: Option<>::Tok>) { + self.deref().set_stop(t) + } - fn stop<'a>(&'a self) -> Ref<'a, >::Inner> where 'input: 'a { self.deref().stop() } + fn stop<'a>(&'a self) -> Ref<'a, >::Inner> + where + 'input: 'a, + { + self.deref().stop() + } - fn stop_mut<'a>(&'a self) -> RefMut<'a, >::Tok> where 'input: 'a { self.deref().stop_mut() } + fn stop_mut<'a>(&'a self) -> RefMut<'a, >::Tok> + where + 'input: 'a, + { + self.deref().stop_mut() + } - fn add_child(&self, child: Rc<>::Type>) { self.deref().add_child(child) } + fn add_child(&self, child: Rc<>::Type>) { + self.deref().add_child(child) + } fn remove_last_child(&self) { self.deref().remove_last_child() } @@ -454,41 +539,76 @@ impl<'input, T: DerefSeal + 'input + Debug + Tid, I: ParserRuleContext // fn upcast(&self) -> &dyn ParserRuleContext<'input, TF=Self::TF> { self.deref().upcast() } } -impl<'input, T: DerefSeal + 'input + Debug + Tid, I: ParserRuleContext<'input> + 'input + ?Sized> RuleContext<'input> for T { +impl< + 'input, + T: DerefSeal + 'input + Debug + Tid, + I: ParserRuleContext<'input> + 'input + ?Sized, + > RuleContext<'input> for T +{ fn get_invoking_state(&self) -> isize { self.deref().get_invoking_state() } fn set_invoking_state(&self, t: isize) { self.deref().set_invoking_state(t) } fn is_empty(&self) -> bool { self.deref().is_empty() } - fn get_parent_ctx(&self) -> Option>::Type>> { self.deref().get_parent_ctx() } + fn 
get_parent_ctx(&self) -> Option>::Type>> { + self.deref().get_parent_ctx() + } - fn set_parent(&self, parent: &Option>::Type>>) { self.deref().set_parent(parent) } + fn set_parent(&self, parent: &Option>::Type>>) { + self.deref().set_parent(parent) + } } -impl<'input, T: DerefSeal + 'input + Debug + Tid, I: ParserRuleContext<'input> + 'input + ?Sized> ParseTree<'input> for T { +impl< + 'input, + T: DerefSeal + 'input + Debug + Tid, + I: ParserRuleContext<'input> + 'input + ?Sized, + > ParseTree<'input> for T +{ fn get_source_interval(&self) -> Interval { self.deref().get_source_interval() } fn get_text(&self) -> String { self.deref().get_text() } } -impl<'input, T: DerefSeal + 'input + Debug + Tid, I: ParserRuleContext<'input> + 'input + ?Sized +> Tree<'input> for T { - fn get_parent(&self) -> Option>::Type>> { self.deref().get_parent() } +impl< + 'input, + T: DerefSeal + 'input + Debug + Tid, + I: ParserRuleContext<'input> + 'input + ?Sized, + > Tree<'input> for T +{ + fn get_parent(&self) -> Option>::Type>> { + self.deref().get_parent() + } fn has_parent(&self) -> bool { self.deref().has_parent() } fn get_payload(&self) -> Box { self.deref().get_payload() } - fn get_child(&self, i: usize) -> Option>::Type>> { self.deref().get_child(i) } + fn get_child(&self, i: usize) -> Option>::Type>> { + self.deref().get_child(i) + } fn get_child_count(&self) -> usize { self.deref().get_child_count() } - fn get_children<'a>(&'a self) -> Box>::Type>> + 'a> where 'input: 'a { self.deref().get_children() } + fn get_children<'a>( + &'a self, + ) -> Box>::Type>> + 'a> + where + 'input: 'a, + { + self.deref().get_children() + } // fn get_children_full(&self) -> &RefCell>::Type>>> { self.deref().get_children_full() } } -impl<'input, T: DerefSeal + 'input + Debug + Tid, I: ParserRuleContext<'input> + 'input + ?Sized> CustomRuleContext<'input> for T { +impl< + 'input, + T: DerefSeal + 'input + Debug + Tid, + I: ParserRuleContext<'input> + 'input + ?Sized, + > CustomRuleContext<'input> for T +{ type TF = I::TF; type Ctx = I::Ctx; diff --git a/src/prediction_context.rs b/src/prediction_context.rs index 84ae800..bed7c57 100644 --- a/src/prediction_context.rs +++ b/src/prediction_context.rs @@ -166,7 +166,7 @@ impl PredictionContext { parent_ctx, return_state, }) - .modify_with(|x| x.calc_hash()) + .modify_with(|x| x.calc_hash()) } pub fn new_empty() -> PredictionContext { @@ -183,10 +183,10 @@ impl PredictionContext { let mut hasher = MurmurHasher::default(); match self { PredictionContext::Singleton(SingletonPredictionContext { - parent_ctx, - return_state, - .. - }) => { + parent_ctx, + return_state, + .. + }) => { hasher.write_i32(match parent_ctx { None => 0, Some(x) => x.hash_code(), @@ -194,10 +194,10 @@ impl PredictionContext { hasher.write_i32(*return_state as i32); } PredictionContext::Array(ArrayPredictionContext { - parents, - return_states, - .. - }) => { + parents, + return_states, + .. 
+ }) => { parents.iter().for_each(|x| { hasher.write_i32(match x { None => 0, @@ -391,7 +391,7 @@ impl PredictionContext { } Array(result) } - .alloc() + .alloc() }) } @@ -414,7 +414,7 @@ impl PredictionContext { vec![b.parent_ctx.clone(), None], vec![b.return_state, PREDICTION_CONTEXT_EMPTY_RETURN_STATE], ) - .alloc(), + .alloc(), ); } if b.is_empty() { @@ -423,7 +423,7 @@ impl PredictionContext { vec![a.parent_ctx.clone(), None], vec![a.return_state, PREDICTION_CONTEXT_EMPTY_RETURN_STATE], ) - .alloc(), + .alloc(), ); } } diff --git a/src/prediction_context/test.rs b/src/prediction_context/test.rs index 9ff2a27..2be7152 100644 --- a/src/prediction_context/test.rs +++ b/src/prediction_context/test.rs @@ -10,11 +10,13 @@ fn full_ctx() -> bool { false } #[test] fn test_e_e() { - let r = PredictionContext::merge(&*EMPTY_PREDICTION_CONTEXT, - &*EMPTY_PREDICTION_CONTEXT, - root_is_wildcard(), &mut None); - let expecting = - "digraph G { + let r = PredictionContext::merge( + &*EMPTY_PREDICTION_CONTEXT, + &*EMPTY_PREDICTION_CONTEXT, + root_is_wildcard(), + &mut None, + ); + let expecting = "digraph G { rankdir=LR; s0[label=\"*\"]; }\n"; @@ -23,11 +25,13 @@ rankdir=LR; #[test] fn test_e_e_fullctx() { - let r = PredictionContext::merge(&*EMPTY_PREDICTION_CONTEXT, - &*EMPTY_PREDICTION_CONTEXT, - full_ctx(), &mut None); - let expecting = - "digraph G { + let r = PredictionContext::merge( + &*EMPTY_PREDICTION_CONTEXT, + &*EMPTY_PREDICTION_CONTEXT, + full_ctx(), + &mut None, + ); + let expecting = "digraph G { rankdir=LR; s0[label=\"$\"]; }\n"; @@ -36,29 +40,27 @@ rankdir=LR; #[test] fn test_x_e() { - let r = PredictionContext::merge(&x(), - &*EMPTY_PREDICTION_CONTEXT, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"*\"];\n" + - "}\n"; + let r = PredictionContext::merge( + &x(), + &*EMPTY_PREDICTION_CONTEXT, + root_is_wildcard(), + &mut None, + ); + let expecting = + String::new() + "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"*\"];\n" + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } #[test] fn test_x_e_fullctx() { - let r = PredictionContext::merge(&x(), - &*EMPTY_PREDICTION_CONTEXT, - full_ctx(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|$\"];\n" + - " s1[label=\"$\"];\n" + - " s0:p0->s1[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&x(), &*EMPTY_PREDICTION_CONTEXT, full_ctx(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|$\"];\n" + + " s1[label=\"$\"];\n" + + " s0:p0->s1[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, full_ctx())) } @@ -67,44 +69,37 @@ fn test_e_x() { let r = PredictionContext::merge( &*EMPTY_PREDICTION_CONTEXT, &x(), - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"*\"];\n" + - "}\n"; + root_is_wildcard(), + &mut None, + ); + let expecting = + String::new() + "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"*\"];\n" + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } #[test] fn test_e_x_fullctx() { - let r = PredictionContext::merge( - &*EMPTY_PREDICTION_CONTEXT, - &x(), - full_ctx(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|$\"];\n" + - " s1[label=\"$\"];\n" + - " s0:p0->s1[label=\"9\"];\n" + - "}\n"; + let r = 
PredictionContext::merge(&*EMPTY_PREDICTION_CONTEXT, &x(), full_ctx(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|$\"];\n" + + " s1[label=\"$\"];\n" + + " s0:p0->s1[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, full_ctx())) } #[test] fn test_a_a() { - let r = PredictionContext::merge( - &a(), - &a(), - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a(), &a(), root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -113,17 +108,14 @@ fn test_ae_ax() { let a1 = a(); let x = x(); let a2 = PredictionContext::new_singleton(Some(x), 1).alloc(); - let r = PredictionContext::merge( - &a1, - &a2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -132,19 +124,16 @@ fn test_ae_ax_fullctx() { let a1 = a(); let x = x(); let a2 = PredictionContext::new_singleton(Some(x), 1).alloc(); - let r = PredictionContext::merge( - &a1, - &a2, - full_ctx(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[shape=record, label=\"|$\"];\n" + - " s2[label=\"$\"];\n" + - " s0->s1[label=\"1\"];\n" + - " s1:p0->s2[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, full_ctx(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[shape=record, label=\"|$\"];\n" + + " s2[label=\"$\"];\n" + + " s0->s1[label=\"1\"];\n" + + " s1:p0->s2[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, full_ctx())) } @@ -153,17 +142,14 @@ fn test_axe_ae() { let x = x(); let a1 = PredictionContext::new_singleton(Some(x), 1).alloc(); let a2 = a(); - let r = PredictionContext::merge( - &a1, - &a2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -173,19 +159,16 @@ fn test_aae_ae_e_fullctx() { let child1 = PredictionContext::new_singleton(Some(empty.clone()), 8).alloc(); let right = PredictionContext::merge(&empty, &child1, false, &mut None); let left = PredictionContext::new_singleton(Some(right.clone()), 8).alloc(); - let r = PredictionContext::merge( - &left, - &right, - false, &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|$\"];\n" + 
- " s1[shape=record, label=\"|$\"];\n" + - " s2[label=\"$\"];\n" + - " s0:p0->s1[label=\"8\"];\n" + - " s1:p0->s2[label=\"8\"];\n" + - "}\n"; + let r = PredictionContext::merge(&left, &right, false, &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|$\"];\n" + + " s1[shape=record, label=\"|$\"];\n" + + " s2[label=\"$\"];\n" + + " s0:p0->s1[label=\"8\"];\n" + + " s1:p0->s2[label=\"8\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, false)) } @@ -194,36 +177,30 @@ fn test_axe_ae_fullctx() { let x = x(); let a1 = PredictionContext::new_singleton(Some(x), 1).alloc(); let a2 = a(); - let r = PredictionContext::merge( - &a1, - &a2, - full_ctx(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[shape=record, label=\"|$\"];\n" + - " s2[label=\"$\"];\n" + - " s0->s1[label=\"1\"];\n" + - " s1:p0->s2[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, full_ctx(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[shape=record, label=\"|$\"];\n" + + " s2[label=\"$\"];\n" + + " s0->s1[label=\"1\"];\n" + + " s1:p0->s2[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, full_ctx())) } #[test] fn test_a_b() { - let r = PredictionContext::merge( - &a(), - &b(), - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s1[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a(), &b(), root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s1[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -232,19 +209,16 @@ fn test_ax_ax_same() { let x = x(); let a1 = PredictionContext::new_singleton(Some(x.clone()), 1).alloc(); let a2 = PredictionContext::new_singleton(Some(x.clone()), 1).alloc(); - let r = PredictionContext::merge( - &a1, - &a2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[label=\"1\"];\n" + - " s2[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - " s1->s2[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[label=\"1\"];\n" + + " s2[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + " s1->s2[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -252,19 +226,16 @@ fn test_ax_ax_same() { fn test_ax_ax() { let a1 = PredictionContext::new_singleton(Some(x()), 1).alloc(); let a2 = PredictionContext::new_singleton(Some(x()), 1).alloc(); - let r = PredictionContext::merge( - &a1, - &a2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[label=\"1\"];\n" + - " s2[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - " s1->s2[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + 
+ " s1[label=\"1\"];\n" + + " s2[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + " s1->s2[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -274,21 +245,18 @@ fn test_abx_abx() { let b2 = PredictionContext::new_singleton(Some(x()), 2).alloc(); let a1 = PredictionContext::new_singleton(Some(b1), 1).alloc(); let a2 = PredictionContext::new_singleton(Some(b2), 1).alloc(); - let r = PredictionContext::merge( - &a1, - &a2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[label=\"1\"];\n" + - " s2[label=\"2\"];\n" + - " s3[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - " s1->s2[label=\"2\"];\n" + - " s2->s3[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[label=\"1\"];\n" + + " s2[label=\"2\"];\n" + + " s3[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + " s1->s2[label=\"2\"];\n" + + " s2->s3[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -298,22 +266,19 @@ fn test_abx_acx() { let c = PredictionContext::new_singleton(Some(x()), 3).alloc(); let a1 = PredictionContext::new_singleton(Some(b1), 1).alloc(); let a2 = PredictionContext::new_singleton(Some(c), 1).alloc(); - let r = PredictionContext::merge( - &a1, - &a2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[shape=record, label=\"|\"];\n" + - " s2[label=\"2\"];\n" + - " s3[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - " s1:p0->s2[label=\"2\"];\n" + - " s1:p1->s2[label=\"3\"];\n" + - " s2->s3[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a1, &a2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[shape=record, label=\"|\"];\n" + + " s2[label=\"2\"];\n" + + " s3[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + " s1:p0->s2[label=\"2\"];\n" + + " s1:p1->s2[label=\"3\"];\n" + + " s2->s3[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -322,20 +287,17 @@ fn test_ax_bx_same() { let x = x(); let a = PredictionContext::new_singleton(Some(x.clone()), 1).alloc(); let b = PredictionContext::new_singleton(Some(x.clone()), 2).alloc(); - let r = PredictionContext::merge( - &a, - &b, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s1[label=\"1\"];\n" + - " s2[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - " s1->s2[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a, &b, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s1[label=\"1\"];\n" + + " s2[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + " s1->s2[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -343,20 +305,17 @@ fn test_ax_bx_same() { fn test_ax_bx() { let a = PredictionContext::new_singleton(Some(x()), 1).alloc(); let b = PredictionContext::new_singleton(Some(x()), 2).alloc(); - let r = PredictionContext::merge( - &a, - &b, - root_is_wildcard(), &mut None); - let expecting = 
String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s1[label=\"1\"];\n" + - " s2[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - " s1->s2[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a, &b, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s1[label=\"1\"];\n" + + " s2[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + " s1->s2[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -365,20 +324,17 @@ fn test_ae_bx() { let x2 = x(); let a = a(); let b = PredictionContext::new_singleton(Some(x2), 2).alloc(); - let r = PredictionContext::merge( - &a, - &b, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s2[label=\"2\"];\n" + - " s1[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s2->s1[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a, &b, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s2[label=\"2\"];\n" + + " s1[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s2->s1[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -387,20 +343,17 @@ fn test_ae_bx_fullctx() { let x2 = x(); let a = a(); let b = PredictionContext::new_singleton(Some(x2), 2).alloc(); - let r = PredictionContext::merge( - &a, - &b, - full_ctx(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s2[label=\"2\"];\n" + - " s1[label=\"$\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s2->s1[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a, &b, full_ctx(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s2[label=\"2\"];\n" + + " s1[label=\"$\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s2->s1[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, full_ctx())) } @@ -413,24 +366,21 @@ fn test_aex_bfx() { let f = PredictionContext::new_singleton(Some(x2), 6).alloc(); let a = PredictionContext::new_singleton(Some(e), 1).alloc(); let b = PredictionContext::new_singleton(Some(f), 2).alloc(); - let r = PredictionContext::merge( - &a, - &b, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s2[label=\"2\"];\n" + - " s3[label=\"3\"];\n" + - " s4[label=\"*\"];\n" + - " s1[label=\"1\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s2->s3[label=\"6\"];\n" + - " s3->s4[label=\"9\"];\n" + - " s1->s3[label=\"5\"];\n" + - "}\n"; + let r = PredictionContext::merge(&a, &b, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s2[label=\"2\"];\n" + + " s3[label=\"3\"];\n" + + " s4[label=\"*\"];\n" + + " s1[label=\"1\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s2->s3[label=\"6\"];\n" + + " s3->s4[label=\"9\"];\n" + + " 
s1->s3[label=\"5\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -438,15 +388,9 @@ fn test_aex_bfx() { fn test_Ae_Ae_fullctx() { let A1 = array(vec![EMPTY_PREDICTION_CONTEXT.clone()]); let A2 = array(vec![EMPTY_PREDICTION_CONTEXT.clone()]); - let r = PredictionContext::merge( - &A1, - &A2, - full_ctx(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"$\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, full_ctx(), &mut None); + let expecting = + String::new() + "digraph G {\n" + "rankdir=LR;\n" + " s0[label=\"$\"];\n" + "}\n"; assert_eq!(expecting, to_dot_string(r, full_ctx())) } @@ -454,19 +398,16 @@ fn test_Ae_Ae_fullctx() { fn test_Aab_Ac() { let A1 = array(vec![a(), b()]); let A2 = array(vec![c()]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"||\"];\n" + - " s1[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - " s0:p2->s1[label=\"3\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"||\"];\n" + + " s1[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + " s0:p2->s1[label=\"3\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -474,17 +415,14 @@ fn test_Aab_Ac() { fn test_Aa_Aa() { let A1 = array(vec![a()]); let A2 = array(vec![a()]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -492,19 +430,16 @@ fn test_Aa_Aa() { fn test_Aa_Abc() { let A1 = array(vec![a()]); let A2 = array(vec![b(), c()]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"||\"];\n" + - " s1[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - " s0:p2->s1[label=\"3\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"||\"];\n" + + " s1[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + " s0:p2->s1[label=\"3\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -512,19 +447,16 @@ fn test_Aa_Abc() { fn test_Aac_Ab() { let A1 = array(vec![a(), c()]); let A2 = array(vec![b()]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"||\"];\n" + - " s1[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - " s0:p2->s1[label=\"3\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, 
root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"||\"];\n" + + " s1[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + " s0:p2->s1[label=\"3\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -532,18 +464,15 @@ fn test_Aac_Ab() { fn test_Aab_Aa() { let A1 = array(vec![a(), b()]); let A2 = array(vec![a()]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s1[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s1[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -551,18 +480,15 @@ fn test_Aab_Aa() { fn test_Aab_Ab() { let A1 = array(vec![a(), b()]); let A2 = array(vec![b()]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s1[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s1[label=\"*\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s1[label=\"2\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -572,22 +498,19 @@ fn test_Aax_Aby() { let b = PredictionContext::new_singleton(y().into(), 2).alloc(); let A1 = array(vec![a]); let A2 = array(vec![b]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|\"];\n" + - " s2[label=\"2\"];\n" + - " s3[label=\"*\"];\n" + - " s1[label=\"1\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s2->s3[label=\"10\"];\n" + - " s1->s3[label=\"9\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|\"];\n" + + " s2[label=\"2\"];\n" + + " s3[label=\"*\"];\n" + + " s1[label=\"1\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s2->s3[label=\"10\"];\n" + + " s1->s3[label=\"9\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -597,20 +520,17 @@ fn test_Aax_Aay() { let a2 = PredictionContext::new_singleton(y().into(), 1).alloc(); let A1 = array(vec![a1]); let A2 = array(vec![a2]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[label=\"0\"];\n" + - " s1[shape=record, label=\"|\"];\n" + - " s2[label=\"*\"];\n" + - " s0->s1[label=\"1\"];\n" + - " s1:p0->s2[label=\"9\"];\n" + - " s1:p1->s2[label=\"10\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + 
"digraph G {\n" + + "rankdir=LR;\n" + + " s0[label=\"0\"];\n" + + " s1[shape=record, label=\"|\"];\n" + + " s2[label=\"*\"];\n" + + " s0->s1[label=\"1\"];\n" + + " s1:p0->s2[label=\"9\"];\n" + + " s1:p1->s2[label=\"10\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -620,22 +540,19 @@ fn test_Aaxc_Aayd() { let a2 = PredictionContext::new_singleton(y().into(), 1).alloc(); let A1 = array(vec![a1, c()]); let A2 = array(vec![a2, d()]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"||\"];\n" + - " s2[label=\"*\"];\n" + - " s1[shape=record, label=\"|\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"3\"];\n" + - " s0:p2->s2[label=\"4\"];\n" + - " s1:p0->s2[label=\"9\"];\n" + - " s1:p1->s2[label=\"10\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"||\"];\n" + + " s2[label=\"*\"];\n" + + " s1[shape=record, label=\"|\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"3\"];\n" + + " s0:p2->s2[label=\"4\"];\n" + + " s1:p0->s2[label=\"9\"];\n" + + " s1:p1->s2[label=\"10\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -647,28 +564,25 @@ fn test_Aaubv_Acwdx() { let d = PredictionContext::new_singleton(x().into(), 4).alloc(); let A1 = array(vec![a, b]); let A2 = array(vec![c, d]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|||\"];\n" + - " s4[label=\"4\"];\n" + - " s5[label=\"*\"];\n" + - " s3[label=\"3\"];\n" + - " s2[label=\"2\"];\n" + - " s1[label=\"1\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s0:p2->s3[label=\"3\"];\n" + - " s0:p3->s4[label=\"4\"];\n" + - " s4->s5[label=\"9\"];\n" + - " s3->s5[label=\"8\"];\n" + - " s2->s5[label=\"7\"];\n" + - " s1->s5[label=\"6\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"|||\"];\n" + + " s4[label=\"4\"];\n" + + " s5[label=\"*\"];\n" + + " s3[label=\"3\"];\n" + + " s2[label=\"2\"];\n" + + " s1[label=\"1\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s0:p2->s3[label=\"3\"];\n" + + " s0:p3->s4[label=\"4\"];\n" + + " s4->s5[label=\"9\"];\n" + + " s3->s5[label=\"8\"];\n" + + " s2->s5[label=\"7\"];\n" + + " s1->s5[label=\"6\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -680,25 +594,22 @@ fn test_Aaubv_Abvdx() { let d = PredictionContext::new_singleton(x().into(), 4).alloc(); let A1 = array(vec![a, b1]); let A2 = array(vec![b2, d]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"||\"];\n" + - " s3[label=\"3\"];\n" + - " s4[label=\"*\"];\n" + - " s2[label=\"2\"];\n" + - " s1[label=\"1\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s0:p2->s3[label=\"4\"];\n" + - " s3->s4[label=\"9\"];\n" + - " s2->s4[label=\"7\"];\n" + - " s1->s4[label=\"6\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut 
None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"||\"];\n" + + " s3[label=\"3\"];\n" + + " s4[label=\"*\"];\n" + + " s2[label=\"2\"];\n" + + " s1[label=\"1\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s0:p2->s3[label=\"4\"];\n" + + " s3->s4[label=\"9\"];\n" + + " s2->s4[label=\"7\"];\n" + + " s1->s4[label=\"6\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -710,26 +621,23 @@ fn test_Aaubv_Abwdx() { let d = PredictionContext::new_singleton(x().into(), 4).alloc(); let A1 = array(vec![a, b1]); let A2 = array(vec![b2, d]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"||\"];\n" + - " s3[label=\"3\"];\n" + - " s4[label=\"*\"];\n" + - " s2[shape=record, label=\"|\"];\n" + - " s1[label=\"1\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s0:p2->s3[label=\"4\"];\n" + - " s3->s4[label=\"9\"];\n" + - " s2:p0->s4[label=\"7\"];\n" + - " s2:p1->s4[label=\"8\"];\n" + - " s1->s4[label=\"6\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"||\"];\n" + + " s3[label=\"3\"];\n" + + " s4[label=\"*\"];\n" + + " s2[shape=record, label=\"|\"];\n" + + " s1[label=\"1\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s0:p2->s3[label=\"4\"];\n" + + " s3->s4[label=\"9\"];\n" + + " s2:p0->s4[label=\"7\"];\n" + + " s2:p1->s4[label=\"8\"];\n" + + " s1->s4[label=\"6\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -741,23 +649,20 @@ fn test_Aaubv_Abvdu() { let d = PredictionContext::new_singleton(u().into(), 4).alloc(); let A1 = array(vec![a, b1]); let A2 = array(vec![b2, d]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"||\"];\n" + - " s2[label=\"2\"];\n" + - " s3[label=\"*\"];\n" + - " s1[label=\"1\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s2[label=\"2\"];\n" + - " s0:p2->s1[label=\"4\"];\n" + - " s2->s3[label=\"7\"];\n" + - " s1->s3[label=\"6\"];\n" + - "}\n"; + let r = PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None); + let expecting = String::new() + + "digraph G {\n" + + "rankdir=LR;\n" + + " s0[shape=record, label=\"||\"];\n" + + " s2[label=\"2\"];\n" + + " s3[label=\"*\"];\n" + + " s1[label=\"1\"];\n" + + " s0:p0->s1[label=\"1\"];\n" + + " s0:p1->s2[label=\"2\"];\n" + + " s0:p2->s1[label=\"4\"];\n" + + " s2->s3[label=\"7\"];\n" + + " s1->s3[label=\"6\"];\n" + + "}\n"; assert_eq!(expecting, to_dot_string(r, root_is_wildcard())) } @@ -769,26 +674,22 @@ fn test_Aaubu_Acudu() { let d = PredictionContext::new_singleton(u().into(), 4).alloc(); let A1 = array(vec![a, b]); let A2 = array(vec![c, d]); - let r = PredictionContext::merge( - &A1, - &A2, - root_is_wildcard(), &mut None); - let expecting = String::new() + - "digraph G {\n" + - "rankdir=LR;\n" + - " s0[shape=record, label=\"|||\"];\n" + - " s1[label=\"1\"];\n" + - " s2[label=\"*\"];\n" + - " s0:p0->s1[label=\"1\"];\n" + - " s0:p1->s1[label=\"2\"];\n" + - " s0:p2->s1[label=\"3\"];\n" + - " s0:p3->s1[label=\"4\"];\n" + - " s1->s2[label=\"6\"];\n" + - "}\n"; + let r = 
PredictionContext::merge(&A1, &A2, root_is_wildcard(), &mut None);
+    let expecting = String::new()
+        + "digraph G {\n"
+        + "rankdir=LR;\n"
+        + "  s0[shape=record, label=\"<p0>|<p1>|<p2>|<p3>\"];\n"
+        + "  s1[label=\"1\"];\n"
+        + "  s2[label=\"*\"];\n"
+        + "  s0:p0->s1[label=\"1\"];\n"
+        + "  s0:p1->s1[label=\"2\"];\n"
+        + "  s0:p2->s1[label=\"3\"];\n"
+        + "  s0:p3->s1[label=\"4\"];\n"
+        + "  s1->s2[label=\"6\"];\n"
+        + "}\n";
     assert_eq!(expecting, to_dot_string(r, root_is_wildcard()))
 }
-
 fn array(nodes: Vec<Arc<PredictionContext>>) -> Arc<PredictionContext> {
     let mut parents = Vec::with_capacity(nodes.len());
     let mut invoking_states = Vec::with_capacity(nodes.len());
@@ -800,23 +701,41 @@ fn array(nodes: Vec<Arc<PredictionContext>>) -> Arc<PredictionContext> {
     PredictionContext::new_array(parents, invoking_states).alloc()
 }

-fn y() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 10).alloc() }
+fn y() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 10).alloc()
+}

-fn x() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 9).alloc() }
+fn x() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 9).alloc()
+}

-fn w() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 8).alloc() }
+fn w() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 8).alloc()
+}

-fn v() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 7).alloc() }
+fn v() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 7).alloc()
+}

-fn u() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 6).alloc() }
+fn u() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 6).alloc()
+}

-fn d() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 4).alloc() }
+fn d() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 4).alloc()
+}

-fn c() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 3).alloc() }
+fn c() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 3).alloc()
+}

-fn b() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 2).alloc() }
+fn b() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 2).alloc()
+}

-fn a() -> Arc<PredictionContext> { PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 1).alloc() }
+fn a() -> Arc<PredictionContext> {
+    PredictionContext::new_singleton(Some(EMPTY_PREDICTION_CONTEXT.clone()), 1).alloc()
+}

 fn to_dot_string(context: Arc<PredictionContext>, is_root_wildcard: bool) -> String {
     let mut nodes = String::new();
@@ -878,12 +797,15 @@ fn to_dot_string(context: Arc<PredictionContext>, is_root_wildcard: bool) -> String {
             edges += &i.to_string();
         }

-        edges += &format!("->s{}[label=\"{}\"];\n"
-                          , context_ids.get(&(current.get_parent(i).unwrap().deref() as *const PredictionContext)).unwrap()
-                          , current.get_return_state(i));
+        edges += &format!(
+            "->s{}[label=\"{}\"];\n",
+            context_ids
+                .get(&(current.get_parent(i).unwrap().deref() as *const PredictionContext))
+                .unwrap(),
+            current.get_return_state(i)
+        );
         }
     }

     return format!("digraph G {{\nrankdir=LR;\n{}{}}}\n", nodes, edges);
 }
-
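> Reviewer note: every test in this file renders the merged prediction-context graph to Graphviz DOT and compares strings, so a failure diff is easiest to read if you know the notation. A standalone snippet showing one of the merged shapes (the `a + b` case; the `<p0>`/`<p1>` record ports are the two merged return states, both pointing at the `*` wildcard root):

```rust
fn main() {
    // The merged graph from `test_a_b`: one record node with two ports,
    // both leading to the wildcard root. Plain DOT, renderable with
    // `dot -Tpng`.
    let expecting = String::new()
        + "digraph G {\n"
        + "rankdir=LR;\n"
        + "  s0[shape=record, label=\"<p0>|<p1>\"];\n"
        + "  s1[label=\"*\"];\n"
        + "  s0:p0->s1[label=\"1\"];\n"
        + "  s0:p1->s1[label=\"2\"];\n"
        + "}\n";
    print!("{}", expecting);
}
```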
crate::token_factory::{TokenAware, TokenFactory}; use crate::vocabulary::Vocabulary; +use crate::Parser; /// Major version of this runtime. /// Used by the generated parser to verify that it is compatible with the current version of the runtime @@ -23,13 +23,27 @@ pub fn check_version(major: &str, minor: &str) { /// **! Usually generated by ANTLR !** pub trait Recognizer<'input>: TokenAware<'input> { - type Node: ParserNodeType<'input, TF=Self::TF>; - fn sempred(&mut self, localctx: &<Self::Node as ParserNodeType<'input>>::Type, rule_index: isize, action_index: isize) -> bool - where Self: Sized - { true } - fn action(&mut self, localctx: &<Self::Node as ParserNodeType<'input>>::Type, rule_index: isize, action_index: isize) - where Self: Sized - {} + type Node: ParserNodeType<'input, TF = Self::TF>; + fn sempred( + &mut self, + localctx: &<Self::Node as ParserNodeType<'input>>::Type, + rule_index: isize, + action_index: isize, + ) -> bool + where + Self: Sized, + { + true + } + fn action( + &mut self, + localctx: &<Self::Node as ParserNodeType<'input>>::Type, + rule_index: isize, + action_index: isize, + ) where + Self: Sized, + { + } /// Returns array of rule names. /// Used for debugging and error reporting @@ -46,15 +60,22 @@ pub trait Recognizer<'input>: TokenAware<'input> { /// Used to make user predicates and actions callable by the parser /// Generated by the ANTLR tool from the actions and predicates added in the grammar file pub trait Actions<'a, P: Recognizer<'a>> { - fn sempred(_localctx: &<P::Node as ParserNodeType<'a>>::Type, _rule_index: isize, _action_index: isize, - _recog: &mut P, + fn sempred( + _localctx: &<P::Node as ParserNodeType<'a>>::Type, + _rule_index: isize, + _action_index: isize, + _recog: &mut P, ) -> bool { true } - fn action(_localctx: &<P::Node as ParserNodeType<'a>>::Type, _rule_index: isize, _action_index: isize, - _recog: &mut P, - ) {} + fn action( + _localctx: &<P::Node as ParserNodeType<'a>>::Type, + _rule_index: isize, + _action_index: isize, + _recog: &mut P, + ) { + } /// Returns array of rule names. /// Used for debugging and error reporting diff --git a/src/rule_context.rs b/src/rule_context.rs index adcfa8f..8ba1e20 100644 --- a/src/rule_context.rs +++ b/src/rule_context.rs @@ -21,44 +21,47 @@ pub trait RuleContext<'input>: CustomRuleContext<'input> { /// A context is empty if there is no invoking state, meaning nobody called the /// current context. 
Which is usually true for the root of the syntax tree - fn is_empty(&self) -> bool { - self.get_invoking_state() == -1 - } + fn is_empty(&self) -> bool { self.get_invoking_state() == -1 } fn get_parent_ctx(&self) -> Option>::Type>> { None } fn set_parent(&self, parent: &Option>::Type>>) {} } -pub(crate) fn states_stack<'input,T:ParserRuleContext<'input>+?Sized + 'input>(mut ctx:Rc) -> impl Iterator -where T::Ctx:ParserNodeType<'input,Type=T> +pub(crate) fn states_stack<'input, T: ParserRuleContext<'input> + ?Sized + 'input>( + mut ctx: Rc, +) -> impl Iterator +where + T::Ctx: ParserNodeType<'input, Type = T>, { - from_fn(move|| - if ctx.get_invoking_state()<0{ + from_fn(move || { + if ctx.get_invoking_state() < 0 { None } else { let state = ctx.get_invoking_state(); ctx = ctx.get_parent_ctx().unwrap(); Some(state) } - ) + }) } #[doc(hidden)] pub unsafe trait Tid { fn self_id(&self) -> TypeId; - fn id() -> TypeId where Self: Sized; + fn id() -> TypeId + where + Self: Sized; } -pub struct EmptyCustomRuleContext<'a, TF: TokenFactory<'a> + 'a>(pub(crate) PhantomData<&'a TF::Tok>); +pub struct EmptyCustomRuleContext<'a, TF: TokenFactory<'a> + 'a>( + pub(crate) PhantomData<&'a TF::Tok>, +); impl<'a, TF: TokenFactory<'a> + 'a> CustomRuleContext<'a> for EmptyCustomRuleContext<'a, TF> { type TF = TF; type Ctx = EmptyContextType<'a, TF>; - fn get_rule_index(&self) -> usize { - usize::max_value() - } + fn get_rule_index(&self) -> usize { usize::max_value() } } unsafe impl<'a, TF: TokenFactory<'a> + 'a> Tid for EmptyCustomRuleContext<'a, TF> { @@ -66,24 +69,27 @@ unsafe impl<'a, TF: TokenFactory<'a> + 'a> Tid for EmptyCustomRuleContext<'a, TF TypeId::of::>() } - fn id() -> TypeId where Self: Sized { + fn id() -> TypeId + where + Self: Sized, + { TypeId::of::>() } } -pub type EmptyContext<'a, TF> = dyn ParserRuleContext<'a, TF=TF, Ctx=EmptyContextType<'a, TF>> + 'a; +pub type EmptyContext<'a, TF> = + dyn ParserRuleContext<'a, TF = TF, Ctx = EmptyContextType<'a, TF>> + 'a; pub struct EmptyContextType<'a, TF: TokenFactory<'a>>(pub PhantomData<&'a TF>); impl<'a, TF: TokenFactory<'a>> ParserNodeType<'a> for EmptyContextType<'a, TF> { type TF = TF; - type Type = dyn ParserRuleContext<'a, TF=Self::TF, Ctx=Self> + 'a; + type Type = dyn ParserRuleContext<'a, TF = Self::TF, Ctx = Self> + 'a; } - pub trait CustomRuleContext<'input>: Tid { type TF: TokenFactory<'input> + 'input; - type Ctx: ParserNodeType<'input, TF=Self::TF>; + type Ctx: ParserNodeType<'input, TF = Self::TF>; //const RULE_INDEX:usize; fn get_rule_index(&self) -> usize; @@ -100,7 +106,11 @@ pub struct BaseRuleContext<'input, ExtCtx: CustomRuleContext<'input>> { } impl<'input, ExtCtx: CustomRuleContext<'input>> BaseRuleContext<'input, ExtCtx> { - pub(crate) fn new_ctx(parent_ctx: Option>::Type>>, invoking_state: isize, ext: ExtCtx) -> Self { + pub(crate) fn new_ctx( + parent_ctx: Option>::Type>>, + invoking_state: isize, + ext: ExtCtx, + ) -> Self { BaseRuleContext { parent_ctx: RefCell::new(parent_ctx.as_ref().map(Rc::downgrade)), invoking_state: Cell::new(invoking_state), @@ -109,41 +119,44 @@ impl<'input, ExtCtx: CustomRuleContext<'input>> BaseRuleContext<'input, ExtCtx> } } -impl<'input, ExtCtx: CustomRuleContext<'input>> CustomRuleContext<'input> for BaseRuleContext<'input, ExtCtx> { +impl<'input, ExtCtx: CustomRuleContext<'input>> CustomRuleContext<'input> + for BaseRuleContext<'input, ExtCtx> +{ type TF = ExtCtx::TF; type Ctx = ExtCtx::Ctx; - fn get_rule_index(&self) -> usize { - self.ext.get_rule_index() - } + fn get_rule_index(&self) -> 
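// NOTE: illustrative sketch, not part of the patch. `states_stack` above turns a
// rule context's parent chain into an iterator of ATN invoking states, walking
// upward until it reaches the root (invoking state < 0); `ATN::get_expected_tokens`
// consumes it during error reporting. Hypothetical use, assuming some
// `ctx: Rc<T>` where `T: ParserRuleContext`:
//
//     let call_stack: Vec<isize> = states_stack(ctx).collect();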
usize { self.ext.get_rule_index() } } unsafe impl<'input, Ctx: CustomRuleContext<'input>> Tid for BaseRuleContext<'input, Ctx> { - fn self_id(&self) -> TypeId { - self.ext.self_id() - } + fn self_id(&self) -> TypeId { self.ext.self_id() } - fn id() -> TypeId where Self: Sized { + fn id() -> TypeId + where + Self: Sized, + { Ctx::id() } } -impl<'input, ExtCtx: CustomRuleContext<'input>> RuleContext<'input> for BaseRuleContext<'input, ExtCtx> { - fn get_invoking_state(&self) -> isize { - self.invoking_state.get() - } +impl<'input, ExtCtx: CustomRuleContext<'input>> RuleContext<'input> + for BaseRuleContext<'input, ExtCtx> +{ + fn get_invoking_state(&self) -> isize { self.invoking_state.get() } - fn set_invoking_state(&self, t: isize) { - self.invoking_state.set(t) - } + fn set_invoking_state(&self, t: isize) { self.invoking_state.set(t) } fn get_parent_ctx(&self) -> Option>::Type>> { - self.parent_ctx.borrow().as_ref().map(Weak::upgrade).flatten() + self.parent_ctx + .borrow() + .as_ref() + .map(Weak::upgrade) + .flatten() } -// fn get_parent_ctx(&self) -> Option { -// self.parent_ctx.borrow().as_ref().map(Weak::upgrade).map(Option::unwrap) -// } + // fn get_parent_ctx(&self) -> Option { + // self.parent_ctx.borrow().as_ref().map(Weak::upgrade).map(Option::unwrap) + // } fn set_parent(&self, parent: &Option>::Type>>) { *self.parent_ctx.borrow_mut() = parent.as_ref().map(Rc::downgrade); @@ -151,13 +164,17 @@ impl<'input, ExtCtx: CustomRuleContext<'input>> RuleContext<'input> for BaseRule } impl<'input, ExtCtx: CustomRuleContext<'input>> Debug for BaseRuleContext<'input, ExtCtx> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - unimplemented!() - } + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { unimplemented!() } } impl<'input, ExtCtx: CustomRuleContext<'input>> Tree<'input> for BaseRuleContext<'input, ExtCtx> {} -impl<'input, ExtCtx: CustomRuleContext<'input>> ParseTree<'input> for BaseRuleContext<'input, ExtCtx> {} +impl<'input, ExtCtx: CustomRuleContext<'input>> ParseTree<'input> + for BaseRuleContext<'input, ExtCtx> +{ +} -impl<'input, ExtCtx: CustomRuleContext<'input>> ParserRuleContext<'input> for BaseRuleContext<'input, ExtCtx> {} \ No newline at end of file +impl<'input, ExtCtx: CustomRuleContext<'input>> ParserRuleContext<'input> + for BaseRuleContext<'input, ExtCtx> +{ +} diff --git a/src/semantic_context.rs b/src/semantic_context.rs index e007e3d..0010f3f 100644 --- a/src/semantic_context.rs +++ b/src/semantic_context.rs @@ -1,5 +1,5 @@ -use std::borrow::{Borrow, Cow}; use std::borrow::Cow::{Borrowed, Owned}; +use std::borrow::{Borrow, Cow}; use std::cmp::Ordering; use std::collections::HashSet; diff --git a/src/token.rs b/src/token.rs index 4e95e22..fcd31ce 100644 --- a/src/token.rs +++ b/src/token.rs @@ -1,8 +1,8 @@ use std::borrow::{Borrow, BorrowMut, Cow}; use std::cell::Cell; use std::convert::identity; -use std::fmt::{Debug, Display}; use std::fmt::Formatter; +use std::fmt::{Debug, Display}; use std::ops::{CoerceUnsized, Deref, DerefMut}; use std::sync::atomic::{AtomicIsize, AtomicUsize, Ordering}; diff --git a/src/token_factory.rs b/src/token_factory.rs index 007bd59..d490b97 100644 --- a/src/token_factory.rs +++ b/src/token_factory.rs @@ -1,5 +1,5 @@ -use std::borrow::{Borrow, BorrowMut, Cow}; use std::borrow::Cow::{Borrowed, Owned}; +use std::borrow::{Borrow, BorrowMut, Cow}; use std::cell::Cell; use std::fmt::Debug; use std::marker::{PhantomData, Unsize}; @@ -9,8 +9,8 @@ use std::sync::atomic::AtomicIsize; use typed_arena::Arena; use 
crate::char_stream::{CharStream, InputData}; -use crate::token::{CommonToken, OwningToken, TOKEN_INVALID_TYPE}; use crate::token::Token; +use crate::token::{CommonToken, OwningToken, TOKEN_INVALID_TYPE}; lazy_static! { pub static ref CommonTokenFactoryDEFAULT: Box = @@ -44,14 +44,14 @@ lazy_static! { /// Trait for creating tokens pub trait TokenFactory<'a>: Sized { /// type of tokens emitted by this factory - type Inner: Token + ?Sized + 'a; + type Inner: Token + ?Sized + 'a; /// ownership of the emitted tokens type Tok: Borrow + Clone + 'a + Debug; // can relax InputData to just ToOwned here? /// type of the underlying storage type Data: InputData + ?Sized; /// type of the reference to `Self::Data` that factory needs for producing tokens - type From: Borrow + Into>; + type From: Borrow + Into>; /// Creates token fn create( @@ -65,8 +65,8 @@ pub trait TokenFactory<'a>: Sized { line: isize, column: isize, ) -> Self::Tok - where - T: CharStream + ?Sized; + where + T: CharStream + ?Sized; /// Creates invalid token /// Invalid tokens must have `TOKEN_INVALID_TYPE` token type. @@ -84,7 +84,7 @@ impl<'a> TokenFactory<'a> for CommonTokenFactory { type Inner = CommonToken<'a>; type Tok = Box; type Data = str; - type From = Cow<'a,str>; + type From = Cow<'a, str>; #[inline] fn create( @@ -98,8 +98,8 @@ impl<'a> TokenFactory<'a> for CommonTokenFactory { line: isize, column: isize, ) -> Self::Tok - where - T: CharStream + ?Sized, + where + T: CharStream + ?Sized, { let text = match (text, source) { (Some(t), _) => Owned(t), @@ -149,8 +149,8 @@ impl<'a> TokenFactory<'a> for OwningTokenFactory { line: isize, column: isize, ) -> Self::Tok - where - T: CharStream + ?Sized, + where + T: CharStream + ?Sized, { let text = match (text, source) { (Some(t), _) => t, @@ -194,8 +194,8 @@ pub type ArenaCommonFactory<'a> = ArenaFactory<'a, CommonTokenFactory, CommonTok // Box is used here because it is almost always should be used for token factory pub struct ArenaFactory< 'input, - TF: TokenFactory<'input, Tok=Box, Inner=T>, - T: Token + Clone + 'input, + TF: TokenFactory<'input, Tok = Box, Inner = T>, + T: Token + Clone + 'input, > { arena: Arena, factory: TF, @@ -203,10 +203,10 @@ pub struct ArenaFactory< } impl< - 'input, - TF: TokenFactory<'input, Tok=Box, Inner=T> + Default, - T: Token + Clone + 'input, -> Default for ArenaFactory<'input, TF, T> + 'input, + TF: TokenFactory<'input, Tok = Box, Inner = T> + Default, + T: Token + Clone + 'input, + > Default for ArenaFactory<'input, TF, T> { fn default() -> Self { Self { @@ -218,10 +218,10 @@ impl< } impl<'input, TF, Tok> TokenFactory<'input> for ArenaFactory<'input, TF, Tok> - where - TF: TokenFactory<'input, Tok=Box, Inner=Tok>, - Tok: Token + Clone + 'input, - for<'a> &'a Tok: Default, +where + TF: TokenFactory<'input, Tok = Box, Inner = Tok>, + Tok: Token + Clone + 'input, + for<'a> &'a Tok: Default, { type Inner = Tok; type Tok = &'input Tok; @@ -240,8 +240,8 @@ impl<'input, TF, Tok> TokenFactory<'input> for ArenaFactory<'input, TF, Tok> line: isize, column: isize, ) -> Self::Tok - where - T: CharStream + ?Sized, + where + T: CharStream + ?Sized, { let token = self .factory diff --git a/src/token_source.rs b/src/token_source.rs index 8e2fbf1..17541aa 100644 --- a/src/token_source.rs +++ b/src/token_source.rs @@ -3,7 +3,7 @@ use std::marker::Unsize; use std::ops::Deref; use crate::char_stream::CharStream; -use crate::int_stream::{EOF, IntStream}; +use crate::int_stream::{IntStream, EOF}; use crate::token::{Token, TOKEN_DEFAULT_CHANNEL}; use 
crate::token_factory::{TokenAware, TokenFactory}; @@ -39,16 +39,16 @@ pub trait TokenSource<'input>: TokenAware<'input> { } impl<'input, T> TokenAware<'input> for &mut T - where - T: TokenSource<'input>, +where + T: TokenSource<'input>, { type TF = T::TF; } // allows user to call parser with &mut reference to Lexer impl<'input, T> TokenSource<'input> for &mut T - where - T: TokenSource<'input>, +where + T: TokenSource<'input>, { #[inline(always)] fn next_token(&mut self) -> >::Tok { (**self).next_token() } diff --git a/src/token_stream.rs b/src/token_stream.rs index 5748a7f..b3085ca 100644 --- a/src/token_stream.rs +++ b/src/token_stream.rs @@ -22,27 +22,38 @@ pub trait TokenStream<'input>: IntStream { fn lt(&mut self, k: isize) -> Option<&>::Tok>; fn get(&self, index: isize) -> &>::Tok; fn get_inner(&self, index: isize) -> &>::Inner; - fn get_token_source(&self) -> &dyn TokenSource<'input, TF=Self::TF>; + fn get_token_source(&self) -> &dyn TokenSource<'input, TF = Self::TF>; // fn set_token_source(&self,source: Box); fn get_all_text(&self) -> String; fn get_text_from_interval(&self, start: isize, stop: isize) -> String; // fn get_text_from_rule_context(&self,context: RuleContext) -> String; - fn get_text_from_tokens(&self, a: &T, b: &T) -> String where Self: Sized { + fn get_text_from_tokens(&self, a: &T, b: &T) -> String + where + Self: Sized, + { self.get_text_from_interval(a.get_token_index(), b.get_token_index()) } } // -pub struct TokenIter<'a, 'input: 'a, T: TokenStream<'input>>(&'a mut T, bool, PhantomData &'input str>); +pub struct TokenIter<'a, 'input: 'a, T: TokenStream<'input>>( + &'a mut T, + bool, + PhantomData &'input str>, +); impl<'a, 'input: 'a, T: TokenStream<'input>> Iterator for TokenIter<'a, 'input, T> { type Item = OwningToken; fn next(&mut self) -> Option { - if self.1 { return None } + if self.1 { + return None; + } let result = self.0.lt(1).unwrap().borrow().to_owned(); self.0.consume(); - if result.get_token_type() == TOKEN_EOF { self.1 = true; } + if result.get_token_type() == TOKEN_EOF { + self.1 = true; + } Some(result) } } @@ -54,13 +65,10 @@ pub struct UnbufferedTokenStream<'input, T: TokenSource<'input>> { pub(crate) current_token_index: isize, markers_count: isize, pub(crate) p: isize, - } impl<'input, T: TokenSource<'input>> UnbufferedTokenStream<'input, T> { - pub fn iter(&mut self) -> IterWrapper<'_, Self> { - IterWrapper(self) - } + pub fn iter(&mut self) -> IterWrapper<'_, Self> { IterWrapper(self) } pub fn token_iter(&mut self) -> TokenIter<'_, 'input, Self> { TokenIter(self, false, PhantomData) @@ -89,17 +97,19 @@ impl<'input, T: TokenSource<'input>> UnbufferedTokenStream<'input, T> { } } - fn get_buffer_start_index(&self) -> isize { - self.current_token_index - self.p - } + fn get_buffer_start_index(&self) -> isize { self.current_token_index - self.p } pub(crate) fn fill(&mut self, need: isize) -> isize { for i in 0..need { - if self.tokens.len() > 0 && self.tokens.last().unwrap().borrow().get_token_type() == TOKEN_EOF { + if self.tokens.len() > 0 + && self.tokens.last().unwrap().borrow().get_token_type() == TOKEN_EOF + { return i; } let mut token = self.token_source.next_token(); - token.borrow().set_token_index(self.get_buffer_start_index() + self.tokens.len() as isize); + token + .borrow() + .set_token_index(self.get_buffer_start_index() + self.tokens.len() as isize); self.tokens.push(token); } @@ -128,22 +138,21 @@ impl<'input, T: TokenSource<'input>> TokenStream<'input> for UnbufferedTokenStre self.tokens[(index - 
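// NOTE: illustrative sketch, not part of the patch. UnbufferedTokenStream keeps
// only a sliding window of tokens, so an absolute token index must be rebased
// against the window start before indexing into `tokens`, conceptually:
//
//     let slot = (index - self.get_buffer_start_index()) as usize; // window-relative slot
//     let token = self.tokens[slot].borrow();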
self.get_buffer_start_index()) as usize].borrow() } - fn get_token_source(&self) -> &dyn TokenSource<'input, TF=Self::TF> { - &self.token_source - } + fn get_token_source(&self) -> &dyn TokenSource<'input, TF = Self::TF> { &self.token_source } - fn get_all_text(&self) -> String { - self.get_text_from_interval(0, self.size()) - } + fn get_all_text(&self) -> String { self.get_text_from_interval(0, self.size()) } fn get_text_from_interval(&self, start: isize, stop: isize) -> String { -// println!("get_text_from_interval {}..{}",start,stop); -// println!("all tokens {:?}",self.tokens.iter().map(|x|x.as_ref().to_owned()).collect::>()); + // println!("get_text_from_interval {}..{}",start,stop); + // println!("all tokens {:?}",self.tokens.iter().map(|x|x.as_ref().to_owned()).collect::>()); let buffer_start_index = self.get_buffer_start_index(); let buffer_stop_index = buffer_start_index + self.tokens.len() as isize - 1; if start < buffer_start_index || stop > buffer_stop_index { - panic!("interval {}..={} not in token buffer window: {}..{}", start, stop, buffer_start_index, buffer_stop_index); + panic!( + "interval {}..={} not in token buffer window: {}..{}", + start, stop, buffer_start_index, buffer_stop_index + ); } let a = start - buffer_start_index; @@ -152,19 +161,22 @@ impl<'input, T: TokenSource<'input>> TokenStream<'input> for UnbufferedTokenStre let mut buf = String::new(); for i in a..(b + 1) { let t = self.tokens[i as usize].borrow(); - if t.get_token_type() == TOKEN_EOF { break } + if t.get_token_type() == TOKEN_EOF { + break; + } buf.extend(t.get_text().to_display().chars()); } return buf; } - } impl<'input, T: TokenSource<'input>> IntStream for UnbufferedTokenStream<'input, T> { fn consume(&mut self) -> Result<(), ANTLRError> { if self.la(1) == TOKEN_EOF { - return Err(ANTLRError::IllegalStateError("cannot consume EOF".to_owned())); + return Err(ANTLRError::IllegalStateError( + "cannot consume EOF".to_owned(), + )); } if self.p == self.tokens.len() as isize && self.markers_count == 0 { @@ -180,7 +192,9 @@ impl<'input, T: TokenSource<'input>> IntStream for UnbufferedTokenStream<'input, } fn la(&mut self, i: isize) -> isize { - self.lt(i).map(|t| t.borrow().get_token_type()).unwrap_or(TOKEN_INVALID_TYPE) + self.lt(i) + .map(|t| t.borrow().get_token_type()) + .unwrap_or(TOKEN_INVALID_TYPE) } fn mark(&mut self) -> isize { @@ -214,29 +228,27 @@ impl<'input, T: TokenSource<'input>> IntStream for UnbufferedTokenStream<'input, } #[inline(always)] - fn index(&self) -> isize { - self.current_token_index - } + fn index(&self) -> isize { self.current_token_index } fn seek(&mut self, mut index: isize) { - if self.current_token_index == index { return; } + if self.current_token_index == index { + return; + } if index > self.current_token_index { self.sync(index - self.current_token_index); index = min(index, self.get_buffer_start_index() + self.size() + 1); } let i = index - self.get_buffer_start_index(); - if i < 0 || i >= self.tokens.len() as isize { panic!() } + if i < 0 || i >= self.tokens.len() as isize { + panic!() + } self.p = i; self.current_token_index = index; } #[inline(always)] - fn size(&self) -> isize { - self.tokens.len() as isize - } + fn size(&self) -> isize { self.tokens.len() as isize } - fn get_source_name(&self) -> String { - self.token_source.get_source_name() - } + fn get_source_name(&self) -> String { self.token_source.get_source_name() } } diff --git a/src/transition.rs b/src/transition.rs index 6075f41..747c31a 100644 --- a/src/transition.rs +++ b/src/transition.rs @@ 
-52,19 +52,13 @@ pub enum TransitionType { pub trait Transition: Sync + Send + Debug + Any { fn get_target(&self) -> ATNStateRef; fn set_target(&mut self, s: ATNStateRef); - fn is_epsilon(&self) -> bool { - false - } - fn get_label(&self) -> Option> { - None - } + fn is_epsilon(&self) -> bool { false } + fn get_label(&self) -> Option> { None } fn get_serialization_type(&self) -> TransitionType; fn matches(&self, symbol: isize, min_vocab_symbol: isize, max_vocab_symbol: isize) -> bool; - fn get_predicate(&self) -> Option { - None - } + fn get_predicate(&self) -> Option { None } fn get_reachable_target(&self, symbol: isize) -> Option { -// println!("reachable target called on {:?}", self); + // println!("reachable target called on {:?}", self); if self.matches(symbol, LEXER_MIN_CHAR_VALUE, LEXER_MAX_CHAR_VALUE) { return Some(self.get_target()); } @@ -87,13 +81,9 @@ pub struct AtomTransition { } impl Transition for AtomTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } + fn get_target(&self) -> ATNStateRef { self.target } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } fn get_label(&self) -> Option> { let mut r = IntervalSet::new(); @@ -101,9 +91,7 @@ impl Transition for AtomTransition { Some(Cow::Owned(r)) } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_ATOM - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_ATOM } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { _symbol == self.label @@ -119,20 +107,12 @@ pub struct RuleTransition { } impl Transition for RuleTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } - fn is_epsilon(&self) -> bool { - true - } + fn is_epsilon(&self) -> bool { true } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_RULE - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_RULE } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { unimplemented!() @@ -146,20 +126,12 @@ pub struct EpsilonTransition { } impl Transition for EpsilonTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } - fn is_epsilon(&self) -> bool { - true - } + fn is_epsilon(&self) -> bool { true } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_EPSILON - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_EPSILON } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { false @@ -174,12 +146,8 @@ pub struct RangeTransition { } impl Transition for RangeTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } fn get_label(&self) -> Option> { let mut r = IntervalSet::new(); @@ -187,14 +155,11 @@ impl Transition for RangeTransition { Some(Cow::Owned(r)) } - fn 
get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_RANGE - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_RANGE } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { _symbol >= self.start && _symbol <= self.stop } - } #[derive(Debug)] @@ -207,20 +172,12 @@ pub struct ActionTransition { } impl Transition for ActionTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } - fn is_epsilon(&self) -> bool { - true - } + fn is_epsilon(&self) -> bool { true } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_ACTION - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_ACTION } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { false @@ -234,25 +191,16 @@ pub struct SetTransition { } impl Transition for SetTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } - fn get_label(&self) -> Option> { - Some(Cow::Borrowed(&self.set)) - } + fn get_label(&self) -> Option> { Some(Cow::Borrowed(&self.set)) } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_SET - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_SET } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { self.set.contains(_symbol) } - } #[derive(Debug)] @@ -262,24 +210,15 @@ pub struct NotSetTransition { } impl Transition for NotSetTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } - fn get_label(&self) -> Option> { - Some(Cow::Borrowed(&self.set)) - } + fn get_label(&self) -> Option> { Some(Cow::Borrowed(&self.set)) } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_NOTSET - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_NOTSET } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { - _symbol >= _min_vocab_symbol && _symbol <= _max_vocab_symbol - && !self.set.contains(_symbol) + _symbol >= _min_vocab_symbol && _symbol <= _max_vocab_symbol && !self.set.contains(_symbol) } } @@ -289,16 +228,10 @@ pub struct WildcardTransition { } impl Transition for WildcardTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_WILDCARD - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_WILDCARD } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { _symbol < _max_vocab_symbol && _symbol > _min_vocab_symbol @@ -314,21 +247,13 @@ pub struct PredicateTransition { } impl Transition 
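// NOTE: illustrative observation, not part of the patch. The epsilon-like
// transitions in this file either return `false` from `matches` (epsilon, action,
// predicate, precedence) or are never queried (rule: `unimplemented!`), so only
// consuming transitions (atom, range, set, not-set, wildcard) can ever satisfy
// `get_reachable_target`. Assuming some `t: &dyn Transition` whose label is 'a':
//
//     assert!(t.matches('a' as isize, LEXER_MIN_CHAR_VALUE, LEXER_MAX_CHAR_VALUE));
//     assert_eq!(t.get_reachable_target('a' as isize), Some(t.get_target()));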
for PredicateTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } + fn get_target(&self) -> ATNStateRef { self.target } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } - fn is_epsilon(&self) -> bool { - true - } + fn is_epsilon(&self) -> bool { true } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_PREDICATE - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_PREDICATE } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { false @@ -350,18 +275,12 @@ pub struct PrecedencePredicateTransition { } impl Transition for PrecedencePredicateTransition { - fn get_target(&self) -> ATNStateRef { - self.target - } - fn set_target(&mut self, s: ATNStateRef) { - self.target = s - } + fn get_target(&self) -> ATNStateRef { self.target } + fn set_target(&mut self, s: ATNStateRef) { self.target = s } fn is_epsilon(&self) -> bool { true } - fn get_serialization_type(&self) -> TransitionType { - TransitionType::TRANSITION_PRECEDENCE - } + fn get_serialization_type(&self) -> TransitionType { TransitionType::TRANSITION_PRECEDENCE } fn matches(&self, _symbol: isize, _min_vocab_symbol: isize, _max_vocab_symbol: isize) -> bool { false diff --git a/src/tree.rs b/src/tree.rs index ce82ab4..b97a020 100644 --- a/src/tree.rs +++ b/src/tree.rs @@ -7,17 +7,17 @@ use std::marker::PhantomData; use std::ops::{CoerceUnsized, Deref}; use std::rc::Rc; -use crate::{interval_set, trees}; use crate::atn::INVALID_ALT; use crate::char_stream::InputData; use crate::int_stream::EOF; use crate::interval_set::Interval; use crate::parser::{Parser, ParserNodeType}; -use crate::parser_rule_context::{BaseParserRuleContext, cast, ParserRuleContext, RuleContextExt}; +use crate::parser_rule_context::{cast, BaseParserRuleContext, ParserRuleContext, RuleContextExt}; use crate::recognizer::Recognizer; use crate::rule_context::{CustomRuleContext, EmptyContextType, RuleContext, Tid}; use crate::token::{OwningToken, Token}; use crate::token_factory::{CommonTokenFactory, TokenFactory}; +use crate::{interval_set, trees}; //todo try to make in more generic pub trait Tree<'input>: NodeText + RuleContext<'input> { @@ -30,9 +30,9 @@ pub trait Tree<'input>: NodeText + RuleContext<'input> { fn get_child_count(&self) -> usize { 0 } fn get_children<'a>( &'a self, - ) -> Box>::Type>> + 'a> - where - 'input: 'a, + ) -> Box>::Type>> + 'a> + where + 'input: 'a, { let mut index = 0; let iter = from_fn(move || { @@ -70,7 +70,7 @@ pub trait ParseTree<'input>: Tree<'input> { /// We have to know the recognizer so we can get rule names. 
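/// A hypothetical usage sketch (names taken from the generated CSV parser in the
/// tests further below; treat the signatures as approximate, not verbatim API):
/// ```ignore
/// let root = parser.csvFile()?;
/// println!("{}", root.to_string_tree(&*parser));
/// ```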
fn to_string_tree( &self, - r: &dyn Recognizer<'input, TF=Self::TF, Node=Self::Ctx>, + r: &dyn Recognizer<'input, TF = Self::TF, Node = Self::Ctx>, ) -> String { trees::string_tree(self, r.get_rule_names()) } @@ -108,7 +108,7 @@ pub struct LeafNode<'input, Node: ParserNodeType<'input>, T: 'static> { } impl<'input, Node: ParserNodeType<'input>, T: 'static> CustomRuleContext<'input> -for LeafNode<'input, Node, T> + for LeafNode<'input, Node, T> { type TF = Node::TF; type Ctx = Node; @@ -117,8 +117,9 @@ for LeafNode<'input, Node, T> } impl<'input, Node: ParserNodeType<'input>, T: 'static> ParserRuleContext<'input> -for LeafNode<'input, Node, T> -{} + for LeafNode<'input, Node, T> +{ +} impl<'input, Node: ParserNodeType<'input>, T: 'static> Tree<'input> for LeafNode<'input, Node, T> {} @@ -128,16 +129,17 @@ unsafe impl<'input, Node: ParserNodeType<'input>, T: 'static> Tid for LeafNode<' } fn id() -> TypeId - where - Self: Sized, + where + Self: Sized, { TypeId::of::, T>>() } } impl<'input, Node: ParserNodeType<'input>, T: 'static> RuleContext<'input> -for LeafNode<'input, Node, T> -{} + for LeafNode<'input, Node, T> +{ +} impl<'input, Node: ParserNodeType<'input>, T: 'static> NodeText for LeafNode<'input, Node, T> { fn get_node_text(&self, _rule_names: &[&str]) -> String { @@ -146,7 +148,7 @@ impl<'input, Node: ParserNodeType<'input>, T: 'static> NodeText for LeafNode<'in } impl<'input, Node: ParserNodeType<'input>, T: 'static> ParseTree<'input> -for LeafNode<'input, Node, T> + for LeafNode<'input, Node, T> { fn get_source_interval(&self) -> Interval { let i = self.symbol.borrow().get_token_index(); @@ -182,7 +184,7 @@ impl<'input, Node: ParserNodeType<'input>, T: 'static> LeafNode<'input, Node, T> pub type TerminalNode<'input, NodeType> = LeafNode<'input, NodeType, NoError>; impl<'input, Node: ParserNodeType<'input>, Listener: ParseTreeListener<'input, Node> + ?Sized> -Listenable for TerminalNode<'input, Node> + Listenable for TerminalNode<'input, Node> { fn enter(&self, listener: &mut Listener) { listener.visit_terminal(self) } @@ -192,7 +194,7 @@ Listenable for TerminalNode<'input, Node> } impl<'input, Node: ParserNodeType<'input>, Visitor: ParseTreeVisitor<'input, Node> + ?Sized> -Visitable for TerminalNode<'input, Node> + Visitable for TerminalNode<'input, Node> { fn accept(&self, visitor: &mut Visitor) { visitor.visit_terminal(self) } } @@ -202,7 +204,7 @@ Visitable for TerminalNode<'input, Node> pub type ErrorNode<'input, NodeType> = LeafNode<'input, NodeType, IsError>; impl<'input, Node: ParserNodeType<'input>, Listener: ParseTreeListener<'input, Node> + ?Sized> -Listenable for ErrorNode<'input, Node> + Listenable for ErrorNode<'input, Node> { fn enter(&self, listener: &mut Listener) { listener.visit_error_node(self) } @@ -212,7 +214,7 @@ Listenable for ErrorNode<'input, Node> } impl<'input, Node: ParserNodeType<'input>, Visitor: ParseTreeVisitor<'input, Node> + ?Sized> -Visitable for ErrorNode<'input, Node> + Visitable for ErrorNode<'input, Node> { fn accept(&self, visitor: &mut Visitor) { visitor.visit_error_node(self) } } @@ -250,7 +252,9 @@ pub trait ParseTreeVisitor<'input, Node: ParserNodeType<'input>> { // } pub trait Visitable { - fn accept(&self, visitor: &mut Vis) { unreachable!("should have been properly implemented by generated context when reachable") } + fn accept(&self, visitor: &mut Vis) { + unreachable!("should have been properly implemented by generated context when reachable") + } } pub trait ParseTreeListener<'input, Node: ParserNodeType<'input>> { @@ -275,23 
+279,23 @@ pub trait Listenable { pub struct ParseTreeWalker<'input, 'a, Node, T = dyn ParseTreeListener<'input, Node> + 'a>( PhantomData &'input Node::Type>, ) - where - Node: ParserNodeType<'input>, - T: ParseTreeListener<'input, Node> + 'a + ?Sized; +where + Node: ParserNodeType<'input>, + T: ParseTreeListener<'input, Node> + 'a + ?Sized; impl<'input, 'a, Node, T> ParseTreeWalker<'input, 'a, Node, T> - where - Node: ParserNodeType<'input>, - T: ParseTreeListener<'input, Node> + 'a + ?Sized, - Node::Type: Listenable, +where + Node: ParserNodeType<'input>, + T: ParseTreeListener<'input, Node> + 'a + ?Sized, + Node::Type: Listenable, { // #[doc(hidden)] // pub fn new() -> Self{ Self(PhantomData) } pub fn walk(mut listener: Box, t: &Ctx) -> Box - where - for<'x> &'x mut Listener: CoerceUnsized<&'x mut T>, - for<'x> &'x Ctx: CoerceUnsized<&'x Node::Type>, + where + for<'x> &'x mut Listener: CoerceUnsized<&'x mut T>, + for<'x> &'x Ctx: CoerceUnsized<&'x Node::Type>, { // let mut listener = listener as Box; Self::walk_inner(listener.as_mut(), t as &Node::Type); diff --git a/src/utils.rs b/src/utils.rs index 1a1206e..65debb0 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -13,7 +13,7 @@ pub fn escape_whitespaces(data: impl Borrow, escape_spaces: bool) -> String '\t' => res.extend("\\t".chars()), '\n' => res.extend("\\n".chars()), '\r' => res.extend("\\r".chars()), - _ => res.push(ch) + _ => res.push(ch), }); res } diff --git a/src/vocabulary.rs b/src/vocabulary.rs index 4afd96e..f20218f 100644 --- a/src/vocabulary.rs +++ b/src/vocabulary.rs @@ -23,7 +23,7 @@ pub struct VocabularyImpl { } fn collect_to_string<'b, T: Borrow + 'b>( - iter: impl IntoIterator>, + iter: impl IntoIterator>, ) -> Vec> { iter.into_iter() .map(|x| x.as_ref().map(|it| it.borrow().to_owned())) @@ -31,7 +31,7 @@ fn collect_to_string<'b, T: Borrow + 'b>( } impl VocabularyImpl { - pub fn new<'b, T: Borrow + 'b, Iter: IntoIterator>>( + pub fn new<'b, T: Borrow + 'b, Iter: IntoIterator>>( literal_names: Iter, symbolic_names: Iter, display_names: Option, @@ -43,13 +43,13 @@ impl VocabularyImpl { display_names: collect_to_string(display_names.into_iter().flatten()), max_token_type: 0, } - .modify_with(|it| { - it.max_token_type = max( - it.literal_names.len(), - max(it.symbolic_names.len(), it.display_names.len()), - ) as isize - - 1 - }) + .modify_with(|it| { + it.max_token_type = max( + it.literal_names.len(), + max(it.symbolic_names.len(), it.display_names.len()), + ) as isize + - 1 + }) } pub fn from_token_names(token_names: &[Option<&str>]) -> VocabularyImpl { @@ -113,25 +113,17 @@ impl Vocabulary for VocabularyImpl { } } -pub(crate) static DUMMY_VOCAB:DummyVocab = DummyVocab; +pub(crate) static DUMMY_VOCAB: DummyVocab = DummyVocab; #[derive(Debug)] pub(crate) struct DummyVocab; -impl Vocabulary for DummyVocab{ - fn get_max_token_type(&self) -> isize { - unimplemented!() - } +impl Vocabulary for DummyVocab { + fn get_max_token_type(&self) -> isize { unimplemented!() } - fn get_literal_name(&self, token_type: isize) -> Option<&str> { - unimplemented!() - } + fn get_literal_name(&self, token_type: isize) -> Option<&str> { unimplemented!() } - fn get_symbolic_name(&self, token_type: isize) -> Option<&str> { - unimplemented!() - } + fn get_symbolic_name(&self, token_type: isize) -> Option<&str> { unimplemented!() } - fn get_display_name(&self, token_type: isize) -> Cow<'_, str> { - token_type.to_string().into() - } -} \ No newline at end of file + fn get_display_name(&self, token_type: isize) -> Cow<'_, str> { 
token_type.to_string().into() } +} diff --git a/tests/gen/csvlexer.rs b/tests/gen/csvlexer.rs index 060eff3..76dba59 100644 --- a/tests/gen/csvlexer.rs +++ b/tests/gen/csvlexer.rs @@ -2,200 +2,181 @@ #![allow(dead_code)] #![allow(nonstandard_style)] #![allow(unused_imports)] - -use std::cell::RefCell; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::rc::Rc; -use std::sync::Arc; - use antlr_rust::atn::ATN; use antlr_rust::atn_deserializer::ATNDeserializer; use antlr_rust::char_stream::CharStream; use antlr_rust::dfa::DFA; use antlr_rust::error_listener::ErrorListener; use antlr_rust::int_stream::IntStream; -use antlr_rust::lazy_static; use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog}; use antlr_rust::lexer_atn_simulator::{ILexerATNSimulator, LexerATNSimulator}; -use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, ParserRuleContext}; -use antlr_rust::PredictionContextCache; +use antlr_rust::parser_rule_context::{cast, BaseParserRuleContext, ParserRuleContext}; use antlr_rust::recognizer::{Actions, Recognizer}; use antlr_rust::rule_context::{BaseRuleContext, EmptyContext, EmptyCustomRuleContext}; use antlr_rust::token::*; use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use antlr_rust::token_source::TokenSource; use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl}; +use antlr_rust::PredictionContextCache; -pub const T__0:isize=1; - pub const T__1:isize=2; - pub const T__2:isize=3; - pub const WS:isize=4; - pub const TEXT:isize=5; - pub const STRING:isize=6; - pub const channelNames: [&'static str;0+2] = [ - "DEFAULT_TOKEN_CHANNEL", "HIDDEN" - ]; - - pub const modeNames: [&'static str;1] = [ - "DEFAULT_MODE" - ]; - - pub const ruleNames: [&'static str;6] = [ - "T__0", "T__1", "T__2", "WS", "TEXT", "STRING" - ]; - +use antlr_rust::lazy_static; - pub const _LITERAL_NAMES: [Option<&'static str>;4] = [ - None, Some("','"), Some("'\r'"), Some("'\n'") - ]; - pub const _SYMBOLIC_NAMES: [Option<&'static str>;7] = [ - None, None, None, None, Some("WS"), Some("TEXT"), Some("STRING") - ]; - lazy_static!{ - static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); - static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); - } +use std::cell::RefCell; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; +use std::rc::Rc; +use std::sync::Arc; +pub const T__0: isize = 1; +pub const T__1: isize = 2; +pub const T__2: isize = 3; +pub const WS: isize = 4; +pub const TEXT: isize = 5; +pub const STRING: isize = 6; +pub const channelNames: [&'static str; 0 + 2] = ["DEFAULT_TOKEN_CHANNEL", "HIDDEN"]; + +pub const modeNames: [&'static str; 1] = ["DEFAULT_MODE"]; + +pub const ruleNames: [&'static str; 6] = ["T__0", "T__1", "T__2", "WS", "TEXT", "STRING"]; + +pub const _LITERAL_NAMES: [Option<&'static str>; 4] = + [None, Some("','"), Some("'\r'"), Some("'\n'")]; +pub const _SYMBOLIC_NAMES: [Option<&'static str>; 7] = [ + None, + None, + None, + None, + Some("WS"), + Some("TEXT"), + Some("STRING"), +]; +lazy_static! 
{ + static ref _shared_context_cache: Arc = + Arc::new(PredictionContextCache::new()); + static ref VOCABULARY: Box = Box::new(VocabularyImpl::new( + _LITERAL_NAMES.iter(), + _SYMBOLIC_NAMES.iter(), + None + )); +} -pub type LexerContext<'input> = BaseParserRuleContext<'input,EmptyCustomRuleContext<'input,LocalTokenFactory<'input> >>; +pub type LexerContext<'input> = + BaseParserRuleContext<'input, EmptyCustomRuleContext<'input, LocalTokenFactory<'input>>>; pub type LocalTokenFactory<'input> = antlr_rust::token_factory::ArenaCommonFactory<'input>; -type From<'a> = as TokenFactory<'a> >::From; +type From<'a> = as TokenFactory<'a>>::From; -pub struct CSVLexer<'input, Input:CharStream >> { - base: BaseLexer<'input,CSVLexerActions,Input,LocalTokenFactory<'input>>, -// static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); } +pub struct CSVLexer<'input, Input: CharStream>> { + base: BaseLexer<'input, CSVLexerActions, Input, LocalTokenFactory<'input>>, + // static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); } } -impl<'input, Input:CharStream >> Deref for CSVLexer<'input,Input>{ - type Target = BaseLexer<'input,CSVLexerActions,Input,LocalTokenFactory<'input>>; +impl<'input, Input: CharStream>> Deref for CSVLexer<'input, Input> { + type Target = BaseLexer<'input, CSVLexerActions, Input, LocalTokenFactory<'input>>; - fn deref(&self) -> &Self::Target { - &self.base - } + fn deref(&self) -> &Self::Target { &self.base } } -impl<'input, Input:CharStream >> DerefMut for CSVLexer<'input,Input>{ - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.base - } +impl<'input, Input: CharStream>> DerefMut for CSVLexer<'input, Input> { + fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base } } - -impl<'input, Input:CharStream >> CSVLexer<'input,Input>{ - fn get_rule_names(&self) -> &'static [&'static str] { - &ruleNames - } - fn get_literal_names(&self) -> &[Option<&str>] { - &_LITERAL_NAMES - } - - fn get_symbolic_names(&self) -> &[Option<&str>] { - &_SYMBOLIC_NAMES +impl<'input, Input: CharStream>> CSVLexer<'input, Input> { + fn get_rule_names(&self) -> &'static [&'static str] { &ruleNames } + fn get_literal_names(&self) -> &[Option<&str>] { &_LITERAL_NAMES } + + fn get_symbolic_names(&self) -> &[Option<&str>] { &_SYMBOLIC_NAMES } + + fn get_grammar_file_name(&self) -> &'static str { "CSVLexer.g4" } + + pub fn new_with_token_factory( + input: Box, + tf: &'input LocalTokenFactory<'input>, + ) -> Self { + antlr_rust::recognizer::check_version("0", "2"); + Self { + base: BaseLexer::new_base_lexer( + input, + LexerATNSimulator::new_lexer_atnsimulator( + _ATN.clone(), + _decision_to_DFA.clone(), + _shared_context_cache.clone(), + ), + CSVLexerActions {}, + tf, + ), + } } +} - fn get_grammar_file_name(&self) -> &'static str { - "CSVLexer.g4" +impl<'input, Input: CharStream>> CSVLexer<'input, Input> +where + &'input LocalTokenFactory<'input>: Default, +{ + pub fn new(input: Box) -> Self { + CSVLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default()) } - - pub fn new_with_token_factory(input: Box,tf: &'input LocalTokenFactory<'input>) -> Self { - antlr_rust::recognizer::check_version("0","2"); - Self { - base: BaseLexer::new_base_lexer( - input, - LexerATNSimulator::new_lexer_atnsimulator( - _ATN.clone(), - _decision_to_DFA.clone(), - _shared_context_cache.clone(), - ), - CSVLexerActions{}, - tf - ) - } - } } -impl<'input, Input:CharStream >> CSVLexer<'input,Input> where &'input LocalTokenFactory<'input>:Default{ - pub fn new(input: 
Box) -> Self{ - CSVLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default()) - } -} +pub struct CSVLexerActions {} -pub struct CSVLexerActions { -} +impl CSVLexerActions {} -impl CSVLexerActions{ +impl<'input, Input: CharStream>> + Actions<'input, BaseLexer<'input, CSVLexerActions, Input, LocalTokenFactory<'input>>> + for CSVLexerActions +{ } -impl<'input, Input:CharStream >> Actions<'input,BaseLexer<'input,CSVLexerActions,Input,LocalTokenFactory<'input>>> for CSVLexerActions{ - } - - impl<'input, Input:CharStream >> CSVLexer<'input,Input>{ +impl<'input, Input: CharStream>> CSVLexer<'input, Input> {} +impl<'input, Input: CharStream>> + LexerRecog<'input, BaseLexer<'input, CSVLexerActions, Input, LocalTokenFactory<'input>>> + for CSVLexerActions +{ } - -impl<'input, Input:CharStream >> LexerRecog<'input,BaseLexer<'input,CSVLexerActions,Input,LocalTokenFactory<'input>>> for CSVLexerActions{ -} -impl<'input> TokenAware<'input> for CSVLexerActions{ - type TF = LocalTokenFactory<'input>; +impl<'input> TokenAware<'input> for CSVLexerActions { + type TF = LocalTokenFactory<'input>; } -impl<'input, Input:CharStream >> TokenAware<'input> for CSVLexer<'input,Input>{ - type TF = LocalTokenFactory<'input>; +impl<'input, Input: CharStream>> TokenAware<'input> for CSVLexer<'input, Input> { + type TF = LocalTokenFactory<'input>; } -impl<'input, Input:CharStream >> TokenSource<'input> for CSVLexer<'input,Input>{ +impl<'input, Input: CharStream>> TokenSource<'input> for CSVLexer<'input, Input> { + fn next_token(&mut self) -> >::Tok { self.base.next_token() } - fn next_token(&mut self) -> >::Tok { - self.base.next_token() - } + fn get_line(&self) -> isize { self.base.get_line() } - fn get_line(&self) -> isize { - self.base.get_line() - } + fn get_char_position_in_line(&self) -> isize { self.base.get_char_position_in_line() } - fn get_char_position_in_line(&self) -> isize { - self.base.get_char_position_in_line() - } + fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { self.base.get_input_stream() } - fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { - self.base.get_input_stream() - } + fn get_source_name(&self) -> String { self.base.get_source_name() } - fn get_source_name(&self) -> String { - self.base.get_source_name() - } - - fn get_token_factory(&self) -> &'input Self::TF { - self.base.get_token_factory() - } + fn get_token_factory(&self) -> &'input Self::TF { self.base.get_token_factory() } } +lazy_static! { + static ref _ATN: Arc = + Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars())); + static ref _decision_to_DFA: Arc> = { + let mut dfa = Vec::new(); + let size = _ATN.decision_to_state.len(); + for i in 0..size { + dfa.push(DFA::new( + _ATN.clone(), + _ATN.get_decision_state(i), + i as isize, + )) + } + Arc::new(dfa) + }; +} - - lazy_static! 
{ - static ref _ATN: Arc = - Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars())); - static ref _decision_to_DFA: Arc> = { - let mut dfa = Vec::new(); - let size = _ATN.decision_to_state.len(); - for i in 0..size { - dfa.push(DFA::new( - _ATN.clone(), - _ATN.get_decision_state(i), - i as isize, - )) - } - Arc::new(dfa) - }; - } - - - - const _serializedATN:&'static str = - "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\ +const _serializedATN: &'static str = + "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\ \x08\x2c\x08\x01\x04\x02\x09\x02\x04\x03\x09\x03\x04\x04\x09\x04\x04\x05\ \x09\x05\x04\x06\x09\x06\x04\x07\x09\x07\x03\x02\x03\x02\x03\x03\x03\x03\ \x03\x04\x03\x04\x03\x05\x06\x05\x17\x0a\x05\x0d\x05\x0e\x05\x18\x03\x05\ diff --git a/tests/gen/csvlistener.rs b/tests/gen/csvlistener.rs index e376023..d031144 100644 --- a/tests/gen/csvlistener.rs +++ b/tests/gen/csvlistener.rs @@ -1,57 +1,53 @@ #![allow(nonstandard_style)] - -use std::any::Any; - -use antlr_rust::token_factory::CommonTokenFactory; // Generated from CSV.g4 by ANTLR 4.8 -use antlr_rust::tree::ParseTreeListener; - use super::csvparser::*; +use antlr_rust::token_factory::CommonTokenFactory; +use antlr_rust::tree::ParseTreeListener; -pub trait CSVListener<'input> : ParseTreeListener<'input,CSVParserContextType>{ - -/** - * Enter a parse tree produced by {@link CSVParser#csvFile}. - * @param ctx the parse tree - */ -fn enter_csvFile(&mut self, _ctx: &CsvFileContext<'input>) { } -/** - * Exit a parse tree produced by {@link CSVParser#csvFile}. - * @param ctx the parse tree - */ -fn exit_csvFile(&mut self, _ctx: &CsvFileContext<'input>) { } - -/** - * Enter a parse tree produced by {@link CSVParser#hdr}. - * @param ctx the parse tree - */ -fn enter_hdr(&mut self, _ctx: &HdrContext<'input>) { } -/** - * Exit a parse tree produced by {@link CSVParser#hdr}. - * @param ctx the parse tree - */ -fn exit_hdr(&mut self, _ctx: &HdrContext<'input>) { } - -/** - * Enter a parse tree produced by {@link CSVParser#row}. - * @param ctx the parse tree - */ -fn enter_row(&mut self, _ctx: &RowContext<'input>) { } -/** - * Exit a parse tree produced by {@link CSVParser#row}. - * @param ctx the parse tree - */ -fn exit_row(&mut self, _ctx: &RowContext<'input>) { } - -/** - * Enter a parse tree produced by {@link CSVParser#field}. - * @param ctx the parse tree - */ -fn enter_field(&mut self, _ctx: &FieldContext<'input>) { } -/** - * Exit a parse tree produced by {@link CSVParser#field}. - * @param ctx the parse tree - */ -fn exit_field(&mut self, _ctx: &FieldContext<'input>) { } +use std::any::Any; +pub trait CSVListener<'input>: ParseTreeListener<'input, CSVParserContextType> { + /** + * Enter a parse tree produced by {@link CSVParser#csvFile}. + * @param ctx the parse tree + */ + fn enter_csvFile(&mut self, _ctx: &CsvFileContext<'input>) {} + /** + * Exit a parse tree produced by {@link CSVParser#csvFile}. + * @param ctx the parse tree + */ + fn exit_csvFile(&mut self, _ctx: &CsvFileContext<'input>) {} + + /** + * Enter a parse tree produced by {@link CSVParser#hdr}. + * @param ctx the parse tree + */ + fn enter_hdr(&mut self, _ctx: &HdrContext<'input>) {} + /** + * Exit a parse tree produced by {@link CSVParser#hdr}. + * @param ctx the parse tree + */ + fn exit_hdr(&mut self, _ctx: &HdrContext<'input>) {} + + /** + * Enter a parse tree produced by {@link CSVParser#row}. 
+ * @param ctx the parse tree + */ + fn enter_row(&mut self, _ctx: &RowContext<'input>) {} + /** + * Exit a parse tree produced by {@link CSVParser#row}. + * @param ctx the parse tree + */ + fn exit_row(&mut self, _ctx: &RowContext<'input>) {} + + /** + * Enter a parse tree produced by {@link CSVParser#field}. + * @param ctx the parse tree + */ + fn enter_field(&mut self, _ctx: &FieldContext<'input>) {} + /** + * Exit a parse tree produced by {@link CSVParser#field}. + * @param ctx the parse tree + */ + fn exit_field(&mut self, _ctx: &FieldContext<'input>) {} } diff --git a/tests/gen/csvparser.rs b/tests/gen/csvparser.rs index 3d8256b..c59d2eb 100644 --- a/tests/gen/csvparser.rs +++ b/tests/gen/csvparser.rs @@ -5,622 +5,648 @@ #![allow(nonstandard_style)] #![allow(unused_imports)] #![allow(unused_mut)] - -use std::any::{Any, TypeId}; -use std::borrow::{Borrow, BorrowMut}; -use std::cell::RefCell; -use std::convert::TryFrom; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::rc::Rc; -use std::sync::Arc; - +use super::csvlistener::*; +use super::csvvisitor::*; use antlr_rust::atn::{ATN, INVALID_ALT}; use antlr_rust::atn_deserializer::ATNDeserializer; use antlr_rust::dfa::DFA; use antlr_rust::error_strategy::{DefaultErrorStrategy, ErrorStrategy}; use antlr_rust::errors::*; use antlr_rust::int_stream::EOF; -use antlr_rust::lazy_static; use antlr_rust::parser::{BaseParser, Parser, ParserNodeType, ParserRecog}; use antlr_rust::parser_atn_simulator::ParserATNSimulator; -use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, cast_mut, ParserRuleContext}; -use antlr_rust::PredictionContextCache; +use antlr_rust::parser_rule_context::{cast, cast_mut, BaseParserRuleContext, ParserRuleContext}; use antlr_rust::recognizer::{Actions, Recognizer}; use antlr_rust::rule_context::{BaseRuleContext, CustomRuleContext, RuleContext}; use antlr_rust::token::{OwningToken, Token, TOKEN_EOF}; use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use antlr_rust::token_source::TokenSource; use antlr_rust::token_stream::TokenStream; -use antlr_rust::tree::{ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode, Visitable}; +use antlr_rust::tree::{ + ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode, + Visitable, +}; use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl}; +use antlr_rust::PredictionContextCache; -use super::csvlistener::*; -use super::csvvisitor::*; +use antlr_rust::lazy_static; -pub const T__0:isize=1; - pub const T__1:isize=2; - pub const T__2:isize=3; - pub const WS:isize=4; - pub const TEXT:isize=5; - pub const STRING:isize=6; - pub const RULE_csvFile:usize = 0; - pub const RULE_hdr:usize = 1; - pub const RULE_row:usize = 2; - pub const RULE_field:usize = 3; - pub const ruleNames: [&'static str; 4] = [ - "csvFile", "hdr", "row", "field" - ]; - - - pub const _LITERAL_NAMES: [Option<&'static str>;4] = [ - None, Some("','"), Some("'\r'"), Some("'\n'") - ]; - pub const _SYMBOLIC_NAMES: [Option<&'static str>;7] = [ - None, None, None, None, Some("WS"), Some("TEXT"), Some("STRING") - ]; - lazy_static!{ - static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); - static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); - } +use std::any::{Any, TypeId}; +use std::borrow::{Borrow, BorrowMut}; +use std::cell::RefCell; +use std::convert::TryFrom; +use std::marker::PhantomData; +use 
std::ops::{Deref, DerefMut}; +use std::rc::Rc; +use std::sync::Arc; +pub const T__0: isize = 1; +pub const T__1: isize = 2; +pub const T__2: isize = 3; +pub const WS: isize = 4; +pub const TEXT: isize = 5; +pub const STRING: isize = 6; +pub const RULE_csvFile: usize = 0; +pub const RULE_hdr: usize = 1; +pub const RULE_row: usize = 2; +pub const RULE_field: usize = 3; +pub const ruleNames: [&'static str; 4] = ["csvFile", "hdr", "row", "field"]; + +pub const _LITERAL_NAMES: [Option<&'static str>; 4] = + [None, Some("','"), Some("'\r'"), Some("'\n'")]; +pub const _SYMBOLIC_NAMES: [Option<&'static str>; 7] = [ + None, + None, + None, + None, + Some("WS"), + Some("TEXT"), + Some("STRING"), +]; +lazy_static! { + static ref _shared_context_cache: Arc = + Arc::new(PredictionContextCache::new()); + static ref VOCABULARY: Box = Box::new(VocabularyImpl::new( + _LITERAL_NAMES.iter(), + _SYMBOLIC_NAMES.iter(), + None + )); +} type BaseParserType<'input, I> = - BaseParser<'input,CSVParserExt, I, CSVParserContextType , dyn CSVListener<'input> + 'static >; + BaseParser<'input, CSVParserExt, I, CSVParserContextType, dyn CSVListener<'input> + 'static>; type TokenType<'input> = as TokenFactory<'input>>::Tok; pub type LocalTokenFactory<'input> = antlr_rust::token_factory::ArenaCommonFactory<'input>; -pub type CSVTreeWalker<'input,'a> = - ParseTreeWalker<'input, 'a, CSVParserContextType , dyn CSVListener<'input> + 'a>; +pub type CSVTreeWalker<'input, 'a> = + ParseTreeWalker<'input, 'a, CSVParserContextType, dyn CSVListener<'input> + 'a>; -pub struct CSVParser<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> { - base:BaseParserType<'input,I>, - interpreter:Arc, - _shared_context_cache: Box, - pub err_handler: Box> + 'input>, +pub struct CSVParser<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> { + base: BaseParserType<'input, I>, + interpreter: Arc, + _shared_context_cache: Box, + pub err_handler: Box> + 'input>, } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> CSVParser<'input,I> { - - pub fn get_serialized_atn() -> &'static str { unimplemented!() } +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> CSVParser<'input, I> { + pub fn get_serialized_atn() -> &'static str { unimplemented!() } - pub fn set_error_strategy(&mut self, strategy: Box> >) { + pub fn set_error_strategy( + &mut self, + strategy: Box>>, + ) { self.err_handler = strategy } pub fn new(input: Box) -> Self { - antlr_rust::recognizer::check_version("0","2"); - let interpreter = Arc::new(ParserATNSimulator::new( - _ATN.clone(), - _decision_to_DFA.clone(), - _shared_context_cache.clone(), - )); - Self { - base: BaseParser::new_base_parser( - input, - Arc::clone(&interpreter), - CSVParserExt{ - } - ), - interpreter, + antlr_rust::recognizer::check_version("0", "2"); + let interpreter = Arc::new(ParserATNSimulator::new( + _ATN.clone(), + _decision_to_DFA.clone(), + _shared_context_cache.clone(), + )); + Self { + base: BaseParser::new_base_parser(input, Arc::clone(&interpreter), CSVParserExt {}), + interpreter, _shared_context_cache: Box::new(PredictionContextCache::new()), - err_handler: Box::new(DefaultErrorStrategy::<'input,CSVParserContextType>::new()), + err_handler: Box::new(DefaultErrorStrategy::<'input, CSVParserContextType>::new()), } } } /// Trait for monomorphized trait object that corresponds to nodes of parse tree generated by CSVParser pub trait CSVParserContext<'input>: - for<'x> Listenable + 'x > + - for<'x> Visitable + 'x > + - ParserRuleContext<'input, 
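// NOTE: illustrative sketch, not part of the patch. `CSVParserContext` is the
// grammar-specific node trait: every generated rule context (CsvFileContext,
// HdrContext, RowContext, FieldContext) implements it, so the whole parse tree
// can be handled as `Rc<dyn CSVParserContext>` trait objects that remain
// listenable and visitable. Hypothetical dispatch through the trait object:
//
//     fn node_text<'i>(node: &(dyn CSVParserContext<'i> + 'i)) -> String {
//         node.get_text() // ParseTree::get_text, assumed reachable via the supertraits
//     }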
TF=LocalTokenFactory<'input>, Ctx=CSVParserContextType> -{} + for<'x> Listenable + 'x> + + for<'x> Visitable + 'x> + + ParserRuleContext<'input, TF = LocalTokenFactory<'input>, Ctx = CSVParserContextType> +{ +} -impl<'input> CSVParserContext<'input> for TerminalNode<'input,CSVParserContextType> {} -impl<'input> CSVParserContext<'input> for ErrorNode<'input,CSVParserContextType> {} +impl<'input> CSVParserContext<'input> for TerminalNode<'input, CSVParserContextType> {} +impl<'input> CSVParserContext<'input> for ErrorNode<'input, CSVParserContextType> {} pub struct CSVParserContextType; -impl<'input> ParserNodeType<'input> for CSVParserContextType{ - type TF = LocalTokenFactory<'input>; - type Type = dyn CSVParserContext<'input> + 'input; +impl<'input> ParserNodeType<'input> for CSVParserContextType { + type TF = LocalTokenFactory<'input>; + type Type = dyn CSVParserContext<'input> + 'input; } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Deref for CSVParser<'input,I> { - type Target = BaseParserType<'input,I>; - - fn deref(&self) -> &Self::Target { - &self.base - } -} +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> Deref + for CSVParser<'input, I> +{ + type Target = BaseParserType<'input, I>; -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > DerefMut for CSVParser<'input,I> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.base - } + fn deref(&self) -> &Self::Target { &self.base } } -pub struct CSVParserExt{ +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> DerefMut + for CSVParser<'input, I> +{ + fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base } } -impl CSVParserExt{ -} +pub struct CSVParserExt {} +impl CSVParserExt {} -impl<'input> TokenAware<'input> for CSVParserExt{ - type TF = LocalTokenFactory<'input>; +impl<'input> TokenAware<'input> for CSVParserExt { + type TF = LocalTokenFactory<'input>; } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > ParserRecog<'input, BaseParserType<'input,I>> for CSVParserExt{} +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + ParserRecog<'input, BaseParserType<'input, I>> for CSVParserExt +{ +} -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Actions<'input, BaseParserType<'input,I>> for CSVParserExt{ - fn get_grammar_file_name(&self) -> & str{ "CSV.g4"} +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + Actions<'input, BaseParserType<'input, I>> for CSVParserExt +{ + fn get_grammar_file_name(&self) -> &str { "CSV.g4" } - fn get_rule_names(&self) -> &[& str] {&ruleNames} + fn get_rule_names(&self) -> &[&str] { &ruleNames } - fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY } + fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY } } //------------------- csvFile ---------------- pub type CsvFileContextAll<'input> = CsvFileContext<'input>; - -pub type CsvFileContext<'input> = BaseParserRuleContext<'input,CsvFileContextExt<'input>>; +pub type CsvFileContext<'input> = BaseParserRuleContext<'input, CsvFileContextExt<'input>>; #[derive(Clone)] -pub struct CsvFileContextExt<'input>{ -ph:PhantomData<&'input str> +pub struct CsvFileContextExt<'input> { + ph: PhantomData<&'input str>, } -impl<'input> CSVParserContext<'input> for CsvFileContext<'input>{} - -impl<'input,'a> Listenable + 'a> for CsvFileContext<'input>{ - fn enter(&self,listener: &mut (dyn CSVListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_csvFile(self); - } - fn 
 //------------------- csvFile ----------------
 pub type CsvFileContextAll<'input> = CsvFileContext<'input>;
-
-pub type CsvFileContext<'input> = BaseParserRuleContext<'input,CsvFileContextExt<'input>>;
+pub type CsvFileContext<'input> = BaseParserRuleContext<'input, CsvFileContextExt<'input>>;

 #[derive(Clone)]
-pub struct CsvFileContextExt<'input>{
-ph:PhantomData<&'input str>
+pub struct CsvFileContextExt<'input> {
+    ph: PhantomData<&'input str>,
 }

-impl<'input> CSVParserContext<'input> for CsvFileContext<'input>{}
-
-impl<'input,'a> Listenable<dyn CSVListener<'input> + 'a> for CsvFileContext<'input>{
-    fn enter(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.enter_every_rule(self);
-        listener.enter_csvFile(self);
-    }
-    fn exit(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.exit_csvFile(self);
-        listener.exit_every_rule(self);
-    }
-}
+impl<'input> CSVParserContext<'input> for CsvFileContext<'input> {}

-impl<'input,'a> Visitable<dyn CSVVisitor<'input> + 'a> for CsvFileContext<'input>{
-    fn accept(&self,visitor: &mut (dyn CSVVisitor<'input> + 'a)) {
-        visitor.visit_csvFile(self);
-    }
+impl<'input, 'a> Listenable<dyn CSVListener<'input> + 'a> for CsvFileContext<'input> {
+    fn enter(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.enter_every_rule(self);
+        listener.enter_csvFile(self);
+    }
+    fn exit(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.exit_csvFile(self);
+        listener.exit_every_rule(self);
+    }
 }

-impl<'input> CustomRuleContext<'input> for CsvFileContextExt<'input>{
-    type TF = LocalTokenFactory<'input>;
-    type Ctx = CSVParserContextType;
-    fn get_rule_index(&self) -> usize { RULE_csvFile }
-    //fn type_rule_index() -> usize where Self: Sized { RULE_csvFile }
+impl<'input, 'a> Visitable<dyn CSVVisitor<'input> + 'a> for CsvFileContext<'input> {
+    fn accept(&self, visitor: &mut (dyn CSVVisitor<'input> + 'a)) { visitor.visit_csvFile(self); }
 }
-antlr_rust::type_id!{CsvFileContextExt}

-impl<'input> CsvFileContextExt<'input>{
-    fn new(parent: Option<Rc<dyn CSVParserContext<'input> + 'input > >, invoking_state: isize) -> Rc<CsvFileContextAll<'input>> {
-        Rc::new(
-            BaseParserRuleContext::new_parser_ctx(parent, invoking_state,CsvFileContextExt{
-                ph:PhantomData
-            }),
-        )
-    }
+impl<'input> CustomRuleContext<'input> for CsvFileContextExt<'input> {
+    type TF = LocalTokenFactory<'input>;
+    type Ctx = CSVParserContextType;
+    fn get_rule_index(&self) -> usize { RULE_csvFile }
+    //fn type_rule_index() -> usize where Self: Sized { RULE_csvFile }
 }
+antlr_rust::type_id! {CsvFileContextExt}

-pub trait CsvFileContextAttrs<'input>: CSVParserContext<'input> + BorrowMut<CsvFileContextExt<'input>>{
-
-fn hdr(&self) -> Option<Rc<HdrContextAll<'input>>> where Self:Sized{
-    self.child_of_type(0)
-}
-fn row_all(&self) ->  Vec<Rc<RowContextAll<'input>>> where Self:Sized{
-    self.children_of_type()
-}
-fn row(&self, i: usize) -> Option<Rc<RowContextAll<'input>>> where Self:Sized{
-    self.child_of_type(i)
+impl<'input> CsvFileContextExt<'input> {
+    fn new(
+        parent: Option<Rc<dyn CSVParserContext<'input> + 'input>>,
+        invoking_state: isize,
+    ) -> Rc<CsvFileContextAll<'input>> {
+        Rc::new(BaseParserRuleContext::new_parser_ctx(
+            parent,
+            invoking_state,
+            CsvFileContextExt { ph: PhantomData },
+        ))
+    }
 }

+pub trait CsvFileContextAttrs<'input>:
+    CSVParserContext<'input> + BorrowMut<CsvFileContextExt<'input>>
+{
+    fn hdr(&self) -> Option<Rc<HdrContextAll<'input>>>
+    where
+        Self: Sized,
+    {
+        self.child_of_type(0)
+    }
+    fn row_all(&self) -> Vec<Rc<RowContextAll<'input>>>
+    where
+        Self: Sized,
+    {
+        self.children_of_type()
+    }
+    fn row(&self, i: usize) -> Option<Rc<RowContextAll<'input>>>
+    where
+        Self: Sized,
+    {
+        self.child_of_type(i)
+    }
 }

-impl<'input> CsvFileContextAttrs<'input> for CsvFileContext<'input>{}
+impl<'input> CsvFileContextAttrs<'input> for CsvFileContext<'input> {}
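Editor's note: the generated `*Attrs` traits above are what give callers typed access to a node's children. A short sketch of what that buys, assuming `root` is the `Rc<CsvFileContextAll>` returned by `parser.csvFile()` in the earlier example (`count_fields` is a hypothetical helper, not generated code):

```rust
// Sketch: typed child access through the generated CsvFileContextAttrs trait.
fn count_fields(root: &CsvFileContextAll) -> usize {
    // hdr() / row_all() resolve children by their concrete context type.
    let _header = root.hdr(); // Option<Rc<HdrContextAll>>
    root.row_all() // Vec<Rc<RowContextAll>>
        .iter()
        .map(|row| row.field_all().len())
        .sum()
}
```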
-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > CSVParser<'input,I>{
-    pub fn csvFile(&mut self,)
-    -> Result<Rc<CsvFileContextAll<'input>>,ANTLRError> {
-        let mut recog = self;
-        let _parentctx = recog.ctx.take();
-        let mut _localctx = CsvFileContextExt::new(_parentctx.clone(), recog.base.get_state());
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> CSVParser<'input, I> {
+    pub fn csvFile(&mut self) -> Result<Rc<CsvFileContextAll<'input>>, ANTLRError> {
+        let mut recog = self;
+        let _parentctx = recog.ctx.take();
+        let mut _localctx = CsvFileContextExt::new(_parentctx.clone(), recog.base.get_state());
         recog.base.enter_rule(_localctx.clone(), 0, RULE_csvFile);
         let mut _localctx: Rc<CsvFileContextAll> = _localctx;
-        let mut _la: isize;
-        let result: Result<(), ANTLRError> = try {
-
-            //recog.base.enter_outer_alt(_localctx.clone(), 1);
-            recog.base.enter_outer_alt(None, 1);
-            {
-                /*InvokeRule hdr*/
-                recog.base.set_state(8);
-                recog.hdr()?;
-
-                recog.base.set_state(10);
-                recog.err_handler.sync(&mut recog.base)?;
-                _la = recog.base.input.la(1);
-                loop {
-                    {
-                        {
-                            /*InvokeRule row*/
-                            recog.base.set_state(9);
-                            recog.row()?;
-
-                        }
-                    }
-                    recog.base.set_state(12);
-                    recog.err_handler.sync(&mut recog.base)?;
-                    _la = recog.base.input.la(1);
-                    if !((((_la) & !0x3f) == 0 && ((1usize << _la) & ((1usize << T__0) | (1usize << T__1) | (1usize << T__2) | (1usize << TEXT) | (1usize << STRING))) != 0)) {break}
-                }
-            }
-        };
-        match result {
-            Ok(_)=>{},
-            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
-            Err(ref re) => {
-                //_localctx.exception = re;
-                recog.err_handler.report_error(&mut recog.base, re);
-                recog.err_handler.recover(&mut recog.base, re)?;
-            }
-        }
-        recog.base.exit_rule();
-
-        Ok(_localctx)
-    }
+        let mut _la: isize;
+        let result: Result<(), ANTLRError> = try {
+            //recog.base.enter_outer_alt(_localctx.clone(), 1);
+            recog.base.enter_outer_alt(None, 1);
+            {
+                /*InvokeRule hdr*/
+                recog.base.set_state(8);
+                recog.hdr()?;
+
+                recog.base.set_state(10);
+                recog.err_handler.sync(&mut recog.base)?;
+                _la = recog.base.input.la(1);
+                loop {
+                    {
+                        {
+                            /*InvokeRule row*/
+                            recog.base.set_state(9);
+                            recog.row()?;
+                        }
+                    }
+                    recog.base.set_state(12);
+                    recog.err_handler.sync(&mut recog.base)?;
+                    _la = recog.base.input.la(1);
+                    if !(((_la) & !0x3f) == 0
+                        && ((1usize << _la)
+                            & ((1usize << T__0)
+                                | (1usize << T__1)
+                                | (1usize << T__2)
+                                | (1usize << TEXT)
+                                | (1usize << STRING)))
+                            != 0)
+                    {
+                        break;
+                    }
+                }
+            }
+        };
+        match result {
+            Ok(_) => {}
+            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
+            Err(ref re) => {
+                //_localctx.exception = re;
+                recog.err_handler.report_error(&mut recog.base, re);
+                recog.err_handler.recover(&mut recog.base, re)?;
+            }
+        }
+        recog.base.exit_rule();
+
+        Ok(_localctx)
+    }
 }
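Editor's note: the loop guard above is ANTLR's usual "lookahead token is in the expected set" test compressed into a bitmask. A commented restatement as a sketch (same constants as the generated code; `starts_another_row` is a hypothetical name):

```rust
// Sketch: what the generated loop condition checks.
// Token types here are small integers; for types 0..=63 the set
// {T__0, T__1, T__2, TEXT, STRING} fits into a single usize mask.
fn starts_another_row(la: isize) -> bool {
    const ROW_START: usize = (1usize << T__0)
        | (1usize << T__1)
        | (1usize << T__2)
        | (1usize << TEXT)
        | (1usize << STRING);
    // `(la & !0x3f) == 0` guarantees 0 <= la < 64, keeping the shift in range.
    (la & !0x3f) == 0 && (1usize << la) & ROW_START != 0
}
```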
 //------------------- hdr ----------------
 pub type HdrContextAll<'input> = HdrContext<'input>;
-
-pub type HdrContext<'input> = BaseParserRuleContext<'input,HdrContextExt<'input>>;
+pub type HdrContext<'input> = BaseParserRuleContext<'input, HdrContextExt<'input>>;

 #[derive(Clone)]
-pub struct HdrContextExt<'input>{
-ph:PhantomData<&'input str>
+pub struct HdrContextExt<'input> {
+    ph: PhantomData<&'input str>,
 }

-impl<'input> CSVParserContext<'input> for HdrContext<'input>{}
+impl<'input> CSVParserContext<'input> for HdrContext<'input> {}

-impl<'input,'a> Listenable<dyn CSVListener<'input> + 'a> for HdrContext<'input>{
-    fn enter(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.enter_every_rule(self);
-        listener.enter_hdr(self);
-    }
-    fn exit(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.exit_hdr(self);
-        listener.exit_every_rule(self);
-    }
-}
-
-impl<'input,'a> Visitable<dyn CSVVisitor<'input> + 'a> for HdrContext<'input>{
-    fn accept(&self,visitor: &mut (dyn CSVVisitor<'input> + 'a)) {
-        visitor.visit_hdr(self);
-    }
+impl<'input, 'a> Listenable<dyn CSVListener<'input> + 'a> for HdrContext<'input> {
+    fn enter(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.enter_every_rule(self);
+        listener.enter_hdr(self);
+    }
+    fn exit(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.exit_hdr(self);
+        listener.exit_every_rule(self);
+    }
 }

-impl<'input> CustomRuleContext<'input> for HdrContextExt<'input>{
-    type TF = LocalTokenFactory<'input>;
-    type Ctx = CSVParserContextType;
-    fn get_rule_index(&self) -> usize { RULE_hdr }
-    //fn type_rule_index() -> usize where Self: Sized { RULE_hdr }
+impl<'input, 'a> Visitable<dyn CSVVisitor<'input> + 'a> for HdrContext<'input> {
+    fn accept(&self, visitor: &mut (dyn CSVVisitor<'input> + 'a)) { visitor.visit_hdr(self); }
 }
-antlr_rust::type_id!{HdrContextExt}

-impl<'input> HdrContextExt<'input>{
-    fn new(parent: Option<Rc<dyn CSVParserContext<'input> + 'input > >, invoking_state: isize) -> Rc<HdrContextAll<'input>> {
-        Rc::new(
-            BaseParserRuleContext::new_parser_ctx(parent, invoking_state,HdrContextExt{
-                ph:PhantomData
-            }),
-        )
-    }
+impl<'input> CustomRuleContext<'input> for HdrContextExt<'input> {
+    type TF = LocalTokenFactory<'input>;
+    type Ctx = CSVParserContextType;
+    fn get_rule_index(&self) -> usize { RULE_hdr }
+    //fn type_rule_index() -> usize where Self: Sized { RULE_hdr }
 }
+antlr_rust::type_id! {HdrContextExt}

-pub trait HdrContextAttrs<'input>: CSVParserContext<'input> + BorrowMut<HdrContextExt<'input>>{
-
-fn row(&self) -> Option<Rc<RowContextAll<'input>>> where Self:Sized{
-    self.child_of_type(0)
+impl<'input> HdrContextExt<'input> {
+    fn new(
+        parent: Option<Rc<dyn CSVParserContext<'input> + 'input>>,
+        invoking_state: isize,
+    ) -> Rc<HdrContextAll<'input>> {
+        Rc::new(BaseParserRuleContext::new_parser_ctx(
+            parent,
+            invoking_state,
+            HdrContextExt { ph: PhantomData },
+        ))
+    }
 }

+pub trait HdrContextAttrs<'input>:
+    CSVParserContext<'input> + BorrowMut<HdrContextExt<'input>>
+{
+    fn row(&self) -> Option<Rc<RowContextAll<'input>>>
+    where
+        Self: Sized,
+    {
+        self.child_of_type(0)
+    }
 }

-impl<'input> HdrContextAttrs<'input> for HdrContext<'input>{}
+impl<'input> HdrContextAttrs<'input> for HdrContext<'input> {}

-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > CSVParser<'input,I>{
-    pub fn hdr(&mut self,)
-    -> Result<Rc<HdrContextAll<'input>>,ANTLRError> {
-        let mut recog = self;
-        let _parentctx = recog.ctx.take();
-        let mut _localctx = HdrContextExt::new(_parentctx.clone(), recog.base.get_state());
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> CSVParser<'input, I> {
+    pub fn hdr(&mut self) -> Result<Rc<HdrContextAll<'input>>, ANTLRError> {
+        let mut recog = self;
+        let _parentctx = recog.ctx.take();
+        let mut _localctx = HdrContextExt::new(_parentctx.clone(), recog.base.get_state());
         recog.base.enter_rule(_localctx.clone(), 2, RULE_hdr);
         let mut _localctx: Rc<HdrContextAll> = _localctx;
-        let result: Result<(), ANTLRError> = try {
-
-            //recog.base.enter_outer_alt(_localctx.clone(), 1);
-            recog.base.enter_outer_alt(None, 1);
-            {
-                /*InvokeRule row*/
-                recog.base.set_state(14);
-                recog.row()?;
-
-            }
-        };
-        match result {
-            Ok(_)=>{},
-            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
-            Err(ref re) => {
-                //_localctx.exception = re;
-                recog.err_handler.report_error(&mut recog.base, re);
-                recog.err_handler.recover(&mut recog.base, re)?;
-            }
-        }
-        recog.base.exit_rule();
-
-        Ok(_localctx)
-    }
+        let result: Result<(), ANTLRError> = try {
+            //recog.base.enter_outer_alt(_localctx.clone(), 1);
+            recog.base.enter_outer_alt(None, 1);
+            {
+                /*InvokeRule row*/
+                recog.base.set_state(14);
+                recog.row()?;
+            }
+        };
+        match result {
+            Ok(_) => {}
+            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
+            Err(ref re) => {
+                //_localctx.exception = re;
+                recog.err_handler.report_error(&mut recog.base, re);
+                recog.err_handler.recover(&mut recog.base, re)?;
+            }
+        }
+        recog.base.exit_rule();
+
+        Ok(_localctx)
+    }
 }
 //------------------- row ----------------
 pub type RowContextAll<'input> = RowContext<'input>;
-
-pub type RowContext<'input> = BaseParserRuleContext<'input,RowContextExt<'input>>;
+pub type RowContext<'input> = BaseParserRuleContext<'input, RowContextExt<'input>>;

 #[derive(Clone)]
-pub struct RowContextExt<'input>{
-ph:PhantomData<&'input str>
+pub struct RowContextExt<'input> {
+    ph: PhantomData<&'input str>,
 }
-impl<'input> CSVParserContext<'input> for RowContext<'input>{}
+impl<'input> CSVParserContext<'input> for RowContext<'input> {}

-impl<'input,'a> Listenable<dyn CSVListener<'input> + 'a> for RowContext<'input>{
-    fn enter(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.enter_every_rule(self);
-        listener.enter_row(self);
-    }
-    fn exit(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.exit_row(self);
-        listener.exit_every_rule(self);
-    }
-}
-
-impl<'input,'a> Visitable<dyn CSVVisitor<'input> + 'a> for RowContext<'input>{
-    fn accept(&self,visitor: &mut (dyn CSVVisitor<'input> + 'a)) {
-        visitor.visit_row(self);
-    }
+impl<'input, 'a> Listenable<dyn CSVListener<'input> + 'a> for RowContext<'input> {
+    fn enter(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.enter_every_rule(self);
+        listener.enter_row(self);
+    }
+    fn exit(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.exit_row(self);
+        listener.exit_every_rule(self);
+    }
 }

-impl<'input> CustomRuleContext<'input> for RowContextExt<'input>{
-    type TF = LocalTokenFactory<'input>;
-    type Ctx = CSVParserContextType;
-    fn get_rule_index(&self) -> usize { RULE_row }
-    //fn type_rule_index() -> usize where Self: Sized { RULE_row }
+impl<'input, 'a> Visitable<dyn CSVVisitor<'input> + 'a> for RowContext<'input> {
+    fn accept(&self, visitor: &mut (dyn CSVVisitor<'input> + 'a)) { visitor.visit_row(self); }
 }
-antlr_rust::type_id!{RowContextExt}

-impl<'input> RowContextExt<'input>{
-    fn new(parent: Option<Rc<dyn CSVParserContext<'input> + 'input > >, invoking_state: isize) -> Rc<RowContextAll<'input>> {
-        Rc::new(
-            BaseParserRuleContext::new_parser_ctx(parent, invoking_state,RowContextExt{
-                ph:PhantomData
-            }),
-        )
-    }
+impl<'input> CustomRuleContext<'input> for RowContextExt<'input> {
+    type TF = LocalTokenFactory<'input>;
+    type Ctx = CSVParserContextType;
+    fn get_rule_index(&self) -> usize { RULE_row }
+    //fn type_rule_index() -> usize where Self: Sized { RULE_row }
 }
+antlr_rust::type_id! {RowContextExt}
-pub trait RowContextAttrs<'input>: CSVParserContext<'input> + BorrowMut<RowContextExt<'input>>{
-
-fn field_all(&self) ->  Vec<Rc<FieldContextAll<'input>>> where Self:Sized{
-    self.children_of_type()
-}
-fn field(&self, i: usize) -> Option<Rc<FieldContextAll<'input>>> where Self:Sized{
-    self.child_of_type(i)
+impl<'input> RowContextExt<'input> {
+    fn new(
+        parent: Option<Rc<dyn CSVParserContext<'input> + 'input>>,
+        invoking_state: isize,
+    ) -> Rc<RowContextAll<'input>> {
+        Rc::new(BaseParserRuleContext::new_parser_ctx(
+            parent,
+            invoking_state,
+            RowContextExt { ph: PhantomData },
+        ))
+    }
 }

+pub trait RowContextAttrs<'input>:
+    CSVParserContext<'input> + BorrowMut<RowContextExt<'input>>
+{
+    fn field_all(&self) -> Vec<Rc<FieldContextAll<'input>>>
+    where
+        Self: Sized,
+    {
+        self.children_of_type()
+    }
+    fn field(&self, i: usize) -> Option<Rc<FieldContextAll<'input>>>
+    where
+        Self: Sized,
+    {
+        self.child_of_type(i)
+    }
 }

-impl<'input> RowContextAttrs<'input> for RowContext<'input>{}
+impl<'input> RowContextAttrs<'input> for RowContext<'input> {}
-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > CSVParser<'input,I>{
-    pub fn row(&mut self,)
-    -> Result<Rc<RowContextAll<'input>>,ANTLRError> {
-        let mut recog = self;
-        let _parentctx = recog.ctx.take();
-        let mut _localctx = RowContextExt::new(_parentctx.clone(), recog.base.get_state());
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> CSVParser<'input, I> {
+    pub fn row(&mut self) -> Result<Rc<RowContextAll<'input>>, ANTLRError> {
+        let mut recog = self;
+        let _parentctx = recog.ctx.take();
+        let mut _localctx = RowContextExt::new(_parentctx.clone(), recog.base.get_state());
         recog.base.enter_rule(_localctx.clone(), 4, RULE_row);
         let mut _localctx: Rc<RowContextAll> = _localctx;
-        let mut _la: isize;
-        let result: Result<(), ANTLRError> = try {
-
-            //recog.base.enter_outer_alt(_localctx.clone(), 1);
-            recog.base.enter_outer_alt(None, 1);
-            {
-                /*InvokeRule field*/
-                recog.base.set_state(16);
-                recog.field()?;
-
-                recog.base.set_state(21);
-                recog.err_handler.sync(&mut recog.base)?;
-                _la = recog.base.input.la(1);
-                while _la==T__0 {
-                    {
-                        {
-                            recog.base.set_state(17);
-                            recog.base.match_token(T__0,recog.err_handler.as_mut())?;
-
-                            /*InvokeRule field*/
-                            recog.base.set_state(18);
-                            recog.field()?;
-
-                        }
-                    }
-                    recog.base.set_state(23);
-                    recog.err_handler.sync(&mut recog.base)?;
-                    _la = recog.base.input.la(1);
-                }
-                recog.base.set_state(25);
-                recog.err_handler.sync(&mut recog.base)?;
-                _la = recog.base.input.la(1);
-                if _la==T__1 {
-                    {
-                        recog.base.set_state(24);
-                        recog.base.match_token(T__1,recog.err_handler.as_mut())?;
-
-                    }
-                }
-
-                recog.base.set_state(27);
-                recog.base.match_token(T__2,recog.err_handler.as_mut())?;
-
-                println!("test");
-            }
-        };
-        match result {
-            Ok(_)=>{},
-            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
-            Err(ref re) => {
-                //_localctx.exception = re;
-                recog.err_handler.report_error(&mut recog.base, re);
-                recog.err_handler.recover(&mut recog.base, re)?;
-            }
-        }
-        recog.base.exit_rule();
-
-        Ok(_localctx)
-    }
+        let mut _la: isize;
+        let result: Result<(), ANTLRError> = try {
+            //recog.base.enter_outer_alt(_localctx.clone(), 1);
+            recog.base.enter_outer_alt(None, 1);
+            {
+                /*InvokeRule field*/
+                recog.base.set_state(16);
+                recog.field()?;
+
+                recog.base.set_state(21);
+                recog.err_handler.sync(&mut recog.base)?;
+                _la = recog.base.input.la(1);
+                while _la == T__0 {
+                    {
+                        {
+                            recog.base.set_state(17);
+                            recog.base.match_token(T__0, recog.err_handler.as_mut())?;
+
+                            /*InvokeRule field*/
+                            recog.base.set_state(18);
+                            recog.field()?;
+                        }
+                    }
+                    recog.base.set_state(23);
+                    recog.err_handler.sync(&mut recog.base)?;
+                    _la = recog.base.input.la(1);
+                }
+                recog.base.set_state(25);
+                recog.err_handler.sync(&mut recog.base)?;
+                _la = recog.base.input.la(1);
+                if _la == T__1 {
+                    {
+                        recog.base.set_state(24);
+                        recog.base.match_token(T__1, recog.err_handler.as_mut())?;
+                    }
+                }
+
+                recog.base.set_state(27);
+                recog.base.match_token(T__2, recog.err_handler.as_mut())?;
+
+                println!("test");
+            }
+        };
+        match result {
+            Ok(_) => {}
+            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
+            Err(ref re) => {
+                //_localctx.exception = re;
+                recog.err_handler.report_error(&mut recog.base, re);
+                recog.err_handler.recover(&mut recog.base, re)?;
+            }
+        }
+        recog.base.exit_rule();
+
+        Ok(_localctx)
+    }
 }
 //------------------- field ----------------
 pub type FieldContextAll<'input> = FieldContext<'input>;
-
-pub type FieldContext<'input> = BaseParserRuleContext<'input,FieldContextExt<'input>>;
+pub type FieldContext<'input> = BaseParserRuleContext<'input, FieldContextExt<'input>>;

 #[derive(Clone)]
-pub struct FieldContextExt<'input>{
-ph:PhantomData<&'input str>
+pub struct FieldContextExt<'input> {
+    ph: PhantomData<&'input str>,
 }

-impl<'input> CSVParserContext<'input> for FieldContext<'input>{}
-
-impl<'input,'a> Listenable<dyn CSVListener<'input> + 'a> for FieldContext<'input>{
-    fn enter(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.enter_every_rule(self);
-        listener.enter_field(self);
-    }
-    fn exit(&self,listener: &mut (dyn CSVListener<'input> + 'a)) {
-        listener.exit_field(self);
-        listener.exit_every_rule(self);
-    }
-}
+impl<'input> CSVParserContext<'input> for FieldContext<'input> {}

-impl<'input,'a> Visitable<dyn CSVVisitor<'input> + 'a> for FieldContext<'input>{
-    fn accept(&self,visitor: &mut (dyn CSVVisitor<'input> + 'a)) {
-        visitor.visit_field(self);
-    }
+impl<'input, 'a> Listenable<dyn CSVListener<'input> + 'a> for FieldContext<'input> {
+    fn enter(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.enter_every_rule(self);
+        listener.enter_field(self);
+    }
+    fn exit(&self, listener: &mut (dyn CSVListener<'input> + 'a)) {
+        listener.exit_field(self);
+        listener.exit_every_rule(self);
+    }
 }

-impl<'input> CustomRuleContext<'input> for FieldContextExt<'input>{
-    type TF = LocalTokenFactory<'input>;
-    type Ctx = CSVParserContextType;
-    fn get_rule_index(&self) -> usize { RULE_field }
-    //fn type_rule_index() -> usize where Self: Sized { RULE_field }
+impl<'input, 'a> Visitable<dyn CSVVisitor<'input> + 'a> for FieldContext<'input> {
+    fn accept(&self, visitor: &mut (dyn CSVVisitor<'input> + 'a)) { visitor.visit_field(self); }
 }
-antlr_rust::type_id!{FieldContextExt}

-impl<'input> FieldContextExt<'input>{
-    fn new(parent: Option<Rc<dyn CSVParserContext<'input> + 'input > >, invoking_state: isize) -> Rc<FieldContextAll<'input>> {
-        Rc::new(
-            BaseParserRuleContext::new_parser_ctx(parent, invoking_state,FieldContextExt{
-                ph:PhantomData
-            }),
-        )
-    }
+impl<'input> CustomRuleContext<'input> for FieldContextExt<'input> {
+    type TF = LocalTokenFactory<'input>;
+    type Ctx = CSVParserContextType;
+    fn get_rule_index(&self) -> usize { RULE_field }
+    //fn type_rule_index() -> usize where Self: Sized { RULE_field }
 }
+antlr_rust::type_id! {FieldContextExt}

-pub trait FieldContextAttrs<'input>: CSVParserContext<'input> + BorrowMut<FieldContextExt<'input>>{
-
-/// Retrieves first TerminalNode corresponding to token TEXT
-/// Returns `None` if there is no child corresponding to token TEXT
-fn TEXT(&self) -> Option<Rc<TerminalNode<'input,CSVParserContextType>>> where Self:Sized{
-    self.get_token(TEXT, 0)
-}
-/// Retrieves first TerminalNode corresponding to token STRING
-/// Returns `None` if there is no child corresponding to token STRING
-fn STRING(&self) -> Option<Rc<TerminalNode<'input,CSVParserContextType>>> where Self:Sized{
-    self.get_token(STRING, 0)
+impl<'input> FieldContextExt<'input> {
+    fn new(
+        parent: Option<Rc<dyn CSVParserContext<'input> + 'input>>,
+        invoking_state: isize,
+    ) -> Rc<FieldContextAll<'input>> {
+        Rc::new(BaseParserRuleContext::new_parser_ctx(
+            parent,
+            invoking_state,
+            FieldContextExt { ph: PhantomData },
+        ))
+    }
 }

+pub trait FieldContextAttrs<'input>:
+    CSVParserContext<'input> + BorrowMut<FieldContextExt<'input>>
+{
+    /// Retrieves first TerminalNode corresponding to token TEXT
+    /// Returns `None` if there is no child corresponding to token TEXT
+    fn TEXT(&self) -> Option<Rc<TerminalNode<'input, CSVParserContextType>>>
+    where
+        Self: Sized,
+    {
+        self.get_token(TEXT, 0)
+    }
+    /// Retrieves first TerminalNode corresponding to token STRING
+    /// Returns `None` if there is no child corresponding to token STRING
+    fn STRING(&self) -> Option<Rc<TerminalNode<'input, CSVParserContextType>>>
+    where
+        Self: Sized,
+    {
+        self.get_token(STRING, 0)
+    }
 }

-impl<'input> FieldContextAttrs<'input> for FieldContext<'input>{}
+impl<'input> FieldContextAttrs<'input> for FieldContext<'input> {}
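Editor's note: a sketch of reading one field through these token accessors, whichever of the three grammar alternatives matched (`field_text` is a hypothetical helper; it assumes `ParseTree` is brought into scope for `get_text`):

```rust
// Sketch: extract the text of a single field.
use antlr_rust::tree::ParseTree;

fn field_text(field: &FieldContextAll) -> String {
    if let Some(text) = field.TEXT() {
        text.get_text()
    } else if let Some(string) = field.STRING() {
        string.get_text()
    } else {
        String::new() // the third, empty alternative matched
    }
}
```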
-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > CSVParser<'input,I>{
-    pub fn field(&mut self,)
-    -> Result<Rc<FieldContextAll<'input>>,ANTLRError> {
-        let mut recog = self;
-        let _parentctx = recog.ctx.take();
-        let mut _localctx = FieldContextExt::new(_parentctx.clone(), recog.base.get_state());
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> CSVParser<'input, I> {
+    pub fn field(&mut self) -> Result<Rc<FieldContextAll<'input>>, ANTLRError> {
+        let mut recog = self;
+        let _parentctx = recog.ctx.take();
+        let mut _localctx = FieldContextExt::new(_parentctx.clone(), recog.base.get_state());
         recog.base.enter_rule(_localctx.clone(), 6, RULE_field);
         let mut _localctx: Rc<FieldContextAll> = _localctx;
-        let result: Result<(), ANTLRError> = try {
-
-            recog.base.set_state(33);
-            recog.err_handler.sync(&mut recog.base)?;
-            match recog.base.input.la(1) {
-                TEXT
-                => {
-                    //recog.base.enter_outer_alt(_localctx.clone(), 1);
-                    recog.base.enter_outer_alt(None, 1);
-                    {
-                        recog.base.set_state(30);
-                        recog.base.match_token(TEXT,recog.err_handler.as_mut())?;
-
-                    }
-                }
-
-                STRING
-                => {
-                    //recog.base.enter_outer_alt(_localctx.clone(), 2);
-                    recog.base.enter_outer_alt(None, 2);
-                    {
-                        recog.base.set_state(31);
-                        recog.base.match_token(STRING,recog.err_handler.as_mut())?;
-
-                    }
-                }
-
-                T__0 | T__1 | T__2
-                => {
-                    //recog.base.enter_outer_alt(_localctx.clone(), 3);
-                    recog.base.enter_outer_alt(None, 3);
-                    {
-                    }
-                }
-
-                _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))?
-            }
-        };
-        match result {
-            Ok(_)=>{},
-            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
-            Err(ref re) => {
-                //_localctx.exception = re;
-                recog.err_handler.report_error(&mut recog.base, re);
-                recog.err_handler.recover(&mut recog.base, re)?;
-            }
-        }
-        recog.base.exit_rule();
-
-        Ok(_localctx)
-    }
+        let result: Result<(), ANTLRError> = try {
+            recog.base.set_state(33);
+            recog.err_handler.sync(&mut recog.base)?;
+            match recog.base.input.la(1) {
+                TEXT => {
+                    //recog.base.enter_outer_alt(_localctx.clone(), 1);
+                    recog.base.enter_outer_alt(None, 1);
+                    {
+                        recog.base.set_state(30);
+                        recog.base.match_token(TEXT, recog.err_handler.as_mut())?;
+                    }
+                }
+
+                STRING => {
+                    //recog.base.enter_outer_alt(_localctx.clone(), 2);
+                    recog.base.enter_outer_alt(None, 2);
+                    {
+                        recog.base.set_state(31);
+                        recog.base.match_token(STRING, recog.err_handler.as_mut())?;
+                    }
+                }
+
+                T__0 | T__1 | T__2 => {
+                    //recog.base.enter_outer_alt(_localctx.clone(), 3);
+                    recog.base.enter_outer_alt(None, 3);
+                    {}
+                }
+
+                _ => Err(ANTLRError::NoAltError(NoViableAltError::new(
+                    &mut recog.base,
+                )))?,
+            }
+        };
+        match result {
+            Ok(_) => {}
+            Err(e @ ANTLRError::FallThrough(_)) => return Err(e),
+            Err(ref re) => {
+                //_localctx.exception = re;
+                recog.err_handler.report_error(&mut recog.base, re);
+                recog.err_handler.recover(&mut recog.base, re)?;
+            }
+        }
+        recog.base.exit_rule();
+
+        Ok(_localctx)
+    }
 }

 lazy_static! {
@@ -640,10 +666,8 @@ lazy_static! {
     };
 }

-
-
-const _serializedATN:&'static str =
-    "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\
+const _serializedATN: &'static str =
+    "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\
     \x08\x26\x04\x02\x09\x02\x04\x03\x09\x03\x04\x04\x09\x04\x04\x05\x09\x05\
     \x03\x02\x03\x02\x06\x02\x0d\x0a\x02\x0d\x02\x0e\x02\x0e\x03\x03\x03\x03\
     \x03\x04\x03\x04\x03\x04\x07\x04\x16\x0a\x04\x0c\x04\x0e\x04\x19\x0b\x04\
@@ -662,4 +686,3 @@ const _serializedATN:&'static str =
     \x02\x21\x24\x07\x08\x02\x02\x22\x24\x03\x02\x02\x02\x23\x20\x03\x02\x02\
     \x02\x23\x21\x03\x02\x02\x02\x23\x22\x03\x02\x02\x02\x24\x09\x03\x02\x02\
     \x02\x06\x0e\x17\x1b\x23";
-
diff --git a/tests/gen/csvvisitor.rs b/tests/gen/csvvisitor.rs
index 52cf635..ad18641 100644
--- a/tests/gen/csvvisitor.rs
+++ b/tests/gen/csvvisitor.rs
@@ -1,10 +1,8 @@
 #![allow(nonstandard_style)]
-
-use antlr_rust::parser_rule_context::RuleContextExt;
 // Generated from CSV.g4 by ANTLR 4.8
-use antlr_rust::tree::ParseTreeVisitor;
-
 use super::csvparser::*;
+use antlr_rust::parser_rule_context::RuleContextExt;
+use antlr_rust::tree::ParseTreeVisitor;

 /**
  * This interface defines a complete generic visitor for a parse tree produced
@@ -13,47 +11,42 @@ use super::csvparser::*;
  * @param <T> The return type of the visit operation. Use {@link Void} for
  * operations with no return type.
  */
-pub trait CSVVisitor<'input>: ParseTreeVisitor<'input,CSVParserContextType>{
-    /**
-     * Visit a parse tree produced by {@link CSVParser#csvFile}.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    fn visit_csvFile(&mut self, ctx: &CsvFileContext<'input>){
-        self.visit_children(ctx)
-    }
-    /**
-     * Visit a parse tree produced by {@link CSVParser#hdr}.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    fn visit_hdr(&mut self, ctx: &HdrContext<'input>){
-        self.visit_children(ctx)
-    }
-    /**
-     * Visit a parse tree produced by {@link CSVParser#row}.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    fn visit_row(&mut self, ctx: &RowContext<'input>){
-        self.visit_children(ctx)
-    }
-    /**
-     * Visit a parse tree produced by {@link CSVParser#field}.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    fn visit_field(&mut self, ctx: &FieldContext<'input>){
-        self.visit_children(ctx)
-    }
+pub trait CSVVisitor<'input>: ParseTreeVisitor<'input, CSVParserContextType> {
+    /**
+     * Visit a parse tree produced by {@link CSVParser#csvFile}.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    fn visit_csvFile(&mut self, ctx: &CsvFileContext<'input>) { self.visit_children(ctx) }
+    /**
+     * Visit a parse tree produced by {@link CSVParser#hdr}.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    fn visit_hdr(&mut self, ctx: &HdrContext<'input>) { self.visit_children(ctx) }
+    /**
+     * Visit a parse tree produced by {@link CSVParser#row}.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    fn visit_row(&mut self, ctx: &RowContext<'input>) { self.visit_children(ctx) }
+    /**
+     * Visit a parse tree produced by {@link CSVParser#field}.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    fn visit_field(&mut self, ctx: &FieldContext<'input>) { self.visit_children(ctx) }

-    /// By default recursively visits all childrens of the node.
-    /// Implement it if you want different default visiting logic.
-    fn visit_children(&mut self, node: &(dyn CSVParserContext<'input> + 'input));
+    /// By default recursively visits all children of the node.
+    /// Implement it if you want different default visiting logic.
+    fn visit_children(&mut self, node: &(dyn CSVParserContext<'input> + 'input));
 }

-impl<'input,T> CSVVisitor<'input> for T where T: CSVVisitor<'input>{
-    default fn visit_children(&mut self, node: &(dyn CSVParserContext<'input> + 'input)){
-        node.accept_children(self as &mut dyn CSVVisitor<'input>)
-    }
+impl<'input, T> CSVVisitor<'input> for T
+where
+    T: CSVVisitor<'input>,
+{
+    default fn visit_children(&mut self, node: &(dyn CSVParserContext<'input> + 'input)) {
+        node.accept_children(self as &mut dyn CSVVisitor<'input>)
+    }
 }
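Editor's note: thanks to the specialization-based default `visit_children` above, a concrete visitor only overrides the rules it cares about. A sketch (`FieldCollector` is hypothetical; the empty `ParseTreeVisitor` impl relies on that trait's default methods):

```rust
// Sketch: collect the text of every `field` node in a parsed CSV tree.
use antlr_rust::tree::{ParseTree, ParseTreeVisitor};

struct FieldCollector {
    fields: Vec<String>,
}

impl<'input> ParseTreeVisitor<'input, CSVParserContextType> for FieldCollector {}

impl<'input> CSVVisitor<'input> for FieldCollector {
    fn visit_field(&mut self, ctx: &FieldContext<'input>) {
        self.fields.push(ctx.get_text());
    }
}
```

Driving it goes through the generated `Visitable` impls, roughly `root.accept(&mut collector)` on the context returned by `csvFile()`.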
diff --git a/tests/gen/labelslexer.rs b/tests/gen/labelslexer.rs
index f72314a..00e6577 100644
--- a/tests/gen/labelslexer.rs
+++ b/tests/gen/labelslexer.rs
@@ -2,203 +2,198 @@
 #![allow(dead_code)]
 #![allow(nonstandard_style)]
 #![allow(unused_imports)]
-
-use std::cell::RefCell;
-use std::marker::PhantomData;
-use std::ops::{Deref, DerefMut};
-use std::rc::Rc;
-use std::sync::Arc;
-
 use antlr_rust::atn::ATN;
 use antlr_rust::atn_deserializer::ATNDeserializer;
 use antlr_rust::char_stream::CharStream;
 use antlr_rust::dfa::DFA;
 use antlr_rust::error_listener::ErrorListener;
 use antlr_rust::int_stream::IntStream;
-use antlr_rust::lazy_static;
 use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog};
 use antlr_rust::lexer_atn_simulator::{ILexerATNSimulator, LexerATNSimulator};
-use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, ParserRuleContext};
-use antlr_rust::PredictionContextCache;
+use antlr_rust::parser_rule_context::{cast, BaseParserRuleContext, ParserRuleContext};
 use antlr_rust::recognizer::{Actions, Recognizer};
 use antlr_rust::rule_context::{BaseRuleContext, EmptyContext, EmptyCustomRuleContext};
 use antlr_rust::token::*;
 use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory};
 use antlr_rust::token_source::TokenSource;
 use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl};
+use antlr_rust::PredictionContextCache;

-pub const T__0:isize=1;
-    pub const T__1:isize=2;
-    pub const T__2:isize=3;
-    pub const T__3:isize=4;
-    pub const T__4:isize=5;
-    pub const T__5:isize=6;
-    pub const ID:isize=7;
-    pub const INT:isize=8;
-    pub const WS:isize=9;
-    pub const channelNames: [&'static str;0+2] = [
-        "DEFAULT_TOKEN_CHANNEL", "HIDDEN"
-    ];
-
-    pub const modeNames: [&'static str;1] = [
-        "DEFAULT_MODE"
-    ];
-
-    pub const ruleNames: [&'static str;9] = [
-        "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "ID", "INT", "WS"
-    ];
-
-
-    pub const _LITERAL_NAMES: [Option<&'static str>;7] = [
-        None, Some("'*'"), Some("'+'"), Some("'('"), Some("')'"), Some("'++'"),
-        Some("'--'")
-    ];
-    pub const _SYMBOLIC_NAMES: [Option<&'static str>;10] = [
-        None, None, None, None, None, None, None, Some("ID"), Some("INT"), Some("WS")
-    ];
-    lazy_static!{
-        static ref _shared_context_cache: Arc<PredictionContextCache> = Arc::new(PredictionContextCache::new());
-        static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None));
-    }
-
-
-pub type LexerContext<'input> = BaseParserRuleContext<'input,EmptyCustomRuleContext<'input,LocalTokenFactory<'input> >>;
-pub type LocalTokenFactory<'input> = CommonTokenFactory;
+use antlr_rust::lazy_static;

-type From<'a> = <LocalTokenFactory<'a> as TokenFactory<'a> >::From;
+use std::cell::RefCell;
+use std::marker::PhantomData;
+use std::ops::{Deref, DerefMut};
+use std::rc::Rc;
+use std::sync::Arc;

-pub struct LabelsLexer<'input, Input:CharStream<From<'input> >> {
-    base: BaseLexer<'input,LabelsLexerActions,Input,LocalTokenFactory<'input>>,
-// static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
+pub const T__0: isize = 1;
+pub const T__1: isize = 2;
+pub const T__2: isize = 3;
+pub const T__3: isize = 4;
+pub const T__4: isize = 5;
+pub const T__5: isize = 6;
+pub const ID: isize = 7;
+pub const INT: isize = 8;
+pub const WS: isize = 9;
+pub const channelNames: [&'static str; 0 + 2] = ["DEFAULT_TOKEN_CHANNEL", "HIDDEN"];
+
+pub const modeNames: [&'static str; 1] = ["DEFAULT_MODE"];
+
+pub const ruleNames: [&'static str; 9] = [
+    "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "ID", "INT", "WS",
+];
+
+pub const _LITERAL_NAMES: [Option<&'static str>; 7] = [
+    None,
+    Some("'*'"),
+    Some("'+'"),
+    Some("'('"),
+    Some("')'"),
+    Some("'++'"),
+    Some("'--'"),
+];
+pub const _SYMBOLIC_NAMES: [Option<&'static str>; 10] = [
+    None,
+    None,
+    None,
+    None,
+    None,
+    None,
+    None,
+    Some("ID"),
+    Some("INT"),
+    Some("WS"),
+];
+lazy_static! {
+    static ref _shared_context_cache: Arc<PredictionContextCache> =
+        Arc::new(PredictionContextCache::new());
+    static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(
+        _LITERAL_NAMES.iter(),
+        _SYMBOLIC_NAMES.iter(),
+        None
+    ));
+}
+pub type LexerContext<'input> =
+    BaseParserRuleContext<'input, EmptyCustomRuleContext<'input, LocalTokenFactory<'input>>>;
+pub type LocalTokenFactory<'input> = CommonTokenFactory;
+
+type From<'a> = <LocalTokenFactory<'a> as TokenFactory<'a>>::From;
+
+pub struct LabelsLexer<'input, Input: CharStream<From<'input>>> {
+    base: BaseLexer<'input, LabelsLexerActions, Input, LocalTokenFactory<'input>>,
+    // static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
 }

-impl<'input, Input:CharStream<From<'input> >> Deref for LabelsLexer<'input,Input>{
-    type Target = BaseLexer<'input,LabelsLexerActions,Input,LocalTokenFactory<'input>>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.base
-    }
-}
+impl<'input, Input: CharStream<From<'input>>> Deref for LabelsLexer<'input, Input> {
+    type Target = BaseLexer<'input, LabelsLexerActions, Input, LocalTokenFactory<'input>>;

-impl<'input, Input:CharStream<From<'input> >> DerefMut for LabelsLexer<'input,Input>{
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.base
-    }
+    fn deref(&self) -> &Self::Target { &self.base }
 }

+impl<'input, Input: CharStream<From<'input>>> DerefMut for LabelsLexer<'input, Input> {
+    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base }
+}

-impl<'input, Input:CharStream<From<'input> >> LabelsLexer<'input,Input>{
-    fn get_rule_names(&self) -> &'static [&'static str] {
-        &ruleNames
-    }
-    fn get_literal_names(&self) -> &[Option<&str>] {
-        &_LITERAL_NAMES
-    }
-
-    fn get_symbolic_names(&self) -> &[Option<&str>] {
-        &_SYMBOLIC_NAMES
-    }
-
-    fn get_grammar_file_name(&self) -> &'static str {
-        "LabelsLexer.g4"
-    }
-
-    pub fn new_with_token_factory(input: Box<Input>,tf: &'input LocalTokenFactory<'input>) -> Self {
-        antlr_rust::recognizer::check_version("0","2");
-        Self {
-            base: BaseLexer::new_base_lexer(
-                input,
-                LexerATNSimulator::new_lexer_atnsimulator(
-                    _ATN.clone(),
-                    _decision_to_DFA.clone(),
-                    _shared_context_cache.clone(),
-                ),
-                LabelsLexerActions{},
-                tf
-            )
-        }
-    }
-}
+impl<'input, Input: CharStream<From<'input>>> LabelsLexer<'input, Input> {
+    fn get_rule_names(&self) -> &'static [&'static str] { &ruleNames }
+    fn get_literal_names(&self) -> &[Option<&str>] { &_LITERAL_NAMES }
+
+    fn get_symbolic_names(&self) -> &[Option<&str>] { &_SYMBOLIC_NAMES }
+
+    fn get_grammar_file_name(&self) -> &'static str { "LabelsLexer.g4" }
+
+    pub fn new_with_token_factory(
+        input: Box<Input>,
+        tf: &'input LocalTokenFactory<'input>,
+    ) -> Self {
+        antlr_rust::recognizer::check_version("0", "2");
+        Self {
+            base: BaseLexer::new_base_lexer(
+                input,
+                LexerATNSimulator::new_lexer_atnsimulator(
+                    _ATN.clone(),
+                    _decision_to_DFA.clone(),
+                    _shared_context_cache.clone(),
+                ),
+                LabelsLexerActions {},
+                tf,
+            ),
+        }
+    }
+}

-impl<'input, Input:CharStream<From<'input> >> LabelsLexer<'input,Input> where &'input LocalTokenFactory<'input>:Default{
-    pub fn new(input: Box<Input>) -> Self{
-        LabelsLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default())
-    }
-}
+impl<'input, Input: CharStream<From<'input>>> LabelsLexer<'input, Input>
+where
+    &'input LocalTokenFactory<'input>: Default,
+{
+    pub fn new(input: Box<Input>) -> Self {
+        LabelsLexer::new_with_token_factory(
+            input,
+            <&LocalTokenFactory<'input> as Default>::default(),
+        )
+    }
+}
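Editor's note: a sketch of exercising the lexer standalone. `new` is available here because the runtime provides a `Default` impl for `&CommonTokenFactory`; `dump_token_types` is a hypothetical helper, and the input string is only an example.

```rust
// Sketch: tokenize a string with LabelsLexer and print the token types.
use antlr_rust::input_stream::InputStream;
use antlr_rust::token::{Token, TOKEN_EOF};
use antlr_rust::token_source::TokenSource;

fn dump_token_types(input: &str) {
    let mut lexer = LabelsLexer::new(Box::new(InputStream::new(input.into())));
    loop {
        let token = lexer.next_token();
        if token.get_token_type() == TOKEN_EOF {
            break; // end of input reached
        }
        println!("type = {}", token.get_token_type());
    }
}
```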
-pub struct LabelsLexerActions {
-}
+pub struct LabelsLexerActions {}

-impl LabelsLexerActions{
-}
+impl LabelsLexerActions {}

-impl<'input, Input:CharStream<From<'input> >> Actions<'input,BaseLexer<'input,LabelsLexerActions,Input,LocalTokenFactory<'input>>> for LabelsLexerActions{
-    }
+impl<'input, Input: CharStream<From<'input>>>
+    Actions<'input, BaseLexer<'input, LabelsLexerActions, Input, LocalTokenFactory<'input>>>
+    for LabelsLexerActions
+{
+}

-    impl<'input, Input:CharStream<From<'input> >> LabelsLexer<'input,Input>{
-}
+impl<'input, Input: CharStream<From<'input>>> LabelsLexer<'input, Input> {}

-impl<'input, Input:CharStream<From<'input> >> LexerRecog<'input,BaseLexer<'input,LabelsLexerActions,Input,LocalTokenFactory<'input>>> for LabelsLexerActions{
+impl<'input, Input: CharStream<From<'input>>>
+    LexerRecog<'input, BaseLexer<'input, LabelsLexerActions, Input, LocalTokenFactory<'input>>>
+    for LabelsLexerActions
+{
 }

-impl<'input> TokenAware<'input> for LabelsLexerActions{
-    type TF = LocalTokenFactory<'input>;
+impl<'input> TokenAware<'input> for LabelsLexerActions {
+    type TF = LocalTokenFactory<'input>;
 }

-impl<'input, Input:CharStream<From<'input> >> TokenAware<'input> for LabelsLexer<'input,Input>{
-    type TF = LocalTokenFactory<'input>;
+impl<'input, Input: CharStream<From<'input>>> TokenAware<'input> for LabelsLexer<'input, Input> {
+    type TF = LocalTokenFactory<'input>;
 }

-impl<'input, Input:CharStream<From<'input> >> TokenSource<'input> for LabelsLexer<'input,Input>{
-
-    fn next_token(&mut self) -> <Self::TF as TokenFactory<'input>>::Tok {
-        self.base.next_token()
-    }
+impl<'input, Input: CharStream<From<'input>>> TokenSource<'input> for LabelsLexer<'input, Input> {
+    fn next_token(&mut self) -> <Self::TF as TokenFactory<'input>>::Tok { self.base.next_token() }

-    fn get_line(&self) -> isize {
-        self.base.get_line()
-    }
+    fn get_line(&self) -> isize { self.base.get_line() }

-    fn get_char_position_in_line(&self) -> isize {
-        self.base.get_char_position_in_line()
-    }
+    fn get_char_position_in_line(&self) -> isize { self.base.get_char_position_in_line() }

-    fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> {
-        self.base.get_input_stream()
-    }
+    fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { self.base.get_input_stream() }

-    fn get_source_name(&self) -> String {
-        self.base.get_source_name()
-    }
+    fn get_source_name(&self) -> String { self.base.get_source_name() }

-    fn get_token_factory(&self) -> &'input Self::TF {
-        self.base.get_token_factory()
-    }
+    fn get_token_factory(&self) -> &'input Self::TF { self.base.get_token_factory() }
 }
+lazy_static! {
+    static ref _ATN: Arc<ATN> =
+        Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars()));
+    static ref _decision_to_DFA: Arc<Vec<DFA>> = {
+        let mut dfa = Vec::new();
+        let size = _ATN.decision_to_state.len();
+        for i in 0..size {
+            dfa.push(DFA::new(
+                _ATN.clone(),
+                _ATN.get_decision_state(i),
+                i as isize,
+            ))
+        }
+        Arc::new(dfa)
+    };
+}

-
-
-    lazy_static! {
-        static ref _ATN: Arc<ATN> =
-            Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars()));
-        static ref _decision_to_DFA: Arc<Vec<DFA>> = {
-            let mut dfa = Vec::new();
-            let size = _ATN.decision_to_state.len();
-            for i in 0..size {
-                dfa.push(DFA::new(
-                    _ATN.clone(),
-                    _ATN.get_decision_state(i),
-                    i as isize,
-                ))
-            }
-            Arc::new(dfa)
-        };
-    }
-
-
-
-    const _serializedATN:&'static str =
-        "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\
+const _serializedATN: &'static str =
+    "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\
     \x0b\x31\x08\x01\x04\x02\x09\x02\x04\x03\x09\x03\x04\x04\x09\x04\x04\x05\
     \x09\x05\x04\x06\x09\x06\x04\x07\x09\x07\x04\x08\x09\x08\x04\x09\x09\x09\
     \x04\x0a\x09\x0a\x03\x02\x03\x02\x03\x03\x03\x03\x03\x04\x03\x04\x03\x05\
diff --git a/tests/gen/labelslistener.rs b/tests/gen/labelslistener.rs
index 3134166..88d3cec 100644
--- a/tests/gen/labelslistener.rs
+++ b/tests/gen/labelslistener.rs
@@ -1,115 +1,111 @@
 #![allow(nonstandard_style)]
-
-use std::any::Any;
-
-use antlr_rust::token_factory::CommonTokenFactory;
 // Generated from Labels.g4 by ANTLR 4.8
-use antlr_rust::tree::ParseTreeListener;
-
 use super::labelsparser::*;
+use antlr_rust::token_factory::CommonTokenFactory;
+use antlr_rust::tree::ParseTreeListener;

-pub trait LabelsListener<'input> : ParseTreeListener<'input,LabelsParserContextType>{
-
-/**
- * Enter a parse tree produced by {@link LabelsParser#s}.
- * @param ctx the parse tree
- */
-fn enter_s(&mut self, _ctx: &SContext<'input>) { }
-/**
- * Exit a parse tree produced by {@link LabelsParser#s}.
- * @param ctx the parse tree
- */
-fn exit_s(&mut self, _ctx: &SContext<'input>) { }
+use std::any::Any;

-/**
- * Enter a parse tree produced by the {@code add}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn enter_add(&mut self, _ctx: &AddContext<'input>) { }
-/**
- * Exit a parse tree produced by the {@code add}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn exit_add(&mut self, _ctx: &AddContext<'input>) { }
+pub trait LabelsListener<'input>: ParseTreeListener<'input, LabelsParserContextType> {
+    /**
+     * Enter a parse tree produced by {@link LabelsParser#s}.
+     * @param ctx the parse tree
+     */
+    fn enter_s(&mut self, _ctx: &SContext<'input>) {}
+    /**
+     * Exit a parse tree produced by {@link LabelsParser#s}.
+     * @param ctx the parse tree
+     */
+    fn exit_s(&mut self, _ctx: &SContext<'input>) {}
-/**
- * Enter a parse tree produced by the {@code parens}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn enter_parens(&mut self, _ctx: &ParensContext<'input>) { }
-/**
- * Exit a parse tree produced by the {@code parens}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn exit_parens(&mut self, _ctx: &ParensContext<'input>) { }
+    /**
+     * Enter a parse tree produced by the {@code add}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn enter_add(&mut self, _ctx: &AddContext<'input>) {}
+    /**
+     * Exit a parse tree produced by the {@code add}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn exit_add(&mut self, _ctx: &AddContext<'input>) {}

-/**
- * Enter a parse tree produced by the {@code mult}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn enter_mult(&mut self, _ctx: &MultContext<'input>) { }
-/**
- * Exit a parse tree produced by the {@code mult}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn exit_mult(&mut self, _ctx: &MultContext<'input>) { }
+    /**
+     * Enter a parse tree produced by the {@code parens}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn enter_parens(&mut self, _ctx: &ParensContext<'input>) {}
+    /**
+     * Exit a parse tree produced by the {@code parens}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn exit_parens(&mut self, _ctx: &ParensContext<'input>) {}

-/**
- * Enter a parse tree produced by the {@code dec}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn enter_dec(&mut self, _ctx: &DecContext<'input>) { }
-/**
- * Exit a parse tree produced by the {@code dec}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn exit_dec(&mut self, _ctx: &DecContext<'input>) { }
+    /**
+     * Enter a parse tree produced by the {@code mult}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn enter_mult(&mut self, _ctx: &MultContext<'input>) {}
+    /**
+     * Exit a parse tree produced by the {@code mult}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn exit_mult(&mut self, _ctx: &MultContext<'input>) {}

-/**
- * Enter a parse tree produced by the {@code anID}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn enter_anID(&mut self, _ctx: &AnIDContext<'input>) { }
-/**
- * Exit a parse tree produced by the {@code anID}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn exit_anID(&mut self, _ctx: &AnIDContext<'input>) { }
+    /**
+     * Enter a parse tree produced by the {@code dec}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn enter_dec(&mut self, _ctx: &DecContext<'input>) {}
+    /**
+     * Exit a parse tree produced by the {@code dec}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn exit_dec(&mut self, _ctx: &DecContext<'input>) {}

-/**
- * Enter a parse tree produced by the {@code anInt}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn enter_anInt(&mut self, _ctx: &AnIntContext<'input>) { }
-/**
- * Exit a parse tree produced by the {@code anInt}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn exit_anInt(&mut self, _ctx: &AnIntContext<'input>) { }
+    /**
+     * Enter a parse tree produced by the {@code anID}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn enter_anID(&mut self, _ctx: &AnIDContext<'input>) {}
+    /**
+     * Exit a parse tree produced by the {@code anID}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn exit_anID(&mut self, _ctx: &AnIDContext<'input>) {}
-/**
- * Enter a parse tree produced by the {@code inc}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn enter_inc(&mut self, _ctx: &IncContext<'input>) { }
-/**
- * Exit a parse tree produced by the {@code inc}
- * labeled alternative in {@link LabelsParser#e}.
- * @param ctx the parse tree
- */
-fn exit_inc(&mut self, _ctx: &IncContext<'input>) { }
+    /**
+     * Enter a parse tree produced by the {@code anInt}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn enter_anInt(&mut self, _ctx: &AnIntContext<'input>) {}
+    /**
+     * Exit a parse tree produced by the {@code anInt}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn exit_anInt(&mut self, _ctx: &AnIntContext<'input>) {}

+    /**
+     * Enter a parse tree produced by the {@code inc}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn enter_inc(&mut self, _ctx: &IncContext<'input>) {}
+    /**
+     * Exit a parse tree produced by the {@code inc}
+     * labeled alternative in {@link LabelsParser#e}.
+     * @param ctx the parse tree
+     */
+    fn exit_inc(&mut self, _ctx: &IncContext<'input>) {}
 }
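Editor's note: since every callback above has a default empty body, a concrete listener overrides only what it needs. A sketch (`OpCounter` is hypothetical; the empty `ParseTreeListener` impl relies on that trait's default methods):

```rust
// Sketch: count `add` and `mult` nodes while the tree is walked.
use antlr_rust::tree::ParseTreeListener;

struct OpCounter {
    adds: usize,
    mults: usize,
}

impl<'input> ParseTreeListener<'input, LabelsParserContextType> for OpCounter {}

impl<'input> LabelsListener<'input> for OpCounter {
    fn enter_add(&mut self, _ctx: &AddContext<'input>) { self.adds += 1; }
    fn enter_mult(&mut self, _ctx: &MultContext<'input>) { self.mults += 1; }
}
```

It would be driven by the `LabelsTreeWalker` alias from labelsparser.rs (the exact `walk` signature lives there).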
diff --git a/tests/gen/labelsparser.rs b/tests/gen/labelsparser.rs
index d1daa85..438e5d4 100644
--- a/tests/gen/labelsparser.rs
+++ b/tests/gen/labelsparser.rs
@@ -5,16 +5,7 @@
 #![allow(nonstandard_style)]
 #![allow(unused_imports)]
 #![allow(unused_mut)]
-
-use std::any::{Any, TypeId};
-use std::borrow::{Borrow, BorrowMut};
-use std::cell::RefCell;
-use std::convert::TryFrom;
-use std::marker::PhantomData;
-use std::ops::{Deref, DerefMut};
-use std::rc::Rc;
-use std::sync::Arc;
-
+use super::labelslistener::*;
 use antlr_rust::atn::{ATN, INVALID_ALT};
 use antlr_rust::atn_deserializer::ATNDeserializer;
 use antlr_rust::dfa::DFA;
@@ -24,996 +15,1294 @@ use antlr_rust::int_stream::EOF;
 use antlr_rust::lazy_static;
 use antlr_rust::parser::{BaseParser, Parser, ParserNodeType, ParserRecog};
 use antlr_rust::parser_atn_simulator::ParserATNSimulator;
-use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, cast_mut, ParserRuleContext};
-use antlr_rust::PredictionContextCache;
+use antlr_rust::parser_rule_context::{cast, cast_mut, BaseParserRuleContext, ParserRuleContext};
 use antlr_rust::recognizer::{Actions, Recognizer};
 use antlr_rust::rule_context::{BaseRuleContext, CustomRuleContext, RuleContext};
 use antlr_rust::token::{OwningToken, Token, TOKEN_EOF};
 use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory};
 use antlr_rust::token_source::TokenSource;
 use antlr_rust::token_stream::TokenStream;
-use antlr_rust::tree::{ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode, Visitable};
+use antlr_rust::tree::{
+    ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode,
+    Visitable,
+};
 use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl};
+use antlr_rust::PredictionContextCache;

-use super::labelslistener::*;
+use std::any::{Any, TypeId};
+use std::borrow::{Borrow, BorrowMut};
+use std::cell::RefCell;
+use std::convert::TryFrom;
+use std::marker::PhantomData;
+use std::ops::{Deref, DerefMut};
+use std::rc::Rc;
+use std::sync::Arc;

-pub const T__0:isize=1;
-    pub const T__1:isize=2;
-    pub const T__2:isize=3;
-    pub const T__3:isize=4;
-    pub const T__4:isize=5;
-    pub const T__5:isize=6;
-    pub const ID:isize=7;
-    pub const INT:isize=8;
-    pub const WS:isize=9;
-    pub const RULE_s:usize = 0;
-    pub const RULE_e:usize = 1;
-    pub const ruleNames: [&'static str; 2] = [
-        "s", "e"
-    ];
-
-
-    pub const _LITERAL_NAMES: [Option<&'static str>;7] = [
-        None, Some("'*'"), Some("'+'"), Some("'('"), Some("')'"), Some("'++'"),
-        Some("'--'")
-    ];
-    pub const _SYMBOLIC_NAMES: [Option<&'static str>;10] = [
-        None, None, None, None, None, None, None, Some("ID"), Some("INT"), Some("WS")
-    ];
-    lazy_static!{
-        static ref _shared_context_cache: Arc<PredictionContextCache> = Arc::new(PredictionContextCache::new());
-        static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None));
-    }
-
-
-type BaseParserType<'input, I> =
-    BaseParser<'input,LabelsParserExt, I, LabelsParserContextType , dyn LabelsListener<'input> + 'static >;
+pub const T__0: isize = 1;
+pub const T__1: isize = 2;
+pub const T__2: isize = 3;
+pub const T__3: isize = 4;
+pub const T__4: isize = 5;
+pub const T__5: isize = 6;
+pub const ID: isize = 7;
+pub const INT: isize = 8;
+pub const WS: isize = 9;
+pub const RULE_s: usize = 0;
+pub const RULE_e: usize = 1;
+pub const ruleNames: [&'static str; 2] = ["s", "e"];
+
+pub const _LITERAL_NAMES: [Option<&'static str>; 7] = [
+    None,
+    Some("'*'"),
+    Some("'+'"),
+    Some("'('"),
+    Some("')'"),
+    Some("'++'"),
+    Some("'--'"),
+];
+pub const _SYMBOLIC_NAMES: [Option<&'static str>; 10] = [
+    None,
+    None,
+    None,
+    None,
+    None,
+    None,
+    None,
+    Some("ID"),
+    Some("INT"),
+    Some("WS"),
+];
+lazy_static! {
+    static ref _shared_context_cache: Arc<PredictionContextCache> =
+        Arc::new(PredictionContextCache::new());
+    static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(
+        _LITERAL_NAMES.iter(),
+        _SYMBOLIC_NAMES.iter(),
+        None
+    ));
+}
+
+type BaseParserType<'input, I> = BaseParser<
+    'input,
+    LabelsParserExt,
+    I,
+    LabelsParserContextType,
+    dyn LabelsListener<'input> + 'static,
+>;
 type TokenType<'input> = <LocalTokenFactory<'input> as TokenFactory<'input>>::Tok;
 pub type LocalTokenFactory<'input> = CommonTokenFactory;

-pub type LabelsTreeWalker<'input,'a> =
-    ParseTreeWalker<'input, 'a, LabelsParserContextType , dyn LabelsListener<'input> + 'a>;
+pub type LabelsTreeWalker<'input, 'a> =
+    ParseTreeWalker<'input, 'a, LabelsParserContextType, dyn LabelsListener<'input> + 'a>;

-pub struct LabelsParser<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> {
-    base:BaseParserType<'input,I>,
-    interpreter:Arc<ParserATNSimulator>,
-    _shared_context_cache: Box<PredictionContextCache>,
-    pub err_handler: Box<dyn ErrorStrategy<'input, BaseParserType<'input, I>> + 'input>,
+pub struct LabelsParser<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> {
+    base: BaseParserType<'input, I>,
+    interpreter: Arc<ParserATNSimulator>,
+    _shared_context_cache: Box<PredictionContextCache>,
+    pub err_handler: Box<dyn ErrorStrategy<'input, BaseParserType<'input, I>> + 'input>,
 }

-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> LabelsParser<'input,I> {
-
-    pub fn get_serialized_atn() -> &'static str { unimplemented!() }
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> LabelsParser<'input, I> {
+    pub fn get_serialized_atn() -> &'static str { unimplemented!() }

-    pub fn set_error_strategy(&mut self, strategy: Box<dyn ErrorStrategy<'input, BaseParserType<'input, I>> >) {
+    pub fn set_error_strategy(
+        &mut self,
+        strategy: Box<dyn ErrorStrategy<'input, BaseParserType<'input, I>>>,
+    ) {
         self.err_handler = strategy
     }

     pub fn new(input: Box<I>) -> Self {
-        antlr_rust::recognizer::check_version("0","2");
-        let interpreter = Arc::new(ParserATNSimulator::new(
-            _ATN.clone(),
-            _decision_to_DFA.clone(),
-            _shared_context_cache.clone(),
-        ));
-        Self {
-            base: BaseParser::new_base_parser(
-                input,
-                Arc::clone(&interpreter),
-                LabelsParserExt{
-                }
-            ),
-            interpreter,
+        antlr_rust::recognizer::check_version("0", "2");
+        let interpreter = Arc::new(ParserATNSimulator::new(
+            _ATN.clone(),
+            _decision_to_DFA.clone(),
+            _shared_context_cache.clone(),
+        ));
+        Self {
+            base: BaseParser::new_base_parser(input, Arc::clone(&interpreter), LabelsParserExt {}),
+            interpreter,
             _shared_context_cache: Box::new(PredictionContextCache::new()),
-            err_handler: Box::new(DefaultErrorStrategy::<'input,LabelsParserContextType>::new()),
+            err_handler: Box::new(DefaultErrorStrategy::<'input, LabelsParserContextType>::new()),
         }
     }
 }

 /// Trait for monomorphized trait object that corresponds to nodes of parse tree generated by LabelsParser
 pub trait LabelsParserContext<'input>:
-    for<'x> Listenable<dyn LabelsListener<'input> + 'x > +
-    ParserRuleContext<'input, TF=LocalTokenFactory<'input>, Ctx=LabelsParserContextType>
-{}
+    for<'x> Listenable<dyn LabelsListener<'input> + 'x>
+    + ParserRuleContext<'input, TF = LocalTokenFactory<'input>, Ctx = LabelsParserContextType>
+{
+}

-impl<'input> LabelsParserContext<'input> for TerminalNode<'input,LabelsParserContextType> {}
-impl<'input> LabelsParserContext<'input> for ErrorNode<'input,LabelsParserContextType> {}
+impl<'input> LabelsParserContext<'input> for TerminalNode<'input, LabelsParserContextType> {}
+impl<'input> LabelsParserContext<'input> for ErrorNode<'input, LabelsParserContextType> {}

 pub struct LabelsParserContextType;
-impl<'input> ParserNodeType<'input> for LabelsParserContextType{
-    type TF = LocalTokenFactory<'input>;
-    type Type = dyn LabelsParserContext<'input> + 'input;
+impl<'input> ParserNodeType<'input> for LabelsParserContextType {
+    type TF = LocalTokenFactory<'input>;
+    type Type = dyn LabelsParserContext<'input> + 'input;
 }

-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Deref for LabelsParser<'input,I> {
-    type Target = BaseParserType<'input,I>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.base
-    }
-}
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> Deref
+    for LabelsParser<'input, I>
+{
+    type Target = BaseParserType<'input, I>;

-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > DerefMut for LabelsParser<'input,I> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.base
-    }
+    fn deref(&self) -> &Self::Target { &self.base }
 }

-pub struct LabelsParserExt{
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> DerefMut
+    for LabelsParser<'input, I>
+{
+    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base }
 }

-impl LabelsParserExt{
-}
+pub struct LabelsParserExt {}
+impl LabelsParserExt {}

-impl<'input> TokenAware<'input> for LabelsParserExt{
-    type TF = LocalTokenFactory<'input>;
+impl<'input> TokenAware<'input> for LabelsParserExt {
+    type TF = LocalTokenFactory<'input>;
 }

-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > ParserRecog<'input, BaseParserType<'input,I>> for LabelsParserExt{}
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>>
+    ParserRecog<'input, BaseParserType<'input, I>> for LabelsParserExt
+{
+}
-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Actions<'input, BaseParserType<'input,I>> for LabelsParserExt{
-    fn get_grammar_file_name(&self) -> & str{ "Labels.g4"}
-
-    fn get_rule_names(&self) -> &[& str] {&ruleNames}
-
-    fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY }
-    fn sempred(_localctx: &(dyn LabelsParserContext<'input> + 'input), rule_index: isize, pred_index: isize,
-               recog:&mut BaseParserType<'input,I>
-    )->bool{
-        match rule_index {
-            1 => LabelsParser::<'input,I>::e_sempred(cast::<_,EContext<'input> >(_localctx), pred_index, recog),
-            _ => true
-        }
-    }
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>>
+    Actions<'input, BaseParserType<'input, I>> for LabelsParserExt
+{
+    fn get_grammar_file_name(&self) -> &str { "Labels.g4" }
+
+    fn get_rule_names(&self) -> &[&str] { &ruleNames }
+
+    fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY }
+    fn sempred(
+        _localctx: &(dyn LabelsParserContext<'input> + 'input),
+        rule_index: isize,
+        pred_index: isize,
+        recog: &mut BaseParserType<'input, I>,
+    ) -> bool {
+        match rule_index {
+            1 => LabelsParser::<'input, I>::e_sempred(
+                cast::<_, EContext<'input>>(_localctx),
+                pred_index,
+                recog,
+            ),
+            _ => true,
+        }
+    }
 }

-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > LabelsParser<'input,I>{
-    fn e_sempred(_localctx: &EContext<'input>, pred_index:isize,
-                 recog:&mut <Self as Deref>::Target
-    ) -> bool {
-        match pred_index {
-            0=>{
-                recog.precpred(None, 7)
-            }
-            1=>{
-                recog.precpred(None, 6)
-            }
-            2=>{
-                recog.precpred(None, 3)
-            }
-            3=>{
-                recog.precpred(None, 2)
-            }
-            _ => true
-        }
-    }
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> LabelsParser<'input, I> {
+    fn e_sempred(
+        _localctx: &EContext<'input>,
+        pred_index: isize,
+        recog: &mut <Self as Deref>::Target,
+    ) -> bool {
+        match pred_index {
+            0 => recog.precpred(None, 7),
+            1 => recog.precpred(None, 6),
+            2 => recog.precpred(None, 3),
+            3 => recog.precpred(None, 2),
+            _ => true,
+        }
+    }
 }
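Editor's note: the four `precpred` calls are the precedence predicates ANTLR generates for the left-recursive rule `e`; the numbers come from the alternative order in Labels.g4, with higher values binding tighter (here 7 and 6 for the binary operators, 3 and 2 for the postfix ones). At the call site, the alternative the parser chose surfaces as a variant of the `EContextAll` enum defined just below; a sketch (`describe` is a hypothetical helper):

```rust
// Sketch: inspect which labeled alternative of `e` the parser produced.
fn describe(e: &EContextAll) -> &'static str {
    match e {
        EContextAll::AddContext(_) => "add",
        EContextAll::MultContext(_) => "mult",
        EContextAll::ParensContext(_) => "parens",
        EContextAll::DecContext(_) => "postfix --",
        EContextAll::IncContext(_) => "postfix ++",
        EContextAll::AnIDContext(_) => "identifier",
        EContextAll::AnIntContext(_) => "integer",
        EContextAll::Error(_) => "error node",
    }
}
```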
{SContextExt} + +impl<'input> SContextExt<'input> { + fn new( + parent: Option + 'input>>, + invoking_state: isize, + ) -> Rc> { + Rc::new(BaseParserRuleContext::new_parser_ctx( + parent, + invoking_state, + SContextExt { + q: None, + ph: PhantomData, + }, + )) + } } +pub trait SContextAttrs<'input>: + LabelsParserContext<'input> + BorrowMut> +{ + fn e(&self) -> Option>> + where + Self: Sized, + { + self.child_of_type(0) + } } -impl<'input> SContextAttrs<'input> for SContext<'input>{} +impl<'input> SContextAttrs<'input> for SContext<'input> {} -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > LabelsParser<'input,I>{ - pub fn s(&mut self,) - -> Result>,ANTLRError> { - let mut recog = self; - let _parentctx = recog.ctx.take(); - let mut _localctx = SContextExt::new(_parentctx.clone(), recog.base.get_state()); +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> LabelsParser<'input, I> { + pub fn s(&mut self) -> Result>, ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = SContextExt::new(_parentctx.clone(), recog.base.get_state()); recog.base.enter_rule(_localctx.clone(), 0, RULE_s); let mut _localctx: Rc = _localctx; - let result: Result<(), ANTLRError> = try { - - //recog.base.enter_outer_alt(_localctx.clone(), 1); - recog.base.enter_outer_alt(None, 1); - { - /*InvokeRule e*/ - recog.base.set_state(4); - let tmp = recog.e_rec(0)?; - cast_mut::<_,SContext >(&mut _localctx).q = Some(tmp.clone()); - - - } - }; - match result { - Ok(_)=>{}, - Err(e @ ANTLRError::FallThrough(_)) => return Err(e), - Err(ref re) => { - //_localctx.exception = re; - recog.err_handler.report_error(&mut recog.base, re); - recog.err_handler.recover(&mut recog.base, re)?; - } - } - recog.base.exit_rule(); - - Ok(_localctx) - } + let result: Result<(), ANTLRError> = try { + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule e*/ + recog.base.set_state(4); + let tmp = recog.e_rec(0)?; + cast_mut::<_, SContext>(&mut _localctx).q = Some(tmp.clone()); + } + }; + match result { + Ok(_) => {} + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } } //------------------- e ---------------- #[derive(Debug)] -pub enum EContextAll<'input>{ - AddContext(AddContext<'input>), - ParensContext(ParensContext<'input>), - MultContext(MultContext<'input>), - DecContext(DecContext<'input>), - AnIDContext(AnIDContext<'input>), - AnIntContext(AnIntContext<'input>), - IncContext(IncContext<'input>), -Error(EContext<'input>) -} -antlr_rust::type_id!{EContextAll} - -impl<'input> antlr_rust::parser_rule_context::DerefSeal for EContextAll<'input>{} - -impl<'input> LabelsParserContext<'input> for EContextAll<'input>{} - -impl<'input> Deref for EContextAll<'input>{ - type Target = dyn EContextAttrs<'input> + 'input; - fn deref(&self) -> &Self::Target{ - use EContextAll::*; - match self{ - AddContext(inner) => inner, - ParensContext(inner) => inner, - MultContext(inner) => inner, - DecContext(inner) => inner, - AnIDContext(inner) => inner, - AnIntContext(inner) => inner, - IncContext(inner) => inner, -Error(inner) => inner - } - } -} -impl<'input,'a> Listenable + 'a> for EContextAll<'input>{ - fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { self.deref().enter(listener) } - fn 
exit(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { self.deref().exit(listener) } +pub enum EContextAll<'input> { + AddContext(AddContext<'input>), + ParensContext(ParensContext<'input>), + MultContext(MultContext<'input>), + DecContext(DecContext<'input>), + AnIDContext(AnIDContext<'input>), + AnIntContext(AnIntContext<'input>), + IncContext(IncContext<'input>), + Error(EContext<'input>), } +antlr_rust::type_id! {EContextAll} +impl<'input> antlr_rust::parser_rule_context::DerefSeal for EContextAll<'input> {} +impl<'input> LabelsParserContext<'input> for EContextAll<'input> {} -pub type EContext<'input> = BaseParserRuleContext<'input,EContextExt<'input>>; - -#[derive(Clone)] -pub struct EContextExt<'input>{ - pub v: String, -ph:PhantomData<&'input str> +impl<'input> Deref for EContextAll<'input> { + type Target = dyn EContextAttrs<'input> + 'input; + fn deref(&self) -> &Self::Target { + use EContextAll::*; + match self { + AddContext(inner) => inner, + ParensContext(inner) => inner, + MultContext(inner) => inner, + DecContext(inner) => inner, + AnIDContext(inner) => inner, + AnIntContext(inner) => inner, + IncContext(inner) => inner, + Error(inner) => inner, + } + } } - -impl<'input> LabelsParserContext<'input> for EContext<'input>{} - -impl<'input,'a> Listenable + 'a> for EContext<'input>{ +impl<'input, 'a> Listenable + 'a> for EContextAll<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + self.deref().enter(listener) + } + fn exit(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { self.deref().exit(listener) } } -impl<'input> CustomRuleContext<'input> for EContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } -} -antlr_rust::type_id!{EContextExt} +pub type EContext<'input> = BaseParserRuleContext<'input, EContextExt<'input>>; -impl<'input> EContextExt<'input>{ - fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { - Rc::new( - EContextAll::Error( - BaseParserRuleContext::new_parser_ctx(parent, invoking_state,EContextExt{ - v: Default::default(), - ph:PhantomData - }), - ) - ) - } +#[derive(Clone)] +pub struct EContextExt<'input> { + pub v: String, + ph: PhantomData<&'input str>, +} + +impl<'input> LabelsParserContext<'input> for EContext<'input> {} + +impl<'input, 'a> Listenable + 'a> for EContext<'input> {} + +impl<'input> CustomRuleContext<'input> for EContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } +} +antlr_rust::type_id! 
{EContextExt} + +impl<'input> EContextExt<'input> { + fn new( + parent: Option + 'input>>, + invoking_state: isize, + ) -> Rc> { + Rc::new(EContextAll::Error(BaseParserRuleContext::new_parser_ctx( + parent, + invoking_state, + EContextExt { + v: Default::default(), + ph: PhantomData, + }, + ))) + } } -pub trait EContextAttrs<'input>: LabelsParserContext<'input> + BorrowMut>{ - -fn get_v<'a>(&'a self) -> &'a String where 'input: 'a { &self.borrow().v } -fn set_v(&mut self,attr: String) { self.borrow_mut().v = attr; } - +pub trait EContextAttrs<'input>: + LabelsParserContext<'input> + BorrowMut> +{ + fn get_v<'a>(&'a self) -> &'a String + where + 'input: 'a, + { + &self.borrow().v + } + fn set_v(&mut self, attr: String) { self.borrow_mut().v = attr; } } -impl<'input> EContextAttrs<'input> for EContext<'input>{} +impl<'input> EContextAttrs<'input> for EContext<'input> {} -pub type AddContext<'input> = BaseParserRuleContext<'input,AddContextExt<'input>>; +pub type AddContext<'input> = BaseParserRuleContext<'input, AddContextExt<'input>>; -pub trait AddContextAttrs<'input>: LabelsParserContext<'input>{ - fn e_all(&self) -> Vec>> where Self:Sized{ - self.children_of_type() - } - fn e(&self, i: usize) -> Option>> where Self:Sized{ - self.child_of_type(i) - } +pub trait AddContextAttrs<'input>: LabelsParserContext<'input> { + fn e_all(&self) -> Vec>> + where + Self: Sized, + { + self.children_of_type() + } + fn e(&self, i: usize) -> Option>> + where + Self: Sized, + { + self.child_of_type(i) + } } -impl<'input> AddContextAttrs<'input> for AddContext<'input>{} +impl<'input> AddContextAttrs<'input> for AddContext<'input> {} -pub struct AddContextExt<'input>{ - base:EContextExt<'input>, - pub a: Option>>, - pub b: Option>>, - ph:PhantomData<&'input str> +pub struct AddContextExt<'input> { + base: EContextExt<'input>, + pub a: Option>>, + pub b: Option>>, + ph: PhantomData<&'input str>, } -antlr_rust::type_id!{AddContextExt} +antlr_rust::type_id! 
{AddContextExt} -impl<'input> LabelsParserContext<'input> for AddContext<'input>{} +impl<'input> LabelsParserContext<'input> for AddContext<'input> {} -impl<'input,'a> Listenable + 'a> for AddContext<'input>{ - fn enter(&self,listener: &mut (dyn LabelsListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_add(self); - } +impl<'input, 'a> Listenable + 'a> for AddContext<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_add(self); + } } -impl<'input> CustomRuleContext<'input> for AddContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } +impl<'input> CustomRuleContext<'input> for AddContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } } -impl<'input> Borrow> for AddContext<'input>{ - fn borrow(&self) -> &EContextExt<'input> { &self.base } +impl<'input> Borrow> for AddContext<'input> { + fn borrow(&self) -> &EContextExt<'input> { &self.base } } -impl<'input> BorrowMut> for AddContext<'input>{ - fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } +impl<'input> BorrowMut> for AddContext<'input> { + fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } } impl<'input> EContextAttrs<'input> for AddContext<'input> {} -impl<'input> AddContextExt<'input>{ - fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { - //let base = (cast::<_,EContext>(&ctx)); - Rc::new( - EContextAll::AddContext( - BaseParserRuleContext::copy_from(ctx,AddContextExt{ - a:None, b:None, - base: ctx.borrow().clone(), - ph:PhantomData - }) - ) - ) - } +impl<'input> AddContextExt<'input> { + fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { + //let base = (cast::<_,EContext>(&ctx)); + Rc::new(EContextAll::AddContext(BaseParserRuleContext::copy_from( + ctx, + AddContextExt { + a: None, + b: None, + base: ctx.borrow().clone(), + ph: PhantomData, + }, + ))) + } } -pub type ParensContext<'input> = BaseParserRuleContext<'input,ParensContextExt<'input>>; +pub type ParensContext<'input> = BaseParserRuleContext<'input, ParensContextExt<'input>>; -pub trait ParensContextAttrs<'input>: LabelsParserContext<'input>{ - fn e(&self) -> Option>> where Self:Sized{ - self.child_of_type(0) - } +pub trait ParensContextAttrs<'input>: LabelsParserContext<'input> { + fn e(&self) -> Option>> + where + Self: Sized, + { + self.child_of_type(0) + } } -impl<'input> ParensContextAttrs<'input> for ParensContext<'input>{} +impl<'input> ParensContextAttrs<'input> for ParensContext<'input> {} -pub struct ParensContextExt<'input>{ - base:EContextExt<'input>, - pub x: Option>>, - ph:PhantomData<&'input str> +pub struct ParensContextExt<'input> { + base: EContextExt<'input>, + pub x: Option>>, + ph: PhantomData<&'input str>, } -antlr_rust::type_id!{ParensContextExt} +antlr_rust::type_id! 
{ParensContextExt} -impl<'input> LabelsParserContext<'input> for ParensContext<'input>{} +impl<'input> LabelsParserContext<'input> for ParensContext<'input> {} -impl<'input,'a> Listenable + 'a> for ParensContext<'input>{ - fn enter(&self,listener: &mut (dyn LabelsListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_parens(self); - } +impl<'input, 'a> Listenable + 'a> for ParensContext<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_parens(self); + } } -impl<'input> CustomRuleContext<'input> for ParensContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } +impl<'input> CustomRuleContext<'input> for ParensContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } } -impl<'input> Borrow> for ParensContext<'input>{ - fn borrow(&self) -> &EContextExt<'input> { &self.base } +impl<'input> Borrow> for ParensContext<'input> { + fn borrow(&self) -> &EContextExt<'input> { &self.base } } -impl<'input> BorrowMut> for ParensContext<'input>{ - fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } +impl<'input> BorrowMut> for ParensContext<'input> { + fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } } impl<'input> EContextAttrs<'input> for ParensContext<'input> {} -impl<'input> ParensContextExt<'input>{ - fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { - //let base = (cast::<_,EContext>(&ctx)); - Rc::new( - EContextAll::ParensContext( - BaseParserRuleContext::copy_from(ctx,ParensContextExt{ - x:None, - base: ctx.borrow().clone(), - ph:PhantomData - }) - ) - ) - } +impl<'input> ParensContextExt<'input> { + fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { + //let base = (cast::<_,EContext>(&ctx)); + Rc::new(EContextAll::ParensContext( + BaseParserRuleContext::copy_from( + ctx, + ParensContextExt { + x: None, + base: ctx.borrow().clone(), + ph: PhantomData, + }, + ), + )) + } } -pub type MultContext<'input> = BaseParserRuleContext<'input,MultContextExt<'input>>; +pub type MultContext<'input> = BaseParserRuleContext<'input, MultContextExt<'input>>; -pub trait MultContextAttrs<'input>: LabelsParserContext<'input>{ - fn e_all(&self) -> Vec>> where Self:Sized{ - self.children_of_type() - } - fn e(&self, i: usize) -> Option>> where Self:Sized{ - self.child_of_type(i) - } +pub trait MultContextAttrs<'input>: LabelsParserContext<'input> { + fn e_all(&self) -> Vec>> + where + Self: Sized, + { + self.children_of_type() + } + fn e(&self, i: usize) -> Option>> + where + Self: Sized, + { + self.child_of_type(i) + } } -impl<'input> MultContextAttrs<'input> for MultContext<'input>{} +impl<'input> MultContextAttrs<'input> for MultContext<'input> {} -pub struct MultContextExt<'input>{ - base:EContextExt<'input>, - pub a: Option>>, - pub op: Option>, - pub b: Option>>, - ph:PhantomData<&'input str> +pub struct MultContextExt<'input> { + base: EContextExt<'input>, + pub a: Option>>, + pub op: Option>, + pub b: Option>>, + ph: PhantomData<&'input str>, } -antlr_rust::type_id!{MultContextExt} +antlr_rust::type_id! 
{MultContextExt} -impl<'input> LabelsParserContext<'input> for MultContext<'input>{} +impl<'input> LabelsParserContext<'input> for MultContext<'input> {} -impl<'input,'a> Listenable + 'a> for MultContext<'input>{ - fn enter(&self,listener: &mut (dyn LabelsListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_mult(self); - } +impl<'input, 'a> Listenable + 'a> for MultContext<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_mult(self); + } } -impl<'input> CustomRuleContext<'input> for MultContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } +impl<'input> CustomRuleContext<'input> for MultContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } } -impl<'input> Borrow> for MultContext<'input>{ - fn borrow(&self) -> &EContextExt<'input> { &self.base } +impl<'input> Borrow> for MultContext<'input> { + fn borrow(&self) -> &EContextExt<'input> { &self.base } } -impl<'input> BorrowMut> for MultContext<'input>{ - fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } +impl<'input> BorrowMut> for MultContext<'input> { + fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } } impl<'input> EContextAttrs<'input> for MultContext<'input> {} -impl<'input> MultContextExt<'input>{ - fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { - //let base = (cast::<_,EContext>(&ctx)); - Rc::new( - EContextAll::MultContext( - BaseParserRuleContext::copy_from(ctx,MultContextExt{ - op:None, - a:None, b:None, - base: ctx.borrow().clone(), - ph:PhantomData - }) - ) - ) - } +impl<'input> MultContextExt<'input> { + fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { + //let base = (cast::<_,EContext>(&ctx)); + Rc::new(EContextAll::MultContext(BaseParserRuleContext::copy_from( + ctx, + MultContextExt { + op: None, + a: None, + b: None, + base: ctx.borrow().clone(), + ph: PhantomData, + }, + ))) + } } -pub type DecContext<'input> = BaseParserRuleContext<'input,DecContextExt<'input>>; +pub type DecContext<'input> = BaseParserRuleContext<'input, DecContextExt<'input>>; -pub trait DecContextAttrs<'input>: LabelsParserContext<'input>{ - fn e(&self) -> Option>> where Self:Sized{ - self.child_of_type(0) - } +pub trait DecContextAttrs<'input>: LabelsParserContext<'input> { + fn e(&self) -> Option>> + where + Self: Sized, + { + self.child_of_type(0) + } } -impl<'input> DecContextAttrs<'input> for DecContext<'input>{} +impl<'input> DecContextAttrs<'input> for DecContext<'input> {} -pub struct DecContextExt<'input>{ - base:EContextExt<'input>, - pub x: Option>>, - ph:PhantomData<&'input str> +pub struct DecContextExt<'input> { + base: EContextExt<'input>, + pub x: Option>>, + ph: PhantomData<&'input str>, } -antlr_rust::type_id!{DecContextExt} +antlr_rust::type_id! 
{DecContextExt} -impl<'input> LabelsParserContext<'input> for DecContext<'input>{} +impl<'input> LabelsParserContext<'input> for DecContext<'input> {} -impl<'input,'a> Listenable + 'a> for DecContext<'input>{ - fn enter(&self,listener: &mut (dyn LabelsListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_dec(self); - } +impl<'input, 'a> Listenable + 'a> for DecContext<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_dec(self); + } } -impl<'input> CustomRuleContext<'input> for DecContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } +impl<'input> CustomRuleContext<'input> for DecContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } } -impl<'input> Borrow> for DecContext<'input>{ - fn borrow(&self) -> &EContextExt<'input> { &self.base } +impl<'input> Borrow> for DecContext<'input> { + fn borrow(&self) -> &EContextExt<'input> { &self.base } } -impl<'input> BorrowMut> for DecContext<'input>{ - fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } +impl<'input> BorrowMut> for DecContext<'input> { + fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } } impl<'input> EContextAttrs<'input> for DecContext<'input> {} -impl<'input> DecContextExt<'input>{ - fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { - //let base = (cast::<_,EContext>(&ctx)); - Rc::new( - EContextAll::DecContext( - BaseParserRuleContext::copy_from(ctx,DecContextExt{ - x:None, - base: ctx.borrow().clone(), - ph:PhantomData - }) - ) - ) - } +impl<'input> DecContextExt<'input> { + fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { + //let base = (cast::<_,EContext>(&ctx)); + Rc::new(EContextAll::DecContext(BaseParserRuleContext::copy_from( + ctx, + DecContextExt { + x: None, + base: ctx.borrow().clone(), + ph: PhantomData, + }, + ))) + } } -pub type AnIDContext<'input> = BaseParserRuleContext<'input,AnIDContextExt<'input>>; +pub type AnIDContext<'input> = BaseParserRuleContext<'input, AnIDContextExt<'input>>; -pub trait AnIDContextAttrs<'input>: LabelsParserContext<'input>{ - /// Retrieves first TerminalNode corresponding to token ID - /// Returns `None` if there is no child corresponding to token ID - fn ID(&self) -> Option>> where Self:Sized{ - self.get_token(ID, 0) - } +pub trait AnIDContextAttrs<'input>: LabelsParserContext<'input> { + /// Retrieves first TerminalNode corresponding to token ID + /// Returns `None` if there is no child corresponding to token ID + fn ID(&self) -> Option>> + where + Self: Sized, + { + self.get_token(ID, 0) + } } -impl<'input> AnIDContextAttrs<'input> for AnIDContext<'input>{} +impl<'input> AnIDContextAttrs<'input> for AnIDContext<'input> {} -pub struct AnIDContextExt<'input>{ - base:EContextExt<'input>, - pub ID: Option>, - ph:PhantomData<&'input str> +pub struct AnIDContextExt<'input> { + base: EContextExt<'input>, + pub ID: Option>, + ph: PhantomData<&'input str>, } -antlr_rust::type_id!{AnIDContextExt} +antlr_rust::type_id! 
{AnIDContextExt} -impl<'input> LabelsParserContext<'input> for AnIDContext<'input>{} +impl<'input> LabelsParserContext<'input> for AnIDContext<'input> {} -impl<'input,'a> Listenable + 'a> for AnIDContext<'input>{ - fn enter(&self,listener: &mut (dyn LabelsListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_anID(self); - } +impl<'input, 'a> Listenable + 'a> for AnIDContext<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_anID(self); + } } -impl<'input> CustomRuleContext<'input> for AnIDContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } +impl<'input> CustomRuleContext<'input> for AnIDContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } } -impl<'input> Borrow> for AnIDContext<'input>{ - fn borrow(&self) -> &EContextExt<'input> { &self.base } +impl<'input> Borrow> for AnIDContext<'input> { + fn borrow(&self) -> &EContextExt<'input> { &self.base } } -impl<'input> BorrowMut> for AnIDContext<'input>{ - fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } +impl<'input> BorrowMut> for AnIDContext<'input> { + fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } } impl<'input> EContextAttrs<'input> for AnIDContext<'input> {} -impl<'input> AnIDContextExt<'input>{ - fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { - //let base = (cast::<_,EContext>(&ctx)); - Rc::new( - EContextAll::AnIDContext( - BaseParserRuleContext::copy_from(ctx,AnIDContextExt{ - ID:None, - base: ctx.borrow().clone(), - ph:PhantomData - }) - ) - ) - } +impl<'input> AnIDContextExt<'input> { + fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { + //let base = (cast::<_,EContext>(&ctx)); + Rc::new(EContextAll::AnIDContext(BaseParserRuleContext::copy_from( + ctx, + AnIDContextExt { + ID: None, + base: ctx.borrow().clone(), + ph: PhantomData, + }, + ))) + } } -pub type AnIntContext<'input> = BaseParserRuleContext<'input,AnIntContextExt<'input>>; +pub type AnIntContext<'input> = BaseParserRuleContext<'input, AnIntContextExt<'input>>; -pub trait AnIntContextAttrs<'input>: LabelsParserContext<'input>{ - /// Retrieves first TerminalNode corresponding to token INT - /// Returns `None` if there is no child corresponding to token INT - fn INT(&self) -> Option>> where Self:Sized{ - self.get_token(INT, 0) - } +pub trait AnIntContextAttrs<'input>: LabelsParserContext<'input> { + /// Retrieves first TerminalNode corresponding to token INT + /// Returns `None` if there is no child corresponding to token INT + fn INT(&self) -> Option>> + where + Self: Sized, + { + self.get_token(INT, 0) + } } -impl<'input> AnIntContextAttrs<'input> for AnIntContext<'input>{} +impl<'input> AnIntContextAttrs<'input> for AnIntContext<'input> {} -pub struct AnIntContextExt<'input>{ - base:EContextExt<'input>, - pub INT: Option>, - ph:PhantomData<&'input str> +pub struct AnIntContextExt<'input> { + base: EContextExt<'input>, + pub INT: Option>, + ph: PhantomData<&'input str>, } -antlr_rust::type_id!{AnIntContextExt} +antlr_rust::type_id! 
{AnIntContextExt} -impl<'input> LabelsParserContext<'input> for AnIntContext<'input>{} +impl<'input> LabelsParserContext<'input> for AnIntContext<'input> {} -impl<'input,'a> Listenable + 'a> for AnIntContext<'input>{ - fn enter(&self,listener: &mut (dyn LabelsListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_anInt(self); - } +impl<'input, 'a> Listenable + 'a> for AnIntContext<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_anInt(self); + } } -impl<'input> CustomRuleContext<'input> for AnIntContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } +impl<'input> CustomRuleContext<'input> for AnIntContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } } -impl<'input> Borrow> for AnIntContext<'input>{ - fn borrow(&self) -> &EContextExt<'input> { &self.base } +impl<'input> Borrow> for AnIntContext<'input> { + fn borrow(&self) -> &EContextExt<'input> { &self.base } } -impl<'input> BorrowMut> for AnIntContext<'input>{ - fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } +impl<'input> BorrowMut> for AnIntContext<'input> { + fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } } impl<'input> EContextAttrs<'input> for AnIntContext<'input> {} -impl<'input> AnIntContextExt<'input>{ - fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { - //let base = (cast::<_,EContext>(&ctx)); - Rc::new( - EContextAll::AnIntContext( - BaseParserRuleContext::copy_from(ctx,AnIntContextExt{ - INT:None, - base: ctx.borrow().clone(), - ph:PhantomData - }) - ) - ) - } +impl<'input> AnIntContextExt<'input> { + fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { + //let base = (cast::<_,EContext>(&ctx)); + Rc::new(EContextAll::AnIntContext(BaseParserRuleContext::copy_from( + ctx, + AnIntContextExt { + INT: None, + base: ctx.borrow().clone(), + ph: PhantomData, + }, + ))) + } } -pub type IncContext<'input> = BaseParserRuleContext<'input,IncContextExt<'input>>; +pub type IncContext<'input> = BaseParserRuleContext<'input, IncContextExt<'input>>; -pub trait IncContextAttrs<'input>: LabelsParserContext<'input>{ - fn e(&self) -> Option>> where Self:Sized{ - self.child_of_type(0) - } +pub trait IncContextAttrs<'input>: LabelsParserContext<'input> { + fn e(&self) -> Option>> + where + Self: Sized, + { + self.child_of_type(0) + } } -impl<'input> IncContextAttrs<'input> for IncContext<'input>{} +impl<'input> IncContextAttrs<'input> for IncContext<'input> {} -pub struct IncContextExt<'input>{ - base:EContextExt<'input>, - pub x: Option>>, - ph:PhantomData<&'input str> +pub struct IncContextExt<'input> { + base: EContextExt<'input>, + pub x: Option>>, + ph: PhantomData<&'input str>, } -antlr_rust::type_id!{IncContextExt} +antlr_rust::type_id! 
{IncContextExt} -impl<'input> LabelsParserContext<'input> for IncContext<'input>{} +impl<'input> LabelsParserContext<'input> for IncContext<'input> {} -impl<'input,'a> Listenable + 'a> for IncContext<'input>{ - fn enter(&self,listener: &mut (dyn LabelsListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_inc(self); - } +impl<'input, 'a> Listenable + 'a> for IncContext<'input> { + fn enter(&self, listener: &mut (dyn LabelsListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_inc(self); + } } -impl<'input> CustomRuleContext<'input> for IncContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = LabelsParserContextType; - fn get_rule_index(&self) -> usize { RULE_e } - //fn type_rule_index() -> usize where Self: Sized { RULE_e } +impl<'input> CustomRuleContext<'input> for IncContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = LabelsParserContextType; + fn get_rule_index(&self) -> usize { RULE_e } + //fn type_rule_index() -> usize where Self: Sized { RULE_e } } -impl<'input> Borrow> for IncContext<'input>{ - fn borrow(&self) -> &EContextExt<'input> { &self.base } +impl<'input> Borrow> for IncContext<'input> { + fn borrow(&self) -> &EContextExt<'input> { &self.base } } -impl<'input> BorrowMut> for IncContext<'input>{ - fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } +impl<'input> BorrowMut> for IncContext<'input> { + fn borrow_mut(&mut self) -> &mut EContextExt<'input> { &mut self.base } } impl<'input> EContextAttrs<'input> for IncContext<'input> {} -impl<'input> IncContextExt<'input>{ - fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { - //let base = (cast::<_,EContext>(&ctx)); - Rc::new( - EContextAll::IncContext( - BaseParserRuleContext::copy_from(ctx,IncContextExt{ - x:None, - base: ctx.borrow().clone(), - ph:PhantomData - }) - ) - ) - } -} - -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > LabelsParser<'input,I> { - - pub fn e(&mut self,) - -> Result>,ANTLRError> { - self.e_rec(0) - } - - fn e_rec(&mut self, _p: isize) - -> Result>,ANTLRError> { - let recog = self; - let _parentctx = recog.ctx.take(); - let _parentState = recog.base.get_state(); - let mut _localctx = EContextExt::new(_parentctx.clone(), recog.base.get_state()); - recog.base.enter_recursion_rule(_localctx.clone(), 2, RULE_e, _p); - let mut _localctx: Rc = _localctx; +impl<'input> IncContextExt<'input> { + fn new(ctx: &dyn EContextAttrs<'input>) -> Rc> { + //let base = (cast::<_,EContext>(&ctx)); + Rc::new(EContextAll::IncContext(BaseParserRuleContext::copy_from( + ctx, + IncContextExt { + x: None, + base: ctx.borrow().clone(), + ph: PhantomData, + }, + ))) + } +} + +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> LabelsParser<'input, I> { + pub fn e(&mut self) -> Result>, ANTLRError> { self.e_rec(0) } + + fn e_rec(&mut self, _p: isize) -> Result>, ANTLRError> { + let recog = self; + let _parentctx = recog.ctx.take(); + let _parentState = recog.base.get_state(); + let mut _localctx = EContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog + .base + .enter_recursion_rule(_localctx.clone(), 2, RULE_e, _p); + let mut _localctx: Rc = _localctx; let mut _prevctx = _localctx.clone(); - let _startState = 2; - let result: Result<(), ANTLRError> = try { - let mut _alt: isize; - //recog.base.enter_outer_alt(_localctx.clone(), 1); - recog.base.enter_outer_alt(None, 1); - { - recog.base.set_state(16); - recog.err_handler.sync(&mut recog.base)?; - match recog.base.input.la(1) { - 
INT - => { - { - let mut tmp = AnIntContextExt::new(&**_localctx); - recog.ctx = Some(tmp.clone()); - _localctx = tmp; - _prevctx = _localctx.clone(); - - recog.base.set_state(7); - let tmp = recog.base.match_token(INT,recog.err_handler.as_mut())?; - if let EContextAll::AnIntContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.INT = Some(tmp.clone()); } else {unreachable!("cant cast");} - - let tmp = { if let Some(it) = &if let EContextAll::AnIntContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .INT { it.get_text() } else { "null" } .to_owned()}.to_owned(); - if let EContextAll::AnIntContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.set_v(tmp); } else {unreachable!("cant cast");} - } - } - - T__2 - => { - { - let mut tmp = ParensContextExt::new(&**_localctx); - recog.ctx = Some(tmp.clone()); - _localctx = tmp; - _prevctx = _localctx.clone(); - recog.base.set_state(9); - recog.base.match_token(T__2,recog.err_handler.as_mut())?; - - /*InvokeRule e*/ - recog.base.set_state(10); - let tmp = recog.e_rec(0)?; - if let EContextAll::ParensContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.x = Some(tmp.clone()); } else {unreachable!("cant cast");} - - recog.base.set_state(11); - recog.base.match_token(T__3,recog.err_handler.as_mut())?; - - let tmp = { if let EContextAll::ParensContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .x.as_ref().unwrap().get_v()}.to_owned(); - if let EContextAll::ParensContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.set_v(tmp); } else {unreachable!("cant cast");} - } - } - - ID - => { - { - let mut tmp = AnIDContextExt::new(&**_localctx); - recog.ctx = Some(tmp.clone()); - _localctx = tmp; - _prevctx = _localctx.clone(); - recog.base.set_state(14); - let tmp = recog.base.match_token(ID,recog.err_handler.as_mut())?; - if let EContextAll::AnIDContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.ID = Some(tmp.clone()); } else {unreachable!("cant cast");} - - let tmp = { if let Some(it) = &if let EContextAll::AnIDContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .ID { it.get_text() } else { "null" } .to_owned()}.to_owned(); - if let EContextAll::AnIDContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.set_v(tmp); } else {unreachable!("cant cast");} - } - } - - _ => Err(ANTLRError::NoAltError(NoViableAltError::new(&mut recog.base)))? - } - let tmp = recog.input.lt(-1).cloned(); - recog.ctx.as_ref().unwrap().set_stop(tmp); - recog.base.set_state(36); - recog.err_handler.sync(&mut recog.base)?; - _alt = recog.interpreter.adaptive_predict(2,&mut recog.base)?; - while { _alt!=2 && _alt!=INVALID_ALT } { - if _alt==1 { - recog.trigger_exit_rule_event(); - _prevctx = _localctx.clone(); - { - recog.base.set_state(34); - recog.err_handler.sync(&mut recog.base)?; - match recog.interpreter.adaptive_predict(1,&mut recog.base)? 
{ - 1 =>{ - { - /*recRuleLabeledAltStartAction*/ - let mut tmp = MultContextExt::new(&**EContextExt::new(_parentctx.clone(), _parentState)); - if let EContextAll::MultContext(ctx) = cast_mut::<_,EContextAll >(&mut tmp){ - ctx.a = Some(_prevctx.clone()); - } else {unreachable!("cant cast");} - recog.push_new_recursion_context(tmp.clone(), _startState, RULE_e); - _localctx = tmp; - recog.base.set_state(18); - if !({recog.precpred(None, 7)}) { - Err(FailedPredicateError::new(&mut recog.base, Some("recog.precpred(None, 7)".to_owned()), None))?; - } - recog.base.set_state(19); - let tmp = recog.base.match_token(T__0,recog.err_handler.as_mut())?; - if let EContextAll::MultContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.op = Some(tmp.clone()); } else {unreachable!("cant cast");} - - /*InvokeRule e*/ - recog.base.set_state(20); - let tmp = recog.e_rec(8)?; - if let EContextAll::MultContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.b = Some(tmp.clone()); } else {unreachable!("cant cast");} - - let tmp = { "* ".to_owned() + if let EContextAll::MultContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .a.as_ref().unwrap().get_v() + " " + if let EContextAll::MultContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .b.as_ref().unwrap().get_v()}.to_owned(); - if let EContextAll::MultContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.set_v(tmp); } else {unreachable!("cant cast");} - } - } - , - 2 =>{ - { - /*recRuleLabeledAltStartAction*/ - let mut tmp = AddContextExt::new(&**EContextExt::new(_parentctx.clone(), _parentState)); - if let EContextAll::AddContext(ctx) = cast_mut::<_,EContextAll >(&mut tmp){ - ctx.a = Some(_prevctx.clone()); - } else {unreachable!("cant cast");} - recog.push_new_recursion_context(tmp.clone(), _startState, RULE_e); - _localctx = tmp; - recog.base.set_state(23); - if !({recog.precpred(None, 6)}) { - Err(FailedPredicateError::new(&mut recog.base, Some("recog.precpred(None, 6)".to_owned()), None))?; - } - recog.base.set_state(24); - recog.base.match_token(T__1,recog.err_handler.as_mut())?; - - /*InvokeRule e*/ - recog.base.set_state(25); - let tmp = recog.e_rec(7)?; - if let EContextAll::AddContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.b = Some(tmp.clone()); } else {unreachable!("cant cast");} - - let tmp = { "+ ".to_owned() + if let EContextAll::AddContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .a.as_ref().unwrap().get_v() + " " + if let EContextAll::AddContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .b.as_ref().unwrap().get_v()}.to_owned(); - if let EContextAll::AddContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.set_v(tmp); } else {unreachable!("cant cast");} - } - } - , - 3 =>{ - { - /*recRuleLabeledAltStartAction*/ - let mut tmp = IncContextExt::new(&**EContextExt::new(_parentctx.clone(), _parentState)); - if let EContextAll::IncContext(ctx) = cast_mut::<_,EContextAll >(&mut tmp){ - ctx.x = Some(_prevctx.clone()); - } else {unreachable!("cant cast");} - recog.push_new_recursion_context(tmp.clone(), _startState, RULE_e); - _localctx = tmp; - recog.base.set_state(28); - if !({recog.precpred(None, 3)}) { - Err(FailedPredicateError::new(&mut recog.base, Some("recog.precpred(None, 3)".to_owned()), None))?; - } - recog.base.set_state(29); - recog.base.match_token(T__4,recog.err_handler.as_mut())?; - - let tmp = { " ++".to_owned() + 
if let EContextAll::IncContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .x.as_ref().unwrap().get_v()}.to_owned(); - if let EContextAll::IncContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.set_v(tmp); } else {unreachable!("cant cast");} - } - } - , - 4 =>{ - { - /*recRuleLabeledAltStartAction*/ - let mut tmp = DecContextExt::new(&**EContextExt::new(_parentctx.clone(), _parentState)); - if let EContextAll::DecContext(ctx) = cast_mut::<_,EContextAll >(&mut tmp){ - ctx.x = Some(_prevctx.clone()); - } else {unreachable!("cant cast");} - recog.push_new_recursion_context(tmp.clone(), _startState, RULE_e); - _localctx = tmp; - recog.base.set_state(31); - if !({recog.precpred(None, 2)}) { - Err(FailedPredicateError::new(&mut recog.base, Some("recog.precpred(None, 2)".to_owned()), None))?; - } - recog.base.set_state(32); - recog.base.match_token(T__5,recog.err_handler.as_mut())?; - - let tmp = { " --".to_owned() + if let EContextAll::DecContext(ctx) = cast::<_,EContextAll >(&*_localctx){ - ctx } else {unreachable!("cant cast")} .x.as_ref().unwrap().get_v()}.to_owned(); - if let EContextAll::DecContext(ctx) = cast_mut::<_,EContextAll >(&mut _localctx){ - ctx.set_v(tmp); } else {unreachable!("cant cast");} - } - } - - _ => {} - } - } - } - recog.base.set_state(38); - recog.err_handler.sync(&mut recog.base)?; - _alt = recog.interpreter.adaptive_predict(2,&mut recog.base)?; - } - } - }; - match result { - Ok(_) => {}, - Err(e @ ANTLRError::FallThrough(_)) => return Err(e), - Err(ref re)=>{ - //_localctx.exception = re; - recog.err_handler.report_error(&mut recog.base, re); - recog.err_handler.recover(&mut recog.base, re)?;} - } - recog.base.unroll_recursion_context(_parentctx); - - Ok(_localctx) - } + let _startState = 2; + let result: Result<(), ANTLRError> = try { + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(16); + recog.err_handler.sync(&mut recog.base)?; + match recog.base.input.la(1) { + INT => { + let mut tmp = AnIntContextExt::new(&**_localctx); + recog.ctx = Some(tmp.clone()); + _localctx = tmp; + _prevctx = _localctx.clone(); + + recog.base.set_state(7); + let tmp = recog.base.match_token(INT, recog.err_handler.as_mut())?; + if let EContextAll::AnIntContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.INT = Some(tmp.clone()); + } else { + unreachable!("cant cast"); + } + + let tmp = { + if let Some(it) = &if let EContextAll::AnIntContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .INT + { + it.get_text() + } else { + "null" + } + .to_owned() + } + .to_owned(); + if let EContextAll::AnIntContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.set_v(tmp); + } else { + unreachable!("cant cast"); + } + } + + T__2 => { + { + let mut tmp = ParensContextExt::new(&**_localctx); + recog.ctx = Some(tmp.clone()); + _localctx = tmp; + _prevctx = _localctx.clone(); + recog.base.set_state(9); + recog.base.match_token(T__2, recog.err_handler.as_mut())?; + + /*InvokeRule e*/ + recog.base.set_state(10); + let tmp = recog.e_rec(0)?; + if let EContextAll::ParensContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.x = Some(tmp.clone()); + } else { + unreachable!("cant cast"); + } + + recog.base.set_state(11); + recog.base.match_token(T__3, recog.err_handler.as_mut())?; + + let tmp = { + if let EContextAll::ParensContext(ctx) = + cast::<_, 
EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .x + .as_ref() + .unwrap() + .get_v() + } + .to_owned(); + if let EContextAll::ParensContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.set_v(tmp); + } else { + unreachable!("cant cast"); + } + } + } + + ID => { + let mut tmp = AnIDContextExt::new(&**_localctx); + recog.ctx = Some(tmp.clone()); + _localctx = tmp; + _prevctx = _localctx.clone(); + recog.base.set_state(14); + let tmp = recog.base.match_token(ID, recog.err_handler.as_mut())?; + if let EContextAll::AnIDContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.ID = Some(tmp.clone()); + } else { + unreachable!("cant cast"); + } + + let tmp = { + if let Some(it) = &if let EContextAll::AnIDContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .ID + { + it.get_text() + } else { + "null" + } + .to_owned() + } + .to_owned(); + if let EContextAll::AnIDContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.set_v(tmp); + } else { + unreachable!("cant cast"); + } + } + + _ => Err(ANTLRError::NoAltError(NoViableAltError::new( + &mut recog.base, + )))?, + } + let tmp = recog.input.lt(-1).cloned(); + recog.ctx.as_ref().unwrap().set_stop(tmp); + recog.base.set_state(36); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(2, &mut recog.base)?; + while { _alt != 2 && _alt != INVALID_ALT } { + if _alt == 1 { + recog.trigger_exit_rule_event(); + _prevctx = _localctx.clone(); + { + recog.base.set_state(34); + recog.err_handler.sync(&mut recog.base)?; + match recog.interpreter.adaptive_predict(1, &mut recog.base)? { + 1 => { + { + /*recRuleLabeledAltStartAction*/ + let mut tmp = MultContextExt::new(&**EContextExt::new( + _parentctx.clone(), + _parentState, + )); + if let EContextAll::MultContext(ctx) = + cast_mut::<_, EContextAll>(&mut tmp) + { + ctx.a = Some(_prevctx.clone()); + } else { + unreachable!("cant cast"); + } + recog.push_new_recursion_context( + tmp.clone(), + _startState, + RULE_e, + ); + _localctx = tmp; + recog.base.set_state(18); + if !({ recog.precpred(None, 7) }) { + Err(FailedPredicateError::new( + &mut recog.base, + Some("recog.precpred(None, 7)".to_owned()), + None, + ))?; + } + recog.base.set_state(19); + let tmp = recog + .base + .match_token(T__0, recog.err_handler.as_mut())?; + if let EContextAll::MultContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.op = Some(tmp.clone()); + } else { + unreachable!("cant cast"); + } + + /*InvokeRule e*/ + recog.base.set_state(20); + let tmp = recog.e_rec(8)?; + if let EContextAll::MultContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.b = Some(tmp.clone()); + } else { + unreachable!("cant cast"); + } + + let tmp = { + "* ".to_owned() + + if let EContextAll::MultContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .a + .as_ref() + .unwrap() + .get_v() + + " " + + if let EContextAll::MultContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .b + .as_ref() + .unwrap() + .get_v() + } + .to_owned(); + if let EContextAll::MultContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.set_v(tmp); + } else { + unreachable!("cant cast"); + } + } + } + 2 => { + { + /*recRuleLabeledAltStartAction*/ + let mut tmp = AddContextExt::new(&**EContextExt::new( + _parentctx.clone(), + _parentState, + )); + if let 
EContextAll::AddContext(ctx) = + cast_mut::<_, EContextAll>(&mut tmp) + { + ctx.a = Some(_prevctx.clone()); + } else { + unreachable!("cant cast"); + } + recog.push_new_recursion_context( + tmp.clone(), + _startState, + RULE_e, + ); + _localctx = tmp; + recog.base.set_state(23); + if !({ recog.precpred(None, 6) }) { + Err(FailedPredicateError::new( + &mut recog.base, + Some("recog.precpred(None, 6)".to_owned()), + None, + ))?; + } + recog.base.set_state(24); + recog.base.match_token(T__1, recog.err_handler.as_mut())?; + + /*InvokeRule e*/ + recog.base.set_state(25); + let tmp = recog.e_rec(7)?; + if let EContextAll::AddContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.b = Some(tmp.clone()); + } else { + unreachable!("cant cast"); + } + + let tmp = { + "+ ".to_owned() + + if let EContextAll::AddContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .a + .as_ref() + .unwrap() + .get_v() + + " " + + if let EContextAll::AddContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .b + .as_ref() + .unwrap() + .get_v() + } + .to_owned(); + if let EContextAll::AddContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.set_v(tmp); + } else { + unreachable!("cant cast"); + } + } + } + 3 => { + { + /*recRuleLabeledAltStartAction*/ + let mut tmp = IncContextExt::new(&**EContextExt::new( + _parentctx.clone(), + _parentState, + )); + if let EContextAll::IncContext(ctx) = + cast_mut::<_, EContextAll>(&mut tmp) + { + ctx.x = Some(_prevctx.clone()); + } else { + unreachable!("cant cast"); + } + recog.push_new_recursion_context( + tmp.clone(), + _startState, + RULE_e, + ); + _localctx = tmp; + recog.base.set_state(28); + if !({ recog.precpred(None, 3) }) { + Err(FailedPredicateError::new( + &mut recog.base, + Some("recog.precpred(None, 3)".to_owned()), + None, + ))?; + } + recog.base.set_state(29); + recog.base.match_token(T__4, recog.err_handler.as_mut())?; + + let tmp = { + " ++".to_owned() + + if let EContextAll::IncContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .x + .as_ref() + .unwrap() + .get_v() + } + .to_owned(); + if let EContextAll::IncContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.set_v(tmp); + } else { + unreachable!("cant cast"); + } + } + } + 4 => { + { + /*recRuleLabeledAltStartAction*/ + let mut tmp = DecContextExt::new(&**EContextExt::new( + _parentctx.clone(), + _parentState, + )); + if let EContextAll::DecContext(ctx) = + cast_mut::<_, EContextAll>(&mut tmp) + { + ctx.x = Some(_prevctx.clone()); + } else { + unreachable!("cant cast"); + } + recog.push_new_recursion_context( + tmp.clone(), + _startState, + RULE_e, + ); + _localctx = tmp; + recog.base.set_state(31); + if !({ recog.precpred(None, 2) }) { + Err(FailedPredicateError::new( + &mut recog.base, + Some("recog.precpred(None, 2)".to_owned()), + None, + ))?; + } + recog.base.set_state(32); + recog.base.match_token(T__5, recog.err_handler.as_mut())?; + + let tmp = { + " --".to_owned() + + if let EContextAll::DecContext(ctx) = + cast::<_, EContextAll>(&*_localctx) + { + ctx + } else { + unreachable!("cant cast") + } + .x + .as_ref() + .unwrap() + .get_v() + } + .to_owned(); + if let EContextAll::DecContext(ctx) = + cast_mut::<_, EContextAll>(&mut _localctx) + { + ctx.set_v(tmp); + } else { + unreachable!("cant cast"); + } + } + } + + _ => {} + } + } + } + recog.base.set_state(38); + recog.err_handler.sync(&mut 
recog.base)?; + _alt = recog.interpreter.adaptive_predict(2, &mut recog.base)?; + } + } + }; + match result { + Ok(_) => {} + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.unroll_recursion_context(_parentctx); + + Ok(_localctx) + } } lazy_static! { @@ -1033,10 +1322,8 @@ lazy_static! { }; } - - -const _serializedATN:&'static str = - "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\ +const _serializedATN: &'static str = + "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\ \x0b\x2a\x04\x02\x09\x02\x04\x03\x09\x03\x03\x02\x03\x02\x03\x03\x03\x03\ \x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x05\x03\ \x13\x0a\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\x03\ @@ -1057,4 +1344,3 @@ const _serializedATN:&'static str = \x19\x03\x02\x02\x02\x24\x1e\x03\x02\x02\x02\x24\x21\x03\x02\x02\x02\x25\ \x28\x03\x02\x02\x02\x26\x24\x03\x02\x02\x02\x26\x27\x03\x02\x02\x02\x27\ \x05\x03\x02\x02\x02\x28\x26\x03\x02\x02\x02\x05\x12\x24\x26"; - diff --git a/tests/gen/referencetoatnlexer.rs b/tests/gen/referencetoatnlexer.rs index 81a058b..a1de215 100644 --- a/tests/gen/referencetoatnlexer.rs +++ b/tests/gen/referencetoatnlexer.rs @@ -2,196 +2,178 @@ #![allow(dead_code)] #![allow(nonstandard_style)] #![allow(unused_imports)] - -use std::cell::RefCell; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::rc::Rc; -use std::sync::Arc; - use antlr_rust::atn::ATN; use antlr_rust::atn_deserializer::ATNDeserializer; use antlr_rust::char_stream::CharStream; use antlr_rust::dfa::DFA; use antlr_rust::error_listener::ErrorListener; use antlr_rust::int_stream::IntStream; -use antlr_rust::lazy_static; use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog}; use antlr_rust::lexer_atn_simulator::{ILexerATNSimulator, LexerATNSimulator}; -use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, ParserRuleContext}; -use antlr_rust::PredictionContextCache; +use antlr_rust::parser_rule_context::{cast, BaseParserRuleContext, ParserRuleContext}; use antlr_rust::recognizer::{Actions, Recognizer}; use antlr_rust::rule_context::{BaseRuleContext, EmptyContext, EmptyCustomRuleContext}; use antlr_rust::token::*; use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use antlr_rust::token_source::TokenSource; use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl}; +use antlr_rust::PredictionContextCache; -pub const ID:isize=1; - pub const ATN:isize=2; - pub const WS:isize=3; - pub const channelNames: [&'static str;0+2] = [ - "DEFAULT_TOKEN_CHANNEL", "HIDDEN" - ]; - - pub const modeNames: [&'static str;1] = [ - "DEFAULT_MODE" - ]; - - pub const ruleNames: [&'static str;3] = [ - "ID", "ATN", "WS" - ]; - +use antlr_rust::lazy_static; - pub const _LITERAL_NAMES: [Option<&'static str>;0] = [ - ]; - pub const _SYMBOLIC_NAMES: [Option<&'static str>;4] = [ - None, Some("ID"), Some("ATN"), Some("WS") - ]; - lazy_static!{ - static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); - static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); - } +use std::cell::RefCell; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; +use std::rc::Rc; +use std::sync::Arc; +pub const ID: isize = 1; +pub const ATN: isize = 2; +pub const WS: isize = 3; +pub const 
channelNames: [&'static str; 0 + 2] = ["DEFAULT_TOKEN_CHANNEL", "HIDDEN"]; + +pub const modeNames: [&'static str; 1] = ["DEFAULT_MODE"]; + +pub const ruleNames: [&'static str; 3] = ["ID", "ATN", "WS"]; + +pub const _LITERAL_NAMES: [Option<&'static str>; 0] = []; +pub const _SYMBOLIC_NAMES: [Option<&'static str>; 4] = [None, Some("ID"), Some("ATN"), Some("WS")]; +lazy_static! { + static ref _shared_context_cache: Arc = + Arc::new(PredictionContextCache::new()); + static ref VOCABULARY: Box = Box::new(VocabularyImpl::new( + _LITERAL_NAMES.iter(), + _SYMBOLIC_NAMES.iter(), + None + )); +} -pub type LexerContext<'input> = BaseParserRuleContext<'input,EmptyCustomRuleContext<'input,LocalTokenFactory<'input> >>; +pub type LexerContext<'input> = + BaseParserRuleContext<'input, EmptyCustomRuleContext<'input, LocalTokenFactory<'input>>>; pub type LocalTokenFactory<'input> = antlr_rust::token_factory::OwningTokenFactory; -type From<'a> = as TokenFactory<'a> >::From; +type From<'a> = as TokenFactory<'a>>::From; -pub struct ReferenceToATNLexer<'input, Input:CharStream >> { - base: BaseLexer<'input,ReferenceToATNLexerActions,Input,LocalTokenFactory<'input>>, -// static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); } +pub struct ReferenceToATNLexer<'input, Input: CharStream>> { + base: BaseLexer<'input, ReferenceToATNLexerActions, Input, LocalTokenFactory<'input>>, + // static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); } } -impl<'input, Input:CharStream >> Deref for ReferenceToATNLexer<'input,Input>{ - type Target = BaseLexer<'input,ReferenceToATNLexerActions,Input,LocalTokenFactory<'input>>; +impl<'input, Input: CharStream>> Deref for ReferenceToATNLexer<'input, Input> { + type Target = BaseLexer<'input, ReferenceToATNLexerActions, Input, LocalTokenFactory<'input>>; - fn deref(&self) -> &Self::Target { - &self.base - } + fn deref(&self) -> &Self::Target { &self.base } } -impl<'input, Input:CharStream >> DerefMut for ReferenceToATNLexer<'input,Input>{ - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.base - } +impl<'input, Input: CharStream>> DerefMut for ReferenceToATNLexer<'input, Input> { + fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base } } - -impl<'input, Input:CharStream >> ReferenceToATNLexer<'input,Input>{ - fn get_rule_names(&self) -> &'static [&'static str] { - &ruleNames - } - fn get_literal_names(&self) -> &[Option<&str>] { - &_LITERAL_NAMES - } - - fn get_symbolic_names(&self) -> &[Option<&str>] { - &_SYMBOLIC_NAMES +impl<'input, Input: CharStream>> ReferenceToATNLexer<'input, Input> { + fn get_rule_names(&self) -> &'static [&'static str] { &ruleNames } + fn get_literal_names(&self) -> &[Option<&str>] { &_LITERAL_NAMES } + + fn get_symbolic_names(&self) -> &[Option<&str>] { &_SYMBOLIC_NAMES } + + fn get_grammar_file_name(&self) -> &'static str { "ReferenceToATNLexer.g4" } + + pub fn new_with_token_factory( + input: Box, + tf: &'input LocalTokenFactory<'input>, + ) -> Self { + antlr_rust::recognizer::check_version("0", "2"); + Self { + base: BaseLexer::new_base_lexer( + input, + LexerATNSimulator::new_lexer_atnsimulator( + _ATN.clone(), + _decision_to_DFA.clone(), + _shared_context_cache.clone(), + ), + ReferenceToATNLexerActions {}, + tf, + ), + } } +} - fn get_grammar_file_name(&self) -> &'static str { - "ReferenceToATNLexer.g4" +impl<'input, Input: CharStream>> ReferenceToATNLexer<'input, Input> +where + &'input LocalTokenFactory<'input>: Default, +{ + pub fn new(input: Box) -> Self { + 
ReferenceToATNLexer::new_with_token_factory( + input, + <&LocalTokenFactory<'input> as Default>::default(), + ) } - - pub fn new_with_token_factory(input: Box,tf: &'input LocalTokenFactory<'input>) -> Self { - antlr_rust::recognizer::check_version("0","2"); - Self { - base: BaseLexer::new_base_lexer( - input, - LexerATNSimulator::new_lexer_atnsimulator( - _ATN.clone(), - _decision_to_DFA.clone(), - _shared_context_cache.clone(), - ), - ReferenceToATNLexerActions{}, - tf - ) - } - } } -impl<'input, Input:CharStream >> ReferenceToATNLexer<'input,Input> where &'input LocalTokenFactory<'input>:Default{ - pub fn new(input: Box) -> Self{ - ReferenceToATNLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default()) - } -} +pub struct ReferenceToATNLexerActions {} -pub struct ReferenceToATNLexerActions { -} +impl ReferenceToATNLexerActions {} -impl ReferenceToATNLexerActions{ +impl<'input, Input: CharStream>> + Actions<'input, BaseLexer<'input, ReferenceToATNLexerActions, Input, LocalTokenFactory<'input>>> + for ReferenceToATNLexerActions +{ } -impl<'input, Input:CharStream >> Actions<'input,BaseLexer<'input,ReferenceToATNLexerActions,Input,LocalTokenFactory<'input>>> for ReferenceToATNLexerActions{ - } - - impl<'input, Input:CharStream >> ReferenceToATNLexer<'input,Input>{ +impl<'input, Input: CharStream>> ReferenceToATNLexer<'input, Input> {} +impl<'input, Input: CharStream>> + LexerRecog< + 'input, + BaseLexer<'input, ReferenceToATNLexerActions, Input, LocalTokenFactory<'input>>, + > for ReferenceToATNLexerActions +{ } - -impl<'input, Input:CharStream >> LexerRecog<'input,BaseLexer<'input,ReferenceToATNLexerActions,Input,LocalTokenFactory<'input>>> for ReferenceToATNLexerActions{ -} -impl<'input> TokenAware<'input> for ReferenceToATNLexerActions{ - type TF = LocalTokenFactory<'input>; +impl<'input> TokenAware<'input> for ReferenceToATNLexerActions { + type TF = LocalTokenFactory<'input>; } -impl<'input, Input:CharStream >> TokenAware<'input> for ReferenceToATNLexer<'input,Input>{ - type TF = LocalTokenFactory<'input>; +impl<'input, Input: CharStream>> TokenAware<'input> + for ReferenceToATNLexer<'input, Input> +{ + type TF = LocalTokenFactory<'input>; } -impl<'input, Input:CharStream >> TokenSource<'input> for ReferenceToATNLexer<'input,Input>{ +impl<'input, Input: CharStream>> TokenSource<'input> + for ReferenceToATNLexer<'input, Input> +{ + fn next_token(&mut self) -> >::Tok { self.base.next_token() } - fn next_token(&mut self) -> >::Tok { - self.base.next_token() - } + fn get_line(&self) -> isize { self.base.get_line() } - fn get_line(&self) -> isize { - self.base.get_line() - } + fn get_char_position_in_line(&self) -> isize { self.base.get_char_position_in_line() } - fn get_char_position_in_line(&self) -> isize { - self.base.get_char_position_in_line() - } + fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { self.base.get_input_stream() } - fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { - self.base.get_input_stream() - } + fn get_source_name(&self) -> String { self.base.get_source_name() } - fn get_source_name(&self) -> String { - self.base.get_source_name() - } - - fn get_token_factory(&self) -> &'input Self::TF { - self.base.get_token_factory() - } + fn get_token_factory(&self) -> &'input Self::TF { self.base.get_token_factory() } } +lazy_static! 
{
+    static ref _ATN: Arc<ATN> =
+        Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars()));
+    static ref _decision_to_DFA: Arc<Vec<DFA>> = {
+        let mut dfa = Vec::new();
+        let size = _ATN.decision_to_state.len();
+        for i in 0..size {
+            dfa.push(DFA::new(
+                _ATN.clone(),
+                _ATN.get_decision_state(i),
+                i as isize,
+            ))
+        }
+        Arc::new(dfa)
+    };
+}
 
-
-    lazy_static! {
-        static ref _ATN: Arc<ATN> =
-        Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars()));
-        static ref _decision_to_DFA: Arc<Vec<DFA>> = {
-            let mut dfa = Vec::new();
-            let size = _ATN.decision_to_state.len();
-            for i in 0..size {
-                dfa.push(DFA::new(
-                    _ATN.clone(),
-                    _ATN.get_decision_state(i),
-                    i as isize,
-                ))
-            }
-            Arc::new(dfa)
-        };
-    }
-
-
-
-    const _serializedATN:&'static str =
-        "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\
+const _serializedATN: &'static str =
+    "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\
     \x05\x17\x08\x01\x04\x02\x09\x02\x04\x03\x09\x03\x04\x04\x09\x04\x03\x02\
     \x06\x02\x0b\x0a\x02\x0d\x02\x0e\x02\x0c\x03\x03\x06\x03\x10\x0a\x03\x0d\
     \x03\x0e\x03\x11\x03\x04\x03\x04\x03\x04\x03\x04\x02\x02\x05\x03\x03\x05\
diff --git a/tests/gen/referencetoatnlistener.rs b/tests/gen/referencetoatnlistener.rs
index 6b27fed..e83ffee 100644
--- a/tests/gen/referencetoatnlistener.rs
+++ b/tests/gen/referencetoatnlistener.rs
@@ -1,24 +1,22 @@
 #![allow(nonstandard_style)]
-
-use std::any::Any;
-
-use antlr_rust::token_factory::CommonTokenFactory; // Generated from ReferenceToATN.g4 by ANTLR 4.8
-use antlr_rust::tree::ParseTreeListener;
-
 use super::referencetoatnparser::*;
+use antlr_rust::token_factory::CommonTokenFactory;
+use antlr_rust::tree::ParseTreeListener;
 
-pub trait ReferenceToATNListener<'input> : ParseTreeListener<'input,ReferenceToATNParserContextType>{
-
-/**
- * Enter a parse tree produced by {@link ReferenceToATNParser#a}.
- * @param ctx the parse tree
- */
-fn enter_a(&mut self, _ctx: &AContext<'input>) { }
-/**
- * Exit a parse tree produced by {@link ReferenceToATNParser#a}.
- * @param ctx the parse tree
- */
-fn exit_a(&mut self, _ctx: &AContext<'input>) { }
+use std::any::Any;
+pub trait ReferenceToATNListener<'input>:
+    ParseTreeListener<'input, ReferenceToATNParserContextType>
+{
+    /**
+     * Enter a parse tree produced by {@link ReferenceToATNParser#a}.
+     * @param ctx the parse tree
+     */
+    fn enter_a(&mut self, _ctx: &AContext<'input>) {}
+    /**
+     * Exit a parse tree produced by {@link ReferenceToATNParser#a}.
+ * @param ctx the parse tree + */ + fn exit_a(&mut self, _ctx: &AContext<'input>) {} } diff --git a/tests/gen/referencetoatnparser.rs b/tests/gen/referencetoatnparser.rs index 75b534a..8a9ba3b 100644 --- a/tests/gen/referencetoatnparser.rs +++ b/tests/gen/referencetoatnparser.rs @@ -5,16 +5,7 @@ #![allow(nonstandard_style)] #![allow(unused_imports)] #![allow(unused_mut)] - -use std::any::{Any, TypeId}; -use std::borrow::{Borrow, BorrowMut}; -use std::cell::RefCell; -use std::convert::TryFrom; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::rc::Rc; -use std::sync::Arc; - +use super::referencetoatnlistener::*; use antlr_rust::atn::{ATN, INVALID_ALT}; use antlr_rust::atn_deserializer::ATNDeserializer; use antlr_rust::dfa::DFA; @@ -24,265 +15,320 @@ use antlr_rust::int_stream::EOF; use antlr_rust::lazy_static; use antlr_rust::parser::{BaseParser, Parser, ParserNodeType, ParserRecog}; use antlr_rust::parser_atn_simulator::ParserATNSimulator; -use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, cast_mut, ParserRuleContext}; -use antlr_rust::PredictionContextCache; +use antlr_rust::parser_rule_context::{cast, cast_mut, BaseParserRuleContext, ParserRuleContext}; use antlr_rust::recognizer::{Actions, Recognizer}; use antlr_rust::rule_context::{BaseRuleContext, CustomRuleContext, RuleContext}; use antlr_rust::token::{OwningToken, Token, TOKEN_EOF}; use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use antlr_rust::token_source::TokenSource; use antlr_rust::token_stream::TokenStream; -use antlr_rust::tree::{ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode, Visitable}; +use antlr_rust::tree::{ + ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode, + Visitable, +}; use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl}; +use antlr_rust::PredictionContextCache; -use super::referencetoatnlistener::*; - -pub const ID:isize=1; - pub const ATN:isize=2; - pub const WS:isize=3; - pub const RULE_a:usize = 0; - pub const ruleNames: [&'static str; 1] = [ - "a" - ]; - +use std::any::{Any, TypeId}; +use std::borrow::{Borrow, BorrowMut}; +use std::cell::RefCell; +use std::convert::TryFrom; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; +use std::rc::Rc; +use std::sync::Arc; - pub const _LITERAL_NAMES: [Option<&'static str>;0] = [ - ]; - pub const _SYMBOLIC_NAMES: [Option<&'static str>;4] = [ - None, Some("ID"), Some("ATN"), Some("WS") - ]; - lazy_static!{ - static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); - static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); - } +pub const ID: isize = 1; +pub const ATN: isize = 2; +pub const WS: isize = 3; +pub const RULE_a: usize = 0; +pub const ruleNames: [&'static str; 1] = ["a"]; +pub const _LITERAL_NAMES: [Option<&'static str>; 0] = []; +pub const _SYMBOLIC_NAMES: [Option<&'static str>; 4] = [None, Some("ID"), Some("ATN"), Some("WS")]; +lazy_static! 
{ + static ref _shared_context_cache: Arc = + Arc::new(PredictionContextCache::new()); + static ref VOCABULARY: Box = Box::new(VocabularyImpl::new( + _LITERAL_NAMES.iter(), + _SYMBOLIC_NAMES.iter(), + None + )); +} -type BaseParserType<'input, I> = - BaseParser<'input,ReferenceToATNParserExt, I, ReferenceToATNParserContextType , dyn ReferenceToATNListener<'input> + 'static >; +type BaseParserType<'input, I> = BaseParser< + 'input, + ReferenceToATNParserExt, + I, + ReferenceToATNParserContextType, + dyn ReferenceToATNListener<'input> + 'static, +>; type TokenType<'input> = as TokenFactory<'input>>::Tok; pub type LocalTokenFactory<'input> = antlr_rust::token_factory::OwningTokenFactory; -pub type ReferenceToATNTreeWalker<'input,'a> = - ParseTreeWalker<'input, 'a, ReferenceToATNParserContextType , dyn ReferenceToATNListener<'input> + 'a>; - -pub struct ReferenceToATNParser<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> { - base:BaseParserType<'input,I>, - interpreter:Arc, - _shared_context_cache: Box, - pub err_handler: Box> + 'input>, +pub type ReferenceToATNTreeWalker<'input, 'a> = ParseTreeWalker< + 'input, + 'a, + ReferenceToATNParserContextType, + dyn ReferenceToATNListener<'input> + 'a, +>; + +pub struct ReferenceToATNParser<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> { + base: BaseParserType<'input, I>, + interpreter: Arc, + _shared_context_cache: Box, + pub err_handler: Box> + 'input>, } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> ReferenceToATNParser<'input,I> { - - pub fn get_serialized_atn() -> &'static str { unimplemented!() } +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + ReferenceToATNParser<'input, I> +{ + pub fn get_serialized_atn() -> &'static str { unimplemented!() } - pub fn set_error_strategy(&mut self, strategy: Box> >) { + pub fn set_error_strategy( + &mut self, + strategy: Box>>, + ) { self.err_handler = strategy } pub fn new(input: Box) -> Self { - antlr_rust::recognizer::check_version("0","2"); - let interpreter = Arc::new(ParserATNSimulator::new( - _ATN.clone(), - _decision_to_DFA.clone(), - _shared_context_cache.clone(), - )); - Self { - base: BaseParser::new_base_parser( - input, - Arc::clone(&interpreter), - ReferenceToATNParserExt{ - } - ), - interpreter, + antlr_rust::recognizer::check_version("0", "2"); + let interpreter = Arc::new(ParserATNSimulator::new( + _ATN.clone(), + _decision_to_DFA.clone(), + _shared_context_cache.clone(), + )); + Self { + base: BaseParser::new_base_parser( + input, + Arc::clone(&interpreter), + ReferenceToATNParserExt {}, + ), + interpreter, _shared_context_cache: Box::new(PredictionContextCache::new()), - err_handler: Box::new(DefaultErrorStrategy::<'input,ReferenceToATNParserContextType>::new()), + err_handler: Box::new(DefaultErrorStrategy::< + 'input, + ReferenceToATNParserContextType, + >::new()), } } } /// Trait for monomorphized trait object that corresponds to nodes of parse tree generated by ReferenceToATNParser -pub trait ReferenceToATNParserContext<'input>: - for<'x> Listenable + 'x > + - ParserRuleContext<'input, TF=LocalTokenFactory<'input>, Ctx=ReferenceToATNParserContextType> -{} +pub trait ReferenceToATNParserContext<'input>: for<'x> Listenable + 'x> + + ParserRuleContext<'input, TF = LocalTokenFactory<'input>, Ctx = ReferenceToATNParserContextType> +{ +} -impl<'input> ReferenceToATNParserContext<'input> for TerminalNode<'input,ReferenceToATNParserContextType> {} -impl<'input> ReferenceToATNParserContext<'input> for 
ErrorNode<'input,ReferenceToATNParserContextType> {} +impl<'input> ReferenceToATNParserContext<'input> + for TerminalNode<'input, ReferenceToATNParserContextType> +{ +} +impl<'input> ReferenceToATNParserContext<'input> + for ErrorNode<'input, ReferenceToATNParserContextType> +{ +} pub struct ReferenceToATNParserContextType; -impl<'input> ParserNodeType<'input> for ReferenceToATNParserContextType{ - type TF = LocalTokenFactory<'input>; - type Type = dyn ReferenceToATNParserContext<'input> + 'input; +impl<'input> ParserNodeType<'input> for ReferenceToATNParserContextType { + type TF = LocalTokenFactory<'input>; + type Type = dyn ReferenceToATNParserContext<'input> + 'input; } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Deref for ReferenceToATNParser<'input,I> { - type Target = BaseParserType<'input,I>; +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> Deref + for ReferenceToATNParser<'input, I> +{ + type Target = BaseParserType<'input, I>; - fn deref(&self) -> &Self::Target { - &self.base - } -} - -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > DerefMut for ReferenceToATNParser<'input,I> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.base - } + fn deref(&self) -> &Self::Target { &self.base } } -pub struct ReferenceToATNParserExt{ +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> DerefMut + for ReferenceToATNParser<'input, I> +{ + fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base } } -impl ReferenceToATNParserExt{ -} +pub struct ReferenceToATNParserExt {} +impl ReferenceToATNParserExt {} -impl<'input> TokenAware<'input> for ReferenceToATNParserExt{ - type TF = LocalTokenFactory<'input>; +impl<'input> TokenAware<'input> for ReferenceToATNParserExt { + type TF = LocalTokenFactory<'input>; } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > ParserRecog<'input, BaseParserType<'input,I>> for ReferenceToATNParserExt{} +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + ParserRecog<'input, BaseParserType<'input, I>> for ReferenceToATNParserExt +{ +} -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Actions<'input, BaseParserType<'input,I>> for ReferenceToATNParserExt{ - fn get_grammar_file_name(&self) -> & str{ "ReferenceToATN.g4"} +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + Actions<'input, BaseParserType<'input, I>> for ReferenceToATNParserExt +{ + fn get_grammar_file_name(&self) -> &str { "ReferenceToATN.g4" } - fn get_rule_names(&self) -> &[& str] {&ruleNames} + fn get_rule_names(&self) -> &[&str] { &ruleNames } - fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY } + fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY } } //------------------- a ---------------- pub type AContextAll<'input> = AContext<'input>; - -pub type AContext<'input> = BaseParserRuleContext<'input,AContextExt<'input>>; +pub type AContext<'input> = BaseParserRuleContext<'input, AContextExt<'input>>; #[derive(Clone)] -pub struct AContextExt<'input>{ -ph:PhantomData<&'input str> +pub struct AContextExt<'input> { + ph: PhantomData<&'input str>, } -impl<'input> ReferenceToATNParserContext<'input> for AContext<'input>{} +impl<'input> ReferenceToATNParserContext<'input> for AContext<'input> {} -impl<'input,'a> Listenable + 'a> for AContext<'input>{ - fn enter(&self,listener: &mut (dyn ReferenceToATNListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_a(self); - } -} - -impl<'input> 
CustomRuleContext<'input> for AContextExt<'input>{
-    type TF = LocalTokenFactory<'input>;
-    type Ctx = ReferenceToATNParserContextType;
-    fn get_rule_index(&self) -> usize { RULE_a }
-    //fn type_rule_index() -> usize where Self: Sized { RULE_a }
-}
-antlr_rust::type_id!{AContextExt}
-
-impl<'input> AContextExt<'input>{
-    fn new(parent: Option<Rc<dyn ReferenceToATNParserContext<'input> + 'input > >, invoking_state: isize) -> Rc<AContextAll<'input>> {
-        Rc::new(
-            BaseParserRuleContext::new_parser_ctx(parent, invoking_state,AContextExt{
-                ph:PhantomData
-            }),
-        )
-    }
-}
-
-pub trait AContextAttrs<'input>: ReferenceToATNParserContext<'input> + BorrowMut<AContextExt<'input>>{
-
-/// Retrieves all `TerminalNode`s corresponding to token ATN in the current rule
-fn ATN_all(&self) -> Vec<Rc<TerminalNode<'input,ReferenceToATNParserContextType>>> where Self:Sized{
-    self.children_of_type()
-}
-/// Retrieves the `i`-th `TerminalNode` corresponding to token ATN, starting from 0.
-/// Returns `None` if the number of children corresponding to token ATN is less than or equal to `i`.
-fn ATN(&self, i: usize) -> Option<Rc<TerminalNode<'input,ReferenceToATNParserContextType>>> where Self:Sized{
-    self.get_token(ATN, i)
-}
-/// Retrieves all `TerminalNode`s corresponding to token ID in the current rule
-fn ID_all(&self) -> Vec<Rc<TerminalNode<'input,ReferenceToATNParserContextType>>> where Self:Sized{
-    self.children_of_type()
-}
-/// Retrieves the `i`-th `TerminalNode` corresponding to token ID, starting from 0.
-/// Returns `None` if the number of children corresponding to token ID is less than or equal to `i`.
-fn ID(&self, i: usize) -> Option<Rc<TerminalNode<'input,ReferenceToATNParserContextType>>> where Self:Sized{
-    self.get_token(ID, i)
-}
-}
+impl<'input, 'a> Listenable<dyn ReferenceToATNListener<'input> + 'a> for AContext<'input> {
+    fn enter(&self, listener: &mut (dyn ReferenceToATNListener<'input> + 'a)) {
+        listener.enter_every_rule(self);
+        listener.enter_a(self);
+    }
+}
+
+impl<'input> CustomRuleContext<'input> for AContextExt<'input> {
+    type TF = LocalTokenFactory<'input>;
+    type Ctx = ReferenceToATNParserContextType;
+    fn get_rule_index(&self) -> usize { RULE_a }
+    //fn type_rule_index() -> usize where Self: Sized { RULE_a }
+}
+antlr_rust::type_id! {AContextExt}
+
+impl<'input> AContextExt<'input> {
+    fn new(
+        parent: Option<Rc<dyn ReferenceToATNParserContext<'input> + 'input>>,
+        invoking_state: isize,
+    ) -> Rc<AContextAll<'input>> {
+        Rc::new(BaseParserRuleContext::new_parser_ctx(
+            parent,
+            invoking_state,
+            AContextExt { ph: PhantomData },
+        ))
+    }
+}
+
+pub trait AContextAttrs<'input>:
+    ReferenceToATNParserContext<'input> + BorrowMut<AContextExt<'input>>
+{
+    /// Retrieves all `TerminalNode`s corresponding to token ATN in the current rule
+    fn ATN_all(&self) -> Vec<Rc<TerminalNode<'input, ReferenceToATNParserContextType>>>
+    where
+        Self: Sized,
+    {
+        self.children_of_type()
+    }
+    /// Retrieves the `i`-th `TerminalNode` corresponding to token ATN, starting from 0.
+    /// Returns `None` if the number of children corresponding to token ATN is less than or equal to `i`.
+    fn ATN(&self, i: usize) -> Option<Rc<TerminalNode<'input, ReferenceToATNParserContextType>>>
+    where
+        Self: Sized,
+    {
+        self.get_token(ATN, i)
+    }
+    /// Retrieves all `TerminalNode`s corresponding to token ID in the current rule
+    fn ID_all(&self) -> Vec<Rc<TerminalNode<'input, ReferenceToATNParserContextType>>>
+    where
+        Self: Sized,
+    {
+        self.children_of_type()
+    }
+    /// Retrieves the `i`-th `TerminalNode` corresponding to token ID, starting from 0.
+    /// Returns `None` if the number of children corresponding to token ID is less than or equal to `i`.
+ fn ID(&self, i: usize) -> Option>> + where + Self: Sized, + { + self.get_token(ID, i) + } } -impl<'input> AContextAttrs<'input> for AContext<'input>{} +impl<'input> AContextAttrs<'input> for AContext<'input> {} -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > ReferenceToATNParser<'input,I>{ - pub fn a(&mut self,) - -> Result>,ANTLRError> { - let mut recog = self; - let _parentctx = recog.ctx.take(); - let mut _localctx = AContextExt::new(_parentctx.clone(), recog.base.get_state()); +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + ReferenceToATNParser<'input, I> +{ + pub fn a(&mut self) -> Result>, ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = AContextExt::new(_parentctx.clone(), recog.base.get_state()); recog.base.enter_rule(_localctx.clone(), 0, RULE_a); let mut _localctx: Rc = _localctx; - let mut _la: isize; - let result: Result<(), ANTLRError> = try { - - let mut _alt: isize; - //recog.base.enter_outer_alt(_localctx.clone(), 1); - recog.base.enter_outer_alt(None, 1); - { - recog.base.set_state(5); - recog.err_handler.sync(&mut recog.base)?; - _alt = recog.interpreter.adaptive_predict(0,&mut recog.base)?; - while { _alt!=2 && _alt!=INVALID_ALT } { - if _alt==1 { - { - { - recog.base.set_state(2); - _la = recog.base.input.la(1); - if { !(_la==ID || _la==ATN) } { - recog.err_handler.recover_inline(&mut recog.base)?; - - } - else { - if recog.base.input.la(1)==TOKEN_EOF { recog.base.matched_eof = true }; - recog.err_handler.report_match(&mut recog.base); - recog.base.consume(recog.err_handler.as_mut()); - } - } - } - } - recog.base.set_state(7); - recog.err_handler.sync(&mut recog.base)?; - _alt = recog.interpreter.adaptive_predict(0,&mut recog.base)?; - } - recog.base.set_state(9); - recog.err_handler.sync(&mut recog.base)?; - _la = recog.base.input.la(1); - if _la==ATN { - { - recog.base.set_state(8); - recog.base.match_token(ATN,recog.err_handler.as_mut())?; - - } - } - - println!("{}",{let temp = recog.base.input.lt(-1).map(|it|it.get_token_index()).unwrap_or(-1); recog.input.get_text_from_interval(recog.get_parser_rule_context().start().get_token_index(), temp)}); - } - }; - match result { - Ok(_)=>{}, - Err(e @ ANTLRError::FallThrough(_)) => return Err(e), - Err(ref re) => { - //_localctx.exception = re; - recog.err_handler.report_error(&mut recog.base, re); - recog.err_handler.recover(&mut recog.base, re)?; - } - } - recog.base.exit_rule(); - - Ok(_localctx) - } + let mut _la: isize; + let result: Result<(), ANTLRError> = try { + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + recog.base.set_state(5); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(0, &mut recog.base)?; + while { _alt != 2 && _alt != INVALID_ALT } { + if _alt == 1 { + { + { + recog.base.set_state(2); + _la = recog.base.input.la(1); + if { !(_la == ID || _la == ATN) } { + recog.err_handler.recover_inline(&mut recog.base)?; + } else { + if recog.base.input.la(1) == TOKEN_EOF { + recog.base.matched_eof = true + }; + recog.err_handler.report_match(&mut recog.base); + recog.base.consume(recog.err_handler.as_mut()); + } + } + } + } + recog.base.set_state(7); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(0, &mut recog.base)?; + } + recog.base.set_state(9); + recog.err_handler.sync(&mut recog.base)?; + _la = recog.base.input.la(1); + if _la == ATN { + { + 
recog.base.set_state(8); + recog.base.match_token(ATN, recog.err_handler.as_mut())?; + } + } + + println!("{}", { + let temp = recog + .base + .input + .lt(-1) + .map(|it| it.get_token_index()) + .unwrap_or(-1); + recog.input.get_text_from_interval( + recog.get_parser_rule_context().start().get_token_index(), + temp, + ) + }); + } + }; + match result { + Ok(_) => {} + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } } lazy_static! { @@ -302,10 +348,8 @@ lazy_static! { }; } - - -const _serializedATN:&'static str = - "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\ +const _serializedATN: &'static str = + "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\ \x05\x10\x04\x02\x09\x02\x03\x02\x07\x02\x06\x0a\x02\x0c\x02\x0e\x02\x09\ \x0b\x02\x03\x02\x05\x02\x0c\x0a\x02\x03\x02\x03\x02\x03\x02\x02\x02\x03\ \x02\x02\x03\x03\x02\x03\x04\x02\x10\x02\x07\x03\x02\x02\x02\x04\x06\x09\ @@ -314,4 +358,3 @@ const _serializedATN:&'static str = \x02\x02\x02\x0a\x0c\x07\x04\x02\x02\x0b\x0a\x03\x02\x02\x02\x0b\x0c\x03\ \x02\x02\x02\x0c\x0d\x03\x02\x02\x02\x0d\x0e\x08\x02\x01\x02\x0e\x03\x03\ \x02\x02\x02\x04\x07\x0b"; - diff --git a/tests/gen/simplelrlexer.rs b/tests/gen/simplelrlexer.rs index 6f9ccaf..7bfa0c3 100644 --- a/tests/gen/simplelrlexer.rs +++ b/tests/gen/simplelrlexer.rs @@ -2,194 +2,170 @@ #![allow(dead_code)] #![allow(nonstandard_style)] #![allow(unused_imports)] - -use std::cell::RefCell; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::rc::Rc; -use std::sync::Arc; - use antlr_rust::atn::ATN; use antlr_rust::atn_deserializer::ATNDeserializer; use antlr_rust::char_stream::CharStream; use antlr_rust::dfa::DFA; use antlr_rust::error_listener::ErrorListener; use antlr_rust::int_stream::IntStream; -use antlr_rust::lazy_static; use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog}; use antlr_rust::lexer_atn_simulator::{ILexerATNSimulator, LexerATNSimulator}; -use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, ParserRuleContext}; -use antlr_rust::PredictionContextCache; +use antlr_rust::parser_rule_context::{cast, BaseParserRuleContext, ParserRuleContext}; use antlr_rust::recognizer::{Actions, Recognizer}; use antlr_rust::rule_context::{BaseRuleContext, EmptyContext, EmptyCustomRuleContext}; use antlr_rust::token::*; use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use antlr_rust::token_source::TokenSource; use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl}; +use antlr_rust::PredictionContextCache; -pub const ID:isize=1; - pub const WS:isize=2; - pub const channelNames: [&'static str;0+2] = [ - "DEFAULT_TOKEN_CHANNEL", "HIDDEN" - ]; +use antlr_rust::lazy_static; - pub const modeNames: [&'static str;1] = [ - "DEFAULT_MODE" - ]; +use std::cell::RefCell; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; +use std::rc::Rc; +use std::sync::Arc; - pub const ruleNames: [&'static str;2] = [ - "ID", "WS" - ]; +pub const ID: isize = 1; +pub const WS: isize = 2; +pub const channelNames: [&'static str; 0 + 2] = ["DEFAULT_TOKEN_CHANNEL", "HIDDEN"]; +pub const modeNames: [&'static str; 1] = ["DEFAULT_MODE"]; - pub const _LITERAL_NAMES: [Option<&'static str>;0] = [ - ]; - pub const _SYMBOLIC_NAMES: [Option<&'static str>;3] = [ - None, Some("ID"), Some("WS") - ]; - 
lazy_static!{
-        static ref _shared_context_cache: Arc<PredictionContextCache> = Arc::new(PredictionContextCache::new());
-        static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None));
-    }
+pub const ruleNames: [&'static str; 2] = ["ID", "WS"];
+pub const _LITERAL_NAMES: [Option<&'static str>; 0] = [];
+pub const _SYMBOLIC_NAMES: [Option<&'static str>; 3] = [None, Some("ID"), Some("WS")];
+lazy_static! {
+    static ref _shared_context_cache: Arc<PredictionContextCache> =
+        Arc::new(PredictionContextCache::new());
+    static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(
+        _LITERAL_NAMES.iter(),
+        _SYMBOLIC_NAMES.iter(),
+        None
+    ));
+}
 
-pub type LexerContext<'input> = BaseParserRuleContext<'input,EmptyCustomRuleContext<'input,LocalTokenFactory<'input> >>;
+pub type LexerContext<'input> =
+    BaseParserRuleContext<'input, EmptyCustomRuleContext<'input, LocalTokenFactory<'input>>>;
 pub type LocalTokenFactory<'input> = CommonTokenFactory;
 
-type From<'a> = <LocalTokenFactory<'a> as TokenFactory<'a> >::From;
+type From<'a> = <LocalTokenFactory<'a> as TokenFactory<'a>>::From;
 
-pub struct SimpleLRLexer<'input, Input:CharStream<From<'input> >> {
-    base: BaseLexer<'input,SimpleLRLexerActions,Input,LocalTokenFactory<'input>>,
-// static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
+pub struct SimpleLRLexer<'input, Input: CharStream<From<'input>>> {
+    base: BaseLexer<'input, SimpleLRLexerActions, Input, LocalTokenFactory<'input>>,
+    // static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
 }
 
-impl<'input, Input:CharStream<From<'input> >> Deref for SimpleLRLexer<'input,Input>{
-    type Target = BaseLexer<'input,SimpleLRLexerActions,Input,LocalTokenFactory<'input>>;
+impl<'input, Input: CharStream<From<'input>>> Deref for SimpleLRLexer<'input, Input> {
+    type Target = BaseLexer<'input, SimpleLRLexerActions, Input, LocalTokenFactory<'input>>;
 
-    fn deref(&self) -> &Self::Target {
-        &self.base
-    }
+    fn deref(&self) -> &Self::Target { &self.base }
 }
 
-impl<'input, Input:CharStream<From<'input> >> DerefMut for SimpleLRLexer<'input,Input>{
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.base
-    }
+impl<'input, Input: CharStream<From<'input>>> DerefMut for SimpleLRLexer<'input, Input> {
+    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base }
 }
-
-impl<'input, Input:CharStream<From<'input> >> SimpleLRLexer<'input,Input>{
-    fn get_rule_names(&self) -> &'static [&'static str] {
-        &ruleNames
-    }
-    fn get_literal_names(&self) -> &[Option<&str>] {
-        &_LITERAL_NAMES
-    }
-
-    fn get_symbolic_names(&self) -> &[Option<&str>] {
-        &_SYMBOLIC_NAMES
+impl<'input, Input: CharStream<From<'input>>> SimpleLRLexer<'input, Input> {
+    fn get_rule_names(&self) -> &'static [&'static str] { &ruleNames }
+    fn get_literal_names(&self) -> &[Option<&str>] { &_LITERAL_NAMES }
+
+    fn get_symbolic_names(&self) -> &[Option<&str>] { &_SYMBOLIC_NAMES }
+
+    fn get_grammar_file_name(&self) -> &'static str { "SimpleLRLexer.g4" }
+
+    pub fn new_with_token_factory(
+        input: Box<Input>,
+        tf: &'input LocalTokenFactory<'input>,
+    ) -> Self {
+        antlr_rust::recognizer::check_version("0", "2");
+        Self {
+            base: BaseLexer::new_base_lexer(
+                input,
+                LexerATNSimulator::new_lexer_atnsimulator(
+                    _ATN.clone(),
+                    _decision_to_DFA.clone(),
+                    _shared_context_cache.clone(),
+                ),
+                SimpleLRLexerActions {},
+                tf,
+            ),
+        }
     }
+}
 
-    fn get_grammar_file_name(&self) -> &'static str {
-        "SimpleLRLexer.g4"
+impl<'input, Input: CharStream<From<'input>>> SimpleLRLexer<'input, Input>
+where
+    &'input LocalTokenFactory<'input>: Default,
+{
+    pub fn new(input: Box<Input>) -> Self {
+        SimpleLRLexer::new_with_token_factory(
+            input,
+            <&LocalTokenFactory<'input> as Default>::default(),
+        )
    }
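
The `new`/`new_with_token_factory` pair generated above is the entry point user code is expected to call. A minimal usage sketch, assuming the `InputStream` type and the conventions of this crate's `tests/my_tests.rs` at runtime version 0.2 (the exact `InputStream::new` signature is an assumption, not something this diff confirms):

```rust
use antlr_rust::input_stream::InputStream;
use antlr_rust::token::Token;
use antlr_rust::token_source::TokenSource;

fn lex_example() {
    // Hypothetical driver: wrap a string in the runtime's InputStream and
    // pull the first token out of the generated lexer.
    let mut lexer = SimpleLRLexer::new(Box::new(InputStream::new("x y z".into())));
    let first = lexer.next_token();
    // `ID` is the token-type constant generated at the top of this file.
    assert_eq!(first.get_token_type(), ID);
}
```
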
- - pub fn new_with_token_factory(input: Box,tf: &'input LocalTokenFactory<'input>) -> Self { - antlr_rust::recognizer::check_version("0","2"); - Self { - base: BaseLexer::new_base_lexer( - input, - LexerATNSimulator::new_lexer_atnsimulator( - _ATN.clone(), - _decision_to_DFA.clone(), - _shared_context_cache.clone(), - ), - SimpleLRLexerActions{}, - tf - ) - } - } } -impl<'input, Input:CharStream >> SimpleLRLexer<'input,Input> where &'input LocalTokenFactory<'input>:Default{ - pub fn new(input: Box) -> Self{ - SimpleLRLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default()) - } -} +pub struct SimpleLRLexerActions {} -pub struct SimpleLRLexerActions { -} +impl SimpleLRLexerActions {} -impl SimpleLRLexerActions{ +impl<'input, Input: CharStream>> + Actions<'input, BaseLexer<'input, SimpleLRLexerActions, Input, LocalTokenFactory<'input>>> + for SimpleLRLexerActions +{ } -impl<'input, Input:CharStream >> Actions<'input,BaseLexer<'input,SimpleLRLexerActions,Input,LocalTokenFactory<'input>>> for SimpleLRLexerActions{ - } +impl<'input, Input: CharStream>> SimpleLRLexer<'input, Input> {} - impl<'input, Input:CharStream >> SimpleLRLexer<'input,Input>{ - -} - -impl<'input, Input:CharStream >> LexerRecog<'input,BaseLexer<'input,SimpleLRLexerActions,Input,LocalTokenFactory<'input>>> for SimpleLRLexerActions{ +impl<'input, Input: CharStream>> + LexerRecog<'input, BaseLexer<'input, SimpleLRLexerActions, Input, LocalTokenFactory<'input>>> + for SimpleLRLexerActions +{ } -impl<'input> TokenAware<'input> for SimpleLRLexerActions{ - type TF = LocalTokenFactory<'input>; +impl<'input> TokenAware<'input> for SimpleLRLexerActions { + type TF = LocalTokenFactory<'input>; } -impl<'input, Input:CharStream >> TokenAware<'input> for SimpleLRLexer<'input,Input>{ - type TF = LocalTokenFactory<'input>; +impl<'input, Input: CharStream>> TokenAware<'input> for SimpleLRLexer<'input, Input> { + type TF = LocalTokenFactory<'input>; } -impl<'input, Input:CharStream >> TokenSource<'input> for SimpleLRLexer<'input,Input>{ +impl<'input, Input: CharStream>> TokenSource<'input> for SimpleLRLexer<'input, Input> { + fn next_token(&mut self) -> >::Tok { self.base.next_token() } - fn next_token(&mut self) -> >::Tok { - self.base.next_token() - } + fn get_line(&self) -> isize { self.base.get_line() } - fn get_line(&self) -> isize { - self.base.get_line() - } + fn get_char_position_in_line(&self) -> isize { self.base.get_char_position_in_line() } - fn get_char_position_in_line(&self) -> isize { - self.base.get_char_position_in_line() - } - - fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { - self.base.get_input_stream() - } + fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { self.base.get_input_stream() } - fn get_source_name(&self) -> String { - self.base.get_source_name() - } + fn get_source_name(&self) -> String { self.base.get_source_name() } - fn get_token_factory(&self) -> &'input Self::TF { - self.base.get_token_factory() - } + fn get_token_factory(&self) -> &'input Self::TF { self.base.get_token_factory() } } +lazy_static! { + static ref _ATN: Arc = + Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars())); + static ref _decision_to_DFA: Arc> = { + let mut dfa = Vec::new(); + let size = _ATN.decision_to_state.len(); + for i in 0..size { + dfa.push(DFA::new( + _ATN.clone(), + _ATN.get_decision_state(i), + i as isize, + )) + } + Arc::new(dfa) + }; +} - - lazy_static! 
{ - static ref _ATN: Arc = - Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars())); - static ref _decision_to_DFA: Arc> = { - let mut dfa = Vec::new(); - let size = _ATN.decision_to_state.len(); - for i in 0..size { - dfa.push(DFA::new( - _ATN.clone(), - _ATN.get_decision_state(i), - i as isize, - )) - } - Arc::new(dfa) - }; - } - - - - const _serializedATN:&'static str = - "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\ +const _serializedATN: &'static str = + "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\ \x04\x10\x08\x01\x04\x02\x09\x02\x04\x03\x09\x03\x03\x02\x06\x02\x09\x0a\ \x02\x0d\x02\x0e\x02\x0a\x03\x03\x03\x03\x03\x03\x03\x03\x02\x02\x04\x03\ \x03\x05\x04\x03\x02\x03\x04\x02\x0c\x0c\x22\x22\x02\x10\x02\x03\x03\x02\ diff --git a/tests/gen/simplelrlistener.rs b/tests/gen/simplelrlistener.rs index 60655ad..ea4515f 100644 --- a/tests/gen/simplelrlistener.rs +++ b/tests/gen/simplelrlistener.rs @@ -1,35 +1,31 @@ #![allow(nonstandard_style)] - -use std::any::Any; - -use antlr_rust::token_factory::CommonTokenFactory; // Generated from SimpleLR.g4 by ANTLR 4.8 -use antlr_rust::tree::ParseTreeListener; - use super::simplelrparser::*; +use antlr_rust::token_factory::CommonTokenFactory; +use antlr_rust::tree::ParseTreeListener; -pub trait SimpleLRListener<'input> : ParseTreeListener<'input,SimpleLRParserContextType>{ - -/** - * Enter a parse tree produced by {@link SimpleLRParser#s}. - * @param ctx the parse tree - */ -fn enter_s(&mut self, _ctx: &SContext<'input>) { } -/** - * Exit a parse tree produced by {@link SimpleLRParser#s}. - * @param ctx the parse tree - */ -fn exit_s(&mut self, _ctx: &SContext<'input>) { } +use std::any::Any; -/** - * Enter a parse tree produced by {@link SimpleLRParser#a}. - * @param ctx the parse tree - */ -fn enter_a(&mut self, _ctx: &AContext<'input>) { } -/** - * Exit a parse tree produced by {@link SimpleLRParser#a}. - * @param ctx the parse tree - */ -fn exit_a(&mut self, _ctx: &AContext<'input>) { } +pub trait SimpleLRListener<'input>: ParseTreeListener<'input, SimpleLRParserContextType> { + /** + * Enter a parse tree produced by {@link SimpleLRParser#s}. + * @param ctx the parse tree + */ + fn enter_s(&mut self, _ctx: &SContext<'input>) {} + /** + * Exit a parse tree produced by {@link SimpleLRParser#s}. + * @param ctx the parse tree + */ + fn exit_s(&mut self, _ctx: &SContext<'input>) {} + /** + * Enter a parse tree produced by {@link SimpleLRParser#a}. + * @param ctx the parse tree + */ + fn enter_a(&mut self, _ctx: &AContext<'input>) {} + /** + * Exit a parse tree produced by {@link SimpleLRParser#a}. 
+ * @param ctx the parse tree + */ + fn exit_a(&mut self, _ctx: &AContext<'input>) {} } diff --git a/tests/gen/simplelrparser.rs b/tests/gen/simplelrparser.rs index c02019b..39ca457 100644 --- a/tests/gen/simplelrparser.rs +++ b/tests/gen/simplelrparser.rs @@ -5,16 +5,7 @@ #![allow(nonstandard_style)] #![allow(unused_imports)] #![allow(unused_mut)] - -use std::any::{Any, TypeId}; -use std::borrow::{Borrow, BorrowMut}; -use std::cell::RefCell; -use std::convert::TryFrom; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::rc::Rc; -use std::sync::Arc; - +use super::simplelrlistener::*; use antlr_rust::atn::{ATN, INVALID_ALT}; use antlr_rust::atn_deserializer::ATNDeserializer; use antlr_rust::dfa::DFA; @@ -24,361 +15,395 @@ use antlr_rust::int_stream::EOF; use antlr_rust::lazy_static; use antlr_rust::parser::{BaseParser, Parser, ParserNodeType, ParserRecog}; use antlr_rust::parser_atn_simulator::ParserATNSimulator; -use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, cast_mut, ParserRuleContext}; -use antlr_rust::PredictionContextCache; +use antlr_rust::parser_rule_context::{cast, cast_mut, BaseParserRuleContext, ParserRuleContext}; use antlr_rust::recognizer::{Actions, Recognizer}; use antlr_rust::rule_context::{BaseRuleContext, CustomRuleContext, RuleContext}; use antlr_rust::token::{OwningToken, Token, TOKEN_EOF}; use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use antlr_rust::token_source::TokenSource; use antlr_rust::token_stream::TokenStream; -use antlr_rust::tree::{ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode, Visitable}; +use antlr_rust::tree::{ + ErrorNode, LeafNode, Listenable, ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode, + Visitable, +}; use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl}; +use antlr_rust::PredictionContextCache; -use super::simplelrlistener::*; - -pub const ID:isize=1; - pub const WS:isize=2; - pub const RULE_s:usize = 0; - pub const RULE_a:usize = 1; - pub const ruleNames: [&'static str; 2] = [ - "s", "a" - ]; - +use std::any::{Any, TypeId}; +use std::borrow::{Borrow, BorrowMut}; +use std::cell::RefCell; +use std::convert::TryFrom; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; +use std::rc::Rc; +use std::sync::Arc; - pub const _LITERAL_NAMES: [Option<&'static str>;0] = [ - ]; - pub const _SYMBOLIC_NAMES: [Option<&'static str>;3] = [ - None, Some("ID"), Some("WS") - ]; - lazy_static!{ - static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); - static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); - } +pub const ID: isize = 1; +pub const WS: isize = 2; +pub const RULE_s: usize = 0; +pub const RULE_a: usize = 1; +pub const ruleNames: [&'static str; 2] = ["s", "a"]; +pub const _LITERAL_NAMES: [Option<&'static str>; 0] = []; +pub const _SYMBOLIC_NAMES: [Option<&'static str>; 3] = [None, Some("ID"), Some("WS")]; +lazy_static! 
{
+    static ref _shared_context_cache: Arc<PredictionContextCache> =
+        Arc::new(PredictionContextCache::new());
+    static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(
+        _LITERAL_NAMES.iter(),
+        _SYMBOLIC_NAMES.iter(),
+        None
+    ));
+}
 
-type BaseParserType<'input, I> =
-    BaseParser<'input,SimpleLRParserExt, I, SimpleLRParserContextType , dyn SimpleLRListener<'input> + 'static >;
+type BaseParserType<'input, I> = BaseParser<
+    'input,
+    SimpleLRParserExt,
+    I,
+    SimpleLRParserContextType,
+    dyn SimpleLRListener<'input> + 'static,
+>;
 
 type TokenType<'input> = <LocalTokenFactory<'input> as TokenFactory<'input>>::Tok;
 pub type LocalTokenFactory<'input> = CommonTokenFactory;
 
-pub type SimpleLRTreeWalker<'input,'a> =
-    ParseTreeWalker<'input, 'a, SimpleLRParserContextType , dyn SimpleLRListener<'input> + 'a>;
+pub type SimpleLRTreeWalker<'input, 'a> =
+    ParseTreeWalker<'input, 'a, SimpleLRParserContextType, dyn SimpleLRListener<'input> + 'a>;
 
-pub struct SimpleLRParser<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> {
-    base:BaseParserType<'input,I>,
-    interpreter:Arc<ParserATNSimulator>,
-    _shared_context_cache: Box<PredictionContextCache>,
-    pub err_handler: Box<dyn ErrorStrategy<'input,BaseParserType<'input,I>> + 'input>,
+pub struct SimpleLRParser<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> {
+    base: BaseParserType<'input, I>,
+    interpreter: Arc<ParserATNSimulator>,
+    _shared_context_cache: Box<PredictionContextCache>,
+    pub err_handler: Box<dyn ErrorStrategy<'input, BaseParserType<'input, I>> + 'input>,
 }
 
-impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> >> SimpleLRParser<'input,I> {
-
-    pub fn get_serialized_atn() -> &'static str { unimplemented!() }
+impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> SimpleLRParser<'input, I> {
+    pub fn get_serialized_atn() -> &'static str { unimplemented!() }
 
-    pub fn set_error_strategy(&mut self, strategy: Box<dyn ErrorStrategy<'input,BaseParserType<'input,I>> >) {
+    pub fn set_error_strategy(
+        &mut self,
+        strategy: Box<dyn ErrorStrategy<'input, BaseParserType<'input, I>>>,
+    ) {
         self.err_handler = strategy
     }
 
     pub fn new(input: Box<I>) -> Self {
-        antlr_rust::recognizer::check_version("0","2");
-        let interpreter = Arc::new(ParserATNSimulator::new(
-            _ATN.clone(),
-            _decision_to_DFA.clone(),
-            _shared_context_cache.clone(),
-        ));
-        Self {
-            base: BaseParser::new_base_parser(
-                input,
-                Arc::clone(&interpreter),
-                SimpleLRParserExt{
-                }
-            ),
-            interpreter,
+        antlr_rust::recognizer::check_version("0", "2");
+        let interpreter = Arc::new(ParserATNSimulator::new(
+            _ATN.clone(),
+            _decision_to_DFA.clone(),
+            _shared_context_cache.clone(),
+        ));
+        Self {
+            base: BaseParser::new_base_parser(
+                input,
+                Arc::clone(&interpreter),
+                SimpleLRParserExt {},
+            ),
+            interpreter,
             _shared_context_cache: Box::new(PredictionContextCache::new()),
-            err_handler: Box::new(DefaultErrorStrategy::<'input,SimpleLRParserContextType>::new()),
+            err_handler: Box::new(DefaultErrorStrategy::<'input, SimpleLRParserContextType>::new()),
         }
     }
 }
 
 /// Trait for monomorphized trait object that corresponds to nodes of parse tree generated by SimpleLRParser
 pub trait SimpleLRParserContext<'input>:
-    for<'x> Listenable<dyn SimpleLRListener<'input> + 'x > +
-    ParserRuleContext<'input, TF=LocalTokenFactory<'input>, Ctx=SimpleLRParserContextType>
-{}
+    for<'x> Listenable<dyn SimpleLRListener<'input> + 'x>
+    + ParserRuleContext<'input, TF = LocalTokenFactory<'input>, Ctx = SimpleLRParserContextType>
+{
+}
 
-impl<'input> SimpleLRParserContext<'input> for TerminalNode<'input,SimpleLRParserContextType> {}
-impl<'input> SimpleLRParserContext<'input> for ErrorNode<'input,SimpleLRParserContextType> {}
+impl<'input> SimpleLRParserContext<'input> for TerminalNode<'input, SimpleLRParserContextType> {}
+impl<'input> SimpleLRParserContext<'input> for ErrorNode<'input, SimpleLRParserContextType> {}
 
 pub struct SimpleLRParserContextType;
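
Together with the `SimpleLRListener` trait from the previous file, the declarations above are enough to drive a full parse. A sketch under the same assumptions (antlr-rust 0.2 API; `add_parse_listener` and the default `ParseTreeListener` method bodies are assumptions, not confirmed by this diff):

```rust
use antlr_rust::common_token_stream::CommonTokenStream;
use antlr_rust::input_stream::InputStream;
use antlr_rust::tree::ParseTreeListener;

struct PrintListener;

// ParseTreeListener's methods are assumed to have default bodies,
// so an empty impl is enough for this sketch.
impl<'input> ParseTreeListener<'input, SimpleLRParserContextType> for PrintListener {}

impl<'input> SimpleLRListener<'input> for PrintListener {
    fn enter_a(&mut self, _ctx: &AContext<'input>) {
        println!("entered recursive rule a");
    }
}

fn parse_example() {
    // Hypothetical driver: lexer -> token stream -> parser, then invoke
    // the start rule `s` with the listener attached.
    let lexer = SimpleLRLexer::new(Box::new(InputStream::new("x y z".into())));
    let token_stream = CommonTokenStream::new(lexer);
    let mut parser = SimpleLRParser::new(Box::new(token_stream));
    parser.add_parse_listener(Box::new(PrintListener));
    let _tree = parser.s().expect("parse failed");
}
```
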
-impl<'input> ParserNodeType<'input> for SimpleLRParserContextType{ - type TF = LocalTokenFactory<'input>; - type Type = dyn SimpleLRParserContext<'input> + 'input; +impl<'input> ParserNodeType<'input> for SimpleLRParserContextType { + type TF = LocalTokenFactory<'input>; + type Type = dyn SimpleLRParserContext<'input> + 'input; } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Deref for SimpleLRParser<'input,I> { - type Target = BaseParserType<'input,I>; - - fn deref(&self) -> &Self::Target { - &self.base - } -} +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> Deref + for SimpleLRParser<'input, I> +{ + type Target = BaseParserType<'input, I>; -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > DerefMut for SimpleLRParser<'input,I> { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.base - } + fn deref(&self) -> &Self::Target { &self.base } } -pub struct SimpleLRParserExt{ +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> DerefMut + for SimpleLRParser<'input, I> +{ + fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base } } -impl SimpleLRParserExt{ -} +pub struct SimpleLRParserExt {} +impl SimpleLRParserExt {} -impl<'input> TokenAware<'input> for SimpleLRParserExt{ - type TF = LocalTokenFactory<'input>; +impl<'input> TokenAware<'input> for SimpleLRParserExt { + type TF = LocalTokenFactory<'input>; } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > ParserRecog<'input, BaseParserType<'input,I>> for SimpleLRParserExt{} - -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > Actions<'input, BaseParserType<'input,I>> for SimpleLRParserExt{ - fn get_grammar_file_name(&self) -> & str{ "SimpleLR.g4"} - - fn get_rule_names(&self) -> &[& str] {&ruleNames} +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + ParserRecog<'input, BaseParserType<'input, I>> for SimpleLRParserExt +{ +} - fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY } - fn sempred(_localctx: &(dyn SimpleLRParserContext<'input> + 'input), rule_index: isize, pred_index: isize, - recog:&mut BaseParserType<'input,I> - )->bool{ - match rule_index { - 1 => SimpleLRParser::<'input,I>::a_sempred(cast::<_,AContext<'input> >(_localctx), pred_index, recog), - _ => true - } - } +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> + Actions<'input, BaseParserType<'input, I>> for SimpleLRParserExt +{ + fn get_grammar_file_name(&self) -> &str { "SimpleLR.g4" } + + fn get_rule_names(&self) -> &[&str] { &ruleNames } + + fn get_vocabulary(&self) -> &dyn Vocabulary { &**VOCABULARY } + fn sempred( + _localctx: &(dyn SimpleLRParserContext<'input> + 'input), + rule_index: isize, + pred_index: isize, + recog: &mut BaseParserType<'input, I>, + ) -> bool { + match rule_index { + 1 => SimpleLRParser::<'input, I>::a_sempred( + cast::<_, AContext<'input>>(_localctx), + pred_index, + recog, + ), + _ => true, + } + } } -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > SimpleLRParser<'input,I>{ - fn a_sempred(_localctx: &AContext<'input>, pred_index:isize, - recog:&mut ::Target - ) -> bool { - match pred_index { - 0=>{ - recog.precpred(None, 2) - } - _ => true - } - } +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> SimpleLRParser<'input, I> { + fn a_sempred( + _localctx: &AContext<'input>, + pred_index: isize, + recog: &mut ::Target, + ) -> bool { + match pred_index { + 0 => recog.precpred(None, 2), + _ => true, + } + } } //------------------- s 
---------------- pub type SContextAll<'input> = SContext<'input>; - -pub type SContext<'input> = BaseParserRuleContext<'input,SContextExt<'input>>; +pub type SContext<'input> = BaseParserRuleContext<'input, SContextExt<'input>>; #[derive(Clone)] -pub struct SContextExt<'input>{ -ph:PhantomData<&'input str> +pub struct SContextExt<'input> { + ph: PhantomData<&'input str>, } -impl<'input> SimpleLRParserContext<'input> for SContext<'input>{} +impl<'input> SimpleLRParserContext<'input> for SContext<'input> {} -impl<'input,'a> Listenable + 'a> for SContext<'input>{ - fn enter(&self,listener: &mut (dyn SimpleLRListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_s(self); - } +impl<'input, 'a> Listenable + 'a> for SContext<'input> { + fn enter(&self, listener: &mut (dyn SimpleLRListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_s(self); + } } -impl<'input> CustomRuleContext<'input> for SContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = SimpleLRParserContextType; - fn get_rule_index(&self) -> usize { RULE_s } - //fn type_rule_index() -> usize where Self: Sized { RULE_s } +impl<'input> CustomRuleContext<'input> for SContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = SimpleLRParserContextType; + fn get_rule_index(&self) -> usize { RULE_s } + //fn type_rule_index() -> usize where Self: Sized { RULE_s } } -antlr_rust::type_id!{SContextExt} - -impl<'input> SContextExt<'input>{ - fn new(parent: Option + 'input > >, invoking_state: isize) -> Rc> { - Rc::new( - BaseParserRuleContext::new_parser_ctx(parent, invoking_state,SContextExt{ - ph:PhantomData - }), - ) - } -} - -pub trait SContextAttrs<'input>: SimpleLRParserContext<'input> + BorrowMut>{ - -fn a(&self) -> Option>> where Self:Sized{ - self.child_of_type(0) +antlr_rust::type_id! 
{SContextExt} + +impl<'input> SContextExt<'input> { + fn new( + parent: Option + 'input>>, + invoking_state: isize, + ) -> Rc> { + Rc::new(BaseParserRuleContext::new_parser_ctx( + parent, + invoking_state, + SContextExt { ph: PhantomData }, + )) + } } +pub trait SContextAttrs<'input>: + SimpleLRParserContext<'input> + BorrowMut> +{ + fn a(&self) -> Option>> + where + Self: Sized, + { + self.child_of_type(0) + } } -impl<'input> SContextAttrs<'input> for SContext<'input>{} +impl<'input> SContextAttrs<'input> for SContext<'input> {} -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > SimpleLRParser<'input,I>{ - pub fn s(&mut self,) - -> Result>,ANTLRError> { - let mut recog = self; - let _parentctx = recog.ctx.take(); - let mut _localctx = SContextExt::new(_parentctx.clone(), recog.base.get_state()); +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> SimpleLRParser<'input, I> { + pub fn s(&mut self) -> Result>, ANTLRError> { + let mut recog = self; + let _parentctx = recog.ctx.take(); + let mut _localctx = SContextExt::new(_parentctx.clone(), recog.base.get_state()); recog.base.enter_rule(_localctx.clone(), 0, RULE_s); let mut _localctx: Rc = _localctx; - let result: Result<(), ANTLRError> = try { - - //recog.base.enter_outer_alt(_localctx.clone(), 1); - recog.base.enter_outer_alt(None, 1); - { - /*InvokeRule a*/ - recog.base.set_state(4); - recog.a_rec(0)?; - - } - let tmp = recog.input.lt(-1).cloned(); - recog.ctx.as_ref().unwrap().set_stop(tmp); - println!("test"); - }; - match result { - Ok(_)=>{}, - Err(e @ ANTLRError::FallThrough(_)) => return Err(e), - Err(ref re) => { - //_localctx.exception = re; - recog.err_handler.report_error(&mut recog.base, re); - recog.err_handler.recover(&mut recog.base, re)?; - } - } - recog.base.exit_rule(); - - Ok(_localctx) - } + let result: Result<(), ANTLRError> = try { + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + /*InvokeRule a*/ + recog.base.set_state(4); + recog.a_rec(0)?; + } + let tmp = recog.input.lt(-1).cloned(); + recog.ctx.as_ref().unwrap().set_stop(tmp); + println!("test"); + }; + match result { + Ok(_) => {} + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.exit_rule(); + + Ok(_localctx) + } } //------------------- a ---------------- pub type AContextAll<'input> = AContext<'input>; - -pub type AContext<'input> = BaseParserRuleContext<'input,AContextExt<'input>>; +pub type AContext<'input> = BaseParserRuleContext<'input, AContextExt<'input>>; #[derive(Clone)] -pub struct AContextExt<'input>{ -ph:PhantomData<&'input str> +pub struct AContextExt<'input> { + ph: PhantomData<&'input str>, } -impl<'input> SimpleLRParserContext<'input> for AContext<'input>{} - -impl<'input,'a> Listenable + 'a> for AContext<'input>{ - fn enter(&self,listener: &mut (dyn SimpleLRListener<'input> + 'a)) { - listener.enter_every_rule(self); - listener.enter_a(self); - } -} +impl<'input> SimpleLRParserContext<'input> for AContext<'input> {} -impl<'input> CustomRuleContext<'input> for AContextExt<'input>{ - type TF = LocalTokenFactory<'input>; - type Ctx = SimpleLRParserContextType; - fn get_rule_index(&self) -> usize { RULE_a } - //fn type_rule_index() -> usize where Self: Sized { RULE_a } -} -antlr_rust::type_id!{AContextExt} - -impl<'input> AContextExt<'input>{ - fn new(parent: Option + 'input > >, 
invoking_state: isize) -> Rc> { - Rc::new( - BaseParserRuleContext::new_parser_ctx(parent, invoking_state,AContextExt{ - ph:PhantomData - }), - ) - } +impl<'input, 'a> Listenable + 'a> for AContext<'input> { + fn enter(&self, listener: &mut (dyn SimpleLRListener<'input> + 'a)) { + listener.enter_every_rule(self); + listener.enter_a(self); + } } -pub trait AContextAttrs<'input>: SimpleLRParserContext<'input> + BorrowMut>{ - -/// Retrieves first TerminalNode corresponding to token ID -/// Returns `None` if there is no child corresponding to token ID -fn ID(&self) -> Option>> where Self:Sized{ - self.get_token(ID, 0) +impl<'input> CustomRuleContext<'input> for AContextExt<'input> { + type TF = LocalTokenFactory<'input>; + type Ctx = SimpleLRParserContextType; + fn get_rule_index(&self) -> usize { RULE_a } + //fn type_rule_index() -> usize where Self: Sized { RULE_a } } -fn a(&self) -> Option>> where Self:Sized{ - self.child_of_type(0) +antlr_rust::type_id! {AContextExt} + +impl<'input> AContextExt<'input> { + fn new( + parent: Option + 'input>>, + invoking_state: isize, + ) -> Rc> { + Rc::new(BaseParserRuleContext::new_parser_ctx( + parent, + invoking_state, + AContextExt { ph: PhantomData }, + )) + } } +pub trait AContextAttrs<'input>: + SimpleLRParserContext<'input> + BorrowMut> +{ + /// Retrieves first TerminalNode corresponding to token ID + /// Returns `None` if there is no child corresponding to token ID + fn ID(&self) -> Option>> + where + Self: Sized, + { + self.get_token(ID, 0) + } + fn a(&self) -> Option>> + where + Self: Sized, + { + self.child_of_type(0) + } } -impl<'input> AContextAttrs<'input> for AContext<'input>{} - -impl<'input,I: TokenStream<'input, TF=LocalTokenFactory<'input> > > SimpleLRParser<'input,I> { +impl<'input> AContextAttrs<'input> for AContext<'input> {} - pub fn a(&mut self,) - -> Result>,ANTLRError> { - self.a_rec(0) - } +impl<'input, I: TokenStream<'input, TF = LocalTokenFactory<'input>>> SimpleLRParser<'input, I> { + pub fn a(&mut self) -> Result>, ANTLRError> { self.a_rec(0) } - fn a_rec(&mut self, _p: isize) - -> Result>,ANTLRError> { - let recog = self; - let _parentctx = recog.ctx.take(); - let _parentState = recog.base.get_state(); - let mut _localctx = AContextExt::new(_parentctx.clone(), recog.base.get_state()); - recog.base.enter_recursion_rule(_localctx.clone(), 2, RULE_a, _p); - let mut _localctx: Rc = _localctx; + fn a_rec(&mut self, _p: isize) -> Result>, ANTLRError> { + let recog = self; + let _parentctx = recog.ctx.take(); + let _parentState = recog.base.get_state(); + let mut _localctx = AContextExt::new(_parentctx.clone(), recog.base.get_state()); + recog + .base + .enter_recursion_rule(_localctx.clone(), 2, RULE_a, _p); + let mut _localctx: Rc = _localctx; let mut _prevctx = _localctx.clone(); - let _startState = 2; - let result: Result<(), ANTLRError> = try { - let mut _alt: isize; - //recog.base.enter_outer_alt(_localctx.clone(), 1); - recog.base.enter_outer_alt(None, 1); - { - { - recog.base.set_state(7); - recog.base.match_token(ID,recog.err_handler.as_mut())?; - - } - let tmp = recog.input.lt(-1).cloned(); - recog.ctx.as_ref().unwrap().set_stop(tmp); - recog.base.set_state(13); - recog.err_handler.sync(&mut recog.base)?; - _alt = recog.interpreter.adaptive_predict(0,&mut recog.base)?; - while { _alt!=2 && _alt!=INVALID_ALT } { - if _alt==1 { - recog.trigger_exit_rule_event(); - _prevctx = _localctx.clone(); - { - { - /*recRuleAltStartAction*/ - let mut tmp = AContextExt::new(_parentctx.clone(), _parentState); - 
recog.push_new_recursion_context(tmp.clone(), _startState, RULE_a); - _localctx = tmp; - recog.base.set_state(9); - if !({recog.precpred(None, 2)}) { - Err(FailedPredicateError::new(&mut recog.base, Some("recog.precpred(None, 2)".to_owned()), None))?; - } - recog.base.set_state(10); - recog.base.match_token(ID,recog.err_handler.as_mut())?; - - } - } - } - recog.base.set_state(15); - recog.err_handler.sync(&mut recog.base)?; - _alt = recog.interpreter.adaptive_predict(0,&mut recog.base)?; - } - } - }; - match result { - Ok(_) => {}, - Err(e @ ANTLRError::FallThrough(_)) => return Err(e), - Err(ref re)=>{ - //_localctx.exception = re; - recog.err_handler.report_error(&mut recog.base, re); - recog.err_handler.recover(&mut recog.base, re)?;} - } - recog.base.unroll_recursion_context(_parentctx); - - Ok(_localctx) - } + let _startState = 2; + let result: Result<(), ANTLRError> = try { + let mut _alt: isize; + //recog.base.enter_outer_alt(_localctx.clone(), 1); + recog.base.enter_outer_alt(None, 1); + { + { + recog.base.set_state(7); + recog.base.match_token(ID, recog.err_handler.as_mut())?; + } + let tmp = recog.input.lt(-1).cloned(); + recog.ctx.as_ref().unwrap().set_stop(tmp); + recog.base.set_state(13); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(0, &mut recog.base)?; + while { _alt != 2 && _alt != INVALID_ALT } { + if _alt == 1 { + recog.trigger_exit_rule_event(); + _prevctx = _localctx.clone(); + { + { + /*recRuleAltStartAction*/ + let mut tmp = AContextExt::new(_parentctx.clone(), _parentState); + recog.push_new_recursion_context(tmp.clone(), _startState, RULE_a); + _localctx = tmp; + recog.base.set_state(9); + if !({ recog.precpred(None, 2) }) { + Err(FailedPredicateError::new( + &mut recog.base, + Some("recog.precpred(None, 2)".to_owned()), + None, + ))?; + } + recog.base.set_state(10); + recog.base.match_token(ID, recog.err_handler.as_mut())?; + } + } + } + recog.base.set_state(15); + recog.err_handler.sync(&mut recog.base)?; + _alt = recog.interpreter.adaptive_predict(0, &mut recog.base)?; + } + } + }; + match result { + Ok(_) => {} + Err(e @ ANTLRError::FallThrough(_)) => return Err(e), + Err(ref re) => { + //_localctx.exception = re; + recog.err_handler.report_error(&mut recog.base, re); + recog.err_handler.recover(&mut recog.base, re)?; + } + } + recog.base.unroll_recursion_context(_parentctx); + + Ok(_localctx) + } } lazy_static! { @@ -398,10 +423,8 @@ lazy_static! 
{ }; } - - -const _serializedATN:&'static str = - "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\ +const _serializedATN: &'static str = + "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x03\ \x04\x13\x04\x02\x09\x02\x04\x03\x09\x03\x03\x02\x03\x02\x03\x03\x03\x03\ \x03\x03\x03\x03\x03\x03\x07\x03\x0e\x0a\x03\x0c\x03\x0e\x03\x11\x0b\x03\ \x03\x03\x02\x03\x04\x04\x02\x04\x02\x02\x02\x11\x02\x06\x03\x02\x02\x02\ @@ -410,4 +433,3 @@ const _serializedATN:&'static str = \x0b\x0c\x0c\x04\x02\x02\x0c\x0e\x07\x03\x02\x02\x0d\x0b\x03\x02\x02\x02\ \x0e\x11\x03\x02\x02\x02\x0f\x0d\x03\x02\x02\x02\x0f\x10\x03\x02\x02\x02\ \x10\x05\x03\x02\x02\x02\x11\x0f\x03\x02\x02\x02\x03\x0f"; - diff --git a/tests/gen/xmllexer.rs b/tests/gen/xmllexer.rs index a0b7dad..bde525a 100644 --- a/tests/gen/xmllexer.rs +++ b/tests/gen/xmllexer.rs @@ -2,263 +2,300 @@ #![allow(dead_code)] #![allow(nonstandard_style)] #![allow(unused_imports)] - -use std::cell::RefCell; -use std::marker::PhantomData; -use std::ops::{Deref, DerefMut}; -use std::rc::Rc; -use std::sync::Arc; - use antlr_rust::atn::ATN; use antlr_rust::atn_deserializer::ATNDeserializer; use antlr_rust::char_stream::CharStream; use antlr_rust::dfa::DFA; use antlr_rust::error_listener::ErrorListener; use antlr_rust::int_stream::IntStream; -use antlr_rust::lazy_static; use antlr_rust::lexer::{BaseLexer, Lexer, LexerRecog}; use antlr_rust::lexer_atn_simulator::{ILexerATNSimulator, LexerATNSimulator}; -use antlr_rust::parser_rule_context::{BaseParserRuleContext, cast, ParserRuleContext}; -use antlr_rust::PredictionContextCache; +use antlr_rust::parser_rule_context::{cast, BaseParserRuleContext, ParserRuleContext}; use antlr_rust::recognizer::{Actions, Recognizer}; use antlr_rust::rule_context::{BaseRuleContext, EmptyContext, EmptyCustomRuleContext}; use antlr_rust::token::*; use antlr_rust::token_factory::{CommonTokenFactory, TokenAware, TokenFactory}; use antlr_rust::token_source::TokenSource; use antlr_rust::vocabulary::{Vocabulary, VocabularyImpl}; +use antlr_rust::PredictionContextCache; -pub const COMMENT:isize=1; - pub const CDATA:isize=2; - pub const DTD:isize=3; - pub const EntityRef:isize=4; - pub const CharRef:isize=5; - pub const SEA_WS:isize=6; - pub const OPEN:isize=7; - pub const XMLDeclOpen:isize=8; - pub const TEXT:isize=9; - pub const CLOSE:isize=10; - pub const SPECIAL_CLOSE:isize=11; - pub const SLASH_CLOSE:isize=12; - pub const SLASH:isize=13; - pub const EQUALS:isize=14; - pub const STRING:isize=15; - pub const Name:isize=16; - pub const S:isize=17; - pub const PI:isize=18; - pub const INSIDE: usize=1; - pub const PROC_INSTR: usize=2; - pub const channelNames: [&'static str;0+2] = [ - "DEFAULT_TOKEN_CHANNEL", "HIDDEN" - ]; - - pub const modeNames: [&'static str;3] = [ - "DEFAULT_MODE", "INSIDE", "PROC_INSTR" - ]; - - pub const ruleNames: [&'static str;24] = [ - "COMMENT", "CDATA", "DTD", "EntityRef", "CharRef", "SEA_WS", "OPEN", "XMLDeclOpen", - "SPECIAL_OPEN", "TEXT", "CLOSE", "SPECIAL_CLOSE", "SLASH_CLOSE", "SLASH", - "EQUALS", "STRING", "Name", "S", "HEXDIGIT", "DIGIT", "NameChar", "NameStartChar", - "PI", "IGNORE" - ]; - - - pub const _LITERAL_NAMES: [Option<&'static str>;15] = [ - None, None, None, None, None, None, None, Some("'<'"), None, None, Some("'>'"), - None, Some("'/>'"), Some("'/'"), Some("'='") - ]; - pub const _SYMBOLIC_NAMES: [Option<&'static str>;19] = [ - None, Some("COMMENT"), Some("CDATA"), Some("DTD"), Some("EntityRef"), - Some("CharRef"), Some("SEA_WS"), Some("OPEN"), 
Some("XMLDeclOpen"), Some("TEXT"), - Some("CLOSE"), Some("SPECIAL_CLOSE"), Some("SLASH_CLOSE"), Some("SLASH"), - Some("EQUALS"), Some("STRING"), Some("Name"), Some("S"), Some("PI") - ]; - lazy_static!{ - static ref _shared_context_cache: Arc = Arc::new(PredictionContextCache::new()); - static ref VOCABULARY: Box = Box::new(VocabularyImpl::new(_LITERAL_NAMES.iter(), _SYMBOLIC_NAMES.iter(), None)); - } - - -pub type LexerContext<'input> = BaseParserRuleContext<'input,EmptyCustomRuleContext<'input,LocalTokenFactory<'input> >>; -pub type LocalTokenFactory<'input> = CommonTokenFactory; +use antlr_rust::lazy_static; -type From<'a> = as TokenFactory<'a> >::From; +use std::cell::RefCell; +use std::marker::PhantomData; +use std::ops::{Deref, DerefMut}; +use std::rc::Rc; +use std::sync::Arc; -pub struct XMLLexer<'input, Input:CharStream >> { - base: BaseLexer<'input,XMLLexerActions,Input,LocalTokenFactory<'input>>, -// static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); } +pub const COMMENT: isize = 1; +pub const CDATA: isize = 2; +pub const DTD: isize = 3; +pub const EntityRef: isize = 4; +pub const CharRef: isize = 5; +pub const SEA_WS: isize = 6; +pub const OPEN: isize = 7; +pub const XMLDeclOpen: isize = 8; +pub const TEXT: isize = 9; +pub const CLOSE: isize = 10; +pub const SPECIAL_CLOSE: isize = 11; +pub const SLASH_CLOSE: isize = 12; +pub const SLASH: isize = 13; +pub const EQUALS: isize = 14; +pub const STRING: isize = 15; +pub const Name: isize = 16; +pub const S: isize = 17; +pub const PI: isize = 18; +pub const INSIDE: usize = 1; +pub const PROC_INSTR: usize = 2; +pub const channelNames: [&'static str; 0 + 2] = ["DEFAULT_TOKEN_CHANNEL", "HIDDEN"]; + +pub const modeNames: [&'static str; 3] = ["DEFAULT_MODE", "INSIDE", "PROC_INSTR"]; + +pub const ruleNames: [&'static str; 24] = [ + "COMMENT", + "CDATA", + "DTD", + "EntityRef", + "CharRef", + "SEA_WS", + "OPEN", + "XMLDeclOpen", + "SPECIAL_OPEN", + "TEXT", + "CLOSE", + "SPECIAL_CLOSE", + "SLASH_CLOSE", + "SLASH", + "EQUALS", + "STRING", + "Name", + "S", + "HEXDIGIT", + "DIGIT", + "NameChar", + "NameStartChar", + "PI", + "IGNORE", +]; + +pub const _LITERAL_NAMES: [Option<&'static str>; 15] = [ + None, + None, + None, + None, + None, + None, + None, + Some("'<'"), + None, + None, + Some("'>'"), + None, + Some("'/>'"), + Some("'/'"), + Some("'='"), +]; +pub const _SYMBOLIC_NAMES: [Option<&'static str>; 19] = [ + None, + Some("COMMENT"), + Some("CDATA"), + Some("DTD"), + Some("EntityRef"), + Some("CharRef"), + Some("SEA_WS"), + Some("OPEN"), + Some("XMLDeclOpen"), + Some("TEXT"), + Some("CLOSE"), + Some("SPECIAL_CLOSE"), + Some("SLASH_CLOSE"), + Some("SLASH"), + Some("EQUALS"), + Some("STRING"), + Some("Name"), + Some("S"), + Some("PI"), +]; +lazy_static! 
{
+    static ref _shared_context_cache: Arc<PredictionContextCache> =
+        Arc::new(PredictionContextCache::new());
+    static ref VOCABULARY: Box<dyn Vocabulary> = Box::new(VocabularyImpl::new(
+        _LITERAL_NAMES.iter(),
+        _SYMBOLIC_NAMES.iter(),
+        None
+    ));
 }

-impl<'input, Input:CharStream<From<'input> >> Deref for XMLLexer<'input,Input>{
-	type Target = BaseLexer<'input,XMLLexerActions,Input,LocalTokenFactory<'input>>;
+pub type LexerContext<'input> =
+    BaseParserRuleContext<'input, EmptyCustomRuleContext<'input, LocalTokenFactory<'input>>>;
+pub type LocalTokenFactory<'input> = CommonTokenFactory;

-	fn deref(&self) -> &Self::Target {
-		&self.base
-	}
-}
+type From<'a> = <LocalTokenFactory<'a> as TokenFactory<'a>>::From;

-impl<'input, Input:CharStream<From<'input> >> DerefMut for XMLLexer<'input,Input>{
-	fn deref_mut(&mut self) -> &mut Self::Target {
-		&mut self.base
-	}
+pub struct XMLLexer<'input, Input: CharStream<From<'input>>> {
+    base: BaseLexer<'input, XMLLexerActions, Input, LocalTokenFactory<'input>>,
+    // static { RuntimeMetaData.checkVersion("4.8", RuntimeMetaData.VERSION); }
 }

+impl<'input, Input: CharStream<From<'input>>> Deref for XMLLexer<'input, Input> {
+    type Target = BaseLexer<'input, XMLLexerActions, Input, LocalTokenFactory<'input>>;

-impl<'input, Input:CharStream<From<'input> >> XMLLexer<'input,Input>{
-	fn get_rule_names(&self) -> &'static [&'static str] {
-		&ruleNames
-	}
-	fn get_literal_names(&self) -> &[Option<&str>] {
-		&_LITERAL_NAMES
-	}
-
-	fn get_symbolic_names(&self) -> &[Option<&str>] {
-		&_SYMBOLIC_NAMES
-	}
-
-	fn get_grammar_file_name(&self) -> &'static str {
-		"XMLLexer.g4"
-	}
-
-	pub fn new_with_token_factory(input: Box<Input>,tf: &'input LocalTokenFactory<'input>) -> Self {
-		antlr_rust::recognizer::check_version("0","2");
-		Self {
-			base: BaseLexer::new_base_lexer(
-				input,
-				LexerATNSimulator::new_lexer_atnsimulator(
-					_ATN.clone(),
-					_decision_to_DFA.clone(),
-					_shared_context_cache.clone(),
-				),
-				XMLLexerActions{},
-				tf
-			)
-		}
-	}
+    fn deref(&self) -> &Self::Target { &self.base }
 }

-impl<'input, Input:CharStream<From<'input> >> XMLLexer<'input,Input> where &'input LocalTokenFactory<'input>:Default{
-	pub fn new(input: Box<Input>) -> Self{
-		XMLLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default())
-	}
+impl<'input, Input: CharStream<From<'input>>> DerefMut for XMLLexer<'input, Input> {
+    fn deref_mut(&mut self) -> &mut Self::Target { &mut self.base }
 }

-pub struct XMLLexerActions {
+impl<'input, Input: CharStream<From<'input>>> XMLLexer<'input, Input> {
+    fn get_rule_names(&self) -> &'static [&'static str] { &ruleNames }
+    fn get_literal_names(&self) -> &[Option<&str>] { &_LITERAL_NAMES }
+
+    fn get_symbolic_names(&self) -> &[Option<&str>] { &_SYMBOLIC_NAMES }
+
+    fn get_grammar_file_name(&self) -> &'static str { "XMLLexer.g4" }
+
+    pub fn new_with_token_factory(
+        input: Box<Input>,
+        tf: &'input LocalTokenFactory<'input>,
+    ) -> Self {
+        antlr_rust::recognizer::check_version("0", "2");
+        Self {
+            base: BaseLexer::new_base_lexer(
+                input,
+                LexerATNSimulator::new_lexer_atnsimulator(
+                    _ATN.clone(),
+                    _decision_to_DFA.clone(),
+                    _shared_context_cache.clone(),
+                ),
+                XMLLexerActions {},
+                tf,
+            ),
+        }
+    }
 }

-impl XMLLexerActions{
+impl<'input, Input: CharStream<From<'input>>> XMLLexer<'input, Input>
+where
+    &'input LocalTokenFactory<'input>: Default,
+{
+    pub fn new(input: Box<Input>) -> Self {
+        XMLLexer::new_with_token_factory(input, <&LocalTokenFactory<'input> as Default>::default())
+    }
 }

-impl<'input, Input:CharStream<From<'input> >> Actions<'input,BaseLexer<'input,XMLLexerActions,Input,LocalTokenFactory<'input>>> for XMLLexerActions{
-
-	fn action(_localctx: &EmptyContext<'input,LocalTokenFactory<'input>>, rule_index: isize,
action_index: isize,
-		recog:&mut BaseLexer<'input,XMLLexerActions,Input,LocalTokenFactory<'input>>
-	){
-		match rule_index {
-			10 =>
-				XMLLexer::<'input>::CLOSE_action(cast::<_,LexerContext<'input> >(_localctx), action_index, recog),
-			_ => {}
-		}
-	}
-	fn sempred(_localctx: &EmptyContext<'input,LocalTokenFactory<'input>>, rule_index: isize, pred_index: isize,
-		recog:&mut BaseLexer<'input,XMLLexerActions,Input,LocalTokenFactory<'input>>
-	) -> bool {
-		match rule_index {
-			0 =>
-				XMLLexer::<'input>::COMMENT_sempred(cast::<_,LexerContext<'input> >(_localctx), pred_index, recog),
-			_ => true
-		}
-	}
-
-}
-
-impl<'input, Input:CharStream<From<'input> >> XMLLexer<'input,Input>{
-
-	fn CLOSE_action(_localctx: &LexerContext<'input>, action_index: isize,
-		recog:&mut <Self as Deref>::Target
-	) {
-		match action_index {
-			0=>{
-				recog.pop_mode();
-			},
-
-			_ => {}
-		}
-	}
-	fn COMMENT_sempred(_localctx: &LexerContext<'input>, pred_index:isize,
-		recog:&mut <Self as Deref>::Target
-	) -> bool {
-		match pred_index {
-			0=>{
-				true
-			}
-			_ => true
-		}
-	}
-
+pub struct XMLLexerActions {}
+
+impl XMLLexerActions {}
+
+impl<'input, Input: CharStream<From<'input>>>
+    Actions<'input, BaseLexer<'input, XMLLexerActions, Input, LocalTokenFactory<'input>>>
+    for XMLLexerActions
+{
+    fn action(
+        _localctx: &EmptyContext<'input, LocalTokenFactory<'input>>,
+        rule_index: isize,
+        action_index: isize,
+        recog: &mut BaseLexer<'input, XMLLexerActions, Input, LocalTokenFactory<'input>>,
+    ) {
+        match rule_index {
+            10 => XMLLexer::<'input>::CLOSE_action(
+                cast::<_, LexerContext<'input>>(_localctx),
+                action_index,
+                recog,
+            ),
+            _ => {}
+        }
+    }
+    fn sempred(
+        _localctx: &EmptyContext<'input, LocalTokenFactory<'input>>,
+        rule_index: isize,
+        pred_index: isize,
+        recog: &mut BaseLexer<'input, XMLLexerActions, Input, LocalTokenFactory<'input>>,
+    ) -> bool {
+        match rule_index {
+            0 => XMLLexer::<'input>::COMMENT_sempred(
+                cast::<_, LexerContext<'input>>(_localctx),
+                pred_index,
+                recog,
+            ),
+            _ => true,
+        }
+    }
+}
+impl<'input, Input: CharStream<From<'input>>> XMLLexer<'input, Input> {
+    fn CLOSE_action(
+        _localctx: &LexerContext<'input>,
+        action_index: isize,
+        recog: &mut <Self as Deref>::Target,
+    ) {
+        match action_index {
+            0 => {
+                recog.pop_mode();
+            }
+
+            _ => {}
+        }
+    }
+    fn COMMENT_sempred(
+        _localctx: &LexerContext<'input>,
+        pred_index: isize,
+        recog: &mut <Self as Deref>::Target,
+    ) -> bool {
+        match pred_index {
+            0 => true,
+            _ => true,
+        }
+    }
 }

-impl<'input, Input:CharStream<From<'input> >> LexerRecog<'input,BaseLexer<'input,XMLLexerActions,Input,LocalTokenFactory<'input>>> for XMLLexerActions{
+impl<'input, Input: CharStream<From<'input>>>
+    LexerRecog<'input, BaseLexer<'input, XMLLexerActions, Input, LocalTokenFactory<'input>>>
+    for XMLLexerActions
+{
 }

-impl<'input> TokenAware<'input> for XMLLexerActions{
-	type TF = LocalTokenFactory<'input>;
+impl<'input> TokenAware<'input> for XMLLexerActions {
+    type TF = LocalTokenFactory<'input>;
 }

-impl<'input, Input:CharStream<From<'input> >> TokenAware<'input> for XMLLexer<'input,Input>{
-	type TF = LocalTokenFactory<'input>;
+impl<'input, Input: CharStream<From<'input>>> TokenAware<'input> for XMLLexer<'input, Input> {
+    type TF = LocalTokenFactory<'input>;
 }

-impl<'input, Input:CharStream<From<'input> >> TokenSource<'input> for XMLLexer<'input,Input>{
+impl<'input, Input: CharStream<From<'input>>> TokenSource<'input> for XMLLexer<'input, Input> {
+    fn next_token(&mut self) -> <Self::TF as TokenFactory<'input>>::Tok { self.base.next_token() }

-	fn next_token(&mut self) -> <Self::TF as TokenFactory<'input>>::Tok {
-		self.base.next_token()
-	}
-
-	fn get_line(&self) -> isize {
-		self.base.get_line()
-	}
+    fn get_line(&self) -> isize { self.base.get_line() }

-	fn 
get_char_position_in_line(&self) -> isize {
-		self.base.get_char_position_in_line()
-	}
+    fn get_char_position_in_line(&self) -> isize { self.base.get_char_position_in_line() }

-	fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> {
-		self.base.get_input_stream()
-	}
+    fn get_input_stream(&mut self) -> Option<&mut dyn IntStream> { self.base.get_input_stream() }

-	fn get_source_name(&self) -> String {
-		self.base.get_source_name()
-	}
+    fn get_source_name(&self) -> String { self.base.get_source_name() }

-	fn get_token_factory(&self) -> &'input Self::TF {
-		self.base.get_token_factory()
-	}
+    fn get_token_factory(&self) -> &'input Self::TF { self.base.get_token_factory() }
 }

+lazy_static! {
+    static ref _ATN: Arc<ATN> =
+        Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars()));
+    static ref _decision_to_DFA: Arc<Vec<DFA>> = {
+        let mut dfa = Vec::new();
+        let size = _ATN.decision_to_state.len();
+        for i in 0..size {
+            dfa.push(DFA::new(
+                _ATN.clone(),
+                _ATN.get_decision_state(i),
+                i as isize,
+            ))
+        }
+        Arc::new(dfa)
+    };
+}

-
-
-	lazy_static! {
-	    static ref _ATN: Arc<ATN> =
-	        Arc::new(ATNDeserializer::new(None).deserialize(_serializedATN.chars()));
-	    static ref _decision_to_DFA: Arc<Vec<DFA>> = {
-	        let mut dfa = Vec::new();
-	        let size = _ATN.decision_to_state.len();
-	        for i in 0..size {
-	            dfa.push(DFA::new(
-	                _ATN.clone(),
-	                _ATN.get_decision_state(i),
-	                i as isize,
-	            ))
-	        }
-	        Arc::new(dfa)
-	    };
-	}
-
-
-
-	const _serializedATN:&'static str =
-		"\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\
+const _serializedATN: &'static str =
+    "\x03\u{608b}\u{a72a}\u{8133}\u{b9ed}\u{417c}\u{3be7}\u{7786}\u{5964}\x02\
 \x14\u{e8}\x08\x01\x08\x01\x08\x01\x04\x02\x09\x02\x04\x03\x09\x03\x04\
 \x04\x09\x04\x04\x05\x09\x05\x04\x06\x09\x06\x04\x07\x09\x07\x04\x08\x09\
 \x08\x04\x09\x09\x09\x04\x0a\x09\x0a\x04\x0b\x09\x0b\x04\x0c\x09\x0c\x04\
diff --git a/tests/my_test.rs b/tests/my_test.rs
index fcb3829..04a6c45 100644
--- a/tests/my_test.rs
+++ b/tests/my_test.rs
@@ -11,7 +11,6 @@ mod gen {
     use std::io::Read;
     use std::iter::FromIterator;

-    use antlr_rust::InputStream;
     use antlr_rust::common_token_stream::CommonTokenStream;
     use antlr_rust::int_stream::IntStream;
     use antlr_rust::lexer::Lexer;
@@ -20,6 +19,7 @@ mod gen {
     use antlr_rust::token_factory::{ArenaCommonFactory, CommonTokenFactory, OwningTokenFactory};
     use antlr_rust::token_stream::{TokenStream, UnbufferedTokenStream};
     use antlr_rust::tree::{ParseTree, ParseTreeListener, ParseTreeWalker, TerminalNode};
+    use antlr_rust::InputStream;
     use csvlexer::*;
     use csvlistener::*;
     use csvparser::CSVParser;
@@ -88,7 +88,7 @@ if (x < x && a > 0) then duh
                         data.chars().skip(token.get_start() as usize).take(len)
                     )
                 )
-                    .chars(),
+                .chars(),
             );
         }
         token_source.consume();
@@ -270,7 +270,6 @@ if (x < x && a > 0) then duh
         assert_eq!(result.to_string_tree(&*parser), "(a (a (a x) y) z)");
     }

-
     struct Listener4 {
         data: String,
     }
@@ -328,7 +327,7 @@ if (x < x && a > 0) then duh

     #[test]
     fn test_complex_convert() {
-        let codepoints = "(a+4)*2".chars().map(|x|x as u32).collect::<Vec<_>>();
+        let codepoints = "(a+4)*2".chars().map(|x| x as u32).collect::<Vec<_>>();
         // let codepoints = "(a+4)*2";
         let input = InputStream::new(&*codepoints);
         let mut lexer = LabelsLexer::new(Box::new(input));
diff --git a/tests/perf.rs b/tests/perf.rs
index 7a6999f..7ef37e2 100644
--- a/tests/perf.rs
+++ b/tests/perf.rs
@@ -11,7 +11,7 @@ mod gen {
     use antlr_rust::common_token_stream::CommonTokenStream;
     use antlr_rust::InputStream;

-// use crate::gen::perflexer::PerfLexer;
+    // use 
crate::gen::perflexer::PerfLexer; // use crate::gen::perfparser::PerfParser; // mod perflexer; // mod perfparser;
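
For reference, a minimal usage sketch of the regenerated `XMLLexer`, driven through its `TokenSource` impl as reformatted above. This is not part of the patch: it assumes the `antlr_rust::InputStream` re-export and the `EOF` constant from `antlr_rust::int_stream` seen elsewhere in this change, and a hypothetical `gen::xmllexer` module layout mirroring `tests/gen`; the XML literal and printed fields are illustrative only.

```rust
// Minimal sketch; the module path `gen::xmllexer` and the input literal are assumptions.
use antlr_rust::int_stream::EOF;
use antlr_rust::token::Token;
use antlr_rust::token_source::TokenSource;
use antlr_rust::InputStream;

use crate::gen::xmllexer::XMLLexer; // the generated lexer reformatted in this diff

fn dump_tokens() {
    // String-based InputStream, as used in tests/my_test.rs.
    let input = InputStream::new("<root attr='1'>text</root>");
    // `new` is available because `&CommonTokenFactory: Default`.
    let mut lexer = XMLLexer::new(Box::new(input));
    loop {
        let token = lexer.next_token();
        if token.get_token_type() == EOF {
            break;
        }
        // Getters come from the Token trait; start/stop are offsets into the input.
        println!(
            "type={} span={}..{}",
            token.get_token_type(),
            token.get_start(),
            token.get_stop()
        );
    }
}
```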