Commit

some cleanup
rrevenantt committed Feb 10, 2020
1 parent 0da7b52 commit ca4149e
Showing 8 changed files with 44 additions and 40 deletions.
src/atn.rs (2 changes: 1 addition & 1 deletion)
@@ -114,7 +114,7 @@ impl ATN {
following = self.next_tokens(self.states[tr.follow_state].as_ref());
expected.add_set(following);
expected.remove_one(TOKEN_EPSILON);
- ctx = c.peek_parent();
+ ctx = c.get_parent_ctx();
}

if following.contains(TOKEN_EPSILON) {
src/error_strategy.rs (2 changes: 1 addition & 1 deletion)
@@ -206,7 +206,7 @@ impl DefaultErrorStrategy {
let tr = tr.cast::<RuleTransition>();
let follow = atn.next_tokens(atn.states[tr.follow_state].as_ref());
recover_set.add_set(follow);
- ctx = c.peek_parent();
+ ctx = c.get_parent_ctx();
}
recover_set.remove_one(TOKEN_EPSILON);
return recover_set;
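
Note: both hunks above apply the same rename inside the loop that accumulates expected/recovery token sets; the walk up the rule-invocation chain now goes through get_parent_ctx. A rough sketch of that shared pattern in Rust (the loop-exit test and the single-transition indexing are assumptions, not code from this commit):

    // Sketch of the shared follow-set walk; `expected` stands for an ANTLR-style
    // token set with the add_set/remove_one methods used in the hunks above.
    let mut ctx = Some(start_ctx); // start_ctx: Rc<dyn ParserRuleContext>, assumed given
    while let Some(c) = ctx {
        if c.get_invoking_state() < 0 { break; } // assumed termination check at the root
        let invoking = atn.states[c.get_invoking_state() as usize].as_ref();
        let tr = invoking.get_transitions()[0].cast::<RuleTransition>(); // assumes one transition
        let follow = atn.next_tokens(atn.states[tr.follow_state].as_ref());
        expected.add_set(follow);
        ctx = c.get_parent_ctx(); // previously c.peek_parent()
    }
    expected.remove_one(TOKEN_EPSILON);
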
src/input_stream.rs (5 changes: 0 additions & 5 deletions)
@@ -14,7 +14,6 @@ pub struct InputStream {
name: String,
index: isize,
data: Vec<isize>,
- // size: isize,
}

impl InputStream {
@@ -24,7 +23,6 @@ impl InputStream {
name: "<empty>".to_string(),
index: 0,
data,
- // size: len,
}
}

@@ -80,11 +78,8 @@ impl IntStream for InputStream {
}

if (self.index + offset - 1) >= self.size() {
- //System.out.println("char LA("+i+")=EOF; p="+p);
return crate::int_stream::EOF;
}
- //System.out.println("char LA("+i+")="+(char)data[p+i-1]+"; p="+p);
- //System.out.println("LA("+i+"); p="+p+" n="+n+" data.length="+data.length);
return self.data[(self.index + offset - 1) as usize] as isize;
}

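
The lines removed here were leftover System.out.println debug comments from the Java port; the lookahead logic itself is unchanged. For reference, a standalone mini-version of the positive-offset branch shown above (not the crate's InputStream; the EOF value of -1 is an assumption):

    // Minimal lookahead sketch: LA(1) is the symbol at the current index,
    // and any offset past the end of the buffer yields EOF.
    const EOF: isize = -1; // assumed sentinel, stands in for crate::int_stream::EOF

    struct MiniStream { index: isize, data: Vec<isize> }

    impl MiniStream {
        fn la(&self, offset: isize) -> isize {
            if (self.index + offset - 1) >= self.data.len() as isize {
                return EOF;
            }
            self.data[(self.index + offset - 1) as usize]
        }
    }

    fn main() {
        let s = MiniStream { index: 0, data: vec![10, 20, 30] };
        assert_eq!(s.la(1), 10);
        assert_eq!(s.la(3), 30);
        assert_eq!(s.la(4), EOF);
    }
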
src/parser.rs (2 changes: 1 addition & 1 deletion)
@@ -416,7 +416,7 @@ impl<T, Ext> BaseParser<Ext, T>
prev.set_stop(self.input.lt(-1).map(Token::to_owned));

// println!("{}",prev.get_start().unwrap());
- localctx.set_start(prev.get_start());
+ localctx.set_start(Some(prev.get_start().clone()));
self.ctx = Some(localctx);

if self.build_parse_trees {
src/parser_rule_context.rs (58 changes: 34 additions & 24 deletions)
@@ -5,6 +5,7 @@ use std::fmt::{Debug, Error, Formatter};
use std::ops::{CoerceUnsized, Deref, DerefMut};
use std::rc::Rc;

+ use crate::common_token_factory::INVALID_TOKEN;
use crate::errors::ANTLRError;
use crate::interval_set::Interval;
use crate::parser::{ListenerCaller, Parser, ParserRecog};
@@ -18,10 +19,20 @@ pub trait ParserRuleContext: RuleContext + CustomRuleContext + ParseTree + Any +
fn set_exception(&self, e: ANTLRError);

fn set_start(&self, t: Option<OwningToken>);
- fn get_start(&self) -> Option<OwningToken>;
+ ///
+ /// Get the initial token in this context.
+ /// Note that the range from start to stop is inclusive, so for rules that do not consume anything
+ /// (for example, zero length or error productions) this token may exceed stop.
+ ///
+ fn get_start(&self) -> Ref<OwningToken>;

fn set_stop(&self, t: Option<OwningToken>);
- fn get_stop(&self) -> Option<OwningToken>;
+ ///
+ /// Get the final token in this context.
+ /// Note that the range from start to stop is inclusive, so for rules that do not consume anything
+ /// (for example, zero length or error productions) this token may precede start.
+ ///
+ fn get_stop(&self) -> Ref<OwningToken>;


fn add_token_node(&self, token: TerminalNode) -> Rc<dyn ParserRuleContext>;
@@ -50,17 +61,22 @@ pub trait ParserRuleContext: RuleContext + CustomRuleContext + ParseTree + Any +
.collect()
}

- fn get_token(&self, ttype: isize, pos: usize) -> Rc<TerminalNode> {
+ fn get_token(&self, ttype: isize, pos: usize) -> Option<Rc<TerminalNode>> {
self.get_children()
.iter()
.filter(|&it| it.deref().type_id() == TypeId::of::<TerminalNode>())
.map(|it| cast_rc::<TerminalNode>(it.clone()))
.filter(|it| it.symbol.get_token_type() == ttype)
- .nth(pos).unwrap()
+ .nth(pos)
}

- fn get_tokens(&self, _ttype: isize) -> Box<dyn Iterator<Item=&OwningToken>> {
- unimplemented!()
+ fn get_tokens(&self, ttype: isize) -> Vec<Rc<TerminalNode>> {
+ self.get_children()
+ .iter()
+ .filter(|&it| it.deref().type_id() == TypeId::of::<TerminalNode>())
+ .map(|it| cast_rc::<TerminalNode>(it.clone()))
+ .filter(|it| it.symbol.get_token_type() == ttype)
+ .collect()
}

fn upcast_any(&self) -> &dyn Any;
@@ -142,8 +158,8 @@ pub type ParserRuleContextType = Rc<dyn ParserRuleContext>;
pub struct BaseParserRuleContext<Ctx: CustomRuleContext> {
base: BaseRuleContext<Ctx>,

- start: RefCell<Option<OwningToken>>,
- stop: RefCell<Option<OwningToken>>,
+ start: RefCell<OwningToken>,
+ stop: RefCell<OwningToken>,
exception: Option<Box<ANTLRError>>,
/// List of children of current node
pub(crate) children: RefCell<Vec<ParserRuleContextType>>,
@@ -168,10 +184,6 @@ impl<Ctx: CustomRuleContext> RuleContext for BaseParserRuleContext<Ctx> {
self.base.get_parent_ctx()
}

- fn peek_parent(&self) -> Option<ParserRuleContextType> {
- self.base.peek_parent()
- }
-
fn set_parent(&self, parent: &Option<Rc<dyn ParserRuleContext>>) {
self.base.set_parent(parent)
}
@@ -214,19 +226,19 @@ impl<Ctx: CustomRuleContext> ParserRuleContext for BaseParserRuleContext<Ctx> {
}

fn set_start(&self, t: Option<OwningToken>) {
- *self.start.borrow_mut() = t;
+ *self.start.borrow_mut() = t.unwrap_or((**INVALID_TOKEN).clone());
}

- fn get_start(&self) -> Option<OwningToken> {
- self.start.borrow().clone()
+ fn get_start(&self) -> Ref<OwningToken> {
+ self.start.borrow()
}

fn set_stop(&self, t: Option<OwningToken>) {
- *self.stop.borrow_mut() = t;
+ *self.stop.borrow_mut() = t.unwrap_or((**INVALID_TOKEN).clone());
}

- fn get_stop(&self) -> Option<OwningToken> {
- self.stop.borrow().clone()
+ fn get_stop(&self) -> Ref<OwningToken> {
+ self.stop.borrow()
}

fn add_token_node(&self, token: TerminalNode) -> Rc<dyn ParserRuleContext> {
@@ -317,8 +329,8 @@ impl<Ctx: CustomRuleContext> BaseParserRuleContext<Ctx> {
pub fn new_parser_ctx(parent_ctx: Option<ParserRuleContextType>, invoking_state: isize, ext: Ctx) -> Self {
BaseParserRuleContext {
base: BaseRuleContext::new_ctx(parent_ctx, invoking_state, ext),
- start: RefCell::new(None),
- stop: RefCell::new(None),
+ start: RefCell::new((**INVALID_TOKEN).clone()),
+ stop: RefCell::new((**INVALID_TOKEN).clone()),
exception: None,
children: RefCell::new(vec![]),
}
@@ -352,11 +364,11 @@ impl<T: DerefSeal<Target=I> + Debug + 'static, I: ParserRuleContext + ?Sized> Pa

fn set_start(&self, t: Option<OwningToken>) { self.deref().set_start(t) }

- fn get_start(&self) -> Option<OwningToken> { self.deref().get_start() }
+ fn get_start(&self) -> Ref<OwningToken> { self.deref().get_start() }

fn set_stop(&self, t: Option<OwningToken>) { self.deref().set_stop(t) }

- fn get_stop(&self) -> Option<OwningToken> { self.deref().get_stop() }
+ fn get_stop(&self) -> Ref<OwningToken> { self.deref().get_stop() }

fn add_token_node(&self, token: BaseParserRuleContext<TerminalNodeCtx>) -> Rc<dyn ParserRuleContext> { self.deref().add_token_node(token) }

@@ -384,8 +396,6 @@ impl<T: DerefSeal<Target=I> + Debug + 'static, I: ParserRuleContext + ?Sized> Ru

fn get_parent_ctx(&self) -> Option<Rc<dyn ParserRuleContext>> { self.deref().get_parent_ctx() }

- fn peek_parent(&self) -> Option<Rc<dyn ParserRuleContext>> { self.deref().peek_parent() }
-
fn set_parent(&self, parent: &Option<Rc<dyn ParserRuleContext>>) { self.deref().set_parent(parent) }
}

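
Taken together, the changes in this file replace the cloned Option<OwningToken> accessors with Ref borrows backed by INVALID_TOKEN when unset, make get_token report a missing child through Option instead of panicking, and give get_tokens a real implementation that returns a Vec. A hypothetical caller-side sketch (the function name, the token type value 1, and the printing are illustrative, not generated code):

    // Hypothetical usage of the reworked accessors; `ctx` is any parser rule context.
    fn describe(ctx: &dyn ParserRuleContext) {
        // get_start()/get_stop() now hand out Ref<OwningToken>, so no unwrap or clone is needed.
        let first = ctx.get_start().get_token_index();
        let last = ctx.get_stop().get_token_index();
        println!("rule spans token indexes {}..{}", first, last);

        // get_token() reports a missing child as None rather than unwrapping.
        if let Some(node) = ctx.get_token(1, 0) {
            println!("first child of token type {}", node.symbol.get_token_type());
        }

        // get_tokens() now collects every terminal child of the given token type.
        for node in ctx.get_tokens(1) {
            println!("terminal child of type {}", node.symbol.get_token_type());
        }
    }
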
src/prediction_context.rs (4 changes: 2 additions & 2 deletions)
@@ -477,11 +477,11 @@ impl PredictionContext {
}

pub fn from_rule_context(atn: &ATN, outer_context: &dyn ParserRuleContext) -> Arc<PredictionContext> {
- if outer_context.peek_parent().is_none() || ptr::eq(outer_context, empty_ctx().as_ref()) {
+ if outer_context.get_parent_ctx().is_none() || ptr::eq(outer_context, empty_ctx().as_ref()) {
return EMPTY_PREDICTION_CONTEXT.clone()
}

- let parent = PredictionContext::from_rule_context(atn, outer_context.peek_parent().unwrap().deref());
+ let parent = PredictionContext::from_rule_context(atn, outer_context.get_parent_ctx().unwrap().deref());

let transition = atn.states[outer_context.get_invoking_state() as usize]
.get_transitions()
src/rule_context.rs (9 changes: 4 additions & 5 deletions)
@@ -17,9 +17,8 @@ pub trait RuleContext {
self.get_invoking_state() == -1
}

- //todo rewrite into take and get
fn get_parent_ctx(&self) -> Option<Rc<dyn ParserRuleContext>>;
- fn peek_parent(&self) -> Option<ParserRuleContextType>;
+
fn set_parent(&self, parent: &Option<Rc<dyn ParserRuleContext>>);
}

@@ -68,9 +67,9 @@ impl<Ctx: CustomRuleContext> RuleContext for BaseRuleContext<Ctx> {
self.parent_ctx.borrow().as_ref().map(Weak::upgrade).flatten()
}

- fn peek_parent(&self) -> Option<ParserRuleContextType> {
- self.parent_ctx.borrow().as_ref().map(Weak::upgrade).map(Option::unwrap)
- }
+ // fn get_parent_ctx(&self) -> Option<ParserRuleContextType> {
+ // self.parent_ctx.borrow().as_ref().map(Weak::upgrade).map(Option::unwrap)
+ // }

fn set_parent(&self, parent: &Option<Rc<dyn ParserRuleContext>>) {
*self.parent_ctx.borrow_mut() = parent.as_ref().map(Rc::downgrade);
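
The peek_parent accessor is gone from the trait: the removed implementation (kept above as a comment) unwrapped the upgraded Weak reference and would panic if the parent context had already been dropped, while the surviving get_parent_ctx flattens that case into None. A standalone illustration of the difference using plain Rc/Weak, independent of this crate:

    use std::cell::RefCell;
    use std::rc::{Rc, Weak};

    // Upgrading a Weak whose Rc is gone yields None instead of panicking.
    struct Node {
        parent: RefCell<Option<Weak<Node>>>,
    }

    impl Node {
        // Mirrors the surviving get_parent_ctx: a dead parent simply becomes None.
        fn get_parent(&self) -> Option<Rc<Node>> {
            self.parent.borrow().as_ref().map(Weak::upgrade).flatten()
        }
    }

    fn main() {
        let parent = Rc::new(Node { parent: RefCell::new(None) });
        let child = Node { parent: RefCell::new(Some(Rc::downgrade(&parent))) };
        assert!(child.get_parent().is_some());
        drop(parent); // last strong reference goes away
        assert!(child.get_parent().is_none()); // the unwrap-based peek_parent would panic here
    }
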
tests/gen/referencetoatnparser.rs (2 changes: 1 addition & 1 deletion)
@@ -218,7 +218,7 @@ impl ReferenceToATNParser {

println!("{}", {
let temp = recog.base.input.lt(-1).map(|it| it.get_token_index()).unwrap_or(-1);
- recog.input.get_text_from_interval(recog.get_parser_rule_context().get_start().unwrap().get_token_index(), temp)
+ recog.input.get_text_from_interval(recog.get_parser_rule_context().get_start().get_token_index(), temp)
});
}
};
