Skip to content

Commit

Permalink
Add experimental draft support for GPML-style graph query
Browse files Browse the repository at this point in the history
jpschorr committed Sep 26, 2022
1 parent 17cbcb5 commit f4931e5
Showing 6 changed files with 543 additions and 5 deletions.
15 changes: 15 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -17,11 +17,25 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Adds the capability for exporting the playground session on client side to be able to get fetched from another playground windows.
- Adds a REST API and exposes /parse for parsing the query over http request.
- Containerization using Docker.
- An experimental (pending [#15](https://github.com/partiql/partiql-docs/issues/15)) embedding of a subset of
the [GPML (Graph Pattern Matching Language)](https://arxiv.org/abs/2112.06217) graph query into the `FROM` clause,
supporting the features listed below. The use within the grammar is based on the assumption of a new graph data type being added to the
specification of data types within PartiQL, and should be considered experimental until the semantics of the graph
data type are specified.
- basic and abbreviated node and edge patterns (section 4.1 of the GPML paper)
- concatenated path patterns (section 4.2 of the GPML paper)
- path variables (section 4.2 of the GPML paper)
- graph patterns (i.e., comma separated path patterns) (section 4.3 of the GPML paper)
- parenthesized patterns (section 4.4 of the GPML paper)
- path quantifiers (section 4.4 of the GPML paper)
- restrictors and selector (section 5.1 of the GPML paper)
- pre-filters and post-filters (section 5.2 of the GPML paper)

### Fixes
- Fixes the bug with AST graph PAN and ZOOM—before this change the pan and zoom was quite flaky and very hard to work with.
- Fixes the version value for the session and JSON output by ensuring it gets picked from the selected version in the UI.


## [0.1.0] - 2022-08-05
### Added
- Lexer & Parser for the majority of PartiQL query capabilities—see syntax [success](https://github.com/partiql/partiql-tests/tree/main/partiql-tests-data/success/syntax)
@@ -33,5 +47,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- PartiQL Playground proof of concept (POC)
- PartiQL CLI with REPL and query visualization features


[Unreleased]: https://github.com/partiql/partiql-lang-rust/compare/v0.1.0...HEAD
[0.1.0]: https://github.com/partiql/partiql-lang-rust/compare/v0.1.0
151 changes: 150 additions & 1 deletion partiql-ast/src/ast.rs
Original file line number Diff line number Diff line change
@@ -10,6 +10,7 @@
use rust_decimal::Decimal as RustDecimal;

use std::fmt;
use std::num::NonZeroU32;

#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
@@ -534,6 +535,8 @@ pub enum FromClause {
FromLet(AstNode<FromLet>),
/// <from_source> JOIN \[INNER | LEFT | RIGHT | FULL\] <from_source> ON <expr>
Join(AstNode<Join>),
/// <expr> MATCH <graph_pattern>
GraphMatch(AstNode<GraphMatch>),
}

#[derive(Clone, Debug, PartialEq)]
@@ -583,7 +586,153 @@ pub enum JoinSpec {
Natural,
}

/// `<expr> MATCH <graph_pattern>` — a graph `MATCH` operation appearing as a
/// `FROM`-clause source; `expr` is the graph value being matched and
/// `graph_expr` is the GPML pattern expression to match against it.
// NOTE(review): the previous doc comment here described GROUP BY — it looks
// like a stale copy of the `GroupByExpr` doc (see below); removed here.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct GraphMatch {
    pub expr: Box<Expr>,
    pub graph_expr: Box<AstNode<GraphMatchExpr>>,
}

/// The direction of an edge
/// | Orientation               | Edge pattern | Abbreviation |
/// |---------------------------+--------------+--------------|
/// | Pointing left             | <−[ spec ]−  | <−           |
/// | Undirected                | ~[ spec ]~   | ~            |
/// | Pointing right            | −[ spec ]−>  | −>           |
/// | Left or undirected        | <~[ spec ]~  | <~           |
/// | Undirected or right       | ~[ spec ]~>  | ~>           |
/// | Left or right             | <−[ spec ]−> | <−>          |
/// | Left, undirected or right | −[ spec ]−   | −            |
///
/// Fig. 5. Table of edge patterns:
/// https://arxiv.org/abs/2112.06217
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum GraphMatchDirection {
    /// `<−`: pointing left
    Left,
    /// `~`: undirected
    Undirected,
    /// `−>`: pointing right
    Right,
    /// `<~`: left or undirected
    LeftOrUndirected,
    /// `~>`: undirected or right
    UndirectedOrRight,
    /// `<−>`: left or right
    LeftOrRight,
    /// `−`: left, undirected, or right
    LeftOrUndirectedOrRight,
}

/// A part of a graph pattern: one element of the ordered sequence of nodes,
/// edges, and sub-patterns that forms a path pattern.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum GraphMatchPatternPart {
    /// A single node in a graph pattern, e.g. `(a)`.
    Node(AstNode<GraphMatchNode>),

    /// A single edge in a graph pattern, e.g. `−[e]−>`.
    Edge(AstNode<GraphMatchEdge>),

    /// A parenthesized or bracketed sub-pattern, e.g. `[(a)−[e]−>(b)]`.
    Pattern(AstNode<GraphMatchPattern>),
}

/// A quantifier for graph edges or patterns. (e.g., the `{2,5}` in `MATCH (x)->{2,5}(y)`)
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct GraphMatchQuantifier {
    /// Minimum number of repetitions.
    pub lower: u32,
    /// Maximum number of repetitions; `None` means unbounded.
    pub upper: Option<NonZeroU32>,
}

/// A path restrictor
/// | Keyword        | Description
/// |----------------+--------------
/// | TRAIL          | No repeated edges.
/// | ACYCLIC        | No repeated nodes.
/// | SIMPLE         | No repeated nodes, except that the first and last nodes may be the same.
///
/// Fig. 7. Table of restrictors:
/// https://arxiv.org/abs/2112.06217
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum GraphMatchRestrictor {
    /// `TRAIL`: no repeated edges.
    Trail,
    /// `ACYCLIC`: no repeated nodes.
    Acyclic,
    /// `SIMPLE`: no repeated nodes, except first and last may coincide.
    Simple,
}

/// A single node in a graph pattern, e.g. the `(x)` in `MATCH (x)`.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct GraphMatchNode {
    /// an optional node pre-filter, e.g.: `WHERE c.name='Alarm'` in `MATCH (c WHERE c.name='Alarm')`
    pub prefilter: Option<Box<Expr>>,
    /// the optional element variable of the node match, e.g.: `x` in `MATCH (x)`
    pub variable: Option<SymbolPrimitive>,
    /// the optional label(s) to match for the node, e.g.: `Entity` in `MATCH (x:Entity)`
    pub label: Option<Vec<SymbolPrimitive>>,
}

/// A single edge in a graph pattern, e.g. the `−[t]−>` in `MATCH (a) −[t]−> (b)`.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct GraphMatchEdge {
    /// edge direction
    pub direction: GraphMatchDirection,
    /// an optional quantifier for the edge match
    pub quantifier: Option<AstNode<GraphMatchQuantifier>>,
    /// an optional edge pre-filter, e.g.: `WHERE t.capacity>100` in `MATCH −[t:hasSupply WHERE t.capacity>100]−>`
    pub prefilter: Option<Box<Expr>>,
    /// the optional element variable of the edge match, e.g.: `t` in `MATCH −[t]−>`
    pub variable: Option<SymbolPrimitive>,
    /// the optional label(s) to match for the edge. e.g.: `Target` in `MATCH −[t:Target]−>`
    pub label: Option<Vec<SymbolPrimitive>>,
}

/// A single graph match (path) pattern, e.g. `p = (a) −[t]−> (b)`.
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct GraphMatchPattern {
    /// an optional path restrictor, e.g.: `TRAIL` in `MATCH TRAIL p = (a) −[t]−> (b)`
    pub restrictor: Option<GraphMatchRestrictor>,
    /// an optional quantifier for the entire pattern match
    pub quantifier: Option<AstNode<GraphMatchQuantifier>>,
    /// an optional pattern pre-filter, e.g.: `WHERE a.name=b.name` in `MATCH [(a)->(b) WHERE a.name=b.name]`
    pub prefilter: Option<Box<Expr>>,
    /// the optional element variable of the pattern, e.g.: `p` in `MATCH p = (a) −[t]−> (b)`
    pub variable: Option<SymbolPrimitive>,
    /// the ordered pattern parts
    pub parts: Vec<GraphMatchPatternPart>,
}

/// A path selector
/// | Keyword
/// |------------------
/// | ANY SHORTEST
/// | ALL SHORTEST
/// | ANY
/// | ANY k
/// | SHORTEST k
/// | SHORTEST k GROUP
///
/// Fig. 8. Table of selectors:
/// https://arxiv.org/abs/2112.06217
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum GraphMatchSelector {
    /// `ANY SHORTEST`
    AnyShortest,
    /// `ALL SHORTEST`
    AllShortest,
    /// `ANY`
    Any,
    /// `ANY k`
    AnyK(NonZeroU32),
    /// `SHORTEST k`
    ShortestK(NonZeroU32),
    /// `SHORTEST k GROUP`
    ShortestKGroup(NonZeroU32),
}

/// A graph match clause as defined in GPML
/// See https://arxiv.org/abs/2112.06217
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct GraphMatchExpr {
    /// an optional path selector applying to all patterns, e.g. `ANY SHORTEST`
    pub selector: Option<GraphMatchSelector>,
    /// the comma-separated path patterns of the match
    pub patterns: Vec<AstNode<GraphMatchPattern>>,
}

/// GROUP BY <grouping_strategy> <group_key_list>... \[AS <symbol>\]
#[derive(Clone, Debug, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct GroupByExpr {
17 changes: 17 additions & 0 deletions partiql-parser/benches/bench_parse.rs
Original file line number Diff line number Diff line change
@@ -34,6 +34,20 @@ const Q_COMPLEX_FEXPR: &str = r#"
AS deltas FROM SOURCE_VIEW_DELTA_FULL_TRANSACTIONS delta_full_transactions
"#;

// A `MATCH`-heavy analogue of `Q_COMPLEX_FEXPR`, used to benchmark parsing of
// the experimental GPML graph-pattern syntax embedded within a complex query.
// NOTE(review): the graph is aliased `deltaGraph` while later paths use
// `delta.*` — harmless for a parse-only benchmark, but confirm it is intended.
const Q_COMPLEX_MATCH: &str = r#"
SELECT (
SELECT numRec, data
FROM
(deltaGraph MATCH (t) -[:hasChange]-> (dt), (dt) -[:checkPointedBy]-> (t1)),
(
SELECT foo(u.id), bar(review), rindex
FROM delta.data as u CROSS JOIN UNPIVOT u.reviews as review AT rindex
) as data,
delta.numRec as numRec
)
AS deltas FROM SOURCE_VIEW_DELTA_FULL_TRANSACTIONS delta_full_transactions
"#;

fn parse_bench(c: &mut Criterion) {
fn parse(text: &str) -> ParserResult {
Parser::default().parse(text)
@@ -45,6 +59,9 @@ fn parse_bench(c: &mut Criterion) {
c.bench_function("parse-complex-fexpr", |b| {
b.iter(|| parse(black_box(Q_COMPLEX_FEXPR)))
});
c.bench_function("parse-complex-match", |b| {
b.iter(|| parse(black_box(Q_COMPLEX_MATCH)))
});
}

criterion_group! {
36 changes: 32 additions & 4 deletions partiql-parser/src/lexer.rs
Original file line number Diff line number Diff line change
@@ -467,6 +467,8 @@ pub enum Token<'input> {
Caret,
#[token(".")]
Period,
#[token("~")]
Tilde,
#[token("||")]
DblPipe,

@@ -512,10 +514,14 @@ pub enum Token<'input> {
// Keywords
#[regex("(?i:All)")]
All,
#[regex("(?i:Acyclic)")]
Acyclic,
#[regex("(?i:Asc)")]
Asc,
#[regex("(?i:And)")]
And,
#[regex("(?i:Any)")]
Any,
#[regex("(?i:As)")]
As,
#[regex("(?i:At)")]
@@ -576,6 +582,8 @@ pub enum Token<'input> {
Like,
#[regex("(?i:Limit)")]
Limit,
#[regex("(?i:Match)")]
Match,
#[regex("(?i:Missing)")]
Missing,
#[regex("(?i:Natural)")]
@@ -612,8 +620,14 @@ pub enum Token<'input> {
Time,
#[regex("(?i:Timestamp)")]
Timestamp,
#[regex("(?i:Simple)")]
Simple,
#[regex("(?i:Shortest)")]
Shortest,
#[regex("(?i:Then)")]
Then,
#[regex("(?i:Trail)")]
Trail,
#[regex("(?i:True)")]
True,
#[regex("(?i:Union)")]
@@ -642,9 +656,11 @@ impl<'input> Token<'input> {
pub fn is_keyword(&self) -> bool {
matches!(
self,
Token::All
Token::Acyclic
| Token::All
| Token::Asc
| Token::And
| Token::Any
| Token::As
| Token::At
| Token::Between
@@ -671,6 +687,7 @@ impl<'input> Token<'input> {
| Token::Left
| Token::Like
| Token::Limit
| Token::Match
| Token::Missing
| Token::Natural
| Token::Not
@@ -689,7 +706,10 @@ impl<'input> Token<'input> {
| Token::Table
| Token::Time
| Token::Timestamp
| Token::Simple
| Token::Shortest
| Token::Then
| Token::Trail
| Token::Union
| Token::Unpivot
| Token::Using
@@ -736,6 +756,7 @@ impl<'input> fmt::Display for Token<'input> {
Token::Slash => write!(f, "/"),
Token::Caret => write!(f, "^"),
Token::Period => write!(f, "."),
Token::Tilde => write!(f, "~"),
Token::DblPipe => write!(f, "||"),
Token::UnquotedIdent(id) => write!(f, "<{}:UNQUOTED_IDENT>", id),
Token::QuotedIdent(id) => write!(f, "<{}:QUOTED_IDENT>", id),
@@ -748,9 +769,11 @@ impl<'input> fmt::Display for Token<'input> {
Token::EmbeddedIonQuote => write!(f, "<ION>"),
Token::Ion(txt) => write!(f, "<{}:ION>", txt),

Token::All
Token::Acyclic
| Token::All
| Token::Asc
| Token::And
| Token::Any
| Token::As
| Token::At
| Token::Between
@@ -781,6 +804,7 @@ impl<'input> fmt::Display for Token<'input> {
| Token::Left
| Token::Like
| Token::Limit
| Token::Match
| Token::Missing
| Token::Natural
| Token::Not
@@ -799,7 +823,10 @@ impl<'input> fmt::Display for Token<'input> {
| Token::Table
| Token::Time
| Token::Timestamp
| Token::Simple
| Token::Shortest
| Token::Then
| Token::Trail
| Token::True
| Token::Union
| Token::Unpivot
@@ -836,7 +863,8 @@ mod tests {
"WiTH Where Value uSiNg Unpivot UNION True Select right Preserve pivoT Outer Order Or \
On Offset Nulls Null Not Natural Missing Limit Like Left Lateral Last Join \
Intersect Is Inner In Having Group From For Full First False Except Escape Desc \
Cross Table Time Timestamp Date By Between At As And Asc All Values Case When Then Else End";
Cross Table Time Timestamp Date By Between At As And Asc All Values Case When Then Else End \
Match Any Shortest Trail Acyclic Simple";
let symbols = symbols.split(' ').chain(primitives.split(' '));
let keywords = keywords.split(' ');

@@ -858,7 +886,7 @@ mod tests {
"<unquoted_atident:UNQUOTED_ATIDENT>", "GROUP", "<quoted_atident:QUOTED_ATIDENT>",
"FROM", "FOR", "FULL", "FIRST", "FALSE", "EXCEPT", "ESCAPE", "DESC", "CROSS", "TABLE",
"TIME", "TIMESTAMP", "DATE", "BY", "BETWEEN", "AT", "AS", "AND", "ASC", "ALL", "VALUES",
"CASE", "WHEN", "THEN", "ELSE", "END"
"CASE", "WHEN", "THEN", "ELSE", "END", "MATCH", "ANY", "SHORTEST", "TRAIL", "ACYCLIC", "SIMPLE"
];
let displayed = toks
.into_iter()
127 changes: 127 additions & 0 deletions partiql-parser/src/parse/mod.rs
Original file line number Diff line number Diff line change
@@ -679,6 +679,133 @@ mod tests {
}
}

// Parser tests for the experimental GPML graph `MATCH` support in `FROM`.
mod graph {
    use super::*;

    #[test]
    fn no_labels() {
        parse!(r#"SELECT 1 FROM my_graph MATCH ()"#);
        parse!(r#"SELECT 1 FROM my_graph MATCH () WHERE contains_value('1')"#);
        parse!(r#"SELECT x.info AS info FROM my_graph MATCH (x) WHERE x.name LIKE 'foo'"#);
        // TODO: bare edge-only top-level patterns are not yet supported:
        //parse!(r#"SELECT 1 FROM g MATCH -[]-> "#);
    }

    #[test]
    fn labelled_nodes() {
        parse!(r#"SELECT x AS target FROM my_graph MATCH (x:Label) WHERE x.has_data = true"#);
    }

    // All fourteen edge forms of Fig. 5 of the GPML paper: each of the seven
    // directions in both full (`-[spec]->`) and abbreviated (`->`) form.
    #[test]
    fn edges() {
        parse!(r#"SELECT a,b FROM g MATCH (a:A) -[e:E]-> (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) -> (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) ~[e:E]~ (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) ~ (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) <-[e:E]- (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) <- (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) ~[e:E]~> (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) ~> (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) <~[e:E]~ (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) <~ (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) <-[e:E]-> (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) <-> (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) -[e:E]- (b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A) - (b:B)"#);
    }

    // Quantifiers `*`, `+`, `{n,}`, `{n,m}` on full and abbreviated edges.
    #[test]
    fn quantifiers() {
        parse!(r#"SELECT a,b FROM g MATCH (a:A)-[:edge]->*(b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A)<-[:edge]-+(b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A)~[:edge]~{5,}(b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A)-[e:edge]-{2,6}(b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A)->*(b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A)<-+(b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A)~{5,}(b:B)"#);
        parse!(r#"SELECT a,b FROM g MATCH (a:A)-{2,6}(b:B)"#);
    }

    #[test]
    fn patterns() {
        parse!(
            r#"SELECT the_a.name AS src, the_b.name AS dest FROM my_graph MATCH (the_a:a) -[the_y:y]-> (the_b:b) WHERE the_y.score > 10"#
        );
        // NOTE(review): this query was previously wrapped in an extra pair of
        // double quotes, so it parsed as a plain string literal rather than
        // exercising the MATCH grammar; the stray quotes are removed here.
        parse!(r#"SELECT a,b FROM g MATCH (a)-[:has]->()-[:contains]->(b)"#);
        parse!(r#"SELECT a,b FROM (g MATCH (a) -[:has]-> (x), (x)-[:contains]->(b))"#);
    }

    #[test]
    fn path_var() {
        parse!(r#"SELECT a,b FROM g MATCH p = (a:A) -[e:E]-> (b:B)"#);
    }

    #[test]
    fn parenthesized() {
        parse!(r#"SELECT a,b FROM g MATCH [(a:A)-[e:Edge]->(b:A) WHERE a.owner=b.owner]{2,5}"#);
        parse!(r#"SELECT a,b FROM g MATCH pathVar = (a:A)[()-[e:Edge]->()]{1,3}(b:B)"#);

        // brackets
        parse!(r#"SELECT a,b FROM g MATCH pathVar = (a:A)[-[e:Edge]->]*(b:B)"#);
        // parens
        parse!(r#"SELECT a,b FROM g MATCH pathVar = (a:A)(-[e:Edge]->)*(b:B)"#);
    }

    // Node-, edge-, and pattern-level `WHERE` pre-filters plus the outer
    // post-filter (section 5.2 of the GPML paper).
    #[test]
    fn filters() {
        parse!(
            r#"SELECT u as banCandidate FROM g MATCH (p:Post Where p.isFlagged = true) <-[:createdPost]- (u:User WHERE u.isBanned = false AND u.karma < 20) -[:createdComment]->(c:Comment WHERE c.isFlagged = true) WHERE p.title LIKE '%considered harmful%'"#
        );
    }

    #[test]
    fn restrictors() {
        parse!(
            r#"SELECT p FROM g MATCH TRAIL p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
        parse!(
            r#"SELECT p FROM g MATCH SIMPLE p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
        parse!(
            r#"SELECT p FROM g MATCH ACYCLIC p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
    }

    #[test]
    fn selectors() {
        parse!(
            r#"SELECT p FROM g MATCH ANY SHORTEST p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
        parse!(
            r#"SELECT p FROM g MATCH ALL SHORTEST p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
        parse!(
            r#"SELECT p FROM g MATCH ANY p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
        parse!(
            r#"SELECT p FROM g MATCH ANY 5 p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
        parse!(
            r#"SELECT p FROM g MATCH SHORTEST 5 p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
        parse!(
            r#"SELECT p FROM g MATCH SHORTEST 5 GROUP p = (a WHERE a.owner='Dave') -[t:Transfer]-> * (b WHERE b.owner='Aretha')"#
        );
    }

    // A graph MATCH combined with ordinary joined table references.
    #[test]
    fn match_and_join() {
        parse!(
            r#"SELECT a,b,c, t1.x as x, t2.y as y FROM (graph MATCH (a) -> (b), (a) -> (c)), table1 as t1, table2 as t2"#
        );
    }

    // Selector and restrictor applied to a bracketed sub-pattern.
    #[test]
    fn etc() {
        parse!("SELECT * FROM g MATCH ALL SHORTEST [ (x)-[e]->*(y) ]");
        parse!("SELECT * FROM g MATCH ALL SHORTEST [ TRAIL (x)-[e]->*(y) ]");
    }
}

mod errors {
use super::*;
use crate::error::{LexError, UnexpectedToken, UnexpectedTokenData};
202 changes: 202 additions & 0 deletions partiql-parser/src/parse/partiql.lalrpop
Original file line number Diff line number Diff line change
@@ -248,6 +248,7 @@ TableReference: ast::AstNode<ast::FromClause> = {
TableNonJoin: ast::AstNode<ast::FromClause> = {
<lo:@L> <t:TableBaseReference> <hi:@R> => state.node(ast::FromClause::FromLet( t ), lo..hi),
<lo:@L> <t:TableUnpivot> <hi:@R> => state.node(ast::FromClause::FromLet( t ), lo..hi),
<lo:@L> <t:TableMatch> <hi:@R> => state.node(ast::FromClause::GraphMatch( t ), lo..hi),
}

#[inline]
@@ -285,6 +286,200 @@ TableUnpivot: ast::AstNode<ast::FromLet> = {
}
}

// `<expr> MATCH <pattern>` as a FROM-clause table reference.
// The first alternative accepts a single path pattern without parentheses;
// the parenthesized form is required for comma-separated path patterns.
TableMatch: ast::AstNode<ast::GraphMatch> = {
    <lo:@L> <expr:ExprQuery> "MATCH" <selector:MatchPatternSelector?> <patterns:MatchPattern> <hi:@R> => {
        let graph_expr = Box::new(state.node(ast::GraphMatchExpr{selector, patterns: vec![patterns]}, lo..hi));
        state.node(ast::GraphMatch{ expr, graph_expr }, lo..hi)
    },
    <lo:@L> "(" <expr:ExprQuery> "MATCH" <selector:MatchPatternSelector?> <patterns:CommaSepPlus<MatchPattern>> ")" <hi:@R> => {
        let graph_expr = Box::new(state.node(ast::GraphMatchExpr{selector, patterns}, lo..hi));
        state.node(ast::GraphMatch{ expr, graph_expr }, lo..hi)
    },
}

// GPML path selectors (Fig. 8 of the GPML paper): `ANY [SHORTEST|k]`,
// `ALL SHORTEST`, `SHORTEST k [GROUP]`.
#[inline]
MatchPatternSelector: ast::GraphMatchSelector = {
    "ANY" "SHORTEST" => ast::GraphMatchSelector::AnyShortest,
    "ALL" "SHORTEST" => ast::GraphMatchSelector::AllShortest,
    // `ANY` with an optional count `k`; bare `ANY` selects any single match.
    "ANY" <k:"Int"?> => {
        // TODO handle bad number parse
        k.map(|n| ast::GraphMatchSelector::AnyK(n.parse().unwrap())).unwrap_or(ast::GraphMatchSelector::Any)
    },
    "SHORTEST" <k:"Int"> => {
        // TODO handle bad number parse
        ast::GraphMatchSelector::ShortestK(k.parse().unwrap())
    },
    "SHORTEST" <k:"Int"> "GROUP" => {
        // TODO handle bad number parse
        ast::GraphMatchSelector::ShortestKGroup(k.parse().unwrap())
    }
}

// A top-level path pattern: optional restrictor (`TRAIL`/`ACYCLIC`/`SIMPLE`),
// optional path variable (`p = `), then the pattern parts. The `(...)` and
// `[...]` alternatives wrap a pattern with an optional prefilter and
// quantifier (section 4.4 of the GPML paper), keeping the wrapped pattern's
// other fields via struct-update syntax.
MatchPattern: ast::AstNode<ast::GraphMatchPattern> = {
    <lo:@L> <restrictor:MatchPatternRestrictor?> <variable:MatchPatternPathVariable?> <parts:MatchPatternParts> <hi:@R> => {
        state.node(ast::GraphMatchPattern{
            restrictor,
            quantifier: None,
            prefilter: None,
            variable,
            parts,
        }, lo..hi)
    },
    <lo:@L> "(" <pattern:MatchPattern> <prefilter:MatchPatternPartPrefilter?> ")" <quantifier:MatchPatternQuantifier?> <hi:@R> => {
        state.node(ast::GraphMatchPattern{
            quantifier,
            prefilter,
            ..pattern.node
        }, lo..hi)
    },
    <lo:@L> "[" <pattern:MatchPattern> <prefilter:MatchPatternPartPrefilter?> "]" <quantifier:MatchPatternQuantifier?> <hi:@R> => {
        state.node(ast::GraphMatchPattern{
            quantifier,
            prefilter,
            ..pattern.node
        }, lo..hi)
    },
}

// A path pattern nested inside parentheses/brackets; unlike `MatchPattern`,
// its parts may begin or end with an edge (see `MatchPatternPartsNested`).
MatchPatternNested: ast::AstNode<ast::GraphMatchPattern> = {
    <lo:@L> <restrictor:MatchPatternRestrictor?> <variable:MatchPatternPathVariable?> <parts:MatchPatternPartsNested> <hi:@R> => {
        state.node(ast::GraphMatchPattern{
            restrictor,
            quantifier: None,
            prefilter: None,
            variable,
            parts,
        }, lo..hi)
    }
}

// Path restrictor keywords (Fig. 7 of the GPML paper).
#[inline]
MatchPatternRestrictor: ast::GraphMatchRestrictor = {
    "TRAIL" => ast::GraphMatchRestrictor::Trail,
    "ACYCLIC" => ast::GraphMatchRestrictor::Acyclic,
    "SIMPLE" => ast::GraphMatchRestrictor::Simple,
}

// A top-level part sequence: a node followed by zero or more
// edge-node / subpattern-node continuations, flattened into one Vec.
#[inline]
MatchPatternParts: Vec<ast::GraphMatchPatternPart> = {
    <n:MatchPatternPartNode> <parts:MatchPatternPartContinue*> => {
        let node = ast::GraphMatchPatternPart::Node(n);
        std::iter::once(node).chain(parts.into_iter().flatten()).collect()
    }
}

// Inside a nested pattern, the sequence may also start without a leading
// node, or consist of a single bare edge (e.g. `[-[e]->]`).
#[inline]
MatchPatternPartsNested: Vec<ast::GraphMatchPatternPart> = {
    <MatchPatternParts>,
    <MatchPatternPartContinue>,
    <e:MatchPatternPartEdge> => vec![ast::GraphMatchPatternPart::Edge(e)],
}

// One continuation step: an edge or a parenthesized sub-pattern, always
// followed by a node.
MatchPatternPartContinue: Vec<ast::GraphMatchPatternPart> = {
    <e:MatchPatternPartEdge> <n:MatchPatternPartNode> => vec![ast::GraphMatchPatternPart::Edge(e),ast::GraphMatchPatternPart::Node(n)],
    <p:MatchPatternPartParen> <n:MatchPatternPartNode> => vec![ast::GraphMatchPatternPart::Pattern(p),ast::GraphMatchPatternPart::Node(n)],
}

// A parenthesized (`(...)`) or bracketed (`[...]`) sub-pattern used as a
// pattern part, with optional prefilter and quantifier applied to it.
MatchPatternPartParen: ast::AstNode<ast::GraphMatchPattern> = {
    <lo:@L> "(" <pattern:MatchPatternNested> <prefilter:MatchPatternPartPrefilter?> ")" <quantifier:MatchPatternQuantifier?> <hi:@R> => {
        state.node(ast::GraphMatchPattern {
            prefilter,
            quantifier,
            ..pattern.node
        }, lo..hi)
    },
    <lo:@L> "[" <pattern:MatchPatternNested> <prefilter:MatchPatternPartPrefilter?> "]" <quantifier:MatchPatternQuantifier?> <hi:@R> => {
        state.node(ast::GraphMatchPattern {
            prefilter,
            quantifier,
            ..pattern.node
        }, lo..hi)
    },
}

// A node pattern: `( [variable] [:label] [WHERE expr] )`.
MatchPatternPartNode: ast::AstNode<ast::GraphMatchNode> = {
    <lo:@L> "(" <variable:MatchPatternPartName?> <label:MatchPatternPartLabel?> <prefilter:MatchPatternPartPrefilter?> ")" <hi:@R> => {
        state.node(ast::GraphMatchNode {
            prefilter,
            variable,
            label,
        }, lo..hi)
    },
}

// Edge/pattern quantifiers: `+` (one or more), `*` (zero or more),
// `{n,}` / `{n,m}` (bounded range; missing upper bound means unbounded).
// NOTE: `+` must produce `lower:1` and `*` `lower:0` — the two were
// previously swapped, inverting the standard GPML/regex semantics.
#[inline]
MatchPatternQuantifier: ast::AstNode<ast::GraphMatchQuantifier> = {
    // `+`: one or more
    <lo:@L> "+" <hi:@R> => state.node(ast::GraphMatchQuantifier{ lower:1, upper:None }, lo..hi),
    // `*`: zero or more
    <lo:@L> "*" <hi:@R> => state.node(ast::GraphMatchQuantifier{ lower:0, upper:None }, lo..hi),
    // `{lower,}` or `{lower,upper}`
    <lo:@L> "{" <lower:"Int"> "," <upper:"Int"?> "}" <hi:@R> => {
        // TODO error on invalid literal
        state.node(ast::GraphMatchQuantifier{ lower: lower.parse().unwrap(), upper: upper.map(|n| n.parse().unwrap()) }, lo..hi)
    },
}

// An edge pattern part: either the abbreviated form (`->`, `~`, …) or the
// full form with a `[ spec ]` body, each with an optional trailing quantifier.
MatchPatternPartEdge: ast::AstNode<ast::GraphMatchEdge> = {
    <lo:@L> <spec:MatchPatternPartEdgeAbbr> <quantifier:MatchPatternQuantifier?> <hi:@R> => state.node(ast::GraphMatchEdge{ quantifier, ..spec}, lo..hi),
    <lo:@L> <spec:MatchPatternPartEdgeWSpec> <quantifier:MatchPatternQuantifier?> <hi:@R> => state.node(ast::GraphMatchEdge{ quantifier, ..spec}, lo..hi),
}

// Full edge patterns (Fig. 5 of the GPML paper): direction tokens wrapped
// around a `[ spec ]` body; the spec's placeholder direction is overwritten.
MatchPatternPartEdgeWSpec: ast::GraphMatchEdge = {
    "-" <spec:MatchPatternPartEdgeSpec> "-" ">" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::Right, ..spec},
    "~" <spec:MatchPatternPartEdgeSpec> "~" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::Undirected, ..spec},
    "<" "-" <spec:MatchPatternPartEdgeSpec> "-" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::Left, ..spec},
    "~" <spec:MatchPatternPartEdgeSpec> "~" ">" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::UndirectedOrRight, ..spec},
    "<" "~" <spec:MatchPatternPartEdgeSpec> "~" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::LeftOrUndirected, ..spec},
    "<" "-" <spec:MatchPatternPartEdgeSpec> "-" ">" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::LeftOrRight, ..spec},
    "-" <spec:MatchPatternPartEdgeSpec> "-" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::LeftOrUndirectedOrRight, ..spec},
}

// The `[ [variable] [:label] [WHERE expr] ]` body of a full edge pattern.
// The `direction` set here is a placeholder, replaced by the enclosing
// `MatchPatternPartEdgeWSpec` alternative via struct-update syntax.
MatchPatternPartEdgeSpec: ast::GraphMatchEdge = {
    "[" <variable:MatchPatternPartName?> <label:MatchPatternPartLabel?> <prefilter:MatchPatternPartPrefilter?> "]" => {
        ast::GraphMatchEdge {
            direction: ast::GraphMatchDirection::Undirected,
            quantifier: None,
            prefilter,
            variable,
            label,
        }
    }
}

// Abbreviated edge patterns (no spec body): direction only.
MatchPatternPartEdgeAbbr: ast::GraphMatchEdge = {
    "-" ">" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::Right, quantifier: None, prefilter: None, variable: None, label: Default::default() },
    "~" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::Undirected, quantifier: None, prefilter: None, variable: None, label: Default::default() },
    "<" "-" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::Left, quantifier: None, prefilter: None, variable: None, label: Default::default() },
    "~" ">" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::UndirectedOrRight, quantifier: None, prefilter: None, variable: None, label: Default::default() },
    "<" "~" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::LeftOrUndirected, quantifier: None, prefilter: None, variable: None, label: Default::default() },
    "<" "-" ">" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::LeftOrRight, quantifier: None, prefilter: None, variable: None, label: Default::default() },
    "-" => ast::GraphMatchEdge{ direction: ast::GraphMatchDirection::LeftOrUndirectedOrRight, quantifier: None, prefilter: None, variable: None, label: Default::default() },
}

// An element-variable or label name; always treated as case-sensitive.
#[inline]
MatchPatternPartName: ast::SymbolPrimitive = {
    <name:"UnquotedIdent"> => {
        ast::SymbolPrimitive {
            value: name.to_owned(),
            case: ast::CaseSensitivity::CaseSensitive
        }
    }
}

// A single `:label`; returned as a one-element Vec to leave room for future
// label conjunction/disjunction/negation support.
#[inline] // TODO conjunction/disjunction/negation
MatchPatternPartLabel: Vec<ast::SymbolPrimitive> = {
    ":" <l:MatchPatternPartName> => vec![l]
}

// A `WHERE <expr>` pre-filter inside a node, edge, or sub-pattern.
#[inline]
MatchPatternPartPrefilter: Box<ast::Expr> = {
    "WHERE" <ExprQuery>
}

// A path variable binding, e.g. the `p =` in `MATCH p = (a)->(b)`.
#[inline]
MatchPatternPathVariable: ast::SymbolPrimitive = {
    <MatchPatternPartName> "="
}


TableJoined: ast::AstNode<ast::FromClause> = {
<TableCrossJoin>,
<TableQualifiedJoin>,
@@ -1248,6 +1443,7 @@ extern {
"==" => lexer::Token::EqualEqual,
"!=" => lexer::Token::BangEqual,
"<>" => lexer::Token::LessGreater,
"~" => lexer::Token::Tilde,

"<" => lexer::Token::LessThan,
">" => lexer::Token::GreaterThan,
@@ -1267,9 +1463,11 @@ extern {
"Ion" => lexer::Token::Ion(<&'input str>),

// Keywords
"ACYCLIC" => lexer::Token::Acyclic,
"ALL" => lexer::Token::All,
"ASC" => lexer::Token::Asc,
"AND" => lexer::Token::And,
"ANY" => lexer::Token::Any,
"AS" => lexer::Token::As,
"AT" => lexer::Token::At,
"BETWEEN" => lexer::Token::Between,
@@ -1300,6 +1498,7 @@ extern {
"LEFT" => lexer::Token::Left,
"LIKE" => lexer::Token::Like,
"LIMIT" => lexer::Token::Limit,
"MATCH" => lexer::Token::Match,
"MISSING" => lexer::Token::Missing,
"NATURAL" => lexer::Token::Natural,
"NOT" => lexer::Token::Not,
@@ -1318,7 +1517,10 @@ extern {
"TABLE" => lexer::Token::Table,
"TIME" => lexer::Token::Time,
"TIMESTAMP" => lexer::Token::Timestamp,
"SIMPLE" => lexer::Token::Simple,
"SHORTEST" => lexer::Token::Shortest,
"THEN" => lexer::Token::Then,
"TRAIL" => lexer::Token::Trail,
"TRUE" => lexer::Token::True,
"UNION" => lexer::Token::Union,
"UNPIVOT" => lexer::Token::Unpivot,

0 comments on commit f4931e5

Please sign in to comment.