Merge branch 'main' into fix-grammar-static
Xanewok authored Nov 2, 2023
2 parents 52d65c0 + de7285a commit 7575e8f
Showing 66 changed files with 4,437 additions and 2,685 deletions.
82 changes: 81 additions & 1 deletion .cargo/config.toml
@@ -6,4 +6,84 @@ incremental = true
lto = true

[build]
rustflags = ["--warn", "unused_crate_dependencies"]
rustflags = [
"--warn",
"unused_crate_dependencies",
# This is a list of allowed Clippy rules for the purposes of gradual migration.
# See https://github.com/NomicFoundation/slang/pull/626
"-A",
"clippy::bool_assert_comparison",
"-A",
"clippy::borrow_interior_mutable_const",
"-A",
"clippy::cmp_owned",
"-A",
"clippy::collapsible_if",
"-A",
"clippy::comparison_chain",
"-A",
"clippy::declare_interior_mutable_const",
"-A",
"clippy::enum_variant_names",
"-A",
"clippy::expect_fun_call",
"-A",
"clippy::explicit_auto_deref",
"-A",
"clippy::from_over_into",
"-A",
"clippy::inherent_to_string",
"-A",
"clippy::into_iter_on_ref",
"-A",
"clippy::len_without_is_empty",
"-A",
"clippy::len_zero",
"-A",
"clippy::manual_range_contains",
"-A",
"clippy::match_like_matches_macro",
"-A",
"clippy::needless_borrow",
"-A",
"clippy::needless_range_loop",
"-A",
"clippy::needless_return",
"-A",
"clippy::new_without_default",
"-A",
"clippy::println_empty_string",
"-A",
"clippy::ptr_arg",
"-A",
"clippy::redundant_closure",
"-A",
"clippy::redundant_pattern",
"-A",
"clippy::redundant_pattern_matching",
"-A",
"clippy::redundant_static_lifetimes",
"-A",
"clippy::should_implement_trait",
"-A",
"clippy::single_char_add_str",
"-A",
"clippy::single_char_pattern",
"-A",
"clippy::to_string_in_format_args",
"-A",
"clippy::upper_case_acronyms",
"-A",
"clippy::useless_asref",
"-A",
"clippy::useless_conversion",
"-A",
"clippy::useless_format",
"-A",
"clippy::write_literal",
"-A",
"clippy::writeln_empty_string",
"-A",
"clippy::wrong_self_convention",

]
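
Aside: each `-A <lint>` pair above passes `--allow` for that Clippy lint to every rustc/clippy invocation in the workspace. A minimal sketch (not part of this commit) of the per-crate equivalent, using one of the listed lints:

```rust
// Sketch only: crate-level equivalent of the `-A clippy::needless_return`
// entry above. The attribute silences the lint for this crate alone,
// whereas the rustflags entry applies to the whole workspace build.
#![allow(clippy::needless_return)]

fn answer() -> i32 {
    return 42; // would normally trigger `clippy::needless_return` under `cargo clippy`
}

fn main() {
    println!("{}", answer());
}
```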
2 changes: 2 additions & 0 deletions .vscode/settings.json
@@ -7,7 +7,9 @@
"**/generated/**": true
},
"rust-analyzer.check.allTargets": true,
"rust-analyzer.check.command": "clippy",
"rust-analyzer.check.features": "all",
"rust-analyzer.checkOnSave": true,
"rust-analyzer.server.path": "${workspaceFolder}/bin/rust-analyzer",
"search.exclude": {
// Packages and Dependencies
135 changes: 51 additions & 84 deletions crates/codegen/parser/generator/src/parser_definition.rs
@@ -33,6 +33,9 @@ pub trait ParserDefinitionNodeExtensions {

impl ParserDefinitionNodeExtensions for ParserDefinitionNode {
fn to_parser_code(&self, context_name: &'static str, is_trivia: bool) -> TokenStream {
let context = format_ident!("{context_name}");
let lex_ctx = quote! { LexicalContextType::#context };

match self {
Self::Versioned(body, _, _) => body.to_parser_code(context_name, is_trivia),

@@ -102,91 +105,71 @@ impl ParserDefinitionNodeExtensions for ParserDefinitionNode {

Self::ScannerDefinition(scanner_definition, _) => {
let kind = format_ident!("{name}", name = scanner_definition.name());
if is_trivia {
let function_name =
format_ident!("{}_parse_token", context_name.to_snake_case());
quote! {
self.#function_name(input, TokenKind::#kind)
}

let parse_token = if is_trivia {
format_ident!("parse_token")
} else {
let function_name =
format_ident!("{}_parse_token_with_trivia", context_name.to_snake_case());
quote! {
self.#function_name(input, TokenKind::#kind)
}
format_ident!("parse_token_with_trivia")
};

quote! {
self.#parse_token::<#lex_ctx>(input, TokenKind::#kind)
}
}

Self::TriviaParserDefinition(trivia_parser_definition, _) => {
let function_name = format_ident!(
"{snake_case}",
snake_case = trivia_parser_definition.name().to_snake_case()
);
let function_name =
format_ident!("{}", trivia_parser_definition.name().to_snake_case());

quote! { self.#function_name(input) }
}

Self::ParserDefinition(parser_definition, _) => {
if is_trivia {
unreachable!(
"Trivia productions can only reference trivia or token productions"
)
}
assert!(
!is_trivia,
"Trivia productions can only reference trivia or token productions"
);

if parser_definition.is_inline() {
parser_definition.to_parser_code()
} else {
let function_name = format_ident!(
"{snake_case}",
snake_case = parser_definition.name().to_snake_case()
);
let function_name =
format_ident!("{}", parser_definition.name().to_snake_case());

quote! {
self.#function_name(input)
}
}
}

Self::PrecedenceParserDefinition(precedence_parser_definition, _) => {
if is_trivia {
unreachable!(
"Trivia productions can only reference trivia or token productions"
)
}
let function_name = format_ident!(
"{snake_case}",
snake_case = precedence_parser_definition.name().to_snake_case()
assert!(
!is_trivia,
"Trivia productions can only reference trivia or token productions"
);

let function_name =
format_ident!("{}", precedence_parser_definition.name().to_snake_case());

quote! { self.#function_name(input) }
}

Self::DelimitedBy(open, body, close, _) => {
let [open_token, close_token] = match (open.as_ref(), close.as_ref()) {
let [open_delim, close_delim] = match (open.as_ref(), close.as_ref()) {
(
ParserDefinitionNode::ScannerDefinition(open, ..),
ParserDefinitionNode::ScannerDefinition(close, ..),
) => [open, close].map(|scanner| format_ident!("{}", scanner.name())),
_ => unreachable!("Only tokens are permitted as delimiters"),
};

let parse_token = format_ident!(
"{context_name}_parse_token_with_trivia",
context_name = context_name.to_snake_case()
);

let delimiters = format_ident!(
"{context_name}_delimiters",
context_name = context_name.to_snake_case()
);

let context = format_ident!("{context_name}");

let parser = body.to_parser_code(context_name, is_trivia);
let body_parser = body.applicable_version_quality_ranges().wrap_code(
quote! {
seq.elem(#parser
.recover_until_with_nested_delims(input,
|input| Lexer::next_token::<{ LexicalContext::#context as u8 }>(self, input),
|input| Lexer::leading_trivia(self, input),
TokenKind::#close_token,
Self::#delimiters(),
.recover_until_with_nested_delims::<_, #lex_ctx>(input,
self,
TokenKind::#close_delim,
RecoverFromNoMatch::Yes,
)
)?;
@@ -196,67 +179,51 @@ impl ParserDefinitionNodeExtensions for ParserDefinitionNode {

quote! {
SequenceHelper::run(|mut seq| {
let mut delim_guard = input.open_delim(TokenKind::#close_token);
let mut delim_guard = input.open_delim(TokenKind::#close_delim);
let input = delim_guard.ctx();

seq.elem(self.#parse_token(input, TokenKind::#open_token))?;
seq.elem(self.parse_token_with_trivia::<#lex_ctx>(input, TokenKind::#open_delim))?;
#body_parser
seq.elem(self.#parse_token(input, TokenKind::#close_token))?;
seq.elem(self.parse_token_with_trivia::<#lex_ctx>(input, TokenKind::#close_delim))?;
seq.finish()
})
}
}

Self::SeparatedBy(body, separator, _) => {
let separator_scanner = match separator.as_ref() {
ParserDefinitionNode::ScannerDefinition(scanner, ..) => scanner,
let separator = match separator.as_ref() {
ParserDefinitionNode::ScannerDefinition(scanner, ..) => {
format_ident!("{name}", name = scanner.name())
}
_ => unreachable!("Only tokens are permitted as separators"),
};

let separator_token_kind = format_ident!("{name}", name = separator_scanner.name());
let context = format_ident!("{context_name}");

let parser = body.to_parser_code(context_name, is_trivia);

quote! {
SeparatedHelper::run::<{ LexicalContext::#context as u8}, Self>(
SeparatedHelper::run::<_, #lex_ctx>(
input,
|input| #parser,
TokenKind::#separator_token_kind,
self,
|input| #parser,
TokenKind::#separator,
)
}
}
Self::TerminatedBy(body, terminator, _) => {
let terminator_scanner = match terminator.as_ref() {
ParserDefinitionNode::ScannerDefinition(scanner, ..) => scanner,
let terminator = match terminator.as_ref() {
ParserDefinitionNode::ScannerDefinition(scanner, ..) => {
format_ident!("{name}", name = scanner.name())
}
_ => unreachable!("Only tokens are permitted as terminators"),
};

let terminator_token_kind =
format_ident!("{name}", name = terminator_scanner.name());

let context = format_ident!("{context_name}");

let delimiters = format_ident!(
"{context_name}_delimiters",
context_name = context_name.to_snake_case()
);

let parse_token = format_ident!(
"{context_name}_parse_token_with_trivia",
context_name = context_name.to_snake_case()
);

let parser = body.to_parser_code(context_name, is_trivia);
let body_parser = body.applicable_version_quality_ranges().wrap_code(
quote! {
seq.elem(#parser
.recover_until_with_nested_delims(input,
|input| Lexer::next_token::<{ LexicalContext::#context as u8 }>(self, input),
|input| Lexer::leading_trivia(self, input),
TokenKind::#terminator_token_kind,
Self::#delimiters(),
.recover_until_with_nested_delims::<_, #lex_ctx>(input,
self,
TokenKind::#terminator,
RecoverFromNoMatch::No,
)
)?;
Expand All @@ -267,7 +234,7 @@ impl ParserDefinitionNodeExtensions for ParserDefinitionNode {
quote! {
SequenceHelper::run(|mut seq| {
#body_parser
seq.elem(self.#parse_token(input, TokenKind::#terminator_token_kind))?;
seq.elem(self.parse_token_with_trivia::<#lex_ctx>(input, TokenKind::#terminator))?;
seq.finish()
})
}
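
The net effect of the changes to `parser_definition.rs` is that the generator no longer bakes the lexical context into per-context method names (one `*_parse_token_with_trivia` method per context); it now emits a single generic call parameterized by a `LexicalContextType` marker. A self-contained sketch of that pattern, with invented names (`DefaultContext`, `Parser`, the string tokens) standing in for the real runtime types:

```rust
// Sketch of the refactoring pattern only; these are not the real slang runtime types.
trait IsLexicalContext {
    fn value() -> &'static str;
}

// Hypothetical marker type, analogous to a `LexicalContextType` variant.
struct DefaultContext;

impl IsLexicalContext for DefaultContext {
    fn value() -> &'static str {
        "Default"
    }
}

struct Parser;

impl Parser {
    // One method serves every context; the context is a compile-time type parameter.
    fn parse_token_with_trivia<L: IsLexicalContext>(&self, token: &str) {
        println!("parsing {token} in the {} context", L::value());
    }
}

fn main() {
    let parser = Parser;
    // Generated call sites now have this shape, instead of something like
    // `parser.default_parse_token_with_trivia("Semicolon")`.
    parser.parse_token_with_trivia::<DefaultContext>("Semicolon");
}
```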
10 changes: 9 additions & 1 deletion crates/codegen/parser/runtime/src/kinds.rs
@@ -63,7 +63,15 @@ pub enum ProductionKind {
#[derive(strum_macros::FromRepr)]
#[cfg_attr(feature = "slang_napi_interfaces", /* derives `Clone` and `Copy` */ napi(string_enum, namespace = "language"))]
#[cfg_attr(not(feature = "slang_napi_interfaces"), derive(Clone, Copy))]
#[repr(u8)] // This is used as a const fn argument, which only supports primitive types
pub enum LexicalContext {
XXX,
}

/// Marker trait for type-level [`LexicalContext`] variants.
pub trait IsLexicalContext {
/// Returns a run-time [`LexicalContext`] value.
fn value() -> LexicalContext;
}

#[allow(non_snake_case)]
pub mod LexicalContextType {}
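
The `LexicalContextType` module is left empty in the runtime stub; presumably the code generator populates it with one marker type per `LexicalContext` variant. A hedged sketch of what that generated content might look like for the placeholder `XXX` variant shown above:

```rust
// Assumed shape of the generated module (not shown in this diff):
#[allow(non_snake_case)]
pub mod LexicalContextType {
    use super::{IsLexicalContext, LexicalContext};

    pub struct XXX;

    impl IsLexicalContext for XXX {
        fn value() -> LexicalContext {
            LexicalContext::XXX
        }
    }
}
```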