Skip to content

Commit

Permalink
refactor: rework the collected tokens into a TokenizedAbi type
Browse files Browse the repository at this point in the history
  • Loading branch information
glihm committed Jan 16, 2024
1 parent da22948 commit 6c18e4a
Show file tree
Hide file tree
Showing 5 changed files with 97 additions and 81 deletions.
101 changes: 61 additions & 40 deletions crates/parser/src/abi/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,18 @@ use std::collections::HashMap;
use crate::tokens::{Array, CompositeInner, CompositeType, CoreBasic, Function, Token};
use crate::{CainomeResult, Error};

/// Result of tokenizing a contract ABI: all parsed tokens grouped by kind.
///
/// Produced by `AbiParser::collect_tokens` / `AbiParser::tokens_from_abi_string`;
/// replaces the previous stringly-keyed `HashMap<String, Vec<Token>>` layout.
#[derive(Debug)]
pub struct TokenizedAbi {
/// All enums found in the contract ABI.
pub enums: Vec<Token>,
/// All structs found in the contract ABI.
pub structs: Vec<Token>,
/// Standalone functions in the contract ABI.
pub functions: Vec<Token>,
/// Fully qualified interface name mapped to all the defined functions in it.
pub interfaces: HashMap<String, Vec<Token>>,
}

pub struct AbiParser {}

impl AbiParser {
Expand All @@ -20,12 +32,12 @@ impl AbiParser {
pub fn tokens_from_abi_string(
abi: &str,
type_aliases: &HashMap<String, String>,
) -> CainomeResult<HashMap<String, Vec<Token>>> {
) -> CainomeResult<TokenizedAbi> {
let abi_entries = Self::parse_abi_string(abi)?;
let abi_tokens = AbiParser::collect_tokens(&abi_entries).expect("failed tokens parsing");
let abi_tokens = AbiParser::organize_tokens(abi_tokens, type_aliases);
let tokenized_abi =
AbiParser::collect_tokens(&abi_entries, type_aliases).expect("failed tokens parsing");

Ok(abi_tokens)
Ok(tokenized_abi)
}

/// Parses an ABI string to output a `Vec<AbiEntry>`.
Expand All @@ -47,61 +59,58 @@ impl AbiParser {
Ok(entries)
}

/// Organizes the tokens by cairo types.
pub fn organize_tokens(
tokens: HashMap<String, Token>,
/// Parse all tokens in the ABI.
pub fn collect_tokens(
entries: &[AbiEntry],
type_aliases: &HashMap<String, String>,
) -> HashMap<String, Vec<Token>> {
) -> CainomeResult<TokenizedAbi> {
let mut token_candidates: HashMap<String, Vec<Token>> = HashMap::new();

// Entry tokens are structs, enums and events (which are structs and enums).
for entry in entries {
Self::collect_entry_token(entry, &mut token_candidates)?;
}

let tokens = Self::filter_struct_enum_tokens(token_candidates);

let mut structs = vec![];
let mut enums = vec![];
let mut functions = vec![];

// Apply type aliases only on structs and enums.
for (_, mut t) in tokens {
for (type_path, alias) in type_aliases {
t.apply_alias(type_path, alias);
}

match t {
Token::Composite(ref c) => {
match c.r#type {
CompositeType::Struct => structs.push(t),
CompositeType::Enum => enums.push(t),
_ => (), // TODO: warn?
}
if let Token::Composite(ref c) = t {
match c.r#type {
CompositeType::Struct => structs.push(t),
CompositeType::Enum => enums.push(t),
_ => (),
}
Token::Function(_) => functions.push(t),
_ => (), // TODO: warn?
}
}

let mut out = HashMap::new();
out.insert("structs".to_string(), structs);
out.insert("enums".to_string(), enums);
out.insert("functions".to_string(), functions);
out
}

/// Parse all tokens in the ABI.
pub fn collect_tokens(entries: &[AbiEntry]) -> CainomeResult<HashMap<String, Token>> {
let mut token_candidates: HashMap<String, Vec<Token>> = HashMap::new();

for entry in entries {
Self::collect_entry_token(entry, &mut token_candidates)?;
}

let mut tokens = Self::filter_struct_enum_tokens(token_candidates);
let mut functions = vec![];
let mut interfaces: HashMap<String, Vec<Token>> = HashMap::new();

for entry in entries {
Self::collect_entry_function(entry, &mut tokens)?;
Self::collect_entry_function(entry, &mut functions, &mut interfaces, None)?;
}

Ok(tokens)
Ok(TokenizedAbi {
enums,
structs,
functions,
interfaces,
})
}

///
fn collect_entry_function(
entry: &AbiEntry,
tokens: &mut HashMap<String, Token>,
functions: &mut Vec<Token>,
interfaces: &mut HashMap<String, Vec<Token>>,
interface_name: Option<String>,
) -> CainomeResult<()> {
match entry {
AbiEntry::Function(f) => {
Expand All @@ -117,11 +126,23 @@ impl AbiParser {
func.outputs.push(Token::parse(&o.r#type)?);
}

tokens.insert(f.name.clone(), Token::Function(func));
if let Some(name) = interface_name {
interfaces
.entry(name)
.or_default()
.push(Token::Function(func));
} else {
functions.push(Token::Function(func));
}
}
AbiEntry::Interface(interface) => {
for entry in &interface.items {
Self::collect_entry_function(entry, tokens)?;
Self::collect_entry_function(
entry,
functions,
interfaces,
Some(interface.name.clone()),
)?;
}
}
_ => (),
Expand Down
2 changes: 1 addition & 1 deletion crates/parser/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@ mod error;
pub use error::{CainomeResult, Error};

mod abi;
pub use crate::abi::parser::AbiParser;
pub use crate::abi::parser::{AbiParser, TokenizedAbi};

pub mod tokens;
4 changes: 2 additions & 2 deletions crates/rs-macro/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ fn abigen_internal(input: TokenStream) -> TokenStream {
let contract_name = contract_abi.name;
let abi_entries = contract_abi.abi;

let abi_tokens = AbiParser::collect_tokens(&abi_entries).expect("failed tokens parsing");
let abi_tokens = AbiParser::organize_tokens(abi_tokens, &contract_abi.type_aliases);
let abi_tokens = AbiParser::collect_tokens(&abi_entries, &contract_abi.type_aliases)
.expect("failed tokens parsing");

let expanded = cainome_rs::abi_to_tokenstream(&contract_name.to_string(), &abi_tokens);

Expand Down
66 changes: 31 additions & 35 deletions crates/rs/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use cainome_parser::tokens::{StateMutability, Token};
use cainome_parser::tokens::StateMutability;
use cainome_parser::TokenizedAbi;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use std::collections::HashMap;

mod expand;

Expand All @@ -14,54 +14,50 @@ use crate::expand::{CairoContract, CairoEnum, CairoEnumEvent, CairoFunction, Cai
///
/// * `contract_name` - Name of the contract.
/// * `abi_tokens` - Tokenized ABI.
pub fn abi_to_tokenstream(
contract_name: &str,
abi_tokens: &HashMap<String, Vec<Token>>,
) -> TokenStream2 {
pub fn abi_to_tokenstream(contract_name: &str, abi_tokens: &TokenizedAbi) -> TokenStream2 {
let contract_name = utils::str_to_ident(contract_name);

let mut tokens: Vec<TokenStream2> = vec![];

tokens.push(CairoContract::expand(contract_name.clone()));

if let Some(structs) = abi_tokens.get("structs") {
for s in structs {
let s_composite = s.to_composite().expect("composite expected");
tokens.push(CairoStruct::expand_decl(s_composite));
tokens.push(CairoStruct::expand_impl(s_composite));
}
for s in &abi_tokens.structs {
let s_composite = s.to_composite().expect("composite expected");
tokens.push(CairoStruct::expand_decl(s_composite));
tokens.push(CairoStruct::expand_impl(s_composite));
}

if let Some(enums) = abi_tokens.get("enums") {
for e in enums {
let e_composite = e.to_composite().expect("composite expected");
tokens.push(CairoEnum::expand_decl(e_composite));
tokens.push(CairoEnum::expand_impl(e_composite));

tokens.push(CairoEnumEvent::expand(
e.to_composite().expect("composite expected"),
enums,
abi_tokens
.get("structs")
.expect("at least one struct expected to expand events"),
));
}
for e in &abi_tokens.enums {
let e_composite = e.to_composite().expect("composite expected");
tokens.push(CairoEnum::expand_decl(e_composite));
tokens.push(CairoEnum::expand_impl(e_composite));

tokens.push(CairoEnumEvent::expand(
e.to_composite().expect("composite expected"),
&abi_tokens.enums,
&abi_tokens.structs,
));
}

let mut reader_views = vec![];
let mut views = vec![];
let mut externals = vec![];

if let Some(funcs) = abi_tokens.get("functions") {
for f in funcs {
let f = f.to_function().expect("function expected");
match f.state_mutability {
StateMutability::View => {
reader_views.push(CairoFunction::expand(f, true));
views.push(CairoFunction::expand(f, false));
}
StateMutability::External => externals.push(CairoFunction::expand(f, false)),
// Interfaces are not yet reflected in the generated contract.
// Then, the standalone functions and functions from interfaces are put together.
let mut functions = abi_tokens.functions.clone();
for funcs in abi_tokens.interfaces.values() {
functions.extend(funcs.clone());
}

for f in functions {
let f = f.to_function().expect("function expected");
match f.state_mutability {
StateMutability::View => {
reader_views.push(CairoFunction::expand(f, true));
views.push(CairoFunction::expand(f, false));
}
StateMutability::External => externals.push(CairoFunction::expand(f, false)),
}
}

Expand Down
5 changes: 2 additions & 3 deletions src/bin/cli/contract/mod.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
use cainome_parser::tokens::Token;
use cainome_parser::AbiParser;
use cainome_parser::{AbiParser, TokenizedAbi};
use camino::Utf8PathBuf;
use convert_case::{Case, Casing};
use std::collections::HashMap;
Expand Down Expand Up @@ -33,7 +32,7 @@ pub struct ContractData {
/// Contract's origin.
pub origin: ContractOrigin,
/// Tokens parsed from the ABI.
pub tokens: HashMap<String, Vec<Token>>,
pub tokens: TokenizedAbi,
}

pub struct ContractParser {}
Expand Down

0 comments on commit 6c18e4a

Please sign in to comment.