Commit

chore: SwcParser
sun0day committed Dec 4, 2023
1 parent 30a9eba commit fa802a1
Showing 1 changed file with 113 additions and 83 deletions.
crates/parser/src/parser.rs: 113 additions & 83 deletions (196 changes)
@@ -3,8 +3,8 @@ use crate::{
resolver::ImportResolver,
visitor::ImportVisitor,
};
use std::{collections::HashMap, env, path::Path, sync::Arc};
use oxc_resolver::Alias;
use std::{collections::HashMap, env, path::Path, sync::Arc};
use swc_core::{
base::{config::IsModule, Compiler},
common::{
@@ -18,99 +18,37 @@ use swc_core::{
visit::{VisitMutWith, VisitWith},
},
};
use swc_ecmascript::parser::{EsConfig, TsConfig};
use swc_ecmascript::{
parser::{EsConfig, TsConfig},
visit::Visit,
};

pub struct Parser {
pub struct SwcParser {
source_map: Arc<SourceMap>,
handler: Handler,
compiler: Compiler,
root: Arc<String>,
alias: Arc<Alias>,
}

impl Parser {
pub fn new(root: Option<String>, alias: Option<Alias>) -> Parser {
impl SwcParser {
pub fn new() -> SwcParser {
let source_map = Arc::<SourceMap>::default();

Parser {
SwcParser {
source_map: source_map.clone(),
handler: Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(source_map.clone())),
compiler: swc::Compiler::new(source_map.clone()),
root: Arc::new(match root {
Some(r) => r,
_ => env::current_dir().unwrap().to_string_lossy().to_string(),
}),
alias: Arc::new(alias.unwrap_or(vec![]))
}
}

pub fn parse(
&self,
files: Vec<&str>,
depth: Option<u8>,
should_resolve: Option<bool>,
) -> HashMap<Arc<String>, ImportNode> {
let wrapped_depth = depth.unwrap_or(2);
let wrapped_should_resolve = should_resolve.unwrap_or(true);
let mut visitor = ImportVisitor::new(ImportResolver::new(self.root.clone(), wrapped_should_resolve, self.alias.clone()));

GLOBALS.set(&Globals::new(), || {
let mut processed_ids: HashMap<Arc<String>, bool> = HashMap::new();

for file in files.iter() {
self.deep_parse(file, &mut visitor, if wrapped_should_resolve {wrapped_depth} else { 1 }, &mut processed_ids);
}

visitor.import_node.map
})
}

fn deep_parse<'a>(
&self,
file: &str,
visitor: &mut ImportVisitor,
mut depth: u8,
processed_ids: &mut HashMap<Arc<String>, bool>
) {
let mut file_queue = vec![Arc::new(file.to_owned())];
let mut current_count = 1;
let mut next_count = 0;

while file_queue.is_empty() == false && depth > 0 {
let target_file = file_queue.pop().unwrap();
let resolved_file = Arc::new(ImportResolver::resolve_file(&self.root, &target_file));
let process_id = Arc::new(visitor.resolver.resolve_relative_root(&target_file).0);

if processed_ids.contains_key(&process_id.clone()) == false {
processed_ids.insert(process_id.clone(), true);

visitor.set_process_id(process_id.clone());
visitor.create_node(process_id.clone());
self.parse_file(&resolved_file, visitor);

let map = visitor.import_node.map.clone();

for (id, node) in map {
if processed_ids.contains_key(&id) || node.kind != ImportNodeKind::Local {
continue;
}
next_count += 1;
file_queue.push(id);
}
}

current_count -= 1;

if current_count == 0 {
current_count = next_count;
depth -= 1;
}
}
fn run<F, R>(&self, f: F) -> R
where
F: FnOnce() -> R,
{
GLOBALS.set(&Globals::new(), f)
}

/// parse single js file
fn parse_file(&self, file: &str, visitor: &mut ImportVisitor) {
let (syntax,is_js, is_ts) = self.get_options(file);
fn parse_file(&self, file: &str, visitor: &mut dyn Visit) {
let (syntax, is_js, is_ts) = self.get_options(file);

if !is_js {
return;
@@ -134,7 +72,7 @@ impl Parser {
.unwrap();

program.visit_mut_with(&mut resolver(Mark::new(), Mark::new(), is_ts));
program.visit_with(visitor);
program.visit_with(visitor)
}

/// return (Syntax, is_js, is_ts)
@@ -150,7 +88,7 @@ impl Parser {
..Default::default()
}),
true,
true
true,
);
}

@@ -166,11 +104,103 @@ impl Parser {
}

if file.ends_with(".js") {
return (Syntax::default(),true, false);
return (Syntax::default(), true, false);
}


return (Syntax::default(), false, false);

}
}

pub struct Parser {
swc: SwcParser,
root: Arc<String>,
alias: Arc<Alias>,
}

impl Parser {
pub fn new(root: Option<String>, alias: Option<Alias>) -> Parser {
Parser {
swc: SwcParser::new(),
root: Arc::new(match root {
Some(r) => r,
_ => env::current_dir().unwrap().to_string_lossy().to_string(),
}),
alias: Arc::new(alias.unwrap_or(vec![])),
}
}

pub fn parse(
&self,
files: Vec<&str>,
depth: Option<u8>,
should_resolve: Option<bool>,
) -> HashMap<Arc<String>, ImportNode> {
let wrapped_depth = depth.unwrap_or(2);
let wrapped_should_resolve = should_resolve.unwrap_or(true);
let mut visitor = ImportVisitor::new(ImportResolver::new(
self.root.clone(),
wrapped_should_resolve,
self.alias.clone(),
));

self.swc.run(|| {
let mut processed_ids: HashMap<Arc<String>, bool> = HashMap::new();

for file in files.iter() {
self.deep_parse(
file,
&mut visitor,
if wrapped_should_resolve {
wrapped_depth
} else {
1
},
&mut processed_ids,
);
}

visitor.import_node.map
})
}

fn deep_parse<'a>(
&self,
file: &str,
visitor: &mut ImportVisitor,
mut depth: u8,
processed_ids: &mut HashMap<Arc<String>, bool>,
) {
let mut file_queue = vec![Arc::new(file.to_owned())];
let mut current_count = 1;
let mut next_count = 0;

while file_queue.is_empty() == false && depth > 0 {
let target_file = file_queue.pop().unwrap();
let resolved_file = Arc::new(ImportResolver::resolve_file(&self.root, &target_file));
let process_id = Arc::new(visitor.resolver.resolve_relative_root(&target_file).0);

if processed_ids.contains_key(&process_id.clone()) == false {
processed_ids.insert(process_id.clone(), true);

visitor.set_process_id(process_id.clone());
visitor.create_node(process_id.clone());
self.swc.parse_file(&resolved_file, visitor);

for (id, node) in &visitor.import_node.map {
if processed_ids.contains_key(&id.clone()) || node.kind != ImportNodeKind::Local {
continue;
}
next_count += 1;
file_queue.push(id.clone());
}
}

current_count -= 1;

if current_count == 0 {
current_count = next_count;
depth -= 1;
}
}
}
}
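
Below is a minimal usage sketch of the API after this refactor. It is an assumption-laden example, not code from the repository: the `parser` crate path and the entry file name are hypothetical, while the signatures of Parser::new and Parser::parse come from the diff above.

    // Hypothetical driver for the refactored Parser (crate path assumed).
    use parser::Parser;

    fn main() {
        // None/None fall back to the current working directory and an empty
        // alias list, per Parser::new in the diff.
        let parser = Parser::new(None, None);

        // Traverse imports from an entry file: depth defaults to 2 and
        // resolution to true; both are passed explicitly here. The result is
        // a HashMap<Arc<String>, ImportNode> keyed by resolved module id.
        let graph = parser.parse(vec!["src/index.ts"], Some(2), Some(true));

        for id in graph.keys() {
            println!("{id}");
        }
    }

Splitting SwcParser out of Parser keeps the swc plumbing (source map, handler, compiler, and the GLOBALS scope behind run) in one place, leaving Parser with only root/alias handling and the breadth-first traversal in deep_parse.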
