diff --git a/cli/src/generate.rs b/cli/src/generate.rs
deleted file mode 100644
index aa8f3b5b..00000000
--- a/cli/src/generate.rs
+++ /dev/null
@@ -1,34 +0,0 @@
-use crate::build_tables::build_tables;
-use crate::error::Result;
-use crate::parse_grammar::parse_grammar;
-use crate::prepare_grammar::prepare_grammar;
-use crate::render::render_c_code;
-
-pub fn generate_parser_for_grammar(
-    input: &str,
-    minimize: bool,
-    state_ids_to_log: Vec<usize>,
-) -> Result<String> {
-    let input_grammar = parse_grammar(input)?;
-    let (syntax_grammar, lexical_grammar, inlines, simple_aliases) =
-        prepare_grammar(&input_grammar)?;
-    let (parse_table, main_lex_table, keyword_lex_table, keyword_capture_token) = build_tables(
-        &syntax_grammar,
-        &lexical_grammar,
-        &simple_aliases,
-        &inlines,
-        minimize,
-        state_ids_to_log,
-    )?;
-    let c_code = render_c_code(
-        &input_grammar.name,
-        parse_table,
-        main_lex_table,
-        keyword_lex_table,
-        keyword_capture_token,
-        syntax_grammar,
-        lexical_grammar,
-        simple_aliases,
-    );
-    Ok(c_code)
-}
diff --git a/cli/src/build_tables/build_lex_table.rs b/cli/src/generate/build_tables/build_lex_table.rs
similarity index 97%
rename from cli/src/build_tables/build_lex_table.rs
rename to cli/src/generate/build_tables/build_lex_table.rs
index 0f828f5c..200c6959 100644
--- a/cli/src/build_tables/build_lex_table.rs
+++ b/cli/src/generate/build_tables/build_lex_table.rs
@@ -1,10 +1,10 @@
 use super::coincident_tokens::CoincidentTokenIndex;
 use super::item::TokenSet;
 use super::token_conflicts::TokenConflictMap;
-use crate::grammars::{LexicalGrammar, SyntaxGrammar};
-use crate::nfa::{CharacterSet, NfaCursor, NfaTransition};
-use crate::rules::Symbol;
-use crate::tables::{AdvanceAction, LexState, LexTable, ParseStateId, ParseTable};
+use crate::generate::grammars::{LexicalGrammar, SyntaxGrammar};
+use crate::generate::nfa::{CharacterSet, NfaCursor, NfaTransition};
+use crate::generate::rules::Symbol;
+use crate::generate::tables::{AdvanceAction, LexState, LexTable, ParseStateId, ParseTable};
 use std::collections::hash_map::Entry;
 use std::collections::{BTreeMap, HashMap, VecDeque};

diff --git a/cli/src/build_tables/build_parse_table.rs b/cli/src/generate/build_tables/build_parse_table.rs
similarity index 99%
rename from cli/src/build_tables/build_parse_table.rs
rename to cli/src/generate/build_tables/build_parse_table.rs
index 27baf146..73c9c0e2 100644
--- a/cli/src/build_tables/build_parse_table.rs
+++ b/cli/src/generate/build_tables/build_parse_table.rs
@@ -1,9 +1,9 @@
 use super::item::{ParseItem, ParseItemSet, TokenSet};
 use super::item_set_builder::ParseItemSetBuilder;
 use crate::error::{Error, Result};
-use crate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar, VariableType};
-use crate::rules::{Alias, Associativity, Symbol, SymbolType};
-use crate::tables::{
+use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar, VariableType};
+use crate::generate::rules::{Alias, Associativity, Symbol, SymbolType};
+use crate::generate::tables::{
     AliasSequenceId, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry,
 };
 use core::ops::Range;
diff --git a/cli/src/build_tables/coincident_tokens.rs b/cli/src/generate/build_tables/coincident_tokens.rs
similarity index 93%
rename from cli/src/build_tables/coincident_tokens.rs
rename to cli/src/generate/build_tables/coincident_tokens.rs
index 62295073..25dbc331 100644
--- a/cli/src/build_tables/coincident_tokens.rs
+++ b/cli/src/generate/build_tables/coincident_tokens.rs
@@ -1,6 +1,6 @@
-use crate::grammars::LexicalGrammar;
-use crate::rules::Symbol;
-use crate::tables::{ParseStateId, ParseTable};
+use crate::generate::grammars::LexicalGrammar;
+use crate::generate::rules::Symbol;
+use crate::generate::tables::{ParseStateId, ParseTable};
 use std::fmt;

 pub(crate) struct CoincidentTokenIndex<'a> {
diff --git a/cli/src/build_tables/item.rs b/cli/src/generate/build_tables/item.rs
similarity index 98%
rename from cli/src/build_tables/item.rs
rename to cli/src/generate/build_tables/item.rs
index 2be331b0..81c86f4a 100644
--- a/cli/src/build_tables/item.rs
+++ b/cli/src/generate/build_tables/item.rs
@@ -1,6 +1,6 @@
-use crate::grammars::{LexicalGrammar, Production, ProductionStep, SyntaxGrammar};
-use crate::rules::Associativity;
-use crate::rules::{Symbol, SymbolType};
+use crate::generate::grammars::{LexicalGrammar, Production, ProductionStep, SyntaxGrammar};
+use crate::generate::rules::Associativity;
+use crate::generate::rules::{Symbol, SymbolType};
 use smallbitvec::SmallBitVec;
 use std::cmp::Ordering;
 use std::collections::BTreeMap;
diff --git a/cli/src/build_tables/item_set_builder.rs b/cli/src/generate/build_tables/item_set_builder.rs
similarity index 99%
rename from cli/src/build_tables/item_set_builder.rs
rename to cli/src/generate/build_tables/item_set_builder.rs
index fea3b4d1..56d7c7c4 100644
--- a/cli/src/build_tables/item_set_builder.rs
+++ b/cli/src/generate/build_tables/item_set_builder.rs
@@ -1,6 +1,6 @@
 use super::item::{ParseItem, ParseItemDisplay, ParseItemSet, TokenSet};
-use crate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
-use crate::rules::Symbol;
+use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
+use crate::generate::rules::Symbol;
 use hashbrown::{HashMap, HashSet};
 use std::fmt;

diff --git a/cli/src/build_tables/minimize_parse_table.rs b/cli/src/generate/build_tables/minimize_parse_table.rs
similarity index 98%
rename from cli/src/build_tables/minimize_parse_table.rs
rename to cli/src/generate/build_tables/minimize_parse_table.rs
index d83e117f..007c9703 100644
--- a/cli/src/build_tables/minimize_parse_table.rs
+++ b/cli/src/generate/build_tables/minimize_parse_table.rs
@@ -1,8 +1,8 @@
 use super::item::TokenSet;
 use super::token_conflicts::TokenConflictMap;
-use crate::grammars::{SyntaxGrammar, VariableType};
-use crate::rules::{AliasMap, Symbol};
-use crate::tables::{ParseAction, ParseState, ParseTable, ParseTableEntry};
+use crate::generate::grammars::{SyntaxGrammar, VariableType};
+use crate::generate::rules::{AliasMap, Symbol};
+use crate::generate::tables::{ParseAction, ParseState, ParseTable, ParseTableEntry};
 use hashbrown::{HashMap, HashSet};

 pub(crate) fn minimize_parse_table(
diff --git a/cli/src/build_tables/mod.rs b/cli/src/generate/build_tables/mod.rs
similarity index 97%
rename from cli/src/build_tables/mod.rs
rename to cli/src/generate/build_tables/mod.rs
index 1f9acc14..7d55d0fa 100644
--- a/cli/src/build_tables/mod.rs
+++ b/cli/src/generate/build_tables/mod.rs
@@ -13,10 +13,10 @@ use self::item::TokenSet;
 use self::minimize_parse_table::minimize_parse_table;
 use self::token_conflicts::TokenConflictMap;
 use crate::error::Result;
-use crate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
-use crate::nfa::{CharacterSet, NfaCursor};
-use crate::rules::{AliasMap, Symbol};
-use crate::tables::{LexTable, ParseAction, ParseTable, ParseTableEntry};
+use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
+use crate::generate::nfa::{CharacterSet, NfaCursor};
+use crate::generate::rules::{AliasMap, Symbol};
+use crate::generate::tables::{LexTable, ParseAction, ParseTable, ParseTableEntry};

 pub(crate) fn build_tables(
     syntax_grammar: &SyntaxGrammar,
diff --git a/cli/src/build_tables/token_conflicts.rs b/cli/src/generate/build_tables/token_conflicts.rs
similarity index 97%
rename from cli/src/build_tables/token_conflicts.rs
rename to cli/src/generate/build_tables/token_conflicts.rs
index 7bb443a5..1a63bfc8 100644
--- a/cli/src/build_tables/token_conflicts.rs
+++ b/cli/src/generate/build_tables/token_conflicts.rs
@@ -1,6 +1,6 @@
-use crate::build_tables::item::TokenSet;
-use crate::grammars::LexicalGrammar;
-use crate::nfa::{CharacterSet, NfaCursor, NfaTransition};
+use crate::generate::build_tables::item::TokenSet;
+use crate::generate::grammars::LexicalGrammar;
+use crate::generate::nfa::{CharacterSet, NfaCursor, NfaTransition};
 use hashbrown::HashSet;
 use std::cmp::Ordering;
 use std::fmt;
@@ -288,9 +288,9 @@ fn variable_ids_for_states<'a>(
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::grammars::{Variable, VariableType};
-    use crate::prepare_grammar::{expand_tokens, ExtractedLexicalGrammar};
-    use crate::rules::{Rule, Symbol};
+    use crate::generate::grammars::{Variable, VariableType};
+    use crate::generate::prepare_grammar::{expand_tokens, ExtractedLexicalGrammar};
+    use crate::generate::rules::{Rule, Symbol};

     #[test]
     fn test_starting_characters() {
diff --git a/cli/src/js/dsl.js b/cli/src/generate/dsl.js
similarity index 100%
rename from cli/src/js/dsl.js
rename to cli/src/generate/dsl.js
diff --git a/cli/src/grammar-schema.json b/cli/src/generate/grammar-schema.json
similarity index 100%
rename from cli/src/grammar-schema.json
rename to cli/src/generate/grammar-schema.json
diff --git a/cli/src/grammars.rs b/cli/src/generate/grammars.rs
similarity index 98%
rename from cli/src/grammars.rs
rename to cli/src/generate/grammars.rs
index f82d6b02..3772bfd4 100644
--- a/cli/src/grammars.rs
+++ b/cli/src/generate/grammars.rs
@@ -1,5 +1,5 @@
-use crate::nfa::Nfa;
-use crate::rules::{Alias, Associativity, Rule, Symbol};
+use super::nfa::Nfa;
+use super::rules::{Alias, Associativity, Rule, Symbol};
 use hashbrown::HashMap;

 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
diff --git a/cli/src/generate/mod.rs b/cli/src/generate/mod.rs
new file mode 100644
index 00000000..7dfe5a4b
--- /dev/null
+++ b/cli/src/generate/mod.rs
@@ -0,0 +1,79 @@
+use self::build_tables::build_tables;
+use self::parse_grammar::parse_grammar;
+use self::prepare_grammar::prepare_grammar;
+use self::render::render_c_code;
+use crate::error::Result;
+use std::io::Write;
+use std::path::PathBuf;
+use std::process::{Command, Stdio};
+
+mod build_tables;
+mod grammars;
+mod nfa;
+mod parse_grammar;
+mod prepare_grammar;
+mod render;
+mod rules;
+mod tables;
+
+pub fn generate_parser_for_grammar(
+    grammar_path: &PathBuf,
+    minimize: bool,
+    state_ids_to_log: Vec<usize>,
+) -> Result<String> {
+    let grammar_json = load_js_grammar_file(grammar_path);
+    let input_grammar = parse_grammar(&grammar_json)?;
+    let (syntax_grammar, lexical_grammar, inlines, simple_aliases) =
+        prepare_grammar(&input_grammar)?;
+    let (parse_table, main_lex_table, keyword_lex_table, keyword_capture_token) = build_tables(
+        &syntax_grammar,
+        &lexical_grammar,
+        &simple_aliases,
+        &inlines,
+        minimize,
+        state_ids_to_log,
+    )?;
+    let c_code = render_c_code(
+        &input_grammar.name,
+        parse_table,
+        main_lex_table,
+        keyword_lex_table,
+        keyword_capture_token,
+        syntax_grammar,
+        lexical_grammar,
+        simple_aliases,
+    );
+    Ok(c_code)
+}
+
+fn load_js_grammar_file(grammar_path: &PathBuf) -> String {
+    let mut node_process = Command::new("node")
+        .stdin(Stdio::piped())
+        .stdout(Stdio::piped())
+        .spawn()
+        .expect("Failed to run `node`");
+
+    let js_prelude = include_str!("./dsl.js");
+    let mut node_stdin = node_process
+        .stdin
+        .take()
+        .expect("Failed to open stdin for node");
+    write!(
+        node_stdin,
+        "{}\nconsole.log(JSON.stringify(require(\"{}\"), null, 2));\n",
+        js_prelude,
+        grammar_path.to_str().unwrap()
+    )
+    .expect("Failed to write to node's stdin");
+    drop(node_stdin);
+    let output = node_process
+        .wait_with_output()
+        .expect("Failed to read output from node");
+    match output.status.code() {
+        None => panic!("Node process was killed"),
+        Some(0) => {}
+        Some(code) => panic!(format!("Node process exited with status {}", code)),
+    }
+
+    String::from_utf8(output.stdout).expect("Got invalid UTF8 from node")
+}
diff --git a/cli/src/nfa.rs b/cli/src/generate/nfa.rs
similarity index 100%
rename from cli/src/nfa.rs
rename to cli/src/generate/nfa.rs
diff --git a/cli/src/parse_grammar.rs b/cli/src/generate/parse_grammar.rs
similarity index 73%
rename from cli/src/parse_grammar.rs
rename to cli/src/generate/parse_grammar.rs
index 6808f402..e77dce9b 100644
--- a/cli/src/parse_grammar.rs
+++ b/cli/src/generate/parse_grammar.rs
@@ -1,7 +1,7 @@
-use serde_json::{Map, Value};
+use super::grammars::{InputGrammar, Variable, VariableType};
+use super::rules::Rule;
 use crate::error::Result;
-use crate::grammars::{InputGrammar, Variable, VariableType};
-use crate::rules::Rule;
+use serde_json::{Map, Value};

 #[derive(Deserialize)]
 #[serde(tag = "type")]
@@ -81,20 +81,20 @@ pub(crate) fn parse_grammar(input: &str) -> Result<InputGrammar> {
         })
     }

-    let extra_tokens = grammar_json.extras
+    let extra_tokens = grammar_json
+        .extras
         .unwrap_or(Vec::new())
         .into_iter()
         .map(parse_rule)
         .collect();
-    let external_tokens = grammar_json.externals
+    let external_tokens = grammar_json
+        .externals
         .unwrap_or(Vec::new())
         .into_iter()
         .map(parse_rule)
         .collect();
-    let expected_conflicts = grammar_json.conflicts
-        .unwrap_or(Vec::new());
-    let variables_to_inline = grammar_json.inline
-        .unwrap_or(Vec::new());
+    let expected_conflicts = grammar_json.conflicts.unwrap_or(Vec::new());
+    let variables_to_inline = grammar_json.inline.unwrap_or(Vec::new());

     Ok(InputGrammar {
         name: grammar_json.name,
@@ -109,7 +109,11 @@ pub(crate) fn parse_grammar(input: &str) -> Result<InputGrammar> {

 fn parse_rule(json: RuleJSON) -> Rule {
     match json {
-        RuleJSON::ALIAS { content, value, named } => Rule::alias(parse_rule(*content), value, named),
+        RuleJSON::ALIAS {
+            content,
+            value,
+            named,
+        } => Rule::alias(parse_rule(*content), value, named),
         RuleJSON::BLANK => Rule::Blank,
         RuleJSON::STRING { value } => Rule::String(value),
         RuleJSON::PATTERN { value } => Rule::Pattern(value),
@@ -117,11 +121,15 @@ fn parse_rule(json: RuleJSON) -> Rule {
         RuleJSON::CHOICE { members } => Rule::choice(members.into_iter().map(parse_rule).collect()),
         RuleJSON::SEQ { members } => Rule::seq(members.into_iter().map(parse_rule).collect()),
         RuleJSON::REPEAT1 { content } => Rule::repeat(parse_rule(*content)),
-        RuleJSON::REPEAT { content } => Rule::choice(vec![Rule::repeat(parse_rule(*content)), Rule::Blank]),
+        RuleJSON::REPEAT { content } => {
+            Rule::choice(vec![Rule::repeat(parse_rule(*content)), Rule::Blank])
+        }
         RuleJSON::PREC { value, content } => Rule::prec(value, parse_rule(*content)),
         RuleJSON::PREC_LEFT { value, content } => Rule::prec_left(value, parse_rule(*content)),
         RuleJSON::PREC_RIGHT { value, content } => Rule::prec_right(value, parse_rule(*content)),
-        RuleJSON::PREC_DYNAMIC { value, content } => Rule::prec_dynamic(value, parse_rule(*content)),
+        RuleJSON::PREC_DYNAMIC { value, content } => {
+            Rule::prec_dynamic(value, parse_rule(*content))
+        }
         RuleJSON::TOKEN { content } => Rule::token(parse_rule(*content)),
         RuleJSON::IMMEDIATE_TOKEN { content } => Rule::immediate_token(parse_rule(*content)),
     }
@@ -133,7 +141,8 @@ mod tests {

     #[test]
     fn test_parse_grammar() {
-        let grammar = parse_grammar(r#"{
+        let grammar = parse_grammar(
+            r#"{
             "name": "my_lang",
             "rules": {
                 "file": {
@@ -148,20 +157,25 @@ mod tests {
                     "value": "foo"
                 }
             }
-        }"#).unwrap();
+        }"#,
+        )
+        .unwrap();

         assert_eq!(grammar.name, "my_lang");
-        assert_eq!(grammar.variables, vec![
-            Variable {
-                name: "file".to_string(),
-                kind: VariableType::Named,
-                rule: Rule::repeat(Rule::NamedSymbol("statement".to_string()))
-            },
-            Variable {
-                name: "statement".to_string(),
-                kind: VariableType::Named,
-                rule: Rule::String("foo".to_string())
-            },
-        ]);
+        assert_eq!(
+            grammar.variables,
+            vec![
+                Variable {
+                    name: "file".to_string(),
+                    kind: VariableType::Named,
+                    rule: Rule::repeat(Rule::NamedSymbol("statement".to_string()))
+                },
+                Variable {
+                    name: "statement".to_string(),
+                    kind: VariableType::Named,
+                    rule: Rule::String("foo".to_string())
+                },
+            ]
+        );
     }
 }
diff --git a/cli/src/prepare_grammar/expand_repeats.rs b/cli/src/generate/prepare_grammar/expand_repeats.rs
similarity index 98%
rename from cli/src/prepare_grammar/expand_repeats.rs
rename to cli/src/generate/prepare_grammar/expand_repeats.rs
index 4589bd11..b290799b 100644
--- a/cli/src/prepare_grammar/expand_repeats.rs
+++ b/cli/src/generate/prepare_grammar/expand_repeats.rs
@@ -1,6 +1,6 @@
 use super::ExtractedSyntaxGrammar;
-use crate::grammars::{Variable, VariableType};
-use crate::rules::{Rule, Symbol};
+use crate::generate::grammars::{Variable, VariableType};
+use crate::generate::rules::{Rule, Symbol};
 use hashbrown::HashMap;
 use std::mem;

diff --git a/cli/src/prepare_grammar/expand_tokens.rs b/cli/src/generate/prepare_grammar/expand_tokens.rs
similarity index 98%
rename from cli/src/prepare_grammar/expand_tokens.rs
rename to cli/src/generate/prepare_grammar/expand_tokens.rs
index 2678df19..d1922dc0 100644
--- a/cli/src/prepare_grammar/expand_tokens.rs
+++ b/cli/src/generate/prepare_grammar/expand_tokens.rs
@@ -1,8 +1,8 @@
 use super::ExtractedLexicalGrammar;
 use crate::error::{Error, Result};
-use crate::grammars::{LexicalGrammar, LexicalVariable};
-use crate::nfa::{CharacterSet, Nfa, NfaState};
-use crate::rules::Rule;
+use crate::generate::grammars::{LexicalGrammar, LexicalVariable};
+use crate::generate::nfa::{CharacterSet, Nfa, NfaState};
+use crate::generate::rules::Rule;
 use regex_syntax::ast::{
     parse, Ast, Class, ClassPerlKind, ClassSet, ClassSetItem, RepetitionKind, RepetitionRange,
 };
@@ -366,8 +366,8 @@ impl NfaBuilder {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::grammars::Variable;
-    use crate::nfa::{NfaCursor, NfaTransition};
+    use crate::generate::grammars::Variable;
+    use crate::generate::nfa::{NfaCursor, NfaTransition};

     fn simulate_nfa<'a>(grammar: &'a LexicalGrammar, s: &'a str) -> Option<(usize, &'a str)> {
         let start_states = grammar.variables.iter().map(|v| v.start_state).collect();
diff --git a/cli/src/prepare_grammar/extract_simple_aliases.rs b/cli/src/generate/prepare_grammar/extract_simple_aliases.rs
similarity index 96%
rename from cli/src/prepare_grammar/extract_simple_aliases.rs
rename to cli/src/generate/prepare_grammar/extract_simple_aliases.rs
index aa8b3f77..84c535b9 100644
--- a/cli/src/prepare_grammar/extract_simple_aliases.rs
+++ b/cli/src/generate/prepare_grammar/extract_simple_aliases.rs
@@ -1,5 +1,5 @@
-use crate::rules::{Alias, AliasMap, Symbol, SymbolType};
-use crate::grammars::{LexicalGrammar, SyntaxGrammar};
+use crate::generate::rules::{Alias, AliasMap, Symbol, SymbolType};
+use crate::generate::grammars::{LexicalGrammar, SyntaxGrammar};

 #[derive(Clone, Default)]
 struct SymbolStatus {
@@ -83,8 +83,8 @@ pub(super) fn extract_simple_aliases(
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::grammars::{LexicalVariable, SyntaxVariable, VariableType, Production, ProductionStep};
-    use crate::nfa::Nfa;
+    use crate::generate::grammars::{LexicalVariable, SyntaxVariable, VariableType, Production, ProductionStep};
+    use crate::generate::nfa::Nfa;

     #[test]
     fn test_extract_simple_aliases() {
diff --git a/cli/src/prepare_grammar/extract_tokens.rs b/cli/src/generate/prepare_grammar/extract_tokens.rs
similarity index 98%
rename from cli/src/prepare_grammar/extract_tokens.rs
rename to cli/src/generate/prepare_grammar/extract_tokens.rs
index 5a54d34e..ae07763b 100644
--- a/cli/src/prepare_grammar/extract_tokens.rs
+++ b/cli/src/generate/prepare_grammar/extract_tokens.rs
@@ -1,7 +1,7 @@
 use super::{ExtractedLexicalGrammar, ExtractedSyntaxGrammar, InternedGrammar};
 use crate::error::{Error, Result};
-use crate::grammars::{ExternalToken, Variable, VariableType};
-use crate::rules::{MetadataParams, Rule, Symbol, SymbolType};
+use crate::generate::grammars::{ExternalToken, Variable, VariableType};
+use crate::generate::rules::{MetadataParams, Rule, Symbol, SymbolType};
 use hashbrown::HashMap;
 use std::mem;

@@ -311,7 +311,7 @@ impl SymbolReplacer {
 #[cfg(test)]
 mod test {
     use super::*;
-    use crate::grammars::VariableType;
+    use crate::generate::grammars::VariableType;

     #[test]
     fn test_extraction() {
diff --git a/cli/src/prepare_grammar/flatten_grammar.rs b/cli/src/generate/prepare_grammar/flatten_grammar.rs
similarity index 97%
rename from cli/src/prepare_grammar/flatten_grammar.rs
rename to cli/src/generate/prepare_grammar/flatten_grammar.rs
index 3ffef086..9409a010 100644
--- a/cli/src/prepare_grammar/flatten_grammar.rs
+++ b/cli/src/generate/prepare_grammar/flatten_grammar.rs
@@ -1,7 +1,7 @@
 use super::ExtractedSyntaxGrammar;
 use crate::error::Result;
-use crate::grammars::{Production, ProductionStep, SyntaxGrammar, SyntaxVariable, Variable};
-use crate::rules::{Alias, Associativity, Rule};
+use crate::generate::grammars::{Production, ProductionStep, SyntaxGrammar, SyntaxVariable, Variable};
+use crate::generate::rules::{Alias, Associativity, Rule};

 struct RuleFlattener {
     production: Production,
@@ -163,8 +163,8 @@ pub(super) fn flatten_grammar(grammar: ExtractedSyntaxGrammar) -> Result
 Result {
     let interner = Interner { grammar };
diff --git a/cli/src/prepare_grammar/mod.rs b/cli/src/generate/prepare_grammar/mod.rs
similarity index 95%
rename from cli/src/prepare_grammar/mod.rs
rename to cli/src/generate/prepare_grammar/mod.rs
index b0c1d2a3..41f668f4 100644
--- a/cli/src/prepare_grammar/mod.rs
+++ b/cli/src/generate/prepare_grammar/mod.rs
@@ -14,10 +14,10 @@ use self::flatten_grammar::flatten_grammar;
 use self::intern_symbols::intern_symbols;
 use self::process_inlines::process_inlines;
 use crate::error::Result;
-use crate::grammars::{
+use crate::generate::grammars::{
     ExternalToken, InlinedProductionMap, InputGrammar, LexicalGrammar, SyntaxGrammar, Variable,
 };
-use crate::rules::{AliasMap, Rule, Symbol};
+use crate::generate::rules::{AliasMap, Rule, Symbol};

 pub(crate) struct IntermediateGrammar {
     variables: Vec,
diff --git a/cli/src/prepare_grammar/process_inlines.rs b/cli/src/generate/prepare_grammar/process_inlines.rs
similarity index 98%
rename from cli/src/prepare_grammar/process_inlines.rs
rename to cli/src/generate/prepare_grammar/process_inlines.rs
index 557b0fa4..3c0f529a 100644
--- a/cli/src/prepare_grammar/process_inlines.rs
+++ b/cli/src/generate/prepare_grammar/process_inlines.rs
@@ -1,4 +1,4 @@
-use crate::grammars::{InlinedProductionMap, Production, ProductionStep, SyntaxGrammar};
+use crate::generate::grammars::{InlinedProductionMap, Production, ProductionStep, SyntaxGrammar};
 use hashbrown::HashMap;

 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -184,8 +184,8 @@ pub(super) fn process_inlines(grammar: &SyntaxGrammar) -> InlinedProductionMap {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::grammars::{ProductionStep, SyntaxVariable, VariableType};
-    use crate::rules::{Associativity, Symbol};
+    use crate::generate::grammars::{ProductionStep, SyntaxVariable, VariableType};
+    use crate::generate::rules::{Associativity, Symbol};

     #[test]
     fn test_basic_inlining() {
diff --git a/cli/src/render/mod.rs b/cli/src/generate/render.rs
similarity index 99%
rename from cli/src/render/mod.rs
rename to cli/src/generate/render.rs
index 36429848..5e0d2b67 100644
--- a/cli/src/render/mod.rs
+++ b/cli/src/generate/render.rs
@@ -1,7 +1,7 @@
-use crate::grammars::{ExternalToken, LexicalGrammar, SyntaxGrammar, VariableType};
-use crate::nfa::CharacterSet;
-use crate::rules::{Alias, AliasMap, Symbol, SymbolType};
-use crate::tables::{AdvanceAction, LexState, LexTable, ParseAction, ParseTable, ParseTableEntry};
+use super::grammars::{ExternalToken, LexicalGrammar, SyntaxGrammar, VariableType};
+use super::nfa::CharacterSet;
+use super::rules::{Alias, AliasMap, Symbol, SymbolType};
+use super::tables::{AdvanceAction, LexState, LexTable, ParseAction, ParseTable, ParseTableEntry};
 use core::ops::Range;
 use hashbrown::{HashMap, HashSet};
 use std::fmt::Write;
diff --git a/cli/src/rules.rs b/cli/src/generate/rules.rs
similarity index 100%
rename from cli/src/rules.rs
rename to cli/src/generate/rules.rs
diff --git a/cli/src/tables.rs b/cli/src/generate/tables.rs
similarity index 97%
rename from cli/src/tables.rs
rename to cli/src/generate/tables.rs
index edbbaaab..6c3da68e 100644
--- a/cli/src/tables.rs
+++ b/cli/src/generate/tables.rs
@@ -1,5 +1,5 @@
-use crate::nfa::CharacterSet;
-use crate::rules::{Alias, Associativity, Symbol};
+use super::nfa::CharacterSet;
+use super::rules::{Alias, Associativity, Symbol};
 use hashbrown::HashMap;

 pub(crate) type AliasSequenceId = usize;
diff --git a/cli/src/main.rs b/cli/src/main.rs
index 11c277c3..fe6ffd8c 100644
--- a/cli/src/main.rs
+++ b/cli/src/main.rs
@@ -7,24 +7,14 @@ extern crate serde_derive;
 extern crate hashbrown;
 extern crate serde_json;

-use clap::{App, Arg, SubCommand};
-use std::env;
-use std::io::Write;
-use std::path::PathBuf;
-use std::process::{exit, Command, Stdio};
-use std::usize;
-
-mod build_tables;
 mod error;
 mod generate;
-mod grammars;
 mod logger;
-mod nfa;
-mod parse_grammar;
-mod prepare_grammar;
-mod render;
-mod rules;
-mod tables;
+
+use clap::{App, Arg, SubCommand};
+use std::env;
+use std::process::exit;
+use std::usize;

 fn main() {
     if let Err(e) = run() {
@@ -77,43 +67,10 @@ fn run() -> error::Result<()> {
         });
         let mut grammar_path = env::current_dir().expect("Failed to read CWD");
         grammar_path.push("grammar.js");
-        let grammar_json = load_js_grammar_file(grammar_path);
         let code =
-            generate::generate_parser_for_grammar(&grammar_json, minimize, state_ids_to_log)?;
+            generate::generate_parser_for_grammar(&grammar_path, minimize, state_ids_to_log)?;
         println!("{}", code);
     }

     Ok(())
 }
-
-fn load_js_grammar_file(grammar_path: PathBuf) -> String {
-    let mut node_process = Command::new("node")
-        .stdin(Stdio::piped())
-        .stdout(Stdio::piped())
-        .spawn()
-        .expect("Failed to run `node`");
-
-    let js_prelude = include_str!("./js/dsl.js");
-    let mut node_stdin = node_process
-        .stdin
-        .take()
-        .expect("Failed to open stdin for node");
-    write!(
-        node_stdin,
-        "{}\nconsole.log(JSON.stringify(require(\"{}\"), null, 2));\n",
-        js_prelude,
-        grammar_path.to_str().unwrap()
-    )
-    .expect("Failed to write to node's stdin");
-    drop(node_stdin);
-    let output = node_process
-        .wait_with_output()
-        .expect("Failed to read output from node");
-    match output.status.code() {
-        None => panic!("Node process was killed"),
-        Some(0) => {}
-        Some(code) => panic!(format!("Node process exited with status {}", code)),
-    }
-
-    String::from_utf8(output.stdout).expect("Got invalid UTF8 from node")
-}
diff --git a/script/check-mallocs b/script/check-mallocs
index 0bd064d0..889861d8 100755
--- a/script/check-mallocs
+++ b/script/check-mallocs
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-src_dir="src/runtime"
+src_dir="lib/src"

 allocation_functions=(
   malloc