Remove state-ids-to-log flag
commit f85ce2fbfa
parent ea515b6967
4 changed files with 10 additions and 60 deletions
@@ -42,7 +42,6 @@ struct ParseTableBuilder<'a> {
     item_sets_by_state_id: Vec<ParseItemSet<'a>>,
     parse_state_queue: VecDeque<ParseStateQueueEntry>,
     parse_table: ParseTable,
-    state_ids_to_log: Vec<ParseStateId>,
 }

 impl<'a> ParseTableBuilder<'a> {
@@ -73,24 +72,6 @@ impl<'a> ParseTableBuilder<'a> {
             let item_set = self
                 .item_set_builder
                 .transitive_closure(&self.item_sets_by_state_id[entry.state_id]);
-
-            if self.state_ids_to_log.contains(&entry.state_id) {
-                eprintln!(
-                    "state: {}\n\ninitial item set:\n\n{}closed item set:\n\n{}",
-                    entry.state_id,
-                    super::item::ParseItemSetDisplay(
-                        &self.item_sets_by_state_id[entry.state_id],
-                        self.syntax_grammar,
-                        self.lexical_grammar,
-                    ),
-                    super::item::ParseItemSetDisplay(
-                        &item_set,
-                        self.syntax_grammar,
-                        self.lexical_grammar,
-                    )
-                );
-            }
-
             self.add_actions(
                 entry.preceding_symbols,
                 entry.preceding_auxiliary_symbols,
@@ -775,7 +756,6 @@ pub(crate) fn build_parse_table(
     lexical_grammar: &LexicalGrammar,
     inlines: &InlinedProductionMap,
     variable_info: &Vec<VariableInfo>,
-    state_ids_to_log: Vec<usize>,
 ) -> Result<(ParseTable, Vec<TokenSet>)> {
     let item_set_builder = ParseItemSetBuilder::new(syntax_grammar, lexical_grammar, inlines);
     let mut following_tokens = vec![TokenSet::new(); lexical_grammar.variables.len()];
|
@ -789,7 +769,6 @@ pub(crate) fn build_parse_table(
|
|||
let table = ParseTableBuilder {
|
||||
syntax_grammar,
|
||||
lexical_grammar,
|
||||
state_ids_to_log,
|
||||
item_set_builder,
|
||||
variable_info,
|
||||
state_ids_by_item_set: HashMap::new(),
|
||||
|
|
|
|||
|
|
@@ -27,15 +27,9 @@ pub(crate) fn build_tables(
     variable_info: &Vec<VariableInfo>,
     inlines: &InlinedProductionMap,
     minimize: bool,
-    state_ids_to_log: Vec<usize>,
 ) -> Result<(ParseTable, LexTable, LexTable, Option<Symbol>)> {
-    let (mut parse_table, following_tokens) = build_parse_table(
-        syntax_grammar,
-        lexical_grammar,
-        inlines,
-        variable_info,
-        state_ids_to_log,
-    )?;
+    let (mut parse_table, following_tokens) =
+        build_parse_table(syntax_grammar, lexical_grammar, inlines, variable_info)?;
     let token_conflict_map = TokenConflictMap::new(lexical_grammar, following_tokens);
     let coincident_token_index = CoincidentTokenIndex::new(&parse_table, lexical_grammar);
     let keywords = identify_keywords(

@@ -38,7 +38,6 @@ pub fn generate_parser_in_directory(
     repo_path: &PathBuf,
     grammar_path: Option<&str>,
     minimize: bool,
-    state_ids_to_log: Vec<usize>,
 ) -> Result<()> {
     let repo_src_path = repo_path.join("src");
     let repo_header_path = repo_src_path.join("tree_sitter");
@@ -62,7 +61,7 @@ pub fn generate_parser_in_directory(
         name: language_name,
         c_code,
         node_types_json,
-    } = generate_parser_for_grammar_with_opts(&grammar_json, minimize, state_ids_to_log)?;
+    } = generate_parser_for_grammar_with_opts(&grammar_json, minimize)?;

     write_file(&repo_src_path.join("parser.c"), c_code)?;
     write_file(&repo_src_path.join("node-types.json"), node_types_json)?;
@@ -85,14 +84,13 @@ pub fn generate_parser_in_directory(

 pub fn generate_parser_for_grammar(grammar_json: &str) -> Result<(String, String)> {
     let grammar_json = JSON_COMMENT_REGEX.replace_all(grammar_json, "\n");
-    let parser = generate_parser_for_grammar_with_opts(&grammar_json, true, Vec::new())?;
+    let parser = generate_parser_for_grammar_with_opts(&grammar_json, true)?;
     Ok((parser.name, parser.c_code))
 }

 fn generate_parser_for_grammar_with_opts(
     grammar_json: &str,
     minimize: bool,
-    state_ids_to_log: Vec<usize>,
 ) -> Result<GeneratedParser> {
     let input_grammar = parse_grammar(grammar_json)?;
     let (syntax_grammar, lexical_grammar, inlines, simple_aliases) =
@@ -111,7 +109,6 @@ fn generate_parser_for_grammar_with_opts(
         &variable_info,
         &inlines,
         minimize,
-        state_ids_to_log,
     )?;
     let name = input_grammar.name;
     let c_code = render_c_code(

@@ -1,10 +1,8 @@
 use clap::{App, AppSettings, Arg, SubCommand};
 use error::Error;
-use std::env;
-use std::fs;
+use std::{env, fs, u64};
 use std::path::Path;
 use std::process::exit;
-use std::{u64, usize};
 use tree_sitter_cli::{
     config, error, generate, highlight, loader, logger, parse, properties, test, wasm, web_ui,
 };
@@ -39,11 +37,6 @@ fn run() -> error::Result<()> {
                 .arg(Arg::with_name("grammar-path").index(1))
                 .arg(Arg::with_name("log").long("log"))
                 .arg(Arg::with_name("properties-only").long("properties"))
-                .arg(
-                    Arg::with_name("state-ids-to-log")
-                        .long("log-state")
-                        .takes_value(true),
-                )
                 .arg(Arg::with_name("no-minimize").long("no-minimize")),
         )
         .subcommand(
@@ -120,30 +113,17 @@ fn run() -> error::Result<()> {
             let config = config::Config::new(&home_dir);
             config.save(&home_dir)?;
         } else if let Some(matches) = matches.subcommand_matches("generate") {
-            if matches.is_present("log") {
-                logger::init();
-            }
-
             let grammar_path = matches.value_of("grammar-path");
             let minimize = !matches.is_present("no-minimize");
             let properties_only = matches.is_present("properties-only");
             let parser_only = grammar_path.is_some();
-            let state_ids_to_log = matches
-                .values_of("state-ids-to-log")
-                .map_or(Vec::new(), |ids| {
-                    ids.filter_map(|id| usize::from_str_radix(id, 10).ok())
-                        .collect()
-                });
-
-            if !properties_only {
-                generate::generate_parser_in_directory(
-                    &current_dir,
-                    grammar_path,
-                    minimize,
-                    state_ids_to_log,
-                )?;
+            if matches.is_present("log") {
+                logger::init();
+            }
+
+            if !properties_only {
+                generate::generate_parser_in_directory(&current_dir, grammar_path, minimize)?;
             }
             if !parser_only {
                 properties::generate_property_sheets_in_directory(&current_dir)?;
             }
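
One piece of logic removed by this commit is the parsing of `--log-state` values into numeric state IDs. Below is a minimal, self-contained sketch of that behaviour, assembled from the removed lines above; the helper name parse_state_ids is hypothetical and is not part of the tree-sitter CLI.

// Hypothetical helper mirroring the removed code: each value supplied to the
// old `--log-state` option was parsed as a base-10 usize, and values that
// failed to parse were silently dropped.
fn parse_state_ids<'a>(values: impl Iterator<Item = &'a str>) -> Vec<usize> {
    values
        .filter_map(|id| usize::from_str_radix(id, 10).ok())
        .collect()
}

fn main() {
    let ids = parse_state_ids(["3", "17", "not-a-number"].iter().copied());
    assert_eq!(ids, vec![3, 17]);
    println!("{:?}", ids); // prints: [3, 17]
}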