chore: clippy lints

This commit is contained in:
Amaan Qureshi 2024-09-07 20:13:58 -04:00
parent 9301d38b77
commit 5e46fef0d7
17 changed files with 72 additions and 64 deletions

View file

@@ -7,6 +7,7 @@ pub struct Edit {
pub inserted_text: Vec<u8>,
}
#[must_use]
pub fn invert_edit(input: &[u8], edit: &Edit) -> Edit {
let position = edit.position;
let removed_content = &input[position..(position + edit.deleted_length)];

View file

@@ -65,20 +65,6 @@ pub fn fuzz_language_corpus(
grammar_dir: &Path,
options: &mut FuzzOptions,
) {
let subdir = options.subdir.take().unwrap_or_default();
let corpus_dir = grammar_dir.join(subdir).join("test").join("corpus");
if !corpus_dir.exists() || !corpus_dir.is_dir() {
eprintln!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
return;
}
if std::fs::read_dir(&corpus_dir).unwrap().count() == 0 {
eprintln!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
return;
}
fn retain(entry: &mut TestEntry, language_name: &str) -> bool {
match entry {
TestEntry::Example { attributes, .. } => {
@@ -97,6 +83,20 @@ pub fn fuzz_language_corpus(
}
}
let subdir = options.subdir.take().unwrap_or_default();
let corpus_dir = grammar_dir.join(subdir).join("test").join("corpus");
if !corpus_dir.exists() || !corpus_dir.is_dir() {
eprintln!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
return;
}
if std::fs::read_dir(&corpus_dir).unwrap().count() == 0 {
eprintln!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
return;
}
let mut main_tests = parse_tests(&corpus_dir).unwrap();
match main_tests {
TestEntry::Group {
@@ -104,7 +104,7 @@ pub fn fuzz_language_corpus(
} => {
children.retain_mut(|child| retain(child, language_name));
}
_ => unreachable!(),
TestEntry::Example { .. } => unreachable!(),
}
let tests = flatten_tests(main_tests, options.filter.as_ref());

View file

@@ -10,6 +10,7 @@ const OPERATORS: &[char] = &[
pub struct Rand(StdRng);
impl Rand {
#[must_use]
pub fn new(seed: usize) -> Self {
Self(StdRng::seed_from_u64(seed as u64))
}

View file

@@ -6,6 +6,7 @@ pub struct ScopeSequence(Vec<ScopeStack>);
type ScopeStack = Vec<&'static str>;
impl ScopeSequence {
#[must_use]
pub fn new(tree: &Tree) -> Self {
let mut result = Self(Vec::new());
let mut scope_stack = Vec::new();

View file

@@ -144,7 +144,7 @@ impl<'a> ParseItem<'a> {
/// Create an item identical to this one, but with a different production.
/// This is used when dynamically "inlining" certain symbols in a production.
pub const fn substitute_production(&self, production: &'a Production) -> ParseItem<'a> {
pub const fn substitute_production(&self, production: &'a Production) -> Self {
let mut result = *self;
result.production = production;
result

View file

@@ -237,7 +237,7 @@ impl<'a> ParseItemSetBuilder<'a> {
result
}
pub fn transitive_closure(&mut self, item_set: &ParseItemSet<'a>) -> ParseItemSet<'a> {
pub fn transitive_closure(&self, item_set: &ParseItemSet<'a>) -> ParseItemSet<'a> {
let mut result = ParseItemSet::default();
for (item, lookaheads) in &item_set.entries {
if let Some(productions) = self

View file

@@ -130,7 +130,7 @@ pub fn generate_parser_for_grammar(grammar_json: &str) -> Result<(String, String
let input_grammar = parse_grammar(&grammar_json)?;
let parser =
generate_parser_for_grammar_with_opts(&input_grammar, tree_sitter::LANGUAGE_VERSION, None)?;
Ok((input_grammar.name.clone(), parser.c_code))
Ok((input_grammar.name, parser.c_code))
}
fn generate_parser_for_grammar_with_opts(

View file

@@ -628,13 +628,16 @@ pub fn generate_node_types_json(
for (name, kind) in regular_tokens.chain(external_tokens) {
match kind {
VariableType::Named => {
let node_type_json = node_types_json.entry(name.clone()).or_insert(NodeInfoJSON {
kind: name.clone(),
named: true,
fields: None,
children: None,
subtypes: None,
});
let node_type_json =
node_types_json
.entry(name.clone())
.or_insert_with(|| NodeInfoJSON {
kind: name.clone(),
named: true,
fields: None,
children: None,
subtypes: None,
});
if let Some(children) = &mut node_type_json.children {
children.required = false;
}

View file

@@ -28,11 +28,7 @@ pub(super) fn extract_tokens(
let mut lexical_variables = Vec::with_capacity(extractor.extracted_variables.len());
for variable in extractor.extracted_variables {
lexical_variables.push(Variable {
name: variable.name,
kind: variable.kind,
rule: variable.rule,
});
lexical_variables.push(variable);
}
// If a variable's entire rule was extracted as a token and that token didn't

View file

@@ -15,7 +15,7 @@ struct RuleFlattener {
}
impl RuleFlattener {
fn new() -> Self {
const fn new() -> Self {
Self {
production: Production {
steps: Vec::new(),

View file

@@ -90,6 +90,27 @@ pub fn prepare_grammar(
/// within the `precedences` lists, and also that there are no conflicting
/// precedence orderings declared in those lists.
fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
// Check that no rule contains a named precedence that is not present in
// any of the `precedences` lists.
fn validate(rule_name: &str, rule: &Rule, names: &HashSet<&String>) -> Result<()> {
match rule {
Rule::Repeat(rule) => validate(rule_name, rule, names),
Rule::Seq(elements) | Rule::Choice(elements) => elements
.iter()
.try_for_each(|e| validate(rule_name, e, names)),
Rule::Metadata { rule, params } => {
if let Precedence::Name(n) = &params.precedence {
if !names.contains(n) {
return Err(anyhow!("Undeclared precedence '{n}' in rule '{rule_name}'"));
}
}
validate(rule_name, rule, names)?;
Ok(())
}
_ => Ok(()),
}
}
// For any two precedence names `a` and `b`, if `a` comes before `b`
// in some list, then it cannot come *after* `b` in any list.
let mut pairs = HashMap::new();
@@ -120,27 +141,6 @@ fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
}
}
// Check that no rule contains a named precedence that is not present in
// any of the `precedences` lists.
fn validate(rule_name: &str, rule: &Rule, names: &HashSet<&String>) -> Result<()> {
match rule {
Rule::Repeat(rule) => validate(rule_name, rule, names),
Rule::Seq(elements) | Rule::Choice(elements) => elements
.iter()
.try_for_each(|e| validate(rule_name, e, names)),
Rule::Metadata { rule, params } => {
if let Precedence::Name(n) = &params.precedence {
if !names.contains(n) {
return Err(anyhow!("Undeclared precedence '{n}' in rule '{rule_name}'"));
}
}
validate(rule_name, rule, names)?;
Ok(())
}
_ => Ok(()),
}
}
let precedence_names = grammar
.precedence_orderings
.iter()

View file

@@ -849,7 +849,7 @@ impl Generator {
// are not at the end of the file.
let check_eof = large_set.contains('\0');
if check_eof {
add!(self, "(!eof && ")
add!(self, "(!eof && ");
}
let char_set_info = &mut self.large_character_set_info[large_char_set_ix];
@@ -1663,7 +1663,7 @@ impl Generator {
'\r' => add!(self, "'\\r'"),
_ => {
if c == '\0' {
add!(self, "0")
add!(self, "0");
} else if c == ' ' || c.is_ascii_graphic() {
add!(self, "'{c}'");
} else {

View file

@@ -146,7 +146,7 @@ impl Rule {
Self::Choice(elements)
}
pub fn seq(rules: Vec<Self>) -> Self {
pub const fn seq(rules: Vec<Self>) -> Self {
Self::Seq(rules)
}
}
@@ -272,7 +272,7 @@ impl From<Symbol> for Rule {
}
impl TokenSet {
pub fn new() -> Self {
pub const fn new() -> Self {
Self {
terminal_bits: SmallBitVec::new(),
external_bits: SmallBitVec::new(),

View file

@@ -25,7 +25,12 @@ pub struct Assertion {
impl Assertion {
#[must_use]
pub fn new(row: usize, col: usize, negative: bool, expected_capture_name: String) -> Self {
pub const fn new(
row: usize,
col: usize,
negative: bool,
expected_capture_name: String,
) -> Self {
Self {
position: Point::new(row, col),
negative,

View file

@@ -216,9 +216,8 @@ pub fn get_test_info<'test>(
} => {
if *test_num == target_test {
return Some((name, input, attributes.languages.clone()));
} else {
*test_num += 1;
}
*test_num += 1;
}
TestEntry::Group { children, .. } => {
for child in children {
@@ -648,7 +647,7 @@ fn write_tests_to_buffer(
if attributes_str.is_empty() {
attributes_str.clone()
} else {
format!("{}\n", attributes_str)
format!("{attributes_str}\n")
},
"=".repeat(*header_delim_len),
"-".repeat(*divider_delim_len),

View file

@@ -291,8 +291,9 @@ pub struct LossyUtf8<'a> {
}
impl Language {
#[must_use]
pub fn new(builder: LanguageFn) -> Self {
Self(unsafe { (builder.into_raw())() as _ })
Self(unsafe { builder.into_raw()().cast() })
}
/// Get the ABI version number that indicates which version of the

View file

@@ -1,11 +1,11 @@
#![no_std]
/// LanguageFn wraps a C function that returns a pointer to a tree-sitter grammar.
/// `LanguageFn` wraps a C function that returns a pointer to a tree-sitter grammar.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct LanguageFn(unsafe extern "C" fn() -> *const ());
impl LanguageFn {
/// Creates a `LanguageFn`.
/// Creates a [`LanguageFn`].
///
/// # Safety
///
@@ -15,7 +15,8 @@ impl LanguageFn {
Self(f)
}
/// Gets the function wrapped by this `LanguageFn`.
/// Gets the function wrapped by this [`LanguageFn`].
#[must_use]
pub const fn into_raw(self) -> unsafe extern "C" fn() -> *const () {
self.0
}