Commit b35efa8

style: format imports
1 parent: a48054f

Some content is hidden: large commits hide part of the diff by default, so not all of the changed files appear below.

69 files changed: +479, -314 lines
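
Every file below gets the same import layout: the use std::... lines are merged into a single nested use std::{...} tree and placed first, external crates form the next group, and self::/super::/crate:: paths come last, with a blank line between groups. A minimal sketch of that layout, using a hypothetical rules module and a main function added only so the snippet compiles on its own:

use std::{
    collections::HashMap,
    path::{Path, PathBuf},
};

// External-crate imports (e.g. use anyhow::Result;) would form the second group.

use self::rules::Symbol;

// Stand-in for a crate-local module such as crate::generate::rules.
mod rules {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct Symbol(pub usize);
}

fn main() {
    // Exercise the imported items so the sketch does something observable.
    let mut table: HashMap<Symbol, PathBuf> = HashMap::new();
    table.insert(Symbol(0), Path::new("grammar.js").to_path_buf());
    println!("{table:?}");
}

Nightly rustfmt can produce this grouping automatically with imports_granularity = "Crate" and group_imports = "StdExternalCrate" in rustfmt.toml; whether this commit was generated that way or formatted by hand is not stated here.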

cli/benches/benchmark.rs

+9 -4

@@ -1,9 +1,14 @@
+use std::{
+    collections::BTreeMap,
+    env, fs,
+    path::{Path, PathBuf},
+    str,
+    time::Instant,
+    usize,
+};
+
 use anyhow::Context;
 use lazy_static::lazy_static;
-use std::collections::BTreeMap;
-use std::path::{Path, PathBuf};
-use std::time::Instant;
-use std::{env, fs, str, usize};
 use tree_sitter::{Language, Parser, Query};
 use tree_sitter_loader::{CompileConfig, Loader};

cli/config/src/lib.rs

+2 -2

@@ -1,10 +1,10 @@
 #![doc = include_str!("../README.md")]
 
+use std::{env, fs, path::PathBuf};
+
 use anyhow::{anyhow, Context, Result};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use std::path::PathBuf;
-use std::{env, fs};
 
 /// Holds the contents of tree-sitter's configuration file.
 ///

cli/loader/src/lib.rs

+13 -9

@@ -1,14 +1,18 @@
 #![doc = include_str!("../README.md")]
 
-use std::collections::HashMap;
-use std::ffi::{OsStr, OsString};
-use std::io::{BufRead, BufReader};
-use std::ops::Range;
-use std::path::{Path, PathBuf};
-use std::process::Command;
-use std::sync::Mutex;
-use std::time::SystemTime;
-use std::{env, fs, mem};
+use std::{
+    collections::HashMap,
+    env,
+    ffi::{OsStr, OsString},
+    fs,
+    io::{BufRead, BufReader},
+    mem,
+    ops::Range,
+    path::{Path, PathBuf},
+    process::Command,
+    sync::Mutex,
+    time::SystemTime,
+};
 
 use anyhow::{anyhow, Context, Error, Result};
 use fs4::FileExt;

cli/src/generate/build_tables/build_lex_table.rs

+14 -10

@@ -1,14 +1,18 @@
-use super::coincident_tokens::CoincidentTokenIndex;
-use super::token_conflicts::TokenConflictMap;
-use crate::generate::dedup::split_state_id_groups;
-use crate::generate::grammars::{LexicalGrammar, SyntaxGrammar};
-use crate::generate::nfa::NfaCursor;
-use crate::generate::rules::{Symbol, TokenSet};
-use crate::generate::tables::{AdvanceAction, LexState, LexTable, ParseStateId, ParseTable};
+use std::{
+    collections::{hash_map::Entry, HashMap, VecDeque},
+    mem,
+};
+
 use log::info;
-use std::collections::hash_map::Entry;
-use std::collections::{HashMap, VecDeque};
-use std::mem;
+
+use super::{coincident_tokens::CoincidentTokenIndex, token_conflicts::TokenConflictMap};
+use crate::generate::{
+    dedup::split_state_id_groups,
+    grammars::{LexicalGrammar, SyntaxGrammar},
+    nfa::NfaCursor,
+    rules::{Symbol, TokenSet},
+    tables::{AdvanceAction, LexState, LexTable, ParseStateId, ParseTable},
+};
 
 pub fn build_lex_table(
     parse_table: &mut ParseTable,

cli/src/generate/build_tables/build_parse_table.rs

+22 -17

@@ -1,25 +1,30 @@
-use super::item::{ParseItem, ParseItemSet, ParseItemSetCore};
-use super::item_set_builder::ParseItemSetBuilder;
-use crate::generate::grammars::PrecedenceEntry;
-use crate::generate::grammars::{
-    InlinedProductionMap, LexicalGrammar, SyntaxGrammar, VariableType,
+use std::{
+    cmp::Ordering,
+    collections::{BTreeMap, HashMap, HashSet, VecDeque},
+    fmt::Write,
+    hash::BuildHasherDefault,
 };
-use crate::generate::node_types::VariableInfo;
-use crate::generate::rules::{Associativity, Precedence, Symbol, SymbolType, TokenSet};
-use crate::generate::tables::{
-    FieldLocation, GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry,
-    ProductionInfo, ProductionInfoId,
-};
-use anyhow::{anyhow, Result};
-use std::cmp::Ordering;
-use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
-use std::fmt::Write;
-use std::hash::BuildHasherDefault;
-use std::u32;
 
+use anyhow::{anyhow, Result};
 use indexmap::{map::Entry, IndexMap};
 use rustc_hash::FxHasher;
 
+use super::{
+    item::{ParseItem, ParseItemSet, ParseItemSetCore},
+    item_set_builder::ParseItemSetBuilder,
+};
+use crate::generate::{
+    grammars::{
+        InlinedProductionMap, LexicalGrammar, PrecedenceEntry, SyntaxGrammar, VariableType,
+    },
+    node_types::VariableInfo,
+    rules::{Associativity, Precedence, Symbol, SymbolType, TokenSet},
+    tables::{
+        FieldLocation, GotoAction, ParseAction, ParseState, ParseStateId, ParseTable,
+        ParseTableEntry, ProductionInfo, ProductionInfoId,
+    },
+};
+
 // For conflict reporting, each parse state is associated with an example
 // sequence of symbols that could lead to that parse state.
 type SymbolSequence = Vec<Symbol>;

cli/src/generate/build_tables/coincident_tokens.rs

+6 -3

@@ -1,8 +1,11 @@
-use crate::generate::grammars::LexicalGrammar;
-use crate::generate::rules::Symbol;
-use crate::generate::tables::{ParseStateId, ParseTable};
 use std::fmt;
 
+use crate::generate::{
+    grammars::LexicalGrammar,
+    rules::Symbol,
+    tables::{ParseStateId, ParseTable},
+};
+
 pub struct CoincidentTokenIndex<'a> {
     entries: Vec<Vec<ParseStateId>>,
     grammar: &'a LexicalGrammar,

cli/src/generate/build_tables/item.rs

+11 -6

@@ -1,10 +1,15 @@
-use crate::generate::grammars::{LexicalGrammar, Production, ProductionStep, SyntaxGrammar};
-use crate::generate::rules::{Associativity, Precedence, Symbol, SymbolType, TokenSet};
+use std::{
+    cmp::Ordering,
+    fmt,
+    hash::{Hash, Hasher},
+};
+
 use lazy_static::lazy_static;
-use std::cmp::Ordering;
-use std::fmt;
-use std::hash::{Hash, Hasher};
-use std::u32;
+
+use crate::generate::{
+    grammars::{LexicalGrammar, Production, ProductionStep, SyntaxGrammar},
+    rules::{Associativity, Precedence, Symbol, SymbolType, TokenSet},
+};
 
 lazy_static! {
     static ref START_PRODUCTION: Production = Production {

cli/src/generate/build_tables/item_set_builder.rs

+9 -4

@@ -1,8 +1,13 @@
+use std::{
+    collections::{HashMap, HashSet},
+    fmt,
+};
+
 use super::item::{ParseItem, ParseItemDisplay, ParseItemSet, TokenSetDisplay};
-use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
-use crate::generate::rules::{Symbol, SymbolType, TokenSet};
-use std::collections::{HashMap, HashSet};
-use std::fmt;
+use crate::generate::{
+    grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar},
+    rules::{Symbol, SymbolType, TokenSet},
+};
 
 #[derive(Clone, Debug, PartialEq, Eq)]
 struct TransitiveClosureAddition<'a> {

cli/src/generate/build_tables/minimize_parse_table.rs

+12 -8

@@ -1,13 +1,17 @@
-use super::token_conflicts::TokenConflictMap;
-use crate::generate::dedup::split_state_id_groups;
-use crate::generate::grammars::{LexicalGrammar, SyntaxGrammar, VariableType};
-use crate::generate::rules::{AliasMap, Symbol, TokenSet};
-use crate::generate::tables::{
-    GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry,
+use std::{
+    collections::{HashMap, HashSet},
+    mem,
 };
+
 use log::info;
-use std::collections::{HashMap, HashSet};
-use std::mem;
+
+use super::token_conflicts::TokenConflictMap;
+use crate::generate::{
+    dedup::split_state_id_groups,
+    grammars::{LexicalGrammar, SyntaxGrammar, VariableType},
+    rules::{AliasMap, Symbol, TokenSet},
+    tables::{GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry},
+};
 
 pub fn minimize_parse_table(
     parse_table: &mut ParseTable,

cli/src/generate/build_tables/mod.rs

+17 -11

@@ -6,19 +6,25 @@ mod item_set_builder;
 mod minimize_parse_table;
 mod token_conflicts;
 
-use self::build_lex_table::build_lex_table;
-use self::build_parse_table::{build_parse_table, ParseStateInfo};
-use self::coincident_tokens::CoincidentTokenIndex;
-use self::minimize_parse_table::minimize_parse_table;
-use self::token_conflicts::TokenConflictMap;
-use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
-use crate::generate::nfa::NfaCursor;
-use crate::generate::node_types::VariableInfo;
-use crate::generate::rules::{AliasMap, Symbol, SymbolType, TokenSet};
-use crate::generate::tables::{LexTable, ParseAction, ParseTable, ParseTableEntry};
+use std::collections::{BTreeSet, HashMap};
+
 use anyhow::Result;
 use log::info;
-use std::collections::{BTreeSet, HashMap};
+
+use self::{
+    build_lex_table::build_lex_table,
+    build_parse_table::{build_parse_table, ParseStateInfo},
+    coincident_tokens::CoincidentTokenIndex,
+    minimize_parse_table::minimize_parse_table,
+    token_conflicts::TokenConflictMap,
+};
+use crate::generate::{
+    grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar},
+    nfa::NfaCursor,
+    node_types::VariableInfo,
+    rules::{AliasMap, Symbol, SymbolType, TokenSet},
+    tables::{LexTable, ParseAction, ParseTable, ParseTableEntry},
+};
 
 pub fn build_tables(
     syntax_grammar: &SyntaxGrammar,

cli/src/generate/build_tables/token_conflicts.rs

+13 -10

@@ -1,10 +1,11 @@
-use crate::generate::build_tables::item::TokenSetDisplay;
-use crate::generate::grammars::{LexicalGrammar, SyntaxGrammar};
-use crate::generate::nfa::{CharacterSet, NfaCursor, NfaTransition};
-use crate::generate::rules::TokenSet;
-use std::cmp::Ordering;
-use std::collections::HashSet;
-use std::fmt;
+use std::{cmp::Ordering, collections::HashSet, fmt};
+
+use crate::generate::{
+    build_tables::item::TokenSetDisplay,
+    grammars::{LexicalGrammar, SyntaxGrammar},
+    nfa::{CharacterSet, NfaCursor, NfaTransition},
+    rules::TokenSet,
+};
 
 #[derive(Clone, Debug, Default, PartialEq, Eq)]
 struct TokenConflictStatus {
@@ -372,9 +373,11 @@ fn compute_conflict_status(
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::generate::grammars::{Variable, VariableType};
-    use crate::generate::prepare_grammar::{expand_tokens, ExtractedLexicalGrammar};
-    use crate::generate::rules::{Precedence, Rule, Symbol};
+    use crate::generate::{
+        grammars::{Variable, VariableType},
+        prepare_grammar::{expand_tokens, ExtractedLexicalGrammar},
+        rules::{Precedence, Rule, Symbol},
+    };
 
     #[test]
     fn test_starting_characters() {

cli/src/generate/grammar_files.rs

+10 -5

@@ -1,13 +1,18 @@
-use super::write_file;
+use std::{
+    fs,
+    fs::File,
+    io::BufReader,
+    path::{Path, PathBuf},
+    str,
+};
+
 use anyhow::{anyhow, Context, Result};
 use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
 use indoc::indoc;
 use serde::Deserialize;
 use serde_json::{json, Map, Value};
-use std::fs::File;
-use std::io::BufReader;
-use std::path::{Path, PathBuf};
-use std::{fs, str};
+
+use super::write_file;
 
 const CLI_VERSION: &str = env!("CARGO_PKG_VERSION");
 const CLI_VERSION_PLACEHOLDER: &str = "CLI_VERSION";

cli/src/generate/grammars.rs

+6 -4

@@ -1,7 +1,9 @@
-use super::nfa::Nfa;
-use super::rules::{Alias, Associativity, Precedence, Rule, Symbol};
-use std::collections::HashMap;
-use std::fmt;
+use std::{collections::HashMap, fmt};
+
+use super::{
+    nfa::Nfa,
+    rules::{Alias, Associativity, Precedence, Rule, Symbol},
+};
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
 pub enum VariableType {

cli/src/generate/mod.rs

+9 -8

@@ -1,20 +1,21 @@
-use std::io::Write;
-use std::path::{Path, PathBuf};
-use std::process::{Command, Stdio};
-use std::{env, fs};
+use std::{
+    env, fs,
+    io::Write,
+    path::{Path, PathBuf},
+    process::{Command, Stdio},
+};
 
 use anyhow::{anyhow, Context, Result};
-use lazy_static::lazy_static;
-use regex::{Regex, RegexBuilder};
-use semver::Version;
-
 use build_tables::build_tables;
 use grammar_files::path_in_ignore;
 use grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
+use lazy_static::lazy_static;
 use parse_grammar::parse_grammar;
 use prepare_grammar::prepare_grammar;
+use regex::{Regex, RegexBuilder};
 use render::render_c_code;
 use rules::AliasMap;
+use semver::Version;
 
 mod build_tables;
 mod char_tree;

cli/src/generate/nfa.rs

+8 -7

@@ -1,10 +1,11 @@
-use std::char;
-use std::cmp::max;
-use std::cmp::Ordering;
-use std::collections::HashSet;
-use std::fmt;
-use std::mem::swap;
-use std::ops::Range;
+use std::{
+    char,
+    cmp::{max, Ordering},
+    collections::HashSet,
+    fmt,
+    mem::swap,
+    ops::Range,
+};
 
 /// A set of characters represented as a vector of ranges.
 #[derive(Clone, PartialEq, Eq, Hash)]
