Skip to content

Commit 923e8c7

Browse files
committed
Auto merge of rust-lang#17617 - Veykril:grammar-kind-gen, r=Veykril
Derive kinds information from ungrammar file. This reduces the need to touch more files when adding a new grammar rule.
2 parents dc22f2a + f7516d9 commit 923e8c7

39 files changed

+816
-757
lines changed

src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs

+4-1
Original file line numberDiff line numberDiff line change
@@ -301,7 +301,10 @@ impl ExprCollector<'_> {
301301
result_expr_id
302302
})
303303
}
304-
None => self.collect_block(e),
304+
// FIXME
305+
Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => {
306+
self.collect_block(e)
307+
}
305308
},
306309
ast::Expr::LoopExpr(e) => {
307310
let label = e.label().map(|label| self.collect_label(label));

src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs

+2
Original file line numberDiff line numberDiff line change
@@ -728,6 +728,8 @@ fn include_expand(
728728
}
729729
};
730730
match parse_to_token_tree(
731+
// FIXME
732+
Edition::CURRENT,
731733
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
732734
SyntaxContextId::ROOT,
733735
&db.file_text(file_id),

src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ use syntax::{
33
ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
44
ted, NodeOrToken,
55
SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
6-
SyntaxNode,
6+
SyntaxNode, T,
77
};
88

99
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
@@ -26,8 +26,8 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
2626
// ```
2727
pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
2828
let node = if ctx.has_empty_selection() {
29-
if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() {
30-
expr_stmt.syntax().clone()
29+
if let Some(t) = ctx.token_at_offset().find(|it| it.kind() == T![;]) {
30+
t.parent().and_then(ast::ExprStmt::cast)?.syntax().clone()
3131
} else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
3232
expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
3333
} else {

src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -184,7 +184,7 @@ fn normalize(name: &str) -> Option<String> {
184184

185185
fn is_valid_name(name: &str) -> bool {
186186
matches!(
187-
ide_db::syntax_helpers::LexedStr::single_token(name),
187+
ide_db::syntax_helpers::LexedStr::single_token(syntax::Edition::CURRENT, name),
188188
Some((syntax::SyntaxKind::IDENT, _error))
189189
)
190190
}

src/tools/rust-analyzer/crates/ide-db/src/rename.rs

+11-5
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ use std::fmt;
2525
use base_db::{AnchoredPathBuf, FileId, FileRange};
2626
use either::Either;
2727
use hir::{FieldSource, HirFileIdExt, InFile, ModuleSource, Semantics};
28-
use span::SyntaxContextId;
28+
use span::{Edition, SyntaxContextId};
2929
use stdx::{never, TupleExt};
3030
use syntax::{
3131
ast::{self, HasName},
@@ -227,7 +227,8 @@ fn rename_mod(
227227
module: hir::Module,
228228
new_name: &str,
229229
) -> Result<SourceChange> {
230-
if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
230+
if IdentifierKind::classify(module.krate().edition(sema.db), new_name)? != IdentifierKind::Ident
231+
{
231232
bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
232233
}
233234

@@ -313,7 +314,12 @@ fn rename_reference(
313314
def: Definition,
314315
new_name: &str,
315316
) -> Result<SourceChange> {
316-
let ident_kind = IdentifierKind::classify(new_name)?;
317+
let ident_kind = IdentifierKind::classify(
318+
def.krate(sema.db)
319+
.ok_or_else(|| RenameError("definition has no krate?".into()))?
320+
.edition(sema.db),
321+
new_name,
322+
)?;
317323

318324
if matches!(
319325
def,
@@ -605,8 +611,8 @@ pub enum IdentifierKind {
605611
}
606612

607613
impl IdentifierKind {
608-
pub fn classify(new_name: &str) -> Result<IdentifierKind> {
609-
match parser::LexedStr::single_token(new_name) {
614+
pub fn classify(edition: Edition, new_name: &str) -> Result<IdentifierKind> {
615+
match parser::LexedStr::single_token(edition, new_name) {
610616
Some(res) => match res {
611617
(SyntaxKind::IDENT, _) => {
612618
if let Some(inner) = new_name.strip_prefix("r#") {

src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs

+2
Original file line numberDiff line numberDiff line change
@@ -277,6 +277,8 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
277277
});
278278
}
279279
Some(ast::BlockModifier::Unsafe(_)) => (),
280+
Some(ast::BlockModifier::Gen(_)) => (),
281+
Some(ast::BlockModifier::AsyncGen(_)) => (),
280282
None => (),
281283
}
282284
if let Some(stmt_list) = b.stmt_list() {

src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -255,7 +255,7 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
255255
}
256256

257257
fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
258-
let lexed = parser::LexedStr::new(source);
258+
let lexed = parser::LexedStr::new(parser::Edition::CURRENT, source);
259259
if let Some((_, first_error)) = lexed.errors().next() {
260260
bail!("Failed to parse pattern: {}", first_error);
261261
}

src/tools/rust-analyzer/crates/ide/src/rename.rs

+3-2
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ use ide_db::{
1313
RootDatabase,
1414
};
1515
use itertools::Itertools;
16+
use span::Edition;
1617
use stdx::{always, never};
1718
use syntax::{
1819
ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
@@ -99,7 +100,7 @@ pub(crate) fn rename(
99100
// FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can
100101
// properly find "direct" usages/references.
101102
.map(|(.., def)| {
102-
match IdentifierKind::classify(new_name)? {
103+
match IdentifierKind::classify(Edition::CURRENT, new_name)? {
103104
IdentifierKind::Ident => (),
104105
IdentifierKind::Lifetime => {
105106
bail!("Cannot alias reference to a lifetime identifier")
@@ -391,7 +392,7 @@ fn rename_self_to_param(
391392
return Ok(SourceChange::default());
392393
}
393394

394-
let identifier_kind = IdentifierKind::classify(new_name)?;
395+
let identifier_kind = IdentifierKind::classify(Edition::CURRENT, new_name)?;
395396

396397
let InFile { file_id, value: self_param } =
397398
sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?;

src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs

+8-3
Original file line numberDiff line numberDiff line change
@@ -169,6 +169,7 @@ where
169169
/// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
170170
/// anchor with the given context.
171171
pub fn parse_to_token_tree<Ctx>(
172+
edition: Edition,
172173
anchor: SpanAnchor,
173174
ctx: Ctx,
174175
text: &str,
@@ -177,7 +178,7 @@ where
177178
SpanData<Ctx>: Copy + fmt::Debug,
178179
Ctx: Copy,
179180
{
180-
let lexed = parser::LexedStr::new(text);
181+
let lexed = parser::LexedStr::new(edition, text);
181182
if lexed.errors().next().is_some() {
182183
return None;
183184
}
@@ -187,11 +188,15 @@ where
187188
}
188189

189190
/// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
190-
pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
191+
pub fn parse_to_token_tree_static_span<S>(
192+
edition: Edition,
193+
span: S,
194+
text: &str,
195+
) -> Option<tt::Subtree<S>>
191196
where
192197
S: Copy + fmt::Debug,
193198
{
194-
let lexed = parser::LexedStr::new(text);
199+
let lexed = parser::LexedStr::new(edition, text);
195200
if lexed.errors().next().is_some() {
196201
return None;
197202
}

src/tools/rust-analyzer/crates/parser/src/edition.rs

+9
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,17 @@ pub enum Edition {
1212
}
1313

1414
impl Edition {
15+
/// The current latest stable edition, note this is usually not the right choice in code.
1516
pub const CURRENT: Edition = Edition::Edition2021;
1617
pub const DEFAULT: Edition = Edition::Edition2015;
18+
19+
pub fn at_least_2024(self) -> bool {
20+
self >= Edition::Edition2024
21+
}
22+
23+
pub fn at_least_2018(self) -> bool {
24+
self >= Edition::Edition2018
25+
}
1726
}
1827

1928
#[derive(Debug)]

src/tools/rust-analyzer/crates/parser/src/grammar.rs

-36
Original file line numberDiff line numberDiff line change
@@ -165,42 +165,6 @@ pub(crate) mod entry {
165165
}
166166
m.complete(p, ERROR);
167167
}
168-
169-
pub(crate) fn eager_macro_input(p: &mut Parser<'_>) {
170-
let m = p.start();
171-
172-
let closing_paren_kind = match p.current() {
173-
T!['{'] => T!['}'],
174-
T!['('] => T![')'],
175-
T!['['] => T![']'],
176-
_ => {
177-
p.error("expected `{`, `[`, `(`");
178-
while !p.at(EOF) {
179-
p.bump_any();
180-
}
181-
m.complete(p, ERROR);
182-
return;
183-
}
184-
};
185-
p.bump_any();
186-
while !p.at(EOF) && !p.at(closing_paren_kind) {
187-
if expressions::expr(p).is_none() {
188-
break;
189-
}
190-
if !p.at(EOF) && !p.at(closing_paren_kind) {
191-
p.expect(T![,]);
192-
}
193-
}
194-
p.expect(closing_paren_kind);
195-
if p.at(EOF) {
196-
m.complete(p, MACRO_EAGER_INPUT);
197-
return;
198-
}
199-
while !p.at(EOF) {
200-
p.bump_any();
201-
}
202-
m.complete(p, ERROR);
203-
}
204168
}
205169
}
206170

src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs

+26-2
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
5151
T![const],
5252
T![continue],
5353
T![do],
54+
T![gen],
5455
T![for],
5556
T![if],
5657
T![let],
@@ -138,15 +139,37 @@ pub(super) fn atom_expr(
138139
// fn f() { const { } }
139140
// fn f() { async { } }
140141
// fn f() { async move { } }
141-
T![const] | T![unsafe] | T![async] if la == T!['{'] => {
142+
T![const] | T![unsafe] | T![async] | T![gen] if la == T!['{'] => {
142143
let m = p.start();
143144
p.bump_any();
144145
stmt_list(p);
145146
m.complete(p, BLOCK_EXPR)
146147
}
147-
T![async] if la == T![move] && p.nth(2) == T!['{'] => {
148+
// test_err gen_blocks
149+
// pub fn main() {
150+
// gen { yield ""; };
151+
// async gen { yield ""; };
152+
// gen move { yield ""; };
153+
// async gen move { yield ""; };
154+
// }
155+
T![async] if la == T![gen] && p.nth(2) == T!['{'] => {
156+
let m = p.start();
157+
p.bump(T![async]);
158+
p.eat(T![gen]);
159+
stmt_list(p);
160+
m.complete(p, BLOCK_EXPR)
161+
}
162+
T![async] | T![gen] if la == T![move] && p.nth(2) == T!['{'] => {
163+
let m = p.start();
164+
p.bump_any();
165+
p.bump(T![move]);
166+
stmt_list(p);
167+
m.complete(p, BLOCK_EXPR)
168+
}
169+
T![async] if la == T![gen] && p.nth(2) == T![move] && p.nth(3) == T!['{'] => {
148170
let m = p.start();
149171
p.bump(T![async]);
172+
p.bump(T![gen]);
150173
p.bump(T![move]);
151174
stmt_list(p);
152175
m.complete(p, BLOCK_EXPR)
@@ -355,6 +378,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
355378
p.eat(T![const]);
356379
p.eat(T![static]);
357380
p.eat(T![async]);
381+
p.eat(T![gen]);
358382
p.eat(T![move]);
359383

360384
if !p.at(T![|]) {

src/tools/rust-analyzer/crates/parser/src/grammar/items.rs

+13-9
Original file line numberDiff line numberDiff line change
@@ -112,11 +112,22 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
112112

113113
// test_err async_without_semicolon
114114
// fn foo() { let _ = async {} }
115-
if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) {
115+
if p.at(T![async])
116+
&& (!matches!(p.nth(1), T!['{'] | T![gen] | T![move] | T![|])
117+
|| matches!((p.nth(1), p.nth(2)), (T![gen], T![fn])))
118+
{
116119
p.eat(T![async]);
117120
has_mods = true;
118121
}
119122

123+
// test_err gen_fn
124+
// gen fn gen_fn() {}
125+
// async gen fn async_gen_fn() {}
126+
if p.at(T![gen]) && p.nth(1) == T![fn] {
127+
p.eat(T![gen]);
128+
has_mods = true;
129+
}
130+
120131
// test_err unsafe_block_in_mod
121132
// fn foo(){} unsafe { } fn bar(){}
122133
if p.at(T![unsafe]) && p.nth(1) != T!['{'] {
@@ -173,13 +184,6 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
173184
}
174185
}
175186

176-
// test existential_type
177-
// existential type Foo: Fn() -> usize;
178-
if p.at_contextual_kw(T![existential]) && p.nth(1) == T![type] {
179-
p.bump_remap(T![existential]);
180-
has_mods = true;
181-
}
182-
183187
// items
184188
match p.current() {
185189
T![fn] => fn_(p, m),
@@ -201,7 +205,7 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
201205

202206
_ if has_visibility || has_mods => {
203207
if has_mods {
204-
p.error("expected existential, fn, trait or impl");
208+
p.error("expected fn, trait or impl");
205209
} else {
206210
p.error("expected an item");
207211
}

0 commit comments

Comments (0)