
Commit 7011094

Add always disabled gen parse support
1 parent 983c9c1 commit 7011094

File tree

32 files changed (+376, -55 lines)


crates/hir-def/src/body/lower.rs

Lines changed: 4 additions & 1 deletion
@@ -301,7 +301,10 @@ impl ExprCollector<'_> {
                     result_expr_id
                 })
             }
-            None => self.collect_block(e),
+            // FIXME
+            Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => {
+                self.collect_block(e)
+            }
         },
         ast::Expr::LoopExpr(e) => {
             let label = e.label().map(|label| self.collect_label(label));
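
Downstream code that matches on `ast::BlockModifier` now has to handle the two new variants. Below is a minimal, hedged sketch of such an exhaustive match, assuming the variant set implied by this diff and the existing `BlockExpr::modifier()` accessor in the `syntax` crate; the real collector above is far more involved.

    use syntax::ast;

    // Sketch: describe a block by its modifier; `Gen` and `AsyncGen` are the
    // variants introduced alongside this commit, lowered like plain blocks for now.
    fn describe_block(block: &ast::BlockExpr) -> &'static str {
        match block.modifier() {
            Some(ast::BlockModifier::Async(_)) => "async block",
            Some(ast::BlockModifier::Unsafe(_)) => "unsafe block",
            Some(ast::BlockModifier::Const(_)) => "const block",
            Some(ast::BlockModifier::Try(_)) => "try block",
            Some(ast::BlockModifier::Label(_)) => "labeled block",
            // New variants: no dedicated lowering yet (see the FIXME above).
            Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => {
                "plain, gen, or async gen block"
            }
        }
    }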

crates/hir-expand/src/builtin_fn_macro.rs

Lines changed: 2 additions & 0 deletions
@@ -728,6 +728,8 @@ fn include_expand(
         }
     };
     match parse_to_token_tree(
+        // FIXME
+        Edition::CURRENT,
         SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
         SyntaxContextId::ROOT,
         &db.file_text(file_id),

crates/ide-assists/src/handlers/extract_variable.rs

Lines changed: 3 additions & 3 deletions
@@ -3,7 +3,7 @@ use syntax::{
     ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
     ted, NodeOrToken,
     SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
-    SyntaxNode,
+    SyntaxNode, T,
 };
 
 use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
@@ -26,8 +26,8 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
 // ```
 pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let node = if ctx.has_empty_selection() {
-        if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() {
-            expr_stmt.syntax().clone()
+        if let Some(t) = ctx.token_at_offset().find(|it| it.kind() == T![;]) {
+            t.parent().and_then(ast::ExprStmt::cast)?.syntax().clone()
         } else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
            expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
         } else {
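
The new lookup starts from the `;` token at the cursor and walks to its parent `ExprStmt`, instead of searching for a node at the offset. A small standalone sketch of that token-to-node step, assuming the rowan-based API of the `syntax` crate:

    use syntax::{ast, AstNode, SyntaxToken, T};

    // Sketch: given a token, return the enclosing expression statement, but only
    // if the token is the statement's trailing semicolon.
    fn expr_stmt_at_semicolon(token: SyntaxToken) -> Option<ast::ExprStmt> {
        if token.kind() != T![;] {
            return None;
        }
        token.parent().and_then(ast::ExprStmt::cast)
    }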

crates/ide-assists/src/utils/suggest_name.rs

Lines changed: 1 addition & 1 deletion
@@ -184,7 +184,7 @@ fn normalize(name: &str) -> Option<String> {
 
 fn is_valid_name(name: &str) -> bool {
     matches!(
-        ide_db::syntax_helpers::LexedStr::single_token(name),
+        ide_db::syntax_helpers::LexedStr::single_token(syntax::Edition::CURRENT, name),
         Some((syntax::SyntaxKind::IDENT, _error))
     )
 }
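
A behavior sketch of the check above, assuming the `parser` crate's lexer as used elsewhere in this commit: a suggested name is accepted only when it lexes to exactly one IDENT token under the chosen edition.

    use parser::{Edition, LexedStr, SyntaxKind};

    fn is_valid_name(name: &str) -> bool {
        matches!(LexedStr::single_token(Edition::CURRENT, name), Some((SyntaxKind::IDENT, _)))
    }

    fn main() {
        assert!(is_valid_name("request_body")); // a single identifier
        assert!(!is_valid_name("let"));         // lexes as a keyword, not IDENT
        assert!(!is_valid_name("foo bar"));     // more than one token
    }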

crates/ide-db/src/rename.rs

Lines changed: 11 additions & 5 deletions
@@ -25,7 +25,7 @@ use std::fmt;
 use base_db::{AnchoredPathBuf, FileId, FileRange};
 use either::Either;
 use hir::{FieldSource, HirFileIdExt, InFile, ModuleSource, Semantics};
-use span::SyntaxContextId;
+use span::{Edition, SyntaxContextId};
 use stdx::{never, TupleExt};
 use syntax::{
     ast::{self, HasName},
@@ -227,7 +227,8 @@ fn rename_mod(
     module: hir::Module,
     new_name: &str,
 ) -> Result<SourceChange> {
-    if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
+    if IdentifierKind::classify(module.krate().edition(sema.db), new_name)? != IdentifierKind::Ident
+    {
         bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
     }
 
@@ -313,7 +314,12 @@ fn rename_reference(
     def: Definition,
     new_name: &str,
 ) -> Result<SourceChange> {
-    let ident_kind = IdentifierKind::classify(new_name)?;
+    let ident_kind = IdentifierKind::classify(
+        def.krate(sema.db)
+            .ok_or_else(|| RenameError("definition has no krate?".into()))?
+            .edition(sema.db),
+        new_name,
+    )?;
 
     if matches!(
         def,
@@ -605,8 +611,8 @@ pub enum IdentifierKind {
 }
 
 impl IdentifierKind {
-    pub fn classify(new_name: &str) -> Result<IdentifierKind> {
-        match parser::LexedStr::single_token(new_name) {
+    pub fn classify(edition: Edition, new_name: &str) -> Result<IdentifierKind> {
+        match parser::LexedStr::single_token(edition, new_name) {
             Some(res) => match res {
                 (SyntaxKind::IDENT, _) => {
                     if let Some(inner) = new_name.strip_prefix("r#") {
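
Callers now pass the edition of the crate being edited. A hedged usage sketch, assuming `IdentifierKind` is reachable as `ide_db::rename::IdentifierKind` and `Edition` as `span::Edition` (both consistent with the imports in this diff):

    use ide_db::rename::IdentifierKind;
    use span::Edition;

    // Accept only plain identifiers (not lifetimes or `_`) as a rename target.
    fn is_plain_ident(edition: Edition, new_name: &str) -> bool {
        matches!(IdentifierKind::classify(edition, new_name), Ok(IdentifierKind::Ident))
    }

    // e.g. is_plain_ident(Edition::Edition2021, "gen") should hold, since `gen`
    // is still an ordinary identifier before edition 2024.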

crates/ide-db/src/syntax_helpers/node_ext.rs

Lines changed: 2 additions & 0 deletions
@@ -277,6 +277,8 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
                 });
             }
             Some(ast::BlockModifier::Unsafe(_)) => (),
+            Some(ast::BlockModifier::Gen(_)) => (),
+            Some(ast::BlockModifier::AsyncGen(_)) => (),
             None => (),
         }
         if let Some(stmt_list) = b.stmt_list() {

crates/ide-ssr/src/parsing.rs

Lines changed: 1 addition & 1 deletion
@@ -255,7 +255,7 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
 }
 
 fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
-    let lexed = parser::LexedStr::new(source);
+    let lexed = parser::LexedStr::new(parser::Edition::CURRENT, source);
     if let Some((_, first_error)) = lexed.errors().next() {
         bail!("Failed to parse pattern: {}", first_error);
     }
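
The same pattern appears in several callers touched by this commit: lex the whole input up front, then bail on the first lexer error. A small sketch, assuming `parser::LexedStr` and that `errors()` yields `(token index, message)` pairs as used above:

    use parser::{Edition, LexedStr};

    // Return the first lexer error message for a pattern, if any.
    fn first_lex_error(source: &str) -> Option<String> {
        let lexed = LexedStr::new(Edition::CURRENT, source);
        lexed.errors().next().map(|(_, msg)| msg.to_string())
    }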

crates/ide/src/rename.rs

Lines changed: 3 additions & 2 deletions
@@ -13,6 +13,7 @@ use ide_db::{
     RootDatabase,
 };
 use itertools::Itertools;
+use span::Edition;
 use stdx::{always, never};
 use syntax::{
     ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
@@ -99,7 +100,7 @@ pub(crate) fn rename(
         // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can
         // properly find "direct" usages/references.
         .map(|(.., def)| {
-            match IdentifierKind::classify(new_name)? {
+            match IdentifierKind::classify(Edition::CURRENT, new_name)? {
                 IdentifierKind::Ident => (),
                 IdentifierKind::Lifetime => {
                     bail!("Cannot alias reference to a lifetime identifier")
@@ -391,7 +392,7 @@ fn rename_self_to_param(
         return Ok(SourceChange::default());
     }
 
-    let identifier_kind = IdentifierKind::classify(new_name)?;
+    let identifier_kind = IdentifierKind::classify(Edition::CURRENT, new_name)?;
 
     let InFile { file_id, value: self_param } =
         sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?;

crates/mbe/src/syntax_bridge.rs

Lines changed: 8 additions & 3 deletions
@@ -169,6 +169,7 @@ where
 /// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
 /// anchor with the given context.
 pub fn parse_to_token_tree<Ctx>(
+    edition: Edition,
     anchor: SpanAnchor,
     ctx: Ctx,
     text: &str,
@@ -177,7 +178,7 @@ where
     SpanData<Ctx>: Copy + fmt::Debug,
     Ctx: Copy,
 {
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     if lexed.errors().next().is_some() {
         return None;
     }
@@ -187,11 +188,15 @@ where
 }
 
 /// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
-pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+pub fn parse_to_token_tree_static_span<S>(
+    edition: Edition,
+    span: S,
+    text: &str,
+) -> Option<tt::Subtree<S>>
 where
     S: Copy + fmt::Debug,
 {
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     if lexed.errors().next().is_some() {
         return None;
     }
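
Both bridge entry points now take the edition first. A hedged call-site sketch for the static-span variant; `DummySpan` is a made-up span type only there to satisfy the `Copy + fmt::Debug` bound, and the exact re-export path of the function (shown here as `mbe::syntax_bridge::...`) is an assumption:

    use span::Edition;

    #[derive(Copy, Clone, Debug)]
    struct DummySpan;

    fn bridge(text: &str) -> Option<tt::Subtree<DummySpan>> {
        // Edition comes first, then the span applied to every produced token.
        mbe::syntax_bridge::parse_to_token_tree_static_span(Edition::CURRENT, DummySpan, text)
    }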

crates/parser/src/edition.rs

Lines changed: 5 additions & 0 deletions
@@ -12,8 +12,13 @@ pub enum Edition {
 }
 
 impl Edition {
+    /// The current latest stable edition, note this is usually not the right choice in code.
     pub const CURRENT: Edition = Edition::Edition2021;
     pub const DEFAULT: Edition = Edition::Edition2015;
+
+    pub fn at_least_2024(self) -> bool {
+        self >= Edition::Edition2024
+    }
 }
 
 #[derive(Debug)]
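
This helper is the single switch gating the new syntax: `gen` only becomes a keyword on edition 2024 and later, and because `Edition::CURRENT` is still 2021 the new grammar stays effectively disabled (hence the commit title). A trivial usage sketch:

    use parser::Edition;

    fn gen_is_keyword(edition: Edition) -> bool {
        edition.at_least_2024()
    }

    fn main() {
        assert!(!gen_is_keyword(Edition::Edition2021));
        assert!(gen_is_keyword(Edition::Edition2024));
    }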

crates/parser/src/grammar/expressions/atom.rs

Lines changed: 26 additions & 2 deletions
@@ -51,6 +51,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
         T![const],
         T![continue],
         T![do],
+        T![gen],
         T![for],
         T![if],
         T![let],
@@ -138,15 +139,37 @@ pub(super) fn atom_expr(
         // fn f() { const { } }
         // fn f() { async { } }
         // fn f() { async move { } }
-        T![const] | T![unsafe] | T![async] if la == T!['{'] => {
+        T![const] | T![unsafe] | T![async] | T![gen] if la == T!['{'] => {
             let m = p.start();
             p.bump_any();
             stmt_list(p);
             m.complete(p, BLOCK_EXPR)
         }
-        T![async] if la == T![move] && p.nth(2) == T!['{'] => {
+        // test_err gen_blocks
+        // pub fn main() {
+        //     gen { yield ""; };
+        //     async gen { yield ""; };
+        //     gen move { yield ""; };
+        //     async gen move { yield ""; };
+        // }
+        T![async] if la == T![gen] && p.nth(2) == T!['{'] => {
+            let m = p.start();
+            p.bump(T![async]);
+            p.eat(T![gen]);
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+        T![async] | T![gen] if la == T![move] && p.nth(2) == T!['{'] => {
+            let m = p.start();
+            p.bump_any();
+            p.bump(T![move]);
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+        T![async] if la == T![gen] && p.nth(2) == T![move] && p.nth(3) == T!['{'] => {
             let m = p.start();
             p.bump(T![async]);
+            p.bump(T![gen]);
             p.bump(T![move]);
             stmt_list(p);
             m.complete(p, BLOCK_EXPR)
@@ -355,6 +378,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
     p.eat(T![const]);
     p.eat(T![static]);
     p.eat(T![async]);
+    p.eat(T![gen]);
     p.eat(T![move]);
 
     if !p.at(T![|]) {
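
The new match arms amount to a two- or three-token lookahead before committing to a block. A standalone sketch of that decision, using plain strings instead of the parser's `T![..]` token kinds so it runs without the workspace crates:

    // Which prefixes start one of the (new or existing) block forms handled above?
    fn is_block_start(tokens: &[&str]) -> bool {
        match tokens {
            // const { .. }, unsafe { .. }, async { .. }, gen { .. }
            ["const" | "unsafe" | "async" | "gen", "{", ..] => true,
            // async gen { .. }
            ["async", "gen", "{", ..] => true,
            // async move { .. }, gen move { .. }
            ["async" | "gen", "move", "{", ..] => true,
            // async gen move { .. }
            ["async", "gen", "move", "{", ..] => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_block_start(&["async", "gen", "move", "{", "yield"]));
        assert!(!is_block_start(&["gen", "fn", "f"])); // handled by item parsing instead
    }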

crates/parser/src/grammar/items.rs

Lines changed: 12 additions & 1 deletion
@@ -112,11 +112,22 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
 
     // test_err async_without_semicolon
     // fn foo() { let _ = async {} }
-    if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) {
+    if p.at(T![async])
+        && (!matches!(p.nth(1), T!['{'] | T![gen] | T![move] | T![|])
+            || matches!((p.nth(1), p.nth(2)), (T![gen], T![fn])))
+    {
         p.eat(T![async]);
         has_mods = true;
     }
 
+    // test_err gen_fn
+    // gen fn gen_fn() {}
+    // async gen fn async_gen_fn() {}
+    if p.at(T![gen]) && p.nth(1) == T![fn] {
+        p.eat(T![gen]);
+        has_mods = true;
+    }
+
     // test_err unsafe_block_in_mod
     // fn foo(){} unsafe { } fn bar(){}
     if p.at(T![unsafe]) && p.nth(1) != T!['{'] {
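
At the item level the lookahead is stricter: `gen` is only consumed as a function modifier when `fn` follows, and the `async` branch is adjusted so `async gen fn` is still treated as a function header rather than the start of an `async gen { .. }` block. The same idea as a standalone sketch with string tokens:

    // Does the token stream start an (async) gen function header?
    fn starts_gen_fn(tokens: &[&str]) -> bool {
        matches!(tokens, ["gen", "fn", ..] | ["async", "gen", "fn", ..])
    }

    fn main() {
        assert!(starts_gen_fn(&["gen", "fn", "gen_fn"]));
        assert!(starts_gen_fn(&["async", "gen", "fn", "async_gen_fn"]));
        assert!(!starts_gen_fn(&["gen", "{", "yield"])); // a gen block, not an item
    }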

crates/parser/src/lexed_str.rs

Lines changed: 13 additions & 5 deletions
@@ -13,6 +13,7 @@ use std::ops;
 use rustc_lexer::unescape::{EscapeError, Mode};
 
 use crate::{
+    Edition,
     SyntaxKind::{self, *},
     T,
 };
@@ -30,9 +31,9 @@ struct LexError {
 }
 
 impl<'a> LexedStr<'a> {
-    pub fn new(text: &'a str) -> LexedStr<'a> {
+    pub fn new(edition: Edition, text: &'a str) -> LexedStr<'a> {
         let _p = tracing::info_span!("LexedStr::new").entered();
-        let mut conv = Converter::new(text);
+        let mut conv = Converter::new(edition, text);
         if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
             conv.res.push(SHEBANG, conv.offset);
             conv.offset = shebang_len;
@@ -47,7 +48,7 @@ impl<'a> LexedStr<'a> {
         conv.finalize_with_eof()
     }
 
-    pub fn single_token(text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
+    pub fn single_token(edition: Edition, text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
         if text.is_empty() {
             return None;
         }
@@ -57,7 +58,7 @@ impl<'a> LexedStr<'a> {
             return None;
         }
 
-        let mut conv = Converter::new(text);
+        let mut conv = Converter::new(edition, text);
         conv.extend_token(&token.kind, text);
         match &*conv.res.kind {
             [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))),
@@ -129,13 +130,15 @@ impl<'a> LexedStr<'a> {
 struct Converter<'a> {
     res: LexedStr<'a>,
     offset: usize,
+    edition: Edition,
 }
 
 impl<'a> Converter<'a> {
-    fn new(text: &'a str) -> Self {
+    fn new(edition: Edition, text: &'a str) -> Self {
         Self {
             res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() },
             offset: 0,
+            edition,
         }
     }
 
@@ -175,6 +178,11 @@ impl<'a> Converter<'a> {
             rustc_lexer::TokenKind::Whitespace => WHITESPACE,
 
             rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE,
+            rustc_lexer::TokenKind::Ident
+                if token_text == "gen" && !self.edition.at_least_2024() =>
+            {
+                IDENT
+            }
             rustc_lexer::TokenKind::Ident => {
                 SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
             }
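
With the new guard, the lexer is where the edition switch actually lands: before edition 2024, `gen` always comes out as a plain IDENT, so none of the grammar above can trigger while `Edition::CURRENT` is 2021. A hedged usage sketch; the exact keyword kind produced on 2024+ is assumed to be the new `gen` keyword, so the test only checks that it is no longer IDENT:

    use parser::{Edition, LexedStr, SyntaxKind};

    fn main() {
        // Pre-2024: `gen` is just an identifier.
        let (kind, _) = LexedStr::single_token(Edition::Edition2021, "gen").unwrap();
        assert_eq!(kind, SyntaxKind::IDENT);

        // 2024 and later: `SyntaxKind::from_keyword` takes over, so the kind is
        // no longer IDENT (presumably the new `gen` keyword kind).
        let (kind, _) = LexedStr::single_token(Edition::Edition2024, "gen").unwrap();
        assert_ne!(kind, SyntaxKind::IDENT);
    }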
