
Commit 3d7da51

Auto merge of rust-lang#12775 - flodiebold:syntax-fixup-if, r=flodiebold

fix: Improve syntax fixup a bit, handle incomplete `if`

- allow appending tokens after a token, not just a node
- allow inserting delimiters (and remove them again)
- fix up `if {}` and `if` without anything following

2 parents: f2c2c36 + 0a4065d, commit 3d7da51

File tree: 3 files changed, +120 -34 lines

- crates/hir-expand/src/db.rs
- crates/hir-expand/src/fixup.rs
- crates/mbe/src/syntax_bridge.rs
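
The three bullet points in the commit message amount to one user-visible change: an incomplete `if` inside a macro argument no longer derails expansion, because the fixup pass patches the token stream before the macro sees it and strips the patches out again afterwards. A minimal sketch of the two new kinds of patches, using a plain `Vec<&str>` as a stand-in for the real token tree (the identifiers below are illustrative, not rust-analyzer APIs):

fn main() {
    // Source as the user typed it: `if` with nothing after it.
    let mut tokens = vec!["if"];

    // New ability 1: append after a *token* -- the `if` keyword gets a
    // placeholder condition.
    tokens.insert(1, "__ra_fixup");

    // New ability 2: insert delimiters -- the whole `if` expression gets an
    // empty then-branch, which `reverse_fixups` removes again after expansion.
    tokens.extend(["{", "}"]);

    assert_eq!(tokens.join(" "), "if __ra_fixup { }");
}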

crates/hir-expand/src/db.rs

Lines changed: 2 additions & 2 deletions
@@ -150,7 +150,7 @@ pub fn expand_speculative(
     // Build the subtree and token mapping for the speculative args
     let censor = censor_for_macro_input(&loc, speculative_args);
     let mut fixups = fixup::fixup_syntax(speculative_args);
-    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
     let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
         speculative_args,
         fixups.token_map,
@@ -295,7 +295,7 @@ fn macro_arg(
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
     let mut fixups = fixup::fixup_syntax(&node);
-    fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+    fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
     let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
         &node,
         fixups.token_map,
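
The only change needed at these two call sites is the `.into()`: `censor_for_macro_input` still yields syntax nodes, but the fixup maps are now keyed by `SyntaxElement` (a node-or-token sum type), so the node is widened into that key type. A toy sketch of the same pattern, with hand-rolled stand-ins rather than the real `syntax` crate types:

use std::collections::HashMap;

// Hand-rolled stand-ins: `Element` plays the role of `syntax::SyntaxElement`
// ("a node or a token"), with a `From<Node>` impl so that code which produces
// nodes only needs the added `.into()`.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct Node(&'static str);

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Element {
    Node(Node),
    Token(&'static str),
}

impl From<Node> for Element {
    fn from(node: Node) -> Self {
        Element::Node(node)
    }
}

fn main() {
    // Nodes to censor, as produced by something like `censor_for_macro_input`.
    let censor = vec![Node("ATTR"), Node("MACRO_ITEMS")];

    // The fixup maps are now keyed by elements, not nodes...
    let mut replace: HashMap<Element, Vec<&'static str>> = HashMap::new();
    replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));

    // ...so a single token can be a key too, which is what the `if` fixup in
    // fixup.rs relies on when it appends after the `if` keyword.
    let mut append: HashMap<Element, Vec<&'static str>> = HashMap::new();
    append.insert(Element::Token("if"), vec!["__ra_fixup"]);

    assert_eq!(replace.len(), 2);
    println!("replace = {replace:?}, append = {append:?}");
}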

crates/hir-expand/src/fixup.rs

Lines changed: 91 additions & 10 deletions
@@ -6,7 +6,7 @@ use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, AstNode},
-    match_ast, SyntaxKind, SyntaxNode, TextRange,
+    match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
 };
 use tt::Subtree;
 
@@ -15,8 +15,8 @@ use tt::Subtree;
 /// reverse those changes afterwards, and a token map.
 #[derive(Debug)]
 pub(crate) struct SyntaxFixups {
-    pub(crate) append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    pub(crate) replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
     pub(crate) token_map: TokenMap,
     pub(crate) next_id: u32,
@@ -31,8 +31,8 @@ pub struct SyntaxFixupUndoInfo {
 const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
 
 pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
-    let mut append = FxHashMap::default();
-    let mut replace = FxHashMap::default();
+    let mut append = FxHashMap::<SyntaxElement, _>::default();
+    let mut replace = FxHashMap::<SyntaxElement, _>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
     let mut token_map = TokenMap::default();
@@ -63,7 +63,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 range: node.text_range(),
                 id: SyntheticTokenId(idx),
             };
-            replace.insert(node.clone(), vec![replacement]);
+            replace.insert(node.clone().into(), vec![replacement]);
             preorder.skip_subtree();
             continue;
         }
@@ -75,7 +75,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 ast::FieldExpr(it) => {
                     if it.name_ref().is_none() {
                         // incomplete field access: some_expr.|
-                        append.insert(node.clone(), vec![
+                        append.insert(node.clone().into(), vec![
                             SyntheticToken {
                                 kind: SyntaxKind::IDENT,
                                 text: "__ra_fixup".into(),
@@ -87,7 +87,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 },
                 ast::ExprStmt(it) => {
                     if it.semicolon_token().is_none() {
-                        append.insert(node.clone(), vec![
+                        append.insert(node.clone().into(), vec![
                             SyntheticToken {
                                 kind: SyntaxKind::SEMICOLON,
                                 text: ";".into(),
@@ -99,7 +99,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                 },
                 ast::LetStmt(it) => {
                     if it.semicolon_token().is_none() {
-                        append.insert(node.clone(), vec![
+                        append.insert(node.clone().into(), vec![
                             SyntheticToken {
                                 kind: SyntaxKind::SEMICOLON,
                                 text: ";".into(),
@@ -109,6 +109,41 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
                         ]);
                     }
                 },
+                ast::IfExpr(it) => {
+                    if it.condition().is_none() {
+                        // insert placeholder token after the if token
+                        let if_token = match it.if_token() {
+                            Some(t) => t,
+                            None => continue,
+                        };
+                        append.insert(if_token.into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::IDENT,
+                                text: "__ra_fixup".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                    if it.then_branch().is_none() {
+                        append.insert(node.clone().into(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::L_CURLY,
+                                text: "{".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                            SyntheticToken {
+                                kind: SyntaxKind::R_CURLY,
+                                text: "}".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                // FIXME: foo::
+                // FIXME: for, loop, match etc.
                 _ => (),
             }
         }
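
Taken together, the two `append` calls in this new arm produce the following fixed-up streams; the expectations are restated from the `fixup_if_*` tests added further down:

fn main() {
    // (input inside a fn body, token stream handed to the macro)
    let cases = [
        ("if a", "if a {}"),        // then-branch missing: `{}` appended after the node
        ("if", "if __ra_fixup {}"), // condition missing too: placeholder after the `if` token
        ("if {}", "if {} {}"),      // the `{}` parses as the condition, so a then-branch is still added
    ];
    for (input, fixed_up) in cases {
        println!("{input:>6}  =>  {fixed_up}");
    }
}
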
@@ -144,7 +179,10 @@ pub(crate) fn reverse_fixups(
             token_map.synthetic_token_id(leaf.id()).is_none()
                 || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
         }
-        _ => true,
+        tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
+            token_map.synthetic_token_id(d.id).is_none()
+                || token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
+        }),
     });
     tt.token_trees.iter_mut().for_each(|tt| match tt {
         tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
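
This is the "remove them again" half of the delimiter change: `reverse_fixups` used to keep every subtree, and now drops a subtree whose delimiter was tagged as synthetic, so the inserted `{ }` never leaks into the mapped-back expansion. A minimal model of that `retain` with toy token trees (no real `tt` types):

enum TokenTree {
    // `true` means the leaf / delimiter came from the fixup pass (EMPTY_ID).
    Leaf { synthetic: bool },
    Subtree { synthetic_delimiter: Option<bool> },
}

fn main() {
    let mut token_trees = vec![
        TokenTree::Leaf { synthetic: false },                    // user-written token
        TokenTree::Leaf { synthetic: true },                     // e.g. an inserted `__ra_fixup`
        TokenTree::Subtree { synthetic_delimiter: Some(true) },  // the inserted `{ }`
        TokenTree::Subtree { synthetic_delimiter: Some(false) }, // user-written braces
        TokenTree::Subtree { synthetic_delimiter: None },        // delimiter-less subtree
    ];

    token_trees.retain(|tt| match tt {
        TokenTree::Leaf { synthetic } => !*synthetic,
        // Mirrors `st.delimiter.map_or(true, |d| ...)`: keep the subtree if it
        // has no delimiter, or if its delimiter was not produced by the fixup.
        TokenTree::Subtree { synthetic_delimiter } => {
            synthetic_delimiter.map_or(true, |synthetic| !synthetic)
        }
    });

    assert_eq!(token_trees.len(), 3);
}
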
@@ -295,6 +333,49 @@ fn foo() {
 "#,
             expect![[r#"
 fn foo () {__ra_fixup ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_1() {
+        check(
+            r#"
+fn foo() {
+    if a
+}
+"#,
+            expect![[r#"
+fn foo () {if a {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_2() {
+        check(
+            r#"
+fn foo() {
+    if
+}
+"#,
+            expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_if_3() {
+        check(
+            r#"
+fn foo() {
+    if {}
+}
+"#,
+            // the {} gets parsed as the condition, I think?
+            expect![[r#"
+fn foo () {if {} {}}
 "#]],
         )
     }

crates/mbe/src/syntax_bridge.rs

Lines changed: 27 additions & 22 deletions
@@ -31,8 +31,8 @@ pub fn syntax_node_to_token_tree_with_modifications(
     node: &SyntaxNode,
     existing_token_map: TokenMap,
     next_id: u32,
-    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
 ) -> (tt::Subtree, TokenMap, u32) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
@@ -221,7 +221,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
 
         if let Some(kind) = delim {
             let mut subtree = tt::Subtree::default();
-            let (id, idx) = conv.id_alloc().open_delim(range);
+            let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
             subtree.delimiter = Some(tt::Delimiter { id, kind });
             stack.push(StackEntry { subtree, idx, open_range: range });
             continue;
@@ -404,14 +404,21 @@ impl TokenIdAlloc {
         token_id
     }
 
-    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
+    fn open_delim(
+        &mut self,
+        open_abs_range: TextRange,
+        synthetic_id: Option<SyntheticTokenId>,
+    ) -> (tt::TokenId, usize) {
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
         let idx = self.map.insert_delim(
             token_id,
             open_abs_range - self.global_offset,
             open_abs_range - self.global_offset,
         );
+        if let Some(id) = synthetic_id {
+            self.map.insert_synthetic(token_id, id);
+        }
         (token_id, idx)
     }
 
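The new `synthetic_id` parameter connects the two halves: when `convert_tokens` opens a subtree for a delimiter that the fixup pass invented, the delimiter's freshly allocated `tt::TokenId` is recorded as synthetic, and that record is what the `Subtree` arm of `reverse_fixups` above queries. A toy token map illustrating the bookkeeping (hand-rolled stand-in, not the real `mbe::TokenMap`):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TokenId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SyntheticTokenId(u32);

// Marker used by the fixup pass for tokens that carry no original text.
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);

#[derive(Default)]
struct TokenMap {
    synthetic: HashMap<TokenId, SyntheticTokenId>,
}

impl TokenMap {
    fn insert_synthetic(&mut self, token: TokenId, id: SyntheticTokenId) {
        self.synthetic.insert(token, id);
    }
    fn synthetic_token_id(&self, token: TokenId) -> Option<SyntheticTokenId> {
        self.synthetic.get(&token).copied()
    }
}

fn main() {
    let mut map = TokenMap::default();

    // open_delim for the synthetic `{`: tag the freshly allocated delimiter id.
    let synthetic_brace = TokenId(7);
    map.insert_synthetic(synthetic_brace, EMPTY_ID);

    // A delimiter the user actually wrote never gets tagged.
    let real_brace = TokenId(8);

    assert_eq!(map.synthetic_token_id(synthetic_brace), Some(EMPTY_ID));
    assert_eq!(map.synthetic_token_id(real_brace), None);
    println!("delimiter token {} tagged with id {:#x}", synthetic_brace.0, EMPTY_ID.0);
}
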
@@ -511,8 +518,8 @@ struct Convertor {
     current: Option<SyntaxToken>,
     current_synthetic: Vec<SyntheticToken>,
     preorder: PreorderWithTokens,
-    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
@@ -523,8 +530,8 @@ impl Convertor {
         global_offset: TextSize,
         existing_token_map: TokenMap,
         next_id: u32,
-        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+        mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     ) -> Convertor {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
@@ -543,34 +550,32 @@ impl Convertor {
 
     fn next_token(
         preorder: &mut PreorderWithTokens,
-        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
-        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+        append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
     ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
         while let Some(ev) = preorder.next() {
            let ele = match ev {
                 WalkEvent::Enter(ele) => ele,
-                WalkEvent::Leave(SyntaxElement::Node(node)) => {
-                    if let Some(mut v) = append.remove(&node) {
+                WalkEvent::Leave(ele) => {
+                    if let Some(mut v) = append.remove(&ele) {
                         if !v.is_empty() {
                             v.reverse();
                             return (None, v);
                         }
                     }
                     continue;
                 }
-                _ => continue,
             };
+            if let Some(mut v) = replace.remove(&ele) {
+                preorder.skip_subtree();
+                if !v.is_empty() {
+                    v.reverse();
+                    return (None, v);
+                }
+            }
             match ele {
                 SyntaxElement::Token(t) => return (Some(t), Vec::new()),
-                SyntaxElement::Node(node) => {
-                    if let Some(mut v) = replace.remove(&node) {
-                        preorder.skip_subtree();
-                        if !v.is_empty() {
-                            v.reverse();
-                            return (None, v);
-                        }
-                    }
-                }
+                _ => {}
             }
         }
         (None, Vec::new())
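
The rewritten `next_token` is what makes token-keyed appends possible: `append` is now consulted on the `Leave` event of any element (token or node), and `replace` is checked once, up front, for whatever element was entered. A self-contained sketch of that walk with toy events (not rowan's real `PreorderWithTokens`), showing the `if` fixup falling out of it:

use std::collections::HashMap;

#[derive(PartialEq, Eq, Hash)]
enum Element {
    Node(&'static str),
    Token(&'static str),
}

enum WalkEvent {
    Enter(Element),
    Leave(Element),
}

fn main() {
    // Preorder-with-tokens events for a lone `if` keyword inside an IF_EXPR.
    let events = vec![
        WalkEvent::Enter(Element::Node("IF_EXPR")),
        WalkEvent::Enter(Element::Token("if")),
        WalkEvent::Leave(Element::Token("if")),
        WalkEvent::Leave(Element::Node("IF_EXPR")),
    ];

    // Appends keyed by a token and by a node, as in the IfExpr arm above.
    let mut append: HashMap<Element, Vec<&'static str>> = HashMap::new();
    append.insert(Element::Token("if"), vec!["__ra_fixup"]);
    append.insert(Element::Node("IF_EXPR"), vec!["{", "}"]);

    let mut out: Vec<&'static str> = Vec::new();
    for ev in events {
        match ev {
            // Real tokens are emitted when entered.
            WalkEvent::Enter(Element::Token(text)) => out.push(text),
            WalkEvent::Enter(_) => {}
            // The key change: every Leave is checked against `append`,
            // so appending right after a single token now works.
            WalkEvent::Leave(ele) => {
                if let Some(extra) = append.remove(&ele) {
                    out.extend(extra);
                }
            }
        }
    }

    assert_eq!(out.join(" "), "if __ra_fixup { }");
}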
