
Commit f45b080

Starting Fix for cfg stripping
1 parent 00a0125 commit f45b080

8 files changed: +302 lines added, -25 lines removed


crates/cfg/src/cfg_attr.rs

Lines changed: 70 additions & 0 deletions
@@ -0,0 +1,70 @@
use std::{
    fmt::{self, Debug},
    slice::Iter as SliceIter,
};

use crate::{cfg_expr::next_cfg_expr, CfgAtom, CfgExpr};
use tt::{Delimiter, SmolStr, Span};
/// Represents a `#[cfg_attr(.., my_attr)]` attribute.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CfgAttr<S> {
    /// Expression in `cfg_attr` attribute.
    pub cfg_expr: CfgExpr,
    /// Inner attribute.
    pub attr: tt::Subtree<S>,
}

impl<S: Clone + Span + Debug> CfgAttr<S> {
    /// Parses a subtree in the form of (cfg_expr, inner_attribute)
    pub fn parse(tt: &tt::Subtree<S>) -> Option<CfgAttr<S>> {
        let mut iter = tt.token_trees.iter();
        let cfg_expr = next_cfg_expr(&mut iter).unwrap_or(CfgExpr::Invalid);
        // FIXME: This is probably not the right way to do this
        // Gets the span of the next token tree
        let first_span = iter.as_slice().first().map(|tt| tt.first_span())?;
        let attr = tt::Subtree {
            delimiter: Delimiter::invisible_spanned(first_span),
            token_trees: iter.cloned().collect(),
        };
        Some(CfgAttr { cfg_expr, attr })
    }
}

#[cfg(test)]
mod tests {
    use expect_test::{expect, Expect};
    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
    use syntax::{ast, AstNode};

    use crate::{CfgAttr, DnfExpr};

    fn check_dnf(input: &str, expected_dnf: Expect, expected_attrs: Expect) {
        let source_file = ast::SourceFile::parse(input).ok().unwrap();
        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
        let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
        let Some(CfgAttr { cfg_expr, attr }) = CfgAttr::parse(&tt) else {
            assert!(false, "failed to parse cfg_attr");
            return;
        };

        let actual = format!("#![cfg({})]", DnfExpr::new(cfg_expr));
        expected_dnf.assert_eq(&actual);
        let actual_attrs = format!("#![{}]", attr);
        expected_attrs.assert_eq(&actual_attrs);
    }

    #[test]
    fn smoke() {
        check_dnf(
            r#"#![cfg_attr(feature = "nightly", feature(slice_split_at_unchecked))]"#,
            expect![[r#"#![cfg(feature = "nightly")]"#]],
            expect![r#"#![feature (slice_split_at_unchecked)]"#],
        );

        check_dnf(
            r#"#![cfg_attr(not(feature = "std"), no_std)]"#,
            expect![[r#"#![cfg(not(feature = "std"))]"#]],
            expect![r#"#![no_std]"#],
        );
    }
}
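
To make the splitting concrete: `CfgAttr::parse` reads the cfg predicate off the front of the `cfg_attr` token tree and wraps whatever remains in an invisible-delimiter subtree as the attribute to re-emit. Below is a minimal, self-contained sketch of that idea using flat string tokens and a hypothetical `split_cfg_attr` helper, not the crate's API; the real code delegates to `next_cfg_expr` on a `tt::Subtree`, where grouped predicates such as `any(a, b)` are single subtrees, so their inner commas never appear at this level.

// A minimal sketch of the splitting idea, using flat string tokens instead of
// rust-analyzer's `tt::Subtree`; `split_cfg_attr` is a hypothetical helper.
fn split_cfg_attr<'a>(tokens: &[&'a str]) -> Option<(Vec<&'a str>, Vec<&'a str>)> {
    // The first comma separates the cfg predicate from the attribute part.
    let comma = tokens.iter().position(|t| *t == ",")?;
    let (pred, rest) = tokens.split_at(comma);
    let attr = &rest[1..]; // skip the comma itself
    if attr.is_empty() {
        return None; // `cfg_attr` with nothing after the predicate is malformed
    }
    Some((pred.to_vec(), attr.to_vec()))
}

fn main() {
    // Token view of `#[cfg_attr(feature = "nightly", feature(slice_split_at_unchecked))]`
    let tokens =
        ["feature", "=", "\"nightly\"", ",", "feature", "(", "slice_split_at_unchecked", ")"];
    let (pred, attr) = split_cfg_attr(&tokens).unwrap();
    assert_eq!(pred.join(" "), "feature = \"nightly\"");
    assert_eq!(attr.join(" "), "feature ( slice_split_at_unchecked )");
}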

crates/cfg/src/cfg_expr.rs

Lines changed: 1 addition & 1 deletion
@@ -63,7 +63,7 @@ impl CfgExpr {
     }
 }
 
-fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> {
+pub(crate) fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> {
     let name = match it.next() {
         None => return None,
         Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(),

crates/cfg/src/lib.rs

Lines changed: 3 additions & 1 deletion
@@ -2,7 +2,8 @@
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
 
-mod cfg_expr;
+mod cfg_attr;
+pub(crate) mod cfg_expr;
 mod dnf;
 #[cfg(test)]
 mod tests;
@@ -12,6 +13,7 @@ use std::fmt;
 use rustc_hash::FxHashSet;
 use tt::SmolStr;
 
+pub use cfg_attr::CfgAttr;
 pub use cfg_expr::{CfgAtom, CfgExpr};
 pub use dnf::DnfExpr;

crates/hir-expand/src/cfg_process.rs

Lines changed: 178 additions & 0 deletions
@@ -0,0 +1,178 @@
use std::os::windows::process;

use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet;
use syntax::{
    ast::{self, Attr, FieldList, HasAttrs, RecordFieldList, TupleFieldList, Variant, VariantList},
    AstNode, SyntaxElement, SyntaxNode, T,
};
use tracing::info;

use crate::{db::ExpandDatabase, span_map::SpanMap, MacroCallLoc};

fn check_cfg_attr(
    attr: &Attr,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
) -> Option<bool> {
    attr.simple_name().as_deref().map(|v| v == "cfg")?;
    info!("Checking cfg attr {:?}", attr);
    let Some(tt) = attr.token_tree() else {
        info!("cfg attr has no expr {:?}", attr);
        return Some(true);
    };
    info!("Checking cfg {:?}", tt);
    let tt = tt.syntax().clone();
    // Convert to a tt::Subtree
    let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site);
    let cfg = cfg::CfgExpr::parse(&tt);
    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
    Some(enabled)
}
enum CfgAttrResult {
    Enabled(Attr),
    Disabled,
}

fn check_cfg_attr_attr(
    attr: &Attr,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
) -> Option<CfgAttrResult> {
    attr.simple_name().as_deref().map(|v| v == "cfg_attr")?;
    info!("Checking cfg_attr attr {:?}", attr);
    let Some(tt) = attr.token_tree() else {
        info!("cfg_attr attr has no expr {:?}", attr);
        return None;
    };
    info!("Checking cfg_attr {:?}", tt);
    let tt = tt.syntax().clone();
    // Convert to a tt::Subtree
    let tt = syntax_node_to_token_tree(&tt, span_map, loc.call_site);
    let cfg = cfg::CfgExpr::parse(&tt);
    let enabled = db.crate_graph()[loc.krate].cfg_options.check(&cfg) != Some(false);
    if enabled {
        // FIXME: Add the internal attribute
        Some(CfgAttrResult::Enabled(attr.clone()))
    } else {
        Some(CfgAttrResult::Disabled)
    }
}

fn process_has_attrs_with_possible_comma<I: HasAttrs>(
    items: impl Iterator<Item = I>,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
    res: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    for item in items {
        let field_attrs = item.attrs();
        'attrs: for attr in field_attrs {
            let Some(enabled) = check_cfg_attr(&attr, loc, span_map, db) else {
                continue;
            };
            if enabled {
                //FIXME: Should we remove the cfg_attr?
            } else {
                info!("censoring type {:?}", item.syntax());
                res.insert(item.syntax().clone().into());
                // We need to remove the , as well
                if let Some(comma) = item.syntax().next_sibling_or_token() {
                    if comma.kind() == T![,] {
                        res.insert(comma.into());
                    }
                }
                break 'attrs;
            }
            let Some(attr_result) = check_cfg_attr_attr(&attr, loc, span_map, db) else {
                continue;
            };
            match attr_result {
                CfgAttrResult::Enabled(attr) => {
                    //FIXME: Replace the attribute with the internal attribute
                }
                CfgAttrResult::Disabled => {
                    info!("censoring type {:?}", item.syntax());
                    res.insert(attr.syntax().clone().into());
                    continue;
                }
            }
        }
    }
    Some(())
}
fn process_enum(
    variants: VariantList,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
    res: &mut FxHashSet<SyntaxElement>,
) -> Option<()> {
    for variant in variants.variants() {
        'attrs: for attr in variant.attrs() {
            if !check_cfg_attr(&attr, loc, span_map, db)? {
                info!("censoring variant {:?}", variant.syntax());
                res.insert(variant.syntax().clone().into());
                if let Some(comma) = variant.syntax().next_sibling_or_token() {
                    if comma.kind() == T![,] {
                        res.insert(comma.into());
                    }
                }
                break 'attrs;
            }
        }
        if let Some(fields) = variant.field_list() {
            match fields {
                ast::FieldList::RecordFieldList(fields) => {
                    process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?;
                }
                ast::FieldList::TupleFieldList(fields) => {
                    process_has_attrs_with_possible_comma(fields.fields(), loc, span_map, db, res)?;
                }
            }
        }
    }
    Some(())
}
/// Handle
pub(crate) fn process_cfg_attrs(
    node: &SyntaxNode,
    loc: &MacroCallLoc,
    span_map: &SpanMap,
    db: &dyn ExpandDatabase,
) -> Option<FxHashSet<SyntaxElement>> {
    let mut res = FxHashSet::default();
    let item = ast::Item::cast(node.clone())?;
    match item {
        ast::Item::Struct(it) => match it.field_list()? {
            ast::FieldList::RecordFieldList(fields) => {
                process_has_attrs_with_possible_comma(
                    fields.fields(),
                    loc,
                    span_map,
                    db,
                    &mut res,
                )?;
            }
            ast::FieldList::TupleFieldList(fields) => {
                process_has_attrs_with_possible_comma(
                    fields.fields(),
                    loc,
                    span_map,
                    db,
                    &mut res,
                )?;
            }
        },
        ast::Item::Enum(it) => {
            process_enum(it.variant_list()?, loc, span_map, db, &mut res)?;
        }
        // FIXME: Implement for other items
        _ => {}
    }

    Some(res)
}
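
The pass above only collects the syntax elements that should disappear from the macro input; they are actually stripped later through the fixup machinery in `db.rs`. The standalone sketch below models the collection step with made-up `Element` and `Field` types (illustrative stand-ins, not rust-analyzer's `SyntaxElement` or AST API): a field whose `#[cfg(...)]` evaluates to false is recorded together with its trailing comma, mirroring `process_has_attrs_with_possible_comma`.

// Simplified, self-contained model of the censoring pass; names are illustrative.
use std::collections::HashSet;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Element {
    Node(String),  // stands in for a SyntaxNode (e.g. a whole record field)
    Token(String), // stands in for a SyntaxToken (e.g. the trailing `,`)
}

struct Field {
    name: String,
    cfg_enabled: Option<bool>, // None = no cfg attribute on this field
    trailing_comma: bool,
}

// Collect everything that should be stripped before the macro sees the input:
// a disabled field is removed together with its trailing comma.
fn censor_fields(fields: &[Field]) -> HashSet<Element> {
    let mut res = HashSet::new();
    for field in fields {
        if field.cfg_enabled == Some(false) {
            res.insert(Element::Node(field.name.clone()));
            if field.trailing_comma {
                res.insert(Element::Token(format!("{},", field.name)));
            }
        }
    }
    res
}

fn main() {
    let fields = vec![
        Field { name: "a".into(), cfg_enabled: None, trailing_comma: true },
        Field { name: "b".into(), cfg_enabled: Some(false), trailing_comma: true },
        Field { name: "c".into(), cfg_enabled: Some(true), trailing_comma: false },
    ];
    let censored = censor_fields(&fields);
    assert!(censored.contains(&Element::Node("b".into())));
    assert_eq!(censored.len(), 2); // the field node and its comma token
}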

crates/hir-expand/src/db.rs

Lines changed: 20 additions & 8 deletions
@@ -7,15 +7,17 @@ use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use span::{AstIdMap, SyntaxContextData, SyntaxContextId};
 use syntax::{
-    ast::{self, HasAttrs},
-    AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
+    ast::{self, Attr, HasAttrs},
+    AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T,
 };
+use tracing::info;
 use triomphe::Arc;
 
 use crate::{
     attrs::collect_attrs,
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
+    cfg_process,
     declarative::DeclarativeMacroExpander,
     fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
@@ -152,8 +154,8 @@ pub fn expand_speculative(
     let censor = censor_for_macro_input(&loc, speculative_args);
     let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
     fixups.append.retain(|it, _| match it {
-        syntax::NodeOrToken::Node(it) => !censor.contains(it),
         syntax::NodeOrToken::Token(_) => true,
+        it => !censor.contains(it),
     });
     fixups.remove.extend(censor);
     (
@@ -408,12 +410,15 @@ fn macro_arg(
         ),
         MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
             let censor = censor_for_macro_input(&loc, &syntax);
+            let censor_cfg = censor_cfg_elements(&syntax, &loc, &map, db);
             let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
             fixups.append.retain(|it, _| match it {
-                syntax::NodeOrToken::Node(it) => !censor.contains(it),
                 syntax::NodeOrToken::Token(_) => true,
+                it => !censor.contains(it) && !censor_cfg.contains(it),
             });
             fixups.remove.extend(censor);
+            fixups.remove.extend(censor_cfg);
+
             {
                 let mut tt = mbe::syntax_node_to_token_tree_modified(
                     &syntax,
@@ -456,12 +461,19 @@ fn macro_arg(
         }
     }
 }
-
+fn censor_cfg_elements(
+    node: &SyntaxNode,
+    loc: &MacroCallLoc,
+    span_map: &SpanMap,
+    db: &dyn ExpandDatabase,
+) -> FxHashSet<SyntaxElement> {
+    cfg_process::process_cfg_attrs(node, loc, span_map, db).unwrap_or_default()
+}
 // FIXME: Censoring info should be calculated by the caller! Namely by name resolution
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
 /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
 /// - attributes expect the invoking attribute to be stripped
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxElement> {
     // FIXME: handle `cfg_attr`
     (|| {
         let censor = match loc.kind {
@@ -477,7 +489,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                 // we need to know about all macro calls for the given ast item here
                 // so we require some kind of mapping...
                 .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
-                .map(|it| it.syntax().clone())
+                .map(|it| it.syntax().clone().into())
                 .collect()
             }
             MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
@@ -486,7 +498,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                 collect_attrs(&ast::Item::cast(node.clone())?)
                     .nth(invoc_attr_index.ast_index())
                     .and_then(|x| Either::left(x.1))
-                    .map(|attr| attr.syntax().clone())
+                    .map(|attr| attr.syntax().clone().into())
                     .into_iter()
                     .collect()
             }
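
In `macro_arg`, the cfg-derived censor set is folded into the existing fixup bookkeeping: appends anchored on a censored element are dropped, and both censor sets end up in the removal set. The following self-contained model shows that flow; `Element` and `Fixups` are simplified stand-ins for `SyntaxElement` and `SyntaxFixups`, not the real types.

// Rough model of how the cfg censor set joins the existing fixup bookkeeping.
use std::collections::{HashMap, HashSet};

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum Element {
    Node(&'static str),
    Token(&'static str),
}

struct Fixups {
    // Tokens that would be appended after a given element to keep the parse valid.
    append: HashMap<Element, Vec<&'static str>>,
    // Elements to drop from the macro input entirely.
    remove: HashSet<Element>,
}

fn apply_censoring(fixups: &mut Fixups, censor: HashSet<Element>, censor_cfg: HashSet<Element>) {
    // Keep appends on plain tokens; drop appends whose anchor is being censored,
    // mirroring the `fixups.append.retain(|it, _| ...)` call in the diff.
    fixups.append.retain(|it, _| match it {
        Element::Token(_) => true,
        it => !censor.contains(it) && !censor_cfg.contains(it),
    });
    // Both censor sets are merged into the removal set.
    fixups.remove.extend(censor);
    fixups.remove.extend(censor_cfg);
}

fn main() {
    let mut fixups = Fixups { append: HashMap::new(), remove: HashSet::new() };
    fixups.append.insert(Element::Node("disabled_field"), vec![","]);
    let censor_cfg: HashSet<_> = [Element::Node("disabled_field")].into();
    apply_censoring(&mut fixups, HashSet::new(), censor_cfg);
    assert!(fixups.append.is_empty());
    assert_eq!(fixups.remove.len(), 1);
}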

crates/hir-expand/src/fixup.rs

Lines changed: 3 additions & 3 deletions
@@ -23,7 +23,7 @@ use crate::{
 #[derive(Debug, Default)]
 pub(crate) struct SyntaxFixups {
     pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
-    pub(crate) remove: FxHashSet<SyntaxNode>,
+    pub(crate) remove: FxHashSet<SyntaxElement>,
     pub(crate) undo_info: SyntaxFixupUndoInfo,
 }

@@ -51,7 +51,7 @@ pub(crate) fn fixup_syntax(
     call_site: Span,
 ) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
-    let mut remove = FxHashSet::<SyntaxNode>::default();
+    let mut remove = FxHashSet::<SyntaxElement>::default();
     let mut preorder = node.preorder();
     let mut original = Vec::new();
     let dummy_range = FIXUP_DUMMY_RANGE;
@@ -68,7 +68,7 @@ pub(crate) fn fixup_syntax(
 
         let node_range = node.text_range();
         if can_handle_error(&node) && has_error_to_handle(&node) {
-            remove.insert(node.clone());
+            remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
             let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
             let idx = original.len() as u32;
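
Widening `remove` from `FxHashSet<SyntaxNode>` to `FxHashSet<SyntaxElement>` is what allows the cfg pass to schedule bare tokens (for example a censored field's trailing comma) for removal, not just whole nodes. The miniature sketch below models that node-or-token union; it is only analogous to rowan's `NodeOrToken`, which backs `SyntaxElement`, and uses simplified stand-in types.

// Miniature model of a node-or-token element set; types are stand-ins.
use std::collections::HashSet;

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Node(String);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Token(String);

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum NodeOrToken {
    Node(Node),
    Token(Token),
}

// The `.into()` calls added in this commit rely on conversions of this shape.
impl From<Node> for NodeOrToken {
    fn from(n: Node) -> Self {
        NodeOrToken::Node(n)
    }
}
impl From<Token> for NodeOrToken {
    fn from(t: Token) -> Self {
        NodeOrToken::Token(t)
    }
}

fn main() {
    let mut remove: HashSet<NodeOrToken> = HashSet::new();
    // A node that needs fixing up still goes in, exactly as before ...
    remove.insert(Node("field `b`".into()).into());
    // ... but now a stray trailing comma token can be scheduled for removal too.
    remove.insert(Token(",".into()).into());
    assert_eq!(remove.len(), 2);
}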
