Skip to content

Commit

Permalink
Recursively expand TokenKind::Interpolated (take 2)
Browse files Browse the repository at this point in the history
Fixes rust-lang#68430

This is a re-attempt of PR rust-lang#72388, which was previously reverted due to
a large number of breakages. All of the known breakages should now be
patched upstream.
  • Loading branch information
Aaron1011 committed Aug 22, 2020
1 parent 527a685 commit cd24aee
Show file tree
Hide file tree
Showing 7 changed files with 152 additions and 75 deletions.
53 changes: 40 additions & 13 deletions src/librustc_parse/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
#![feature(or_patterns)]

use rustc_ast as ast;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
Expand Down Expand Up @@ -309,7 +309,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
// modifications, including adding/removing typically non-semantic
// tokens such as extra braces and commas, don't happen.
if let Some(tokens) = tokens {
if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real) {
if tokenstream_probably_equal_for_proc_macro(&tokens, &tokens_for_real, sess) {
return tokens;
}
info!(
Expand All @@ -327,7 +327,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
//
// This is otherwise the same as `eq_unspanned`, only recursing with a
// different method.
pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &TokenStream) -> bool {
pub fn tokenstream_probably_equal_for_proc_macro(
first: &TokenStream,
other: &TokenStream,
sess: &ParseSess,
) -> bool {
// When checking for `probably_eq`, we ignore certain tokens that aren't
// preserved in the AST. Because they are not preserved, the pretty
// printer arbitrarily adds or removes them when printing as token
Expand Down Expand Up @@ -408,20 +412,39 @@ pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &To
}
}
token_trees = out.into_iter().map(TokenTree::Token).collect();
if token_trees.len() != 1 {
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
}
} else {
token_trees = SmallVec::new();
token_trees.push(tree);
}
token_trees.into_iter()
}

let mut t1 = first.trees().filter(semantic_tree).flat_map(break_tokens);
let mut t2 = other.trees().filter(semantic_tree).flat_map(break_tokens);
let expand_nt = |tree: TokenTree| {
if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
// When checking tokenstreams for 'probable equality', we are comparing
// a captured (from parsing) `TokenStream` to a reparsed tokenstream.
// The reparsed Tokenstream will never have `None`-delimited groups,
// since they are only ever inserted as a result of macro expansion.
// Therefore, inserting a `None`-delimited group here (when we
// convert a nested `Nonterminal` to a tokenstream) would cause
// a mismatch with the reparsed tokenstream.
//
// Note that we currently do not handle the case where the
// reparsed stream has a `Parenthesis`-delimited group
// inserted. This will cause a spurious mismatch:
// issue #75734 tracks resolving this.
nt_to_tokenstream(nt, sess, *span).into_trees()
} else {
TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees()
}
};

// Break tokens after we expand any nonterminals, so that we break tokens
// that are produced as a result of nonterminal expansion.
let mut t1 = first.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
let mut t2 = other.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
if !tokentree_probably_equal_for_proc_macro(&t1, &t2) {
if !tokentree_probably_equal_for_proc_macro(&t1, &t2, sess) {
return false;
}
}
Expand All @@ -433,13 +456,17 @@ pub fn tokenstream_probably_equal_for_proc_macro(first: &TokenStream, other: &To
//
// This is otherwise the same as `eq_unspanned`, only recursing with a
// different method.
fn tokentree_probably_equal_for_proc_macro(first: &TokenTree, other: &TokenTree) -> bool {
pub fn tokentree_probably_equal_for_proc_macro(
first: &TokenTree,
other: &TokenTree,
sess: &ParseSess,
) -> bool {
match (first, other) {
(TokenTree::Token(token), TokenTree::Token(token2)) => {
token_probably_equal_for_proc_macro(token, token2)
}
(TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2)
delim == delim2 && tokenstream_probably_equal_for_proc_macro(&tts, &tts2, sess)
}
_ => false,
}
Expand Down Expand Up @@ -498,7 +525,7 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
}

(&Interpolated(..), &Interpolated(..)) => false,
(&Interpolated(..), &Interpolated(..)) => panic!("Unexpanded Interpolated!"),

_ => panic!("forgot to add a token?"),
}
Expand Down
40 changes: 26 additions & 14 deletions src/test/ui/proc-macro/input-interpolated.stdout
Original file line number Diff line number Diff line change
Expand Up @@ -15,51 +15,63 @@ PRINT-ATTR INPUT (DISPLAY): const A : u8 = 0 ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "const",
span: #0 bytes(0..0),
span: #3 bytes(416..421),
},
Ident {
ident: "A",
span: #0 bytes(0..0),
Group {
delimiter: None,
stream: TokenStream [
Ident {
ident: "A",
span: #0 bytes(503..504),
},
],
span: #3 bytes(422..424),
},
Punct {
ch: ':',
spacing: Alone,
span: #0 bytes(0..0),
span: #3 bytes(424..425),
},
Ident {
ident: "u8",
span: #0 bytes(0..0),
span: #3 bytes(426..428),
},
Punct {
ch: '=',
spacing: Alone,
span: #0 bytes(0..0),
span: #3 bytes(429..430),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: #0 bytes(0..0),
span: #3 bytes(431..432),
},
Punct {
ch: ';',
spacing: Alone,
span: #0 bytes(0..0),
span: #3 bytes(432..433),
},
]
PRINT-DERIVE INPUT (DISPLAY): struct A { }
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: #0 bytes(0..0),
span: #3 bytes(468..474),
},
Ident {
ident: "A",
span: #0 bytes(0..0),
Group {
delimiter: None,
stream: TokenStream [
Ident {
ident: "A",
span: #0 bytes(503..504),
},
],
span: #3 bytes(475..477),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: #0 bytes(0..0),
span: #3 bytes(478..480),
},
]
5 changes: 2 additions & 3 deletions src/test/ui/proc-macro/macro-rules-derive.rs
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
// aux-build:first-second.rs
// FIXME: The spans here are bad, see PR #73084

extern crate first_second;
use first_second::*;

macro_rules! produce_it {
($name:ident) => {
#[first] //~ ERROR cannot find type
#[first]
struct $name {
field: MissingType
field: MissingType //~ ERROR cannot find type
}
}
}
Expand Down
11 changes: 8 additions & 3 deletions src/test/ui/proc-macro/macro-rules-derive.stderr
Original file line number Diff line number Diff line change
@@ -1,8 +1,13 @@
error[E0412]: cannot find type `MissingType` in this scope
--> $DIR/macro-rules-derive.rs:9:9
--> $DIR/macro-rules-derive.rs:10:20
|
LL | #[first]
| ^^^^^^^^ not found in this scope
LL | field: MissingType
| ^^^^^^^^^^^ not found in this scope
...
LL | produce_it!(MyName);
| -------------------- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)

error: aborting due to previous error

Expand Down
87 changes: 49 additions & 38 deletions src/test/ui/proc-macro/nodelim-groups.stdout
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,6 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
},
]
PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1)
PRINT-BANG RE-COLLECTED (DISPLAY): "hi" "hello" . len() + "world" . len() (1 + 1)
PRINT-BANG INPUT (DEBUG): TokenStream [
Literal {
kind: Str,
Expand All @@ -82,50 +81,62 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
Group {
delimiter: None,
stream: TokenStream [
Literal {
kind: Str,
symbol: "hello",
suffix: None,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
delimiter: None,
stream: TokenStream [
Literal {
kind: Str,
symbol: "hello",
suffix: None,
span: $DIR/nodelim-groups.rs:21:17: 21:24 (#0),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:21:24: 21:25 (#0),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:21:25: 21:28 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:21:28: 21:30 (#0),
},
],
span: $DIR/nodelim-groups.rs:15:49: 15:54 (#7),
},
Punct {
ch: '+',
spacing: Alone,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Literal {
kind: Str,
symbol: "world",
suffix: None,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
span: $DIR/nodelim-groups.rs:15:55: 15:56 (#7),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
delimiter: None,
stream: TokenStream [
Literal {
kind: Str,
symbol: "world",
suffix: None,
span: $DIR/nodelim-groups.rs:21:33: 21:40 (#0),
},
Punct {
ch: '.',
spacing: Alone,
span: $DIR/nodelim-groups.rs:21:40: 21:41 (#0),
},
Ident {
ident: "len",
span: $DIR/nodelim-groups.rs:21:41: 21:44 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/nodelim-groups.rs:21:44: 21:46 (#0),
},
],
span: $DIR/nodelim-groups.rs:15:57: 15:62 (#7),
},
],
span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8),
Expand Down
6 changes: 2 additions & 4 deletions src/test/ui/proc-macro/weird-hygiene.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
// aux-build:weird-hygiene.rs
// check-pass
// FIXME: This should actually error, see PR #73084

#![feature(stmt_expr_attributes)]
#![feature(proc_macro_hygiene)]
Expand All @@ -22,7 +20,7 @@ macro_rules! other {

#[derive(WeirdDerive)]
enum MyEnum {
Value = (stringify!($tokens + hidden_ident), 1).1
Value = (stringify!($tokens + hidden_ident), 1).1 //~ ERROR cannot find
}

inner!();
Expand All @@ -33,7 +31,7 @@ macro_rules! invoke_it {
($token:expr) => {
#[recollect_attr] {
$token;
hidden_ident
hidden_ident //~ ERROR cannot find
}
}
}
Expand Down
25 changes: 25 additions & 0 deletions src/test/ui/proc-macro/weird-hygiene.stderr
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
error[E0425]: cannot find value `hidden_ident` in this scope
--> $DIR/weird-hygiene.rs:23:43
|
LL | Value = (stringify!($tokens + hidden_ident), 1).1
| ^^^^^^^^^^^^ not found in this scope
...
LL | other!(50);
| ----------- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)

error[E0425]: cannot find value `hidden_ident` in this scope
--> $DIR/weird-hygiene.rs:34:13
|
LL | hidden_ident
| ^^^^^^^^^^^^ not found in this scope
...
LL | invoke_it!(25);
| --------------- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)

error: aborting due to 2 previous errors

For more information about this error, try `rustc --explain E0425`.

0 comments on commit cd24aee

Please sign in to comment.