Rollup merge of #101602 - nnethercote:AttrTokenStream, r=petrochenkov
Streamline `AttrAnnotatedTokenStream`

r? ```@petrochenkov```
Dylan-DPC authored Sep 13, 2022
2 parents c815756 + d2df07c commit db75d7e
Showing 11 changed files with 181 additions and 200 deletions.
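
At a glance, the commit renames the attribute-aware token stream machinery: `AttrAnnotatedTokenStream` becomes `AttrTokenStream`, `AttrAnnotatedTokenTree` becomes `AttrTokenTree`, `LazyTokenStream` becomes `LazyAttrTokenStream`, `CreateTokenStream` becomes `ToAttrTokenStream`, and `create_token_stream()` becomes `to_attr_token_stream()`, with a separate `to_tokenstream()` used where a plain `TokenStream` is needed. The sketch below is a toy model of that post-commit shape, using stand-in types rather than the real `rustc_ast` definitions, just to show how the renamed pieces relate.

```rust
use std::rc::Rc;

// Toy stand-ins for the real rustc_ast types: the names follow the
// post-commit API, but the bodies are heavily simplified for illustration.

/// A plain token stream with attribute structure already flattened away.
#[derive(Clone, Debug)]
struct TokenStream(Vec<String>);

/// A token stream that still records which tokens belong to attributes.
#[derive(Clone, Debug)]
struct AttrTokenStream(Vec<String>);

impl AttrTokenStream {
    /// Flatten into a plain `TokenStream` (the real method re-inserts
    /// `#[attr]` tokens in the right places; here we just copy).
    fn to_tokenstream(&self) -> TokenStream {
        TokenStream(self.0.clone())
    }
}

/// Deferred construction of an `AttrTokenStream` (formerly `CreateTokenStream`).
trait ToAttrTokenStream {
    fn to_attr_token_stream(&self) -> AttrTokenStream;
}

/// An already-built stream trivially implements the trait.
impl ToAttrTokenStream for AttrTokenStream {
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        self.clone()
    }
}

/// A lazily evaluated `AttrTokenStream` (formerly `LazyTokenStream`).
#[derive(Clone)]
struct LazyAttrTokenStream(Rc<dyn ToAttrTokenStream>);

impl LazyAttrTokenStream {
    fn new(inner: impl ToAttrTokenStream + 'static) -> Self {
        LazyAttrTokenStream(Rc::new(inner))
    }
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        self.0.to_attr_token_stream()
    }
}

fn main() {
    let lazy = LazyAttrTokenStream::new(AttrTokenStream(vec!["fn".into(), "f".into()]));
    // Old spelling: `lazy.create_token_stream()`. New spelling:
    let attrs: AttrTokenStream = lazy.to_attr_token_stream();
    let plain: TokenStream = attrs.to_tokenstream();
    println!("{plain:?}");
}
```
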
28 changes: 14 additions & 14 deletions compiler/rustc_ast/src/ast.rs
@@ -24,7 +24,7 @@ pub use UnsafeSource::*;

use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream};
use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, TokenStream};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::Lrc;
@@ -91,7 +91,7 @@ pub struct Path {
/// The segments in the path: the things separated by `::`.
/// Global paths begin with `kw::PathRoot`.
pub segments: Vec<PathSegment>,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl PartialEq<Symbol> for Path {
@@ -534,7 +534,7 @@ pub struct Block {
/// Distinguishes between `unsafe { ... }` and `{ ... }`.
pub rules: BlockCheckMode,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
/// The following *isn't* a parse error, but will cause multiple errors in following stages.
/// ```compile_fail
/// let x = {
@@ -553,7 +553,7 @@ pub struct Pat {
pub id: NodeId,
pub kind: PatKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Pat {
@@ -937,8 +937,8 @@ impl Stmt {
/// a trailing semicolon.
///
/// This only modifies the parsed AST struct, not the attached
/// `LazyTokenStream`. The parser is responsible for calling
/// `CreateTokenStream::add_trailing_semi` when there is actually
/// `LazyAttrTokenStream`. The parser is responsible for calling
/// `ToAttrTokenStream::add_trailing_semi` when there is actually
/// a semicolon in the tokenstream.
pub fn add_trailing_semicolon(mut self) -> Self {
self.kind = match self.kind {
@@ -984,7 +984,7 @@ pub struct MacCallStmt {
pub mac: P<MacCall>,
pub style: MacStmtStyle,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]
@@ -1009,7 +1009,7 @@ pub struct Local {
pub kind: LocalKind,
pub span: Span,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Encodable, Decodable, Debug)]
@@ -1108,7 +1108,7 @@ pub struct Expr {
pub kind: ExprKind,
pub span: Span,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Expr {
@@ -1967,7 +1967,7 @@ pub struct Ty {
pub id: NodeId,
pub kind: TyKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Clone for Ty {
@@ -2532,7 +2532,7 @@ impl<D: Decoder> Decodable<D> for AttrId {
pub struct AttrItem {
pub path: Path,
pub args: MacArgs,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

/// A list of attributes.
@@ -2552,7 +2552,7 @@ pub struct Attribute {
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct NormalAttr {
pub item: AttrItem,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Encodable, Decodable, Debug)]
@@ -2603,7 +2603,7 @@ impl PolyTraitRef {
pub struct Visibility {
pub kind: VisibilityKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Encodable, Decodable, Debug)]
@@ -2689,7 +2689,7 @@ pub struct Item<K = ItemKind> {
///
/// Note that the tokens here do not include the outer attributes, but will
/// include inner attributes.
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Item {
38 changes: 19 additions & 19 deletions compiler/rustc_ast/src/ast_traits.rs
@@ -4,7 +4,7 @@
use crate::ptr::P;
use crate::token::Nonterminal;
use crate::tokenstream::LazyTokenStream;
use crate::tokenstream::LazyAttrTokenStream;
use crate::{Arm, Crate, ExprField, FieldDef, GenericParam, Param, PatField, Variant};
use crate::{AssocItem, Expr, ForeignItem, Item, NodeId};
use crate::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
@@ -124,18 +124,18 @@ impl HasSpan for AttrItem {

/// A trait for AST nodes having (or not having) collected tokens.
pub trait HasTokens {
fn tokens(&self) -> Option<&LazyTokenStream>;
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
fn tokens(&self) -> Option<&LazyAttrTokenStream>;
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>>;
}

macro_rules! impl_has_tokens {
($($T:ty),+ $(,)?) => {
$(
impl HasTokens for $T {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.tokens.as_ref()
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
Some(&mut self.tokens)
}
}
@@ -147,10 +147,10 @@ macro_rules! impl_has_tokens_none {
($($T:ty),+ $(,)?) => {
$(
impl HasTokens for $T {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
None
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
None
}
}
@@ -162,25 +162,25 @@ impl_has_tokens!(AssocItem, AttrItem, Block, Expr, ForeignItem, Item, Pat, Path,
impl_has_tokens_none!(Arm, ExprField, FieldDef, GenericParam, Param, PatField, Variant);

impl<T: AstDeref<Target: HasTokens>> HasTokens for T {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.ast_deref().tokens()
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
self.ast_deref_mut().tokens_mut()
}
}

impl<T: HasTokens> HasTokens for Option<T> {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.as_ref().and_then(|inner| inner.tokens())
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
self.as_mut().and_then(|inner| inner.tokens_mut())
}
}

impl HasTokens for StmtKind {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
match self {
StmtKind::Local(local) => local.tokens.as_ref(),
StmtKind::Item(item) => item.tokens(),
@@ -189,7 +189,7 @@ impl HasTokens for StmtKind {
StmtKind::MacCall(mac) => mac.tokens.as_ref(),
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
match self {
StmtKind::Local(local) => Some(&mut local.tokens),
StmtKind::Item(item) => item.tokens_mut(),
@@ -201,24 +201,24 @@ }
}

impl HasTokens for Stmt {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.kind.tokens()
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
self.kind.tokens_mut()
}
}

impl HasTokens for Attribute {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
match &self.kind {
AttrKind::Normal(normal) => normal.tokens.as_ref(),
kind @ AttrKind::DocComment(..) => {
panic!("Called tokens on doc comment attr {:?}", kind)
}
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
Some(match &mut self.kind {
AttrKind::Normal(normal) => &mut normal.tokens,
kind @ AttrKind::DocComment(..) => {
@@ -229,7 +229,7 @@ impl HasTokens for Attribute {
}

impl HasTokens for Nonterminal {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
match self {
Nonterminal::NtItem(item) => item.tokens(),
Nonterminal::NtStmt(stmt) => stmt.tokens(),
@@ -243,7 +243,7 @@ impl HasTokens for Nonterminal {
Nonterminal::NtIdent(..) | Nonterminal::NtLifetime(..) => None,
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
match self {
Nonterminal::NtItem(item) => item.tokens_mut(),
Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
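
The `ast_traits.rs` hunks above only change the token-stream type named in the `HasTokens` accessors, but the trait-plus-macro pattern itself is easier to read in one piece. The following is a standalone sketch of that pattern with a placeholder `LazyAttrTokenStream` and two hypothetical node types (`Expr`, `Param`) standing in for the real AST nodes.

```rust
// Standalone sketch of the `HasTokens` pattern from ast_traits.rs, using a
// placeholder LazyAttrTokenStream and hypothetical `Expr`/`Param` node types.

struct LazyAttrTokenStream;

trait HasTokens {
    fn tokens(&self) -> Option<&LazyAttrTokenStream>;
    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>>;
}

/// Nodes that carry a `tokens` field get a forwarding impl.
macro_rules! impl_has_tokens {
    ($($T:ty),+ $(,)?) => {
        $(
            impl HasTokens for $T {
                fn tokens(&self) -> Option<&LazyAttrTokenStream> {
                    self.tokens.as_ref()
                }
                fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
                    Some(&mut self.tokens)
                }
            }
        )+
    };
}

/// Nodes that never collect tokens get a trivial impl.
macro_rules! impl_has_tokens_none {
    ($($T:ty),+ $(,)?) => {
        $(
            impl HasTokens for $T {
                fn tokens(&self) -> Option<&LazyAttrTokenStream> { None }
                fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> { None }
            }
        )+
    };
}

#[derive(Default)]
struct Expr {
    tokens: Option<LazyAttrTokenStream>,
}

#[derive(Default)]
struct Param;

impl_has_tokens!(Expr);
impl_has_tokens_none!(Param);

fn main() {
    let mut expr = Expr::default();
    *expr.tokens_mut().unwrap() = Some(LazyAttrTokenStream);
    assert!(expr.tokens().is_some());
    assert!(Param::default().tokens().is_none());
}
```
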
19 changes: 8 additions & 11 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -7,9 +7,8 @@ use crate::ast::{MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind, Neste
use crate::ast::{Path, PathSegment};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
use crate::tokenstream::{LazyTokenStream, TokenStream};
use crate::tokenstream::{LazyAttrTokenStream, TokenStream};
use crate::util::comments;

use rustc_index::bit_set::GrowableBitSet;
@@ -296,20 +295,18 @@ impl Attribute {
}
}

pub fn tokens(&self) -> AttrAnnotatedTokenStream {
pub fn tokens(&self) -> TokenStream {
match self.kind {
AttrKind::Normal(ref normal) => normal
.tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.create_token_stream(),
AttrKind::DocComment(comment_kind, data) => AttrAnnotatedTokenStream::from((
AttrAnnotatedTokenTree::Token(Token::new(
token::DocComment(comment_kind, self.style, data),
self.span,
)),
.to_attr_token_stream()
.to_tokenstream(),
AttrKind::DocComment(comment_kind, data) => TokenStream::new(vec![TokenTree::Token(
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
Spacing::Alone,
)),
)]),
}
}
}
@@ -356,7 +353,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attri

pub fn mk_attr_from_item(
item: AttrItem,
tokens: Option<LazyTokenStream>,
tokens: Option<LazyAttrTokenStream>,
style: AttrStyle,
span: Span,
) -> Attribute {
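
In `attr/mod.rs`, `Attribute::tokens` now returns a plain `TokenStream`: normal attributes flatten their lazily captured stream via `to_attr_token_stream().to_tokenstream()`, while doc comments synthesize a single-token stream on the spot. Below is a simplified, self-contained sketch of that control flow, with toy types, no lazy stream, and a plain string in place of the real `DocComment` token.

```rust
// Simplified sketch of the new `Attribute::tokens` control flow (toy types):
// normal attributes flatten their captured stream, doc comments build a
// one-token stream directly.

#[derive(Clone, Debug)]
enum TokenTree {
    Token(String),
}

#[derive(Clone, Debug)]
struct TokenStream(Vec<TokenTree>);

impl TokenStream {
    fn new(trees: Vec<TokenTree>) -> Self {
        TokenStream(trees)
    }
}

enum AttrKind {
    /// Carries the token stream captured while parsing `#[...]`
    /// (the real code stores it lazily and flattens it on demand).
    Normal { captured: Option<TokenStream> },
    /// `/// ...` comments carry no captured stream.
    DocComment { text: String },
}

struct Attribute {
    kind: AttrKind,
}

impl Attribute {
    fn tokens(&self) -> TokenStream {
        match &self.kind {
            AttrKind::Normal { captured } => captured
                .as_ref()
                .unwrap_or_else(|| panic!("attribute is missing tokens"))
                .clone(),
            AttrKind::DocComment { text } => {
                // Synthesize a single doc-comment token on the spot.
                TokenStream::new(vec![TokenTree::Token(format!("doc({text})"))])
            }
        }
    }
}

fn main() {
    let normal = Attribute {
        kind: AttrKind::Normal {
            captured: Some(TokenStream::new(vec![TokenTree::Token("derive".into())])),
        },
    };
    let doc = Attribute {
        kind: AttrKind::DocComment { text: "hello".into() },
    };
    println!("{:?} {:?}", normal.tokens(), doc.tokens());
}
```
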
1 change: 1 addition & 0 deletions compiler/rustc_ast/src/lib.rs
@@ -15,6 +15,7 @@
#![feature(if_let_guard)]
#![cfg_attr(bootstrap, feature(label_break_value))]
#![feature(let_chains)]
#![feature(let_else)]
#![feature(min_specialization)]
#![feature(negative_impls)]
#![feature(slice_internals)]
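
The only change to `lib.rs` is the new `let_else` feature gate, which enables `let`-`else` statements, presumably needed by refactored code elsewhere in this commit (the syntax has since been stabilized in Rust 1.65). For reference, a minimal standalone example of what the gate allows:

```rust
// `let`-`else`: bind the pattern if it matches, otherwise diverge.
fn first_char(s: &str) -> char {
    let Some(c) = s.chars().next() else {
        return '?';
    };
    c
}

fn main() {
    assert_eq!(first_char("rust"), 'r');
    assert_eq!(first_char(""), '?');
}
```
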
30 changes: 15 additions & 15 deletions compiler/rustc_ast/src/mut_visit.rs
@@ -642,17 +642,17 @@ pub fn noop_flat_map_param<T: MutVisitor>(mut param: Param, vis: &mut T) -> Smal
}

// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) {
pub fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
match tt {
AttrAnnotatedTokenTree::Token(token) => {
AttrTokenTree::Token(token, _) => {
visit_token(token, vis);
}
AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
AttrTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
vis.visit_span(open);
vis.visit_span(close);
visit_attr_annotated_tts(tts, vis);
visit_attr_tts(tts, vis);
}
AttrAnnotatedTokenTree::Attributes(data) => {
AttrTokenTree::Attributes(data) => {
for attr in &mut *data.attrs {
match &mut attr.kind {
AttrKind::Normal(normal) => {
@@ -690,27 +690,27 @@ pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T)
}
}

pub fn visit_attr_annotated_tts<T: MutVisitor>(
AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
vis: &mut T,
) {
pub fn visit_attr_tts<T: MutVisitor>(AttrTokenStream(tts): &mut AttrTokenStream, vis: &mut T) {
if T::VISIT_TOKENS && !tts.is_empty() {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| visit_attr_annotated_tt(tree, vis));
visit_vec(tts, |tree| visit_attr_tt(tree, vis));
}
}

pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) {
pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(
lazy_tts: Option<&mut LazyAttrTokenStream>,
vis: &mut T,
) {
if T::VISIT_TOKENS {
if let Some(lazy_tts) = lazy_tts {
let mut tts = lazy_tts.create_token_stream();
visit_attr_annotated_tts(&mut tts, vis);
*lazy_tts = LazyTokenStream::new(tts);
let mut tts = lazy_tts.to_attr_token_stream();
visit_attr_tts(&mut tts, vis);
*lazy_tts = LazyAttrTokenStream::new(tts);
}
}
}

pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyAttrTokenStream>, vis: &mut T) {
visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
}

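
After the rename, `visit_lazy_tts_opt_mut` keeps the same materialize-visit-rewrap shape: it turns the lazy stream into an `AttrTokenStream`, runs the visitor over it, and wraps the result back up with `LazyAttrTokenStream::new`. Note also that `AttrTokenTree::Token` now carries its `Spacing` directly, so the visitor iterates over trees rather than `(tree, is_joint)` pairs. A minimal sketch of the round trip, with toy types and a hard-coded "visitor" in place of a real `MutVisitor`:

```rust
// Minimal sketch of the materialize-visit-rewrap round trip in
// `visit_lazy_tts_opt_mut`, with toy types and a hard-coded "visitor".

#[derive(Clone)]
struct AttrTokenStream(Vec<String>);

struct LazyAttrTokenStream(AttrTokenStream); // the real type defers construction

impl LazyAttrTokenStream {
    fn new(tts: AttrTokenStream) -> Self {
        LazyAttrTokenStream(tts)
    }
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        self.0.clone()
    }
}

/// Stand-in for a `MutVisitor` pass: uppercase every token in place.
fn visit_attr_tts(tts: &mut AttrTokenStream) {
    for tok in &mut tts.0 {
        *tok = tok.to_uppercase();
    }
}

fn visit_lazy_tts_opt_mut(lazy_tts: Option<&mut LazyAttrTokenStream>) {
    if let Some(lazy_tts) = lazy_tts {
        // Materialize, rewrite, and wrap back up -- the same shape as the
        // real function after this commit.
        let mut tts = lazy_tts.to_attr_token_stream();
        visit_attr_tts(&mut tts);
        *lazy_tts = LazyAttrTokenStream::new(tts);
    }
}

fn main() {
    let mut lazy = LazyAttrTokenStream::new(AttrTokenStream(vec!["fn".into(), "f".into()]));
    visit_lazy_tts_opt_mut(Some(&mut lazy));
    assert_eq!(lazy.to_attr_token_stream().0, vec!["FN".to_string(), "F".to_string()]);
}
```
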
Diffs for the remaining 6 changed files are not shown here.
