Attribute cleanups #127308

Merged: 9 commits, Jul 7, 2024
33 changes: 0 additions & 33 deletions compiler/rustc_ast/src/ast_traits.rs
@@ -10,8 +10,6 @@ use crate::{AssocItem, Expr, ForeignItem, Item, NodeId};
use crate::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
use crate::{AttrVec, Attribute, Stmt, StmtKind};

use rustc_span::Span;

use std::fmt;
use std::marker::PhantomData;

@@ -91,37 +89,6 @@ impl<T: AstDeref<Target: HasNodeId>> HasNodeId for T {
}
}

/// A trait for AST nodes having a span.
pub trait HasSpan {
fn span(&self) -> Span;
}

macro_rules! impl_has_span {
($($T:ty),+ $(,)?) => {
$(
impl HasSpan for $T {
fn span(&self) -> Span {
self.span
}
}
)+
};
}

impl_has_span!(AssocItem, Block, Expr, ForeignItem, Item, Pat, Path, Stmt, Ty, Visibility);

impl<T: AstDeref<Target: HasSpan>> HasSpan for T {
fn span(&self) -> Span {
self.ast_deref().span()
}
}

impl HasSpan for AttrItem {
fn span(&self) -> Span {
self.span()
}
}

/// A trait for AST nodes having (or not having) collected tokens.
pub trait HasTokens {
fn tokens(&self) -> Option<&LazyAttrTokenStream>;
3 changes: 2 additions & 1 deletion compiler/rustc_ast/src/attr/mod.rs
@@ -202,7 +202,8 @@ impl Attribute {
}
}

pub fn tokens(&self) -> TokenStream {
// Named `get_tokens` to distinguish it from the `<Attribute as HasTokens>::tokens` method.
pub fn get_tokens(&self) -> TokenStream {
match &self.kind {
AttrKind::Normal(normal) => TokenStream::new(
normal
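
The renamed inherent method now stays out of the way of the `HasTokens::tokens` trait method mentioned in the comment. A self-contained sketch of that pattern, using stand-in types rather than the real rustc `Attribute` and `HasTokens`:

```rust
// Stand-in types only; this is not the rustc API, just the naming pattern.
trait HasTokens {
    fn tokens(&self) -> Option<&str>;
}

struct Attribute {
    raw: String,
}

impl Attribute {
    // Inherent method, renamed so it no longer shares a name with `HasTokens::tokens`.
    fn get_tokens(&self) -> String {
        self.raw.clone()
    }
}

impl HasTokens for Attribute {
    fn tokens(&self) -> Option<&str> {
        Some(&self.raw)
    }
}

fn main() {
    let attr = Attribute { raw: "#[cfg(test)]".to_string() };
    println!("{}", attr.get_tokens()); // calls the inherent method
    println!("{:?}", attr.tokens());   // calls `HasTokens::tokens`
}
```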
2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/lib.rs
@@ -44,7 +44,7 @@ pub mod tokenstream;
pub mod visit;

pub use self::ast::*;
pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasSpan, HasTokens};
pub use self::ast_traits::{AstDeref, AstNodeWrapper, HasAttrs, HasNodeId, HasTokens};

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};

2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/mut_visit.rs
@@ -704,7 +704,7 @@ fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
visit_attr_tts(tts, vis);
visit_delim_span(dspan, vis);
}
AttrTokenTree::Attributes(AttributesData { attrs, tokens }) => {
AttrTokenTree::AttrsTarget(AttrsTarget { attrs, tokens }) => {
visit_attrs(attrs, vis);
visit_lazy_tts_opt_mut(Some(tokens), vis);
}
33 changes: 17 additions & 16 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -14,7 +14,7 @@
//! ownership of the original.

use crate::ast::{AttrStyle, StmtKind};
use crate::ast_traits::{HasAttrs, HasSpan, HasTokens};
use crate::ast_traits::{HasAttrs, HasTokens};
use crate::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use crate::AttrVec;

@@ -170,8 +170,8 @@ pub enum AttrTokenTree {
Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
/// Stores the attributes for an attribute target,
/// along with the tokens for that attribute target.
/// See `AttributesData` for more information
Attributes(AttributesData),
/// See `AttrsTarget` for more information
AttrsTarget(AttrsTarget),
}

impl AttrTokenStream {
@@ -180,7 +180,7 @@ impl AttrTokenStream {
}

/// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
/// During conversion, `AttrTokenTree::Attributes` get 'flattened'
/// During conversion, `AttrTokenTree::AttrsTarget` get 'flattened'
/// back to a `TokenStream` of the form `outer_attr attr_target`.
/// If there are inner attributes, they are inserted into the proper
/// place in the attribute target tokens.
@@ -199,13 +199,13 @@ impl AttrTokenStream {
TokenStream::new(stream.to_token_trees()),
))
}
AttrTokenTree::Attributes(data) => {
let idx = data
AttrTokenTree::AttrsTarget(target) => {
let idx = target
.attrs
.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
let (outer_attrs, inner_attrs) = target.attrs.split_at(idx);

let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
let mut target_tokens = target.tokens.to_attr_token_stream().to_token_trees();
if !inner_attrs.is_empty() {
let mut found = false;
// Check the last two trees (to account for a trailing semi)
@@ -227,7 +227,7 @@ impl AttrTokenStream {

let mut stream = TokenStream::default();
for inner_attr in inner_attrs {
stream.push_stream(inner_attr.tokens());
stream.push_stream(inner_attr.get_tokens());
}
stream.push_stream(delim_tokens.clone());
*tree = TokenTree::Delimited(*span, *spacing, *delim, stream);
@@ -242,7 +242,7 @@ impl AttrTokenStream {
);
}
for attr in outer_attrs {
res.extend(attr.tokens().0.iter().cloned());
res.extend(attr.get_tokens().0.iter().cloned());
}
res.extend(target_tokens);
}
@@ -262,7 +262,7 @@ impl AttrTokenStream {
/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`
#[derive(Clone, Debug, Encodable, Decodable)]
pub struct AttributesData {
pub struct AttrsTarget {
/// Attributes, both outer and inner.
/// These are stored in the original order that they were parsed in.
pub attrs: AttrVec,
@@ -436,17 +436,17 @@ impl TokenStream {
TokenStream::new(vec![TokenTree::token_alone(kind, span)])
}

pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
pub fn from_ast(node: &(impl HasAttrs + HasTokens + fmt::Debug)) -> TokenStream {
let Some(tokens) = node.tokens() else {
panic!("missing tokens for node at {:?}: {:?}", node.span(), node);
panic!("missing tokens for node: {:?}", node);
};
let attrs = node.attrs();
let attr_stream = if attrs.is_empty() {
tokens.to_attr_token_stream()
} else {
let attr_data =
AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
let target =
AttrsTarget { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
AttrTokenStream::new(vec![AttrTokenTree::AttrsTarget(target)])
};
TokenStream::new(attr_stream.to_token_trees())
}
@@ -765,6 +765,7 @@ mod size_asserts {
static_assert_size!(AttrTokenStream, 8);
static_assert_size!(AttrTokenTree, 32);
static_assert_size!(LazyAttrTokenStream, 8);
static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
static_assert_size!(TokenStream, 8);
static_assert_size!(TokenTree, 32);
// tidy-alphabetical-end
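
The `partition_point` call in `to_token_trees` assumes the outer attributes come first in `attrs` (they are stored in parse order). A standalone sketch of that split, with a stand-in `AttrStyle` and plain arrays instead of the real attribute types:

```rust
// Stand-in sketch of the outer/inner attribute split; not the rustc types.
#[derive(Debug)]
enum AttrStyle {
    Outer,
    Inner,
}

fn main() {
    // Parse order: outer attributes precede inner ones.
    let attrs = [AttrStyle::Outer, AttrStyle::Outer, AttrStyle::Inner];
    // `partition_point` finds the boundary between the two groups.
    let idx = attrs.partition_point(|style| matches!(style, AttrStyle::Outer));
    let (outer_attrs, inner_attrs) = attrs.split_at(idx);
    println!("outer = {outer_attrs:?}, inner = {inner_attrs:?}");
}
```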
2 changes: 1 addition & 1 deletion compiler/rustc_builtin_macros/src/cfg_eval.rs
@@ -193,7 +193,7 @@ impl CfgEval<'_> {

// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
// to the captured `AttrTokenStream` (specifically, we capture
// `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
// `AttrTokenTree::AttrsTarget` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
parser.capture_cfg = true;
match parse_annotatable_with(&mut parser) {
29 changes: 13 additions & 16 deletions compiler/rustc_expand/src/config.rs
@@ -172,7 +172,7 @@ impl<'a> StripUnconfigured<'a> {
fn configure_tokens(&self, stream: &AttrTokenStream) -> AttrTokenStream {
fn can_skip(stream: &AttrTokenStream) -> bool {
stream.0.iter().all(|tree| match tree {
AttrTokenTree::Attributes(_) => false,
AttrTokenTree::AttrsTarget(_) => false,
AttrTokenTree::Token(..) => true,
AttrTokenTree::Delimited(.., inner) => can_skip(inner),
})
@@ -185,22 +185,22 @@ impl<'a> StripUnconfigured<'a> {
let trees: Vec<_> = stream
.0
.iter()
.flat_map(|tree| match tree.clone() {
AttrTokenTree::Attributes(mut data) => {
data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
.filter_map(|tree| match tree.clone() {
AttrTokenTree::AttrsTarget(mut target) => {
target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));

if self.in_cfg(&data.attrs) {
data.tokens = LazyAttrTokenStream::new(
self.configure_tokens(&data.tokens.to_attr_token_stream()),
if self.in_cfg(&target.attrs) {
target.tokens = LazyAttrTokenStream::new(
self.configure_tokens(&target.tokens.to_attr_token_stream()),
);
Some(AttrTokenTree::Attributes(data)).into_iter()
Some(AttrTokenTree::AttrsTarget(target))
} else {
None.into_iter()
None
}
}
AttrTokenTree::Delimited(sp, spacing, delim, mut inner) => {
inner = self.configure_tokens(&inner);
Some(AttrTokenTree::Delimited(sp, spacing, delim, inner)).into_iter()
Some(AttrTokenTree::Delimited(sp, spacing, delim, inner))
}
AttrTokenTree::Token(
Token {
@@ -220,9 +220,7 @@ impl<'a> StripUnconfigured<'a> {
) => {
panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tree);
}
AttrTokenTree::Token(token, spacing) => {
Some(AttrTokenTree::Token(token, spacing)).into_iter()
}
AttrTokenTree::Token(token, spacing) => Some(AttrTokenTree::Token(token, spacing)),
})
.collect();
AttrTokenStream::new(trees)
@@ -294,7 +292,7 @@ impl<'a> StripUnconfigured<'a> {
attr: &Attribute,
(item, item_span): (ast::AttrItem, Span),
) -> Attribute {
let orig_tokens = attr.tokens();
let orig_tokens = attr.get_tokens();

// We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
// and producing an attribute of the form `#[attr]`. We
@@ -310,12 +308,11 @@ impl<'a> StripUnconfigured<'a> {
else {
panic!("Bad tokens for attribute {attr:?}");
};
let pound_span = pound_token.span;

// We don't really have a good span to use for the synthesized `[]`
// in `#[attr]`, so just use the span of the `#` token.
let bracket_group = AttrTokenTree::Delimited(
DelimSpan::from_single(pound_span),
DelimSpan::from_single(pound_token.span),
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Bracket,
item.tokens
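
The `configure_tokens` change above swaps `flat_map` for `filter_map`, since each tree maps to zero or one output trees. A standalone illustration of that iterator cleanup, with plain integers standing in for token trees:

```rust
fn main() {
    let nums = [1, 2, 3, 4];

    // Before: every arm had to yield an iterator, hence `.into_iter()` per arm.
    let with_flat_map: Vec<i32> = nums
        .iter()
        .flat_map(|&n| match n % 2 {
            0 => Some(n * 10).into_iter(),
            _ => None.into_iter(),
        })
        .collect();

    // After: each arm returns the `Option` directly and `filter_map` unwraps it.
    let with_filter_map: Vec<i32> = nums
        .iter()
        .filter_map(|&n| match n % 2 {
            0 => Some(n * 10),
            _ => None,
        })
        .collect();

    assert_eq!(with_flat_map, with_filter_map);
    println!("{with_filter_map:?}");
}
```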
6 changes: 3 additions & 3 deletions compiler/rustc_parse/src/parser/attr.rs
@@ -282,7 +282,7 @@ impl<'a> Parser<'a> {
pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
let mut attrs = ast::AttrVec::new();
loop {
let start_pos: u32 = self.num_bump_calls.try_into().unwrap();
let start_pos = self.num_bump_calls;
// Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
None
};
if let Some(attr) = attr {
let end_pos: u32 = self.num_bump_calls.try_into().unwrap();
let end_pos = self.num_bump_calls;
// If we are currently capturing tokens, mark the location of this inner attribute.
// If capturing ends up creating a `LazyAttrTokenStream`, we will include
// this replace range with it, removing the inner attribute from the final
@@ -313,7 +313,7 @@ impl<'a> Parser<'a> {
// corresponding macro).
let range = start_pos..end_pos;
if let Capturing::Yes = self.capture_state.capturing {
self.capture_state.inner_attr_ranges.insert(attr.id, (range, vec![]));
self.capture_state.inner_attr_ranges.insert(attr.id, (range, None));
}
attrs.push(attr);
} else {
53 changes: 23 additions & 30 deletions compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,14 +1,13 @@
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing};
use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
use rustc_ast::{self as ast};
use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
use rustc_errors::PResult;
use rustc_session::parse::ParseSess;
use rustc_span::{sym, Span, DUMMY_SP};

use std::ops::Range;
use std::{iter, mem};

/// A wrapper type to ensure that the parser handles outer attributes correctly.
@@ -88,7 +87,6 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
//
// This also makes `Parser` very cheap to clone, since
// there is no intermediate collection buffer to clone.
#[derive(Clone)]
struct LazyAttrTokenStreamImpl {
start_token: (Token, Spacing),
cursor_snapshot: TokenCursor,
@@ -146,24 +144,23 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
// start position, we ensure that any replace range which encloses
// another replace range will capture the *replaced* tokens for the inner
// range, not the original tokens.
for (range, new_tokens) in replace_ranges.into_iter().rev() {
for (range, target) in replace_ranges.into_iter().rev() {
assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
// Replace ranges are only allowed to decrease the number of tokens.
assert!(
range.len() >= new_tokens.len(),
"Range {range:?} has greater len than {new_tokens:?}"
);

// Replace any removed tokens with `FlatToken::Empty`.
// This keeps the total length of `tokens` constant throughout the
// replacement process, allowing us to use all of the `ReplaceRanges` entries
// without adjusting indices.
let filler = iter::repeat((FlatToken::Empty, Spacing::Alone))
.take(range.len() - new_tokens.len());

// Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
// enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
// total length of `tokens` constant throughout the replacement process, allowing
// us to use all of the `ReplaceRanges` entries without adjusting indices.
let target_len = target.is_some() as usize;
tokens.splice(
(range.start as usize)..(range.end as usize),
new_tokens.into_iter().chain(filler),
target
.into_iter()
.map(|target| (FlatToken::AttrsTarget(target), Spacing::Alone))
.chain(
iter::repeat((FlatToken::Empty, Spacing::Alone))
.take(range.len() - target_len),
),
);
}
make_attr_token_stream(tokens.into_iter(), self.break_last_token)
@@ -316,7 +313,7 @@ impl<'a> Parser<'a> {
.iter()
.cloned()
.chain(inner_attr_replace_ranges.iter().cloned())
.map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
.map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data))
.collect()
};

@@ -346,18 +343,14 @@ impl<'a> Parser<'a> {
&& matches!(self.capture_state.capturing, Capturing::Yes)
&& has_cfg_or_cfg_attr(final_attrs)
{
let attr_data = AttributesData { attrs: final_attrs.iter().cloned().collect(), tokens };
assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");

// Replace the entire AST node that we just parsed, including attributes,
// with a `FlatToken::AttrTarget`. If this AST node is inside an item
// that has `#[derive]`, then this will allow us to cfg-expand this
// AST node.
// Replace the entire AST node that we just parsed, including attributes, with
// `target`. If this AST node is inside an item that has `#[derive]`, then this will
// allow us to cfg-expand this AST node.
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];

assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
self.capture_state.replace_ranges.push((range, new_tokens));
let target = AttrsTarget { attrs: final_attrs.iter().cloned().collect(), tokens };
self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
}

@@ -419,11 +412,11 @@ fn make_attr_token_stream(
.expect("Bottom token frame is missing!")
.inner
.push(AttrTokenTree::Token(token, spacing)),
FlatToken::AttrTarget(data) => stack
FlatToken::AttrsTarget(target) => stack
.last_mut()
.expect("Bottom token frame is missing!")
.inner
.push(AttrTokenTree::Attributes(data)),
.push(AttrTokenTree::AttrsTarget(target)),
FlatToken::Empty => {}
}
token_and_spacing = iter.next();
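
The splice in `to_attr_token_stream` above keeps the token buffer the same length: a replace range becomes zero or one `FlatToken::AttrsTarget` entries padded with `FlatToken::Empty`, so later replace ranges can keep using the original indices. A stand-in sketch of the same length-preserving splice, using plain strings instead of flat tokens:

```rust
// Illustrative only; "TARGET" and "<empty>" stand in for the real flat tokens.
use std::iter;

fn main() {
    let mut tokens = vec!["a", "b", "c", "d", "e"];
    let range = 1..4; // positions of "b", "c", "d"
    let target: Option<&str> = Some("TARGET"); // `None` would leave only filler

    let target_len = target.is_some() as usize;
    tokens.splice(
        range.clone(),
        target
            .into_iter()
            .chain(iter::repeat("<empty>").take(range.len() - target_len)),
    );

    // Length is unchanged, so indices of later ranges still line up.
    assert_eq!(tokens, ["a", "TARGET", "<empty>", "<empty>", "e"]);
    println!("{tokens:?}");
}
```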
17 changes: 8 additions & 9 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -20,7 +20,7 @@ use path::PathStyle;

use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
use rustc_ast::tokenstream::{AttrsTarget, DelimSpacing, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
use rustc_ast::util::case::Case;
use rustc_ast::{
@@ -203,13 +203,13 @@ struct ClosureSpans {
}

/// Indicates a range of tokens that should be replaced by
/// the tokens in the provided vector. This is used in two
/// the tokens in the provided `AttrsTarget`. This is used in two
/// places during token collection:
///
/// 1. During the parsing of an AST node that may have a `#[derive]`
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node
/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
/// with `FlatToken::AttrsTarget`, allowing us to perform eager cfg-expansion
/// on an `AttrTokenStream`.
///
/// 2. When we parse an inner attribute while collecting tokens. We
@@ -219,7 +219,7 @@ struct ClosureSpans {
/// the first macro inner attribute to invoke a proc-macro).
/// When create a `TokenStream`, the inner attributes get inserted
/// into the proper place in the token stream.
type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
type ReplaceRange = (Range<u32>, Option<AttrsTarget>);

/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
@@ -1608,11 +1608,10 @@ enum FlatToken {
/// A token - this holds both delimiter (e.g. '{' and '}')
/// and non-delimiter tokens
Token(Token),
/// Holds the `AttributesData` for an AST node. The
/// `AttributesData` is inserted directly into the
/// constructed `AttrTokenStream` as
/// an `AttrTokenTree::Attributes`.
AttrTarget(AttributesData),
/// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
/// directly into the constructed `AttrTokenStream` as an
/// `AttrTokenTree::AttrsTarget`.
AttrsTarget(AttrsTarget),
/// A special 'empty' token that is ignored during the conversion
/// to an `AttrTokenStream`. This is used to simplify the
/// handling of replace ranges.
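
For readers following the `ReplaceRange` change, here is a simplified sketch (placeholder types, not the real parser ones) of what the two payload cases appear to mean per the surrounding comments: `None` drops the tokens of an inner attribute, while `Some(target)` collapses a node into a single attributes target for later cfg-expansion.

```rust
// Placeholder types; the real AttrsTarget and ReplaceRange live in rustc.
use std::ops::Range;

#[derive(Debug)]
struct AttrsTargetStub {
    attrs: Vec<String>,
}

type ReplaceRangeStub = (Range<u32>, Option<AttrsTargetStub>);

fn main() {
    // An inner attribute spanning token positions 3..5 is simply removed.
    let drop_inner: ReplaceRangeStub = (3..5, None);

    // A whole annotated node spanning 0..8 collapses into one target that can
    // be cfg-expanded later.
    let collapse: ReplaceRangeStub = (
        0..8,
        Some(AttrsTargetStub { attrs: vec!["#[cfg(feature = \"x\")]".into()] }),
    );

    println!("{drop_inner:?}");
    println!("{collapse:?}");
}
```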
2 changes: 1 addition & 1 deletion compiler/rustc_resolve/src/late.rs
@@ -1744,7 +1744,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
) {
self.r.dcx().emit_err(errors::LendingIteratorReportError {
lifetime: lifetime.ident.span,
ty: ty.span(),
ty: ty.span,
});
} else {
self.r.dcx().emit_err(errors::AnonymousLivetimeNonGatReportError {