Auto merge of rust-lang#95835 - Dylan-DPC:rollup-l5mf2ad, r=Dylan-DPC
Rollup of 8 pull requests

Successful merges:

 - rust-lang#90066 (Add new ThinBox type for 1 stack pointer wide heap allocated trait objects)
 - rust-lang#95374 (assert_uninit_valid: ensure we detect at least arrays of uninhabited types)
 - rust-lang#95599 (Strict provenance lints)
 - rust-lang#95751 (Don't report numeric inference ambiguity when we have previous errors)
 - rust-lang#95764 ([macro_metavar_expr] Add tests to ensure the feature requirement)
 - rust-lang#95787 (reword panic vs result section to remove recoverable vs unrecoverable framing)
 - rust-lang#95797 (Remove explicit delimiter token trees from `Delimited`.)
 - rust-lang#95804 (rustdoc: Fix empty doc comment with backline ICE)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Apr 9, 2022
2 parents e980c62 + 8f4680e commit 4bb685e
Showing 40 changed files with 1,029 additions and 151 deletions.
5 changes: 4 additions & 1 deletion compiler/rustc_ast/src/util/comments.rs
@@ -52,7 +52,10 @@ pub fn beautify_doc_string(data: Symbol, kind: CommentKind) -> Symbol {
// when we try to compute the "horizontal trim".
let lines = if kind == CommentKind::Block {
// Whatever happens, we skip the first line.
let mut i = if lines[0].trim_start().starts_with('*') { 0 } else { 1 };
let mut i = lines
.get(0)
.map(|l| if l.trim_start().starts_with('*') { 0 } else { 1 })
.unwrap_or(0);
let mut j = lines.len();

while i < j && lines[i].trim().is_empty() {
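This hunk is the rust-lang#95804 fix: when a block doc comment ends up with no lines at all, `lines[0]` panics, so the starting index is now computed with `get(0)` plus a fallback. A minimal standalone sketch of the same pattern (toy function and inputs, not the surrounding compiler code):

```rust
/// Toy version of the index computation: pick the first line of a block doc
/// comment to keep, falling back to 0 when the input is empty.
fn first_kept_line(lines: &[&str]) -> usize {
    lines
        .get(0)
        .map(|l| if l.trim_start().starts_with('*') { 0 } else { 1 })
        .unwrap_or(0)
}

fn main() {
    assert_eq!(first_kept_line(&[]), 0); // `lines[0]` would panic here
    assert_eq!(first_kept_line(&["* one line"]), 0);
    assert_eq!(first_kept_line(&["first line", " * second line"]), 1);
}
```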
44 changes: 10 additions & 34 deletions compiler/rustc_expand/src/mbe.rs
@@ -17,48 +17,24 @@ use rustc_data_structures::sync::Lrc;
use rustc_span::symbol::Ident;
use rustc_span::Span;

/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiter itself
/// might be `NoDelim`.
/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`. The delimiters
/// might be `NoDelim`, but they are not represented explicitly.
#[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
struct Delimited {
delim: token::DelimToken,
/// Note: This contains the opening and closing delimiters tokens (e.g. `(` and `)`). Note that
/// these could be `NoDelim`. These token kinds must match `delim`, and the methods below
/// debug_assert this.
all_tts: Vec<TokenTree>,
/// FIXME: #67062 has details about why this is sub-optimal.
tts: Vec<TokenTree>,
}

impl Delimited {
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter. Panics if
/// the delimiter is `NoDelim`.
fn open_tt(&self) -> &TokenTree {
let tt = self.all_tts.first().unwrap();
debug_assert!(matches!(
tt,
&TokenTree::Token(token::Token { kind: token::OpenDelim(d), .. }) if d == self.delim
));
tt
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
fn open_tt(&self, span: DelimSpan) -> TokenTree {
TokenTree::token(token::OpenDelim(self.delim), span.open)
}

/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. Panics if
/// the delimiter is `NoDelim`.
fn close_tt(&self) -> &TokenTree {
let tt = self.all_tts.last().unwrap();
debug_assert!(matches!(
tt,
&TokenTree::Token(token::Token { kind: token::CloseDelim(d), .. }) if d == self.delim
));
tt
}

/// Returns the tts excluding the outer delimiters.
///
/// FIXME: #67062 has details about why this is sub-optimal.
fn inner_tts(&self) -> &[TokenTree] {
// These functions are called for the assertions within them.
let _open_tt = self.open_tt();
let _close_tt = self.close_tt();
&self.all_tts[1..self.all_tts.len() - 1]
/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
fn close_tt(&self, span: DelimSpan) -> TokenTree {
TokenTree::token(token::CloseDelim(self.delim), span.close)
}
}

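Net effect of the hunk above: `Delimited` keeps only the inner token trees, and `open_tt`/`close_tt` synthesize delimiter tokens on demand from the caller-supplied `DelimSpan` instead of reading stored ones. A simplified standalone model of the before/after shapes (stand-in types, not the real rustc ones):

```rust
// Stand-ins for `token::DelimToken` and the open/close span pair.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delim {
    Paren,
}

#[derive(Clone, Copy)]
struct DelimSpan {
    open: u32,
    close: u32,
}

// Before: the delimiter tokens lived at the ends of `all_tts`.
#[allow(dead_code)]
struct DelimitedOld {
    delim: Delim,
    all_tts: Vec<String>,
}

// After: only the inner trees are stored; delimiters are built on demand.
struct DelimitedNew {
    delim: Delim,
    tts: Vec<String>,
}

impl DelimitedNew {
    fn open_tt(&self, span: DelimSpan) -> (Delim, u32) {
        (self.delim, span.open) // synthesized from `delim` + `span.open`
    }
    fn close_tt(&self, span: DelimSpan) -> (Delim, u32) {
        (self.delim, span.close)
    }
}

fn main() {
    let d = DelimitedNew { delim: Delim::Paren, tts: vec!["$x:expr".to_string()] };
    let span = DelimSpan { open: 10, close: 20 };
    assert_eq!(d.open_tt(span), (Delim::Paren, 10)); // was: d.all_tts.first().unwrap()
    assert_eq!(d.close_tt(span), (Delim::Paren, 20)); // was: d.all_tts.last().unwrap()
    assert_eq!(d.tts.len(), 1); // only the inner tree is stored
}
```

The `debug_assert!` checks disappear because there is no longer a stored token that could disagree with `delim`.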
16 changes: 5 additions & 11 deletions compiler/rustc_expand/src/mbe/macro_check.rs
@@ -282,7 +282,7 @@ fn check_binders(
// `MetaVarExpr` can not appear in the LHS of a macro arm
TokenTree::MetaVarExpr(..) => {}
TokenTree::Delimited(_, ref del) => {
for tt in del.inner_tts() {
for tt in &del.tts {
check_binders(sess, node_id, tt, macros, binders, ops, valid);
}
}
@@ -345,7 +345,7 @@ fn check_occurrences(
check_ops_is_prefix(sess, node_id, macros, binders, ops, dl.entire(), name);
}
TokenTree::Delimited(_, ref del) => {
check_nested_occurrences(sess, node_id, del.inner_tts(), macros, binders, ops, valid);
check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
}
TokenTree::Sequence(_, ref seq) => {
let ops = ops.push(seq.kleene);
@@ -432,20 +432,14 @@ fn check_nested_occurrences(
{
let macro_rules = state == NestedMacroState::MacroRulesNotName;
state = NestedMacroState::Empty;
let rest = check_nested_macro(
sess,
node_id,
macro_rules,
del.inner_tts(),
&nested_macros,
valid,
);
let rest =
check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, valid);
// If we did not check the whole macro definition, then check the rest as if outside
// the macro definition.
check_nested_occurrences(
sess,
node_id,
&del.inner_tts()[rest..],
&del.tts[rest..],
macros,
binders,
ops,
8 changes: 5 additions & 3 deletions compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -151,9 +151,11 @@ pub(super) fn compute_locs(sess: &ParseSess, matcher: &[TokenTree]) -> Vec<Match
TokenTree::Token(token) => {
locs.push(MatcherLoc::Token { token: token.clone() });
}
TokenTree::Delimited(_, delimited) => {
TokenTree::Delimited(span, delimited) => {
locs.push(MatcherLoc::Delimited);
inner(sess, &delimited.all_tts, locs, next_metavar, seq_depth);
inner(sess, &[delimited.open_tt(*span)], locs, next_metavar, seq_depth);
inner(sess, &delimited.tts, locs, next_metavar, seq_depth);
inner(sess, &[delimited.close_tt(*span)], locs, next_metavar, seq_depth);
}
TokenTree::Sequence(_, seq) => {
// We can't determine `idx_first_after` and construct the final
@@ -293,7 +295,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
.map(|tt| match tt {
TokenTree::MetaVarDecl(..) => 1,
TokenTree::Sequence(_, seq) => seq.num_captures,
TokenTree::Delimited(_, delim) => count_metavar_decls(delim.inner_tts()),
TokenTree::Delimited(_, delim) => count_metavar_decls(&delim.tts),
TokenTree::Token(..) => 0,
TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
})
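With the delimiters no longer stored, `compute_locs` pushes the synthesized open and close locations around the inner trees explicitly, and `count_metavar_decls` recurses straight into `&delim.tts`. A toy version of that recursion over a simplified matcher type (hypothetical enum, for illustration only):

```rust
// Simplified matcher tree: a `$name:frag` declaration, an ordinary token,
// or a delimited group containing nested trees.
enum Tt {
    MetaVarDecl,
    Token,
    Delimited(Vec<Tt>),
}

fn count_metavar_decls(matcher: &[Tt]) -> usize {
    matcher
        .iter()
        .map(|tt| match tt {
            Tt::MetaVarDecl => 1,
            Tt::Token => 0,
            // Only the inner trees are stored now, so recursing is direct.
            Tt::Delimited(tts) => count_metavar_decls(tts),
        })
        .sum()
}

fn main() {
    // Roughly models the matcher `($a:ident , ($b:expr))`.
    let matcher = [Tt::Delimited(vec![
        Tt::MetaVarDecl,
        Tt::Token,
        Tt::Delimited(vec![Tt::MetaVarDecl]),
    ])];
    assert_eq!(count_metavar_decls(&matcher), 2);
}
```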
44 changes: 20 additions & 24 deletions compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -263,9 +263,7 @@ fn generic_extension<'cx, 'tt>(

// Ignore the delimiters on the RHS.
let rhs = match &rhses[i] {
mbe::TokenTree::Delimited(_, delimited) => {
delimited.inner_tts().to_vec().clone()
}
mbe::TokenTree::Delimited(_, delimited) => delimited.tts.to_vec(),
_ => cx.span_bug(sp, "malformed macro rhs"),
};
let arm_span = rhses[i].span();
Expand Down Expand Up @@ -470,17 +468,16 @@ pub fn compile_declarative_macro(
.iter()
.map(|m| {
if let MatchedTokenTree(ref tt) = *m {
let mut tts = vec![];
mbe::quoted::parse(
let tt = mbe::quoted::parse(
tt.clone().into(),
true,
&sess.parse_sess,
def.id,
features,
edition,
&mut tts,
);
let tt = tts.pop().unwrap();
)
.pop()
.unwrap();
valid &= check_lhs_nt_follows(&sess.parse_sess, features, &def, &tt);
return tt;
}
@@ -495,17 +492,16 @@
.iter()
.map(|m| {
if let MatchedTokenTree(ref tt) = *m {
let mut tts = vec![];
mbe::quoted::parse(
return mbe::quoted::parse(
tt.clone().into(),
false,
&sess.parse_sess,
def.id,
features,
edition,
&mut tts,
);
return tts.pop().unwrap();
)
.pop()
.unwrap();
}
sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
})
@@ -544,7 +540,7 @@ pub fn compile_declarative_macro(
// Ignore the delimiters around the matcher.
match lhs {
mbe::TokenTree::Delimited(_, delimited) => {
mbe::macro_parser::compute_locs(&sess.parse_sess, delimited.inner_tts())
mbe::macro_parser::compute_locs(&sess.parse_sess, &delimited.tts)
}
_ => sess.parse_sess.span_diagnostic.span_bug(def.span, "malformed macro lhs"),
}
@@ -576,7 +572,7 @@ fn check_lhs_nt_follows(
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
if let mbe::TokenTree::Delimited(_, delimited) = lhs {
check_matcher(sess, features, def, delimited.inner_tts())
check_matcher(sess, features, def, &delimited.tts)
} else {
let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
sess.span_diagnostic.span_err(lhs.span(), msg);
@@ -597,7 +593,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
| TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarExpr(..) => (),
TokenTree::Delimited(_, ref del) => {
if !check_lhs_no_empty_seq(sess, del.inner_tts()) {
if !check_lhs_no_empty_seq(sess, &del.tts) {
return false;
}
}
@@ -692,9 +688,9 @@ impl FirstSets {
| TokenTree::MetaVarExpr(..) => {
first.replace_with(tt.clone());
}
TokenTree::Delimited(_span, ref delimited) => {
build_recur(sets, delimited.inner_tts());
first.replace_with(delimited.open_tt().clone());
TokenTree::Delimited(span, ref delimited) => {
build_recur(sets, &delimited.tts);
first.replace_with(delimited.open_tt(span));
}
TokenTree::Sequence(sp, ref seq_rep) => {
let subfirst = build_recur(sets, &seq_rep.tts);
@@ -758,8 +754,8 @@ impl FirstSets {
first.add_one(tt.clone());
return first;
}
TokenTree::Delimited(_span, ref delimited) => {
first.add_one(delimited.open_tt().clone());
TokenTree::Delimited(span, ref delimited) => {
first.add_one(delimited.open_tt(span));
return first;
}
TokenTree::Sequence(sp, ref seq_rep) => {
@@ -945,9 +941,9 @@ fn check_matcher_core(
suffix_first = build_suffix_first();
}
}
TokenTree::Delimited(_span, ref d) => {
let my_suffix = TokenSet::singleton(d.close_tt().clone());
check_matcher_core(sess, features, def, first_sets, d.inner_tts(), &my_suffix);
TokenTree::Delimited(span, ref d) => {
let my_suffix = TokenSet::singleton(d.close_tt(span));
check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
// don't track non NT tokens
last.replace_with_irrelevant();

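Several of the changes in this file follow from `mbe::quoted::parse` returning its result instead of filling an out-parameter, so `compile_declarative_macro` can chain `.pop().unwrap()` directly on the returned vector. A sketch of that refactor pattern in isolation (hypothetical names and signatures, not the real ones):

```rust
// Before: the caller owned a buffer and passed it in.
fn parse_into(input: &str, result: &mut Vec<String>) {
    result.extend(input.split_whitespace().map(String::from));
}

// After: the function returns its result, so single-tree callers can chain.
fn parse(input: &str) -> Vec<String> {
    input.split_whitespace().map(String::from).collect()
}

fn main() {
    // Old style:
    let mut tts = Vec::new();
    parse_into("$x:expr", &mut tts);
    let old = tts.pop().unwrap();

    // New style, mirroring the callers in `compile_declarative_macro`:
    let new = parse("$x:expr").pop().unwrap();
    assert_eq!(old, new);
}
```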
26 changes: 13 additions & 13 deletions compiler/rustc_expand/src/mbe/quoted.rs
@@ -45,8 +45,10 @@ pub(super) fn parse(
node_id: NodeId,
features: &Features,
edition: Edition,
result: &mut Vec<TokenTree>,
) {
) -> Vec<TokenTree> {
// Will contain the final collection of `self::TokenTree`
let mut result = Vec::new();

// For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
// additional trees if need be.
let mut trees = input.trees();
@@ -113,6 +115,7 @@ pub(super) fn parse(
_ => result.push(tree),
}
}
result
}

/// Asks for the `macro_metavar_expr` feature if it is not already declared
@@ -205,8 +208,7 @@ fn parse_tree(
// If we didn't find a metavar expression above, then we must have a
// repetition sequence in the macro (e.g. `$(pat)*`). Parse the
// contents of the sequence itself
let mut sequence = vec![];
parse(tts, parsing_patterns, sess, node_id, features, edition, &mut sequence);
let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
// Get the Kleene operator and optional separator
let (separator, kleene) =
parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess);
@@ -269,15 +271,13 @@ fn parse_tree(

// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
// descend into the delimited set and further parse it.
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let mut all_tts = vec![];
// Add the explicit open and close delimiters, which
// `tokenstream::TokenTree::Delimited` lacks.
all_tts.push(TokenTree::token(token::OpenDelim(delim), span.open));
parse(tts, parsing_patterns, sess, node_id, features, edition, &mut all_tts);
all_tts.push(TokenTree::token(token::CloseDelim(delim), span.close));
TokenTree::Delimited(span, Lrc::new(Delimited { delim, all_tts }))
}
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
Lrc::new(Delimited {
delim,
tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
}),
),
}
}

16 changes: 5 additions & 11 deletions compiler/rustc_expand/src/mbe/transcribe.rs
@@ -10,7 +10,7 @@ use rustc_errors::{pluralize, PResult};
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
use rustc_span::{Span, DUMMY_SP};
use rustc_span::Span;

use smallvec::{smallvec, SmallVec};
use std::mem;
@@ -34,14 +34,8 @@ enum Frame {

impl Frame {
/// Construct a new frame around the delimited set of tokens.
fn new(mut tts: Vec<mbe::TokenTree>) -> Frame {
// Need to add empty delimiters.
let open_tt = mbe::TokenTree::token(token::OpenDelim(token::NoDelim), DUMMY_SP);
let close_tt = mbe::TokenTree::token(token::CloseDelim(token::NoDelim), DUMMY_SP);
tts.insert(0, open_tt);
tts.push(close_tt);

let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, all_tts: tts });
fn new(tts: Vec<mbe::TokenTree>) -> Frame {
let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
}
}
@@ -52,7 +46,7 @@ impl Iterator for Frame {
fn next(&mut self) -> Option<mbe::TokenTree> {
match *self {
Frame::Delimited { ref forest, ref mut idx, .. } => {
let res = forest.inner_tts().get(*idx).cloned();
let res = forest.tts.get(*idx).cloned();
*idx += 1;
res
}
@@ -388,7 +382,7 @@ fn lockstep_iter_size(
use mbe::TokenTree;
match *tree {
TokenTree::Delimited(_, ref delimited) => {
delimited.inner_tts().iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats))
})
}
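With the delimiters gone from `Delimited`, `Frame::new` no longer wraps the token list in synthetic `NoDelim` open/close tokens (hence the dropped `DUMMY_SP` import), and the iterator can walk `forest.tts` directly. A minimal stand-in for that iteration (hypothetical types, not the real `mbe` ones):

```rust
// Minimal model of the `Frame` iterator after the change: it yields the
// stored inner trees directly, with no synthetic delimiter entries to skip.
struct Frame {
    tts: Vec<&'static str>,
    idx: usize,
}

impl Iterator for Frame {
    type Item = &'static str;

    fn next(&mut self) -> Option<&'static str> {
        let res = self.tts.get(self.idx).copied();
        self.idx += 1;
        res
    }
}

fn main() {
    let frame = Frame { tts: vec!["$x", "+", "$y"], idx: 0 };
    // Every yielded item is real macro content; previously the first and last
    // entries were placeholder NoDelim open/close tokens.
    assert_eq!(frame.collect::<Vec<_>>(), ["$x", "+", "$y"]);
}
```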
2 changes: 2 additions & 0 deletions compiler/rustc_feature/src/active.rs
@@ -505,6 +505,8 @@ declare_features! (
(active, static_nobundle, "1.16.0", Some(37403), None),
/// Allows attributes on expressions and non-item statements.
(active, stmt_expr_attributes, "1.6.0", Some(15701), None),
/// Allows lints part of the strict provenance effort.
(active, strict_provenance, "1.61.0", Some(95228), None),
/// Allows the use of `#[target_feature]` on safe functions.
(active, target_feature_11, "1.45.0", Some(69098), None),
/// Allows using `#[thread_local]` on `static` items.
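The new `strict_provenance` gate belongs to the strict provenance lints work (rust-lang#95599 in the list above). As a rough, hedged illustration only: on a nightly of this era the same feature name also gated the experimental provenance-aware pointer methods, so opting in looked roughly like the sketch below (assumed API surface such as `<*const T>::addr`; it may have changed since):

```rust
// Nightly-only sketch, assuming the strict-provenance API surface of this
// period; names and behavior were experimental.
#![feature(strict_provenance)]

fn main() {
    let x = 42u8;
    let p: *const u8 = &x;

    // With the gate on, the related lints steer code away from `p as usize`
    // and toward an explicit, provenance-aware address query.
    let addr: usize = p.addr();
    println!("address bits: {addr:#x}");
}
```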