Auto merge of #127549 - jhpratt:rollup-o1mbmhr, r=jhpratt

Rollup of 8 pull requests

Successful merges:

 - #124211 (Bump `elided_lifetimes_in_associated_constant` to deny)
 - #125627 (migration lint for `expr2024` for the edition 2024)
 - #127091 (impl FusedIterator and a size hint for the error sources iter)
 - #127461 (Fixup failing fuchsia tests)
 - #127484 (`#[doc(alias)]`'s doc: say that ASCII spaces are allowed)
 - #127508 (small search graph refactor)
 - #127521 (Remove spastorino from SMIR)
 - #127532 (documentation: update cmake version)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-07-10 06:35:04 +00:00
commit 649feb9c1a
22 changed files with 324 additions and 90 deletions

View File

@@ -48,7 +48,7 @@ If building LLVM from source, you'll need additional tools:
   [LLVM's documentation](https://llvm.org/docs/GettingStarted.html#host-c-toolchain-both-compiler-and-standard-library)
 * `ninja`, or GNU `make` 3.81 or later (Ninja is recommended, especially on
   Windows)
-* `cmake` 3.13.4 or later
+* `cmake` version listed on [LLVM's documentation](https://llvm.org/docs/GettingStarted.html#software)
 * `libstdc++-static` may be required on some Linux distributions such as Fedora
   and Ubuntu

View File

@@ -439,6 +439,9 @@ lint_lintpass_by_hand = implementing `LintPass` by hand
 lint_macro_expanded_macro_exports_accessed_by_absolute_paths = macro-expanded `macro_export` macros from the current crate cannot be referred to by absolute paths
     .note = the macro is defined here
+lint_macro_expr_fragment_specifier_2024_migration =
+    the `expr` fragment specifier will accept more expressions in the 2024 edition
+    .suggestion = to keep the existing behavior, use the `expr_2021` fragment specifier
 lint_macro_is_private = macro `{$ident}` is private
 lint_macro_rule_never_used = rule #{$n} of macro `{$name}` is never used

View File

@@ -60,6 +60,7 @@ mod late;
 mod let_underscore;
 mod levels;
 mod lints;
+mod macro_expr_fragment_specifier_2024_migration;
 mod map_unit_fn;
 mod methods;
 mod multiple_supertrait_upcastable;
@@ -97,6 +98,7 @@ use impl_trait_overcaptures::ImplTraitOvercaptures;
 use internal::*;
 use invalid_from_utf8::*;
 use let_underscore::*;
+use macro_expr_fragment_specifier_2024_migration::*;
 use map_unit_fn::*;
 use methods::*;
 use multiple_supertrait_upcastable::*;
@@ -170,6 +172,7 @@ early_lint_methods!(
             IncompleteInternalFeatures: IncompleteInternalFeatures,
             RedundantSemicolons: RedundantSemicolons,
             UnusedDocComment: UnusedDocComment,
+            Expr2024: Expr2024,
         ]
     ]
 );

View File

@@ -317,6 +317,13 @@ pub struct BuiltinTypeAliasGenericBounds<'a, 'b> {
     pub sub: Option<SuggestChangingAssocTypes<'a, 'b>>,
 }
 
+#[derive(LintDiagnostic)]
+#[diag(lint_macro_expr_fragment_specifier_2024_migration)]
+pub struct MacroExprFragment2024 {
+    #[suggestion(code = "expr_2021", applicability = "machine-applicable")]
+    pub suggestion: Span,
+}
+
 pub struct BuiltinTypeAliasGenericBoundsSuggestion {
     pub suggestions: Vec<(Span, String)>,
 }

View File

@@ -0,0 +1,155 @@
//! Migration code for the `expr_fragment_specifier_2024`
//! rule.
use tracing::debug;
use rustc_ast::token::Token;
use rustc_ast::token::TokenKind;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::tokenstream::TokenTree;
use rustc_session::declare_lint;
use rustc_session::declare_lint_pass;
use rustc_session::lint::FutureIncompatibilityReason;
use rustc_span::edition::Edition;
use rustc_span::sym;
use crate::lints::MacroExprFragment2024;
use crate::EarlyLintPass;
declare_lint! {
/// The `edition_2024_expr_fragment_specifier` lint detects the use of
/// `expr` fragments in macros during migration to the 2024 edition.
///
/// The `expr` fragment specifier will accept more expressions in the 2024
/// edition. To maintain the behavior from the 2021 edition and earlier, use
/// the `expr_2021` fragment specifier.
///
/// ### Example
///
/// ```rust,edition2021,compile_fail
/// #![deny(edition_2024_expr_fragment_specifier)]
/// macro_rules! m {
/// ($e:expr) => {
/// $e
/// }
/// }
///
/// fn main() {
/// m!(1);
/// }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Rust [editions] allow the language to evolve without breaking backwards
/// compatibility. This lint catches code that uses [macro matcher fragment
/// specifiers] that have changed meaning in the 2024 edition. If you switch
/// to the new edition without updating the code, your macros may behave
/// differently.
///
/// In the 2024 edition, the `expr` fragment specifier will also match
/// `const { ... }` blocks. This means that if a macro has one pattern that
/// matches `$e:expr` and another that matches `const { $e:expr }`, for
/// example, the first pattern matches under the 2024 edition, whereas the
/// second pattern matches under the 2021 and earlier editions. To keep
/// the old behavior, use the `expr_2021` fragment specifier.
///
/// This lint detects macros whose behavior might change due to the changing
/// meaning of the `expr` fragment specifier. It is "allow" by default
/// because the code is perfectly valid in older editions. The [`cargo fix`]
/// tool with the `--edition` flag will switch this lint to "warn" and
/// automatically apply the suggested fix from the compiler. This provides a
/// completely automated way to update old code for a new edition.
///
/// Using `cargo fix --edition` with this lint will ensure that your code
/// retains the same behavior. This may not be desired, as macro authors
/// will often want their macros to use the latest grammar for matching
/// expressions. Be sure to carefully review changes introduced by this lint
/// to ensure the macros implement the desired behavior.
///
/// [editions]: https://doc.rust-lang.org/edition-guide/
/// [macro matcher fragment specifiers]: https://doc.rust-lang.org/nightly/edition-guide/rust-2024/macro-fragment-specifiers.html
/// [`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html
pub EDITION_2024_EXPR_FRAGMENT_SPECIFIER,
Allow,
"The `expr` fragment specifier will accept more expressions in the 2024 edition. \
To keep the existing behavior, use the `expr_2021` fragment specifier.",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::EditionSemanticsChange(Edition::Edition2024),
reference: "Migration Guide <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/macro-fragment-specifiers.html>",
};
}
declare_lint_pass!(Expr2024 => [EDITION_2024_EXPR_FRAGMENT_SPECIFIER,]);
impl Expr2024 {
fn check_tokens(&mut self, cx: &crate::EarlyContext<'_>, tokens: &TokenStream) {
let mut prev_colon = false;
let mut prev_identifier = false;
let mut prev_dollar = false;
for tt in tokens.trees() {
debug!(
"check_tokens: {:?} - colon {prev_dollar} - ident {prev_identifier} - colon {prev_colon}",
tt
);
match tt {
TokenTree::Token(token, _) => match token.kind {
TokenKind::Dollar => {
prev_dollar = true;
continue;
}
TokenKind::Ident(..) | TokenKind::NtIdent(..) => {
if prev_colon && prev_identifier && prev_dollar {
self.check_ident_token(cx, token);
} else if prev_dollar {
prev_identifier = true;
continue;
}
}
TokenKind::Colon => {
if prev_dollar && prev_identifier {
prev_colon = true;
continue;
}
}
_ => {}
},
TokenTree::Delimited(.., tts) => self.check_tokens(cx, tts),
}
prev_colon = false;
prev_identifier = false;
prev_dollar = false;
}
}
fn check_ident_token(&mut self, cx: &crate::EarlyContext<'_>, token: &Token) {
debug!("check_ident_token: {:?}", token);
let (sym, edition) = match token.kind {
TokenKind::Ident(sym, _) => (sym, Edition::Edition2024),
_ => return,
};
debug!("token.span.edition(): {:?}", token.span.edition());
if token.span.edition() >= edition {
return;
}
if sym != sym::expr {
return;
}
debug!("emitting lint");
cx.builder.emit_span_lint(
&EDITION_2024_EXPR_FRAGMENT_SPECIFIER,
token.span.into(),
MacroExprFragment2024 { suggestion: token.span },
);
}
}
impl EarlyLintPass for Expr2024 {
fn check_mac_def(&mut self, cx: &crate::EarlyContext<'_>, mc: &rustc_ast::MacroDef) {
self.check_tokens(cx, &mc.body.tokens);
}
}
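As a hedged sketch of the migration this lint automates (mirroring the `expr_2021` test files later in this commit, and assuming the nightly `expr_fragment_specifier_2024` feature gate those tests enable), the suggested fix simply pins the 2021-edition grammar:

```rust
// Sketch only, not part of the commit.
#![feature(expr_fragment_specifier_2024)] // nightly feature gate used by the tests in this PR
#![allow(incomplete_features)]

macro_rules! m {
    // Before `cargo fix --edition`: on the 2024 edition this arm would also
    // match `const { ... }` blocks.
    // ($e:expr) => { $e };

    // After the machine-applicable suggestion: `expr_2021` keeps the
    // 2021-edition matching behavior.
    ($e:expr_2021) => { $e };
}

fn main() {
    assert_eq!(m!(1 + 1), 2);
}
```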

View File

@@ -4620,7 +4620,7 @@ declare_lint! {
     /// [against]: https://github.com/rust-lang/rust/issues/38831
     /// [future-incompatible]: ../index.md#future-incompatible-lints
     pub ELIDED_LIFETIMES_IN_ASSOCIATED_CONSTANT,
-    Warn,
+    Deny,
     "elided lifetimes cannot be used in associated constants in impls",
     @future_incompatible = FutureIncompatibleInfo {
         reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,

View File

@@ -48,12 +48,20 @@ enum GoalEvaluationKind {
     Nested,
 }
 
+// FIXME(trait-system-refactor-initiative#117): we don't detect whether a response
+// ended up pulling down any universes.
 fn has_no_inference_or_external_constraints<I: Interner>(
     response: ty::Canonical<I, Response<I>>,
 ) -> bool {
-    response.value.external_constraints.region_constraints.is_empty()
-        && response.value.var_values.is_identity()
-        && response.value.external_constraints.opaque_types.is_empty()
+    let ExternalConstraintsData {
+        ref region_constraints,
+        ref opaque_types,
+        ref normalization_nested_goals,
+    } = *response.value.external_constraints;
+    response.value.var_values.is_identity()
+        && region_constraints.is_empty()
+        && opaque_types.is_empty()
+        && normalization_nested_goals.is_empty()
 }
 
 impl<'a, D, I> EvalCtxt<'a, D>
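The rewritten check destructures the constraints exhaustively instead of probing individual fields. A toy sketch (hypothetical `ExternalConstraints` struct, not the compiler's `ExternalConstraintsData`) of why that style is used:

```rust
// Sketch only: with exhaustive destructuring, adding a new field to the
// struct turns this `let` into a compile error, so the emptiness check
// cannot silently ignore the new field.
struct ExternalConstraints {
    region_constraints: Vec<u32>,
    opaque_types: Vec<u32>,
    normalization_nested_goals: Vec<u32>,
}

fn has_no_external_constraints(c: &ExternalConstraints) -> bool {
    let ExternalConstraints {
        ref region_constraints,
        ref opaque_types,
        ref normalization_nested_goals,
    } = *c;
    region_constraints.is_empty()
        && opaque_types.is_empty()
        && normalization_nested_goals.is_empty()
}

fn main() {
    let empty = ExternalConstraints {
        region_constraints: Vec::new(),
        opaque_types: Vec::new(),
        normalization_nested_goals: Vec::new(),
    };
    assert!(has_no_external_constraints(&empty));
}
```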

View File

@@ -71,7 +71,7 @@ struct StackEntry<I: Interner> {
     /// C :- D
     /// D :- C
     /// ```
-    cycle_participants: HashSet<CanonicalInput<I>>,
+    nested_goals: HashSet<CanonicalInput<I>>,
     /// Starts out as `None` and gets set when rerunning this
     /// goal in case we encounter a cycle.
     provisional_result: Option<QueryResult<I>>,
@@ -139,18 +139,11 @@ impl<I: Interner> SearchGraph<I> {
         self.mode
     }
 
-    /// Pops the highest goal from the stack, lazily updating the
-    /// the next goal in the stack.
-    ///
-    /// Directly popping from the stack instead of using this method
-    /// would cause us to not track overflow and recursion depth correctly.
-    fn pop_stack(&mut self) -> StackEntry<I> {
-        let elem = self.stack.pop().unwrap();
-        if let Some(last) = self.stack.raw.last_mut() {
-            last.reached_depth = last.reached_depth.max(elem.reached_depth);
-            last.encountered_overflow |= elem.encountered_overflow;
+    fn update_parent_goal(&mut self, reached_depth: StackDepth, encountered_overflow: bool) {
+        if let Some(parent) = self.stack.raw.last_mut() {
+            parent.reached_depth = parent.reached_depth.max(reached_depth);
+            parent.encountered_overflow |= encountered_overflow;
         }
-        elem
     }
 
     pub(super) fn is_empty(&self) -> bool {
@@ -222,8 +215,8 @@ impl<I: Interner> SearchGraph<I> {
         let current_cycle_root = &mut stack[current_root.as_usize()];
         for entry in cycle_participants {
             entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head));
-            current_cycle_root.cycle_participants.insert(entry.input);
-            current_cycle_root.cycle_participants.extend(mem::take(&mut entry.cycle_participants));
+            current_cycle_root.nested_goals.insert(entry.input);
+            current_cycle_root.nested_goals.extend(mem::take(&mut entry.nested_goals));
         }
     }
@@ -342,7 +335,7 @@ impl<I: Interner> SearchGraph<I> {
             non_root_cycle_participant: None,
             encountered_overflow: false,
             has_been_used: HasBeenUsed::empty(),
-            cycle_participants: Default::default(),
+            nested_goals: Default::default(),
             provisional_result: None,
         };
         assert_eq!(self.stack.push(entry), depth);
@@ -364,7 +357,7 @@ impl<I: Interner> SearchGraph<I> {
             }
 
             debug!("canonical cycle overflow");
-            let current_entry = self.pop_stack();
+            let current_entry = self.stack.pop().unwrap();
             debug_assert!(current_entry.has_been_used.is_empty());
             let result = Self::response_no_constraints(cx, input, Certainty::overflow(false));
             (current_entry, result)
@@ -372,6 +365,8 @@ impl<I: Interner> SearchGraph<I> {
 
         let proof_tree = inspect.finalize_canonical_goal_evaluation(cx);
 
+        self.update_parent_goal(final_entry.reached_depth, final_entry.encountered_overflow);
+
         // We're now done with this goal. In case this goal is involved in a larger cycle
         // do not remove it from the provisional cache and update its provisional result.
         // We only add the root of cycles to the global cache.
@@ -394,7 +389,7 @@ impl<I: Interner> SearchGraph<I> {
             //
             // We must not use the global cache entry of a root goal if a cycle
             // participant is on the stack. This is necessary to prevent unstable
-            // results. See the comment of `StackEntry::cycle_participants` for
+            // results. See the comment of `StackEntry::nested_goals` for
             // more details.
             self.global_cache(cx).insert(
                 cx,
@@ -402,7 +397,7 @@ impl<I: Interner> SearchGraph<I> {
                 proof_tree,
                 reached_depth,
                 final_entry.encountered_overflow,
-                final_entry.cycle_participants,
+                final_entry.nested_goals,
                 dep_node,
                 result,
             )
@@ -441,14 +436,9 @@ impl<I: Interner> SearchGraph<I> {
             }
         }
 
-        // Update the reached depth of the current goal to make sure
-        // its state is the same regardless of whether we've used the
-        // global cache or not.
+        // Adjust the parent goal as if we actually computed this goal.
         let reached_depth = self.stack.next_index().plus(additional_depth);
-        if let Some(last) = self.stack.raw.last_mut() {
-            last.reached_depth = last.reached_depth.max(reached_depth);
-            last.encountered_overflow |= encountered_overflow;
-        }
+        self.update_parent_goal(reached_depth, encountered_overflow);
 
         Some(result)
     }
@@ -477,7 +467,7 @@ impl<I: Interner> SearchGraph<I> {
         F: FnMut(&mut Self, &mut ProofTreeBuilder<D>) -> QueryResult<I>,
     {
         let result = prove_goal(self, inspect);
-        let stack_entry = self.pop_stack();
+        let stack_entry = self.stack.pop().unwrap();
         debug_assert_eq!(stack_entry.input, input);
 
         // If the current goal is not the root of a cycle, we are done.
@@ -554,27 +544,27 @@ impl<I: Interner> SearchGraph<I> {
                 non_root_cycle_participant,
                 encountered_overflow: _,
                 has_been_used,
-                ref cycle_participants,
+                ref nested_goals,
                 provisional_result,
             } = *entry;
             let cache_entry = provisional_cache.get(&entry.input).unwrap();
             assert_eq!(cache_entry.stack_depth, Some(depth));
             if let Some(head) = non_root_cycle_participant {
                 assert!(head < depth);
-                assert!(cycle_participants.is_empty());
+                assert!(nested_goals.is_empty());
                 assert_ne!(stack[head].has_been_used, HasBeenUsed::empty());
 
                 let mut current_root = head;
                 while let Some(parent) = stack[current_root].non_root_cycle_participant {
                     current_root = parent;
                 }
-                assert!(stack[current_root].cycle_participants.contains(&input));
+                assert!(stack[current_root].nested_goals.contains(&input));
             }
 
-            if !cycle_participants.is_empty() {
+            if !nested_goals.is_empty() {
                 assert!(provisional_result.is_some() || !has_been_used.is_empty());
                 for entry in stack.iter().take(depth.as_usize()) {
-                    assert_eq!(cycle_participants.get(&entry.input), None);
+                    assert_eq!(nested_goals.get(&entry.input), None);
                 }
             }
         }
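The gist of this refactor is that "fold `reached_depth` and `encountered_overflow` into the parent stack frame" now lives in one `update_parent_goal` helper, called both when a nested goal is popped and when its result comes from the global cache. A toy sketch of that pattern (hypothetical `Frame`/`Stack` types, not the compiler's):

```rust
// Toy illustration only: propagate a nested goal's depth and overflow flag
// into its parent frame through a single helper.
#[derive(Default)]
struct Frame {
    reached_depth: usize,
    encountered_overflow: bool,
}

struct Stack(Vec<Frame>);

impl Stack {
    fn update_parent_goal(&mut self, reached_depth: usize, encountered_overflow: bool) {
        if let Some(parent) = self.0.last_mut() {
            parent.reached_depth = parent.reached_depth.max(reached_depth);
            parent.encountered_overflow |= encountered_overflow;
        }
    }
}

fn main() {
    let mut stack = Stack(vec![Frame::default()]);
    // A nested goal reached depth 3 and overflowed; fold that into the parent,
    // whether the nested result was computed or read from a cache.
    stack.update_parent_goal(3, true);
    assert_eq!(stack.0[0].reached_depth, 3);
    assert!(stack.0[0].encountered_overflow);
}
```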

View File

@@ -1008,8 +1008,15 @@ impl<'a> Iterator for Source<'a> {
         self.current = self.current.and_then(Error::source);
         current
     }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        if self.current.is_some() { (1, None) } else { (0, Some(0)) }
+    }
 }
 
+#[unstable(feature = "error_iter", issue = "58520")]
+impl<'a> crate::iter::FusedIterator for Source<'a> {}
+
 #[stable(feature = "error_by_ref", since = "1.51.0")]
 impl<'a, T: Error + ?Sized> Error for &'a T {
     #[allow(deprecated, deprecated_in_future)]
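A small usage sketch of what the new `size_hint` conveys for `Error::sources()` (assuming a nightly toolchain with the unstable `error_iter` feature named in the `#[unstable]` attribute above):

```rust
#![feature(error_iter)] // nightly: the `Source` iterator is unstable (issue #58520)

use std::error::Error;
use std::fmt;

#[derive(Debug)]
struct Leaf;
impl fmt::Display for Leaf {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "leaf") }
}
impl Error for Leaf {}

#[derive(Debug)]
struct Wrapper(Leaf);
impl fmt::Display for Wrapper {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "wrapper") }
}
impl Error for Wrapper {
    fn source(&self) -> Option<&(dyn Error + 'static)> { Some(&self.0) }
}

fn main() {
    let err = Wrapper(Leaf);
    let sources = err.sources();
    // At least one item (the error itself) is pending, but the chain length
    // is unknown up front: size_hint is (1, None).
    assert_eq!(sources.size_hint(), (1, None));
    // `sources()` yields the error itself, then its source.
    assert_eq!(sources.count(), 2);
}
```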

View File

@@ -8,8 +8,6 @@ https://doc.rust-lang.org/stable/rustc/platform-support/fuchsia.html#aarch64-unk
 """
 
 import argparse
-from concurrent.futures import ThreadPoolExecutor
-from dataclasses import dataclass
 import glob
 import io
 import json
@@ -20,6 +18,8 @@ import shlex
 import shutil
 import subprocess
 import sys
+from concurrent.futures import ThreadPoolExecutor
+from dataclasses import dataclass
 from pathlib import Path
 from typing import ClassVar, List, Optional
@@ -42,12 +42,8 @@ def check_call_with_logging(
        for line in pipe:
            handler(line.rstrip())

-       executor_out = executor.submit(
-           exhaust_pipe, stdout_handler, process.stdout
-       )
-       executor_err = executor.submit(
-           exhaust_pipe, stderr_handler, process.stderr
-       )
+       executor_out = executor.submit(exhaust_pipe, stdout_handler, process.stdout)
+       executor_err = executor.submit(exhaust_pipe, stderr_handler, process.stderr)
        executor_out.result()
        executor_err.result()
    retcode = process.poll()
@@ -203,9 +199,7 @@ class TestEnvironment:
            raise Exception(f"Unreadable build-id for binary {binary}")
        data = json.loads(process.stdout)
        if len(data) != 1:
-           raise Exception(
-               f"Unreadable output from llvm-readelf for binary {binary}"
-           )
+           raise Exception(f"Unreadable output from llvm-readelf for binary {binary}")
        notes = data[0]["Notes"]
        for note in notes:
            note_section = note["NoteSection"]
@@ -265,19 +259,10 @@ class TestEnvironment:
    def setup_logging(self, log_to_file=False):
        fs = logging.Formatter("%(asctime)s %(levelname)s:%(name)s:%(message)s")
        if log_to_file:
-           logfile_handler = logging.FileHandler(
-               self.tmp_dir().joinpath("log")
-           )
+           logfile_handler = logging.FileHandler(self.tmp_dir().joinpath("log"))
            logfile_handler.setLevel(logging.DEBUG)
            logfile_handler.setFormatter(fs)
            logging.getLogger().addHandler(logfile_handler)
-       stream_handler = logging.StreamHandler(sys.stdout)
-       stream_handler.setFormatter(fs)
-       if self.verbose:
-           stream_handler.setLevel(logging.DEBUG)
-       else:
-           stream_handler.setLevel(logging.INFO)
-       logging.getLogger().addHandler(stream_handler)
        logging.getLogger().setLevel(logging.DEBUG)

    @property
@@ -454,9 +439,7 @@ class TestEnvironment:
        # Initialize temp directory
        os.makedirs(self.tmp_dir(), exist_ok=True)
        if len(os.listdir(self.tmp_dir())) != 0:
-           raise Exception(
-               f"Temp directory is not clean (in {self.tmp_dir()})"
-           )
+           raise Exception(f"Temp directory is not clean (in {self.tmp_dir()})")
        self.setup_logging(log_to_file=True)
        os.mkdir(self.output_dir)
@@ -493,9 +476,7 @@ class TestEnvironment:
        shutil.rmtree(self.local_pb_path, ignore_errors=True)

        # Look up the product bundle transfer manifest.
-       self.env_logger.info(
-           "Looking up the product bundle transfer manifest..."
-       )
+       self.env_logger.info("Looking up the product bundle transfer manifest...")
        product_name = "minimal." + self.triple_to_arch(self.target)
        sdk_version = self.read_sdk_version()
@@ -517,9 +498,7 @@ class TestEnvironment:
        )
        try:
-           transfer_manifest_url = json.loads(output)[
-               "transfer_manifest_url"
-           ]
+           transfer_manifest_url = json.loads(output)["transfer_manifest_url"]
        except Exception as e:
            print(e)
            raise Exception("Unable to parse transfer manifest") from e
@@ -769,9 +748,7 @@ class TestEnvironment:
            # Use /tmp as the test temporary directory
            env_vars += '\n "RUST_TEST_TMPDIR=/tmp",'

-           cml.write(
-               self.CML_TEMPLATE.format(env_vars=env_vars, exe_name=exe_name)
-           )
+           cml.write(self.CML_TEMPLATE.format(env_vars=env_vars, exe_name=exe_name))

        runner_logger.info("Compiling CML...")
@@ -922,20 +899,16 @@ class TestEnvironment:
        if stdout_path is not None:
            if not os.path.exists(stdout_path):
-               runner_logger.error(
-                   f"stdout file {stdout_path} does not exist."
-               )
+               runner_logger.error(f"stdout file {stdout_path} does not exist.")
            else:
                with open(stdout_path, encoding="utf-8", errors="ignore") as f:
-                   runner_logger.info(f.read())
+                   sys.stdout.write(f.read())
        if stderr_path is not None:
            if not os.path.exists(stderr_path):
-               runner_logger.error(
-                   f"stderr file {stderr_path} does not exist."
-               )
+               runner_logger.error(f"stderr file {stderr_path} does not exist.")
            else:
                with open(stderr_path, encoding="utf-8", errors="ignore") as f:
-                   runner_logger.error(f.read())
+                   sys.stderr.write(f.read())

        runner_logger.info("Done!")
        return return_code
@@ -1037,7 +1010,7 @@
            f"--symbol-path={self.rust_dir}/lib/rustlib/{self.target}/lib",
        ]

        # Add rust source if it's available
        rust_src_map = None
        if args.rust_src is not None:
            # This matches the remapped prefix used by compiletest. There's no
@@ -1210,7 +1183,7 @@ def main():
    start_parser.add_argument(
        "--use-local-product-bundle-if-exists",
        help="if the product bundle already exists in the local path, use "
        "it instead of downloading it again",
        action="store_true",
    )
    start_parser.set_defaults(func=start)
@@ -1246,9 +1219,7 @@ def main():
    )
    cleanup_parser.set_defaults(func=cleanup)

-   syslog_parser = subparsers.add_parser(
-       "syslog", help="prints the device syslog"
-   )
+   syslog_parser = subparsers.add_parser("syslog", help="prints the device syslog")
    syslog_parser.set_defaults(func=syslog)

    debug_parser = subparsers.add_parser(

View File

@@ -80,7 +80,8 @@ pub struct BigX;
 Then, when looking for it through the `rustdoc` search, if you enter "x" or
 "big", search will show the `BigX` struct first.
 
-There are some limitations on the doc alias names though: you can't use `"` or whitespace.
+There are some limitations on the doc alias names though: they cannot contain quotes (`'`, `"`)
+or most whitespace. ASCII space is allowed if it does not start or end the alias.
 
 You can add multiple aliases at the same time by using a list:
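A brief sketch (hypothetical items, not the rustdoc book's own example) of what those rules allow and reject:

```rust
// Sketch only: doc alias rules as described above.
#[doc(alias = "big y")]       // allowed: an interior ASCII space is fine
#[doc(alias("y", "why"))]     // multiple aliases added at once via a list
// #[doc(alias = " big y ")]  // rejected: alias must not start or end with a space
// #[doc(alias = "big\"y")]   // rejected: quotes are not allowed
pub struct BigY;
```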

View File

@@ -0,0 +1,24 @@
//@ run-rustfix
//@ check-pass
//@ compile-flags: --edition=2021
#![allow(incomplete_features)]
#![feature(expr_fragment_specifier_2024)]
#![warn(edition_2024_expr_fragment_specifier)]
macro_rules! m {
($e:expr_2021) => { //~ WARN: the `expr` fragment specifier will accept more expressions in the 2024 edition
//~^ WARN: this changes meaning in Rust 2024
$e
};
($($i:expr_2021)*) => { }; //~ WARN: the `expr` fragment specifier will accept more expressions in the 2024 edition
//~^ WARN: this changes meaning in Rust 2024
}
macro_rules! test {
(expr) => {}
}
fn main() {
m!(());
test!(expr);
}

View File

@@ -0,0 +1,24 @@
//@ run-rustfix
//@ check-pass
//@ compile-flags: --edition=2021
#![allow(incomplete_features)]
#![feature(expr_fragment_specifier_2024)]
#![warn(edition_2024_expr_fragment_specifier)]
macro_rules! m {
($e:expr) => { //~ WARN: the `expr` fragment specifier will accept more expressions in the 2024 edition
//~^ WARN: this changes meaning in Rust 2024
$e
};
($($i:expr)*) => { }; //~ WARN: the `expr` fragment specifier will accept more expressions in the 2024 edition
//~^ WARN: this changes meaning in Rust 2024
}
macro_rules! test {
(expr) => {}
}
fn main() {
m!(());
test!(expr);
}

View File

@@ -0,0 +1,33 @@
warning: the `expr` fragment specifier will accept more expressions in the 2024 edition
--> $DIR/expr_2021_cargo_fix_edition.rs:9:9
|
LL | ($e:expr) => {
| ^^^^
|
= warning: this changes meaning in Rust 2024
= note: for more information, see Migration Guide <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/macro-fragment-specifiers.html>
note: the lint level is defined here
--> $DIR/expr_2021_cargo_fix_edition.rs:6:9
|
LL | #![warn(edition_2024_expr_fragment_specifier)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
help: to keep the existing behavior, use the `expr_2021` fragment specifier
|
LL | ($e:expr_2021) => {
| ~~~~~~~~~
warning: the `expr` fragment specifier will accept more expressions in the 2024 edition
--> $DIR/expr_2021_cargo_fix_edition.rs:13:11
|
LL | ($($i:expr)*) => { };
| ^^^^
|
= warning: this changes meaning in Rust 2024
= note: for more information, see Migration Guide <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/macro-fragment-specifiers.html>
help: to keep the existing behavior, use the `expr_2021` fragment specifier
|
LL | ($($i:expr_2021)*) => { };
| ~~~~~~~~~
warning: 2 warnings emitted

View File

@@ -1,5 +1,5 @@
 error: no rules expected the token `const`
-  --> $DIR/expr_2021_inline_const.rs:21:12
+  --> $DIR/expr_2021_inline_const.rs:26:12
   |
 LL | macro_rules! m2021 {
   | ------------------ when calling this macro
@@ -14,7 +14,7 @@ LL | ($e:expr_2021) => {
   | ^^^^^^^^^^^^
 
 error: no rules expected the token `const`
-  --> $DIR/expr_2021_inline_const.rs:22:12
+  --> $DIR/expr_2021_inline_const.rs:27:12
   |
 LL | macro_rules! m2024 {
   | ------------------ when calling this macro

View File

@@ -1,5 +1,5 @@
 error: no rules expected the token `const`
-  --> $DIR/expr_2021_inline_const.rs:21:12
+  --> $DIR/expr_2021_inline_const.rs:26:12
   |
 LL | macro_rules! m2021 {
   | ------------------ when calling this macro

View File

@@ -17,7 +17,14 @@ macro_rules! m2024 {
         $e
     };
 }
 
+macro_rules! test {
+    (expr) => {}
+}
+
 fn main() {
     m2021!(const { 1 }); //~ ERROR: no rules expected the token `const`
     m2024!(const { 1 }); //[edi2021]~ ERROR: no rules expected the token `const`
+    test!(expr);
 }

View File

@@ -10,6 +10,7 @@
 //@ ignore-wasm no panic or subprocess support
 //@ ignore-emscripten no panic or subprocess support
 //@ ignore-sgx no subprocess support
+//@ ignore-fuchsia code returned as ZX_TASK_RETCODE_EXCEPTION_KILL, FIXME (#127539)
 
 #![cfg(test)]

View File

@@ -1,9 +1,9 @@
-thread 'main' panicked at $DIR/test-panic-abort-nocapture.rs:34:5:
+thread 'main' panicked at $DIR/test-panic-abort-nocapture.rs:35:5:
 assertion `left == right` failed
   left: 2
  right: 4
 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
-thread 'main' panicked at $DIR/test-panic-abort-nocapture.rs:28:5:
+thread 'main' panicked at $DIR/test-panic-abort-nocapture.rs:29:5:
 assertion `left == right` failed
   left: 2
  right: 4

View File

@@ -10,6 +10,7 @@
 //@ ignore-wasm no panic or subprocess support
 //@ ignore-emscripten no panic or subprocess support
 //@ ignore-sgx no subprocess support
+//@ ignore-fuchsia code returned as ZX_TASK_RETCODE_EXCEPTION_KILL, FIXME (#127539)
 
 #![cfg(test)]
 #![feature(test)]

View File

@@ -17,7 +17,7 @@ hello, world
 testing123
 ---- it_fails stderr ----
 testing321
-thread 'main' panicked at $DIR/test-panic-abort.rs:39:5:
+thread 'main' panicked at $DIR/test-panic-abort.rs:40:5:
 assertion `left == right` failed
   left: 2
  right: 5

View File

@@ -1050,7 +1050,6 @@ project-const-traits = [
 project-stable-mir = [
     "@celinval",
     "@oli-obk",
-    "@spastorino",
     "@ouz-a",
 ]