Auto merge of #119662 - matthiaskrgr:rollup-ehofh5n, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

- #118194 (rustdoc: search for tuples and unit by type with `()`)
- #118781 (merge core_panic feature into panic_internals)
- #119486 (pass allow-{dirty,staged} to clippy)
- #119591 (rustc_mir_transform: Make DestinationPropagation stable for queries)
- #119595 (Fixed ambiguity in hint.rs)
- #119624 (rustc_span: More consistent span combination operations)
- #119653 (compiler: update Fuchsia sanitizer support.)
- #119655 (Remove ignore-stage1 that was added when changing error count msg)
- #119661 (Strip lld-wrapper binaries)

r? `@ghost`
`@rustbot` modify labels: rollup
commit b6a8c762ee
@@ -104,6 +104,14 @@ gimli.debug = 0
miniz_oxide.debug = 0
object.debug = 0

# These are very thin wrappers around executing lld with the right binary name.
# Basically nothing within them can go wrong without having been explicitly logged anyway.
# We ship these in every rustc tarball and even after compression they add up
# to around 0.6MB of data every user needs to download (and 15MB on disk).
[profile.release.package.lld-wrapper]
debug = 0
strip = true

[patch.crates-io]
# See comments in `library/rustc-std-workspace-core/README.md` for what's going on
# here
@@ -7,6 +7,7 @@ pub type StdEntry<'a, K, V> = std::collections::hash_map::Entry<'a, K, V>;
pub type FxIndexMap<K, V> = indexmap::IndexMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxIndexSet<V> = indexmap::IndexSet<V, BuildHasherDefault<FxHasher>>;
pub type IndexEntry<'a, K, V> = indexmap::map::Entry<'a, K, V>;
pub type IndexOccupiedEntry<'a, K, V> = indexmap::map::OccupiedEntry<'a, K, V>;

#[macro_export]
macro_rules! define_id_collections {
@@ -131,10 +131,8 @@
//! [attempt 2]: https://github.com/rust-lang/rust/pull/71003
//! [attempt 3]: https://github.com/rust-lang/rust/pull/72632

use std::collections::hash_map::{Entry, OccupiedEntry};

use crate::MirPass;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::{FxIndexMap, IndexEntry, IndexOccupiedEntry};
use rustc_index::bit_set::BitSet;
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::HasLocalDecls;
@@ -211,7 +209,7 @@ impl<'tcx> MirPass<'tcx> for DestinationPropagation {
let mut merged_locals: BitSet<Local> = BitSet::new_empty(body.local_decls.len());

// This is the set of merges we will apply this round. It is a subset of the candidates.
let mut merges = FxHashMap::default();
let mut merges = FxIndexMap::default();

for (src, candidates) in candidates.c.iter() {
if merged_locals.contains(*src) {
@@ -250,8 +248,8 @@ impl<'tcx> MirPass<'tcx> for DestinationPropagation {
/// frequently. Everything with a `&'alloc` lifetime points into here.
#[derive(Default)]
struct Allocations {
candidates: FxHashMap<Local, Vec<Local>>,
candidates_reverse: FxHashMap<Local, Vec<Local>>,
candidates: FxIndexMap<Local, Vec<Local>>,
candidates_reverse: FxIndexMap<Local, Vec<Local>>,
write_info: WriteInfo,
// PERF: Do this for `MaybeLiveLocals` allocations too.
}
@@ -272,11 +270,11 @@ struct Candidates<'alloc> {
///
/// We will still report that we would like to merge `_1` and `_2` in an attempt to allow us to
/// remove that assignment.
c: &'alloc mut FxHashMap<Local, Vec<Local>>,
c: &'alloc mut FxIndexMap<Local, Vec<Local>>,
/// A reverse index of the `c` set; if the `c` set contains `a => Place { local: b, proj }`,
/// then this contains `b => a`.
// PERF: Possibly these should be `SmallVec`s?
reverse: &'alloc mut FxHashMap<Local, Vec<Local>>,
reverse: &'alloc mut FxIndexMap<Local, Vec<Local>>,
}

//////////////////////////////////////////////////////////
@@ -287,7 +285,7 @@ struct Candidates<'alloc> {
fn apply_merges<'tcx>(
body: &mut Body<'tcx>,
tcx: TyCtxt<'tcx>,
merges: &FxHashMap<Local, Local>,
merges: &FxIndexMap<Local, Local>,
merged_locals: &BitSet<Local>,
) {
let mut merger = Merger { tcx, merges, merged_locals };
@@ -296,7 +294,7 @@ fn apply_merges<'tcx>(

struct Merger<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
merges: &'a FxHashMap<Local, Local>,
merges: &'a FxIndexMap<Local, Local>,
merged_locals: &'a BitSet<Local>,
}

@@ -379,7 +377,7 @@ impl<'alloc> Candidates<'alloc> {

/// `vec_filter_candidates` but for an `Entry`
fn entry_filter_candidates(
mut entry: OccupiedEntry<'_, Local, Vec<Local>>,
mut entry: IndexOccupiedEntry<'_, Local, Vec<Local>>,
p: Local,
f: impl FnMut(Local) -> CandidateFilter,
at: Location,
@@ -399,7 +397,7 @@ impl<'alloc> Candidates<'alloc> {
at: Location,
) {
// Cover the cases where `p` appears as a `src`
if let Entry::Occupied(entry) = self.c.entry(p) {
if let IndexEntry::Occupied(entry) = self.c.entry(p) {
Self::entry_filter_candidates(entry, p, &mut f, at);
}
// And the cases where `p` appears as a `dest`
@@ -412,7 +410,7 @@ impl<'alloc> Candidates<'alloc> {
if f(*src) == CandidateFilter::Keep {
return true;
}
let Entry::Occupied(entry) = self.c.entry(*src) else {
let IndexEntry::Occupied(entry) = self.c.entry(*src) else {
return false;
};
Self::entry_filter_candidates(
@@ -721,8 +719,8 @@ fn places_to_candidate_pair<'tcx>(
fn find_candidates<'alloc, 'tcx>(
body: &Body<'tcx>,
borrowed: &BitSet<Local>,
candidates: &'alloc mut FxHashMap<Local, Vec<Local>>,
candidates_reverse: &'alloc mut FxHashMap<Local, Vec<Local>>,
candidates: &'alloc mut FxIndexMap<Local, Vec<Local>>,
candidates_reverse: &'alloc mut FxIndexMap<Local, Vec<Local>>,
) -> Candidates<'alloc> {
candidates.clear();
candidates_reverse.clear();
@@ -744,7 +742,7 @@ fn find_candidates<'alloc, 'tcx>(

struct FindAssignments<'a, 'alloc, 'tcx> {
body: &'a Body<'tcx>,
candidates: &'alloc mut FxHashMap<Local, Vec<Local>>,
candidates: &'alloc mut FxIndexMap<Local, Vec<Local>>,
borrowed: &'a BitSet<Local>,
}
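The `FxHashMap` to `FxIndexMap` switch above is what makes this pass deterministic ("stable for queries"): an index map iterates in insertion order rather than hash order. A minimal sketch of the difference, not part of the patch and assuming the `indexmap` crate as a dependency:

```rust
// Sketch only: contrasts std's HashMap (unspecified iteration order) with
// indexmap's IndexMap (insertion order). Requires `indexmap` in Cargo.toml.
use indexmap::IndexMap;
use std::collections::HashMap;

fn main() {
    let pairs = [(3u32, "c"), (1, "a"), (2, "b")];

    let hash: HashMap<u32, &str> = pairs.into_iter().collect();
    let index: IndexMap<u32, &str> = pairs.into_iter().collect();

    // Order of this output is unspecified and can differ between runs.
    println!("{:?}", hash.iter().collect::<Vec<_>>());
    // This always prints (3, "c"), (1, "a"), (2, "b") - the insertion order.
    println!("{:?}", index.iter().collect::<Vec<_>>());
}
```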
@@ -2489,7 +2489,7 @@ impl<'a> Parser<'a> {
}
ExprKind::Block(_, None) => {
this.dcx().emit_err(errors::IfExpressionMissingCondition {
if_span: lo.shrink_to_hi(),
if_span: lo.with_neighbor(cond.span).shrink_to_hi(),
block_span: self.sess.source_map().start_point(cond_span),
});
std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi()))
@@ -3735,7 +3735,7 @@ impl<'a> Parser<'a> {
}

pub(crate) fn mk_expr(&self, span: Span, kind: ExprKind) -> P<Expr> {
P(Expr { kind, span, attrs: AttrVec::new(), id: DUMMY_NODE_ID, tokens: None })
self.mk_expr_with_attrs(span, kind, AttrVec::new())
}

pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> {
@@ -2118,7 +2118,7 @@ impl<'a> Parser<'a> {
Applicability::MaybeIncorrect,
);
err.span_suggestion(
span.shrink_to_hi(),
span.with_neighbor(self.token.span).shrink_to_hi(),
"add a semicolon",
';',
Applicability::MaybeIncorrect,
@@ -2632,7 +2632,7 @@ impl<'a> Parser<'a> {

let is_name_required = match this.token.kind {
token::DotDotDot => false,
_ => req_name(this.token.span.edition()),
_ => req_name(this.token.span.with_neighbor(this.prev_token.span).edition()),
};
let (pat, ty) = if is_name_required || this.is_named_param() {
debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
@@ -852,21 +852,6 @@ impl fmt::Debug for SyntaxContext {
}

impl Span {
/// Creates a fresh expansion with given properties.
/// Expansions are normally created by macros, but in some cases expansions are created for
/// other compiler-generated code to set per-span properties like allowed unstable features.
/// The returned span belongs to the created expansion and has the new properties,
/// but its location is inherited from the current span.
pub fn fresh_expansion(self, expn_id: LocalExpnId) -> Span {
HygieneData::with(|data| {
self.with_ctxt(data.apply_mark(
self.ctxt(),
expn_id.to_expn_id(),
Transparency::Transparent,
))
})
}

/// Reuses the span but adds information like the kind of the desugaring and features that are
/// allowed inside this span.
pub fn mark_with_reason(
@@ -881,7 +866,7 @@ impl Span {
..ExpnData::default(ExpnKind::Desugaring(reason), self, edition, None, None)
};
let expn_id = LocalExpnId::fresh(expn_data, ctx);
self.fresh_expansion(expn_id)
self.apply_mark(expn_id.to_expn_id(), Transparency::Transparent)
}
}
@@ -825,6 +825,39 @@ impl Span {
)
}

/// Prepare two spans to a combine operation like `to` or `between`.
/// FIXME: consider using declarative macro metavariable spans for the given spans if they are
/// better suitable for combining (#119412).
fn prepare_to_combine(
a_orig: Span,
b_orig: Span,
) -> Result<(SpanData, SpanData, Option<LocalDefId>), Span> {
let (a, b) = (a_orig.data(), b_orig.data());

if a.ctxt != b.ctxt {
// Context mismatches usually happen when procedural macros combine spans copied from
// the macro input with spans produced by the macro (`Span::*_site`).
// In that case we consider the combined span to be produced by the macro and return
// the original macro-produced span as the result.
// Otherwise we just fall back to returning the first span.
// Combining locations typically doesn't make sense in case of context mismatches.
// `is_root` here is a fast path optimization.
let a_is_callsite = a.ctxt.is_root() || a.ctxt == b.span().source_callsite().ctxt();
return Err(if a_is_callsite { b_orig } else { a_orig });
}

let parent = if a.parent == b.parent { a.parent } else { None };
Ok((a, b, parent))
}

/// This span, but in a larger context, may switch to the metavariable span if suitable.
pub fn with_neighbor(self, neighbor: Span) -> Span {
match Span::prepare_to_combine(self, neighbor) {
Ok((this, ..)) => Span::new(this.lo, this.hi, this.ctxt, this.parent),
Err(_) => self,
}
}

/// Returns a `Span` that would enclose both `self` and `end`.
///
/// Note that this can also be used to extend the span "backwards":
@@ -836,26 +869,12 @@ impl Span {
/// ^^^^^^^^^^^^^^^^^^^^
/// ```
pub fn to(self, end: Span) -> Span {
let span_data = self.data();
let end_data = end.data();
// FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480).
// Return the macro span on its own to avoid weird diagnostic output. It is preferable to
// have an incomplete span than a completely nonsensical one.
if span_data.ctxt != end_data.ctxt {
if span_data.ctxt.is_root() {
return end;
} else if end_data.ctxt.is_root() {
return self;
match Span::prepare_to_combine(self, end) {
Ok((from, to, parent)) => {
Span::new(cmp::min(from.lo, to.lo), cmp::max(from.hi, to.hi), from.ctxt, parent)
}
// Both spans fall within a macro.
// FIXME(estebank): check if it is the *same* macro.
Err(fallback) => fallback,
}
Span::new(
cmp::min(span_data.lo, end_data.lo),
cmp::max(span_data.hi, end_data.hi),
if span_data.ctxt.is_root() { end_data.ctxt } else { span_data.ctxt },
if span_data.parent == end_data.parent { span_data.parent } else { None },
)
}

/// Returns a `Span` between the end of `self` to the beginning of `end`.
@@ -866,14 +885,12 @@ impl Span {
/// ^^^^^^^^^^^^^
/// ```
pub fn between(self, end: Span) -> Span {
let span = self.data();
let end = end.data();
Span::new(
span.hi,
end.lo,
if end.ctxt.is_root() { end.ctxt } else { span.ctxt },
if span.parent == end.parent { span.parent } else { None },
)
match Span::prepare_to_combine(self, end) {
Ok((from, to, parent)) => {
Span::new(cmp::min(from.hi, to.hi), cmp::max(from.lo, to.lo), from.ctxt, parent)
}
Err(fallback) => fallback,
}
}

/// Returns a `Span` from the beginning of `self` until the beginning of `end`.
@@ -884,31 +901,12 @@ impl Span {
/// ^^^^^^^^^^^^^^^^^
/// ```
pub fn until(self, end: Span) -> Span {
// Most of this function's body is copied from `to`.
// We can't just do `self.to(end.shrink_to_lo())`,
// because to also does some magic where it uses min/max so
// it can handle overlapping spans. Some advanced mis-use of
// `until` with different ctxts makes this visible.
let span_data = self.data();
let end_data = end.data();
// FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480).
// Return the macro span on its own to avoid weird diagnostic output. It is preferable to
// have an incomplete span than a completely nonsensical one.
if span_data.ctxt != end_data.ctxt {
if span_data.ctxt.is_root() {
return end;
} else if end_data.ctxt.is_root() {
return self;
match Span::prepare_to_combine(self, end) {
Ok((from, to, parent)) => {
Span::new(cmp::min(from.lo, to.lo), cmp::max(from.lo, to.lo), from.ctxt, parent)
}
// Both spans fall within a macro.
// FIXME(estebank): check if it is the *same* macro.
Err(fallback) => fallback,
}
Span::new(
span_data.lo,
end_data.lo,
if end_data.ctxt.is_root() { end_data.ctxt } else { span_data.ctxt },
if span_data.parent == end_data.parent { span_data.parent } else { None },
)
}

pub fn from_inner(self, inner: InnerSpan) -> Span {
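After this change, `to`, `between`, and `until` all defer to `prepare_to_combine` and then differ only in which endpoints they keep. A standalone sketch of that endpoint arithmetic on plain byte offsets (illustrative structs, not rustc's real `Span` API):

```rust
// Sketch only: models the lo/hi arithmetic of the rewritten combinators for
// two spans that already share the same syntax context.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Sp {
    lo: u32,
    hi: u32,
}

// `to`: enclose both spans.
fn to(a: Sp, b: Sp) -> Sp {
    Sp { lo: a.lo.min(b.lo), hi: a.hi.max(b.hi) }
}

// `between`: the gap from the end of one span to the start of the other.
fn between(a: Sp, b: Sp) -> Sp {
    Sp { lo: a.hi.min(b.hi), hi: a.lo.max(b.lo) }
}

// `until`: from the start of `a` up to the start of `b`.
fn until(a: Sp, b: Sp) -> Sp {
    Sp { lo: a.lo.min(b.lo), hi: a.lo.max(b.lo) }
}

fn main() {
    let hello = Sp { lo: 0, hi: 5 };   // "hello"
    let world = Sp { lo: 10, hi: 15 }; // "world"
    assert_eq!(to(hello, world), Sp { lo: 0, hi: 15 });
    assert_eq!(between(hello, world), Sp { lo: 5, hi: 10 });
    assert_eq!(until(hello, world), Sp { lo: 0, hi: 10 });
}
```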
@@ -6,7 +6,8 @@ pub fn target() -> Target {
base.plt_by_default = false;
base.max_atomic_width = Some(64);
base.stack_probes = StackProbeType::Inline;
base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI;
base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::LEAK;
base.supports_xray = true;

Target {
llvm_target: "x86_64-unknown-fuchsia".into(),
@@ -120,7 +120,6 @@
#![feature(const_size_of_val)]
#![feature(const_waker)]
#![feature(core_intrinsics)]
#![feature(core_panic)]
#![feature(deprecated_suggestion)]
#![feature(dispatch_from_dyn)]
#![feature(error_generic_member_access)]
@@ -139,6 +138,7 @@
#![feature(maybe_uninit_slice)]
#![feature(maybe_uninit_uninit_array)]
#![feature(maybe_uninit_uninit_array_transpose)]
#![feature(panic_internals)]
#![feature(pattern)]
#![feature(ptr_internals)]
#![feature(ptr_metadata)]
@@ -287,7 +287,7 @@ pub fn spin_loop() {
///
/// The compiler could theoretically make optimizations like the following:
///
/// - `needle` and `haystack` are always the same, move the call to `contains` outside the loop and
/// - The `needle` and `haystack` do not change, move the call to `contains` outside the loop and
/// delete the loop
/// - Inline `contains`
/// - `needle` and `haystack` have values known at compile time, `contains` is always true. Remove
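The list items above come from the `core::hint::black_box` documentation; the wording change only removes the ambiguous "are always the same" phrasing. A self-contained sketch of the benchmark-style loop that documentation is describing (names are illustrative, not from the patch):

```rust
// Sketch only: without `black_box`, the optimizer may hoist `contains` out of
// the loop (its inputs never change) or fold it to a constant; `black_box`
// hides the values so the call is actually performed on every iteration.
use std::hint::black_box;

fn contains(haystack: &[&str], needle: &str) -> bool {
    haystack.iter().any(|x| x == &needle)
}

fn main() {
    let haystack = ["abc", "def", "ghi"];
    let needle = "ghi";
    for _ in 0..10 {
        black_box(contains(black_box(&haystack), black_box(needle)));
    }
}
```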
@@ -65,6 +65,7 @@
//! ```rust
//! #![feature(core_intrinsics, custom_mir)]
//! #![allow(internal_features)]
//! #![allow(unused_assignments)]
//!
//! use core::intrinsics::mir::*;
//!
@@ -167,7 +167,6 @@
#![feature(const_unicode_case_lookup)]
#![feature(const_unsafecell_get_mut)]
#![feature(const_waker)]
#![feature(core_panic)]
#![feature(coverage_attribute)]
#![feature(duration_consts_float)]
#![feature(internal_impls_macro)]
@@ -179,6 +178,7 @@
#![feature(non_null_convenience)]
#![feature(offset_of)]
#![feature(offset_of_enum)]
#![feature(panic_internals)]
#![feature(ptr_alignment_type)]
#![feature(ptr_metadata)]
#![feature(set_ptr_value)]
@@ -32,7 +32,7 @@ macro_rules! panic {
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "assert_eq_macro")]
#[allow_internal_unstable(core_panic)]
#[allow_internal_unstable(panic_internals)]
macro_rules! assert_eq {
($left:expr, $right:expr $(,)?) => {
match (&$left, &$right) {
@@ -82,7 +82,7 @@ macro_rules! assert_eq {
#[macro_export]
#[stable(feature = "assert_ne", since = "1.13.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "assert_ne_macro")]
#[allow_internal_unstable(core_panic)]
#[allow_internal_unstable(panic_internals)]
macro_rules! assert_ne {
($left:expr, $right:expr $(,)?) => {
match (&$left, &$right) {
@@ -139,7 +139,7 @@ macro_rules! assert_ne {
/// assert_matches!(c, Ok(x) | Err(x) if x.len() < 100);
/// ```
#[unstable(feature = "assert_matches", issue = "82775")]
#[allow_internal_unstable(core_panic)]
#[allow_internal_unstable(panic_internals)]
#[rustc_macro_transparency = "semitransparent"]
pub macro assert_matches {
($left:expr, $(|)? $( $pattern:pat_param )|+ $( if $guard: expr )? $(,)?) => {
@@ -787,7 +787,7 @@ macro_rules! unreachable {
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "unimplemented_macro")]
#[allow_internal_unstable(core_panic)]
#[allow_internal_unstable(panic_internals)]
macro_rules! unimplemented {
() => {
$crate::panicking::panic("not implemented")
@@ -867,7 +867,7 @@ macro_rules! unimplemented {
#[macro_export]
#[stable(feature = "todo_macro", since = "1.40.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "todo_macro")]
#[allow_internal_unstable(core_panic)]
#[allow_internal_unstable(panic_internals)]
macro_rules! todo {
() => {
$crate::panicking::panic("not yet implemented")
@@ -1534,7 +1534,7 @@ pub(crate) mod builtin {
#[rustc_builtin_macro]
#[macro_export]
#[rustc_diagnostic_item = "assert_macro"]
#[allow_internal_unstable(core_panic, edition_panic, generic_assert_internals)]
#[allow_internal_unstable(panic_internals, edition_panic, generic_assert_internals)]
macro_rules! assert {
($cond:expr $(,)?) => {{ /* compiler built-in */ }};
($cond:expr, $($arg:tt)+) => {{ /* compiler built-in */ }};
@@ -17,7 +17,7 @@ pub use self::unwind_safe::{AssertUnwindSafe, RefUnwindSafe, UnwindSafe};

#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")]
#[allow_internal_unstable(core_panic, const_format_args)]
#[allow_internal_unstable(panic_internals, const_format_args)]
#[rustc_diagnostic_item = "core_panic_2015_macro"]
#[rustc_macro_transparency = "semitransparent"]
pub macro panic_2015 {
@@ -44,7 +44,7 @@ pub macro panic_2015 {

#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")]
#[allow_internal_unstable(core_panic, const_format_args)]
#[allow_internal_unstable(panic_internals, const_format_args)]
#[rustc_diagnostic_item = "core_panic_2021_macro"]
#[rustc_macro_transparency = "semitransparent"]
#[cfg(feature = "panic_immediate_abort")]
@@ -66,7 +66,7 @@ pub macro panic_2021 {
#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")]
#[allow_internal_unstable(
core_panic,
panic_internals,
core_intrinsics,
const_dispatch,
const_eval_select,
@@ -109,7 +109,7 @@ pub macro panic_2021 {

#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use unreachable!() instead")]
#[allow_internal_unstable(core_panic)]
#[allow_internal_unstable(panic_internals)]
#[rustc_diagnostic_item = "unreachable_2015_macro"]
#[rustc_macro_transparency = "semitransparent"]
pub macro unreachable_2015 {
@@ -128,7 +128,7 @@ pub macro unreachable_2015 {

#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use unreachable!() instead")]
#[allow_internal_unstable(core_panic)]
#[allow_internal_unstable(panic_internals)]
#[rustc_macro_transparency = "semitransparent"]
pub macro unreachable_2021 {
() => (
@@ -145,8 +145,8 @@ pub macro unreachable_2021 {
/// unwind. For example, checks in `_unchecked` functions that are intended for debugging but should
/// not compromise unwind safety.
#[doc(hidden)]
#[unstable(feature = "core_panic", issue = "none")]
#[allow_internal_unstable(core_panic, const_format_args)]
#[unstable(feature = "panic_internals", issue = "none")]
#[allow_internal_unstable(panic_internals, const_format_args)]
#[rustc_macro_transparency = "semitransparent"]
pub macro debug_assert_nounwind {
($cond:expr $(,)?) => {
@@ -21,7 +21,7 @@

#![allow(dead_code, missing_docs)]
#![unstable(
feature = "core_panic",
feature = "panic_internals",
reason = "internal details of the implementation of the `panic!` and related macros",
issue = "none"
)]
@@ -48,7 +48,7 @@ const _: () = assert!(cfg!(panic = "abort"), "panic_immediate_abort requires -C
#[track_caller]
#[lang = "panic_fmt"] // needed for const-evaluated panics
#[rustc_do_not_const_check] // hooked by const-eval
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
pub const fn panic_fmt(fmt: fmt::Arguments<'_>) -> ! {
if cfg!(feature = "panic_immediate_abort") {
super::intrinsics::abort()
@@ -82,7 +82,7 @@ pub const fn panic_fmt(fmt: fmt::Arguments<'_>) -> ! {
// and unwinds anyway, we will hit the "unwinding out of nounwind function" guard,
// which causes a "panic in a function that cannot unwind".
#[rustc_nounwind]
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
pub const fn panic_nounwind_fmt(fmt: fmt::Arguments<'_>, force_no_backtrace: bool) -> ! {
#[inline] // this should always be inlined into `panic_nounwind_fmt`
#[track_caller]
@@ -132,7 +132,7 @@ pub const fn panic_nounwind_fmt(fmt: fmt::Arguments<'_>, force_no_backtrace: boo
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)]
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[track_caller]
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
#[lang = "panic"] // needed by codegen for panic on overflow and other `Assert` MIR terminators
pub const fn panic(expr: &'static str) -> ! {
// Use Arguments::new_v1 instead of format_args!("{expr}") to potentially
@@ -150,7 +150,7 @@ pub const fn panic(expr: &'static str) -> ! {
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[lang = "panic_nounwind"] // needed by codegen for non-unwinding panics
#[rustc_nounwind]
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
pub const fn panic_nounwind(expr: &'static str) -> ! {
panic_nounwind_fmt(fmt::Arguments::new_const(&[expr]), /* force_no_backtrace */ false);
}
@@ -166,7 +166,7 @@ pub fn panic_nounwind_nobacktrace(expr: &'static str) -> ! {
#[inline]
#[track_caller]
#[rustc_diagnostic_item = "panic_str"]
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
pub const fn panic_str(expr: &str) -> ! {
panic_display(&expr);
}
@@ -174,7 +174,7 @@ pub const fn panic_str(expr: &str) -> ! {
#[track_caller]
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never), cold)]
#[cfg_attr(feature = "panic_immediate_abort", inline)]
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
pub const fn panic_explicit() -> ! {
panic_display(&"explicit panic");
}
@@ -191,7 +191,7 @@ pub fn unreachable_display<T: fmt::Display>(x: &T) -> ! {
#[rustc_do_not_const_check] // hooked by const-eval
// enforce a &&str argument in const-check and hook this by const-eval
#[rustc_const_panic_str]
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
pub const fn panic_display<T: fmt::Display>(x: &T) -> ! {
panic_fmt(format_args!("{}", *x));
}
@@ -258,7 +258,7 @@ fn panic_in_cleanup() -> ! {

/// This function is used instead of panic_fmt in const eval.
#[lang = "const_panic_fmt"]
#[rustc_const_unstable(feature = "core_panic", issue = "none")]
#[rustc_const_unstable(feature = "panic_internals", issue = "none")]
pub const fn const_panic_fmt(fmt: fmt::Arguments<'_>) -> ! {
if let Some(msg) = fmt.as_str() {
// The panic_display function is hooked by const eval.
@@ -374,7 +374,6 @@
#![feature(cfg_eval)]
#![feature(concat_bytes)]
#![feature(const_format_args)]
#![feature(core_panic)]
#![feature(custom_test_frameworks)]
#![feature(edition_panic)]
#![feature(format_args_nl)]
@@ -11,7 +11,7 @@ use crate::thread::Result;

#[doc(hidden)]
#[unstable(feature = "edition_panic", issue = "none", reason = "use panic!() instead")]
#[allow_internal_unstable(libstd_sys_internals, const_format_args, core_panic, rt)]
#[allow_internal_unstable(libstd_sys_internals, const_format_args, panic_internals, rt)]
#[cfg_attr(not(test), rustc_diagnostic_item = "std_panic_2015_macro")]
#[rustc_macro_transparency = "semitransparent"]
pub macro panic_2015 {
@@ -28,7 +28,9 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
arr.iter().copied().map(String::from)
}

if let Subcommand::Clippy { fix, allow, deny, warn, forbid, .. } = &builder.config.cmd {
if let Subcommand::Clippy { fix, allow_dirty, allow_staged, allow, deny, warn, forbid } =
&builder.config.cmd
{
// disable the most spammy clippy lints
let ignored_lints = vec![
"many_single_char_names", // there are a lot in stdarch
@@ -49,7 +51,16 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
// As a workaround, avoid checking tests and benches when passed --fix.
"--lib", "--bins", "--examples",
]));

if *allow_dirty {
args.push("--allow-dirty".to_owned());
}

if *allow_staged {
args.push("--allow-staged".to_owned());
}
}

args.extend(strings(&["--", "--cap-lints", "warn"]));
args.extend(ignored_lints.iter().map(|lint| format!("-Aclippy::{}", lint)));
let mut clippy_lint_levels: Vec<String> = Vec::new();
@@ -255,6 +255,10 @@ pub enum Subcommand {
Clippy {
#[arg(long)]
fix: bool,
#[arg(long, requires = "fix")]
allow_dirty: bool,
#[arg(long, requires = "fix")]
allow_staged: bool,
/// clippy lints to allow
#[arg(global(true), short = 'A', action = clap::ArgAction::Append, value_name = "LINT")]
allow: Vec<String>,
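With the two fields above and the `args()` changes earlier in this rollup, a tree-wide lint fix can be run as `x.py clippy --fix --allow-dirty --allow-staged`; both new flags are rejected unless `--fix` is also given (`requires = "fix"`).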
@@ -147,15 +147,38 @@ will match these queries:
* `Read -> Result<Vec<u8>, Error>`
* `Read -> Result<Error, Vec>`
* `Read -> Result<Vec<u8>>`
* `Read -> u8`

But it *does not* match `Result<Vec, u8>` or `Result<u8<Vec>>`.

Function signature searches also support arrays and slices. The explicit name
`primitive:slice<u8>` and `primitive:array<u8>` can be used to match a slice
or array of bytes, while square brackets `[u8]` will match either one. Empty
square brackets, `[]`, will match any slice or array regardless of what
it contains, while a slice with a type parameter, like `[T]`, will only match
functions that actually operate on generic slices.
### Primitives with Special Syntax

| Shorthand | Explicit names |
| --------- | ------------------------------------------------ |
| `[]` | `primitive:slice` and/or `primitive:array` |
| `[T]` | `primitive:slice<T>` and/or `primitive:array<T>` |
| `()` | `primitive:unit` and/or `primitive:tuple` |
| `(T)` | `T` |
| `(T,)` | `primitive:tuple<T>` |
| `!` | `primitive:never` |

When searching for `[]`, Rustdoc will return search results with either slices
or arrays. If you know which one you want, you can force it to return results
for `primitive:slice` or `primitive:array` using the explicit name syntax.
Empty square brackets, `[]`, will match any slice or array regardless of what
it contains, or an item type can be provided, such as `[u8]` or `[T]`, to
explicitly find functions that operate on byte slices or generic slices,
respectively.

A single type expression wrapped in parens is the same as that type expression,
since parens act as the grouping operator. If they're empty, though, they will
match both `unit` and `tuple`, and if there's more than one type (or a trailing
or leading comma) it is the same as `primitive:tuple<...>`.

However, since items can be left out of the query, `(T)` will still return
results for types that match tuples, even though it also matches the type on
its own. That is, `(u32)` matches `(u32,)` for the exact same reason that it
also matches `Result<u32, Error>`.
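As a quick illustration of the two paragraphs above (hypothetical item names, not from the patch):

```rust
// Illustrative only. With the syntax described above:
//   `-> ()`                    finds both `reset` (unit return) and `split` (tuple return);
//   `-> primitive:unit`        finds only `reset`;
//   `-> primitive:tuple<u32>`  finds only `split`;
//   `(u32) -> u32`             still finds `double`, because `(u32)` is just `u32`.
pub fn reset(flag: &mut bool) {
    *flag = false;
}

pub fn split(x: u32) -> (u32, u32) {
    (x, x + 1)
}

pub fn double(x: u32) -> u32 {
    x * 2
}
```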

### Limitations and quirks of type-based search
@@ -188,11 +211,10 @@ Most of these limitations should be addressed in future version of Rustdoc.
that you don't want a type parameter, you can force it to match
something else by giving it a different prefix like `struct:T`.

* It's impossible to search for references, pointers, or tuples. The
* It's impossible to search for references or pointers. The
wrapped types can be searched for, so a function that takes `&File` can
be found with `File`, but you'll get a parse error when typing an `&`
into the search field. Similarly, `Option<(T, U)>` can be matched with
`Option<T, U>`, but `(` will give a parse error.
into the search field.

* Searching for lifetimes is not supported.
@@ -216,8 +238,9 @@ Item filters can be used in both name-based and type signature-based searches.

```text
ident = *(ALPHA / DIGIT / "_")
path = ident *(DOUBLE-COLON ident) [!]
slice = OPEN-SQUARE-BRACKET [ nonempty-arg-list ] CLOSE-SQUARE-BRACKET
arg = [type-filter *WS COLON *WS] (path [generics] / slice / [!])
slice-like = OPEN-SQUARE-BRACKET [ nonempty-arg-list ] CLOSE-SQUARE-BRACKET
tuple-like = OPEN-PAREN [ nonempty-arg-list ] CLOSE-PAREN
arg = [type-filter *WS COLON *WS] (path [generics] / slice-like / tuple-like / [!])
type-sep = COMMA/WS *(COMMA/WS)
nonempty-arg-list = *(type-sep) arg *(type-sep arg) *(type-sep)
generic-arg-list = *(type-sep) arg [ EQUAL arg ] *(type-sep arg [ EQUAL arg ]) *(type-sep)
@@ -263,6 +286,8 @@ OPEN-ANGLE-BRACKET = "<"
CLOSE-ANGLE-BRACKET = ">"
OPEN-SQUARE-BRACKET = "["
CLOSE-SQUARE-BRACKET = "]"
OPEN-PAREN = "("
CLOSE-PAREN = ")"
COLON = ":"
DOUBLE-COLON = "::"
QUOTE = %x22
@@ -1,5 +0,0 @@
# `core_panic`

This feature is internal to the Rust compiler and is not intended for general use.

------------------------
@@ -143,6 +143,8 @@ complete -c x.py -n "__fish_seen_subcommand_from clippy" -l llvm-profile-use -d
complete -c x.py -n "__fish_seen_subcommand_from clippy" -l reproducible-artifact -d 'Additional reproducible artifacts that should be added to the reproducible artifacts archive' -r
complete -c x.py -n "__fish_seen_subcommand_from clippy" -l set -d 'override options in config.toml' -r -f
complete -c x.py -n "__fish_seen_subcommand_from clippy" -l fix
complete -c x.py -n "__fish_seen_subcommand_from clippy" -l allow-dirty
complete -c x.py -n "__fish_seen_subcommand_from clippy" -l allow-staged
complete -c x.py -n "__fish_seen_subcommand_from clippy" -s v -l verbose -d 'use verbose output (-vv for very verbose)'
complete -c x.py -n "__fish_seen_subcommand_from clippy" -s i -l incremental -d 'use incremental compilation'
complete -c x.py -n "__fish_seen_subcommand_from clippy" -l include-default-paths -d 'include default paths in addition to the provided ones'
@@ -188,6 +188,8 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock {
[CompletionResult]::new('--reproducible-artifact', 'reproducible-artifact', [CompletionResultType]::ParameterName, 'Additional reproducible artifacts that should be added to the reproducible artifacts archive')
[CompletionResult]::new('--set', 'set', [CompletionResultType]::ParameterName, 'override options in config.toml')
[CompletionResult]::new('--fix', 'fix', [CompletionResultType]::ParameterName, 'fix')
[CompletionResult]::new('--allow-dirty', 'allow-dirty', [CompletionResultType]::ParameterName, 'allow-dirty')
[CompletionResult]::new('--allow-staged', 'allow-staged', [CompletionResultType]::ParameterName, 'allow-staged')
[CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)')
[CompletionResult]::new('--verbose', 'verbose', [CompletionResultType]::ParameterName, 'use verbose output (-vv for very verbose)')
[CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'use incremental compilation')
@@ -615,7 +615,7 @@ _x.py() {
return 0
;;
x.py__clippy)
opts="-A -D -W -F -v -i -j -h --fix --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..."
opts="-A -D -W -F -v -i -j -h --fix --allow-dirty --allow-staged --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
@@ -182,6 +182,8 @@ _arguments "${_arguments_options[@]}" \
'*--reproducible-artifact=[Additional reproducible artifacts that should be added to the reproducible artifacts archive]:REPRODUCIBLE_ARTIFACT: ' \
'*--set=[override options in config.toml]:section.option=value:( )' \
'--fix[]' \
'--allow-dirty[]' \
'--allow-staged[]' \
'*-v[use verbose output (-vv for very verbose)]' \
'*--verbose[use verbose output (-vv for very verbose)]' \
'-i[use incremental compilation]' \
@@ -566,6 +566,9 @@ fn get_index_type_id(
// The type parameters are converted to generics in `simplify_fn_type`
clean::Slice(_) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Slice)),
clean::Array(_, _) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Array)),
clean::Tuple(ref n) if n.is_empty() => {
Some(RenderTypeId::Primitive(clean::PrimitiveType::Unit))
}
clean::Tuple(_) => Some(RenderTypeId::Primitive(clean::PrimitiveType::Tuple)),
clean::QPath(ref data) => {
if data.self_type.is_self_type()
@@ -260,6 +260,18 @@ function initSearch(rawSearchIndex) {
* Special type name IDs for searching by both array and slice (`[]` syntax).
*/
let typeNameIdOfArrayOrSlice;
/**
* Special type name IDs for searching by tuple.
*/
let typeNameIdOfTuple;
/**
* Special type name IDs for searching by unit.
*/
let typeNameIdOfUnit;
/**
* Special type name IDs for searching by both tuple and unit (`()` syntax).
*/
let typeNameIdOfTupleOrUnit;

/**
* Add an item to the type Name->ID map, or, if one already exists, use it.
@@ -295,11 +307,7 @@ function initSearch(rawSearchIndex) {
}

function isEndCharacter(c) {
return "=,>-]".indexOf(c) !== -1;
}

function isErrorCharacter(c) {
return "()".indexOf(c) !== -1;
return "=,>-])".indexOf(c) !== -1;
}

function itemTypeFromName(typename) {
@@ -585,8 +593,6 @@ function initSearch(rawSearchIndex) {
throw ["Unexpected ", "!", ": it can only be at the end of an ident"];
}
foundExclamation = parserState.pos;
} else if (isErrorCharacter(c)) {
throw ["Unexpected ", c];
} else if (isPathSeparator(c)) {
if (c === ":") {
if (!isPathStart(parserState)) {
@@ -616,11 +622,14 @@ function initSearch(rawSearchIndex) {
}
} else if (
c === "[" ||
c === "(" ||
isEndCharacter(c) ||
isSpecialStartCharacter(c) ||
isSeparatorCharacter(c)
) {
break;
} else if (parserState.pos > 0) {
throw ["Unexpected ", c, " after ", parserState.userQuery[parserState.pos - 1]];
} else {
throw ["Unexpected ", c];
}
@@ -661,15 +670,24 @@ function initSearch(rawSearchIndex) {
skipWhitespace(parserState);
let start = parserState.pos;
let end;
if (parserState.userQuery[parserState.pos] === "[") {
if ("[(".indexOf(parserState.userQuery[parserState.pos]) !== -1) {
let endChar = ")";
let name = "()";
let friendlyName = "tuple";

if (parserState.userQuery[parserState.pos] === "[") {
endChar = "]";
name = "[]";
friendlyName = "slice";
}
parserState.pos += 1;
getItemsBefore(query, parserState, generics, "]");
const { foundSeparator } = getItemsBefore(query, parserState, generics, endChar);
const typeFilter = parserState.typeFilter;
const isInBinding = parserState.isInBinding;
if (typeFilter !== null && typeFilter !== "primitive") {
throw [
"Invalid search type: primitive ",
"[]",
name,
" and ",
typeFilter,
" both specified",
@@ -677,27 +695,31 @@ function initSearch(rawSearchIndex) {
}
parserState.typeFilter = null;
parserState.isInBinding = null;
parserState.totalElems += 1;
if (isInGenerics) {
parserState.genericsElems += 1;
}
for (const gen of generics) {
if (gen.bindingName !== null) {
throw ["Type parameter ", "=", " cannot be within slice ", "[]"];
throw ["Type parameter ", "=", ` cannot be within ${friendlyName} `, name];
}
}
elems.push({
name: "[]",
id: null,
fullPath: ["[]"],
pathWithoutLast: [],
pathLast: "[]",
normalizedPathLast: "[]",
generics,
typeFilter: "primitive",
bindingName: isInBinding,
bindings: new Map(),
});
if (name === "()" && !foundSeparator && generics.length === 1 && typeFilter === null) {
elems.push(generics[0]);
} else {
parserState.totalElems += 1;
if (isInGenerics) {
parserState.genericsElems += 1;
}
elems.push({
name: name,
id: null,
fullPath: [name],
pathWithoutLast: [],
pathLast: name,
normalizedPathLast: name,
generics,
bindings: new Map(),
typeFilter: "primitive",
bindingName: isInBinding,
});
}
} else {
const isStringElem = parserState.userQuery[start] === "\"";
// We handle the strings on their own mostly to make code easier to follow.
@@ -770,9 +792,11 @@ function initSearch(rawSearchIndex) {
* @param {Array<QueryElement>} elems - This is where the new {QueryElement} will be added.
* @param {string} endChar - This function will stop when it'll encounter this
* character.
* @returns {{foundSeparator: bool}}
*/
function getItemsBefore(query, parserState, elems, endChar) {
let foundStopChar = true;
let foundSeparator = false;
let start = parserState.pos;

// If this is a generic, keep the outer item's type filter around.
@@ -786,6 +810,8 @@ function initSearch(rawSearchIndex) {
extra = "<";
} else if (endChar === "]") {
extra = "[";
} else if (endChar === ")") {
extra = "(";
} else if (endChar === "") {
extra = "->";
} else {
@@ -802,6 +828,7 @@ function initSearch(rawSearchIndex) {
} else if (isSeparatorCharacter(c)) {
parserState.pos += 1;
foundStopChar = true;
foundSeparator = true;
continue;
} else if (c === ":" && isPathStart(parserState)) {
throw ["Unexpected ", "::", ": paths cannot start with ", "::"];
@@ -879,6 +906,8 @@ function initSearch(rawSearchIndex) {

parserState.typeFilter = oldTypeFilter;
parserState.isInBinding = oldIsInBinding;

return { foundSeparator };
}

/**
@@ -926,6 +955,8 @@ function initSearch(rawSearchIndex) {
break;
}
throw ["Unexpected ", c, " (did you mean ", "->", "?)"];
} else if (parserState.pos > 0) {
throw ["Unexpected ", c, " after ", parserState.userQuery[parserState.pos - 1]];
}
throw ["Unexpected ", c];
} else if (c === ":" && !isPathStart(parserState)) {
@@ -1599,6 +1630,11 @@ function initSearch(rawSearchIndex) {
) {
// [] matches primitive:array or primitive:slice
// if it matches, then we're fine, and this is an appropriate match candidate
} else if (queryElem.id === typeNameIdOfTupleOrUnit &&
(fnType.id === typeNameIdOfTuple || fnType.id === typeNameIdOfUnit)
) {
// () matches primitive:tuple or primitive:unit
// if it matches, then we're fine, and this is an appropriate match candidate
} else if (fnType.id !== queryElem.id || queryElem.id === null) {
return false;
}
@@ -1792,7 +1828,7 @@ function initSearch(rawSearchIndex) {
if (row.id > 0 && elem.id > 0 && elem.pathWithoutLast.length === 0 &&
typePassesFilter(elem.typeFilter, row.ty) && elem.generics.length === 0 &&
// special case
elem.id !== typeNameIdOfArrayOrSlice
elem.id !== typeNameIdOfArrayOrSlice && elem.id !== typeNameIdOfTupleOrUnit
) {
return row.id === elem.id || checkIfInList(
row.generics,
@@ -2886,12 +2922,15 @@ ${item.displayPath}<span class="${type}">${name}</span>\
*/
function buildFunctionTypeFingerprint(type, output, fps) {
let input = type.id;
// All forms of `[]` get collapsed down to one thing in the bloom filter.
// All forms of `[]`/`()` get collapsed down to one thing in the bloom filter.
// Differentiating between arrays and slices, if the user asks for it, is
// still done in the matching algorithm.
if (input === typeNameIdOfArray || input === typeNameIdOfSlice) {
input = typeNameIdOfArrayOrSlice;
}
if (input === typeNameIdOfTuple || input === typeNameIdOfUnit) {
input = typeNameIdOfTupleOrUnit;
}
// http://burtleburtle.net/bob/hash/integer.html
// ~~ is toInt32. It's used before adding, so
// the number stays in safe integer range.
@@ -2991,7 +3030,10 @@ ${item.displayPath}<span class="${type}">${name}</span>\
// that can be searched using `[]` syntax.
typeNameIdOfArray = buildTypeMapIndex("array");
typeNameIdOfSlice = buildTypeMapIndex("slice");
typeNameIdOfTuple = buildTypeMapIndex("tuple");
typeNameIdOfUnit = buildTypeMapIndex("unit");
typeNameIdOfArrayOrSlice = buildTypeMapIndex("[]");
typeNameIdOfTupleOrUnit = buildTypeMapIndex("()");

// Function type fingerprints are 128-bit bloom filters that are used to
// estimate the distance between function and query.
@@ -8,7 +8,7 @@ use rustc_errors::{
use rustc_parse::parse_stream_from_source_str;
use rustc_resolve::rustdoc::source_span_for_markdown_range;
use rustc_session::parse::ParseSess;
use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId};
use rustc_span::hygiene::{AstPass, ExpnData, ExpnKind, LocalExpnId, Transparency};
use rustc_span::source_map::{FilePathMapping, SourceMap};
use rustc_span::{FileName, InnerSpan, DUMMY_SP};

@@ -50,7 +50,7 @@ fn check_rust_syntax(
let expn_data =
ExpnData::default(ExpnKind::AstPass(AstPass::TestHarness), DUMMY_SP, edition, None, None);
let expn_id = cx.tcx.with_stable_hashing_context(|hcx| LocalExpnId::fresh(expn_data, hcx));
let span = DUMMY_SP.fresh_expansion(expn_id);
let span = DUMMY_SP.apply_mark(expn_id.to_expn_id(), Transparency::Transparent);

let is_empty = rustc_driver::catch_fatal_errors(|| {
parse_stream_from_source_str(
@@ -8,20 +8,20 @@
let mut _3: u8;

bb0: {
- StorageLive(_2);
+ nop;
StorageLive(_3);
_3 = _1;
StorageLive(_2);
- StorageLive(_3);
- _3 = _1;
- _2 = dummy(move _3) -> [return: bb1, unwind unreachable];
+ _1 = dummy(move _3) -> [return: bb1, unwind unreachable];
+ nop;
+ nop;
+ _2 = dummy(move _1) -> [return: bb1, unwind unreachable];
}

bb1: {
StorageDead(_3);
- _1 = move _2;
- StorageDead(_2);
+ nop;
- StorageDead(_3);
+ nop;
_1 = move _2;
StorageDead(_2);
_0 = const ();
return;
}
@@ -8,20 +8,20 @@
let mut _3: u8;

bb0: {
- StorageLive(_2);
+ nop;
StorageLive(_3);
_3 = _1;
StorageLive(_2);
- StorageLive(_3);
- _3 = _1;
- _2 = dummy(move _3) -> [return: bb1, unwind continue];
+ _1 = dummy(move _3) -> [return: bb1, unwind continue];
+ nop;
+ nop;
+ _2 = dummy(move _1) -> [return: bb1, unwind continue];
}

bb1: {
StorageDead(_3);
- _1 = move _2;
- StorageDead(_2);
+ nop;
- StorageDead(_3);
+ nop;
_1 = move _2;
StorageDead(_2);
_0 = const ();
return;
}
@@ -24,7 +24,7 @@ const PARSED = [
original: "-> *",
returned: [],
userQuery: "-> *",
error: "Unexpected `*`",
error: "Unexpected `*` after ` `",
},
{
query: 'a<"P">',
@@ -107,15 +107,6 @@ const PARSED = [
userQuery: "a<::a>",
error: "Unexpected `::`: paths cannot start with `::`",
},
{
query: "((a))",
elems: [],
foundElems: 0,
original: "((a))",
returned: [],
userQuery: "((a))",
error: "Unexpected `(`",
},
{
query: "(p -> p",
elems: [],
@@ -123,7 +114,7 @@ const PARSED = [
original: "(p -> p",
returned: [],
userQuery: "(p -> p",
error: "Unexpected `(`",
error: "Unexpected `-` after `(`",
},
{
query: "::a::b",
@@ -204,7 +195,7 @@ const PARSED = [
original: "a (b:",
returned: [],
userQuery: "a (b:",
error: "Unexpected `(`",
error: "Expected `,`, `:` or `->`, found `(`",
},
{
query: "_:",
@@ -249,7 +240,7 @@ const PARSED = [
original: "ab'",
returned: [],
userQuery: "ab'",
error: "Unexpected `'`",
error: "Unexpected `'` after `b`",
},
{
query: "a->",
@@ -266,6 +266,24 @@ const PARSED = [
userQuery: "]",
error: "Unexpected `]`",
},
{
query: '[a<b>',
elems: [],
foundElems: 0,
original: "[a<b>",
returned: [],
userQuery: "[a<b>",
error: "Unclosed `[`",
},
{
query: 'a<b>]',
elems: [],
foundElems: 0,
original: "a<b>]",
returned: [],
userQuery: "a<b>]",
error: "Unexpected `]` after `>`",
},
{
query: 'primitive:[u8]',
elems: [
tests/rustdoc-js-std/parser-tuple.js (new file, 365 lines)
@@ -0,0 +1,365 @@
const PARSED = [
{
query: '(((D, ()))',
elems: [],
foundElems: 0,
original: '(((D, ()))',
returned: [],
userQuery: '(((d, ()))',
error: 'Unclosed `(`',
},
{
query: '(((D, ())))',
elems: [
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [
{
name: "d",
fullPath: ["d"],
pathWithoutLast: [],
pathLast: "d",
generics: [],
typeFilter: -1,
},
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [],
typeFilter: 1,
},
],
typeFilter: 1,
}
],
foundElems: 1,
original: '(((D, ())))',
returned: [],
userQuery: '(((d, ())))',
error: null,
},
{
query: '(),u8',
elems: [
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [],
typeFilter: 1,
},
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
foundElems: 2,
original: "(),u8",
returned: [],
userQuery: "(),u8",
error: null,
},
// Parens act as grouping operators when:
// - there's no commas directly nested within
// - there's at least two child types (zero means unit)
// - it's not tagged with a type filter
// Otherwise, they represent unit and/or tuple. To search for
// unit or tuple specifically, use `primitive:unit` or `primitive:tuple<...>`.
{
query: '(u8)',
elems: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
foundElems: 1,
original: "(u8)",
returned: [],
userQuery: "(u8)",
error: null,
},
{
query: '(u8,)',
elems: [
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
typeFilter: 1,
},
],
foundElems: 1,
original: "(u8,)",
returned: [],
userQuery: "(u8,)",
error: null,
},
{
query: '(,u8)',
elems: [
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
typeFilter: 1,
},
],
foundElems: 1,
original: "(,u8)",
returned: [],
userQuery: "(,u8)",
error: null,
},
{
query: 'primitive:(u8)',
elems: [
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
typeFilter: 1,
},
],
foundElems: 1,
original: "primitive:(u8)",
returned: [],
userQuery: "primitive:(u8)",
error: null,
},
{
query: '(primitive:u8)',
elems: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: 1,
},
],
foundElems: 1,
original: "(primitive:u8)",
returned: [],
userQuery: "(primitive:u8)",
error: null,
},
{
query: '(u8,u8)',
elems: [
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
typeFilter: 1,
},
],
foundElems: 1,
original: "(u8,u8)",
returned: [],
userQuery: "(u8,u8)",
error: null,
},
{
query: '(u8<u8>)',
elems: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [
{
name: "u8",
fullPath: ["u8"],
pathWithoutLast: [],
pathLast: "u8",
generics: [],
typeFilter: -1,
},
],
typeFilter: -1,
},
],
foundElems: 1,
original: "(u8<u8>)",
returned: [],
userQuery: "(u8<u8>)",
error: null,
},
{
query: '()',
elems: [
{
name: "()",
fullPath: ["()"],
pathWithoutLast: [],
pathLast: "()",
generics: [],
typeFilter: 1,
},
],
foundElems: 1,
original: "()",
returned: [],
userQuery: "()",
error: null,
},
{
query: '(>',
elems: [],
foundElems: 0,
original: "(>",
returned: [],
userQuery: "(>",
error: "Unexpected `>` after `(`",
},
{
query: '(<',
elems: [],
foundElems: 0,
original: "(<",
returned: [],
userQuery: "(<",
error: "Found generics without a path",
},
{
query: '(a>',
elems: [],
foundElems: 0,
original: "(a>",
returned: [],
userQuery: "(a>",
error: "Unexpected `>` after `(`",
},
{
query: '(a<',
elems: [],
foundElems: 0,
original: "(a<",
returned: [],
userQuery: "(a<",
error: "Unclosed `<`",
},
{
query: '(a',
elems: [],
foundElems: 0,
original: "(a",
returned: [],
userQuery: "(a",
error: "Unclosed `(`",
},
{
query: '(',
elems: [],
foundElems: 0,
original: "(",
returned: [],
userQuery: "(",
error: "Unclosed `(`",
},
{
query: ')',
elems: [],
foundElems: 0,
original: ")",
returned: [],
userQuery: ")",
error: "Unexpected `)`",
},
{
query: '(a<b>',
elems: [],
foundElems: 0,
original: "(a<b>",
returned: [],
userQuery: "(a<b>",
error: "Unclosed `(`",
},
{
query: 'a<b>)',
elems: [],
foundElems: 0,
original: "a<b>)",
returned: [],
userQuery: "a<b>)",
error: "Unexpected `)` after `>`",
},
{
query: 'macro:(u8)',
elems: [],
foundElems: 0,
original: "macro:(u8)",
returned: [],
userQuery: "macro:(u8)",
error: "Invalid search type: primitive `()` and `macro` both specified",
},
];
@ -44,7 +44,7 @@ const PARSED = [
        original: "a,b(c)",
        returned: [],
        userQuery: "a,b(c)",
        error: "Unexpected `(`",
        error: "Expected `,`, `:` or `->`, found `(`",
    },
    {
        query: 'aaa,a',

tests/rustdoc-js/tuple-unit.js (new file, 80 lines)
@ -0,0 +1,80 @@
// exact-check

const EXPECTED = [
    {
        'query': '()',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'side_effect' },
            { 'path': 'tuple_unit', 'name': 'one' },
            { 'path': 'tuple_unit', 'name': 'two' },
            { 'path': 'tuple_unit', 'name': 'nest' },
        ],
        'in_args': [],
    },
    {
        'query': 'primitive:unit',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'side_effect' },
        ],
        'in_args': [],
    },
    {
        'query': 'primitive:tuple',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'one' },
            { 'path': 'tuple_unit', 'name': 'two' },
            { 'path': 'tuple_unit', 'name': 'nest' },
        ],
        'in_args': [],
    },
    {
        'query': '(P)',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'not_tuple' },
            { 'path': 'tuple_unit', 'name': 'one' },
            { 'path': 'tuple_unit', 'name': 'two' },
        ],
        'in_args': [],
    },
    {
        'query': '(P,)',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'one' },
            { 'path': 'tuple_unit', 'name': 'two' },
        ],
        'in_args': [],
    },
    {
        'query': '(P, P)',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'two' },
        ],
        'in_args': [],
    },
    {
        'query': '(P, ())',
        'returned': [],
        'in_args': [],
    },
    {
        'query': '(Q, ())',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'nest' },
        ],
        'in_args': [],
    },
    {
        'query': '(R)',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'nest' },
        ],
        'in_args': [],
    },
    {
        'query': '(u32)',
        'returned': [
            { 'path': 'tuple_unit', 'name': 'nest' },
        ],
        'in_args': [],
    },
];
tests/rustdoc-js/tuple-unit.rs (new file, 18 lines)
@ -0,0 +1,18 @@
pub struct P;
pub struct Q;
pub struct R<T>(T);

// Checks that tuple and unit both work
pub fn side_effect() { }

// Check a non-tuple
pub fn not_tuple() -> P { loop {} }

// Check a 1-tuple
pub fn one() -> (P,) { loop {} }

// Check a 2-tuple
pub fn two() -> (P,P) { loop {} }

// Check a nested tuple
pub fn nest() -> (Q, R<(u32,)>) { loop {} }
@ -1,5 +1,3 @@
// ignore-stage1

// Reject mixing cyclic structure and Drop when using TypedArena.
//
// (Compare against dropck-vec-cycle-checked.rs)
@ -1,5 +1,5 @@
error[E0597]: `arena` does not live long enough
  --> $DIR/dropck-tarena-cycle-checked.rs:118:7
  --> $DIR/dropck-tarena-cycle-checked.rs:116:7
   |
LL | let arena = TypedArena::default();
   | ----- binding `arena` declared here
@ -1,5 +1,3 @@
// ignore-stage1

// Check that an arena (TypedArena) cannot carry elements whose drop
// methods might access borrowed data of lifetime that does not
// strictly outlive the arena itself.
@ -1,5 +1,5 @@
error[E0597]: `arena` does not live long enough
  --> $DIR/dropck-tarena-unsound-drop.rs:43:7
  --> $DIR/dropck-tarena-unsound-drop.rs:41:7
   |
LL | let arena: TypedArena<C> = TypedArena::default();
   | ----- binding `arena` declared here
@ -1,6 +1,5 @@
// Test the `rustc::span_use_eq_ctxt` internal lint
// compile-flags: -Z unstable-options
// ignore-stage1

#![feature(rustc_private)]
#![deny(rustc::span_use_eq_ctxt)]
@ -1,11 +1,11 @@
error: use `.eq_ctxt()` instead of `.ctxt() == .ctxt()`
  --> $DIR/span_use_eq_ctxt.rs:13:5
  --> $DIR/span_use_eq_ctxt.rs:12:5
   |
LL | s.ctxt() == t.ctxt()
   | ^^^^^^^^^^^^^^^^^^^^
   |
note: the lint level is defined here
  --> $DIR/span_use_eq_ctxt.rs:6:9
  --> $DIR/span_use_eq_ctxt.rs:5:9
   |
LL | #![deny(rustc::span_use_eq_ctxt)]
   | ^^^^^^^^^^^^^^^^^^^^^^^
@ -1,6 +1,5 @@
// edition:2018
// compile-flags:--extern rustc_middle
// ignore-stage1

// Test that `--extern rustc_middle` fails with `rustc_private`.

@ -1,5 +1,5 @@
error[E0658]: use of unstable library feature 'rustc_private': this crate is being loaded from the sysroot, an unstable location; did you mean to load this crate from crates.io via `Cargo.toml` instead?
  --> $DIR/pathless-extern-unstable.rs:7:9
  --> $DIR/pathless-extern-unstable.rs:6:9
   |
LL | pub use rustc_middle;
   | ^^^^^^^^^^^^
@ -1,5 +1,4 @@
// rustc-env:CARGO_CRATE_NAME=rustc_dummy
// ignore-stage1

#![feature(rustc_private)]
#![crate_type = "lib"]
@ -1,5 +1,5 @@
error: diagnostic slug and crate name do not match
  --> $DIR/enforce_slug_naming.rs:23:8
  --> $DIR/enforce_slug_naming.rs:22:8
   |
LL | #[diag(compiletest_example, code = "E0123")]
   | ^^^^^^^^^^^^^^^^^^^
@ -1,4 +1,3 @@
// check-pass
// edition:2018
// aux-build:anon-params-edition-hygiene.rs

@ -8,6 +7,8 @@
#[macro_use]
extern crate anon_params_edition_hygiene;

generate_trait_2015!(u8);
generate_trait_2015_ident!(u8);
// FIXME: Edition hygiene doesn't work correctly with `tt`s in this case.
generate_trait_2015_tt!(u8); //~ ERROR expected one of `:`, `@`, or `|`, found `)`

fn main() {}
tests/ui/anon-params/anon-params-edition-hygiene.stderr (new file, 23 lines)
@ -0,0 +1,23 @@
error: expected one of `:`, `@`, or `|`, found `)`
  --> $DIR/anon-params-edition-hygiene.rs:12:1
   |
LL | generate_trait_2015_tt!(u8);
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected one of `:`, `@`, or `|`
   |
   = note: anonymous parameters are removed in the 2018 edition (see RFC 1685)
   = note: this error originates in the macro `generate_trait_2015_tt` (in Nightly builds, run with -Z macro-backtrace for more info)
help: if this is a `self` type, give it a parameter name
   |
LL | generate_trait_2015_tt!(self: u8);
   | +++++
help: if this is a parameter name, give it a type
   |
LL | generate_trait_2015_tt!(u8: TypeName);
   | ++++++++++
help: if this is a type, explicitly ignore the parameter name
   |
LL | generate_trait_2015_tt!(_: u8);
   | ++

error: aborting due to 1 previous error
@ -1,9 +1,18 @@
// edition:2015

#[macro_export]
macro_rules! generate_trait_2015 {
macro_rules! generate_trait_2015_ident {
    ($Type: ident) => {
        trait Trait {
        trait Trait1 {
            fn method($Type) {}
        }
    };
}

#[macro_export]
macro_rules! generate_trait_2015_tt {
    ($Type: tt) => {
        trait Trait2 {
            fn method($Type) {}
        }
    };
@ -26,3 +26,8 @@ macro_rules! consumes_async_raw {
macro_rules! passes_ident {
    ($i: ident) => ($i)
}

#[macro_export]
macro_rules! passes_tt {
    ($i: tt) => ($i)
}
@ -26,3 +26,8 @@ macro_rules! consumes_async_raw {
macro_rules! passes_ident {
    ($i: ident) => ($i)
}

#[macro_export]
macro_rules! passes_tt {
    ($i: tt) => ($i)
}
@ -19,6 +19,8 @@ pub fn check_async() {

    if passes_ident!(async) == 1 {} // OK
    if passes_ident!(r#async) == 1 {} // OK
    if passes_tt!(async) == 1 {} // OK
    if passes_tt!(r#async) == 1 {} // OK
    module::async(); // OK
    module::r#async(); // OK
}
@ -20,6 +20,8 @@ pub fn check_async() {

    if passes_ident!(async) == 1 {} // OK
    if passes_ident!(r#async) == 1 {} // OK
    if passes_tt!(async) == 1 {} // OK
    if passes_tt!(r#async) == 1 {} // OK
    one_async::async(); // OK
    one_async::r#async(); // OK
    two_async::async(); // OK
@ -19,6 +19,8 @@ pub fn check_async() {

    if passes_ident!(async) == 1 {} // OK
    if passes_ident!(r#async) == 1 {} // OK
    if passes_tt!(async) == 1 {} // OK
    if passes_tt!(r#async) == 1 {} // OK
    module::async(); // OK
    module::r#async(); // OK
}
@ -20,6 +20,8 @@ pub fn check_async() {

    if passes_ident!(async) == 1 {} // OK
    if passes_ident!(r#async) == 1 {} // OK
    if passes_tt!(async) == 1 {} // OK
    if passes_tt!(r#async) == 1 {} // OK
    // one_async::async(); // ERROR, unresolved name
    // one_async::r#async(); // ERROR, unresolved name
    two_async::async(); // OK
@ -21,8 +21,10 @@ pub fn check_async() {
    r#async = consumes_async_raw!(async); //~ ERROR no rules expected the token `async`
    r#async = consumes_async_raw!(r#async); // OK

    if passes_ident!(async) == 1 {}
    if passes_ident!(async) == 1 {} // FIXME: Edition hygiene bug, async here is 2018 and reserved
    if passes_ident!(r#async) == 1 {} // OK
    if passes_tt!(async) == 1 {} //~ ERROR macro expansion ends with an incomplete expression
    if passes_tt!(r#async) == 1 {} // OK
    module::async(); //~ ERROR expected identifier, found keyword `async`
    module::r#async(); // OK
@ -10,7 +10,7 @@ LL | let mut r#async = 1;
   | ++

error: expected identifier, found keyword `async`
  --> $DIR/edition-keywords-2018-2015-parsing.rs:26:13
  --> $DIR/edition-keywords-2018-2015-parsing.rs:28:13
   |
LL | module::async();
   | ^^^^^ expected identifier, found keyword
@ -52,17 +52,23 @@ LL | ($i: ident) => ($i)
   |
  ::: $DIR/edition-keywords-2018-2015-parsing.rs:24:8
   |
LL | if passes_ident!(async) == 1 {}
LL | if passes_ident!(async) == 1 {} // FIXME: Edition hygiene bug, async here is 2018 and reserved
   | -------------------- in this macro invocation

error: macro expansion ends with an incomplete expression: expected one of `move`, `|`, or `||`
  --> $DIR/edition-keywords-2018-2015-parsing.rs:26:24
   |
LL | if passes_tt!(async) == 1 {}
   | ^ expected one of `move`, `|`, or `||`

error[E0308]: mismatched types
  --> $DIR/edition-keywords-2018-2015-parsing.rs:29:33
  --> $DIR/edition-keywords-2018-2015-parsing.rs:31:33
   |
LL | let _recovery_witness: () = 0;
   | -- ^ expected `()`, found integer
   | |
   | expected due to this

error: aborting due to 6 previous errors
error: aborting due to 7 previous errors

For more information about this error, try `rustc --explain E0308`.
@ -18,6 +18,8 @@ pub fn check_async() {

    // if passes_ident!(async) == 1 {} // ERROR, reserved
    if passes_ident!(r#async) == 1 {} // OK
    // if passes_tt!(async) == 1 {} // ERROR, reserved
    if passes_tt!(r#async) == 1 {} // OK
    // one_async::async(); // ERROR, reserved
    one_async::r#async(); // OK
    // two_async::async(); // ERROR, reserved
@ -12,6 +12,13 @@ mod module {
    pub fn r#async() {}
}

macro_rules! local_passes_ident {
    ($i: ident) => ($i) //~ ERROR macro expansion ends with an incomplete expression
}
macro_rules! local_passes_tt {
    ($i: tt) => ($i) //~ ERROR macro expansion ends with an incomplete expression
}

pub fn check_async() {
    let mut async = 1; //~ ERROR expected identifier, found keyword `async`
    let mut r#async = 1; // OK
@ -21,8 +28,14 @@ pub fn check_async() {
    r#async = consumes_async_raw!(async); //~ ERROR no rules expected the token `async`
    r#async = consumes_async_raw!(r#async); // OK

    if passes_ident!(async) == 1 {}
    if passes_ident!(async) == 1 {} // FIXME: Edition hygiene bug, async here is 2018 and reserved
    if passes_ident!(r#async) == 1 {} // OK
    if passes_tt!(async) == 1 {} //~ ERROR macro expansion ends with an incomplete expression
    if passes_tt!(r#async) == 1 {} // OK
    if local_passes_ident!(async) == 1 {} // Error reported above in the macro
    if local_passes_ident!(r#async) == 1 {} // OK
    if local_passes_tt!(async) == 1 {} // Error reported above in the macro
    if local_passes_tt!(r#async) == 1 {} // OK
    module::async(); //~ ERROR expected identifier, found keyword `async`
    module::r#async(); // OK
@ -1,5 +1,5 @@
error: expected identifier, found keyword `async`
  --> $DIR/edition-keywords-2018-2018-parsing.rs:16:13
  --> $DIR/edition-keywords-2018-2018-parsing.rs:23:13
   |
LL | let mut async = 1;
   | ^^^^^ expected identifier, found keyword
@ -10,7 +10,7 @@ LL | let mut r#async = 1;
   | ++

error: expected identifier, found keyword `async`
  --> $DIR/edition-keywords-2018-2018-parsing.rs:26:13
  --> $DIR/edition-keywords-2018-2018-parsing.rs:39:13
   |
LL | module::async();
   | ^^^^^ expected identifier, found keyword
@ -21,7 +21,7 @@ LL | module::r#async();
   | ++

error: no rules expected the token `r#async`
  --> $DIR/edition-keywords-2018-2018-parsing.rs:20:31
  --> $DIR/edition-keywords-2018-2018-parsing.rs:27:31
   |
LL | r#async = consumes_async!(r#async);
   | ^^^^^^^ no rules expected this token in macro call
@ -33,7 +33,7 @@ LL | (async) => (1)
   | ^^^^^

error: no rules expected the token `async`
  --> $DIR/edition-keywords-2018-2018-parsing.rs:21:35
  --> $DIR/edition-keywords-2018-2018-parsing.rs:28:35
   |
LL | r#async = consumes_async_raw!(async);
   | ^^^^^ no rules expected this token in macro call
@ -50,19 +50,37 @@ error: macro expansion ends with an incomplete expression: expected one of `move
LL | ($i: ident) => ($i)
   | ^ expected one of `move`, `|`, or `||`
   |
  ::: $DIR/edition-keywords-2018-2018-parsing.rs:24:8
  ::: $DIR/edition-keywords-2018-2018-parsing.rs:31:8
   |
LL | if passes_ident!(async) == 1 {}
LL | if passes_ident!(async) == 1 {} // FIXME: Edition hygiene bug, async here is 2018 and reserved
   | -------------------- in this macro invocation

error: macro expansion ends with an incomplete expression: expected one of `move`, `|`, or `||`
  --> $DIR/edition-keywords-2018-2018-parsing.rs:33:24
   |
LL | if passes_tt!(async) == 1 {}
   | ^ expected one of `move`, `|`, or `||`

error: macro expansion ends with an incomplete expression: expected one of `move`, `|`, or `||`
  --> $DIR/edition-keywords-2018-2018-parsing.rs:16:23
   |
LL | ($i: ident) => ($i)
   | ^ expected one of `move`, `|`, or `||`

error: macro expansion ends with an incomplete expression: expected one of `move`, `|`, or `||`
  --> $DIR/edition-keywords-2018-2018-parsing.rs:19:20
   |
LL | ($i: tt) => ($i)
   | ^ expected one of `move`, `|`, or `||`

error[E0308]: mismatched types
  --> $DIR/edition-keywords-2018-2018-parsing.rs:29:33
  --> $DIR/edition-keywords-2018-2018-parsing.rs:42:33
   |
LL | let _recovery_witness: () = 0;
   | -- ^ expected `()`, found integer
   | |
   | expected due to this

error: aborting due to 6 previous errors
error: aborting due to 9 previous errors

For more information about this error, try `rustc --explain E0308`.
@ -18,6 +18,8 @@ pub fn check_async() {

    // if passes_ident!(async) == 1 {} // ERROR, reserved
    if passes_ident!(r#async) == 1 {} // OK
    // if passes_tt!(async) == 1 {} // ERROR, reserved
    if passes_tt!(r#async) == 1 {} // OK
    // one_async::async(); // ERROR, reserved
    // one_async::r#async(); // ERROR, unresolved name
    // two_async::async(); // ERROR, reserved
@ -22,7 +22,7 @@
//[thin]compile-flags: -C lto=thin
//[fat]compile-flags: -C lto=fat

#![feature(core_panic)]
#![feature(panic_internals)]

// (For some reason, reproducing the LTO issue requires pulling in std
// explicitly this way.)
@ -107,6 +107,15 @@ fn main() {
        //~^ WARN ambiguous wide pointer comparison
    }

    {
        macro_rules! cmp {
            ($a:tt, $b:tt) => { $a == $b }
            //~^ WARN ambiguous wide pointer comparison
        }

        cmp!(a, b);
    }

    {
        macro_rules! cmp {
            ($a:ident, $b:ident) => { $a == $b }
@ -421,7 +421,18 @@ LL | std::ptr::eq(*a, *b)
   | ~~~~~~~~~~~~~ ~ +

warning: ambiguous wide pointer comparison, the comparison includes metadata which may not be expected
  --> $DIR/wide_pointer_comparisons.rs:112:39
  --> $DIR/wide_pointer_comparisons.rs:112:33
   |
LL | ($a:tt, $b:tt) => { $a == $b }
   | ^^^^^^^^
   |
help: use `std::ptr::addr_eq` or untyped pointers to only compare their addresses
   |
LL | ($a:tt, $b:tt) => { std::ptr::addr_eq($a, $b) }
   | ++++++++++++++++++ ~ +

warning: ambiguous wide pointer comparison, the comparison includes metadata which may not be expected
  --> $DIR/wide_pointer_comparisons.rs:121:39
   |
LL | ($a:ident, $b:ident) => { $a == $b }
   | ^^^^^^^^
@ -436,7 +447,7 @@ LL | ($a:ident, $b:ident) => { std::ptr::addr_eq($a, $b) }
   | ++++++++++++++++++ ~ +

warning: ambiguous wide pointer comparison, the comparison includes metadata which may not be expected
  --> $DIR/wide_pointer_comparisons.rs:122:37
  --> $DIR/wide_pointer_comparisons.rs:131:37
   |
LL | ($a:expr, $b:expr) => { $a == $b }
   | ^^
@ -448,5 +459,5 @@ LL | cmp!(&a, &b);
   = help: use `std::ptr::addr_eq` or untyped pointers to only compare their addresses
   = note: this warning originates in the macro `cmp` (in Nightly builds, run with -Z macro-backtrace for more info)

warning: 37 warnings emitted
warning: 38 warnings emitted
@ -5,6 +5,9 @@
macro_rules! local_mac {
    ($ident:ident) => { let $ident = 42; }
}
macro_rules! local_mac_tt {
    ($tt:tt) => { let $tt = 42; }
}

fn main() {
    let x = 2.0.neg();
@ -23,6 +26,10 @@ fn main() {
    local_mac!(local_bar);
    local_bar.pow(2);
    //~^ ERROR can't call method `pow` on ambiguous numeric type `{integer}`

    local_mac_tt!(local_bar_tt);
    local_bar_tt.pow(2);
    //~^ ERROR can't call method `pow` on ambiguous numeric type `{integer}`
}

fn qux() {
@ -1,5 +1,5 @@
error[E0689]: can't call method `neg` on ambiguous numeric type `{float}`
  --> $DIR/method-on-ambiguous-numeric-type.rs:10:17
  --> $DIR/method-on-ambiguous-numeric-type.rs:13:17
   |
LL | let x = 2.0.neg();
   | ^^^
@ -10,7 +10,7 @@ LL | let x = 2.0_f32.neg();
   | ~~~~~~~

error[E0689]: can't call method `neg` on ambiguous numeric type `{float}`
  --> $DIR/method-on-ambiguous-numeric-type.rs:14:15
  --> $DIR/method-on-ambiguous-numeric-type.rs:17:15
   |
LL | let x = y.neg();
   | ^^^
@ -21,7 +21,7 @@ LL | let y: f32 = 2.0;
   | +++++

error[E0689]: can't call method `pow` on ambiguous numeric type `{integer}`
  --> $DIR/method-on-ambiguous-numeric-type.rs:19:26
  --> $DIR/method-on-ambiguous-numeric-type.rs:22:26
   |
LL | for i in 0..100 {
   | - you must specify a type for this binding, like `i32`
@ -29,7 +29,7 @@ LL | println!("{}", i.pow(2));
   | ^^^

error[E0689]: can't call method `pow` on ambiguous numeric type `{integer}`
  --> $DIR/method-on-ambiguous-numeric-type.rs:24:15
  --> $DIR/method-on-ambiguous-numeric-type.rs:27:15
   |
LL | local_bar.pow(2);
   | ^^^
@ -40,7 +40,18 @@ LL | ($ident:ident) => { let $ident: i32 = 42; }
   | +++++

error[E0689]: can't call method `pow` on ambiguous numeric type `{integer}`
  --> $DIR/method-on-ambiguous-numeric-type.rs:30:9
  --> $DIR/method-on-ambiguous-numeric-type.rs:31:18
   |
LL | local_bar_tt.pow(2);
   | ^^^
   |
help: you must specify a type for this binding, like `i32`
   |
LL | ($tt:tt) => { let $tt: i32 = 42; }
   | +++++

error[E0689]: can't call method `pow` on ambiguous numeric type `{integer}`
  --> $DIR/method-on-ambiguous-numeric-type.rs:37:9
   |
LL | bar.pow(2);
   | ^^^
@ -51,6 +62,6 @@ help: you must specify a type for this binding, like `i32`
LL | ($ident:ident) => { let $ident: i32 = 42; }
   | +++++

error: aborting due to 5 previous errors
error: aborting due to 6 previous errors

For more information about this error, try `rustc --explain E0689`.