Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-26 08:44:35 +00:00
Auto merge of #119864 - matthiaskrgr:rollup-mc2qz13, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

- #119448 (annotate-snippets: update to 0.10)
- #119813 (Silence some follow-up errors [2/x])
- #119836 (chore: remove unnecessary blank line)
- #119841 (Remove `DiagnosticBuilder::buffer`)
- #119842 (coverage: Add enums to accommodate other kinds of coverage mappings)
- #119845 (rint: further doc tweaks)
- #119852 (give const-err4 a more descriptive name)
- #119853 (rustfmt.toml: don't ignore just any tests path, only root one)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in: commit 62d7ed4a67
@@ -119,6 +119,16 @@ dependencies = [
"yansi-term",
]

[[package]]
name = "annotate-snippets"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a433302f833baa830c0092100c481c7ea768c5981a3c36f549517a502f246dd"
dependencies = [
"anstyle",
"unicode-width",
]

[[package]]
name = "ansi_term"
version = "0.12.1"
@@ -3771,7 +3781,7 @@ dependencies = [
name = "rustc_errors"
version = "0.0.0"
dependencies = [
"annotate-snippets",
"annotate-snippets 0.10.1",
"derive_setters",
"rustc_ast",
"rustc_ast_pretty",
@@ -3831,7 +3841,7 @@ dependencies = [
name = "rustc_fluent_macro"
version = "0.0.0"
dependencies = [
"annotate-snippets",
"annotate-snippets 0.10.1",
"fluent-bundle",
"fluent-syntax",
"proc-macro2",
@@ -4738,7 +4748,7 @@ dependencies = [
name = "rustfmt-nightly"
version = "1.7.0"
dependencies = [
"annotate-snippets",
"annotate-snippets 0.9.1",
"anyhow",
"bytecount",
"cargo_metadata 0.15.4",
@@ -5728,7 +5738,7 @@ version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aaf4bf7c184b8dfc7a4d3b90df789b1eb992ee42811cd115f32a7a1eb781058d"
dependencies = [
"annotate-snippets",
"annotate-snippets 0.9.1",
"anyhow",
"bstr",
"cargo-platform",
@@ -5859,9 +5869,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36"

[[package]]
name = "unicode-width"
version = "0.1.10"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-core",
@@ -2399,10 +2399,10 @@ mod error {
/// and we want only the best of those errors.
///
/// The `report_use_of_moved_or_uninitialized` function checks this map and replaces the
/// diagnostic (if there is one) if the `Place` of the error being reported is a prefix of the
/// `Place` of the previous most diagnostic. This happens instead of buffering the error. Once
/// all move errors have been reported, any diagnostics in this map are added to the buffer
/// to be emitted.
/// diagnostic (if there is one) if the `Place` of the error being reported is a prefix of
/// the `Place` of the previous most diagnostic. This happens instead of buffering the
/// error. Once all move errors have been reported, any diagnostics in this map are added
/// to the buffer to be emitted.
///
/// `BTreeMap` is used to preserve the order of insertions when iterating. This is necessary
/// when errors in the map are being re-added to the error buffer so that errors with the
@@ -2410,7 +2410,8 @@ mod error {
buffered_move_errors:
BTreeMap<Vec<MoveOutIndex>, (PlaceRef<'tcx>, DiagnosticBuilder<'tcx>)>,
buffered_mut_errors: FxIndexMap<Span, (DiagnosticBuilder<'tcx>, usize)>,
/// Diagnostics to be reported buffer.
/// Buffer of diagnostics to be reported. Uses `Diagnostic` rather than `DiagnosticBuilder`
/// because it has a mixture of error diagnostics and non-error diagnostics.
buffered: Vec<Diagnostic>,
/// Set to Some if we emit an error during borrowck
tainted_by_errors: Option<ErrorGuaranteed>,
@@ -2434,11 +2435,11 @@ mod error {
"diagnostic buffered but not emitted",
))
}
t.buffer(&mut self.buffered);
self.buffered.push(t.into_diagnostic());
}

pub fn buffer_non_error_diag(&mut self, t: DiagnosticBuilder<'_, ()>) {
t.buffer(&mut self.buffered);
self.buffered.push(t.into_diagnostic());
}

pub fn set_tainted_by_errors(&mut self, e: ErrorGuaranteed) {
@@ -2486,13 +2487,13 @@ mod error {
// Buffer any move errors that we collected and de-duplicated.
for (_, (_, diag)) in std::mem::take(&mut self.errors.buffered_move_errors) {
// We have already set tainted for this error, so just buffer it.
diag.buffer(&mut self.errors.buffered);
self.errors.buffered.push(diag.into_diagnostic());
}
for (_, (mut diag, count)) in std::mem::take(&mut self.errors.buffered_mut_errors) {
if count > 10 {
diag.note(format!("...and {} other attempted mutable borrows", count - 10));
}
diag.buffer(&mut self.errors.buffered);
self.errors.buffered.push(diag.into_diagnostic());
}

if !self.errors.buffered.is_empty() {
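The doc comment above relies on `BTreeMap` iterating in key order, so that de-duplicated move errors are re-buffered deterministically when the map is drained. A minimal standalone sketch of that property (illustrative only, not rustc code):

```rust
use std::collections::BTreeMap;

fn main() {
    // Keys are inserted out of order, but iteration is always sorted by key,
    // so draining the map re-emits buffered items in a stable order.
    let mut buffered: BTreeMap<Vec<usize>, &str> = BTreeMap::new();
    buffered.insert(vec![2], "error for place 2");
    buffered.insert(vec![0], "error for place 0");
    buffered.insert(vec![1], "error for place 1");

    let drained: Vec<_> = std::mem::take(&mut buffered).into_values().collect();
    assert_eq!(drained, ["error for place 0", "error for place 1", "error for place 2"]);
}
```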
@@ -1,4 +1,4 @@
use rustc_middle::mir::coverage::{CounterId, CovTerm, ExpressionId};
use rustc_middle::mir::coverage::{CodeRegion, CounterId, CovTerm, ExpressionId, MappingKind};

/// Must match the layout of `LLVMRustCounterKind`.
#[derive(Copy, Clone, Debug)]
@@ -149,6 +149,24 @@ pub struct CounterMappingRegion {
}

impl CounterMappingRegion {
pub(crate) fn from_mapping(
mapping_kind: &MappingKind,
local_file_id: u32,
code_region: &CodeRegion,
) -> Self {
let &CodeRegion { file_name: _, start_line, start_col, end_line, end_col } = code_region;
match *mapping_kind {
MappingKind::Code(term) => Self::code_region(
Counter::from_term(term),
local_file_id,
start_line,
start_col,
end_line,
end_col,
),
}
}

pub(crate) fn code_region(
counter: Counter,
file_id: u32,
@@ -4,7 +4,8 @@ use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::FxIndexSet;
use rustc_index::bit_set::BitSet;
use rustc_middle::mir::coverage::{
CodeRegion, CounterId, CovTerm, Expression, ExpressionId, FunctionCoverageInfo, Mapping, Op,
CodeRegion, CounterId, CovTerm, Expression, ExpressionId, FunctionCoverageInfo, Mapping,
MappingKind, Op,
};
use rustc_middle::ty::Instance;
use rustc_span::Symbol;
@@ -64,8 +65,8 @@ impl<'tcx> FunctionCoverageCollector<'tcx> {
// For each expression ID that is directly used by one or more mappings,
// mark it as not-yet-seen. This indicates that we expect to see a
// corresponding `ExpressionUsed` statement during MIR traversal.
for Mapping { term, .. } in &function_coverage_info.mappings {
if let &CovTerm::Expression(id) = term {
for term in function_coverage_info.mappings.iter().flat_map(|m| m.kind.terms()) {
if let CovTerm::Expression(id) = term {
expressions_seen.remove(id);
}
}
@@ -221,20 +222,21 @@ impl<'tcx> FunctionCoverage<'tcx> {
/// that will be used by `mapgen` when preparing for FFI.
pub(crate) fn counter_regions(
&self,
) -> impl Iterator<Item = (Counter, &CodeRegion)> + ExactSizeIterator {
) -> impl Iterator<Item = (MappingKind, &CodeRegion)> + ExactSizeIterator {
self.function_coverage_info.mappings.iter().map(move |mapping| {
let &Mapping { term, ref code_region } = mapping;
let counter = self.counter_for_term(term);
(counter, code_region)
let Mapping { kind, code_region } = mapping;
let kind =
kind.map_terms(|term| if self.is_zero_term(term) { CovTerm::Zero } else { term });
(kind, code_region)
})
}

fn counter_for_term(&self, term: CovTerm) -> Counter {
if is_zero_term(&self.counters_seen, &self.zero_expressions, term) {
Counter::ZERO
} else {
Counter::from_term(term)
if self.is_zero_term(term) { Counter::ZERO } else { Counter::from_term(term) }
}

fn is_zero_term(&self, term: CovTerm) -> bool {
is_zero_term(&self.counters_seen, &self.zero_expressions, term)
}
}

@@ -12,7 +12,6 @@ use rustc_hir::def_id::DefId;
use rustc_index::IndexVec;
use rustc_middle::bug;
use rustc_middle::mir;
use rustc_middle::mir::coverage::CodeRegion;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::def_id::DefIdSet;
use rustc_span::Symbol;
@@ -237,7 +236,7 @@ fn encode_mappings_for_function(
// Prepare file IDs for each filename, and prepare the mapping data so that
// we can pass it through FFI to LLVM.
for (file_name, counter_regions_for_file) in
&counter_regions.group_by(|(_counter, region)| region.file_name)
&counter_regions.group_by(|(_, region)| region.file_name)
{
// Look up the global file ID for this filename.
let global_file_id = global_file_table.global_file_id_for_file_name(file_name);
@@ -248,17 +247,12 @@ fn encode_mappings_for_function(

// For each counter/region pair in this function+file, convert it to a
// form suitable for FFI.
for (counter, region) in counter_regions_for_file {
let CodeRegion { file_name: _, start_line, start_col, end_line, end_col } = *region;

debug!("Adding counter {counter:?} to map for {region:?}");
mapping_regions.push(CounterMappingRegion::code_region(
counter,
for (mapping_kind, region) in counter_regions_for_file {
debug!("Adding counter {mapping_kind:?} to map for {region:?}");
mapping_regions.push(CounterMappingRegion::from_mapping(
&mapping_kind,
local_file_id.as_u32(),
start_line,
start_col,
end_line,
end_col,
region,
));
}
}
@@ -1,6 +1,6 @@
//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.

use rustc_errors::{Diagnostic, ErrorGuaranteed};
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
@@ -214,7 +214,7 @@ pub struct Checker<'mir, 'tcx> {
local_has_storage_dead: Option<BitSet<Local>>,

error_emitted: Option<ErrorGuaranteed>,
secondary_errors: Vec<Diagnostic>,
secondary_errors: Vec<DiagnosticBuilder<'tcx>>,
}

impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
@@ -272,14 +272,17 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> {
}

// If we got through const-checking without emitting any "primary" errors, emit any
// "secondary" errors if they occurred.
// "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
let secondary_errors = mem::take(&mut self.secondary_errors);
if self.error_emitted.is_none() {
for error in secondary_errors {
self.tcx.dcx().emit_diagnostic(error);
error.emit();
}
} else {
assert!(self.tcx.dcx().has_errors().is_some());
for error in secondary_errors {
error.cancel();
}
}
}

@@ -347,7 +350,7 @@ impl<'mir, 'tcx> Checker<'mir, 'tcx> {
self.error_emitted = Some(reported);
}

ops::DiagnosticImportance::Secondary => err.buffer(&mut self.secondary_errors),
ops::DiagnosticImportance::Secondary => self.secondary_errors.push(err),
}
}

@@ -5,7 +5,7 @@ edition = "2021"

[dependencies]
# tidy-alphabetical-start
annotate-snippets = "0.9"
annotate-snippets = "0.10"
derive_setters = "0.1.6"
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
@@ -12,8 +12,7 @@ use crate::{
CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage, Emitter, FluentBundle,
LazyFallbackBundle, Level, MultiSpan, Style, SubDiagnostic,
};
use annotate_snippets::display_list::{DisplayList, FormatOptions};
use annotate_snippets::snippet::*;
use annotate_snippets::{Annotation, AnnotationType, Renderer, Slice, Snippet, SourceAnnotation};
use rustc_data_structures::sync::Lrc;
use rustc_error_messages::FluentArgs;
use rustc_span::source_map::SourceMap;
@@ -190,11 +189,6 @@ impl AnnotateSnippetEmitter {
annotation_type: annotation_type_for_level(*level),
}),
footer: vec![],
opt: FormatOptions {
color: true,
anonymized_line_numbers: self.ui_testing,
margin: None,
},
slices: annotated_files
.iter()
.map(|(file_name, source, line_index, annotations)| {
@@ -222,7 +216,8 @@ impl AnnotateSnippetEmitter {
// FIXME(#59346): Figure out if we can _always_ print to stderr or not.
// `emitter.rs` has the `Destination` enum that lists various possible output
// destinations.
eprintln!("{}", DisplayList::from(snippet))
let renderer = Renderer::plain().anonymized_line_numbers(self.ui_testing);
eprintln!("{}", renderer.render(snippet))
}
// FIXME(#59346): Is it ok to return None if there's no source_map?
}
@@ -255,35 +255,13 @@ impl<'a, G: EmissionGuarantee> DiagnosticBuilder<'a, G> {
/// Stashes diagnostic for possible later improvement in a different,
/// later stage of the compiler. The diagnostic can be accessed with
/// the provided `span` and `key` through [`DiagCtxt::steal_diagnostic()`].
///
/// As with `buffer`, this is unless the dcx has disabled such buffering.
pub fn stash(self, span: Span, key: StashKey) {
if let Some((diag, dcx)) = self.into_diagnostic() {
dcx.stash_diagnostic(span, key, diag);
}
self.dcx.stash_diagnostic(span, key, self.into_diagnostic());
}

/// Converts the builder to a `Diagnostic` for later emission,
/// unless dcx has disabled such buffering.
fn into_diagnostic(mut self) -> Option<(Diagnostic, &'a DiagCtxt)> {
if self.dcx.inner.lock().flags.treat_err_as_bug.is_some() {
self.emit();
return None;
}

let diag = self.take_diag();

// Logging here is useful to help track down where in logs an error was
// actually emitted.
debug!("buffer: diag={:?}", diag);

Some((diag, self.dcx))
}

/// Buffers the diagnostic for later emission,
/// unless dcx has disabled such buffering.
pub fn buffer(self, buffered_diagnostics: &mut Vec<Diagnostic>) {
buffered_diagnostics.extend(self.into_diagnostic().map(|(diag, _)| diag));
/// Converts the builder to a `Diagnostic` for later emission.
pub fn into_diagnostic(mut self) -> Diagnostic {
self.take_diag()
}

/// Delay emission of this diagnostic as a bug.
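With `DiagnosticBuilder::buffer` removed, callers elsewhere in this PR switch to converting the builder into a `Diagnostic` (or keeping the builder itself) and emitting later. A minimal shape of that idiom, using simplified stand-in types rather than the real rustc API:

```rust
// Illustrative stand-ins only; the real types live in rustc_errors.
struct Diagnostic {
    msg: String,
}

struct DiagnosticBuilder {
    diag: Diagnostic,
}

impl DiagnosticBuilder {
    fn into_diagnostic(self) -> Diagnostic {
        self.diag
    }
}

fn main() {
    let mut buffered: Vec<Diagnostic> = Vec::new();

    // Old style (removed): builder.buffer(&mut buffered);
    // New style: convert explicitly and push.
    let builder = DiagnosticBuilder { diag: Diagnostic { msg: "something went wrong".into() } };
    buffered.push(builder.into_diagnostic());

    // Emit everything at the end.
    for diag in buffered {
        println!("error: {}", diag.msg);
    }
}
```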
@@ -981,6 +981,10 @@ impl DiagCtxt {

inner.emit_stashed_diagnostics();

if inner.treat_err_as_bug() {
return;
}

let warnings = match inner.deduplicated_warn_count {
0 => Cow::from(""),
1 => Cow::from("1 warning emitted"),
@@ -991,9 +995,6 @@ impl DiagCtxt {
1 => Cow::from("aborting due to 1 previous error"),
count => Cow::from(format!("aborting due to {count} previous errors")),
};
if inner.treat_err_as_bug() {
return;
}

match (errors.len(), warnings.len()) {
(0, 0) => return,
@@ -1168,7 +1169,8 @@ impl DiagCtxt {
let mut inner = self.inner.borrow_mut();

if loud && lint_level.is_error() {
inner.bump_err_count();
inner.err_count += 1;
inner.panic_if_treat_err_as_bug();
}

inner.emitter.emit_unused_externs(lint_level, unused_externs)
@@ -1255,7 +1257,7 @@ impl DiagCtxtInner {
}

fn emit_diagnostic(&mut self, mut diagnostic: Diagnostic) -> Option<ErrorGuaranteed> {
if matches!(diagnostic.level, Error | Fatal) && self.treat_err_as_bug() {
if matches!(diagnostic.level, Error | Fatal) && self.treat_next_err_as_bug() {
diagnostic.level = Bug;
}

@@ -1353,10 +1355,11 @@ impl DiagCtxtInner {
}
if diagnostic.is_error() {
if diagnostic.is_lint {
self.bump_lint_err_count();
self.lint_err_count += 1;
} else {
self.bump_err_count();
self.err_count += 1;
}
self.panic_if_treat_err_as_bug();

#[allow(deprecated)]
{
@@ -1447,16 +1450,6 @@ impl DiagCtxtInner {
panic::panic_any(DelayedBugPanic);
}

fn bump_lint_err_count(&mut self) {
self.lint_err_count += 1;
self.panic_if_treat_err_as_bug();
}

fn bump_err_count(&mut self) {
self.err_count += 1;
self.panic_if_treat_err_as_bug();
}

fn panic_if_treat_err_as_bug(&self) {
if self.treat_err_as_bug() {
match (
@@ -5,7 +5,8 @@ use termcolor::{BufferWriter, ColorChoice};
use super::*;

const INPUT: &str = include_str!("input.md");
const OUTPUT_PATH: &[&str] = &[env!("CARGO_MANIFEST_DIR"), "src","markdown","tests","output.stdout"];
const OUTPUT_PATH: &[&str] =
&[env!("CARGO_MANIFEST_DIR"), "src", "markdown", "tests", "output.stdout"];

const TEST_WIDTH: usize = 80;

@@ -34,7 +35,7 @@ quis dolor non venenatis. Aliquam ut. ";
fn test_wrapping_write() {
WIDTH.with(|w| w.set(TEST_WIDTH));
let mut buf = BufWriter::new(Vec::new());
let txt = TXT.replace("-\n","-").replace("_\n","_").replace('\n', " ").replace(" ", "");
let txt = TXT.replace("-\n", "-").replace("_\n", "_").replace('\n', " ").replace(" ", "");
write_wrapping(&mut buf, &txt, 0, None).unwrap();
write_wrapping(&mut buf, &txt, 4, None).unwrap();
write_wrapping(
@@ -8,7 +8,7 @@ proc-macro = true

[dependencies]
# tidy-alphabetical-start
annotate-snippets = "0.9"
annotate-snippets = "0.10"
fluent-bundle = "0.15.2"
fluent-syntax = "0.11"
proc-macro2 = "1"
@@ -1,7 +1,4 @@
use annotate_snippets::{
display_list::DisplayList,
snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation},
};
use annotate_snippets::{Annotation, AnnotationType, Renderer, Slice, Snippet, SourceAnnotation};
use fluent_bundle::{FluentBundle, FluentError, FluentResource};
use fluent_syntax::{
ast::{
@@ -179,10 +176,9 @@ pub(crate) fn fluent_messages(input: proc_macro::TokenStream) -> proc_macro::Tok
range: (pos.start, pos.end - 1),
}],
}],
opt: Default::default(),
};
let dl = DisplayList::from(snippet);
eprintln!("{dl}\n");
let renderer = Renderer::plain();
eprintln!("{}\n", renderer.render(snippet));
}

return failed(&crate_name);
@@ -300,13 +300,15 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
.expect("missing associated item");

if !assoc_item.visibility(tcx).is_accessible_from(def_scope, tcx) {
tcx.dcx()
let reported = tcx
.dcx()
.struct_span_err(
binding.span,
format!("{} `{}` is private", assoc_item.kind, binding.item_name),
)
.with_span_label(binding.span, format!("private {}", assoc_item.kind))
.emit();
self.set_tainted_by_errors(reported);
}
tcx.check_stability(assoc_item.def_id, Some(hir_ref_id), binding.span, None);

@@ -354,7 +354,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
);
err.span_label(name.span, format!("multiple `{name}` found"));
self.note_ambiguous_inherent_assoc_type(&mut err, candidates, span);
err.emit()
let reported = err.emit();
self.set_tainted_by_errors(reported);
reported
}

// FIXME(fmease): Heavily adapted from `rustc_hir_typeck::method::suggest`. Deduplicate.
@@ -843,7 +845,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
}

err.emit();
self.set_tainted_by_errors(err.emit());
}
}

@@ -390,6 +390,12 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
infer_args,
);

if let Err(err) = &arg_count.correct
&& let Some(reported) = err.reported
{
self.set_tainted_by_errors(reported);
}

// Skip processing if type has no generic parameters.
// Traits always have `Self` as a generic parameter, which means they will not return early
// here and so associated type bindings will be handled regardless of whether there are any
@@ -568,6 +574,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
span,
modifier: constness.as_str(),
});
self.set_tainted_by_errors(e);
arg_count.correct =
Err(GenericArgCountMismatch { reported: Some(e), invalid_args: vec![] });
}
@@ -966,7 +973,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
}
}
}
err.emit()
let reported = err.emit();
self.set_tainted_by_errors(reported);
reported
}

// Search for a bound on a type parameter which includes the associated item
@@ -1043,6 +1052,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
span,
binding,
);
self.set_tainted_by_errors(reported);
return Err(reported);
};
debug!(?bound);
@@ -1120,6 +1130,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
));
}
let reported = err.emit();
self.set_tainted_by_errors(reported);
if !where_bounds.is_empty() {
return Err(reported);
}
@@ -1374,6 +1385,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
assoc_ident.name,
)
};
self.set_tainted_by_errors(reported);
return Err(reported);
}
};
@@ -1616,12 +1628,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
let kind = tcx.def_kind_descr(kind, item);
let msg = format!("{kind} `{name}` is private");
let def_span = tcx.def_span(item);
tcx.dcx()
let reported = tcx
.dcx()
.struct_span_err(span, msg)
.with_code(rustc_errors::error_code!(E0624))
.with_span_label(span, format!("private {kind}"))
.with_span_label(def_span, format!("{kind} defined here"))
.emit();
self.set_tainted_by_errors(reported);
}
tcx.check_stability(item, Some(block), span, None);
}
@@ -1862,7 +1876,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
err.span_label(span, format!("not allowed on {what}"));
}
extend(&mut err);
err.emit();
self.set_tainted_by_errors(err.emit());
emitted = true;
}

@@ -2184,7 +2198,9 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
{
err.span_note(impl_.self_ty.span, "not a concrete type");
}
Ty::new_error(tcx, err.emit())
let reported = err.emit();
self.set_tainted_by_errors(reported);
Ty::new_error(tcx, reported)
} else {
ty
}
@@ -2586,7 +2602,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
);
}

diag.emit();
self.set_tainted_by_errors(diag.emit());
}

// Find any late-bound regions declared in return type that do
@@ -2686,7 +2702,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
err.note("consider introducing a named lifetime parameter");
}

err.emit();
self.set_tainted_by_errors(err.emit());
}
}

@@ -2725,7 +2741,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
// error.
let r = derived_region_bounds[0];
if derived_region_bounds[1..].iter().any(|r1| r != *r1) {
tcx.dcx().emit_err(AmbiguousLifetimeBound { span });
self.set_tainted_by_errors(tcx.dcx().emit_err(AmbiguousLifetimeBound { span }));
}
Some(r)
}
@@ -116,7 +116,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
for more information on them, visit \
<https://doc.rust-lang.org/reference/special-types-and-traits.html#auto-traits>",
);
err.emit();
self.set_tainted_by_errors(err.emit());
}

if regular_traits.is_empty() && auto_traits.is_empty() {
@@ -127,6 +127,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.map(|trait_ref| tcx.def_span(trait_ref));
let reported =
tcx.dcx().emit_err(TraitObjectDeclaredWithNoTraits { span, trait_alias_span });
self.set_tainted_by_errors(reported);
return Ty::new_error(tcx, reported);
}

@@ -290,7 +291,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {

if references_self {
let def_id = i.bottom().0.def_id();
struct_span_code_err!(
let reported = struct_span_code_err!(
tcx.dcx(),
i.bottom().1,
E0038,
@@ -303,6 +304,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
.error_msg(),
)
.emit();
self.set_tainted_by_errors(reported);
}

ty::ExistentialTraitRef { def_id: trait_ref.def_id, args }
@@ -389,6 +391,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
} else {
err.emit()
};
self.set_tainted_by_errors(e);
ty::Region::new_error(tcx, e)
})
}
@@ -35,6 +35,7 @@ use rustc_target::spec::abi;
use rustc_trait_selection::infer::InferCtxtExt;
use rustc_trait_selection::traits::error_reporting::suggestions::NextTypeParamName;
use rustc_trait_selection::traits::ObligationCtxt;
use std::cell::Cell;
use std::iter;
use std::ops::Bound;

@@ -119,6 +120,7 @@ pub fn provide(providers: &mut Providers) {
pub struct ItemCtxt<'tcx> {
tcx: TyCtxt<'tcx>,
item_def_id: LocalDefId,
tainted_by_errors: Cell<Option<ErrorGuaranteed>>,
}

///////////////////////////////////////////////////////////////////////////
@@ -343,7 +345,7 @@ fn bad_placeholder<'tcx>(

impl<'tcx> ItemCtxt<'tcx> {
pub fn new(tcx: TyCtxt<'tcx>, item_def_id: LocalDefId) -> ItemCtxt<'tcx> {
ItemCtxt { tcx, item_def_id }
ItemCtxt { tcx, item_def_id, tainted_by_errors: Cell::new(None) }
}

pub fn to_ty(&self, ast_ty: &hir::Ty<'_>) -> Ty<'tcx> {
@@ -357,6 +359,13 @@ impl<'tcx> ItemCtxt<'tcx> {
pub fn node(&self) -> hir::Node<'tcx> {
self.tcx.hir_node(self.hir_id())
}

fn check_tainted_by_errors(&self) -> Result<(), ErrorGuaranteed> {
match self.tainted_by_errors.get() {
Some(err) => Err(err),
None => Ok(()),
}
}
}

impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
@@ -492,8 +501,8 @@ impl<'tcx> AstConv<'tcx> for ItemCtxt<'tcx> {
ty.ty_adt_def()
}

fn set_tainted_by_errors(&self, _: ErrorGuaranteed) {
// There's no obvious place to track this, so just let it go.
fn set_tainted_by_errors(&self, err: ErrorGuaranteed) {
self.tainted_by_errors.set(Some(err));
}

fn record_ty(&self, _hir_id: hir::HirId, _ty: Ty<'tcx>, _span: Span) {
@@ -513,7 +513,11 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<Ty
bug!("unexpected sort of node in type_of(): {:?}", x);
}
};
if let Err(e) = icx.check_tainted_by_errors() {
ty::EarlyBinder::bind(Ty::new_error(tcx, e))
} else {
ty::EarlyBinder::bind(output)
}
}

pub(super) fn type_of_opaque(
@@ -802,7 +802,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.explicit_item_bounds(def_id)
.iter_instantiated_copied(self.tcx, args)
.find_map(|(p, s)| get_future_output(p.as_predicate(), s))?,
ty::Error(_) => return None,
ty::Error(_) => return Some(ret_ty),
_ => span_bug!(
closure_span,
"async fn coroutine return type not an inference variable: {ret_ty}"
@@ -498,14 +498,14 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
// order when emitting them.
let err =
self.tcx().dcx().struct_span_err(span, format!("user args: {user_args:?}"));
err.buffer(&mut errors_buffer);
errors_buffer.push(err);
}
}

if !errors_buffer.is_empty() {
errors_buffer.sort_by_key(|diag| diag.span.primary_span());
for diag in errors_buffer {
self.tcx().dcx().emit_diagnostic(diag);
for err in errors_buffer {
err.emit();
}
}
}
@@ -82,7 +82,7 @@ pub(crate) fn parse_cfg(dcx: &DiagCtxt, cfgs: Vec<String>) -> Cfg {
Ok(..) => {}
Err(err) => err.cancel(),
},
Err(errs) => drop(errs),
Err(errs) => errs.into_iter().for_each(|err| err.cancel()),
}

// If the user tried to use a key="value" flag, but is missing the quotes, provide
@@ -129,9 +129,12 @@ pub(crate) fn parse_check_cfg(dcx: &DiagCtxt, specs: Vec<String>) -> CheckCfg {
error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
};

let Ok(mut parser) = maybe_new_parser_from_source_str(&sess, filename, s.to_string())
else {
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) {
Ok(parser) => parser,
Err(errs) => {
errs.into_iter().for_each(|err| err.cancel());
expected_error();
}
};

let meta_item = match parser.parse_meta_item() {
@@ -160,16 +160,34 @@ pub struct Expression {

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct Mapping {
pub code_region: CodeRegion,
pub enum MappingKind {
/// Associates a normal region of code with a counter/expression/zero.
Code(CovTerm),
}

/// Indicates whether this mapping uses a counter value, expression value,
/// or zero value.
///
/// FIXME: When we add support for mapping kinds other than `Code`
/// (e.g. branch regions, expansion regions), replace this with a dedicated
/// mapping-kind enum.
pub term: CovTerm,
impl MappingKind {
/// Iterator over all coverage terms in this mapping kind.
pub fn terms(&self) -> impl Iterator<Item = CovTerm> {
let one = |a| std::iter::once(a);
match *self {
Self::Code(term) => one(term),
}
}

/// Returns a copy of this mapping kind, in which all coverage terms have
/// been replaced with ones returned by the given function.
pub fn map_terms(&self, map_fn: impl Fn(CovTerm) -> CovTerm) -> Self {
match *self {
Self::Code(term) => Self::Code(map_fn(term)),
}
}
}

#[derive(Clone, Debug)]
#[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
pub struct Mapping {
pub kind: MappingKind,
pub code_region: CodeRegion,
}

/// Stores per-function coverage information attached to a `mir::Body`,
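The `map_terms` helper above is what lets later stages rewrite every term in a mapping kind uniformly (for example, zeroing out unused terms, as `map_data.rs` does elsewhere in this PR). A minimal standalone sketch of the pattern, using simplified stand-in types rather than the real rustc ones:

```rust
// Simplified stand-ins for rustc's CovTerm / MappingKind (illustrative only).
#[derive(Clone, Copy, Debug, PartialEq)]
enum CovTerm {
    Counter(u32),
    Zero,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum MappingKind {
    Code(CovTerm),
    // Future variants (branch regions, expansion regions, ...) would slot in here.
}

impl MappingKind {
    // Rebuild the kind with every contained term passed through `map_fn`.
    fn map_terms(&self, map_fn: impl Fn(CovTerm) -> CovTerm) -> Self {
        match *self {
            Self::Code(term) => Self::Code(map_fn(term)),
        }
    }
}

fn main() {
    let kind = MappingKind::Code(CovTerm::Counter(3));
    // Pretend counter 3 was never seen, so its term collapses to zero.
    let zeroed = kind.map_terms(|_| CovTerm::Zero);
    assert_eq!(zeroed, MappingKind::Code(CovTerm::Zero));
}
```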
@@ -493,8 +493,8 @@ fn write_function_coverage_info(
for (id, expression) in expressions.iter_enumerated() {
writeln!(w, "{INDENT}coverage {id:?} => {expression:?};")?;
}
for coverage::Mapping { term, code_region } in mappings {
writeln!(w, "{INDENT}coverage {term:?} => {code_region:?};")?;
for coverage::Mapping { kind, code_region } in mappings {
writeln!(w, "{INDENT}coverage {kind:?} => {code_region:?};")?;
}
writeln!(w)?;

@@ -9,7 +9,7 @@ mod tests;

use self::counters::{BcbCounter, CoverageCounters};
use self::graph::{BasicCoverageBlock, CoverageGraph};
use self::spans::CoverageSpans;
use self::spans::{BcbMapping, BcbMappingKind, CoverageSpans};

use crate::MirPass;

@@ -141,22 +141,21 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
let file_name =
Symbol::intern(&source_file.name.for_codegen(self.tcx.sess).to_string_lossy());

coverage_spans
.bcbs_with_coverage_spans()
// For each BCB with spans, get a coverage term for its counter.
.map(|(bcb, spans)| {
let term = coverage_counters
let term_for_bcb = |bcb| {
coverage_counters
.bcb_counter(bcb)
.expect("all BCBs with spans were given counters")
.as_term();
(term, spans)
})
// Flatten the spans into individual term/span pairs.
.flat_map(|(term, spans)| spans.iter().map(move |&span| (term, span)))
// Convert each span to a code region, and create the final mapping.
.filter_map(|(term, span)| {
.as_term()
};

coverage_spans
.all_bcb_mappings()
.filter_map(|&BcbMapping { kind: bcb_mapping_kind, span }| {
let kind = match bcb_mapping_kind {
BcbMappingKind::Code(bcb) => MappingKind::Code(term_for_bcb(bcb)),
};
let code_region = make_code_region(source_map, file_name, span, body_span)?;
Some(Mapping { term, code_region })
Some(Mapping { kind, code_region })
})
.collect::<Vec<_>>()
}
@@ -1,5 +1,5 @@
use rustc_data_structures::graph::WithNumNodes;
use rustc_index::IndexVec;
use rustc_index::bit_set::BitSet;
use rustc_middle::mir;
use rustc_span::{BytePos, Span, DUMMY_SP};

@@ -8,9 +8,21 @@ use crate::coverage::ExtractedHirInfo;

mod from_mir;

#[derive(Clone, Copy, Debug)]
pub(super) enum BcbMappingKind {
/// Associates an ordinary executable code span with its corresponding BCB.
Code(BasicCoverageBlock),
}

#[derive(Debug)]
pub(super) struct BcbMapping {
pub(super) kind: BcbMappingKind,
pub(super) span: Span,
}

pub(super) struct CoverageSpans {
/// Map from BCBs to their list of coverage spans.
bcb_to_spans: IndexVec<BasicCoverageBlock, Vec<Span>>,
bcb_has_mappings: BitSet<BasicCoverageBlock>,
mappings: Vec<BcbMapping>,
}

impl CoverageSpans {
@@ -23,36 +35,42 @@ impl CoverageSpans {
hir_info: &ExtractedHirInfo,
basic_coverage_blocks: &CoverageGraph,
) -> Option<Self> {
let mut mappings = vec![];

let coverage_spans = CoverageSpansGenerator::generate_coverage_spans(
mir_body,
hir_info,
basic_coverage_blocks,
);
mappings.extend(coverage_spans.into_iter().map(|CoverageSpan { bcb, span, .. }| {
// Each span produced by the generator represents an ordinary code region.
BcbMapping { kind: BcbMappingKind::Code(bcb), span }
}));

if coverage_spans.is_empty() {
if mappings.is_empty() {
return None;
}

// Group the coverage spans by BCB, with the BCBs in sorted order.
let mut bcb_to_spans = IndexVec::from_elem_n(Vec::new(), basic_coverage_blocks.num_nodes());
for CoverageSpan { bcb, span, .. } in coverage_spans {
bcb_to_spans[bcb].push(span);
// Identify which BCBs have one or more mappings.
let mut bcb_has_mappings = BitSet::new_empty(basic_coverage_blocks.num_nodes());
let mut insert = |bcb| {
bcb_has_mappings.insert(bcb);
};
for &BcbMapping { kind, span: _ } in &mappings {
match kind {
BcbMappingKind::Code(bcb) => insert(bcb),
}
}

Some(Self { bcb_to_spans })
Some(Self { bcb_has_mappings, mappings })
}

pub(super) fn bcb_has_coverage_spans(&self, bcb: BasicCoverageBlock) -> bool {
!self.bcb_to_spans[bcb].is_empty()
self.bcb_has_mappings.contains(bcb)
}

pub(super) fn bcbs_with_coverage_spans(
&self,
) -> impl Iterator<Item = (BasicCoverageBlock, &[Span])> {
self.bcb_to_spans.iter_enumerated().filter_map(|(bcb, spans)| {
// Only yield BCBs that have at least one coverage span.
(!spans.is_empty()).then_some((bcb, spans.as_slice()))
})
pub(super) fn all_bcb_mappings(&self) -> impl Iterator<Item = &BcbMapping> {
self.mappings.iter()
}
}

@@ -7,7 +7,7 @@ use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
use rustc_errors::{error_code, Applicability, DiagCtxt, Diagnostic, StashKey};
use rustc_errors::{error_code, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
use rustc_lexer::unescape::{self, EscapeError, Mode};
use rustc_lexer::{Base, DocStyle, RawStrError};
use rustc_lexer::{Cursor, LiteralKind};
@@ -42,12 +42,12 @@ pub struct UnmatchedDelim {
pub candidate_span: Option<Span>,
}

pub(crate) fn parse_token_trees<'a>(
sess: &'a ParseSess,
mut src: &'a str,
pub(crate) fn parse_token_trees<'sess, 'src>(
sess: &'sess ParseSess,
mut src: &'src str,
mut start_pos: BytePos,
override_span: Option<Span>,
) -> Result<TokenStream, Vec<Diagnostic>> {
) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
// Skip `#!`, if present.
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
@@ -76,13 +76,13 @@ pub(crate) fn parse_token_trees<'a>(
let mut buffer = Vec::with_capacity(1);
for unmatched in unmatched_delims {
if let Some(err) = make_unclosed_delims_error(unmatched, sess) {
err.buffer(&mut buffer);
buffer.push(err);
}
}
if let Err(errs) = res {
// Add unclosing delimiter or diff marker errors
for err in errs {
err.buffer(&mut buffer);
buffer.push(err);
}
}
Err(buffer)
@@ -90,16 +90,16 @@ pub(crate) fn parse_token_trees<'a>(
}
}

struct StringReader<'a> {
sess: &'a ParseSess,
struct StringReader<'sess, 'src> {
sess: &'sess ParseSess,
/// Initial position, read-only.
start_pos: BytePos,
/// The absolute offset within the source_map of the current character.
pos: BytePos,
/// Source text to tokenize.
src: &'a str,
src: &'src str,
/// Cursor for getting lexer tokens.
cursor: Cursor<'a>,
cursor: Cursor<'src>,
override_span: Option<Span>,
/// When a "unknown start of token: \u{a0}" has already been emitted earlier
/// in this file, it's safe to treat further occurrences of the non-breaking
@@ -107,8 +107,8 @@ struct StringReader<'a> {
nbsp_is_whitespace: bool,
}

impl<'a> StringReader<'a> {
pub fn dcx(&self) -> &'a DiagCtxt {
impl<'sess, 'src> StringReader<'sess, 'src> {
pub fn dcx(&self) -> &'sess DiagCtxt {
&self.sess.dcx
}

@@ -526,7 +526,7 @@ impl<'a> StringReader<'a> {

/// Slice of the source text from `start` up to but excluding `self.pos`,
/// meaning the slice does not include the character `self.ch`.
fn str_from(&self, start: BytePos) -> &'a str {
fn str_from(&self, start: BytePos) -> &'src str {
self.str_from_to(start, self.pos)
}

@@ -537,12 +537,12 @@ impl<'a> StringReader<'a> {
}

/// Slice of the source text spanning from `start` up to but excluding `end`.
fn str_from_to(&self, start: BytePos, end: BytePos) -> &'a str {
fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
&self.src[self.src_index(start)..self.src_index(end)]
}

/// Slice of the source text spanning from `start` until the end
fn str_from_to_end(&self, start: BytePos) -> &'a str {
fn str_from_to_end(&self, start: BytePos) -> &'src str {
&self.src[self.src_index(start)..]
}

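The `'a` → `'sess`/`'src` split above exists so that string slices borrowed from the source text no longer have to share a lifetime with the parse-session borrow. A minimal sketch of the idea, using hypothetical stand-in types rather than the real lexer:

```rust
// Hypothetical illustration: two independent borrows with separate lifetimes.
struct Session {
    name: String,
}

struct Reader<'sess, 'src> {
    sess: &'sess Session,
    src: &'src str,
}

impl<'sess, 'src> Reader<'sess, 'src> {
    // The returned slice borrows only from the source text, not from the session,
    // which a single combined lifetime parameter could not express.
    fn str_from(&self, start: usize) -> &'src str {
        &self.src[start..]
    }
}

fn main() {
    let sess = Session { name: "demo".to_string() };
    let src = String::from("fn main() {}");
    let reader = Reader { sess: &sess, src: &src };
    assert_eq!(reader.str_from(3), "main() {}");
    let _ = &reader.sess.name;
}
```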
@@ -8,18 +8,18 @@ use rustc_ast_pretty::pprust::token_to_string;
use rustc_errors::{Applicability, PErr};
use rustc_span::symbol::kw;

pub(super) struct TokenTreesReader<'a> {
string_reader: StringReader<'a>,
pub(super) struct TokenTreesReader<'sess, 'src> {
string_reader: StringReader<'sess, 'src>,
/// The "next" token, which has been obtained from the `StringReader` but
/// not yet handled by the `TokenTreesReader`.
token: Token,
diag_info: TokenTreeDiagInfo,
}

impl<'a> TokenTreesReader<'a> {
impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
pub(super) fn parse_all_token_trees(
string_reader: StringReader<'a>,
) -> (TokenStream, Result<(), Vec<PErr<'a>>>, Vec<UnmatchedDelim>) {
string_reader: StringReader<'sess, 'src>,
) -> (TokenStream, Result<(), Vec<PErr<'sess>>>, Vec<UnmatchedDelim>) {
let mut tt_reader = TokenTreesReader {
string_reader,
token: Token::dummy(),
@@ -35,7 +35,7 @@ impl<'a> TokenTreesReader<'a> {
fn parse_token_trees(
&mut self,
is_delimited: bool,
) -> (Spacing, TokenStream, Result<(), Vec<PErr<'a>>>) {
) -> (Spacing, TokenStream, Result<(), Vec<PErr<'sess>>>) {
// Move past the opening delimiter.
let (_, open_spacing) = self.bump(false);

@@ -71,7 +71,7 @@ impl<'a> TokenTreesReader<'a> {
}
}

fn eof_err(&mut self) -> PErr<'a> {
fn eof_err(&mut self) -> PErr<'sess> {
let msg = "this file contains an unclosed delimiter";
let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
for &(_, sp) in &self.diag_info.open_braces {
@@ -99,7 +99,7 @@ impl<'a> TokenTreesReader<'a> {
fn parse_token_tree_open_delim(
&mut self,
open_delim: Delimiter,
) -> Result<TokenTree, Vec<PErr<'a>>> {
) -> Result<TokenTree, Vec<PErr<'sess>>> {
// The span for beginning of the delimited section
let pre_span = self.token.span;

@@ -229,7 +229,11 @@ impl<'a> TokenTreesReader<'a> {
(this_tok, this_spacing)
}

fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
fn unclosed_delim_err(
&mut self,
tts: TokenStream,
mut errs: Vec<PErr<'sess>>,
) -> Vec<PErr<'sess>> {
// If there are unclosed delims, see if there are diff markers and if so, point them
// out instead of complaining about the unclosed delims.
let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
@@ -285,7 +289,7 @@ impl<'a> TokenTreesReader<'a> {
return errs;
}

fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'sess> {
// An unexpected closing delimiter (i.e., there is no
// matching opening delimiter).
let token_str = token_to_string(&self.token);
@@ -337,7 +337,7 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
];

pub(super) fn check_for_substitution(
reader: &StringReader<'_>,
reader: &StringReader<'_, '_>,
pos: BytePos,
ch: char,
count: usize,
@@ -19,7 +19,7 @@ use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{AttrItem, Attribute, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
use rustc_errors::{DiagnosticBuilder, FatalError, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};

@@ -45,14 +45,13 @@ rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
/// A variant of 'panictry!' that works on a `Vec<Diagnostic>` instead of a single
/// `DiagnosticBuilder`.
macro_rules! panictry_buffer {
($handler:expr, $e:expr) => {{
use rustc_errors::FatalError;
($e:expr) => {{
use std::result::Result::{Err, Ok};
match $e {
Ok(e) => e,
Err(errs) => {
for e in errs {
$handler.emit_diagnostic(e);
e.emit();
}
FatalError.raise()
}
@@ -100,36 +99,41 @@ pub fn parse_stream_from_source_str(

/// Creates a new parser from a source string.
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
panictry_buffer!(&sess.dcx, maybe_new_parser_from_source_str(sess, name, source))
panictry_buffer!(maybe_new_parser_from_source_str(sess, name, source))
}

/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
/// token stream.
/// token stream; these must be consumed via `emit`, `cancel`, etc., otherwise a panic will occur
/// when they are dropped.
pub fn maybe_new_parser_from_source_str(
sess: &ParseSess,
name: FileName,
source: String,
) -> Result<Parser<'_>, Vec<Diagnostic>> {
) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))
}

/// Creates a new parser, handling errors as appropriate if the file doesn't exist.
/// If a span is given, that is used on an error as the source of the problem.
/// Creates a new parser, aborting if the file doesn't exist. If a span is given, that is used on
/// an error as the source of the problem.
pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
source_file_to_parser(sess, file_to_source_file(sess, path, sp))
let source_file = sess.source_map().load_file(path).unwrap_or_else(|e| {
let msg = format!("couldn't read {}: {}", path.display(), e);
let mut err = sess.dcx.struct_fatal(msg);
if let Some(sp) = sp {
err.span(sp);
}
err.emit();
});

panictry_buffer!(maybe_source_file_to_parser(sess, source_file))
}

/// Given a session and a `source_file`, returns a parser.
fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
panictry_buffer!(&sess.dcx, maybe_source_file_to_parser(sess, source_file))
}

/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing the
/// initial token stream.
/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing
/// the initial token stream.
fn maybe_source_file_to_parser(
sess: &ParseSess,
source_file: Lrc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diagnostic>> {
) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
let end_pos = source_file.end_position();
let stream = maybe_file_to_stream(sess, source_file, None)?;
let mut parser = stream_to_parser(sess, stream, None);
@@ -142,52 +146,22 @@ fn maybe_source_file_to_parser(

// Base abstractions

/// Given a session and a path and an optional span (for error reporting),
/// add the path to the session's source_map and return the new source_file or
/// error when a file can't be read.
fn try_file_to_source_file(
sess: &ParseSess,
path: &Path,
spanopt: Option<Span>,
) -> Result<Lrc<SourceFile>, Diagnostic> {
sess.source_map().load_file(path).map_err(|e| {
let msg = format!("couldn't read {}: {}", path.display(), e);
let mut diag = Diagnostic::new(Level::Fatal, msg);
if let Some(sp) = spanopt {
diag.span(sp);
}
diag
})
}

/// Given a session and a path and an optional span (for error reporting),
/// adds the path to the session's `source_map` and returns the new `source_file`.
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> {
match try_file_to_source_file(sess, path, spanopt) {
Ok(source_file) => source_file,
Err(d) => {
sess.dcx.emit_diagnostic(d);
FatalError.raise();
}
}
}

/// Given a `source_file`, produces a sequence of token trees.
pub fn source_file_to_stream(
sess: &ParseSess,
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
) -> TokenStream {
panictry_buffer!(&sess.dcx, maybe_file_to_stream(sess, source_file, override_span))
panictry_buffer!(maybe_file_to_stream(sess, source_file, override_span))
}

/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
/// parsing the token stream.
pub fn maybe_file_to_stream(
sess: &ParseSess,
fn maybe_file_to_stream<'sess>(
sess: &'sess ParseSess,
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
) -> Result<TokenStream, Vec<Diagnostic>> {
) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
sess.dcx.bug(format!(
"cannot lex `source_file` without source: {}",
@@ -552,7 +552,6 @@ fn handle_reserve(result: Result<(), TryReserveError>) {
// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
// an extra guard for this in case we're running on a platform which can use
// all 4GB in user-space, e.g., PAE or x32.

#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
if usize::BITS < 64 && alloc_size > isize::MAX as usize {
@@ -55,12 +55,7 @@ fn test_btree_map() {

require_send_sync(async {
let _v = None::<
alloc::collections::btree_map::ExtractIf<
'_,
&u32,
&u32,
fn(&&u32, &mut &u32) -> bool,
>,
alloc::collections::btree_map::ExtractIf<'_, &u32, &u32, fn(&&u32, &mut &u32) -> bool>,
>;
async {}.await;
});
@@ -1,7 +1,7 @@
use core::alloc::{Allocator, Layout};
use core::{assert_eq, assert_ne};
use core::num::NonZeroUsize;
use core::ptr::NonNull;
use core::{assert_eq, assert_ne};
use std::alloc::System;
use std::assert_matches::assert_matches;
use std::borrow::Cow;
@@ -1212,7 +1212,7 @@ fn test_in_place_specialization_step_up_down() {
assert_eq!(sink.len(), 2);

let mut src: Vec<[u8; 3]> = Vec::with_capacity(17);
src.resize( 8, [0; 3]);
src.resize(8, [0; 3]);
let iter = src.into_iter().map(|[a, b, _]| [a, b]);
assert_in_place_trait(&iter);
let sink: Vec<[u8; 2]> = iter.collect();
@@ -1221,11 +1221,7 @@ fn test_in_place_specialization_step_up_down() {

let src = vec![[0u8; 4]; 256];
let srcptr = src.as_ptr();
let iter = src
.into_iter()
.flat_map(|a| {
a.into_iter().map(|b| b.wrapping_add(1))
});
let iter = src.into_iter().flat_map(|a| a.into_iter().map(|b| b.wrapping_add(1)));
assert_in_place_trait(&iter);
let sink = iter.collect::<Vec<_>>();
assert_eq!(srcptr as *const u8, sink.as_ptr());
@@ -1787,8 +1787,9 @@ extern "rust-intrinsic" {
/// so this rounds half-way cases to the number with an even least significant digit.
///
/// May raise an inexact floating-point exception if the argument is not an integer.
/// However, Rust assumes floating-point exceptions cannot be observed, so this is not something that
/// can actually be used from Rust code.
/// However, Rust assumes floating-point exceptions cannot be observed, so these exceptions
/// cannot actually be utilized from Rust code.
/// In other words, this intrinsic is equivalent in behavior to `nearbyintf32` and `roundevenf32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::round_ties_even`](../../std/primitive.f32.html#method.round_ties_even)
@@ -1798,8 +1799,9 @@ extern "rust-intrinsic" {
/// so this rounds half-way cases to the number with an even least significant digit.
///
/// May raise an inexact floating-point exception if the argument is not an integer.
/// However, Rust assumes floating-point exceptions cannot be observed, so this is not something that
/// can actually be used from Rust code.
/// However, Rust assumes floating-point exceptions cannot be observed, so these exceptions
/// cannot actually be utilized from Rust code.
/// In other words, this intrinsic is equivalent in behavior to `nearbyintf64` and `roundevenf64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::round_ties_even`](../../std/primitive.f64.html#method.round_ties_even)
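A quick standalone illustration of the round-half-to-even ("banker's rounding") behaviour documented above, using the `round_ties_even` methods that these docs point to:

```rust
fn main() {
    // Half-way cases go to the nearest even integer.
    assert_eq!(2.5f32.round_ties_even(), 2.0);
    assert_eq!(3.5f32.round_ties_even(), 4.0);
    assert_eq!((-2.5f64).round_ties_even(), -2.0);
    // Non-half-way cases round to the nearest integer as usual.
    assert_eq!(2.4f64.round_ties_even(), 2.0);
    assert_eq!(2.6f64.round_ties_even(), 3.0);
}
```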
@@ -1,6 +1,6 @@
use core::{array, assert_eq};
use core::num::NonZeroUsize;
use core::sync::atomic::{AtomicUsize, Ordering};
use core::{array, assert_eq};

#[test]
fn array_from_ref() {
@@ -466,7 +466,7 @@ fn const_cells() {
const CELL: Cell<i32> = Cell::new(3);
const _: i32 = CELL.into_inner();

/* FIXME(#110395)
/* FIXME(#110395)
const UNSAFE_CELL_FROM: UnsafeCell<i32> = UnsafeCell::from(3);
const _: i32 = UNSAFE_CELL.into_inner();

@@ -475,5 +475,5 @@ fn const_cells() {

const CELL_FROM: Cell<i32> = Cell::from(3);
const _: i32 = CELL.into_inner();
*/
*/
}
@@ -1,4 +1,4 @@
use core::error::{request_value, request_ref, Request};
use core::error::{request_ref, request_value, Request};

// Test the `Request` API.
#[derive(Debug)]
@@ -22,11 +22,11 @@ fn test_pointer_formats_data_pointer() {
#[test]
fn test_estimated_capacity() {
assert_eq!(format_args!("").estimated_capacity(), 0);
assert_eq!(format_args!("{}", {""}).estimated_capacity(), 0);
assert_eq!(format_args!("{}", { "" }).estimated_capacity(), 0);
assert_eq!(format_args!("Hello").estimated_capacity(), 5);
assert_eq!(format_args!("Hello, {}!", {""}).estimated_capacity(), 16);
assert_eq!(format_args!("{}, hello!", {"World"}).estimated_capacity(), 0);
assert_eq!(format_args!("{}. 16-bytes piece", {"World"}).estimated_capacity(), 32);
assert_eq!(format_args!("Hello, {}!", { "" }).estimated_capacity(), 16);
assert_eq!(format_args!("{}, hello!", { "World" }).estimated_capacity(), 0);
assert_eq!(format_args!("{}. 16-bytes piece", { "World" }).estimated_capacity(), 32);
}

#[test]
@@ -35,7 +35,8 @@ impl Hasher for MyHasher {
#[test]
fn test_writer_hasher() {
// FIXME(#110395)
/* const */ fn hash<T: Hash>(t: &T) -> u64 {
/* const */
fn hash<T: Hash>(t: &T) -> u64 {
let mut s = MyHasher { hash: 0 };
t.hash(&mut s);
s.finish()
@@ -140,7 +141,8 @@ impl Hash for Custom {
#[test]
fn test_custom_state() {
// FIXME(#110395)
/* const */ fn hash<T: Hash>(t: &T) -> u64 {
/* const */
fn hash<T: Hash>(t: &T) -> u64 {
let mut c = CustomHasher { output: 0 };
t.hash(&mut c);
c.finish()
@ -42,7 +42,10 @@ fn test_iterator_chain_advance_by() {
let mut iter = Unfuse::new(xs).chain(Unfuse::new(ys));
assert_eq!(iter.advance_by(xs.len() + i), Ok(()));
assert_eq!(iter.next(), Some(&ys[i]));
assert_eq!(iter.advance_by(100), Err(NonZeroUsize::new(100 - (ys.len() - i - 1)).unwrap()));
assert_eq!(
iter.advance_by(100),
Err(NonZeroUsize::new(100 - (ys.len() - i - 1)).unwrap())
);
assert_eq!(iter.advance_by(0), Ok(()));
}

@ -71,7 +74,10 @@ fn test_iterator_chain_advance_back_by() {
let mut iter = Unfuse::new(xs).chain(Unfuse::new(ys));
assert_eq!(iter.advance_back_by(i), Ok(()));
assert_eq!(iter.next_back(), Some(&ys[ys.len() - i - 1]));
assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (len - i - 1)).unwrap()));
assert_eq!(
iter.advance_back_by(100),
Err(NonZeroUsize::new(100 - (len - i - 1)).unwrap())
);
assert_eq!(iter.advance_back_by(0), Ok(()));
}

@ -79,7 +85,10 @@ fn test_iterator_chain_advance_back_by() {
let mut iter = Unfuse::new(xs).chain(Unfuse::new(ys));
assert_eq!(iter.advance_back_by(ys.len() + i), Ok(()));
assert_eq!(iter.next_back(), Some(&xs[xs.len() - i - 1]));
assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (xs.len() - i - 1)).unwrap()));
assert_eq!(
iter.advance_back_by(100),
Err(NonZeroUsize::new(100 - (xs.len() - i - 1)).unwrap())
);
assert_eq!(iter.advance_back_by(0), Ok(()));
}

@ -1,5 +1,5 @@
use core::assert_eq;
use super::*;
use core::assert_eq;
use core::iter::*;
use core::num::NonZeroUsize;

@ -245,7 +245,6 @@ fn test_step_by_skip() {
assert_eq!((200..=255u8).step_by(10).nth(3), Some(230));
}

struct DeOpt<I: Iterator>(I);

impl<I: Iterator> Iterator for DeOpt<I> {

@ -265,8 +264,7 @@ impl<I: DoubleEndedIterator> DoubleEndedIterator for DeOpt<I> {
#[test]
fn test_step_by_fold_range_specialization() {
macro_rules! t {
($range:expr, $var: ident, $body:tt) => {
{
($range:expr, $var: ident, $body:tt) => {{
// run the same tests for the non-optimized version
let mut $var = DeOpt($range);
$body

@ -274,8 +272,7 @@ fn test_step_by_fold_range_specialization() {
{
let mut $var = $range;
$body
}
}
}};
}

t!((1usize..5).step_by(1), r, {

@ -288,13 +285,12 @@ fn test_step_by_fold_range_specialization() {
assert_eq!(r.sum::<usize>(), 2);
});

t!((0usize..5).step_by(2), r, {
assert_eq!(r.next(), Some(0));
assert_eq!(r.sum::<usize>(), 6);
});

t!((usize::MAX - 6 .. usize::MAX).step_by(5), r, {
t!((usize::MAX - 6..usize::MAX).step_by(5), r, {
assert_eq!(r.next(), Some(usize::MAX - 6));
assert_eq!(r.sum::<usize>(), usize::MAX - 1);
});
@ -93,7 +93,10 @@ fn test_take_advance_by() {
assert_eq!((0..2).take(1).advance_back_by(10), Err(NonZeroUsize::new(9).unwrap()));
assert_eq!((0..0).take(1).advance_back_by(1), Err(NonZeroUsize::new(1).unwrap()));
assert_eq!((0..0).take(1).advance_back_by(0), Ok(()));
assert_eq!((0..usize::MAX).take(100).advance_back_by(usize::MAX), Err(NonZeroUsize::new(usize::MAX - 100).unwrap()));
assert_eq!(
(0..usize::MAX).take(100).advance_back_by(usize::MAX),
Err(NonZeroUsize::new(usize::MAX - 100).unwrap())
);
}

#[test]

@ -168,7 +168,10 @@ fn test_iterator_advance_back_by() {
let mut iter = v.iter();
assert_eq!(iter.advance_back_by(i), Ok(()));
assert_eq!(iter.next_back().unwrap(), &v[v.len() - 1 - i]);
assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap()));
assert_eq!(
iter.advance_back_by(100),
Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap())
);
}

assert_eq!(v.iter().advance_back_by(v.len()), Ok(()));

@ -183,7 +186,10 @@ fn test_iterator_rev_advance_back_by() {
let mut iter = v.iter().rev();
assert_eq!(iter.advance_back_by(i), Ok(()));
assert_eq!(iter.next_back().unwrap(), &v[i]);
assert_eq!(iter.advance_back_by(100), Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap()));
assert_eq!(
iter.advance_back_by(100),
Err(NonZeroUsize::new(100 - (v.len() - 1 - i)).unwrap())
);
}

assert_eq!(v.iter().rev().advance_back_by(v.len()), Ok(()));

@ -664,7 +664,11 @@ fn ipv6_properties() {
&[0x20, 1, 0, 0x20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
global | unicast_global
);
check!("2001:30::", &[0x20, 1, 0, 0x30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], global | unicast_global);
check!(
"2001:30::",
&[0x20, 1, 0, 0x30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
global | unicast_global
);
check!("2001:40::", &[0x20, 1, 0, 0x40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], unicast_global);

check!(
@ -178,7 +178,7 @@ fn test_or_else() {
assert_eq!(x.or_else(two), Some(2));
assert_eq!(x.or_else(none), None);

/* FIXME(#110395)
/* FIXME(#110395)
const FOO: Option<isize> = Some(1);
const A: Option<isize> = FOO.or_else(two);
const B: Option<isize> = FOO.or_else(none);

@ -190,7 +190,7 @@ fn test_or_else() {
const D: Option<isize> = BAR.or_else(none);
assert_eq!(C, Some(2));
assert_eq!(D, None);
*/
*/
}

#[test]

@ -486,7 +486,7 @@ const fn option_const_mut() {
None => unreachable!(),
}
}
/* FIXME(const-hack)
/* FIXME(const-hack)
{
let as_mut: Option<&mut usize> = Option::from(&mut option);
match as_mut {

@ -494,7 +494,7 @@ const fn option_const_mut() {
None => unreachable!(),
}
}
*/
*/
}

#[test]
@ -478,7 +478,11 @@ fn align_offset_various_strides() {
x |= test_stride::<A9>(ptr::invalid::<A9>(ptr), align);

#[repr(packed)]
struct A10(#[allow(dead_code)] u32, #[allow(dead_code)] u32, #[allow(dead_code)] u16);
struct A10(
#[allow(dead_code)] u32,
#[allow(dead_code)] u32,
#[allow(dead_code)] u16,
);
x |= test_stride::<A10>(ptr::invalid::<A10>(ptr), align);

x |= test_stride::<u32>(ptr::invalid::<u32>(ptr), align);

@ -532,7 +536,11 @@ fn align_offset_various_strides_const() {
test_stride::<A6>(ptr::invalid::<A6>(ptr), ptr, align);

#[repr(packed)]
struct A7(#[allow(dead_code)] u32, #[allow(dead_code)] u16, #[allow(dead_code)] u8);
struct A7(
#[allow(dead_code)] u32,
#[allow(dead_code)] u16,
#[allow(dead_code)] u8,
);
test_stride::<A7>(ptr::invalid::<A7>(ptr), ptr, align);

#[repr(packed)]

@ -540,11 +548,19 @@ fn align_offset_various_strides_const() {
test_stride::<A8>(ptr::invalid::<A8>(ptr), ptr, align);

#[repr(packed)]
struct A9(#[allow(dead_code)] u32, #[allow(dead_code)] u32, #[allow(dead_code)] u8);
struct A9(
#[allow(dead_code)] u32,
#[allow(dead_code)] u32,
#[allow(dead_code)] u8,
);
test_stride::<A9>(ptr::invalid::<A9>(ptr), ptr, align);

#[repr(packed)]
struct A10(#[allow(dead_code)] u32, #[allow(dead_code)] u32, #[allow(dead_code)] u16);
struct A10(
#[allow(dead_code)] u32,
#[allow(dead_code)] u32,
#[allow(dead_code)] u16,
);
test_stride::<A10>(ptr::invalid::<A10>(ptr), ptr, align);

test_stride::<u32>(ptr::invalid::<u32>(ptr), ptr, align);
@ -479,7 +479,7 @@ fn duration_const() {
const CHECKED_MUL: Option<Duration> = Duration::SECOND.checked_mul(1);
assert_eq!(CHECKED_MUL, Some(Duration::SECOND));

/* FIXME(#110395)
/* FIXME(#110395)
const MUL_F32: Duration = Duration::SECOND.mul_f32(1.0);
assert_eq!(MUL_F32, Duration::SECOND);

@ -494,7 +494,7 @@ fn duration_const() {

const DIV_F64: Duration = Duration::SECOND.div_f64(1.0);
assert_eq!(DIV_F64, Duration::SECOND);
*/
*/

const DIV_DURATION_F32: f32 = Duration::SECOND.div_duration_f32(Duration::SECOND);
assert_eq!(DIV_DURATION_F32, 1.0);
@ -1,4 +1,4 @@
#![cfg(not(target_env="sgx"))]
#![cfg(not(target_env = "sgx"))]

use std::env;
use std::fs;

@ -13,7 +13,7 @@ ignore = [

# tests for now are not formatted, as they are sometimes pretty-printing constrained
# (and generally rustfmt can move around comments in UI-testing incompatible ways)
"tests",
"/tests/",

# do not format submodules
# FIXME: sync submodule list with tidy/bootstrap/etc
@ -32,9 +32,12 @@ fn download_ci_llvm() {
assert_eq!(parse_llvm("rust.channel = \"dev\""), if_unchanged);
assert!(!parse_llvm("rust.channel = \"stable\""));
assert_eq!(parse_llvm("build.build = \"x86_64-unknown-linux-gnu\""), if_unchanged);
assert_eq!(parse_llvm(
assert_eq!(
parse_llvm(
"llvm.assertions = true \r\n build.build = \"x86_64-unknown-linux-gnu\" \r\n llvm.download-ci-llvm = \"if-unchanged\""
), if_unchanged);
),
if_unchanged
);
assert!(!parse_llvm(
"llvm.assertions = true \r\n build.build = \"aarch64-apple-darwin\" \r\n llvm.download-ci-llvm = \"if-unchanged\""
));

@ -1,4 +1,4 @@
use crate::utils::helpers::{extract_beta_rev, hex_encode, make, check_cfg_arg};
use crate::utils::helpers::{check_cfg_arg, extract_beta_rev, hex_encode, make};
use std::path::PathBuf;

#[test]
@ -69,8 +69,8 @@ fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String
let mut parser =
match rustc_parse::maybe_new_parser_from_source_str(&sess, file_name, snippet.clone()) {
Ok(parser) => parser,
Err(diagnostics) => {
drop(diagnostics);
Err(errs) => {
errs.into_iter().for_each(|err| err.cancel());
return None;
}
};

@ -589,7 +589,7 @@ pub(crate) fn make_test(
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, source) {
Ok(p) => p,
Err(errs) => {
drop(errs);
errs.into_iter().for_each(|err| err.cancel());
return (found_main, found_extern_crate, found_macro);
}
};

@ -759,8 +759,10 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
let mut parser =
match maybe_new_parser_from_source_str(&sess, filename, source.to_owned()) {
Ok(p) => p,
Err(_) => {
// If there is an unclosed delimiter, an error will be returned by the tokentrees.
Err(errs) => {
errs.into_iter().for_each(|err| err.cancel());
// If there is an unclosed delimiter, an error will be returned by the
// tokentrees.
return false;
}
};

@ -53,7 +53,7 @@ pub fn check(
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
Ok(p) => p,
Err(errs) => {
drop(errs);
errs.into_iter().for_each(|err| err.cancel());
return (false, test_attr_spans);
},
};
@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};

use rustc_ast::token::TokenKind;
use rustc_ast::{ast, attr, ptr};
use rustc_errors::Diagnostic;
use rustc_errors::DiagnosticBuilder;
use rustc_parse::{new_parser_from_file, parser::Parser as RawParser};
use rustc_span::{sym, Span};
use thin_vec::ThinVec;

@ -65,7 +65,7 @@ impl<'a> ParserBuilder<'a> {
fn parser(
sess: &'a rustc_session::parse::ParseSess,
input: Input,
) -> Result<rustc_parse::parser::Parser<'a>, Option<Vec<Diagnostic>>> {
) -> Result<rustc_parse::parser::Parser<'a>, Option<Vec<DiagnosticBuilder<'a>>>> {
match input {
Input::File(ref file) => catch_unwind(AssertUnwindSafe(move || {
new_parser_from_file(sess, file, None)

@ -4,7 +4,9 @@ use std::sync::atomic::{AtomicBool, Ordering};
use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
use rustc_errors::emitter::{DynEmitter, Emitter, HumanEmitter};
use rustc_errors::translation::Translate;
use rustc_errors::{ColorConfig, DiagCtxt, Diagnostic, Level as DiagnosticLevel};
use rustc_errors::{
ColorConfig, DiagCtxt, Diagnostic, DiagnosticBuilder, Level as DiagnosticLevel,
};
use rustc_session::parse::ParseSess as RawParseSess;
use rustc_span::{
source_map::{FilePathMapping, SourceMap},

@ -283,9 +285,9 @@ impl ParseSess {

// Methods that should be restricted within the parse module.
impl ParseSess {
pub(super) fn emit_diagnostics(&self, diagnostics: Vec<Diagnostic>) {
pub(super) fn emit_diagnostics(&self, diagnostics: Vec<DiagnosticBuilder<'_>>) {
for diagnostic in diagnostics {
self.parse_sess.dcx.emit_diagnostic(diagnostic);
diagnostic.emit();
}
}
@ -194,6 +194,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"aho-corasick",
"allocator-api2", // FIXME: only appears in Cargo.lock due to https://github.com/rust-lang/cargo/issues/10801
"annotate-snippets",
"anstyle",
"ar_archive_writer",
"arrayvec",
"autocfg",

@ -391,7 +392,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
"writeable",
"yansi-term", // this is a false-positive: it's only used by rustfmt, but because it's enabled through a feature, tidy thinks it's used by rustc as well.
"yoke",
"yoke-derive",
"zerocopy",
@ -4,7 +4,7 @@
fn bar() -> bool {
let mut _0: bool;

+ coverage Counter(0) => /the/src/instrument_coverage.rs:21:1 - 23:2;
+ coverage Code(Counter(0)) => /the/src/instrument_coverage.rs:21:1 - 23:2;
+
bb0: {
+ Coverage::CounterIncrement(0);

@ -9,11 +9,11 @@

+ coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Add, rhs: Counter(1) };
+ coverage ExpressionId(1) => Expression { lhs: Expression(0), op: Subtract, rhs: Counter(1) };
+ coverage Counter(0) => /the/src/instrument_coverage.rs:12:1 - 12:11;
+ coverage Expression(0) => /the/src/instrument_coverage.rs:13:5 - 14:17;
+ coverage Expression(1) => /the/src/instrument_coverage.rs:15:13 - 15:18;
+ coverage Expression(1) => /the/src/instrument_coverage.rs:18:1 - 18:2;
+ coverage Counter(1) => /the/src/instrument_coverage.rs:16:10 - 16:11;
+ coverage Code(Counter(0)) => /the/src/instrument_coverage.rs:12:1 - 12:11;
+ coverage Code(Expression(0)) => /the/src/instrument_coverage.rs:13:5 - 14:17;
+ coverage Code(Expression(1)) => /the/src/instrument_coverage.rs:15:13 - 15:18;
+ coverage Code(Counter(1)) => /the/src/instrument_coverage.rs:16:10 - 16:11;
+ coverage Code(Expression(1)) => /the/src/instrument_coverage.rs:18:1 - 18:2;
+
bb0: {
+ Coverage::CounterIncrement(0);
@ -1,4 +1,4 @@
error: expected one of `->`, `where`, or `{`, found `<eof>`
error: internal compiler error: expected one of `->`, `where`, or `{`, found `<eof>`
--> $DIR/ice-bug-report-url.rs:14:10
|
LL | fn wrong()

@ -4,7 +4,6 @@ pub struct Foo<'a, 'b, T> {
field1: dyn Bar<'a, 'b,>,
//~^ ERROR
//~| ERROR
//~| ERROR
}

pub trait Bar<'x, 's, U>

@ -5,7 +5,7 @@ LL | field1: dyn Bar<'a, 'b,>,
| ^^^ expected 1 generic argument
|
note: trait defined here, with 1 generic parameter: `U`
--> $DIR/unable-fulfill-trait.rs:10:11
--> $DIR/unable-fulfill-trait.rs:9:11
|
LL | pub trait Bar<'x, 's, U>
| ^^^ -

@ -20,24 +20,7 @@ error[E0227]: ambiguous lifetime bound, explicit lifetime bound required
LL | field1: dyn Bar<'a, 'b,>,
| ^^^^^^^^^^^^^^^^

error[E0478]: lifetime bound not satisfied
--> $DIR/unable-fulfill-trait.rs:4:13
|
LL | field1: dyn Bar<'a, 'b,>,
| ^^^^^^^^^^^^^^^^
|
note: lifetime parameter instantiated with the lifetime `'b` as defined here
--> $DIR/unable-fulfill-trait.rs:3:20
|
LL | pub struct Foo<'a, 'b, T> {
| ^^
note: but lifetime parameter must outlive the lifetime `'a` as defined here
--> $DIR/unable-fulfill-trait.rs:3:16
|
LL | pub struct Foo<'a, 'b, T> {
| ^^
error: aborting due to 2 previous errors

error: aborting due to 3 previous errors

Some errors have detailed explanations: E0107, E0227, E0478.
Some errors have detailed explanations: E0107, E0227.
For more information about an error, try `rustc --explain E0107`.
@ -30,7 +30,7 @@ LL | type Item = &[T];
= help: add `#![feature(inherent_associated_types)]` to the crate attributes to enable

error[E0223]: ambiguous associated type
--> $DIR/issue-109071.rs:16:22
--> $DIR/issue-109071.rs:15:22
|
LL | fn T() -> Option<Self::Item> {}
| ^^^^^^^^^^

@ -9,13 +9,11 @@ impl<T> Windows { //~ ERROR: missing generics for struct `Windows`
//[no_gate]~^ ERROR: inherent associated types are unstable

fn next() -> Option<Self::Item> {}
//[with_gate]~^ ERROR type annotations needed
}

impl<T> Windows<T> {
fn T() -> Option<Self::Item> {}
//[no_gate]~^ ERROR: ambiguous associated type
//[with_gate]~^^ ERROR type annotations needed
//~^ ERROR: ambiguous associated type
}

fn main() {}

@ -20,19 +20,20 @@ help: add missing generic argument
LL | impl<T> Windows<T> {
| +++

error[E0282]: type annotations needed
--> $DIR/issue-109071.rs:11:18
|
LL | fn next() -> Option<Self::Item> {}
| ^^^^^^^^^^^^^^^^^^ cannot infer type for type parameter `T`

error[E0282]: type annotations needed
--> $DIR/issue-109071.rs:16:15
error[E0223]: ambiguous associated type
--> $DIR/issue-109071.rs:15:22
|
LL | fn T() -> Option<Self::Item> {}
| ^^^^^^^^^^^^^^^^^^ cannot infer type for type parameter `T`
| ^^^^^^^^^^
|
help: use fully-qualified syntax
|
LL | fn T() -> Option<<Windows<T> as IntoAsyncIterator>::Item> {}
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
LL | fn T() -> Option<<Windows<T> as IntoIterator>::Item> {}
| ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

error: aborting due to 4 previous errors
error: aborting due to 3 previous errors

Some errors have detailed explanations: E0107, E0282, E0637.
Some errors have detailed explanations: E0107, E0223, E0637.
For more information about an error, try `rustc --explain E0107`.
@ -5,9 +5,8 @@ use std::ops::Index;
trait Hierarchy {
type Value;
type ChildKey;
type Children = dyn Index<Self::ChildKey, Output=dyn Hierarchy>;
type Children = dyn Index<Self::ChildKey, Output = dyn Hierarchy>;
//~^ ERROR: the value of the associated types
//~| ERROR: the size for values of type

fn data(&self) -> Option<(Self::Value, Self::Children)>;
}

@ -1,27 +1,13 @@
error[E0191]: the value of the associated types `Value`, `ChildKey` and `Children` in `Hierarchy` must be specified
--> $DIR/issue-23595-1.rs:8:58
--> $DIR/issue-23595-1.rs:8:60
|
LL | type Value;
| ---------- `Value` defined here
LL | type ChildKey;
| ------------- `ChildKey` defined here
LL | type Children = dyn Index<Self::ChildKey, Output=dyn Hierarchy>;
LL | type Children = dyn Index<Self::ChildKey, Output = dyn Hierarchy>;
| ------------- `Children` defined here ^^^^^^^^^ help: specify the associated types: `Hierarchy<Value = Type, ChildKey = Type, Children = Type>`

error[E0277]: the size for values of type `(dyn Index<<Self as Hierarchy>::ChildKey, Output = (dyn Hierarchy + 'static)> + 'static)` cannot be known at compilation time
--> $DIR/issue-23595-1.rs:8:21
|
LL | type Children = dyn Index<Self::ChildKey, Output=dyn Hierarchy>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
|
= help: the trait `Sized` is not implemented for `(dyn Index<<Self as Hierarchy>::ChildKey, Output = (dyn Hierarchy + 'static)> + 'static)`
note: required by a bound in `Hierarchy::Children`
--> $DIR/issue-23595-1.rs:8:5
|
LL | type Children = dyn Index<Self::ChildKey, Output=dyn Hierarchy>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `Hierarchy::Children`
error: aborting due to 1 previous error

error: aborting due to 2 previous errors

Some errors have detailed explanations: E0191, E0277.
For more information about an error, try `rustc --explain E0191`.
For more information about this error, try `rustc --explain E0191`.
@ -7,7 +7,7 @@ LL | pub fn call_me<Args: Sized, const IDX: usize, const FN: unsafe extern "
= note: type parameters may not be used in the type of const parameters

error[E0770]: the type of const parameters must not depend on other generic parameters
--> $DIR/issue-71381.rs:23:40
--> $DIR/issue-71381.rs:22:40
|
LL | const FN: unsafe extern "C" fn(Args),
| ^^^^ the type must not depend on the parameter `Args`

@ -7,29 +7,13 @@ LL | pub fn call_me<Args: Sized, const IDX: usize, const FN: unsafe extern "
= note: type parameters may not be used in the type of const parameters

error[E0770]: the type of const parameters must not depend on other generic parameters
--> $DIR/issue-71381.rs:23:40
--> $DIR/issue-71381.rs:22:40
|
LL | const FN: unsafe extern "C" fn(Args),
| ^^^^ the type must not depend on the parameter `Args`
|
= note: type parameters may not be used in the type of const parameters

error: using function pointers as const generic parameters is forbidden
--> $DIR/issue-71381.rs:14:61
|
LL | pub fn call_me<Args: Sized, const IDX: usize, const FN: unsafe extern "C" fn(Args)>(&self) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the only supported types are integers, `bool` and `char`

error: using function pointers as const generic parameters is forbidden
--> $DIR/issue-71381.rs:23:19
|
LL | const FN: unsafe extern "C" fn(Args),
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the only supported types are integers, `bool` and `char`

error: aborting due to 4 previous errors
error: aborting due to 2 previous errors

For more information about this error, try `rustc --explain E0770`.
@ -12,8 +12,7 @@ unsafe extern "C" fn pass(args: PassArg) {

impl Test {
pub fn call_me<Args: Sized, const IDX: usize, const FN: unsafe extern "C" fn(Args)>(&self) {
//[min]~^ ERROR: using function pointers as const generic parameters is forbidden
//~^^ ERROR: the type of const parameters must not depend on other generic parameters
//~^ ERROR: the type of const parameters must not depend on other generic parameters
self.0 = Self::trampiline::<Args, IDX, FN> as _
}

@ -21,8 +20,7 @@ impl Test {
Args: Sized,
const IDX: usize,
const FN: unsafe extern "C" fn(Args),
//[min]~^ ERROR: using function pointers as const generic parameters is forbidden
//~^^ ERROR: the type of const parameters must not depend on other generic parameters
//~^ ERROR: the type of const parameters must not depend on other generic parameters
>(
args: Args,
) {

@ -6,14 +6,6 @@ LL | fn func<A, const F: fn(inner: A)>(outer: A) {
|
= note: type parameters may not be used in the type of const parameters

error: using function pointers as const generic parameters is forbidden
--> $DIR/issue-71611.rs:5:21
|
LL | fn func<A, const F: fn(inner: A)>(outer: A) {
| ^^^^^^^^^^^^
|
= note: the only supported types are integers, `bool` and `char`

error: aborting due to 2 previous errors
error: aborting due to 1 previous error

For more information about this error, try `rustc --explain E0770`.
@ -3,8 +3,7 @@
#![cfg_attr(full, allow(incomplete_features))]

fn func<A, const F: fn(inner: A)>(outer: A) {
//[min]~^ ERROR: using function pointers as const generic parameters is forbidden
//~^^ ERROR: the type of const parameters must not depend on other generic parameters
//~^ ERROR: the type of const parameters must not depend on other generic parameters
F(outer);
}

@ -1,4 +1,3 @@
// stderr-per-bitwidth
#[derive(Copy, Clone)]
union Foo {
a: isize,

@ -1,5 +1,5 @@
error[E0080]: evaluation of constant value failed
--> $DIR/const-err4.rs:9:21
--> $DIR/const-err-enum-discriminant.rs:8:21
|
LL | Boo = [unsafe { Foo { b: () }.a }; 4][3],
| ^^^^^^^^^^^^^^^ using uninitialized data, but this operation requires initialized memory

@ -1,9 +0,0 @@
error[E0080]: evaluation of constant value failed
--> $DIR/const-err4.rs:9:21
|
LL | Boo = [unsafe { Foo { b: () }.a }; 4][3],
| ^^^^^^^^^^^^^^^ using uninitialized data, but this operation requires initialized memory

error: aborting due to 1 previous error

For more information about this error, try `rustc --explain E0080`.
@ -1,4 +1,4 @@
error[E0080]: evaluation of constant value failed
error: internal compiler error[E0080]: evaluation of constant value failed
--> $DIR/const-eval-query-stack.rs:16:16
|
LL | const X: i32 = 1 / 0;

@ -2,7 +2,6 @@ use std::fmt::Debug;

#[derive(Debug)]
pub struct Irrelevant<Irrelevant> { //~ ERROR type arguments are not allowed on type parameter
//~^ ERROR `Irrelevant` must be used
irrelevant: Irrelevant,
}

@ -16,16 +16,6 @@ LL | pub struct Irrelevant<Irrelevant> {
| ^^^^^^^^^^
= note: this error originates in the derive macro `Debug` (in Nightly builds, run with -Z macro-backtrace for more info)

error[E0210]: type parameter `Irrelevant` must be used as the type parameter for some local type (e.g., `MyStruct<Irrelevant>`)
--> $DIR/issue-97343.rs:4:23
|
LL | pub struct Irrelevant<Irrelevant> {
| ^^^^^^^^^^ type parameter `Irrelevant` must be used as the type parameter for some local type
|
= note: implementing a foreign trait is only possible if at least one of the types for which it is implemented is local
= note: only traits defined in the current crate can be implemented for a type parameter
error: aborting due to 1 previous error

error: aborting due to 2 previous errors

Some errors have detailed explanations: E0109, E0210.
For more information about an error, try `rustc --explain E0109`.
For more information about this error, try `rustc --explain E0109`.

@ -6,8 +6,6 @@ trait FooBar<'foo, 'bar>: Foo<'foo> + Bar<'bar> {}
struct Baz<'foo, 'bar> {
baz: dyn FooBar<'foo, 'bar>,
//~^ ERROR ambiguous lifetime bound, explicit lifetime bound required
//~| ERROR lifetime bound not satisfied
}

fn main() {
}
fn main() {}
@ -4,24 +4,6 @@ error[E0227]: ambiguous lifetime bound, explicit lifetime bound required
LL | baz: dyn FooBar<'foo, 'bar>,
| ^^^^^^^^^^^^^^^^^^^^^^

error[E0478]: lifetime bound not satisfied
--> $DIR/E0227.rs:7:10
|
LL | baz: dyn FooBar<'foo, 'bar>,
| ^^^^^^^^^^^^^^^^^^^^^^
|
note: lifetime parameter instantiated with the lifetime `'bar` as defined here
--> $DIR/E0227.rs:6:18
|
LL | struct Baz<'foo, 'bar> {
| ^^^^
note: but lifetime parameter must outlive the lifetime `'foo` as defined here
--> $DIR/E0227.rs:6:12
|
LL | struct Baz<'foo, 'bar> {
| ^^^^
error: aborting due to 1 previous error

error: aborting due to 2 previous errors

Some errors have detailed explanations: E0227, E0478.
For more information about an error, try `rustc --explain E0227`.
For more information about this error, try `rustc --explain E0227`.

@ -9,9 +9,6 @@ impl Provider for () {
struct Holder<B> {
inner: Box<dyn Provider<A = B>>,
//~^ ERROR: missing generics for associated type
//~| ERROR: missing generics for associated type
//~| ERROR: missing generics for associated type
//~| ERROR: the trait `Provider` cannot be made into an object
}

fn main() {
@ -14,57 +14,6 @@ help: add missing lifetime argument
LL | inner: Box<dyn Provider<A<'a> = B>>,
| ++++

error[E0107]: missing generics for associated type `Provider::A`
--> $DIR/issue-71176.rs:10:27
|
LL | inner: Box<dyn Provider<A = B>>,
| ^ expected 1 lifetime argument
|
note: associated type defined here, with 1 lifetime parameter: `'a`
--> $DIR/issue-71176.rs:2:10
|
LL | type A<'a>;
| ^ --
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
help: add missing lifetime argument
|
LL | inner: Box<dyn Provider<A<'a> = B>>,
| ++++
error: aborting due to 1 previous error

error[E0107]: missing generics for associated type `Provider::A`
--> $DIR/issue-71176.rs:10:27
|
LL | inner: Box<dyn Provider<A = B>>,
| ^ expected 1 lifetime argument
|
note: associated type defined here, with 1 lifetime parameter: `'a`
--> $DIR/issue-71176.rs:2:10
|
LL | type A<'a>;
| ^ --
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
help: add missing lifetime argument
|
LL | inner: Box<dyn Provider<A<'a> = B>>,
| ++++

error[E0038]: the trait `Provider` cannot be made into an object
--> $DIR/issue-71176.rs:10:14
|
LL | inner: Box<dyn Provider<A = B>>,
| ^^^^^^^^^^^^^^^^^^^ `Provider` cannot be made into an object
|
note: for a trait to be "object safe" it needs to allow building a vtable to allow the call to be resolvable dynamically; for more information visit <https://doc.rust-lang.org/reference/items/traits.html#object-safety>
--> $DIR/issue-71176.rs:2:10
|
LL | trait Provider {
| -------- this trait cannot be made into an object...
LL | type A<'a>;
| ^ ...because it contains the generic associated type `A`
= help: consider moving `A` to another trait
= help: only type `()` implements the trait, consider using it directly instead

error: aborting due to 4 previous errors

Some errors have detailed explanations: E0038, E0107.
For more information about an error, try `rustc --explain E0038`.
For more information about this error, try `rustc --explain E0107`.
@ -1,3 +1,3 @@
thread 'main' panicked at library/alloc/src/raw_vec.rs:571:5:
thread 'main' panicked at library/alloc/src/raw_vec.rs:570:5:
capacity overflow
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace

@ -4,7 +4,7 @@ error: unconstrained opaque type
LL | type TransactionFuture<'__, O> = impl '__ + Future<Output = TransactionResult<O>>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
error[E0792]: expected generic lifetime parameter, found `'_`
error: internal compiler error[E0792]: expected generic lifetime parameter, found `'_`
--> $DIR/issue-86800.rs:39:5
|
LL | type TransactionFuture<'__, O> = impl '__ + Future<Output = TransactionResult<O>>;

@ -5,7 +5,6 @@ fn foo<T>() {

impl<T> Drop for Foo<T> {
//~^ ERROR struct takes 0 generic arguments but 1 generic argument
//~| ERROR `T` is not constrained
fn drop(&mut self) {}
}
}

@ -22,13 +22,7 @@ note: struct defined here, with 0 generic parameters
LL | struct Foo {
| ^^^

error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predicates
--> $DIR/issue-3214.rs:6:10
|
LL | impl<T> Drop for Foo<T> {
| ^ unconstrained type parameter
error: aborting due to 2 previous errors

error: aborting due to 3 previous errors

Some errors have detailed explanations: E0107, E0207, E0401.
Some errors have detailed explanations: E0107, E0401.
For more information about an error, try `rustc --explain E0107`.

@ -8,8 +8,6 @@ static FOO: (dyn AsRef<OsStr>, u8) = ("hello", 42);

const BAR: (&Path, [u8], usize) = ("hello", [], 42);
//~^ ERROR cannot find type `Path` in this scope
//~| ERROR the size for values of type `[u8]` cannot be known at compilation time
//~| ERROR mismatched types

static BAZ: ([u8], usize) = ([], 0);
//~^ ERROR the size for values of type `[u8]` cannot be known at compilation time
@ -21,25 +21,7 @@ LL + use std::path::Path;
|

error[E0277]: the size for values of type `[u8]` cannot be known at compilation time
--> $DIR/issue-84108.rs:9:12
|
LL | const BAR: (&Path, [u8], usize) = ("hello", [], 42);
| ^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
|
= help: the trait `Sized` is not implemented for `[u8]`
= note: only the last element of a tuple may have a dynamically sized type

error[E0308]: mismatched types
--> $DIR/issue-84108.rs:9:45
|
LL | const BAR: (&Path, [u8], usize) = ("hello", [], 42);
| ^^ expected `[u8]`, found `[_; 0]`
|
= note: expected slice `[u8]`
found array `[_; 0]`

error[E0277]: the size for values of type `[u8]` cannot be known at compilation time
--> $DIR/issue-84108.rs:14:13
--> $DIR/issue-84108.rs:12:13
|
LL | static BAZ: ([u8], usize) = ([], 0);
| ^^^^^^^^^^^^^ doesn't have a size known at compile-time

@ -48,7 +30,7 @@ LL | static BAZ: ([u8], usize) = ([], 0);
= note: only the last element of a tuple may have a dynamically sized type

error[E0308]: mismatched types
--> $DIR/issue-84108.rs:14:30
--> $DIR/issue-84108.rs:12:30
|
LL | static BAZ: ([u8], usize) = ([], 0);
| ^^ expected `[u8]`, found `[_; 0]`

@ -56,7 +38,7 @@ LL | static BAZ: ([u8], usize) = ([], 0);
= note: expected slice `[u8]`
found array `[_; 0]`

error: aborting due to 6 previous errors
error: aborting due to 4 previous errors

Some errors have detailed explanations: E0277, E0308, E0412.
For more information about an error, try `rustc --explain E0277`.

@ -28,6 +28,5 @@ fn d<const C: S>() {}
trait Foo<'a> {}
struct Bar<const N: &'a (dyn for<'a> Foo<'a>)>;
//~^ ERROR the type of const parameters must not depend on other generic parameters
//~| ERROR `&dyn for<'a> Foo<'a>` is forbidden as the type of a const generic parameter

fn main() {}
@ -58,16 +58,7 @@ LL | fn d<const C: S>() {}
= note: the only supported types are integers, `bool` and `char`
= help: add `#![feature(adt_const_params)]` to the crate attributes to enable more complex and user defined types

error: `&dyn for<'a> Foo<'a>` is forbidden as the type of a const generic parameter
--> $DIR/unusual-rib-combinations.rs:29:21
|
LL | struct Bar<const N: &'a (dyn for<'a> Foo<'a>)>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: the only supported types are integers, `bool` and `char`
= help: add `#![feature(adt_const_params)]` to the crate attributes to enable more complex and user defined types

error: aborting due to 9 previous errors
error: aborting due to 8 previous errors

Some errors have detailed explanations: E0106, E0214, E0308, E0770.
For more information about an error, try `rustc --explain E0106`.

@ -1,4 +1,4 @@
error[E0425]: cannot find value `missing_ident` in this scope
error: internal compiler error[E0425]: cannot find value `missing_ident` in this scope
--> $DIR/default-backtrace-ice.rs:21:13
|
LL | fn main() { missing_ident; }

@ -5,4 +5,3 @@ struct Apple((Apple, Option(Banana ? Citron)));
//~| ERROR expected one of `)` or `,`, found `Citron`
//~| ERROR cannot find type `Citron` in this scope [E0412]
//~| ERROR parenthesized type parameters may only be used with a `Fn` trait [E0214]
//~| ERROR recursive type `Apple` has infinite size [E0072]

@ -34,18 +34,7 @@ help: use angle brackets instead
LL | struct Apple((Apple, Option<Banana ? Citron>));
| ~ ~

error[E0072]: recursive type `Apple` has infinite size
--> $DIR/issue-103748-ICE-wrong-braces.rs:3:1
|
LL | struct Apple((Apple, Option(Banana ? Citron)));
| ^^^^^^^^^^^^ ----- recursive without indirection
|
help: insert some indirection (e.g., a `Box`, `Rc`, or `&`) to break the cycle
|
LL | struct Apple((Box<Apple>, Option(Banana ? Citron)));
| ++++ +
error: aborting due to 4 previous errors

error: aborting due to 5 previous errors

Some errors have detailed explanations: E0072, E0214, E0412.
For more information about an error, try `rustc --explain E0072`.
Some errors have detailed explanations: E0214, E0412.
For more information about an error, try `rustc --explain E0214`.
@ -39,7 +39,6 @@ impl<T: Trait<u32, String>> Struct<T> {}
trait YetAnotherTrait {}
impl<T: Trait<u32, Assoc=String>, U> YetAnotherTrait for Struct<T, U> {}
//~^ ERROR struct takes 1 generic argument but 2 generic arguments were supplied
//~| ERROR `U` is not constrained

fn main() {

@ -116,13 +116,7 @@ error[E0207]: the type parameter `S` is not constrained by the impl trait, self
LL | impl<T, S> Trait<T, S> for () {}
| ^ unconstrained type parameter

error[E0207]: the type parameter `U` is not constrained by the impl trait, self type, or predicates
--> $DIR/116464-invalid-assoc-type-suggestion-in-trait-impl.rs:40:35
|
LL | impl<T: Trait<u32, Assoc=String>, U> YetAnotherTrait for Struct<T, U> {}
| ^ unconstrained type parameter

error: aborting due to 10 previous errors
error: aborting due to 9 previous errors

Some errors have detailed explanations: E0107, E0207.
For more information about an error, try `rustc --explain E0107`.

@ -1,4 +1,4 @@
error[E0080]: could not evaluate static initializer
error: internal compiler error[E0080]: could not evaluate static initializer
--> $DIR/err.rs:11:21
|
LL | pub static C: u32 = 0 - 1;