Mirror of https://github.com/rust-lang/rust.git (synced 2024-10-31 22:41:50 +00:00)
Auto merge of #107536 - GuillaumeGomez:rollup-xv7dx2h, r=GuillaumeGomez

Rollup of 12 pull requests

Successful merges:

- #106898 (Include both md and yaml ICE ticket templates)
- #107331 (Clean up eslint annotations and remove unused JS function)
- #107348 (small refactor to new projection code)
- #107354 (rustdoc: update Source Serif 4 from 4.004 to 4.005)
- #107412 (avoid needless checks)
- #107467 (Improve enum checks)
- #107486 (Track bound types like bound regions)
- #107491 (rustdoc: remove unused CSS from `.setting-check`)
- #107508 (`Edition` micro refactor)
- #107525 (PointeeInfo is advisory only)
- #107527 (rustdoc: stop making unstable items transparent)
- #107535 (Replace unwrap with ? in TcpListener doc)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup

Commit: ad8e1dc286
.github/ISSUE_TEMPLATE/ice.md (vendored, new file, 49 lines added)
@@ -0,0 +1,49 @@
---
name: Internal Compiler Error
about: Create a report for an internal compiler error in rustc.
labels: C-bug, I-ICE, T-compiler
---

<!--
Thank you for finding an Internal Compiler Error! 🧊 If possible, try to provide
a minimal verifiable example. You can read "Rust Bug Minimization Patterns" for
how to create smaller examples.

http://blog.pnkfx.org/blog/2019/11/18/rust-bug-minimization-patterns/

-->

### Code

```Rust
<code>
```

### Meta

<!--
If you're using the stable version of the compiler, you should also check if the
bug also exists in the beta or nightly versions.
-->

`rustc --version --verbose`:
```
<version>
```

### Error output

```
<output>
```

<!--
Include a backtrace in the code block by setting `RUST_BACKTRACE=1` in your
environment. E.g. `RUST_BACKTRACE=1 cargo build`.
-->
<details><summary><strong>Backtrace</strong></summary>
<p>

```
<backtrace>
```

</p>
</details>
.github/ISSUE_TEMPLATE/ice.yaml (vendored, 6 changed lines)

@@ -1,5 +1,5 @@
-name: Internal Compiler Error
-description: Create a report for an internal compiler error in `rustc`
+name: Internal Compiler Error (Structured form)
+description: For now, you'll want to use the other ICE template, as GitHub forms have strict limits on the size of fields so backtraces cannot be pasted directly.
 labels: ["C-bug", "I-ICE", "T-compiler"]
 title: "[ICE]: "
 body:
@@ -79,4 +79,4 @@ body:
      label: Anything else?
      description: If you have more details you want to give us to reproduce this issue, please add it here
    validations:
-      required: false
+      required: false
@@ -1456,6 +1456,8 @@ pub enum PointerKind {
     UniqueOwned,
 }

+/// Note that this information is advisory only, and backends are free to ignore it.
+/// It can only be used to encode potential optimizations, but no critical information.
 #[derive(Copy, Clone, Debug)]
 pub struct PointeeInfo {
     pub size: Size,
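The two new doc lines above say that `PointeeInfo` is an optimization hint only. As a hedged illustration of what "advisory only" means for a consumer, here is a minimal sketch; all names below are hypothetical stand-ins, not rustc's actual backend API:

```rust
// Hypothetical sketch: advisory metadata may steer an optimization,
// but correctness must never depend on it being present or precise.
struct PointeeHint {
    size: u64,
    align: u64,
}

fn choose_copy_strategy(hint: Option<&PointeeHint>) -> &'static str {
    match hint {
        // Use the hint only to pick a faster path...
        Some(h) if h.size <= 16 && h.align >= 8 => "inline-copy",
        // ...and always keep a correct fallback that ignores it.
        _ => "generic-memcpy",
    }
}

fn main() {
    assert_eq!(choose_copy_strategy(None), "generic-memcpy");
    let hint = PointeeHint { size: 8, align: 8 };
    assert_eq!(choose_copy_strategy(Some(&hint)), "inline-copy");
}
```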
@ -58,23 +58,24 @@ pub fn beautify_doc_string(data: Symbol, kind: CommentKind) -> Symbol {
|
||||
// In case we have doc comments like `/**` or `/*!`, we want to remove stars if they are
|
||||
// present. However, we first need to strip the empty lines so they don't get in the middle
|
||||
// when we try to compute the "horizontal trim".
|
||||
let lines = if kind == CommentKind::Block {
|
||||
// Whatever happens, we skip the first line.
|
||||
let mut i = lines
|
||||
.get(0)
|
||||
.map(|l| if l.trim_start().starts_with('*') { 0 } else { 1 })
|
||||
.unwrap_or(0);
|
||||
let mut j = lines.len();
|
||||
let lines = match kind {
|
||||
CommentKind::Block => {
|
||||
// Whatever happens, we skip the first line.
|
||||
let mut i = lines
|
||||
.get(0)
|
||||
.map(|l| if l.trim_start().starts_with('*') { 0 } else { 1 })
|
||||
.unwrap_or(0);
|
||||
let mut j = lines.len();
|
||||
|
||||
while i < j && lines[i].trim().is_empty() {
|
||||
i += 1;
|
||||
while i < j && lines[i].trim().is_empty() {
|
||||
i += 1;
|
||||
}
|
||||
while j > i && lines[j - 1].trim().is_empty() {
|
||||
j -= 1;
|
||||
}
|
||||
&lines[i..j]
|
||||
}
|
||||
while j > i && lines[j - 1].trim().is_empty() {
|
||||
j -= 1;
|
||||
}
|
||||
&lines[i..j]
|
||||
} else {
|
||||
lines
|
||||
CommentKind::Line => lines,
|
||||
};
|
||||
|
||||
for line in lines {
|
||||
|
@@ -131,7 +131,7 @@ pub fn print_crate<'a>(

     // Currently, in Rust 2018 we don't have `extern crate std;` at the crate
     // root, so this is not needed, and actually breaks things.
-    if edition == Edition::Edition2015 {
+    if edition.rust_2015() {
         // `#![no_std]`
         let fake_attr = attr::mk_attr_word(g, ast::AttrStyle::Inner, sym::no_std, DUMMY_SP);
         s.print_attribute(&fake_attr);
@@ -344,7 +344,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
            } else {
                err.span_help(source_info.span, "try removing `&mut` here");
            }
-        } else if decl.mutability == Mutability::Not {
+        } else if decl.mutability.is_not() {
            if matches!(
                decl.local_info,
                Some(box LocalInfo::User(ClearCrossCrate::Set(BindingForm::ImplicitSelf(
@@ -2028,7 +2028,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
            }
        };

-        if ty_to_mut == Mutability::Mut && ty_mut == Mutability::Not {
+        if ty_to_mut.is_mut() && ty_mut.is_not() {
            span_mirbug!(
                self,
                rvalue,
@ -150,17 +150,14 @@ pub unsafe fn create_module<'ll>(
|
||||
target_data_layout =
|
||||
"e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:32-n8:16:32-a:0:32-S32"
|
||||
.to_string();
|
||||
}
|
||||
if sess.target.arch == "wasm32" {
|
||||
} else if sess.target.arch == "wasm32" {
|
||||
target_data_layout = target_data_layout.replace("-p10:8:8-p20:8:8", "");
|
||||
}
|
||||
}
|
||||
if llvm_version < (16, 0, 0) {
|
||||
if sess.target.arch == "s390x" {
|
||||
target_data_layout = target_data_layout.replace("-v128:64", "");
|
||||
}
|
||||
|
||||
if sess.target.arch == "riscv64" {
|
||||
} else if sess.target.arch == "riscv64" {
|
||||
target_data_layout = target_data_layout.replace("-n32:64-", "-n64-");
|
||||
}
|
||||
}
|
||||
|
@ -622,10 +622,9 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
|
||||
let alloc = alloc.inner();
|
||||
if is_write {
|
||||
// Write access. These are never allowed, but we give a targeted error message.
|
||||
if alloc.mutability == Mutability::Not {
|
||||
Err(err_ub!(WriteToReadOnly(alloc_id)).into())
|
||||
} else {
|
||||
Err(ConstEvalErrKind::ModifiedGlobal.into())
|
||||
match alloc.mutability {
|
||||
Mutability::Not => Err(err_ub!(WriteToReadOnly(alloc_id)).into()),
|
||||
Mutability::Mut => Err(ConstEvalErrKind::ModifiedGlobal.into()),
|
||||
}
|
||||
} else {
|
||||
// Read access. These are usually allowed, with some exceptions.
|
||||
|
@ -304,7 +304,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
||||
.into());
|
||||
};
|
||||
|
||||
if alloc.mutability == Mutability::Not {
|
||||
if alloc.mutability.is_not() {
|
||||
throw_ub_format!("deallocating immutable allocation {alloc_id:?}");
|
||||
}
|
||||
if alloc_kind != kind {
|
||||
@ -631,7 +631,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
||||
}
|
||||
|
||||
let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
|
||||
if alloc.mutability == Mutability::Not {
|
||||
if alloc.mutability.is_not() {
|
||||
throw_ub!(WriteToReadOnly(id))
|
||||
}
|
||||
Ok((alloc, &mut self.machine))
|
||||
|
@@ -754,7 +754,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
                // FIXME(JakobDegen) The validator should check that `self.mir_phase <
                // DropsLowered`. However, this causes ICEs with generation of drop shims, which
                // seem to fail to set their `MirPhase` correctly.
-                if *kind == RetagKind::Raw || *kind == RetagKind::TwoPhase {
+                if matches!(kind, RetagKind::Raw | RetagKind::TwoPhase) {
                    self.fail(location, format!("explicit `{:?}` is forbidden", kind));
                }
            }
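Several hunks in this rollup (this retag check, the `KleeneOp` check in `TtParser`, the shift checks in const-prop) replace chains of `==` comparisons with a single `matches!` test. A minimal, self-contained sketch of the same pattern on a stand-in enum, not the real rustc types:

```rust
// Standalone sketch (not rustc code): the refactoring pattern used above,
// where a chain of `==` checks becomes a single `matches!` pattern test.
#[allow(dead_code)]
enum RetagKind {
    FnEntry,
    TwoPhase,
    Raw,
    Default,
}

fn is_explicitly_forbidden(kind: &RetagKind) -> bool {
    // Equivalent to `*kind == RetagKind::Raw || *kind == RetagKind::TwoPhase`,
    // but it reads as one pattern and needs no `PartialEq` impl.
    matches!(kind, RetagKind::Raw | RetagKind::TwoPhase)
}

fn main() {
    assert!(is_explicitly_forbidden(&RetagKind::Raw));
    assert!(!is_explicitly_forbidden(&RetagKind::Default));
}
```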
@ -2113,30 +2113,38 @@ impl EmitterWriter {
|
||||
}
|
||||
}
|
||||
for sugg in suggestions {
|
||||
if sugg.style == SuggestionStyle::CompletelyHidden {
|
||||
// do not display this suggestion, it is meant only for tools
|
||||
} else if sugg.style == SuggestionStyle::HideCodeAlways {
|
||||
if let Err(e) = self.emit_message_default(
|
||||
&MultiSpan::new(),
|
||||
&[(sugg.msg.to_owned(), Style::HeaderMsg)],
|
||||
args,
|
||||
&None,
|
||||
&Level::Help,
|
||||
max_line_num_len,
|
||||
true,
|
||||
None,
|
||||
) {
|
||||
panic!("failed to emit error: {}", e);
|
||||
match sugg.style {
|
||||
SuggestionStyle::CompletelyHidden => {
|
||||
// do not display this suggestion, it is meant only for tools
|
||||
}
|
||||
} else if let Err(e) = self.emit_suggestion_default(
|
||||
span,
|
||||
sugg,
|
||||
args,
|
||||
&Level::Help,
|
||||
max_line_num_len,
|
||||
) {
|
||||
panic!("failed to emit error: {}", e);
|
||||
};
|
||||
SuggestionStyle::HideCodeAlways => {
|
||||
if let Err(e) = self.emit_message_default(
|
||||
&MultiSpan::new(),
|
||||
&[(sugg.msg.to_owned(), Style::HeaderMsg)],
|
||||
args,
|
||||
&None,
|
||||
&Level::Help,
|
||||
max_line_num_len,
|
||||
true,
|
||||
None,
|
||||
) {
|
||||
panic!("failed to emit error: {}", e);
|
||||
}
|
||||
}
|
||||
SuggestionStyle::HideCodeInline
|
||||
| SuggestionStyle::ShowCode
|
||||
| SuggestionStyle::ShowAlways => {
|
||||
if let Err(e) = self.emit_suggestion_default(
|
||||
span,
|
||||
sugg,
|
||||
args,
|
||||
&Level::Help,
|
||||
max_line_num_len,
|
||||
) {
|
||||
panic!("failed to emit error: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -142,7 +142,7 @@ impl StyledBuffer {
|
||||
pub fn set_style(&mut self, line: usize, col: usize, style: Style, overwrite: bool) {
|
||||
if let Some(ref mut line) = self.lines.get_mut(line) {
|
||||
if let Some(StyledChar { style: s, .. }) = line.get_mut(col) {
|
||||
if overwrite || *s == Style::NoStyle || *s == Style::Quotation {
|
||||
if overwrite || matches!(s, Style::NoStyle | Style::Quotation) {
|
||||
*s = style;
|
||||
}
|
||||
}
|
||||
|
@@ -503,7 +503,7 @@ impl TtParser {
                    mp.push_match(metavar_idx, seq_depth, MatchedSeq(vec![]));
                }

-                if op == KleeneOp::ZeroOrMore || op == KleeneOp::ZeroOrOne {
+                if matches!(op, KleeneOp::ZeroOrMore | KleeneOp::ZeroOrOne) {
                    // Try zero matches of this sequence, by skipping over it.
                    self.cur_mps.push(MatcherPos {
                        idx: idx_first_after,
@ -385,10 +385,9 @@ pub fn check_generic_arg_count_for_call(
|
||||
) -> GenericArgCountResult {
|
||||
let empty_args = hir::GenericArgs::none();
|
||||
let gen_args = seg.args.unwrap_or(&empty_args);
|
||||
let gen_pos = if is_method_call == IsMethodCall::Yes {
|
||||
GenericArgPosition::MethodCall
|
||||
} else {
|
||||
GenericArgPosition::Value
|
||||
let gen_pos = match is_method_call {
|
||||
IsMethodCall::Yes => GenericArgPosition::MethodCall,
|
||||
IsMethodCall::No => GenericArgPosition::Value,
|
||||
};
|
||||
let has_self = generics.parent.is_none() && generics.has_self;
|
||||
|
||||
|
@ -606,59 +606,66 @@ fn check_item_type(tcx: TyCtxt<'_>, id: hir::ItemId) {
|
||||
};
|
||||
check_abi(tcx, it.hir_id(), it.span, abi);
|
||||
|
||||
if abi == Abi::RustIntrinsic {
|
||||
for item in items {
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
intrinsic::check_intrinsic_type(tcx, item);
|
||||
}
|
||||
} else if abi == Abi::PlatformIntrinsic {
|
||||
for item in items {
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
intrinsic::check_platform_intrinsic_type(tcx, item);
|
||||
}
|
||||
} else {
|
||||
for item in items {
|
||||
let def_id = item.id.owner_id.def_id;
|
||||
let generics = tcx.generics_of(def_id);
|
||||
let own_counts = generics.own_counts();
|
||||
if generics.params.len() - own_counts.lifetimes != 0 {
|
||||
let (kinds, kinds_pl, egs) = match (own_counts.types, own_counts.consts) {
|
||||
(_, 0) => ("type", "types", Some("u32")),
|
||||
// We don't specify an example value, because we can't generate
|
||||
// a valid value for any type.
|
||||
(0, _) => ("const", "consts", None),
|
||||
_ => ("type or const", "types or consts", None),
|
||||
};
|
||||
struct_span_err!(
|
||||
tcx.sess,
|
||||
item.span,
|
||||
E0044,
|
||||
"foreign items may not have {kinds} parameters",
|
||||
)
|
||||
.span_label(item.span, &format!("can't have {kinds} parameters"))
|
||||
.help(
|
||||
// FIXME: once we start storing spans for type arguments, turn this
|
||||
// into a suggestion.
|
||||
&format!(
|
||||
"replace the {} parameters with concrete {}{}",
|
||||
kinds,
|
||||
kinds_pl,
|
||||
egs.map(|egs| format!(" like `{}`", egs)).unwrap_or_default(),
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
match abi {
|
||||
Abi::RustIntrinsic => {
|
||||
for item in items {
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
intrinsic::check_intrinsic_type(tcx, item);
|
||||
}
|
||||
}
|
||||
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
match &item.kind {
|
||||
hir::ForeignItemKind::Fn(fn_decl, _, _) => {
|
||||
require_c_abi_if_c_variadic(tcx, fn_decl, abi, item.span);
|
||||
Abi::PlatformIntrinsic => {
|
||||
for item in items {
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
intrinsic::check_platform_intrinsic_type(tcx, item);
|
||||
}
|
||||
}
|
||||
|
||||
_ => {
|
||||
for item in items {
|
||||
let def_id = item.id.owner_id.def_id;
|
||||
let generics = tcx.generics_of(def_id);
|
||||
let own_counts = generics.own_counts();
|
||||
if generics.params.len() - own_counts.lifetimes != 0 {
|
||||
let (kinds, kinds_pl, egs) = match (own_counts.types, own_counts.consts)
|
||||
{
|
||||
(_, 0) => ("type", "types", Some("u32")),
|
||||
// We don't specify an example value, because we can't generate
|
||||
// a valid value for any type.
|
||||
(0, _) => ("const", "consts", None),
|
||||
_ => ("type or const", "types or consts", None),
|
||||
};
|
||||
struct_span_err!(
|
||||
tcx.sess,
|
||||
item.span,
|
||||
E0044,
|
||||
"foreign items may not have {kinds} parameters",
|
||||
)
|
||||
.span_label(item.span, &format!("can't have {kinds} parameters"))
|
||||
.help(
|
||||
// FIXME: once we start storing spans for type arguments, turn this
|
||||
// into a suggestion.
|
||||
&format!(
|
||||
"replace the {} parameters with concrete {}{}",
|
||||
kinds,
|
||||
kinds_pl,
|
||||
egs.map(|egs| format!(" like `{}`", egs)).unwrap_or_default(),
|
||||
),
|
||||
)
|
||||
.emit();
|
||||
}
|
||||
hir::ForeignItemKind::Static(..) => {
|
||||
check_static_inhabited(tcx, def_id);
|
||||
check_static_linkage(tcx, def_id);
|
||||
|
||||
let item = tcx.hir().foreign_item(item.id);
|
||||
match &item.kind {
|
||||
hir::ForeignItemKind::Fn(fn_decl, _, _) => {
|
||||
require_c_abi_if_c_variadic(tcx, fn_decl, abi, item.span);
|
||||
}
|
||||
hir::ForeignItemKind::Static(..) => {
|
||||
check_static_inhabited(tcx, def_id);
|
||||
check_static_linkage(tcx, def_id);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1930,7 +1930,7 @@ pub(super) fn check_type_bounds<'tcx>(
|
||||
smallvec::SmallVec::with_capacity(defs.count());
|
||||
InternalSubsts::fill_single(&mut substs, defs, &mut |param, _| match param.kind {
|
||||
GenericParamDefKind::Type { .. } => {
|
||||
let kind = ty::BoundTyKind::Param(param.name);
|
||||
let kind = ty::BoundTyKind::Param(param.def_id, param.name);
|
||||
let bound_var = ty::BoundVariableKind::Ty(kind);
|
||||
bound_vars.push(bound_var);
|
||||
tcx.mk_ty(ty::Bound(
|
||||
|
@ -9,9 +9,7 @@ use rustc_ast_pretty::pp::{self, Breaks};
|
||||
use rustc_ast_pretty::pprust::{Comments, PrintState};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::LifetimeParamKind;
|
||||
use rustc_hir::{
|
||||
BindingAnnotation, ByRef, GenericArg, GenericParam, GenericParamKind, Mutability, Node, Term,
|
||||
};
|
||||
use rustc_hir::{BindingAnnotation, ByRef, GenericArg, GenericParam, GenericParamKind, Node, Term};
|
||||
use rustc_hir::{GenericBound, PatKind, RangeEnd, TraitBoundModifier};
|
||||
use rustc_span::source_map::SourceMap;
|
||||
use rustc_span::symbol::{kw, Ident, IdentPrinter, Symbol};
|
||||
@ -1746,7 +1744,7 @@ impl<'a> State<'a> {
|
||||
if by_ref == ByRef::Yes {
|
||||
self.word_nbsp("ref");
|
||||
}
|
||||
if mutbl == Mutability::Mut {
|
||||
if mutbl.is_mut() {
|
||||
self.word_nbsp("mut");
|
||||
}
|
||||
self.print_ident(ident);
|
||||
|
@ -1354,13 +1354,12 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
|
||||
return Some(Err(MethodError::Ambiguity(sources)));
|
||||
}
|
||||
|
||||
applicable_candidates.pop().map(|(probe, status)| {
|
||||
if status == ProbeResult::Match {
|
||||
applicable_candidates.pop().map(|(probe, status)| match status {
|
||||
ProbeResult::Match => {
|
||||
Ok(probe
|
||||
.to_unadjusted_pick(self_ty, unstable_candidates.cloned().unwrap_or_default()))
|
||||
} else {
|
||||
Err(MethodError::BadReturnType)
|
||||
}
|
||||
ProbeResult::NoMatch | ProbeResult::BadReturnType => Err(MethodError::BadReturnType),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -90,7 +90,7 @@ impl<'tcx> InferCtxt<'tcx> {
|
||||
types: &mut |bound_ty: ty::BoundTy| {
|
||||
self.tcx.mk_ty(ty::Placeholder(ty::PlaceholderType {
|
||||
universe: next_universe,
|
||||
name: bound_ty.var,
|
||||
name: bound_ty.kind,
|
||||
}))
|
||||
},
|
||||
consts: &mut |bound_var: ty::BoundVar, ty| {
|
||||
|
@ -2044,7 +2044,7 @@ fn replace_param_and_infer_substs_with_placeholder<'tcx>(
|
||||
) -> SubstsRef<'tcx> {
|
||||
struct ReplaceParamAndInferWithPlaceholder<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
idx: usize,
|
||||
idx: u32,
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFolder<'tcx> for ReplaceParamAndInferWithPlaceholder<'tcx> {
|
||||
@ -2056,7 +2056,7 @@ fn replace_param_and_infer_substs_with_placeholder<'tcx>(
|
||||
if let ty::Infer(_) = t.kind() {
|
||||
self.tcx.mk_ty(ty::Placeholder(ty::PlaceholderType {
|
||||
universe: ty::UniverseIndex::ROOT,
|
||||
name: ty::BoundVar::from_usize({
|
||||
name: ty::BoundTyKind::Anon({
|
||||
let idx = self.idx;
|
||||
self.idx += 1;
|
||||
idx
|
||||
@ -2077,7 +2077,7 @@ fn replace_param_and_infer_substs_with_placeholder<'tcx>(
|
||||
self.tcx.mk_const(
|
||||
ty::PlaceholderConst {
|
||||
universe: ty::UniverseIndex::ROOT,
|
||||
name: ty::BoundVar::from_usize({
|
||||
name: ty::BoundVar::from_u32({
|
||||
let idx = self.idx;
|
||||
self.idx += 1;
|
||||
idx
|
||||
|
@ -580,27 +580,28 @@ impl<'tcx> LateLintPass<'tcx> for MissingDoc {
|
||||
}
|
||||
|
||||
fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &hir::ImplItem<'_>) {
|
||||
// If the method is an impl for a trait, don't doc.
|
||||
let context = method_context(cx, impl_item.owner_id.def_id);
|
||||
if context == MethodLateContext::TraitImpl {
|
||||
return;
|
||||
}
|
||||
|
||||
// If the method is an impl for an item with docs_hidden, don't doc.
|
||||
if context == MethodLateContext::PlainImpl {
|
||||
let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id());
|
||||
let impl_ty = cx.tcx.type_of(parent);
|
||||
let outerdef = match impl_ty.kind() {
|
||||
ty::Adt(def, _) => Some(def.did()),
|
||||
ty::Foreign(def_id) => Some(*def_id),
|
||||
_ => None,
|
||||
};
|
||||
let is_hidden = match outerdef {
|
||||
Some(id) => cx.tcx.is_doc_hidden(id),
|
||||
None => false,
|
||||
};
|
||||
if is_hidden {
|
||||
return;
|
||||
match context {
|
||||
// If the method is an impl for a trait, don't doc.
|
||||
MethodLateContext::TraitImpl => return,
|
||||
MethodLateContext::TraitAutoImpl => {}
|
||||
// If the method is an impl for an item with docs_hidden, don't doc.
|
||||
MethodLateContext::PlainImpl => {
|
||||
let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id());
|
||||
let impl_ty = cx.tcx.type_of(parent);
|
||||
let outerdef = match impl_ty.kind() {
|
||||
ty::Adt(def, _) => Some(def.did()),
|
||||
ty::Foreign(def_id) => Some(*def_id),
|
||||
_ => None,
|
||||
};
|
||||
let is_hidden = match outerdef {
|
||||
Some(id) => cx.tcx.is_doc_hidden(id),
|
||||
None => false,
|
||||
};
|
||||
if is_hidden {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -113,37 +113,37 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList {
|
||||
CrateType::Staticlib => Linkage::Static,
|
||||
};
|
||||
|
||||
if preferred_linkage == Linkage::NotLinked {
|
||||
match preferred_linkage {
|
||||
// If the crate is not linked, there are no link-time dependencies.
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
if preferred_linkage == Linkage::Static {
|
||||
// Attempt static linkage first. For dylibs and executables, we may be
|
||||
// able to retry below with dynamic linkage.
|
||||
if let Some(v) = attempt_static(tcx) {
|
||||
return v;
|
||||
}
|
||||
|
||||
// Staticlibs and static executables must have all static dependencies.
|
||||
// If any are not found, generate some nice pretty errors.
|
||||
if ty == CrateType::Staticlib
|
||||
|| (ty == CrateType::Executable
|
||||
&& sess.crt_static(Some(ty))
|
||||
&& !sess.target.crt_static_allows_dylibs)
|
||||
{
|
||||
for &cnum in tcx.crates(()).iter() {
|
||||
if tcx.dep_kind(cnum).macros_only() {
|
||||
continue;
|
||||
}
|
||||
let src = tcx.used_crate_source(cnum);
|
||||
if src.rlib.is_some() {
|
||||
continue;
|
||||
}
|
||||
sess.emit_err(RlibRequired { crate_name: tcx.crate_name(cnum) });
|
||||
Linkage::NotLinked => return Vec::new(),
|
||||
Linkage::Static => {
|
||||
// Attempt static linkage first. For dylibs and executables, we may be
|
||||
// able to retry below with dynamic linkage.
|
||||
if let Some(v) = attempt_static(tcx) {
|
||||
return v;
|
||||
}
|
||||
|
||||
// Staticlibs and static executables must have all static dependencies.
|
||||
// If any are not found, generate some nice pretty errors.
|
||||
if ty == CrateType::Staticlib
|
||||
|| (ty == CrateType::Executable
|
||||
&& sess.crt_static(Some(ty))
|
||||
&& !sess.target.crt_static_allows_dylibs)
|
||||
{
|
||||
for &cnum in tcx.crates(()).iter() {
|
||||
if tcx.dep_kind(cnum).macros_only() {
|
||||
continue;
|
||||
}
|
||||
let src = tcx.used_crate_source(cnum);
|
||||
if src.rlib.is_some() {
|
||||
continue;
|
||||
}
|
||||
sess.emit_err(RlibRequired { crate_name: tcx.crate_name(cnum) });
|
||||
}
|
||||
return Vec::new();
|
||||
}
|
||||
return Vec::new();
|
||||
}
|
||||
Linkage::Dynamic | Linkage::IncludedFromDylib => {}
|
||||
}
|
||||
|
||||
let mut formats = FxHashMap::default();
|
||||
@ -283,12 +283,9 @@ fn attempt_static(tcx: TyCtxt<'_>) -> Option<DependencyList> {
|
||||
let mut ret = tcx
|
||||
.crates(())
|
||||
.iter()
|
||||
.map(|&cnum| {
|
||||
if tcx.dep_kind(cnum) == CrateDepKind::Explicit {
|
||||
Linkage::Static
|
||||
} else {
|
||||
Linkage::NotLinked
|
||||
}
|
||||
.map(|&cnum| match tcx.dep_kind(cnum) {
|
||||
CrateDepKind::Explicit => Linkage::Static,
|
||||
CrateDepKind::MacrosOnly | CrateDepKind::Implicit => Linkage::NotLinked,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
|
@ -107,7 +107,7 @@ impl<'tcx> Collector<'tcx> {
|
||||
return;
|
||||
};
|
||||
|
||||
if abi == Abi::Rust || abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic {
|
||||
if matches!(abi, Abi::Rust | Abi::RustIntrinsic | Abi::PlatformIntrinsic) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@@ -135,7 +135,10 @@ impl Debug for CoverageKind {
                "Expression({:?}) = {} {} {}",
                id.index(),
                lhs.index(),
-                if *op == Op::Add { "+" } else { "-" },
+                match op {
+                    Op::Add => "+",
+                    Op::Subtract => "-",
+                },
                rhs.index(),
            ),
            Unreachable => write!(fmt, "Unreachable"),
@ -110,7 +110,7 @@ fn write_graph_label<'tcx, W: std::fmt::Write>(
|
||||
let decl = &body.local_decls[local];
|
||||
|
||||
write!(w, "let ")?;
|
||||
if decl.mutability == Mutability::Mut {
|
||||
if decl.mutability.is_mut() {
|
||||
write!(w, "mut ")?;
|
||||
}
|
||||
|
||||
|
@ -416,11 +416,7 @@ impl<'tcx> Body<'tcx> {
|
||||
(self.arg_count + 1..self.local_decls.len()).filter_map(move |index| {
|
||||
let local = Local::new(index);
|
||||
let decl = &self.local_decls[local];
|
||||
if decl.is_user_variable() && decl.mutability == Mutability::Mut {
|
||||
Some(local)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
(decl.is_user_variable() && decl.mutability.is_mut()).then(|| local)
|
||||
})
|
||||
}
|
||||
|
||||
|
@@ -580,7 +580,7 @@ fn write_scope_tree(
            continue;
        }

-        let mut_str = if local_decl.mutability == Mutability::Mut { "mut " } else { "" };
+        let mut_str = local_decl.mutability.prefix_str();

        let mut indented_decl =
            format!("{0:1$}let {2}{3:?}: {4:?}", INDENT, indent, mut_str, local, local_decl.ty);
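This hunk, like several others in the rollup, swaps an explicit `mutability == Mutability::Mut` comparison for a helper method (`is_mut`, `is_not`, `prefix_str`). A small sketch of that helper style on a stand-in enum (the real methods live on `rustc_ast::Mutability`):

```rust
// Sketch of the helper-method style used throughout the rollup; this is a
// stand-in enum, not the rustc_ast definition.
#[derive(Copy, Clone, PartialEq, Eq)]
enum Mutability {
    Not,
    Mut,
}

impl Mutability {
    fn is_mut(self) -> bool {
        matches!(self, Mutability::Mut)
    }

    fn is_not(self) -> bool {
        matches!(self, Mutability::Not)
    }

    /// The `"mut "` prefix used when printing a binding, or `""` for immutable ones.
    fn prefix_str(self) -> &'static str {
        match self {
            Mutability::Mut => "mut ",
            Mutability::Not => "",
        }
    }
}

fn main() {
    let m = Mutability::Mut;
    // Call sites read as a question instead of repeating the enum path:
    // `m.is_mut()` rather than `m == Mutability::Mut`.
    assert!(m.is_mut() && !m.is_not());
    assert_eq!(format!("let {}x = 1;", m.prefix_str()), "let mut x = 1;");
}
```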
@ -610,7 +610,9 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
let index = entry.index();
|
||||
let var = ty::BoundVar::from_usize(index);
|
||||
let kind = entry
|
||||
.or_insert_with(|| ty::BoundVariableKind::Ty(ty::BoundTyKind::Anon))
|
||||
.or_insert_with(|| {
|
||||
ty::BoundVariableKind::Ty(ty::BoundTyKind::Anon(index as u32))
|
||||
})
|
||||
.expect_ty();
|
||||
self.tcx.mk_ty(ty::Bound(ty::INNERMOST, BoundTy { var, kind }))
|
||||
}
|
||||
|
@ -1369,7 +1369,7 @@ pub struct Placeholder<T> {
|
||||
|
||||
pub type PlaceholderRegion = Placeholder<BoundRegionKind>;
|
||||
|
||||
pub type PlaceholderType = Placeholder<BoundVar>;
|
||||
pub type PlaceholderType = Placeholder<BoundTyKind>;
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
|
||||
#[derive(TyEncodable, TyDecodable, PartialOrd, Ord)]
|
||||
|
@ -698,8 +698,10 @@ pub trait PrettyPrinter<'tcx>:
|
||||
ty::Error(_) => p!("[type error]"),
|
||||
ty::Param(ref param_ty) => p!(print(param_ty)),
|
||||
ty::Bound(debruijn, bound_ty) => match bound_ty.kind {
|
||||
ty::BoundTyKind::Anon => self.pretty_print_bound_var(debruijn, bound_ty.var)?,
|
||||
ty::BoundTyKind::Param(p) => p!(write("{}", p)),
|
||||
ty::BoundTyKind::Anon(bv) => {
|
||||
self.pretty_print_bound_var(debruijn, ty::BoundVar::from_u32(bv))?
|
||||
}
|
||||
ty::BoundTyKind::Param(_, s) => p!(write("{}", s)),
|
||||
},
|
||||
ty::Adt(def, substs) => {
|
||||
p!(print_def_path(def.did(), substs));
|
||||
|
@ -240,6 +240,7 @@ TrivialTypeTraversalAndLiftImpls! {
|
||||
crate::ty::AssocKind,
|
||||
crate::ty::AliasKind,
|
||||
crate::ty::Placeholder<crate::ty::BoundRegionKind>,
|
||||
crate::ty::Placeholder<crate::ty::BoundTyKind>,
|
||||
crate::ty::ClosureKind,
|
||||
crate::ty::FreeRegion,
|
||||
crate::ty::InferTy,
|
||||
|
@@ -1504,13 +1504,22 @@ pub struct BoundTy {
 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
 #[derive(HashStable)]
 pub enum BoundTyKind {
-    Anon,
-    Param(Symbol),
+    Anon(u32),
+    Param(DefId, Symbol),
 }

+impl BoundTyKind {
+    pub fn expect_anon(self) -> u32 {
+        match self {
+            BoundTyKind::Anon(i) => i,
+            _ => bug!(),
+        }
+    }
+}
+
 impl From<BoundVar> for BoundTy {
     fn from(var: BoundVar) -> Self {
-        BoundTy { var, kind: BoundTyKind::Anon }
+        BoundTy { var, kind: BoundTyKind::Anon(var.as_u32()) }
     }
 }
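With #107486, the anonymous bound-type kind carries its variable index and `Param` carries the parameter's `DefId`, mirroring how bound regions already track their identity. A hedged, self-contained sketch of why that helps, using stand-in types rather than the rustc definitions:

```rust
// Stand-in types illustrating the idea: once the kind carries its own index,
// code that only has the kind (e.g. a placeholder) can still recover it.
type BoundVar = u32;

#[allow(dead_code)]
#[derive(Copy, Clone, Debug, PartialEq)]
enum BoundTyKind {
    Anon(u32),
    Param(/* def_id: */ u32, &'static str),
}

impl BoundTyKind {
    fn expect_anon(self) -> u32 {
        match self {
            BoundTyKind::Anon(i) => i,
            _ => panic!("expected an anonymous bound type"),
        }
    }
}

struct BoundTy {
    var: BoundVar,
    kind: BoundTyKind,
}

impl From<BoundVar> for BoundTy {
    fn from(var: BoundVar) -> Self {
        // Keep the kind's index in sync with the variable index.
        BoundTy { var, kind: BoundTyKind::Anon(var) }
    }
}

fn main() {
    let bt = BoundTy::from(3);
    // A placeholder built from just the kind still knows which variable it was.
    assert_eq!(bt.kind.expect_anon(), bt.var);
}
```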
@ -44,7 +44,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
let expr_ty = expr.ty;
|
||||
let temp = {
|
||||
let mut local_decl = LocalDecl::new(expr_ty, expr_span);
|
||||
if mutability == Mutability::Not {
|
||||
if mutability.is_not() {
|
||||
local_decl = local_decl.immutable();
|
||||
}
|
||||
|
||||
|
@ -5,7 +5,6 @@ use std::cell::Cell;
|
||||
|
||||
use either::Right;
|
||||
|
||||
use rustc_ast::Mutability;
|
||||
use rustc_const_eval::const_eval::CheckAlignment;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_hir::def::DefKind;
|
||||
@ -289,7 +288,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
|
||||
}
|
||||
// If the static allocation is mutable, then we can't const prop it as its content
|
||||
// might be different at runtime.
|
||||
if alloc.inner().mutability == Mutability::Mut {
|
||||
if alloc.inner().mutability.is_mut() {
|
||||
throw_machine_stop_str!("can't access mutable globals in ConstProp");
|
||||
}
|
||||
|
||||
@ -528,7 +527,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
let r = self.use_ecx(|this| this.ecx.read_immediate(&this.ecx.eval_operand(right, None)?));
|
||||
let l = self.use_ecx(|this| this.ecx.read_immediate(&this.ecx.eval_operand(left, None)?));
|
||||
// Check for exceeding shifts *even if* we cannot evaluate the LHS.
|
||||
if op == BinOp::Shr || op == BinOp::Shl {
|
||||
if matches!(op, BinOp::Shr | BinOp::Shl) {
|
||||
let r = r.clone()?;
|
||||
// We need the type of the LHS. We cannot use `place_layout` as that is the type
|
||||
// of the result, which for checked binops is not the same!
|
||||
|
@ -368,7 +368,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
this.ecx.read_immediate(&this.ecx.eval_operand(left, None)?)
|
||||
});
|
||||
// Check for exceeding shifts *even if* we cannot evaluate the LHS.
|
||||
if op == BinOp::Shr || op == BinOp::Shl {
|
||||
if matches!(op, BinOp::Shr | BinOp::Shl) {
|
||||
let r = r.clone()?;
|
||||
// We need the type of the LHS. We cannot use `place_layout` as that is the type
|
||||
// of the result, which for checked binops is not the same!
|
||||
|
@ -323,7 +323,10 @@ impl DebugCounters {
|
||||
String::new()
|
||||
},
|
||||
self.format_operand(lhs),
|
||||
if op == Op::Add { "+" } else { "-" },
|
||||
match op {
|
||||
Op::Add => "+",
|
||||
Op::Subtract => "-",
|
||||
},
|
||||
self.format_operand(rhs),
|
||||
);
|
||||
}
|
||||
|
@ -427,7 +427,7 @@ impl<'tcx> CloneShimBuilder<'tcx> {
|
||||
fn make_place(&mut self, mutability: Mutability, ty: Ty<'tcx>) -> Place<'tcx> {
|
||||
let span = self.span;
|
||||
let mut local = LocalDecl::new(ty, span);
|
||||
if mutability == Mutability::Not {
|
||||
if mutability.is_not() {
|
||||
local = local.immutable();
|
||||
}
|
||||
Place::from(self.local_decls.push(local))
|
||||
|
@ -93,11 +93,12 @@ impl<'a> Parser<'a> {
|
||||
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
|
||||
// that starts like a path (1 token), but it fact not a path.
|
||||
// Also, we avoid stealing syntax from `parse_item_`.
|
||||
if force_collect == ForceCollect::Yes {
|
||||
self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))
|
||||
} else {
|
||||
self.parse_stmt_path_start(lo, attrs)
|
||||
}?
|
||||
match force_collect {
|
||||
ForceCollect::Yes => {
|
||||
self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))?
|
||||
}
|
||||
ForceCollect::No => self.parse_stmt_path_start(lo, attrs)?,
|
||||
}
|
||||
} else if let Some(item) = self.parse_item_common(
|
||||
attrs.clone(),
|
||||
false,
|
||||
@ -113,13 +114,12 @@ impl<'a> Parser<'a> {
|
||||
self.mk_stmt(lo, StmtKind::Empty)
|
||||
} else if self.token != token::CloseDelim(Delimiter::Brace) {
|
||||
// Remainder are line-expr stmts.
|
||||
let e = if force_collect == ForceCollect::Yes {
|
||||
self.collect_tokens_no_attrs(|this| {
|
||||
let e = match force_collect {
|
||||
ForceCollect::Yes => self.collect_tokens_no_attrs(|this| {
|
||||
this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
|
||||
})
|
||||
} else {
|
||||
self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
|
||||
}?;
|
||||
})?,
|
||||
ForceCollect::No => self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))?,
|
||||
};
|
||||
if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
|
||||
let bl = self.parse_block()?;
|
||||
// Destructuring assignment ... else.
|
||||
|
@ -323,13 +323,14 @@ impl<'a> Parser<'a> {
|
||||
} else if self.can_begin_bound() {
|
||||
self.parse_bare_trait_object(lo, allow_plus)?
|
||||
} else if self.eat(&token::DotDotDot) {
|
||||
if allow_c_variadic == AllowCVariadic::Yes {
|
||||
TyKind::CVarArgs
|
||||
} else {
|
||||
// FIXME(Centril): Should we just allow `...` syntactically
|
||||
// anywhere in a type and use semantic restrictions instead?
|
||||
self.error_illegal_c_varadic_ty(lo);
|
||||
TyKind::Err
|
||||
match allow_c_variadic {
|
||||
AllowCVariadic::Yes => TyKind::CVarArgs,
|
||||
AllowCVariadic::No => {
|
||||
// FIXME(Centril): Should we just allow `...` syntactically
|
||||
// anywhere in a type and use semantic restrictions instead?
|
||||
self.error_illegal_c_varadic_ty(lo);
|
||||
TyKind::Err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let msg = format!("expected type, found {}", super::token_descr(&self.token));
|
||||
@ -343,10 +344,9 @@ impl<'a> Parser<'a> {
|
||||
let mut ty = self.mk_ty(span, kind);
|
||||
|
||||
// Try to recover from use of `+` with incorrect priority.
|
||||
if allow_plus == AllowPlus::Yes {
|
||||
self.maybe_recover_from_bad_type_plus(&ty)?;
|
||||
} else {
|
||||
self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty);
|
||||
match allow_plus {
|
||||
AllowPlus::Yes => self.maybe_recover_from_bad_type_plus(&ty)?,
|
||||
AllowPlus::No => self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty),
|
||||
}
|
||||
if RecoverQuestionMark::Yes == recover_question_mark {
|
||||
ty = self.maybe_recover_from_question_mark(ty);
|
||||
|
@ -864,33 +864,39 @@ impl CheckAttrVisitor<'_> {
|
||||
target: Target,
|
||||
specified_inline: &mut Option<(bool, Span)>,
|
||||
) -> bool {
|
||||
if target == Target::Use || target == Target::ExternCrate {
|
||||
let do_inline = meta.name_or_empty() == sym::inline;
|
||||
if let Some((prev_inline, prev_span)) = *specified_inline {
|
||||
if do_inline != prev_inline {
|
||||
let mut spans = MultiSpan::from_spans(vec![prev_span, meta.span()]);
|
||||
spans.push_span_label(prev_span, fluent::passes_doc_inline_conflict_first);
|
||||
spans.push_span_label(meta.span(), fluent::passes_doc_inline_conflict_second);
|
||||
self.tcx.sess.emit_err(errors::DocKeywordConflict { spans });
|
||||
return false;
|
||||
match target {
|
||||
Target::Use | Target::ExternCrate => {
|
||||
let do_inline = meta.name_or_empty() == sym::inline;
|
||||
if let Some((prev_inline, prev_span)) = *specified_inline {
|
||||
if do_inline != prev_inline {
|
||||
let mut spans = MultiSpan::from_spans(vec![prev_span, meta.span()]);
|
||||
spans.push_span_label(prev_span, fluent::passes_doc_inline_conflict_first);
|
||||
spans.push_span_label(
|
||||
meta.span(),
|
||||
fluent::passes_doc_inline_conflict_second,
|
||||
);
|
||||
self.tcx.sess.emit_err(errors::DocKeywordConflict { spans });
|
||||
return false;
|
||||
}
|
||||
true
|
||||
} else {
|
||||
*specified_inline = Some((do_inline, meta.span()));
|
||||
true
|
||||
}
|
||||
true
|
||||
} else {
|
||||
*specified_inline = Some((do_inline, meta.span()));
|
||||
true
|
||||
}
|
||||
} else {
|
||||
self.tcx.emit_spanned_lint(
|
||||
INVALID_DOC_ATTRIBUTES,
|
||||
hir_id,
|
||||
meta.span(),
|
||||
errors::DocInlineOnlyUse {
|
||||
attr_span: meta.span(),
|
||||
item_span: (attr.style == AttrStyle::Outer)
|
||||
.then(|| self.tcx.hir().span(hir_id)),
|
||||
},
|
||||
);
|
||||
false
|
||||
_ => {
|
||||
self.tcx.emit_spanned_lint(
|
||||
INVALID_DOC_ATTRIBUTES,
|
||||
hir_id,
|
||||
meta.span(),
|
||||
errors::DocInlineOnlyUse {
|
||||
attr_span: meta.span(),
|
||||
item_span: (attr.style == AttrStyle::Outer)
|
||||
.then(|| self.tcx.hir().span(hir_id)),
|
||||
},
|
||||
);
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1137,7 +1143,7 @@ impl CheckAttrVisitor<'_> {
|
||||
errors::DocTestUnknownInclude {
|
||||
path,
|
||||
value: value.to_string(),
|
||||
inner: if attr.style == AttrStyle::Inner { "!" } else { "" },
|
||||
inner: match attr.style { AttrStyle::Inner=> "!" , AttrStyle::Outer => "" },
|
||||
sugg: (attr.meta().unwrap().span, applicability),
|
||||
}
|
||||
);
|
||||
|
@ -125,7 +125,7 @@ impl<'a, 'tcx> Annotator<'a, 'tcx> {
|
||||
if let Some((depr, span)) = &depr {
|
||||
is_deprecated = true;
|
||||
|
||||
if kind == AnnotationKind::Prohibited || kind == AnnotationKind::DeprecationProhibited {
|
||||
if matches!(kind, AnnotationKind::Prohibited | AnnotationKind::DeprecationProhibited) {
|
||||
let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id);
|
||||
self.tcx.emit_spanned_lint(
|
||||
USELESS_DEPRECATED,
|
||||
|
@ -298,14 +298,15 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
|
||||
self.r.record_partial_res(id, PartialRes::new(res));
|
||||
}
|
||||
if module.is_normal() {
|
||||
if res == Res::Err {
|
||||
Ok(ty::Visibility::Public)
|
||||
} else {
|
||||
let vis = ty::Visibility::Restricted(res.def_id());
|
||||
if self.r.is_accessible_from(vis, parent_scope.module) {
|
||||
Ok(vis.expect_local())
|
||||
} else {
|
||||
Err(VisResolutionError::AncestorOnly(path.span))
|
||||
match res {
|
||||
Res::Err => Ok(ty::Visibility::Public),
|
||||
_ => {
|
||||
let vis = ty::Visibility::Restricted(res.def_id());
|
||||
if self.r.is_accessible_from(vis, parent_scope.module) {
|
||||
Ok(vis.expect_local())
|
||||
} else {
|
||||
Err(VisResolutionError::AncestorOnly(path.span))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -1552,12 +1552,12 @@ impl<'a> Resolver<'a> {
|
||||
if b.is_extern_crate() && ident.span.rust_2018() {
|
||||
help_msgs.push(format!("use `::{ident}` to refer to this {thing} unambiguously"))
|
||||
}
|
||||
if misc == AmbiguityErrorMisc::SuggestCrate {
|
||||
help_msgs
|
||||
.push(format!("use `crate::{ident}` to refer to this {thing} unambiguously"))
|
||||
} else if misc == AmbiguityErrorMisc::SuggestSelf {
|
||||
help_msgs
|
||||
.push(format!("use `self::{ident}` to refer to this {thing} unambiguously"))
|
||||
match misc {
|
||||
AmbiguityErrorMisc::SuggestCrate => help_msgs
|
||||
.push(format!("use `crate::{ident}` to refer to this {thing} unambiguously")),
|
||||
AmbiguityErrorMisc::SuggestSelf => help_msgs
|
||||
.push(format!("use `self::{ident}` to refer to this {thing} unambiguously")),
|
||||
AmbiguityErrorMisc::FromPrelude | AmbiguityErrorMisc::None => {}
|
||||
}
|
||||
|
||||
err.span_note(b.span, ¬e_msg);
|
||||
@ -1717,7 +1717,7 @@ impl<'a> Resolver<'a> {
|
||||
Applicability::MaybeIncorrect,
|
||||
)),
|
||||
)
|
||||
} else if self.session.edition() == Edition::Edition2015 {
|
||||
} else if self.session.rust_2015() {
|
||||
(
|
||||
format!("maybe a missing crate `{ident}`?"),
|
||||
Some((
|
||||
|
@ -7,7 +7,6 @@ use rustc_middle::ty;
|
||||
use rustc_session::lint::builtin::PROC_MACRO_DERIVE_RESOLUTION_FALLBACK;
|
||||
use rustc_session::lint::BuiltinLintDiagnostics;
|
||||
use rustc_span::def_id::LocalDefId;
|
||||
use rustc_span::edition::Edition;
|
||||
use rustc_span::hygiene::{ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext};
|
||||
use rustc_span::symbol::{kw, Ident};
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
@ -86,7 +85,7 @@ impl<'a> Resolver<'a> {
|
||||
// 4c. Standard library prelude (de-facto closed, controlled).
|
||||
// 6. Language prelude: builtin attributes (closed, controlled).
|
||||
|
||||
let rust_2015 = ctxt.edition() == Edition::Edition2015;
|
||||
let rust_2015 = ctxt.edition().rust_2015();
|
||||
let (ns, macro_kind, is_absolute_path) = match scope_set {
|
||||
ScopeSet::All(ns, _) => (ns, None, false),
|
||||
ScopeSet::AbsolutePath(ns) => (ns, None, true),
|
||||
|
@ -29,7 +29,6 @@ use crate::{id_from_def_id, SaveContext};
|
||||
|
||||
use rls_data::{SigElement, Signature};
|
||||
|
||||
use rustc_ast::Mutability;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir_pretty::id_to_string;
|
||||
@ -769,9 +768,8 @@ impl<'hir> Sig for hir::ForeignItem<'hir> {
|
||||
}
|
||||
hir::ForeignItemKind::Static(ref ty, m) => {
|
||||
let mut text = "static ".to_owned();
|
||||
if m == Mutability::Mut {
|
||||
text.push_str("mut ");
|
||||
}
|
||||
text.push_str(m.prefix_str());
|
||||
|
||||
let name = self.ident.to_string();
|
||||
let defs = vec![SigElement {
|
||||
id: id_from_def_id(self.owner_id.to_def_id()),
|
||||
|
@ -918,23 +918,24 @@ impl Session {
|
||||
ret
|
||||
}
|
||||
|
||||
/// Is this edition 2015?
|
||||
pub fn rust_2015(&self) -> bool {
|
||||
self.edition() == Edition::Edition2015
|
||||
self.edition().rust_2015()
|
||||
}
|
||||
|
||||
/// Are we allowed to use features from the Rust 2018 edition?
|
||||
pub fn rust_2018(&self) -> bool {
|
||||
self.edition() >= Edition::Edition2018
|
||||
self.edition().rust_2018()
|
||||
}
|
||||
|
||||
/// Are we allowed to use features from the Rust 2021 edition?
|
||||
pub fn rust_2021(&self) -> bool {
|
||||
self.edition() >= Edition::Edition2021
|
||||
self.edition().rust_2021()
|
||||
}
|
||||
|
||||
/// Are we allowed to use features from the Rust 2024 edition?
|
||||
pub fn rust_2024(&self) -> bool {
|
||||
self.edition() >= Edition::Edition2024
|
||||
self.edition().rust_2024()
|
||||
}
|
||||
|
||||
/// Returns `true` if we cannot skip the PLT for shared library calls.
|
||||
|
@ -49,8 +49,8 @@ impl fmt::Display for Edition {
|
||||
}
|
||||
|
||||
impl Edition {
|
||||
pub fn lint_name(&self) -> &'static str {
|
||||
match *self {
|
||||
pub fn lint_name(self) -> &'static str {
|
||||
match self {
|
||||
Edition::Edition2015 => "rust_2015_compatibility",
|
||||
Edition::Edition2018 => "rust_2018_compatibility",
|
||||
Edition::Edition2021 => "rust_2021_compatibility",
|
||||
@ -58,8 +58,8 @@ impl Edition {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn feature_name(&self) -> Symbol {
|
||||
match *self {
|
||||
pub fn feature_name(self) -> Symbol {
|
||||
match self {
|
||||
Edition::Edition2015 => sym::rust_2015_preview,
|
||||
Edition::Edition2018 => sym::rust_2018_preview,
|
||||
Edition::Edition2021 => sym::rust_2021_preview,
|
||||
@ -67,8 +67,8 @@ impl Edition {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_stable(&self) -> bool {
|
||||
match *self {
|
||||
pub fn is_stable(self) -> bool {
|
||||
match self {
|
||||
Edition::Edition2015 => true,
|
||||
Edition::Edition2018 => true,
|
||||
Edition::Edition2021 => true,
|
||||
@@ -76,23 +76,24 @@ impl Edition {
         }
     }

-    pub fn rust_2015(&self) -> bool {
-        *self == Edition::Edition2015
+    /// Is this edition 2015?
+    pub fn rust_2015(self) -> bool {
+        self == Edition::Edition2015
     }

     /// Are we allowed to use features from the Rust 2018 edition?
-    pub fn rust_2018(&self) -> bool {
-        *self >= Edition::Edition2018
+    pub fn rust_2018(self) -> bool {
+        self >= Edition::Edition2018
     }

     /// Are we allowed to use features from the Rust 2021 edition?
-    pub fn rust_2021(&self) -> bool {
-        *self >= Edition::Edition2021
+    pub fn rust_2021(self) -> bool {
+        self >= Edition::Edition2021
     }

     /// Are we allowed to use features from the Rust 2024 edition?
-    pub fn rust_2024(&self) -> bool {
-        *self >= Edition::Edition2024
+    pub fn rust_2024(self) -> bool {
+        self >= Edition::Edition2024
     }
 }
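After #107508, edition checks go through these `Edition` methods (with `Session` and `Span` delegating to them) instead of repeating `==`/`>=` comparisons at every call site. A short usage sketch with a stand-in enum mirroring those helpers:

```rust
// Stand-in for the real rustc_span::edition::Edition, showing how the
// by-value helpers are meant to be called.
#[allow(dead_code)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
enum Edition {
    Edition2015,
    Edition2018,
    Edition2021,
    Edition2024,
}

impl Edition {
    fn rust_2015(self) -> bool {
        self == Edition::Edition2015
    }

    /// Are we allowed to use features from the Rust 2018 edition?
    fn rust_2018(self) -> bool {
        self >= Edition::Edition2018
    }
}

fn needs_extern_crate_std(edition: Edition) -> bool {
    // Call sites read `edition.rust_2015()` instead of `edition == Edition::Edition2015`.
    edition.rust_2015()
}

fn main() {
    assert!(needs_extern_crate_std(Edition::Edition2015));
    assert!(Edition::Edition2021.rust_2018());
}
```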
@ -706,22 +706,22 @@ impl Span {
|
||||
|
||||
#[inline]
|
||||
pub fn rust_2015(self) -> bool {
|
||||
self.edition() == edition::Edition::Edition2015
|
||||
self.edition().rust_2015()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn rust_2018(self) -> bool {
|
||||
self.edition() >= edition::Edition::Edition2018
|
||||
self.edition().rust_2018()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn rust_2021(self) -> bool {
|
||||
self.edition() >= edition::Edition::Edition2021
|
||||
self.edition().rust_2021()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn rust_2024(self) -> bool {
|
||||
self.edition() >= edition::Edition::Edition2024
|
||||
self.edition().rust_2024()
|
||||
}
|
||||
|
||||
/// Returns the source callee.
|
||||
|
@ -161,6 +161,7 @@ impl<'tcx> InferCtxtEvalExt<'tcx> for InferCtxt<'tcx> {
|
||||
search_graph: &mut search_graph,
|
||||
infcx: self,
|
||||
var_values: CanonicalVarValues::dummy(),
|
||||
in_projection_eq_hack: false,
|
||||
}
|
||||
.evaluate_goal(goal);
|
||||
|
||||
@ -174,6 +175,10 @@ struct EvalCtxt<'a, 'tcx> {
|
||||
var_values: CanonicalVarValues<'tcx>,
|
||||
|
||||
search_graph: &'a mut search_graph::SearchGraph<'tcx>,
|
||||
|
||||
/// This field is used by a debug assertion in [`EvalCtxt::evaluate_goal`],
|
||||
/// see the comment in that method for more details.
|
||||
in_projection_eq_hack: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
|
||||
@ -209,7 +214,8 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
|
||||
loop {
|
||||
let (ref infcx, goal, var_values) =
|
||||
tcx.infer_ctxt().build_with_canonical(DUMMY_SP, &canonical_goal);
|
||||
let mut ecx = EvalCtxt { infcx, var_values, search_graph };
|
||||
let mut ecx =
|
||||
EvalCtxt { infcx, var_values, search_graph, in_projection_eq_hack: false };
|
||||
let result = ecx.compute_goal(goal);
|
||||
|
||||
// FIXME: `Response` should be `Copy`
|
||||
@ -239,10 +245,28 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
|
||||
let canonical_goal = self.infcx.canonicalize_query(goal, &mut orig_values);
|
||||
let canonical_response =
|
||||
EvalCtxt::evaluate_canonical_goal(self.tcx(), self.search_graph, canonical_goal)?;
|
||||
Ok((
|
||||
!canonical_response.value.var_values.is_identity(),
|
||||
instantiate_canonical_query_response(self.infcx, &orig_values, canonical_response),
|
||||
))
|
||||
|
||||
let has_changed = !canonical_response.value.var_values.is_identity();
|
||||
let certainty =
|
||||
instantiate_canonical_query_response(self.infcx, &orig_values, canonical_response);
|
||||
|
||||
// Check that rerunning this query with its inference constraints applied
|
||||
// doesn't result in new inference constraints and has the same result.
|
||||
//
|
||||
// If we have projection goals like `<T as Trait>::Assoc == u32` we recursively
|
||||
// call `exists<U> <T as Trait>::Assoc == U` to enable better caching. This goal
|
||||
// could constrain `U` to `u32` which would cause this check to result in a
|
||||
// solver cycle.
|
||||
if cfg!(debug_assertions) && has_changed && !self.in_projection_eq_hack {
|
||||
let mut orig_values = OriginalQueryValues::default();
|
||||
let canonical_goal = self.infcx.canonicalize_query(goal, &mut orig_values);
|
||||
let canonical_response =
|
||||
EvalCtxt::evaluate_canonical_goal(self.tcx(), self.search_graph, canonical_goal)?;
|
||||
assert!(canonical_response.value.var_values.is_identity());
|
||||
assert_eq!(certainty, canonical_response.value.certainty);
|
||||
}
|
||||
|
||||
Ok((has_changed, certainty))
|
||||
}
|
||||
|
||||
fn compute_goal(&mut self, goal: Goal<'tcx, ty::Predicate<'tcx>>) -> QueryResult<'tcx> {
|
||||
|
@ -45,8 +45,9 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
||||
projection_ty: goal.predicate.projection_ty,
|
||||
term: unconstrained_rhs,
|
||||
});
|
||||
let (_has_changed, normalize_certainty) =
|
||||
self.evaluate_goal(goal.with(self.tcx(), unconstrained_predicate))?;
|
||||
let (_has_changed, normalize_certainty) = self.in_projection_eq_hack(|this| {
|
||||
this.evaluate_goal(goal.with(this.tcx(), unconstrained_predicate))
|
||||
})?;
|
||||
|
||||
let nested_eq_goals =
|
||||
self.infcx.eq(goal.param_env, unconstrained_rhs, predicate.term)?;
|
||||
@ -55,6 +56,15 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// This sets a flag used by a debug assert in [`EvalCtxt::evaluate_goal`],
|
||||
/// see the comment in that method for more details.
|
||||
fn in_projection_eq_hack<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
|
||||
self.in_projection_eq_hack = true;
|
||||
let result = f(self);
|
||||
self.in_projection_eq_hack = false;
|
||||
result
|
||||
}
|
||||
|
||||
/// Is the projection predicate is of the form `exists<T> <Ty as Trait>::Assoc = T`.
|
||||
///
|
||||
/// This is the case if the `term` is an inference variable in the innermost universe
|
||||
@ -122,6 +132,28 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
|
||||
&& goal.param_env.visit_with(&mut visitor).is_continue()
|
||||
}
|
||||
|
||||
/// After normalizing the projection to `normalized_alias` with the given
|
||||
/// `normalization_certainty`, constrain the inference variable `term` to it
|
||||
/// and return a query response.
|
||||
fn eq_term_and_make_canonical_response(
|
||||
&mut self,
|
||||
goal: Goal<'tcx, ProjectionPredicate<'tcx>>,
|
||||
normalization_certainty: Certainty,
|
||||
normalized_alias: impl Into<ty::Term<'tcx>>,
|
||||
) -> QueryResult<'tcx> {
|
||||
// The term of our goal should be fully unconstrained, so this should never fail.
|
||||
//
|
||||
// It can however be ambiguous when the `normalized_alias` contains a projection.
|
||||
let nested_goals = self
|
||||
.infcx
|
||||
.eq(goal.param_env, goal.predicate.term, normalized_alias.into())
|
||||
.expect("failed to unify with unconstrained term");
|
||||
let rhs_certainty =
|
||||
self.evaluate_all(nested_goals).expect("failed to unify with unconstrained term");
|
||||
|
||||
self.make_canonical_response(normalization_certainty.unify_and(rhs_certainty))
|
||||
}
|
||||
|
||||
fn merge_project_candidates(
|
||||
&mut self,
|
||||
mut candidates: Vec<Candidate<'tcx>>,
|
||||
@ -218,7 +250,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
.map(|pred| goal.with(tcx, pred));
|
||||
|
||||
nested_goals.extend(where_clause_bounds);
|
||||
let trait_ref_certainty = ecx.evaluate_all(nested_goals)?;
|
||||
let match_impl_certainty = ecx.evaluate_all(nested_goals)?;
|
||||
|
||||
// In case the associated item is hidden due to specialization, we have to
|
||||
// return ambiguity this would otherwise be incomplete, resulting in
|
||||
@ -230,7 +262,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
goal.predicate.def_id(),
|
||||
impl_def_id
|
||||
)? else {
|
||||
return ecx.make_canonical_response(trait_ref_certainty.unify_and(Certainty::AMBIGUOUS));
|
||||
return ecx.make_canonical_response(match_impl_certainty.unify_and(Certainty::AMBIGUOUS));
|
||||
};
|
||||
|
||||
if !assoc_def.item.defaultness(tcx).has_value() {
|
||||
@ -277,17 +309,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
ty.map_bound(|ty| ty.into())
|
||||
};
|
||||
|
||||
// The term of our goal should be fully unconstrained, so this should never fail.
|
||||
//
|
||||
// It can however be ambiguous when the resolved type is a projection.
|
||||
let nested_goals = ecx
|
||||
.infcx
|
||||
.eq(goal.param_env, goal.predicate.term, term.subst(tcx, substs))
|
||||
.expect("failed to unify with unconstrained term");
|
||||
let rhs_certainty =
|
||||
ecx.evaluate_all(nested_goals).expect("failed to unify with unconstrained term");
|
||||
|
||||
ecx.make_canonical_response(trait_ref_certainty.unify_and(rhs_certainty))
|
||||
ecx.eq_term_and_make_canonical_response(goal, match_impl_certainty, term.subst(tcx, substs))
|
||||
})
|
||||
}
|
||||
|
||||
@ -309,18 +331,11 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
)?;
|
||||
let subst_certainty = ecx.evaluate_all(nested_goals)?;
|
||||
|
||||
// The term of our goal should be fully unconstrained, so this should never fail.
|
||||
//
|
||||
// It can however be ambiguous when the resolved type is a projection.
|
||||
let nested_goals = ecx
|
||||
.infcx
|
||||
.eq(goal.param_env, goal.predicate.term, assumption_projection_pred.term)
|
||||
.expect("failed to unify with unconstrained term");
|
||||
let rhs_certainty = ecx
|
||||
.evaluate_all(nested_goals)
|
||||
.expect("failed to unify with unconstrained term");
|
||||
|
||||
ecx.make_canonical_response(subst_certainty.unify_and(rhs_certainty))
|
||||
ecx.eq_term_and_make_canonical_response(
|
||||
goal,
|
||||
subst_certainty,
|
||||
assumption_projection_pred.term,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
Err(NoSolution)
|
||||
@ -437,14 +452,12 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
[ty::GenericArg::from(goal.predicate.self_ty())],
|
||||
));
|
||||
|
||||
let mut nested_goals = ecx.infcx.eq(
|
||||
goal.param_env,
|
||||
goal.predicate.term.ty().unwrap(),
|
||||
let is_sized_certainty = ecx.evaluate_goal(goal.with(tcx, sized_predicate))?.1;
|
||||
return ecx.eq_term_and_make_canonical_response(
|
||||
goal,
|
||||
is_sized_certainty,
|
||||
tcx.types.unit,
|
||||
)?;
|
||||
nested_goals.push(goal.with(tcx, sized_predicate));
|
||||
|
||||
return ecx.evaluate_all_and_make_canonical_response(nested_goals);
|
||||
);
|
||||
}
|
||||
|
||||
ty::Adt(def, substs) if def.is_struct() => {
|
||||
@ -456,7 +469,8 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
tcx,
|
||||
ty::Binder::dummy(goal.predicate.with_self_ty(tcx, self_ty)),
|
||||
);
|
||||
return ecx.evaluate_all_and_make_canonical_response(vec![new_goal]);
|
||||
let (_, certainty) = ecx.evaluate_goal(new_goal)?;
|
||||
return ecx.make_canonical_response(certainty);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -469,7 +483,8 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
tcx,
|
||||
ty::Binder::dummy(goal.predicate.with_self_ty(tcx, self_ty)),
|
||||
);
|
||||
return ecx.evaluate_all_and_make_canonical_response(vec![new_goal]);
|
||||
let (_, certainty) = ecx.evaluate_goal(new_goal)?;
|
||||
return ecx.make_canonical_response(certainty);
|
||||
}
|
||||
},
|
||||
|
||||
@ -482,9 +497,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
|
||||
),
|
||||
};
|
||||
|
||||
let nested_goals =
|
||||
ecx.infcx.eq(goal.param_env, goal.predicate.term.ty().unwrap(), metadata_ty)?;
|
||||
ecx.evaluate_all_and_make_canonical_response(nested_goals)
|
||||
ecx.eq_term_and_make_canonical_response(goal, Certainty::Yes, metadata_ty)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -45,6 +45,7 @@ impl<'tcx> SearchGraph<'tcx> {
|
||||
/// Tries putting the new goal on the stack, returning an error if it is already cached.
|
||||
///
|
||||
/// This correctly updates the provisional cache if there is a cycle.
|
||||
#[instrument(level = "debug", skip(self, tcx), ret)]
|
||||
pub(super) fn try_push_stack(
|
||||
&mut self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
@ -79,8 +80,10 @@ impl<'tcx> SearchGraph<'tcx> {
|
||||
Entry::Occupied(entry_index) => {
|
||||
let entry_index = *entry_index.get();
|
||||
|
||||
cache.add_dependency_of_leaf_on(entry_index);
|
||||
let stack_depth = cache.depth(entry_index);
|
||||
debug!("encountered cycle with depth {stack_depth:?}");
|
||||
|
||||
cache.add_dependency_of_leaf_on(entry_index);
|
||||
|
||||
self.stack[stack_depth].has_been_used = true;
|
||||
// NOTE: The goals on the stack aren't the only goals involved in this cycle.
|
||||
@ -117,6 +120,7 @@ impl<'tcx> SearchGraph<'tcx> {
|
||||
/// updated the provisional cache and we have to recompute the current goal.
|
||||
///
|
||||
/// FIXME: Refer to the rustc-dev-guide entry once it exists.
|
||||
#[instrument(level = "debug", skip(self, tcx, actual_goal), ret)]
|
||||
pub(super) fn try_finalize_goal(
|
||||
&mut self,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
|
@ -1230,20 +1230,23 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
}

ty::PredicateKind::WellFormed(ty) => {
if self.tcx.sess.opts.unstable_opts.trait_solver == TraitSolver::Classic {
// WF predicates cannot themselves make
// errors. They can only block due to
// ambiguity; otherwise, they always
// degenerate into other obligations
// (which may fail).
span_bug!(span, "WF predicate not satisfied for {:?}", ty);
} else {
// FIXME: we'll need a better message which takes into account
// which bounds actually failed to hold.
self.tcx.sess.struct_span_err(
span,
&format!("the type `{}` is not well-formed", ty),
)
match self.tcx.sess.opts.unstable_opts.trait_solver {
TraitSolver::Classic => {
// WF predicates cannot themselves make
// errors. They can only block due to
// ambiguity; otherwise, they always
// degenerate into other obligations
// (which may fail).
span_bug!(span, "WF predicate not satisfied for {:?}", ty);
}
TraitSolver::Chalk | TraitSolver::Next => {
// FIXME: we'll need a better message which takes into account
// which bounds actually failed to hold.
self.tcx.sess.struct_span_err(
span,
&format!("the type `{}` is not well-formed", ty),
)
}
}
}
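This hunk rewrites an `if trait_solver == TraitSolver::Classic { … } else { … }` check into an exhaustive `match`. A minimal standalone sketch of why that shape is preferred (a hypothetical enum and function for illustration, not rustc code): a variant added later makes the compiler flag every `match` that does not yet handle it, whereas an `else` branch would silently absorb it.

```rust
// Hypothetical stand-ins, mirroring only the shape of the refactor above.
enum TraitSolver {
    Classic,
    Chalk,
    Next,
}

fn solver_kind(solver: &TraitSolver) -> &'static str {
    // Exhaustive match: a future variant becomes a "non-exhaustive patterns" error here,
    // while an `if == Classic { .. } else { .. }` check would silently lump it with `else`.
    match solver {
        TraitSolver::Classic => "classic",
        TraitSolver::Chalk | TraitSolver::Next => "experimental",
    }
}

fn main() {
    assert_eq!(solver_kind(&TraitSolver::Next), "experimental");
}
```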
@ -783,7 +783,7 @@ impl<'tcx> TypeFolder<'tcx> for BoundVarReplacer<'_, 'tcx> {
}
ty::Bound(debruijn, bound_ty) if debruijn >= self.current_index => {
let universe = self.universe_for(debruijn);
let p = ty::PlaceholderType { universe, name: bound_ty.var };
let p = ty::PlaceholderType { universe, name: bound_ty.kind };
self.mapped_types.insert(p, bound_ty);
self.infcx.tcx.mk_ty(ty::Placeholder(p))
}
@ -524,7 +524,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
.kind
{
GenericParamDefKind::Type { .. } => {
let kind = ty::BoundTyKind::Param(param.name);
let kind = ty::BoundTyKind::Param(param.def_id, param.name);
let bound_var = ty::BoundVariableKind::Ty(kind);
bound_vars.push(bound_var);
tcx.mk_ty(ty::Bound(
@ -725,7 +725,7 @@ fn bound_vars_for_item(tcx: TyCtxt<'_>, def_id: DefId) -> SubstsRef<'_> {
ty::INNERMOST,
ty::BoundTy {
var: ty::BoundVar::from(param.index),
kind: ty::BoundTyKind::Param(param.name),
kind: ty::BoundTyKind::Param(param.def_id, param.name),
},
))
.into(),
@ -370,7 +370,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Ty<RustInterner<'tcx>>> for Ty<'tcx> {
ty::Placeholder(_placeholder) => {
chalk_ir::TyKind::Placeholder(chalk_ir::PlaceholderIndex {
ui: chalk_ir::UniverseIndex { counter: _placeholder.universe.as_usize() },
idx: _placeholder.name.as_usize(),
idx: _placeholder.name.expect_anon() as usize,
})
}
ty::Infer(_infer) => unimplemented!(),
@ -452,10 +452,6 @@ impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty<RustInterner<'tcx>> {
),
TyKind::Foreign(def_id) => ty::Foreign(def_id.0),
TyKind::Error => return interner.tcx.ty_error(),
TyKind::Placeholder(placeholder) => ty::Placeholder(ty::Placeholder {
universe: ty::UniverseIndex::from_usize(placeholder.ui.counter),
name: ty::BoundVar::from_usize(placeholder.idx),
}),
TyKind::Alias(alias_ty) => match alias_ty {
chalk_ir::AliasTy::Projection(projection) => ty::Alias(
ty::Projection,
@ -473,13 +469,17 @@ impl<'tcx> LowerInto<'tcx, Ty<'tcx>> for &chalk_ir::Ty<RustInterner<'tcx>> {
),
},
TyKind::Function(_quantified_ty) => unimplemented!(),
TyKind::BoundVar(_bound) => ty::Bound(
ty::DebruijnIndex::from_usize(_bound.debruijn.depth() as usize),
TyKind::BoundVar(bound) => ty::Bound(
ty::DebruijnIndex::from_usize(bound.debruijn.depth() as usize),
ty::BoundTy {
var: ty::BoundVar::from_usize(_bound.index),
kind: ty::BoundTyKind::Anon,
var: ty::BoundVar::from_usize(bound.index),
kind: ty::BoundTyKind::Anon(bound.index as u32),
},
),
TyKind::Placeholder(placeholder) => ty::Placeholder(ty::Placeholder {
universe: ty::UniverseIndex::from_usize(placeholder.ui.counter),
name: ty::BoundTyKind::Anon(placeholder.idx as u32),
}),
TyKind::InferenceVar(_, _) => unimplemented!(),
TyKind::Dyn(_) => unimplemented!(),
};
@ -504,7 +504,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Lifetime<RustInterner<'tcx>>> for Region<'t
ty::RePlaceholder(placeholder_region) => {
chalk_ir::LifetimeData::Placeholder(chalk_ir::PlaceholderIndex {
ui: chalk_ir::UniverseIndex { counter: placeholder_region.universe.index() },
idx: 0,
idx: 0, // FIXME: This `idx: 0` is sus.
})
.intern(interner)
}
@ -674,7 +674,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Binders<chalk_ir::QuantifiedWhereClauses<Ru
let self_ty = interner.tcx.mk_ty(ty::Bound(
// This is going to be wrapped in a binder
ty::DebruijnIndex::from_usize(1),
ty::BoundTy { var: ty::BoundVar::from_usize(0), kind: ty::BoundTyKind::Anon },
ty::BoundTy { var: ty::BoundVar::from_usize(0), kind: ty::BoundTyKind::Anon(0) },
));
let where_clauses = predicates.into_iter().map(|predicate| {
let (predicate, binders, _named_regions) =
@ -1038,7 +1038,7 @@ pub(crate) struct ParamsSubstitutor<'tcx> {
binder_index: ty::DebruijnIndex,
list: Vec<rustc_middle::ty::ParamTy>,
next_ty_placeholder: usize,
pub(crate) params: rustc_data_structures::fx::FxHashMap<usize, rustc_middle::ty::ParamTy>,
pub(crate) params: rustc_data_structures::fx::FxHashMap<u32, rustc_middle::ty::ParamTy>,
pub(crate) named_regions: BTreeMap<DefId, u32>,
}

@ -1072,15 +1072,15 @@ impl<'tcx> TypeFolder<'tcx> for ParamsSubstitutor<'tcx> {
ty::Param(param) => match self.list.iter().position(|r| r == &param) {
Some(idx) => self.tcx.mk_ty(ty::Placeholder(ty::PlaceholderType {
universe: ty::UniverseIndex::from_usize(0),
name: ty::BoundVar::from_usize(idx),
name: ty::BoundTyKind::Anon(idx as u32),
})),
None => {
self.list.push(param);
let idx = self.list.len() - 1 + self.next_ty_placeholder;
self.params.insert(idx, param);
self.params.insert(idx as u32, param);
self.tcx.mk_ty(ty::Placeholder(ty::PlaceholderType {
universe: ty::UniverseIndex::from_usize(0),
name: ty::BoundVar::from_usize(idx),
name: ty::BoundTyKind::Anon(idx as u32),
}))
}
},
@ -1119,13 +1119,13 @@ impl<'tcx> TypeFolder<'tcx> for ParamsSubstitutor<'tcx> {

pub(crate) struct ReverseParamsSubstitutor<'tcx> {
tcx: TyCtxt<'tcx>,
params: rustc_data_structures::fx::FxHashMap<usize, rustc_middle::ty::ParamTy>,
params: rustc_data_structures::fx::FxHashMap<u32, rustc_middle::ty::ParamTy>,
}

impl<'tcx> ReverseParamsSubstitutor<'tcx> {
pub(crate) fn new(
tcx: TyCtxt<'tcx>,
params: rustc_data_structures::fx::FxHashMap<usize, rustc_middle::ty::ParamTy>,
params: rustc_data_structures::fx::FxHashMap<u32, rustc_middle::ty::ParamTy>,
) -> Self {
Self { tcx, params }
}
@ -1139,7 +1139,7 @@ impl<'tcx> TypeFolder<'tcx> for ReverseParamsSubstitutor<'tcx> {
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
match *t.kind() {
ty::Placeholder(ty::PlaceholderType { universe: ty::UniverseIndex::ROOT, name }) => {
match self.params.get(&name.as_usize()) {
match self.params.get(&name.expect_anon()) {
Some(param) => self.tcx.mk_ty(ty::Param(*param)),
None => t,
}
@ -1171,7 +1171,8 @@ impl<'tcx> TypeVisitor<'tcx> for PlaceholdersCollector {
fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow<Self::BreakTy> {
match t.kind() {
ty::Placeholder(p) if p.universe == self.universe_index => {
self.next_ty_placeholder = self.next_ty_placeholder.max(p.name.as_usize() + 1);
self.next_ty_placeholder =
self.next_ty_placeholder.max(p.name.expect_anon() as usize + 1);
}

_ => (),
@ -1186,6 +1187,7 @@ impl<'tcx> TypeVisitor<'tcx> for PlaceholdersCollector {
if let ty::BoundRegionKind::BrAnon(anon, _) = p.name {
self.next_anon_region_placeholder = self.next_anon_region_placeholder.max(anon);
}
// FIXME: This doesn't seem to handle BrNamed at all?
}

_ => (),
@ -6,12 +6,10 @@
pub(crate) mod db;
pub(crate) mod lowering;

use rustc_data_structures::fx::FxHashMap;

use rustc_middle::infer::canonical::{CanonicalTyVarKind, CanonicalVarKind};
use rustc_middle::traits::ChalkRustInterner;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, ParamTy, TyCtxt, TypeFoldable, TypeVisitable};
use rustc_middle::ty::{self, TyCtxt, TypeFoldable, TypeVisitable};

use rustc_infer::infer::canonical::{
Canonical, CanonicalVarValues, Certainty, QueryRegionConstraints, QueryResponse,
@ -41,7 +39,7 @@ pub(crate) fn evaluate_goal<'tcx>(
let mut params_substitutor =
ParamsSubstitutor::new(tcx, placeholders_collector.next_ty_placeholder);
let obligation = obligation.fold_with(&mut params_substitutor);
let params: FxHashMap<usize, ParamTy> = params_substitutor.params;
let params = params_substitutor.params;

let max_universe = obligation.max_universe.index();
@ -829,7 +829,7 @@ impl TcpListener {
/// }
///
/// fn main() -> std::io::Result<()> {
/// let listener = TcpListener::bind("127.0.0.1:80").unwrap();
/// let listener = TcpListener::bind("127.0.0.1:80")?;
///
/// for stream in listener.incoming() {
/// match stream {
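The doc change above (from the rollup's TcpListener PR) swaps `unwrap()` for `?`, which works because the example's `main` returns `std::io::Result<()>`. A minimal standalone version of the same pattern; the port and the empty handler below are placeholders, not part of the diff:

```rust
use std::net::{TcpListener, TcpStream};

fn handle_client(_stream: TcpStream) {
    // placeholder: a real example would read from or write to the stream
}

fn main() -> std::io::Result<()> {
    // `?` propagates the `io::Error` out of `main` instead of panicking like `unwrap()`.
    let listener = TcpListener::bind("127.0.0.1:8080")?;

    for stream in listener.incoming() {
        handle_client(stream?);
    }
    Ok(())
}
```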
@ -355,7 +355,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
}

clean::ImportItem(ref import) => {
let (stab, stab_tags) = if let Some(import_def_id) = import.source.did {
let stab_tags = if let Some(import_def_id) = import.source.did {
let ast_attrs = cx.tcx().get_attrs_unchecked(import_def_id);
let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs));

@ -367,15 +367,12 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
..myitem.clone()
};

let stab = import_item.stability_class(cx.tcx());
let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()));
(stab, stab_tags)
stab_tags
} else {
(None, None)
None
};

let add = if stab.is_some() { " " } else { "" };

w.write_str(ITEM_TABLE_ROW_OPEN);
let id = match import.kind {
clean::ImportKind::Simple(s) => {
@ -391,11 +388,10 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
};
write!(
w,
"<div class=\"item-left{add}{stab}\"{id}>\
"<div class=\"item-left\"{id}>\
<code>{vis}{imp}</code>\
</div>\
{stab_tags_before}{stab_tags}{stab_tags_after}",
stab = stab.unwrap_or_default(),
vis = visibility_print_with_space(myitem.visibility(tcx), myitem.item_id, cx),
imp = import.print(cx),
);
@ -417,9 +413,6 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
_ => "",
};

let stab = myitem.stability_class(cx.tcx());
let add = if stab.is_some() { " " } else { "" };

let visibility_emoji = match myitem.visibility(tcx) {
Some(ty::Visibility::Restricted(_)) => {
"<span title=\"Restricted Visibility\"> 🔒</span> "
@ -437,7 +430,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
};
write!(
w,
"<div class=\"item-left{add}{stab}\">\
"<div class=\"item-left\">\
<a class=\"{class}\" href=\"{href}\" title=\"{title}\">{name}</a>\
{visibility_emoji}\
{unsafety_flag}\
@ -448,8 +441,6 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
visibility_emoji = visibility_emoji,
stab_tags = extra_info_tags(myitem, item, cx.tcx()),
class = myitem.type_(),
add = add,
stab = stab.unwrap_or_default(),
unsafety_flag = unsafety_flag,
href = item_path(myitem.type_(), myitem.name.unwrap().as_str()),
title = [myitem.type_().to_string(), full_path(cx, myitem)]
@ -1,3 +1,11 @@
/* When static files are updated, their suffixes need to be updated.
1. In the top directory run:
./x.py doc --stage 1 library/core
2. Find the directory containing files named with updated suffixes:
find build -path '*'/stage1-std/'*'/static.files
3. Copy the filenames with updated suffixes from the directory.
*/

/* See FiraSans-LICENSE.txt for the Fira Sans license. */
@font-face {
font-family: 'Fira Sans';
@ -22,7 +30,7 @@
font-style: normal;
font-weight: 400;
src: local('Source Serif 4'),
url("SourceSerif4-Regular-1f7d512b176f0f72.ttf.woff2") format("woff2");
url("SourceSerif4-Regular-46f98efaafac5295.ttf.woff2") format("woff2");
font-display: swap;
}
@font-face {
@ -30,7 +38,7 @@
font-style: italic;
font-weight: 400;
src: local('Source Serif 4 Italic'),
url("SourceSerif4-It-d034fe4ef9d0fa00.ttf.woff2") format("woff2");
url("SourceSerif4-It-acdfaf1a8af734b1.ttf.woff2") format("woff2");
font-display: swap;
}
@font-face {
@ -38,7 +46,7 @@
font-style: normal;
font-weight: 700;
src: local('Source Serif 4 Bold'),
url("SourceSerif4-Bold-124a1ca42af929b6.ttf.woff2") format("woff2");
url("SourceSerif4-Bold-a2c9cd1067f8b328.ttf.woff2") format("woff2");
font-display: swap;
}

@ -977,10 +985,6 @@ so that we can apply CSS-filters to change the arrow color in themes */
0 -1px 0 black;
}

.item-left.unstable {
opacity: 0.65;
}

.since {
font-weight: normal;
font-size: initial;

@ -40,8 +40,6 @@
}

.setting-check {
position: relative;
width: 100%;
margin-right: 20px;
display: flex;
align-items: center;
Binary file not shown.
Binary file not shown.
@ -1,4 +1,4 @@
Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.
Copyright 2014 - 2023 Adobe (http://www.adobe.com/), with Reserved Font Name ‘Source’. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.

This Font Software is licensed under the SIL Open Font License, Version 1.1.
Binary file not shown.
@ -180,7 +180,6 @@ function browserSupportsHistoryApi() {
return window.history && typeof window.history.pushState === "function";
}

// eslint-disable-next-line no-unused-vars
function loadCss(cssUrl) {
const link = document.createElement("link");
link.href = cssUrl;

@ -51,7 +51,6 @@ function hasClass(elem, className) {
return elem && elem.classList && elem.classList.contains(className);
}

// eslint-disable-next-line no-unused-vars
function addClass(elem, className) {
if (!elem || !elem.classList) {
return;
@ -6,9 +6,9 @@

extern crate macros;

// @has foo/index.html '//*[@class="item-left unstable deprecated"]/span[@class="stab deprecated"]' \
// @has foo/index.html '//*[@class="item-left"]/span[@class="stab deprecated"]' \
// Deprecated
// @has - '//*[@class="item-left unstable deprecated"]/span[@class="stab unstable"]' \
// @has - '//*[@class="item-left"]/span[@class="stab unstable"]' \
// Experimental

// @has foo/macro.my_macro.html

@ -2,9 +2,9 @@
#![doc(issue_tracker_base_url = "https://issue_url/")]
#![unstable(feature = "test", issue = "32374")]

// @matches issue_32374/index.html '//*[@class="item-left unstable deprecated"]/span[@class="stab deprecated"]' \
// @matches issue_32374/index.html '//*[@class="item-left"]/span[@class="stab deprecated"]' \
// 'Deprecated'
// @matches issue_32374/index.html '//*[@class="item-left unstable deprecated"]/span[@class="stab unstable"]' \
// @matches issue_32374/index.html '//*[@class="item-left"]/span[@class="stab unstable"]' \
// 'Experimental'
// @matches issue_32374/index.html '//*[@class="item-right docblock-short"]/text()' 'Docs'
@ -4,7 +4,6 @@
extern crate reexport_check;

// @!has 'foo/index.html' '//code' 'pub use self::i32;'
// @has 'foo/index.html' '//div[@class="item-left deprecated"]' 'i32'
// @has 'foo/i32/index.html'
#[allow(deprecated, deprecated_in_future)]
pub use std::i32;
@ -12,6 +11,9 @@ pub use std::i32;
// @has 'foo/index.html' '//div[@class="item-left"]' 'String'
pub use std::string::String;

// i32 is deprecated, String is not
// @count 'foo/index.html' '//span[@class="stab deprecated"]' 1

// @has 'foo/index.html' '//div[@class="item-right docblock-short"]' 'Docs in original'
// this is a no-op, but shows what happens if there's an attribute that isn't a doc-comment
#[doc(inline)]