mirror of https://github.com/rust-lang/rust.git (synced 2025-02-23 04:14:28 +00:00)

Merge from rustc

This commit is contained in: commit fb26c21c35
@@ -153,6 +153,7 @@ trait ResolverAstLoweringExt {
    fn get_label_res(&self, id: NodeId) -> Option<NodeId>;
    fn get_lifetime_res(&self, id: NodeId) -> Option<LifetimeRes>;
    fn take_extra_lifetime_params(&mut self, id: NodeId) -> Vec<(Ident, NodeId, LifetimeRes)>;
    fn remap_extra_lifetime_params(&mut self, from: NodeId, to: NodeId);
    fn decl_macro_kind(&self, def_id: LocalDefId) -> MacroKind;
}

@@ -213,6 +214,11 @@ impl ResolverAstLoweringExt for ResolverAstLowering {
        self.extra_lifetime_params_map.remove(&id).unwrap_or_default()
    }

    fn remap_extra_lifetime_params(&mut self, from: NodeId, to: NodeId) {
        let lifetimes = self.extra_lifetime_params_map.remove(&from).unwrap_or_default();
        self.extra_lifetime_params_map.insert(to, lifetimes);
    }

    fn decl_macro_kind(&self, def_id: LocalDefId) -> MacroKind {
        self.builtin_macro_kinds.get(&def_id).copied().unwrap_or(MacroKind::Bang)
    }

@@ -1089,6 +1095,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                // constructing the HIR for `impl bounds...` and then lowering that.

                let impl_trait_node_id = self.next_node_id();
                // Shift `impl Trait` lifetime captures from the associated type bound's
                // node id to the opaque node id, so that the opaque can actually use
                // these lifetime bounds.
                self.resolver
                    .remap_extra_lifetime_params(constraint.id, impl_trait_node_id);

                self.with_dyn_type_scope(false, |this| {
                    let node_id = this.next_node_id();
@@ -2249,7 +2249,14 @@ impl<'tcx> RegionInferenceContext<'tcx> {
    }

    pub(crate) fn universe_info(&self, universe: ty::UniverseIndex) -> UniverseInfo<'tcx> {
        self.universe_causes[&universe].clone()
        // Query canonicalization can create local superuniverses (for example in
        // `InferCtx::query_response_substitution_guess`), but they don't have an associated
        // `UniverseInfo` explaining why they were created.
        // This can cause ICEs if these causes are accessed in diagnostics, for example in issue
        // #114907 where this happens via liveness and dropck outlives results.
        // Therefore, we return a default value in case that happens, which should at worst emit a
        // suboptimal error, instead of the ICE.
        self.universe_causes.get(&universe).cloned().unwrap_or_else(|| UniverseInfo::other())
    }

    /// Tries to find the terminator of the loop in which the region 'r' resides.
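The hunk above swaps a panicking index lookup for a lookup with a fallback. A minimal standalone sketch of the same pattern on a plain `HashMap` (the map, keys, and values here are made up for illustration):

```rust
use std::collections::HashMap;

fn main() {
    let causes: HashMap<u32, String> = HashMap::from([(0, "root".to_string())]);

    // Indexing (`causes[&1]`) would panic because the key is absent; `get`
    // plus a fallback returns a default instead, which is the shape of the
    // fix in the hunk above.
    let info = causes.get(&1).cloned().unwrap_or_else(|| "other".to_string());
    assert_eq!(info, "other");
}
```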
@@ -9,7 +9,7 @@ use rustc_span::Span;
use rustc_trait_selection::traits::query::type_op::{self, TypeOpOutput};
use rustc_trait_selection::traits::ObligationCause;

use crate::diagnostics::{ToUniverseInfo, UniverseInfo};
use crate::diagnostics::ToUniverseInfo;

use super::{Locations, NormalizeLocation, TypeChecker};

@@ -46,13 +46,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
            self.push_region_constraints(locations, category, data);
        }

        // If the query has created new universes and errors are going to be emitted, register the
        // cause of these new universes for improved diagnostics.
        let universe = self.infcx.universe();

        if old_universe != universe {
            let universe_info = match error_info {
                Some(error_info) => error_info.to_universe_info(old_universe),
                None => UniverseInfo::other(),
            };
        if old_universe != universe && let Some(error_info) = error_info {
            let universe_info = error_info.to_universe_info(old_universe);
            for u in (old_universe + 1)..=universe {
                self.borrowck_context.constraints.universe_causes.insert(u, universe_info.clone());
            }
@@ -69,15 +67,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    where
        T: TypeFoldable<TyCtxt<'tcx>>,
    {
        let old_universe = self.infcx.universe();

        let (instantiated, _) =
            self.infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical);

        for u in (old_universe + 1)..=self.infcx.universe() {
            self.borrowck_context.constraints.universe_causes.insert(u, UniverseInfo::other());
        }

        instantiated
    }
@@ -163,10 +163,6 @@ pub(crate) fn type_check<'mir, 'tcx>(

    debug!(?normalized_inputs_and_output);

    for u in ty::UniverseIndex::ROOT..=infcx.universe() {
        constraints.universe_causes.insert(u, UniverseInfo::other());
    }

    let mut borrowck_context = BorrowCheckContext {
        universal_regions,
        location_table,
@@ -367,7 +367,7 @@ impl<'a> LlvmArchiveBuilder<'a> {
            match addition {
                Addition::File { path, name_in_archive } => {
                    let path = CString::new(path.to_str().unwrap())?;
                    let name = CString::new(name_in_archive.clone())?;
                    let name = CString::new(name_in_archive.as_bytes())?;
                    members.push(llvm::LLVMRustArchiveMemberNew(
                        path.as_ptr(),
                        name.as_ptr(),
@@ -441,7 +441,7 @@ fn thin_lto(

    for (i, (name, buffer)) in modules.into_iter().enumerate() {
        info!("local module: {} - {}", i, name);
        let cname = CString::new(name.clone()).unwrap();
        let cname = CString::new(name.as_bytes()).unwrap();
        thin_modules.push(llvm::ThinLTOModule {
            identifier: cname.as_ptr(),
            data: buffer.data().as_ptr(),
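Both `CString` hunks above replace `name.clone()` with `name.as_bytes()`. A small standalone sketch of why either form compiles, assuming only the standard library:

```rust
use std::ffi::CString;

fn main() {
    let name = String::from("module.o");

    // `CString::new` accepts any `Into<Vec<u8>>`, so a borrowed byte slice is
    // enough and avoids allocating a second `String` just to hand over bytes.
    let from_bytes = CString::new(name.as_bytes()).unwrap();

    // The cloning form also compiles; it simply copies the `String` first.
    let from_clone = CString::new(name.clone()).unwrap();

    assert_eq!(from_bytes, from_clone);
}
```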
@@ -254,6 +254,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    }

    /// Find the wrapped inner type of a transparent wrapper.
    /// Must not be called on 1-ZST (as they don't have a uniquely defined "wrapped field").
    fn unfold_transparent(&self, layout: TyAndLayout<'tcx>) -> TyAndLayout<'tcx> {
        match layout.ty.kind() {
            ty::Adt(adt_def, _) if adt_def.repr().transparent() => {
@@ -263,11 +264,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                    let field = layout.field(self, idx);
                    if field.is_1zst() { None } else { Some(field) }
                });
                let Some(first) = non_1zst_fields.next() else {
                    // All fields are 1-ZST, so this is basically the same as `()`.
                    // (We still also compare the `PassMode`, so if this target does something strange with 1-ZST there, we'll know.)
                    return self.layout_of(self.tcx.types.unit).unwrap();
                };
                let first = non_1zst_fields.next().expect("`unfold_transparent` called on 1-ZST");
                assert!(
                    non_1zst_fields.next().is_none(),
                    "more than one non-1-ZST field in a transparent type"
@@ -289,17 +286,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        caller_layout: TyAndLayout<'tcx>,
        callee_layout: TyAndLayout<'tcx>,
    ) -> bool {
        fn primitive_abi_compat(a1: abi::Primitive, a2: abi::Primitive) -> bool {
            match (a1, a2) {
                // For integers, ignore the sign.
                (abi::Primitive::Int(int_ty1, _sign1), abi::Primitive::Int(int_ty2, _sign2)) => {
                    int_ty1 == int_ty2
                }
                // For everything else we require full equality.
                _ => a1 == a2,
            }
        }

        if caller_layout.ty == callee_layout.ty {
            // Fast path: equal types are definitely compatible.
            return true;
@@ -308,27 +294,40 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
        match (caller_layout.abi, callee_layout.abi) {
            // If both sides have Scalar/Vector/ScalarPair ABI, we can easily directly compare them.
            // Different valid ranges are okay (the validity check will complain if this leads to
            // invalid transmutes).
            // invalid transmutes). Different signs are *not* okay on some targets (e.g. `extern
            // "C"` on `s390x` where small integers are passed zero/sign-extended in large
            // registers), so we generally reject them to increase portability.
            // NOTE: this is *not* a stable guarantee! It just reflects a property of our current
            // ABIs. It's also fragile; the same pair of types might be considered ABI-compatible
            // when used directly by-value but not considered compatible as a struct field or array
            // element.
            (abi::Abi::Scalar(caller), abi::Abi::Scalar(callee)) => {
                primitive_abi_compat(caller.primitive(), callee.primitive())
                caller.primitive() == callee.primitive()
            }
            (
                abi::Abi::Vector { element: caller_element, count: caller_count },
                abi::Abi::Vector { element: callee_element, count: callee_count },
            ) => {
                primitive_abi_compat(caller_element.primitive(), callee_element.primitive())
                caller_element.primitive() == callee_element.primitive()
                    && caller_count == callee_count
            }
            (abi::Abi::ScalarPair(caller1, caller2), abi::Abi::ScalarPair(callee1, callee2)) => {
                primitive_abi_compat(caller1.primitive(), callee1.primitive())
                    && primitive_abi_compat(caller2.primitive(), callee2.primitive())
                caller1.primitive() == callee1.primitive()
                    && caller2.primitive() == callee2.primitive()
            }
            (abi::Abi::Aggregate { .. }, abi::Abi::Aggregate { .. }) => {
                // Aggregates are compatible only if they newtype-wrap the same type.
                // Aggregates are compatible only if they newtype-wrap the same type, or if they are both 1-ZST.
                // (The latter part is needed to ensure e.g. that `struct Zst` is compatible with `struct Wrap((), Zst)`.)
                // This is conservative, but also means that our check isn't quite so heavily dependent on the `PassMode`,
                // which means having ABI-compatibility on one target is much more likely to imply compatibility for other targets.
                self.unfold_transparent(caller_layout).ty
                    == self.unfold_transparent(callee_layout).ty
                if caller_layout.is_1zst() || callee_layout.is_1zst() {
                    // If either is a 1-ZST, both must be.
                    caller_layout.is_1zst() && callee_layout.is_1zst()
                } else {
                    // Neither is a 1-ZST, so we can check what they are wrapping.
                    self.unfold_transparent(caller_layout).ty
                        == self.unfold_transparent(callee_layout).ty
                }
            }
            // What remains is `Abi::Uninhabited` (which can never be passed anyway) and
            // mismatching ABIs, that should all be rejected.
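To make the aggregate rule above concrete, here is a small user-level sketch of the kinds of types the comments mention; the type names are invented and the asserts only restate the layout facts the check relies on:

```rust
// The names here are illustrative, not compiler code.
#[allow(dead_code)]
struct Zst;                     // a 1-ZST
#[allow(dead_code)]
struct WrapZst((), Zst);        // an aggregate made only of 1-ZSTs: itself a 1-ZST
#[repr(transparent)]
#[allow(dead_code)]
struct Newtype(u32);            // transparently wraps a u32

fn main() {
    // Both zero-sized aggregates occupy no space, which is why the check can
    // treat "both sides are 1-ZST" as compatible without unfolding them.
    assert_eq!(std::mem::size_of::<Zst>(), 0);
    assert_eq!(std::mem::size_of::<WrapZst>(), 0);

    // A `repr(transparent)` newtype has the same layout as its non-1-ZST
    // field, which is what `unfold_transparent` recovers in the other case.
    assert_eq!(std::mem::size_of::<Newtype>(), std::mem::size_of::<u32>());
}
```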
@@ -273,7 +273,7 @@ cfg_if! {
        pub use std::cell::RefMut as MappedWriteGuard;
        pub use std::cell::RefMut as MappedLockGuard;

        pub use std::cell::OnceCell;
        pub use std::cell::OnceCell as OnceLock;

        use std::cell::RefCell as InnerRwLock;

@@ -327,7 +327,7 @@ cfg_if! {

        pub use parking_lot::MappedMutexGuard as MappedLockGuard;

        pub use std::sync::OnceLock as OnceCell;
        pub use std::sync::OnceLock;

        pub use std::sync::atomic::{AtomicBool, AtomicUsize, AtomicU32, AtomicU64};
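These `cfg_if!` hunks point the `OnceCell`/`OnceLock` aliases at the standard library types. A minimal sketch of `std::sync::OnceLock`, the thread-safe once-initialized cell the parallel branch now re-exports (the static and values are illustrative):

```rust
use std::sync::OnceLock;

// `OnceLock` is the thread-safe counterpart of `OnceCell`: it can be written
// at most once and then read from any thread.
static GREETING: OnceLock<String> = OnceLock::new();

fn greeting() -> &'static str {
    GREETING.get_or_init(|| "hello".to_string())
}

fn main() {
    assert_eq!(greeting(), "hello");
    // A later `set` is rejected once the cell is initialized; the first value wins.
    assert!(GREETING.set("ignored".to_string()).is_err());
    assert_eq!(greeting(), "hello");
}
```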
@@ -608,6 +608,7 @@ E0794: include_str!("./error_codes/E0794.md"),
// E0420, // merged into 532
// E0421, // merged into 531
// E0427, // merged into 530
// E0445, // merged into 446 and type privacy lints
// E0456, // plugin `..` is not available for triple `..`
// E0465, // removed: merged with E0464
// E0467, // removed
@@ -1,10 +1,10 @@
#### Note: this error code is no longer emitted by the compiler.

A private trait was used on a public type parameter bound.

Erroneous code examples:

```compile_fail,E0445
#![deny(private_in_public)]
Previously erroneous code examples:

```
trait Foo {
    fn dummy(&self) { }
}
@@ -1,16 +1,16 @@
A private type was used in a public type signature.
A private type or trait was used in a public associated type signature.

Erroneous code example:

```compile_fail,E0446
#![deny(private_in_public)]
struct Bar(u32);
struct Bar;

mod foo {
    use crate::Bar;
    pub fn bar() -> Bar { // error: private type in public interface
        Bar(0)
    }
pub trait PubTr {
    type Alias;
}

impl PubTr for u8 {
    type Alias = Bar; // error private type in public interface
}

fn main() {}
@@ -22,13 +22,14 @@ This is done by using pub(crate) or pub(in crate::my_mod::etc)
Example:

```
struct Bar(u32);
struct Bar;

mod foo {
    use crate::Bar;
    pub(crate) fn bar() -> Bar { // only public to crate root
        Bar(0)
    }
pub(crate) trait PubTr { // only public to crate root
    type Alias;
}

impl PubTr for u8 {
    type Alias = Bar;
}

fn main() {}
@@ -38,12 +39,15 @@ The other way to solve this error is to make the private type public.
Example:

```
pub struct Bar(u32); // we set the Bar type public
mod foo {
    use crate::Bar;
    pub fn bar() -> Bar { // ok!
        Bar(0)
    }

pub struct Bar; // we set the Bar trait public

pub trait PubTr {
    type Alias;
}

impl PubTr for u8 {
    type Alias = Bar;
}

fn main() {}
@@ -55,6 +55,8 @@ use std::num::NonZeroUsize;
use std::panic;
use std::path::{Path, PathBuf};

// Used by external projects such as `rust-gpu`.
// See https://github.com/rust-lang/rust/pull/115393.
pub use termcolor::{Color, ColorSpec, WriteColor};

pub mod annotate_snippet_emitter_writer;
@@ -54,7 +54,7 @@ declare_features! (
    /// instead of just the platforms on which it is the C ABI.
    (accepted, abi_sysv64, "1.24.0", Some(36167), None),
    /// Allows using the `thiscall` ABI.
    (accepted, abi_thiscall, "1.19.0", None, None),
    (accepted, abi_thiscall, "1.73.0", None, None),
    /// Allows using ADX intrinsics from `core::arch::{x86, x86_64}`.
    (accepted, adx_target_feature, "1.61.0", Some(44839), None),
    /// Allows explicit discriminants on non-unit enum variants.
@ -3729,6 +3729,8 @@ impl<'hir> Node<'hir> {
|
||||
Node::Lifetime(lt) => Some(lt.ident),
|
||||
Node::GenericParam(p) => Some(p.name.ident()),
|
||||
Node::TypeBinding(b) => Some(b.ident),
|
||||
Node::PatField(f) => Some(f.ident),
|
||||
Node::ExprField(f) => Some(f.ident),
|
||||
Node::Param(..)
|
||||
| Node::AnonConst(..)
|
||||
| Node::ConstBlock(..)
|
||||
@ -3737,8 +3739,6 @@ impl<'hir> Node<'hir> {
|
||||
| Node::Block(..)
|
||||
| Node::Ctor(..)
|
||||
| Node::Pat(..)
|
||||
| Node::PatField(..)
|
||||
| Node::ExprField(..)
|
||||
| Node::Arm(..)
|
||||
| Node::Local(..)
|
||||
| Node::Crate(..)
|
||||
|
@ -910,19 +910,24 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
||||
) -> Ty<'tcx> {
|
||||
let tcx = self.tcx();
|
||||
let args = self.ast_path_args_for_ty(span, did, item_segment);
|
||||
let ty = tcx.at(span).type_of(did);
|
||||
|
||||
if let DefKind::TyAlias { lazy } = tcx.def_kind(did)
|
||||
&& (lazy || ty.skip_binder().has_opaque_types())
|
||||
{
|
||||
// Type aliases referring to types that contain opaque types (but aren't just directly
|
||||
// referencing a single opaque type) as well as those defined in crates that have the
|
||||
if let DefKind::TyAlias { lazy: true } = tcx.def_kind(did) {
|
||||
// Type aliases defined in crates that have the
|
||||
// feature `lazy_type_alias` enabled get encoded as a type alias that normalization will
|
||||
// then actually instantiate the where bounds of.
|
||||
let alias_ty = tcx.mk_alias_ty(did, args);
|
||||
Ty::new_alias(tcx, ty::Weak, alias_ty)
|
||||
} else {
|
||||
ty.instantiate(tcx, args)
|
||||
let ty = tcx.at(span).type_of(did);
|
||||
if ty.skip_binder().has_opaque_types() {
|
||||
// Type aliases referring to types that contain opaque types (but aren't just directly
|
||||
// referencing a single opaque type) get encoded as a type alias that normalization will
|
||||
// then actually instantiate the where bounds of.
|
||||
let alias_ty = tcx.mk_alias_ty(did, args);
|
||||
Ty::new_alias(tcx, ty::Weak, alias_ty)
|
||||
} else {
|
||||
ty.instantiate(tcx, args)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -38,6 +38,7 @@ use rustc_trait_selection::infer::InferCtxtExt;
|
||||
use rustc_trait_selection::traits::error_reporting::suggestions::NextTypeParamName;
|
||||
use rustc_trait_selection::traits::ObligationCtxt;
|
||||
use std::iter;
|
||||
use std::ops::Bound;
|
||||
|
||||
mod generics_of;
|
||||
mod item_bounds;
|
||||
@ -1144,15 +1145,15 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<ty::PolyFnSig<
|
||||
}
|
||||
|
||||
Ctor(data) | Variant(hir::Variant { data, .. }) if data.ctor().is_some() => {
|
||||
let ty = tcx.type_of(tcx.hir().get_parent_item(hir_id)).instantiate_identity();
|
||||
let adt_def_id = tcx.hir().get_parent_item(hir_id).def_id.to_def_id();
|
||||
let ty = tcx.type_of(adt_def_id).instantiate_identity();
|
||||
let inputs = data.fields().iter().map(|f| tcx.type_of(f.def_id).instantiate_identity());
|
||||
ty::Binder::dummy(tcx.mk_fn_sig(
|
||||
inputs,
|
||||
ty,
|
||||
false,
|
||||
hir::Unsafety::Normal,
|
||||
abi::Abi::Rust,
|
||||
))
|
||||
// constructors for structs with `layout_scalar_valid_range` are unsafe to call
|
||||
let safety = match tcx.layout_scalar_valid_range(adt_def_id) {
|
||||
(Bound::Unbounded, Bound::Unbounded) => hir::Unsafety::Normal,
|
||||
_ => hir::Unsafety::Unsafe,
|
||||
};
|
||||
ty::Binder::dummy(tcx.mk_fn_sig(inputs, ty, false, safety, abi::Abi::Rust))
|
||||
}
|
||||
|
||||
Expr(&hir::Expr { kind: hir::ExprKind::Closure { .. }, .. }) => {
|
||||
|
@@ -266,12 +266,10 @@ impl<T> Trait<T> for X {
                }
            }
        }
        (ty::FnPtr(_), ty::FnDef(def, _))
            if let hir::def::DefKind::Fn = tcx.def_kind(def) => {
            diag.note(
                "when the arguments and return types match, functions can be coerced \
                 to function pointers",
            );
        (ty::FnPtr(sig), ty::FnDef(def_id, _)) | (ty::FnDef(def_id, _), ty::FnPtr(sig)) => {
            if tcx.fn_sig(*def_id).skip_binder().unsafety() < sig.unsafety() {
                diag.note("unsafe functions cannot be coerced into safe function pointers");
            }
        }
        _ => {}
    }
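The rewritten arm above keys its note on comparing `unsafety()`. A short standalone example of the underlying language rule the two notes describe:

```rust
fn safe() {}
unsafe fn dangerous() {}

fn main() {
    // A safe `fn` item coerces to a plain function pointer when the
    // signatures match.
    let f: fn() = safe;
    f();

    // An `unsafe fn` only coerces to an `unsafe fn()` pointer; writing
    // `let g: fn() = dangerous;` is rejected, which is the case the
    // added note explains.
    let g: unsafe fn() = dangerous;
    let _ = g;
}
```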
@ -8,7 +8,7 @@ use rustc_borrowck as mir_borrowck;
|
||||
use rustc_codegen_ssa::traits::CodegenBackend;
|
||||
use rustc_data_structures::parallel;
|
||||
use rustc_data_structures::steal::Steal;
|
||||
use rustc_data_structures::sync::{Lrc, OnceCell, WorkerLocal};
|
||||
use rustc_data_structures::sync::{Lrc, OnceLock, WorkerLocal};
|
||||
use rustc_errors::PResult;
|
||||
use rustc_expand::base::{ExtCtxt, LintStoreExpand};
|
||||
use rustc_feature::Features;
|
||||
@ -689,7 +689,7 @@ pub fn create_global_ctxt<'tcx>(
|
||||
lint_store: Lrc<LintStore>,
|
||||
dep_graph: DepGraph,
|
||||
untracked: Untracked,
|
||||
gcx_cell: &'tcx OnceCell<GlobalCtxt<'tcx>>,
|
||||
gcx_cell: &'tcx OnceLock<GlobalCtxt<'tcx>>,
|
||||
arena: &'tcx WorkerLocal<Arena<'tcx>>,
|
||||
hir_arena: &'tcx WorkerLocal<rustc_hir::Arena<'tcx>>,
|
||||
) -> &'tcx GlobalCtxt<'tcx> {
|
||||
|
@ -7,7 +7,7 @@ use rustc_codegen_ssa::traits::CodegenBackend;
|
||||
use rustc_codegen_ssa::CodegenResults;
|
||||
use rustc_data_structures::steal::Steal;
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_data_structures::sync::{AppendOnlyIndexVec, Lrc, OnceCell, RwLock, WorkerLocal};
|
||||
use rustc_data_structures::sync::{AppendOnlyIndexVec, Lrc, OnceLock, RwLock, WorkerLocal};
|
||||
use rustc_hir::def_id::{StableCrateId, CRATE_DEF_ID, LOCAL_CRATE};
|
||||
use rustc_hir::definitions::Definitions;
|
||||
use rustc_incremental::DepGraphFuture;
|
||||
@ -78,7 +78,7 @@ impl<T> Default for Query<T> {
|
||||
|
||||
pub struct Queries<'tcx> {
|
||||
compiler: &'tcx Compiler,
|
||||
gcx_cell: OnceCell<GlobalCtxt<'tcx>>,
|
||||
gcx_cell: OnceLock<GlobalCtxt<'tcx>>,
|
||||
|
||||
arena: WorkerLocal<Arena<'tcx>>,
|
||||
hir_arena: WorkerLocal<rustc_hir::Arena<'tcx>>,
|
||||
@ -93,7 +93,7 @@ impl<'tcx> Queries<'tcx> {
|
||||
pub fn new(compiler: &'tcx Compiler) -> Queries<'tcx> {
|
||||
Queries {
|
||||
compiler,
|
||||
gcx_cell: OnceCell::new(),
|
||||
gcx_cell: OnceLock::new(),
|
||||
arena: WorkerLocal::new(|_| Arena::default()),
|
||||
hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
|
||||
parse: Default::default(),
|
||||
|
@ -228,6 +228,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T>
|
||||
}) => self.check_id(closure_id),
|
||||
_ => {}
|
||||
}
|
||||
lint_callback!(self, check_expr_post, e);
|
||||
}
|
||||
|
||||
fn visit_generic_arg(&mut self, arg: &'a ast::GenericArg) {
|
||||
|
@@ -500,6 +500,11 @@ fn register_builtins(store: &mut LintStore) {
        "converted into hard error, see issue #82523 \
         <https://github.com/rust-lang/rust/issues/82523> for more information",
    );
    store.register_removed(
        "private_in_public",
        "replaced with another group of lints, see RFC \
         <https://rust-lang.github.io/rfcs/2145-type-privacy.html> for more information",
    );
}

fn register_internals(store: &mut LintStore) {
@ -153,6 +153,7 @@ macro_rules! early_lint_methods {
|
||||
fn check_pat(a: &ast::Pat);
|
||||
fn check_pat_post(a: &ast::Pat);
|
||||
fn check_expr(a: &ast::Expr);
|
||||
fn check_expr_post(a: &ast::Expr);
|
||||
fn check_ty(a: &ast::Ty);
|
||||
fn check_generic_arg(a: &ast::GenericArg);
|
||||
fn check_generic_param(a: &ast::GenericParam);
|
||||
|
@@ -955,11 +955,14 @@ declare_lint! {

pub struct UnusedParens {
    with_self_ty_parens: bool,
    /// `1 as (i32) < 2` parses to ExprKind::Lt
    /// `1 as i32 < 2` parses to i32::<2[missing angle bracket]
    parens_in_cast_in_lt: Vec<ast::NodeId>,
}

impl UnusedParens {
    pub fn new() -> Self {
        Self { with_self_ty_parens: false }
        Self { with_self_ty_parens: false, parens_in_cast_in_lt: Vec::new() }
    }
}

@@ -1055,6 +1058,14 @@ impl UnusedParens {
impl EarlyLintPass for UnusedParens {
    #[inline]
    fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) {
        if let ExprKind::Binary(op, lhs, _rhs) = &e.kind &&
            (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl) &&
            let ExprKind::Cast(_expr, ty) = &lhs.kind &&
            let ast::TyKind::Paren(_) = &ty.kind
        {
            self.parens_in_cast_in_lt.push(ty.id);
        }

        match e.kind {
            ExprKind::Let(ref pat, _, _) | ExprKind::ForLoop(ref pat, ..) => {
                self.check_unused_parens_pat(cx, pat, false, false, (true, true));

@@ -1101,6 +1112,17 @@ impl EarlyLintPass for UnusedParens {
        <Self as UnusedDelimLint>::check_expr(self, cx, e)
    }

    fn check_expr_post(&mut self, _cx: &EarlyContext<'_>, e: &ast::Expr) {
        if let ExprKind::Binary(op, lhs, _rhs) = &e.kind &&
            (op.node == ast::BinOpKind::Lt || op.node == ast::BinOpKind::Shl) &&
            let ExprKind::Cast(_expr, ty) = &lhs.kind &&
            let ast::TyKind::Paren(_) = &ty.kind
        {
            let id = self.parens_in_cast_in_lt.pop().expect("check_expr and check_expr_post must balance");
            assert_eq!(id, ty.id, "check_expr, check_ty, and check_expr_post are called, in that order, by the visitor");
        }
    }

    fn check_pat(&mut self, cx: &EarlyContext<'_>, p: &ast::Pat) {
        use ast::{Mutability, PatKind::*};
        let keep_space = (false, false);

@@ -1141,6 +1163,11 @@ impl EarlyLintPass for UnusedParens {
    }

    fn check_ty(&mut self, cx: &EarlyContext<'_>, ty: &ast::Ty) {
        if let ast::TyKind::Paren(_) = ty.kind &&
            Some(&ty.id) == self.parens_in_cast_in_lt.last()
        {
            return;
        }
        match &ty.kind {
            ast::TyKind::Array(_, len) => {
                self.check_unused_delims_expr(
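For context on the `parens_in_cast_in_lt` bookkeeping above, a small example of the expression shape the lint must now leave alone (illustrative only):

```rust
fn main() {
    // With the parentheses this parses as `(1 as i32) < 2`. Without them the
    // parser reads `i32 <` as the start of a generic argument list, so these
    // parentheses are load-bearing and the lint deliberately skips them.
    let smaller = 1 as (i32) < 2;
    assert!(smaller);
}
```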
@ -982,44 +982,6 @@ declare_lint! {
|
||||
"detects trivial casts of numeric types which could be removed"
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// The `private_in_public` lint detects private items in public
|
||||
/// interfaces not caught by the old implementation.
|
||||
///
|
||||
/// ### Example
|
||||
///
|
||||
/// ```rust
|
||||
/// # #![allow(unused)]
|
||||
/// struct SemiPriv;
|
||||
///
|
||||
/// mod m1 {
|
||||
/// struct Priv;
|
||||
/// impl super::SemiPriv {
|
||||
/// pub fn f(_: Priv) {}
|
||||
/// }
|
||||
/// }
|
||||
/// # fn main() {}
|
||||
/// ```
|
||||
///
|
||||
/// {{produces}}
|
||||
///
|
||||
/// ### Explanation
|
||||
///
|
||||
/// The visibility rules are intended to prevent exposing private items in
|
||||
/// public interfaces. This is a [future-incompatible] lint to transition
|
||||
/// this to a hard error in the future. See [issue #34537] for more
|
||||
/// details.
|
||||
///
|
||||
/// [issue #34537]: https://github.com/rust-lang/rust/issues/34537
|
||||
/// [future-incompatible]: ../index.md#future-incompatible-lints
|
||||
pub PRIVATE_IN_PUBLIC,
|
||||
Warn,
|
||||
"detect private items in public interfaces not caught by the old implementation",
|
||||
@future_incompatible = FutureIncompatibleInfo {
|
||||
reference: "issue #34537 <https://github.com/rust-lang/rust/issues/34537>",
|
||||
};
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
/// The `invalid_alignment` lint detects dereferences of misaligned pointers during
|
||||
/// constant evaluation.
|
||||
@ -3415,7 +3377,6 @@ declare_lint_pass! {
|
||||
PATTERNS_IN_FNS_WITHOUT_BODY,
|
||||
POINTER_STRUCTURAL_MATCH,
|
||||
PRIVATE_BOUNDS,
|
||||
PRIVATE_IN_PUBLIC,
|
||||
PRIVATE_INTERFACES,
|
||||
PROC_MACRO_BACK_COMPAT,
|
||||
PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
|
||||
@ -4334,9 +4295,7 @@ declare_lint! {
|
||||
/// ### Example
|
||||
///
|
||||
/// ```rust,compile_fail
|
||||
/// # #![feature(type_privacy_lints)]
|
||||
/// # #![allow(unused)]
|
||||
/// # #![allow(private_in_public)]
|
||||
/// #![deny(private_interfaces)]
|
||||
/// struct SemiPriv;
|
||||
///
|
||||
@ -4357,9 +4316,8 @@ declare_lint! {
|
||||
/// Having something private in primary interface guarantees that
|
||||
/// the item will be unusable from outer modules due to type privacy.
|
||||
pub PRIVATE_INTERFACES,
|
||||
Allow,
|
||||
Warn,
|
||||
"private type in primary interface of an item",
|
||||
@feature_gate = sym::type_privacy_lints;
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
@ -4370,8 +4328,6 @@ declare_lint! {
|
||||
/// ### Example
|
||||
///
|
||||
/// ```rust,compile_fail
|
||||
/// # #![feature(type_privacy_lints)]
|
||||
/// # #![allow(private_in_public)]
|
||||
/// # #![allow(unused)]
|
||||
/// #![deny(private_bounds)]
|
||||
///
|
||||
@ -4389,9 +4345,8 @@ declare_lint! {
|
||||
/// Having private types or traits in item bounds makes it less clear what interface
|
||||
/// the item actually provides.
|
||||
pub PRIVATE_BOUNDS,
|
||||
Allow,
|
||||
Warn,
|
||||
"private type in secondary interface of an item",
|
||||
@feature_gate = sym::type_privacy_lints;
|
||||
}
|
||||
|
||||
declare_lint! {
|
||||
|
@@ -252,7 +252,10 @@ fn main() {
    } else if target.contains("windows-gnu") {
        println!("cargo:rustc-link-lib=shell32");
        println!("cargo:rustc-link-lib=uuid");
    } else if target.contains("haiku") || target.contains("darwin") {
    } else if target.contains("haiku")
        || target.contains("darwin")
        || (is_crossed && (target.contains("dragonfly") || target.contains("solaris")))
    {
        println!("cargo:rustc-link-lib=z");
    } else if target.contains("netbsd") {
        println!("cargo:rustc-link-lib=z");

@@ -42,6 +42,6 @@ pub mod locator;

pub use fs::{emit_wrapper_file, METADATA_FILENAME};
pub use native_libs::find_native_static_library;
pub use rmeta::{encode_metadata, EncodedMetadata, METADATA_HEADER};
pub use rmeta::{encode_metadata, rendered_const, EncodedMetadata, METADATA_HEADER};

fluent_messages! { "../messages.ftl" }
@ -9,7 +9,7 @@ use rustc_data_structures::captures::Captures;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::owned_slice::OwnedSlice;
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_data_structures::sync::{AppendOnlyVec, AtomicBool, Lock, Lrc, OnceCell};
|
||||
use rustc_data_structures::sync::{AppendOnlyVec, AtomicBool, Lock, Lrc, OnceLock};
|
||||
use rustc_data_structures::unhash::UnhashMap;
|
||||
use rustc_expand::base::{SyntaxExtension, SyntaxExtensionKind};
|
||||
use rustc_expand::proc_macro::{AttrProcMacro, BangProcMacro, DeriveProcMacro};
|
||||
@ -93,7 +93,7 @@ pub(crate) struct CrateMetadata {
|
||||
/// For every definition in this crate, maps its `DefPathHash` to its `DefIndex`.
|
||||
def_path_hash_map: DefPathHashMapRef<'static>,
|
||||
/// Likewise for ExpnHash.
|
||||
expn_hash_map: OnceCell<UnhashMap<ExpnHash, ExpnIndex>>,
|
||||
expn_hash_map: OnceLock<UnhashMap<ExpnHash, ExpnIndex>>,
|
||||
/// Used for decoding interpret::AllocIds in a cached & thread-safe manner.
|
||||
alloc_decoding_state: AllocDecodingState,
|
||||
/// Caches decoded `DefKey`s.
|
||||
|
@ -17,8 +17,8 @@ use rustc_hir::def_id::{
|
||||
CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_ID, CRATE_DEF_INDEX, LOCAL_CRATE,
|
||||
};
|
||||
use rustc_hir::definitions::DefPathData;
|
||||
use rustc_hir::intravisit;
|
||||
use rustc_hir::lang_items::LangItem;
|
||||
use rustc_hir_pretty::id_to_string;
|
||||
use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile;
|
||||
use rustc_middle::middle::dependency_format::Linkage;
|
||||
use rustc_middle::middle::exported_symbols::{
|
||||
@ -1615,7 +1615,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
|
||||
record!(self.tables.mir_const_qualif[def_id.to_def_id()] <- qualifs);
|
||||
let body_id = tcx.hir().maybe_body_owned_by(def_id);
|
||||
if let Some(body_id) = body_id {
|
||||
let const_data = self.encode_rendered_const_for_body(body_id);
|
||||
let const_data = rendered_const(self.tcx, body_id);
|
||||
record!(self.tables.rendered_const[def_id.to_def_id()] <- const_data);
|
||||
}
|
||||
}
|
||||
@ -1683,14 +1683,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn encode_rendered_const_for_body(&mut self, body_id: hir::BodyId) -> String {
|
||||
let hir = self.tcx.hir();
|
||||
let body = hir.body(body_id);
|
||||
rustc_hir_pretty::to_string(&(&hir as &dyn intravisit::Map<'_>), |s| {
|
||||
s.print_expr(&body.value)
|
||||
})
|
||||
}
|
||||
|
||||
#[instrument(level = "debug", skip(self))]
|
||||
fn encode_info_for_macro(&mut self, def_id: LocalDefId) {
|
||||
let tcx = self.tcx;
|
||||
@ -2292,3 +2284,97 @@ pub fn provide(providers: &mut Providers) {
|
||||
..*providers
|
||||
}
|
||||
}
|
||||
|
||||
/// Build a textual representation of an unevaluated constant expression.
|
||||
///
|
||||
/// If the const expression is too complex, an underscore `_` is returned.
|
||||
/// For const arguments, it's `{ _ }` to be precise.
|
||||
/// This means that the output is not necessarily valid Rust code.
|
||||
///
|
||||
/// Currently, only
|
||||
///
|
||||
/// * literals (optionally with a leading `-`)
|
||||
/// * unit `()`
|
||||
/// * blocks (`{ … }`) around simple expressions and
|
||||
/// * paths without arguments
|
||||
///
|
||||
/// are considered simple enough. Simple blocks are included since they are
|
||||
/// necessary to disambiguate unit from the unit type.
|
||||
/// This list might get extended in the future.
|
||||
///
|
||||
/// Without this censoring, in a lot of cases the output would get too large
|
||||
/// and verbose. Consider `match` expressions, blocks and deeply nested ADTs.
|
||||
/// Further, private and `doc(hidden)` fields of structs would get leaked
|
||||
/// since HIR datatypes like the `body` parameter do not contain enough
|
||||
/// semantic information for this function to be able to hide them –
|
||||
/// at least not without significant performance overhead.
|
||||
///
|
||||
/// Whenever possible, prefer to evaluate the constant first and try to
|
||||
/// use a different method for pretty-printing. Ideally this function
|
||||
/// should only ever be used as a fallback.
|
||||
pub fn rendered_const<'tcx>(tcx: TyCtxt<'tcx>, body: hir::BodyId) -> String {
|
||||
let hir = tcx.hir();
|
||||
let value = &hir.body(body).value;
|
||||
|
||||
#[derive(PartialEq, Eq)]
|
||||
enum Classification {
|
||||
Literal,
|
||||
Simple,
|
||||
Complex,
|
||||
}
|
||||
|
||||
use Classification::*;
|
||||
|
||||
fn classify(expr: &hir::Expr<'_>) -> Classification {
|
||||
match &expr.kind {
|
||||
hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
|
||||
if matches!(expr.kind, hir::ExprKind::Lit(_)) { Literal } else { Complex }
|
||||
}
|
||||
hir::ExprKind::Lit(_) => Literal,
|
||||
hir::ExprKind::Tup([]) => Simple,
|
||||
hir::ExprKind::Block(hir::Block { stmts: [], expr: Some(expr), .. }, _) => {
|
||||
if classify(expr) == Complex { Complex } else { Simple }
|
||||
}
|
||||
// Paths with a self-type or arguments are too “complex” following our measure since
|
||||
// they may leak private fields of structs (with feature `adt_const_params`).
|
||||
// Consider: `<Self as Trait<{ Struct { private: () } }>>::CONSTANT`.
|
||||
// Paths without arguments are definitely harmless though.
|
||||
hir::ExprKind::Path(hir::QPath::Resolved(_, hir::Path { segments, .. })) => {
|
||||
if segments.iter().all(|segment| segment.args.is_none()) { Simple } else { Complex }
|
||||
}
|
||||
// FIXME: Claiming that those kinds of QPaths are simple is probably not true if the Ty
|
||||
// contains const arguments. Is there a *concise* way to check for this?
|
||||
hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => Simple,
|
||||
// FIXME: Can they contain const arguments and thus leak private struct fields?
|
||||
hir::ExprKind::Path(hir::QPath::LangItem(..)) => Simple,
|
||||
_ => Complex,
|
||||
}
|
||||
}
|
||||
|
||||
let classification = classify(value);
|
||||
|
||||
if classification == Literal
|
||||
&& !value.span.from_expansion()
|
||||
&& let Ok(snippet) = tcx.sess.source_map().span_to_snippet(value.span) {
|
||||
// For literals, we avoid invoking the pretty-printer and use the source snippet instead to
|
||||
// preserve certain stylistic choices the user likely made for the sake legibility like
|
||||
//
|
||||
// * hexadecimal notation
|
||||
// * underscores
|
||||
// * character escapes
|
||||
//
|
||||
// FIXME: This passes through `-/*spacer*/0` verbatim.
|
||||
snippet
|
||||
} else if classification == Simple {
|
||||
// Otherwise we prefer pretty-printing to get rid of extraneous whitespace, comments and
|
||||
// other formatting artifacts.
|
||||
id_to_string(&hir, body.hir_id)
|
||||
} else if tcx.def_kind(hir.body_owner_def_id(body).to_def_id()) == DefKind::AnonConst {
|
||||
// FIXME: Omit the curly braces if the enclosing expression is an array literal
|
||||
// with a repeated element (an `ExprKind::Repeat`) as in such case it
|
||||
// would not actually need any disambiguation.
|
||||
"{ _ }".to_owned()
|
||||
} else {
|
||||
"_".to_owned()
|
||||
}
|
||||
}
|
||||
|
@ -42,7 +42,7 @@ pub use decoder::provide_extern;
|
||||
use decoder::DecodeContext;
|
||||
pub(crate) use decoder::{CrateMetadata, CrateNumMap, MetadataBlob};
|
||||
use encoder::EncodeContext;
|
||||
pub use encoder::{encode_metadata, EncodedMetadata};
|
||||
pub use encoder::{encode_metadata, rendered_const, EncodedMetadata};
|
||||
use rustc_span::hygiene::SyntaxContextData;
|
||||
|
||||
mod decoder;
|
||||
|
@ -1226,7 +1226,6 @@ pub(super) fn crate_hash(tcx: TyCtxt<'_>, _: LocalCrate) -> Svh {
|
||||
tcx.stable_crate_id(LOCAL_CRATE).hash_stable(&mut hcx, &mut stable_hasher);
|
||||
// Hash visibility information since it does not appear in HIR.
|
||||
resolutions.visibilities.hash_stable(&mut hcx, &mut stable_hasher);
|
||||
resolutions.has_pub_restricted.hash_stable(&mut hcx, &mut stable_hasher);
|
||||
stable_hasher.finish()
|
||||
});
|
||||
|
||||
|
@@ -5,7 +5,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph;
use rustc_data_structures::graph::dominators::{dominators, Dominators};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::OnceCell;
use rustc_data_structures::sync::OnceLock;
use rustc_index::{IndexSlice, IndexVec};
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use smallvec::SmallVec;

@@ -23,11 +23,11 @@ pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u1

#[derive(Clone, Default, Debug)]
struct Cache {
    predecessors: OnceCell<Predecessors>,
    switch_sources: OnceCell<SwitchSources>,
    is_cyclic: OnceCell<bool>,
    reverse_postorder: OnceCell<Vec<BasicBlock>>,
    dominators: OnceCell<Dominators<BasicBlock>>,
    predecessors: OnceLock<Predecessors>,
    switch_sources: OnceLock<SwitchSources>,
    is_cyclic: OnceLock<bool>,
    reverse_postorder: OnceLock<Vec<BasicBlock>>,
    dominators: OnceLock<Dominators<BasicBlock>>,
}

impl<'tcx> BasicBlocks<'tcx> {
@@ -162,8 +162,6 @@ pub struct ResolverOutputs {
#[derive(Debug)]
pub struct ResolverGlobalCtxt {
    pub visibilities: FxHashMap<LocalDefId, Visibility>,
    /// This field is used to decide whether we should make `PRIVATE_IN_PUBLIC` a hard error.
    pub has_pub_restricted: bool,
    /// Item with a given `LocalDefId` was defined during macro expansion with ID `ExpnId`.
    pub expn_that_defined: FxHashMap<LocalDefId, ExpnId>,
    pub effective_visibilities: EffectiveVisibilities,
@ -159,52 +159,44 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
ExprKind::LogicalOp { op, lhs, rhs } => {
|
||||
// And:
|
||||
//
|
||||
// [block: If(lhs)] -true-> [else_block: dest = (rhs)]
|
||||
// | (false)
|
||||
// [shortcircuit_block: dest = false]
|
||||
//
|
||||
// Or:
|
||||
//
|
||||
// [block: If(lhs)] -false-> [else_block: dest = (rhs)]
|
||||
// | (true)
|
||||
// [shortcircuit_block: dest = true]
|
||||
|
||||
let (shortcircuit_block, mut else_block, join_block) = (
|
||||
this.cfg.start_new_block(),
|
||||
this.cfg.start_new_block(),
|
||||
this.cfg.start_new_block(),
|
||||
);
|
||||
|
||||
let lhs = unpack!(block = this.as_local_operand(block, &this.thir[lhs]));
|
||||
let blocks = match op {
|
||||
LogicalOp::And => (else_block, shortcircuit_block),
|
||||
LogicalOp::Or => (shortcircuit_block, else_block),
|
||||
let condition_scope = this.local_scope();
|
||||
let source_info = this.source_info(expr.span);
|
||||
// We first evaluate the left-hand side of the predicate ...
|
||||
let (then_block, else_block) =
|
||||
this.in_if_then_scope(condition_scope, expr.span, |this| {
|
||||
this.then_else_break(
|
||||
block,
|
||||
&this.thir[lhs],
|
||||
Some(condition_scope),
|
||||
condition_scope,
|
||||
source_info,
|
||||
)
|
||||
});
|
||||
let (short_circuit, continuation, constant) = match op {
|
||||
LogicalOp::And => (else_block, then_block, false),
|
||||
LogicalOp::Or => (then_block, else_block, true),
|
||||
};
|
||||
let term = TerminatorKind::if_(lhs, blocks.0, blocks.1);
|
||||
this.cfg.terminate(block, source_info, term);
|
||||
|
||||
// At this point, the control flow splits into a short-circuiting path
|
||||
// and a continuation path.
|
||||
// - If the operator is `&&`, passing `lhs` leads to continuation of evaluation on `rhs`;
|
||||
// failing it leads to the short-circuting path which assigns `false` to the place.
|
||||
// - If the operator is `||`, failing `lhs` leads to continuation of evaluation on `rhs`;
|
||||
// passing it leads to the short-circuting path which assigns `true` to the place.
|
||||
this.cfg.push_assign_constant(
|
||||
shortcircuit_block,
|
||||
short_circuit,
|
||||
source_info,
|
||||
destination,
|
||||
Constant {
|
||||
span: expr_span,
|
||||
span: expr.span,
|
||||
user_ty: None,
|
||||
literal: match op {
|
||||
LogicalOp::And => ConstantKind::from_bool(this.tcx, false),
|
||||
LogicalOp::Or => ConstantKind::from_bool(this.tcx, true),
|
||||
},
|
||||
literal: ConstantKind::from_bool(this.tcx, constant),
|
||||
},
|
||||
);
|
||||
this.cfg.goto(shortcircuit_block, source_info, join_block);
|
||||
|
||||
let rhs = unpack!(else_block = this.as_local_operand(else_block, &this.thir[rhs]));
|
||||
this.cfg.push_assign(else_block, source_info, destination, Rvalue::Use(rhs));
|
||||
this.cfg.goto(else_block, source_info, join_block);
|
||||
|
||||
join_block.unit()
|
||||
let rhs = unpack!(this.expr_into_dest(destination, continuation, &this.thir[rhs]));
|
||||
let target = this.cfg.start_new_block();
|
||||
this.cfg.goto(rhs, source_info, target);
|
||||
this.cfg.goto(short_circuit, source_info, target);
|
||||
target.unit()
|
||||
}
|
||||
ExprKind::Loop { body } => {
|
||||
// [block]
|
||||
|
@ -64,6 +64,43 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
|
||||
rhs_then_block.unit()
|
||||
}
|
||||
ExprKind::LogicalOp { op: LogicalOp::Or, lhs, rhs } => {
|
||||
let local_scope = this.local_scope();
|
||||
let (lhs_success_block, failure_block) =
|
||||
this.in_if_then_scope(local_scope, expr_span, |this| {
|
||||
this.then_else_break(
|
||||
block,
|
||||
&this.thir[lhs],
|
||||
temp_scope_override,
|
||||
local_scope,
|
||||
variable_source_info,
|
||||
)
|
||||
});
|
||||
let rhs_success_block = unpack!(this.then_else_break(
|
||||
failure_block,
|
||||
&this.thir[rhs],
|
||||
temp_scope_override,
|
||||
break_scope,
|
||||
variable_source_info,
|
||||
));
|
||||
this.cfg.goto(lhs_success_block, variable_source_info, rhs_success_block);
|
||||
rhs_success_block.unit()
|
||||
}
|
||||
ExprKind::Unary { op: UnOp::Not, arg } => {
|
||||
let local_scope = this.local_scope();
|
||||
let (success_block, failure_block) =
|
||||
this.in_if_then_scope(local_scope, expr_span, |this| {
|
||||
this.then_else_break(
|
||||
block,
|
||||
&this.thir[arg],
|
||||
temp_scope_override,
|
||||
local_scope,
|
||||
variable_source_info,
|
||||
)
|
||||
});
|
||||
this.break_for_else(success_block, break_scope, variable_source_info);
|
||||
failure_block.unit()
|
||||
}
|
||||
ExprKind::Scope { region_scope, lint_level, value } => {
|
||||
let region_scope = (region_scope, this.source_info(expr_span));
|
||||
this.in_scope(region_scope, lint_level, |this| {
|
||||
@ -76,6 +113,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
)
|
||||
})
|
||||
}
|
||||
ExprKind::Use { source } => this.then_else_break(
|
||||
block,
|
||||
&this.thir[source],
|
||||
temp_scope_override,
|
||||
break_scope,
|
||||
variable_source_info,
|
||||
),
|
||||
ExprKind::Let { expr, ref pat } => this.lower_let_expr(
|
||||
block,
|
||||
&this.thir[expr],
|
||||
|
@@ -11,11 +11,6 @@ privacy_in_public_interface = {$vis_descr} {$kind} `{$descr}` in public interfac

privacy_item_is_private = {$kind} `{$descr}` is private
    .label = private {$kind}
privacy_private_in_public_lint =
    {$vis_descr} {$kind} `{$descr}` in public interface (error {$kind ->
        [trait] E0445
        *[other] E0446
    })

privacy_private_interface_or_bounds_lint = {$ty_kind} `{$ty_descr}` is more private than the item `{$item_descr}`
    .item_label = {$item_kind} `{$item_descr}` is reachable at visibility `{$item_vis_descr}`
@ -47,21 +47,6 @@ pub struct UnnamedItemIsPrivate {
|
||||
pub kind: &'static str,
|
||||
}
|
||||
|
||||
// Duplicate of `InPublicInterface` but with a different error code, shares the same slug.
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(privacy_in_public_interface, code = "E0445")]
|
||||
pub struct InPublicInterfaceTraits<'a> {
|
||||
#[primary_span]
|
||||
#[label]
|
||||
pub span: Span,
|
||||
pub vis_descr: &'static str,
|
||||
pub kind: &'a str,
|
||||
pub descr: DiagnosticArgFromDisplay<'a>,
|
||||
#[label(privacy_visibility_label)]
|
||||
pub vis_span: Span,
|
||||
}
|
||||
|
||||
// Duplicate of `InPublicInterfaceTraits` but with a different error code, shares the same slug.
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(privacy_in_public_interface, code = "E0446")]
|
||||
pub struct InPublicInterface<'a> {
|
||||
@ -91,14 +76,6 @@ pub struct FromPrivateDependencyInPublicInterface<'a> {
|
||||
pub krate: Symbol,
|
||||
}
|
||||
|
||||
#[derive(LintDiagnostic)]
|
||||
#[diag(privacy_private_in_public_lint)]
|
||||
pub struct PrivateInPublicLint<'a> {
|
||||
pub vis_descr: &'static str,
|
||||
pub kind: &'a str,
|
||||
pub descr: DiagnosticArgFromDisplay<'a>,
|
||||
}
|
||||
|
||||
#[derive(LintDiagnostic)]
|
||||
#[diag(privacy_unnameable_types_lint)]
|
||||
pub struct UnnameableTypesLint<'a> {
|
||||
|
@ -22,7 +22,7 @@ use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId, CRATE_DEF_ID};
|
||||
use rustc_hir::intravisit::{self, Visitor};
|
||||
use rustc_hir::{AssocItemKind, ForeignItemKind, HirIdSet, ItemId, Node, PatKind};
|
||||
use rustc_hir::{AssocItemKind, ForeignItemKind, ItemId, Node, PatKind};
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::hir::nested_filter;
|
||||
use rustc_middle::middle::privacy::{EffectiveVisibilities, EffectiveVisibility, Level};
|
||||
@ -42,8 +42,8 @@ use std::{fmt, mem};
|
||||
|
||||
use errors::{
|
||||
FieldIsPrivate, FieldIsPrivateLabel, FromPrivateDependencyInPublicInterface, InPublicInterface,
|
||||
InPublicInterfaceTraits, ItemIsPrivate, PrivateInPublicLint, PrivateInterfacesOrBoundsLint,
|
||||
ReportEffectiveVisibility, UnnameableTypesLint, UnnamedItemIsPrivate,
|
||||
ItemIsPrivate, PrivateInterfacesOrBoundsLint, ReportEffectiveVisibility, UnnameableTypesLint,
|
||||
UnnamedItemIsPrivate,
|
||||
};
|
||||
|
||||
fluent_messages! { "../messages.ftl" }
|
||||
@ -364,6 +364,7 @@ trait VisibilityLike: Sized {
|
||||
find.min
|
||||
}
|
||||
}
|
||||
|
||||
impl VisibilityLike for ty::Visibility {
|
||||
const MAX: Self = ty::Visibility::Public;
|
||||
fn new_min<const SHALLOW: bool>(
|
||||
@ -1382,345 +1383,6 @@ impl<'tcx> DefIdVisitor<'tcx> for TypePrivacyVisitor<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
/// Obsolete visitors for checking for private items in public interfaces.
|
||||
/// These visitors are supposed to be kept in frozen state and produce an
|
||||
/// "old error node set". For backward compatibility the new visitor reports
|
||||
/// warnings instead of hard errors when the erroneous node is not in this old set.
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
struct ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
effective_visibilities: &'a EffectiveVisibilities,
|
||||
in_variant: bool,
|
||||
// Set of errors produced by this obsolete visitor.
|
||||
old_error_set: HirIdSet,
|
||||
}
|
||||
|
||||
struct ObsoleteCheckTypeForPrivatenessVisitor<'a, 'b, 'tcx> {
|
||||
inner: &'a ObsoleteVisiblePrivateTypesVisitor<'b, 'tcx>,
|
||||
/// Whether the type refers to private types.
|
||||
contains_private: bool,
|
||||
/// Whether we've recurred at all (i.e., if we're pointing at the
|
||||
/// first type on which `visit_ty` was called).
|
||||
at_outer_type: bool,
|
||||
/// Whether that first type is a public path.
|
||||
outer_type_is_public_path: bool,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
|
||||
fn path_is_private_type(&self, path: &hir::Path<'_>) -> bool {
|
||||
let did = match path.res {
|
||||
Res::PrimTy(..) | Res::SelfTyParam { .. } | Res::SelfTyAlias { .. } | Res::Err => {
|
||||
return false;
|
||||
}
|
||||
res => res.def_id(),
|
||||
};
|
||||
|
||||
// A path can only be private if:
|
||||
// it's in this crate...
|
||||
if let Some(did) = did.as_local() {
|
||||
// .. and it corresponds to a private type in the AST (this returns
|
||||
// `None` for type parameters).
|
||||
match self.tcx.hir().find(self.tcx.hir().local_def_id_to_hir_id(did)) {
|
||||
Some(Node::Item(_)) => !self.tcx.visibility(did).is_public(),
|
||||
Some(_) | None => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn trait_is_public(&self, trait_id: LocalDefId) -> bool {
|
||||
// FIXME: this would preferably be using `exported_items`, but all
|
||||
// traits are exported currently (see `EmbargoVisitor.exported_trait`).
|
||||
self.effective_visibilities.is_directly_public(trait_id)
|
||||
}
|
||||
|
||||
fn check_generic_bound(&mut self, bound: &hir::GenericBound<'_>) {
|
||||
if let hir::GenericBound::Trait(ref trait_ref, _) = *bound {
|
||||
if self.path_is_private_type(trait_ref.trait_ref.path) {
|
||||
self.old_error_set.insert(trait_ref.trait_ref.hir_ref_id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn item_is_public(&self, def_id: LocalDefId) -> bool {
|
||||
self.effective_visibilities.is_reachable(def_id) || self.tcx.visibility(def_id).is_public()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'tcx, 'v> Visitor<'v> for ObsoleteCheckTypeForPrivatenessVisitor<'a, 'b, 'tcx> {
|
||||
fn visit_generic_arg(&mut self, generic_arg: &'v hir::GenericArg<'v>) {
|
||||
match generic_arg {
|
||||
hir::GenericArg::Type(t) => self.visit_ty(t),
|
||||
hir::GenericArg::Infer(inf) => self.visit_ty(&inf.to_ty()),
|
||||
hir::GenericArg::Lifetime(_) | hir::GenericArg::Const(_) => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_ty(&mut self, ty: &hir::Ty<'_>) {
|
||||
if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind {
|
||||
if self.inner.path_is_private_type(path) {
|
||||
self.contains_private = true;
|
||||
// Found what we're looking for, so let's stop working.
|
||||
return;
|
||||
}
|
||||
}
|
||||
if let hir::TyKind::Path(_) = ty.kind {
|
||||
if self.at_outer_type {
|
||||
self.outer_type_is_public_path = true;
|
||||
}
|
||||
}
|
||||
self.at_outer_type = false;
|
||||
intravisit::walk_ty(self, ty)
|
||||
}
|
||||
|
||||
// Don't want to recurse into `[, .. expr]`.
|
||||
fn visit_expr(&mut self, _: &hir::Expr<'_>) {}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> {
|
||||
type NestedFilter = nested_filter::All;
|
||||
|
||||
/// We want to visit items in the context of their containing
|
||||
/// module and so forth, so supply a crate for doing a deep walk.
|
||||
fn nested_visit_map(&mut self) -> Self::Map {
|
||||
self.tcx.hir()
|
||||
}
|
||||
|
||||
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
|
||||
match item.kind {
|
||||
// Contents of a private mod can be re-exported, so we need
|
||||
// to check internals.
|
||||
hir::ItemKind::Mod(_) => {}
|
||||
|
||||
// An `extern {}` doesn't introduce a new privacy
|
||||
// namespace (the contents have their own privacies).
|
||||
hir::ItemKind::ForeignMod { .. } => {}
|
||||
|
||||
hir::ItemKind::Trait(.., bounds, _) => {
|
||||
if !self.trait_is_public(item.owner_id.def_id) {
|
||||
return;
|
||||
}
|
||||
|
||||
for bound in bounds.iter() {
|
||||
self.check_generic_bound(bound)
|
||||
}
|
||||
}
|
||||
|
||||
// Impls need some special handling to try to offer useful
|
||||
// error messages without (too many) false positives
|
||||
// (i.e., we could just return here to not check them at
|
||||
// all, or some worse estimation of whether an impl is
|
||||
// publicly visible).
|
||||
hir::ItemKind::Impl(ref impl_) => {
|
||||
// `impl [... for] Private` is never visible.
|
||||
let self_contains_private;
|
||||
// `impl [... for] Public<...>`, but not `impl [... for]
|
||||
// Vec<Public>` or `(Public,)`, etc.
|
||||
let self_is_public_path;
|
||||
|
||||
// Check the properties of the `Self` type:
|
||||
{
|
||||
let mut visitor = ObsoleteCheckTypeForPrivatenessVisitor {
|
||||
inner: self,
|
||||
contains_private: false,
|
||||
at_outer_type: true,
|
||||
outer_type_is_public_path: false,
|
||||
};
|
||||
visitor.visit_ty(impl_.self_ty);
|
||||
self_contains_private = visitor.contains_private;
|
||||
self_is_public_path = visitor.outer_type_is_public_path;
|
||||
}
|
||||
|
||||
// Miscellaneous info about the impl:
|
||||
|
||||
// `true` iff this is `impl Private for ...`.
|
||||
let not_private_trait = impl_.of_trait.as_ref().map_or(
|
||||
true, // no trait counts as public trait
|
||||
|tr| {
|
||||
if let Some(def_id) = tr.path.res.def_id().as_local() {
|
||||
self.trait_is_public(def_id)
|
||||
} else {
|
||||
true // external traits must be public
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// `true` iff this is a trait impl or at least one method is public.
|
||||
//
|
||||
// `impl Public { $( fn ...() {} )* }` is not visible.
|
||||
//
|
||||
// This is required over just using the methods' privacy
|
||||
// directly because we might have `impl<T: Foo<Private>> ...`,
|
||||
// and we shouldn't warn about the generics if all the methods
|
||||
// are private (because `T` won't be visible externally).
|
||||
let trait_or_some_public_method = impl_.of_trait.is_some()
|
||||
|| impl_.items.iter().any(|impl_item_ref| {
|
||||
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
|
||||
match impl_item.kind {
|
||||
hir::ImplItemKind::Const(..) | hir::ImplItemKind::Fn(..) => self
|
||||
.effective_visibilities
|
||||
.is_reachable(impl_item_ref.id.owner_id.def_id),
|
||||
hir::ImplItemKind::Type(_) => false,
|
||||
}
|
||||
});
|
||||
|
||||
if !self_contains_private && not_private_trait && trait_or_some_public_method {
|
||||
intravisit::walk_generics(self, &impl_.generics);
|
||||
|
||||
match impl_.of_trait {
|
||||
None => {
|
||||
for impl_item_ref in impl_.items {
|
||||
// This is where we choose whether to walk down
|
||||
// further into the impl to check its items. We
|
||||
// should only walk into public items so that we
|
||||
// don't erroneously report errors for private
|
||||
// types in private items.
|
||||
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
|
||||
match impl_item.kind {
|
||||
hir::ImplItemKind::Const(..) | hir::ImplItemKind::Fn(..)
|
||||
if self.item_is_public(impl_item.owner_id.def_id) =>
|
||||
{
|
||||
intravisit::walk_impl_item(self, impl_item)
|
||||
}
|
||||
hir::ImplItemKind::Type(..) => {
|
||||
intravisit::walk_impl_item(self, impl_item)
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
Some(ref tr) => {
|
||||
// Any private types in a trait impl fall into three
|
||||
// categories.
|
||||
// 1. mentioned in the trait definition
|
||||
// 2. mentioned in the type params/generics
|
||||
// 3. mentioned in the associated types of the impl
|
||||
//
|
||||
// Those in 1. can only occur if the trait is in
|
||||
// this crate and will have been warned about on the
|
||||
// trait definition (there's no need to warn twice
|
||||
// so we don't check the methods).
|
||||
//
|
||||
// Those in 2. are warned via walk_generics and this
|
||||
// call here.
|
||||
intravisit::walk_path(self, tr.path);
|
||||
|
||||
// Those in 3. are warned with this call.
|
||||
for impl_item_ref in impl_.items {
|
||||
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
|
||||
if let hir::ImplItemKind::Type(ty) = impl_item.kind {
|
||||
self.visit_ty(ty);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if impl_.of_trait.is_none() && self_is_public_path {
|
||||
// `impl Public<Private> { ... }`. Any public static
|
||||
// methods will be visible as `Public::foo`.
|
||||
let mut found_pub_static = false;
|
||||
for impl_item_ref in impl_.items {
|
||||
if self
|
||||
.effective_visibilities
|
||||
.is_reachable(impl_item_ref.id.owner_id.def_id)
|
||||
|| self.tcx.visibility(impl_item_ref.id.owner_id).is_public()
|
||||
{
|
||||
let impl_item = self.tcx.hir().impl_item(impl_item_ref.id);
|
||||
match impl_item_ref.kind {
|
||||
AssocItemKind::Const => {
|
||||
found_pub_static = true;
|
||||
intravisit::walk_impl_item(self, impl_item);
|
||||
}
|
||||
AssocItemKind::Fn { has_self: false } => {
|
||||
found_pub_static = true;
|
||||
intravisit::walk_impl_item(self, impl_item);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
if found_pub_static {
|
||||
intravisit::walk_generics(self, &impl_.generics)
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// `type ... = ...;` can contain private types, because
|
||||
// we're introducing a new name.
|
||||
hir::ItemKind::TyAlias(..) => return,
|
||||
|
||||
// Not at all public, so we don't care.
|
||||
_ if !self.item_is_public(item.owner_id.def_id) => {
|
||||
return;
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// We've carefully constructed it so that if we're here, then
|
||||
// any `visit_ty`'s will be called on things that are in
|
||||
// public signatures, i.e., things that we're interested in for
|
||||
// this visitor.
|
||||
intravisit::walk_item(self, item);
|
||||
}
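For illustration, a minimal standalone sketch of the situation this visitor records (hypothetical module layout, not taken from the patch): a public signature that mentions a private type. Which diagnostic this produces (hard error or lint) has varied across compiler versions, which is exactly the distinction the code above is making.

pub mod outer {
    struct Private; // not nameable outside `outer`

    // A public function whose signature leaks the private type:
    // the classic private-in-public shape added to the old error set.
    pub fn leak() -> Private {
        Private
    }
}

fn main() {
    // Nothing to run; compiling the crate is what triggers the diagnostics.
}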
|
||||
|
||||
fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) {
|
||||
for predicate in generics.predicates {
|
||||
match predicate {
|
||||
hir::WherePredicate::BoundPredicate(bound_pred) => {
|
||||
for bound in bound_pred.bounds.iter() {
|
||||
self.check_generic_bound(bound)
|
||||
}
|
||||
}
|
||||
hir::WherePredicate::RegionPredicate(_) => {}
|
||||
hir::WherePredicate::EqPredicate(eq_pred) => {
|
||||
self.visit_ty(eq_pred.rhs_ty);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) {
|
||||
if self.effective_visibilities.is_reachable(item.owner_id.def_id) {
|
||||
intravisit::walk_foreign_item(self, item)
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_ty(&mut self, t: &'tcx hir::Ty<'tcx>) {
|
||||
if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = t.kind {
|
||||
if self.path_is_private_type(path) {
|
||||
self.old_error_set.insert(t.hir_id);
|
||||
}
|
||||
}
|
||||
intravisit::walk_ty(self, t)
|
||||
}
|
||||
|
||||
fn visit_variant(&mut self, v: &'tcx hir::Variant<'tcx>) {
|
||||
if self.effective_visibilities.is_reachable(v.def_id) {
|
||||
self.in_variant = true;
|
||||
intravisit::walk_variant(self, v);
|
||||
self.in_variant = false;
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_field_def(&mut self, s: &'tcx hir::FieldDef<'tcx>) {
|
||||
let vis = self.tcx.visibility(s.def_id);
|
||||
if vis.is_public() || self.in_variant {
|
||||
intravisit::walk_field_def(self, s);
|
||||
}
|
||||
}
|
||||
|
||||
// We don't need to introspect into these at all: an
|
||||
// expression/block context can't possibly contain exported things.
|
||||
// (Making them no-ops stops us from traversing the whole AST without
|
||||
// having to be super careful about our `walk_...` calls above.)
|
||||
fn visit_block(&mut self, _: &'tcx hir::Block<'tcx>) {}
|
||||
fn visit_expr(&mut self, _: &'tcx hir::Expr<'tcx>) {}
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////
|
||||
/// SearchInterfaceForPrivateItemsVisitor traverses an item's interface and
|
||||
/// finds any private components in it.
|
||||
@ -1734,7 +1396,6 @@ struct SearchInterfaceForPrivateItemsVisitor<'tcx> {
|
||||
/// The visitor checks that each component type is at least this visible.
|
||||
required_visibility: ty::Visibility,
|
||||
required_effective_vis: Option<EffectiveVisibility>,
|
||||
has_old_errors: bool,
|
||||
in_assoc_ty: bool,
|
||||
in_primary_interface: bool,
|
||||
}
|
||||
@ -1805,7 +1466,7 @@ impl SearchInterfaceForPrivateItemsVisitor<'_> {
|
||||
let hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
|
||||
let span = self.tcx.def_span(self.item_def_id.to_def_id());
|
||||
let vis_span = self.tcx.def_span(def_id);
|
||||
if !vis.is_at_least(self.required_visibility, self.tcx) {
|
||||
if self.in_assoc_ty && !vis.is_at_least(self.required_visibility, self.tcx) {
|
||||
let vis_descr = match vis {
|
||||
ty::Visibility::Public => "public",
|
||||
ty::Visibility::Restricted(vis_def_id) => {
|
||||
@ -1819,35 +1480,14 @@ impl SearchInterfaceForPrivateItemsVisitor<'_> {
|
||||
}
|
||||
};
|
||||
|
||||
if self.has_old_errors
|
||||
|| self.in_assoc_ty
|
||||
|| self.tcx.resolutions(()).has_pub_restricted
|
||||
{
|
||||
if kind == "trait" {
|
||||
self.tcx.sess.emit_err(InPublicInterfaceTraits {
|
||||
span,
|
||||
vis_descr,
|
||||
kind,
|
||||
descr: descr.into(),
|
||||
vis_span,
|
||||
});
|
||||
} else {
|
||||
self.tcx.sess.emit_err(InPublicInterface {
|
||||
span,
|
||||
vis_descr,
|
||||
kind,
|
||||
descr: descr.into(),
|
||||
vis_span,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
self.tcx.emit_spanned_lint(
|
||||
lint::builtin::PRIVATE_IN_PUBLIC,
|
||||
hir_id,
|
||||
span,
|
||||
PrivateInPublicLint { vis_descr, kind, descr: descr.into() },
|
||||
);
|
||||
}
|
||||
self.tcx.sess.emit_err(InPublicInterface {
|
||||
span,
|
||||
vis_descr,
|
||||
kind,
|
||||
descr: descr.into(),
|
||||
vis_span,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
let Some(effective_vis) = self.required_effective_vis else {
|
||||
@ -1918,7 +1558,6 @@ impl<'tcx> DefIdVisitor<'tcx> for SearchInterfaceForPrivateItemsVisitor<'tcx> {
|
||||
|
||||
struct PrivateItemsInPublicInterfacesChecker<'tcx, 'a> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
old_error_set_ancestry: HirIdSet,
|
||||
effective_visibilities: &'a EffectiveVisibilities,
|
||||
}
|
||||
|
||||
@ -1934,9 +1573,6 @@ impl<'tcx> PrivateItemsInPublicInterfacesChecker<'tcx, '_> {
|
||||
item_def_id: def_id,
|
||||
required_visibility,
|
||||
required_effective_vis,
|
||||
has_old_errors: self
|
||||
.old_error_set_ancestry
|
||||
.contains(&self.tcx.hir().local_def_id_to_hir_id(def_id)),
|
||||
in_assoc_ty: false,
|
||||
in_primary_interface: true,
|
||||
}
|
||||
@ -2298,35 +1934,8 @@ fn effective_visibilities(tcx: TyCtxt<'_>, (): ()) -> &EffectiveVisibilities {
|
||||
|
||||
fn check_private_in_public(tcx: TyCtxt<'_>, (): ()) {
|
||||
let effective_visibilities = tcx.effective_visibilities(());
|
||||
|
||||
let mut visitor = ObsoleteVisiblePrivateTypesVisitor {
|
||||
tcx,
|
||||
effective_visibilities,
|
||||
in_variant: false,
|
||||
old_error_set: Default::default(),
|
||||
};
|
||||
tcx.hir().walk_toplevel_module(&mut visitor);
|
||||
|
||||
let mut old_error_set_ancestry = HirIdSet::default();
|
||||
for mut id in visitor.old_error_set.iter().copied() {
|
||||
loop {
|
||||
if !old_error_set_ancestry.insert(id) {
|
||||
break;
|
||||
}
|
||||
let parent = tcx.hir().parent_id(id);
|
||||
if parent == id {
|
||||
break;
|
||||
}
|
||||
id = parent;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for private types and traits in public interfaces.
|
||||
let mut checker = PrivateItemsInPublicInterfacesChecker {
|
||||
tcx,
|
||||
old_error_set_ancestry,
|
||||
effective_visibilities,
|
||||
};
|
||||
// Check for private types in public interfaces.
|
||||
let mut checker = PrivateItemsInPublicInterfacesChecker { tcx, effective_visibilities };
|
||||
|
||||
for id in tcx.hir().items() {
|
||||
checker.check_item(id);
|
||||
|
@ -2,7 +2,7 @@ use crate::dep_graph::DepNodeIndex;
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sharded::{self, Sharded};
|
||||
use rustc_data_structures::sync::Lock;
|
||||
use rustc_data_structures::sync::OnceLock;
|
||||
use rustc_index::{Idx, IndexVec};
|
||||
use std::fmt::Debug;
|
||||
use std::hash::Hash;
|
||||
@ -87,12 +87,12 @@ impl<'tcx, V: 'tcx> CacheSelector<'tcx, V> for SingleCacheSelector {
|
||||
}
|
||||
|
||||
pub struct SingleCache<V> {
|
||||
cache: Lock<Option<(V, DepNodeIndex)>>,
|
||||
cache: OnceLock<(V, DepNodeIndex)>,
|
||||
}
|
||||
|
||||
impl<V> Default for SingleCache<V> {
|
||||
fn default() -> Self {
|
||||
SingleCache { cache: Lock::new(None) }
|
||||
SingleCache { cache: OnceLock::new() }
|
||||
}
|
||||
}
|
||||
|
||||
@ -105,16 +105,16 @@ where
|
||||
|
||||
#[inline(always)]
|
||||
fn lookup(&self, _key: &()) -> Option<(V, DepNodeIndex)> {
|
||||
*self.cache.lock()
|
||||
self.cache.get().copied()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn complete(&self, _key: (), value: V, index: DepNodeIndex) {
|
||||
*self.cache.lock() = Some((value, index));
|
||||
self.cache.set((value, index)).ok();
|
||||
}
|
||||
|
||||
fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
|
||||
if let Some(value) = self.cache.lock().as_ref() {
|
||||
if let Some(value) = self.cache.get() {
|
||||
f(&(), &value.0, value.1)
|
||||
}
|
||||
}
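As a standalone sketch of the pattern this hunk switches to, here is a write-once, single-slot cache built only on std's `OnceLock` (the real `SingleCache` additionally stores a `DepNodeIndex` and is wired into the query system, so the names below are stand-ins):

use std::sync::OnceLock;

// A single-slot, write-once cache: the first `complete` wins, later calls are no-ops.
struct SingleSlot<V> {
    cache: OnceLock<V>,
}

impl<V: Clone> SingleSlot<V> {
    fn new() -> Self {
        SingleSlot { cache: OnceLock::new() }
    }

    fn lookup(&self) -> Option<V> {
        self.cache.get().cloned()
    }

    fn complete(&self, value: V) {
        // Mirrors `self.cache.set(..).ok()`: ignore the error if a value is already stored.
        let _ = self.cache.set(value);
    }
}

fn main() {
    let slot = SingleSlot::new();
    assert_eq!(slot.lookup(), None);
    slot.complete(42);
    slot.complete(7); // ignored, the slot is already filled
    assert_eq!(slot.lookup(), Some(42));
}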
|
||||
|
@ -247,8 +247,6 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> {
|
||||
})
|
||||
}
|
||||
ast::VisibilityKind::Restricted { ref path, id, .. } => {
|
||||
// Make `PRIVATE_IN_PUBLIC` lint a hard error.
|
||||
self.r.has_pub_restricted = true;
|
||||
// For visibilities we are not ready to provide correct implementation of "uniform
|
||||
// paths" right now, so on 2018 edition we only allow module-relative paths for now.
|
||||
// On 2015 edition visibilities are resolved as crate-relative by default,
|
||||
|
@ -128,11 +128,14 @@ impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
|
||||
// If the binding is ambiguous, put the root ambiguity binding and all reexports
|
||||
// leading to it into the table. They are used by the `ambiguous_glob_reexports`
|
||||
// lint. For all bindings added to the table this way `is_ambiguity` returns true.
|
||||
let is_ambiguity =
|
||||
|binding: NameBinding<'a>, warn: bool| binding.ambiguity.is_some() && !warn;
|
||||
let mut parent_id = ParentId::Def(module_id);
|
||||
let mut warn_ambiguity = binding.warn_ambiguity;
|
||||
while let NameBindingKind::Import { binding: nested_binding, .. } = binding.kind {
|
||||
self.update_import(binding, parent_id);
|
||||
|
||||
if binding.ambiguity.is_some() {
|
||||
if is_ambiguity(binding, warn_ambiguity) {
|
||||
// Stop at the root ambiguity, further bindings in the chain should not
|
||||
// be reexported because the root ambiguity blocks any access to them.
|
||||
// (Those further bindings are most likely not ambiguities themselves.)
|
||||
@ -141,9 +144,9 @@ impl<'r, 'a, 'tcx> EffectiveVisibilitiesVisitor<'r, 'a, 'tcx> {
|
||||
|
||||
parent_id = ParentId::Import(binding);
|
||||
binding = nested_binding;
|
||||
warn_ambiguity |= nested_binding.warn_ambiguity;
|
||||
}
|
||||
|
||||
if binding.ambiguity.is_none()
|
||||
if !is_ambiguity(binding, warn_ambiguity)
|
||||
&& let Some(def_id) = binding.res().opt_def_id().and_then(|id| id.as_local()) {
|
||||
self.update_def(def_id, binding.vis.expect_local(), parent_id);
|
||||
}
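For context, a minimal shape of the ambiguity the comment above describes (hypothetical modules, not from the patch); both globs reexport an `X`, and only the root ambiguity binding ends up in the table:

mod a {
    pub struct X;
}
mod b {
    pub struct X;
}

// Both globs bring an `X` into scope: an ambiguous glob reexport.
// Merely having it triggers the lint; naming `X` would be an error.
pub use a::*;
pub use b::*;

fn main() {}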
|
||||
|
@ -313,7 +313,7 @@ enum LifetimeRibKind {
|
||||
|
||||
/// Resolves elided lifetimes to `'static`, but gives a warning that this behavior
|
||||
/// is a bug and will be reverted soon.
|
||||
AnonymousWarnToStatic(NodeId),
|
||||
AnonymousWarn(NodeId),
|
||||
|
||||
/// Signal we cannot find which should be the anonymous lifetime.
|
||||
ElisionFailure,
|
||||
@ -1109,6 +1109,7 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
|
||||
}
|
||||
},
|
||||
AssocConstraintKind::Bound { ref bounds } => {
|
||||
self.record_lifetime_params_for_impl_trait(constraint.id);
|
||||
walk_list!(self, visit_param_bound, bounds, BoundKind::Bound);
|
||||
}
|
||||
}
|
||||
@ -1153,7 +1154,7 @@ impl<'a: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast,
|
||||
}
|
||||
LifetimeRibKind::AnonymousCreateParameter { .. }
|
||||
| LifetimeRibKind::AnonymousReportError
|
||||
| LifetimeRibKind::AnonymousWarnToStatic(_)
|
||||
| LifetimeRibKind::AnonymousWarn(_)
|
||||
| LifetimeRibKind::Elided(_)
|
||||
| LifetimeRibKind::ElisionFailure
|
||||
| LifetimeRibKind::ConcreteAnonConst(_)
|
||||
@ -1521,7 +1522,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
||||
// lifetime would be illegal.
|
||||
LifetimeRibKind::Item
|
||||
| LifetimeRibKind::AnonymousReportError
|
||||
| LifetimeRibKind::AnonymousWarnToStatic(_)
|
||||
| LifetimeRibKind::AnonymousWarn(_)
|
||||
| LifetimeRibKind::ElisionFailure => Some(LifetimeUseSet::Many),
|
||||
// An anonymous lifetime is legal here, and bound to the right
|
||||
// place, go ahead.
|
||||
@ -1584,7 +1585,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
||||
| LifetimeRibKind::Generics { .. }
|
||||
| LifetimeRibKind::ElisionFailure
|
||||
| LifetimeRibKind::AnonymousReportError
|
||||
| LifetimeRibKind::AnonymousWarnToStatic(_) => {}
|
||||
| LifetimeRibKind::AnonymousWarn(_) => {}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1624,8 +1625,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
||||
self.record_lifetime_res(lifetime.id, res, elision_candidate);
|
||||
return;
|
||||
}
|
||||
LifetimeRibKind::AnonymousWarnToStatic(node_id) => {
|
||||
self.record_lifetime_res(lifetime.id, LifetimeRes::Static, elision_candidate);
|
||||
LifetimeRibKind::AnonymousWarn(node_id) => {
|
||||
let msg = if elided {
|
||||
"`&` without an explicit lifetime name cannot be used here"
|
||||
} else {
|
||||
@ -1641,7 +1641,6 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
||||
span: lifetime.ident.span,
|
||||
},
|
||||
);
|
||||
return;
|
||||
}
|
||||
LifetimeRibKind::AnonymousReportError => {
|
||||
let (msg, note) = if elided {
|
||||
@ -1839,7 +1838,7 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
||||
// impl Foo for std::cell::Ref<u32> // note lack of '_
|
||||
// async fn foo(_: std::cell::Ref<u32>) { ... }
|
||||
LifetimeRibKind::AnonymousCreateParameter { report_in_path: true, .. }
|
||||
| LifetimeRibKind::AnonymousWarnToStatic(_) => {
|
||||
| LifetimeRibKind::AnonymousWarn(_) => {
|
||||
let sess = self.r.tcx.sess;
|
||||
let mut err = rustc_errors::struct_span_err!(
|
||||
sess,
|
||||
@ -2935,33 +2934,30 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> {
|
||||
kind: LifetimeBinderKind::ConstItem,
|
||||
},
|
||||
|this| {
|
||||
this.with_lifetime_rib(
|
||||
LifetimeRibKind::AnonymousWarnToStatic(item.id),
|
||||
|this| {
|
||||
// If this is a trait impl, ensure the const
|
||||
// exists in trait
|
||||
this.check_trait_item(
|
||||
item.id,
|
||||
item.ident,
|
||||
&item.kind,
|
||||
ValueNS,
|
||||
item.span,
|
||||
seen_trait_items,
|
||||
|i, s, c| ConstNotMemberOfTrait(i, s, c),
|
||||
);
|
||||
this.with_lifetime_rib(LifetimeRibKind::AnonymousWarn(item.id), |this| {
|
||||
// If this is a trait impl, ensure the const
|
||||
// exists in trait
|
||||
this.check_trait_item(
|
||||
item.id,
|
||||
item.ident,
|
||||
&item.kind,
|
||||
ValueNS,
|
||||
item.span,
|
||||
seen_trait_items,
|
||||
|i, s, c| ConstNotMemberOfTrait(i, s, c),
|
||||
);
|
||||
|
||||
this.visit_generics(generics);
|
||||
this.visit_ty(ty);
|
||||
if let Some(expr) = expr {
|
||||
// We allow arbitrary const expressions inside of associated consts,
|
||||
// even if they are potentially not const evaluatable.
|
||||
//
|
||||
// Type parameters can already be used and as associated consts are
|
||||
// not used as part of the type system, this is far less surprising.
|
||||
this.resolve_const_body(expr, None);
|
||||
}
|
||||
},
|
||||
);
|
||||
this.visit_generics(generics);
|
||||
this.visit_ty(ty);
|
||||
if let Some(expr) = expr {
|
||||
// We allow arbitrary const expressions inside of associated consts,
|
||||
// even if they are potentially not const evaluatable.
|
||||
//
|
||||
// Type parameters can already be used and as associated consts are
|
||||
// not used as part of the type system, this is far less surprising.
|
||||
this.resolve_const_body(expr, None);
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
}
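The kind of associated constant these ribs apply to, as a sketch (the fully explicit `'static` form is shown so the example compiles on any recent compiler; writing plain `&str` there, i.e. eliding the lifetime, is the case the `AnonymousWarn` rib is pushed for while resolving `ty`):

struct S;

impl S {
    // Eliding `'static` here is what produces the diagnostic built in the
    // `AnonymousWarn` arm earlier in this file.
    const GREETING: &'static str = "hello";
}

fn main() {
    println!("{}", S::GREETING);
}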
|
||||
|
@ -989,7 +989,6 @@ pub struct Resolver<'a, 'tcx> {
|
||||
glob_map: FxHashMap<LocalDefId, FxHashSet<Symbol>>,
|
||||
/// Visibilities in "lowered" form, for all entities that have them.
|
||||
visibilities: FxHashMap<LocalDefId, ty::Visibility>,
|
||||
has_pub_restricted: bool,
|
||||
used_imports: FxHashSet<NodeId>,
|
||||
maybe_unused_trait_imports: FxIndexSet<LocalDefId>,
|
||||
|
||||
@ -1342,7 +1341,6 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
|
||||
glob_map: Default::default(),
|
||||
visibilities,
|
||||
has_pub_restricted: false,
|
||||
used_imports: FxHashSet::default(),
|
||||
maybe_unused_trait_imports: Default::default(),
|
||||
|
||||
@ -1486,7 +1484,6 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let proc_macros = self.proc_macros.iter().map(|id| self.local_def_id(*id)).collect();
|
||||
let expn_that_defined = self.expn_that_defined;
|
||||
let visibilities = self.visibilities;
|
||||
let has_pub_restricted = self.has_pub_restricted;
|
||||
let extern_crate_map = self.extern_crate_map;
|
||||
let maybe_unused_trait_imports = self.maybe_unused_trait_imports;
|
||||
let glob_map = self.glob_map;
|
||||
@ -1504,7 +1501,6 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let global_ctxt = ResolverGlobalCtxt {
|
||||
expn_that_defined,
|
||||
visibilities,
|
||||
has_pub_restricted,
|
||||
effective_visibilities,
|
||||
extern_crate_map,
|
||||
module_children: self.module_children,
|
||||
|
@ -5,7 +5,6 @@
|
||||
|
||||
use std::fmt::Debug;
|
||||
use std::ops::Index;
|
||||
use std::string::ToString;
|
||||
|
||||
use crate::rustc_internal;
|
||||
use crate::{
|
||||
@ -156,10 +155,23 @@ pub fn run(tcx: TyCtxt<'_>, f: impl FnOnce()) {
|
||||
}
|
||||
|
||||
/// A type that provides internal information but that can still be used for debug purpose.
|
||||
pub type Opaque = impl Debug + ToString + Clone;
|
||||
#[derive(Clone)]
|
||||
pub struct Opaque(String);
|
||||
|
||||
impl std::fmt::Display for Opaque {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Opaque {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{:?}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn opaque<T: Debug>(value: &T) -> Opaque {
|
||||
format!("{value:?}")
|
||||
Opaque(format!("{value:?}"))
|
||||
}
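A standalone sketch of the same newtype pattern `Opaque` now uses, showing why the `Display` and `Debug` impls differ (the names below are stand-ins; the real type lives in `rustc_internal` and is built via `opaque`):

use std::fmt;

#[derive(Clone)]
pub struct OpaqueLike(String);

impl fmt::Display for OpaqueLike {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display prints the captured text as-is.
        write!(f, "{}", self.0)
    }
}

impl fmt::Debug for OpaqueLike {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug re-quotes the captured text, matching the `{:?}` write above.
        write!(f, "{:?}", self.0)
    }
}

fn make_opaque<T: fmt::Debug>(value: &T) -> OpaqueLike {
    OpaqueLike(format!("{value:?}"))
}

fn main() {
    let o = make_opaque(&vec![1, 2, 3]);
    assert_eq!(o.to_string(), "[1, 2, 3]");
    assert_eq!(format!("{o:?}"), "\"[1, 2, 3]\"");
}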
|
||||
|
||||
pub struct StableMir {
|
||||
|
124
compiler/rustc_smir/src/rustc_smir/alloc.rs
Normal file
@ -0,0 +1,124 @@
|
||||
use rustc_middle::mir::interpret::{alloc_range, AllocRange, ConstValue, Pointer};
|
||||
|
||||
use crate::{
|
||||
rustc_internal::opaque,
|
||||
rustc_smir::{Stable, Tables},
|
||||
stable_mir::mir::Mutability,
|
||||
stable_mir::ty::{Allocation, ProvenanceMap},
|
||||
};
|
||||
|
||||
/// Creates new empty `Allocation` from given `Align`.
|
||||
fn new_empty_allocation(align: rustc_target::abi::Align) -> Allocation {
|
||||
Allocation {
|
||||
bytes: Vec::new(),
|
||||
provenance: ProvenanceMap { ptrs: Vec::new() },
|
||||
align: align.bytes(),
|
||||
mutability: Mutability::Not,
|
||||
}
|
||||
}
|
||||
|
||||
// We need this method instead of a Stable implementation
|
||||
// because we need to get `Ty` of the const we are trying to create, to do that
|
||||
// we need to have access to `ConstantKind` but we can't access that inside Stable impl.
|
||||
#[allow(rustc::usage_of_qualified_ty)]
|
||||
pub fn new_allocation<'tcx>(
|
||||
ty: rustc_middle::ty::Ty<'tcx>,
|
||||
const_value: ConstValue<'tcx>,
|
||||
tables: &mut Tables<'tcx>,
|
||||
) -> Allocation {
|
||||
match const_value {
|
||||
ConstValue::Scalar(scalar) => {
|
||||
let size = scalar.size();
|
||||
let align = tables
|
||||
.tcx
|
||||
.layout_of(rustc_middle::ty::ParamEnv::reveal_all().and(ty))
|
||||
.unwrap()
|
||||
.align;
|
||||
let mut allocation = rustc_middle::mir::interpret::Allocation::uninit(size, align.abi);
|
||||
allocation
|
||||
.write_scalar(&tables.tcx, alloc_range(rustc_target::abi::Size::ZERO, size), scalar)
|
||||
.unwrap();
|
||||
allocation.stable(tables)
|
||||
}
|
||||
ConstValue::ZeroSized => {
|
||||
let align =
|
||||
tables.tcx.layout_of(rustc_middle::ty::ParamEnv::empty().and(ty)).unwrap().align;
|
||||
new_empty_allocation(align.abi)
|
||||
}
|
||||
ConstValue::Slice { data, start, end } => {
|
||||
let alloc_id = tables.tcx.create_memory_alloc(data);
|
||||
let ptr = Pointer::new(alloc_id, rustc_target::abi::Size::from_bytes(start));
|
||||
let scalar_ptr = rustc_middle::mir::interpret::Scalar::from_pointer(ptr, &tables.tcx);
|
||||
let scalar_len = rustc_middle::mir::interpret::Scalar::from_target_usize(
|
||||
(end - start) as u64,
|
||||
&tables.tcx,
|
||||
);
|
||||
let layout =
|
||||
tables.tcx.layout_of(rustc_middle::ty::ParamEnv::reveal_all().and(ty)).unwrap();
|
||||
let mut allocation =
|
||||
rustc_middle::mir::interpret::Allocation::uninit(layout.size, layout.align.abi);
|
||||
allocation
|
||||
.write_scalar(
|
||||
&tables.tcx,
|
||||
alloc_range(rustc_target::abi::Size::ZERO, tables.tcx.data_layout.pointer_size),
|
||||
scalar_ptr,
|
||||
)
|
||||
.unwrap();
|
||||
allocation
|
||||
.write_scalar(
|
||||
&tables.tcx,
|
||||
alloc_range(tables.tcx.data_layout.pointer_size, scalar_len.size()),
|
||||
scalar_len,
|
||||
)
|
||||
.unwrap();
|
||||
allocation.stable(tables)
|
||||
}
|
||||
ConstValue::ByRef { alloc, offset } => {
|
||||
let ty_size = tables
|
||||
.tcx
|
||||
.layout_of(rustc_middle::ty::ParamEnv::reveal_all().and(ty))
|
||||
.unwrap()
|
||||
.size;
|
||||
allocation_filter(&alloc.0, alloc_range(offset, ty_size), tables)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates an `Allocation` only from information within the `AllocRange`.
|
||||
pub(super) fn allocation_filter<'tcx>(
|
||||
alloc: &rustc_middle::mir::interpret::Allocation,
|
||||
alloc_range: AllocRange,
|
||||
tables: &mut Tables<'tcx>,
|
||||
) -> Allocation {
|
||||
let mut bytes: Vec<Option<u8>> = alloc
|
||||
.inspect_with_uninit_and_ptr_outside_interpreter(
|
||||
alloc_range.start.bytes_usize()..alloc_range.end().bytes_usize(),
|
||||
)
|
||||
.iter()
|
||||
.copied()
|
||||
.map(Some)
|
||||
.collect();
|
||||
for (i, b) in bytes.iter_mut().enumerate() {
|
||||
if !alloc
|
||||
.init_mask()
|
||||
.get(rustc_target::abi::Size::from_bytes(i + alloc_range.start.bytes_usize()))
|
||||
{
|
||||
*b = None;
|
||||
}
|
||||
}
|
||||
let mut ptrs = Vec::new();
|
||||
for (offset, prov) in alloc
|
||||
.provenance()
|
||||
.ptrs()
|
||||
.iter()
|
||||
.filter(|a| a.0 >= alloc_range.start && a.0 <= alloc_range.end())
|
||||
{
|
||||
ptrs.push((offset.bytes_usize() - alloc_range.start.bytes_usize(), opaque(prov)));
|
||||
}
|
||||
Allocation {
|
||||
bytes: bytes,
|
||||
provenance: ProvenanceMap { ptrs },
|
||||
align: alloc.align.bytes(),
|
||||
mutability: alloc.mutability.stable(tables),
|
||||
}
|
||||
}
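A reduced sketch of the byte/init-mask step `allocation_filter` performs, using plain std types rather than the interpreter's `Allocation` (the real code also remaps provenance offsets into the requested range):

// Pair raw bytes with an initialization mask, turning uninitialized
// positions into `None` -- the `Vec<Option<u8>>` shape produced above.
fn mask_bytes(bytes: &[u8], initialized: &[bool]) -> Vec<Option<u8>> {
    bytes
        .iter()
        .zip(initialized)
        .map(|(byte, init)| if *init { Some(*byte) } else { None })
        .collect()
}

fn main() {
    let bytes = [0xde, 0xad, 0xbe, 0xef];
    let init = [true, false, true, true];
    assert_eq!(mask_bytes(&bytes, &init), vec![Some(0xde), None, Some(0xbe), Some(0xef)]);
}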
|
@ -9,10 +9,7 @@
|
||||
|
||||
use crate::rustc_internal::{self, opaque};
|
||||
use crate::stable_mir::mir::{CopyNonOverlapping, UserTypeProjection, VariantIdx};
|
||||
use crate::stable_mir::ty::{
|
||||
allocation_filter, new_allocation, Const, FloatTy, GenericParamDef, IntTy, Movability, RigidTy,
|
||||
TyKind, UintTy,
|
||||
};
|
||||
use crate::stable_mir::ty::{FloatTy, GenericParamDef, IntTy, Movability, RigidTy, TyKind, UintTy};
|
||||
use crate::stable_mir::{self, Context};
|
||||
use rustc_hir as hir;
|
||||
use rustc_middle::mir::interpret::alloc_range;
|
||||
@ -22,6 +19,8 @@ use rustc_span::def_id::{CrateNum, DefId, LOCAL_CRATE};
|
||||
use rustc_target::abi::FieldIdx;
|
||||
use tracing::debug;
|
||||
|
||||
mod alloc;
|
||||
|
||||
impl<'tcx> Context for Tables<'tcx> {
|
||||
fn local_crate(&self) -> stable_mir::Crate {
|
||||
smir_crate(self.tcx, LOCAL_CRATE)
|
||||
@ -205,8 +204,7 @@ impl<'tcx> Stable<'tcx> for mir::Rvalue<'tcx> {
|
||||
match self {
|
||||
Use(op) => stable_mir::mir::Rvalue::Use(op.stable(tables)),
|
||||
Repeat(op, len) => {
|
||||
let cnst = ConstantKind::from_const(*len, tables.tcx);
|
||||
let len = Const { literal: cnst.stable(tables) };
|
||||
let len = len.stable(tables);
|
||||
stable_mir::mir::Rvalue::Repeat(op.stable(tables), len)
|
||||
}
|
||||
Ref(region, kind, place) => stable_mir::mir::Rvalue::Ref(
|
||||
@ -394,8 +392,7 @@ impl<'tcx> Stable<'tcx> for ty::TermKind<'tcx> {
|
||||
match self {
|
||||
ty::TermKind::Ty(ty) => TermKind::Type(tables.intern_ty(*ty)),
|
||||
ty::TermKind::Const(cnst) => {
|
||||
let cnst = ConstantKind::from_const(*cnst, tables.tcx);
|
||||
let cnst = Const { literal: cnst.stable(tables) };
|
||||
let cnst = cnst.stable(tables);
|
||||
TermKind::Const(cnst)
|
||||
}
|
||||
}
|
||||
@ -1083,8 +1080,32 @@ impl<'tcx> Stable<'tcx> for ty::Const<'tcx> {
|
||||
type T = stable_mir::ty::Const;
|
||||
|
||||
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
|
||||
let cnst = ConstantKind::from_const(*self, tables.tcx);
|
||||
stable_mir::ty::Const { literal: cnst.stable(tables) }
|
||||
stable_mir::ty::Const {
|
||||
literal: match self.kind() {
|
||||
ty::Value(val) => {
|
||||
let const_val = tables.tcx.valtree_to_const_val((self.ty(), val));
|
||||
stable_mir::ty::ConstantKind::Allocated(alloc::new_allocation(
|
||||
self.ty(),
|
||||
const_val,
|
||||
tables,
|
||||
))
|
||||
}
|
||||
ty::ParamCt(param) => stable_mir::ty::ConstantKind::ParamCt(opaque(&param)),
|
||||
ty::ErrorCt(_) => unreachable!(),
|
||||
ty::InferCt(_) => unreachable!(),
|
||||
ty::BoundCt(_, _) => unimplemented!(),
|
||||
ty::PlaceholderCt(_) => unimplemented!(),
|
||||
ty::Unevaluated(uv) => {
|
||||
stable_mir::ty::ConstantKind::Unevaluated(stable_mir::ty::UnevaluatedConst {
|
||||
ty: tables.intern_ty(self.ty()),
|
||||
def: tables.const_def(uv.def),
|
||||
args: uv.args.stable(tables),
|
||||
promoted: None,
|
||||
})
|
||||
}
|
||||
ty::ExprCt(_) => unimplemented!(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1108,7 +1129,11 @@ impl<'tcx> Stable<'tcx> for mir::interpret::Allocation {
|
||||
type T = stable_mir::ty::Allocation;
|
||||
|
||||
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
|
||||
allocation_filter(self, alloc_range(rustc_target::abi::Size::ZERO, self.size()), tables)
|
||||
alloc::allocation_filter(
|
||||
self,
|
||||
alloc_range(rustc_target::abi::Size::ZERO, self.size()),
|
||||
tables,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1155,26 +1180,18 @@ impl<'tcx> Stable<'tcx> for rustc_middle::mir::ConstantKind<'tcx> {
|
||||
type T = stable_mir::ty::ConstantKind;
|
||||
|
||||
fn stable(&self, tables: &mut Tables<'tcx>) -> Self::T {
|
||||
match self {
|
||||
ConstantKind::Ty(c) => match c.kind() {
|
||||
ty::Value(val) => {
|
||||
let const_val = tables.tcx.valtree_to_const_val((c.ty(), val));
|
||||
stable_mir::ty::ConstantKind::Allocated(new_allocation(self, const_val, tables))
|
||||
}
|
||||
ty::ParamCt(param) => stable_mir::ty::ConstantKind::ParamCt(opaque(&param)),
|
||||
ty::ErrorCt(_) => unreachable!(),
|
||||
_ => unimplemented!(),
|
||||
},
|
||||
match *self {
|
||||
ConstantKind::Ty(c) => c.stable(tables).literal,
|
||||
ConstantKind::Unevaluated(unev_const, ty) => {
|
||||
stable_mir::ty::ConstantKind::Unevaluated(stable_mir::ty::UnevaluatedConst {
|
||||
ty: tables.intern_ty(*ty),
|
||||
ty: tables.intern_ty(ty),
|
||||
def: tables.const_def(unev_const.def),
|
||||
args: unev_const.args.stable(tables),
|
||||
promoted: unev_const.promoted.map(|u| u.as_u32()),
|
||||
})
|
||||
}
|
||||
ConstantKind::Val(val, _) => {
|
||||
stable_mir::ty::ConstantKind::Allocated(new_allocation(self, *val, tables))
|
||||
ConstantKind::Val(val, ty) => {
|
||||
stable_mir::ty::ConstantKind::Allocated(alloc::new_allocation(ty, val, tables))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -20,6 +20,7 @@ use crate::rustc_smir::Tables;
|
||||
|
||||
pub mod mir;
|
||||
pub mod ty;
|
||||
pub mod visitor;
|
||||
|
||||
/// Use String for now but we should replace it.
|
||||
pub type Symbol = String;
|
||||
|
@ -1,10 +1,5 @@
|
||||
use rustc_middle::mir::interpret::{alloc_range, AllocRange, ConstValue, Pointer};
|
||||
|
||||
use super::{mir::Mutability, mir::Safety, with, DefId};
|
||||
use crate::{
|
||||
rustc_internal::{opaque, Opaque},
|
||||
rustc_smir::{Stable, Tables},
|
||||
};
|
||||
use crate::rustc_internal::Opaque;
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct Ty(pub usize);
|
||||
@ -286,128 +281,6 @@ pub struct Allocation {
|
||||
pub mutability: Mutability,
|
||||
}
|
||||
|
||||
impl Allocation {
|
||||
/// Creates new empty `Allocation` from given `Align`.
|
||||
fn new_empty_allocation(align: rustc_target::abi::Align) -> Allocation {
|
||||
Allocation {
|
||||
bytes: Vec::new(),
|
||||
provenance: ProvenanceMap { ptrs: Vec::new() },
|
||||
align: align.bytes(),
|
||||
mutability: Mutability::Not,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// We need this method instead of a Stable implementation
|
||||
// because we need to get `Ty` of the const we are trying to create, to do that
|
||||
// we need to have access to `ConstantKind` but we can't access that inside Stable impl.
|
||||
pub fn new_allocation<'tcx>(
|
||||
const_kind: &rustc_middle::mir::ConstantKind<'tcx>,
|
||||
const_value: ConstValue<'tcx>,
|
||||
tables: &mut Tables<'tcx>,
|
||||
) -> Allocation {
|
||||
match const_value {
|
||||
ConstValue::Scalar(scalar) => {
|
||||
let size = scalar.size();
|
||||
let align = tables
|
||||
.tcx
|
||||
.layout_of(rustc_middle::ty::ParamEnv::reveal_all().and(const_kind.ty()))
|
||||
.unwrap()
|
||||
.align;
|
||||
let mut allocation = rustc_middle::mir::interpret::Allocation::uninit(size, align.abi);
|
||||
allocation
|
||||
.write_scalar(&tables.tcx, alloc_range(rustc_target::abi::Size::ZERO, size), scalar)
|
||||
.unwrap();
|
||||
allocation.stable(tables)
|
||||
}
|
||||
ConstValue::ZeroSized => {
|
||||
let align = tables
|
||||
.tcx
|
||||
.layout_of(rustc_middle::ty::ParamEnv::empty().and(const_kind.ty()))
|
||||
.unwrap()
|
||||
.align;
|
||||
Allocation::new_empty_allocation(align.abi)
|
||||
}
|
||||
ConstValue::Slice { data, start, end } => {
|
||||
let alloc_id = tables.tcx.create_memory_alloc(data);
|
||||
let ptr = Pointer::new(alloc_id, rustc_target::abi::Size::from_bytes(start));
|
||||
let scalar_ptr = rustc_middle::mir::interpret::Scalar::from_pointer(ptr, &tables.tcx);
|
||||
let scalar_len = rustc_middle::mir::interpret::Scalar::from_target_usize(
|
||||
(end - start) as u64,
|
||||
&tables.tcx,
|
||||
);
|
||||
let layout = tables
|
||||
.tcx
|
||||
.layout_of(rustc_middle::ty::ParamEnv::reveal_all().and(const_kind.ty()))
|
||||
.unwrap();
|
||||
let mut allocation =
|
||||
rustc_middle::mir::interpret::Allocation::uninit(layout.size, layout.align.abi);
|
||||
allocation
|
||||
.write_scalar(
|
||||
&tables.tcx,
|
||||
alloc_range(rustc_target::abi::Size::ZERO, tables.tcx.data_layout.pointer_size),
|
||||
scalar_ptr,
|
||||
)
|
||||
.unwrap();
|
||||
allocation
|
||||
.write_scalar(
|
||||
&tables.tcx,
|
||||
alloc_range(tables.tcx.data_layout.pointer_size, scalar_len.size()),
|
||||
scalar_len,
|
||||
)
|
||||
.unwrap();
|
||||
allocation.stable(tables)
|
||||
}
|
||||
ConstValue::ByRef { alloc, offset } => {
|
||||
let ty_size = tables
|
||||
.tcx
|
||||
.layout_of(rustc_middle::ty::ParamEnv::reveal_all().and(const_kind.ty()))
|
||||
.unwrap()
|
||||
.size;
|
||||
allocation_filter(&alloc.0, alloc_range(offset, ty_size), tables)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates an `Allocation` only from information within the `AllocRange`.
|
||||
pub fn allocation_filter<'tcx>(
|
||||
alloc: &rustc_middle::mir::interpret::Allocation,
|
||||
alloc_range: AllocRange,
|
||||
tables: &mut Tables<'tcx>,
|
||||
) -> Allocation {
|
||||
let mut bytes: Vec<Option<u8>> = alloc
|
||||
.inspect_with_uninit_and_ptr_outside_interpreter(
|
||||
alloc_range.start.bytes_usize()..alloc_range.end().bytes_usize(),
|
||||
)
|
||||
.iter()
|
||||
.copied()
|
||||
.map(Some)
|
||||
.collect();
|
||||
for (i, b) in bytes.iter_mut().enumerate() {
|
||||
if !alloc
|
||||
.init_mask()
|
||||
.get(rustc_target::abi::Size::from_bytes(i + alloc_range.start.bytes_usize()))
|
||||
{
|
||||
*b = None;
|
||||
}
|
||||
}
|
||||
let mut ptrs = Vec::new();
|
||||
for (offset, prov) in alloc
|
||||
.provenance()
|
||||
.ptrs()
|
||||
.iter()
|
||||
.filter(|a| a.0 >= alloc_range.start && a.0 <= alloc_range.end())
|
||||
{
|
||||
ptrs.push((offset.bytes_usize() - alloc_range.start.bytes_usize(), opaque(prov)));
|
||||
}
|
||||
Allocation {
|
||||
bytes: bytes,
|
||||
provenance: ProvenanceMap { ptrs },
|
||||
align: alloc.align.bytes(),
|
||||
mutability: alloc.mutability.stable(tables),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ConstantKind {
|
||||
Allocated(Allocation),
|
||||
|
186
compiler/rustc_smir/src/stable_mir/visitor.rs
Normal file
@ -0,0 +1,186 @@
|
||||
use std::ops::ControlFlow;
|
||||
|
||||
use crate::rustc_internal::Opaque;
|
||||
|
||||
use super::ty::{
|
||||
Allocation, Binder, Const, ConstDef, ExistentialPredicate, FnSig, GenericArgKind, GenericArgs,
|
||||
Promoted, RigidTy, TermKind, Ty, UnevaluatedConst,
|
||||
};
|
||||
|
||||
pub trait Visitor: Sized {
|
||||
type Break;
|
||||
fn visit_ty(&mut self, ty: &Ty) -> ControlFlow<Self::Break> {
|
||||
ty.super_visit(self)
|
||||
}
|
||||
fn visit_const(&mut self, c: &Const) -> ControlFlow<Self::Break> {
|
||||
c.super_visit(self)
|
||||
}
|
||||
}
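A self-contained sketch of the `ControlFlow`-driven visitor shape defined here, with stand-in types instead of the stable_mir ones, to show how `Break` short-circuits a traversal:

use std::ops::ControlFlow;

// Stand-in "tree" of values, playing the role of the stable_mir types.
enum Node {
    Leaf(u32),
    Pair(Box<Node>, Box<Node>),
}

trait MiniVisitor: Sized {
    type Break;
    fn visit_leaf(&mut self, value: u32) -> ControlFlow<Self::Break>;
}

impl Node {
    fn visit<V: MiniVisitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
        match self {
            Node::Leaf(v) => visitor.visit_leaf(*v),
            Node::Pair(a, b) => {
                // `?` propagates `Break`, stopping the traversal early.
                a.visit(visitor)?;
                b.visit(visitor)
            }
        }
    }
}

// A visitor that stops as soon as it finds a leaf above a threshold.
struct FindLarge(u32);

impl MiniVisitor for FindLarge {
    type Break = u32;
    fn visit_leaf(&mut self, value: u32) -> ControlFlow<u32> {
        if value > self.0 { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
    }
}

fn main() {
    let tree = Node::Pair(
        Box::new(Node::Leaf(1)),
        Box::new(Node::Pair(Box::new(Node::Leaf(7)), Box::new(Node::Leaf(2)))),
    );
    assert_eq!(tree.visit(&mut FindLarge(5)), ControlFlow::Break(7));
}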
|
||||
|
||||
pub trait Visitable {
|
||||
fn visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
self.super_visit(visitor)
|
||||
}
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break>;
|
||||
}
|
||||
|
||||
impl Visitable for Ty {
|
||||
fn visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
visitor.visit_ty(self)
|
||||
}
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match self.kind() {
|
||||
super::ty::TyKind::RigidTy(ty) => ty.visit(visitor),
|
||||
super::ty::TyKind::Alias(_, alias) => alias.args.visit(visitor),
|
||||
super::ty::TyKind::Param(_) => todo!(),
|
||||
super::ty::TyKind::Bound(_, _) => todo!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for Const {
|
||||
fn visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
visitor.visit_const(self)
|
||||
}
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match &self.literal {
|
||||
super::ty::ConstantKind::Allocated(alloc) => alloc.visit(visitor),
|
||||
super::ty::ConstantKind::Unevaluated(uv) => uv.visit(visitor),
|
||||
super::ty::ConstantKind::ParamCt(param) => param.visit(visitor),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for Opaque {
|
||||
fn super_visit<V: Visitor>(&self, _visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for Allocation {
|
||||
fn super_visit<V: Visitor>(&self, _visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for UnevaluatedConst {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
let UnevaluatedConst { ty, def, args, promoted } = self;
|
||||
ty.visit(visitor)?;
|
||||
def.visit(visitor)?;
|
||||
args.visit(visitor)?;
|
||||
promoted.visit(visitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for ConstDef {
|
||||
fn super_visit<V: Visitor>(&self, _visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Visitable> Visitable for Option<T> {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match self {
|
||||
Some(val) => val.visit(visitor),
|
||||
None => ControlFlow::Continue(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for Promoted {
|
||||
fn super_visit<V: Visitor>(&self, _visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for GenericArgs {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
self.0.visit(visitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for GenericArgKind {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match self {
|
||||
GenericArgKind::Lifetime(lt) => lt.visit(visitor),
|
||||
GenericArgKind::Type(t) => t.visit(visitor),
|
||||
GenericArgKind::Const(c) => c.visit(visitor),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for RigidTy {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match self {
|
||||
RigidTy::Bool
|
||||
| RigidTy::Char
|
||||
| RigidTy::Int(_)
|
||||
| RigidTy::Uint(_)
|
||||
| RigidTy::Float(_)
|
||||
| RigidTy::Never
|
||||
| RigidTy::Foreign(_)
|
||||
| RigidTy::Str => ControlFlow::Continue(()),
|
||||
RigidTy::Array(t, c) => {
|
||||
t.visit(visitor)?;
|
||||
c.visit(visitor)
|
||||
}
|
||||
RigidTy::Slice(inner) => inner.visit(visitor),
|
||||
RigidTy::RawPtr(ty, _) => ty.visit(visitor),
|
||||
RigidTy::Ref(_, ty, _) => ty.visit(visitor),
|
||||
RigidTy::FnDef(_, args) => args.visit(visitor),
|
||||
RigidTy::FnPtr(sig) => sig.visit(visitor),
|
||||
RigidTy::Closure(_, args) => args.visit(visitor),
|
||||
RigidTy::Generator(_, args, _) => args.visit(visitor),
|
||||
RigidTy::Dynamic(pred, r, _) => {
|
||||
pred.visit(visitor)?;
|
||||
r.visit(visitor)
|
||||
}
|
||||
RigidTy::Tuple(fields) => fields.visit(visitor),
|
||||
RigidTy::Adt(_, args) => args.visit(visitor),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Visitable> Visitable for Vec<T> {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
for arg in self {
|
||||
arg.visit(visitor)?;
|
||||
}
|
||||
ControlFlow::Continue(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Visitable> Visitable for Binder<T> {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
self.value.visit(visitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for ExistentialPredicate {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match self {
|
||||
ExistentialPredicate::Trait(tr) => tr.generic_args.visit(visitor),
|
||||
ExistentialPredicate::Projection(p) => {
|
||||
p.term.visit(visitor)?;
|
||||
p.generic_args.visit(visitor)
|
||||
}
|
||||
ExistentialPredicate::AutoTrait(_) => ControlFlow::Continue(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for TermKind {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
match self {
|
||||
TermKind::Type(t) => t.visit(visitor),
|
||||
TermKind::Const(c) => c.visit(visitor),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitable for FnSig {
|
||||
fn super_visit<V: Visitor>(&self, visitor: &mut V) -> ControlFlow<V::Break> {
|
||||
self.inputs_and_output.visit(visitor)
|
||||
}
|
||||
}
|
@ -40,4 +40,5 @@ trait_selection_no_value_in_rustc_on_unimplemented = this attribute must have a
|
||||
.label = expected value here
|
||||
.note = eg `#[rustc_on_unimplemented(message="foo")]`
|
||||
|
||||
trait_selection_ty_alias_overflow = in case this is a recursive type alias, consider using a struct, enum, or union instead
|
||||
trait_selection_unable_to_construct_constant_value = unable to construct a constant value for the unevaluated constant {$unevaluated}
|
||||
|
@ -537,7 +537,7 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> {
|
||||
|
||||
/// Iterate over all added goals: returning `Ok(Some(_))` in case we can stop rerunning.
|
||||
///
|
||||
/// Goals for the next step get directly added the the nested goals of the `EvalCtxt`.
|
||||
/// Goals for the next step get directly added to the nested goals of the `EvalCtxt`.
|
||||
fn evaluate_added_goals_step(&mut self) -> Result<Option<Certainty>, NoSolution> {
|
||||
let tcx = self.tcx();
|
||||
let mut goals = core::mem::replace(&mut self.nested_goals, NestedGoals::new());
|
||||
|
@ -659,6 +659,18 @@ impl<'a, 'b, 'tcx> TypeFolder<TyCtxt<'tcx>> for AssocTypeNormalizer<'a, 'b, 'tcx
|
||||
normalized_ty
|
||||
}
|
||||
ty::Weak => {
|
||||
let recursion_limit = self.interner().recursion_limit();
|
||||
if !recursion_limit.value_within_limit(self.depth) {
|
||||
self.selcx.infcx.err_ctxt().report_overflow_error(
|
||||
&ty,
|
||||
self.cause.span,
|
||||
false,
|
||||
|diag| {
|
||||
diag.note(crate::fluent_generated::trait_selection_ty_alias_overflow);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
let infcx = self.selcx.infcx;
|
||||
self.obligations.extend(
|
||||
infcx.tcx.predicates_of(data.def_id).instantiate_own(infcx.tcx, data.args).map(
|
||||
@ -678,7 +690,14 @@ impl<'a, 'b, 'tcx> TypeFolder<TyCtxt<'tcx>> for AssocTypeNormalizer<'a, 'b, 'tcx
|
||||
},
|
||||
),
|
||||
);
|
||||
infcx.tcx.type_of(data.def_id).instantiate(infcx.tcx, data.args).fold_with(self)
|
||||
self.depth += 1;
|
||||
let res = infcx
|
||||
.tcx
|
||||
.type_of(data.def_id)
|
||||
.instantiate(infcx.tcx, data.args)
|
||||
.fold_with(self);
|
||||
self.depth -= 1;
|
||||
res
|
||||
}
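As a hypothetical reproducer of the overflow this arm guards against (the surface syntax assumes the unstable `lazy_type_alias` feature, so this is a sketch of the shape rather than a confirmed test case):

// #![feature(lazy_type_alias)]
//
// // A weak (lazy) type alias that mentions itself: normalizing it keeps
// // re-expanding the alias until `recursion_limit` is hit, at which point
// // the new `trait_selection_ty_alias_overflow` note suggests using a
// // struct, enum, or union instead.
// type Recursive = Option<Recursive>;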
|
||||
|
||||
ty::Inherent if !data.has_escaping_bound_vars() => {
|
||||
|
@ -87,20 +87,54 @@ impl From<char> for u128 {
|
||||
}
|
||||
}
|
||||
|
||||
/// Map `char` with code point in U+0000..=U+00FF to byte in 0x00..=0xFF with same value, failing
|
||||
/// if the code point is greater than U+00FF.
|
||||
/// Maps a `char` with code point in U+0000..=U+00FF to a byte in 0x00..=0xFF with same value,
|
||||
/// failing if the code point is greater than U+00FF.
|
||||
///
|
||||
/// See [`impl From<u8> for char`](char#impl-From<u8>-for-char) for details on the encoding.
|
||||
#[stable(feature = "u8_from_char", since = "1.59.0")]
|
||||
impl TryFrom<char> for u8 {
|
||||
type Error = TryFromCharError;
|
||||
|
||||
/// Tries to convert a [`char`] into a [`u8`].
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let a = 'ÿ'; // U+00FF
|
||||
/// let b = 'Ā'; // U+0100
|
||||
/// assert_eq!(u8::try_from(a), Ok(0xFF_u8));
|
||||
/// assert!(u8::try_from(b).is_err());
|
||||
/// ```
|
||||
#[inline]
|
||||
fn try_from(c: char) -> Result<u8, Self::Error> {
|
||||
u8::try_from(u32::from(c)).map_err(|_| TryFromCharError(()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Maps a `char` with code point in U+0000..=U+FFFF to a `u16` in 0x0000..=0xFFFF with same value,
|
||||
/// failing if the code point is greater than U+FFFF.
|
||||
///
|
||||
/// This corresponds to the UCS-2 encoding, as specified in ISO/IEC 10646:2003.
|
||||
#[stable(feature = "u16_from_char", since = "CURRENT_RUSTC_VERSION")]
|
||||
impl TryFrom<char> for u16 {
|
||||
type Error = TryFromCharError;
|
||||
|
||||
/// Tries to convert a [`char`] into a [`u16`].
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// let trans_rights = '⚧'; // U+26A7
|
||||
/// let ninjas = '🥷'; // U+1F977
|
||||
/// assert_eq!(u16::try_from(trans_rights), Ok(0x26A7_u16));
|
||||
/// assert!(u16::try_from(ninjas).is_err());
|
||||
/// ```
|
||||
#[inline]
|
||||
fn try_from(c: char) -> Result<u16, Self::Error> {
|
||||
u16::try_from(u32::from(c)).map_err(|_| TryFromCharError(()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Maps a byte in 0x00..=0xFF to a `char` whose code point has the same value, in U+0000..=U+00FF.
|
||||
///
|
||||
/// Unicode is designed such that this effectively decodes bytes
|
||||
|
@ -1,5 +1,7 @@
|
||||
use crate::cmp;
|
||||
use crate::iter::{adapters::SourceIter, FusedIterator, InPlaceIterable, TrustedLen};
|
||||
use crate::iter::{
|
||||
adapters::SourceIter, FusedIterator, InPlaceIterable, TrustedLen, TrustedRandomAccess,
|
||||
};
|
||||
use crate::num::NonZeroUsize;
|
||||
use crate::ops::{ControlFlow, Try};
|
||||
|
||||
@ -98,26 +100,18 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl_fold_via_try_fold! { fold -> try_fold }
|
||||
#[inline]
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
Self: Sized,
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
Self::spec_fold(self, init, f)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn for_each<F: FnMut(Self::Item)>(mut self, f: F) {
|
||||
// The default implementation would use a unit accumulator, so we can
|
||||
// avoid a stateful closure by folding over the remaining number
|
||||
// of items we wish to return instead.
|
||||
fn check<'a, Item>(
|
||||
mut action: impl FnMut(Item) + 'a,
|
||||
) -> impl FnMut(usize, Item) -> Option<usize> + 'a {
|
||||
move |more, x| {
|
||||
action(x);
|
||||
more.checked_sub(1)
|
||||
}
|
||||
}
|
||||
|
||||
let remaining = self.n;
|
||||
if remaining > 0 {
|
||||
self.iter.try_fold(remaining - 1, check(f));
|
||||
}
|
||||
fn for_each<F: FnMut(Self::Item)>(self, f: F) {
|
||||
Self::spec_for_each(self, f)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -249,3 +243,72 @@ impl<I> FusedIterator for Take<I> where I: FusedIterator {}
|
||||
|
||||
#[unstable(feature = "trusted_len", issue = "37572")]
|
||||
unsafe impl<I: TrustedLen> TrustedLen for Take<I> {}
|
||||
|
||||
trait SpecTake: Iterator {
|
||||
fn spec_fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
Self: Sized,
|
||||
F: FnMut(B, Self::Item) -> B;
|
||||
|
||||
fn spec_for_each<F: FnMut(Self::Item)>(self, f: F);
|
||||
}
|
||||
|
||||
impl<I: Iterator> SpecTake for Take<I> {
|
||||
#[inline]
|
||||
default fn spec_fold<B, F>(mut self, init: B, f: F) -> B
|
||||
where
|
||||
Self: Sized,
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
use crate::ops::NeverShortCircuit;
|
||||
self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
|
||||
}
|
||||
|
||||
#[inline]
|
||||
default fn spec_for_each<F: FnMut(Self::Item)>(mut self, f: F) {
|
||||
// The default implementation would use a unit accumulator, so we can
|
||||
// avoid a stateful closure by folding over the remaining number
|
||||
// of items we wish to return instead.
|
||||
fn check<'a, Item>(
|
||||
mut action: impl FnMut(Item) + 'a,
|
||||
) -> impl FnMut(usize, Item) -> Option<usize> + 'a {
|
||||
move |more, x| {
|
||||
action(x);
|
||||
more.checked_sub(1)
|
||||
}
|
||||
}
|
||||
|
||||
let remaining = self.n;
|
||||
if remaining > 0 {
|
||||
self.iter.try_fold(remaining - 1, check(f));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<I: Iterator + TrustedRandomAccess> SpecTake for Take<I> {
|
||||
#[inline]
|
||||
fn spec_fold<B, F>(mut self, init: B, mut f: F) -> B
|
||||
where
|
||||
Self: Sized,
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
let mut acc = init;
|
||||
let end = self.n.min(self.iter.size());
|
||||
for i in 0..end {
|
||||
// SAFETY: i < end <= self.iter.size() and we discard the iterator at the end
|
||||
let val = unsafe { self.iter.__iterator_get_unchecked(i) };
|
||||
acc = f(acc, val);
|
||||
}
|
||||
acc
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn spec_for_each<F: FnMut(Self::Item)>(mut self, mut f: F) {
|
||||
let end = self.n.min(self.iter.size());
|
||||
for i in 0..end {
|
||||
// SAFETY: i < end <= self.iter.size() and we discard the iterator at the end
|
||||
let val = unsafe { self.iter.__iterator_get_unchecked(i) };
|
||||
f(val);
|
||||
}
|
||||
}
|
||||
}
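The observable behavior these specializations must preserve, as a quick check using only stable iterator APIs (the specializations only change how the work is driven internally, not the results):

fn main() {
    // `fold` over a `Take` adapter: consumes exactly the first three items.
    let sum: u32 = (1..).take(3).fold(0, |acc, x| acc + x);
    assert_eq!(sum, 6);

    // `for_each` over a `Take` adapter: visits exactly the first two items.
    let mut seen = Vec::new();
    [10, 20, 30, 40].iter().take(2).for_each(|&x| seen.push(x));
    assert_eq!(seen, vec![10, 20]);
}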
|
||||
|
@ -1,3 +1,4 @@
|
||||
use crate::ascii::Char as AsciiChar;
|
||||
use crate::convert::TryFrom;
|
||||
use crate::mem;
|
||||
use crate::num::NonZeroUsize;
|
||||
@ -14,7 +15,7 @@ macro_rules! unsafe_impl_trusted_step {
|
||||
unsafe impl TrustedStep for $type {}
|
||||
)*};
|
||||
}
|
||||
unsafe_impl_trusted_step![char i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize];
|
||||
unsafe_impl_trusted_step![AsciiChar char i8 i16 i32 i64 i128 isize u8 u16 u32 u64 u128 usize];
|
||||
|
||||
/// Objects that have a notion of *successor* and *predecessor* operations.
|
||||
///
|
||||
@ -484,6 +485,48 @@ impl Step for char {
|
||||
}
|
||||
}
|
||||
|
||||
#[unstable(feature = "step_trait", reason = "recently redesigned", issue = "42168")]
|
||||
impl Step for AsciiChar {
|
||||
#[inline]
|
||||
fn steps_between(&start: &AsciiChar, &end: &AsciiChar) -> Option<usize> {
|
||||
Step::steps_between(&start.to_u8(), &end.to_u8())
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn forward_checked(start: AsciiChar, count: usize) -> Option<AsciiChar> {
|
||||
let end = Step::forward_checked(start.to_u8(), count)?;
|
||||
AsciiChar::from_u8(end)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn backward_checked(start: AsciiChar, count: usize) -> Option<AsciiChar> {
|
||||
let end = Step::backward_checked(start.to_u8(), count)?;
|
||||
|
||||
// SAFETY: Values below that of a valid ASCII character are also valid ASCII
|
||||
Some(unsafe { AsciiChar::from_u8_unchecked(end) })
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn forward_unchecked(start: AsciiChar, count: usize) -> AsciiChar {
|
||||
// SAFETY: Caller asserts that result is a valid ASCII character,
|
||||
// and therefore it is a valid u8.
|
||||
let end = unsafe { Step::forward_unchecked(start.to_u8(), count) };
|
||||
|
||||
// SAFETY: Caller asserts that result is a valid ASCII character.
|
||||
unsafe { AsciiChar::from_u8_unchecked(end) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn backward_unchecked(start: AsciiChar, count: usize) -> AsciiChar {
|
||||
// SAFETY: Caller asserts that result is a valid ASCII character,
|
||||
// and therefore it is a valid u8.
|
||||
let end = unsafe { Step::backward_unchecked(start.to_u8(), count) };
|
||||
|
||||
// SAFETY: Caller asserts that result is a valid ASCII character.
|
||||
unsafe { AsciiChar::from_u8_unchecked(end) }
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! range_exact_iter_impl {
|
||||
($($t:ty)*) => ($(
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
|
@ -152,7 +152,6 @@
|
||||
#![feature(const_slice_from_raw_parts_mut)]
|
||||
#![feature(const_slice_from_ref)]
|
||||
#![feature(const_slice_index)]
|
||||
#![feature(const_slice_is_ascii)]
|
||||
#![feature(const_slice_ptr_len)]
|
||||
#![feature(const_slice_split_at_mut)]
|
||||
#![feature(const_str_from_utf8_unchecked_mut)]
|
||||
|
@ -41,6 +41,20 @@ macro_rules! nonzero_integers {
|
||||
/// with the exception that `0` is not a valid instance.
|
||||
#[doc = concat!("`Option<", stringify!($Ty), ">` is guaranteed to be compatible with `", stringify!($Int), "`,")]
|
||||
/// including in FFI.
|
||||
///
|
||||
/// Thanks to the [null pointer optimization],
|
||||
#[doc = concat!("`", stringify!($Ty), "` and `Option<", stringify!($Ty), ">`")]
|
||||
/// are guaranteed to have the same size and alignment:
|
||||
///
|
||||
/// ```
|
||||
/// # use std::mem::{size_of, align_of};
|
||||
#[doc = concat!("use std::num::", stringify!($Ty), ";")]
|
||||
///
|
||||
#[doc = concat!("assert_eq!(size_of::<", stringify!($Ty), ">(), size_of::<Option<", stringify!($Ty), ">>());")]
|
||||
#[doc = concat!("assert_eq!(align_of::<", stringify!($Ty), ">(), align_of::<Option<", stringify!($Ty), ">>());")]
|
||||
/// ```
|
||||
///
|
||||
/// [null pointer optimization]: crate::option#representation
|
||||
#[$stability]
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
#[repr(transparent)]
|
||||
|
@ -35,6 +35,7 @@ use crate::ops::{Shl, ShlAssign, Shr, ShrAssign, Sub, SubAssign};
|
||||
#[unstable(feature = "saturating_int_impl", issue = "87920")]
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Hash)]
|
||||
#[repr(transparent)]
|
||||
#[rustc_diagnostic_item = "Saturating"]
|
||||
pub struct Saturating<T>(#[unstable(feature = "saturating_int_impl", issue = "87920")] pub T);
|
||||
|
||||
#[unstable(feature = "saturating_int_impl", issue = "87920")]
|
||||
|
@ -39,6 +39,7 @@ use crate::ops::{Shl, ShlAssign, Shr, ShrAssign, Sub, SubAssign};
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Hash)]
|
||||
#[repr(transparent)]
|
||||
#[rustc_diagnostic_item = "Wrapping"]
|
||||
pub struct Wrapping<T>(#[stable(feature = "rust1", since = "1.0.0")] pub T);
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
|
@ -119,7 +119,7 @@
|
||||
//! # Representation
|
||||
//!
|
||||
//! Rust guarantees to optimize the following types `T` such that
|
||||
//! [`Option<T>`] has the same size as `T`:
|
||||
//! [`Option<T>`] has the same size and alignment as `T`:
|
||||
//!
|
||||
//! * [`Box<U>`]
|
||||
//! * `&U`
|
||||
|
@ -43,9 +43,27 @@ use crate::slice::{self, SliceIndex};
|
||||
/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
|
||||
/// is never used for mutation.
|
||||
///
|
||||
/// # Representation
|
||||
///
|
||||
/// Thanks to the [null pointer optimization],
|
||||
/// `NonNull<T>` and `Option<NonNull<T>>`
|
||||
/// are guaranteed to have the same size and alignment:
|
||||
///
|
||||
/// ```
|
||||
/// # use std::mem::{size_of, align_of};
|
||||
/// use std::ptr::NonNull;
|
||||
///
|
||||
/// assert_eq!(size_of::<NonNull<i16>>(), size_of::<Option<NonNull<i16>>>());
|
||||
/// assert_eq!(align_of::<NonNull<i16>>(), align_of::<Option<NonNull<i16>>>());
|
||||
///
|
||||
/// assert_eq!(size_of::<NonNull<str>>(), size_of::<Option<NonNull<str>>>());
|
||||
/// assert_eq!(align_of::<NonNull<str>>(), align_of::<Option<NonNull<str>>>());
|
||||
/// ```
|
||||
///
|
||||
/// [covariant]: https://doc.rust-lang.org/reference/subtyping.html
|
||||
/// [`PhantomData`]: crate::marker::PhantomData
|
||||
/// [`UnsafeCell<T>`]: crate::cell::UnsafeCell
|
||||
/// [null pointer optimization]: crate::option#representation
|
||||
#[stable(feature = "nonnull", since = "1.25.0")]
|
||||
#[repr(transparent)]
|
||||
#[rustc_layout_scalar_valid_range_start(1)]
|
||||
|
@ -10,7 +10,7 @@ use crate::ops;
|
||||
impl [u8] {
|
||||
/// Checks if all bytes in this slice are within the ASCII range.
|
||||
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
|
||||
#[rustc_const_unstable(feature = "const_slice_is_ascii", issue = "111090")]
|
||||
#[rustc_const_stable(feature = "const_slice_is_ascii", since = "CURRENT_RUSTC_VERSION")]
|
||||
#[must_use]
|
||||
#[inline]
|
||||
pub const fn is_ascii(&self) -> bool {
|
||||
|
@ -2322,7 +2322,7 @@ impl str {
|
||||
/// assert!(!non_ascii.is_ascii());
|
||||
/// ```
|
||||
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
|
||||
#[rustc_const_unstable(feature = "const_slice_is_ascii", issue = "111090")]
|
||||
#[rustc_const_stable(feature = "const_slice_is_ascii", since = "CURRENT_RUSTC_VERSION")]
|
||||
#[must_use]
|
||||
#[inline]
|
||||
pub const fn is_ascii(&self) -> bool {
|
||||
|
@ -1,5 +1,6 @@
|
||||
use core::num::NonZeroUsize;
|
||||
use super::*;
|
||||
use core::ascii::Char as AsciiChar;
|
||||
use core::num::NonZeroUsize;
|
||||
|
||||
#[test]
|
||||
fn test_range() {
|
||||
@ -39,6 +40,21 @@ fn test_char_range() {
|
||||
assert_eq!(('\u{D7FF}'..'\u{E000}').size_hint(), (1, Some(1)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ascii_char_range() {
|
||||
let from = AsciiChar::Null;
|
||||
let to = AsciiChar::Delete;
|
||||
assert!((from..=to).eq((from as u8..=to as u8).filter_map(AsciiChar::from_u8)));
|
||||
assert!((from..=to).rev().eq((from as u8..=to as u8).filter_map(AsciiChar::from_u8).rev()));
|
||||
|
||||
assert_eq!((AsciiChar::CapitalA..=AsciiChar::CapitalZ).count(), 26);
|
||||
assert_eq!((AsciiChar::CapitalA..=AsciiChar::CapitalZ).size_hint(), (26, Some(26)));
|
||||
assert_eq!((AsciiChar::SmallA..=AsciiChar::SmallZ).count(), 26);
|
||||
assert_eq!((AsciiChar::SmallA..=AsciiChar::SmallZ).size_hint(), (26, Some(26)));
|
||||
assert_eq!((AsciiChar::Digit0..=AsciiChar::Digit9).count(), 10);
|
||||
assert_eq!((AsciiChar::Digit0..=AsciiChar::Digit9).size_hint(), (10, Some(10)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_range_exhaustion() {
|
||||
let mut r = 10..10;
|
||||
|
@ -2,6 +2,8 @@
|
||||
#![feature(array_chunks)]
|
||||
#![feature(array_methods)]
|
||||
#![feature(array_windows)]
|
||||
#![feature(ascii_char)]
|
||||
#![feature(ascii_char_variants)]
|
||||
#![feature(bigint_helper_methods)]
|
||||
#![feature(cell_update)]
|
||||
#![feature(const_align_offset)]
|
||||
|
@ -132,8 +132,8 @@
|
||||
//! On all platforms, [`OsStr`] consists of a sequence of bytes that is encoded as a superset of
|
||||
//! UTF-8; see [`OsString`] for more details on its encoding on different platforms.
|
||||
//!
|
||||
//! For limited, inexpensive conversions from and to bytes, see [`OsStr::as_os_str_bytes`] and
|
||||
//! [`OsStr::from_os_str_bytes_unchecked`].
|
||||
//! For limited, inexpensive conversions from and to bytes, see [`OsStr::as_encoded_bytes`] and
|
||||
//! [`OsStr::from_encoded_bytes_unchecked`].
|
||||
//!
|
||||
//! [Unicode scalar value]: https://www.unicode.org/glossary/#unicode_scalar_value
|
||||
//! [Unicode code point]: https://www.unicode.org/glossary/#code_point
|
||||
|
@ -154,36 +154,34 @@ impl OsString {
|
||||
/// # Safety
|
||||
///
|
||||
/// As the encoding is unspecified, callers must pass in bytes that originated as a mixture of
|
||||
/// validated UTF-8 and bytes from [`OsStr::as_os_str_bytes`] from within the same rust version
|
||||
/// validated UTF-8 and bytes from [`OsStr::as_encoded_bytes`] from within the same rust version
|
||||
/// built for the same target platform. For example, reconstructing an `OsString` from bytes sent
|
||||
/// over the network or stored in a file will likely violate these safety rules.
|
||||
///
|
||||
/// Due to the encoding being self-synchronizing, the bytes from [`OsStr::as_os_str_bytes`] can be
|
||||
/// Due to the encoding being self-synchronizing, the bytes from [`OsStr::as_encoded_bytes`] can be
|
||||
/// split either immediately before or immediately after any valid non-empty UTF-8 substring.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(os_str_bytes)]
|
||||
///
|
||||
/// use std::ffi::OsStr;
|
||||
///
|
||||
/// let os_str = OsStr::new("Mary had a little lamb");
|
||||
/// let bytes = os_str.as_os_str_bytes();
|
||||
/// let bytes = os_str.as_encoded_bytes();
|
||||
/// let words = bytes.split(|b| *b == b' ');
|
||||
/// let words: Vec<&OsStr> = words.map(|word| {
|
||||
/// // SAFETY:
|
||||
/// // - Each `word` only contains content that originated from `OsStr::as_os_str_bytes`
|
||||
/// // - Each `word` only contains content that originated from `OsStr::as_encoded_bytes`
|
||||
/// // - Only split with ASCII whitespace which is a non-empty UTF-8 substring
|
||||
/// unsafe { OsStr::from_os_str_bytes_unchecked(word) }
|
||||
/// unsafe { OsStr::from_encoded_bytes_unchecked(word) }
|
||||
/// }).collect();
|
||||
/// ```
|
||||
///
|
||||
/// [conversions]: super#conversions
|
||||
#[inline]
|
||||
#[unstable(feature = "os_str_bytes", issue = "111544")]
|
||||
pub unsafe fn from_os_str_bytes_unchecked(bytes: Vec<u8>) -> Self {
|
||||
OsString { inner: Buf::from_os_str_bytes_unchecked(bytes) }
|
||||
#[stable(feature = "os_str_bytes", since = "CURRENT_RUSTC_VERSION")]
|
||||
pub unsafe fn from_encoded_bytes_unchecked(bytes: Vec<u8>) -> Self {
|
||||
OsString { inner: Buf::from_encoded_bytes_unchecked(bytes) }
|
||||
}
|
||||
|
||||
/// Converts to an [`OsStr`] slice.
|
||||
@ -205,7 +203,7 @@ impl OsString {
|
||||
}
|
||||
|
||||
/// Converts the `OsString` into a byte slice. To convert the byte slice back into an
|
||||
/// `OsString`, use the [`OsStr::from_os_str_bytes_unchecked`] function.
|
||||
/// `OsString`, use the [`OsStr::from_encoded_bytes_unchecked`] function.
|
||||
///
|
||||
/// The byte encoding is an unspecified, platform-specific, self-synchronizing superset of UTF-8.
|
||||
/// By being a self-synchronizing superset of UTF-8, this encoding is also a superset of 7-bit
|
||||
@ -219,9 +217,9 @@ impl OsString {
|
||||
///
|
||||
/// [`std::ffi`]: crate::ffi
|
||||
#[inline]
|
||||
#[unstable(feature = "os_str_bytes", issue = "111544")]
|
||||
pub fn into_os_str_bytes(self) -> Vec<u8> {
|
||||
self.inner.into_os_str_bytes()
|
||||
#[stable(feature = "os_str_bytes", since = "CURRENT_RUSTC_VERSION")]
|
||||
pub fn into_encoded_bytes(self) -> Vec<u8> {
|
||||
self.inner.into_encoded_bytes()
|
||||
}
|
||||
|
||||
/// Converts the `OsString` into a [`String`] if it contains valid Unicode data.
|
||||
@ -745,36 +743,34 @@ impl OsStr {
|
||||
/// # Safety
|
||||
///
|
||||
/// As the encoding is unspecified, callers must pass in bytes that originated as a mixture of
|
||||
/// validated UTF-8 and bytes from [`OsStr::as_os_str_bytes`] from within the same rust version
|
||||
/// validated UTF-8 and bytes from [`OsStr::as_encoded_bytes`] from within the same rust version
|
||||
/// built for the same target platform. For example, reconstructing an `OsStr` from bytes sent
|
||||
/// over the network or stored in a file will likely violate these safety rules.
|
||||
///
|
||||
/// Due to the encoding being self-synchronizing, the bytes from [`OsStr::as_os_str_bytes`] can be
|
||||
/// Due to the encoding being self-synchronizing, the bytes from [`OsStr::as_encoded_bytes`] can be
|
||||
/// split either immediately before or immediately after any valid non-empty UTF-8 substring.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(os_str_bytes)]
|
||||
///
|
||||
/// use std::ffi::OsStr;
|
||||
///
|
||||
/// let os_str = OsStr::new("Mary had a little lamb");
|
||||
/// let bytes = os_str.as_os_str_bytes();
|
||||
/// let bytes = os_str.as_encoded_bytes();
|
||||
/// let words = bytes.split(|b| *b == b' ');
|
||||
/// let words: Vec<&OsStr> = words.map(|word| {
|
||||
/// // SAFETY:
|
||||
/// // - Each `word` only contains content that originated from `OsStr::as_os_str_bytes`
|
||||
/// // - Each `word` only contains content that originated from `OsStr::as_encoded_bytes`
|
||||
/// // - Only split with ASCII whitespace which is a non-empty UTF-8 substring
|
||||
/// unsafe { OsStr::from_os_str_bytes_unchecked(word) }
|
||||
/// unsafe { OsStr::from_encoded_bytes_unchecked(word) }
|
||||
/// }).collect();
|
||||
/// ```
|
||||
///
|
||||
/// [conversions]: super#conversions
|
||||
#[inline]
|
||||
#[unstable(feature = "os_str_bytes", issue = "111544")]
|
||||
pub unsafe fn from_os_str_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
Self::from_inner(Slice::from_os_str_bytes_unchecked(bytes))
|
||||
#[stable(feature = "os_str_bytes", since = "CURRENT_RUSTC_VERSION")]
|
||||
pub unsafe fn from_encoded_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
Self::from_inner(Slice::from_encoded_bytes_unchecked(bytes))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -948,7 +944,7 @@ impl OsStr {
|
||||
}
|
||||
|
||||
/// Converts an OS string slice to a byte slice. To convert the byte slice back into an OS
|
||||
/// string slice, use the [`OsStr::from_os_str_bytes_unchecked`] function.
|
||||
/// string slice, use the [`OsStr::from_encoded_bytes_unchecked`] function.
|
||||
///
|
||||
/// The byte encoding is an unspecified, platform-specific, self-synchronizing superset of UTF-8.
|
||||
/// By being a self-synchronizing superset of UTF-8, this encoding is also a superset of 7-bit
|
||||
@ -962,9 +958,9 @@ impl OsStr {
|
||||
///
|
||||
/// [`std::ffi`]: crate::ffi
|
||||
#[inline]
|
||||
#[unstable(feature = "os_str_bytes", issue = "111544")]
|
||||
pub fn as_os_str_bytes(&self) -> &[u8] {
|
||||
self.inner.as_os_str_bytes()
|
||||
#[stable(feature = "os_str_bytes", since = "CURRENT_RUSTC_VERSION")]
|
||||
pub fn as_encoded_bytes(&self) -> &[u8] {
|
||||
self.inner.as_encoded_bytes()
|
||||
}
|
||||
|
||||
/// Converts this string to its ASCII lower case equivalent in-place.
|
||||
@ -1270,7 +1266,7 @@ impl Default for &OsStr {
|
||||
impl PartialEq for OsStr {
|
||||
#[inline]
|
||||
fn eq(&self, other: &OsStr) -> bool {
|
||||
self.as_os_str_bytes().eq(other.as_os_str_bytes())
|
||||
self.as_encoded_bytes().eq(other.as_encoded_bytes())
|
||||
}
|
||||
}
|
||||
|
||||
@ -1297,23 +1293,23 @@ impl Eq for OsStr {}
|
||||
impl PartialOrd for OsStr {
|
||||
#[inline]
|
||||
fn partial_cmp(&self, other: &OsStr) -> Option<cmp::Ordering> {
|
||||
self.as_os_str_bytes().partial_cmp(other.as_os_str_bytes())
|
||||
self.as_encoded_bytes().partial_cmp(other.as_encoded_bytes())
|
||||
}
|
||||
#[inline]
|
||||
fn lt(&self, other: &OsStr) -> bool {
|
||||
self.as_os_str_bytes().lt(other.as_os_str_bytes())
|
||||
self.as_encoded_bytes().lt(other.as_encoded_bytes())
|
||||
}
|
||||
#[inline]
|
||||
fn le(&self, other: &OsStr) -> bool {
|
||||
self.as_os_str_bytes().le(other.as_os_str_bytes())
|
||||
self.as_encoded_bytes().le(other.as_encoded_bytes())
|
||||
}
|
||||
#[inline]
|
||||
fn gt(&self, other: &OsStr) -> bool {
|
||||
self.as_os_str_bytes().gt(other.as_os_str_bytes())
|
||||
self.as_encoded_bytes().gt(other.as_encoded_bytes())
|
||||
}
|
||||
#[inline]
|
||||
fn ge(&self, other: &OsStr) -> bool {
|
||||
self.as_os_str_bytes().ge(other.as_os_str_bytes())
|
||||
self.as_encoded_bytes().ge(other.as_encoded_bytes())
|
||||
}
|
||||
}
|
||||
|
||||
@ -1332,7 +1328,7 @@ impl PartialOrd<str> for OsStr {
|
||||
impl Ord for OsStr {
|
||||
#[inline]
|
||||
fn cmp(&self, other: &OsStr) -> cmp::Ordering {
|
||||
self.as_os_str_bytes().cmp(other.as_os_str_bytes())
|
||||
self.as_encoded_bytes().cmp(other.as_encoded_bytes())
|
||||
}
|
||||
}
|
||||
|
||||
@ -1382,7 +1378,7 @@ impl_cmp!(Cow<'a, OsStr>, OsString);
|
||||
impl Hash for OsStr {
|
||||
#[inline]
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.as_os_str_bytes().hash(state)
|
||||
self.as_encoded_bytes().hash(state)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -233,8 +233,8 @@ pub struct DirBuilder {
|
||||
/// This function will return an error if `path` does not already exist.
|
||||
/// Other errors may also be returned according to [`OpenOptions::open`].
|
||||
///
|
||||
/// It will also return an error if it encounters while reading an error
|
||||
/// of a kind other than [`io::ErrorKind::Interrupted`].
|
||||
/// While reading from the file, this function handles [`io::ErrorKind::Interrupted`]
|
||||
/// with automatic retries. See [io::Read] documentation for details.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
@ -271,9 +271,11 @@ pub fn read<P: AsRef<Path>>(path: P) -> io::Result<Vec<u8>> {
|
||||
/// This function will return an error if `path` does not already exist.
|
||||
/// Other errors may also be returned according to [`OpenOptions::open`].
|
||||
///
|
||||
/// It will also return an error if it encounters while reading an error
|
||||
/// of a kind other than [`io::ErrorKind::Interrupted`],
|
||||
/// or if the contents of the file are not valid UTF-8.
|
||||
/// If the contents of the file are not valid UTF-8, then an error will also be
|
||||
/// returned.
|
||||
///
|
||||
/// While reading from the file, this function handles [`io::ErrorKind::Interrupted`]
|
||||
/// with automatic retries. See [io::Read] documentation for details.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
|
@ -193,7 +193,7 @@ impl<'a> Prefix<'a> {
|
||||
fn len(&self) -> usize {
|
||||
use self::Prefix::*;
|
||||
fn os_str_len(s: &OsStr) -> usize {
|
||||
s.as_os_str_bytes().len()
|
||||
s.as_encoded_bytes().len()
|
||||
}
|
||||
match *self {
|
||||
Verbatim(x) => 4 + os_str_len(x),
|
||||
@ -316,7 +316,7 @@ fn has_physical_root(s: &[u8], prefix: Option<Prefix<'_>>) -> bool {
|
||||
|
||||
// basic workhorse for splitting stem and extension
|
||||
fn rsplit_file_at_dot(file: &OsStr) -> (Option<&OsStr>, Option<&OsStr>) {
|
||||
if file.as_os_str_bytes() == b".." {
|
||||
if file.as_encoded_bytes() == b".." {
|
||||
return (Some(file), None);
|
||||
}
|
||||
|
||||
@ -324,7 +324,7 @@ fn rsplit_file_at_dot(file: &OsStr) -> (Option<&OsStr>, Option<&OsStr>) {
|
||||
// and back. This is safe to do because (1) we only look at ASCII
|
||||
// contents of the encoding and (2) new &OsStr values are produced
|
||||
// only from ASCII-bounded slices of existing &OsStr values.
|
||||
let mut iter = file.as_os_str_bytes().rsplitn(2, |b| *b == b'.');
|
||||
let mut iter = file.as_encoded_bytes().rsplitn(2, |b| *b == b'.');
|
||||
let after = iter.next();
|
||||
let before = iter.next();
|
||||
if before == Some(b"") {
|
||||
@ -332,15 +332,15 @@ fn rsplit_file_at_dot(file: &OsStr) -> (Option<&OsStr>, Option<&OsStr>) {
|
||||
} else {
|
||||
unsafe {
|
||||
(
|
||||
before.map(|s| OsStr::from_os_str_bytes_unchecked(s)),
|
||||
after.map(|s| OsStr::from_os_str_bytes_unchecked(s)),
|
||||
before.map(|s| OsStr::from_encoded_bytes_unchecked(s)),
|
||||
after.map(|s| OsStr::from_encoded_bytes_unchecked(s)),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn split_file_at_dot(file: &OsStr) -> (&OsStr, Option<&OsStr>) {
|
||||
let slice = file.as_os_str_bytes();
|
||||
let slice = file.as_encoded_bytes();
|
||||
if slice == b".." {
|
||||
return (file, None);
|
||||
}
|
||||
@ -357,8 +357,8 @@ fn split_file_at_dot(file: &OsStr) -> (&OsStr, Option<&OsStr>) {
|
||||
let after = &slice[i + 1..];
|
||||
unsafe {
|
||||
(
|
||||
OsStr::from_os_str_bytes_unchecked(before),
|
||||
Some(OsStr::from_os_str_bytes_unchecked(after)),
|
||||
OsStr::from_encoded_bytes_unchecked(before),
|
||||
Some(OsStr::from_encoded_bytes_unchecked(after)),
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -739,7 +739,7 @@ impl<'a> Components<'a> {
|
||||
// separately via `include_cur_dir`
|
||||
b".." => Some(Component::ParentDir),
|
||||
b"" => None,
|
||||
_ => Some(Component::Normal(unsafe { OsStr::from_os_str_bytes_unchecked(comp) })),
|
||||
_ => Some(Component::Normal(unsafe { OsStr::from_encoded_bytes_unchecked(comp) })),
|
||||
}
|
||||
}
|
||||
|
||||
@ -896,7 +896,7 @@ impl<'a> Iterator for Components<'a> {
|
||||
let raw = &self.path[..self.prefix_len()];
|
||||
self.path = &self.path[self.prefix_len()..];
|
||||
return Some(Component::Prefix(PrefixComponent {
|
||||
raw: unsafe { OsStr::from_os_str_bytes_unchecked(raw) },
|
||||
raw: unsafe { OsStr::from_encoded_bytes_unchecked(raw) },
|
||||
parsed: self.prefix.unwrap(),
|
||||
}));
|
||||
}
|
||||
@ -968,7 +968,7 @@ impl<'a> DoubleEndedIterator for Components<'a> {
|
||||
State::Prefix if self.prefix_len() > 0 => {
|
||||
self.back = State::Done;
|
||||
return Some(Component::Prefix(PrefixComponent {
|
||||
raw: unsafe { OsStr::from_os_str_bytes_unchecked(self.path) },
|
||||
raw: unsafe { OsStr::from_encoded_bytes_unchecked(self.path) },
|
||||
parsed: self.prefix.unwrap(),
|
||||
}));
|
||||
}
|
||||
@ -1477,17 +1477,17 @@ impl PathBuf {
|
||||
fn _set_extension(&mut self, extension: &OsStr) -> bool {
|
||||
let file_stem = match self.file_stem() {
|
||||
None => return false,
|
||||
Some(f) => f.as_os_str_bytes(),
|
||||
Some(f) => f.as_encoded_bytes(),
|
||||
};
|
||||
|
||||
// truncate until right after the file stem
|
||||
let end_file_stem = file_stem[file_stem.len()..].as_ptr().addr();
|
||||
let start = self.inner.as_os_str_bytes().as_ptr().addr();
|
||||
let start = self.inner.as_encoded_bytes().as_ptr().addr();
|
||||
let v = self.as_mut_vec();
|
||||
v.truncate(end_file_stem.wrapping_sub(start));
|
||||
|
||||
// add the new extension, if any
|
||||
let new = extension.as_os_str_bytes();
|
||||
let new = extension.as_encoded_bytes();
|
||||
if !new.is_empty() {
|
||||
v.reserve_exact(new.len() + 1);
|
||||
v.push(b'.');
|
||||
@ -2007,11 +2007,11 @@ impl Path {
|
||||
// The following (private!) function allows construction of a path from a u8
|
||||
// slice, which is only safe when it is known to follow the OsStr encoding.
|
||||
unsafe fn from_u8_slice(s: &[u8]) -> &Path {
|
||||
unsafe { Path::new(OsStr::from_os_str_bytes_unchecked(s)) }
|
||||
unsafe { Path::new(OsStr::from_encoded_bytes_unchecked(s)) }
|
||||
}
|
||||
// The following (private!) function reveals the byte encoding used for OsStr.
|
||||
fn as_u8_slice(&self) -> &[u8] {
|
||||
self.inner.as_os_str_bytes()
|
||||
self.inner.as_encoded_bytes()
|
||||
}
|
||||
|
||||
/// Directly wraps a string slice as a `Path` slice.
|
||||
@ -2609,7 +2609,7 @@ impl Path {
|
||||
|
||||
fn _with_extension(&self, extension: &OsStr) -> PathBuf {
|
||||
let self_len = self.as_os_str().len();
|
||||
let self_bytes = self.as_os_str().as_os_str_bytes();
|
||||
let self_bytes = self.as_os_str().as_encoded_bytes();
|
||||
|
||||
let (new_capacity, slice_to_copy) = match self.extension() {
|
||||
None => {
|
||||
|
@ -19,7 +19,7 @@ pub fn run_path_with_cstr<T, F>(path: &Path, f: F) -> io::Result<T>
|
||||
where
|
||||
F: FnOnce(&CStr) -> io::Result<T>,
|
||||
{
|
||||
run_with_cstr(path.as_os_str().as_os_str_bytes(), f)
|
||||
run_with_cstr(path.as_os_str().as_encoded_bytes(), f)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
@ -8,7 +8,7 @@ use core::iter::repeat;
|
||||
fn stack_allocation_works() {
|
||||
let path = Path::new("abc");
|
||||
let result = run_path_with_cstr(path, |p| {
|
||||
assert_eq!(p, &*CString::new(path.as_os_str().as_os_str_bytes()).unwrap());
|
||||
assert_eq!(p, &*CString::new(path.as_os_str().as_encoded_bytes()).unwrap());
|
||||
Ok(42)
|
||||
});
|
||||
assert_eq!(result.unwrap(), 42);
|
||||
@ -25,7 +25,7 @@ fn heap_allocation_works() {
|
||||
let path = repeat("a").take(384).collect::<String>();
|
||||
let path = Path::new(&path);
|
||||
let result = run_path_with_cstr(path, |p| {
|
||||
assert_eq!(p, &*CString::new(path.as_os_str().as_os_str_bytes()).unwrap());
|
||||
assert_eq!(p, &*CString::new(path.as_os_str().as_encoded_bytes()).unwrap());
|
||||
Ok(42)
|
||||
});
|
||||
assert_eq!(result.unwrap(), 42);
|
||||
|
@ -31,11 +31,6 @@ pub fn error_name(er: abi::ER) -> Option<&'static str> {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn is_interrupted(er: abi::ER) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
pub fn decode_error_kind(er: abi::ER) -> ErrorKind {
|
||||
match er {
|
||||
// Success
|
||||
|
@ -74,7 +74,7 @@ pub fn unsupported_err() -> crate::io::Error {
|
||||
|
||||
#[inline]
|
||||
pub fn is_interrupted(code: i32) -> bool {
|
||||
error::is_interrupted(code)
|
||||
net::is_interrupted(code)
|
||||
}
|
||||
|
||||
pub fn decode_error_kind(code: i32) -> crate::io::ErrorKind {
|
||||
|
@ -183,8 +183,7 @@ pub(super) fn error_name(er: abi::ER) -> Option<&'static str> {
|
||||
|
||||
#[inline]
|
||||
pub fn is_interrupted(er: abi::ER) -> bool {
|
||||
let errno = netc::SOLID_NET_ERR_BASE - er;
|
||||
errno as libc::c_int == libc::EINTR
|
||||
er == netc::SOLID_NET_ERR_BASE - libc::EINTR
|
||||
}
|
||||
|
||||
pub(super) fn decode_error_kind(er: abi::ER) -> ErrorKind {
|
||||
|
@ -97,12 +97,12 @@ impl AsInner<[u8]> for Buf {
|
||||
|
||||
impl Buf {
|
||||
#[inline]
|
||||
pub fn into_os_str_bytes(self) -> Vec<u8> {
|
||||
pub fn into_encoded_bytes(self) -> Vec<u8> {
|
||||
self.inner
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub unsafe fn from_os_str_bytes_unchecked(s: Vec<u8>) -> Self {
|
||||
pub unsafe fn from_encoded_bytes_unchecked(s: Vec<u8>) -> Self {
|
||||
Self { inner: s }
|
||||
}
|
||||
|
||||
@ -203,18 +203,18 @@ impl Buf {
|
||||
|
||||
impl Slice {
|
||||
#[inline]
|
||||
pub fn as_os_str_bytes(&self) -> &[u8] {
|
||||
pub fn as_encoded_bytes(&self) -> &[u8] {
|
||||
&self.inner
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub unsafe fn from_os_str_bytes_unchecked(s: &[u8]) -> &Slice {
|
||||
pub unsafe fn from_encoded_bytes_unchecked(s: &[u8]) -> &Slice {
|
||||
unsafe { mem::transmute(s) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn from_str(s: &str) -> &Slice {
|
||||
unsafe { Slice::from_os_str_bytes_unchecked(s.as_bytes()) }
|
||||
unsafe { Slice::from_encoded_bytes_unchecked(s.as_bytes()) }
|
||||
}
|
||||
|
||||
pub fn to_str(&self) -> Result<&str, crate::str::Utf8Error> {
|
||||
|
@ -2,7 +2,7 @@ use super::*;
|
||||
|
||||
#[test]
|
||||
fn slice_debug_output() {
|
||||
let input = unsafe { Slice::from_os_str_bytes_unchecked(b"\xF0hello,\tworld") };
|
||||
let input = unsafe { Slice::from_encoded_bytes_unchecked(b"\xF0hello,\tworld") };
|
||||
let expected = r#""\xF0hello,\tworld""#;
|
||||
let output = format!("{input:?}");
|
||||
|
||||
@ -12,6 +12,6 @@ fn slice_debug_output() {
|
||||
#[test]
|
||||
fn display() {
|
||||
assert_eq!("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye", unsafe {
|
||||
Slice::from_os_str_bytes_unchecked(b"Hello\xC0\x80 There\xE6\x83 Goodbye").to_string()
|
||||
Slice::from_encoded_bytes_unchecked(b"Hello\xC0\x80 There\xE6\x83 Goodbye").to_string()
|
||||
},);
|
||||
}
|
||||
|
@ -30,7 +30,7 @@ pub(crate) fn absolute(path: &Path) -> io::Result<PathBuf> {
|
||||
|
||||
// Get the components, skipping the redundant leading "." component if it exists.
|
||||
let mut components = path.strip_prefix(".").unwrap_or(path).components();
|
||||
let path_os = path.as_os_str().as_os_str_bytes();
|
||||
let path_os = path.as_os_str().as_encoded_bytes();
|
||||
|
||||
let mut normalized = if path.is_absolute() {
|
||||
// "If a pathname begins with two successive <slash> characters, the
|
||||
|
@ -164,9 +164,9 @@ pub enum ProgramKind {
|
||||
|
||||
impl ProgramKind {
|
||||
fn new(program: &OsStr) -> Self {
|
||||
if program.as_os_str_bytes().starts_with(b"/") {
|
||||
if program.as_encoded_bytes().starts_with(b"/") {
|
||||
Self::Absolute
|
||||
} else if program.as_os_str_bytes().contains(&b'/') {
|
||||
} else if program.as_encoded_bytes().contains(&b'/') {
|
||||
// If the program has more than one component in it, it is a relative path.
|
||||
Self::Relative
|
||||
} else {
|
||||
|
@ -226,7 +226,7 @@ pub(crate) fn append_arg(cmd: &mut Vec<u16>, arg: &Arg, force_quotes: bool) -> i
|
||||
// that it actually gets passed through on the command line or otherwise
|
||||
// it will be dropped entirely when parsed on the other end.
|
||||
ensure_no_nuls(arg)?;
|
||||
let arg_bytes = arg.as_os_str_bytes();
|
||||
let arg_bytes = arg.as_encoded_bytes();
|
||||
let (quote, escape) = match quote {
|
||||
Quote::Always => (true, true),
|
||||
Quote::Auto => {
|
||||
@ -298,7 +298,7 @@ pub(crate) fn make_bat_command_line(
|
||||
const SPECIAL: &[u8] = b"\t &()[]{}^=;!'+,`~%|<>";
|
||||
let force_quotes = match arg {
|
||||
Arg::Regular(arg) if !force_quotes => {
|
||||
arg.as_os_str_bytes().iter().any(|c| SPECIAL.contains(c))
|
||||
arg.as_encoded_bytes().iter().any(|c| SPECIAL.contains(c))
|
||||
}
|
||||
_ => force_quotes,
|
||||
};
|
||||
|
@ -64,12 +64,12 @@ impl fmt::Display for Slice {
|
||||
|
||||
impl Buf {
|
||||
#[inline]
|
||||
pub fn into_os_str_bytes(self) -> Vec<u8> {
|
||||
pub fn into_encoded_bytes(self) -> Vec<u8> {
|
||||
self.inner.into_bytes()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub unsafe fn from_os_str_bytes_unchecked(s: Vec<u8>) -> Self {
|
||||
pub unsafe fn from_encoded_bytes_unchecked(s: Vec<u8>) -> Self {
|
||||
Self { inner: Wtf8Buf::from_bytes_unchecked(s) }
|
||||
}
|
||||
|
||||
@ -162,12 +162,12 @@ impl Buf {
|
||||
|
||||
impl Slice {
|
||||
#[inline]
|
||||
pub fn as_os_str_bytes(&self) -> &[u8] {
|
||||
pub fn as_encoded_bytes(&self) -> &[u8] {
|
||||
self.inner.as_bytes()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub unsafe fn from_os_str_bytes_unchecked(s: &[u8]) -> &Slice {
|
||||
pub unsafe fn from_encoded_bytes_unchecked(s: &[u8]) -> &Slice {
|
||||
mem::transmute(Wtf8::from_bytes_unchecked(s))
|
||||
}
|
||||
|
||||
|
@ -22,12 +22,12 @@ pub fn is_verbatim_sep(b: u8) -> bool {
|
||||
|
||||
/// Returns true if `path` looks like a lone filename.
|
||||
pub(crate) fn is_file_name(path: &OsStr) -> bool {
|
||||
!path.as_os_str_bytes().iter().copied().any(is_sep_byte)
|
||||
!path.as_encoded_bytes().iter().copied().any(is_sep_byte)
|
||||
}
|
||||
pub(crate) fn has_trailing_slash(path: &OsStr) -> bool {
|
||||
let is_verbatim = path.as_os_str_bytes().starts_with(br"\\?\");
|
||||
let is_verbatim = path.as_encoded_bytes().starts_with(br"\\?\");
|
||||
let is_separator = if is_verbatim { is_verbatim_sep } else { is_sep_byte };
|
||||
if let Some(&c) = path.as_os_str_bytes().last() { is_separator(c) } else { false }
|
||||
if let Some(&c) = path.as_encoded_bytes().last() { is_separator(c) } else { false }
|
||||
}
|
||||
|
||||
/// Appends a suffix to a path.
|
||||
@ -49,7 +49,7 @@ impl<'a, const LEN: usize> PrefixParser<'a, LEN> {
|
||||
fn get_prefix(path: &OsStr) -> [u8; LEN] {
|
||||
let mut prefix = [0; LEN];
|
||||
// SAFETY: Only ASCII characters are modified.
|
||||
for (i, &ch) in path.as_os_str_bytes().iter().take(LEN).enumerate() {
|
||||
for (i, &ch) in path.as_encoded_bytes().iter().take(LEN).enumerate() {
|
||||
prefix[i] = if ch == b'/' { b'\\' } else { ch };
|
||||
}
|
||||
prefix
|
||||
@ -82,7 +82,7 @@ impl<'a> PrefixParserSlice<'a, '_> {
|
||||
}
|
||||
|
||||
fn prefix_bytes(&self) -> &'a [u8] {
|
||||
&self.path.as_os_str_bytes()[..self.index]
|
||||
&self.path.as_encoded_bytes()[..self.index]
|
||||
}
|
||||
|
||||
fn finish(self) -> &'a OsStr {
|
||||
@ -90,7 +90,7 @@ impl<'a> PrefixParserSlice<'a, '_> {
|
||||
// &[u8] and back. This is safe to do because (1) we only look at ASCII
|
||||
// contents of the encoding and (2) new &OsStr values are produced only
|
||||
// from ASCII-bounded slices of existing &OsStr values.
|
||||
unsafe { OsStr::from_os_str_bytes_unchecked(&self.path.as_os_str_bytes()[self.index..]) }
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(&self.path.as_encoded_bytes()[self.index..]) }
|
||||
}
|
||||
}
|
||||
|
||||
@ -162,7 +162,7 @@ fn parse_drive(path: &OsStr) -> Option<u8> {
|
||||
drive.is_ascii_alphabetic()
|
||||
}
|
||||
|
||||
match path.as_os_str_bytes() {
|
||||
match path.as_encoded_bytes() {
|
||||
[drive, b':', ..] if is_valid_drive_letter(drive) => Some(drive.to_ascii_uppercase()),
|
||||
_ => None,
|
||||
}
|
||||
@ -171,7 +171,7 @@ fn parse_drive(path: &OsStr) -> Option<u8> {
|
||||
// Parses a drive prefix exactly, e.g. "C:"
|
||||
fn parse_drive_exact(path: &OsStr) -> Option<u8> {
|
||||
// only parse two bytes: the drive letter and the drive separator
|
||||
if path.as_os_str_bytes().get(2).map(|&x| is_sep_byte(x)).unwrap_or(true) {
|
||||
if path.as_encoded_bytes().get(2).map(|&x| is_sep_byte(x)).unwrap_or(true) {
|
||||
parse_drive(path)
|
||||
} else {
|
||||
None
|
||||
@ -185,15 +185,15 @@ fn parse_drive_exact(path: &OsStr) -> Option<u8> {
|
||||
fn parse_next_component(path: &OsStr, verbatim: bool) -> (&OsStr, &OsStr) {
|
||||
let separator = if verbatim { is_verbatim_sep } else { is_sep_byte };
|
||||
|
||||
match path.as_os_str_bytes().iter().position(|&x| separator(x)) {
|
||||
match path.as_encoded_bytes().iter().position(|&x| separator(x)) {
|
||||
Some(separator_start) => {
|
||||
let separator_end = separator_start + 1;
|
||||
|
||||
let component = &path.as_os_str_bytes()[..separator_start];
|
||||
let component = &path.as_encoded_bytes()[..separator_start];
|
||||
|
||||
// Panic safe
|
||||
// The max `separator_end` is `bytes.len()` and `bytes[bytes.len()..]` is a valid index.
|
||||
let path = &path.as_os_str_bytes()[separator_end..];
|
||||
let path = &path.as_encoded_bytes()[separator_end..];
|
||||
|
||||
// SAFETY: `path` is a valid wtf8 encoded slice and each of the separators ('/', '\')
|
||||
// is encoded in a single byte, therefore `bytes[separator_start]` and
|
||||
@ -201,8 +201,8 @@ fn parse_next_component(path: &OsStr, verbatim: bool) -> (&OsStr, &OsStr) {
|
||||
// `bytes[..separator_start]` and `bytes[separator_end..]` are valid wtf8 slices.
|
||||
unsafe {
|
||||
(
|
||||
OsStr::from_os_str_bytes_unchecked(component),
|
||||
OsStr::from_os_str_bytes_unchecked(path),
|
||||
OsStr::from_encoded_bytes_unchecked(component),
|
||||
OsStr::from_encoded_bytes_unchecked(path),
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -323,7 +323,7 @@ pub(crate) fn absolute(path: &Path) -> io::Result<PathBuf> {
|
||||
// Verbatim paths should not be modified.
|
||||
if prefix.map(|x| x.is_verbatim()).unwrap_or(false) {
|
||||
// NULs in verbatim paths are rejected for consistency.
|
||||
if path.as_os_str_bytes().contains(&0) {
|
||||
if path.as_encoded_bytes().contains(&0) {
|
||||
return Err(io::const_io_error!(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"strings passed to WinAPI cannot contain NULs",
|
||||
|
@ -429,7 +429,7 @@ fn resolve_exe<'a>(
|
||||
// Test if the file name has the `exe` extension.
|
||||
// This does a case-insensitive `ends_with`.
|
||||
let has_exe_suffix = if exe_path.len() >= EXE_SUFFIX.len() {
|
||||
exe_path.as_os_str_bytes()[exe_path.len() - EXE_SUFFIX.len()..]
|
||||
exe_path.as_encoded_bytes()[exe_path.len() - EXE_SUFFIX.len()..]
|
||||
.eq_ignore_ascii_case(EXE_SUFFIX.as_bytes())
|
||||
} else {
|
||||
false
|
||||
@ -459,7 +459,7 @@ fn resolve_exe<'a>(
|
||||
// From the `CreateProcessW` docs:
|
||||
// > If the file name does not contain an extension, .exe is appended.
|
||||
// Note that this rule only applies when searching paths.
|
||||
let has_extension = exe_path.as_os_str_bytes().contains(&b'.');
|
||||
let has_extension = exe_path.as_encoded_bytes().contains(&b'.');
|
||||
|
||||
// Search the directories given by `search_paths`.
|
||||
let result = search_paths(parent_paths, child_paths, |mut path| {
|
||||
|
@ -24,6 +24,7 @@ use crate::util::{self, exe, output, t, up_to_date};
|
||||
use crate::{CLang, GitRepo, Kind};
|
||||
|
||||
use build_helper::ci::CiEnv;
|
||||
use build_helper::git::get_git_merge_base;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct LlvmResult {
|
||||
@ -128,13 +129,19 @@ pub fn prebuilt_llvm_config(
|
||||
/// This retrieves the LLVM sha we *want* to use, according to git history.
|
||||
pub(crate) fn detect_llvm_sha(config: &Config, is_git: bool) -> String {
|
||||
let llvm_sha = if is_git {
|
||||
// We proceed in 2 steps. First we get the closest commit that is actually upstream. Then we
|
||||
// walk back further to the last bors merge commit that actually changed LLVM. The first
|
||||
// step will fail on CI because only the `auto` branch exists; we just fall back to `HEAD`
|
||||
// in that case.
|
||||
let closest_upstream =
|
||||
get_git_merge_base(Some(&config.src)).unwrap_or_else(|_| "HEAD".into());
|
||||
let mut rev_list = config.git();
|
||||
rev_list.args(&[
|
||||
PathBuf::from("rev-list"),
|
||||
format!("--author={}", config.stage0_metadata.config.git_merge_commit_email).into(),
|
||||
"-n1".into(),
|
||||
"--first-parent".into(),
|
||||
"HEAD".into(),
|
||||
closest_upstream.into(),
|
||||
"--".into(),
|
||||
config.src.join("src/llvm-project"),
|
||||
config.src.join("src/bootstrap/download-ci-llvm-stamp"),
|
||||
|
@ -62,6 +62,9 @@ impl Finder {
|
||||
}
|
||||
|
||||
pub fn check(build: &mut Build) {
|
||||
let skip_target_sanity =
|
||||
env::var_os("BOOTSTRAP_SKIP_TARGET_SANITY").is_some_and(|s| s == "1" || s == "true");
|
||||
|
||||
let path = env::var_os("PATH").unwrap_or_default();
|
||||
// On Windows, quotes are invalid characters for filename paths, and if
|
||||
// one is present as part of the PATH then that can lead to the system
|
||||
@ -166,7 +169,7 @@ than building it.
|
||||
// FIXME: it would be better to refactor this code to split necessary setup from pure sanity
|
||||
// checks, and have a regular flag for skipping the latter. Also see
|
||||
// <https://github.com/rust-lang/rust/pull/103569#discussion_r1008741742>.
|
||||
if env::var_os("BOOTSTRAP_SKIP_TARGET_SANITY").is_some() {
|
||||
if skip_target_sanity {
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -205,7 +208,15 @@ than building it.
|
||||
}
|
||||
}
|
||||
|
||||
// Make sure musl-root is valid
|
||||
// Some environments don't want or need these tools, such as when testing Miri.
|
||||
// FIXME: it would be better to refactor this code to split necessary setup from pure sanity
|
||||
// checks, and have a regular flag for skipping the latter. Also see
|
||||
// <https://github.com/rust-lang/rust/pull/103569#discussion_r1008741742>.
|
||||
if skip_target_sanity {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Make sure musl-root is valid.
|
||||
if target.contains("musl") && !target.contains("unikraft") {
|
||||
// If this is a native target (host is also musl) and no musl-root is given,
|
||||
// fall back to the system toolchain in /usr before giving up
|
||||
@ -227,14 +238,6 @@ than building it.
|
||||
}
|
||||
}
|
||||
|
||||
// Some environments don't want or need these tools, such as when testing Miri.
|
||||
// FIXME: it would be better to refactor this code to split necessary setup from pure sanity
|
||||
// checks, and have a regular flag for skipping the latter. Also see
|
||||
// <https://github.com/rust-lang/rust/pull/103569#discussion_r1008741742>.
|
||||
if env::var_os("BOOTSTRAP_SKIP_TARGET_SANITY").is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if need_cmake && target.contains("msvc") {
|
||||
// There are three builds of cmake on windows: MSVC, MinGW, and
|
||||
// Cygwin. The Cygwin build does not have generators for Visual
|
||||
|
@ -63,8 +63,9 @@ Tools](#tier-1-with-host-tools).
|
||||
## Tier 2 with Host Tools
|
||||
|
||||
Tier 2 targets can be thought of as "guaranteed to build". The Rust project
|
||||
builds official binary releases for each tier 2 target, and automated builds
|
||||
ensure that each tier 2 target builds after each change. Automated tests are
|
||||
builds official binary releases of the standard library (or, in some cases,
|
||||
only the `core` library) for each tier 2 target, and automated builds
|
||||
ensure that each tier 2 target can be used as build target after each change. Automated tests are
|
||||
not always run so it's not guaranteed to produce a working build, but tier 2
|
||||
targets often work to quite a good degree and patches are always welcome!
|
||||
|
||||
@ -103,11 +104,12 @@ target | notes
|
||||
`x86_64-unknown-linux-musl` | 64-bit Linux with MUSL
|
||||
[`x86_64-unknown-netbsd`](platform-support/netbsd.md) | NetBSD/amd64
|
||||
|
||||
## Tier 2
|
||||
## Tier 2 without Host Tools
|
||||
|
||||
Tier 2 targets can be thought of as "guaranteed to build". The Rust project
|
||||
builds official binary releases for each tier 2 target, and automated builds
|
||||
ensure that each tier 2 target builds after each change. Automated tests are
|
||||
builds official binary releases of the standard library (or, in some cases,
|
||||
only the `core` library) for each tier 2 target, and automated builds
|
||||
ensure that each tier 2 target can be used as build target after each change. Automated tests are
|
||||
not always run so it's not guaranteed to produce a working build, but tier 2
|
||||
targets often work to quite a good degree and patches are always welcome! For
|
||||
the full requirements, see [Tier 2 target
|
||||
|
@ -1959,31 +1959,44 @@ fn can_elide_trait_object_lifetime_bound<'tcx>(
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum ContainerTy<'tcx> {
|
||||
Ref(ty::Region<'tcx>),
|
||||
Regular { ty: DefId, args: ty::Binder<'tcx, &'tcx [ty::GenericArg<'tcx>]>, arg: usize },
|
||||
Regular {
|
||||
ty: DefId,
|
||||
args: ty::Binder<'tcx, &'tcx [ty::GenericArg<'tcx>]>,
|
||||
has_self: bool,
|
||||
arg: usize,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'tcx> ContainerTy<'tcx> {
|
||||
fn object_lifetime_default(self, tcx: TyCtxt<'tcx>) -> ObjectLifetimeDefault<'tcx> {
|
||||
match self {
|
||||
Self::Ref(region) => ObjectLifetimeDefault::Arg(region),
|
||||
Self::Regular { ty: container, args, arg: index } => {
|
||||
Self::Regular { ty: container, args, has_self, arg: index } => {
|
||||
let (DefKind::Struct
|
||||
| DefKind::Union
|
||||
| DefKind::Enum
|
||||
| DefKind::TyAlias { .. }
|
||||
| DefKind::Trait
|
||||
| DefKind::AssocTy
|
||||
| DefKind::Variant) = tcx.def_kind(container)
|
||||
| DefKind::Trait) = tcx.def_kind(container)
|
||||
else {
|
||||
return ObjectLifetimeDefault::Empty;
|
||||
};
|
||||
|
||||
let generics = tcx.generics_of(container);
|
||||
let param = generics.params[index].def_id;
|
||||
let default = tcx.object_lifetime_default(param);
|
||||
debug_assert_eq!(generics.parent_count, 0);
|
||||
|
||||
// If the container is a trait object type, the arguments won't contain the self type but the
|
||||
// generics of the corresponding trait will. In such a case, offset the index by one.
|
||||
// For comparison, if the container is a trait inside a bound, the arguments do contain the
|
||||
// self type.
|
||||
let offset =
|
||||
if !has_self && generics.parent.is_none() && generics.has_self { 1 } else { 0 };
|
||||
let param = generics.params[index + offset].def_id;
|
||||
|
||||
let default = tcx.object_lifetime_default(param);
|
||||
match default {
|
||||
rbv::ObjectLifetimeDefault::Param(lifetime) => {
|
||||
// The index is relative to the parent generics but since we don't have any,
|
||||
// we don't need to translate it.
|
||||
let index = generics.param_def_id_to_index[&lifetime];
|
||||
let arg = args.skip_binder()[index as usize].expect_region();
|
||||
ObjectLifetimeDefault::Arg(arg)
|
||||
|
@ -22,6 +22,7 @@ use rustc_hir::lang_items::LangItem;
|
||||
use rustc_hir::{BodyId, Mutability};
|
||||
use rustc_hir_analysis::check::intrinsic::intrinsic_operation_unsafety;
|
||||
use rustc_index::IndexVec;
|
||||
use rustc_metadata::rendered_const;
|
||||
use rustc_middle::ty::fast_reject::SimplifiedType;
|
||||
use rustc_middle::ty::{self, TyCtxt, Visibility};
|
||||
use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, inner_docs, DocFragment};
|
||||
@ -35,7 +36,7 @@ use rustc_target::spec::abi::Abi;
|
||||
use crate::clean::cfg::Cfg;
|
||||
use crate::clean::external_path;
|
||||
use crate::clean::inline::{self, print_inlined_const};
|
||||
use crate::clean::utils::{is_literal_expr, print_const_expr, print_evaluated_const};
|
||||
use crate::clean::utils::{is_literal_expr, print_evaluated_const};
|
||||
use crate::core::DocContext;
|
||||
use crate::formats::cache::Cache;
|
||||
use crate::formats::item_type::ItemType;
|
||||
@ -2086,7 +2087,7 @@ impl Discriminant {
|
||||
/// Will be `None` in the case of cross-crate reexports, and may be
|
||||
/// simplified
|
||||
pub(crate) fn expr(&self, tcx: TyCtxt<'_>) -> Option<String> {
|
||||
self.expr.map(|body| print_const_expr(tcx, body))
|
||||
self.expr.map(|body| rendered_const(tcx, body))
|
||||
}
|
||||
/// Will always be a machine readable number, without underscores or suffixes.
|
||||
pub(crate) fn value(&self, tcx: TyCtxt<'_>) -> String {
|
||||
@ -2326,7 +2327,7 @@ impl ConstantKind {
|
||||
ConstantKind::TyConst { ref expr } => expr.to_string(),
|
||||
ConstantKind::Extern { def_id } => print_inlined_const(tcx, def_id),
|
||||
ConstantKind::Local { body, .. } | ConstantKind::Anonymous { body } => {
|
||||
print_const_expr(tcx, body)
|
||||
rendered_const(tcx, body)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -14,6 +14,7 @@ use rustc_ast::tokenstream::TokenTree;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir::def_id::{DefId, LocalDefId, LOCAL_CRATE};
|
||||
use rustc_metadata::rendered_const;
|
||||
use rustc_middle::mir;
|
||||
use rustc_middle::mir::interpret::ConstValue;
|
||||
use rustc_middle::ty::{self, GenericArgKind, GenericArgsRef, TyCtxt};
|
||||
@ -77,9 +78,10 @@ pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate {
|
||||
pub(crate) fn ty_args_to_args<'tcx>(
|
||||
cx: &mut DocContext<'tcx>,
|
||||
args: ty::Binder<'tcx, &'tcx [ty::GenericArg<'tcx>]>,
|
||||
mut skip_first: bool,
|
||||
has_self: bool,
|
||||
container: Option<DefId>,
|
||||
) -> Vec<GenericArg> {
|
||||
let mut skip_first = has_self;
|
||||
let mut ret_val =
|
||||
Vec::with_capacity(args.skip_binder().len().saturating_sub(if skip_first { 1 } else { 0 }));
|
||||
|
||||
@ -99,6 +101,7 @@ pub(crate) fn ty_args_to_args<'tcx>(
|
||||
container.map(|container| crate::clean::ContainerTy::Regular {
|
||||
ty: container,
|
||||
args,
|
||||
has_self,
|
||||
arg: index,
|
||||
}),
|
||||
))),
|
||||
@ -253,7 +256,7 @@ pub(crate) fn print_const(cx: &DocContext<'_>, n: ty::Const<'_>) -> String {
|
||||
match n.kind() {
|
||||
ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, args: _ }) => {
|
||||
let s = if let Some(def) = def.as_local() {
|
||||
print_const_expr(cx.tcx, cx.tcx.hir().body_owned_by(def))
|
||||
rendered_const(cx.tcx, cx.tcx.hir().body_owned_by(def))
|
||||
} else {
|
||||
inline::print_inlined_const(cx.tcx, def)
|
||||
};
|
||||
@ -365,100 +368,6 @@ pub(crate) fn is_literal_expr(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// Build a textual representation of an unevaluated constant expression.
|
||||
///
|
||||
/// If the const expression is too complex, an underscore `_` is returned.
|
||||
/// For const arguments, it's `{ _ }` to be precise.
|
||||
/// This means that the output is not necessarily valid Rust code.
|
||||
///
|
||||
/// Currently, only
|
||||
///
|
||||
/// * literals (optionally with a leading `-`)
|
||||
/// * unit `()`
|
||||
/// * blocks (`{ … }`) around simple expressions and
|
||||
/// * paths without arguments
|
||||
///
|
||||
/// are considered simple enough. Simple blocks are included since they are
|
||||
/// necessary to disambiguate unit from the unit type.
|
||||
/// This list might get extended in the future.
|
||||
///
|
||||
/// Without this censoring, in a lot of cases the output would get too large
|
||||
/// and verbose. Consider `match` expressions, blocks and deeply nested ADTs.
|
||||
/// Further, private and `doc(hidden)` fields of structs would get leaked
|
||||
/// since HIR datatypes like the `body` parameter do not contain enough
|
||||
/// semantic information for this function to be able to hide them –
|
||||
/// at least not without significant performance overhead.
|
||||
///
|
||||
/// Whenever possible, prefer to evaluate the constant first and try to
|
||||
/// use a different method for pretty-printing. Ideally this function
|
||||
/// should only ever be used as a fallback.
|
||||
pub(crate) fn print_const_expr(tcx: TyCtxt<'_>, body: hir::BodyId) -> String {
|
||||
let hir = tcx.hir();
|
||||
let value = &hir.body(body).value;
|
||||
|
||||
#[derive(PartialEq, Eq)]
|
||||
enum Classification {
|
||||
Literal,
|
||||
Simple,
|
||||
Complex,
|
||||
}
|
||||
|
||||
use Classification::*;
|
||||
|
||||
fn classify(expr: &hir::Expr<'_>) -> Classification {
|
||||
match &expr.kind {
|
||||
hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
|
||||
if matches!(expr.kind, hir::ExprKind::Lit(_)) { Literal } else { Complex }
|
||||
}
|
||||
hir::ExprKind::Lit(_) => Literal,
|
||||
hir::ExprKind::Tup([]) => Simple,
|
||||
hir::ExprKind::Block(hir::Block { stmts: [], expr: Some(expr), .. }, _) => {
|
||||
if classify(expr) == Complex { Complex } else { Simple }
|
||||
}
|
||||
// Paths with a self-type or arguments are too “complex” following our measure since
|
||||
// they may leak private fields of structs (with feature `adt_const_params`).
|
||||
// Consider: `<Self as Trait<{ Struct { private: () } }>>::CONSTANT`.
|
||||
// Paths without arguments are definitely harmless though.
|
||||
hir::ExprKind::Path(hir::QPath::Resolved(_, hir::Path { segments, .. })) => {
|
||||
if segments.iter().all(|segment| segment.args.is_none()) { Simple } else { Complex }
|
||||
}
|
||||
// FIXME: Claiming that those kinds of QPaths are simple is probably not true if the Ty
|
||||
// contains const arguments. Is there a *concise* way to check for this?
|
||||
hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => Simple,
|
||||
// FIXME: Can they contain const arguments and thus leak private struct fields?
|
||||
hir::ExprKind::Path(hir::QPath::LangItem(..)) => Simple,
|
||||
_ => Complex,
|
||||
}
|
||||
}
|
||||
|
||||
let classification = classify(value);
|
||||
|
||||
if classification == Literal
|
||||
&& !value.span.from_expansion()
|
||||
&& let Ok(snippet) = tcx.sess.source_map().span_to_snippet(value.span) {
|
||||
// For literals, we avoid invoking the pretty-printer and use the source snippet instead to
|
||||
// preserve certain stylistic choices the user likely made for the sake legibility like
|
||||
//
|
||||
// * hexadecimal notation
|
||||
// * underscores
|
||||
// * character escapes
|
||||
//
|
||||
// FIXME: This passes through `-/*spacer*/0` verbatim.
|
||||
snippet
|
||||
} else if classification == Simple {
|
||||
// Otherwise we prefer pretty-printing to get rid of extraneous whitespace, comments and
|
||||
// other formatting artifacts.
|
||||
rustc_hir_pretty::id_to_string(&hir, body.hir_id)
|
||||
} else if tcx.def_kind(hir.body_owner_def_id(body).to_def_id()) == DefKind::AnonConst {
|
||||
// FIXME: Omit the curly braces if the enclosing expression is an array literal
|
||||
// with a repeated element (an `ExprKind::Repeat`) as in such case it
|
||||
// would not actually need any disambiguation.
|
||||
"{ _ }".to_owned()
|
||||
} else {
|
||||
"_".to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
/// Given a type Path, resolve it to a Type using the TyCtxt
|
||||
pub(crate) fn resolve_type(cx: &mut DocContext<'_>, path: Path) -> Type {
|
||||
debug!("resolve_type({path:?})");
|
||||
|
@ -1,10 +1,10 @@
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_span::symbol::Symbol;
|
||||
use serde::ser::{Serialize, SerializeStruct, Serializer};
|
||||
use serde::ser::{Serialize, SerializeSeq, SerializeStruct, Serializer};
|
||||
|
||||
use crate::clean;
|
||||
use crate::clean::types::{Function, Generics, ItemId, Type, WherePredicate};
|
||||
@ -78,9 +78,9 @@ pub(crate) fn build_index<'tcx>(
|
||||
map: &mut FxHashMap<F, usize>,
|
||||
itemid: F,
|
||||
lastpathid: &mut usize,
|
||||
crate_paths: &mut Vec<(ItemType, Symbol)>,
|
||||
crate_paths: &mut Vec<(ItemType, Vec<Symbol>)>,
|
||||
item_type: ItemType,
|
||||
path: Symbol,
|
||||
path: &[Symbol],
|
||||
) {
|
||||
match map.entry(itemid) {
|
||||
Entry::Occupied(entry) => ty.id = Some(RenderTypeId::Index(*entry.get())),
|
||||
@ -88,7 +88,7 @@ pub(crate) fn build_index<'tcx>(
|
||||
let pathid = *lastpathid;
|
||||
entry.insert(pathid);
|
||||
*lastpathid += 1;
|
||||
crate_paths.push((item_type, path));
|
||||
crate_paths.push((item_type, path.to_vec()));
|
||||
ty.id = Some(RenderTypeId::Index(pathid));
|
||||
}
|
||||
}
|
||||
@ -100,7 +100,7 @@ pub(crate) fn build_index<'tcx>(
|
||||
itemid_to_pathid: &mut FxHashMap<ItemId, usize>,
|
||||
primitives: &mut FxHashMap<Symbol, usize>,
|
||||
lastpathid: &mut usize,
|
||||
crate_paths: &mut Vec<(ItemType, Symbol)>,
|
||||
crate_paths: &mut Vec<(ItemType, Vec<Symbol>)>,
|
||||
) {
|
||||
if let Some(generics) = &mut ty.generics {
|
||||
for item in generics {
|
||||
@ -131,7 +131,7 @@ pub(crate) fn build_index<'tcx>(
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
item_type,
|
||||
*fqp.last().unwrap(),
|
||||
fqp,
|
||||
);
|
||||
} else {
|
||||
ty.id = None;
|
||||
@ -146,7 +146,7 @@ pub(crate) fn build_index<'tcx>(
|
||||
lastpathid,
|
||||
crate_paths,
|
||||
ItemType::Primitive,
|
||||
sym,
|
||||
&[sym],
|
||||
);
|
||||
}
|
||||
RenderTypeId::Index(_) => {}
|
||||
@ -191,7 +191,7 @@ pub(crate) fn build_index<'tcx>(
|
||||
lastpathid += 1;
|
||||
|
||||
if let Some(&(ref fqp, short)) = paths.get(&defid) {
|
||||
crate_paths.push((short, *fqp.last().unwrap()));
|
||||
crate_paths.push((short, fqp.clone()));
|
||||
Some(pathid)
|
||||
} else {
|
||||
None
|
||||
@ -213,118 +213,163 @@ pub(crate) fn build_index<'tcx>(
|
||||
struct CrateData<'a> {
|
||||
doc: String,
|
||||
items: Vec<&'a IndexItem>,
|
||||
paths: Vec<(ItemType, Symbol)>,
|
||||
paths: Vec<(ItemType, Vec<Symbol>)>,
|
||||
// The String is alias name and the vec is the list of the elements with this alias.
|
||||
//
|
||||
// To be noted: the `usize` elements are indexes to `items`.
|
||||
aliases: &'a BTreeMap<String, Vec<usize>>,
|
||||
}
|
||||
|
||||
struct Paths {
|
||||
ty: ItemType,
|
||||
name: Symbol,
|
||||
path: Option<usize>,
|
||||
}
|
||||
|
||||
impl Serialize for Paths {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut seq = serializer.serialize_seq(None)?;
|
||||
seq.serialize_element(&self.ty)?;
|
||||
seq.serialize_element(self.name.as_str())?;
|
||||
if let Some(ref path) = self.path {
|
||||
seq.serialize_element(path)?;
|
||||
}
|
||||
seq.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Serialize for CrateData<'a> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut extra_paths = FxHashMap::default();
|
||||
// We need to keep the order of insertion, hence why we use an `IndexMap`. Then we will
|
||||
// insert these "extra paths" (which are paths of items from external crates) into the
|
||||
// `full_paths` list at the end.
|
||||
let mut revert_extra_paths = FxIndexMap::default();
|
||||
let mut mod_paths = FxHashMap::default();
|
||||
for (index, item) in self.items.iter().enumerate() {
|
||||
if item.path.is_empty() {
|
||||
continue;
|
||||
}
|
||||
mod_paths.insert(&item.path, index);
|
||||
}
|
||||
let mut paths = Vec::with_capacity(self.paths.len());
|
||||
for (ty, path) in &self.paths {
|
||||
if path.len() < 2 {
|
||||
paths.push(Paths { ty: *ty, name: path[0], path: None });
|
||||
continue;
|
||||
}
|
||||
let full_path = join_with_double_colon(&path[..path.len() - 1]);
|
||||
if let Some(index) = mod_paths.get(&full_path) {
|
||||
paths.push(Paths { ty: *ty, name: *path.last().unwrap(), path: Some(*index) });
|
||||
continue;
|
||||
}
|
||||
// It means it comes from an external crate so the item and its path will be
|
||||
// stored into another array.
|
||||
//
|
||||
// `index` is put after the last `mod_paths`
|
||||
let index = extra_paths.len() + self.items.len();
|
||||
if !revert_extra_paths.contains_key(&index) {
|
||||
revert_extra_paths.insert(index, full_path.clone());
|
||||
}
|
||||
match extra_paths.entry(full_path) {
|
||||
Entry::Occupied(entry) => {
|
||||
paths.push(Paths {
|
||||
ty: *ty,
|
||||
name: *path.last().unwrap(),
|
||||
path: Some(*entry.get()),
|
||||
});
|
||||
}
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(index);
|
||||
paths.push(Paths {
|
||||
ty: *ty,
|
||||
name: *path.last().unwrap(),
|
||||
path: Some(index),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut names = Vec::with_capacity(self.items.len());
|
||||
let mut types = String::with_capacity(self.items.len());
|
||||
let mut full_paths = Vec::with_capacity(self.items.len());
|
||||
let mut descriptions = Vec::with_capacity(self.items.len());
|
||||
let mut parents = Vec::with_capacity(self.items.len());
|
||||
let mut functions = Vec::with_capacity(self.items.len());
|
||||
let mut deprecated = Vec::with_capacity(self.items.len());
|
||||
|
||||
for (index, item) in self.items.iter().enumerate() {
|
||||
let n = item.ty as u8;
|
||||
let c = char::try_from(n + b'A').expect("item types must fit in ASCII");
|
||||
assert!(c <= 'z', "item types must fit within ASCII printables");
|
||||
types.push(c);
|
||||
|
||||
assert_eq!(
|
||||
item.parent.is_some(),
|
||||
item.parent_idx.is_some(),
|
||||
"`{}` is missing idx",
|
||||
item.name
|
||||
);
|
||||
// 0 is a sentinel, everything else is one-indexed
|
||||
parents.push(item.parent_idx.map(|x| x + 1).unwrap_or(0));
|
||||
|
||||
names.push(item.name.as_str());
|
||||
descriptions.push(&item.desc);
|
||||
|
||||
if !item.path.is_empty() {
|
||||
full_paths.push((index, &item.path));
|
||||
}
|
||||
|
||||
// Fake option to get `0` out as a sentinel instead of `null`.
|
||||
// We want to use `0` because it's three less bytes.
|
||||
enum FunctionOption<'a> {
|
||||
Function(&'a IndexItemFunctionType),
|
||||
None,
|
||||
}
|
||||
impl<'a> Serialize for FunctionOption<'a> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match self {
|
||||
FunctionOption::None => 0.serialize(serializer),
|
||||
FunctionOption::Function(ty) => ty.serialize(serializer),
|
||||
}
|
||||
}
|
||||
}
|
||||
functions.push(match &item.search_type {
|
||||
Some(ty) => FunctionOption::Function(ty),
|
||||
None => FunctionOption::None,
|
||||
});
|
||||
|
||||
if item.deprecation.is_some() {
|
||||
deprecated.push(index);
|
||||
}
|
||||
}
|
||||
|
||||
for (index, path) in &revert_extra_paths {
|
||||
full_paths.push((*index, path));
|
||||
}
|
||||
|
||||
let has_aliases = !self.aliases.is_empty();
|
||||
let mut crate_data =
|
||||
serializer.serialize_struct("CrateData", if has_aliases { 9 } else { 8 })?;
|
||||
crate_data.serialize_field("doc", &self.doc)?;
|
||||
crate_data.serialize_field(
|
||||
"t",
|
||||
&self
|
||||
.items
|
||||
.iter()
|
||||
.map(|item| {
|
||||
let n = item.ty as u8;
|
||||
let c = char::try_from(n + b'A').expect("item types must fit in ASCII");
|
||||
assert!(c <= 'z', "item types must fit within ASCII printables");
|
||||
c
|
||||
})
|
||||
.collect::<String>(),
|
||||
)?;
|
||||
crate_data.serialize_field(
|
||||
"n",
|
||||
&self.items.iter().map(|item| item.name.as_str()).collect::<Vec<_>>(),
|
||||
)?;
|
||||
crate_data.serialize_field(
|
||||
"q",
|
||||
&self
|
||||
.items
|
||||
.iter()
|
||||
.enumerate()
|
||||
// Serialize as an array of item indices and full paths
|
||||
.filter_map(
|
||||
|(index, item)| {
|
||||
if item.path.is_empty() { None } else { Some((index, &item.path)) }
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
crate_data.serialize_field(
|
||||
"d",
|
||||
&self.items.iter().map(|item| &item.desc).collect::<Vec<_>>(),
|
||||
)?;
|
||||
crate_data.serialize_field(
|
||||
"i",
|
||||
&self
|
||||
.items
|
||||
.iter()
|
||||
.map(|item| {
|
||||
assert_eq!(
|
||||
item.parent.is_some(),
|
||||
item.parent_idx.is_some(),
|
||||
"`{}` is missing idx",
|
||||
item.name
|
||||
);
|
||||
// 0 is a sentinel, everything else is one-indexed
|
||||
item.parent_idx.map(|x| x + 1).unwrap_or(0)
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
crate_data.serialize_field(
|
||||
"f",
|
||||
&self
|
||||
.items
|
||||
.iter()
|
||||
.map(|item| {
|
||||
// Fake option to get `0` out as a sentinel instead of `null`.
|
||||
// We want to use `0` because it's three less bytes.
|
||||
enum FunctionOption<'a> {
|
||||
Function(&'a IndexItemFunctionType),
|
||||
None,
|
||||
}
|
||||
impl<'a> Serialize for FunctionOption<'a> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match self {
|
||||
FunctionOption::None => 0.serialize(serializer),
|
||||
FunctionOption::Function(ty) => ty.serialize(serializer),
|
||||
}
|
||||
}
|
||||
}
|
||||
match &item.search_type {
|
||||
Some(ty) => FunctionOption::Function(ty),
|
||||
None => FunctionOption::None,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
crate_data.serialize_field(
|
||||
"c",
|
||||
&self
|
||||
.items
|
||||
.iter()
|
||||
.enumerate()
|
||||
// Serialize as an array of deprecated item indices
|
||||
.filter_map(|(index, item)| item.deprecation.map(|_| index))
|
||||
.collect::<Vec<_>>(),
|
||||
)?;
|
||||
crate_data.serialize_field(
|
||||
"p",
|
||||
&self.paths.iter().map(|(it, s)| (it, s.as_str())).collect::<Vec<_>>(),
|
||||
)?;
|
||||
crate_data.serialize_field("t", &types)?;
|
||||
crate_data.serialize_field("n", &names)?;
|
||||
// Serialize as an array of item indices and full paths
|
||||
crate_data.serialize_field("q", &full_paths)?;
|
||||
crate_data.serialize_field("d", &descriptions)?;
|
||||
crate_data.serialize_field("i", &parents)?;
|
||||
crate_data.serialize_field("f", &functions)?;
|
||||
crate_data.serialize_field("c", &deprecated)?;
|
||||
crate_data.serialize_field("p", &paths)?;
|
||||
if has_aliases {
|
||||
crate_data.serialize_field("a", &self.aliases)?;
|
||||
}
|
||||
|
@ -263,7 +263,6 @@ function initSearch(rawSearchIndex) {
|
||||
* @returns {integer}
|
||||
*/
|
||||
function buildTypeMapIndex(name) {
|
||||
|
||||
if (name === "" || name === null) {
|
||||
return -1;
|
||||
}
|
||||
@ -1380,7 +1379,7 @@ function initSearch(rawSearchIndex) {
|
||||
* @type Map<integer, QueryElement[]>
|
||||
*/
|
||||
const queryElemSet = new Map();
|
||||
const addQueryElemToQueryElemSet = function addQueryElemToQueryElemSet(queryElem) {
|
||||
const addQueryElemToQueryElemSet = queryElem => {
|
||||
let currentQueryElemList;
|
||||
if (queryElemSet.has(queryElem.id)) {
|
||||
currentQueryElemList = queryElemSet.get(queryElem.id);
|
||||
@ -1397,7 +1396,7 @@ function initSearch(rawSearchIndex) {
|
||||
* @type Map<integer, FunctionType[]>
|
||||
*/
|
||||
const fnTypeSet = new Map();
|
||||
const addFnTypeToFnTypeSet = function addFnTypeToFnTypeSet(fnType) {
|
||||
const addFnTypeToFnTypeSet = fnType => {
|
||||
// Pure generic, or an item that's not matched by any query elems.
|
||||
// Try [unboxing] it.
|
||||
//
|
||||
@ -1463,6 +1462,32 @@ function initSearch(rawSearchIndex) {
|
||||
if (!typePassesFilter(queryElem.typeFilter, fnType.ty)) {
|
||||
continue;
|
||||
}
|
||||
const queryElemPathLength = queryElem.pathWithoutLast.length;
|
||||
// If the query element is a path (it contains `::`), we need to check if this
|
||||
// path is compatible with the target type.
|
||||
if (queryElemPathLength > 0) {
|
||||
const fnTypePath = fnType.path !== undefined && fnType.path !== null ?
|
||||
fnType.path.split("::") : [];
|
||||
// If the path provided in the query element is longer than this type,
|
||||
// no need to check it since it won't match in any case.
|
||||
if (queryElemPathLength > fnTypePath.length) {
|
||||
continue;
|
||||
}
|
||||
let i = 0;
|
||||
for (const path of fnTypePath) {
|
||||
if (path === queryElem.pathWithoutLast[i]) {
|
||||
i += 1;
|
||||
if (i >= queryElemPathLength) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (i < queryElemPathLength) {
|
||||
// If we didn't find all parts of the path of the query element inside
|
||||
// the fn type, then it's not the right one.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (queryElem.generics.length === 0 || checkGenerics(fnType, queryElem)) {
|
||||
currentFnTypeList.splice(i, 1);
|
||||
const result = doHandleQueryElemList(currentFnTypeList, queryElemList);
|
||||
@ -1863,14 +1888,14 @@ function initSearch(rawSearchIndex) {
* @param {QueryElement} elem
*/
function convertNameToId(elem) {
if (typeNameIdMap.has(elem.name)) {
elem.id = typeNameIdMap.get(elem.name);
if (typeNameIdMap.has(elem.pathLast)) {
elem.id = typeNameIdMap.get(elem.pathLast);
} else if (!parsedQuery.literalSearch) {
let match = -1;
let matchDist = maxEditDistance + 1;
let matchName = "";
for (const [name, id] of typeNameIdMap) {
const dist = editDistance(name, elem.name, maxEditDistance);
const dist = editDistance(name, elem.pathLast, maxEditDistance);
if (dist <= matchDist && dist <= maxEditDistance) {
if (dist === matchDist && matchName > name) {
continue;
@ -2385,12 +2410,20 @@ ${item.displayPath}<span class="${type}">${name}</span>\
lowercasePaths
);
}
// `0` is used as a sentinel because it's fewer bytes than `null`
if (pathIndex === 0) {
return {
id: -1,
ty: null,
path: null,
generics: generics,
};
}
const item = lowercasePaths[pathIndex - 1];
return {
// `0` is used as a sentinel because it's fewer bytes than `null`
id: pathIndex === 0
? -1
: buildTypeMapIndex(lowercasePaths[pathIndex - 1].name),
ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
id: buildTypeMapIndex(item.name),
ty: item.ty,
path: item.path,
generics: generics,
};
});
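The `0`-as-sentinel trick above stores path references as 1-based indices so the serialized index stays compact: `0` means "no path" and anything else points at `lowercasePaths[pathIndex - 1]`. A hedged Rust sketch of that decoding, with a made-up type standing in for search.js's plain objects:

```rust
// Decode a 1-based path index where 0 is a sentinel for "no path".
// `PathEntry` is illustrative; search.js works on plain JS objects.
#[derive(Debug, Clone, PartialEq)]
struct PathEntry {
    name: String,
}

fn decode_path_index(path_index: usize, paths: &[PathEntry]) -> Option<&PathEntry> {
    if path_index == 0 {
        None // sentinel: fewer bytes on the wire than an explicit `null`
    } else {
        paths.get(path_index - 1)
    }
}

fn main() {
    let paths = vec![PathEntry { name: "std::vec".into() }];
    assert!(decode_path_index(0, &paths).is_none());
    assert_eq!(decode_path_index(1, &paths).unwrap().name, "std::vec");
}
```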
@ -2422,13 +2455,22 @@ ${item.displayPath}<span class="${type}">${name}</span>\
let inputs, output;
if (typeof functionSearchType[INPUTS_DATA] === "number") {
const pathIndex = functionSearchType[INPUTS_DATA];
inputs = [{
id: pathIndex === 0
? -1
: buildTypeMapIndex(lowercasePaths[pathIndex - 1].name),
ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
generics: [],
}];
if (pathIndex === 0) {
inputs = [{
id: -1,
ty: null,
path: null,
generics: [],
}];
} else {
const item = lowercasePaths[pathIndex - 1];
inputs = [{
id: buildTypeMapIndex(item.name),
ty: item.ty,
path: item.path,
generics: [],
}];
}
} else {
inputs = buildItemSearchTypeAll(
functionSearchType[INPUTS_DATA],
@ -2438,13 +2480,22 @@ ${item.displayPath}<span class="${type}">${name}</span>\
if (functionSearchType.length > 1) {
if (typeof functionSearchType[OUTPUT_DATA] === "number") {
const pathIndex = functionSearchType[OUTPUT_DATA];
output = [{
id: pathIndex === 0
? -1
: buildTypeMapIndex(lowercasePaths[pathIndex - 1].name),
ty: pathIndex === 0 ? null : lowercasePaths[pathIndex - 1].ty,
generics: [],
}];
if (pathIndex === 0) {
output = [{
id: -1,
ty: null,
path: null,
generics: [],
}];
} else {
const item = lowercasePaths[pathIndex - 1];
output = [{
id: buildTypeMapIndex(item.name),
ty: item.ty,
path: item.path,
generics: [],
}];
}
} else {
output = buildItemSearchTypeAll(
functionSearchType[OUTPUT_DATA],
@ -2577,9 +2628,19 @@ ${item.displayPath}<span class="${type}">${name}</span>\
// convert `rawPaths` entries into object form
// generate normalizedPaths for function search mode
let len = paths.length;
let lastPath = itemPaths.get(0);
for (let i = 0; i < len; ++i) {
lowercasePaths.push({ty: paths[i][0], name: paths[i][1].toLowerCase()});
paths[i] = {ty: paths[i][0], name: paths[i][1]};
const elem = paths[i];
const ty = elem[0];
const name = elem[1];
let path = null;
if (elem.length > 2) {
path = itemPaths.has(elem[2]) ? itemPaths.get(elem[2]) : lastPath;
lastPath = path;
}

lowercasePaths.push({ty: ty, name: name.toLowerCase(), path: path});
paths[i] = {ty: ty, name: name, path: path};
}

// convert `item*` into an object form, and construct word indices.
@ -2589,8 +2650,8 @@ ${item.displayPath}<span class="${type}">${name}</span>\
// operation that is cached for the life of the page state so that
// all other search operations have access to this cached data for
// faster analysis operations
lastPath = "";
len = itemTypes.length;
let lastPath = "";
for (let i = 0; i < len; ++i) {
let word = "";
// This object should have exactly the same set of fields as the "crateRow"
@ -2599,11 +2660,12 @@ ${item.displayPath}<span class="${type}">${name}</span>\
word = itemNames[i].toLowerCase();
}
searchWords.push(word);
const path = itemPaths.has(i) ? itemPaths.get(i) : lastPath;
const row = {
crate: crate,
ty: itemTypes.charCodeAt(i) - charA,
name: itemNames[i],
path: itemPaths.has(i) ? itemPaths.get(i) : lastPath,
path: path,
desc: itemDescs[i],
parent: itemParentIdxs[i] > 0 ? paths[itemParentIdxs[i] - 1] : undefined,
type: buildFunctionSearchType(
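In this part of the index, an item's parent path is stored only when it differs from the previous item's, so decoding keeps a running `lastPath` and reuses it whenever no explicit entry exists. A small Rust sketch of that backreference-style decoding (illustrative names, not the real index format):

```rust
use std::collections::HashMap;

// Decode per-item paths where only changes are stored explicitly:
// items without an entry in `explicit_paths` inherit the previous path.
fn decode_paths(item_count: usize, explicit_paths: &HashMap<usize, String>) -> Vec<String> {
    let mut last_path = String::new();
    let mut out = Vec::with_capacity(item_count);
    for i in 0..item_count {
        if let Some(path) = explicit_paths.get(&i) {
            last_path = path.clone();
        }
        out.push(last_path.clone());
    }
    out
}

fn main() {
    let mut explicit = HashMap::new();
    explicit.insert(0, "std::vec".to_string());
    explicit.insert(2, "std::option".to_string());
    // Item 1 has no entry, so it reuses the path of item 0.
    assert_eq!(
        decode_paths(3, &explicit),
        ["std::vec", "std::vec", "std::option"]
    );
}
```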
@ -8,6 +8,7 @@ use std::fmt;

use rustc_ast::ast;
use rustc_hir::{def::CtorKind, def::DefKind, def_id::DefId};
use rustc_metadata::rendered_const;
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::symbol::sym;
use rustc_span::{Pos, Symbol};
@ -15,7 +16,6 @@ use rustc_target::spec::abi::Abi as RustcAbi;

use rustdoc_json_types::*;

use crate::clean::utils::print_const_expr;
use crate::clean::{self, ItemId};
use crate::formats::item_type::ItemType;
use crate::json::JsonRenderer;
@ -805,7 +805,7 @@ impl FromWithTcx<clean::Static> for Static {
Static {
type_: stat.type_.into_tcx(tcx),
mutable: stat.mutability == ast::Mutability::Mut,
expr: stat.expr.map(|e| print_const_expr(tcx, e)).unwrap_or_default(),
expr: stat.expr.map(|e| rendered_const(tcx, e)).unwrap_or_default(),
}
}
}
@ -78,13 +78,22 @@ pub fn rev_exists(rev: &str, git_dir: Option<&Path>) -> Result<bool, String> {
/// We will then fall back to origin/master in the hope that at least this exists.
pub fn updated_master_branch(git_dir: Option<&Path>) -> Result<String, String> {
let upstream_remote = get_rust_lang_rust_remote(git_dir)?;
let upstream_master = format!("{upstream_remote}/master");
if rev_exists(&upstream_master, git_dir)? {
return Ok(upstream_master);
for upstream_master in [format!("{upstream_remote}/master"), format!("origin/master")] {
if rev_exists(&upstream_master, git_dir)? {
return Ok(upstream_master);
}
}

// We could implement smarter logic here in the future.
Ok("origin/master".into())
Err(format!("Cannot find any suitable upstream master branch"))
}

pub fn get_git_merge_base(git_dir: Option<&Path>) -> Result<String, String> {
let updated_master = updated_master_branch(git_dir)?;
let mut git = Command::new("git");
if let Some(git_dir) = git_dir {
git.current_dir(git_dir);
}
Ok(output_result(git.arg("merge-base").arg(&updated_master).arg("HEAD"))?.trim().to_owned())
}

/// Returns the files that have been modified in the current branch compared to the master branch.
@ -94,20 +103,13 @@ pub fn get_git_modified_files(
git_dir: Option<&Path>,
extensions: &Vec<&str>,
) -> Result<Option<Vec<String>>, String> {
let Ok(updated_master) = updated_master_branch(git_dir) else {
return Ok(None);
};
let merge_base = get_git_merge_base(git_dir)?;

let git = || {
let mut git = Command::new("git");
if let Some(git_dir) = git_dir {
git.current_dir(git_dir);
}
git
};

let merge_base = output_result(git().arg("merge-base").arg(&updated_master).arg("HEAD"))?;
let files = output_result(git().arg("diff-index").arg("--name-only").arg(merge_base.trim()))?
let mut git = Command::new("git");
if let Some(git_dir) = git_dir {
git.current_dir(git_dir);
}
let files = output_result(git.args(["diff-index", "--name-only", merge_base.trim()]))?
.lines()
.map(|s| s.trim().to_owned())
.filter(|f| {
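At the command level, the refactored helpers boil down to `git merge-base <upstream> HEAD` followed by `git diff-index --name-only <merge-base>`. A self-contained sketch of that flow using only `std::process::Command`, with simplified error handling; this is not the build_helper code itself:

```rust
use std::process::Command;

// Simplified stand-in for the helpers above: find the merge base with an
// upstream branch, then list files changed since it, keeping only `.rs` files.
fn modified_rust_files(upstream: &str) -> Result<Vec<String>, String> {
    let run = |args: &[&str]| -> Result<String, String> {
        let out = Command::new("git")
            .args(args)
            .output()
            .map_err(|e| format!("failed to spawn git: {e}"))?;
        if !out.status.success() {
            return Err(format!("git {args:?} failed"));
        }
        Ok(String::from_utf8_lossy(&out.stdout).into_owned())
    };

    let merge_base = run(&["merge-base", upstream, "HEAD"])?;
    let files = run(&["diff-index", "--name-only", merge_base.trim()])?;
    Ok(files
        .lines()
        .map(|s| s.trim().to_owned())
        .filter(|f| f.ends_with(".rs"))
        .collect())
}

fn main() {
    match modified_rust_files("origin/master") {
        Ok(files) => println!("{} modified .rs files", files.len()),
        Err(err) => eprintln!("{err}"),
    }
}
```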
@ -1,24 +1,20 @@
use super::ARITHMETIC_SIDE_EFFECTS;
use clippy_utils::consts::{constant, constant_simple, Constant};
use clippy_utils::diagnostics::span_lint;
use clippy_utils::ty::type_diagnostic_name;
use clippy_utils::{expr_or_init, is_from_proc_macro, is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::Ty;
use rustc_session::impl_lint_pass;
use rustc_span::source_map::{Span, Spanned};
use rustc_span::symbol::sym;
use rustc_span::Symbol;
use {rustc_ast as ast, rustc_hir as hir};

const HARD_CODED_ALLOWED_BINARY: &[[&str; 2]] = &[
["f32", "f32"],
["f64", "f64"],
["std::num::Saturating", "*"],
["std::num::Wrapping", "*"],
["std::string::String", "str"],
];
const HARD_CODED_ALLOWED_BINARY: &[[&str; 2]] = &[["f32", "f32"], ["f64", "f64"], ["std::string::String", "str"]];
const HARD_CODED_ALLOWED_UNARY: &[&str] = &["f32", "f64", "std::num::Saturating", "std::num::Wrapping"];
const INTEGER_METHODS: &[&str] = &["saturating_div", "wrapping_div", "wrapping_rem", "wrapping_rem_euclid"];
const INTEGER_METHODS: &[Symbol] = &[sym::saturating_div, sym::wrapping_div, sym::wrapping_rem, sym::wrapping_rem_euclid];

#[derive(Debug)]
pub struct ArithmeticSideEffects {
@ -53,7 +49,7 @@ impl ArithmeticSideEffects {
allowed_unary,
const_span: None,
expr_span: None,
integer_methods: INTEGER_METHODS.iter().map(|el| Symbol::intern(el)).collect(),
integer_methods: INTEGER_METHODS.iter().copied().collect(),
}
}

@ -86,6 +82,38 @@ impl ArithmeticSideEffects {
self.allowed_unary.contains(ty_string_elem)
}

/// Verifies built-in types that have specific allowed operations
fn has_specific_allowed_type_and_operation(
cx: &LateContext<'_>,
lhs_ty: Ty<'_>,
op: &Spanned<hir::BinOpKind>,
rhs_ty: Ty<'_>,
) -> bool {
let is_div_or_rem = matches!(op.node, hir::BinOpKind::Div | hir::BinOpKind::Rem);
let is_non_zero_u = |symbol: Option<Symbol>| {
matches!(
symbol,
Some(sym::NonZeroU128 | sym::NonZeroU16 | sym::NonZeroU32 | sym::NonZeroU64 | sym::NonZeroU8 | sym::NonZeroUsize)
)
};
let is_sat_or_wrap = |ty: Ty<'_>| {
let is_sat = type_diagnostic_name(cx, ty) == Some(sym::Saturating);
let is_wrap = type_diagnostic_name(cx, ty) == Some(sym::Wrapping);
is_sat || is_wrap
};

// If the RHS is NonZeroU*, then division or modulo by zero will never occur
if is_non_zero_u(type_diagnostic_name(cx, rhs_ty)) && is_div_or_rem {
return true;
}
// `Saturating` and `Wrapping` can still misbehave if the RHS is zero in a division or modulo
if is_sat_or_wrap(lhs_ty) {
return !is_div_or_rem;
}

false
}

// For example, 8i32 or &i64::MAX.
fn is_integral(ty: Ty<'_>) -> bool {
ty.peel_refs().is_integral()
@ -147,6 +175,9 @@ impl ArithmeticSideEffects {
if self.has_allowed_binary(lhs_ty, rhs_ty) {
return;
}
if Self::has_specific_allowed_type_and_operation(cx, lhs_ty, op, rhs_ty) {
return;
}
let has_valid_op = if Self::is_integral(lhs_ty) && Self::is_integral(rhs_ty) {
if let hir::BinOpKind::Shl | hir::BinOpKind::Shr = op.node {
// At least for integers, shifts are already handled by the CTFE
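The reasoning behind this carve-out: dividing an unsigned integer by a `NonZeroU*` value can never hit a divide-by-zero, while `Wrapping`/`Saturating` division still panics when the divisor is zero, so only the former is exempted for `/` and `%`. A small standalone illustration (an editor-added example, not part of the lint's test suite):

```rust
use std::num::NonZeroU32;
use std::num::Wrapping;

fn main() {
    // Division by a NonZeroU32 is total: the divisor can never be zero,
    // so `u32 / NonZeroU32` has no divide-by-zero side effect.
    let n: u32 = 10;
    let d = NonZeroU32::new(3).unwrap();
    assert_eq!(n / d, 3);
    assert_eq!(n % d, 1);

    // Wrapping only changes overflow behaviour; a zero divisor still panics,
    // which is why `/` and `%` on Wrapping/Saturating stay linted.
    let zero = Wrapping(0i32);
    let result = std::panic::catch_unwind(|| Wrapping(1i32) / zero);
    assert!(result.is_err());
}
```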
@ -15,7 +15,7 @@

extern crate proc_macro_derive;

use core::num::{Saturating, Wrapping};
use core::num::{NonZeroUsize, Saturating, Wrapping};

const ONE: i32 = 1;
const ZERO: i32 = 0;
@ -493,4 +493,32 @@ pub fn issue_11262() {
let _ = 2 / zero;
}

pub fn issue_11392() {
fn example_div(unsigned: usize, nonzero_unsigned: NonZeroUsize) -> usize {
unsigned / nonzero_unsigned
}

fn example_rem(unsigned: usize, nonzero_unsigned: NonZeroUsize) -> usize {
unsigned % nonzero_unsigned
}

let (unsigned, nonzero_unsigned) = (0, NonZeroUsize::new(1).unwrap());
example_div(unsigned, nonzero_unsigned);
example_rem(unsigned, nonzero_unsigned);
}

pub fn issue_11393() {
fn example_div(x: Wrapping<i32>, maybe_zero: Wrapping<i32>) -> Wrapping<i32> {
x / maybe_zero
}

fn example_rem(x: Wrapping<i32>, maybe_zero: Wrapping<i32>) -> Wrapping<i32> {
x % maybe_zero
}

let [x, maybe_zero] = [1, 0].map(Wrapping);
example_div(x, maybe_zero);
example_rem(x, maybe_zero);
}

fn main() {}
@ -702,5 +702,17 @@ error: arithmetic operation that can potentially result in unexpected side-effects
LL | 10 / a
| ^^^^^^

error: aborting due to 117 previous errors
error: arithmetic operation that can potentially result in unexpected side-effects
--> $DIR/arithmetic_side_effects.rs:512:9
|
LL | x / maybe_zero
| ^^^^^^^^^^^^^^

error: arithmetic operation that can potentially result in unexpected side-effects
--> $DIR/arithmetic_side_effects.rs:516:9
|
LL | x % maybe_zero
| ^^^^^^^^^^^^^^

error: aborting due to 119 previous errors
@ -8,7 +8,6 @@
#![feature(yeet_expr)]
#![feature(nonzero_ops)]
#![feature(round_ties_even)]
#![feature(os_str_bytes)]
#![feature(lint_reasons)]
#![feature(trait_upcasting)]
// Configure clippy and other lints

@ -24,7 +24,7 @@ pub fn bytes_to_os_str<'tcx>(bytes: &[u8]) -> InterpResult<'tcx, &OsStr> {
}
#[cfg(not(unix))]
pub fn bytes_to_os_str<'tcx>(bytes: &[u8]) -> InterpResult<'tcx, &OsStr> {
// We cannot use `from_os_str_bytes_unchecked` here since we can't trust `bytes`.
// We cannot use `from_encoded_bytes_unchecked` here since we can't trust `bytes`.
let s = std::str::from_utf8(bytes)
.map_err(|_| err_unsup_format!("{:?} is not a valid utf-8 string", bytes))?;
Ok(OsStr::new(s))
@ -83,7 +83,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
ptr: Pointer<Option<Provenance>>,
size: u64,
) -> InterpResult<'tcx, (bool, u64)> {
let bytes = os_str.as_os_str_bytes();
let bytes = os_str.as_encoded_bytes();
self.eval_context_mut().write_c_str(bytes, ptr, size)
}

@ -1344,7 +1344,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {

let mut name = dir_entry.file_name(); // not a Path as there are no separators!
name.push("\0"); // Add a NUL terminator
let name_bytes = name.as_os_str_bytes();
let name_bytes = name.as_encoded_bytes();
let name_len = u64::try_from(name_bytes.len()).unwrap();

let dirent64_layout = this.libc_ty_layout("dirent64");
@ -1698,7 +1698,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
Cow::Borrowed(resolved.as_ref()),
crate::shims::os_str::PathConversion::HostToTarget,
);
let mut path_bytes = resolved.as_os_str_bytes();
let mut path_bytes = resolved.as_encoded_bytes();
let bufsize: usize = bufsize.try_into().unwrap();
if path_bytes.len() > bufsize {
path_bytes = &path_bytes[..bufsize]

@ -6,7 +6,7 @@ use super::miri_extern;
pub fn host_to_target_path(path: OsString) -> PathBuf {
use std::ffi::{CStr, CString};

// Once into_os_str_bytes is stable we can use it here.
// Once into_encoded_bytes is stable we can use it here.
// (Unstable features would need feature flags in each test...)
let path = CString::new(path.into_string().unwrap()).unwrap();
let mut out = Vec::with_capacity(1024);
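The rename from `as_os_str_bytes` to `as_encoded_bytes` tracks the std `OsStr`/`OsString` API, and the Miri shim above deliberately validates untrusted bytes as UTF-8 instead of reaching for the unchecked constructor. A minimal standalone sketch of that trusted-vs-untrusted distinction (not Miri code):

```rust
use std::ffi::OsStr;

// Untrusted input: only accept byte sequences we can prove are valid,
// mirroring the shim's approach of going through UTF-8 validation.
fn untrusted_bytes_to_os_str(bytes: &[u8]) -> Result<&OsStr, String> {
    let s = std::str::from_utf8(bytes)
        .map_err(|_| format!("{bytes:?} is not a valid utf-8 string"))?;
    Ok(OsStr::new(s))
}

fn main() {
    // Round-tripping an OsStr we created ourselves is always fine.
    let os = OsStr::new("hello");
    let encoded = os.as_encoded_bytes();
    assert_eq!(untrusted_bytes_to_os_str(encoded).unwrap(), os);

    // Arbitrary bytes may be rejected rather than trusted blindly.
    assert!(untrusted_bytes_to_os_str(&[0xff, 0xfe]).is_err());
}
```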
tests/codegen/lib-optimizations/iter-sum.rs (new file)
@ -0,0 +1,15 @@
// ignore-debug: the debug assertions get in the way
// compile-flags: -O
// only-x86_64 (vectorization varies between architectures)
#![crate_type = "lib"]


// Ensure that slice + take + sum gets vectorized.
// Currently this relies on the slice::Iter::try_fold implementation
// CHECK-LABEL: @slice_take_sum
#[no_mangle]
pub fn slice_take_sum(s: &[u64], l: usize) -> u64 {
// CHECK: vector.body:
// CHECK: ret
s.iter().take(l).sum()
}
@ -22,9 +22,9 @@ pub fn chunks4(x: &[u8]) -> &[[u8; 4]] {
// CHECK-LABEL: @chunks4_with_remainder
#[no_mangle]
pub fn chunks4_with_remainder(x: &[u8]) -> (&[[u8; 4]], &[u8]) {
// CHECK: and i64 %x.1, -4
// CHECK: and i64 %x.1, 3
// CHECK: lshr exact
// CHECK-DAG: and i64 %x.1, -4
// CHECK-DAG: and i64 %x.1, 3
// CHECK-DAG: lshr
// CHECK-NOT: mul
// CHECK-NOT: udiv
// CHECK-NOT: urem
Some files were not shown because too many files have changed in this diff.