Auto merge of #108116 - Dylan-DPC:rollup-h3n2vxl, r=Dylan-DPC
Rollup of 6 pull requests

Successful merges:

 - #106372 (Use id-based thread parking on SOLID)
 - #108050 (Fix index out of bounds ICE in `point_at_expr_source_of_inferred_type`)
 - #108084 (Constify `RangeBounds`, `RangeX::contains` and `RangeX::is_empty` (where applicable).)
 - #108101 (don't clone types that are copy)
 - #108102 (simplify some refs)
 - #108103 (be nice and don't slice)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup

commit 4b34c7b766
@@ -1970,7 +1970,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
         ) = &qself.kind {
             // If the path segment already has type params, we want to overwrite
             // them.
-            match &path.segments[..] {
+            match &path.segments {
                 // `segment` is the previous to last element on the path,
                 // which would normally be the `enum` itself, while the last
                 // `_` `PathSegment` corresponds to the variant.
@@ -2670,7 +2670,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
                 // `Self` in trait or type alias.
                 assert_eq!(opt_self_ty, None);
                 self.prohibit_generics(path.segments.iter(), |err| {
-                    if let [hir::PathSegment { args: Some(args), ident, .. }] = &path.segments[..] {
+                    if let [hir::PathSegment { args: Some(args), ident, .. }] = &path.segments {
                         err.span_suggestion_verbose(
                             ident.span.shrink_to_hi().to(args.span_ext),
                             "the `Self` type doesn't accept type parameters",
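PR #108103 ("be nice and don't slice") drops the explicit `[..]` re-slicing in front of slice patterns. As a stand-alone illustration (a hypothetical helper, not compiler code), a slice pattern already applies through a reference, so the extra slicing buys nothing:

fn describe(segments: &[&str]) -> &'static str {
    // `&segments` has type `&&[&str]`; match ergonomics still lets the slice
    // pattern apply, so no `&segments[..]` re-slice is needed.
    match &segments {
        [] => "empty path",
        [_single] => "single segment",
        [..] => "several segments",
    }
}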
@@ -298,6 +298,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 // call's arguments and we can provide a more explicit span.
                 let sig = self.tcx.fn_sig(def_id).subst_identity();
                 let def_self_ty = sig.input(0).skip_binder();
+                let param_tys = sig.inputs().skip_binder().iter().skip(1);
+                // If there's an arity mismatch, pointing out the call as the source of an inference
+                // can be misleading, so we skip it.
+                if param_tys.len() != args.len() {
+                    continue;
+                }
                 let rcvr_ty = self.node_ty(rcvr.hir_id);
                 // Get the evaluated type *after* calling the method call, so that the influence
                 // of the arguments can be reflected in the receiver type. The receiver
@@ -323,13 +329,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 let mut param_found = FxHashMap::default();
                 if self.can_eq(self.param_env, ty, found) {
                     // We only point at the first place where the found type was inferred.
-                    for (i, param_ty) in sig.inputs().skip_binder().iter().skip(1).enumerate() {
+                    for (param_ty, arg) in param_tys.zip(args) {
                         if def_self_ty.contains(*param_ty) && let ty::Param(_) = param_ty.kind() {
                             // We found an argument that references a type parameter in `Self`,
                             // so we assume that this is the argument that caused the found
                             // type, which we know already because of `can_eq` above was first
                             // inferred in this method call.
-                            let arg = &args[i];
                             let arg_ty = self.node_ty(arg.hir_id);
                             if !arg.span.overlaps(mismatch_span) {
                                 err.span_label(
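The ICE fixed by #108050 came from indexing the argument list with a parameter index when the call supplied too few arguments; iterating both sides together sidesteps the out-of-bounds access. A minimal, self-contained sketch of the difference (illustrative only, not compiler code):

fn main() {
    let params = ["a", "b"]; // two declared parameters
    let args = ["x"];        // only one argument supplied (arity mismatch)

    // Indexing by parameter position can go out of bounds:
    // let _ = args[1]; // panics: index out of bounds

    // Zipping stops at the shorter side, so no out-of-bounds access is possible.
    for (param, arg) in params.iter().zip(args.iter()) {
        println!("{param} = {arg}");
    }
}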
@@ -517,8 +517,7 @@ fn method_autoderef_steps<'tcx>(
         .by_ref()
         .map(|(ty, d)| {
             let step = CandidateStep {
-                self_ty: infcx
-                    .make_query_response_ignoring_pending_obligations(inference_vars.clone(), ty),
+                self_ty: infcx.make_query_response_ignoring_pending_obligations(inference_vars, ty),
                 autoderefs: d,
                 from_unsafe_deref: reached_raw_pointer,
                 unsize: false,
@@ -125,7 +125,7 @@ impl FlagComputation {
                 self.bound_computation(ts, |flags, ts| flags.add_tys(ts));
             }

-            &ty::GeneratorWitnessMIR(_, ref substs) => {
+            ty::GeneratorWitnessMIR(_, substs) => {
                 let should_remove_further_specializable =
                     !self.flags.contains(TypeFlags::STILL_FURTHER_SPECIALIZABLE);
                 self.add_substs(substs);
@@ -92,7 +92,7 @@ where
        let mut lock = self.cache.lock();
        // We may be overwriting another value. This is all right, since the dep-graph
        // will check that the fingerprint matches.
-        lock.insert(key, (value.clone(), index));
+        lock.insert(key, (value, index));
        value
    }

@@ -153,7 +153,7 @@ where

    #[inline]
    fn complete(&self, _key: (), value: V, index: DepNodeIndex) -> Self::Stored {
-        *self.cache.lock() = Some((value.clone(), index));
+        *self.cache.lock() = Some((value, index));
        value
    }

@@ -283,7 +283,7 @@ where
        let mut lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
        #[cfg(not(parallel_compiler))]
        let mut lock = self.cache.lock();
-        lock.insert(key, (value.clone(), index));
+        lock.insert(key, (value, index));
        value
    }

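The `.clone()` calls removed by #108101 were on values whose types are `Copy`, so the value can be handed both to the cache and back to the caller without an explicit clone. A minimal sketch with assumed stand-in types (the real code uses the query system's own cache and `DepNodeIndex`):

use std::collections::HashMap;

fn complete<V: Copy>(cache: &mut HashMap<u32, (V, u32)>, key: u32, value: V, index: u32) -> V {
    cache.insert(key, (value, index)); // `value` is copied into the map...
    value                              // ...and returned by copy, no clone needed
}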
@@ -547,7 +547,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
            response.value.certainty == Certainty::Yes
                && response.has_no_inference_or_external_constraints()
        }) {
-            return Ok(response.clone());
+            return Ok(*response);
        }

        let certainty = candidates.iter().fold(Certainty::AMBIGUOUS, |certainty, response| {
@@ -599,7 +599,7 @@ fn virtual_call_violation_for_method<'tcx>(
            return false;
        }

-        contains_illegal_self_type_reference(tcx, trait_def_id, pred.clone())
+        contains_illegal_self_type_reference(tcx, trait_def_id, pred)
    }) {
        return Some(MethodViolationCode::WhereClauseReferencesSelf);
    }
@@ -336,7 +336,7 @@ impl<I: Interner> PartialEq for TyKind<I> {
                a_d == b_d && a_s == b_s && a_m == b_m
            }
            (GeneratorWitness(a_g), GeneratorWitness(b_g)) => a_g == b_g,
-            (&GeneratorWitnessMIR(ref a_d, ref a_s), &GeneratorWitnessMIR(ref b_d, ref b_s)) => {
+            (GeneratorWitnessMIR(a_d, a_s), GeneratorWitnessMIR(b_d, b_s)) => {
                a_d == b_d && a_s == b_s
            }
            (Tuple(a_t), Tuple(b_t)) => a_t == b_t,
@@ -397,8 +397,8 @@ impl<I: Interner> Ord for TyKind<I> {
            }
            (GeneratorWitness(a_g), GeneratorWitness(b_g)) => a_g.cmp(b_g),
            (
-                &GeneratorWitnessMIR(ref a_d, ref a_s),
-                &GeneratorWitnessMIR(ref b_d, ref b_s),
+                GeneratorWitnessMIR(a_d, a_s),
+                GeneratorWitnessMIR(b_d, b_s),
            ) => match Ord::cmp(a_d, b_d) {
                Ordering::Equal => Ord::cmp(a_s, b_s),
                cmp => cmp,
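Part of #108102 ("simplify some refs"): with match ergonomics, the leading `&` and the `ref` bindings in these patterns are redundant. A small illustration (not the compiler code):

fn sum(pair: &(u32, u32)) -> u32 {
    match pair {
        // Older style would spell this out as: &(ref a, ref b) => *a + *b,
        (a, b) => a + b,
    }
}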
@@ -1491,9 +1491,10 @@ mod impls {
        }
    }
    #[stable(feature = "rust1", since = "1.0.0")]
-    impl<A: ?Sized, B: ?Sized> PartialOrd<&B> for &A
+    #[rustc_const_unstable(feature = "const_cmp", issue = "92391")]
+    impl<A: ?Sized, B: ?Sized> const PartialOrd<&B> for &A
    where
-        A: PartialOrd<B>,
+        A: ~const PartialOrd<B>,
    {
        #[inline]
        fn partial_cmp(&self, other: &&B) -> Option<Ordering> {
@@ -96,7 +96,7 @@ impl<Idx: fmt::Debug> fmt::Debug for Range<Idx> {
    }
}

-impl<Idx: PartialOrd<Idx>> Range<Idx> {
+impl<Idx: ~const PartialOrd<Idx>> Range<Idx> {
    /// Returns `true` if `item` is contained in the range.
    ///
    /// # Examples
@@ -116,10 +116,11 @@ impl<Idx: PartialOrd<Idx>> Range<Idx> {
    /// assert!(!(f32::NAN..1.0).contains(&0.5));
    /// ```
    #[stable(feature = "range_contains", since = "1.35.0")]
-    pub fn contains<U>(&self, item: &U) -> bool
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+    pub const fn contains<U>(&self, item: &U) -> bool
    where
-        Idx: PartialOrd<U>,
-        U: ?Sized + PartialOrd<Idx>,
+        Idx: ~const PartialOrd<U>,
+        U: ?Sized + ~const PartialOrd<Idx>,
    {
        <Self as RangeBounds<Idx>>::contains(self, item)
    }
@@ -142,7 +143,8 @@ impl<Idx: PartialOrd<Idx>> Range<Idx> {
    /// assert!( (f32::NAN..5.0).is_empty());
    /// ```
    #[stable(feature = "range_is_empty", since = "1.47.0")]
-    pub fn is_empty(&self) -> bool {
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+    pub const fn is_empty(&self) -> bool {
        !(self.start < self.end)
    }
}
@@ -199,7 +201,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeFrom<Idx> {
    }
}

-impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
+impl<Idx: ~const PartialOrd<Idx>> RangeFrom<Idx> {
    /// Returns `true` if `item` is contained in the range.
    ///
    /// # Examples
@@ -214,10 +216,11 @@ impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
    /// assert!(!(f32::NAN..).contains(&0.5));
    /// ```
    #[stable(feature = "range_contains", since = "1.35.0")]
-    pub fn contains<U>(&self, item: &U) -> bool
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+    pub const fn contains<U>(&self, item: &U) -> bool
    where
-        Idx: PartialOrd<U>,
-        U: ?Sized + PartialOrd<Idx>,
+        Idx: ~const PartialOrd<U>,
+        U: ?Sized + ~const PartialOrd<Idx>,
    {
        <Self as RangeBounds<Idx>>::contains(self, item)
    }
@@ -280,7 +283,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeTo<Idx> {
    }
}

-impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
+impl<Idx: ~const PartialOrd<Idx>> RangeTo<Idx> {
    /// Returns `true` if `item` is contained in the range.
    ///
    /// # Examples
@@ -295,10 +298,11 @@ impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
    /// assert!(!(..f32::NAN).contains(&0.5));
    /// ```
    #[stable(feature = "range_contains", since = "1.35.0")]
-    pub fn contains<U>(&self, item: &U) -> bool
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+    pub const fn contains<U>(&self, item: &U) -> bool
    where
-        Idx: PartialOrd<U>,
-        U: ?Sized + PartialOrd<Idx>,
+        Idx: ~const PartialOrd<U>,
+        U: ?Sized + ~const PartialOrd<Idx>,
    {
        <Self as RangeBounds<Idx>>::contains(self, item)
    }
@@ -437,7 +441,8 @@ impl<Idx> RangeInclusive<Idx> {
    /// ```
    #[stable(feature = "inclusive_range_methods", since = "1.27.0")]
    #[inline]
-    pub fn into_inner(self) -> (Idx, Idx) {
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+    pub const fn into_inner(self) -> (Idx, Idx) {
        (self.start, self.end)
    }
}
@@ -469,7 +474,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeInclusive<Idx> {
    }
}

-impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
+impl<Idx: ~const PartialOrd<Idx>> RangeInclusive<Idx> {
    /// Returns `true` if `item` is contained in the range.
    ///
    /// # Examples
@@ -500,10 +505,11 @@ impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
    /// assert!(!r.contains(&3) && !r.contains(&5));
    /// ```
    #[stable(feature = "range_contains", since = "1.35.0")]
-    pub fn contains<U>(&self, item: &U) -> bool
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+    pub const fn contains<U>(&self, item: &U) -> bool
    where
-        Idx: PartialOrd<U>,
-        U: ?Sized + PartialOrd<Idx>,
+        Idx: ~const PartialOrd<U>,
+        U: ?Sized + ~const PartialOrd<Idx>,
    {
        <Self as RangeBounds<Idx>>::contains(self, item)
    }
@@ -535,8 +541,9 @@ impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
    /// assert!(r.is_empty());
    /// ```
    #[stable(feature = "range_is_empty", since = "1.47.0")]
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
    #[inline]
-    pub fn is_empty(&self) -> bool {
+    pub const fn is_empty(&self) -> bool {
        self.exhausted || !(self.start <= self.end)
    }
}
@@ -598,7 +605,7 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeToInclusive<Idx> {
    }
}

-impl<Idx: PartialOrd<Idx>> RangeToInclusive<Idx> {
+impl<Idx: ~const PartialOrd<Idx>> RangeToInclusive<Idx> {
    /// Returns `true` if `item` is contained in the range.
    ///
    /// # Examples
@@ -613,10 +620,11 @@ impl<Idx: PartialOrd<Idx>> RangeToInclusive<Idx> {
    /// assert!(!(..=f32::NAN).contains(&0.5));
    /// ```
    #[stable(feature = "range_contains", since = "1.35.0")]
-    pub fn contains<U>(&self, item: &U) -> bool
+    #[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+    pub const fn contains<U>(&self, item: &U) -> bool
    where
-        Idx: PartialOrd<U>,
-        U: ?Sized + PartialOrd<Idx>,
+        Idx: ~const PartialOrd<U>,
+        U: ?Sized + ~const PartialOrd<Idx>,
    {
        <Self as RangeBounds<Idx>>::contains(self, item)
    }
@@ -757,6 +765,7 @@ impl<T: Clone> Bound<&T> {
/// `RangeBounds` is implemented by Rust's built-in range types, produced
/// by range syntax like `..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`.
#[stable(feature = "collections_range", since = "1.28.0")]
+#[const_trait]
pub trait RangeBounds<T: ?Sized> {
    /// Start index bound.
    ///
@@ -809,8 +818,8 @@ pub trait RangeBounds<T: ?Sized> {
    #[stable(feature = "range_contains", since = "1.35.0")]
    fn contains<U>(&self, item: &U) -> bool
    where
-        T: PartialOrd<U>,
-        U: ?Sized + PartialOrd<T>,
+        T: ~const PartialOrd<U>,
+        U: ?Sized + ~const PartialOrd<T>,
    {
        (match self.start_bound() {
            Included(start) => start <= item,
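For orientation, this is how the trait's default `contains` behaves on a hand-written `RangeBounds` implementation; the example only exercises the stable surface and is unaffected by the `~const` bounds above (hypothetical `Window` type for illustration):

use std::ops::{Bound, RangeBounds};

struct Window { lo: u32, hi: u32 }

impl RangeBounds<u32> for Window {
    fn start_bound(&self) -> Bound<&u32> { Bound::Included(&self.lo) }
    fn end_bound(&self) -> Bound<&u32> { Bound::Excluded(&self.hi) }
}

fn main() {
    let w = Window { lo: 2, hi: 5 };
    assert!(w.contains(&3));
    assert!(!w.contains(&5)); // end bound is exclusive here
}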
@@ -827,7 +836,8 @@ pub trait RangeBounds<T: ?Sized> {
use self::Bound::{Excluded, Included, Unbounded};

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T: ?Sized> RangeBounds<T> for RangeFull {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T: ?Sized> const RangeBounds<T> for RangeFull {
    fn start_bound(&self) -> Bound<&T> {
        Unbounded
    }
@@ -837,7 +847,8 @@ impl<T: ?Sized> RangeBounds<T> for RangeFull {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeFrom<T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeFrom<T> {
    fn start_bound(&self) -> Bound<&T> {
        Included(&self.start)
    }
@@ -847,7 +858,8 @@ impl<T> RangeBounds<T> for RangeFrom<T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeTo<T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeTo<T> {
    fn start_bound(&self) -> Bound<&T> {
        Unbounded
    }
@@ -857,7 +869,8 @@ impl<T> RangeBounds<T> for RangeTo<T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for Range<T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for Range<T> {
    fn start_bound(&self) -> Bound<&T> {
        Included(&self.start)
    }
@@ -867,7 +880,8 @@ impl<T> RangeBounds<T> for Range<T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeInclusive<T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeInclusive<T> {
    fn start_bound(&self) -> Bound<&T> {
        Included(&self.start)
    }
@@ -883,7 +897,8 @@ impl<T> RangeBounds<T> for RangeInclusive<T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeToInclusive<T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeToInclusive<T> {
    fn start_bound(&self) -> Bound<&T> {
        Unbounded
    }
@@ -893,7 +908,8 @@ impl<T> RangeBounds<T> for RangeToInclusive<T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for (Bound<T>, Bound<T>) {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for (Bound<T>, Bound<T>) {
    fn start_bound(&self) -> Bound<&T> {
        match *self {
            (Included(ref start), _) => Included(start),
@@ -912,7 +928,8 @@ impl<T> RangeBounds<T> for (Bound<T>, Bound<T>) {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<'a, T: ?Sized + 'a> RangeBounds<T> for (Bound<&'a T>, Bound<&'a T>) {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<'a, T: ?Sized + 'a> const RangeBounds<T> for (Bound<&'a T>, Bound<&'a T>) {
    fn start_bound(&self) -> Bound<&T> {
        self.0
    }
@@ -923,7 +940,8 @@ impl<'a, T: ?Sized + 'a> RangeBounds<T> for (Bound<&'a T>, Bound<&'a T>) {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeFrom<&T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeFrom<&T> {
    fn start_bound(&self) -> Bound<&T> {
        Included(self.start)
    }
@@ -933,7 +951,8 @@ impl<T> RangeBounds<T> for RangeFrom<&T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeTo<&T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeTo<&T> {
    fn start_bound(&self) -> Bound<&T> {
        Unbounded
    }
@@ -943,7 +962,8 @@ impl<T> RangeBounds<T> for RangeTo<&T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for Range<&T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for Range<&T> {
    fn start_bound(&self) -> Bound<&T> {
        Included(self.start)
    }
@@ -953,7 +973,8 @@ impl<T> RangeBounds<T> for Range<&T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeInclusive<&T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeInclusive<&T> {
    fn start_bound(&self) -> Bound<&T> {
        Included(self.start)
    }
@@ -963,7 +984,8 @@ impl<T> RangeBounds<T> for RangeInclusive<&T> {
}

#[stable(feature = "collections_range", since = "1.28.0")]
-impl<T> RangeBounds<T> for RangeToInclusive<&T> {
+#[rustc_const_unstable(feature = "const_range_bounds", issue = "108082")]
+impl<T> const RangeBounds<T> for RangeToInclusive<&T> {
    fn start_bound(&self) -> Bound<&T> {
        Unbounded
    }
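The point of #108084 is to let range checks run in const contexts. A hedged sketch of the kind of code this is meant to enable (nightly-only; it relies on the unstable `const_range_bounds` feature tracked in #108082 and may additionally need `const_cmp`; the exact feature gates can differ):

#![feature(const_range_bounds)]

const fn in_working_hours(hour: i32) -> bool {
    // `Range::contains` is now a `const fn`, so this can be evaluated at compile time.
    (9..17).contains(&hour)
}

const MORNING_MEETING_OK: bool = in_working_hours(10);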
library/std/src/sys/itron/thread_parking.rs
new file mode 100644
@@ -0,0 +1,37 @@
+use super::abi;
+use super::error::expect_success_aborting;
+use super::time::with_tmos;
+use crate::time::Duration;
+
+pub type ThreadId = abi::ID;
+
+pub use super::task::current_task_id_aborting as current;
+
+pub fn park(_hint: usize) {
+    match unsafe { abi::slp_tsk() } {
+        abi::E_OK | abi::E_RLWAI => {}
+        err => {
+            expect_success_aborting(err, &"slp_tsk");
+        }
+    }
+}
+
+pub fn park_timeout(dur: Duration, _hint: usize) {
+    match with_tmos(dur, |tmo| unsafe { abi::tslp_tsk(tmo) }) {
+        abi::E_OK | abi::E_RLWAI | abi::E_TMOUT => {}
+        err => {
+            expect_success_aborting(err, &"tslp_tsk");
+        }
+    }
+}
+
+pub fn unpark(id: ThreadId, _hint: usize) {
+    match unsafe { abi::wup_tsk(id) } {
+        // It is allowed to try to wake up a destroyed or unrelated task, so we ignore all
+        // errors that could result from that situation.
+        abi::E_OK | abi::E_NOEXS | abi::E_OBJ | abi::E_QOVR => {}
+        err => {
+            expect_success_aborting(err, &"wup_tsk");
+        }
+    }
+}
deleted file mode 100644
@@ -1,72 +0,0 @@
-use crate::mem::MaybeUninit;
-use crate::time::Duration;
-
-use super::{
-    abi,
-    error::{expect_success, fail},
-    time::with_tmos,
-};
-
-const CLEAR: abi::FLGPTN = 0;
-const RAISED: abi::FLGPTN = 1;
-
-/// A thread parking primitive that is not susceptible to race conditions,
-/// but provides no atomic ordering guarantees and allows only one `raise` per wait.
-pub struct WaitFlag {
-    flag: abi::ID,
-}
-
-impl WaitFlag {
-    /// Creates a new wait flag.
-    pub fn new() -> WaitFlag {
-        let flag = expect_success(
-            unsafe {
-                abi::acre_flg(&abi::T_CFLG {
-                    flgatr: abi::TA_FIFO | abi::TA_WSGL | abi::TA_CLR,
-                    iflgptn: CLEAR,
-                })
-            },
-            &"acre_flg",
-        );
-
-        WaitFlag { flag }
-    }
-
-    /// Wait for the wait flag to be raised.
-    pub fn wait(&self) {
-        let mut token = MaybeUninit::uninit();
-        expect_success(
-            unsafe { abi::wai_flg(self.flag, RAISED, abi::TWF_ORW, token.as_mut_ptr()) },
-            &"wai_flg",
-        );
-    }
-
-    /// Wait for the wait flag to be raised or the timeout to occur.
-    ///
-    /// Returns whether the flag was raised (`true`) or the operation timed out (`false`).
-    pub fn wait_timeout(&self, dur: Duration) -> bool {
-        let mut token = MaybeUninit::uninit();
-        let res = with_tmos(dur, |tmout| unsafe {
-            abi::twai_flg(self.flag, RAISED, abi::TWF_ORW, token.as_mut_ptr(), tmout)
-        });
-
-        match res {
-            abi::E_OK => true,
-            abi::E_TMOUT => false,
-            error => fail(error, &"twai_flg"),
-        }
-    }
-
-    /// Raise the wait flag.
-    ///
-    /// Calls to this function should be balanced with the number of successful waits.
-    pub fn raise(&self) {
-        expect_success(unsafe { abi::set_flg(self.flag, RAISED) }, &"set_flg");
-    }
-}
-
-impl Drop for WaitFlag {
-    fn drop(&mut self) {
-        expect_success(unsafe { abi::del_flg(self.flag) }, &"del_flg");
-    }
-}
@@ -13,9 +13,9 @@ mod itron {
    pub(super) mod spin;
    pub(super) mod task;
    pub mod thread;
+    pub mod thread_parking;
    pub(super) mod time;
    use super::unsupported;
-    pub mod wait_flag;
}

pub mod alloc;
@@ -43,8 +43,8 @@ pub use self::itron::thread;
pub mod memchr;
pub mod thread_local_dtor;
pub mod thread_local_key;
+pub use self::itron::thread_parking;
pub mod time;
-pub use self::itron::wait_flag;

mod rwlock;

@@ -14,12 +14,10 @@ cfg_if::cfg_if! {
    } else if #[cfg(any(
        target_os = "netbsd",
        all(target_vendor = "fortanix", target_env = "sgx"),
+        target_os = "solid_asp3",
    ))] {
        mod id;
        pub use id::Parker;
-    } else if #[cfg(target_os = "solid_asp3")] {
-        mod wait_flag;
-        pub use wait_flag::Parker;
    } else if #[cfg(any(windows, target_family = "unix"))] {
        pub use crate::sys::thread_parking::Parker;
    } else {
deleted file mode 100644
@@ -1,102 +0,0 @@
-//! A wait-flag-based thread parker.
-//!
-//! Some operating systems provide low-level parking primitives like wait counts,
-//! event flags or semaphores which are not susceptible to race conditions (meaning
-//! the wakeup can occur before the wait operation). To implement the `std` thread
-//! parker on top of these primitives, we only have to ensure that parking is fast
-//! when the thread token is available, the atomic ordering guarantees are maintained
-//! and spurious wakeups are minimized.
-//!
-//! To achieve this, this parker uses an atomic variable with three states: `EMPTY`,
-//! `PARKED` and `NOTIFIED`:
-//! * `EMPTY` means the token has not been made available, but the thread is not
-//!   currently waiting on it.
-//! * `PARKED` means the token is not available and the thread is parked.
-//! * `NOTIFIED` means the token is available.
-//!
-//! `park` and `park_timeout` change the state from `EMPTY` to `PARKED` and from
-//! `NOTIFIED` to `EMPTY`. If the state was `NOTIFIED`, the thread was unparked and
-//! execution can continue without calling into the OS. If the state was `EMPTY`,
-//! the token is not available and the thread waits on the primitive (here called
-//! "wait flag").
-//!
-//! `unpark` changes the state to `NOTIFIED`. If the state was `PARKED`, the thread
-//! is or will be sleeping on the wait flag, so we raise it.
-
-use crate::pin::Pin;
-use crate::sync::atomic::AtomicI8;
-use crate::sync::atomic::Ordering::{Acquire, Relaxed, Release};
-use crate::sys::wait_flag::WaitFlag;
-use crate::time::Duration;
-
-const EMPTY: i8 = 0;
-const PARKED: i8 = -1;
-const NOTIFIED: i8 = 1;
-
-pub struct Parker {
-    state: AtomicI8,
-    wait_flag: WaitFlag,
-}
-
-impl Parker {
-    /// Construct a parker for the current thread. The UNIX parker
-    /// implementation requires this to happen in-place.
-    pub unsafe fn new_in_place(parker: *mut Parker) {
-        parker.write(Parker { state: AtomicI8::new(EMPTY), wait_flag: WaitFlag::new() })
-    }
-
-    // This implementation doesn't require `unsafe` and `Pin`, but other implementations do.
-    pub unsafe fn park(self: Pin<&Self>) {
-        match self.state.fetch_sub(1, Acquire) {
-            // NOTIFIED => EMPTY
-            NOTIFIED => return,
-            // EMPTY => PARKED
-            EMPTY => (),
-            _ => panic!("inconsistent park state"),
-        }
-
-        // Avoid waking up from spurious wakeups (these are quite likely, see below).
-        loop {
-            self.wait_flag.wait();
-
-            match self.state.compare_exchange(NOTIFIED, EMPTY, Acquire, Relaxed) {
-                Ok(_) => return,
-                Err(PARKED) => (),
-                Err(_) => panic!("inconsistent park state"),
-            }
-        }
-    }
-
-    // This implementation doesn't require `unsafe` and `Pin`, but other implementations do.
-    pub unsafe fn park_timeout(self: Pin<&Self>, dur: Duration) {
-        match self.state.fetch_sub(1, Acquire) {
-            NOTIFIED => return,
-            EMPTY => (),
-            _ => panic!("inconsistent park state"),
-        }
-
-        self.wait_flag.wait_timeout(dur);
-
-        // Either a wakeup or a timeout occurred. Wakeups may be spurious, as there can be
-        // a race condition when `unpark` is performed between receiving the timeout and
-        // resetting the state, resulting in the eventflag being set unnecessarily. `park`
-        // is protected against this by looping until the token is actually given, but
-        // here we cannot easily tell.
-
-        // Use `swap` to provide acquire ordering.
-        match self.state.swap(EMPTY, Acquire) {
-            NOTIFIED => (),
-            PARKED => (),
-            _ => panic!("inconsistent park state"),
-        }
-    }
-
-    // This implementation doesn't require `Pin`, but other implementations do.
-    pub fn unpark(self: Pin<&Self>) {
-        let state = self.state.swap(NOTIFIED, Release);
-
-        if state == PARKED {
-            self.wait_flag.raise();
-        }
-    }
-}
tests/ui/type/type-check/point-at-inference-4.rs
new file mode 100644
@@ -0,0 +1,21 @@
+struct S<A, B>(Option<(A, B)>);
+
+impl<A, B> S<A, B> {
+    fn infer(&self, a: A, b: B) {}
+    //~^ NOTE associated function defined here
+    //~| NOTE
+    //~| NOTE
+}
+
+fn main() {
+    let s = S(None);
+    s.infer(0i32);
+    //~^ ERROR this method takes 2 arguments but 1 argument was supplied
+    //~| NOTE an argument is missing
+    //~| HELP provide the argument
+    let t: S<u32, _> = s;
+    //~^ ERROR mismatched types
+    //~| NOTE expected `S<u32, _>`, found `S<i32, _>`
+    //~| NOTE expected due to this
+    //~| NOTE expected struct `S<u32, _>`
+}
tests/ui/type/type-check/point-at-inference-4.stderr
new file mode 100644
@@ -0,0 +1,31 @@
+error[E0061]: this method takes 2 arguments but 1 argument was supplied
+  --> $DIR/point-at-inference-4.rs:12:7
+   |
+LL |     s.infer(0i32);
+   |       ^^^^^------ an argument is missing
+   |
+note: associated function defined here
+  --> $DIR/point-at-inference-4.rs:4:8
+   |
+LL |     fn infer(&self, a: A, b: B) {}
+   |        ^^^^^ ---- ----
+help: provide the argument
+   |
+LL |     s.infer(0i32, /* b */);
+   |       ~~~~~~~~~~~~~~~
+
+error[E0308]: mismatched types
+  --> $DIR/point-at-inference-4.rs:16:24
+   |
+LL |     let t: S<u32, _> = s;
+   |            --------- ^ expected `S<u32, _>`, found `S<i32, _>`
+   |            |
+   |            expected due to this
+   |
+   = note: expected struct `S<u32, _>`
+              found struct `S<i32, _>`
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0061, E0308.
+For more information about an error, try `rustc --explain E0061`.