mirror of https://github.com/rust-lang/rust.git, synced 2025-04-15 21:47:04 +00:00
Auto merge of #110440 - matthiaskrgr:rollup-eit19vi, r=matthiaskrgr
Rollup of 7 pull requests

Successful merges:

 - #110038 (Erase regions when confirming transmutability candidate)
 - #110341 (rustdoc: stop passing a title to `replaceState` second argument)
 - #110388 (Add a message for if an overflow occurs in `core::intrinsics::is_nonoverlapping`.)
 - #110404 (fix clippy::toplevel_ref_arg and ::manual_map)
 - #110421 (Spelling librustdoc)
 - #110423 (Spelling srcdoc)
 - #110433 (Windows: map a few more error codes to ErrorKind)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit bdb32bd4bb
@@ -141,13 +141,7 @@ fn parse_args<'a>(ecx: &mut ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<
                 args: args
                     .named_args()
                     .iter()
-                    .filter_map(|a| {
-                        if let Some(ident) = a.kind.ident() {
-                            Some((a, ident))
-                        } else {
-                            None
-                        }
-                    })
+                    .filter_map(|a| a.kind.ident().map(|ident| (a, ident)))
                     .map(|(arg, n)| n.span.to(arg.expr.span))
                     .collect(),
             });
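Note: several hunks in this rollup come from #110404, which applies clippy's `manual_map` rewrite: an `if let Some(..) { Some(..) } else { None }` expression becomes a single `Option::map` call. A minimal standalone sketch of the pattern (the helper and values here are illustrative, not taken from the diff):

```rust
fn first_char_upper(s: &str) -> Option<char> {
    // Hand-written mapping, the shape clippy::manual_map flags:
    // match s.chars().next() {
    //     Some(c) => Some(c.to_ascii_uppercase()),
    //     None => None,
    // }
    // Equivalent, shorter form using Option::map:
    s.chars().next().map(|c| c.to_ascii_uppercase())
}

fn main() {
    assert_eq!(first_char_upper("rust"), Some('R'));
    assert_eq!(first_char_upper(""), None);
}
```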
@@ -256,12 +256,9 @@ impl<K: Eq + Hash, V> SsoHashMap<K, V> {
     pub fn remove(&mut self, key: &K) -> Option<V> {
         match self {
             SsoHashMap::Array(array) => {
-                if let Some(index) = array.iter().position(|(k, _v)| k == key) {
-                    Some(array.swap_remove(index).1)
-                } else {
-                    None
-                }
+                array.iter().position(|(k, _v)| k == key).map(|index| array.swap_remove(index).1)
             }
             SsoHashMap::Map(map) => map.remove(key),
         }
     }
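The one-liner above combines `Iterator::position` with `Vec::swap_remove`, which removes in O(1) by moving the last element into the vacated slot (ordering is not preserved, which is fine for a small array-backed map). A self-contained sketch of the same idea on a plain `Vec<(K, V)>` (names are illustrative):

```rust
fn remove_by_key<K: PartialEq, V>(entries: &mut Vec<(K, V)>, key: &K) -> Option<V> {
    // Find the index of the matching key, then swap-remove the pair and keep only the value.
    entries.iter().position(|(k, _v)| k == key).map(|index| entries.swap_remove(index).1)
}

fn main() {
    let mut map = vec![("a", 1), ("b", 2), ("c", 3)];
    assert_eq!(remove_by_key(&mut map, &"b"), Some(2));
    assert_eq!(remove_by_key(&mut map, &"b"), None);
    // swap_remove moved the last entry into the removed slot, so order changed.
    assert_eq!(map, vec![("a", 1), ("c", 3)]);
}
```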
@@ -636,20 +636,14 @@ trait UnusedDelimLint {
             return;
         }
         let spans = match value.kind {
-            ast::ExprKind::Block(ref block, None) if block.stmts.len() == 1 => {
-                if let Some(span) = block.stmts[0].span.find_ancestor_inside(value.span) {
-                    Some((value.span.with_hi(span.lo()), value.span.with_lo(span.hi())))
-                } else {
-                    None
-                }
-            }
+            ast::ExprKind::Block(ref block, None) if block.stmts.len() == 1 => block.stmts[0]
+                .span
+                .find_ancestor_inside(value.span)
+                .map(|span| (value.span.with_hi(span.lo()), value.span.with_lo(span.hi()))),
             ast::ExprKind::Paren(ref expr) => {
-                let expr_span = expr.span.find_ancestor_inside(value.span);
-                if let Some(expr_span) = expr_span {
-                    Some((value.span.with_hi(expr_span.lo()), value.span.with_lo(expr_span.hi())))
-                } else {
-                    None
-                }
+                expr.span.find_ancestor_inside(value.span).map(|expr_span| {
+                    (value.span.with_hi(expr_span.lo()), value.span.with_lo(expr_span.hi()))
+                })
             }
             _ => return,
         };
@@ -928,11 +922,10 @@ impl UnusedParens {
             // Otherwise proceed with linting.
             _ => {}
         }
-        let spans = if let Some(inner) = inner.span.find_ancestor_inside(value.span) {
-            Some((value.span.with_hi(inner.lo()), value.span.with_lo(inner.hi())))
-        } else {
-            None
-        };
+        let spans = inner
+            .span
+            .find_ancestor_inside(value.span)
+            .map(|inner| (value.span.with_hi(inner.lo()), value.span.with_lo(inner.hi())));
         self.emit_unused_delims(cx, value.span, spans, "pattern", keep_space);
     }
 }
@@ -1043,11 +1036,11 @@ impl EarlyLintPass for UnusedParens {
                 if self.with_self_ty_parens && b.generic_params.len() > 0 => {}
             ast::TyKind::ImplTrait(_, bounds) if bounds.len() > 1 => {}
             _ => {
-                let spans = if let Some(r) = r.span.find_ancestor_inside(ty.span) {
-                    Some((ty.span.with_hi(r.lo()), ty.span.with_lo(r.hi())))
-                } else {
-                    None
-                };
+                let spans = r
+                    .span
+                    .find_ancestor_inside(ty.span)
+                    .map(|r| (ty.span.with_hi(r.lo()), ty.span.with_lo(r.hi())));

                 self.emit_unused_delims(cx, ty.span, spans, "type", (false, false));
             }
         }
@@ -493,7 +493,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
         cond: &Operand<'tcx>,
         location: Location,
     ) -> Option<!> {
-        let ref value = self.eval_operand(&cond, location)?;
+        let value = &self.eval_operand(&cond, location)?;
         trace!("assertion on {:?} should be {:?}", value, expected);

         let expected = Scalar::from_bool(expected);
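This hunk is the `clippy::toplevel_ref_arg` half of #110404: `let ref x = expr;` binds by reference through the pattern, and the lint prefers taking the reference on the right-hand side instead. Both forms produce a `&T` binding; a minimal sketch with illustrative values:

```rust
fn main() {
    // Pattern-side borrow, the form the lint flags:
    let ref a = vec![1, 2, 3];
    // Preferred spelling with the same meaning:
    let b = &vec![1, 2, 3];

    // Both bindings are &Vec<i32>.
    let total: i32 = a.iter().chain(b.iter()).sum();
    assert_eq!(total, 12);
}
```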
@@ -45,10 +45,10 @@ impl<'a> Parser<'a> {
                 Some(InnerAttrForbiddenReason::AfterOuterDocComment {
                     prev_doc_comment_span: prev_outer_attr_sp.unwrap(),
                 })
-            } else if let Some(prev_outer_attr_sp) = prev_outer_attr_sp {
-                Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp })
             } else {
-                None
+                prev_outer_attr_sp.map(|prev_outer_attr_sp| {
+                    InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }
+                })
             };
             let inner_parse_policy = InnerAttrPolicy::Forbidden(inner_error_reason);
             just_parsed_doc_comment = false;
@@ -1869,15 +1869,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
                     Some(LexicalScopeBinding::Item(name_binding)) => Some(name_binding.span),
                     _ => None,
                 };
-                let suggestion = if let Some(span) = match_span {
-                    Some((
+                let suggestion = match_span.map(|span| {
+                    (
                         vec![(span, String::from(""))],
                         format!("`{}` is defined here, but is not a type", ident),
                         Applicability::MaybeIncorrect,
-                    ))
-                } else {
-                    None
-                };
+                    )
+                });

                 (format!("use of undeclared type `{}`", ident), suggestion)
             } else {
@@ -1663,10 +1663,11 @@ impl SourceFile {

         if let Some(ref src) = self.src {
             Some(Cow::from(get_until_newline(src, begin)))
-        } else if let Some(src) = self.external_src.borrow().get_source() {
-            Some(Cow::Owned(String::from(get_until_newline(src, begin))))
         } else {
-            None
+            self.external_src
+                .borrow()
+                .get_source()
+                .map(|src| Cow::Owned(String::from(get_until_newline(src, begin))))
         }
     }

@@ -906,10 +906,8 @@ impl SourceMap {

         let snippet = if let Some(ref src) = local_begin.sf.src {
             Some(&src[start_index..])
-        } else if let Some(src) = src.get_source() {
-            Some(&src[start_index..])
         } else {
-            None
+            src.get_source().map(|src| &src[start_index..])
         };

         match snippet {
@@ -649,7 +649,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
         // FIXME(transmutability): This really should be returning nested goals for `Answer::If*`
         match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable(
             ObligationCause::dummy(),
-            ty::Binder::dummy(src_and_dst),
+            src_and_dst,
             scope,
             assume,
         ) {
@@ -742,7 +742,6 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
         {
             // Recompute the safe transmute reason and use that for the error reporting
             self.get_safe_transmute_error_and_reason(
-                trait_predicate,
                 obligation.clone(),
                 trait_ref,
                 span,
@@ -1629,7 +1628,6 @@ trait InferCtxtPrivExt<'tcx> {

     fn get_safe_transmute_error_and_reason(
         &self,
-        trait_predicate: ty::Binder<'tcx, ty::TraitPredicate<'tcx>>,
         obligation: Obligation<'tcx, ty::Predicate<'tcx>>,
         trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
         span: Span,
@@ -2921,18 +2919,20 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {

     fn get_safe_transmute_error_and_reason(
         &self,
-        trait_predicate: ty::Binder<'tcx, ty::TraitPredicate<'tcx>>,
         obligation: Obligation<'tcx, ty::Predicate<'tcx>>,
         trait_ref: ty::Binder<'tcx, ty::TraitRef<'tcx>>,
         span: Span,
     ) -> (String, Option<String>) {
-        let src_and_dst = trait_predicate.map_bound(|p| rustc_transmute::Types {
-            dst: p.trait_ref.substs.type_at(0),
-            src: p.trait_ref.substs.type_at(1),
-        });
-        let scope = trait_ref.skip_binder().substs.type_at(2);
+        // Erase regions because layout code doesn't particularly care about regions.
+        let trait_ref = self.tcx.erase_regions(self.tcx.erase_late_bound_regions(trait_ref));
+
+        let src_and_dst = rustc_transmute::Types {
+            dst: trait_ref.substs.type_at(0),
+            src: trait_ref.substs.type_at(1),
+        };
+        let scope = trait_ref.substs.type_at(2);
         let Some(assume) =
-            rustc_transmute::Assume::from_const(self.infcx.tcx, obligation.param_env, trait_ref.skip_binder().substs.const_at(3)) else {
+            rustc_transmute::Assume::from_const(self.infcx.tcx, obligation.param_env, trait_ref.substs.const_at(3)) else {
             span_bug!(span, "Unable to construct rustc_transmute::Assume where it was previously possible");
         };
         match rustc_transmute::TransmuteTypeEnv::new(self.infcx).is_transmutable(
@@ -2942,8 +2942,8 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             assume,
         ) {
             rustc_transmute::Answer::No(reason) => {
-                let dst = trait_ref.skip_binder().substs.type_at(0);
-                let src = trait_ref.skip_binder().substs.type_at(1);
+                let dst = trait_ref.substs.type_at(0);
+                let src = trait_ref.substs.type_at(1);
                 let custom_err_msg = format!(
                     "`{src}` cannot be safely transmuted into `{dst}` in the defining scope of `{scope}`"
                 );
@@ -275,33 +275,35 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
     ) -> Result<ImplSourceBuiltinData<PredicateObligation<'tcx>>, SelectionError<'tcx>> {
         debug!(?obligation, "confirm_transmutability_candidate");

-        let predicate = obligation.predicate;
+        // We erase regions here because transmutability calls layout queries,
+        // which does not handle inference regions and doesn't particularly
+        // care about other regions. Erasing late-bound regions is equivalent
+        // to instantiating the binder with placeholders then erasing those
+        // placeholder regions.
+        let predicate =
+            self.tcx().erase_regions(self.tcx().erase_late_bound_regions(obligation.predicate));

-        let type_at = |i| predicate.map_bound(|p| p.trait_ref.substs.type_at(i));
-        let const_at = |i| predicate.skip_binder().trait_ref.substs.const_at(i);
-
-        let src_and_dst = predicate.map_bound(|p| rustc_transmute::Types {
-            dst: p.trait_ref.substs.type_at(0),
-            src: p.trait_ref.substs.type_at(1),
-        });
-
-        let scope = type_at(2).skip_binder();
-
-        let Some(assume) =
-            rustc_transmute::Assume::from_const(self.infcx.tcx, obligation.param_env, const_at(3)) else {
-            return Err(Unimplemented);
-        };
-
-        let cause = obligation.cause.clone();
+        let Some(assume) = rustc_transmute::Assume::from_const(
+            self.infcx.tcx,
+            obligation.param_env,
+            predicate.trait_ref.substs.const_at(3)
+        ) else {
+            return Err(Unimplemented);
+        };

         let mut transmute_env = rustc_transmute::TransmuteTypeEnv::new(self.infcx);

-        let maybe_transmutable = transmute_env.is_transmutable(cause, src_and_dst, scope, assume);
-
-        use rustc_transmute::Answer;
+        let maybe_transmutable = transmute_env.is_transmutable(
+            obligation.cause.clone(),
+            rustc_transmute::Types {
+                dst: predicate.trait_ref.substs.type_at(0),
+                src: predicate.trait_ref.substs.type_at(1),
+            },
+            predicate.trait_ref.substs.type_at(2),
+            assume,
+        );

         match maybe_transmutable {
-            Answer::Yes => Ok(ImplSourceBuiltinData { nested: vec![] }),
+            rustc_transmute::Answer::Yes => Ok(ImplSourceBuiltinData { nested: vec![] }),
             _ => Err(Unimplemented),
         }
     }
@@ -243,16 +243,11 @@ pub fn get_vtable_index_of_object_method<'tcx, N>(
 ) -> Option<usize> {
     // Count number of methods preceding the one we are selecting and
     // add them to the total offset.
-    if let Some(index) = tcx
-        .own_existential_vtable_entries(object.upcast_trait_ref.def_id())
+    tcx.own_existential_vtable_entries(object.upcast_trait_ref.def_id())
         .iter()
         .copied()
         .position(|def_id| def_id == method_def_id)
-    {
-        Some(object.vtable_base + index)
-    } else {
-        None
-    }
+        .map(|index| object.vtable_base + index)
 }

 pub fn closure_trait_ref_and_return_type<'tcx>(
@@ -64,7 +64,6 @@ mod rustc {
     use rustc_infer::infer::InferCtxt;
     use rustc_macros::{TypeFoldable, TypeVisitable};
     use rustc_middle::traits::ObligationCause;
-    use rustc_middle::ty::Binder;
     use rustc_middle::ty::Const;
     use rustc_middle::ty::ParamEnv;
     use rustc_middle::ty::Ty;
@@ -92,15 +91,13 @@ mod rustc {
         pub fn is_transmutable(
             &mut self,
             cause: ObligationCause<'tcx>,
-            src_and_dst: Binder<'tcx, Types<'tcx>>,
+            types: Types<'tcx>,
             scope: Ty<'tcx>,
             assume: crate::Assume,
         ) -> crate::Answer<crate::layout::rustc::Ref<'tcx>> {
-            let src = src_and_dst.map_bound(|types| types.src).skip_binder();
-            let dst = src_and_dst.map_bound(|types| types.dst).skip_binder();
             crate::maybe_transmutable::MaybeTransmutableQuery::new(
-                src,
-                dst,
+                types.src,
+                types.dst,
                 scope,
                 assume,
                 self.infcx.tcx,
@@ -234,15 +234,12 @@ fn resolve_associated_item<'tcx>(
             _ => None,
         },
         traits::ImplSource::Object(ref data) => {
-            if let Some(index) = traits::get_vtable_index_of_object_method(tcx, data, trait_item_id)
-            {
-                Some(Instance {
+            traits::get_vtable_index_of_object_method(tcx, data, trait_item_id).map(|index| {
+                Instance {
                     def: ty::InstanceDef::Virtual(trait_item_id, index),
                     substs: rcvr_substs,
-                })
-            } else {
-                None
-            }
+                }
+            })
         }
         traits::ImplSource::Builtin(..) => {
             let lang_items = tcx.lang_items();
@@ -13,7 +13,7 @@ use rustc_trait_selection::traits::{ObligationCause, ObligationCtxt};
 /// Note that this does *not* recursively check if the substructure of `adt_ty`
 /// implements the traits.
 fn has_structural_eq_impls<'tcx>(tcx: TyCtxt<'tcx>, adt_ty: Ty<'tcx>) -> bool {
-    let ref infcx = tcx.infer_ctxt().build();
+    let infcx = &tcx.infer_ctxt().build();
     let cause = ObligationCause::dummy();

     let ocx = ObligationCtxt::new(infcx);
@@ -2519,7 +2519,9 @@ pub(crate) fn is_valid_allocation_size<T>(len: usize) -> bool {
 pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
     let src_usize = src.addr();
     let dst_usize = dst.addr();
-    let size = mem::size_of::<T>().checked_mul(count).unwrap();
+    let size = mem::size_of::<T>()
+        .checked_mul(count)
+        .expect("is_nonoverlapping: `size_of::<T>() * count` overflows a usize");
     let diff = if src_usize > dst_usize { src_usize - dst_usize } else { dst_usize - src_usize };
     // If the absolute distance between the ptrs is at least as big as the size of the buffer,
     // they do not overlap.
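#110388 only changes what happens when `size_of::<T>() * count` overflows: `checked_mul` already returns `None` in that case, and `expect` now turns it into a panic with a descriptive message instead of a bare `unwrap` failure. A small sketch of the same guard outside `core` (the function name is illustrative):

```rust
use std::mem;

/// Total size in bytes of `count` values of `T`, panicking with a descriptive
/// message if the multiplication would overflow a `usize`.
fn byte_len<T>(count: usize) -> usize {
    mem::size_of::<T>()
        .checked_mul(count)
        .expect("byte_len: `size_of::<T>() * count` overflows a usize")
}

fn main() {
    assert_eq!(byte_len::<u32>(10), 40);
    // byte_len::<u64>(usize::MAX) would panic with the message above rather
    // than with an uninformative `Option::unwrap()` failure.
}
```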
@@ -68,10 +68,13 @@ pub fn decode_error_kind(errno: i32) -> ErrorKind {
         c::ERROR_ALREADY_EXISTS => return AlreadyExists,
         c::ERROR_FILE_EXISTS => return AlreadyExists,
         c::ERROR_BROKEN_PIPE => return BrokenPipe,
-        c::ERROR_FILE_NOT_FOUND => return NotFound,
-        c::ERROR_PATH_NOT_FOUND => return NotFound,
+        c::ERROR_FILE_NOT_FOUND
+        | c::ERROR_PATH_NOT_FOUND
+        | c::ERROR_INVALID_DRIVE
+        | c::ERROR_BAD_NETPATH
+        | c::ERROR_BAD_NET_NAME => return NotFound,
         c::ERROR_NO_DATA => return BrokenPipe,
-        c::ERROR_INVALID_NAME => return InvalidFilename,
+        c::ERROR_INVALID_NAME | c::ERROR_BAD_PATHNAME => return InvalidFilename,
         c::ERROR_INVALID_PARAMETER => return InvalidInput,
         c::ERROR_NOT_ENOUGH_MEMORY | c::ERROR_OUTOFMEMORY => return OutOfMemory,
         c::ERROR_SEM_TIMEOUT
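#110433 widens the match so that drive and network-path failures (`ERROR_INVALID_DRIVE`, `ERROR_BAD_NETPATH`, `ERROR_BAD_NET_NAME`) also decode to `ErrorKind::NotFound`, and `ERROR_BAD_PATHNAME` joins `ERROR_INVALID_NAME` as `InvalidFilename`. The mapping is visible through `std::io::Error::from_raw_os_error`; a hedged, Windows-only sketch (raw code 3 is `ERROR_PATH_NOT_FOUND`, which already mapped to `NotFound` before this change):

```rust
#[cfg(windows)]
fn main() {
    use std::io;

    // decode_error_kind is what gives this raw OS error its ErrorKind.
    let err = io::Error::from_raw_os_error(3);
    assert_eq!(err.kind(), io::ErrorKind::NotFound);
}

#[cfg(not(windows))]
fn main() {}
```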
@@ -117,7 +117,7 @@ $ ls formatjson5.profraw
 formatjson5.profraw
 ```

-If `LLVM_PROFILE_FILE` contains a path to a non-existent directory, the missing directory structure will be created. Additionally, the following special pattern strings are rewritten:
+If `LLVM_PROFILE_FILE` contains a path to a nonexistent directory, the missing directory structure will be created. Additionally, the following special pattern strings are rewritten:

 - `%p` - The process ID.
 - `%h` - The hostname of the machine running the program.
@@ -61,7 +61,7 @@ Diagnostics have the following format:
     /* The file where the span is located.
        Note that this path may not exist. For example, if the path
        points to the standard library, and the rust src is not
-       available in the sysroot, then it may point to a non-existent
+       available in the sysroot, then it may point to a nonexistent
       file. Beware that this may also point to the source of an
       external crate.
    */
@@ -66,7 +66,7 @@ After completing these steps you can use rust normally in a native environment.

 To cross compile, you'll need to:

-* Build the rust cross toochain using [rust-bootstrap-armv7-unknown-linux-uclibceabi](https://github.com/lancethepants/rust-bootstrap-armv7-unknown-linux-uclibceabi) or your own built toolchain.
+* Build the rust cross toolchain using [rust-bootstrap-armv7-unknown-linux-uclibceabi](https://github.com/lancethepants/rust-bootstrap-armv7-unknown-linux-uclibceabi) or your own built toolchain.
 * Link your built toolchain with

 ```text
@@ -123,7 +123,7 @@ There are 3 common ways to compile native C code for UEFI targets:
   targets. Be wary of any includes that are not specifically suitable for UEFI
   targets (especially the C standard library includes are not always
   compatible). Freestanding compilations are recommended to avoid
-  incompatibilites.
+  incompatibilities.

 ## Ecosystem

@@ -6,7 +6,7 @@ following principles (in rough priority order):
 * readability
   - scan-ability
   - avoiding misleading formatting
-  - accessibility - readable and editable by users using the the widest
+  - accessibility - readable and editable by users using the widest
     variety of hardware, including non-visual accessibility interfaces
   - readability of code in contexts without syntax highlighting or IDE
     assistance, such as rustc error messages, diffs, grep, and other
@@ -6,7 +6,7 @@
 * `[T; expr]`, e.g., `[u32; 42]`, `[Vec<Foo>; 10 * 2 + foo()]` (space after colon, no spaces around square brackets)
 * `*const T`, `*mut T` (no space after `*`, space before type)
 * `&'a T`, `&T`, `&'a mut T`, `&mut T` (no space after `&`, single spaces separating other words)
-* `unsafe extern "C" fn<'a, 'b, 'c>(T, U, V) -> W` or `fn()` (single spaces around keyowrds and sigils, and after commas, no trailing commas, no spaces around brackets)
+* `unsafe extern "C" fn<'a, 'b, 'c>(T, U, V) -> W` or `fn()` (single spaces around keywords and sigils, and after commas, no trailing commas, no spaces around brackets)
 * `!` should be treated like any other type name, `Name`
 * `(A, B, C, D)` (spaces after commas, no spaces around parens, no trailing comma unless it is a one-tuple)
 * `<Baz<T> as SomeTrait>::Foo::Bar` or `Foo::Bar` or `::Foo::Bar` (no spaces around `::` or angle brackets, single spaces around `as`)
@@ -202,5 +202,5 @@ fn shoot_lasers() {}

 #[cfg(feature = "monkeys")] // This is UNEXPECTED, because "monkeys" is not in
                             // the values(feature) list
-fn write_shakespear() {}
+fn write_shakespeare() {}
 ```
@@ -6,5 +6,5 @@ The `-Zmove-size-limit=N` compiler flag enables `large_assignments` lints which
 will warn when moving objects whose size exceeds `N` bytes.

 Lint warns only about moves in functions that participate in code generation.
-Consequently it will be ineffective for compiler invocatation that emit
+Consequently it will be ineffective for compiler invocation that emit
 metadata only, i.e., `cargo check` like workflows.
@@ -65,7 +65,7 @@ pub union GenericUnion<T: Copy> { // Unions with non-`Copy` fields are unstable.
 pub const THIS_IS_OKAY: GenericUnion<()> = GenericUnion { field: () };
 ```

-Like transarent `struct`s, a transparent `union` of type `U` has the same
+Like transparent `struct`s, a transparent `union` of type `U` has the same
 layout, size, and ABI as its single non-ZST field. If it is generic over a type
 `T`, and all its fields are ZSTs except for exactly one field of type `T`, then
 it has the same layout and ABI as `T` (even if `T` is a ZST when monomorphized).
@@ -528,7 +528,7 @@ pub(crate) fn build_impl(
             items: trait_items,
             polarity,
             kind: if utils::has_doc_flag(tcx, did, sym::fake_variadic) {
-                ImplKind::FakeVaradic
+                ImplKind::FakeVariadic
             } else {
                 ImplKind::Normal
             },
@@ -2356,7 +2356,7 @@ fn clean_impl<'tcx>(
         items,
         polarity: tcx.impl_polarity(def_id),
         kind: if utils::has_doc_flag(tcx, def_id.to_def_id(), sym::fake_variadic) {
-            ImplKind::FakeVaradic
+            ImplKind::FakeVariadic
         } else {
            ImplKind::Normal
        },
@@ -156,7 +156,7 @@ impl ExternalCrate {
     }

     /// Attempts to find where an external crate is located, given that we're
-    /// rendering in to the specified source destination.
+    /// rendering into the specified source destination.
     pub(crate) fn location(
         &self,
         extern_url: Option<&str>,
@@ -751,7 +751,7 @@ pub(crate) enum ItemKind {
     PrimitiveItem(PrimitiveType),
     /// A required associated constant in a trait declaration.
     TyAssocConstItem(Type),
-    /// An associated associated constant in a trait impl or a provided one in a trait declaration.
+    /// An associated constant in a trait impl or a provided one in a trait declaration.
     AssocConstItem(Type, ConstantKind),
     /// A required associated type in a trait declaration.
     ///
@@ -2305,7 +2305,7 @@ impl Impl {
 pub(crate) enum ImplKind {
     Normal,
     Auto,
-    FakeVaradic,
+    FakeVariadic,
     Blanket(Box<Type>),
 }

@@ -2319,7 +2319,7 @@ impl ImplKind {
     }

     pub(crate) fn is_fake_variadic(&self) -> bool {
-        matches!(self, ImplKind::FakeVaradic)
+        matches!(self, ImplKind::FakeVariadic)
     }

     pub(crate) fn as_blanket_ty(&self) -> Option<&Type> {
@@ -491,7 +491,7 @@ impl Options {
         // https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/dataset
         //
         // The original key values we have are the same as the DOM storage API keys and the
-        // command line options, so contain `-`. Our Javascript needs to be able to look
+        // command line options, so contain `-`. Our JavaScript needs to be able to look
         // these values up both in `dataset` and in the storage API, so it needs to be able
         // to convert the names back and forth. Despite doing this kebab-case to
         // StudlyCaps transformation automatically, the JS DOM API does not provide a
@@ -300,14 +300,13 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
                     ParentStackItem::Impl { for_, .. } => for_.def_id(&self.cache),
                     ParentStackItem::Type(item_id) => item_id.as_def_id(),
                 };
-                let path = match did.and_then(|did| self.cache.paths.get(&did)) {
+                let path = did
+                    .and_then(|did| self.cache.paths.get(&did))
                     // The current stack not necessarily has correlation
                     // for where the type was defined. On the other
                     // hand, `paths` always has the right
                     // information if present.
-                    Some((fqp, _)) => Some(&fqp[..fqp.len() - 1]),
-                    None => None,
-                };
+                    .map(|(fqp, _)| &fqp[..fqp.len() - 1]);
                 ((did, path), true)
             }
         }
@@ -514,7 +514,7 @@ struct Classifier<'src> {

 impl<'src> Classifier<'src> {
     /// Takes as argument the source code to HTML-ify, the rust edition to use and the source code
-    /// file span which will be used later on by the `span_correspondance_map`.
+    /// file span which will be used later on by the `span_correspondence_map`.
     fn new(src: &str, file_span: Span, decoration_info: Option<DecorationInfo>) -> Classifier<'_> {
         let tokens = PeekIter::new(TokenIter { src, cursor: Cursor::new(src) });
         let decorations = decoration_info.map(Decorations::new);
@@ -649,7 +649,7 @@ impl<'src> Classifier<'src> {
     ///
     /// `before` is the position of the given token in the `source` string and is used as "lo" byte
     /// in case we want to try to generate a link for this token using the
-    /// `span_correspondance_map`.
+    /// `span_correspondence_map`.
     fn advance(
         &mut self,
         token: TokenKind,
@@ -895,7 +895,7 @@ fn exit_span(out: &mut impl Write, closing_tag: &str) {
 /// flexible.
 ///
 /// Note that if `context` is not `None` and that the given `klass` contains a `Span`, the function
-/// will then try to find this `span` in the `span_correspondance_map`. If found, it'll then
+/// will then try to find this `span` in the `span_correspondence_map`. If found, it'll then
 /// generate a link for this element (which corresponds to where its definition is located).
 fn string<T: Display>(
     out: &mut impl Write,
@@ -916,7 +916,7 @@ fn string<T: Display>(
 /// * If `klass` is `Some` but `klass.get_span()` is `None`, it writes the text wrapped in a
 ///   `<span>` with the provided `klass`.
 /// * If `klass` is `Some` and has a [`rustc_span::Span`], it then tries to generate a link (`<a>`
-/// element) by retrieving the link information from the `span_correspondance_map` that was filled
+/// element) by retrieving the link information from the `span_correspondence_map` that was filled
 /// in `span_map.rs::collect_spans_and_sources`. If it cannot retrieve the information, then it's
 /// the same as the second point (`klass` is `Some` but doesn't have a [`rustc_span::Span`]).
 fn string_without_closing_tag<T: Display>(
@@ -963,7 +963,7 @@ fn string_without_closing_tag<T: Display>(

     if let Some(href_context) = href_context {
         if let Some(href) =
-            href_context.context.shared.span_correspondance_map.get(&def_span).and_then(|href| {
+            href_context.context.shared.span_correspondence_map.get(&def_span).and_then(|href| {
                 let context = href_context.context;
                 // FIXME: later on, it'd be nice to provide two links (if possible) for all items:
                 // one to the documentation page and one to the source definition.
@@ -1392,7 +1392,7 @@ static DEFAULT_ID_MAP: Lazy<FxHashMap<Cow<'static, str>, usize>> = Lazy::new(||

 fn init_id_map() -> FxHashMap<Cow<'static, str>, usize> {
     let mut map = FxHashMap::default();
-    // This is the list of IDs used in Javascript.
+    // This is the list of IDs used in JavaScript.
     map.insert("help".into(), 1);
     map.insert("settings".into(), 1);
     map.insert("not-displayed".into(), 1);
@@ -122,9 +122,9 @@ pub(crate) struct SharedContext<'tcx> {
     /// the crate.
     redirections: Option<RefCell<FxHashMap<String, String>>>,

-    /// Correspondance map used to link types used in the source code pages to allow to click on
+    /// Correspondence map used to link types used in the source code pages to allow to click on
     /// links to jump to the type's definition.
-    pub(crate) span_correspondance_map: FxHashMap<rustc_span::Span, LinkFromSrc>,
+    pub(crate) span_correspondence_map: FxHashMap<rustc_span::Span, LinkFromSrc>,
     /// The [`Cache`] used during rendering.
     pub(crate) cache: Cache,

@@ -531,7 +531,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
             errors: receiver,
             redirections: if generate_redirect_map { Some(Default::default()) } else { None },
             show_type_layout,
-            span_correspondance_map: matches,
+            span_correspondence_map: matches,
             cache,
             call_locations,
         };
@@ -647,7 +647,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
                         </div>\
                         <noscript>\
                             <section>\
-                                You need to enable Javascript be able to update your settings.\
+                                You need to enable JavaScript be able to update your settings.\
                             </section>\
                         </noscript>\
                         <link rel=\"stylesheet\" \
@@ -709,7 +709,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
                     </div>\
                     <noscript>\
                         <section>\
-                            <p>You need to enable Javascript to use keyboard commands or search.</p>\
+                            <p>You need to enable JavaScript to use keyboard commands or search.</p>\
                             <p>For more information, browse the <a href=\"https://doc.rust-lang.org/rustdoc/\">rustdoc handbook</a>.</p>\
                         </section>\
                     </noscript>",
@@ -275,8 +275,7 @@ function preLoadCss(cssUrl) {
             document.title = searchState.titleBeforeSearch;
             // We also remove the query parameter from the URL.
             if (browserSupportsHistoryApi()) {
-                history.replaceState(null, window.currentCrate + " - Rust",
-                    getNakedUrl() + window.location.hash);
+                history.replaceState(null, "", getNakedUrl() + window.location.hash);
             }
         },
         getQueryStringParams: () => {
@@ -378,8 +377,7 @@ function preLoadCss(cssUrl) {
             searchState.clearInputTimeout();
             switchDisplayedElement(null);
             if (browserSupportsHistoryApi()) {
-                history.replaceState(null, window.currentCrate + " - Rust",
-                    getNakedUrl() + window.location.hash);
+                history.replaceState(null, "", getNakedUrl() + window.location.hash);
             }
             ev.preventDefault();
             searchState.defocus();
@@ -665,7 +665,7 @@ impl FromWithTcx<clean::Impl> for Impl {
         let clean::Impl { unsafety, generics, trait_, for_, items, polarity, kind } = impl_;
         // FIXME: use something like ImplKind in JSON?
         let (synthetic, blanket_impl) = match kind {
-            clean::ImplKind::Normal | clean::ImplKind::FakeVaradic => (false, None),
+            clean::ImplKind::Normal | clean::ImplKind::FakeVariadic => (false, None),
             clean::ImplKind::Auto => (true, None),
             clean::ImplKind::Blanket(ty) => (false, Some(*ty)),
         };
@@ -740,7 +740,7 @@ impl FromWithTcx<clean::Variant> for Variant {
 impl FromWithTcx<clean::Discriminant> for Discriminant {
     fn from_tcx(disr: clean::Discriminant, tcx: TyCtxt<'_>) -> Self {
         Discriminant {
-            // expr is only none if going through the inlineing path, which gets
+            // expr is only none if going through the inlining path, which gets
             // `rustc_middle` types, not `rustc_hir`, but because JSON never inlines
             // the expr is always some.
             expr: disr.expr(tcx).unwrap(),
@@ -286,7 +286,7 @@ pub(crate) fn run(
     let (cx, _) = Context::init(krate, renderopts, cache, tcx).map_err(|e| e.to_string())?;

     // Collect CrateIds corresponding to provided target crates
-    // If two different versions of the crate in the dependency tree, then examples will be collcted from both.
+    // If two different versions of the crate in the dependency tree, then examples will be collected from both.
     let all_crates = tcx
         .crates(())
         .iter()
@@ -13,11 +13,11 @@ rule d
 // another line comment
 e {}

-rule f/* a multine
+rule f/* a multiline

 comment*/{}

-rule g/* another multine
+rule g/* another multiline

 comment*/h

@@ -42,7 +42,7 @@ function parseOptions(args) {
         "executable_path": null,
         "no_sandbox": false,
     };
-    const correspondances = {
+    const correspondences = {
         "--doc-folder": "doc_folder",
         "--tests-folder": "tests_folder",
         "--debug": "debug",
@@ -73,7 +73,7 @@ function parseOptions(args) {
                 }
                 opts["jobs"] = parseInt(arg_value);
             } else if (arg !== "--file") {
-                opts[correspondances[arg]] = arg_value;
+                opts[correspondences[arg]] = arg_value;
             } else {
                 opts["files"].push(arg_value);
             }
@@ -82,9 +82,9 @@ function parseOptions(args) {
             process.exit(0);
         } else if (arg === "--no-sandbox") {
             console.log("`--no-sandbox` is being used. Be very careful!");
-            opts[correspondances[arg]] = true;
-        } else if (correspondances[arg]) {
-            opts[correspondances[arg]] = true;
+            opts[correspondences[arg]] = true;
+        } else if (correspondences[arg]) {
+            opts[correspondences[arg]] = true;
         } else {
             console.log("Unknown option `" + arg + "`.");
             console.log("Use `--help` to see the list of options");
@@ -1,8 +1,8 @@
-error[E0277]: `&'static Unit` cannot be safely transmuted into `&'static Unit` in the defining scope of `assert::Context`
+error[E0277]: `&Unit` cannot be safely transmuted into `&Unit` in the defining scope of `assert::Context`
   --> $DIR/references.rs:29:52
    |
 LL |     assert::is_maybe_transmutable::<&'static Unit, &'static Unit>();
-   |                                                    ^^^^^^^^^^^^^ `&'static Unit` does not have a well-specified layout
+   |                                                    ^^^^^^^^^^^^^ `&Unit` does not have a well-specified layout
    |
 note: required by a bound in `is_maybe_transmutable`
   --> $DIR/references.rs:16:14
@@ -1,8 +1,8 @@
-error[E0277]: `&'static Unit` cannot be safely transmuted into `&'static Unit` in the defining scope of `assert::Context`
+error[E0277]: `&Unit` cannot be safely transmuted into `&Unit` in the defining scope of `assert::Context`
   --> $DIR/references.rs:29:52
    |
 LL |     assert::is_maybe_transmutable::<&'static Unit, &'static Unit>();
-   |                                                    ^^^^^^^^^^^^^ `&'static Unit` does not have a well-specified layout
+   |                                                    ^^^^^^^^^^^^^ `&Unit` does not have a well-specified layout
    |
 note: required by a bound in `is_maybe_transmutable`
   --> $DIR/references.rs:16:14
tests/ui/transmutability/region-infer.rs (new file, +22 lines)
@@ -0,0 +1,22 @@
+#![feature(transmutability)]
+
+use std::mem::{Assume, BikeshedIntrinsicFrom};
+pub struct Context;
+
+#[repr(C)]
+struct W<'a>(&'a ());
+
+fn test<'a>()
+where
+    W<'a>: BikeshedIntrinsicFrom<
+        (),
+        Context,
+        { Assume { alignment: true, lifetimes: true, safety: true, validity: true } },
+    >,
+{
+}
+
+fn main() {
+    test();
+    //~^ ERROR `()` cannot be safely transmuted into `W<'_>`
+}
tests/ui/transmutability/region-infer.stderr (new file, +23 lines)
@@ -0,0 +1,23 @@
+error[E0277]: `()` cannot be safely transmuted into `W<'_>` in the defining scope of `Context`
+  --> $DIR/region-infer.rs:20:5
+   |
+LL |     test();
+   |     ^^^^ `W<'_>` does not have a well-specified layout
+   |
+note: required by a bound in `test`
+  --> $DIR/region-infer.rs:11:12
+   |
+LL |   fn test<'a>()
+   |      ---- required by a bound in this function
+LL |   where
+LL |       W<'a>: BikeshedIntrinsicFrom<
+   |  ____________^
+LL | |         (),
+LL | |         Context,
+LL | |         { Assume { alignment: true, lifetimes: true, safety: true, validity: true } },
+LL | |     >,
+   | |_________^ required by this bound in `test`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.