Mirror of https://github.com/rust-lang/rust.git (synced 2025-06-05 03:38:29 +00:00)
Auto merge of #135286 - matthiaskrgr:rollup-sxuq1nh, r=matthiaskrgr
Rollup of 3 pull requests

Successful merges:

 - #134898 (Make it easier to run CI jobs locally)
 - #135195 (Make `lit_to_mir_constant` and `lit_to_const` infallible)
 - #135261 (Account for identity substituted items in symbol mangling)

r? `@ghost`
`@rustbot` modify labels: rollup
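For context on the first of these merges (#134898), the rollup adds a `run-local` entry point to the CI script. A minimal invocation, using a job name that appears in the documentation changes further down in this diff, looks like:

```
python3 src/ci/github-actions/ci.py run-local dist-x86_64-linux-alt
```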
commit 8247594932
.github/workflows/ci.yml (vendored, 4 lines changed)
@ -2,7 +2,7 @@
# and also on pushes to special branches (auto, try).
#
# The actual definition of the executed jobs is calculated by a Python
# script located at src/ci/github-actions/calculate-job-matrix.py, which
# script located at src/ci/github-actions/ci.py, which
# uses job definition data from src/ci/github-actions/jobs.yml.
# You should primarily modify the `jobs.yml` file if you want to modify
# what jobs are executed in CI.

@ -56,7 +56,7 @@ jobs:
- name: Calculate the CI job matrix
env:
COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
run: python3 src/ci/github-actions/calculate-job-matrix.py >> $GITHUB_OUTPUT
run: python3 src/ci/github-actions/ci.py calculate-job-matrix >> $GITHUB_OUTPUT
id: jobs
job:
name: ${{ matrix.name }}
@ -35,7 +35,7 @@ use rustc_hir::{self as hir, AnonConst, GenericArg, GenericArgs, HirId};
|
||||
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
|
||||
use rustc_infer::traits::ObligationCause;
|
||||
use rustc_middle::middle::stability::AllowUnstable;
|
||||
use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput};
|
||||
use rustc_middle::mir::interpret::LitToConstInput;
|
||||
use rustc_middle::ty::fold::fold_regions;
|
||||
use rustc_middle::ty::print::PrintPolyTraitRefExt as _;
|
||||
use rustc_middle::ty::{
|
||||
@ -2262,25 +2262,11 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
||||
_ => None,
|
||||
};
|
||||
|
||||
if let Some(lit_input) = lit_input {
|
||||
// If an error occurred, ignore that it's a literal and leave reporting the error up to
|
||||
// mir.
|
||||
match tcx.at(expr.span).lit_to_const(lit_input) {
|
||||
Ok(c) => return Some(c),
|
||||
Err(_) if lit_input.ty.has_aliases() => {
|
||||
// allow the `ty` to be an alias type, though we cannot handle it here
|
||||
return None;
|
||||
}
|
||||
Err(e) => {
|
||||
tcx.dcx().span_delayed_bug(
|
||||
expr.span,
|
||||
format!("try_lower_anon_const_lit: couldn't lit_to_const {e:?}"),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
lit_input
|
||||
// Allow the `ty` to be an alias type, though we cannot handle it here, we just go through
|
||||
// the more expensive anon const code path.
|
||||
.filter(|l| !l.ty.has_aliases())
|
||||
.map(|l| tcx.at(expr.span).lit_to_const(l))
|
||||
}
|
||||
|
||||
fn lower_delegation_ty(&self, idx: hir::InferDelegationKind) -> Ty<'tcx> {
|
||||
@ -2454,13 +2440,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
|
||||
hir::PatExprKind::Lit { lit, negated } => {
|
||||
let lit_input =
|
||||
LitToConstInput { lit: &lit.node, ty, neg: negated };
|
||||
let ct = match tcx.lit_to_const(lit_input) {
|
||||
Ok(c) => c,
|
||||
Err(LitToConstError::Reported(err)) => {
|
||||
ty::Const::new_error(tcx, err)
|
||||
}
|
||||
Err(LitToConstError::TypeError) => todo!(),
|
||||
};
|
||||
let ct = tcx.lit_to_const(lit_input);
|
||||
(ct, ty)
|
||||
}
|
||||
|
||||
|
@ -16,7 +16,6 @@ use rustc_abi::{AddressSpace, Align, Endian, HasDataLayout, Size};
|
||||
use rustc_ast::{LitKind, Mutability};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sync::Lock;
|
||||
use rustc_errors::ErrorGuaranteed;
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_hir::def_id::{DefId, LocalDefId};
|
||||
use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable};
|
||||
@ -84,16 +83,6 @@ pub struct LitToConstInput<'tcx> {
|
||||
pub neg: bool,
|
||||
}
|
||||
|
||||
/// Error type for `tcx.lit_to_const`.
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)]
|
||||
pub enum LitToConstError {
|
||||
/// The literal's inferred type did not match the expected `ty` in the input.
|
||||
/// This is used for graceful error handling (`span_delayed_bug`) in
|
||||
/// type checking (`Const::from_anon_const`).
|
||||
TypeError,
|
||||
Reported(ErrorGuaranteed),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
|
||||
pub struct AllocId(pub NonZero<u64>);
|
||||
|
||||
|
@ -141,14 +141,6 @@ impl EraseType for Result<rustc_abi::TyAndLayout<'_, Ty<'_>>, &ty::layout::Layou
|
||||
>()];
|
||||
}
|
||||
|
||||
impl EraseType for Result<ty::Const<'_>, mir::interpret::LitToConstError> {
|
||||
type Result = [u8; size_of::<Result<ty::Const<'static>, mir::interpret::LitToConstError>>()];
|
||||
}
|
||||
|
||||
impl EraseType for Result<mir::Const<'_>, mir::interpret::LitToConstError> {
|
||||
type Result = [u8; size_of::<Result<mir::Const<'static>, mir::interpret::LitToConstError>>()];
|
||||
}
|
||||
|
||||
impl EraseType for Result<mir::ConstAlloc<'_>, mir::interpret::ErrorHandled> {
|
||||
type Result = [u8; size_of::<Result<mir::ConstAlloc<'static>, mir::interpret::ErrorHandled>>()];
|
||||
}
|
||||
@ -296,7 +288,6 @@ trivial! {
|
||||
rustc_middle::mir::interpret::AllocId,
|
||||
rustc_middle::mir::interpret::CtfeProvenance,
|
||||
rustc_middle::mir::interpret::ErrorHandled,
|
||||
rustc_middle::mir::interpret::LitToConstError,
|
||||
rustc_middle::thir::ExprId,
|
||||
rustc_middle::traits::CodegenObligationError,
|
||||
rustc_middle::traits::EvaluationResult,
|
||||
|
@ -57,7 +57,7 @@ use crate::middle::resolve_bound_vars::{ObjectLifetimeDefault, ResolveBoundVars,
|
||||
use crate::middle::stability::{self, DeprecationEntry};
|
||||
use crate::mir::interpret::{
|
||||
EvalStaticInitializerRawResult, EvalToAllocationRawResult, EvalToConstValueResult,
|
||||
EvalToValTreeResult, GlobalId, LitToConstError, LitToConstInput,
|
||||
EvalToValTreeResult, GlobalId, LitToConstInput,
|
||||
};
|
||||
use crate::mir::mono::{CodegenUnit, CollectionMode, MonoItem};
|
||||
use crate::query::erase::{Erase, erase, restore};
|
||||
@ -1268,7 +1268,7 @@ rustc_queries! {
|
||||
// FIXME get rid of this with valtrees
|
||||
query lit_to_const(
|
||||
key: LitToConstInput<'tcx>
|
||||
) -> Result<ty::Const<'tcx>, LitToConstError> {
|
||||
) -> ty::Const<'tcx> {
|
||||
desc { "converting literal to const" }
|
||||
}
|
||||
|
||||
|
@ -165,10 +165,14 @@ impl<'tcx> NormalizeAfterErasingRegionsFolder<'tcx> {
|
||||
arg: ty::GenericArg<'tcx>,
|
||||
) -> ty::GenericArg<'tcx> {
|
||||
let arg = self.typing_env.as_query_input(arg);
|
||||
self.tcx.try_normalize_generic_arg_after_erasing_regions(arg).unwrap_or_else(|_| bug!(
|
||||
"Failed to normalize {:?}, maybe try to call `try_normalize_erasing_regions` instead",
|
||||
arg.value
|
||||
))
|
||||
self.tcx.try_normalize_generic_arg_after_erasing_regions(arg).unwrap_or_else(|_| {
|
||||
bug!(
|
||||
"Failed to normalize {:?} in typing_env={:?}, \
|
||||
maybe try to call `try_normalize_erasing_regions` instead",
|
||||
arg.value,
|
||||
self.typing_env,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3,13 +3,12 @@
|
||||
use rustc_abi::Size;
|
||||
use rustc_ast as ast;
|
||||
use rustc_hir::LangItem;
|
||||
use rustc_middle::mir::interpret::{
|
||||
Allocation, CTFE_ALLOC_SALT, LitToConstError, LitToConstInput, Scalar,
|
||||
};
|
||||
use rustc_middle::mir::interpret::{Allocation, CTFE_ALLOC_SALT, LitToConstInput, Scalar};
|
||||
use rustc_middle::mir::*;
|
||||
use rustc_middle::thir::*;
|
||||
use rustc_middle::ty::{
|
||||
self, CanonicalUserType, CanonicalUserTypeAnnotation, Ty, TyCtxt, UserTypeAnnotationIndex,
|
||||
self, CanonicalUserType, CanonicalUserTypeAnnotation, Ty, TyCtxt, TypeVisitableExt as _,
|
||||
UserTypeAnnotationIndex,
|
||||
};
|
||||
use rustc_middle::{bug, mir, span_bug};
|
||||
use tracing::{instrument, trace};
|
||||
@ -50,16 +49,7 @@ pub(crate) fn as_constant_inner<'tcx>(
|
||||
let Expr { ty, temp_lifetime: _, span, ref kind } = *expr;
|
||||
match *kind {
|
||||
ExprKind::Literal { lit, neg } => {
|
||||
let const_ = match lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg })
|
||||
{
|
||||
Ok(c) => c,
|
||||
Err(LitToConstError::Reported(guar)) => {
|
||||
Const::Ty(Ty::new_error(tcx, guar), ty::Const::new_error(tcx, guar))
|
||||
}
|
||||
Err(LitToConstError::TypeError) => {
|
||||
bug!("encountered type error in `lit_to_mir_constant`")
|
||||
}
|
||||
};
|
||||
let const_ = lit_to_mir_constant(tcx, LitToConstInput { lit: &lit.node, ty, neg });
|
||||
|
||||
ConstOperand { span, user_ty: None, const_ }
|
||||
}
|
||||
@ -108,11 +98,13 @@ pub(crate) fn as_constant_inner<'tcx>(
|
||||
}
|
||||
|
||||
#[instrument(skip(tcx, lit_input))]
|
||||
fn lit_to_mir_constant<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
lit_input: LitToConstInput<'tcx>,
|
||||
) -> Result<Const<'tcx>, LitToConstError> {
|
||||
fn lit_to_mir_constant<'tcx>(tcx: TyCtxt<'tcx>, lit_input: LitToConstInput<'tcx>) -> Const<'tcx> {
|
||||
let LitToConstInput { lit, ty, neg } = lit_input;
|
||||
|
||||
if let Err(guar) = ty.error_reported() {
|
||||
return Const::Ty(Ty::new_error(tcx, guar), ty::Const::new_error(tcx, guar));
|
||||
}
|
||||
|
||||
let trunc = |n| {
|
||||
let width = match tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(ty)) {
|
||||
Ok(layout) => layout.size,
|
||||
@ -123,7 +115,7 @@ fn lit_to_mir_constant<'tcx>(
|
||||
trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
|
||||
let result = width.truncate(n);
|
||||
trace!("trunc result: {}", result);
|
||||
Ok(ConstValue::Scalar(Scalar::from_uint(result, width)))
|
||||
ConstValue::Scalar(Scalar::from_uint(result, width))
|
||||
};
|
||||
|
||||
let value = match (lit, ty.kind()) {
|
||||
@ -154,20 +146,18 @@ fn lit_to_mir_constant<'tcx>(
|
||||
ConstValue::Scalar(Scalar::from_uint(*n, Size::from_bytes(1)))
|
||||
}
|
||||
(ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => {
|
||||
trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() })?
|
||||
trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() })
|
||||
}
|
||||
(ast::LitKind::Float(n, _), ty::Float(fty)) => {
|
||||
parse_float_into_constval(*n, *fty, neg).unwrap()
|
||||
}
|
||||
(ast::LitKind::Float(n, _), ty::Float(fty)) => parse_float_into_constval(*n, *fty, neg)
|
||||
.ok_or_else(|| {
|
||||
LitToConstError::Reported(
|
||||
tcx.dcx()
|
||||
.delayed_bug(format!("couldn't parse float literal: {:?}", lit_input.lit)),
|
||||
)
|
||||
})?,
|
||||
(ast::LitKind::Bool(b), ty::Bool) => ConstValue::Scalar(Scalar::from_bool(*b)),
|
||||
(ast::LitKind::Char(c), ty::Char) => ConstValue::Scalar(Scalar::from_char(*c)),
|
||||
(ast::LitKind::Err(guar), _) => return Err(LitToConstError::Reported(*guar)),
|
||||
_ => return Err(LitToConstError::TypeError),
|
||||
(ast::LitKind::Err(guar), _) => {
|
||||
return Const::Ty(Ty::new_error(tcx, *guar), ty::Const::new_error(tcx, *guar));
|
||||
}
|
||||
_ => bug!("invalid lit/ty combination in `lit_to_mir_constant`: {lit:?}: {ty:?}"),
|
||||
};
|
||||
|
||||
Ok(Const::Val(value, ty))
|
||||
Const::Val(value, ty)
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
use rustc_ast as ast;
|
||||
use rustc_hir::LangItem;
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput};
|
||||
use rustc_middle::mir::interpret::LitToConstInput;
|
||||
use rustc_middle::ty::{self, ScalarInt, TyCtxt, TypeVisitableExt as _};
|
||||
use tracing::trace;
|
||||
|
||||
@ -10,11 +10,11 @@ use crate::builder::parse_float_into_scalar;
|
||||
pub(crate) fn lit_to_const<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
lit_input: LitToConstInput<'tcx>,
|
||||
) -> Result<ty::Const<'tcx>, LitToConstError> {
|
||||
) -> ty::Const<'tcx> {
|
||||
let LitToConstInput { lit, ty, neg } = lit_input;
|
||||
|
||||
if let Err(guar) = ty.error_reported() {
|
||||
return Ok(ty::Const::new_error(tcx, guar));
|
||||
return ty::Const::new_error(tcx, guar);
|
||||
}
|
||||
|
||||
let trunc = |n| {
|
||||
@ -28,8 +28,8 @@ pub(crate) fn lit_to_const<'tcx>(
|
||||
let result = width.truncate(n);
|
||||
trace!("trunc result: {}", result);
|
||||
|
||||
Ok(ScalarInt::try_from_uint(result, width)
|
||||
.unwrap_or_else(|| bug!("expected to create ScalarInt from uint {:?}", result)))
|
||||
ScalarInt::try_from_uint(result, width)
|
||||
.unwrap_or_else(|| bug!("expected to create ScalarInt from uint {:?}", result))
|
||||
};
|
||||
|
||||
let valtree = match (lit, ty.kind()) {
|
||||
@ -57,20 +57,20 @@ pub(crate) fn lit_to_const<'tcx>(
|
||||
}
|
||||
(ast::LitKind::Int(n, _), ty::Uint(_)) | (ast::LitKind::Int(n, _), ty::Int(_)) => {
|
||||
let scalar_int =
|
||||
trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() })?;
|
||||
trunc(if neg { (n.get() as i128).overflowing_neg().0 as u128 } else { n.get() });
|
||||
ty::ValTree::from_scalar_int(scalar_int)
|
||||
}
|
||||
(ast::LitKind::Bool(b), ty::Bool) => ty::ValTree::from_scalar_int((*b).into()),
|
||||
(ast::LitKind::Float(n, _), ty::Float(fty)) => {
|
||||
let bits = parse_float_into_scalar(*n, *fty, neg).ok_or_else(|| {
|
||||
let bits = parse_float_into_scalar(*n, *fty, neg).unwrap_or_else(|| {
|
||||
tcx.dcx().bug(format!("couldn't parse float literal: {:?}", lit_input.lit))
|
||||
})?;
|
||||
});
|
||||
ty::ValTree::from_scalar_int(bits)
|
||||
}
|
||||
(ast::LitKind::Char(c), ty::Char) => ty::ValTree::from_scalar_int((*c).into()),
|
||||
(ast::LitKind::Err(guar), _) => return Err(LitToConstError::Reported(*guar)),
|
||||
_ => return Err(LitToConstError::TypeError),
|
||||
(ast::LitKind::Err(guar), _) => return ty::Const::new_error(tcx, *guar),
|
||||
_ => return ty::Const::new_misc_error(tcx),
|
||||
};
|
||||
|
||||
Ok(ty::Const::new_value(tcx, valtree, ty))
|
||||
ty::Const::new_value(tcx, valtree, ty)
|
||||
}
|
||||
|
@ -13,7 +13,7 @@ use rustc_hir::pat_util::EnumerateAndAdjustIterator;
|
||||
use rustc_hir::{self as hir, ByRef, Mutability, RangeEnd};
|
||||
use rustc_index::Idx;
|
||||
use rustc_lint as lint;
|
||||
use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput};
|
||||
use rustc_middle::mir::interpret::LitToConstInput;
|
||||
use rustc_middle::thir::{
|
||||
Ascription, FieldPat, LocalVarId, Pat, PatKind, PatRange, PatRangeBoundary,
|
||||
};
|
||||
@ -669,11 +669,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
|
||||
|
||||
let ct_ty = self.typeck_results.node_type(expr.hir_id);
|
||||
let lit_input = LitToConstInput { lit: &lit.node, ty: ct_ty, neg };
|
||||
match self.tcx.at(expr.span).lit_to_const(lit_input) {
|
||||
Ok(constant) => self.const_to_pat(constant, ct_ty, expr.hir_id, lit.span).kind,
|
||||
Err(LitToConstError::Reported(e)) => PatKind::Error(e),
|
||||
Err(LitToConstError::TypeError) => bug!("lower_lit: had type error"),
|
||||
}
|
||||
let constant = self.tcx.at(expr.span).lit_to_const(lit_input);
|
||||
self.const_to_pat(constant, ct_ty, expr.hir_id, lit.span).kind
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -8,7 +8,6 @@ use rustc_middle::bug;
|
||||
use rustc_middle::ty::print::{PrettyPrinter, Print, PrintError, Printer};
|
||||
use rustc_middle::ty::{
|
||||
self, GenericArg, GenericArgKind, Instance, ReifyReason, Ty, TyCtxt, TypeVisitableExt,
|
||||
TypingEnv,
|
||||
};
|
||||
use tracing::debug;
|
||||
|
||||
@ -387,23 +386,44 @@ impl<'tcx> Printer<'tcx> for SymbolPrinter<'tcx> {
|
||||
) -> Result<(), PrintError> {
|
||||
let self_ty = self.tcx.type_of(impl_def_id);
|
||||
let impl_trait_ref = self.tcx.impl_trait_ref(impl_def_id);
|
||||
let (typing_env, mut self_ty, mut impl_trait_ref) =
|
||||
if self.tcx.generics_of(impl_def_id).count() <= args.len() {
|
||||
(
|
||||
TypingEnv::fully_monomorphized(),
|
||||
self_ty.instantiate(self.tcx, args),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate(self.tcx, args)),
|
||||
)
|
||||
} else {
|
||||
// We are probably printing a nested item inside of an impl.
|
||||
// Use the identity substitutions for the impl. We also need
|
||||
// a well-formed param-env, so let's use post-analysis.
|
||||
(
|
||||
TypingEnv::post_analysis(self.tcx, impl_def_id),
|
||||
self_ty.instantiate_identity(),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate_identity()),
|
||||
)
|
||||
};
|
||||
let generics = self.tcx.generics_of(impl_def_id);
|
||||
// We have two cases to worry about here:
|
||||
// 1. We're printing a nested item inside of an impl item, like an inner
|
||||
// function inside of a method. Due to the way that def path printing works,
|
||||
// we'll render this something like `<Ty as Trait>::method::inner_fn`
|
||||
// but we have no substs for this impl since it's not really inheriting
|
||||
// generics from the outer item. We need to use the identity substs, and
|
||||
// to normalize we need to use the correct param-env too.
|
||||
// 2. We're mangling an item with identity substs. This seems to only happen
|
||||
// when generating coverage, since we try to generate coverage for unused
|
||||
// items too, and if something isn't monomorphized then we necessarily don't
|
||||
// have anything to substitute the instance with.
|
||||
// NOTE: We don't support mangling partially substituted but still polymorphic
|
||||
// instances, like `impl<A> Tr<A> for ()` where `A` is substituted w/ `(T,)`.
|
||||
let (typing_env, mut self_ty, mut impl_trait_ref) = if generics.count() > args.len()
|
||||
|| &args[..generics.count()]
|
||||
== self
|
||||
.tcx
|
||||
.erase_regions(ty::GenericArgs::identity_for_item(self.tcx, impl_def_id))
|
||||
.as_slice()
|
||||
{
|
||||
(
|
||||
ty::TypingEnv::post_analysis(self.tcx, impl_def_id),
|
||||
self_ty.instantiate_identity(),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate_identity()),
|
||||
)
|
||||
} else {
|
||||
assert!(
|
||||
!args.has_non_region_param(),
|
||||
"should not be mangling partially substituted \
|
||||
polymorphic instance: {impl_def_id:?} {args:?}"
|
||||
);
|
||||
(
|
||||
ty::TypingEnv::fully_monomorphized(),
|
||||
self_ty.instantiate(self.tcx, args),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate(self.tcx, args)),
|
||||
)
|
||||
};
|
||||
|
||||
match &mut impl_trait_ref {
|
||||
Some(impl_trait_ref) => {
|
||||
|
@ -233,23 +233,44 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> {
|
||||
|
||||
let self_ty = self.tcx.type_of(impl_def_id);
|
||||
let impl_trait_ref = self.tcx.impl_trait_ref(impl_def_id);
|
||||
let (typing_env, mut self_ty, mut impl_trait_ref) =
|
||||
if self.tcx.generics_of(impl_def_id).count() <= args.len() {
|
||||
(
|
||||
ty::TypingEnv::fully_monomorphized(),
|
||||
self_ty.instantiate(self.tcx, args),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate(self.tcx, args)),
|
||||
)
|
||||
} else {
|
||||
// We are probably printing a nested item inside of an impl.
|
||||
// Use the identity substitutions for the impl. We also need
|
||||
// a well-formed param-env, so let's use post-analysis.
|
||||
(
|
||||
ty::TypingEnv::post_analysis(self.tcx, impl_def_id),
|
||||
self_ty.instantiate_identity(),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate_identity()),
|
||||
)
|
||||
};
|
||||
let generics = self.tcx.generics_of(impl_def_id);
|
||||
// We have two cases to worry about here:
|
||||
// 1. We're printing a nested item inside of an impl item, like an inner
|
||||
// function inside of a method. Due to the way that def path printing works,
|
||||
// we'll render this something like `<Ty as Trait>::method::inner_fn`
|
||||
// but we have no substs for this impl since it's not really inheriting
|
||||
// generics from the outer item. We need to use the identity substs, and
|
||||
// to normalize we need to use the correct param-env too.
|
||||
// 2. We're mangling an item with identity substs. This seems to only happen
|
||||
// when generating coverage, since we try to generate coverage for unused
|
||||
// items too, and if something isn't monomorphized then we necessarily don't
|
||||
// have anything to substitute the instance with.
|
||||
// NOTE: We don't support mangling partially substituted but still polymorphic
|
||||
// instances, like `impl<A> Tr<A> for ()` where `A` is substituted w/ `(T,)`.
|
||||
let (typing_env, mut self_ty, mut impl_trait_ref) = if generics.count() > args.len()
|
||||
|| &args[..generics.count()]
|
||||
== self
|
||||
.tcx
|
||||
.erase_regions(ty::GenericArgs::identity_for_item(self.tcx, impl_def_id))
|
||||
.as_slice()
|
||||
{
|
||||
(
|
||||
ty::TypingEnv::post_analysis(self.tcx, impl_def_id),
|
||||
self_ty.instantiate_identity(),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate_identity()),
|
||||
)
|
||||
} else {
|
||||
assert!(
|
||||
!args.has_non_region_param(),
|
||||
"should not be mangling partially substituted \
|
||||
polymorphic instance: {impl_def_id:?} {args:?}"
|
||||
);
|
||||
(
|
||||
ty::TypingEnv::fully_monomorphized(),
|
||||
self_ty.instantiate(self.tcx, args),
|
||||
impl_trait_ref.map(|impl_trait_ref| impl_trait_ref.instantiate(self.tcx, args)),
|
||||
)
|
||||
};
|
||||
|
||||
match &mut impl_trait_ref {
|
||||
Some(impl_trait_ref) => {
|
||||
|
@ -4,7 +4,7 @@ use rustc_abi::{FIRST_VARIANT, VariantIdx};
|
||||
use rustc_errors::ErrorGuaranteed;
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_hir::def_id::LocalDefId;
|
||||
use rustc_middle::mir::interpret::{LitToConstError, LitToConstInput};
|
||||
use rustc_middle::mir::interpret::LitToConstInput;
|
||||
use rustc_middle::query::Providers;
|
||||
use rustc_middle::thir::visit;
|
||||
use rustc_middle::thir::visit::Visitor;
|
||||
@ -118,13 +118,7 @@ fn recurse_build<'tcx>(
|
||||
}
|
||||
&ExprKind::Literal { lit, neg } => {
|
||||
let sp = node.span;
|
||||
match tcx.at(sp).lit_to_const(LitToConstInput { lit: &lit.node, ty: node.ty, neg }) {
|
||||
Ok(c) => c,
|
||||
Err(LitToConstError::Reported(guar)) => ty::Const::new_error(tcx, guar),
|
||||
Err(LitToConstError::TypeError) => {
|
||||
bug!("encountered type error in lit_to_const")
|
||||
}
|
||||
}
|
||||
tcx.at(sp).lit_to_const(LitToConstInput { lit: &lit.node, ty: node.ty, neg })
|
||||
}
|
||||
&ExprKind::NonHirLiteral { lit, user_ty: _ } => {
|
||||
let val = ty::ValTree::from_scalar_int(lit);
|
||||
|
@ -1,29 +1,30 @@
# Docker images for CI

This folder contains a bunch of docker images used by the continuous integration
(CI) of Rust. An script is accompanied (`run.sh`) with these images to actually
execute them. To test out an image execute:
(CI) of Rust. A script is accompanied (`run.sh`) with these images to actually
execute them.

Note that a single Docker image can be used by multiple CI jobs, so the job name
is the important thing that you should know. You can examine the existing CI jobs in
the [`jobs.yml`](../github-actions/jobs.yml) file.

To run a specific CI job locally, you can use the following script:

```
./src/ci/docker/run.sh $image_name
python3 ./src/ci/github-actions/ci.py run-local <job-name>
```

for example:

For example, to run the `x86_64-gnu-llvm-18-1` job:
```
./src/ci/docker/run.sh x86_64-gnu
python3 ./src/ci/github-actions/ci.py run-local x86_64-gnu-llvm-18-1
```

Images will output artifacts in an `obj/$image_name` dir at the root of a repository. Note
that the script will overwrite the contents of this directory.

To match conditions in rusts CI, also set the environment variable `DEPLOY=1`, e.g.:
```
DEPLOY=1 ./src/ci/docker/run.sh x86_64-gnu
```
The job will output artifacts in an `obj/<image-name>` dir at the root of a repository. Note
that the script will overwrite the contents of this directory. `<image-name>` is set based on the
Docker image executed in the given CI job.

**NOTE**: In CI, the script outputs the artifacts to the `obj` directory,
while locally, to the `obj/$image_name` directory. This is primarily to prevent
while locally, to the `obj/<image-name>` directory. This is primarily to prevent
strange linker errors when using multiple Docker images.

For some Linux workflows (for example `x86_64-gnu-llvm-18-N`), the process is more involved. You will need to see which script is executed for the given workflow inside the [`jobs.yml`](../github-actions/jobs.yml) file and pass it through the `DOCKER_SCRIPT` environment variable. For example, to reproduce the `x86_64-gnu-llvm-18-3` workflow, you can run the following script:
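A plausible invocation, assuming the job definition shown in the `jobs.yml` changes later in this diff (where `x86_64-gnu-llvm-18-3` sets `IMAGE: x86_64-gnu-llvm-18` and `DOCKER_SCRIPT: x86_64-gnu-llvm3.sh`), would look roughly like:

```
DOCKER_SCRIPT=x86_64-gnu-llvm3.sh ./src/ci/docker/run.sh x86_64-gnu-llvm-18
```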
@ -1,18 +1,20 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
This script serves for generating a matrix of jobs that should
|
||||
be executed on CI.
|
||||
This script contains CI functionality.
|
||||
It can be used to generate a matrix of jobs that should
|
||||
be executed on CI, or run a specific CI job locally.
|
||||
|
||||
It reads job definitions from `src/ci/github-actions/jobs.yml`
|
||||
and filters them based on the event that happened on CI.
|
||||
It reads job definitions from `src/ci/github-actions/jobs.yml`.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import dataclasses
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import typing
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any, Optional
|
||||
@ -25,13 +27,19 @@ JOBS_YAML_PATH = Path(__file__).absolute().parent / "jobs.yml"
|
||||
Job = Dict[str, Any]
|
||||
|
||||
|
||||
def name_jobs(jobs: List[Dict], prefix: str) -> List[Job]:
|
||||
def add_job_properties(jobs: List[Dict], prefix: str) -> List[Job]:
|
||||
"""
|
||||
Add a `name` attribute to each job, based on its image and the given `prefix`.
|
||||
Modify the `name` attribute of each job, based on its base name and the given `prefix`.
|
||||
Add an `image` attribute to each job, based on its image.
|
||||
"""
|
||||
modified_jobs = []
|
||||
for job in jobs:
|
||||
job["name"] = f"{prefix} - {job['image']}"
|
||||
return jobs
|
||||
# Create a copy of the `job` dictionary to avoid modifying `jobs`
|
||||
new_job = dict(job)
|
||||
new_job["image"] = get_job_image(new_job)
|
||||
new_job["name"] = f"{prefix} - {new_job['name']}"
|
||||
modified_jobs.append(new_job)
|
||||
return modified_jobs
|
||||
|
||||
|
||||
def add_base_env(jobs: List[Job], environment: Dict[str, str]) -> List[Job]:
|
||||
@ -39,11 +47,15 @@ def add_base_env(jobs: List[Job], environment: Dict[str, str]) -> List[Job]:
|
||||
Prepends `environment` to the `env` attribute of each job.
|
||||
The `env` of each job has higher precedence than `environment`.
|
||||
"""
|
||||
modified_jobs = []
|
||||
for job in jobs:
|
||||
env = environment.copy()
|
||||
env.update(job.get("env", {}))
|
||||
job["env"] = env
|
||||
return jobs
|
||||
|
||||
new_job = dict(job)
|
||||
new_job["env"] = env
|
||||
modified_jobs.append(new_job)
|
||||
return modified_jobs
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
@ -116,7 +128,9 @@ def find_run_type(ctx: GitHubCtx) -> Optional[WorkflowRunType]:
|
||||
|
||||
def calculate_jobs(run_type: WorkflowRunType, job_data: Dict[str, Any]) -> List[Job]:
|
||||
if isinstance(run_type, PRRunType):
|
||||
return add_base_env(name_jobs(job_data["pr"], "PR"), job_data["envs"]["pr"])
|
||||
return add_base_env(
|
||||
add_job_properties(job_data["pr"], "PR"), job_data["envs"]["pr"]
|
||||
)
|
||||
elif isinstance(run_type, TryRunType):
|
||||
jobs = job_data["try"]
|
||||
custom_jobs = run_type.custom_jobs
|
||||
@ -130,7 +144,7 @@ def calculate_jobs(run_type: WorkflowRunType, job_data: Dict[str, Any]) -> List[
|
||||
jobs = []
|
||||
unknown_jobs = []
|
||||
for custom_job in custom_jobs:
|
||||
job = [j for j in job_data["auto"] if j["image"] == custom_job]
|
||||
job = [j for j in job_data["auto"] if j["name"] == custom_job]
|
||||
if not job:
|
||||
unknown_jobs.append(custom_job)
|
||||
continue
|
||||
@ -140,10 +154,10 @@ def calculate_jobs(run_type: WorkflowRunType, job_data: Dict[str, Any]) -> List[
|
||||
f"Custom job(s) `{unknown_jobs}` not found in auto jobs"
|
||||
)
|
||||
|
||||
return add_base_env(name_jobs(jobs, "try"), job_data["envs"]["try"])
|
||||
return add_base_env(add_job_properties(jobs, "try"), job_data["envs"]["try"])
|
||||
elif isinstance(run_type, AutoRunType):
|
||||
return add_base_env(
|
||||
name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"]
|
||||
add_job_properties(job_data["auto"], "auto"), job_data["envs"]["auto"]
|
||||
)
|
||||
|
||||
return []
|
||||
@ -181,12 +195,64 @@ def format_run_type(run_type: WorkflowRunType) -> str:
|
||||
raise AssertionError()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
def get_job_image(job: Job) -> str:
|
||||
"""
|
||||
By default, the Docker image of a job is based on its name.
|
||||
However, it can be overridden by its IMAGE environment variable.
|
||||
"""
|
||||
env = job.get("env", {})
|
||||
# Return the IMAGE environment variable if it exists, otherwise return the job name
|
||||
return env.get("IMAGE", job["name"])
|
||||
|
||||
with open(JOBS_YAML_PATH) as f:
|
||||
data = yaml.safe_load(f)
|
||||
|
||||
def is_linux_job(job: Job) -> bool:
|
||||
return "ubuntu" in job["os"]
|
||||
|
||||
|
||||
def find_linux_job(job_data: Dict[str, Any], job_name: str, pr_jobs: bool) -> Job:
|
||||
candidates = job_data["pr"] if pr_jobs else job_data["auto"]
|
||||
jobs = [job for job in candidates if job.get("name") == job_name]
|
||||
if len(jobs) == 0:
|
||||
available_jobs = "\n".join(
|
||||
sorted(job["name"] for job in candidates if is_linux_job(job))
|
||||
)
|
||||
raise Exception(f"""Job `{job_name}` not found in {'pr' if pr_jobs else 'auto'} jobs.
|
||||
The following jobs are available:
|
||||
{available_jobs}""")
|
||||
assert len(jobs) == 1
|
||||
|
||||
job = jobs[0]
|
||||
if not is_linux_job(job):
|
||||
raise Exception("Only Linux jobs can be executed locally")
|
||||
return job
|
||||
|
||||
|
||||
def run_workflow_locally(job_data: Dict[str, Any], job_name: str, pr_jobs: bool):
|
||||
DOCKER_DIR = Path(__file__).absolute().parent.parent / "docker"
|
||||
|
||||
job = find_linux_job(job_data, job_name=job_name, pr_jobs=pr_jobs)
|
||||
|
||||
custom_env = {}
|
||||
# Replicate src/ci/scripts/setup-environment.sh
|
||||
# Adds custom environment variables to the job
|
||||
if job_name.startswith("dist-"):
|
||||
if job_name.endswith("-alt"):
|
||||
custom_env["DEPLOY_ALT"] = "1"
|
||||
else:
|
||||
custom_env["DEPLOY"] = "1"
|
||||
custom_env.update({k: str(v) for (k, v) in job.get("env", {}).items()})
|
||||
|
||||
args = [str(DOCKER_DIR / "run.sh"), get_job_image(job)]
|
||||
env_formatted = [f"{k}={v}" for (k, v) in sorted(custom_env.items())]
|
||||
print(f"Executing `{' '.join(env_formatted)} {' '.join(args)}`")
|
||||
|
||||
env = os.environ.copy()
|
||||
env.update(custom_env)
|
||||
|
||||
subprocess.run(args, env=env)
|
||||
|
||||
|
||||
def calculate_job_matrix(job_data: Dict[str, Any]):
|
||||
github_ctx = get_github_ctx()
|
||||
|
||||
run_type = find_run_type(github_ctx)
|
||||
@ -197,7 +263,7 @@ if __name__ == "__main__":
|
||||
|
||||
jobs = []
|
||||
if run_type is not None:
|
||||
jobs = calculate_jobs(run_type, data)
|
||||
jobs = calculate_jobs(run_type, job_data)
|
||||
jobs = skip_jobs(jobs, channel)
|
||||
|
||||
if not jobs:
|
||||
@ -208,3 +274,45 @@ if __name__ == "__main__":
|
||||
logging.info(f"Output:\n{yaml.dump(dict(jobs=jobs, run_type=run_type), indent=4)}")
|
||||
print(f"jobs={json.dumps(jobs)}")
|
||||
print(f"run_type={run_type}")
|
||||
|
||||
|
||||
def create_cli_parser():
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="ci.py", description="Generate or run CI workflows"
|
||||
)
|
||||
subparsers = parser.add_subparsers(
|
||||
help="Command to execute", dest="command", required=True
|
||||
)
|
||||
subparsers.add_parser(
|
||||
"calculate-job-matrix",
|
||||
help="Generate a matrix of jobs that should be executed in CI",
|
||||
)
|
||||
run_parser = subparsers.add_parser(
|
||||
"run-local", help="Run a CI jobs locally (on Linux)"
|
||||
)
|
||||
run_parser.add_argument(
|
||||
"job_name",
|
||||
help="CI job that should be executed. By default, a merge (auto) "
|
||||
"job with the given name will be executed",
|
||||
)
|
||||
run_parser.add_argument(
|
||||
"--pr", action="store_true", help="Run a PR job instead of an auto job"
|
||||
)
|
||||
return parser
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
with open(JOBS_YAML_PATH) as f:
|
||||
data = yaml.safe_load(f)
|
||||
|
||||
parser = create_cli_parser()
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.command == "calculate-job-matrix":
|
||||
calculate_job_matrix(data)
|
||||
elif args.command == "run-local":
|
||||
run_workflow_locally(data, args.job_name, args.pr)
|
||||
else:
|
||||
raise Exception(f"Unknown command {args.command}")
|
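The command-line surface defined by `create_cli_parser` above has two subcommands: `calculate-job-matrix`, which the workflow itself invokes, and `run-local`, which can be used like this (the job name is illustrative; `--pr` selects a PR job such as `mingw-check` from `jobs.yml` instead of an auto job):

```
python3 src/ci/github-actions/ci.py run-local mingw-check --pr
```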
@ -91,26 +91,26 @@ envs:
|
||||
# These jobs automatically inherit envs.pr, to avoid repeating
|
||||
# it in each job definition.
|
||||
pr:
|
||||
- image: mingw-check
|
||||
- name: mingw-check
|
||||
<<: *job-linux-4c
|
||||
- image: mingw-check-tidy
|
||||
- name: mingw-check-tidy
|
||||
continue_on_error: true
|
||||
<<: *job-linux-4c
|
||||
- image: x86_64-gnu-llvm-18
|
||||
- name: x86_64-gnu-llvm-18
|
||||
env:
|
||||
ENABLE_GCC_CODEGEN: "1"
|
||||
# We are adding (temporarily) a dummy commit on the compiler
|
||||
READ_ONLY_SRC: "0"
|
||||
DOCKER_SCRIPT: x86_64-gnu-llvm.sh
|
||||
<<: *job-linux-16c
|
||||
- image: x86_64-gnu-tools
|
||||
- name: x86_64-gnu-tools
|
||||
<<: *job-linux-16c
|
||||
|
||||
# Jobs that run when you perform a try build (@bors try)
|
||||
# These jobs automatically inherit envs.try, to avoid repeating
|
||||
# it in each job definition.
|
||||
try:
|
||||
- image: dist-x86_64-linux
|
||||
- name: dist-x86_64-linux
|
||||
env:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-linux-16c
|
||||
@ -123,106 +123,106 @@ auto:
|
||||
# Linux/Docker builders #
|
||||
#############################
|
||||
|
||||
- image: aarch64-gnu
|
||||
- name: aarch64-gnu
|
||||
<<: *job-aarch64-linux
|
||||
|
||||
- image: aarch64-gnu-debug
|
||||
- name: aarch64-gnu-debug
|
||||
<<: *job-aarch64-linux
|
||||
|
||||
- image: arm-android
|
||||
- name: arm-android
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: armhf-gnu
|
||||
- name: armhf-gnu
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-aarch64-linux
|
||||
- name: dist-aarch64-linux
|
||||
env:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-android
|
||||
- name: dist-android
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-arm-linux
|
||||
- name: dist-arm-linux
|
||||
<<: *job-linux-8c
|
||||
|
||||
- image: dist-armhf-linux
|
||||
- name: dist-armhf-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-armv7-linux
|
||||
- name: dist-armv7-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-i586-gnu-i586-i686-musl
|
||||
- name: dist-i586-gnu-i586-i686-musl
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-i686-linux
|
||||
- name: dist-i686-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-loongarch64-linux
|
||||
- name: dist-loongarch64-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-loongarch64-musl
|
||||
- name: dist-loongarch64-musl
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-ohos
|
||||
- name: dist-ohos
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-powerpc-linux
|
||||
- name: dist-powerpc-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-powerpc64-linux
|
||||
- name: dist-powerpc64-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-powerpc64le-linux
|
||||
- name: dist-powerpc64le-linux
|
||||
<<: *job-linux-4c-largedisk
|
||||
|
||||
- image: dist-riscv64-linux
|
||||
- name: dist-riscv64-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-s390x-linux
|
||||
- name: dist-s390x-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-various-1
|
||||
- name: dist-various-1
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-various-2
|
||||
- name: dist-various-2
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-x86_64-freebsd
|
||||
- name: dist-x86_64-freebsd
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-x86_64-illumos
|
||||
- name: dist-x86_64-illumos
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-x86_64-linux
|
||||
- name: dist-x86_64-linux
|
||||
env:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-linux-16c
|
||||
|
||||
- image: dist-x86_64-linux-alt
|
||||
- name: dist-x86_64-linux-alt
|
||||
env:
|
||||
IMAGE: dist-x86_64-linux
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-linux-16c
|
||||
|
||||
- image: dist-x86_64-musl
|
||||
- name: dist-x86_64-musl
|
||||
env:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: dist-x86_64-netbsd
|
||||
- name: dist-x86_64-netbsd
|
||||
<<: *job-linux-4c
|
||||
|
||||
# The i686-gnu job is split into multiple jobs to run tests in parallel.
|
||||
# i686-gnu-1 skips tests that run in i686-gnu-2.
|
||||
- image: i686-gnu-1
|
||||
- name: i686-gnu-1
|
||||
env:
|
||||
IMAGE: i686-gnu
|
||||
DOCKER_SCRIPT: stage_2_test_set1.sh
|
||||
<<: *job-linux-4c
|
||||
|
||||
# Skip tests that run in i686-gnu-1
|
||||
- image: i686-gnu-2
|
||||
- name: i686-gnu-2
|
||||
env:
|
||||
IMAGE: i686-gnu
|
||||
DOCKER_SCRIPT: stage_2_test_set2.sh
|
||||
@ -230,14 +230,14 @@ auto:
|
||||
|
||||
# The i686-gnu-nopt job is split into multiple jobs to run tests in parallel.
|
||||
# i686-gnu-nopt-1 skips tests that run in i686-gnu-nopt-2
|
||||
- image: i686-gnu-nopt-1
|
||||
- name: i686-gnu-nopt-1
|
||||
env:
|
||||
IMAGE: i686-gnu-nopt
|
||||
DOCKER_SCRIPT: /scripts/stage_2_test_set1.sh
|
||||
<<: *job-linux-4c
|
||||
|
||||
# Skip tests that run in i686-gnu-nopt-1
|
||||
- image: i686-gnu-nopt-2
|
||||
- name: i686-gnu-nopt-2
|
||||
env:
|
||||
IMAGE: i686-gnu-nopt
|
||||
DOCKER_SCRIPT: >-
|
||||
@ -245,13 +245,13 @@ auto:
|
||||
/scripts/stage_2_test_set2.sh
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: mingw-check
|
||||
- name: mingw-check
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: test-various
|
||||
- name: test-various
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: x86_64-fuchsia
|
||||
- name: x86_64-fuchsia
|
||||
# Only run this job on the nightly channel. Fuchsia requires
|
||||
# nightly features to compile, and this job would fail if
|
||||
# executed on beta and stable.
|
||||
@ -260,10 +260,10 @@ auto:
|
||||
|
||||
# Tests integration with Rust for Linux.
|
||||
# Builds stage 1 compiler and tries to compile a few RfL examples with it.
|
||||
- image: x86_64-rust-for-linux
|
||||
- name: x86_64-rust-for-linux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: x86_64-gnu
|
||||
- name: x86_64-gnu
|
||||
<<: *job-linux-4c
|
||||
|
||||
# This job ensures commits landing on nightly still pass the full
|
||||
@ -271,7 +271,7 @@ auto:
|
||||
# depend on the channel being built (for example if they include the
|
||||
# channel name on the output), and this builder prevents landing
|
||||
# changes that would result in broken builds after a promotion.
|
||||
- image: x86_64-gnu-stable
|
||||
- name: x86_64-gnu-stable
|
||||
# Only run this job on the nightly channel. Running this on beta
|
||||
# could cause failures when `dev: 1` in `stage0.txt`, and running
|
||||
# this on stable is useless.
|
||||
@ -281,20 +281,20 @@ auto:
|
||||
RUST_CI_OVERRIDE_RELEASE_CHANNEL: stable
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: x86_64-gnu-aux
|
||||
- name: x86_64-gnu-aux
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: x86_64-gnu-debug
|
||||
- name: x86_64-gnu-debug
|
||||
# This seems to be needed because a full stage 2 build + run-make tests
|
||||
# overwhelms the storage capacity of the standard 4c runner.
|
||||
<<: *job-linux-4c-largedisk
|
||||
|
||||
- image: x86_64-gnu-distcheck
|
||||
- name: x86_64-gnu-distcheck
|
||||
<<: *job-linux-8c
|
||||
|
||||
# The x86_64-gnu-llvm-19 job is split into multiple jobs to run tests in parallel.
|
||||
# x86_64-gnu-llvm-19-1 skips tests that run in x86_64-gnu-llvm-19-{2,3}.
|
||||
- image: x86_64-gnu-llvm-19-1
|
||||
- name: x86_64-gnu-llvm-19-1
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
IMAGE: x86_64-gnu-llvm-19
|
||||
@ -302,7 +302,7 @@ auto:
|
||||
<<: *job-linux-4c
|
||||
|
||||
# Skip tests that run in x86_64-gnu-llvm-19-{1,3}
|
||||
- image: x86_64-gnu-llvm-19-2
|
||||
- name: x86_64-gnu-llvm-19-2
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
IMAGE: x86_64-gnu-llvm-19
|
||||
@ -310,7 +310,7 @@ auto:
|
||||
<<: *job-linux-4c
|
||||
|
||||
# Skip tests that run in x86_64-gnu-llvm-19-{1,2}
|
||||
- image: x86_64-gnu-llvm-19-3
|
||||
- name: x86_64-gnu-llvm-19-3
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
IMAGE: x86_64-gnu-llvm-19
|
||||
@ -319,7 +319,7 @@ auto:
|
||||
|
||||
# The x86_64-gnu-llvm-18 job is split into multiple jobs to run tests in parallel.
|
||||
# x86_64-gnu-llvm-18-1 skips tests that run in x86_64-gnu-llvm-18-{2,3}.
|
||||
- image: x86_64-gnu-llvm-18-1
|
||||
- name: x86_64-gnu-llvm-18-1
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
READ_ONLY_SRC: "0"
|
||||
@ -328,7 +328,7 @@ auto:
|
||||
<<: *job-linux-4c
|
||||
|
||||
# Skip tests that run in x86_64-gnu-llvm-18-{1,3}
|
||||
- image: x86_64-gnu-llvm-18-2
|
||||
- name: x86_64-gnu-llvm-18-2
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
READ_ONLY_SRC: "0"
|
||||
@ -337,7 +337,7 @@ auto:
|
||||
<<: *job-linux-4c
|
||||
|
||||
# Skip tests that run in x86_64-gnu-llvm-18-{1,2}
|
||||
- image: x86_64-gnu-llvm-18-3
|
||||
- name: x86_64-gnu-llvm-18-3
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
READ_ONLY_SRC: "0"
|
||||
@ -345,10 +345,10 @@ auto:
|
||||
DOCKER_SCRIPT: x86_64-gnu-llvm3.sh
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: x86_64-gnu-nopt
|
||||
- name: x86_64-gnu-nopt
|
||||
<<: *job-linux-4c
|
||||
|
||||
- image: x86_64-gnu-tools
|
||||
- name: x86_64-gnu-tools
|
||||
env:
|
||||
DEPLOY_TOOLSTATES_JSON: toolstates-linux.json
|
||||
<<: *job-linux-4c
|
||||
@ -357,7 +357,7 @@ auto:
|
||||
# macOS Builders #
|
||||
####################
|
||||
|
||||
- image: dist-x86_64-apple
|
||||
- name: dist-x86_64-apple
|
||||
env:
|
||||
SCRIPT: ./x.py dist bootstrap --include-default-paths --host=x86_64-apple-darwin --target=x86_64-apple-darwin
|
||||
RUST_CONFIGURE_ARGS: --enable-full-tools --enable-sanitizers --enable-profiler --set rust.jemalloc --set rust.lto=thin --set rust.codegen-units=1
|
||||
@ -371,7 +371,7 @@ auto:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-macos-xl
|
||||
|
||||
- image: dist-apple-various
|
||||
- name: dist-apple-various
|
||||
env:
|
||||
SCRIPT: ./x.py dist bootstrap --include-default-paths --host='' --target=aarch64-apple-ios,x86_64-apple-ios,aarch64-apple-ios-sim,aarch64-apple-ios-macabi,x86_64-apple-ios-macabi
|
||||
# Mac Catalyst cannot currently compile the sanitizer:
|
||||
@ -385,19 +385,19 @@ auto:
|
||||
NO_OVERFLOW_CHECKS: 1
|
||||
<<: *job-macos-xl
|
||||
|
||||
- image: x86_64-apple-1
|
||||
- name: x86_64-apple-1
|
||||
env:
|
||||
<<: *env-x86_64-apple-tests
|
||||
<<: *job-macos-xl
|
||||
|
||||
- image: x86_64-apple-2
|
||||
- name: x86_64-apple-2
|
||||
env:
|
||||
SCRIPT: ./x.py --stage 2 test tests/ui tests/rustdoc
|
||||
<<: *env-x86_64-apple-tests
|
||||
<<: *job-macos-xl
|
||||
|
||||
# This target only needs to support 11.0 and up as nothing else supports the hardware
|
||||
- image: dist-aarch64-apple
|
||||
- name: dist-aarch64-apple
|
||||
env:
|
||||
SCRIPT: ./x.py dist bootstrap --include-default-paths --host=aarch64-apple-darwin --target=aarch64-apple-darwin
|
||||
RUST_CONFIGURE_ARGS: >-
|
||||
@ -421,7 +421,7 @@ auto:
|
||||
<<: *job-macos-m1
|
||||
|
||||
# This target only needs to support 11.0 and up as nothing else supports the hardware
|
||||
- image: aarch64-apple
|
||||
- name: aarch64-apple
|
||||
env:
|
||||
SCRIPT: ./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin
|
||||
RUST_CONFIGURE_ARGS: >-
|
||||
@ -442,20 +442,20 @@ auto:
|
||||
# Windows Builders #
|
||||
######################
|
||||
|
||||
- image: x86_64-msvc
|
||||
- name: x86_64-msvc
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-profiler
|
||||
SCRIPT: make ci-msvc
|
||||
<<: *job-windows-8c
|
||||
|
||||
- image: i686-msvc
|
||||
- name: i686-msvc
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
|
||||
SCRIPT: make ci-msvc
|
||||
<<: *job-windows-8c
|
||||
|
||||
# x86_64-msvc-ext is split into multiple jobs to run tests in parallel.
|
||||
- image: x86_64-msvc-ext1
|
||||
- name: x86_64-msvc-ext1
|
||||
env:
|
||||
SCRIPT: python x.py --stage 2 test src/tools/cargotest src/tools/cargo
|
||||
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld
|
||||
@ -464,7 +464,7 @@ auto:
|
||||
# Temporary builder to workaround CI issues
|
||||
# See <https://github.com/rust-lang/rust/issues/127883>
|
||||
#FIXME: Remove this, and re-enable the same tests in `checktools.sh`, once CI issues are fixed.
|
||||
- image: x86_64-msvc-ext2
|
||||
- name: x86_64-msvc-ext2
|
||||
env:
|
||||
SCRIPT: >
|
||||
python x.py test --stage 2 src/tools/miri --target aarch64-apple-darwin --test-args pass &&
|
||||
@ -476,7 +476,7 @@ auto:
|
||||
<<: *job-windows
|
||||
|
||||
# Run `checktools.sh` and upload the toolstate file.
|
||||
- image: x86_64-msvc-ext3
|
||||
- name: x86_64-msvc-ext3
|
||||
env:
|
||||
SCRIPT: src/ci/docker/host-x86_64/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstate/toolstates.json windows
|
||||
HOST_TARGET: x86_64-pc-windows-msvc
|
||||
@ -500,7 +500,7 @@ auto:
|
||||
# came from the mingw-w64 SourceForge download site. Unfortunately
|
||||
# SourceForge is notoriously flaky, so we mirror it on our own infrastructure.
|
||||
|
||||
- image: i686-mingw
|
||||
- name: i686-mingw
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
|
||||
SCRIPT: make ci-mingw
|
||||
@ -510,7 +510,7 @@ auto:
|
||||
<<: *job-windows-8c
|
||||
|
||||
# x86_64-mingw is split into two jobs to run tests in parallel.
|
||||
- image: x86_64-mingw-1
|
||||
- name: x86_64-mingw-1
|
||||
env:
|
||||
SCRIPT: make ci-mingw-x
|
||||
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
|
||||
@ -519,7 +519,7 @@ auto:
|
||||
NO_DOWNLOAD_CI_LLVM: 1
|
||||
<<: *job-windows
|
||||
|
||||
- image: x86_64-mingw-2
|
||||
- name: x86_64-mingw-2
|
||||
env:
|
||||
SCRIPT: make ci-mingw-bootstrap
|
||||
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
|
||||
@ -528,7 +528,7 @@ auto:
|
||||
NO_DOWNLOAD_CI_LLVM: 1
|
||||
<<: *job-windows
|
||||
|
||||
- image: dist-x86_64-msvc
|
||||
- name: dist-x86_64-msvc
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: >-
|
||||
--build=x86_64-pc-windows-msvc
|
||||
@ -542,7 +542,7 @@ auto:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-windows-8c
|
||||
|
||||
- image: dist-i686-msvc
|
||||
- name: dist-i686-msvc
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: >-
|
||||
--build=i686-pc-windows-msvc
|
||||
@ -555,7 +555,7 @@ auto:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-windows
|
||||
|
||||
- image: dist-aarch64-msvc
|
||||
- name: dist-aarch64-msvc
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: >-
|
||||
--build=x86_64-pc-windows-msvc
|
||||
@ -567,7 +567,7 @@ auto:
|
||||
DIST_REQUIRE_ALL_TOOLS: 1
|
||||
<<: *job-windows
|
||||
|
||||
- image: dist-i686-mingw
|
||||
- name: dist-i686-mingw
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: >-
|
||||
--build=i686-pc-windows-gnu
|
||||
@ -580,7 +580,7 @@ auto:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-windows
|
||||
|
||||
- image: dist-x86_64-mingw
|
||||
- name: dist-x86_64-mingw
|
||||
env:
|
||||
SCRIPT: python x.py dist bootstrap --include-default-paths
|
||||
RUST_CONFIGURE_ARGS: >-
|
||||
@ -593,7 +593,7 @@ auto:
|
||||
CODEGEN_BACKENDS: llvm,cranelift
|
||||
<<: *job-windows
|
||||
|
||||
- image: dist-x86_64-msvc-alt
|
||||
- name: dist-x86_64-msvc-alt
|
||||
env:
|
||||
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-extended --enable-profiler
|
||||
SCRIPT: python x.py dist bootstrap --include-default-paths
|
||||
|
@ -126,4 +126,4 @@ Here is an example of how can `opt-dist` be used locally (outside of CI):
[`Environment`]: https://github.com/rust-lang/rust/blob/ee451f8faccf3050c76cdcd82543c917b40c7962/src/tools/opt-dist/src/environment.rs#L5

> Note: if you want to run the actual CI pipeline, instead of running `opt-dist` locally,
> you can execute `DEPLOY=1 src/ci/docker/run.sh dist-x86_64-linux`.
> you can execute `python3 src/ci/github-actions/ci.py run-local dist-x86_64-linux`.

@ -28,7 +28,7 @@ Our CI is primarily executed on [GitHub Actions], with a single workflow defined
in [`.github/workflows/ci.yml`], which contains a bunch of steps that are
unified for all CI jobs that we execute. When a commit is pushed to a
corresponding branch or a PR, the workflow executes the
[`calculate-job-matrix.py`] script, which dynamically generates the specific CI
[`ci.py`] script, which dynamically generates the specific CI
jobs that should be executed. This script uses the [`jobs.yml`] file as an
input, which contains a declarative configuration of all our CI jobs.
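Concretely, the workflow step shown earlier in this diff invokes the script like this:

```
python3 src/ci/github-actions/ci.py calculate-job-matrix >> $GITHUB_OUTPUT
```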
@ -299,8 +299,7 @@ platform's custom [Docker container]. This has a lot of advantages for us:
- We can avoid reinstalling tools (like QEMU or the Android emulator) every time
thanks to Docker image caching.
- Users can run the same tests in the same environment locally by just running
`src/ci/docker/run.sh image-name`, which is awesome to debug failures. Note
that there are only linux docker images available locally due to licensing and
`python3 src/ci/github-actions/ci.py run-local <job-name>`, which is awesome to debug failures. Note that there are only linux docker images available locally due to licensing and
other restrictions.

The docker images prefixed with `dist-` are used for building artifacts while

@ -413,7 +412,7 @@ To learn more about the dashboard, see the [Datadog CI docs].
[GitHub Actions]: https://github.com/rust-lang/rust/actions
[`jobs.yml`]: https://github.com/rust-lang/rust/blob/master/src/ci/github-actions/jobs.yml
[`.github/workflows/ci.yml`]: https://github.com/rust-lang/rust/blob/master/.github/workflows/ci.yml
[`calculate-job-matrix.py`]: https://github.com/rust-lang/rust/blob/master/src/ci/github-actions/calculate-job-matrix.py
[`ci.py`]: https://github.com/rust-lang/rust/blob/master/src/ci/github-actions/ci.py
[rust-lang-ci]: https://github.com/rust-lang-ci/rust/actions
[bors]: https://github.com/bors
[homu]: https://github.com/rust-lang/homu

@ -45,6 +45,15 @@ Some additional notes about using the Docker images:
containers. With the container name, run `docker exec -it <CONTAINER>
/bin/bash` where `<CONTAINER>` is the container name like `4ba195e95cef`.

The approach described above is a relatively low-level interface for running the Docker images
directly. If you want to run a full CI Linux job locally with Docker, in a way that is as close to CI as possible, you can use the following command:

```bash
python3 src/ci/github-actions/ci.py run-local <job-name>
# For example:
python3 src/ci/github-actions/ci.py run-local dist-x86_64-linux-alt
```

[Docker]: https://www.docker.com/
[`src/ci/docker`]: https://github.com/rust-lang/rust/tree/master/src/ci/docker
[`src/ci/docker/run.sh`]: https://github.com/rust-lang/rust/blob/master/src/ci/docker/run.sh
tests/coverage/generic-unused-impl.cov-map (new file, 18 lines)
@ -0,0 +1,18 @@
|
||||
Function name: <generic_unused_impl::W<_> as core::convert::From<[<_ as generic_unused_impl::Foo>::Assoc; 1]>>::from (unused)
|
||||
Raw bytes (9): 0x[01, 01, 00, 01, 00, 0b, 05, 03, 06]
|
||||
Number of files: 1
|
||||
- file 0 => global file 1
|
||||
Number of expressions: 0
|
||||
Number of file 0 mappings: 1
|
||||
- Code(Zero) at (prev + 11, 5) to (start + 3, 6)
|
||||
Highest counter ID seen: (none)
|
||||
|
||||
Function name: generic_unused_impl::main
|
||||
Raw bytes (9): 0x[01, 01, 00, 01, 01, 11, 01, 00, 0d]
|
||||
Number of files: 1
|
||||
- file 0 => global file 1
|
||||
Number of expressions: 0
|
||||
Number of file 0 mappings: 1
|
||||
- Code(Counter(0)) at (prev + 17, 1) to (start + 0, 13)
|
||||
Highest counter ID seen: c0
|
||||
|
tests/coverage/generic-unused-impl.coverage (new file, 18 lines)
@ -0,0 +1,18 @@
|
||||
LL| |// Regression test for #135235.
|
||||
LL| |trait Foo {
|
||||
LL| | type Assoc;
|
||||
LL| |
|
||||
LL| | fn from(s: Self::Assoc) -> Self;
|
||||
LL| |}
|
||||
LL| |
|
||||
LL| |struct W<T>(T);
|
||||
LL| |
|
||||
LL| |impl<T: Foo> From<[T::Assoc; 1]> for W<T> {
|
||||
LL| 0| fn from(from: [T::Assoc; 1]) -> Self {
|
||||
LL| 0| let [item] = from;
|
||||
LL| 0| W(Foo::from(item))
|
||||
LL| 0| }
|
||||
LL| |}
|
||||
LL| |
|
||||
LL| 1|fn main() {}
|
||||
|
tests/coverage/generic-unused-impl.rs (new file, 17 lines)
@ -0,0 +1,17 @@
|
||||
// Regression test for #135235.
|
||||
trait Foo {
|
||||
type Assoc;
|
||||
|
||||
fn from(s: Self::Assoc) -> Self;
|
||||
}
|
||||
|
||||
struct W<T>(T);
|
||||
|
||||
impl<T: Foo> From<[T::Assoc; 1]> for W<T> {
|
||||
fn from(from: [T::Assoc; 1]) -> Self {
|
||||
let [item] = from;
|
||||
W(Foo::from(item))
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {}
|
@ -1,6 +0,0 @@
//@ known-bug: #114317
#![feature(generic_const_exprs)]

struct A<const B: str = 1, C>;

fn main() {}

@ -1,10 +0,0 @@
//@ known-bug: rust-lang/rust#126182

#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

struct Cond<const B: bool>;

struct Thing<T = Cond<0>>(T);

impl Thing {}
@ -0,0 +1,22 @@
//! ICE regression test for #114317 and #126182
//! Type mismatches of literals cause errors in typeck,
//! but those errors cannot be propagated to the various
//! `lit_to_const` call sites. Now `lit_to_const` just delays
//! a bug and produces an error constant on its own.

#![feature(adt_const_params)]
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

struct A<const B: () = 1, C>(C);
//~^ ERROR: generic parameters with a default must be trailing
//~| ERROR: mismatched types

struct Cond<const B: bool>;

struct Thing<T = Cond<0>>(T);
//~^ ERROR: mismatched types

impl Thing {}

fn main() {}
@ -0,0 +1,21 @@
|
||||
error: generic parameters with a default must be trailing
|
||||
--> $DIR/lit_type_mismatch.rs:11:16
|
||||
|
|
||||
LL | struct A<const B: () = 1, C>(C);
|
||||
| ^
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/lit_type_mismatch.rs:11:24
|
||||
|
|
||||
LL | struct A<const B: () = 1, C>(C);
|
||||
| ^ expected `()`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/lit_type_mismatch.rs:17:23
|
||||
|
|
||||
LL | struct Thing<T = Cond<0>>(T);
|
||||
| ^ expected `bool`, found integer
|
||||
|
||||
error: aborting due to 3 previous errors
|
||||
|
||||
For more information about this error, try `rustc --explain E0308`.
|
@ -1,27 +1,3 @@
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:29:21
|
||||
|
|
||||
LL | get_flag::<false, 0xFF>();
|
||||
| ^^^^ expected `char`, found `u8`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:31:14
|
||||
|
|
||||
LL | get_flag::<7, 'c'>();
|
||||
| ^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:14
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:18
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^^^^ expected `char`, found `u8`
|
||||
|
||||
error[E0080]: evaluation of constant value failed
|
||||
--> $DIR/invalid-patterns.rs:38:32
|
||||
|
|
||||
@ -56,6 +32,30 @@ error[E0080]: evaluation of constant value failed
|
||||
LL | get_flag::<{ unsafe { bool_raw.boolean } }, { unsafe { char_raw.character } }>();
|
||||
| ^^^^^^^^^^^^^^^^^^ using uninitialized data, but this operation requires initialized memory
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:29:21
|
||||
|
|
||||
LL | get_flag::<false, 0xFF>();
|
||||
| ^^^^ expected `char`, found `u8`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:31:14
|
||||
|
|
||||
LL | get_flag::<7, 'c'>();
|
||||
| ^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:14
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:18
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^^^^ expected `char`, found `u8`
|
||||
|
||||
error: aborting due to 8 previous errors
|
||||
|
||||
Some errors have detailed explanations: E0080, E0308.
|
||||
|
@ -1,27 +1,3 @@
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:29:21
|
||||
|
|
||||
LL | get_flag::<false, 0xFF>();
|
||||
| ^^^^ expected `char`, found `u8`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:31:14
|
||||
|
|
||||
LL | get_flag::<7, 'c'>();
|
||||
| ^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:14
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:18
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^^^^ expected `char`, found `u8`
|
||||
|
||||
error[E0080]: evaluation of constant value failed
|
||||
--> $DIR/invalid-patterns.rs:38:32
|
||||
|
|
||||
@ -56,6 +32,30 @@ error[E0080]: evaluation of constant value failed
|
||||
LL | get_flag::<{ unsafe { bool_raw.boolean } }, { unsafe { char_raw.character } }>();
|
||||
| ^^^^^^^^^^^^^^^^^^ using uninitialized data, but this operation requires initialized memory
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:29:21
|
||||
|
|
||||
LL | get_flag::<false, 0xFF>();
|
||||
| ^^^^ expected `char`, found `u8`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:31:14
|
||||
|
|
||||
LL | get_flag::<7, 'c'>();
|
||||
| ^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:14
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^ expected `bool`, found integer
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/invalid-patterns.rs:33:18
|
||||
|
|
||||
LL | get_flag::<42, 0x5ad>();
|
||||
| ^^^^^ expected `char`, found `u8`
|
||||
|
||||
error: aborting due to 8 previous errors
|
||||
|
||||
Some errors have detailed explanations: E0080, E0308.
|
||||
|
@ -15,6 +15,12 @@ error[E0308]: mismatched types
|
||||
LL | let b = [0; ()];
|
||||
| ^^ expected `usize`, found `()`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/repeat_count.rs:31:17
|
||||
|
|
||||
LL | let g = [0; G { g: () }];
|
||||
| ^^^^^^^^^^^ expected `usize`, found `G`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/repeat_count.rs:10:17
|
||||
|
|
||||
@ -33,12 +39,6 @@ error[E0308]: mismatched types
|
||||
LL | let e = [0; "foo"];
|
||||
| ^^^^^ expected `usize`, found `&str`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/repeat_count.rs:31:17
|
||||
|
|
||||
LL | let g = [0; G { g: () }];
|
||||
| ^^^^^^^^^^^ expected `usize`, found `G`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/repeat_count.rs:19:17
|
||||
|
|
||||
|