Auto merge of #133940 - GuillaumeGomez:rollup-nm1cz5j, r=GuillaumeGomez

Rollup of 8 pull requests

Successful merges:

 - #132155 (Always display first line of impl blocks even when collapsed)
 - #133256 (CI: use free runners for i686-gnu jobs)
 - #133607 (implement checks for tail calls)
 - #133821 (Replace black with ruff in `tidy`)
 - #133827 (CI: rfl: move job forward to Linux v6.13-rc1)
 - #133910 (Normalize target-cpus.rs stdout test for LLVM changes)
 - #133921 (Adapt codegen tests for NUW inference)
 - #133936 (Avoid fetching the anon const hir node that is already available)

r? `@ghost`
`@rustbot` modify labels: rollup

commit 706141b8d9
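Of these, #133607 is the most substantial: it adds a THIR-level well-formedness pass for the unstable `become` tail-call syntax (feature `explicit_tail_calls`). As a hedged illustration of what the new pass checks, here is a minimal self-tail-call that satisfies its rules (caller and callee must have identical signatures and ABIs); the function is illustrative, not taken from the PR's tests:

#![feature(explicit_tail_calls)]
#![allow(incomplete_features)]

// `become` replaces `return` and requires the callee's signature and ABI to
// match the caller's exactly, so the callee can reuse the caller's stack frame.
fn factorial(n: u64, acc: u64) -> u64 {
    if n == 0 {
        return acc;
    }
    become factorial(n - 1, acc * n)
}

fn main() {
    assert_eq!(factorial(5, 1), 120);
}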
@@ -29,10 +29,9 @@ use rustc_errors::codes::*;
 use rustc_errors::{
     Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, FatalError, struct_span_code_err,
 };
-use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, CtorOf, DefKind, Namespace, Res};
 use rustc_hir::def_id::{DefId, LocalDefId};
-use rustc_hir::{GenericArg, GenericArgs, HirId};
+use rustc_hir::{self as hir, AnonConst, GenericArg, GenericArgs, HirId};
 use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
 use rustc_infer::traits::ObligationCause;
 use rustc_middle::middle::stability::AllowUnstable;
@@ -2089,7 +2088,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
                 qpath.span(),
                 format!("Const::lower_const_arg: invalid qpath {qpath:?}"),
             ),
-            hir::ConstArgKind::Anon(anon) => self.lower_anon_const(anon.def_id),
+            hir::ConstArgKind::Anon(anon) => self.lower_anon_const(anon),
             hir::ConstArgKind::Infer(span) => self.ct_infer(None, span),
         }
     }
@@ -2180,27 +2179,22 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
     /// Literals and const generic parameters are eagerly converted to a constant, everything else
     /// becomes `Unevaluated`.
     #[instrument(skip(self), level = "debug")]
-    fn lower_anon_const(&self, def: LocalDefId) -> Const<'tcx> {
+    fn lower_anon_const(&self, anon: &AnonConst) -> Const<'tcx> {
         let tcx = self.tcx();

-        let body_id = match tcx.hir_node_by_def_id(def) {
-            hir::Node::AnonConst(ac) => ac.body,
-            node => span_bug!(
-                tcx.def_span(def.to_def_id()),
-                "from_anon_const can only process anonymous constants, not {node:?}"
-            ),
-        };
-
-        let expr = &tcx.hir().body(body_id).value;
+        let expr = &tcx.hir().body(anon.body).value;
         debug!(?expr);

-        let ty = tcx.type_of(def).no_bound_vars().expect("const parameter types cannot be generic");
+        let ty = tcx
+            .type_of(anon.def_id)
+            .no_bound_vars()
+            .expect("const parameter types cannot be generic");

         match self.try_lower_anon_const_lit(ty, expr) {
             Some(v) => v,
             None => ty::Const::new_unevaluated(tcx, ty::UnevaluatedConst {
-                def: def.to_def_id(),
-                args: ty::GenericArgs::identity_for_item(tcx, def.to_def_id()),
+                def: anon.def_id.to_def_id(),
+                args: ty::GenericArgs::identity_for_item(tcx, anon.def_id.to_def_id()),
             }),
         }
     }
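The two hunks above (#133936) change `lower_anon_const` to take the `&AnonConst` node the caller has already matched out of `ConstArgKind::Anon`, instead of re-fetching it from the HIR map by `LocalDefId`; this also deletes the `span_bug!` arm guarding a lookup miss the caller had already ruled out. A minimal sketch of that refactoring pattern, using hypothetical types rather than rustc's:

use std::collections::HashMap;

// Hypothetical stand-ins, not rustc types: the point is only the call shape.
struct Node {
    body: u32,
}

// Before: the callee re-fetches the node by id and must defend against a
// miss that the caller has already ruled out.
fn lower_by_id(map: &HashMap<u32, Node>, id: u32) -> u32 {
    map.get(&id).expect("caller already proved this exists").body
}

// After: the caller simply passes the node it is already holding.
fn lower_by_node(node: &Node) -> u32 {
    node.body
}

fn main() {
    let mut map = HashMap::new();
    map.insert(7, Node { body: 42 });
    assert_eq!(lower_by_id(&map, 7), 42);
    assert_eq!(lower_by_node(&map[&7]), 42);
}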
@@ -916,6 +916,12 @@ rustc_queries! {
         cache_on_disk_if { true }
     }

+    /// Checks well-formedness of tail calls (`become f()`).
+    query check_tail_calls(key: LocalDefId) -> Result<(), rustc_errors::ErrorGuaranteed> {
+        desc { |tcx| "tail-call-checking `{}`", tcx.def_path_str(key) }
+        cache_on_disk_if { true }
+    }
+
     /// Returns the types assumed to be well formed while "inside" of the given item.
     ///
     /// Note that we've liberated the late bound regions of function signatures, so
@@ -50,6 +50,10 @@ pub(crate) fn mir_build<'tcx>(tcx: TyCtxtAt<'tcx>, def: LocalDefId) -> Body<'tcx
         return construct_error(tcx, def, e);
     }

+    if let Err(err) = tcx.check_tail_calls(def) {
+        return construct_error(tcx, def, err);
+    }
+
     let body = match tcx.thir_body(def) {
         Err(error_reported) => construct_error(tcx, def, error_reported),
         Ok((thir, expr)) => {
compiler/rustc_mir_build/src/check_tail_calls.rs (new file, 386 lines)
@@ -0,0 +1,386 @@
use rustc_abi::ExternAbi;
use rustc_errors::Applicability;
use rustc_hir::LangItem;
use rustc_hir::def::DefKind;
use rustc_middle::span_bug;
use rustc_middle::thir::visit::{self, Visitor};
use rustc_middle::thir::{BodyTy, Expr, ExprId, ExprKind, Thir};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::def_id::{DefId, LocalDefId};
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span};

pub(crate) fn check_tail_calls(tcx: TyCtxt<'_>, def: LocalDefId) -> Result<(), ErrorGuaranteed> {
    let (thir, expr) = tcx.thir_body(def)?;
    let thir = &thir.borrow();

    // If `thir` is empty, a type error occurred, skip this body.
    if thir.exprs.is_empty() {
        return Ok(());
    }

    let is_closure = matches!(tcx.def_kind(def), DefKind::Closure);
    let caller_ty = tcx.type_of(def).skip_binder();

    let mut visitor = TailCallCkVisitor {
        tcx,
        thir,
        found_errors: Ok(()),
        // FIXME(#132279): we're clearly in a body here.
        typing_env: ty::TypingEnv::non_body_analysis(tcx, def),
        is_closure,
        caller_ty,
    };

    visitor.visit_expr(&thir[expr]);

    visitor.found_errors
}

struct TailCallCkVisitor<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    thir: &'a Thir<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    /// Whether the currently checked body is that of a closure
    is_closure: bool,
    /// The result of the checks, `Err(_)` if there was a problem with some
    /// tail call, `Ok(())` if all of them were fine.
    found_errors: Result<(), ErrorGuaranteed>,
    /// Type of the caller function.
    caller_ty: Ty<'tcx>,
}

impl<'tcx> TailCallCkVisitor<'_, 'tcx> {
    fn check_tail_call(&mut self, call: &Expr<'_>, expr: &Expr<'_>) {
        if self.is_closure {
            self.report_in_closure(expr);
            return;
        }

        let BodyTy::Fn(caller_sig) = self.thir.body_type else {
            span_bug!(
                call.span,
                "`become` outside of functions should have been disallowed by hir typeck"
            )
        };

        let ExprKind::Scope { value, .. } = call.kind else {
            span_bug!(call.span, "expected scope, found: {call:?}")
        };
        let value = &self.thir[value];

        if matches!(
            value.kind,
            ExprKind::Binary { .. }
                | ExprKind::Unary { .. }
                | ExprKind::AssignOp { .. }
                | ExprKind::Index { .. }
        ) {
            self.report_builtin_op(call, expr);
            return;
        }

        let ExprKind::Call { ty, fun, ref args, from_hir_call, fn_span } = value.kind else {
            self.report_non_call(value, expr);
            return;
        };

        if !from_hir_call {
            self.report_op(ty, args, fn_span, expr);
        }

        // Closures in thir look something akin to
        // `for<'a> extern "rust-call" fn(&'a [closure@...], ()) -> <[closure@...] as FnOnce<()>>::Output {<[closure@...] as Fn<()>>::call}`
        // So we have to check for them in this weird way...
        if let &ty::FnDef(did, args) = ty.kind() {
            let parent = self.tcx.parent(did);
            if self.tcx.fn_trait_kind_from_def_id(parent).is_some()
                && args.first().and_then(|arg| arg.as_type()).is_some_and(Ty::is_closure)
            {
                self.report_calling_closure(&self.thir[fun], args[1].as_type().unwrap(), expr);

                // Tail calling is likely to cause unrelated errors (ABI, argument mismatches);
                // skip them, producing an error about calling a closure is enough.
                return;
            };
        }

        // Erase regions since tail calls don't care about lifetimes
        let callee_sig =
            self.tcx.normalize_erasing_late_bound_regions(self.typing_env, ty.fn_sig(self.tcx));

        if caller_sig.abi != callee_sig.abi {
            self.report_abi_mismatch(expr.span, caller_sig.abi, callee_sig.abi);
        }

        if caller_sig.inputs_and_output != callee_sig.inputs_and_output {
            if caller_sig.inputs() != callee_sig.inputs() {
                self.report_arguments_mismatch(expr.span, caller_sig, callee_sig);
            }

            // FIXME(explicit_tail_calls): this currently fails for cases where opaques are used.
            // e.g.
            // ```
            // fn a() -> impl Sized { become b() } // ICE
            // fn b() -> u8 { 0 }
            // ```
            // we should think what is the expected behavior here.
            // (we should probably just accept this by revealing opaques?)
            if caller_sig.output() != callee_sig.output() {
                span_bug!(expr.span, "hir typeck should have checked the return type already");
            }
        }

        {
            let caller_needs_location = self.needs_location(self.caller_ty);
            let callee_needs_location = self.needs_location(ty);

            if caller_needs_location != callee_needs_location {
                self.report_track_caller_mismatch(expr.span, caller_needs_location);
            }
        }

        if caller_sig.c_variadic {
            self.report_c_variadic_caller(expr.span);
        }

        if callee_sig.c_variadic {
            self.report_c_variadic_callee(expr.span);
        }
    }

    /// Returns true if function of type `ty` needs location argument
    /// (i.e. if a function is marked as `#[track_caller]`)
    fn needs_location(&self, ty: Ty<'tcx>) -> bool {
        if let &ty::FnDef(did, substs) = ty.kind() {
            let instance =
                ty::Instance::expect_resolve(self.tcx, self.typing_env, did, substs, DUMMY_SP);

            instance.def.requires_caller_location(self.tcx)
        } else {
            false
        }
    }

    fn report_in_closure(&mut self, expr: &Expr<'_>) {
        let err = self.tcx.dcx().span_err(expr.span, "`become` is not allowed in closures");
        self.found_errors = Err(err);
    }

    fn report_builtin_op(&mut self, value: &Expr<'_>, expr: &Expr<'_>) {
        let err = self
            .tcx
            .dcx()
            .struct_span_err(value.span, "`become` does not support operators")
            .with_note("using `become` on a builtin operator is not useful")
            .with_span_suggestion(
                value.span.until(expr.span),
                "try using `return` instead",
                "return ",
                Applicability::MachineApplicable,
            )
            .emit();
        self.found_errors = Err(err);
    }

    fn report_op(&mut self, fun_ty: Ty<'_>, args: &[ExprId], fn_span: Span, expr: &Expr<'_>) {
        let mut err =
            self.tcx.dcx().struct_span_err(fn_span, "`become` does not support operators");

        if let &ty::FnDef(did, _substs) = fun_ty.kind()
            && let parent = self.tcx.parent(did)
            && matches!(self.tcx.def_kind(parent), DefKind::Trait)
            && let Some(method) = op_trait_as_method_name(self.tcx, parent)
        {
            match args {
                &[arg] => {
                    let arg = &self.thir[arg];

                    err.multipart_suggestion(
                        "try using the method directly",
                        vec![
                            (fn_span.shrink_to_lo().until(arg.span), "(".to_owned()),
                            (arg.span.shrink_to_hi(), format!(").{method}()")),
                        ],
                        Applicability::MaybeIncorrect,
                    );
                }
                &[lhs, rhs] => {
                    let lhs = &self.thir[lhs];
                    let rhs = &self.thir[rhs];

                    err.multipart_suggestion(
                        "try using the method directly",
                        vec![
                            (lhs.span.shrink_to_lo(), format!("(")),
                            (lhs.span.between(rhs.span), format!(").{method}(")),
                            (rhs.span.between(expr.span.shrink_to_hi()), ")".to_owned()),
                        ],
                        Applicability::MaybeIncorrect,
                    );
                }
                _ => span_bug!(expr.span, "operator with more than 2 args? {args:?}"),
            }
        }

        self.found_errors = Err(err.emit());
    }

    fn report_non_call(&mut self, value: &Expr<'_>, expr: &Expr<'_>) {
        let err = self
            .tcx
            .dcx()
            .struct_span_err(value.span, "`become` requires a function call")
            .with_span_note(value.span, "not a function call")
            .with_span_suggestion(
                value.span.until(expr.span),
                "try using `return` instead",
                "return ",
                Applicability::MaybeIncorrect,
            )
            .emit();
        self.found_errors = Err(err);
    }

    fn report_calling_closure(&mut self, fun: &Expr<'_>, tupled_args: Ty<'_>, expr: &Expr<'_>) {
        let underscored_args = match tupled_args.kind() {
            ty::Tuple(tys) if tys.is_empty() => "".to_owned(),
            ty::Tuple(tys) => std::iter::repeat("_, ").take(tys.len() - 1).chain(["_"]).collect(),
            _ => "_".to_owned(),
        };

        let err = self
            .tcx
            .dcx()
            .struct_span_err(expr.span, "tail calling closures directly is not allowed")
            .with_multipart_suggestion(
                "try casting the closure to a function pointer type",
                vec![
                    (fun.span.shrink_to_lo(), "(".to_owned()),
                    (fun.span.shrink_to_hi(), format!(" as fn({underscored_args}) -> _)")),
                ],
                Applicability::MaybeIncorrect,
            )
            .emit();
        self.found_errors = Err(err);
    }

    fn report_abi_mismatch(&mut self, sp: Span, caller_abi: ExternAbi, callee_abi: ExternAbi) {
        let err = self
            .tcx
            .dcx()
            .struct_span_err(sp, "mismatched function ABIs")
            .with_note("`become` requires caller and callee to have the same ABI")
            .with_note(format!("caller ABI is `{caller_abi}`, while callee ABI is `{callee_abi}`"))
            .emit();
        self.found_errors = Err(err);
    }

    fn report_arguments_mismatch(
        &mut self,
        sp: Span,
        caller_sig: ty::FnSig<'_>,
        callee_sig: ty::FnSig<'_>,
    ) {
        let err = self
            .tcx
            .dcx()
            .struct_span_err(sp, "mismatched signatures")
            .with_note("`become` requires caller and callee to have matching signatures")
            .with_note(format!("caller signature: `{caller_sig}`"))
            .with_note(format!("callee signature: `{callee_sig}`"))
            .emit();
        self.found_errors = Err(err);
    }

    fn report_track_caller_mismatch(&mut self, sp: Span, caller_needs_location: bool) {
        let err = match caller_needs_location {
            true => self
                .tcx
                .dcx()
                .struct_span_err(
                    sp,
                    "a function marked with `#[track_caller]` cannot tail-call one that is not",
                )
                .emit(),
            false => self
                .tcx
                .dcx()
                .struct_span_err(
                    sp,
                    "a function not marked with `#[track_caller]` cannot tail-call one that is",
                )
                .emit(),
        };

        self.found_errors = Err(err);
    }

    fn report_c_variadic_caller(&mut self, sp: Span) {
        let err = self
            .tcx
            .dcx()
            // FIXME(explicit_tail_calls): highlight the `...`
            .struct_span_err(sp, "tail-calls are not allowed in c-variadic functions")
            .emit();

        self.found_errors = Err(err);
    }

    fn report_c_variadic_callee(&mut self, sp: Span) {
        let err = self
            .tcx
            .dcx()
            // FIXME(explicit_tail_calls): highlight the function or something...
            .struct_span_err(sp, "c-variadic functions can't be tail-called")
            .emit();

        self.found_errors = Err(err);
    }
}

impl<'a, 'tcx> Visitor<'a, 'tcx> for TailCallCkVisitor<'a, 'tcx> {
    fn thir(&self) -> &'a Thir<'tcx> {
        &self.thir
    }

    fn visit_expr(&mut self, expr: &'a Expr<'tcx>) {
        if let ExprKind::Become { value } = expr.kind {
            let call = &self.thir[value];
            self.check_tail_call(call, expr);
        }

        visit::walk_expr(self, expr);
    }
}

fn op_trait_as_method_name(tcx: TyCtxt<'_>, trait_did: DefId) -> Option<&'static str> {
    let m = match tcx.as_lang_item(trait_did)? {
        LangItem::Add => "add",
        LangItem::Sub => "sub",
        LangItem::Mul => "mul",
        LangItem::Div => "div",
        LangItem::Rem => "rem",
        LangItem::Neg => "neg",
        LangItem::Not => "not",
        LangItem::BitXor => "bitxor",
        LangItem::BitAnd => "bitand",
        LangItem::BitOr => "bitor",
        LangItem::Shl => "shl",
        LangItem::Shr => "shr",
        LangItem::AddAssign => "add_assign",
        LangItem::SubAssign => "sub_assign",
        LangItem::MulAssign => "mul_assign",
        LangItem::DivAssign => "div_assign",
        LangItem::RemAssign => "rem_assign",
        LangItem::BitXorAssign => "bitxor_assign",
        LangItem::BitAndAssign => "bitand_assign",
        LangItem::BitOrAssign => "bitor_assign",
        LangItem::ShlAssign => "shl_assign",
        LangItem::ShrAssign => "shr_assign",
        LangItem::Index => "index",
        LangItem::IndexMut => "index_mut",
        _ => return None,
    };

    Some(m)
}
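To make the new pass concrete, here is an illustrative snippet (assumed behavior inferred from the checks and messages above, not a test from the PR) showing one tail call it accepts and one it rejects:

#![feature(explicit_tail_calls)]
#![allow(incomplete_features)]

// Accepted: caller and callee are both `fn(i32) -> i32` with the Rust ABI.
// Tail calling the closure itself would be rejected ("tail calling closures
// directly is not allowed"); the cast the pass suggests turns it into a plain
// function pointer whose signature can be checked.
fn call(x: i32) -> i32 {
    let g = |y: i32| y + 1;
    become (g as fn(i32) -> i32)(x)
}

extern "C" fn callee(x: i32) -> i32 {
    x
}

fn abi_mismatch(x: i32) -> i32 {
    // Rejected with "mismatched function ABIs": the caller is `extern "Rust"`,
    // the callee `extern "C"`.
    // become callee(x)
    callee(x)
}

fn main() {
    assert_eq!(call(41), 42);
    assert_eq!(abi_mismatch(7), 7);
}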
@@ -12,6 +12,7 @@
 // tidy-alphabetical-end

 mod build;
+mod check_tail_calls;
 mod check_unsafety;
 mod errors;
 pub mod lints;
@@ -28,6 +29,7 @@ pub fn provide(providers: &mut Providers) {
     providers.closure_saved_names_of_captured_variables =
         build::closure_saved_names_of_captured_variables;
     providers.check_unsafety = check_unsafety::check_unsafety;
+    providers.check_tail_calls = check_tail_calls::check_tail_calls;
     providers.thir_body = thir::cx::thir_body;
     providers.hooks.thir_tree = thir::print::thir_tree;
     providers.hooks.thir_flat = thir::print::thir_flat;
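These two hunks complete the standard rustc query wiring: the query was declared in `rustc_middle` earlier in this diff, implemented in the new `check_tail_calls` module, and is registered here by storing a function pointer into the `Providers` table. A self-contained analogy of that pattern, with toy types standing in for rustc's generated ones:

// A minimal analogy of the provider wiring (illustrative only; rustc's real
// `Providers` struct is generated by the query system).
struct Providers {
    check_tail_calls: fn(u32) -> Result<(), String>,
}

// The crate-local implementation that will back the query.
fn check_tail_calls_impl(def: u32) -> Result<(), String> {
    if def == 0 { Err("bad body".to_owned()) } else { Ok(()) }
}

// Registration: assign the function pointer into the providers table.
fn provide(providers: &mut Providers) {
    providers.check_tail_calls = check_tail_calls_impl;
}

fn main() {
    let mut providers = Providers { check_tail_calls: |_| Ok(()) };
    provide(&mut providers);
    assert!((providers.check_tail_calls)(1).is_ok());
    assert!((providers.check_tail_calls)(0).is_err());
}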
@@ -9,7 +9,8 @@ import csv
 import os
 import subprocess

-NUM_CODEPOINTS=0x110000
+NUM_CODEPOINTS = 0x110000

+
 def to_ranges(iter):
     current = None
@@ -23,11 +24,15 @@ def to_ranges(iter):
     if current is not None:
         yield tuple(current)

+
 def get_escaped(codepoints):
     for c in codepoints:
-        if (c.class_ or "Cn") in "Cc Cf Cs Co Cn Zl Zp Zs".split() and c.value != ord(' '):
+        if (c.class_ or "Cn") in "Cc Cf Cs Co Cn Zl Zp Zs".split() and c.value != ord(
+            " "
+        ):
             yield c.value

+
 def get_file(f):
     try:
         return open(os.path.basename(f))
@@ -35,7 +40,9 @@ def get_file(f):
         subprocess.run(["curl", "-O", f], check=True)
         return open(os.path.basename(f))

-Codepoint = namedtuple('Codepoint', 'value class_')
+
+Codepoint = namedtuple("Codepoint", "value class_")
+

 def get_codepoints(f):
     r = csv.reader(f, delimiter=";")
@@ -66,13 +73,14 @@ def get_codepoints(f):
     for c in range(prev_codepoint + 1, NUM_CODEPOINTS):
         yield Codepoint(c, None)

+
 def compress_singletons(singletons):
-    uppers = [] # (upper, # items in lowers)
+    uppers = []  # (upper, # items in lowers)
     lowers = []

     for i in singletons:
         upper = i >> 8
-        lower = i & 0xff
+        lower = i & 0xFF
         if len(uppers) == 0 or uppers[-1][0] != upper:
             uppers.append((upper, 1))
         else:
@@ -82,10 +90,11 @@ def compress_singletons(singletons):

     return uppers, lowers

+
 def compress_normal(normal):
     # lengths 0x00..0x7f are encoded as 00, 01, ..., 7e, 7f
     # lengths 0x80..0x7fff are encoded as 80 80, 80 81, ..., ff fe, ff ff
-    compressed = [] # [truelen, (truelenaux), falselen, (falselenaux)]
+    compressed = []  # [truelen, (truelenaux), falselen, (falselenaux)]

     prev_start = 0
     for start, count in normal:
@@ -95,21 +104,22 @@ def compress_normal(normal):

         assert truelen < 0x8000 and falselen < 0x8000
         entry = []
-        if truelen > 0x7f:
+        if truelen > 0x7F:
             entry.append(0x80 | (truelen >> 8))
-            entry.append(truelen & 0xff)
+            entry.append(truelen & 0xFF)
         else:
-            entry.append(truelen & 0x7f)
-        if falselen > 0x7f:
+            entry.append(truelen & 0x7F)
+        if falselen > 0x7F:
             entry.append(0x80 | (falselen >> 8))
-            entry.append(falselen & 0xff)
+            entry.append(falselen & 0xFF)
         else:
-            entry.append(falselen & 0x7f)
+            entry.append(falselen & 0x7F)

         compressed.append(entry)

     return compressed

+
 def print_singletons(uppers, lowers, uppersname, lowersname):
     print("#[rustfmt::skip]")
     print("const {}: &[(u8, u8)] = &[".format(uppersname))
@@ -119,9 +129,12 @@ def print_singletons(uppers, lowers, uppersname, lowersname):
     print("#[rustfmt::skip]")
     print("const {}: &[u8] = &[".format(lowersname))
     for i in range(0, len(lowers), 8):
-        print("    {}".format(" ".join("{:#04x},".format(x) for x in lowers[i:i+8])))
+        print(
+            "    {}".format(" ".join("{:#04x},".format(x) for x in lowers[i : i + 8]))
+        )
     print("];")

+
 def print_normal(normal, normalname):
     print("#[rustfmt::skip]")
     print("const {}: &[u8] = &[".format(normalname))
@@ -129,12 +142,13 @@ def print_normal(normal, normalname):
         print("    {}".format(" ".join("{:#04x},".format(i) for i in v)))
     print("];")

+
 def main():
     file = get_file("https://www.unicode.org/Public/UNIDATA/UnicodeData.txt")

     codepoints = get_codepoints(file)

-    CUTOFF=0x10000
+    CUTOFF = 0x10000
     singletons0 = []
     singletons1 = []
     normal0 = []
@@ -234,10 +248,11 @@ pub(crate) fn is_printable(x: char) -> bool {
 }\
 """)
     print()
-    print_singletons(singletons0u, singletons0l, 'SINGLETONS0U', 'SINGLETONS0L')
-    print_singletons(singletons1u, singletons1l, 'SINGLETONS1U', 'SINGLETONS1L')
-    print_normal(normal0, 'NORMAL0')
-    print_normal(normal1, 'NORMAL1')
+    print_singletons(singletons0u, singletons0l, "SINGLETONS0U", "SINGLETONS0L")
+    print_singletons(singletons1u, singletons1l, "SINGLETONS1U", "SINGLETONS1L")
+    print_normal(normal0, "NORMAL0")
+    print_normal(normal1, "NORMAL1")

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
File diff suppressed because it is too large
@@ -16,8 +16,9 @@ from shutil import rmtree
 bootstrap_dir = os.path.dirname(os.path.abspath(__file__))
 # For the import below, have Python search in src/bootstrap first.
 sys.path.insert(0, bootstrap_dir)
-import bootstrap # noqa: E402
-import configure # noqa: E402
+
+import bootstrap  # noqa: E402
+import configure  # noqa: E402


 def serialize_and_parse(configure_args, bootstrap_args=None):
     from io import StringIO
@@ -32,15 +33,20 @@ def serialize_and_parse(configure_args, bootstrap_args=None):

     try:
         import tomllib

         # Verify this is actually valid TOML.
         tomllib.loads(build.config_toml)
     except ImportError:
-        print("WARNING: skipping TOML validation, need at least python 3.11", file=sys.stderr)
+        print(
+            "WARNING: skipping TOML validation, need at least python 3.11",
+            file=sys.stderr,
+        )
     return build


 class VerifyTestCase(unittest.TestCase):
     """Test Case for verify"""

     def setUp(self):
         self.container = tempfile.mkdtemp()
         self.src = os.path.join(self.container, "src.txt")
@@ -68,14 +74,14 @@ class VerifyTestCase(unittest.TestCase):

 class ProgramOutOfDate(unittest.TestCase):
     """Test if a program is out of date"""

     def setUp(self):
         self.container = tempfile.mkdtemp()
         os.mkdir(os.path.join(self.container, "stage0"))
         self.build = bootstrap.RustBuild()
         self.build.date = "2017-06-15"
         self.build.build_dir = self.container
-        self.rustc_stamp_path = os.path.join(self.container, "stage0",
-                                             ".rustc-stamp")
+        self.rustc_stamp_path = os.path.join(self.container, "stage0", ".rustc-stamp")
         self.key = self.build.date + str(None)

     def tearDown(self):
@@ -97,11 +103,14 @@ class ProgramOutOfDate(unittest.TestCase):
         """Return False both dates match"""
         with open(self.rustc_stamp_path, "w") as rustc_stamp:
             rustc_stamp.write("2017-06-15None")
-        self.assertFalse(self.build.program_out_of_date(self.rustc_stamp_path, self.key))
+        self.assertFalse(
+            self.build.program_out_of_date(self.rustc_stamp_path, self.key)
+        )


 class ParseArgsInConfigure(unittest.TestCase):
     """Test if `parse_args` function in `configure.py` works properly"""

     @patch("configure.err")
     def test_unknown_args(self, err):
         # It should be print an error message if the argument doesn't start with '--'
@@ -148,28 +157,35 @@

 class GenerateAndParseConfig(unittest.TestCase):
     """Test that we can serialize and deserialize a config.toml file"""

     def test_no_args(self):
         build = serialize_and_parse([])
-        self.assertEqual(build.get_toml("profile"), 'dist')
+        self.assertEqual(build.get_toml("profile"), "dist")
         self.assertIsNone(build.get_toml("llvm.download-ci-llvm"))

     def test_set_section(self):
         build = serialize_and_parse(["--set", "llvm.download-ci-llvm"])
-        self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), 'true')
+        self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), "true")

     def test_set_target(self):
         build = serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"])
-        self.assertEqual(build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), 'gcc')
+        self.assertEqual(
+            build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), "gcc"
+        )

     def test_set_top_level(self):
         build = serialize_and_parse(["--set", "profile=compiler"])
-        self.assertEqual(build.get_toml("profile"), 'compiler')
+        self.assertEqual(build.get_toml("profile"), "compiler")

     def test_set_codegen_backends(self):
         build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift"])
-        self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift']"), -1)
+        self.assertNotEqual(
+            build.config_toml.find("codegen-backends = ['cranelift']"), -1
+        )
         build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift,llvm"])
-        self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1)
+        self.assertNotEqual(
+            build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1
+        )
         build = serialize_and_parse(["--enable-full-tools"])
         self.assertNotEqual(build.config_toml.find("codegen-backends = ['llvm']"), -1)

@@ -223,7 +239,7 @@ class BuildBootstrap(unittest.TestCase):
         self.assertTrue("--timings" in args)

     def test_warnings(self):
-        for toml_warnings in ['false', 'true', None]:
+        for toml_warnings in ["false", "true", None]:
             configure_args = []
             if toml_warnings is not None:
                 configure_args = ["--set", "rust.deny-warnings=" + toml_warnings]
@@ -6,11 +6,12 @@ from __future__ import absolute_import, division, print_function
 import shlex
 import sys
 import os

 rust_dir = os.path.dirname(os.path.abspath(__file__))
 rust_dir = os.path.dirname(rust_dir)
 rust_dir = os.path.dirname(rust_dir)
 sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
-import bootstrap # noqa: E402
+
+import bootstrap  # noqa: E402


 class Option(object):
@@ -32,26 +33,62 @@ def v(*args):
     options.append(Option(*args, value=True))

+
-o("debug", "rust.debug", "enables debugging environment; does not affect optimization of bootstrapped code")
+o(
+    "debug",
+    "rust.debug",
+    "enables debugging environment; does not affect optimization of bootstrapped code",
+)
 o("docs", "build.docs", "build standard library documentation")
 o("compiler-docs", "build.compiler-docs", "build compiler documentation")
 o("optimize-tests", "rust.optimize-tests", "build tests with optimizations")
 o("verbose-tests", "rust.verbose-tests", "enable verbose output when running tests")
-o("ccache", "llvm.ccache", "invoke gcc/clang via ccache to reuse object files between builds")
+o(
+    "ccache",
+    "llvm.ccache",
+    "invoke gcc/clang via ccache to reuse object files between builds",
+)
 o("sccache", None, "invoke gcc/clang via sccache to reuse object files between builds")
 o("local-rust", None, "use an installed rustc rather than downloading a snapshot")
 v("local-rust-root", None, "set prefix for local rust binary")
-o("local-rebuild", "build.local-rebuild", "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version")
-o("llvm-static-stdcpp", "llvm.static-libstdcpp", "statically link to libstdc++ for LLVM")
-o("llvm-link-shared", "llvm.link-shared", "prefer shared linking to LLVM (llvm-config --link-shared)")
+o(
+    "local-rebuild",
+    "build.local-rebuild",
+    "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version",
+)
+o(
+    "llvm-static-stdcpp",
+    "llvm.static-libstdcpp",
+    "statically link to libstdc++ for LLVM",
+)
+o(
+    "llvm-link-shared",
+    "llvm.link-shared",
+    "prefer shared linking to LLVM (llvm-config --link-shared)",
+)
 o("rpath", "rust.rpath", "build rpaths into rustc itself")
 o("codegen-tests", "rust.codegen-tests", "run the tests/codegen tests")
-o("ninja", "llvm.ninja", "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)")
+o(
+    "ninja",
+    "llvm.ninja",
+    "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)",
+)
 o("locked-deps", "build.locked-deps", "force Cargo.lock to be up to date")
 o("vendor", "build.vendor", "enable usage of vendored Rust crates")
-o("sanitizers", "build.sanitizers", "build the sanitizer runtimes (asan, dfsan, lsan, msan, tsan, hwasan)")
-o("dist-src", "rust.dist-src", "when building tarballs enables building a source tarball")
-o("cargo-native-static", "build.cargo-native-static", "static native libraries in cargo")
+o(
+    "sanitizers",
+    "build.sanitizers",
+    "build the sanitizer runtimes (asan, dfsan, lsan, msan, tsan, hwasan)",
+)
+o(
+    "dist-src",
+    "rust.dist-src",
+    "when building tarballs enables building a source tarball",
+)
+o(
+    "cargo-native-static",
+    "build.cargo-native-static",
+    "static native libraries in cargo",
+)
 o("profiler", "build.profiler", "build the profiler runtime")
 o("full-tools", None, "enable all tools")
 o("lld", "rust.lld", "build lld")
@@ -59,7 +96,11 @@ o("llvm-bitcode-linker", "rust.llvm-bitcode-linker", "build llvm bitcode linker"
 o("clang", "llvm.clang", "build clang")
 o("use-libcxx", "llvm.use-libcxx", "build LLVM with libc++")
 o("control-flow-guard", "rust.control-flow-guard", "Enable Control Flow Guard")
-o("patch-binaries-for-nix", "build.patch-binaries-for-nix", "whether patch binaries for usage with Nix toolchains")
+o(
+    "patch-binaries-for-nix",
+    "build.patch-binaries-for-nix",
+    "whether patch binaries for usage with Nix toolchains",
+)
 o("new-symbol-mangling", "rust.new-symbol-mangling", "use symbol-mangling-version v0")

 v("llvm-cflags", "llvm.cflags", "build LLVM with these extra compiler flags")
@@ -76,16 +117,48 @@ o("llvm-enzyme", "llvm.enzyme", "build LLVM with enzyme")
 o("llvm-offload", "llvm.offload", "build LLVM with gpu offload support")
 o("llvm-plugins", "llvm.plugins", "build LLVM with plugin interface")
 o("debug-assertions", "rust.debug-assertions", "build with debugging assertions")
-o("debug-assertions-std", "rust.debug-assertions-std", "build the standard library with debugging assertions")
+o(
+    "debug-assertions-std",
+    "rust.debug-assertions-std",
+    "build the standard library with debugging assertions",
+)
 o("overflow-checks", "rust.overflow-checks", "build with overflow checks")
-o("overflow-checks-std", "rust.overflow-checks-std", "build the standard library with overflow checks")
-o("llvm-release-debuginfo", "llvm.release-debuginfo", "build LLVM with debugger metadata")
+o(
+    "overflow-checks-std",
+    "rust.overflow-checks-std",
+    "build the standard library with overflow checks",
+)
+o(
+    "llvm-release-debuginfo",
+    "llvm.release-debuginfo",
+    "build LLVM with debugger metadata",
+)
 v("debuginfo-level", "rust.debuginfo-level", "debuginfo level for Rust code")
-v("debuginfo-level-rustc", "rust.debuginfo-level-rustc", "debuginfo level for the compiler")
-v("debuginfo-level-std", "rust.debuginfo-level-std", "debuginfo level for the standard library")
-v("debuginfo-level-tools", "rust.debuginfo-level-tools", "debuginfo level for the tools")
-v("debuginfo-level-tests", "rust.debuginfo-level-tests", "debuginfo level for the test suites run with compiletest")
-v("save-toolstates", "rust.save-toolstates", "save build and test status of external tools into this file")
+v(
+    "debuginfo-level-rustc",
+    "rust.debuginfo-level-rustc",
+    "debuginfo level for the compiler",
+)
+v(
+    "debuginfo-level-std",
+    "rust.debuginfo-level-std",
+    "debuginfo level for the standard library",
+)
+v(
+    "debuginfo-level-tools",
+    "rust.debuginfo-level-tools",
+    "debuginfo level for the tools",
+)
+v(
+    "debuginfo-level-tests",
+    "rust.debuginfo-level-tests",
+    "debuginfo level for the test suites run with compiletest",
+)
+v(
+    "save-toolstates",
+    "rust.save-toolstates",
+    "save build and test status of external tools into this file",
+)

 v("prefix", "install.prefix", "set installation prefix")
 v("localstatedir", "install.localstatedir", "local state directory")
@@ -102,50 +175,117 @@ v("llvm-config", None, "set path to llvm-config")
 v("llvm-filecheck", None, "set path to LLVM's FileCheck utility")
 v("python", "build.python", "set path to python")
 v("android-ndk", "build.android-ndk", "set path to Android NDK")
-v("musl-root", "target.x86_64-unknown-linux-musl.musl-root",
-  "MUSL root installation directory (deprecated)")
-v("musl-root-x86_64", "target.x86_64-unknown-linux-musl.musl-root",
-  "x86_64-unknown-linux-musl install directory")
-v("musl-root-i586", "target.i586-unknown-linux-musl.musl-root",
-  "i586-unknown-linux-musl install directory")
-v("musl-root-i686", "target.i686-unknown-linux-musl.musl-root",
-  "i686-unknown-linux-musl install directory")
-v("musl-root-arm", "target.arm-unknown-linux-musleabi.musl-root",
-  "arm-unknown-linux-musleabi install directory")
-v("musl-root-armhf", "target.arm-unknown-linux-musleabihf.musl-root",
-  "arm-unknown-linux-musleabihf install directory")
-v("musl-root-armv5te", "target.armv5te-unknown-linux-musleabi.musl-root",
-  "armv5te-unknown-linux-musleabi install directory")
-v("musl-root-armv7", "target.armv7-unknown-linux-musleabi.musl-root",
-  "armv7-unknown-linux-musleabi install directory")
-v("musl-root-armv7hf", "target.armv7-unknown-linux-musleabihf.musl-root",
-  "armv7-unknown-linux-musleabihf install directory")
-v("musl-root-aarch64", "target.aarch64-unknown-linux-musl.musl-root",
-  "aarch64-unknown-linux-musl install directory")
-v("musl-root-mips", "target.mips-unknown-linux-musl.musl-root",
-  "mips-unknown-linux-musl install directory")
-v("musl-root-mipsel", "target.mipsel-unknown-linux-musl.musl-root",
-  "mipsel-unknown-linux-musl install directory")
-v("musl-root-mips64", "target.mips64-unknown-linux-muslabi64.musl-root",
-  "mips64-unknown-linux-muslabi64 install directory")
-v("musl-root-mips64el", "target.mips64el-unknown-linux-muslabi64.musl-root",
-  "mips64el-unknown-linux-muslabi64 install directory")
-v("musl-root-riscv32gc", "target.riscv32gc-unknown-linux-musl.musl-root",
-  "riscv32gc-unknown-linux-musl install directory")
-v("musl-root-riscv64gc", "target.riscv64gc-unknown-linux-musl.musl-root",
-  "riscv64gc-unknown-linux-musl install directory")
-v("musl-root-loongarch64", "target.loongarch64-unknown-linux-musl.musl-root",
-  "loongarch64-unknown-linux-musl install directory")
-v("qemu-armhf-rootfs", "target.arm-unknown-linux-gnueabihf.qemu-rootfs",
-  "rootfs in qemu testing, you probably don't want to use this")
-v("qemu-aarch64-rootfs", "target.aarch64-unknown-linux-gnu.qemu-rootfs",
-  "rootfs in qemu testing, you probably don't want to use this")
-v("qemu-riscv64-rootfs", "target.riscv64gc-unknown-linux-gnu.qemu-rootfs",
-  "rootfs in qemu testing, you probably don't want to use this")
-v("experimental-targets", "llvm.experimental-targets",
-  "experimental LLVM targets to build")
+v(
+    "musl-root",
+    "target.x86_64-unknown-linux-musl.musl-root",
+    "MUSL root installation directory (deprecated)",
+)
+v(
+    "musl-root-x86_64",
+    "target.x86_64-unknown-linux-musl.musl-root",
+    "x86_64-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-i586",
+    "target.i586-unknown-linux-musl.musl-root",
+    "i586-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-i686",
+    "target.i686-unknown-linux-musl.musl-root",
+    "i686-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-arm",
+    "target.arm-unknown-linux-musleabi.musl-root",
+    "arm-unknown-linux-musleabi install directory",
+)
+v(
+    "musl-root-armhf",
+    "target.arm-unknown-linux-musleabihf.musl-root",
+    "arm-unknown-linux-musleabihf install directory",
+)
+v(
+    "musl-root-armv5te",
+    "target.armv5te-unknown-linux-musleabi.musl-root",
+    "armv5te-unknown-linux-musleabi install directory",
+)
+v(
+    "musl-root-armv7",
+    "target.armv7-unknown-linux-musleabi.musl-root",
+    "armv7-unknown-linux-musleabi install directory",
+)
+v(
+    "musl-root-armv7hf",
+    "target.armv7-unknown-linux-musleabihf.musl-root",
+    "armv7-unknown-linux-musleabihf install directory",
+)
+v(
+    "musl-root-aarch64",
+    "target.aarch64-unknown-linux-musl.musl-root",
+    "aarch64-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-mips",
+    "target.mips-unknown-linux-musl.musl-root",
+    "mips-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-mipsel",
+    "target.mipsel-unknown-linux-musl.musl-root",
+    "mipsel-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-mips64",
+    "target.mips64-unknown-linux-muslabi64.musl-root",
+    "mips64-unknown-linux-muslabi64 install directory",
+)
+v(
+    "musl-root-mips64el",
+    "target.mips64el-unknown-linux-muslabi64.musl-root",
+    "mips64el-unknown-linux-muslabi64 install directory",
+)
+v(
+    "musl-root-riscv32gc",
+    "target.riscv32gc-unknown-linux-musl.musl-root",
+    "riscv32gc-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-riscv64gc",
+    "target.riscv64gc-unknown-linux-musl.musl-root",
+    "riscv64gc-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-loongarch64",
+    "target.loongarch64-unknown-linux-musl.musl-root",
+    "loongarch64-unknown-linux-musl install directory",
+)
+v(
+    "qemu-armhf-rootfs",
+    "target.arm-unknown-linux-gnueabihf.qemu-rootfs",
+    "rootfs in qemu testing, you probably don't want to use this",
+)
+v(
+    "qemu-aarch64-rootfs",
+    "target.aarch64-unknown-linux-gnu.qemu-rootfs",
+    "rootfs in qemu testing, you probably don't want to use this",
+)
+v(
+    "qemu-riscv64-rootfs",
+    "target.riscv64gc-unknown-linux-gnu.qemu-rootfs",
+    "rootfs in qemu testing, you probably don't want to use this",
+)
+v(
+    "experimental-targets",
+    "llvm.experimental-targets",
+    "experimental LLVM targets to build",
+)
 v("release-channel", "rust.channel", "the name of the release channel to build")
-v("release-description", "rust.description", "optional descriptive string for version output")
+v(
+    "release-description",
+    "rust.description",
+    "optional descriptive string for version output",
+)
 v("dist-compression-formats", None, "List of compression formats to use")

 # Used on systems where "cc" is unavailable
@@ -154,7 +294,11 @@ v("default-linker", "rust.default-linker", "the default linker")
 # Many of these are saved below during the "writing configuration" step
 # (others are conditionally saved).
 o("manage-submodules", "build.submodules", "let the build manage the git submodules")
-o("full-bootstrap", "build.full-bootstrap", "build three compilers instead of two (not recommended except for testing reproducible builds)")
+o(
+    "full-bootstrap",
+    "build.full-bootstrap",
+    "build three compilers instead of two (not recommended except for testing reproducible builds)",
+)
 o("extended", "build.extended", "build an extended rust tool set")

 v("bootstrap-cache-path", None, "use provided path for the bootstrap cache")
@@ -165,8 +309,16 @@ v("host", None, "List of GNUs ./configure syntax LLVM host triples")
 v("target", None, "List of GNUs ./configure syntax LLVM target triples")

 # Options specific to this configure script
-o("option-checking", None, "complain about unrecognized options in this configure script")
-o("verbose-configure", None, "don't truncate options when printing them in this configure script")
+o(
+    "option-checking",
+    None,
+    "complain about unrecognized options in this configure script",
+)
+o(
+    "verbose-configure",
+    None,
+    "don't truncate options when printing them in this configure script",
+)
 v("set", None, "set arbitrary key/value pairs in TOML configuration")


@@ -178,39 +330,42 @@ def err(msg):
     print("\nconfigure: ERROR: " + msg + "\n")
     sys.exit(1)


 def is_value_list(key):
     for option in options:
-        if option.name == key and option.desc.startswith('List of'):
+        if option.name == key and option.desc.startswith("List of"):
             return True
     return False

-if '--help' in sys.argv or '-h' in sys.argv:
-    print('Usage: ./configure [options]')
-    print('')
-    print('Options')
+
+if "--help" in sys.argv or "-h" in sys.argv:
+    print("Usage: ./configure [options]")
+    print("")
+    print("Options")
     for option in options:
-        if 'android' in option.name:
+        if "android" in option.name:
             # no one needs to know about these obscure options
             continue
         if option.value:
-            print('\t{:30} {}'.format('--{}=VAL'.format(option.name), option.desc))
+            print("\t{:30} {}".format("--{}=VAL".format(option.name), option.desc))
         else:
-            print('\t--enable-{:25} OR --disable-{}'.format(option.name, option.name))
-            print('\t\t' + option.desc)
-    print('')
-    print('This configure script is a thin configuration shim over the true')
-    print('configuration system, `config.toml`. You can explore the comments')
-    print('in `config.example.toml` next to this configure script to see')
-    print('more information about what each option is. Additionally you can')
-    print('pass `--set` as an argument to set arbitrary key/value pairs')
-    print('in the TOML configuration if desired')
-    print('')
-    print('Also note that all options which take `--enable` can similarly')
-    print('be passed with `--disable-foo` to forcibly disable the option')
+            print("\t--enable-{:25} OR --disable-{}".format(option.name, option.name))
+            print("\t\t" + option.desc)
+    print("")
+    print("This configure script is a thin configuration shim over the true")
+    print("configuration system, `config.toml`. You can explore the comments")
+    print("in `config.example.toml` next to this configure script to see")
+    print("more information about what each option is. Additionally you can")
+    print("pass `--set` as an argument to set arbitrary key/value pairs")
+    print("in the TOML configuration if desired")
+    print("")
+    print("Also note that all options which take `--enable` can similarly")
+    print("be passed with `--disable-foo` to forcibly disable the option")
     sys.exit(0)

 VERBOSE = False

+
 # Parse all command line arguments into one of these three lists, handling
 # boolean and value-based options separately
 def parse_args(args):
@@ -222,7 +377,7 @@ def parse_args(args):
     while i < len(args):
         arg = args[i]
         i += 1
-        if not arg.startswith('--'):
+        if not arg.startswith("--"):
             unknown_args.append(arg)
             continue

@@ -230,7 +385,7 @@ def parse_args(args):
         for option in options:
             value = None
             if option.value:
-                keyval = arg[2:].split('=', 1)
+                keyval = arg[2:].split("=", 1)
                 key = keyval[0]
                 if option.name != key:
                     continue
@@ -244,9 +399,9 @@ def parse_args(args):
                     need_value_args.append(arg)
                     continue
             else:
-                if arg[2:] == 'enable-' + option.name:
+                if arg[2:] == "enable-" + option.name:
                     value = True
-                elif arg[2:] == 'disable-' + option.name:
+                elif arg[2:] == "disable-" + option.name:
                     value = False
                 else:
                     continue
@@ -263,8 +418,9 @@ def parse_args(args):
     # NOTE: here and a few other places, we use [-1] to apply the *last* value
     # passed. But if option-checking is enabled, then the known_args loop will
    # also assert that options are only passed once.
-    option_checking = ('option-checking' not in known_args
-                       or known_args['option-checking'][-1][1])
+    option_checking = (
+        "option-checking" not in known_args or known_args["option-checking"][-1][1]
+    )
     if option_checking:
         if len(unknown_args) > 0:
             err("Option '" + unknown_args[0] + "' is not recognized")
@@ -272,18 +428,18 @@ def parse_args(args):
         err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0]))

     global VERBOSE
-    VERBOSE = 'verbose-configure' in known_args
+    VERBOSE = "verbose-configure" in known_args

     config = {}

-    set('build.configure-args', args, config)
+    set("build.configure-args", args, config)
     apply_args(known_args, option_checking, config)
     return parse_example_config(known_args, config)


 def build(known_args):
-    if 'build' in known_args:
-        return known_args['build'][-1][1]
+    if "build" in known_args:
+        return known_args["build"][-1][1]
     return bootstrap.default_build_triple(verbose=False)

@@ -291,7 +447,7 @@ def set(key, value, config):
     if isinstance(value, list):
         # Remove empty values, which value.split(',') tends to generate and
         # replace single quotes for double quotes to ensure correct parsing.
-        value = [v.replace('\'', '"') for v in value if v]
+        value = [v.replace("'", '"') for v in value if v]

     s = "{:20} := {}".format(key, value)
     if len(s) < 70 or VERBOSE:
@@ -310,7 +466,7 @@ def set(key, value, config):
     for i, part in enumerate(parts):
         if i == len(parts) - 1:
             if is_value_list(part) and isinstance(value, str):
-                value = value.split(',')
+                value = value.split(",")
             arr[part] = value
         else:
             if part not in arr:
@@ -321,9 +477,9 @@ def set(key, value, config):
 def apply_args(known_args, option_checking, config):
     for key in known_args:
         # The `set` option is special and can be passed a bunch of times
-        if key == 'set':
+        if key == "set":
             for _option, value in known_args[key]:
-                keyval = value.split('=', 1)
+                keyval = value.split("=", 1)
                 if len(keyval) == 1 or keyval[1] == "true":
                     value = True
                 elif keyval[1] == "false":
@@ -348,50 +504,55 @@ def apply_args(known_args, option_checking, config):
             # that here.
             build_triple = build(known_args)

-            if option.name == 'sccache':
-                set('llvm.ccache', 'sccache', config)
-            elif option.name == 'local-rust':
-                for path in os.environ['PATH'].split(os.pathsep):
-                    if os.path.exists(path + '/rustc'):
-                        set('build.rustc', path + '/rustc', config)
+            if option.name == "sccache":
+                set("llvm.ccache", "sccache", config)
+            elif option.name == "local-rust":
+                for path in os.environ["PATH"].split(os.pathsep):
+                    if os.path.exists(path + "/rustc"):
+                        set("build.rustc", path + "/rustc", config)
                         break
-                for path in os.environ['PATH'].split(os.pathsep):
-                    if os.path.exists(path + '/cargo'):
-                        set('build.cargo', path + '/cargo', config)
+                for path in os.environ["PATH"].split(os.pathsep):
+                    if os.path.exists(path + "/cargo"):
+                        set("build.cargo", path + "/cargo", config)
                         break
-            elif option.name == 'local-rust-root':
-                set('build.rustc', value + '/bin/rustc', config)
-                set('build.cargo', value + '/bin/cargo', config)
-            elif option.name == 'llvm-root':
-                set('target.{}.llvm-config'.format(build_triple), value + '/bin/llvm-config', config)
-            elif option.name == 'llvm-config':
-                set('target.{}.llvm-config'.format(build_triple), value, config)
-            elif option.name == 'llvm-filecheck':
-                set('target.{}.llvm-filecheck'.format(build_triple), value, config)
-            elif option.name == 'tools':
-                set('build.tools', value.split(','), config)
-            elif option.name == 'bootstrap-cache-path':
-                set('build.bootstrap-cache-path', value, config)
-            elif option.name == 'codegen-backends':
-                set('rust.codegen-backends', value.split(','), config)
-            elif option.name == 'host':
-                set('build.host', value.split(','), config)
-            elif option.name == 'target':
-                set('build.target', value.split(','), config)
-            elif option.name == 'full-tools':
-                set('rust.codegen-backends', ['llvm'], config)
-                set('rust.lld', True, config)
-                set('rust.llvm-tools', True, config)
-                set('rust.llvm-bitcode-linker', True, config)
-                set('build.extended', True, config)
-            elif option.name in ['option-checking', 'verbose-configure']:
+            elif option.name == "local-rust-root":
+                set("build.rustc", value + "/bin/rustc", config)
+                set("build.cargo", value + "/bin/cargo", config)
+            elif option.name == "llvm-root":
+                set(
+                    "target.{}.llvm-config".format(build_triple),
+                    value + "/bin/llvm-config",
+                    config,
+                )
+            elif option.name == "llvm-config":
+                set("target.{}.llvm-config".format(build_triple), value, config)
+            elif option.name == "llvm-filecheck":
+                set("target.{}.llvm-filecheck".format(build_triple), value, config)
+            elif option.name == "tools":
+                set("build.tools", value.split(","), config)
+            elif option.name == "bootstrap-cache-path":
+                set("build.bootstrap-cache-path", value, config)
+            elif option.name == "codegen-backends":
+                set("rust.codegen-backends", value.split(","), config)
+            elif option.name == "host":
+                set("build.host", value.split(","), config)
+            elif option.name == "target":
+                set("build.target", value.split(","), config)
+            elif option.name == "full-tools":
+                set("rust.codegen-backends", ["llvm"], config)
+                set("rust.lld", True, config)
+                set("rust.llvm-tools", True, config)
+                set("rust.llvm-bitcode-linker", True, config)
+                set("build.extended", True, config)
+            elif option.name in ["option-checking", "verbose-configure"]:
                 # this was handled above
                 pass
-            elif option.name == 'dist-compression-formats':
-                set('dist.compression-formats', value.split(','), config)
+            elif option.name == "dist-compression-formats":
+                set("dist.compression-formats", value.split(","), config)
             else:
                 raise RuntimeError("unhandled option {}".format(option.name))


 # "Parse" the `config.example.toml` file into the various sections, and we'll
 # use this as a template of a `config.toml` to write out which preserves
 # all the various comments and whatnot.
@ -406,20 +567,22 @@ def parse_example_config(known_args, config):
|
||||
targets = {}
|
||||
top_level_keys = []
|
||||
|
||||
with open(rust_dir + '/config.example.toml') as example_config:
|
||||
with open(rust_dir + "/config.example.toml") as example_config:
|
||||
example_lines = example_config.read().split("\n")
|
||||
for line in example_lines:
|
||||
if cur_section is None:
|
||||
if line.count('=') == 1:
|
||||
top_level_key = line.split('=')[0]
|
||||
top_level_key = top_level_key.strip(' #')
|
||||
if line.count("=") == 1:
|
||||
top_level_key = line.split("=")[0]
|
||||
top_level_key = top_level_key.strip(" #")
|
||||
top_level_keys.append(top_level_key)
|
||||
if line.startswith('['):
|
||||
if line.startswith("["):
|
||||
cur_section = line[1:-1]
|
||||
if cur_section.startswith('target'):
|
||||
cur_section = 'target'
|
||||
elif '.' in cur_section:
|
||||
raise RuntimeError("don't know how to deal with section: {}".format(cur_section))
|
||||
if cur_section.startswith("target"):
|
||||
cur_section = "target"
|
||||
elif "." in cur_section:
|
||||
raise RuntimeError(
|
||||
"don't know how to deal with section: {}".format(cur_section)
|
||||
)
|
||||
sections[cur_section] = [line]
|
||||
section_order.append(cur_section)
|
||||
else:
|
||||
@ -428,22 +591,25 @@ def parse_example_config(known_args, config):
# Fill out the `targets` array by giving all configured targets a copy of the
# `target` section we just loaded from the example config
configured_targets = [build(known_args)]
if 'build' in config:
if 'host' in config['build']:
configured_targets += config['build']['host']
if 'target' in config['build']:
configured_targets += config['build']['target']
if 'target' in config:
for target in config['target']:
if "build" in config:
if "host" in config["build"]:
configured_targets += config["build"]["host"]
if "target" in config["build"]:
configured_targets += config["build"]["target"]
if "target" in config:
for target in config["target"]:
configured_targets.append(target)
for target in configured_targets:
targets[target] = sections['target'][:]
targets[target] = sections["target"][:]
# For `.` to be valid TOML, it needs to be quoted. But `bootstrap.py` doesn't use a proper TOML parser and fails to parse the target.
# Avoid using quotes unless it's necessary.
targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
targets[target][0] = targets[target][0].replace(
"x86_64-unknown-linux-gnu",
"'{}'".format(target) if "." in target else target,
)

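An illustrative aside, not part of the diff: the quoting rule the comment above describes can be stated on its own. `quote_target_key` below is a hypothetical helper name; the behavior mirrors the `replace(...)` call in the hunk.

def quote_target_key(target):
    # TOML bare keys cannot contain ".", so a target such as
    # "thumbv8m.main-none-eabi" must be written as "'thumbv8m.main-none-eabi'".
    # Plain triples stay unquoted, because bootstrap.py's ad-hoc parser
    # fails on quoted keys it does not strictly need.
    return "'{}'".format(target) if "." in target else target

assert quote_target_key("x86_64-unknown-linux-gnu") == "x86_64-unknown-linux-gnu"
assert quote_target_key("thumbv8m.main-none-eabi") == "'thumbv8m.main-none-eabi'"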
if 'profile' not in config:
set('profile', 'dist', config)
if "profile" not in config:
set("profile", "dist", config)
configure_file(sections, top_level_keys, targets, config)
return section_order, sections, targets

@ -467,7 +633,7 @@ def to_toml(value):
else:
return "false"
elif isinstance(value, list):
return '[' + ', '.join(map(to_toml, value)) + ']'
return "[" + ", ".join(map(to_toml, value)) + "]"
elif isinstance(value, str):
# Don't put quotes around numeric values
if is_number(value):
@ -475,9 +641,18 @@ def to_toml(value):
else:
return "'" + value + "'"
elif isinstance(value, dict):
return "{" + ", ".join(map(lambda a: "{} = {}".format(to_toml(a[0]), to_toml(a[1])), value.items())) + "}"
return (
"{"
+ ", ".join(
map(
lambda a: "{} = {}".format(to_toml(a[0]), to_toml(a[1])),
value.items(),
)
)
+ "}"
)
else:
raise RuntimeError('no toml')
raise RuntimeError("no toml")

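For reference, a sketch of the fragments the `to_toml` serializer above produces, assuming the definitions in this hunk (expected outputs written as comments, not asserted against the real script):

# to_toml(True)             -> "true"
# to_toml(["a", "b"])       -> "['a', 'b']"
# to_toml("1")              -> "1"        (numeric strings stay unquoted)
# to_toml("dist")           -> "'dist'"
# to_toml({"debug": True})  -> "{'debug' = true}"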
def configure_section(lines, config):
@ -485,7 +660,7 @@ def configure_section(lines, config):
value = config[key]
found = False
for i, line in enumerate(lines):
if not line.startswith('#' + key + ' = '):
if not line.startswith("#" + key + " = "):
continue
found = True
lines[i] = "{} = {}".format(key, to_toml(value))
@ -501,7 +676,9 @@ def configure_section(lines, config):

def configure_top_level_key(lines, top_level_key, value):
for i, line in enumerate(lines):
if line.startswith('#' + top_level_key + ' = ') or line.startswith(top_level_key + ' = '):
if line.startswith("#" + top_level_key + " = ") or line.startswith(
top_level_key + " = "
):
lines[i] = "{} = {}".format(top_level_key, to_toml(value))
return

@ -512,11 +689,13 @@ def configure_top_level_key(lines, top_level_key, value):
def configure_file(sections, top_level_keys, targets, config):
for section_key, section_config in config.items():
if section_key not in sections and section_key not in top_level_keys:
raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key))
raise RuntimeError(
"config key {} not in sections or top_level_keys".format(section_key)
)
if section_key in top_level_keys:
configure_top_level_key(sections[None], section_key, section_config)

elif section_key == 'target':
elif section_key == "target":
for target in section_config:
configure_section(targets[target], section_config[target])
else:
@ -536,18 +715,19 @@ def write_uncommented(target, f):
block = []
is_comment = True
continue
is_comment = is_comment and line.startswith('#')
is_comment = is_comment and line.startswith("#")
return f


def write_config_toml(writer, section_order, targets, sections):
for section in section_order:
if section == 'target':
if section == "target":
for target in targets:
writer = write_uncommented(targets[target], writer)
else:
writer = write_uncommented(sections[section], writer)


def quit_if_file_exists(file):
if os.path.isfile(file):
msg = "Existing '{}' detected. Exiting".format(file)
@ -559,9 +739,10 @@ def quit_if_file_exists(file):

err(msg)


if __name__ == "__main__":
# If 'config.toml' already exists, exit the script at this point
quit_if_file_exists('config.toml')
quit_if_file_exists("config.toml")

if "GITHUB_ACTIONS" in os.environ:
print("::group::Configure the build")
@ -575,13 +756,13 @@ if __name__ == "__main__":
# order that we read it in.
p("")
p("writing `config.toml` in current directory")
with bootstrap.output('config.toml') as f:
with bootstrap.output("config.toml") as f:
write_config_toml(f, section_order, targets, sections)

with bootstrap.output('Makefile') as f:
contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
with bootstrap.output("Makefile") as f:
contents = os.path.join(rust_dir, "src", "bootstrap", "mk", "Makefile.in")
contents = open(contents).read()
contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + "/")
contents = contents.replace("$(CFG_PYTHON)", sys.executable)
f.write(contents)

@ -40,12 +40,13 @@ import time
# Python 3.3 changed the value of `sys.platform` on Linux from "linux2" to just
# "linux". We check here with `.startswith` to keep compatibility with older
# Python versions (especially Python 2.7).
if sys.platform.startswith('linux'):
if sys.platform.startswith("linux"):

class State:
def __init__(self):
with open('/proc/stat', 'r') as file:
with open("/proc/stat", "r") as file:
data = file.readline().split()
if data[0] != 'cpu':
if data[0] != "cpu":
raise Exception('did not start with "cpu"')
self.user = int(data[1])
self.nice = int(data[2])
@ -69,10 +70,21 @@ if sys.platform.startswith('linux'):
steal = self.steal - prev.steal
guest = self.guest - prev.guest
guest_nice = self.guest_nice - prev.guest_nice
total = user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice
total = (
user
+ nice
+ system
+ idle
+ iowait
+ irq
+ softirq
+ steal
+ guest
+ guest_nice
)
return float(idle) / float(total) * 100

elif sys.platform == 'win32':
elif sys.platform == "win32":
from ctypes.wintypes import DWORD
from ctypes import Structure, windll, WinError, GetLastError, byref

@ -104,9 +116,10 @@ elif sys.platform == 'win32':
kernel = self.kernel - prev.kernel
return float(idle) / float(user + kernel) * 100

elif sys.platform == 'darwin':
elif sys.platform == "darwin":
from ctypes import *
libc = cdll.LoadLibrary('/usr/lib/libc.dylib')

libc = cdll.LoadLibrary("/usr/lib/libc.dylib")

class host_cpu_load_info_data_t(Structure):
_fields_ = [("cpu_ticks", c_uint * 4)]
@ -116,7 +129,7 @@ elif sys.platform == 'darwin':
c_uint,
c_int,
POINTER(host_cpu_load_info_data_t),
POINTER(c_int)
POINTER(c_int),
]
host_statistics.restype = c_int

@ -124,13 +137,14 @@ elif sys.platform == 'darwin':
CPU_STATE_SYSTEM = 1
CPU_STATE_IDLE = 2
CPU_STATE_NICE = 3

class State:
def __init__(self):
stats = host_cpu_load_info_data_t()
count = c_int(4) # HOST_CPU_LOAD_INFO_COUNT
count = c_int(4)  # HOST_CPU_LOAD_INFO_COUNT
err = libc.host_statistics(
libc.mach_host_self(),
c_int(3), # HOST_CPU_LOAD_INFO
c_int(3),  # HOST_CPU_LOAD_INFO
byref(stats),
byref(count),
)
@ -148,7 +162,7 @@ elif sys.platform == 'darwin':
return float(idle) / float(user + system + idle + nice) * 100.0

else:
print('unknown platform', sys.platform)
print("unknown platform", sys.platform)
sys.exit(1)

cur_state = State()

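The Linux branch above computes idle time as a share of the delta between two /proc/stat samples. A self-contained sketch of the same calculation, assuming the standard /proc/stat field order (not part of the diff):

def idle_percent(prev, cur):
    # prev/cur are the numeric fields from the "cpu" line of /proc/stat:
    # user, nice, system, idle, iowait, irq, softirq, steal, guest, guest_nice
    deltas = [c - p for c, p in zip(cur, prev)]
    total = sum(deltas)
    idle = deltas[3]  # the 4th field is idle time
    return 100.0 * idle / total

# Example: if 900 of 1000 ticks between samples were idle, the CPU was 90% idle.
assert idle_percent([0] * 10, [10, 0, 50, 900, 20, 5, 5, 10, 0, 0]) == 90.0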
@ -27,5 +27,5 @@ RUN echo "[rust]" > /config/nopt-std-config.toml
RUN echo "optimize = false" >> /config/nopt-std-config.toml

ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests
ENV SCRIPT python3 ../x.py test --stage 0 --config /config/nopt-std-config.toml library/std \
&& python3 ../x.py --stage 2 test
ARG SCRIPT_ARG
ENV SCRIPT=${SCRIPT_ARG}

@ -24,10 +24,5 @@ COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh

ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu
# Skip some tests that are unlikely to be platform specific, to speed up
# this slow job.
ENV SCRIPT python3 ../x.py --stage 2 test \
--skip src/bootstrap \
--skip tests/rustdoc-js \
--skip src/tools/error_index_generator \
--skip src/tools/linkchecker
ARG SCRIPT_ARG
ENV SCRIPT=${SCRIPT_ARG}

@ -8,78 +8,79 @@ import tempfile

from pathlib import Path

TARGET_AARCH64 = 'aarch64-unknown-uefi'
TARGET_I686 = 'i686-unknown-uefi'
TARGET_X86_64 = 'x86_64-unknown-uefi'
TARGET_AARCH64 = "aarch64-unknown-uefi"
TARGET_I686 = "i686-unknown-uefi"
TARGET_X86_64 = "x86_64-unknown-uefi"


def run(*cmd, capture=False, check=True, env=None, timeout=None):
"""Print and run a command, optionally capturing the output."""
cmd = [str(p) for p in cmd]
print(' '.join(cmd))
return subprocess.run(cmd,
capture_output=capture,
check=check,
env=env,
text=True,
timeout=timeout)
print(" ".join(cmd))
return subprocess.run(
cmd, capture_output=capture, check=check, env=env, text=True, timeout=timeout
)


def build_and_run(tmp_dir, target):
if target == TARGET_AARCH64:
boot_file_name = 'bootaa64.efi'
ovmf_dir = Path('/usr/share/AAVMF')
ovmf_code = 'AAVMF_CODE.fd'
ovmf_vars = 'AAVMF_VARS.fd'
qemu = 'qemu-system-aarch64'
machine = 'virt'
cpu = 'cortex-a72'
boot_file_name = "bootaa64.efi"
ovmf_dir = Path("/usr/share/AAVMF")
ovmf_code = "AAVMF_CODE.fd"
ovmf_vars = "AAVMF_VARS.fd"
qemu = "qemu-system-aarch64"
machine = "virt"
cpu = "cortex-a72"
elif target == TARGET_I686:
boot_file_name = 'bootia32.efi'
ovmf_dir = Path('/usr/share/OVMF')
ovmf_code = 'OVMF32_CODE_4M.secboot.fd'
ovmf_vars = 'OVMF32_VARS_4M.fd'
boot_file_name = "bootia32.efi"
ovmf_dir = Path("/usr/share/OVMF")
ovmf_code = "OVMF32_CODE_4M.secboot.fd"
ovmf_vars = "OVMF32_VARS_4M.fd"
# The i686 target intentionally uses 64-bit qemu; the important
# difference is that the OVMF code provides a 32-bit environment.
qemu = 'qemu-system-x86_64'
machine = 'q35'
cpu = 'qemu64'
qemu = "qemu-system-x86_64"
machine = "q35"
cpu = "qemu64"
elif target == TARGET_X86_64:
boot_file_name = 'bootx64.efi'
ovmf_dir = Path('/usr/share/OVMF')
ovmf_code = 'OVMF_CODE.fd'
ovmf_vars = 'OVMF_VARS.fd'
qemu = 'qemu-system-x86_64'
machine = 'q35'
cpu = 'qemu64'
boot_file_name = "bootx64.efi"
ovmf_dir = Path("/usr/share/OVMF")
ovmf_code = "OVMF_CODE.fd"
ovmf_vars = "OVMF_VARS.fd"
qemu = "qemu-system-x86_64"
machine = "q35"
cpu = "qemu64"
else:
raise KeyError('invalid target')
raise KeyError("invalid target")

host_artifacts = Path('/checkout/obj/build/x86_64-unknown-linux-gnu')
stage0 = host_artifacts / 'stage0/bin'
stage2 = host_artifacts / 'stage2/bin'
host_artifacts = Path("/checkout/obj/build/x86_64-unknown-linux-gnu")
stage0 = host_artifacts / "stage0/bin"
stage2 = host_artifacts / "stage2/bin"

env = dict(os.environ)
env['PATH'] = '{}:{}:{}'.format(stage2, stage0, env['PATH'])
env["PATH"] = "{}:{}:{}".format(stage2, stage0, env["PATH"])

# Copy the test crate into `tmp_dir`.
test_crate = Path(tmp_dir) / 'uefi_qemu_test'
shutil.copytree('/uefi_qemu_test', test_crate)
test_crate = Path(tmp_dir) / "uefi_qemu_test"
shutil.copytree("/uefi_qemu_test", test_crate)

# Build the UEFI executable.
run('cargo',
'build',
'--manifest-path',
test_crate / 'Cargo.toml',
'--target',
run(
"cargo",
"build",
"--manifest-path",
test_crate / "Cargo.toml",
"--target",
target,
env=env)
env=env,
)

# Create a mock EFI System Partition in a subdirectory.
esp = test_crate / 'esp'
boot = esp / 'efi/boot'
esp = test_crate / "esp"
boot = esp / "efi/boot"
os.makedirs(boot, exist_ok=True)

# Copy the executable into the ESP.
src_exe_path = test_crate / 'target' / target / 'debug/uefi_qemu_test.efi'
src_exe_path = test_crate / "target" / target / "debug/uefi_qemu_test.efi"
shutil.copy(src_exe_path, boot / boot_file_name)
print(src_exe_path, boot / boot_file_name)

@ -89,37 +90,39 @@ def build_and_run(tmp_dir, target):

# Make a writable copy of the vars file. aarch64 doesn't boot
# correctly with read-only vars.
ovmf_rw_vars = Path(tmp_dir) / 'vars.fd'
ovmf_rw_vars = Path(tmp_dir) / "vars.fd"
shutil.copy(ovmf_vars, ovmf_rw_vars)

# Run the executable in QEMU and capture the output.
output = run(qemu,
'-machine',
machine,
'-cpu',
cpu,
'-display',
'none',
'-serial',
'stdio',
'-drive',
f'if=pflash,format=raw,readonly=on,file={ovmf_code}',
'-drive',
f'if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}',
'-drive',
f'format=raw,file=fat:rw:{esp}',
capture=True,
check=True,
# Set a timeout to kill the VM in case something goes wrong.
timeout=60).stdout
output = run(
qemu,
"-machine",
machine,
"-cpu",
cpu,
"-display",
"none",
"-serial",
"stdio",
"-drive",
f"if=pflash,format=raw,readonly=on,file={ovmf_code}",
"-drive",
f"if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}",
"-drive",
f"format=raw,file=fat:rw:{esp}",
capture=True,
check=True,
# Set a timeout to kill the VM in case something goes wrong.
timeout=60,
).stdout

if 'Hello World!' in output:
print('VM produced expected output')
if "Hello World!" in output:
print("VM produced expected output")
else:
print('unexpected VM output:')
print('---start---')
print("unexpected VM output:")
print("---start---")
print(output)
print('---end---')
print("---end---")
sys.exit(1)

@ -1 +1 @@
0.18.1
0.18.2

@ -105,6 +105,23 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
# It seems that it cannot be the same as $IMAGE_TAG, otherwise it overwrites the cache
CACHE_IMAGE_TAG=${REGISTRY}/${REGISTRY_USERNAME}/rust-ci-cache:${cksum}

# Docker build arguments.
build_args=(
"build"
"--rm"
"-t" "rust-ci"
"-f" "$dockerfile"
"$context"
)

# If the environment variable DOCKER_SCRIPT is defined,
# set the build argument SCRIPT_ARG to DOCKER_SCRIPT.
# In this way, we run the script defined in CI,
# instead of the one defined in the Dockerfile.
if [ -n "${DOCKER_SCRIPT+x}" ]; then
build_args+=("--build-arg" "SCRIPT_ARG=${DOCKER_SCRIPT}")
fi

# On non-CI jobs, we try to download a pre-built image from the rust-lang-ci
# ghcr.io registry. If it is not possible, we fall back to building the image
# locally.
@ -115,7 +132,7 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
docker tag "${IMAGE_TAG}" rust-ci
else
echo "Building local Docker image"
retry docker build --rm -t rust-ci -f "$dockerfile" "$context"
retry docker "${build_args[@]}"
fi
# On PR CI jobs, we don't have permissions to write to the registry cache,
# but we can still read from it.
@ -127,13 +144,9 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
# Build the image using registry caching backend
retry docker \
buildx \
build \
--rm \
-t rust-ci \
-f "$dockerfile" \
"${build_args[@]}" \
--cache-from type=registry,ref=${CACHE_IMAGE_TAG} \
--output=type=docker \
"$context"
--output=type=docker
# On auto/try builds, we can also write to the cache.
else
# Log into the Docker registry, so that we can read/write cache and the final image
@ -147,14 +160,10 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then
# Build the image using registry caching backend
retry docker \
buildx \
build \
--rm \
-t rust-ci \
-f "$dockerfile" \
"${build_args[@]}" \
--cache-from type=registry,ref=${CACHE_IMAGE_TAG} \
--cache-to type=registry,ref=${CACHE_IMAGE_TAG},compression=zstd \
--output=type=docker \
"$context"
--output=type=docker

# Print images for debugging purposes
docker images

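The shell hunk above collects the shared `docker build` arguments into a bash array so all invocation sites stay in sync, appending `--build-arg SCRIPT_ARG=...` only when `DOCKER_SCRIPT` is set. The same conditional-argument pattern, rendered in Python purely for illustration (hypothetical paths and names, not from the script):

import os
import subprocess

build_args = ["build", "--rm", "-t", "rust-ci", "-f", "Dockerfile", "."]
script = os.environ.get("DOCKER_SCRIPT")
if script is not None:
    # Insert the extra flags before the build context, mirroring the bash array.
    build_args[-1:-1] = ["--build-arg", "SCRIPT_ARG={}".format(script)]
subprocess.run(["docker", *build_args], check=True)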
@ -35,6 +35,7 @@ MIRROR_BUCKET = "rust-lang-ci-mirrors"
MIRROR_BUCKET_REGION = "us-west-1"
MIRROR_BASE_DIR = "rustc/android/"


class Package:
def __init__(self, path, url, sha1, deps=None):
if deps is None:
@ -53,18 +54,25 @@ class Package:
sha1 = hashlib.sha1(f.read()).hexdigest()
if sha1 != self.sha1:
raise RuntimeError(
"hash mismatch for package " + self.path + ": " +
sha1 + " vs " + self.sha1 + " (known good)"
"hash mismatch for package "
+ self.path
+ ": "
+ sha1
+ " vs "
+ self.sha1
+ " (known good)"
)
return file

def __repr__(self):
return "<Package "+self.path+" at "+self.url+" (sha1="+self.sha1+")"
return "<Package " + self.path + " at " + self.url + " (sha1=" + self.sha1 + ")"


def fetch_url(url):
page = urllib.request.urlopen(url)
return page.read()


def fetch_repository(base, repo_url):
packages = {}
root = ET.fromstring(fetch_url(base + repo_url))
@ -92,12 +100,14 @@ def fetch_repository(base, repo_url):

return packages


def fetch_repositories():
packages = {}
for repo in REPOSITORIES:
packages.update(fetch_repository(BASE_REPOSITORY, repo))
return packages


class Lockfile:
def __init__(self, path):
self.path = path
@ -123,6 +133,7 @@ class Lockfile:
for package in packages:
f.write(package.path + " " + package.url + " " + package.sha1 + "\n")


def cli_add_to_lockfile(args):
lockfile = Lockfile(args.lockfile)
packages = fetch_repositories()
@ -130,28 +141,49 @@ def cli_add_to_lockfile(args):
lockfile.add(packages, package)
lockfile.save()


def cli_update_mirror(args):
lockfile = Lockfile(args.lockfile)
for package in lockfile.packages.values():
path = package.download(BASE_REPOSITORY)
subprocess.run([
"aws", "s3", "mv", path,
"s3://" + MIRROR_BUCKET + "/" + MIRROR_BASE_DIR + package.url,
"--profile=" + args.awscli_profile,
], check=True)
subprocess.run(
[
"aws",
"s3",
"mv",
path,
"s3://" + MIRROR_BUCKET + "/" + MIRROR_BASE_DIR + package.url,
"--profile=" + args.awscli_profile,
],
check=True,
)


def cli_install(args):
lockfile = Lockfile(args.lockfile)
for package in lockfile.packages.values():
# Download the file from the mirror into a temp file
url = "https://" + MIRROR_BUCKET + ".s3-" + MIRROR_BUCKET_REGION + \
".amazonaws.com/" + MIRROR_BASE_DIR
url = (
"https://"
+ MIRROR_BUCKET
+ ".s3-"
+ MIRROR_BUCKET_REGION
+ ".amazonaws.com/"
+ MIRROR_BASE_DIR
)
downloaded = package.download(url)
# Extract the file in a temporary directory
extract_dir = tempfile.mkdtemp()
subprocess.run([
"unzip", "-q", downloaded, "-d", extract_dir,
], check=True)
subprocess.run(
[
"unzip",
"-q",
downloaded,
"-d",
extract_dir,
],
check=True,
)
# Figure out the prefix used in the zip
subdirs = [d for d in os.listdir(extract_dir) if not d.startswith(".")]
if len(subdirs) != 1:
@ -162,6 +194,7 @@ def cli_install(args):
os.rename(os.path.join(extract_dir, subdirs[0]), dest)
os.unlink(downloaded)


def cli():
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
@ -187,5 +220,6 @@ def cli():
exit(1)
args.func(args)


if __name__ == "__main__":
cli()

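Each lockfile line written above is simply `path url sha1`, space-separated. A round-trip sketch under that assumption (`parse_lockfile_line` is a hypothetical helper, not part of the script):

def parse_lockfile_line(line):
    # Inverse of: f.write(package.path + " " + package.url + " " + package.sha1 + "\n")
    path, url, sha1 = line.split(" ")
    return path, url, sha1

assert parse_lockfile_line("ndk;25 ndk.zip abc123") == ("ndk;25", "ndk.zip", "abc123")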
@ -588,7 +588,7 @@ class TestEnvironment:
"--repo-path",
self.repo_dir(),
"--repository",
self.TEST_REPO_NAME
self.TEST_REPO_NAME,
],
env=ffx_env,
stdout_handler=self.subprocess_logger.debug,
@ -619,9 +619,7 @@ class TestEnvironment:
# `facet` statement required for TCP testing via
# protocol `fuchsia.posix.socket.Provider`. See
# https://fuchsia.dev/fuchsia-src/development/testing/components/test_runner_framework?hl=en#legacy_non-hermetic_tests
CML_TEMPLATE: ClassVar[
str
] = """
CML_TEMPLATE: ClassVar[str] = """
{{
program: {{
runner: "elf_test_runner",
@ -994,7 +992,7 @@ class TestEnvironment:
"repository",
"server",
"stop",
self.TEST_REPO_NAME
self.TEST_REPO_NAME,
],
env=self.ffx_cmd_env(),
stdout_handler=self.subprocess_logger.debug,

@ -2,7 +2,7 @@

set -euo pipefail

LINUX_VERSION=28e848386b92645f93b9f2fdba5882c3ca7fb3e2
LINUX_VERSION=v6.13-rc1

# Build rustc, rustdoc, cargo, clippy-driver and rustfmt
../x.py build --stage 2 library rustdoc clippy rustfmt
@ -64,7 +64,7 @@ make -C linux LLVM=1 -j$(($(nproc) + 1)) \

BUILD_TARGETS="
samples/rust/rust_minimal.o
samples/rust/rust_print.o
samples/rust/rust_print_main.o
drivers/net/phy/ax88796b_rust.o
rust/doctests_kernel_generated.o
"

@ -7,6 +7,7 @@ be executed on CI.
It reads job definitions from `src/ci/github-actions/jobs.yml`
and filters them based on the event that happened on CI.
"""

import dataclasses
import json
import logging
@ -94,7 +95,7 @@ def find_run_type(ctx: GitHubCtx) -> Optional[WorkflowRunType]:
try_build = ctx.ref in (
"refs/heads/try",
"refs/heads/try-perf",
"refs/heads/automation/bors/try"
"refs/heads/automation/bors/try",
)

# Unrolled branch from a rollup for testing perf
@ -135,11 +136,15 @@ def calculate_jobs(run_type: WorkflowRunType, job_data: Dict[str, Any]) -> List[
continue
jobs.append(job[0])
if unknown_jobs:
raise Exception(f"Custom job(s) `{unknown_jobs}` not found in auto jobs")
raise Exception(
f"Custom job(s) `{unknown_jobs}` not found in auto jobs"
)

return add_base_env(name_jobs(jobs, "try"), job_data["envs"]["try"])
elif isinstance(run_type, AutoRunType):
return add_base_env(name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"])
return add_base_env(
name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"]
)

return []

@ -161,7 +166,7 @@ def get_github_ctx() -> GitHubCtx:
event_name=event_name,
ref=os.environ["GITHUB_REF"],
repository=os.environ["GITHUB_REPOSITORY"],
commit_message=commit_message
commit_message=commit_message,
)

@ -58,6 +58,22 @@ envs:
NO_DEBUG_ASSERTIONS: 1
NO_OVERFLOW_CHECKS: 1

# Different sets of tests, used to run tests in parallel in multiple jobs.
stage_2_test_set1: &stage_2_test_set1
DOCKER_SCRIPT: >-
python3 ../x.py --stage 2 test
--skip compiler
--skip src

stage_2_test_set2: &stage_2_test_set2
DOCKER_SCRIPT: >-
python3 ../x.py --stage 2 test
--skip tests
--skip coverage-map
--skip coverage-run
--skip library
--skip tidyselftest

production:
&production
DEPLOY_BUCKET: rust-lang-ci2
@ -212,11 +228,42 @@ auto:
- image: dist-x86_64-netbsd
<<: *job-linux-4c

- image: i686-gnu
<<: *job-linux-8c
# The i686-gnu job is split into multiple jobs to run tests in parallel.
# i686-gnu-1 skips tests that run in i686-gnu-2.
- image: i686-gnu-1
env:
IMAGE: i686-gnu
<<: *stage_2_test_set1
<<: *job-linux-4c

- image: i686-gnu-nopt
<<: *job-linux-8c
# Skip tests that run in i686-gnu-1
- image: i686-gnu-2
env:
IMAGE: i686-gnu
<<: *stage_2_test_set2
<<: *job-linux-4c

# The i686-gnu-nopt job is split into multiple jobs to run tests in parallel.
# i686-gnu-nopt-1 skips tests that run in i686-gnu-nopt-2
- image: i686-gnu-nopt-1
env:
IMAGE: i686-gnu-nopt
<<: *stage_2_test_set1
<<: *job-linux-4c

# Skip tests that run in i686-gnu-nopt-1
- image: i686-gnu-nopt-2
env:
IMAGE: i686-gnu-nopt
DOCKER_SCRIPT: >-
python3 ../x.py test --stage 0 --config /config/nopt-std-config.toml library/std &&
python3 ../x.py --stage 2 test
--skip tests
--skip coverage-map
--skip coverage-run
--skip library
--skip tidyselftest
<<: *job-linux-4c

- image: mingw-check
<<: *job-linux-4c

@ -19,6 +19,7 @@ $ python3 upload-build-metrics.py <path-to-CPU-usage-CSV>

`path-to-CPU-usage-CSV` is a path to a CSV generated by the `src/ci/cpu-usage-over-time.py` script.
"""

import argparse
import csv
import os
@ -31,7 +32,7 @@ from typing import List
def load_cpu_usage(path: Path) -> List[float]:
usage = []
with open(path) as f:
reader = csv.reader(f, delimiter=',')
reader = csv.reader(f, delimiter=",")
for row in reader:
# The log might contain incomplete rows or some Python exception
if len(row) == 2:
@ -50,25 +51,21 @@ def upload_datadog_measure(name: str, value: float):
print(f"Metric {name}: {value:.4f}")

datadog_cmd = "datadog-ci"
if os.getenv("GITHUB_ACTIONS") is not None and sys.platform.lower().startswith("win"):
if os.getenv("GITHUB_ACTIONS") is not None and sys.platform.lower().startswith(
"win"
):
# Due to weird interaction of MSYS2 and Python, we need to use an absolute path,
# and also specify the ".cmd" at the end. See https://github.com/rust-lang/rust/pull/125771.
datadog_cmd = "C:\\npm\\prefix\\datadog-ci.cmd"

subprocess.run([
datadog_cmd,
"measure",
"--level", "job",
"--measures", f"{name}:{value}"
],
check=False
subprocess.run(
[datadog_cmd, "measure", "--level", "job", "--measures", f"{name}:{value}"],
check=False,
)


if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="DataDog metric uploader"
)
parser = argparse.ArgumentParser(prog="DataDog metric uploader")
parser.add_argument("cpu-usage-history-csv")
args = parser.parse_args()

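For context, each CSV row read by `load_cpu_usage` above pairs a timestamp with an idle percentage, which the script condenses into a single Datadog measure. A sketch of one plausible aggregation under that assumption (the mean-utilization formula below is illustrative, not quoted from the script):

def average_cpu_utilization(idle_percentages):
    # cpu-usage-over-time.py logs idle %, so utilization is its complement.
    return 100.0 - (sum(idle_percentages) / len(idle_percentages))

assert average_cpu_utilization([90.0, 70.0]) == 20.0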
@ -13,6 +13,7 @@ i.e., within 0.5 ULP of the true value.
Adapted from Daniel Lemire's fast_float ``table_generation.py``,
available here: <https://github.com/fastfloat/fast_float/blob/main/script/table_generation.py>.
"""

from __future__ import print_function
from math import ceil, floor, log
from collections import deque
@ -34,6 +35,7 @@ STATIC_WARNING = """
// the final binary.
"""


def main():
min_exp = minimum_exponent(10)
max_exp = maximum_exponent(10)
@ -41,10 +43,10 @@ def main():

print(HEADER.strip())
print()
print('pub const SMALLEST_POWER_OF_FIVE: i32 = {};'.format(min_exp))
print('pub const LARGEST_POWER_OF_FIVE: i32 = {};'.format(max_exp))
print('pub const N_POWERS_OF_FIVE: usize = ', end='')
print('(LARGEST_POWER_OF_FIVE - SMALLEST_POWER_OF_FIVE + 1) as usize;')
print("pub const SMALLEST_POWER_OF_FIVE: i32 = {};".format(min_exp))
print("pub const LARGEST_POWER_OF_FIVE: i32 = {};".format(max_exp))
print("pub const N_POWERS_OF_FIVE: usize = ", end="")
print("(LARGEST_POWER_OF_FIVE - SMALLEST_POWER_OF_FIVE + 1) as usize;")
print()
print_proper_powers(min_exp, max_exp, bias)

@ -54,7 +56,7 @@ def minimum_exponent(base):


def maximum_exponent(base):
return floor(log(1.7976931348623157e+308, base))
return floor(log(1.7976931348623157e308, base))


def print_proper_powers(min_exp, max_exp, bias):
@ -64,46 +66,46 @@ def print_proper_powers(min_exp, max_exp, bias):
# 2^(2b)/(5^−q) with b=64 + int(math.ceil(log2(5^−q)))
powers = []
for q in range(min_exp, 0):
power5 = 5 ** -q
power5 = 5**-q
z = 0
while (1 << z) < power5:
z += 1
if q >= -27:
b = z + 127
c = 2 ** b // power5 + 1
c = 2**b // power5 + 1
powers.append((c, q))
else:
b = 2 * z + 2 * 64
c = 2 ** b // power5 + 1
c = 2**b // power5 + 1
# truncate
while c >= (1<<128):
while c >= (1 << 128):
c //= 2
powers.append((c, q))

# Add positive exponents
for q in range(0, max_exp + 1):
power5 = 5 ** q
power5 = 5**q
# move the most significant bit in position
while power5 < (1<<127):
while power5 < (1 << 127):
power5 *= 2
# *truncate*
while power5 >= (1<<128):
while power5 >= (1 << 128):
power5 //= 2
powers.append((power5, q))

# Print the powers.
print(STATIC_WARNING.strip())
print('#[rustfmt::skip]')
typ = '[(u64, u64); N_POWERS_OF_FIVE]'
print('pub static POWER_OF_FIVE_128: {} = ['.format(typ))
print("#[rustfmt::skip]")
typ = "[(u64, u64); N_POWERS_OF_FIVE]"
print("pub static POWER_OF_FIVE_128: {} = [".format(typ))
for c, exp in powers:
hi = '0x{:x}'.format(c // (1 << 64))
lo = '0x{:x}'.format(c % (1 << 64))
value = ' ({}, {}), '.format(hi, lo)
comment = '// {}^{}'.format(5, exp)
print(value.ljust(46, ' ') + comment)
print('];')
hi = "0x{:x}".format(c // (1 << 64))
lo = "0x{:x}".format(c % (1 << 64))
value = " ({}, {}), ".format(hi, lo)
comment = "// {}^{}".format(5, exp)
print(value.ljust(46, " ") + comment)
print("];")


if __name__ == '__main__':
if __name__ == "__main__":
main()

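Each emitted `(hi, lo)` pair above packs a 128-bit approximation of 5^q, normalized so its most significant bit sits at bit 127. A quick check of one positive entry, following the normalization loop in the script (a standalone sketch, not part of it):

q = 1
power5 = 5**q
while power5 < (1 << 127):  # shift the MSB into bit 127
    power5 *= 2
hi, lo = power5 >> 64, power5 % (1 << 64)
# 5 = 0b101, so the normalized 128-bit mantissa starts with bits 101...
assert hi == 0xA000000000000000 and lo == 0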
@ -1,6 +1,7 @@
# Add this folder to the python sys path; GDB Python-interpreter will now find modules in this path
import sys
from os import path

self_dir = path.dirname(path.realpath(__file__))
sys.path.append(self_dir)

@ -6,8 +6,11 @@ from gdb_providers import *
from rust_types import *


_gdb_version_matched = re.search('([0-9]+)\\.([0-9]+)', gdb.VERSION)
gdb_version = [int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else []
_gdb_version_matched = re.search("([0-9]+)\\.([0-9]+)", gdb.VERSION)
gdb_version = (
    [int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else []
)


def register_printers(objfile):
objfile.pretty_printers.append(printer)
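The regex above extracts the leading `major.minor` pair from `gdb.VERSION`. The same parse on an illustrative version string (a sketch; the real input comes from GDB at runtime):

import re

m = re.search("([0-9]+)\\.([0-9]+)", "14.2-1ubuntu1")
version = [int(num) for num in m.groups()] if m else []
assert version == [14, 2]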
@ -21,7 +21,7 @@ def unwrap_unique_or_non_null(unique_or_nonnull):
# GDB 14 has a tag class that indicates that extension methods are ok
# to call. Use of this tag only requires that printers hide local
# attributes and methods by prefixing them with "_".
if hasattr(gdb, 'ValuePrinter'):
if hasattr(gdb, "ValuePrinter"):
printer_base = gdb.ValuePrinter
else:
printer_base = object
@ -98,7 +98,7 @@ class StdStrProvider(printer_base):


def _enumerate_array_elements(element_ptrs):
for (i, element_ptr) in enumerate(element_ptrs):
for i, element_ptr in enumerate(element_ptrs):
key = "[{}]".format(i)
element = element_ptr.dereference()

@ -173,7 +173,8 @@ class StdVecDequeProvider(printer_base):

def children(self):
return _enumerate_array_elements(
(self._data_ptr + ((self._head + index) % self._cap)) for index in xrange(self._size)
(self._data_ptr + ((self._head + index) % self._cap))
for index in xrange(self._size)
)

@staticmethod
@ -270,7 +271,9 @@ def children_of_btree_map(map):
# Yields each key/value pair in the node and in any child nodes.
def children_of_node(node_ptr, height):
def cast_to_internal(node):
internal_type_name = node.type.target().name.replace("LeafNode", "InternalNode", 1)
internal_type_name = node.type.target().name.replace(
"LeafNode", "InternalNode", 1
)
internal_type = gdb.lookup_type(internal_type_name)
return node.cast(internal_type.pointer())

@ -293,8 +296,16 @@ def children_of_btree_map(map):
# Avoid "Cannot perform pointer math on incomplete type" on zero-sized arrays.
key_type_size = keys.type.sizeof
val_type_size = vals.type.sizeof
key = keys[i]["value"]["value"] if key_type_size > 0 else gdb.parse_and_eval("()")
val = vals[i]["value"]["value"] if val_type_size > 0 else gdb.parse_and_eval("()")
key = (
keys[i]["value"]["value"]
if key_type_size > 0
else gdb.parse_and_eval("()")
)
val = (
vals[i]["value"]["value"]
if val_type_size > 0
else gdb.parse_and_eval("()")
)
yield key, val

if map["length"] > 0:
@ -352,7 +363,7 @@ class StdOldHashMapProvider(printer_base):
self._hashes = self._table["hashes"]
self._hash_uint_type = self._hashes.type
self._hash_uint_size = self._hashes.type.sizeof
self._modulo = 2 ** self._hash_uint_size
self._modulo = 2**self._hash_uint_size
self._data_ptr = self._hashes[ZERO_FIELD]["pointer"]

self._capacity_mask = int(self._table["capacity_mask"])
@ -382,8 +393,14 @@ class StdOldHashMapProvider(printer_base):

hashes = self._hash_uint_size * self._capacity
align = self._pair_type_size
len_rounded_up = (((((hashes + align) % self._modulo - 1) % self._modulo) & ~(
(align - 1) % self._modulo)) % self._modulo - hashes) % self._modulo
len_rounded_up = (
(
(((hashes + align) % self._modulo - 1) % self._modulo)
& ~((align - 1) % self._modulo)
)
% self._modulo
- hashes
) % self._modulo

pairs_offset = hashes + len_rounded_up
pairs_start = gdb.Value(start + pairs_offset).cast(self._pair_type.pointer())
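The `len_rounded_up` expression above computes the padding needed to round the hash array's byte length up to the pair type's alignment, with every intermediate reduced modulo 2^word to mimic the wrapping arithmetic of the original Rust layout code. Stripped of the modulo noise, it is the familiar round-up-to-multiple identity (a sketch, assuming a power-of-two alignment):

def round_up(n, align):
    # Round n up to the next multiple of align (align must be a power of two).
    return (n + align - 1) & ~(align - 1)

assert round_up(100, 16) == 112
assert round_up(112, 16) == 112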
@ -12,7 +12,8 @@ import os
import stat

TEST_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../test/ui/derives/'))
os.path.join(os.path.dirname(__file__), "../test/ui/derives/")
)

TEMPLATE = """\
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'
@ -56,28 +57,33 @@ ENUM_TUPLE, ENUM_STRUCT, STRUCT_FIELDS, STRUCT_TUPLE = range(4)


def create_test_case(type, trait, super_traits, error_count):
string = [ENUM_STRING, ENUM_STRUCT_VARIANT_STRING, STRUCT_STRING, STRUCT_TUPLE_STRING][type]
all_traits = ','.join([trait] + super_traits)
super_traits = ','.join(super_traits)
error_deriving = '#[derive(%s)]' % super_traits if super_traits else ''
string = [
ENUM_STRING,
ENUM_STRUCT_VARIANT_STRING,
STRUCT_STRING,
STRUCT_TUPLE_STRING,
][type]
all_traits = ",".join([trait] + super_traits)
super_traits = ",".join(super_traits)
error_deriving = "#[derive(%s)]" % super_traits if super_traits else ""

errors = '\n'.join('//~%s ERROR' % ('^' * n) for n in range(error_count))
errors = "\n".join("//~%s ERROR" % ("^" * n) for n in range(error_count))
code = string.format(traits=all_traits, errors=errors)
return TEMPLATE.format(error_deriving=error_deriving, code=code)


def write_file(name, string):
test_file = os.path.join(TEST_DIR, 'derives-span-%s.rs' % name)
test_file = os.path.join(TEST_DIR, "derives-span-%s.rs" % name)

# set write permission if file exists, so it can be changed
if os.path.exists(test_file):
os.chmod(test_file, stat.S_IWUSR)

with open(test_file, 'w') as f:
with open(test_file, "w") as f:
f.write(string)

# mark file read-only
os.chmod(test_file, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
os.chmod(test_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)


ENUM = 1
@ -85,29 +91,31 @@ STRUCT = 2
ALL = STRUCT | ENUM

traits = {
'Default': (STRUCT, [], 1),
'FromPrimitive': (0, [], 0), # only works for C-like enums

'Decodable': (0, [], 0), # FIXME: quoting gives horrible spans
'Encodable': (0, [], 0), # FIXME: quoting gives horrible spans
"Default": (STRUCT, [], 1),
"FromPrimitive": (0, [], 0),  # only works for C-like enums
"Decodable": (0, [], 0),  # FIXME: quoting gives horrible spans
"Encodable": (0, [], 0),  # FIXME: quoting gives horrible spans
}

for (trait, supers, errs) in [('Clone', [], 1),
('PartialEq', [], 2),
('PartialOrd', ['PartialEq'], 1),
('Eq', ['PartialEq'], 1),
('Ord', ['Eq', 'PartialOrd', 'PartialEq'], 1),
('Debug', [], 1),
('Hash', [], 1)]:
for trait, supers, errs in [
("Clone", [], 1),
("PartialEq", [], 2),
("PartialOrd", ["PartialEq"], 1),
("Eq", ["PartialEq"], 1),
("Ord", ["Eq", "PartialOrd", "PartialEq"], 1),
("Debug", [], 1),
("Hash", [], 1),
]:
traits[trait] = (ALL, supers, errs)

for (trait, (types, super_traits, error_count)) in traits.items():
for trait, (types, super_traits, error_count) in traits.items():

def mk(ty, t=trait, st=super_traits, ec=error_count):
return create_test_case(ty, t, st, ec)

if types & ENUM:
write_file(trait + '-enum', mk(ENUM_TUPLE))
write_file(trait + '-enum-struct-variant', mk(ENUM_STRUCT))
write_file(trait + "-enum", mk(ENUM_TUPLE))
write_file(trait + "-enum-struct-variant", mk(ENUM_STRUCT))
if types & STRUCT:
write_file(trait + '-struct', mk(STRUCT_FIELDS))
write_file(trait + '-tuple-struct', mk(STRUCT_TUPLE))
write_file(trait + "-struct", mk(STRUCT_FIELDS))
write_file(trait + "-tuple-struct", mk(STRUCT_TUPLE))

@ -22,18 +22,16 @@ fn main() {
}
"""

test_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../test/ui/parser')
)
test_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../test/ui/parser"))

for kw in sys.argv[1:]:
test_file = os.path.join(test_dir, 'keyword-%s-as-identifier.rs' % kw)
test_file = os.path.join(test_dir, "keyword-%s-as-identifier.rs" % kw)

# set write permission if file exists, so it can be changed
if os.path.exists(test_file):
os.chmod(test_file, stat.S_IWUSR)

with open(test_file, 'wt') as f:
with open(test_file, "wt") as f:
f.write(template % (kw, kw, kw))

# mark file read-only

@ -127,6 +127,7 @@ import os.path
import re
import shlex
from collections import namedtuple

try:
from html.parser import HTMLParser
except ImportError:
@ -142,12 +143,28 @@ except ImportError:
from htmlentitydefs import name2codepoint

# "void elements" (no closing tag) from the HTML Standard section 12.1.2
VOID_ELEMENTS = {'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'}
VOID_ELEMENTS = {
"area",
"base",
"br",
"col",
"embed",
"hr",
"img",
"input",
"keygen",
"link",
"menuitem",
"meta",
"param",
"source",
"track",
"wbr",
}

# Python 2 -> 3 compatibility
try:
unichr # noqa: B018 FIXME: py2
unichr  # noqa: B018 FIXME: py2
except NameError:
unichr = chr

@ -158,18 +175,20 @@ channel = os.environ["DOC_RUST_LANG_ORG_CHANNEL"]
rust_test_path = None
bless = None


class CustomHTMLParser(HTMLParser):
"""simplified HTML parser.

this is possible because we are dealing with very regular HTML from
rustdoc; we only have to deal with i) void elements and ii) empty
attributes."""

def __init__(self, target=None):
HTMLParser.__init__(self)
self.__builder = target or ET.TreeBuilder()

def handle_starttag(self, tag, attrs):
attrs = {k: v or '' for k, v in attrs}
attrs = {k: v or "" for k, v in attrs}
self.__builder.start(tag, attrs)
if tag in VOID_ELEMENTS:
self.__builder.end(tag)
@ -178,7 +197,7 @@ class CustomHTMLParser(HTMLParser):
self.__builder.end(tag)

def handle_startendtag(self, tag, attrs):
attrs = {k: v or '' for k, v in attrs}
attrs = {k: v or "" for k, v in attrs}
self.__builder.start(tag, attrs)
self.__builder.end(tag)

@ -189,7 +208,7 @@ class CustomHTMLParser(HTMLParser):
self.__builder.data(unichr(name2codepoint[name]))

def handle_charref(self, name):
code = int(name[1:], 16) if name.startswith(('x', 'X')) else int(name, 10)
code = int(name[1:], 16) if name.startswith(("x", "X")) else int(name, 10)
self.__builder.data(unichr(code))

def close(self):
@ -197,7 +216,7 @@ class CustomHTMLParser(HTMLParser):
return self.__builder.close()


Command = namedtuple('Command', 'negated cmd args lineno context')
Command = namedtuple("Command", "negated cmd args lineno context")


class FailedCheck(Exception):
@ -216,17 +235,17 @@ def concat_multi_lines(f):
concatenated."""
lastline = None # set to the last line when the last line has a backslash
firstlineno = None
catenated = ''
catenated = ""
for lineno, line in enumerate(f):
line = line.rstrip('\r\n')
line = line.rstrip("\r\n")

# strip the common prefix from the current line if needed
if lastline is not None:
common_prefix = os.path.commonprefix([line, lastline])
line = line[len(common_prefix):].lstrip()
line = line[len(common_prefix) :].lstrip()

firstlineno = firstlineno or lineno
if line.endswith('\\'):
if line.endswith("\\"):
if lastline is None:
lastline = line[:-1]
catenated += line[:-1]
@ -234,10 +253,10 @@ def concat_multi_lines(f):
yield firstlineno, catenated + line
lastline = None
firstlineno = None
catenated = ''
catenated = ""

if lastline is not None:
print_err(lineno, line, 'Trailing backslash at the end of the file')
print_err(lineno, line, "Trailing backslash at the end of the file")

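`concat_multi_lines` above joins backslash-continued directive lines, stripping the comment prefix that continuation lines repeat. A sketch of the effect on a hypothetical input (the function itself is defined in the hunk):

lines = iter([
    "//@ has foo/fn.bar.html \\",
    "//@   '//a[@href=\"../baz\"]'",
])
# Feeding these through concat_multi_lines would yield the single logical line:
#   //@ has foo/fn.bar.html '//a[@href="../baz"]'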
def get_known_directive_names():
@ -253,12 +272,12 @@ def get_known_directive_names():
"tools/compiletest/src/directive-list.rs",
),
"r",
encoding="utf8"
encoding="utf8",
) as fd:
content = fd.read()
return [
line.strip().replace('",', '').replace('"', '')
for line in content.split('\n')
line.strip().replace('",', "").replace('"', "")
for line in content.split("\n")
if filter_line(line)
]

@ -269,35 +288,42 @@ def get_known_directive_names():
# See <https://github.com/rust-lang/rust/issues/125813#issuecomment-2141953780>.
KNOWN_DIRECTIVE_NAMES = get_known_directive_names()

LINE_PATTERN = re.compile(r'''
LINE_PATTERN = re.compile(
r"""
//@\s+
(?P<negated>!?)(?P<cmd>[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*)
(?P<args>.*)$
''', re.X | re.UNICODE)
""",
re.X | re.UNICODE,
)


def get_commands(template):
with io.open(template, encoding='utf-8') as f:
with io.open(template, encoding="utf-8") as f:
for lineno, line in concat_multi_lines(f):
m = LINE_PATTERN.search(line)
if not m:
continue

cmd = m.group('cmd')
negated = (m.group('negated') == '!')
cmd = m.group("cmd")
negated = m.group("negated") == "!"
if not negated and cmd in KNOWN_DIRECTIVE_NAMES:
continue
args = m.group('args')
args = m.group("args")
if args and not args[:1].isspace():
print_err(lineno, line, 'Invalid template syntax')
print_err(lineno, line, "Invalid template syntax")
continue
try:
args = shlex.split(args)
except UnicodeEncodeError:
args = [arg.decode('utf-8') for arg in shlex.split(args.encode('utf-8'))]
args = [
arg.decode("utf-8") for arg in shlex.split(args.encode("utf-8"))
]
except Exception as exc:
raise Exception("line {}: {}".format(lineno + 1, exc)) from None
yield Command(negated=negated, cmd=cmd, args=args, lineno=lineno+1, context=line)
yield Command(
negated=negated, cmd=cmd, args=args, lineno=lineno + 1, context=line
)


def _flatten(node, acc):
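`LINE_PATTERN` above recognizes `//@` check lines, with an optional `!` capture for negation. A self-contained match under that pattern, on an illustrative line (the pattern is restated from the hunk):

import re

LINE_PATTERN = re.compile(
    r"""
    //@\s+
    (?P<negated>!?)(?P<cmd>[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*)
    (?P<args>.*)$
    """,
    re.X | re.UNICODE,
)

m = LINE_PATTERN.search("//@ !has foo/struct.Bar.html")
assert m.group("negated") == "!" and m.group("cmd") == "has"
assert m.group("args") == " foo/struct.Bar.html"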
@ -312,22 +338,24 @@ def _flatten(node, acc):
def flatten(node):
acc = []
_flatten(node, acc)
return ''.join(acc)
return "".join(acc)


def make_xml(text):
xml = ET.XML('<xml>%s</xml>' % text)
xml = ET.XML("<xml>%s</xml>" % text)
return xml


def normalize_xpath(path):
path = path.replace("{{channel}}", channel)
if path.startswith('//'):
return '.' + path # avoid warnings
elif path.startswith('.//'):
if path.startswith("//"):
return "." + path # avoid warnings
elif path.startswith(".//"):
return path
else:
raise InvalidCheck('Non-absolute XPath is not supported due to implementation issues')
raise InvalidCheck(
"Non-absolute XPath is not supported due to implementation issues"
)


class CachedFiles(object):
@ -338,12 +366,12 @@ class CachedFiles(object):
self.last_path = None

def resolve_path(self, path):
if path != '-':
if path != "-":
path = os.path.normpath(path)
self.last_path = path
return path
elif self.last_path is None:
raise InvalidCheck('Tried to use the previous path in the first command')
raise InvalidCheck("Tried to use the previous path in the first command")
else:
return self.last_path

@ -356,10 +384,10 @@ class CachedFiles(object):
return self.files[path]

abspath = self.get_absolute_path(path)
if not(os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck('File does not exist {!r}'.format(path))
if not (os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck("File does not exist {!r}".format(path))

with io.open(abspath, encoding='utf-8') as f:
with io.open(abspath, encoding="utf-8") as f:
data = f.read()
self.files[path] = data
return data
@ -370,15 +398,15 @@ class CachedFiles(object):
return self.trees[path]

abspath = self.get_absolute_path(path)
if not(os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck('File does not exist {!r}'.format(path))
if not (os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck("File does not exist {!r}".format(path))

with io.open(abspath, encoding='utf-8') as f:
with io.open(abspath, encoding="utf-8") as f:
try:
tree = ET.fromstringlist(f.readlines(), CustomHTMLParser())
except Exception as e:
raise RuntimeError( # noqa: B904 FIXME: py2
'Cannot parse an HTML file {!r}: {}'.format(path, e)
raise RuntimeError( # noqa: B904 FIXME: py2
"Cannot parse an HTML file {!r}: {}".format(path, e)
)
self.trees[path] = tree
return self.trees[path]
@ -386,8 +414,8 @@ class CachedFiles(object):
def get_dir(self, path):
path = self.resolve_path(path)
abspath = self.get_absolute_path(path)
if not(os.path.exists(abspath) and os.path.isdir(abspath)):
raise FailedCheck('Directory does not exist {!r}'.format(path))
if not (os.path.exists(abspath) and os.path.isdir(abspath)):
raise FailedCheck("Directory does not exist {!r}".format(path))


def check_string(data, pat, regexp):
@ -397,8 +425,8 @@ def check_string(data, pat, regexp):
elif regexp:
return re.search(pat, data, flags=re.UNICODE) is not None
else:
data = ' '.join(data.split())
pat = ' '.join(pat.split())
data = " ".join(data.split())
pat = " ".join(pat.split())
return pat in data

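In the default (non-regexp) branch above, both haystack and needle are whitespace-normalized before the substring test, so checks are insensitive to line wrapping in the generated HTML. A standalone restatement of that branch:

def whitespace_insensitive_contains(data, pat):
    # Collapse all runs of whitespace to single spaces on both sides.
    return " ".join(pat.split()) in " ".join(data.split())

assert whitespace_insensitive_contains("pub fn\n    foo()", "pub fn foo()")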
@ -444,19 +472,19 @@ def get_tree_count(tree, path):


def check_snapshot(snapshot_name, actual_tree, normalize_to_text):
assert rust_test_path.endswith('.rs')
snapshot_path = '{}.{}.{}'.format(rust_test_path[:-3], snapshot_name, 'html')
assert rust_test_path.endswith(".rs")
snapshot_path = "{}.{}.{}".format(rust_test_path[:-3], snapshot_name, "html")
try:
with open(snapshot_path, 'r') as snapshot_file:
with open(snapshot_path, "r") as snapshot_file:
expected_str = snapshot_file.read().replace("{{channel}}", channel)
except FileNotFoundError:
if bless:
expected_str = None
else:
raise FailedCheck('No saved snapshot value') # noqa: B904 FIXME: py2
raise FailedCheck("No saved snapshot value") # noqa: B904 FIXME: py2

if not normalize_to_text:
actual_str = ET.tostring(actual_tree).decode('utf-8')
actual_str = ET.tostring(actual_tree).decode("utf-8")
else:
actual_str = flatten(actual_tree)

@ -464,64 +492,66 @@ def check_snapshot(snapshot_name, actual_tree, normalize_to_text):
# 1. Is --bless
# 2. Are actual and expected tree different
# 3. Are actual and expected text different
if not expected_str \
or (not normalize_to_text and \
not compare_tree(make_xml(actual_str), make_xml(expected_str), stderr)) \
or (normalize_to_text and actual_str != expected_str):

if (
not expected_str
or (
not normalize_to_text
and not compare_tree(make_xml(actual_str), make_xml(expected_str), stderr)
)
or (normalize_to_text and actual_str != expected_str)
):
if bless:
with open(snapshot_path, 'w') as snapshot_file:
with open(snapshot_path, "w") as snapshot_file:
actual_str = actual_str.replace(channel, "{{channel}}")
snapshot_file.write(actual_str)
else:
print('--- expected ---\n')
print("--- expected ---\n")
print(expected_str)
print('\n\n--- actual ---\n')
print("\n\n--- actual ---\n")
print(actual_str)
print()
raise FailedCheck('Actual snapshot value is different than expected')
raise FailedCheck("Actual snapshot value is different than expected")


# Adapted from https://github.com/formencode/formencode/blob/3a1ba9de2fdd494dd945510a4568a3afeddb0b2e/formencode/doctest_xml_compare.py#L72-L120
def compare_tree(x1, x2, reporter=None):
if x1.tag != x2.tag:
if reporter:
reporter('Tags do not match: %s and %s' % (x1.tag, x2.tag))
reporter("Tags do not match: %s and %s" % (x1.tag, x2.tag))
return False
for name, value in x1.attrib.items():
if x2.attrib.get(name) != value:
if reporter:
reporter('Attributes do not match: %s=%r, %s=%r'
% (name, value, name, x2.attrib.get(name)))
reporter(
"Attributes do not match: %s=%r, %s=%r"
% (name, value, name, x2.attrib.get(name))
)
return False
for name in x2.attrib:
if name not in x1.attrib:
if reporter:
reporter('x2 has an attribute x1 is missing: %s'
% name)
reporter("x2 has an attribute x1 is missing: %s" % name)
return False
if not text_compare(x1.text, x2.text):
if reporter:
reporter('text: %r != %r' % (x1.text, x2.text))
reporter("text: %r != %r" % (x1.text, x2.text))
return False
if not text_compare(x1.tail, x2.tail):
if reporter:
reporter('tail: %r != %r' % (x1.tail, x2.tail))
reporter("tail: %r != %r" % (x1.tail, x2.tail))
return False
cl1 = list(x1)
cl2 = list(x2)
if len(cl1) != len(cl2):
if reporter:
reporter('children length differs, %i != %i'
% (len(cl1), len(cl2)))
reporter("children length differs, %i != %i" % (len(cl1), len(cl2)))
return False
i = 0
for c1, c2 in zip(cl1, cl2):
i += 1
if not compare_tree(c1, c2, reporter=reporter):
if reporter:
reporter('children %i do not match: %s'
% (i, c1.tag))
reporter("children %i do not match: %s" % (i, c1.tag))
return False
return True

@ -529,14 +559,14 @@ def compare_tree(x1, x2, reporter=None):
def text_compare(t1, t2):
if not t1 and not t2:
return True
if t1 == '*' or t2 == '*':
if t1 == "*" or t2 == "*":
return True
return (t1 or '').strip() == (t2 or '').strip()
return (t1 or "").strip() == (t2 or "").strip()


def stderr(*args):
if sys.version_info.major < 3:
file = codecs.getwriter('utf-8')(sys.stderr)
file = codecs.getwriter("utf-8")(sys.stderr)
else:
file = sys.stderr

@ -556,21 +586,25 @@ def print_err(lineno, context, err, message=None):

def get_nb_matching_elements(cache, c, regexp, stop_at_first):
tree = cache.get_tree(c.args[0])
pat, sep, attr = c.args[1].partition('/@')
pat, sep, attr = c.args[1].partition("/@")
if sep: # attribute
tree = cache.get_tree(c.args[0])
return check_tree_attr(tree, pat, attr, c.args[2], False)
else: # normalized text
pat = c.args[1]
if pat.endswith('/text()'):
if pat.endswith("/text()"):
pat = pat[:-7]
return check_tree_text(cache.get_tree(c.args[0]), pat, c.args[2], regexp, stop_at_first)
return check_tree_text(
cache.get_tree(c.args[0]), pat, c.args[2], regexp, stop_at_first
)


def check_files_in_folder(c, cache, folder, files):
files = files.strip()
if not files.startswith('[') or not files.endswith(']'):
raise InvalidCheck("Expected list as second argument of {} (ie '[]')".format(c.cmd))
if not files.startswith("[") or not files.endswith("]"):
raise InvalidCheck(
"Expected list as second argument of {} (ie '[]')".format(c.cmd)
)

folder = cache.get_absolute_path(folder)

@ -592,12 +626,18 @@ def check_files_in_folder(c, cache, folder, files):
|
||||
|
||||
error = 0
|
||||
if len(files_set) != 0:
|
||||
print_err(c.lineno, c.context, "Entries not found in folder `{}`: `{}`".format(
|
||||
folder, files_set))
|
||||
print_err(
|
||||
c.lineno,
|
||||
c.context,
|
||||
"Entries not found in folder `{}`: `{}`".format(folder, files_set),
|
||||
)
|
||||
error += 1
|
||||
if len(folder_set) != 0:
|
||||
print_err(c.lineno, c.context, "Extra entries in folder `{}`: `{}`".format(
|
||||
folder, folder_set))
|
||||
print_err(
|
||||
c.lineno,
|
||||
c.context,
|
||||
"Extra entries in folder `{}`: `{}`".format(folder, folder_set),
|
||||
)
|
||||
error += 1
|
||||
return error == 0
|
||||
|
||||
@ -608,11 +648,11 @@ ERR_COUNT = 0
|
||||
def check_command(c, cache):
|
||||
try:
|
||||
cerr = ""
|
||||
if c.cmd in ['has', 'hasraw', 'matches', 'matchesraw']: # string test
|
||||
regexp = c.cmd.startswith('matches')
|
||||
if c.cmd in ["has", "hasraw", "matches", "matchesraw"]: # string test
|
||||
regexp = c.cmd.startswith("matches")
|
||||
|
||||
# has <path> = file existence
|
||||
if len(c.args) == 1 and not regexp and 'raw' not in c.cmd:
|
||||
if len(c.args) == 1 and not regexp and "raw" not in c.cmd:
|
||||
try:
|
||||
cache.get_file(c.args[0])
|
||||
ret = True
|
||||
@ -620,24 +660,24 @@ def check_command(c, cache):
|
||||
cerr = str(err)
|
||||
ret = False
|
||||
# hasraw/matchesraw <path> <pat> = string test
|
||||
elif len(c.args) == 2 and 'raw' in c.cmd:
|
||||
elif len(c.args) == 2 and "raw" in c.cmd:
|
||||
cerr = "`PATTERN` did not match"
|
||||
ret = check_string(cache.get_file(c.args[0]), c.args[1], regexp)
|
||||
# has/matches <path> <pat> <match> = XML tree test
|
||||
elif len(c.args) == 3 and 'raw' not in c.cmd:
|
||||
elif len(c.args) == 3 and "raw" not in c.cmd:
|
||||
cerr = "`XPATH PATTERN` did not match"
|
||||
ret = get_nb_matching_elements(cache, c, regexp, True) != 0
|
||||
else:
|
||||
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
|
||||
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))
|
||||
|
||||
elif c.cmd == 'files': # check files in given folder
|
||||
if len(c.args) != 2: # files <folder path> <file list>
|
||||
elif c.cmd == "files": # check files in given folder
|
||||
if len(c.args) != 2: # files <folder path> <file list>
|
||||
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))
|
||||
elif c.negated:
|
||||
raise InvalidCheck("{} doesn't support negative check".format(c.cmd))
|
||||
ret = check_files_in_folder(c, cache, c.args[0], c.args[1])
|
||||
|
||||
elif c.cmd == 'count': # count test
|
||||
elif c.cmd == "count": # count test
|
||||
if len(c.args) == 3: # count <path> <pat> <count> = count test
|
||||
expected = int(c.args[2])
|
||||
found = get_tree_count(cache.get_tree(c.args[0]), c.args[1])
|
||||
@ -649,15 +689,15 @@ def check_command(c, cache):
|
||||
cerr = "Expected {} occurrences but found {}".format(expected, found)
|
||||
ret = found == expected
|
||||
else:
|
||||
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
|
||||
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))
|
||||
|
||||
elif c.cmd == 'snapshot': # snapshot test
|
||||
elif c.cmd == "snapshot": # snapshot test
|
||||
if len(c.args) == 3: # snapshot <snapshot-name> <html-path> <xpath>
|
||||
[snapshot_name, html_path, pattern] = c.args
|
||||
tree = cache.get_tree(html_path)
|
||||
xpath = normalize_xpath(pattern)
|
||||
normalize_to_text = False
|
||||
if xpath.endswith('/text()'):
|
||||
if xpath.endswith("/text()"):
|
||||
xpath = xpath[:-7]
|
||||
normalize_to_text = True
|
||||
|
||||
@ -671,13 +711,15 @@ def check_command(c, cache):
|
||||
cerr = str(err)
|
||||
ret = False
|
||||
elif len(subtrees) == 0:
|
||||
raise FailedCheck('XPATH did not match')
|
||||
raise FailedCheck("XPATH did not match")
|
||||
else:
|
||||
raise FailedCheck('Expected 1 match, but found {}'.format(len(subtrees)))
|
||||
raise FailedCheck(
|
||||
"Expected 1 match, but found {}".format(len(subtrees))
|
||||
)
|
||||
else:
|
||||
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
|
||||
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))
|
||||
|
||||
elif c.cmd == 'has-dir': # has-dir test
|
||||
elif c.cmd == "has-dir": # has-dir test
|
||||
if len(c.args) == 1: # has-dir <path> = has-dir test
|
||||
try:
|
||||
cache.get_dir(c.args[0])
|
||||
@ -686,22 +728,22 @@ def check_command(c, cache):
|
||||
cerr = str(err)
|
||||
ret = False
|
||||
else:
|
||||
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
|
||||
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))
|
||||
|
||||
elif c.cmd == 'valid-html':
|
||||
raise InvalidCheck('Unimplemented valid-html')
|
||||
elif c.cmd == "valid-html":
|
||||
raise InvalidCheck("Unimplemented valid-html")
|
||||
|
||||
elif c.cmd == 'valid-links':
|
||||
raise InvalidCheck('Unimplemented valid-links')
|
||||
elif c.cmd == "valid-links":
|
||||
raise InvalidCheck("Unimplemented valid-links")
|
||||
|
||||
else:
|
||||
raise InvalidCheck('Unrecognized {}'.format(c.cmd))
|
||||
raise InvalidCheck("Unrecognized {}".format(c.cmd))
|
||||
|
||||
if ret == c.negated:
|
||||
raise FailedCheck(cerr)
|
||||
|
||||
except FailedCheck as err:
|
||||
message = '{}{} check failed'.format('!' if c.negated else '', c.cmd)
|
||||
message = "{}{} check failed".format("!" if c.negated else "", c.cmd)
|
||||
print_err(c.lineno, c.context, str(err), message)
|
||||
except InvalidCheck as err:
|
||||
print_err(c.lineno, c.context, str(err))
|
||||
@ -713,18 +755,18 @@ def check(target, commands):
|
||||
check_command(c, cache)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) not in [3, 4]:
|
||||
stderr('Usage: {} <doc dir> <template> [--bless]'.format(sys.argv[0]))
|
||||
stderr("Usage: {} <doc dir> <template> [--bless]".format(sys.argv[0]))
|
||||
raise SystemExit(1)
|
||||
|
||||
rust_test_path = sys.argv[2]
|
||||
if len(sys.argv) > 3 and sys.argv[3] == '--bless':
|
||||
if len(sys.argv) > 3 and sys.argv[3] == "--bless":
|
||||
bless = True
|
||||
else:
|
||||
# We only support `--bless` at the end of the arguments.
|
||||
# This assert is to prevent silent failures.
|
||||
assert '--bless' not in sys.argv
|
||||
assert "--bless" not in sys.argv
|
||||
bless = False
|
||||
check(sys.argv[1], get_commands(rust_test_path))
|
||||
if ERR_COUNT:
|
||||
|
@ -45,7 +45,7 @@ def normalize_whitespace(s):

def breakpoint_callback(frame, bp_loc, dict):
    """This callback is registered with every breakpoint and makes sure that the
    frame containing the breakpoint location is selected """
    frame containing the breakpoint location is selected"""

    # HACK(eddyb) print a newline to avoid continuing an unfinished line.
    print("")
@ -79,7 +79,7 @@ def execute_command(command_interpreter, command):

    if res.Succeeded():
        if res.HasResult():
            print(normalize_whitespace(res.GetOutput() or ''), end='\n')
            print(normalize_whitespace(res.GetOutput() or ""), end="\n")

        # If the command introduced any breakpoints, make sure to register
        # them with the breakpoint
@ -89,20 +89,32 @@ def execute_command(command_interpreter, command):
            breakpoint_id = new_breakpoints.pop()

            if breakpoint_id in registered_breakpoints:
                print_debug("breakpoint with id %s is already registered. Ignoring." %
                            str(breakpoint_id))
                print_debug(
                    "breakpoint with id %s is already registered. Ignoring."
                    % str(breakpoint_id)
                )
            else:
                print_debug("registering breakpoint callback, id = " + str(breakpoint_id))
                callback_command = ("breakpoint command add -F breakpoint_callback " +
                                    str(breakpoint_id))
                print_debug(
                    "registering breakpoint callback, id = " + str(breakpoint_id)
                )
                callback_command = (
                    "breakpoint command add -F breakpoint_callback "
                    + str(breakpoint_id)
                )
                command_interpreter.HandleCommand(callback_command, res)
                if res.Succeeded():
                    print_debug("successfully registered breakpoint callback, id = " +
                                str(breakpoint_id))
                    print_debug(
                        "successfully registered breakpoint callback, id = "
                        + str(breakpoint_id)
                    )
                    registered_breakpoints.add(breakpoint_id)
                else:
                    print("Error while trying to register breakpoint callback, id = " +
                          str(breakpoint_id) + ", message = " + str(res.GetError()))
                    print(
                        "Error while trying to register breakpoint callback, id = "
                        + str(breakpoint_id)
                        + ", message = "
                        + str(res.GetError())
                    )
    else:
        print(res.GetError())

@ -117,14 +129,16 @@ def start_breakpoint_listener(target):
    try:
        while True:
            if listener.WaitForEvent(120, event):
                if lldb.SBBreakpoint.EventIsBreakpointEvent(event) and \
                        lldb.SBBreakpoint.GetBreakpointEventTypeFromEvent(event) == \
                        lldb.eBreakpointEventTypeAdded:
                if (
                    lldb.SBBreakpoint.EventIsBreakpointEvent(event)
                    and lldb.SBBreakpoint.GetBreakpointEventTypeFromEvent(event)
                    == lldb.eBreakpointEventTypeAdded
                ):
                    global new_breakpoints
                    breakpoint = lldb.SBBreakpoint.GetBreakpointFromEvent(event)
                    print_debug("breakpoint added, id = " + str(breakpoint.id))
                    new_breakpoints.append(breakpoint.id)
    except BaseException: # explicitly catch ctrl+c/sysexit
    except BaseException:  # explicitly catch ctrl+c/sysexit
        print_debug("breakpoint listener shutting down")

    # Start the listener and let it run as a daemon
@ -133,7 +147,9 @@ def start_breakpoint_listener(target):
    listener_thread.start()

    # Register the listener with the target
    target.GetBroadcaster().AddListener(listener, lldb.SBTarget.eBroadcastBitBreakpointChanged)
    target.GetBroadcaster().AddListener(
        listener, lldb.SBTarget.eBroadcastBitBreakpointChanged
    )


def start_watchdog():
@ -159,6 +175,7 @@ def start_watchdog():
    watchdog_thread.daemon = True
    watchdog_thread.start()


####################################################################################################
# ~main
####################################################################################################
@ -193,8 +210,14 @@ target_error = lldb.SBError()
target = debugger.CreateTarget(target_path, None, None, True, target_error)

if not target:
    print("Could not create debugging target '" + target_path + "': " +
          str(target_error) + ". Aborting.", file=sys.stderr)
    print(
        "Could not create debugging target '"
        + target_path
        + "': "
        + str(target_error)
        + ". Aborting.",
        file=sys.stderr,
    )
    sys.exit(1)


@ -204,15 +227,19 @@ start_breakpoint_listener(target)
command_interpreter = debugger.GetCommandInterpreter()

try:
    script_file = open(script_path, 'r')
    script_file = open(script_path, "r")

    for line in script_file:
        command = line.strip()
        if command == "run" or command == "r" or re.match("^process\s+launch.*", command):
        if (
            command == "run"
            or command == "r"
            or re.match("^process\s+launch.*", command)
        ):
            # Before starting to run the program, let the thread sleep a bit, so all
            # breakpoint added events can be processed
            time.sleep(0.5)
        if command != '':
        if command != "":
            execute_command(command_interpreter, command)

except IOError as e:

@ -1,7 +1,12 @@
import sys

from lldb import SBData, SBError, eBasicTypeLong, eBasicTypeUnsignedLong, \
    eBasicTypeUnsignedChar
from lldb import (
    SBData,
    SBError,
    eBasicTypeLong,
    eBasicTypeUnsignedLong,
    eBasicTypeUnsignedChar,
)

# from lldb.formatters import Logger

@ -50,13 +55,17 @@ class ValueBuilder:
    def from_int(self, name, value):
        # type: (str, int) -> SBValue
        type = self.valobj.GetType().GetBasicType(eBasicTypeLong)
        data = SBData.CreateDataFromSInt64Array(self.endianness, self.pointer_size, [value])
        data = SBData.CreateDataFromSInt64Array(
            self.endianness, self.pointer_size, [value]
        )
        return self.valobj.CreateValueFromData(name, data, type)

    def from_uint(self, name, value):
        # type: (str, int) -> SBValue
        type = self.valobj.GetType().GetBasicType(eBasicTypeUnsignedLong)
        data = SBData.CreateDataFromUInt64Array(self.endianness, self.pointer_size, [value])
        data = SBData.CreateDataFromUInt64Array(
            self.endianness, self.pointer_size, [value]
        )
        return self.valobj.CreateValueFromData(name, data, type)


@ -127,13 +136,17 @@ class EmptySyntheticProvider:

def SizeSummaryProvider(valobj, dict):
    # type: (SBValue, dict) -> str
    return 'size=' + str(valobj.GetNumChildren())
    return "size=" + str(valobj.GetNumChildren())


def vec_to_string(vec):
    length = vec.GetNumChildren()
    chars = [vec.GetChildAtIndex(i).GetValueAsUnsigned() for i in range(length)]
    return bytes(chars).decode(errors='replace') if PY3 else "".join(chr(char) for char in chars)
    return (
        bytes(chars).decode(errors="replace")
        if PY3
        else "".join(chr(char) for char in chars)
    )


def StdStringSummaryProvider(valobj, dict):
@ -172,7 +185,7 @@ def StdStrSummaryProvider(valobj, dict):
    error = SBError()
    process = data_ptr.GetProcess()
    data = process.ReadMemory(start, length, error)
    data = data.decode(encoding='UTF-8') if PY3 else data
    data = data.decode(encoding="UTF-8") if PY3 else data
    return '"%s"' % data


@ -199,9 +212,9 @@ def StdPathSummaryProvider(valobj, dict):
    data = process.ReadMemory(start, length, error)
    if PY3:
        try:
            data = data.decode(encoding='UTF-8')
            data = data.decode(encoding="UTF-8")
        except UnicodeDecodeError:
            return '%r' % data
            return "%r" % data
    return '"%s"' % data


@ -250,8 +263,10 @@ class StructSyntheticProvider:
        # type: () -> bool
        return True


class ClangEncodedEnumProvider:
    """Pretty-printer for 'clang-encoded' enums support implemented in LLDB"""

    DISCRIMINANT_MEMBER_NAME = "$discr$"
    VALUE_MEMBER_NAME = "value"

@ -260,7 +275,7 @@ class ClangEncodedEnumProvider:
        self.update()

    def has_children(self):
        return True
        return True

    def num_children(self):
        if self.is_default:
@ -276,25 +291,32 @@ class ClangEncodedEnumProvider:

    def get_child_at_index(self, index):
        if index == 0:
            return self.variant.GetChildMemberWithName(ClangEncodedEnumProvider.VALUE_MEMBER_NAME)
            return self.variant.GetChildMemberWithName(
                ClangEncodedEnumProvider.VALUE_MEMBER_NAME
            )
        if index == 1:
            return self.variant.GetChildMemberWithName(
                ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME)

                ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME
            )

    def update(self):
        all_variants = self.valobj.GetChildAtIndex(0)
        index = self._getCurrentVariantIndex(all_variants)
        self.variant = all_variants.GetChildAtIndex(index)
        self.is_default = self.variant.GetIndexOfChildWithName(
            ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME) == -1
        self.is_default = (
            self.variant.GetIndexOfChildWithName(
                ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME
            )
            == -1
        )

    def _getCurrentVariantIndex(self, all_variants):
        default_index = 0
        for i in range(all_variants.GetNumChildren()):
            variant = all_variants.GetChildAtIndex(i)
            discr = variant.GetChildMemberWithName(
                ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME)
                ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME
            )
            if discr.IsValid():
                discr_unsigned_value = discr.GetValueAsUnsigned()
                if variant.GetName() == f"$variant${discr_unsigned_value}":
@ -303,6 +325,7 @@ class ClangEncodedEnumProvider:
                default_index = i
        return default_index


class TupleSyntheticProvider:
    """Pretty-printer for tuples and tuple enum variants"""

@ -336,7 +359,9 @@ class TupleSyntheticProvider:
        else:
            field = self.type.GetFieldAtIndex(index)
            element = self.valobj.GetChildMemberWithName(field.name)
        return self.valobj.CreateValueFromData(str(index), element.GetData(), element.GetType())
        return self.valobj.CreateValueFromData(
            str(index), element.GetData(), element.GetType()
        )

    def update(self):
        # type: () -> None
@ -373,7 +398,7 @@ class StdVecSyntheticProvider:

    def get_child_index(self, name):
        # type: (str) -> int
        index = name.lstrip('[').rstrip(']')
        index = name.lstrip("[").rstrip("]")
        if index.isdigit():
            return int(index)
        else:
@ -383,15 +408,21 @@ class StdVecSyntheticProvider:
        # type: (int) -> SBValue
        start = self.data_ptr.GetValueAsUnsigned()
        address = start + index * self.element_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
        element = self.data_ptr.CreateValueFromAddress(
            "[%s]" % index, address, self.element_type
        )
        return element

    def update(self):
        # type: () -> None
        self.length = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned()
        self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner")
        self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName(
            "inner"
        )

        self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr"))
        self.data_ptr = unwrap_unique_or_non_null(
            self.buf.GetChildMemberWithName("ptr")
        )

        self.element_type = self.valobj.GetType().GetTemplateArgumentType(0)
        self.element_type_size = self.element_type.GetByteSize()
@ -412,7 +443,7 @@ class StdSliceSyntheticProvider:

    def get_child_index(self, name):
        # type: (str) -> int
        index = name.lstrip('[').rstrip(']')
        index = name.lstrip("[").rstrip("]")
        if index.isdigit():
            return int(index)
        else:
@ -422,7 +453,9 @@ class StdSliceSyntheticProvider:
        # type: (int) -> SBValue
        start = self.data_ptr.GetValueAsUnsigned()
        address = start + index * self.element_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
        element = self.data_ptr.CreateValueFromAddress(
            "[%s]" % index, address, self.element_type
        )
        return element

    def update(self):
@ -457,7 +490,7 @@ class StdVecDequeSyntheticProvider:

    def get_child_index(self, name):
        # type: (str) -> int
        index = name.lstrip('[').rstrip(']')
        index = name.lstrip("[").rstrip("]")
        if index.isdigit() and int(index) < self.size:
            return int(index)
        else:
@ -467,20 +500,26 @@ class StdVecDequeSyntheticProvider:
        # type: (int) -> SBValue
        start = self.data_ptr.GetValueAsUnsigned()
        address = start + ((index + self.head) % self.cap) * self.element_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
        element = self.data_ptr.CreateValueFromAddress(
            "[%s]" % index, address, self.element_type
        )
        return element

    def update(self):
        # type: () -> None
        self.head = self.valobj.GetChildMemberWithName("head").GetValueAsUnsigned()
        self.size = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned()
        self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner")
        self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName(
            "inner"
        )
        cap = self.buf.GetChildMemberWithName("cap")
        if cap.GetType().num_fields == 1:
            cap = cap.GetChildAtIndex(0)
        self.cap = cap.GetValueAsUnsigned()

        self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr"))
        self.data_ptr = unwrap_unique_or_non_null(
            self.buf.GetChildMemberWithName("ptr")
        )

        self.element_type = self.valobj.GetType().GetTemplateArgumentType(0)
        self.element_type_size = self.element_type.GetByteSize()
@ -510,7 +549,7 @@ class StdOldHashMapSyntheticProvider:

    def get_child_index(self, name):
        # type: (str) -> int
        index = name.lstrip('[').rstrip(']')
        index = name.lstrip("[").rstrip("]")
        if index.isdigit():
            return int(index)
        else:
@ -525,8 +564,14 @@ class StdOldHashMapSyntheticProvider:
        hashes = self.hash_uint_size * self.capacity
        align = self.pair_type_size
        # See `libcore/alloc.rs:padding_needed_for`
        len_rounded_up = (((((hashes + align) % self.modulo - 1) % self.modulo) & ~(
            (align - 1) % self.modulo)) % self.modulo - hashes) % self.modulo
        len_rounded_up = (
            (
                (((hashes + align) % self.modulo - 1) % self.modulo)
                & ~((align - 1) % self.modulo)
            )
            % self.modulo
            - hashes
        ) % self.modulo
        # len_rounded_up = ((hashes + align - 1) & ~(align - 1)) - hashes

        pairs_offset = hashes + len_rounded_up
@ -535,12 +580,16 @@ class StdOldHashMapSyntheticProvider:
        table_index = self.valid_indices[index]
        idx = table_index & self.capacity_mask
        address = pairs_start + idx * self.pair_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.pair_type)
        element = self.data_ptr.CreateValueFromAddress(
            "[%s]" % index, address, self.pair_type
        )
        if self.show_values:
            return element
        else:
            key = element.GetChildAtIndex(0)
            return self.valobj.CreateValueFromData("[%s]" % index, key.GetData(), key.GetType())
            return self.valobj.CreateValueFromData(
                "[%s]" % index, key.GetData(), key.GetType()
            )

    def update(self):
        # type: () -> None
@ -551,10 +600,12 @@ class StdOldHashMapSyntheticProvider:
        self.hashes = self.table.GetChildMemberWithName("hashes")
        self.hash_uint_type = self.hashes.GetType()
        self.hash_uint_size = self.hashes.GetType().GetByteSize()
        self.modulo = 2 ** self.hash_uint_size
        self.modulo = 2**self.hash_uint_size
        self.data_ptr = self.hashes.GetChildAtIndex(0).GetChildAtIndex(0)

        self.capacity_mask = self.table.GetChildMemberWithName("capacity_mask").GetValueAsUnsigned()
        self.capacity_mask = self.table.GetChildMemberWithName(
            "capacity_mask"
        ).GetValueAsUnsigned()
        self.capacity = (self.capacity_mask + 1) % self.modulo

        marker = self.table.GetChildMemberWithName("marker").GetType()  # type: SBType
@ -564,8 +615,9 @@ class StdOldHashMapSyntheticProvider:
        self.valid_indices = []
        for idx in range(self.capacity):
            address = self.data_ptr.GetValueAsUnsigned() + idx * self.hash_uint_size
            hash_uint = self.data_ptr.CreateValueFromAddress("[%s]" % idx, address,
                                                             self.hash_uint_type)
            hash_uint = self.data_ptr.CreateValueFromAddress(
                "[%s]" % idx, address, self.hash_uint_type
            )
            hash_ptr = hash_uint.GetChildAtIndex(0).GetChildAtIndex(0)
            if hash_ptr.GetValueAsUnsigned() != 0:
                self.valid_indices.append(idx)
@ -592,7 +644,7 @@ class StdHashMapSyntheticProvider:

    def get_child_index(self, name):
        # type: (str) -> int
        index = name.lstrip('[').rstrip(']')
        index = name.lstrip("[").rstrip("]")
        if index.isdigit():
            return int(index)
        else:
@ -605,19 +657,25 @@ class StdHashMapSyntheticProvider:
        if self.new_layout:
            idx = -(idx + 1)
        address = pairs_start + idx * self.pair_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.pair_type)
        element = self.data_ptr.CreateValueFromAddress(
            "[%s]" % index, address, self.pair_type
        )
        if self.show_values:
            return element
        else:
            key = element.GetChildAtIndex(0)
            return self.valobj.CreateValueFromData("[%s]" % index, key.GetData(), key.GetType())
            return self.valobj.CreateValueFromData(
                "[%s]" % index, key.GetData(), key.GetType()
            )

    def update(self):
        # type: () -> None
        table = self.table()
        inner_table = table.GetChildMemberWithName("table")

        capacity = inner_table.GetChildMemberWithName("bucket_mask").GetValueAsUnsigned() + 1
        capacity = (
            inner_table.GetChildMemberWithName("bucket_mask").GetValueAsUnsigned() + 1
        )
        ctrl = inner_table.GetChildMemberWithName("ctrl").GetChildAtIndex(0)

        self.size = inner_table.GetChildMemberWithName("items").GetValueAsUnsigned()
@ -630,16 +688,21 @@ class StdHashMapSyntheticProvider:
        if self.new_layout:
            self.data_ptr = ctrl.Cast(self.pair_type.GetPointerType())
        else:
            self.data_ptr = inner_table.GetChildMemberWithName("data").GetChildAtIndex(0)
            self.data_ptr = inner_table.GetChildMemberWithName("data").GetChildAtIndex(
                0
            )

        u8_type = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar)
        u8_type_size = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar).GetByteSize()
        u8_type_size = (
            self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar).GetByteSize()
        )

        self.valid_indices = []
        for idx in range(capacity):
            address = ctrl.GetValueAsUnsigned() + idx * u8_type_size
            value = ctrl.CreateValueFromAddress("ctrl[%s]" % idx, address,
                                                u8_type).GetValueAsUnsigned()
            value = ctrl.CreateValueFromAddress(
                "ctrl[%s]" % idx, address, u8_type
            ).GetValueAsUnsigned()
            is_present = value & 128 == 0
            if is_present:
                self.valid_indices.append(idx)
@ -691,10 +754,16 @@ class StdRcSyntheticProvider:

        self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value")

        self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex(
            0).GetChildMemberWithName("value")
        self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex(
            0).GetChildMemberWithName("value")
        self.strong = (
            self.ptr.GetChildMemberWithName("strong")
            .GetChildAtIndex(0)
            .GetChildMemberWithName("value")
        )
        self.weak = (
            self.ptr.GetChildMemberWithName("weak")
            .GetChildAtIndex(0)
            .GetChildMemberWithName("value")
        )

        self.value_builder = ValueBuilder(valobj)

@ -772,7 +841,9 @@ class StdCellSyntheticProvider:
def StdRefSummaryProvider(valobj, dict):
    # type: (SBValue, dict) -> str
    borrow = valobj.GetChildMemberWithName("borrow").GetValueAsSigned()
    return "borrow={}".format(borrow) if borrow >= 0 else "borrow_mut={}".format(-borrow)
    return (
        "borrow={}".format(borrow) if borrow >= 0 else "borrow_mut={}".format(-borrow)
    )


class StdRefSyntheticProvider:
@ -785,11 +856,16 @@ class StdRefSyntheticProvider:
        borrow = valobj.GetChildMemberWithName("borrow")
        value = valobj.GetChildMemberWithName("value")
        if is_cell:
            self.borrow = borrow.GetChildMemberWithName("value").GetChildMemberWithName("value")
            self.borrow = borrow.GetChildMemberWithName("value").GetChildMemberWithName(
                "value"
            )
            self.value = value.GetChildMemberWithName("value")
        else:
            self.borrow = borrow.GetChildMemberWithName("borrow").GetChildMemberWithName(
                "value").GetChildMemberWithName("value")
            self.borrow = (
                borrow.GetChildMemberWithName("borrow")
                .GetChildMemberWithName("value")
                .GetChildMemberWithName("value")
            )
            self.value = value.Dereference()

        self.value_builder = ValueBuilder(valobj)
@ -832,7 +908,7 @@ def StdNonZeroNumberSummaryProvider(valobj, _dict):

    # FIXME: Avoid printing as character literal,
    # see https://github.com/llvm/llvm-project/issues/65076.
    if inner_inner.GetTypeName() in ['char', 'unsigned char']:
        return str(inner_inner.GetValueAsSigned())
    if inner_inner.GetTypeName() in ["char", "unsigned char"]:
        return str(inner_inner.GetValueAsSigned())
    else:
        return inner_inner.GetValue()
        return inner_inner.GetValue()

@ -54,7 +54,7 @@ STD_REF_MUT_REGEX = re.compile(r"^(core::([a-z_]+::)+)RefMut<.+>$")
STD_REF_CELL_REGEX = re.compile(r"^(core::([a-z_]+::)+)RefCell<.+>$")
STD_NONZERO_NUMBER_REGEX = re.compile(r"^(core::([a-z_]+::)+)NonZero<.+>$")
STD_PATHBUF_REGEX = re.compile(r"^(std::([a-z_]+::)+)PathBuf$")
STD_PATH_REGEX = re.compile(r"^&(mut )?(std::([a-z_]+::)+)Path$")
STD_PATH_REGEX = re.compile(r"^&(mut )?(std::([a-z_]+::)+)Path$")

TUPLE_ITEM_REGEX = re.compile(r"__\d+$")

@ -84,6 +84,7 @@ STD_TYPE_TO_REGEX = {
    RustType.STD_PATH: STD_PATH_REGEX,
}


def is_tuple_fields(fields):
    # type: (list) -> bool
    return all(TUPLE_ITEM_REGEX.match(str(field.name)) for field in fields)

@ -32,6 +32,8 @@ use std::iter::Peekable;
use std::ops::{ControlFlow, Range};
use std::path::PathBuf;
use std::str::{self, CharIndices};
use std::sync::atomic::AtomicUsize;
use std::sync::{Arc, Weak};

use pulldown_cmark::{
    BrokenLink, CodeBlockKind, CowStr, Event, LinkType, Options, Parser, Tag, TagEnd, html,
@ -1301,8 +1303,20 @@ impl LangString {
    }
}

impl Markdown<'_> {
impl<'a> Markdown<'a> {
    pub fn into_string(self) -> String {
        // This is actually common enough to special-case
        if self.content.is_empty() {
            return String::new();
        }

        let mut s = String::with_capacity(self.content.len() * 3 / 2);
        html::push_html(&mut s, self.into_iter());

        s
    }

    fn into_iter(self) -> CodeBlocks<'a, 'a, impl Iterator<Item = Event<'a>>> {
        let Markdown {
            content: md,
            links,
@ -1313,32 +1327,72 @@ impl Markdown<'_> {
            heading_offset,
        } = self;

        // This is actually common enough to special-case
        if md.is_empty() {
            return String::new();
        }
        let mut replacer = |broken_link: BrokenLink<'_>| {
        let replacer = move |broken_link: BrokenLink<'_>| {
            links
                .iter()
                .find(|link| *link.original_text == *broken_link.reference)
                .map(|link| (link.href.as_str().into(), link.tooltip.as_str().into()))
        };

        let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(&mut replacer));
        let p = Parser::new_with_broken_link_callback(md, main_body_opts(), Some(replacer));
        let p = p.into_offset_iter();

        let mut s = String::with_capacity(md.len() * 3 / 2);

        ids.handle_footnotes(|ids, existing_footnotes| {
            let p = HeadingLinks::new(p, None, ids, heading_offset);
            let p = footnotes::Footnotes::new(p, existing_footnotes);
            let p = LinkReplacer::new(p.map(|(ev, _)| ev), links);
            let p = TableWrapper::new(p);
            let p = CodeBlocks::new(p, codes, edition, playground);
            html::push_html(&mut s, p);
        });
            CodeBlocks::new(p, codes, edition, playground)
        })
    }

        s
    /// Convert markdown to (summary, remaining) HTML.
    ///
    /// - The summary is the first top-level Markdown element (usually a paragraph, but potentially
    ///   any block).
    /// - The remaining docs contain everything after the summary.
    pub(crate) fn split_summary_and_content(self) -> (Option<String>, Option<String>) {
        if self.content.is_empty() {
            return (None, None);
        }
        let mut p = self.into_iter();

        let mut event_level = 0;
        let mut summary_events = Vec::new();
        let mut get_next_tag = false;

        let mut end_of_summary = false;
        while let Some(event) = p.next() {
            match event {
                Event::Start(_) => event_level += 1,
                Event::End(kind) => {
                    event_level -= 1;
                    if event_level == 0 {
                        // We're back at the "top" so it means we're done with the summary.
                        end_of_summary = true;
                        // We surround tables with `<div>` HTML tags so this is a special case.
                        get_next_tag = kind == TagEnd::Table;
                    }
                }
                _ => {}
            }
            summary_events.push(event);
            if end_of_summary {
                if get_next_tag && let Some(event) = p.next() {
                    summary_events.push(event);
                }
                break;
            }
        }
        let mut summary = String::new();
        html::push_html(&mut summary, summary_events.into_iter());
        if summary.is_empty() {
            return (None, None);
        }
        let mut content = String::new();
        html::push_html(&mut content, p);

        if content.is_empty() { (Some(summary), None) } else { (Some(summary), Some(content)) }
    }
}

@ -1882,7 +1936,7 @@ pub(crate) fn rust_code_blocks(md: &str, extra_info: &ExtraInfo<'_>) -> Vec<Rust
#[derive(Clone, Default, Debug)]
pub struct IdMap {
    map: FxHashMap<String, usize>,
    existing_footnotes: usize,
    existing_footnotes: Arc<AtomicUsize>,
}

fn is_default_id(id: &str) -> bool {
@ -1942,7 +1996,7 @@ fn is_default_id(id: &str) -> bool {

impl IdMap {
    pub fn new() -> Self {
        IdMap { map: FxHashMap::default(), existing_footnotes: 0 }
        IdMap { map: FxHashMap::default(), existing_footnotes: Arc::new(AtomicUsize::new(0)) }
    }

    pub(crate) fn derive<S: AsRef<str> + ToString>(&mut self, candidate: S) -> String {
@ -1970,15 +2024,17 @@ impl IdMap {

    /// Method to handle `existing_footnotes` increment automatically (to prevent forgetting
    /// about it).
    pub(crate) fn handle_footnotes<F: FnOnce(&mut Self, &mut usize)>(&mut self, closure: F) {
        let mut existing_footnotes = self.existing_footnotes;
    pub(crate) fn handle_footnotes<'a, T, F: FnOnce(&'a mut Self, Weak<AtomicUsize>) -> T>(
        &'a mut self,
        closure: F,
    ) -> T {
        let existing_footnotes = Arc::downgrade(&self.existing_footnotes);

        closure(self, &mut existing_footnotes);
        self.existing_footnotes = existing_footnotes;
        closure(self, existing_footnotes)
    }

    pub(crate) fn clear(&mut self) {
        self.map.clear();
        self.existing_footnotes = 0;
        self.existing_footnotes = Arc::new(AtomicUsize::new(0));
    }
}

@ -1,5 +1,8 @@
//! Markdown footnote handling.

use std::fmt::Write as _;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, Weak};

use pulldown_cmark::{CowStr, Event, Tag, TagEnd, html};
use rustc_data_structures::fx::FxIndexMap;
@ -8,10 +11,11 @@ use super::SpannedEvent;

/// Moves all footnote definitions to the end and add back links to the
/// references.
pub(super) struct Footnotes<'a, 'b, I> {
pub(super) struct Footnotes<'a, I> {
    inner: I,
    footnotes: FxIndexMap<String, FootnoteDef<'a>>,
    existing_footnotes: &'b mut usize,
    existing_footnotes: Arc<AtomicUsize>,
    start_id: usize,
}

/// The definition of a single footnote.
@ -21,13 +25,16 @@ struct FootnoteDef<'a> {
    id: usize,
}

impl<'a, 'b, I: Iterator<Item = SpannedEvent<'a>>> Footnotes<'a, 'b, I> {
    pub(super) fn new(iter: I, existing_footnotes: &'b mut usize) -> Self {
        Footnotes { inner: iter, footnotes: FxIndexMap::default(), existing_footnotes }
impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Footnotes<'a, I> {
    pub(super) fn new(iter: I, existing_footnotes: Weak<AtomicUsize>) -> Self {
        let existing_footnotes =
            existing_footnotes.upgrade().expect("`existing_footnotes` was dropped");
        let start_id = existing_footnotes.load(Ordering::Relaxed);
        Footnotes { inner: iter, footnotes: FxIndexMap::default(), existing_footnotes, start_id }
    }

    fn get_entry(&mut self, key: &str) -> (&mut Vec<Event<'a>>, usize) {
        let new_id = self.footnotes.len() + 1 + *self.existing_footnotes;
        let new_id = self.footnotes.len() + 1 + self.start_id;
        let key = key.to_owned();
        let FootnoteDef { content, id } =
            self.footnotes.entry(key).or_insert(FootnoteDef { content: Vec::new(), id: new_id });
@ -44,7 +51,7 @@ impl<'a, 'b, I: Iterator<Item = SpannedEvent<'a>>> Footnotes<'a, 'b, I> {
                id,
                // Although the ID count is for the whole page, the footnote reference
                // are local to the item so we make this ID "local" when displayed.
                id - *self.existing_footnotes
                id - self.start_id
            );
            Event::Html(reference.into())
        }
@ -64,7 +71,7 @@ impl<'a, 'b, I: Iterator<Item = SpannedEvent<'a>>> Footnotes<'a, 'b, I> {
        }
    }

impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, '_, I> {
impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, I> {
    type Item = SpannedEvent<'a>;

    fn next(&mut self) -> Option<Self::Item> {
@ -87,7 +94,7 @@ impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for Footnotes<'a, '_, I>
                // After all the markdown is emmited, emit an <hr> then all the footnotes
                // in a list.
                let defs: Vec<_> = self.footnotes.drain(..).map(|(_, x)| x).collect();
                *self.existing_footnotes += defs.len();
                self.existing_footnotes.fetch_add(defs.len(), Ordering::Relaxed);
                let defs_html = render_footnotes_defs(defs);
                return Some((Event::Html(defs_html.into()), 0..0));
            } else {

@ -1904,7 +1904,6 @@ fn render_impl(
        }
    }

    let trait_is_none = trait_.is_none();
    // If we've implemented a trait, then also emit documentation for all
    // default items which weren't overridden in the implementation block.
    // We don't emit documentation for default items if they appear in the
@ -1936,6 +1935,23 @@ fn render_impl(
            if rendering_params.toggle_open_by_default { " open" } else { "" }
        );
    }

    let (before_dox, after_dox) = i
        .impl_item
        .opt_doc_value()
        .map(|dox| {
            Markdown {
                content: &*dox,
                links: &i.impl_item.links(cx),
                ids: &mut cx.id_map.borrow_mut(),
                error_codes: cx.shared.codes,
                edition: cx.shared.edition(),
                playground: &cx.shared.playground,
                heading_offset: HeadingOffset::H4,
            }
            .split_summary_and_content()
        })
        .unwrap_or((None, None));
    render_impl_summary(
        w,
        cx,
@ -1944,33 +1960,23 @@ fn render_impl(
        rendering_params.show_def_docs,
        use_absolute,
        aliases,
        &before_dox,
    );
    if toggled {
        w.write_str("</summary>");
    }

    if let Some(ref dox) = i.impl_item.opt_doc_value() {
        if trait_is_none && impl_.items.is_empty() {
    if before_dox.is_some() {
        if trait_.is_none() && impl_.items.is_empty() {
            w.write_str(
                "<div class=\"item-info\">\
                    <div class=\"stab empty-impl\">This impl block contains no items.</div>\
                </div>",
            );
        }
        write!(
            w,
            "<div class=\"docblock\">{}</div>",
            Markdown {
                content: dox,
                links: &i.impl_item.links(cx),
                ids: &mut cx.id_map.borrow_mut(),
                error_codes: cx.shared.codes,
                edition: cx.shared.edition(),
                playground: &cx.shared.playground,
                heading_offset: HeadingOffset::H4,
            }
            .into_string()
        );
        if let Some(after_dox) = after_dox {
            write!(w, "<div class=\"docblock\">{after_dox}</div>");
        }
    }
    if !default_impl_items.is_empty() || !impl_items.is_empty() {
        w.write_str("<div class=\"impl-items\">");
@ -2031,6 +2037,7 @@ pub(crate) fn render_impl_summary(
    // This argument is used to reference same type with different paths to avoid duplication
    // in documentation pages for trait with automatic implementations like "Send" and "Sync".
    aliases: &[String],
    doc: &Option<String>,
) {
    let inner_impl = i.inner_impl();
    let id = cx.derive_id(get_id_for_impl(cx.tcx(), i.impl_item.item_id));
@ -2082,6 +2089,10 @@ pub(crate) fn render_impl_summary(
        );
    }

    if let Some(doc) = doc {
        write!(w, "<div class=\"docblock\">{doc}</div>");
    }

    w.write_str("</section>");
}

@ -2210,6 +2210,39 @@ details.toggle[open] > summary::after {
	content: "Collapse";
}

details.toggle:not([open]) > summary .docblock {
	max-height: calc(1.5em + 0.75em);
	overflow-y: hidden;
}
details.toggle:not([open]) > summary .docblock > :first-child {
	max-width: 100%;
	overflow: hidden;
	width: fit-content;
	white-space: nowrap;
	position: relative;
	padding-right: 1em;
}
details.toggle:not([open]) > summary .docblock > :first-child::after {
	content: "…";
	position: absolute;
	right: 0;
	top: 0;
	bottom: 0;
	z-index: 1;
	background-color: var(--main-background-color);
	font: 1rem/1.5 "Source Serif 4", NanumBarunGothic, serif;
	/* To make it look a bit better and not have it stuck to the preceding element. */
	padding-left: 0.2em;
}
details.toggle:not([open]) > summary .docblock > div:first-child::after {
	/* This is to make the "..." always appear at the bottom. */
	padding-top: calc(1.5em + 0.75em - 1.2rem);
}

details.toggle > summary .docblock {
	margin-top: 0.75em;
}

/* This is needed in docblocks to have the "▶" element to be on the same line. */
.docblock summary > * {
	display: inline-block;

@ -14,6 +14,7 @@ import json
|
||||
import datetime
|
||||
import collections
|
||||
import textwrap
|
||||
|
||||
try:
|
||||
import urllib2
|
||||
from urllib2 import HTTPError
|
||||
@ -21,7 +22,7 @@ except ImportError:
|
||||
import urllib.request as urllib2
|
||||
from urllib.error import HTTPError
|
||||
try:
|
||||
import typing # noqa: F401 FIXME: py2
|
||||
import typing # noqa: F401 FIXME: py2
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
@ -29,40 +30,41 @@ except ImportError:
|
||||
# These should be collaborators of the rust-lang/rust repository (with at least
|
||||
# read privileges on it). CI will fail otherwise.
|
||||
MAINTAINERS = {
|
||||
'book': {'carols10cents'},
|
||||
'nomicon': {'frewsxcv', 'Gankra', 'JohnTitor'},
|
||||
'reference': {'Havvy', 'matthewjasper', 'ehuss'},
|
||||
'rust-by-example': {'marioidival'},
|
||||
'embedded-book': {'adamgreig', 'andre-richter', 'jamesmunns', 'therealprof'},
|
||||
'edition-guide': {'ehuss'},
|
||||
'rustc-dev-guide': {'spastorino', 'amanjeev', 'JohnTitor'},
|
||||
"book": {"carols10cents"},
|
||||
"nomicon": {"frewsxcv", "Gankra", "JohnTitor"},
|
||||
"reference": {"Havvy", "matthewjasper", "ehuss"},
|
||||
"rust-by-example": {"marioidival"},
|
||||
"embedded-book": {"adamgreig", "andre-richter", "jamesmunns", "therealprof"},
|
||||
"edition-guide": {"ehuss"},
|
||||
"rustc-dev-guide": {"spastorino", "amanjeev", "JohnTitor"},
|
||||
}
|
||||
|
||||
LABELS = {
|
||||
'book': ['C-bug'],
|
||||
'nomicon': ['C-bug'],
|
||||
'reference': ['C-bug'],
|
||||
'rust-by-example': ['C-bug'],
|
||||
'embedded-book': ['C-bug'],
|
||||
'edition-guide': ['C-bug'],
|
||||
'rustc-dev-guide': ['C-bug'],
|
||||
"book": ["C-bug"],
|
||||
"nomicon": ["C-bug"],
|
||||
"reference": ["C-bug"],
|
||||
"rust-by-example": ["C-bug"],
|
||||
"embedded-book": ["C-bug"],
|
||||
"edition-guide": ["C-bug"],
|
||||
"rustc-dev-guide": ["C-bug"],
|
||||
}
|
||||
|
||||
REPOS = {
|
||||
'book': 'https://github.com/rust-lang/book',
|
||||
'nomicon': 'https://github.com/rust-lang/nomicon',
|
||||
'reference': 'https://github.com/rust-lang/reference',
|
||||
'rust-by-example': 'https://github.com/rust-lang/rust-by-example',
|
||||
'embedded-book': 'https://github.com/rust-embedded/book',
|
||||
'edition-guide': 'https://github.com/rust-lang/edition-guide',
|
||||
'rustc-dev-guide': 'https://github.com/rust-lang/rustc-dev-guide',
|
||||
"book": "https://github.com/rust-lang/book",
|
||||
"nomicon": "https://github.com/rust-lang/nomicon",
|
||||
"reference": "https://github.com/rust-lang/reference",
|
||||
"rust-by-example": "https://github.com/rust-lang/rust-by-example",
|
||||
"embedded-book": "https://github.com/rust-embedded/book",
|
||||
"edition-guide": "https://github.com/rust-lang/edition-guide",
|
||||
"rustc-dev-guide": "https://github.com/rust-lang/rustc-dev-guide",
|
||||
}
|
||||
|
||||
|
||||
def load_json_from_response(resp):
|
||||
# type: (typing.Any) -> typing.Any
|
||||
content = resp.read()
|
||||
if isinstance(content, bytes):
|
||||
content_str = content.decode('utf-8')
|
||||
content_str = content.decode("utf-8")
|
||||
else:
|
||||
print("Refusing to decode " + str(type(content)) + " to str")
|
||||
return json.loads(content_str)
|
||||
@ -70,11 +72,10 @@ def load_json_from_response(resp):
|
||||
|
||||
def read_current_status(current_commit, path):
|
||||
# type: (str, str) -> typing.Mapping[str, typing.Any]
|
||||
'''Reads build status of `current_commit` from content of `history/*.tsv`
|
||||
'''
|
||||
with open(path, 'r') as f:
|
||||
"""Reads build status of `current_commit` from content of `history/*.tsv`"""
|
||||
with open(path, "r") as f:
|
||||
for line in f:
|
||||
(commit, status) = line.split('\t', 1)
|
||||
(commit, status) = line.split("\t", 1)
|
||||
if commit == current_commit:
|
||||
return json.loads(status)
|
||||
return {}
|
||||
@ -82,12 +83,12 @@ def read_current_status(current_commit, path):
|
||||
|
||||
def gh_url():
|
||||
# type: () -> str
|
||||
return os.environ['TOOLSTATE_ISSUES_API_URL']
|
||||
return os.environ["TOOLSTATE_ISSUES_API_URL"]
|
||||
|
||||
|
||||
def maybe_remove_mention(message):
|
||||
# type: (str) -> str
|
||||
if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
|
||||
if os.environ.get("TOOLSTATE_SKIP_MENTIONS") is not None:
|
||||
return message.replace("@", "")
|
||||
return message
|
||||
|
||||
@ -102,36 +103,45 @@ def issue(
|
||||
github_token,
|
||||
):
|
||||
# type: (str, str, typing.Iterable[str], str, str, typing.List[str], str) -> None
|
||||
'''Open an issue about the toolstate failure.'''
|
||||
if status == 'test-fail':
|
||||
status_description = 'has failing tests'
|
||||
"""Open an issue about the toolstate failure."""
|
||||
if status == "test-fail":
|
||||
status_description = "has failing tests"
|
||||
else:
|
||||
status_description = 'no longer builds'
|
||||
request = json.dumps({
|
||||
'body': maybe_remove_mention(textwrap.dedent('''\
|
||||
status_description = "no longer builds"
|
||||
request = json.dumps(
|
||||
{
|
||||
"body": maybe_remove_mention(
|
||||
textwrap.dedent("""\
|
||||
Hello, this is your friendly neighborhood mergebot.
|
||||
After merging PR {}, I observed that the tool {} {}.
|
||||
A follow-up PR to the repository {} is needed to fix the fallout.
|
||||
|
||||
cc @{}, do you think you would have time to do the follow-up work?
|
||||
If so, that would be great!
|
||||
''').format(
|
||||
relevant_pr_number, tool, status_description,
|
||||
REPOS.get(tool), relevant_pr_user
|
||||
)),
|
||||
'title': '`{}` no longer builds after {}'.format(tool, relevant_pr_number),
|
||||
'assignees': list(assignees),
|
||||
'labels': labels,
|
||||
})
|
||||
print("Creating issue:\n{}".format(request))
|
||||
response = urllib2.urlopen(urllib2.Request(
|
||||
gh_url(),
|
||||
request.encode(),
|
||||
{
|
||||
'Authorization': 'token ' + github_token,
|
||||
'Content-Type': 'application/json',
|
||||
""").format(
|
||||
relevant_pr_number,
|
||||
tool,
|
||||
status_description,
|
||||
REPOS.get(tool),
|
||||
relevant_pr_user,
|
||||
)
|
||||
),
|
||||
"title": "`{}` no longer builds after {}".format(tool, relevant_pr_number),
|
||||
"assignees": list(assignees),
|
||||
"labels": labels,
|
||||
}
|
||||
))
|
||||
)
|
||||
print("Creating issue:\n{}".format(request))
|
||||
response = urllib2.urlopen(
|
||||
urllib2.Request(
|
||||
gh_url(),
|
||||
request.encode(),
|
||||
{
|
||||
"Authorization": "token " + github_token,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
)
|
||||
)
|
||||
response.read()
|
||||
|
||||
|
||||
@ -145,27 +155,26 @@ def update_latest(
|
||||
github_token,
|
||||
):
|
||||
# type: (str, str, str, str, str, str, str) -> str
|
||||
'''Updates `_data/latest.json` to match build result of the given commit.
|
||||
'''
|
||||
with open('_data/latest.json', 'r+') as f:
|
||||
"""Updates `_data/latest.json` to match build result of the given commit."""
|
||||
with open("_data/latest.json", "r+") as f:
|
||||
latest = json.load(f, object_pairs_hook=collections.OrderedDict)
|
||||
|
||||
current_status = {
|
||||
os_: read_current_status(current_commit, 'history/' + os_ + '.tsv')
|
||||
for os_ in ['windows', 'linux']
|
||||
os_: read_current_status(current_commit, "history/" + os_ + ".tsv")
|
||||
for os_ in ["windows", "linux"]
|
||||
}
|
||||
|
||||
slug = 'rust-lang/rust'
|
||||
message = textwrap.dedent('''\
|
||||
slug = "rust-lang/rust"
|
||||
message = textwrap.dedent("""\
|
||||
📣 Toolstate changed by {}!
|
||||
|
||||
Tested on commit {}@{}.
|
||||
Direct link to PR: <{}>
|
||||
|
||||
''').format(relevant_pr_number, slug, current_commit, relevant_pr_url)
|
||||
""").format(relevant_pr_number, slug, current_commit, relevant_pr_url)
|
||||
anything_changed = False
|
||||
for status in latest:
|
||||
tool = status['tool']
|
||||
tool = status["tool"]
|
||||
changed = False
|
||||
create_issue_for_status = None # set to the status that caused the issue
|
||||
|
||||
@ -173,57 +182,70 @@ def update_latest(
|
||||
old = status[os_]
|
||||
new = s.get(tool, old)
|
||||
status[os_] = new
|
||||
maintainers = ' '.join('@'+name for name in MAINTAINERS.get(tool, ()))
|
||||
maintainers = " ".join("@" + name for name in MAINTAINERS.get(tool, ()))
|
||||
# comparing the strings, but they are ordered appropriately:
|
||||
# "test-pass" > "test-fail" > "build-fail"
|
||||
if new > old:
|
||||
# things got fixed or at least the status quo improved
|
||||
changed = True
|
||||
message += '🎉 {} on {}: {} → {} (cc {}).\n' \
|
||||
.format(tool, os_, old, new, maintainers)
|
||||
message += "🎉 {} on {}: {} → {} (cc {}).\n".format(
|
||||
tool, os_, old, new, maintainers
|
||||
)
|
||||
elif new < old:
|
||||
# tests or builds are failing and were not failing before
|
||||
changed = True
|
||||
title = '💔 {} on {}: {} → {}' \
|
||||
.format(tool, os_, old, new)
|
||||
message += '{} (cc {}).\n' \
|
||||
.format(title, maintainers)
|
||||
title = "💔 {} on {}: {} → {}".format(tool, os_, old, new)
|
||||
message += "{} (cc {}).\n".format(title, maintainers)
|
||||
# See if we need to create an issue.
|
||||
# Create issue if things no longer build.
|
||||
# (No issue for mere test failures to avoid spurious issues.)
|
||||
if new == 'build-fail':
|
||||
if new == "build-fail":
|
||||
create_issue_for_status = new
|
||||
|
||||
if create_issue_for_status is not None:
|
||||
try:
|
||||
issue(
|
||||
tool, create_issue_for_status, MAINTAINERS.get(tool, ()),
|
||||
relevant_pr_number, relevant_pr_user, LABELS.get(tool, []),
|
||||
tool,
|
||||
create_issue_for_status,
|
||||
MAINTAINERS.get(tool, ()),
|
||||
relevant_pr_number,
|
||||
relevant_pr_user,
|
||||
LABELS.get(tool, []),
|
||||
github_token,
|
||||
)
|
||||
except HTTPError as e:
|
||||
# network errors will simply end up not creating an issue, but that's better
|
||||
# than failing the entire build job
|
||||
print("HTTPError when creating issue for status regression: {0}\n{1!r}"
|
||||
.format(e, e.read()))
|
||||
print(
|
||||
"HTTPError when creating issue for status regression: {0}\n{1!r}".format(
|
||||
e, e.read()
|
||||
)
|
||||
)
|
||||
except IOError as e:
|
||||
print("I/O error when creating issue for status regression: {0}".format(e))
|
||||
print(
|
||||
"I/O error when creating issue for status regression: {0}".format(
|
||||
e
|
||||
)
|
||||
)
|
||||
except:
|
||||
print("Unexpected error when creating issue for status regression: {0}"
|
||||
.format(sys.exc_info()[0]))
|
||||
print(
|
||||
"Unexpected error when creating issue for status regression: {0}".format(
|
||||
sys.exc_info()[0]
|
||||
)
|
||||
)
|
||||
raise
|
||||
|
||||
if changed:
|
||||
status['commit'] = current_commit
|
||||
status['datetime'] = current_datetime
|
||||
status["commit"] = current_commit
|
||||
status["datetime"] = current_datetime
|
||||
anything_changed = True
|
||||
|
||||
if not anything_changed:
|
||||
return ''
|
||||
return ""
|
||||
|
||||
f.seek(0)
|
||||
f.truncate(0)
|
||||
json.dump(latest, f, indent=4, separators=(',', ': '))
|
||||
json.dump(latest, f, indent=4, separators=(",", ": "))
|
||||
return message
|
||||

@ -231,12 +253,12 @@ def update_latest(
# There are variables declared within that are implicitly global; it is unknown
# which ones precisely but at least this is true for `github_token`.
try:
if __name__ != '__main__':
if __name__ != "__main__":
exit(0)

cur_commit = sys.argv[1]
cur_datetime = datetime.datetime.now(datetime.timezone.utc).strftime(
'%Y-%m-%dT%H:%M:%SZ'
"%Y-%m-%dT%H:%M:%SZ"
)
cur_commit_msg = sys.argv[2]
save_message_to_path = sys.argv[3]

@ -244,21 +266,21 @@ try:

# assume that PR authors are also owners of the repo where the branch lives
relevant_pr_match = re.search(
r'Auto merge of #([0-9]+) - ([^:]+):[^,]+, r=(\S+)',
r"Auto merge of #([0-9]+) - ([^:]+):[^,]+, r=(\S+)",
cur_commit_msg,
)
if relevant_pr_match:
number = relevant_pr_match.group(1)
relevant_pr_user = relevant_pr_match.group(2)
relevant_pr_number = 'rust-lang/rust#' + number
relevant_pr_url = 'https://github.com/rust-lang/rust/pull/' + number
relevant_pr_number = "rust-lang/rust#" + number
relevant_pr_url = "https://github.com/rust-lang/rust/pull/" + number
pr_reviewer = relevant_pr_match.group(3)
else:
number = '-1'
relevant_pr_user = 'ghost'
relevant_pr_number = '<unknown PR>'
relevant_pr_url = '<unknown>'
pr_reviewer = 'ghost'
number = "-1"
relevant_pr_user = "ghost"
relevant_pr_number = "<unknown PR>"
relevant_pr_url = "<unknown>"
pr_reviewer = "ghost"
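
For context, the regex above is what ties a bors merge commit back to its PR. A small illustration against a made-up commit subject (the PR number, author, and reviewer here are hypothetical, not taken from any real PR):

import re

cur_commit_msg = "Auto merge of #12345 - octocat:some-branch, r=reviewer"
m = re.search(r"Auto merge of #([0-9]+) - ([^:]+):[^,]+, r=(\S+)", cur_commit_msg)
if m:
    print(m.group(1))  # "12345"    -> PR number
    print(m.group(2))  # "octocat"  -> PR author (assumed to own the branch)
    print(m.group(3))  # "reviewer" -> approving reviewer
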

message = update_latest(
cur_commit,

@ -270,28 +292,30 @@ try:
github_token,
)
if not message:
print('<Nothing changed>')
print("<Nothing changed>")
sys.exit(0)

print(message)

if not github_token:
print('Dry run only, not committing anything')
print("Dry run only, not committing anything")
sys.exit(0)

with open(save_message_to_path, 'w') as f:
with open(save_message_to_path, "w") as f:
f.write(message)

# Write the toolstate comment on the PR as well.
issue_url = gh_url() + '/{}/comments'.format(number)
response = urllib2.urlopen(urllib2.Request(
issue_url,
json.dumps({'body': maybe_remove_mention(message)}).encode(),
{
'Authorization': 'token ' + github_token,
'Content-Type': 'application/json',
}
))
issue_url = gh_url() + "/{}/comments".format(number)
response = urllib2.urlopen(
urllib2.Request(
issue_url,
json.dumps({"body": maybe_remove_mention(message)}).encode(),
{
"Authorization": "token " + github_token,
"Content-Type": "application/json",
},
)
)
response.read()
except HTTPError as e:
print("HTTPError: %s\n%r" % (e, e.read()))
@ -1,18 +0,0 @@
[tool.black]
# Ignore all submodules
extend-exclude = """(\
src/doc/nomicon|\
src/tools/cargo/|\
src/doc/reference/|\
src/doc/book/|\
src/doc/rust-by-example/|\
library/stdarch/|\
src/doc/rustc-dev-guide/|\
src/doc/edition-guide/|\
src/llvm-project/|\
src/doc/embedded-book/|\
src/tools/rustc-perf/|\
src/tools/enzyme/|\
library/backtrace/|\
src/gcc/
)"""
@ -6,6 +6,5 @@
# Note: this generation step should be run with the oldest supported python
# version (currently 3.9) to ensure backward compatibility

black==24.4.2
ruff==0.4.9
clang-format==18.1.7
@ -4,30 +4,6 @@
#
# pip-compile --generate-hashes --strip-extras src/tools/tidy/config/requirements.in
#
black==24.4.2 \
--hash=sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474 \
--hash=sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1 \
--hash=sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0 \
--hash=sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8 \
--hash=sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96 \
--hash=sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1 \
--hash=sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04 \
--hash=sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021 \
--hash=sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94 \
--hash=sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d \
--hash=sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c \
--hash=sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7 \
--hash=sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c \
--hash=sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc \
--hash=sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7 \
--hash=sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d \
--hash=sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c \
--hash=sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741 \
--hash=sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce \
--hash=sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb \
--hash=sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063 \
--hash=sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e
# via -r src/tools/tidy/config/requirements.in
clang-format==18.1.7 \
--hash=sha256:035204410f65d03f98cb81c9c39d6d193f9987917cc88de9d0dbd01f2aa9c302 \
--hash=sha256:05c482a854287a5d21f7567186c0bd4b8dbd4a871751e655a45849185f30b931 \
@ -45,26 +21,6 @@ clang-format==18.1.7 \
--hash=sha256:f4f77ac0f4f9a659213fedda0f2d216886c410132e6e7dd4b13f92b34e925554 \
--hash=sha256:f935d34152a2e11e55120eb9182862f432bc9789ab819f680c9f6db4edebf9e3
# via -r src/tools/tidy/config/requirements.in
click==8.1.3 \
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \
--hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48
# via black
mypy-extensions==1.0.0 \
--hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \
--hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782
# via black
packaging==23.1 \
--hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \
--hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f
# via black
pathspec==0.11.1 \
--hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687 \
--hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293
# via black
platformdirs==4.2.2 \
--hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
--hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
# via black
ruff==0.4.9 \
--hash=sha256:06b60f91bfa5514bb689b500a25ba48e897d18fea14dce14b48a0c40d1635893 \
--hash=sha256:0e8e7b95673f22e0efd3571fb5b0cf71a5eaaa3cc8a776584f3b2cc878e46bff \
@ -84,11 +40,3 @@ ruff==0.4.9 \
--hash=sha256:e91175fbe48f8a2174c9aad70438fe9cb0a5732c4159b2a10a3565fea2d94cde \
--hash=sha256:f1cb0828ac9533ba0135d148d214e284711ede33640465e706772645483427e3
# via -r src/tools/tidy/config/requirements.in
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
# via black
typing-extensions==4.12.2 \
--hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
--hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
# via black
@ -19,6 +19,9 @@ extend-exclude = [
"src/tools/enzyme/",
"src/tools/rustc-perf/",
"src/gcc/",
"compiler/rustc_codegen_gcc",
"src/tools/clippy",
"src/tools/miri",
# Hack: CI runs from a subdirectory under the main checkout
"../src/doc/nomicon/",
"../src/tools/cargo/",
@ -34,6 +37,9 @@ extend-exclude = [
"../src/tools/enzyme/",
"../src/tools/rustc-perf/",
"../src/gcc/",
"../compiler/rustc_codegen_gcc",
"../src/tools/clippy",
"../src/tools/miri",
]

[lint]
@ -32,9 +32,8 @@ const REL_PY_PATH: &[&str] = &["Scripts", "python3.exe"];
const REL_PY_PATH: &[&str] = &["bin", "python3"];

const RUFF_CONFIG_PATH: &[&str] = &["src", "tools", "tidy", "config", "ruff.toml"];
const BLACK_CONFIG_PATH: &[&str] = &["src", "tools", "tidy", "config", "black.toml"];
/// Location within build directory
const RUFF_CACH_PATH: &[&str] = &["cache", "ruff_cache"];
const RUFF_CACHE_PATH: &[&str] = &["cache", "ruff_cache"];
const PIP_REQ_PATH: &[&str] = &["src", "tools", "tidy", "config", "requirements.txt"];

pub fn check(
@ -96,7 +95,7 @@ fn check_impl(
let mut cfg_path = root_path.to_owned();
cfg_path.extend(RUFF_CONFIG_PATH);
let mut cache_dir = outdir.to_owned();
cache_dir.extend(RUFF_CACH_PATH);
cache_dir.extend(RUFF_CACHE_PATH);

cfg_args_ruff.extend([
"--config".as_ref(),
@ -124,33 +123,36 @@ fn check_impl(
}

if python_fmt {
let mut cfg_args_black = cfg_args.clone();
let mut file_args_black = file_args.clone();
let mut cfg_args_ruff = cfg_args.clone();
let mut file_args_ruff = file_args.clone();

if bless {
eprintln!("formatting python files");
} else {
eprintln!("checking python file formatting");
cfg_args_black.push("--check".as_ref());
cfg_args_ruff.push("--check".as_ref());
}

let mut cfg_path = root_path.to_owned();
cfg_path.extend(BLACK_CONFIG_PATH);
cfg_path.extend(RUFF_CONFIG_PATH);
let mut cache_dir = outdir.to_owned();
cache_dir.extend(RUFF_CACHE_PATH);

cfg_args_black.extend(["--config".as_ref(), cfg_path.as_os_str()]);
cfg_args_ruff.extend(["--config".as_ref(), cfg_path.as_os_str()]);

if file_args_black.is_empty() {
file_args_black.push(root_path.as_os_str());
if file_args_ruff.is_empty() {
file_args_ruff.push(root_path.as_os_str());
}

let mut args = merge_args(&cfg_args_black, &file_args_black);
let res = py_runner(py_path.as_ref().unwrap(), true, None, "black", &args);
let mut args = merge_args(&cfg_args_ruff, &file_args_ruff);
args.insert(0, "format".as_ref());
let res = py_runner(py_path.as_ref().unwrap(), true, None, "ruff", &args);

if res.is_err() && show_diff {
eprintln!("\npython formatting does not match! Printing diff:");

args.insert(0, "--diff".as_ref());
let _ = py_runner(py_path.as_ref().unwrap(), true, None, "black", &args);
let _ = py_runner(py_path.as_ref().unwrap(), true, None, "ruff", &args);
}
// Rethrow error
let _ = res?;
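
After this change, tidy drives `ruff format` through the virtualenv's Python instead of running black. A rough Python equivalent of the command line the Rust code assembles; the paths are placeholders, and invoking ruff via `python -m ruff` is an assumption made for the sketch (tidy's py_runner resolves the tool inside its own venv):

import subprocess
import sys

cfg = "src/tools/tidy/config/ruff.toml"  # stands in for cfg_path above
target = "."                             # stands in for root_path above

# Equivalent of: args.insert(0, "format"), plus --check when not blessing.
result = subprocess.run(
    [sys.executable, "-m", "ruff", "format", "--config", cfg, "--check", target]
)
if result.returncode != 0:
    print("python formatting does not match!")
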
@ -445,7 +447,7 @@ fn shellcheck_runner(args: &[&OsStr]) -> Result<(), Error> {
}

let status = Command::new("shellcheck").args(args).status()?;
if status.success() { Ok(()) } else { Err(Error::FailedCheck("black")) }
if status.success() { Ok(()) } else { Err(Error::FailedCheck("shellcheck")) }
}

/// Check git for tracked files matching an extension
@ -57,7 +57,7 @@ pub unsafe fn test_catch_unwind(
// CHECK: [[IS_RUST_EXN_I8:%.*]] = zext i1 [[IS_RUST_EXN]] to i8

// CHECK: store ptr [[EXCEPTION]], ptr [[ALLOCA]]
// CHECK: [[IS_RUST_SLOT:%.*]] = getelementptr inbounds i8, ptr [[ALLOCA]], [[PTR_SIZE]]
// CHECK: [[IS_RUST_SLOT:%.*]] = getelementptr inbounds{{( nuw)?}} i8, ptr [[ALLOCA]], [[PTR_SIZE]]
// CHECK: store i8 [[IS_RUST_EXN_I8]], ptr [[IS_RUST_SLOT]]

// CHECK: call void %catch_fn(ptr %data, ptr nonnull [[ALLOCA]])
@ -12,7 +12,7 @@ pub fn branchy(input: u64) -> u64 {
// CHECK-LABEL: @branchy(
// CHECK-NEXT: start:
// CHECK-NEXT: [[_2:%.*]] = and i64 [[INPUT:%.*]], 3
// CHECK-NEXT: [[SWITCH_GEP:%.*]] = getelementptr inbounds [4 x i64], ptr @switch.table.branchy, i64 0, i64 [[_2]]
// CHECK-NEXT: [[SWITCH_GEP:%.*]] = getelementptr inbounds{{( nuw)?}} [4 x i64], ptr @switch.table.branchy, i64 0, i64 [[_2]]
// CHECK-NEXT: [[SWITCH_LOAD:%.*]] = load i64, ptr [[SWITCH_GEP]]
// CHECK-NEXT: ret i64 [[SWITCH_LOAD]]
match input % 4 {
@ -17,19 +17,19 @@
// CHECK-LABEL: define{{.*}}void @convert(
// CHECK-NOT: shufflevector
// OPT2: store i16
// OPT2-NEXT: getelementptr inbounds i8, {{.+}} 2
// OPT2-NEXT: getelementptr inbounds{{( nuw)?}} i8, {{.+}} 2
// OPT2-NEXT: store i16
// OPT2-NEXT: getelementptr inbounds i8, {{.+}} 4
// OPT2-NEXT: getelementptr inbounds{{( nuw)?}} i8, {{.+}} 4
// OPT2-NEXT: store i16
// OPT2-NEXT: getelementptr inbounds i8, {{.+}} 6
// OPT2-NEXT: getelementptr inbounds{{( nuw)?}} i8, {{.+}} 6
// OPT2-NEXT: store i16
// OPT2-NEXT: getelementptr inbounds i8, {{.+}} 8
// OPT2-NEXT: getelementptr inbounds{{( nuw)?}} i8, {{.+}} 8
// OPT2-NEXT: store i16
// OPT2-NEXT: getelementptr inbounds i8, {{.+}} 10
// OPT2-NEXT: getelementptr inbounds{{( nuw)?}} i8, {{.+}} 10
// OPT2-NEXT: store i16
// OPT2-NEXT: getelementptr inbounds i8, {{.+}} 12
// OPT2-NEXT: getelementptr inbounds{{( nuw)?}} i8, {{.+}} 12
// OPT2-NEXT: store i16
// OPT2-NEXT: getelementptr inbounds i8, {{.+}} 14
// OPT2-NEXT: getelementptr inbounds{{( nuw)?}} i8, {{.+}} 14
// OPT2-NEXT: store i16
// OPT3LINX64: load <8 x i16>
// OPT3LINX64-NEXT: call <8 x i16> @llvm.bswap
@ -14,7 +14,7 @@
// CHECK-LABEL: @slice_iter_next(
#[no_mangle]
pub fn slice_iter_next<'a>(it: &mut std::slice::Iter<'a, u32>) -> Option<&'a u32> {
// CHECK: %[[ENDP:.+]] = getelementptr inbounds i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[ENDP:.+]] = getelementptr inbounds{{( nuw)?}} i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[END:.+]] = load ptr, ptr %[[ENDP]]
// CHECK-SAME: !nonnull
// CHECK-SAME: !noundef
@ -31,7 +31,7 @@ pub fn slice_iter_next<'a>(it: &mut std::slice::Iter<'a, u32>) -> Option<&'a u32
// CHECK-LABEL: @slice_iter_next_back(
#[no_mangle]
pub fn slice_iter_next_back<'a>(it: &mut std::slice::Iter<'a, u32>) -> Option<&'a u32> {
// CHECK: %[[ENDP:.+]] = getelementptr inbounds i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[ENDP:.+]] = getelementptr inbounds{{( nuw)?}} i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[END:.+]] = load ptr, ptr %[[ENDP]]
// CHECK-SAME: !nonnull
// CHECK-SAME: !noundef
@ -55,7 +55,7 @@ pub fn slice_iter_next_back<'a>(it: &mut std::slice::Iter<'a, u32>) -> Option<&'
#[no_mangle]
pub fn slice_iter_new(slice: &[u32]) -> std::slice::Iter<'_, u32> {
// CHECK-NOT: slice
// CHECK: %[[END:.+]] = getelementptr inbounds i32{{.+}} %slice.0{{.+}} %slice.1
// CHECK: %[[END:.+]] = getelementptr inbounds{{( nuw)?}} i32{{.+}} %slice.0{{.+}} %slice.1
// CHECK-NOT: slice
// CHECK: insertvalue {{.+}} ptr %slice.0, 0
// CHECK-NOT: slice
@ -70,7 +70,7 @@ pub fn slice_iter_new(slice: &[u32]) -> std::slice::Iter<'_, u32> {
#[no_mangle]
pub fn slice_iter_mut_new(slice: &mut [u32]) -> std::slice::IterMut<'_, u32> {
// CHECK-NOT: slice
// CHECK: %[[END:.+]] = getelementptr inbounds i32{{.+}} %slice.0{{.+}} %slice.1
// CHECK: %[[END:.+]] = getelementptr inbounds{{( nuw)?}} i32{{.+}} %slice.0{{.+}} %slice.1
// CHECK-NOT: slice
// CHECK: insertvalue {{.+}} ptr %slice.0, 0
// CHECK-NOT: slice
@ -83,7 +83,7 @@ pub fn slice_iter_mut_new(slice: &mut [u32]) -> std::slice::IterMut<'_, u32> {
// CHECK-LABEL: @slice_iter_is_empty
#[no_mangle]
pub fn slice_iter_is_empty(it: &std::slice::Iter<'_, u32>) -> bool {
// CHECK: %[[ENDP:.+]] = getelementptr inbounds i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[ENDP:.+]] = getelementptr inbounds{{( nuw)?}} i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[END:.+]] = load ptr, ptr %[[ENDP]]
// CHECK-SAME: !nonnull
// CHECK-SAME: !noundef
@ -99,7 +99,7 @@ pub fn slice_iter_is_empty(it: &std::slice::Iter<'_, u32>) -> bool {
// CHECK-LABEL: @slice_iter_len
#[no_mangle]
pub fn slice_iter_len(it: &std::slice::Iter<'_, u32>) -> usize {
// CHECK: %[[ENDP:.+]] = getelementptr inbounds i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[ENDP:.+]] = getelementptr inbounds{{( nuw)?}} i8, ptr %it, {{i32 4|i64 8}}
// CHECK: %[[END:.+]] = load ptr, ptr %[[ENDP]]
// CHECK-SAME: !nonnull
// CHECK-SAME: !noundef
@ -1,5 +1,6 @@
import gdb


class PersonPrinter:
"Print a Person"

@ -11,6 +12,7 @@ class PersonPrinter:
def to_string(self):
return "{} is {} years old.".format(self.name, self.age)


def lookup(val):
lookup_tag = val.type.tag
if lookup_tag is None:
@ -20,4 +22,5 @@ def lookup(val):

return None


gdb.current_objfile().pretty_printers.append(lookup)
@ -1,5 +1,6 @@
import gdb


class PointPrinter:
"Print a Point"

@ -11,6 +12,7 @@ class PointPrinter:
def to_string(self):
return "({}, {})".format(self.x, self.y)


def lookup(val):
lookup_tag = val.type.tag
if lookup_tag is None:
@ -20,4 +22,5 @@ def lookup(val):

return None


gdb.current_objfile().pretty_printers.append(lookup)
@ -1,5 +1,6 @@
import gdb


class LinePrinter:
"Print a Line"

@ -11,6 +12,7 @@ class LinePrinter:
def to_string(self):
return "({}, {})".format(self.a, self.b)


def lookup(val):
lookup_tag = val.type.tag
if lookup_tag is None:
@ -20,4 +22,5 @@ def lookup(val):

return None


gdb.current_objfile().pretty_printers.append(lookup)
@ -10,12 +10,8 @@ assert-property: (".top-doc .docblock table", {"scrollWidth": "1572"})

// Checking it works on other doc blocks as well...

// Logically, the ".docblock" and the "<p>" should have the same scroll width.
compare-elements-property: (
"#implementations-list > details .docblock",
"#implementations-list > details .docblock > p",
["scrollWidth"],
)
assert-property: ("#implementations-list > details .docblock", {"scrollWidth": "835"})
// Logically, the ".docblock" and the "<p>" should have the same scroll width (if we exclude the margin).
assert-property: ("#implementations-list > details .docblock", {"scrollWidth": 816})
assert-property: ("#implementations-list > details .docblock > p", {"scrollWidth": 835})
// However, since there is overflow in the <table>, its scroll width is bigger.
assert-property: ("#implementations-list > details .docblock table", {"scrollWidth": "1572"})
42
tests/rustdoc-gui/impl-block-doc.goml
Normal file
@ -0,0 +1,42 @@
// Checks that the first sentence of an impl block doc is always visible even when the impl
// block is collapsed.
go-to: "file://" + |DOC_PATH| + "/test_docs/struct.ImplDoc.html"

set-window-size: (900, 600)

define-function: (
"compare-size-and-pos",
[nth_impl],
block {
// First we collapse the impl block.
store-value: (impl_path, "#implementations-list details:nth-of-type(" + |nth_impl| + ")")
set-property: (|impl_path|, {"open": false})
wait-for: |impl_path| + ":not([open])"

store-value: (impl_path, |impl_path| + " summary")
store-size: (|impl_path|, {"height": impl_height})
store-position: (|impl_path|, {"y": impl_y})

store-size: (|impl_path| + " .docblock", {"height": doc_height})
store-position: (|impl_path| + " .docblock", {"y": doc_y})

assert: |impl_y| + |impl_height| >= |doc_y|
}
)

call-function: ("compare-size-and-pos", {"nth_impl": 1})
// Since the first impl block has a long line, we ensure that it doesn't display all of it.
assert: (|impl_y| + |impl_height|) <= (|doc_y| + |doc_height|)

call-function: ("compare-size-and-pos", {"nth_impl": 2})
// The second impl block has a short line.
assert: (|impl_y| + |impl_height|) >= (|doc_y| + |doc_height|)

// FIXME: Needs `if` condition to make this test check that `padding-top` on the "..." element
// is as expected for tables.
call-function: ("compare-size-and-pos", {"nth_impl": 3})
assert: (|impl_y| + |impl_height|) >= (|doc_y| + |doc_height|)
call-function: ("compare-size-and-pos", {"nth_impl": 4})
assert: (|impl_y| + |impl_height|) >= (|doc_y| + |doc_height|)
call-function: ("compare-size-and-pos", {"nth_impl": 5})
assert: (|impl_y| + |impl_height|) >= (|doc_y| + |doc_height|)
@ -3,7 +3,7 @@ go-to: "file://" + |DOC_PATH| + "/test_docs/struct.TypeWithImplDoc.html"

// The text is about 24px tall, so if there's a margin, then their position will be >24px apart
compare-elements-position-near-false: (
"#implementations-list > .implementors-toggle > .docblock > p",
"#implementations-list > .implementors-toggle .docblock > p",
"#implementations-list > .implementors-toggle > .impl-items",
{"y": 24}
)
@ -16,7 +16,7 @@ assert-text: (
go-to: "file://" + |DOC_PATH| + "/lib2/struct.LongItemInfo2.html"
compare-elements-property: (
"#impl-SimpleTrait-for-LongItemInfo2 .item-info",
"#impl-SimpleTrait-for-LongItemInfo2 + .docblock",
"#impl-SimpleTrait-for-LongItemInfo2 .docblock",
["scrollWidth"],
)
assert-property: (
@ -2,7 +2,7 @@
go-to: "file://" + |DOC_PATH| + "/src/test_docs/lib.rs.html"
set-window-size: (800, 1000)
// "scrollWidth" should be superior than "clientWidth".
assert-property: ("body", {"scrollWidth": 1114, "clientWidth": 800})
assert-property: ("body", {"scrollWidth": 1776, "clientWidth": 800})

// Both properties should be equal (ie, no scroll on the code block).
assert-property: (".example-wrap .rust", {"scrollWidth": 1000, "clientWidth": 1000})
assert-property: (".example-wrap .rust", {"scrollWidth": 1662, "clientWidth": 1662})
@ -652,3 +652,42 @@ pub mod long_list {
//! * [`FromBytes`](#a) indicates that a type may safely be converted from an arbitrary byte
//! sequence
}

pub struct ImplDoc;

/// bla sondfosdnf sdfasd fadsd fdsa f ads fad sf sad f sad fasdfsafsa df dsafasdasd fsa dfadsfasd
/// fads fadfadd
///
/// bla
impl ImplDoc {
pub fn bar() {}
}

/// bla
///
/// bla
impl ImplDoc {
pub fn bar2() {}
}

// ignore-tidy-linelength
/// | this::is::a::kinda::very::long::header::number::one | this::is::a::kinda::very::long::header::number::two | this::is::a::kinda::very::long::header::number::three |
/// |-|-|-|
/// | bla | bli | blob |
impl ImplDoc {
pub fn bar3() {}
}

/// # h1
///
/// bla
impl ImplDoc {
pub fn bar4() {}
}

/// * list
/// * list
/// * list
impl ImplDoc {
pub fn bar5() {}
}
@ -1,3 +1,9 @@
//@ needs-llvm-components: webassembly
//@ compile-flags: --print=target-cpus --target=wasm32-unknown-unknown
//@ check-pass

// LLVM at HEAD has added support for the `lime1` CPU. Remove it from the
// output so that the stdout with LLVM-at-HEAD matches the output of the LLVM
// versions currently used by default.
// FIXME(#133919): Once Rust upgrades to LLVM 20, remove this.
//@ normalize-stdout-test: "(?m)^ *lime1\n" -> ""
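
The `normalize-stdout-test` directive is a plain regex substitution applied to the captured stdout before it is compared against the expected file. A quick Python check of what this particular pattern strips (the sample output below is illustrative, not real rustc output):

import re

stdout = "Available CPUs for this target:\n    generic\n    lime1\n    mvp\n"
normalized = re.sub(r"(?m)^ *lime1\n", "", stdout)
print(normalized)  # the indented "lime1" line is gone; all other lines survive
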
13
tests/ui/explicit-tail-calls/become-macro.rs
Normal file
@ -0,0 +1,13 @@
//@ check-pass
#![expect(incomplete_features)]
#![feature(explicit_tail_calls, decl_macro)]

macro call($f:expr $(, $args:expr)* $(,)?) {
($f)($($args),*)
}

fn main() {
become call!(f);
}

fn f() {}
42
tests/ui/explicit-tail-calls/become-operator.fixed
Normal file
@ -0,0 +1,42 @@
//@ run-rustfix
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]
#![allow(unused)]
use std::num::Wrapping;
use std::ops::{Not, Add, BitXorAssign};

// built-ins and overloaded operators are handled differently

fn f(a: u64, b: u64) -> u64 {
return a + b; //~ error: `become` does not support operators
}

fn g(a: String, b: &str) -> String {
become (a).add(b); //~ error: `become` does not support operators
}

fn h(x: u64) -> u64 {
return !x; //~ error: `become` does not support operators
}

fn i_do_not_know_any_more_letters(x: Wrapping<u32>) -> Wrapping<u32> {
become (x).not(); //~ error: `become` does not support operators
}

fn builtin_index(x: &[u8], i: usize) -> u8 {
return x[i] //~ error: `become` does not support operators
}

// FIXME(explicit_tail_calls): overloaded index is represented like `[&]*x.index(i)`,
// and so need additional handling

fn a(a: &mut u8, _: u8) {
return *a ^= 1; //~ error: `become` does not support operators
}

fn b(b: &mut Wrapping<u8>, _: u8) {
become (*b).bitxor_assign(1); //~ error: `become` does not support operators
}


fn main() {}
42
tests/ui/explicit-tail-calls/become-operator.rs
Normal file
@ -0,0 +1,42 @@
//@ run-rustfix
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]
#![allow(unused)]
use std::num::Wrapping;
use std::ops::{Not, Add, BitXorAssign};

// built-ins and overloaded operators are handled differently

fn f(a: u64, b: u64) -> u64 {
become a + b; //~ error: `become` does not support operators
}

fn g(a: String, b: &str) -> String {
become a + b; //~ error: `become` does not support operators
}

fn h(x: u64) -> u64 {
become !x; //~ error: `become` does not support operators
}

fn i_do_not_know_any_more_letters(x: Wrapping<u32>) -> Wrapping<u32> {
become !x; //~ error: `become` does not support operators
}

fn builtin_index(x: &[u8], i: usize) -> u8 {
become x[i] //~ error: `become` does not support operators
}

// FIXME(explicit_tail_calls): overloaded index is represented like `[&]*x.index(i)`,
// and so need additional handling

fn a(a: &mut u8, _: u8) {
become *a ^= 1; //~ error: `become` does not support operators
}

fn b(b: &mut Wrapping<u8>, _: u8) {
become *b ^= 1; //~ error: `become` does not support operators
}


fn main() {}
75
tests/ui/explicit-tail-calls/become-operator.stderr
Normal file
@ -0,0 +1,75 @@
error: `become` does not support operators
--> $DIR/become-operator.rs:11:12
|
LL | become a + b;
| -------^^^^^
| |
| help: try using `return` instead: `return`
|
= note: using `become` on a builtin operator is not useful

error: `become` does not support operators
--> $DIR/become-operator.rs:15:12
|
LL | become a + b;
| ^^^^^
|
help: try using the method directly
|
LL | become (a).add(b);
| + ~~~~~~ +

error: `become` does not support operators
--> $DIR/become-operator.rs:19:12
|
LL | become !x;
| -------^^
| |
| help: try using `return` instead: `return`
|
= note: using `become` on a builtin operator is not useful

error: `become` does not support operators
--> $DIR/become-operator.rs:23:12
|
LL | become !x;
| ^^
|
help: try using the method directly
|
LL | become (x).not();
| ~ +++++++

error: `become` does not support operators
--> $DIR/become-operator.rs:27:12
|
LL | become x[i]
| -------^^^^
| |
| help: try using `return` instead: `return`
|
= note: using `become` on a builtin operator is not useful

error: `become` does not support operators
--> $DIR/become-operator.rs:34:12
|
LL | become *a ^= 1;
| -------^^^^^^^
| |
| help: try using `return` instead: `return`
|
= note: using `become` on a builtin operator is not useful

error: `become` does not support operators
--> $DIR/become-operator.rs:38:12
|
LL | become *b ^= 1;
| ^^^^^^^
|
help: try using the method directly
|
LL | become (*b).bitxor_assign(1);
| + ~~~~~~~~~~~~~~~~ +

error: aborting due to 7 previous errors
@ -1,5 +1,5 @@
//@ revisions: constant array
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

#[cfg(constant)]
18
tests/ui/explicit-tail-calls/become-uncallable.fixed
Normal file
@ -0,0 +1,18 @@
//@ run-rustfix
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]
#![allow(unused)]

fn f() -> u64 {
return 1; //~ error: `become` requires a function call
}

fn g() {
return { h() }; //~ error: `become` requires a function call
}

fn h() {
return *&g(); //~ error: `become` requires a function call
}

fn main() {}
18
tests/ui/explicit-tail-calls/become-uncallable.rs
Normal file
@ -0,0 +1,18 @@
//@ run-rustfix
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]
#![allow(unused)]

fn f() -> u64 {
become 1; //~ error: `become` requires a function call
}

fn g() {
become { h() }; //~ error: `become` requires a function call
}

fn h() {
become *&g(); //~ error: `become` requires a function call
}

fn main() {}
44
tests/ui/explicit-tail-calls/become-uncallable.stderr
Normal file
@ -0,0 +1,44 @@
error: `become` requires a function call
--> $DIR/become-uncallable.rs:7:12
|
LL | become 1;
| -------^
| |
| help: try using `return` instead: `return`
|
note: not a function call
--> $DIR/become-uncallable.rs:7:12
|
LL | become 1;
| ^

error: `become` requires a function call
--> $DIR/become-uncallable.rs:11:12
|
LL | become { h() };
| -------^^^^^^^
| |
| help: try using `return` instead: `return`
|
note: not a function call
--> $DIR/become-uncallable.rs:11:12
|
LL | become { h() };
| ^^^^^^^

error: `become` requires a function call
--> $DIR/become-uncallable.rs:15:12
|
LL | become *&g();
| -------^^^^^
| |
| help: try using `return` instead: `return`
|
note: not a function call
--> $DIR/become-uncallable.rs:15:12
|
LL | become *&g();
| ^^^^^

error: aborting due to 3 previous errors
31
tests/ui/explicit-tail-calls/closure.fixed
Normal file
@ -0,0 +1,31 @@
//@ run-rustfix
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

fn a() {
become ((|| ()) as fn() -> _)();
//~^ ERROR: tail calling closures directly is not allowed
}

fn aa((): ()) {
become ((|()| ()) as fn(_) -> _)(());
//~^ ERROR: tail calling closures directly is not allowed
}

fn aaa((): (), _: i32) {
become ((|(), _| ()) as fn(_, _) -> _)((), 1);
//~^ ERROR: tail calling closures directly is not allowed
}

fn v((): (), ((), ()): ((), ())) -> (((), ()), ()) {
let f = |(), ((), ())| (((), ()), ());
become (f as fn(_, _) -> _)((), ((), ()));
//~^ ERROR: tail calling closures directly is not allowed
}

fn main() {
a();
aa(());
aaa((), 1);
v((), ((), ()));
}
31
tests/ui/explicit-tail-calls/closure.rs
Normal file
@ -0,0 +1,31 @@
//@ run-rustfix
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

fn a() {
become (|| ())();
//~^ ERROR: tail calling closures directly is not allowed
}

fn aa((): ()) {
become (|()| ())(());
//~^ ERROR: tail calling closures directly is not allowed
}

fn aaa((): (), _: i32) {
become (|(), _| ())((), 1);
//~^ ERROR: tail calling closures directly is not allowed
}

fn v((): (), ((), ()): ((), ())) -> (((), ()), ()) {
let f = |(), ((), ())| (((), ()), ());
become f((), ((), ()));
//~^ ERROR: tail calling closures directly is not allowed
}

fn main() {
a();
aa(());
aaa((), 1);
v((), ((), ()));
}
46
tests/ui/explicit-tail-calls/closure.stderr
Normal file
@ -0,0 +1,46 @@
error: tail calling closures directly is not allowed
--> $DIR/closure.rs:6:5
|
LL | become (|| ())();
| ^^^^^^^^^^^^^^^^
|
help: try casting the closure to a function pointer type
|
LL | become ((|| ()) as fn() -> _)();
| + +++++++++++++

error: tail calling closures directly is not allowed
--> $DIR/closure.rs:11:5
|
LL | become (|()| ())(());
| ^^^^^^^^^^^^^^^^^^^^
|
help: try casting the closure to a function pointer type
|
LL | become ((|()| ()) as fn(_) -> _)(());
| + ++++++++++++++

error: tail calling closures directly is not allowed
--> $DIR/closure.rs:16:5
|
LL | become (|(), _| ())((), 1);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: try casting the closure to a function pointer type
|
LL | become ((|(), _| ()) as fn(_, _) -> _)((), 1);
| + +++++++++++++++++

error: tail calling closures directly is not allowed
--> $DIR/closure.rs:22:5
|
LL | become f((), ((), ()));
| ^^^^^^^^^^^^^^^^^^^^^^
|
help: try casting the closure to a function pointer type
|
LL | become (f as fn(_, _) -> _)((), ((), ()));
| + +++++++++++++++++

error: aborting due to 4 previous errors
@ -1,4 +1,4 @@
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

const fn f() {

@ -1,4 +1,4 @@
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

pub const fn test(_: &Type) {

@ -1,5 +1,5 @@
//@ check-pass
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

pub const fn test(x: &Type) {

@ -1,5 +1,5 @@
//@ check-pass
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

pub const fn test(s: String) -> String {

@ -1,5 +1,5 @@
//@ run-pass
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

/// A very unnecessarily complicated "implementation" of the Collatz conjecture.

@ -1,6 +1,6 @@
//@ revisions: become return
//@ [become] run-pass
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

// This is an identity function (`|x| x`), but implemented using recursion.

@ -1,4 +1,4 @@
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

pub const fn f() {

@ -1,7 +1,7 @@
// FIXME(explicit_tail_calls): enable this test once rustc_codegen_ssa supports tail calls
//@ ignore-test: tail calls are not implemented in rustc_codegen_ssa yet, so this causes 🧊
//@ run-pass
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]
use std::cell::RefCell;
8
tests/ui/explicit-tail-calls/in-closure.rs
Normal file
@ -0,0 +1,8 @@
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

fn main() {
|| become f(); //~ error: `become` is not allowed in closures
}

fn f() {}
8
tests/ui/explicit-tail-calls/in-closure.stderr
Normal file
@ -0,0 +1,8 @@
error: `become` is not allowed in closures
--> $DIR/in-closure.rs:5:8
|
LL | || become f();
| ^^^^^^^^^^

error: aborting due to 1 previous error

@ -1,5 +1,5 @@
//@ check-pass
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

fn _f<'a>() -> &'a [u8] {

@ -1,4 +1,4 @@
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

fn _f0<'a>() -> &'static [u8] {
33
tests/ui/explicit-tail-calls/signature-mismatch.rs
Normal file
@ -0,0 +1,33 @@
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]
#![feature(c_variadic)]

fn _f0((): ()) {
become _g0(); //~ error: mismatched signatures
}

fn _g0() {}


fn _f1() {
become _g1(()); //~ error: mismatched signatures
}

fn _g1((): ()) {}


extern "C" fn _f2() {
become _g2(); //~ error: mismatched function ABIs
}

fn _g2() {}


fn _f3() {
become _g3(); //~ error: mismatched function ABIs
}

extern "C" fn _g3() {}


fn main() {}
40
tests/ui/explicit-tail-calls/signature-mismatch.stderr
Normal file
@ -0,0 +1,40 @@
error: mismatched signatures
--> $DIR/signature-mismatch.rs:6:5
|
LL | become _g0();
| ^^^^^^^^^^^^
|
= note: `become` requires caller and callee to have matching signatures
= note: caller signature: `fn(())`
= note: callee signature: `fn()`

error: mismatched signatures
--> $DIR/signature-mismatch.rs:13:5
|
LL | become _g1(());
| ^^^^^^^^^^^^^^
|
= note: `become` requires caller and callee to have matching signatures
= note: caller signature: `fn()`
= note: callee signature: `fn(())`

error: mismatched function ABIs
--> $DIR/signature-mismatch.rs:20:5
|
LL | become _g2();
| ^^^^^^^^^^^^
|
= note: `become` requires caller and callee to have the same ABI
= note: caller ABI is `"C"`, while callee ABI is `"Rust"`

error: mismatched function ABIs
--> $DIR/signature-mismatch.rs:27:5
|
LL | become _g3();
| ^^^^^^^^^^^^
|
= note: `become` requires caller and callee to have the same ABI
= note: caller ABI is `"Rust"`, while callee ABI is `"C"`

error: aborting due to 4 previous errors

@ -1,4 +1,4 @@
#![allow(incomplete_features)]
#![expect(incomplete_features)]
#![feature(explicit_tail_calls)]

const fn f() {
9
x.py
@ -6,7 +6,7 @@

# Parts of `bootstrap.py` use the `multiprocessing` module, so this entry point
# must use the normal `if __name__ == '__main__':` convention to avoid problems.
if __name__ == '__main__':
if __name__ == "__main__":
import os
import sys
import warnings
@ -32,14 +32,16 @@ if __name__ == '__main__':
# soft deprecation of old python versions
skip_check = os.environ.get("RUST_IGNORE_OLD_PYTHON") == "1"
if not skip_check and (major < 3 or (major == 3 and minor < 6)):
msg = cleandoc("""
msg = cleandoc(
"""
Using python {}.{} but >= 3.6 is recommended. Your python version
should continue to work for the near future, but this will
eventually change. If python >= 3.6 is not available on your system,
please file an issue to help us understand timelines.

This message can be suppressed by setting `RUST_IGNORE_OLD_PYTHON=1`
""".format(major, minor))
""".format(major, minor)
)
warnings.warn(msg, stacklevel=1)
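
`cleandoc` (from the inspect module) is what lets the warning body stay indented in the source while printing flush-left. A small self-contained illustration with sample text and placeholder version numbers:

from inspect import cleandoc

msg = cleandoc(
    """
    Using python {}.{} but >= 3.6 is recommended.
    This message can be suppressed by setting `RUST_IGNORE_OLD_PYTHON=1`
    """.format(3, 5)
)
print(msg)  # the leading blank line and common indentation are removed
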

rust_dir = os.path.dirname(os.path.abspath(__file__))
@ -47,4 +49,5 @@ if __name__ == '__main__':
sys.path.insert(0, os.path.join(rust_dir, "src", "bootstrap"))

import bootstrap

bootstrap.main()