mirror of
https://github.com/rust-lang/rust.git
synced 2025-02-13 15:33:53 +00:00
move bootstrap utils into bootstrap/src/utils module
Signed-off-by: onur-ozkan <work@onurozkan.dev>
This commit is contained in: parent 2bce0207d4, commit 9f381fe345
@ -9,27 +9,27 @@ default-run = "bootstrap"
build-metrics = ["sysinfo"]

[lib]
path = "lib.rs"
path = "src/lib.rs"
doctest = false

[[bin]]
name = "bootstrap"
path = "bin/main.rs"
path = "src/bin/main.rs"
test = false

[[bin]]
name = "rustc"
path = "bin/rustc.rs"
path = "src/bin/rustc.rs"
test = false

[[bin]]
name = "rustdoc"
path = "bin/rustdoc.rs"
path = "src/bin/rustdoc.rs"
test = false

[[bin]]
name = "sccache-plus-cl"
path = "bin/sccache-plus-cl.rs"
path = "src/bin/sccache-plus-cl.rs"
test = false

[dependencies]
@ -1,568 +0,0 @@
//! Command-line interface of the rustbuild build system.
//!
//! This module implements the command-line parsing of the build system which
//! has various flags to configure how it's run.

use std::path::{Path, PathBuf};

use clap::{CommandFactory, Parser, ValueEnum};

use crate::builder::{Builder, Kind};
use crate::config::{target_selection_list, Config, TargetSelectionList};
use crate::setup::Profile;
use crate::{Build, DocTests};

#[derive(Copy, Clone, Default, Debug, ValueEnum)]
pub enum Color {
Always,
Never,
#[default]
Auto,
}

/// Whether to deny warnings, emit them as warnings, or use the default behavior
#[derive(Copy, Clone, Default, Debug, ValueEnum)]
pub enum Warnings {
Deny,
Warn,
#[default]
Default,
}

/// Deserialized version of all flags for this compile.
#[derive(Debug, Parser)]
#[clap(
override_usage = "x.py <subcommand> [options] [<paths>...]",
disable_help_subcommand(true),
about = "",
next_line_help(false)
)]
pub struct Flags {
#[command(subcommand)]
pub cmd: Subcommand,

#[arg(global(true), short, long, action = clap::ArgAction::Count)]
/// use verbose output (-vv for very verbose)
pub verbose: u8, // each extra -v after the first is passed to Cargo
#[arg(global(true), short, long)]
/// use incremental compilation
pub incremental: bool,
#[arg(global(true), long, value_hint = clap::ValueHint::FilePath, value_name = "FILE")]
/// TOML configuration file for build
pub config: Option<PathBuf>,
#[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")]
/// Build directory, overrides `build.build-dir` in `config.toml`
pub build_dir: Option<PathBuf>,

#[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "BUILD")]
/// build target of the stage0 compiler
pub build: Option<String>,

#[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "HOST", value_parser = target_selection_list)]
/// host targets to build
pub host: Option<TargetSelectionList>,

#[arg(global(true), long, value_hint = clap::ValueHint::Other, value_name = "TARGET", value_parser = target_selection_list)]
/// target targets to build
pub target: Option<TargetSelectionList>,

#[arg(global(true), long, value_name = "PATH")]
/// build paths to exclude
pub exclude: Vec<PathBuf>, // keeping for client backward compatibility
#[arg(global(true), long, value_name = "PATH")]
/// build paths to skip
pub skip: Vec<PathBuf>,
#[arg(global(true), long)]
/// include default paths in addition to the provided ones
pub include_default_paths: bool,

#[arg(global(true), value_hint = clap::ValueHint::Other, long)]
pub rustc_error_format: Option<String>,

#[arg(global(true), long, value_hint = clap::ValueHint::CommandString, value_name = "CMD")]
/// command to run on failure
pub on_fail: Option<String>,
#[arg(global(true), long)]
/// dry run; don't build anything
pub dry_run: bool,
#[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")]
/// stage to build (indicates compiler to use/test, e.g., stage 0 uses the
/// bootstrap compiler, stage 1 the stage 0 rustc artifacts, etc.)
pub stage: Option<u32>,

#[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")]
/// stage(s) to keep without recompiling
/// (pass multiple times to keep e.g., both stages 0 and 1)
pub keep_stage: Vec<u32>,
#[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "N")]
/// stage(s) of the standard library to keep without recompiling
/// (pass multiple times to keep e.g., both stages 0 and 1)
pub keep_stage_std: Vec<u32>,
#[arg(global(true), long, value_hint = clap::ValueHint::DirPath, value_name = "DIR")]
/// path to the root of the rust checkout
pub src: Option<PathBuf>,

#[arg(
global(true),
short,
long,
value_hint = clap::ValueHint::Other,
default_value_t = std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get),
value_name = "JOBS"
)]
/// number of jobs to run in parallel
pub jobs: usize,
// This overrides the deny-warnings configuration option,
// which passes -Dwarnings to the compiler invocations.
#[arg(global(true), long)]
#[clap(value_enum, default_value_t=Warnings::Default, value_name = "deny|warn")]
/// if value is deny, will deny warnings
/// if value is warn, will emit warnings
/// otherwise, use the default configured behaviour
pub warnings: Warnings,

#[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "FORMAT")]
/// rustc error format
pub error_format: Option<String>,
#[arg(global(true), long)]
/// use message-format=json
pub json_output: bool,

#[arg(global(true), long, value_name = "STYLE")]
#[clap(value_enum, default_value_t = Color::Auto)]
/// whether to use color in cargo and rustc output
pub color: Color,

/// whether rebuilding llvm should be skipped, overriding `skip-rebuld` in config.toml
#[arg(global(true), long, value_name = "VALUE")]
pub llvm_skip_rebuild: Option<bool>,
/// generate PGO profile with rustc build
#[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")]
pub rust_profile_generate: Option<String>,
/// use PGO profile for rustc build
#[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")]
pub rust_profile_use: Option<String>,
/// use PGO profile for LLVM build
#[arg(global(true), value_hint = clap::ValueHint::FilePath, long, value_name = "PROFILE")]
pub llvm_profile_use: Option<String>,
// LLVM doesn't support a custom location for generating profile
// information.
//
// llvm_out/build/profiles/ is the location this writes to.
/// generate PGO profile with llvm built for rustc
#[arg(global(true), long)]
pub llvm_profile_generate: bool,
/// Enable BOLT link flags
#[arg(global(true), long)]
pub enable_bolt_settings: bool,
/// Additional reproducible artifacts that should be added to the reproducible artifacts archive.
#[arg(global(true), long)]
pub reproducible_artifact: Vec<String>,
#[arg(global(true))]
/// paths for the subcommand
pub paths: Vec<PathBuf>,
/// override options in config.toml
#[arg(global(true), value_hint = clap::ValueHint::Other, long, value_name = "section.option=value")]
pub set: Vec<String>,
/// arguments passed to subcommands
#[arg(global(true), last(true), value_name = "ARGS")]
pub free_args: Vec<String>,
}

impl Flags {
pub fn parse(args: &[String]) -> Self {
let first = String::from("x.py");
let it = std::iter::once(&first).chain(args.iter());
// We need to check for `<cmd> -h -v`, in which case we list the paths
#[derive(Parser)]
#[clap(disable_help_flag(true))]
struct HelpVerboseOnly {
#[arg(short, long)]
help: bool,
#[arg(global(true), short, long, action = clap::ArgAction::Count)]
pub verbose: u8,
#[arg(value_enum)]
cmd: Kind,
}
if let Ok(HelpVerboseOnly { help: true, verbose: 1.., cmd: subcommand }) =
HelpVerboseOnly::try_parse_from(it.clone())
{
println!("note: updating submodules before printing available paths");
let config = Config::parse(&[String::from("build")]);
let build = Build::new(config);
let paths = Builder::get_help(&build, subcommand);
if let Some(s) = paths {
println!("{s}");
} else {
panic!("No paths available for subcommand `{}`", subcommand.as_str());
}
crate::exit!(0);
}

Flags::parse_from(it)
}
}
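A minimal sketch, not part of this commit, of how a caller might drive the parser defined above; the argument handling mirrors the `x.py` entry point and the printed fields are arbitrary:

// Hypothetical call site: forward everything after the program name,
// as happens when `./x.py build --stage 1 library/std` is run.
let args: Vec<String> = std::env::args().skip(1).collect();
let flags = Flags::parse(&args);
println!("jobs = {}, stage = {:?}", flags.jobs, flags.stage);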

#[derive(Debug, Clone, Default, clap::Subcommand)]
pub enum Subcommand {
#[clap(aliases = ["b"], long_about = "\n
Arguments:
    This subcommand accepts a number of paths to directories to the crates
    and/or artifacts to compile. For example, for a quick build of a usable
    compiler:
        ./x.py build --stage 1 library/std
    This will build a compiler and standard library from the local source code.
    Once this is done, build/$ARCH/stage1 contains a usable compiler.
    If no arguments are passed then the default artifacts for that stage are
    compiled. For example:
        ./x.py build --stage 0
        ./x.py build ")]
/// Compile either the compiler or libraries
#[default]
Build,
#[clap(aliases = ["c"], long_about = "\n
Arguments:
    This subcommand accepts a number of paths to directories to the crates
    and/or artifacts to compile. For example:
        ./x.py check library/std
    If no arguments are passed then many artifacts are checked.")]
/// Compile either the compiler or libraries, using cargo check
Check {
#[arg(long)]
/// Check all targets
all_targets: bool,
},
/// Run Clippy (uses rustup/cargo-installed clippy binary)
#[clap(long_about = "\n
Arguments:
    This subcommand accepts a number of paths to directories to the crates
    and/or artifacts to run clippy against. For example:
        ./x.py clippy library/core
        ./x.py clippy library/core library/proc_macro")]
Clippy {
#[arg(long)]
fix: bool,
/// clippy lints to allow
#[arg(global(true), short = 'A', action = clap::ArgAction::Append, value_name = "LINT")]
allow: Vec<String>,
/// clippy lints to deny
#[arg(global(true), short = 'D', action = clap::ArgAction::Append, value_name = "LINT")]
deny: Vec<String>,
/// clippy lints to warn on
#[arg(global(true), short = 'W', action = clap::ArgAction::Append, value_name = "LINT")]
warn: Vec<String>,
/// clippy lints to forbid
#[arg(global(true), short = 'F', action = clap::ArgAction::Append, value_name = "LINT")]
forbid: Vec<String>,
},
/// Run cargo fix
#[clap(long_about = "\n
Arguments:
    This subcommand accepts a number of paths to directories to the crates
    and/or artifacts to run `cargo fix` against. For example:
        ./x.py fix library/core
        ./x.py fix library/core library/proc_macro")]
Fix,
#[clap(
name = "fmt",
long_about = "\n
Arguments:
    This subcommand optionally accepts a `--check` flag which succeeds if formatting is correct and
    fails if it is not. For example:
        ./x.py fmt
        ./x.py fmt --check"
)]
/// Run rustfmt
Format {
/// check formatting instead of applying
#[arg(long)]
check: bool,
},
#[clap(aliases = ["d"], long_about = "\n
Arguments:
    This subcommand accepts a number of paths to directories of documentation
    to build. For example:
        ./x.py doc src/doc/book
        ./x.py doc src/doc/nomicon
        ./x.py doc src/doc/book library/std
        ./x.py doc library/std --json
        ./x.py doc library/std --open
    If no arguments are passed then everything is documented:
        ./x.py doc
        ./x.py doc --stage 1")]
/// Build documentation
Doc {
#[arg(long)]
/// open the docs in a browser
open: bool,
#[arg(long)]
/// render the documentation in JSON format in addition to the usual HTML format
json: bool,
},
#[clap(aliases = ["t"], long_about = "\n
Arguments:
    This subcommand accepts a number of paths to test directories that
    should be compiled and run. For example:
        ./x.py test tests/ui
        ./x.py test library/std --test-args hash_map
        ./x.py test library/std --stage 0 --no-doc
        ./x.py test tests/ui --bless
        ./x.py test tests/ui --compare-mode next-solver
    Note that `test tests/* --stage N` does NOT depend on `build compiler/rustc --stage N`;
    just like `build library/std --stage N` it tests the compiler produced by the previous
    stage.
    Execute tool tests with a tool name argument:
        ./x.py test tidy
    If no arguments are passed then the complete artifacts for that stage are
    compiled and tested.
        ./x.py test
        ./x.py test --stage 1")]
/// Build and run some test suites
Test {
#[arg(long)]
/// run all tests regardless of failure
no_fail_fast: bool,
#[arg(long, value_name = "SUBSTRING")]
/// skips tests matching SUBSTRING, if supported by test tool. May be passed multiple times
skip: Vec<PathBuf>,
#[arg(long, value_name = "ARGS", allow_hyphen_values(true))]
/// extra arguments to be passed for the test tool being used
/// (e.g. libtest, compiletest or rustdoc)
test_args: Vec<String>,
/// extra options to pass the compiler when running tests
#[arg(long, value_name = "ARGS", allow_hyphen_values(true))]
rustc_args: Vec<String>,
#[arg(long)]
/// do not run doc tests
no_doc: bool,
#[arg(long)]
/// only run doc tests
doc: bool,
#[arg(long)]
/// whether to automatically update stderr/stdout files
bless: bool,
#[arg(long)]
/// comma-separated list of other files types to check (accepts py, py:lint,
/// py:fmt, shell)
extra_checks: Option<String>,
#[arg(long)]
/// rerun tests even if the inputs are unchanged
force_rerun: bool,
#[arg(long)]
/// only run tests that result has been changed
only_modified: bool,
#[arg(long, value_name = "COMPARE MODE")]
/// mode describing what file the actual ui output will be compared to
compare_mode: Option<String>,
#[arg(long, value_name = "check | build | run")]
/// force {check,build,run}-pass tests to this mode.
pass: Option<String>,
#[arg(long, value_name = "auto | always | never")]
/// whether to execute run-* tests
run: Option<String>,
#[arg(long)]
/// enable this to generate a Rustfix coverage file, which is saved in
/// `/<build_base>/rustfix_missing_coverage.txt`
rustfix_coverage: bool,
},
/// Build and run some benchmarks
Bench {
#[arg(long, allow_hyphen_values(true))]
test_args: Vec<String>,
},
/// Clean out build directories
Clean {
#[arg(long)]
/// Clean the entire build directory (not used by default)
all: bool,
#[arg(long, value_name = "N")]
/// Clean a specific stage without touching other artifacts. By default, every stage is cleaned if this option is not used.
stage: Option<u32>,
},
/// Build distribution artifacts
Dist,
/// Install distribution artifacts
Install,
#[clap(aliases = ["r"], long_about = "\n
Arguments:
    This subcommand accepts a number of paths to tools to build and run. For
    example:
        ./x.py run src/tools/expand-yaml-anchors
    At least a tool needs to be called.")]
/// Run tools contained in this repository
Run {
/// arguments for the tool
#[arg(long, allow_hyphen_values(true))]
args: Vec<String>,
},
/// Set up the environment for development
#[clap(long_about = format!(
"\n
x.py setup creates a `config.toml` which changes the defaults for x.py itself,
as well as setting up a git pre-push hook, VS Code config and toolchain link.
Arguments:
    This subcommand accepts a 'profile' to use for builds. For example:
        ./x.py setup library
    The profile is optional and you will be prompted interactively if it is not given.
    The following profiles are available:
{}
    To only set up the git hook, VS Code config or toolchain link, you may use
        ./x.py setup hook
        ./x.py setup vscode
        ./x.py setup link", Profile::all_for_help(" ").trim_end()))]
Setup {
/// Either the profile for `config.toml` or another setup action.
/// May be omitted to set up interactively
#[arg(value_name = "<PROFILE>|hook|vscode|link")]
profile: Option<PathBuf>,
},
/// Suggest a subset of tests to run, based on modified files
#[clap(long_about = "\n")]
Suggest {
/// run suggested tests
#[arg(long)]
run: bool,
},
}

impl Subcommand {
pub fn kind(&self) -> Kind {
match self {
Subcommand::Bench { .. } => Kind::Bench,
Subcommand::Build { .. } => Kind::Build,
Subcommand::Check { .. } => Kind::Check,
Subcommand::Clippy { .. } => Kind::Clippy,
Subcommand::Doc { .. } => Kind::Doc,
Subcommand::Fix { .. } => Kind::Fix,
Subcommand::Format { .. } => Kind::Format,
Subcommand::Test { .. } => Kind::Test,
Subcommand::Clean { .. } => Kind::Clean,
Subcommand::Dist { .. } => Kind::Dist,
Subcommand::Install { .. } => Kind::Install,
Subcommand::Run { .. } => Kind::Run,
Subcommand::Setup { .. } => Kind::Setup,
Subcommand::Suggest { .. } => Kind::Suggest,
}
}

pub fn rustc_args(&self) -> Vec<&str> {
match *self {
Subcommand::Test { ref rustc_args, .. } => {
rustc_args.iter().flat_map(|s| s.split_whitespace()).collect()
}
_ => vec![],
}
}

pub fn fail_fast(&self) -> bool {
match *self {
Subcommand::Test { no_fail_fast, .. } => !no_fail_fast,
_ => false,
}
}

pub fn doc_tests(&self) -> DocTests {
match *self {
Subcommand::Test { doc, no_doc, .. } => {
if doc {
DocTests::Only
} else if no_doc {
DocTests::No
} else {
DocTests::Yes
}
}
_ => DocTests::Yes,
}
}

pub fn bless(&self) -> bool {
match *self {
Subcommand::Test { bless, .. } => bless,
_ => false,
}
}

pub fn extra_checks(&self) -> Option<&str> {
match *self {
Subcommand::Test { ref extra_checks, .. } => extra_checks.as_ref().map(String::as_str),
_ => None,
}
}

pub fn only_modified(&self) -> bool {
match *self {
Subcommand::Test { only_modified, .. } => only_modified,
_ => false,
}
}

pub fn force_rerun(&self) -> bool {
match *self {
Subcommand::Test { force_rerun, .. } => force_rerun,
_ => false,
}
}

pub fn rustfix_coverage(&self) -> bool {
match *self {
Subcommand::Test { rustfix_coverage, .. } => rustfix_coverage,
_ => false,
}
}

pub fn compare_mode(&self) -> Option<&str> {
match *self {
Subcommand::Test { ref compare_mode, .. } => compare_mode.as_ref().map(|s| &s[..]),
_ => None,
}
}

pub fn pass(&self) -> Option<&str> {
match *self {
Subcommand::Test { ref pass, .. } => pass.as_ref().map(|s| &s[..]),
_ => None,
}
}

pub fn run(&self) -> Option<&str> {
match *self {
Subcommand::Test { ref run, .. } => run.as_ref().map(|s| &s[..]),
_ => None,
}
}

pub fn open(&self) -> bool {
match *self {
Subcommand::Doc { open, .. } => open,
_ => false,
}
}

pub fn json(&self) -> bool {
match *self {
Subcommand::Doc { json, .. } => json,
_ => false,
}
}
}

/// Returns the shell completion for a given shell, if the result differs from the current
/// content of `path`. If `path` does not exist, always returns `Some`.
pub fn get_completion<G: clap_complete::Generator>(shell: G, path: &Path) -> Option<String> {
let mut cmd = Flags::command();
let current = if !path.exists() {
String::new()
} else {
std::fs::read_to_string(path).unwrap_or_else(|_| {
eprintln!("couldn't read {}", path.display());
crate::exit!(1)
})
};
let mut buf = Vec::new();
clap_complete::generate(shell, &mut cmd, "x.py", &mut buf);
if buf == current.as_bytes() {
return None;
}
Some(String::from_utf8(buf).expect("completion script should be UTF-8"))
}
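A short usage sketch for `get_completion` as defined above (not part of the commit; the output path is illustrative): it rewrites the completion script only when the on-disk copy is stale.

// Hypothetical caller: refresh a bash completion script if its contents changed.
let path = std::path::Path::new("src/etc/completions/x.py.sh"); // illustrative path
if let Some(updated) = get_completion(clap_complete::shells::Bash, path) {
    std::fs::write(path, updated).expect("failed to write completion script");
}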

@ -1,143 +0,0 @@
//! Job management on Windows for bootstrapping
//!
//! Most of the time when you're running a build system (e.g., make) you expect
//! Ctrl-C or abnormal termination to actually terminate the entire tree of
//! process in play, not just the one at the top. This currently works "by
//! default" on Unix platforms because Ctrl-C actually sends a signal to the
//! *process group* rather than the parent process, so everything will get torn
//! down. On Windows, however, this does not happen and Ctrl-C just kills the
//! parent process.
//!
//! To achieve the same semantics on Windows we use Job Objects to ensure that
//! all processes die at the same time. Job objects have a mode of operation
//! where when all handles to the object are closed it causes all child
//! processes associated with the object to be terminated immediately.
//! Conveniently whenever a process in the job object spawns a new process the
//! child will be associated with the job object as well. This means if we add
//! ourselves to the job object we create then everything will get torn down!
//!
//! Unfortunately most of the time the build system is actually called from a
//! python wrapper (which manages things like building the build system) so this
//! all doesn't quite cut it so far. To go the last mile we duplicate the job
//! object handle into our parent process (a python process probably) and then
//! close our own handle. This means that the only handle to the job object
//! resides in the parent python process, so when python dies the whole build
//! system dies (as one would probably expect!).
//!
//! Note that this module has a #[cfg(windows)] above it as none of this logic
//! is required on Unix.

use crate::Build;
use std::env;
use std::ffi::c_void;
use std::io;
use std::mem;

use windows::{
core::PCWSTR,
Win32::Foundation::{CloseHandle, DuplicateHandle, DUPLICATE_SAME_ACCESS, HANDLE},
Win32::System::Diagnostics::Debug::{SetErrorMode, SEM_NOGPFAULTERRORBOX, THREAD_ERROR_MODE},
Win32::System::JobObjects::{
AssignProcessToJobObject, CreateJobObjectW, JobObjectExtendedLimitInformation,
SetInformationJobObject, JOBOBJECT_EXTENDED_LIMIT_INFORMATION,
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE, JOB_OBJECT_LIMIT_PRIORITY_CLASS,
},
Win32::System::Threading::{
GetCurrentProcess, OpenProcess, BELOW_NORMAL_PRIORITY_CLASS, PROCESS_DUP_HANDLE,
},
};

pub unsafe fn setup(build: &mut Build) {
// Enable the Windows Error Reporting dialog which msys disables,
// so we can JIT debug rustc
let mode = SetErrorMode(THREAD_ERROR_MODE::default());
let mode = THREAD_ERROR_MODE(mode);
SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX);

// Create a new job object for us to use
let job = CreateJobObjectW(None, PCWSTR::null()).unwrap();

// Indicate that when all handles to the job object are gone that all
// process in the object should be killed. Note that this includes our
// entire process tree by default because we've added ourselves and our
// children will reside in the job by default.
let mut info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION::default();
info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
if build.config.low_priority {
info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS;
info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS.0;
}
let r = SetInformationJobObject(
job,
JobObjectExtendedLimitInformation,
&info as *const _ as *const c_void,
mem::size_of_val(&info) as u32,
)
.ok();
assert!(r.is_ok(), "{}", io::Error::last_os_error());

// Assign our process to this job object. Note that if this fails, one very
// likely reason is that we are ourselves already in a job object! This can
// happen on the build bots that we've got for Windows, or if just anyone
// else is instrumenting the build. In this case we just bail out
// immediately and assume that they take care of it.
//
// Also note that nested jobs (why this might fail) are supported in recent
// versions of Windows, but the version of Windows that our bots are running
// at least don't support nested job objects.
let r = AssignProcessToJobObject(job, GetCurrentProcess()).ok();
if r.is_err() {
CloseHandle(job);
return;
}

// If we've got a parent process (e.g., the python script that called us)
// then move ownership of this job object up to them. That way if the python
// script is killed (e.g., via ctrl-c) then we'll all be torn down.
//
// If we don't have a parent (e.g., this was run directly) then we
// intentionally leak the job object handle. When our process exits
// (normally or abnormally) it will close the handle implicitly, causing all
// processes in the job to be cleaned up.
let pid = match env::var("BOOTSTRAP_PARENT_ID") {
Ok(s) => s,
Err(..) => return,
};

let parent = match OpenProcess(PROCESS_DUP_HANDLE, false, pid.parse().unwrap()).ok() {
Some(parent) => parent,
_ => {
// If we get a null parent pointer here, it is possible that either
// we have an invalid pid or the parent process has been closed.
// Since the first case rarely happens
// (only when wrongly setting the environmental variable),
// it might be better to improve the experience of the second case
// when users have interrupted the parent process and we haven't finish
// duplicating the handle yet. We just need close the job object if that occurs.
CloseHandle(job);
return;
}
};

let mut parent_handle = HANDLE::default();
let r = DuplicateHandle(
GetCurrentProcess(),
job,
parent,
&mut parent_handle,
0,
false,
DUPLICATE_SAME_ACCESS,
)
.ok();

// If this failed, well at least we tried! An example of DuplicateHandle
// failing in the past has been when the wrong python2 package spawned this
// build system (e.g., the `python2` package in MSYS instead of
// `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure
// mode" here is that we only clean everything up when the build system
// dies, not when the python parent does, so not too bad.
if r.is_err() {
CloseHandle(job);
}
}
@ -152,6 +152,9 @@ pub struct Flags {
/// generate PGO profile with llvm built for rustc
#[arg(global(true), long)]
pub llvm_profile_generate: bool,
/// Enable BOLT link flags
#[arg(global(true), long)]
pub enable_bolt_settings: bool,
/// Additional reproducible artifacts that should be added to the reproducible artifacts archive.
#[arg(global(true), long)]
pub reproducible_artifact: Vec<String>,

@ -28,70 +28,28 @@ use std::str;
use build_helper::ci::{gha, CiEnv};
use build_helper::exit;
use channel::GitInfo;
use config::{DryRun, Target};
use filetime::FileTime;
use once_cell::sync::OnceCell;
use termcolor::{ColorChoice, StandardStream, WriteColor};
use utils::channel::GitInfo;

use crate::builder::Kind;
use crate::config::{LlvmLibunwind, TargetSelection};
use crate::util::{
dir_is_empty, exe, libdir, mtime, output, run, run_suppressed, symlink_dir, try_run_suppressed,
use crate::core::builder;
use crate::core::builder::Kind;
use crate::core::config::flags;
use crate::core::config::{DryRun, Target};
use crate::core::config::{LlvmLibunwind, TargetSelection};
use crate::utils::cache::{Interned, INTERNER};
use crate::utils::helpers::{
self, dir_is_empty, exe, libdir, mtime, output, run, run_suppressed, symlink_dir,
try_run_suppressed,
};

mod builder;
mod cache;
mod cc_detect;
mod channel;
mod check;
mod clean;
mod compile;
mod config;
mod dist;
mod doc;
mod download;
mod flags;
mod format;
mod install;
mod llvm;
mod metadata;
mod render_tests;
mod run;
mod sanity;
mod setup;
mod suggest;
mod synthetic_targets;
mod tarball;
mod test;
mod tool;
mod toolstate;
pub mod util;
mod core;
mod utils;

#[cfg(feature = "build-metrics")]
mod metrics;

#[cfg(windows)]
mod job;

#[cfg(all(unix, not(target_os = "haiku")))]
mod job {
pub unsafe fn setup(build: &mut crate::Build) {
if build.config.low_priority {
libc::setpriority(libc::PRIO_PGRP as _, 0, 10);
}
}
}

#[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))]
mod job {
pub unsafe fn setup(_build: &mut crate::Build) {}
}

pub use crate::builder::PathSet;
use crate::cache::{Interned, INTERNER};
pub use crate::config::Config;
pub use crate::flags::Subcommand;
use termcolor::{ColorChoice, StandardStream, WriteColor};
pub use crate::core::builder::PathSet;
pub use crate::core::config::flags::Subcommand;
pub use crate::core::config::Config;

const LLVM_TOOLS: &[&str] = &[
"llvm-cov", // used to generate coverage report

@ -210,12 +168,12 @@ pub struct Build {
src: PathBuf,
out: PathBuf,
bootstrap_out: PathBuf,
cargo_info: channel::GitInfo,
rust_analyzer_info: channel::GitInfo,
clippy_info: channel::GitInfo,
miri_info: channel::GitInfo,
rustfmt_info: channel::GitInfo,
in_tree_llvm_info: channel::GitInfo,
cargo_info: GitInfo,
rust_analyzer_info: GitInfo,
clippy_info: GitInfo,
miri_info: GitInfo,
rustfmt_info: GitInfo,
in_tree_llvm_info: GitInfo,
local_rebuild: bool,
fail_fast: bool,
doc_tests: DocTests,

@ -248,7 +206,7 @@ pub struct Build {
prerelease_version: Cell<Option<u32>>,

#[cfg(feature = "build-metrics")]
metrics: metrics::BuildMetrics,
metrics: crate::utils::metrics::BuildMetrics,
}

#[derive(Debug, Clone)]

@ -372,16 +330,15 @@ impl Build {
let is_sudo = false;

let omit_git_hash = config.omit_git_hash;
let rust_info = channel::GitInfo::new(omit_git_hash, &src);
let cargo_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/cargo"));
let rust_analyzer_info =
channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rust-analyzer"));
let clippy_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/clippy"));
let miri_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/miri"));
let rustfmt_info = channel::GitInfo::new(omit_git_hash, &src.join("src/tools/rustfmt"));
let rust_info = GitInfo::new(omit_git_hash, &src);
let cargo_info = GitInfo::new(omit_git_hash, &src.join("src/tools/cargo"));
let rust_analyzer_info = GitInfo::new(omit_git_hash, &src.join("src/tools/rust-analyzer"));
let clippy_info = GitInfo::new(omit_git_hash, &src.join("src/tools/clippy"));
let miri_info = GitInfo::new(omit_git_hash, &src.join("src/tools/miri"));
let rustfmt_info = GitInfo::new(omit_git_hash, &src.join("src/tools/rustfmt"));

// we always try to use git for LLVM builds
let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
let in_tree_llvm_info = GitInfo::new(false, &src.join("src/llvm-project"));

let initial_target_libdir_str = if config.dry_run() {
"/dummy/lib/path/to/lib/".to_string()

@ -474,7 +431,7 @@ impl Build {
prerelease_version: Cell::new(None),

#[cfg(feature = "build-metrics")]
metrics: metrics::BuildMetrics::init(),
metrics: crate::utils::metrics::BuildMetrics::init(),
};

// If local-rust is the same major.minor as the current version, then force a

@ -493,7 +450,7 @@ impl Build {
}

build.verbose("finding compilers");
cc_detect::find(&build);
utils::cc_detect::find(&build);
// When running `setup`, the profile is about to change, so any requirements we have now may
// be different on the next invocation. Don't check for them until the next time x.py is
// run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.

@ -501,7 +458,7 @@ impl Build {
// Similarly, for `setup` we don't actually need submodules or cargo metadata.
if !matches!(build.config.cmd, Subcommand::Setup { .. }) {
build.verbose("running sanity check");
sanity::check(&mut build);
crate::core::sanity::check(&mut build);

// Make sure we update these before gathering metadata so we don't get an error about missing
// Cargo.toml files.

@ -513,7 +470,7 @@ impl Build {
build.update_existing_submodules();

build.verbose("learning about cargo");
metadata::build(&mut build);
crate::core::metadata::build(&mut build);
}

// Make a symbolic link so we can use a consistent directory in the documentation.

@ -549,7 +506,7 @@ impl Build {

// NOTE: The check for the empty directory is here because when running x.py the first time,
// the submodule won't be checked out. Check it out now so we can build it.
if !channel::GitInfo::new(false, &absolute_path).is_managed_git_subrepository()
if !GitInfo::new(false, &absolute_path).is_managed_git_subrepository()
&& !dir_is_empty(&absolute_path)
{
return;

@ -663,7 +620,7 @@ impl Build {
// Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
// Don't update the submodule unless it's already been cloned.
if channel::GitInfo::new(false, submodule).is_managed_git_subrepository() {
if GitInfo::new(false, submodule).is_managed_git_subrepository() {
self.update_submodule(submodule);
}
}

@ -672,7 +629,7 @@ impl Build {
/// Executes the entire build, as configured by the flags and configuration.
pub fn build(&mut self) {
unsafe {
job::setup(self);
crate::utils::job::setup(self);
}

// Download rustfmt early so that it can be used in rust-analyzer configs.

@ -681,10 +638,14 @@ impl Build {
// hardcoded subcommands
match &self.config.cmd {
Subcommand::Format { check } => {
return format::format(&builder::Builder::new(&self), *check, &self.config.paths);
return core::build_steps::format::format(
&builder::Builder::new(&self),
*check,
&self.config.paths,
);
}
Subcommand::Suggest { run } => {
return suggest::suggest(&builder::Builder::new(&self), *run);
return core::build_steps::suggest::suggest(&builder::Builder::new(&self), *run);
}
_ => (),
}

@ -1065,7 +1026,7 @@ impl Build {

/// Return a `Group` guard for a [`Step`] that is built for each `--stage`.
///
/// [`Step`]: crate::builder::Step
/// [`Step`]: crate::core::builder::Step
#[must_use = "Groups should not be dropped until the Step finishes running"]
#[track_caller]
fn msg(

@ -1093,7 +1054,7 @@ impl Build {

/// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`.
///
/// [`Step`]: crate::builder::Step
/// [`Step`]: crate::core::builder::Step
#[must_use = "Groups should not be dropped until the Step finishes running"]
#[track_caller]
fn msg_unstaged(

@ -1253,7 +1214,7 @@ impl Build {
// that are only existed in CXX libraries
Some(self.cxx.borrow()[&target].path().into())
} else if target != self.config.build
&& util::use_host_linker(target)
&& helpers::use_host_linker(target)
&& !target.contains("msvc")
{
Some(self.cc(target))

@ -1278,7 +1239,7 @@ impl Build {
options[0] = Some("-Clink-arg=-fuse-ld=lld".to_string());
}

let no_threads = util::lld_flag_no_threads(target.contains("windows"));
let no_threads = helpers::lld_flag_no_threads(target.contains("windows"));
options[1] = Some(format!("-Clink-arg=-Wl,{no_threads}"));
}

@ -1418,7 +1379,7 @@ impl Build {
fn extract_beta_rev_from_file<P: AsRef<Path>>(version_file: P) -> Option<String> {
let version = fs::read_to_string(version_file).ok()?;

extract_beta_rev(&version)
helpers::extract_beta_rev(&version)
}

if let Some(s) = self.prerelease_version.get() {

@ -1732,7 +1693,7 @@ impl Build {
/// Returns if config.ninja is enabled, and checks for ninja existence,
/// exiting with a nicer error message if not.
fn ninja(&self) -> bool {
let mut cmd_finder = crate::sanity::Finder::new();
let mut cmd_finder = crate::core::sanity::Finder::new();

if self.config.ninja_in_file {
// Some Linux distros rename `ninja` to `ninja-build`.

@ -1798,17 +1759,6 @@ to download LLVM rather than building it.
}
}

/// Extract the beta revision from the full version string.
///
/// The full version string looks like "a.b.c-beta.y". And we need to extract
/// the "y" part from the string.
pub fn extract_beta_rev(version: &str) -> Option<String> {
let parts = version.splitn(2, "-beta.").collect::<Vec<_>>();
let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string()));

count
}

#[cfg(unix)]
fn chmod(path: &Path, perms: u32) {
use std::os::unix::fs::*;
src/bootstrap/src/utils/bin_helpers.rs (new file, 28 lines)
@ -0,0 +1,28 @@
//! This file is meant to be included directly from bootstrap shims to avoid a
//! dependency on the bootstrap library. This reduces the binary size and
//! improves compilation time by reducing the linking time.

/// Parses the value of the "RUSTC_VERBOSE" environment variable and returns it as a `usize`.
/// If it was not defined, returns 0 by default.
///
/// Panics if "RUSTC_VERBOSE" is defined with the value that is not an unsigned integer.
pub(crate) fn parse_rustc_verbose() -> usize {
use std::str::FromStr;

match std::env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
}
}

/// Parses the value of the "RUSTC_STAGE" environment variable and returns it as a `String`.
///
/// If "RUSTC_STAGE" was not set, the program will be terminated with 101.
pub(crate) fn parse_rustc_stage() -> String {
std::env::var("RUSTC_STAGE").unwrap_or_else(|_| {
// Don't panic here; it's reasonable to try and run these shims directly. Give a helpful error instead.
eprintln!("rustc shim: fatal: RUSTC_STAGE was not set");
eprintln!("rustc shim: note: use `x.py build -vvv` to see all environment variables set by bootstrap");
std::process::exit(101);
})
}
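As the module docs say, these helpers are meant to be textually included by the shim binaries rather than linked. A hedged sketch of what such a shim might look like; the include path is an assumption, not taken from the commit:

// Hypothetical rustc shim: pull the helpers in via include! and read the
// environment that bootstrap sets for every wrapped rustc invocation.
include!("../utils/bin_helpers.rs"); // illustrative path

fn main() {
    let verbose = parse_rustc_verbose(); // 0 unless RUSTC_VERBOSE is set
    let stage = parse_rustc_stage();     // exits with 101 if RUSTC_STAGE is missing
    if verbose > 0 {
        eprintln!("rustc shim running at stage {stage}");
    }
}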

@ -14,7 +14,7 @@ use std::sync::Mutex;
// FIXME: replace with std::lazy after it gets stabilized and reaches beta
use once_cell::sync::Lazy;

use crate::builder::Step;
use crate::core::builder::Step;

pub struct Interned<T>(usize, PhantomData<*const T>);

@ -26,8 +26,8 @@ use std::path::{Path, PathBuf};
use std::process::Command;
use std::{env, iter};

use crate::config::{Target, TargetSelection};
use crate::util::output;
use crate::core::config::{Target, TargetSelection};
use crate::utils::helpers::output;
use crate::{Build, CLang, GitRepo};

// The `cc` crate doesn't provide a way to obtain a path to the detected archiver,

@ -9,8 +9,7 @@ use std::fs;
use std::path::Path;
use std::process::Command;

use crate::util::output;
use crate::util::t;
use crate::utils::helpers::{output, t};
use crate::Build;

#[derive(Clone, Default)]

@ -1,7 +1,4 @@
// Various utilities for working with dylib paths.
//
// This file is meant to be included directly to avoid a dependency on the bootstrap library from
// the rustc and rustdoc wrappers. This improves compilation time by reducing the linking time.
//! Various utilities for working with dylib paths.

/// Returns the environment variable which the dynamic library lookup path
/// resides in for this platform.

@ -21,10 +18,10 @@ pub fn dylib_path_var() -> &'static str {

/// Parses the `dylib_path_var()` environment variable, returning a list of
/// paths that are members of this lookup path.
pub fn dylib_path() -> Vec<PathBuf> {
let var = match env::var_os(dylib_path_var()) {
pub fn dylib_path() -> Vec<std::path::PathBuf> {
let var = match std::env::var_os(dylib_path_var()) {
Some(v) => v,
None => return vec![],
};
env::split_paths(&var).collect()
std::env::split_paths(&var).collect()
}
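A small sketch, not part of the commit, of how `dylib_path` and `dylib_path_var` are typically combined to prepend a directory to a child process's dynamic-library search path; the directory name is made up:

// Prepend a hypothetical directory to the platform's dylib lookup path
// and hand the result to a child process.
let mut paths = dylib_path();
paths.insert(0, std::path::PathBuf::from("/tmp/extra-libs")); // illustrative
let joined = std::env::join_paths(paths).expect("paths contained an invalid character");
std::process::Command::new("rustc")
    .env(dylib_path_var(), joined)
    .arg("--version")
    .status()
    .expect("failed to spawn rustc");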

@ -12,10 +12,12 @@ use std::process::{Command, Stdio};
use std::str;
use std::time::{Instant, SystemTime, UNIX_EPOCH};

use crate::builder::Builder;
use crate::config::{Config, TargetSelection};
use crate::core::builder::Builder;
use crate::core::config::{Config, TargetSelection};
use crate::OnceCell;

pub use crate::utils::dylib_util::{dylib_path, dylib_path_var};

/// A helper macro to `unwrap` a result except also print out details like:
///
/// * The file/line of the panic

@ -81,8 +83,6 @@ pub fn add_dylib_path(path: Vec<PathBuf>, cmd: &mut Command) {
cmd.env(dylib_path_var(), t!(env::join_paths(list)));
}

include!("dylib_util.rs");

/// Adds a list of lookup paths to `cmd`'s link library lookup path.
pub fn add_link_lib_path(path: Vec<PathBuf>, cmd: &mut Command) {
let mut list = link_lib_path();

@ -293,23 +293,6 @@ pub fn output(cmd: &mut Command) -> String {
String::from_utf8(output.stdout).unwrap()
}

pub fn output_result(cmd: &mut Command) -> Result<String, String> {
let output = match cmd.stderr(Stdio::inherit()).output() {
Ok(status) => status,
Err(e) => return Err(format!("failed to run command: {cmd:?}: {e}")),
};
if !output.status.success() {
return Err(format!(
"command did not execute successfully: {:?}\n\
expected success, got: {}\n{}",
cmd,
output.status,
String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))?
));
}
Ok(String::from_utf8(output.stdout).map_err(|err| format!("{err:?}"))?)
}
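For illustration only, `output_result` might be used like this when a caller wants to surface a failed command as an error string instead of panicking; the command is arbitrary:

// Run `git rev-parse HEAD` and propagate failures as a readable error.
let mut cmd = Command::new("git");
cmd.args(["rev-parse", "HEAD"]);
match output_result(&mut cmd) {
    Ok(stdout) => println!("HEAD is {}", stdout.trim()),
    Err(msg) => eprintln!("warning: {msg}"),
}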

/// Returns the last-modified time for `path`, or zero if it doesn't exist.
pub fn mtime(path: &Path) -> SystemTime {
fs::metadata(path).and_then(|f| f.modified()).unwrap_or(UNIX_EPOCH)

@ -495,3 +478,14 @@ pub fn lld_flag_no_threads(is_windows: bool) -> &'static str {
pub fn dir_is_empty(dir: &Path) -> bool {
t!(std::fs::read_dir(dir)).next().is_none()
}

/// Extract the beta revision from the full version string.
///
/// The full version string looks like "a.b.c-beta.y". And we need to extract
/// the "y" part from the string.
pub fn extract_beta_rev(version: &str) -> Option<String> {
let parts = version.splitn(2, "-beta.").collect::<Vec<_>>();
let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string()));

count
}
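A concrete example of the extraction above (the version strings are illustrative): the function keeps whatever follows "-beta." up to the first space, so it relies on the channel suffix being followed by more text such as a commit hash and date.

// "1.74.0-beta.3 (abcdef123 2023-10-01)" -> Some("3")
assert_eq!(extract_beta_rev("1.74.0-beta.3 (abcdef123 2023-10-01)"), Some("3".to_string()));
// A non-beta version string yields None.
assert_eq!(extract_beta_rev("1.74.0 (abcdef123 2023-10-01)"), None);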

src/bootstrap/src/utils/job.rs (new file, 161 lines)
@ -0,0 +1,161 @@
#[cfg(windows)]
pub use for_windows::*;

#[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))]
pub unsafe fn setup(_build: &mut crate::Build) {}

#[cfg(all(unix, not(target_os = "haiku")))]
pub unsafe fn setup(build: &mut crate::Build) {
if build.config.low_priority {
libc::setpriority(libc::PRIO_PGRP as _, 0, 10);
}
}

#[cfg(windows)]
mod for_windows {
//! Job management on Windows for bootstrapping
//!
//! Most of the time when you're running a build system (e.g., make) you expect
//! Ctrl-C or abnormal termination to actually terminate the entire tree of
//! process in play, not just the one at the top. This currently works "by
//! default" on Unix platforms because Ctrl-C actually sends a signal to the
//! *process group* rather than the parent process, so everything will get torn
//! down. On Windows, however, this does not happen and Ctrl-C just kills the
//! parent process.
//!
//! To achieve the same semantics on Windows we use Job Objects to ensure that
//! all processes die at the same time. Job objects have a mode of operation
//! where when all handles to the object are closed it causes all child
//! processes associated with the object to be terminated immediately.
//! Conveniently whenever a process in the job object spawns a new process the
//! child will be associated with the job object as well. This means if we add
//! ourselves to the job object we create then everything will get torn down!
//!
//! Unfortunately most of the time the build system is actually called from a
//! python wrapper (which manages things like building the build system) so this
//! all doesn't quite cut it so far. To go the last mile we duplicate the job
//! object handle into our parent process (a python process probably) and then
//! close our own handle. This means that the only handle to the job object
//! resides in the parent python process, so when python dies the whole build
//! system dies (as one would probably expect!).
//!
//! Note that this module has a #[cfg(windows)] above it as none of this logic
//! is required on Unix.

use crate::Build;
use std::env;
use std::ffi::c_void;
use std::io;
use std::mem;

use windows::{
core::PCWSTR,
Win32::Foundation::{CloseHandle, DuplicateHandle, DUPLICATE_SAME_ACCESS, HANDLE},
Win32::System::Diagnostics::Debug::{
SetErrorMode, SEM_NOGPFAULTERRORBOX, THREAD_ERROR_MODE,
},
Win32::System::JobObjects::{
AssignProcessToJobObject, CreateJobObjectW, JobObjectExtendedLimitInformation,
SetInformationJobObject, JOBOBJECT_EXTENDED_LIMIT_INFORMATION,
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE, JOB_OBJECT_LIMIT_PRIORITY_CLASS,
},
Win32::System::Threading::{
GetCurrentProcess, OpenProcess, BELOW_NORMAL_PRIORITY_CLASS, PROCESS_DUP_HANDLE,
},
};

pub unsafe fn setup(build: &mut Build) {
// Enable the Windows Error Reporting dialog which msys disables,
// so we can JIT debug rustc
let mode = SetErrorMode(THREAD_ERROR_MODE::default());
let mode = THREAD_ERROR_MODE(mode);
SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX);

// Create a new job object for us to use
let job = CreateJobObjectW(None, PCWSTR::null()).unwrap();

// Indicate that when all handles to the job object are gone that all
// process in the object should be killed. Note that this includes our
// entire process tree by default because we've added ourselves and our
// children will reside in the job by default.
let mut info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION::default();
info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
if build.config.low_priority {
info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_PRIORITY_CLASS;
info.BasicLimitInformation.PriorityClass = BELOW_NORMAL_PRIORITY_CLASS.0;
}
let r = SetInformationJobObject(
job,
JobObjectExtendedLimitInformation,
&info as *const _ as *const c_void,
mem::size_of_val(&info) as u32,
)
.ok();
assert!(r.is_ok(), "{}", io::Error::last_os_error());

// Assign our process to this job object. Note that if this fails, one very
// likely reason is that we are ourselves already in a job object! This can
// happen on the build bots that we've got for Windows, or if just anyone
// else is instrumenting the build. In this case we just bail out
// immediately and assume that they take care of it.
//
// Also note that nested jobs (why this might fail) are supported in recent
// versions of Windows, but the version of Windows that our bots are running
// at least don't support nested job objects.
let r = AssignProcessToJobObject(job, GetCurrentProcess()).ok();
if r.is_err() {
CloseHandle(job);
return;
}

// If we've got a parent process (e.g., the python script that called us)
// then move ownership of this job object up to them. That way if the python
// script is killed (e.g., via ctrl-c) then we'll all be torn down.
//
// If we don't have a parent (e.g., this was run directly) then we
// intentionally leak the job object handle. When our process exits
// (normally or abnormally) it will close the handle implicitly, causing all
// processes in the job to be cleaned up.
let pid = match env::var("BOOTSTRAP_PARENT_ID") {
Ok(s) => s,
Err(..) => return,
};

let parent = match OpenProcess(PROCESS_DUP_HANDLE, false, pid.parse().unwrap()).ok() {
Some(parent) => parent,
_ => {
// If we get a null parent pointer here, it is possible that either
// we have an invalid pid or the parent process has been closed.
// Since the first case rarely happens
// (only when wrongly setting the environmental variable),
// it might be better to improve the experience of the second case
// when users have interrupted the parent process and we haven't finish
// duplicating the handle yet. We just need close the job object if that occurs.
CloseHandle(job);
return;
}
};

let mut parent_handle = HANDLE::default();
let r = DuplicateHandle(
GetCurrentProcess(),
job,
parent,
&mut parent_handle,
0,
false,
DUPLICATE_SAME_ACCESS,
)
.ok();

// If this failed, well at least we tried! An example of DuplicateHandle
// failing in the past has been when the wrong python2 package spawned this
// build system (e.g., the `python2` package in MSYS instead of
// `mingw-w64-x86_64-python2`). Not sure why it failed, but the "failure
// mode" here is that we only clean everything up when the build system
// dies, not when the python parent does, so not too bad.
if r.is_err() {
CloseHandle(job);
}
}
}
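The handle hand-off above is driven by the BOOTSTRAP_PARENT_ID environment variable. A hedged sketch, not from this commit, of how a wrapper process could opt in when spawning bootstrap; the binary name is illustrative:

// Hypothetical wrapper: advertise our PID so bootstrap can duplicate the job
// object handle into this process and tie the build's lifetime to ours.
std::process::Command::new("bootstrap") // illustrative binary name
    .env("BOOTSTRAP_PARENT_ID", std::process::id().to_string())
    .status()
    .expect("failed to spawn bootstrap");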

@ -4,8 +4,8 @@
//! As this module requires additional dependencies not present during local builds, it's cfg'd
//! away whenever the `build.metrics` config option is not set to `true`.

use crate::builder::{Builder, Step};
use crate::util::t;
use crate::core::builder::{Builder, Step};
use crate::utils::helpers::t;
use crate::Build;
use build_helper::metrics::{
JsonInvocation, JsonInvocationSystemStats, JsonNode, JsonRoot, JsonStepSystemStats, Test,
src/bootstrap/src/utils/mod.rs (new file, 14 lines)
@ -0,0 +1,14 @@
//! This module contains integral components of the build and configuration process, providing
//! support for a wide range of tasks and operations such as caching, tarballs, release
//! channels, job management, etc.

pub(crate) mod cache;
pub(crate) mod cc_detect;
pub(crate) mod channel;
pub(crate) mod dylib_util;
pub(crate) mod helpers;
pub(crate) mod job;
#[cfg(feature = "build-metrics")]
pub(crate) mod metrics;
pub(crate) mod render_tests;
pub(crate) mod tarball;

@ -6,7 +6,7 @@
//! and rustc) libtest doesn't include the rendered human-readable output as a JSON field. We had
//! to reimplement all the rendering logic in this module because of that.

use crate::builder::Builder;
use crate::core::builder::Builder;
use std::io::{BufRead, BufReader, Read, Write};
use std::process::{ChildStdout, Command, Stdio};
use std::time::Duration;

@ -3,9 +3,10 @@ use std::{
process::Command,
};

use crate::builder::Builder;
use crate::channel;
use crate::util::t;
use crate::core::build_steps::dist::distdir;
use crate::core::builder::Builder;
use crate::utils::channel;
use crate::utils::helpers::t;

#[derive(Copy, Clone)]
pub(crate) enum OverlayKind {

@ -112,7 +113,7 @@ impl<'a> Tarball<'a> {
}

fn new_inner(builder: &'a Builder<'a>, component: &str, target: Option<String>) -> Self {
let pkgname = crate::dist::pkgname(builder, component);
let pkgname = crate::core::build_steps::dist::pkgname(builder, component);

let mut temp_dir = builder.out.join("tmp").join("tarball").join(component);
if let Some(target) = &target {

@ -265,7 +266,7 @@ impl<'a> Tarball<'a> {
t!(std::fs::rename(&self.image_dir, &dest));

self.run(|this, cmd| {
let distdir = crate::dist::distdir(this.builder);
let distdir = distdir(this.builder);
t!(std::fs::create_dir_all(&distdir));
cmd.arg("tarball")
.arg("--input")

@ -292,7 +293,7 @@ impl<'a> Tarball<'a> {
.arg("--non-installed-overlay")
.arg(&self.overlay_dir)
.arg("--output-dir")
.arg(crate::dist::distdir(self.builder));
.arg(distdir(self.builder));
}

fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTarball {

@ -306,11 +307,11 @@ impl<'a> Tarball<'a> {
self.builder.install(&self.builder.src.join(file), &self.overlay_dir, 0o644);
}

let mut cmd = self.builder.tool_cmd(crate::tool::Tool::RustInstaller);
let mut cmd = self.builder.tool_cmd(crate::core::build_steps::tool::Tool::RustInstaller);

let package_name = self.package_name();
self.builder.info(&format!("Dist {package_name}"));
let _time = crate::util::timeit(self.builder);
let _time = crate::utils::helpers::timeit(self.builder);

build_cli(&self, &mut cmd);
cmd.arg("--work-dir").arg(&self.temp_dir);

@ -344,7 +345,7 @@ impl<'a> Tarball<'a> {
.unwrap_or("gz");

GeneratedTarball {
path: crate::dist::distdir(self.builder).join(format!("{package_name}.tar.{ext}")),
path: distdir(self.builder).join(format!("{package_name}.tar.{ext}")),
decompressed_output,
work: self.temp_dir,
}