Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-22 06:44:35 +00:00

Auto merge of #122283 - matthiaskrgr:rollup-w3sau3u, r=matthiaskrgr

Rollup of 14 pull requests

Successful merges:

 - #112136 (Add std::ffi::c_str module)
 - #113525 (Dynamically size sigaltstk in std)
 - #121567 (Avoid some interning in bootstrap)
 - #121642 (Update a test to support Symbol Mangling V0)
 - #121685 (Fixing shellcheck comments on lvi test script)
 - #121860 (Add a tidy check that checks whether the fluent slugs only appear once)
 - #121942 (std::rand: enable getrandom for dragonflybsd too.)
 - #122125 (Revert back to Git-for-Windows for MinGW CI builds)
 - #122221 (match lowering: define a convenient struct)
 - #122244 (fix: LocalWaker memory leak and some stability attributes)
 - #122251 (Add test to check unused_lifetimes don't duplicate "parameter is never used" error)
 - #122264 (add myself to rotation)
 - #122269 (doc/rustc: Move loongarch64-unknown-linux-musl to Tier 3)
 - #122271 (Fix legacy numeric constant diag items)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
commit cdb775cab5

20  INSTALL.md
@@ -145,6 +145,15 @@ toolchain.

1. Download the latest [MSYS2 installer][msys2] and go through the installer.

2. Download and install [Git for Windows](https://git-scm.com/download/win).
Make sure that it's in your Windows PATH. To enable access to it from within
MSYS2, edit the relevant `mingw[32|64].ini` file in your MSYS2 installation
directory and uncomment the line `MSYS2_PATH_TYPE=inherit`.

You could install and use MSYS2's version of git instead with `pacman`,
however this is not recommended as it's excruciatingly slow, and not frequently
tested for compatibility.

2. Start a MINGW64 or MINGW32 shell (depending on whether you want 32-bit
or 64-bit Rust) either from your start menu, or by running `mingw64.exe`
or `mingw32.exe` from your MSYS2 installation directory (e.g. `C:\msys64`).

@@ -160,8 +169,7 @@ toolchain.
# Note that it is important that you do **not** use the 'python2', 'cmake',
# and 'ninja' packages from the 'msys2' subsystem.
# The build has historically been known to fail with these packages.
pacman -S git \
make \
pacman -S make \
diffutils \
tar \
mingw-w64-x86_64-python \

@@ -176,11 +184,9 @@ toolchain.
python x.py setup dist && python x.py build && python x.py install
```

If you want to use the native versions of Git, Python, or CMake you can remove
them from the above pacman command and install them from another source. Make
sure that they're in your Windows PATH, and edit the relevant `mingw[32|64].ini`
file in your MSYS2 installation directory by uncommenting the line
`MSYS2_PATH_TYPE=inherit` to include them in your MSYS2 PATH.
If you want to try the native Windows versions of Python or CMake, you can remove
them from the above pacman command and install them from another source. Follow
the instructions in step 2 to get them on PATH.

Using Windows native Python can be helpful if you get errors when building LLVM.
You may also want to use Git for Windows, as it is often *much* faster. Turning
@@ -146,9 +146,6 @@ const_eval_intern_kind = {$kind ->
*[other] {""}
}

const_eval_invalid_align =
align has to be a power of 2

const_eval_invalid_align_details =
invalid align passed to `{$name}`: {$align} is {$err_kind ->
[not_power_of_two] not a power of 2
@@ -181,14 +181,6 @@ infer_more_targeted = {$has_param_name ->

infer_msl_introduces_static = introduces a `'static` lifetime requirement
infer_msl_unmet_req = because this has an unmet lifetime requirement
infer_need_type_info_in_coroutine =
type inside {$coroutine_kind ->
[async_block] `async` block
[async_closure] `async` closure
[async_fn] `async fn` body
*[coroutine] coroutine
} must be known in this context

infer_nothing = {""}
@@ -562,8 +562,6 @@ lint_suspicious_double_ref_clone =
lint_suspicious_double_ref_deref =
using `.deref()` on a double reference, which returns `{$ty}` instead of dereferencing the inner type

lint_trivial_untranslatable_diag = diagnostic with static strings only

lint_ty_qualified = usage of qualified `ty::{$ty}`
.suggestion = try importing it and using it unqualified
@ -506,13 +506,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
traverse_candidate(
|
||||
candidate,
|
||||
&mut Vec::new(),
|
||||
&mut |leaf_candidate, parent_bindings| {
|
||||
&mut |leaf_candidate, parent_data| {
|
||||
if let Some(arm) = arm {
|
||||
self.clear_top_scope(arm.scope);
|
||||
}
|
||||
let binding_end = self.bind_and_guard_matched_candidate(
|
||||
leaf_candidate,
|
||||
parent_bindings,
|
||||
parent_data,
|
||||
fake_borrow_temps,
|
||||
scrutinee_span,
|
||||
arm_match_scope,
|
||||
@ -524,12 +524,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
}
|
||||
self.cfg.goto(binding_end, outer_source_info, target_block);
|
||||
},
|
||||
|inner_candidate, parent_bindings| {
|
||||
parent_bindings.push((inner_candidate.bindings, inner_candidate.ascriptions));
|
||||
|inner_candidate, parent_data| {
|
||||
parent_data.push(inner_candidate.extra_data);
|
||||
inner_candidate.subcandidates.into_iter()
|
||||
},
|
||||
|parent_bindings| {
|
||||
parent_bindings.pop();
|
||||
|parent_data| {
|
||||
parent_data.pop();
|
||||
},
|
||||
);
|
||||
|
||||
@ -651,7 +651,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
if set_match_place {
|
||||
let mut next = Some(&candidate);
|
||||
while let Some(candidate_ref) = next.take() {
|
||||
for binding in &candidate_ref.bindings {
|
||||
for binding in &candidate_ref.extra_data.bindings {
|
||||
let local = self.var_local_id(binding.var_id, OutsideGuard);
|
||||
// `try_to_place` may fail if it is unable to resolve the given
|
||||
// `PlaceBuilder` inside a closure. In this case, we don't want to include
|
||||
@ -924,22 +924,35 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
/// A pattern in a form suitable for generating code.
|
||||
/// Data extracted from a pattern that doesn't affect which branch is taken. Collected during
|
||||
/// pattern simplification and not mutated later.
|
||||
#[derive(Debug, Clone)]
|
||||
struct FlatPat<'pat, 'tcx> {
|
||||
struct PatternExtraData<'tcx> {
|
||||
/// [`Span`] of the original pattern.
|
||||
span: Span,
|
||||
|
||||
/// Bindings that must be established.
|
||||
bindings: Vec<Binding<'tcx>>,
|
||||
|
||||
/// Types that must be asserted.
|
||||
ascriptions: Vec<Ascription<'tcx>>,
|
||||
}
|
||||
|
||||
impl<'tcx> PatternExtraData<'tcx> {
|
||||
fn is_empty(&self) -> bool {
|
||||
self.bindings.is_empty() && self.ascriptions.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
/// A pattern in a form suitable for generating code.
|
||||
#[derive(Debug, Clone)]
|
||||
struct FlatPat<'pat, 'tcx> {
|
||||
/// To match the pattern, all of these must be satisfied...
|
||||
// Invariant: all the `MatchPair`s are recursively simplified.
|
||||
// Invariant: or-patterns must be sorted to the end.
|
||||
match_pairs: Vec<MatchPair<'pat, 'tcx>>,
|
||||
|
||||
/// ...these bindings established...
|
||||
bindings: Vec<Binding<'tcx>>,
|
||||
|
||||
/// ...and these types asserted.
|
||||
ascriptions: Vec<Ascription<'tcx>>,
|
||||
extra_data: PatternExtraData<'tcx>,
|
||||
}
|
||||
|
||||
impl<'tcx, 'pat> FlatPat<'pat, 'tcx> {
|
||||
@ -948,43 +961,38 @@ impl<'tcx, 'pat> FlatPat<'pat, 'tcx> {
|
||||
pattern: &'pat Pat<'tcx>,
|
||||
cx: &mut Builder<'_, 'tcx>,
|
||||
) -> Self {
|
||||
let mut match_pairs = vec![MatchPair::new(place, pattern, cx)];
|
||||
let mut bindings = Vec::new();
|
||||
let mut ascriptions = Vec::new();
|
||||
|
||||
cx.simplify_match_pairs(&mut match_pairs, &mut bindings, &mut ascriptions);
|
||||
|
||||
FlatPat { span: pattern.span, match_pairs, bindings, ascriptions }
|
||||
let mut flat_pat = FlatPat {
|
||||
match_pairs: vec![MatchPair::new(place, pattern, cx)],
|
||||
extra_data: PatternExtraData {
|
||||
span: pattern.span,
|
||||
bindings: Vec::new(),
|
||||
ascriptions: Vec::new(),
|
||||
},
|
||||
};
|
||||
cx.simplify_match_pairs(&mut flat_pat.match_pairs, &mut flat_pat.extra_data);
|
||||
flat_pat
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Candidate<'pat, 'tcx> {
|
||||
/// [`Span`] of the original pattern that gave rise to this candidate.
|
||||
span: Span,
|
||||
|
||||
/// Whether this `Candidate` has a guard.
|
||||
has_guard: bool,
|
||||
|
||||
/// All of these must be satisfied...
|
||||
/// For the candidate to match, all of these must be satisfied...
|
||||
// Invariant: all the `MatchPair`s are recursively simplified.
|
||||
// Invariant: or-patterns must be sorted at the end.
|
||||
match_pairs: Vec<MatchPair<'pat, 'tcx>>,
|
||||
|
||||
/// ...these bindings established...
|
||||
// Invariant: not mutated after candidate creation.
|
||||
bindings: Vec<Binding<'tcx>>,
|
||||
|
||||
/// ...and these types asserted...
|
||||
// Invariant: not mutated after candidate creation.
|
||||
ascriptions: Vec<Ascription<'tcx>>,
|
||||
|
||||
/// ...and if this is non-empty, one of these subcandidates also has to match...
|
||||
subcandidates: Vec<Candidate<'pat, 'tcx>>,
|
||||
|
||||
/// ...and the guard must be evaluated; if it's `false` then branch to `otherwise_block`.
|
||||
/// ...and the guard must be evaluated if there is one.
|
||||
has_guard: bool,
|
||||
|
||||
/// If the guard is `false` then branch to `otherwise_block`.
|
||||
otherwise_block: Option<BasicBlock>,
|
||||
|
||||
/// If the candidate matches, bindings and ascriptions must be established.
|
||||
extra_data: PatternExtraData<'tcx>,
|
||||
|
||||
/// The block before the `bindings` have been established.
|
||||
pre_binding_block: Option<BasicBlock>,
|
||||
/// The pre-binding block of the next candidate.
|
||||
@ -1003,10 +1011,8 @@ impl<'tcx, 'pat> Candidate<'pat, 'tcx> {
|
||||
|
||||
fn from_flat_pat(flat_pat: FlatPat<'pat, 'tcx>, has_guard: bool) -> Self {
|
||||
Candidate {
|
||||
span: flat_pat.span,
|
||||
match_pairs: flat_pat.match_pairs,
|
||||
bindings: flat_pat.bindings,
|
||||
ascriptions: flat_pat.ascriptions,
|
||||
extra_data: flat_pat.extra_data,
|
||||
has_guard,
|
||||
subcandidates: Vec::new(),
|
||||
otherwise_block: None,
|
||||
@ -1518,9 +1524,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
self.merge_trivial_subcandidates(subcandidate, source_info);
|
||||
|
||||
// FIXME(or_patterns; matthewjasper) Try to be more aggressive here.
|
||||
can_merge &= subcandidate.subcandidates.is_empty()
|
||||
&& subcandidate.bindings.is_empty()
|
||||
&& subcandidate.ascriptions.is_empty();
|
||||
can_merge &=
|
||||
subcandidate.subcandidates.is_empty() && subcandidate.extra_data.is_empty();
|
||||
}
|
||||
|
||||
if can_merge {
|
||||
@ -1943,7 +1948,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
fn bind_and_guard_matched_candidate<'pat>(
|
||||
&mut self,
|
||||
candidate: Candidate<'pat, 'tcx>,
|
||||
parent_bindings: &[(Vec<Binding<'tcx>>, Vec<Ascription<'tcx>>)],
|
||||
parent_data: &[PatternExtraData<'tcx>],
|
||||
fake_borrows: &[(Place<'tcx>, Local)],
|
||||
scrutinee_span: Span,
|
||||
arm_match_scope: Option<(&Arm<'tcx>, region::Scope)>,
|
||||
@ -1954,7 +1959,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
|
||||
debug_assert!(candidate.match_pairs.is_empty());
|
||||
|
||||
let candidate_source_info = self.source_info(candidate.span);
|
||||
let candidate_source_info = self.source_info(candidate.extra_data.span);
|
||||
|
||||
let mut block = candidate.pre_binding_block.unwrap();
|
||||
|
||||
@ -1971,11 +1976,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
|
||||
self.ascribe_types(
|
||||
block,
|
||||
parent_bindings
|
||||
parent_data
|
||||
.iter()
|
||||
.flat_map(|(_, ascriptions)| ascriptions)
|
||||
.flat_map(|d| &d.ascriptions)
|
||||
.cloned()
|
||||
.chain(candidate.ascriptions),
|
||||
.chain(candidate.extra_data.ascriptions),
|
||||
);
|
||||
|
||||
// rust-lang/rust#27282: The `autoref` business deserves some
|
||||
@ -2063,10 +2068,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
&& let Some(guard) = arm.guard
|
||||
{
|
||||
let tcx = self.tcx;
|
||||
let bindings = parent_bindings
|
||||
.iter()
|
||||
.flat_map(|(bindings, _)| bindings)
|
||||
.chain(&candidate.bindings);
|
||||
let bindings =
|
||||
parent_data.iter().flat_map(|d| &d.bindings).chain(&candidate.extra_data.bindings);
|
||||
|
||||
self.bind_matched_candidate_for_guard(block, schedule_drops, bindings.clone());
|
||||
let guard_frame = GuardFrame {
|
||||
@ -2144,10 +2147,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
// ```
|
||||
//
|
||||
// and that is clearly not correct.
|
||||
let by_value_bindings = parent_bindings
|
||||
let by_value_bindings = parent_data
|
||||
.iter()
|
||||
.flat_map(|(bindings, _)| bindings)
|
||||
.chain(&candidate.bindings)
|
||||
.flat_map(|d| &d.bindings)
|
||||
.chain(&candidate.extra_data.bindings)
|
||||
.filter(|binding| matches!(binding.binding_mode, BindingMode::ByValue));
|
||||
// Read all of the by reference bindings to ensure that the
|
||||
// place they refer to can't be modified by the guard.
|
||||
@ -2172,10 +2175,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
self.bind_matched_candidate_for_arm_body(
|
||||
block,
|
||||
schedule_drops,
|
||||
parent_bindings
|
||||
.iter()
|
||||
.flat_map(|(bindings, _)| bindings)
|
||||
.chain(&candidate.bindings),
|
||||
parent_data.iter().flat_map(|d| &d.bindings).chain(&candidate.extra_data.bindings),
|
||||
storages_alive,
|
||||
);
|
||||
block
|
||||
|
@ -12,20 +12,19 @@
|
||||
//! sort of test: for example, testing which variant an enum is, or
|
||||
//! testing a value against a constant.
|
||||
|
||||
use crate::build::matches::{Ascription, Binding, Candidate, FlatPat, MatchPair, TestCase};
|
||||
use crate::build::matches::{Candidate, FlatPat, MatchPair, PatternExtraData, TestCase};
|
||||
use crate::build::Builder;
|
||||
|
||||
use std::mem;
|
||||
|
||||
impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
/// Simplify a list of match pairs so they all require a test. Stores relevant bindings and
|
||||
/// ascriptions in the provided `Vec`s.
|
||||
/// ascriptions in `extra_data`.
|
||||
#[instrument(skip(self), level = "debug")]
|
||||
pub(super) fn simplify_match_pairs<'pat>(
|
||||
&mut self,
|
||||
match_pairs: &mut Vec<MatchPair<'pat, 'tcx>>,
|
||||
candidate_bindings: &mut Vec<Binding<'tcx>>,
|
||||
candidate_ascriptions: &mut Vec<Ascription<'tcx>>,
|
||||
extra_data: &mut PatternExtraData<'tcx>,
|
||||
) {
|
||||
// In order to please the borrow checker, in a pattern like `x @ pat` we must lower the
|
||||
// bindings in `pat` before `x`. E.g. (#69971):
|
||||
@ -45,17 +44,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
// after any bindings in `pat`. This doesn't work for or-patterns: the current structure of
|
||||
// match lowering forces us to lower bindings inside or-patterns last.
|
||||
for mut match_pair in mem::take(match_pairs) {
|
||||
self.simplify_match_pairs(
|
||||
&mut match_pair.subpairs,
|
||||
candidate_bindings,
|
||||
candidate_ascriptions,
|
||||
);
|
||||
self.simplify_match_pairs(&mut match_pair.subpairs, extra_data);
|
||||
if let TestCase::Irrefutable { binding, ascription } = match_pair.test_case {
|
||||
if let Some(binding) = binding {
|
||||
candidate_bindings.push(binding);
|
||||
extra_data.bindings.push(binding);
|
||||
}
|
||||
if let Some(ascription) = ascription {
|
||||
candidate_ascriptions.push(ascription);
|
||||
extra_data.ascriptions.push(ascription);
|
||||
}
|
||||
// Simplifiable pattern; we replace it with its already simplified subpairs.
|
||||
match_pairs.append(&mut match_pair.subpairs);
|
||||
|
@ -280,7 +280,7 @@ impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
|
||||
}
|
||||
|
||||
fn visit_candidate(&mut self, candidate: &Candidate<'_, 'tcx>) {
|
||||
for binding in &candidate.bindings {
|
||||
for binding in &candidate.extra_data.bindings {
|
||||
self.visit_binding(binding);
|
||||
}
|
||||
for match_pair in &candidate.match_pairs {
|
||||
@ -289,7 +289,7 @@ impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
|
||||
}
|
||||
|
||||
fn visit_flat_pat(&mut self, flat_pat: &FlatPat<'_, 'tcx>) {
|
||||
for binding in &flat_pat.bindings {
|
||||
for binding in &flat_pat.extra_data.bindings {
|
||||
self.visit_binding(binding);
|
||||
}
|
||||
for match_pair in &flat_pat.match_pairs {
|
||||
|
@ -392,9 +392,6 @@ parse_invalid_identifier_with_leading_number = identifiers cannot start with a n
|
||||
|
||||
parse_invalid_interpolated_expression = invalid interpolated expression
|
||||
|
||||
parse_invalid_literal_suffix = suffixes on {$kind} literals are invalid
|
||||
.label = invalid suffix `{$suffix}`
|
||||
|
||||
parse_invalid_literal_suffix_on_tuple_index = suffixes on a tuple index are invalid
|
||||
.label = invalid suffix `{$suffix}`
|
||||
.tuple_exception_line_1 = `{$suffix}` is *temporarily* accepted on tuple index fields as it was incorrectly accepted on stable for a few releases
|
||||
@ -609,7 +606,6 @@ parse_nonterminal_expected_item_keyword = expected an item keyword
|
||||
parse_nonterminal_expected_lifetime = expected a lifetime, found `{$token}`
|
||||
|
||||
parse_nonterminal_expected_statement = expected a statement
|
||||
parse_not_supported = not supported
|
||||
|
||||
parse_note_edition_guide = for more on editions, read https://doc.rust-lang.org/edition-guide
|
||||
|
||||
|
@ -302,9 +302,6 @@ passes_export_name =
|
||||
attribute should be applied to a free function, impl method or static
|
||||
.label = not a free function, impl method or static
|
||||
|
||||
passes_expr_not_allowed_in_context =
|
||||
{$expr} is not allowed in a `{$context}`
|
||||
|
||||
passes_extern_main =
|
||||
the `main` function cannot be declared in an `extern` block
|
||||
|
||||
@ -405,8 +402,6 @@ passes_lang_item_on_incorrect_target =
|
||||
`{$name}` language item must be applied to a {$expected_target}
|
||||
.label = attribute should be applied to a {$expected_target}, not a {$actual_target}
|
||||
|
||||
passes_layout =
|
||||
layout error: {$layout_error}
|
||||
passes_layout_abi =
|
||||
abi: {$abi}
|
||||
passes_layout_align =
|
||||
|
@ -1000,6 +1000,11 @@ symbols! {
|
||||
is_val_statically_known,
|
||||
isa_attribute,
|
||||
isize,
|
||||
isize_legacy_const_max,
|
||||
isize_legacy_const_min,
|
||||
isize_legacy_fn_max_value,
|
||||
isize_legacy_fn_min_value,
|
||||
isize_legacy_mod,
|
||||
issue,
|
||||
issue_5723_bootstrap,
|
||||
issue_tracker_base_url,
|
||||
@ -1910,6 +1915,11 @@ symbols! {
|
||||
used_with_arg,
|
||||
using,
|
||||
usize,
|
||||
usize_legacy_const_max,
|
||||
usize_legacy_const_min,
|
||||
usize_legacy_fn_max_value,
|
||||
usize_legacy_fn_min_value,
|
||||
usize_legacy_mod,
|
||||
va_arg,
|
||||
va_copy,
|
||||
va_end,
|
||||
|
@@ -1,3 +1,5 @@
//! [`CString`] and its related types.

#[cfg(test)]
mod tests;

@@ -80,9 +80,13 @@

#![stable(feature = "alloc_ffi", since = "1.64.0")]

#[doc(no_inline)]
#[stable(feature = "alloc_c_string", since = "1.64.0")]
pub use self::c_str::FromVecWithNulError;
#[stable(feature = "alloc_c_string", since = "1.64.0")]
pub use self::c_str::{CString, IntoStringError, NulError};
pub use self::c_str::{FromVecWithNulError, IntoStringError, NulError};

mod c_str;
#[doc(inline)]
#[stable(feature = "alloc_c_string", since = "1.64.0")]
pub use self::c_str::CString;

#[unstable(feature = "c_str_module", issue = "112134")]
pub mod c_str;
@ -1,3 +1,5 @@
|
||||
//! [`CStr`] and its related types.
|
||||
|
||||
use crate::cmp::Ordering;
|
||||
use crate::error::Error;
|
||||
use crate::ffi::c_char;
|
||||
@ -9,15 +11,20 @@ use crate::slice;
|
||||
use crate::slice::memchr;
|
||||
use crate::str;
|
||||
|
||||
// FIXME: because this is doc(inline)d, we *have* to use intra-doc links because the actual link
|
||||
// depends on where the item is being documented. however, since this is libcore, we can't
|
||||
// actually reference libstd or liballoc in intra-doc links. so, the best we can do is remove the
|
||||
// links to `CString` and `String` for now until a solution is developed
|
||||
|
||||
/// Representation of a borrowed C string.
|
||||
///
|
||||
/// This type represents a borrowed reference to a nul-terminated
|
||||
/// array of bytes. It can be constructed safely from a <code>&[[u8]]</code>
|
||||
/// slice, or unsafely from a raw `*const c_char`. It can then be
|
||||
/// converted to a Rust <code>&[str]</code> by performing UTF-8 validation, or
|
||||
/// into an owned [`CString`].
|
||||
/// into an owned `CString`.
|
||||
///
|
||||
/// `&CStr` is to [`CString`] as <code>&[str]</code> is to [`String`]: the former
|
||||
/// `&CStr` is to `CString` as <code>&[str]</code> is to `String`: the former
|
||||
/// in each pair are borrowed references; the latter are owned
|
||||
/// strings.
|
||||
///
|
||||
@ -26,9 +33,6 @@ use crate::str;
|
||||
/// Instead, safe wrappers of FFI functions may leverage the unsafe [`CStr::from_ptr`] constructor
|
||||
/// to provide a safe interface to other consumers.
|
||||
///
|
||||
/// [`CString`]: ../../std/ffi/struct.CString.html
|
||||
/// [`String`]: ../../std/string/struct.String.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Inspecting a foreign C string:
|
||||
@ -125,10 +129,13 @@ enum FromBytesWithNulErrorKind {
|
||||
NotNulTerminated,
|
||||
}
|
||||
|
||||
// FIXME: const stability attributes should not be required here, I think
|
||||
impl FromBytesWithNulError {
|
||||
#[rustc_const_stable(feature = "const_cstr_methods", since = "1.72.0")]
|
||||
const fn interior_nul(pos: usize) -> FromBytesWithNulError {
|
||||
FromBytesWithNulError { kind: FromBytesWithNulErrorKind::InteriorNul(pos) }
|
||||
}
|
||||
#[rustc_const_stable(feature = "const_cstr_methods", since = "1.72.0")]
|
||||
const fn not_nul_terminated() -> FromBytesWithNulError {
|
||||
FromBytesWithNulError { kind: FromBytesWithNulErrorKind::NotNulTerminated }
|
||||
}
|
||||
|
@@ -13,10 +13,20 @@ use crate::fmt;
use crate::marker::PhantomData;
use crate::ops::{Deref, DerefMut};

#[doc(no_inline)]
#[stable(feature = "core_c_str", since = "1.64.0")]
pub use self::c_str::{CStr, FromBytesUntilNulError, FromBytesWithNulError};
pub use self::c_str::FromBytesWithNulError;

mod c_str;
#[doc(no_inline)]
#[stable(feature = "cstr_from_bytes_until_nul", since = "1.69.0")]
pub use self::c_str::FromBytesUntilNulError;

#[doc(inline)]
#[stable(feature = "core_c_str", since = "1.64.0")]
pub use self::c_str::CStr;

#[unstable(feature = "c_str_module", issue = "112134")]
pub mod c_str;

macro_rules! type_alias {
{
@@ -2972,6 +2972,7 @@ macro_rules! uint_impl {
#[inline(always)]
#[rustc_const_stable(feature = "const_max_value", since = "1.32.0")]
#[deprecated(since = "TBD", note = "replaced by the `MIN` associated constant on this type")]
#[rustc_diagnostic_item = concat!(stringify!($SelfT), "_legacy_fn_min_value")]
pub const fn min_value() -> Self { Self::MIN }

/// New code should prefer to use
@@ -2983,6 +2984,7 @@ macro_rules! uint_impl {
#[inline(always)]
#[rustc_const_stable(feature = "const_max_value", since = "1.32.0")]
#[deprecated(since = "TBD", note = "replaced by the `MAX` associated constant on this type")]
#[rustc_diagnostic_item = concat!(stringify!($SelfT), "_legacy_fn_max_value")]
pub const fn max_value() -> Self { Self::MAX }
}
}
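The two hunks above only attach `rustc_diagnostic_item` attributes so that lints can refer to the legacy `min_value`/`max_value` functions by name (see #122271 in the rollup list). As a hedged illustration, not part of the diff, here is a minimal sketch contrasting the legacy function form with the `MIN`/`MAX` associated constants that replace it; the helper name `clamp_to_u8` is made up for the example.

// Illustrative only: the legacy functions still compile today, but the
// associated constants are the preferred spelling.
fn clamp_to_u8(x: i32) -> u8 {
    // Old style, which the `_legacy_fn_*` diagnostic items point at:
    let _old = u8::max_value();
    // New style:
    x.clamp(u8::MIN as i32, u8::MAX as i32) as u8
}

fn main() {
    assert_eq!(clamp_to_u8(300), 255);
    assert_eq!(clamp_to_u8(-5), 0);
}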
@ -622,7 +622,7 @@ impl LocalWaker {
|
||||
///
|
||||
/// [`poll()`]: crate::future::Future::poll
|
||||
#[inline]
|
||||
#[stable(feature = "futures_api", since = "1.36.0")]
|
||||
#[unstable(feature = "local_waker", issue = "118959")]
|
||||
pub fn wake(self) {
|
||||
// The actual wakeup call is delegated through a virtual function call
|
||||
// to the implementation which is defined by the executor.
|
||||
@ -644,7 +644,7 @@ impl LocalWaker {
|
||||
/// the case where an owned `Waker` is available. This method should be preferred to
|
||||
/// calling `waker.clone().wake()`.
|
||||
#[inline]
|
||||
#[stable(feature = "futures_api", since = "1.36.0")]
|
||||
#[unstable(feature = "local_waker", issue = "118959")]
|
||||
pub fn wake_by_ref(&self) {
|
||||
// The actual wakeup call is delegated through a virtual function call
|
||||
// to the implementation which is defined by the executor.
|
||||
@ -664,7 +664,7 @@ impl LocalWaker {
|
||||
/// avoid cloning the waker when they would wake the same task anyway.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
#[stable(feature = "futures_api", since = "1.36.0")]
|
||||
#[unstable(feature = "local_waker", issue = "118959")]
|
||||
pub fn will_wake(&self, other: &LocalWaker) -> bool {
|
||||
self.waker == other.waker
|
||||
}
|
||||
@ -676,7 +676,7 @@ impl LocalWaker {
|
||||
/// Therefore this method is unsafe.
|
||||
#[inline]
|
||||
#[must_use]
|
||||
#[stable(feature = "futures_api", since = "1.36.0")]
|
||||
#[unstable(feature = "local_waker", issue = "118959")]
|
||||
#[rustc_const_unstable(feature = "const_waker", issue = "102012")]
|
||||
pub const unsafe fn from_raw(waker: RawWaker) -> LocalWaker {
|
||||
Self { waker }
|
||||
@ -748,7 +748,18 @@ impl AsRef<LocalWaker> for Waker {
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "futures_api", since = "1.36.0")]
|
||||
#[unstable(feature = "local_waker", issue = "118959")]
|
||||
impl Drop for LocalWaker {
|
||||
#[inline]
|
||||
fn drop(&mut self) {
|
||||
// SAFETY: This is safe because `LocalWaker::from_raw` is the only way
|
||||
// to initialize `drop` and `data` requiring the user to acknowledge
|
||||
// that the contract of `RawWaker` is upheld.
|
||||
unsafe { (self.waker.vtable.drop)(self.waker.data) }
|
||||
}
|
||||
}
|
||||
|
||||
#[unstable(feature = "local_waker", issue = "118959")]
|
||||
impl fmt::Debug for LocalWaker {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let vtable_ptr = self.waker.vtable as *const RawWakerVTable;
|
||||
|
19  library/std/src/ffi/c_str.rs  Normal file

@@ -0,0 +1,19 @@
//! [`CStr`], [`CString`], and related types.

#[stable(feature = "rust1", since = "1.0.0")]
pub use core::ffi::c_str::CStr;

#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
pub use core::ffi::c_str::FromBytesWithNulError;

#[stable(feature = "cstr_from_bytes_until_nul", since = "1.69.0")]
pub use core::ffi::c_str::FromBytesUntilNulError;

#[stable(feature = "rust1", since = "1.0.0")]
pub use alloc::ffi::c_str::{CString, NulError};

#[stable(feature = "cstring_from_vec_with_nul", since = "1.58.0")]
pub use alloc::ffi::c_str::FromVecWithNulError;

#[stable(feature = "cstring_into", since = "1.7.0")]
pub use alloc::ffi::c_str::IntoStringError;
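The new `std::ffi::c_str` module added above (gated behind the unstable `c_str_module` feature, tracking issue #112134) only re-exports items that are already reachable through `std::ffi`. As a hedged, self-contained sketch of how those re-exported types are typically used on stable Rust today, going from a nul-terminated byte buffer to a `&str` and an owned `CString` (the byte string and variable names are just for illustration):

use std::ffi::{CStr, CString};

fn main() {
    // Borrowed view over a nul-terminated buffer (e.g. one handed to us by C code).
    let bytes = b"hello\0";
    let c_str: &CStr = CStr::from_bytes_with_nul(bytes).expect("buffer must end in NUL");

    // UTF-8 validation turns it into a normal Rust string slice.
    let s: &str = c_str.to_str().expect("valid UTF-8");
    assert_eq!(s, "hello");

    // Owned, heap-allocated counterpart; construction fails on interior NUL bytes.
    let owned: CString = CString::new("hello").expect("no interior NUL bytes");
    assert_eq!(owned.as_c_str(), c_str);
}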
@ -161,12 +161,32 @@
|
||||
|
||||
#![stable(feature = "rust1", since = "1.0.0")]
|
||||
|
||||
#[stable(feature = "alloc_c_string", since = "1.64.0")]
|
||||
pub use alloc::ffi::{CString, FromVecWithNulError, IntoStringError, NulError};
|
||||
#[stable(feature = "cstr_from_bytes_until_nul", since = "1.73.0")]
|
||||
pub use core::ffi::FromBytesUntilNulError;
|
||||
#[stable(feature = "core_c_str", since = "1.64.0")]
|
||||
pub use core::ffi::{CStr, FromBytesWithNulError};
|
||||
#[unstable(feature = "c_str_module", issue = "112134")]
|
||||
pub mod c_str;
|
||||
|
||||
#[doc(inline)]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub use self::c_str::{CStr, CString};
|
||||
|
||||
#[doc(no_inline)]
|
||||
#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
|
||||
pub use self::c_str::FromBytesWithNulError;
|
||||
|
||||
#[doc(no_inline)]
|
||||
#[stable(feature = "cstr_from_bytes_until_nul", since = "1.69.0")]
|
||||
pub use self::c_str::FromBytesUntilNulError;
|
||||
|
||||
#[doc(no_inline)]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub use self::c_str::NulError;
|
||||
|
||||
#[doc(no_inline)]
|
||||
#[stable(feature = "cstring_from_vec_with_nul", since = "1.58.0")]
|
||||
pub use self::c_str::FromVecWithNulError;
|
||||
|
||||
#[doc(no_inline)]
|
||||
#[stable(feature = "cstring_into", since = "1.7.0")]
|
||||
pub use self::c_str::IntoStringError;
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[doc(inline)]
|
||||
|
@@ -314,6 +314,7 @@
//
// Library features (core):
// tidy-alphabetical-start
#![feature(c_str_module)]
#![feature(char_internals)]
#![feature(core_intrinsics)]
#![feature(core_io_borrowed_buf)]
@@ -62,7 +62,13 @@ mod imp {
unsafe { getrandom(buf.as_mut_ptr().cast(), buf.len(), libc::GRND_NONBLOCK) }
}

#[cfg(any(target_os = "espidf", target_os = "horizon", target_os = "freebsd", netbsd10))]
#[cfg(any(
target_os = "espidf",
target_os = "horizon",
target_os = "freebsd",
target_os = "dragonfly",
netbsd10
))]
fn getrandom(buf: &mut [u8]) -> libc::ssize_t {
unsafe { libc::getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) }
}
@@ -73,6 +79,7 @@ mod imp {
target_os = "espidf",
target_os = "horizon",
target_os = "freebsd",
target_os = "dragonfly",
netbsd10
)))]
fn getrandom_fill_bytes(_buf: &mut [u8]) -> bool {
@@ -85,6 +92,7 @@ mod imp {
target_os = "espidf",
target_os = "horizon",
target_os = "freebsd",
target_os = "dragonfly",
netbsd10
))]
fn getrandom_fill_bytes(v: &mut [u8]) -> bool {
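The hunks above (from #121942) simply add `target_os = "dragonfly"` to the existing `cfg` lists so that DragonFly BSD takes the `libc::getrandom` path like the other listed targets. As a standalone, hedged sketch of the underlying libc call these branches wrap, written outside std's internal `imp` module and assuming the `libc` crate as a dependency (the helper name `fill_from_getrandom` is made up for the example):

// Illustrative only: fill a buffer from the OS entropy source via libc::getrandom,
// retrying on EINTR. This mirrors the call shape used in the diff above.
fn fill_from_getrandom(buf: &mut [u8]) -> std::io::Result<()> {
    let mut filled = 0;
    while filled < buf.len() {
        let rest = &mut buf[filled..];
        // SAFETY: the pointer/length pair comes from a live mutable slice.
        let ret = unsafe { libc::getrandom(rest.as_mut_ptr().cast(), rest.len(), 0) };
        if ret < 0 {
            let err = std::io::Error::last_os_error();
            if err.raw_os_error() == Some(libc::EINTR) {
                continue; // interrupted by a signal, try again
            }
            return Err(err);
        }
        filled += ret as usize;
    }
    Ok(())
}

fn main() {
    let mut key = [0u8; 32];
    fill_from_getrandom(&mut key).expect("getrandom failed");
    println!("{:02x?}", key);
}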
@ -51,7 +51,7 @@ mod imp {
|
||||
#[cfg(all(target_os = "linux", target_env = "gnu"))]
|
||||
use libc::{mmap64, munmap};
|
||||
use libc::{sigaction, sighandler_t, SA_ONSTACK, SA_SIGINFO, SIGBUS, SIG_DFL};
|
||||
use libc::{sigaltstack, SIGSTKSZ, SS_DISABLE};
|
||||
use libc::{sigaltstack, SS_DISABLE};
|
||||
use libc::{MAP_ANON, MAP_PRIVATE, PROT_NONE, PROT_READ, PROT_WRITE, SIGSEGV};
|
||||
|
||||
use crate::sync::atomic::{AtomicBool, AtomicPtr, Ordering};
|
||||
@ -130,7 +130,7 @@ mod imp {
|
||||
drop_handler(MAIN_ALTSTACK.load(Ordering::Relaxed));
|
||||
}
|
||||
|
||||
unsafe fn get_stackp() -> *mut libc::c_void {
|
||||
unsafe fn get_stack() -> libc::stack_t {
|
||||
// OpenBSD requires this flag for stack mapping
|
||||
// otherwise the said mapping will fail as a no-op on most systems
|
||||
// and has a different meaning on FreeBSD
|
||||
@ -148,20 +148,28 @@ mod imp {
|
||||
target_os = "dragonfly",
|
||||
)))]
|
||||
let flags = MAP_PRIVATE | MAP_ANON;
|
||||
let stackp =
|
||||
mmap64(ptr::null_mut(), SIGSTKSZ + page_size(), PROT_READ | PROT_WRITE, flags, -1, 0);
|
||||
|
||||
let sigstack_size = sigstack_size();
|
||||
let page_size = page_size();
|
||||
|
||||
let stackp = mmap64(
|
||||
ptr::null_mut(),
|
||||
sigstack_size + page_size,
|
||||
PROT_READ | PROT_WRITE,
|
||||
flags,
|
||||
-1,
|
||||
0,
|
||||
);
|
||||
if stackp == MAP_FAILED {
|
||||
panic!("failed to allocate an alternative stack: {}", io::Error::last_os_error());
|
||||
}
|
||||
let guard_result = libc::mprotect(stackp, page_size(), PROT_NONE);
|
||||
let guard_result = libc::mprotect(stackp, page_size, PROT_NONE);
|
||||
if guard_result != 0 {
|
||||
panic!("failed to set up alternative stack guard page: {}", io::Error::last_os_error());
|
||||
}
|
||||
stackp.add(page_size())
|
||||
}
|
||||
let stackp = stackp.add(page_size);
|
||||
|
||||
unsafe fn get_stack() -> libc::stack_t {
|
||||
libc::stack_t { ss_sp: get_stackp(), ss_flags: 0, ss_size: SIGSTKSZ }
|
||||
libc::stack_t { ss_sp: stackp, ss_flags: 0, ss_size: sigstack_size }
|
||||
}
|
||||
|
||||
pub unsafe fn make_handler() -> Handler {
|
||||
@ -182,6 +190,8 @@ mod imp {
|
||||
|
||||
pub unsafe fn drop_handler(data: *mut libc::c_void) {
|
||||
if !data.is_null() {
|
||||
let sigstack_size = sigstack_size();
|
||||
let page_size = page_size();
|
||||
let stack = libc::stack_t {
|
||||
ss_sp: ptr::null_mut(),
|
||||
ss_flags: SS_DISABLE,
|
||||
@ -189,14 +199,32 @@ mod imp {
|
||||
// UNIX2003 which returns ENOMEM when disabling a stack while
|
||||
// passing ss_size smaller than MINSIGSTKSZ. According to POSIX
|
||||
// both ss_sp and ss_size should be ignored in this case.
|
||||
ss_size: SIGSTKSZ,
|
||||
ss_size: sigstack_size,
|
||||
};
|
||||
sigaltstack(&stack, ptr::null_mut());
|
||||
// We know from `get_stackp` that the alternate stack we installed is part of a mapping
|
||||
// that started one page earlier, so walk back a page and unmap from there.
|
||||
munmap(data.sub(page_size()), SIGSTKSZ + page_size());
|
||||
munmap(data.sub(page_size), sigstack_size + page_size);
|
||||
}
|
||||
}
|
||||
|
||||
/// Modern kernels on modern hardware can have dynamic signal stack sizes.
|
||||
#[cfg(any(target_os = "linux", target_os = "android"))]
|
||||
fn sigstack_size() -> usize {
|
||||
// FIXME: reuse const from libc when available?
|
||||
const AT_MINSIGSTKSZ: crate::ffi::c_ulong = 51;
|
||||
let dynamic_sigstksz = unsafe { libc::getauxval(AT_MINSIGSTKSZ) };
|
||||
// If getauxval couldn't find the entry, it returns 0,
|
||||
// so take the higher of the "constant" and auxval.
|
||||
// This transparently supports older kernels which don't provide AT_MINSIGSTKSZ
|
||||
libc::SIGSTKSZ.max(dynamic_sigstksz as _)
|
||||
}
|
||||
|
||||
/// Not all OS support hardware where this is needed.
|
||||
#[cfg(not(any(target_os = "linux", target_os = "android")))]
|
||||
fn sigstack_size() -> usize {
|
||||
libc::SIGSTKSZ
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(any(
|
||||
|
@ -8,12 +8,10 @@ use crate::core::builder::{
|
||||
self, crate_description, Alias, Builder, Kind, RunConfig, ShouldRun, Step,
|
||||
};
|
||||
use crate::core::config::TargetSelection;
|
||||
use crate::utils::cache::Interned;
|
||||
use crate::INTERNER;
|
||||
use crate::{Compiler, Mode, Subcommand};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct Std {
|
||||
pub target: TargetSelection,
|
||||
/// Whether to build only a subset of crates.
|
||||
@ -21,7 +19,7 @@ pub struct Std {
|
||||
/// This shouldn't be used from other steps; see the comment on [`compile::Rustc`].
|
||||
///
|
||||
/// [`compile::Rustc`]: crate::core::build_steps::compile::Rustc
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
}
|
||||
|
||||
/// Returns args for the subcommand itself (not for cargo)
|
||||
@ -89,7 +87,7 @@ fn cargo_subcommand(kind: Kind) -> &'static str {
|
||||
|
||||
impl Std {
|
||||
pub fn new(target: TargetSelection) -> Self {
|
||||
Self { target, crates: INTERNER.intern_list(vec![]) }
|
||||
Self { target, crates: vec![] }
|
||||
}
|
||||
}
|
||||
|
||||
@ -204,7 +202,7 @@ impl Step for Std {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct Rustc {
|
||||
pub target: TargetSelection,
|
||||
/// Whether to build only a subset of crates.
|
||||
@ -212,7 +210,7 @@ pub struct Rustc {
|
||||
/// This shouldn't be used from other steps; see the comment on [`compile::Rustc`].
|
||||
///
|
||||
/// [`compile::Rustc`]: crate::core::build_steps::compile::Rustc
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
}
|
||||
|
||||
impl Rustc {
|
||||
@ -222,7 +220,7 @@ impl Rustc {
|
||||
.into_iter()
|
||||
.map(|krate| krate.name.to_string())
|
||||
.collect();
|
||||
Self { target, crates: INTERNER.intern_list(crates) }
|
||||
Self { target, crates }
|
||||
}
|
||||
}
|
||||
|
||||
@ -305,10 +303,10 @@ impl Step for Rustc {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct CodegenBackend {
|
||||
pub target: TargetSelection,
|
||||
pub backend: Interned<String>,
|
||||
pub backend: &'static str,
|
||||
}
|
||||
|
||||
impl Step for CodegenBackend {
|
||||
@ -321,14 +319,14 @@ impl Step for CodegenBackend {
|
||||
}
|
||||
|
||||
fn make_run(run: RunConfig<'_>) {
|
||||
for &backend in &[INTERNER.intern_str("cranelift"), INTERNER.intern_str("gcc")] {
|
||||
for &backend in &["cranelift", "gcc"] {
|
||||
run.builder.ensure(CodegenBackend { target: run.target, backend });
|
||||
}
|
||||
}
|
||||
|
||||
fn run(self, builder: &Builder<'_>) {
|
||||
// FIXME: remove once https://github.com/rust-lang/rust/issues/112393 is resolved
|
||||
if builder.build.config.vendor && &self.backend == "gcc" {
|
||||
if builder.build.config.vendor && self.backend == "gcc" {
|
||||
println!("Skipping checking of `rustc_codegen_gcc` with vendoring enabled.");
|
||||
return;
|
||||
}
|
||||
@ -552,7 +550,7 @@ fn codegen_backend_stamp(
|
||||
builder: &Builder<'_>,
|
||||
compiler: Compiler,
|
||||
target: TargetSelection,
|
||||
backend: Interned<String>,
|
||||
backend: &str,
|
||||
) -> PathBuf {
|
||||
builder
|
||||
.cargo_out(compiler, Mode::Codegen, target)
|
||||
|
@ -10,7 +10,6 @@ use std::io::{self, ErrorKind};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::core::builder::{crate_description, Builder, RunConfig, ShouldRun, Step};
|
||||
use crate::utils::cache::Interned;
|
||||
use crate::utils::helpers::t;
|
||||
use crate::{Build, Compiler, Mode, Subcommand};
|
||||
|
||||
@ -44,10 +43,10 @@ impl Step for CleanAll {
|
||||
|
||||
macro_rules! clean_crate_tree {
|
||||
( $( $name:ident, $mode:path, $root_crate:literal);+ $(;)? ) => { $(
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct $name {
|
||||
compiler: Compiler,
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
}
|
||||
|
||||
impl Step for $name {
|
||||
|
@ -27,7 +27,6 @@ use crate::core::builder::crate_description;
|
||||
use crate::core::builder::Cargo;
|
||||
use crate::core::builder::{Builder, Kind, PathSet, RunConfig, ShouldRun, Step, TaskPath};
|
||||
use crate::core::config::{DebuginfoLevel, LlvmLibunwind, RustcLto, TargetSelection};
|
||||
use crate::utils::cache::{Interned, INTERNER};
|
||||
use crate::utils::helpers::{
|
||||
exe, get_clang_cl_resource_dir, is_debug_info, is_dylib, output, symlink_dir, t, up_to_date,
|
||||
};
|
||||
@ -35,14 +34,14 @@ use crate::LLVM_TOOLS;
|
||||
use crate::{CLang, Compiler, DependencyType, GitRepo, Mode};
|
||||
use filetime::FileTime;
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Std {
|
||||
pub target: TargetSelection,
|
||||
pub compiler: Compiler,
|
||||
/// Whether to build only a subset of crates in the standard library.
|
||||
///
|
||||
/// This shouldn't be used from other steps; see the comment on [`Rustc`].
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
/// When using download-rustc, we need to use a new build of `std` for running unit tests of Std itself,
|
||||
/// but we need to use the downloaded copy of std for linking to rustdoc. Allow this to be overriden by `builder.ensure` from other steps.
|
||||
force_recompile: bool,
|
||||
@ -559,13 +558,13 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
|
||||
cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)");
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
struct StdLink {
|
||||
pub compiler: Compiler,
|
||||
pub target_compiler: Compiler,
|
||||
pub target: TargetSelection,
|
||||
/// Not actually used; only present to make sure the cache invalidation is correct.
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
/// See [`Std::force_recompile`].
|
||||
force_recompile: bool,
|
||||
}
|
||||
@ -612,7 +611,7 @@ impl Step for StdLink {
|
||||
});
|
||||
let libdir = sysroot.join(lib).join("rustlib").join(target.triple).join("lib");
|
||||
let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host.triple).join("lib");
|
||||
(INTERNER.intern_path(libdir), INTERNER.intern_path(hostdir))
|
||||
(libdir, hostdir)
|
||||
} else {
|
||||
let libdir = builder.sysroot_libdir(target_compiler, target);
|
||||
let hostdir = builder.sysroot_libdir(target_compiler, compiler.host);
|
||||
@ -818,7 +817,7 @@ fn cp_rustc_component_to_ci_sysroot(
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct Rustc {
|
||||
pub target: TargetSelection,
|
||||
pub compiler: Compiler,
|
||||
@ -827,7 +826,7 @@ pub struct Rustc {
|
||||
/// This should only be requested by the user, not used within rustbuild itself.
|
||||
/// Using it within rustbuild can lead to confusing situation where lints are replayed
|
||||
/// in two different steps.
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
}
|
||||
|
||||
impl Rustc {
|
||||
@ -1220,13 +1219,13 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
struct RustcLink {
|
||||
pub compiler: Compiler,
|
||||
pub target_compiler: Compiler,
|
||||
pub target: TargetSelection,
|
||||
/// Not actually used; only present to make sure the cache invalidation is correct.
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
}
|
||||
|
||||
impl RustcLink {
|
||||
@ -1261,11 +1260,11 @@ impl Step for RustcLink {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct CodegenBackend {
|
||||
pub target: TargetSelection,
|
||||
pub compiler: Compiler,
|
||||
pub backend: Interned<String>,
|
||||
pub backend: String,
|
||||
}
|
||||
|
||||
fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
|
||||
@ -1284,7 +1283,7 @@ pub(crate) const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_";
|
||||
fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
|
||||
if path.path.to_str().unwrap().contains(CODEGEN_BACKEND_PREFIX) {
|
||||
let mut needs_codegen_backend_config = true;
|
||||
for &backend in run.builder.config.codegen_backends(run.target) {
|
||||
for backend in run.builder.config.codegen_backends(run.target) {
|
||||
if path
|
||||
.path
|
||||
.to_str()
|
||||
@ -1321,7 +1320,7 @@ impl Step for CodegenBackend {
|
||||
return;
|
||||
}
|
||||
|
||||
for &backend in run.builder.config.codegen_backends(run.target) {
|
||||
for backend in run.builder.config.codegen_backends(run.target) {
|
||||
if backend == "llvm" {
|
||||
continue; // Already built as part of rustc
|
||||
}
|
||||
@ -1329,7 +1328,7 @@ impl Step for CodegenBackend {
|
||||
run.builder.ensure(CodegenBackend {
|
||||
target: run.target,
|
||||
compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()),
|
||||
backend,
|
||||
backend: backend.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -1394,7 +1393,7 @@ impl Step for CodegenBackend {
|
||||
f.display()
|
||||
);
|
||||
}
|
||||
let stamp = codegen_backend_stamp(builder, compiler, target, backend);
|
||||
let stamp = codegen_backend_stamp(builder, compiler, target, &backend);
|
||||
let codegen_backend = codegen_backend.to_str().unwrap();
|
||||
t!(fs::write(stamp, codegen_backend));
|
||||
}
|
||||
@ -1433,7 +1432,7 @@ fn copy_codegen_backends_to_sysroot(
|
||||
continue; // Already built as part of rustc
|
||||
}
|
||||
|
||||
let stamp = codegen_backend_stamp(builder, compiler, target, *backend);
|
||||
let stamp = codegen_backend_stamp(builder, compiler, target, backend);
|
||||
let dylib = t!(fs::read_to_string(&stamp));
|
||||
let file = Path::new(&dylib);
|
||||
let filename = file.file_name().unwrap().to_str().unwrap();
|
||||
@ -1470,7 +1469,7 @@ fn codegen_backend_stamp(
|
||||
builder: &Builder<'_>,
|
||||
compiler: Compiler,
|
||||
target: TargetSelection,
|
||||
backend: Interned<String>,
|
||||
backend: &str,
|
||||
) -> PathBuf {
|
||||
builder
|
||||
.cargo_out(compiler, Mode::Codegen, target)
|
||||
@ -1508,7 +1507,7 @@ impl Sysroot {
|
||||
}
|
||||
|
||||
impl Step for Sysroot {
|
||||
type Output = Interned<PathBuf>;
|
||||
type Output = PathBuf;
|
||||
|
||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||
run.never()
|
||||
@ -1520,7 +1519,7 @@ impl Step for Sysroot {
|
||||
/// That is, the sysroot for the stage0 compiler is not what the compiler
|
||||
/// thinks it is by default, but it's the same as the default for stages
|
||||
/// 1-3.
|
||||
fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
|
||||
fn run(self, builder: &Builder<'_>) -> PathBuf {
|
||||
let compiler = self.compiler;
|
||||
let host_dir = builder.out.join(compiler.host.triple);
|
||||
|
||||
@ -1652,7 +1651,7 @@ impl Step for Sysroot {
|
||||
);
|
||||
}
|
||||
|
||||
INTERNER.intern_path(sysroot)
|
||||
sysroot
|
||||
}
|
||||
}
|
||||
|
||||
@ -1735,7 +1734,7 @@ impl Step for Assemble {
|
||||
// to not fail while linking the artifacts.
|
||||
build_compiler.stage = actual_stage;
|
||||
|
||||
for &backend in builder.config.codegen_backends(target_compiler.host) {
|
||||
for backend in builder.config.codegen_backends(target_compiler.host) {
|
||||
if backend == "llvm" {
|
||||
continue; // Already built as part of rustc
|
||||
}
|
||||
@ -1743,7 +1742,7 @@ impl Step for Assemble {
|
||||
builder.ensure(CodegenBackend {
|
||||
compiler: build_compiler,
|
||||
target: target_compiler.host,
|
||||
backend,
|
||||
backend: backend.clone(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -25,7 +25,6 @@ use crate::core::build_steps::llvm;
|
||||
use crate::core::build_steps::tool::{self, Tool};
|
||||
use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
|
||||
use crate::core::config::TargetSelection;
|
||||
use crate::utils::cache::{Interned, INTERNER};
|
||||
use crate::utils::channel;
|
||||
use crate::utils::helpers::{exe, is_dylib, output, t, target_supports_cranelift_backend, timeit};
|
||||
use crate::utils::tarball::{GeneratedTarball, OverlayKind, Tarball};
|
||||
@ -489,8 +488,7 @@ impl Step for Rustc {
|
||||
}
|
||||
|
||||
// Debugger scripts
|
||||
builder
|
||||
.ensure(DebuggerScripts { sysroot: INTERNER.intern_path(image.to_owned()), host });
|
||||
builder.ensure(DebuggerScripts { sysroot: image.to_owned(), host });
|
||||
|
||||
// Misc license info
|
||||
let cp = |file: &str| {
|
||||
@ -504,9 +502,9 @@ impl Step for Rustc {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
|
||||
pub struct DebuggerScripts {
|
||||
pub sysroot: Interned<PathBuf>,
|
||||
pub sysroot: PathBuf,
|
||||
pub host: TargetSelection,
|
||||
}
|
||||
|
||||
@ -1264,10 +1262,10 @@ impl Step for Miri {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
|
||||
#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)]
|
||||
pub struct CodegenBackend {
|
||||
pub compiler: Compiler,
|
||||
pub backend: Interned<String>,
|
||||
pub backend: String,
|
||||
}
|
||||
|
||||
impl Step for CodegenBackend {
|
||||
@ -1280,14 +1278,14 @@ impl Step for CodegenBackend {
|
||||
}
|
||||
|
||||
fn make_run(run: RunConfig<'_>) {
|
||||
for &backend in run.builder.config.codegen_backends(run.target) {
|
||||
for backend in run.builder.config.codegen_backends(run.target) {
|
||||
if backend == "llvm" {
|
||||
continue; // Already built as part of rustc
|
||||
}
|
||||
|
||||
run.builder.ensure(CodegenBackend {
|
||||
compiler: run.builder.compiler(run.builder.top_stage, run.target),
|
||||
backend,
|
||||
backend: backend.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -1304,7 +1302,8 @@ impl Step for CodegenBackend {
|
||||
return None;
|
||||
}
|
||||
|
||||
if !builder.config.codegen_backends(self.compiler.host).contains(&self.backend) {
|
||||
if !builder.config.codegen_backends(self.compiler.host).contains(&self.backend.to_string())
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
@ -1529,7 +1528,7 @@ impl Step for Extended {
|
||||
add_component!("analysis" => Analysis { compiler, target });
|
||||
add_component!("rustc-codegen-cranelift" => CodegenBackend {
|
||||
compiler: builder.compiler(stage, target),
|
||||
backend: INTERNER.intern_str("cranelift"),
|
||||
backend: "cranelift".to_string(),
|
||||
});
|
||||
|
||||
let etc = builder.src.join("src/etc/installer");
|
||||
|
@ -16,7 +16,6 @@ use crate::core::build_steps::tool::{self, prepare_tool_cargo, SourceType, Tool}
|
||||
use crate::core::builder::{self, crate_description};
|
||||
use crate::core::builder::{Alias, Builder, Compiler, Kind, RunConfig, ShouldRun, Step};
|
||||
use crate::core::config::{Config, TargetSelection};
|
||||
use crate::utils::cache::{Interned, INTERNER};
|
||||
use crate::utils::helpers::{dir_is_empty, symlink_dir, t, up_to_date};
|
||||
use crate::Mode;
|
||||
|
||||
@ -59,8 +58,8 @@ macro_rules! book {
|
||||
)?
|
||||
builder.ensure(RustbookSrc {
|
||||
target: self.target,
|
||||
name: INTERNER.intern_str($book_name),
|
||||
src: INTERNER.intern_path(builder.src.join($path)),
|
||||
name: $book_name.to_owned(),
|
||||
src: builder.src.join($path),
|
||||
parent: Some(self),
|
||||
})
|
||||
}
|
||||
@ -108,18 +107,18 @@ impl Step for UnstableBook {
|
||||
builder.ensure(UnstableBookGen { target: self.target });
|
||||
builder.ensure(RustbookSrc {
|
||||
target: self.target,
|
||||
name: INTERNER.intern_str("unstable-book"),
|
||||
src: INTERNER.intern_path(builder.md_doc_out(self.target).join("unstable-book")),
|
||||
name: "unstable-book".to_owned(),
|
||||
src: builder.md_doc_out(self.target).join("unstable-book"),
|
||||
parent: Some(self),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
|
||||
struct RustbookSrc<P: Step> {
|
||||
target: TargetSelection,
|
||||
name: Interned<String>,
|
||||
src: Interned<PathBuf>,
|
||||
name: String,
|
||||
src: PathBuf,
|
||||
parent: Option<P>,
|
||||
}
|
||||
|
||||
@ -141,7 +140,7 @@ impl<P: Step> Step for RustbookSrc<P> {
|
||||
let out = builder.doc_out(target);
|
||||
t!(fs::create_dir_all(&out));
|
||||
|
||||
let out = out.join(name);
|
||||
let out = out.join(&name);
|
||||
let index = out.join("index.html");
|
||||
let rustbook = builder.tool_exe(Tool::Rustbook);
|
||||
let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook);
|
||||
@ -211,8 +210,8 @@ impl Step for TheBook {
|
||||
// build book
|
||||
builder.ensure(RustbookSrc {
|
||||
target,
|
||||
name: INTERNER.intern_str("book"),
|
||||
src: INTERNER.intern_path(absolute_path.clone()),
|
||||
name: "book".to_owned(),
|
||||
src: absolute_path.clone(),
|
||||
parent: Some(self),
|
||||
});
|
||||
|
||||
@ -220,8 +219,8 @@ impl Step for TheBook {
|
||||
for edition in &["first-edition", "second-edition", "2018-edition"] {
|
||||
builder.ensure(RustbookSrc {
|
||||
target,
|
||||
name: INTERNER.intern_string(format!("book/{edition}")),
|
||||
src: INTERNER.intern_path(absolute_path.join(edition)),
|
||||
name: format!("book/{edition}"),
|
||||
src: absolute_path.join(edition),
|
||||
// There should only be one book that is marked as the parent for each target, so
|
||||
// treat the other editions as not having a parent.
|
||||
parent: Option::<Self>::None,
|
||||
@ -526,12 +525,12 @@ impl Step for SharedAssets {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct Std {
|
||||
pub stage: u32,
|
||||
pub target: TargetSelection,
|
||||
pub format: DocumentationFormat,
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
}
|
||||
|
||||
impl Std {
|
||||
@ -546,7 +545,7 @@ impl Std {
|
||||
.into_iter()
|
||||
.map(|krate| krate.name.to_string())
|
||||
.collect();
|
||||
Std { stage, target, format, crates: INTERNER.intern_list(crates) }
|
||||
Std { stage, target, format, crates }
|
||||
}
|
||||
}
|
||||
|
||||
@ -721,11 +720,11 @@ fn doc_std(
|
||||
builder.cp_r(&out_dir, out);
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
|
||||
pub struct Rustc {
|
||||
pub stage: u32,
|
||||
pub target: TargetSelection,
|
||||
crates: Interned<Vec<String>>,
|
||||
crates: Vec<String>,
|
||||
}
|
||||
|
||||
impl Rustc {
|
||||
@ -735,7 +734,7 @@ impl Rustc {
|
||||
.into_iter()
|
||||
.map(|krate| krate.name.to_string())
|
||||
.collect();
|
||||
Self { stage, target, crates: INTERNER.intern_list(crates) }
|
||||
Self { stage, target, crates }
|
||||
}
|
||||
}
|
||||
|
||||
@ -1194,8 +1193,8 @@ impl Step for RustcBook {
|
||||
// Run rustbook/mdbook to generate the HTML pages.
|
||||
builder.ensure(RustbookSrc {
|
||||
target: self.target,
|
||||
name: INTERNER.intern_str("rustc"),
|
||||
src: INTERNER.intern_path(out_base),
|
||||
name: "rustc".to_owned(),
|
||||
src: out_base,
|
||||
parent: Some(self),
|
||||
});
|
||||
}
|
||||
|
@ -13,7 +13,6 @@ use crate::core::builder::{Builder, RunConfig, ShouldRun, Step};
|
||||
use crate::core::config::{Config, TargetSelection};
|
||||
use crate::utils::helpers::t;
|
||||
use crate::utils::tarball::GeneratedTarball;
|
||||
use crate::INTERNER;
|
||||
use crate::{Compiler, Kind};
|
||||
|
||||
#[cfg(target_os = "illumos")]
|
||||
@ -291,7 +290,7 @@ install!((self, builder, _config),
|
||||
RustcCodegenCranelift, alias = "rustc-codegen-cranelift", Self::should_build(_config), only_hosts: true, {
|
||||
if let Some(tarball) = builder.ensure(dist::CodegenBackend {
|
||||
compiler: self.compiler,
|
||||
backend: INTERNER.intern_str("cranelift"),
|
||||
backend: "cranelift".to_string(),
|
||||
}) {
|
||||
install_sh(builder, "rustc-codegen-cranelift", self.compiler.stage, Some(self.target), &tarball);
|
||||
} else {
|
||||
|
@ -26,7 +26,6 @@ use crate::core::builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step};
use crate::core::config::flags::get_completion;
use crate::core::config::flags::Subcommand;
use crate::core::config::TargetSelection;
use crate::utils::cache::{Interned, INTERNER};
use crate::utils::exec::BootstrapCommand;
use crate::utils::helpers::{
self, add_link_lib_path, add_rustdoc_cargo_linker_args, dylib_path, dylib_path_var,
@ -38,9 +37,9 @@ use crate::{envify, CLang, DocTests, GitRepo, Mode};

const ADB_TEST_DIR: &str = "/data/local/tmp/work";

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CrateBootstrap {
path: Interned<PathBuf>,
path: PathBuf,
host: TargetSelection,
}

@ -58,7 +57,7 @@ impl Step for CrateBootstrap {

fn make_run(run: RunConfig<'_>) {
for path in run.paths {
let path = INTERNER.intern_path(path.assert_single_path().path.clone());
let path = path.assert_single_path().path.clone();
run.builder.ensure(CrateBootstrap { host: run.target, path });
}
}
@ -623,7 +622,7 @@ impl Step for Miri {

// miri tests need to know about the stage sysroot
cargo.env("MIRI_SYSROOT", &miri_sysroot);
cargo.env("MIRI_HOST_SYSROOT", sysroot);
cargo.env("MIRI_HOST_SYSROOT", &sysroot);
cargo.env("MIRI", &miri);
if builder.config.locked_deps {
// enforce lockfiles
@ -1646,8 +1645,10 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
}

if suite == "debuginfo" {
builder
.ensure(dist::DebuggerScripts { sysroot: builder.sysroot(compiler), host: target });
builder.ensure(dist::DebuggerScripts {
sysroot: builder.sysroot(compiler).to_path_buf(),
host: target,
});
}

// Also provide `rust_test_helpers` for the host.
@ -2378,7 +2379,7 @@ impl Step for RustcGuide {
pub struct CrateLibrustc {
compiler: Compiler,
target: TargetSelection,
crates: Vec<Interned<String>>,
crates: Vec<String>,
}

impl Step for CrateLibrustc {
@ -2394,8 +2395,11 @@ impl Step for CrateLibrustc {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
let crates =
run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();

builder.ensure(CrateLibrustc { compiler, target: run.target, crates });
}
@ -2418,7 +2422,7 @@ impl Step for CrateLibrustc {
fn run_cargo_test<'a>(
cargo: impl Into<Command>,
libtest_args: &[&str],
crates: &[Interned<String>],
crates: &[String],
primary_crate: &str,
description: impl Into<Option<&'a str>>,
compiler: Compiler,
@ -2449,7 +2453,7 @@ fn run_cargo_test<'a>(
fn prepare_cargo_test(
cargo: impl Into<Command>,
libtest_args: &[&str],
crates: &[Interned<String>],
crates: &[String],
primary_crate: &str,
compiler: Compiler,
target: TargetSelection,
@ -2477,7 +2481,7 @@ fn prepare_cargo_test(
DocTests::No => {
let krate = &builder
.crates
.get(&INTERNER.intern_str(primary_crate))
.get(primary_crate)
.unwrap_or_else(|| panic!("missing crate {primary_crate}"));
if krate.has_lib {
cargo.arg("--lib");
@ -2487,7 +2491,7 @@ fn prepare_cargo_test(
DocTests::Yes => {}
}

for &krate in crates {
for krate in crates {
cargo.arg("-p").arg(krate);
}

@ -2529,7 +2533,7 @@ pub struct Crate {
pub compiler: Compiler,
pub target: TargetSelection,
pub mode: Mode,
pub crates: Vec<Interned<String>>,
pub crates: Vec<String>,
}

impl Step for Crate {
@ -2544,8 +2548,11 @@ impl Step for Crate {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
let crates =
run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();

builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates });
}
@ -2707,7 +2714,7 @@ impl Step for CrateRustdoc {
run_cargo_test(
cargo,
&[],
&[INTERNER.intern_str("rustdoc:0.0.0")],
&["rustdoc:0.0.0".to_string()],
"rustdoc",
"rustdoc",
compiler,
@ -2768,7 +2775,7 @@ impl Step for CrateRustdocJsonTypes {
run_cargo_test(
cargo,
libtest_args,
&[INTERNER.intern_str("rustdoc-json-types")],
&["rustdoc-json-types".to_string()],
"rustdoc-json-types",
"rustdoc-json-types",
compiler,
@ -3194,8 +3201,7 @@ impl Step for CodegenCranelift {
return;
}

if !builder.config.codegen_backends(run.target).contains(&INTERNER.intern_str("cranelift"))
{
if !builder.config.codegen_backends(run.target).contains(&"cranelift".to_owned()) {
builder.info("cranelift not in rust.codegen-backends. skipping");
return;
}
@ -3319,7 +3325,7 @@ impl Step for CodegenGCC {
return;
}

if !builder.config.codegen_backends(run.target).contains(&INTERNER.intern_str("gcc")) {
if !builder.config.codegen_backends(run.target).contains(&"gcc".to_owned()) {
builder.info("gcc not in rust.codegen-backends. skipping");
return;
}
@ -19,7 +19,7 @@ use crate::core::build_steps::{check, clean, compile, dist, doc, install, run, s
use crate::core::config::flags::{Color, Subcommand};
use crate::core::config::{DryRun, SplitDebuginfo, TargetSelection};
use crate::prepare_behaviour_dump_dir;
use crate::utils::cache::{Cache, Interned, INTERNER};
use crate::utils::cache::Cache;
use crate::utils::helpers::{self, add_dylib_path, add_link_lib_path, exe, linker_args};
use crate::utils::helpers::{check_cfg_arg, libdir, linker_flags, output, t, LldThreads};
use crate::EXTRA_CHECK_CFGS;
@ -102,7 +102,7 @@ impl RunConfig<'_> {

/// Return a list of crate names selected by `run.paths`.
#[track_caller]
pub fn cargo_crates_in_set(&self) -> Interned<Vec<String>> {
pub fn cargo_crates_in_set(&self) -> Vec<String> {
let mut crates = Vec::new();
for krate in &self.paths {
let path = krate.assert_single_path();
@ -111,7 +111,7 @@ impl RunConfig<'_> {
};
crates.push(crate_name.to_string());
}
INTERNER.intern_list(crates)
crates
}

/// Given an `alias` selected by the `Step` and the paths passed on the command line,
@ -120,7 +120,7 @@ impl RunConfig<'_> {
/// Normally, people will pass *just* `library` if they pass it.
/// But it's possible (although strange) to pass something like `library std core`.
/// Build all crates anyway, as if they hadn't passed the other args.
pub fn make_run_crates(&self, alias: Alias) -> Interned<Vec<String>> {
pub fn make_run_crates(&self, alias: Alias) -> Vec<String> {
let has_alias =
self.paths.iter().any(|set| set.assert_single_path().path.ends_with(alias.as_str()));
if !has_alias {
@ -133,7 +133,7 @@ impl RunConfig<'_> {
};

let crate_names = crates.into_iter().map(|krate| krate.name.to_string()).collect();
INTERNER.intern_list(crate_names)
crate_names
}
}

@ -1062,26 +1062,26 @@ impl<'a> Builder<'a> {
}
}

pub fn sysroot(&self, compiler: Compiler) -> Interned<PathBuf> {
pub fn sysroot(&self, compiler: Compiler) -> PathBuf {
self.ensure(compile::Sysroot::new(compiler))
}

/// Returns the libdir where the standard library and other artifacts are
/// found for a compiler's sysroot.
pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> Interned<PathBuf> {
pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> PathBuf {
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct Libdir {
compiler: Compiler,
target: TargetSelection,
}
impl Step for Libdir {
type Output = Interned<PathBuf>;
type Output = PathBuf;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.never()
}

fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
fn run(self, builder: &Builder<'_>) -> PathBuf {
let lib = builder.sysroot_libdir_relative(self.compiler);
let sysroot = builder
.sysroot(self.compiler)
@ -1110,7 +1110,7 @@ impl<'a> Builder<'a> {
);
}

INTERNER.intern_path(sysroot)
sysroot
}
}
self.ensure(Libdir { compiler, target })
@ -608,7 +608,7 @@ mod dist {
compiler: Compiler { host, stage: 0 },
target: host,
mode: Mode::Std,
crates: vec![INTERNER.intern_str("std")],
crates: vec!["std".to_owned()],
},]
);
}
@ -264,7 +264,7 @@ pub struct Config {
pub rustc_default_linker: Option<String>,
pub rust_optimize_tests: bool,
pub rust_dist_src: bool,
pub rust_codegen_backends: Vec<Interned<String>>,
pub rust_codegen_backends: Vec<String>,
pub rust_verify_llvm_ir: bool,
pub rust_thin_lto_import_instr_limit: Option<u32>,
pub rust_remap_debuginfo: bool,
@ -458,6 +458,8 @@ impl std::str::FromStr for RustcLto {
}

#[derive(Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
// N.B.: This type is used everywhere, and the entire codebase relies on it being Copy.
// Making !Copy is highly nontrivial!
pub struct TargetSelection {
pub triple: Interned<String>,
file: Option<Interned<String>>,
@ -580,7 +582,7 @@ pub struct Target {
pub wasi_root: Option<PathBuf>,
pub qemu_rootfs: Option<PathBuf>,
pub no_std: bool,
pub codegen_backends: Option<Vec<Interned<String>>>,
pub codegen_backends: Option<Vec<String>>,
}

impl Target {
@ -1163,7 +1165,7 @@ impl Config {
channel: "dev".to_string(),
codegen_tests: true,
rust_dist_src: true,
rust_codegen_backends: vec![INTERNER.intern_str("llvm")],
rust_codegen_backends: vec!["llvm".to_owned()],
deny_warnings: true,
bindir: "bin".into(),
dist_include_mingw_linker: true,
@ -1693,7 +1695,7 @@ impl Config {
}
}

INTERNER.intern_str(s)
s.clone()
}).collect();
}

@ -1880,7 +1882,7 @@ impl Config {
}
}

INTERNER.intern_str(s)
s.clone()
}).collect());
}

@ -2267,7 +2269,7 @@ impl Config {
}

pub fn llvm_enabled(&self, target: TargetSelection) -> bool {
self.codegen_backends(target).contains(&INTERNER.intern_str("llvm"))
self.codegen_backends(target).contains(&"llvm".to_owned())
}

pub fn llvm_libunwind(&self, target: TargetSelection) -> LlvmLibunwind {
@ -2286,14 +2288,14 @@ impl Config {
self.submodules.unwrap_or(rust_info.is_managed_git_subrepository())
}

pub fn codegen_backends(&self, target: TargetSelection) -> &[Interned<String>] {
pub fn codegen_backends(&self, target: TargetSelection) -> &[String] {
self.target_config
.get(&target)
.and_then(|cfg| cfg.codegen_backends.as_deref())
.unwrap_or(&self.rust_codegen_backends)
}

pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<Interned<String>> {
pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<String> {
self.codegen_backends(target).first().cloned()
}
@ -3,7 +3,6 @@ use std::process::Command;

use serde_derive::Deserialize;

use crate::utils::cache::INTERNER;
use crate::utils::helpers::output;
use crate::{t, Build, Crate};

@ -43,19 +42,19 @@ struct Target {
pub fn build(build: &mut Build) {
for package in workspace_members(build) {
if package.source.is_none() {
let name = INTERNER.intern_string(package.name);
let name = package.name;
let mut path = PathBuf::from(package.manifest_path);
path.pop();
let deps = package
.dependencies
.into_iter()
.filter(|dep| dep.source.is_none())
.map(|dep| INTERNER.intern_string(dep.name))
.map(|dep| dep.name)
.collect();
let has_lib = package.targets.iter().any(|t| t.kind.iter().any(|k| k == "lib"));
let krate = Crate { name, deps, path, has_lib };
let krate = Crate { name: name.clone(), deps, path, has_lib };
let relative_path = krate.local_path(build);
build.crates.insert(name, krate);
build.crates.insert(name.clone(), krate);
let existing_path = build.crate_paths.insert(relative_path, name);
assert!(
existing_path.is_none(),
@ -41,7 +41,6 @@ use crate::core::builder::Kind;
use crate::core::config::{flags, LldMode};
use crate::core::config::{DryRun, Target};
use crate::core::config::{LlvmLibunwind, TargetSelection};
use crate::utils::cache::{Interned, INTERNER};
use crate::utils::exec::{BehaviorOnFailure, BootstrapCommand, OutputMode};
use crate::utils::helpers::{self, dir_is_empty, exe, libdir, mtime, output, symlink_dir};

@ -191,8 +190,8 @@ pub struct Build {
ranlib: RefCell<HashMap<TargetSelection, PathBuf>>,
// Miscellaneous
// allow bidirectional lookups: both name -> path and path -> name
crates: HashMap<Interned<String>, Crate>,
crate_paths: HashMap<PathBuf, Interned<String>>,
crates: HashMap<String, Crate>,
crate_paths: HashMap<PathBuf, String>,
is_sudo: bool,
ci_env: CiEnv,
delayed_failures: RefCell<Vec<String>>,
@ -204,8 +203,8 @@ pub struct Build {

#[derive(Debug, Clone)]
struct Crate {
name: Interned<String>,
deps: HashSet<Interned<String>>,
name: String,
deps: HashSet<String>,
path: PathBuf,
has_lib: bool,
}
@ -829,8 +828,8 @@ impl Build {
}

/// Output directory for some generated md crate documentation for a target (temporary)
fn md_doc_out(&self, target: TargetSelection) -> Interned<PathBuf> {
INTERNER.intern_path(self.out.join(&*target.triple).join("md-doc"))
fn md_doc_out(&self, target: TargetSelection) -> PathBuf {
self.out.join(&*target.triple).join("md-doc")
}

/// Returns `true` if this is an external version of LLVM not managed by bootstrap.
@ -1538,7 +1537,7 @@ impl Build {
/// "local" crates (those in the local source tree, not from a registry).
fn in_tree_crates(&self, root: &str, target: Option<TargetSelection>) -> Vec<&Crate> {
let mut ret = Vec::new();
let mut list = vec![INTERNER.intern_str(root)];
let mut list = vec![root.to_owned()];
let mut visited = HashSet::new();
while let Some(krate) = list.pop() {
let krate = self
@ -1564,11 +1563,11 @@ impl Build {
&& (dep != "rustc_codegen_llvm"
|| self.config.hosts.iter().any(|host| self.config.llvm_enabled(*host)))
{
list.push(*dep);
list.push(dep.clone());
}
}
}
ret.sort_unstable_by_key(|krate| krate.name); // reproducible order needed for tests
ret.sort_unstable_by_key(|krate| krate.name.clone()); // reproducible order needed for tests
ret
}
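A minimal, self-contained sketch of the worklist traversal that the `in_tree_crates` hunk above now performs with owned `String`s instead of `Copy` interned handles (hypothetical crate names and dependency map, not the actual bootstrap code):

```rust
use std::collections::{HashMap, HashSet};

// Walk a dependency map from `root`, cloning owned `String` names into the
// worklist, mirroring the `list.push(dep.clone())` change in the diff above.
fn reachable(root: &str, deps: &HashMap<String, Vec<String>>) -> Vec<String> {
    let mut list = vec![root.to_owned()];
    let mut visited = HashSet::new();
    let mut ret = Vec::new();
    while let Some(krate) = list.pop() {
        if !visited.insert(krate.clone()) {
            continue; // already handled this crate
        }
        for dep in deps.get(&krate).into_iter().flatten() {
            list.push(dep.clone());
        }
        ret.push(krate);
    }
    ret.sort_unstable(); // reproducible order, like the sort in the hunk above
    ret
}

fn main() {
    let mut deps = HashMap::new();
    deps.insert("rustc-main".to_owned(), vec!["rustc_driver".to_owned()]);
    deps.insert("rustc_driver".to_owned(), Vec::new());
    assert_eq!(reachable("rustc-main", &deps), vec!["rustc-main", "rustc_driver"]);
}
```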
@ -194,17 +194,6 @@ impl Interner {
pub fn intern_str(&self, s: &str) -> Interned<String> {
self.strs.lock().unwrap().intern_borrow(s)
}
pub fn intern_string(&self, s: String) -> Interned<String> {
self.strs.lock().unwrap().intern(s)
}

pub fn intern_path(&self, s: PathBuf) -> Interned<PathBuf> {
self.paths.lock().unwrap().intern(s)
}

pub fn intern_list(&self, v: Vec<String>) -> Interned<Vec<String>> {
self.lists.lock().unwrap().intern(v)
}
}

pub static INTERNER: Lazy<Interner> = Lazy::new(Interner::default);
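For context on what this part of the rollup removes: bootstrap's `INTERNER` deduplicated strings, paths, and lists behind cheap `Copy` handles, and the hunks above replace those handles with plain owned values. A rough, hypothetical sketch of the trade-off (not the actual `Interner` implementation) is:

```rust
use std::collections::HashMap;
use std::sync::{Mutex, OnceLock};

// Toy interner: every distinct string gets a stable index that is cheap to copy.
// The price is a global table behind a lock, and strings that live forever.
fn intern(s: &str) -> usize {
    static TABLE: OnceLock<Mutex<HashMap<String, usize>>> = OnceLock::new();
    let mut table = TABLE.get_or_init(|| Mutex::new(HashMap::new())).lock().unwrap();
    let next = table.len();
    *table.entry(s.to_owned()).or_insert(next)
}

fn main() {
    // Interned handles compare by index...
    assert_eq!(intern("llvm"), intern("llvm"));

    // ...while the post-diff code simply clones and compares `String`s,
    // e.g. `codegen_backends(target).contains(&"llvm".to_owned())`.
    let backends: Vec<String> = vec!["llvm".to_owned()];
    assert!(backends.contains(&"llvm".to_owned()));
}
```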
@ -31,12 +31,14 @@ if isWindows; then
# Delete these pre-installed tools so we can't accidentally use them, because we are using the
# MSYS2 setup action versions instead.
# Delete pre-installed version of MSYS2
echo "Cleaning up tools in PATH"
rm -r "/c/msys64/"
# Delete Strawberry Perl, which contains a version of mingw
rm -r "/c/Strawberry/"
# Delete these other copies of mingw, I don't even know where they come from.
rm -r "/c/mingw64/"
rm -r "/c/mingw32/"
echo "Finished cleaning up tools in PATH"

if isKnownToBeMingwBuild; then
# Use the mingw version of CMake for mingw builds.
@ -46,11 +48,11 @@ if isWindows; then
# Install mingw-w64-$arch-cmake
pacboy -S --noconfirm cmake:p

# We use Git-for-Windows for MSVC builds, and MSYS2 Git for mingw builds,
# so that both are tested.
# Delete Windows-Git
rm -r "/c/Program Files/Git/"
# Install MSYS2 git
pacman -S --noconfirm git
# It would be nice to use MSYS's git in MinGW builds so that it's tested and known to
# work. But it makes everything extremely slow, so it's commented out for now.
# # Delete Windows-Git
# rm -r "/c/Program Files/Git/"
# # Install MSYS2 git
# pacman -S --noconfirm git
fi
fi
@ -165,7 +165,6 @@ target | std | notes
`i686-unknown-freebsd` | ✓ | 32-bit FreeBSD [^x86_32-floats-return-ABI]
`i686-unknown-linux-musl` | ✓ | 32-bit Linux with musl 1.2.3 [^x86_32-floats-return-ABI]
[`i686-unknown-uefi`](platform-support/unknown-uefi.md) | * | 32-bit UEFI
[`loongarch64-unknown-linux-musl`](platform-support/loongarch-linux.md) | ? | | LoongArch64 Linux (LP64D ABI) with musl 1.2.3
[`loongarch64-unknown-none`](platform-support/loongarch-none.md) | * | | LoongArch64 Bare-metal (LP64D ABI)
[`loongarch64-unknown-none-softfloat`](platform-support/loongarch-none.md) | * | | LoongArch64 Bare-metal (LP64S ABI)
[`nvptx64-nvidia-cuda`](platform-support/nvptx64-nvidia-cuda.md) | * | --emit=asm generates PTX code that [runs on NVIDIA GPUs]
@ -303,6 +302,7 @@ target | std | host | notes
`i686-uwp-windows-msvc` | ? | | [^x86_32-floats-return-ABI]
[`i686-win7-windows-msvc`](platform-support/win7-windows-msvc.md) | ✓ | | 32-bit Windows 7 support [^x86_32-floats-return-ABI]
`i686-wrs-vxworks` | ? | | [^x86_32-floats-return-ABI]
[`loongarch64-unknown-linux-musl`](platform-support/loongarch-linux.md) | ? | | LoongArch64 Linux (LP64D ABI) with musl 1.2.3
[`m68k-unknown-linux-gnu`](platform-support/m68k-unknown-linux-gnu.md) | ? | | Motorola 680x0 Linux
`mips-unknown-linux-gnu` | ✓ | ✓ | MIPS Linux (kernel 4.4, glibc 2.23)
`mips-unknown-linux-musl` | ✓ | | MIPS Linux with musl 1.2.3
@ -1,6 +1,7 @@
//! Checks that all Fluent files have messages in alphabetical order

use crate::walk::{filter_dirs, walk};
use std::collections::HashMap;
use std::{fs::OpenOptions, io::Write, path::Path};

use regex::Regex;
@ -13,11 +14,27 @@ fn filter_fluent(path: &Path) -> bool {
if let Some(ext) = path.extension() { ext.to_str() != Some("ftl") } else { true }
}

fn check_alphabetic(filename: &str, fluent: &str, bad: &mut bool) {
fn check_alphabetic(
filename: &str,
fluent: &str,
bad: &mut bool,
all_defined_msgs: &mut HashMap<String, String>,
) {
let mut matches = MESSAGE.captures_iter(fluent).peekable();
while let Some(m) = matches.next() {
let name = m.get(1).unwrap();
if let Some(defined_filename) = all_defined_msgs.get(name.as_str()) {
tidy_error!(
bad,
"{filename}: message `{}` is already defined in {}",
name.as_str(),
defined_filename,
);
}

all_defined_msgs.insert(name.as_str().to_owned(), filename.to_owned());

if let Some(next) = matches.peek() {
let name = m.get(1).unwrap();
let next = next.get(1).unwrap();
if name.as_str() > next.as_str() {
tidy_error!(
@ -34,13 +51,29 @@ run `./x.py test tidy --bless` to sort the file correctly",
}
}

fn sort_messages(fluent: &str) -> String {
fn sort_messages(
filename: &str,
fluent: &str,
bad: &mut bool,
all_defined_msgs: &mut HashMap<String, String>,
) -> String {
let mut chunks = vec![];
let mut cur = String::new();
for line in fluent.lines() {
if MESSAGE.is_match(line) {
if let Some(name) = MESSAGE.find(line) {
if let Some(defined_filename) = all_defined_msgs.get(name.as_str()) {
tidy_error!(
bad,
"{filename}: message `{}` is already defined in {}",
name.as_str(),
defined_filename,
);
}

all_defined_msgs.insert(name.as_str().to_owned(), filename.to_owned());
chunks.push(std::mem::take(&mut cur));
}

cur += line;
cur.push('\n');
}
@ -53,20 +86,33 @@ fn sort_messages(fluent: &str) -> String {
}

pub fn check(path: &Path, bless: bool, bad: &mut bool) {
let mut all_defined_msgs = HashMap::new();
walk(
path,
|path, is_dir| filter_dirs(path) || (!is_dir && filter_fluent(path)),
&mut |ent, contents| {
if bless {
let sorted = sort_messages(contents);
let sorted = sort_messages(
ent.path().to_str().unwrap(),
contents,
bad,
&mut all_defined_msgs,
);
if sorted != contents {
let mut f =
OpenOptions::new().write(true).truncate(true).open(ent.path()).unwrap();
f.write(sorted.as_bytes()).unwrap();
}
} else {
check_alphabetic(ent.path().to_str().unwrap(), contents, bad);
check_alphabetic(
ent.path().to_str().unwrap(),
contents,
bad,
&mut all_defined_msgs,
);
}
},
);

crate::fluent_used::check(path, all_defined_msgs, bad);
}
src/tools/tidy/src/fluent_used.rs (new file, 43 lines)
@ -0,0 +1,43 @@
//! Checks that all Fluent messages appear at least twice

use crate::walk::{filter_dirs, walk};
use regex::Regex;
use std::collections::HashMap;
use std::path::Path;

lazy_static::lazy_static! {
static ref WORD: Regex = Regex::new(r"\w+").unwrap();
}

fn filter_used_messages(
contents: &str,
msgs_not_appeared_yet: &mut HashMap<String, String>,
msgs_appeared_only_once: &mut HashMap<String, String>,
) {
// We don't just check that messages never appear in Rust files, because
// messages can also be referenced from other Fluent messages, so instead we
// check that no message appears only once across all Rust and Fluent files.
let mut matches = WORD.find_iter(contents);
while let Some(name) = matches.next() {
if let Some((name, filename)) = msgs_not_appeared_yet.remove_entry(name.as_str()) {
// if one msg appears for the first time,
// remove it from `msgs_not_appeared_yet` and insert it into `msgs_appeared_only_once`.
msgs_appeared_only_once.insert(name, filename);
} else {
// if one msg appears for the second time,
// remove it from `msgs_appeared_only_once`.
msgs_appeared_only_once.remove(name.as_str());
}
}
}

pub fn check(path: &Path, mut all_defined_msgs: HashMap<String, String>, bad: &mut bool) {
let mut msgs_appear_only_once = HashMap::new();
walk(path, |path, _| filter_dirs(path), &mut |_, contents| {
filter_used_messages(contents, &mut all_defined_msgs, &mut msgs_appear_only_once);
});

for (name, filename) in msgs_appear_only_once {
tidy_error!(bad, "{filename}: message `{}` is not used", name,);
}
}
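The new `fluent_used.rs` check above keeps two maps: messages not seen yet and messages seen exactly once (their definition). A small, self-contained sketch of that bookkeeping, using hypothetical message names and a plain word splitter instead of the `\w+` regex:

```rust
use std::collections::HashMap;

// Move names between "not seen yet" and "seen exactly once"; whatever is still
// in the second map after scanning everything was never referenced outside its
// own definition and would be reported by the tidy check.
fn scan(contents: &str, not_seen: &mut HashMap<String, String>, seen_once: &mut HashMap<String, String>) {
    let words = contents
        .split(|c: char| !c.is_alphanumeric() && c != '_')
        .filter(|w| !w.is_empty());
    for word in words {
        if let Some((name, file)) = not_seen.remove_entry(word) {
            seen_once.insert(name, file); // first appearance (the definition)
        } else {
            seen_once.remove(word); // second appearance (an actual use)
        }
    }
}

fn main() {
    // Hypothetical slugs defined in a hypothetical parser.ftl.
    let mut not_seen = HashMap::new();
    not_seen.insert("parser_used_msg".to_owned(), "parser.ftl".to_owned());
    not_seen.insert("parser_unused_msg".to_owned(), "parser.ftl".to_owned());
    let mut seen_once = HashMap::new();

    scan("parser_used_msg = hello\nparser_unused_msg = bye", &mut not_seen, &mut seen_once);
    scan("errors.emit(fluent::parser_used_msg);", &mut not_seen, &mut seen_once);

    assert_eq!(seen_once.len(), 1);
    assert!(seen_once.contains_key("parser_unused_msg"));
}
```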
@ -65,6 +65,7 @@ pub mod ext_tool_checks;
pub mod extdeps;
pub mod features;
pub mod fluent_alphabetical;
mod fluent_used;
pub(crate) mod iter_header;
pub mod mir_opt_tests;
pub mod pal;
@ -4,9 +4,9 @@
// can correctly merge the debug info if it merges the inlined code (e.g., for merging of tail
// calls to panic.

// CHECK: tail call void @_ZN4core6option13unwrap_failed17h{{([0-9a-z]{16})}}E
// CHECK: tail call void @{{[A-Za-z0-9_]+4core6option13unwrap_failed}}
// CHECK-SAME: !dbg ![[#first_dbg:]]
// CHECK: tail call void @_ZN4core6option13unwrap_failed17h{{([0-9a-z]{16})}}E
// CHECK: tail call void @{{[A-Za-z0-9_]+4core6option13unwrap_failed}}
// CHECK-SAME: !dbg ![[#second_dbg:]]

// CHECK-DAG: ![[#func_dbg:]] = distinct !DISubprogram(name: "unwrap<i32>"
@ -1,20 +1,20 @@
#!/bin/sh
#!/bin/bash
set -exuo pipefail

function build {
CRATE=enclave

mkdir -p $WORK_DIR
pushd $WORK_DIR
rm -rf $CRATE
cp -a $TEST_DIR/enclave .
mkdir -p "${WORK_DIR}"
pushd "${WORK_DIR}"
rm -rf "${CRATE}"
cp -a "${TEST_DIR}"/enclave .
pushd $CRATE
echo ${WORK_DIR}
echo "${WORK_DIR}"
# HACK(eddyb) sets `RUSTC_BOOTSTRAP=1` so Cargo can accept nightly features.
# These come from the top-level Rust workspace, that this crate is not a
# member of, but Cargo tries to load the workspace `Cargo.toml` anyway.
env RUSTC_BOOTSTRAP=1
cargo -v run --target $TARGET
cargo -v run --target "${TARGET}"
popd
popd
}
@ -22,17 +22,18 @@ function build {
function check {
local func_re="$1"
local checks="${TEST_DIR}/$2"
local asm=$(mktemp)
local asm=""
local objdump="${LLVM_BIN_DIR}/llvm-objdump"
local filecheck="${LLVM_BIN_DIR}/FileCheck"
local enclave=${WORK_DIR}/enclave/target/x86_64-fortanix-unknown-sgx/debug/enclave

func="$(${objdump} --syms --demangle ${enclave} | \
asm=$(mktemp)
func="$(${objdump} --syms --demangle "${enclave}" | \
grep --only-matching -E "[[:blank:]]+${func_re}\$" | \
sed -e 's/^[[:space:]]*//' )"
${objdump} --disassemble-symbols="${func}" --demangle \
${enclave} > ${asm}
${filecheck} --input-file ${asm} ${checks}
"${enclave}" > "${asm}"
${filecheck} --input-file "${asm}" "${checks}"

if [ "${func_re}" != "rust_plus_one_global_asm" ] &&
[ "${func_re}" != "cmake_plus_one_c_global_asm" ] &&
@ -41,7 +42,7 @@ function check {
# of `shlq $0x0, (%rsp); lfence; retq` are used instead.
# https://www.intel.com/content/www/us/en/developer/articles/technical/
# software-security-guidance/technical-documentation/load-value-injection.html
${filecheck} --implicit-check-not ret --input-file ${asm} ${checks}
${filecheck} --implicit-check-not ret --input-file "${asm}" "${checks}"
fi
}
tests/ui/single-use-lifetime/dedup.rs (new file, 9 lines)
@ -0,0 +1,9 @@
// Check that `unused_lifetimes` lint doesn't duplicate a "parameter is never used" error.
// Fixed in <https://github.com/rust-lang/rust/pull/96833>.
// Issue: <https://github.com/rust-lang/rust/issues/72587>.

#![warn(unused_lifetimes)]
struct Foo<'a>;
//~^ ERROR parameter `'a` is never used

fn main() {}
tests/ui/single-use-lifetime/dedup.stderr (new file, 11 lines)
@ -0,0 +1,11 @@
error[E0392]: lifetime parameter `'a` is never used
  --> $DIR/dedup.rs:6:12
   |
LL | struct Foo<'a>;
   |            ^^ unused lifetime parameter
   |
   = help: consider removing `'a`, referring to it in a field, or using a marker such as `PhantomData`

error: aborting due to 1 previous error

For more information about this error, try `rustc --explain E0392`.
@ -665,6 +665,7 @@ compiler-team-contributors = [
"@Nadrieril",
"@nnethercote",
"@fmease",
"@fee1-dead",
]
compiler = [
"compiler-team",