Auto merge of #131645 - matthiaskrgr:rollup-lgr1ezu, r=matthiaskrgr

Rollup of 6 pull requests

Successful merges:

 - #131086 (Update unicode-width to 0.2.0)
 - #131585 (compiletest: Remove the one thing that was checking a directive's `original_line`)
 - #131614 (Error on trying to use revisions in `run-make` tests)
 - #131638 (compiletest: Move debugger setup code out of `lib.rs`)
 - #131641 (switch unicode-data bitsets back to 'static')
 - #131642 (Special case error message for a `build-fail` test that failed check build)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-10-13 11:41:12 +00:00
commit 2aa26d8a72
15 changed files with 374 additions and 342 deletions


@@ -96,7 +96,7 @@ version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccaf7e9dfbb6ab22c82e473cd1a8a7bd313c19a5b7e40970f3d89ef5a5c9e81e"
dependencies = [
"unicode-width",
"unicode-width 0.1.14",
"yansi-term",
]
@@ -107,7 +107,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24e35ed54e5ea7997c14ed4c70ba043478db1112e98263b3b035907aa197d991"
dependencies = [
"anstyle",
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -168,7 +168,7 @@ dependencies = [
"anstyle",
"anstyle-lossy",
"html-escape",
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -737,7 +737,7 @@ dependencies = [
"encode_unicode",
"lazy_static",
"libc",
"unicode-width",
"unicode-width 0.1.14",
"windows-sys 0.52.0",
]
@@ -1425,7 +1425,7 @@ version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5"
dependencies = [
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -1788,7 +1788,7 @@ dependencies = [
"instant",
"number_prefix",
"portable-atomic",
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -2590,7 +2590,7 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2ad9b889f1b12e0b9ee24db044b5129150d5eada288edc800f789928dc8c0e3"
dependencies = [
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -2601,7 +2601,7 @@ checksum = "9ad43c07024ef767f9160710b3a6773976194758c7919b17e63b863db0bdf7fb"
dependencies = [
"bytecount",
"fnv",
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -4188,7 +4188,7 @@ dependencies = [
"thin-vec",
"tracing",
"unicode-normalization",
"unicode-width",
"unicode-width 0.2.0",
]
[[package]]
@@ -4422,7 +4422,7 @@ dependencies = [
"sha1",
"sha2",
"tracing",
"unicode-width",
"unicode-width 0.2.0",
]
[[package]]
@@ -4687,7 +4687,7 @@ dependencies = [
"tracing-subscriber",
"unicode-properties",
"unicode-segmentation",
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -5097,7 +5097,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c998b0c8b921495196a48aabaf1901ff28be0760136e31604f7967b0792050e"
dependencies = [
"papergrid",
"unicode-width",
"unicode-width 0.1.14",
]
[[package]]
@@ -5646,6 +5646,12 @@ version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]]
name = "unicode-width"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd"
[[package]]
name = "unicode-xid"
version = "0.2.6"
@@ -5894,7 +5900,7 @@ dependencies = [
"bumpalo",
"leb128",
"memchr",
"unicode-width",
"unicode-width 0.1.14",
"wasm-encoder 0.219.0",
]


@@ -20,7 +20,7 @@ rustc_span = { path = "../rustc_span" }
thin-vec = "0.2.12"
tracing = "0.1"
unicode-normalization = "0.1.11"
unicode-width = "0.1.4"
unicode-width = "0.2.0"
# tidy-alphabetical-end
[dev-dependencies]


@@ -19,5 +19,5 @@ scoped-tls = "1.0"
sha1 = "0.10.0"
sha2 = "0.10.1"
tracing = "0.1"
unicode-width = "0.1.4"
unicode-width = "0.2.0"
# tidy-alphabetical-end
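
Both `rustc_ast` and `rustc_span` move to `unicode-width` 0.2.0 while other workspace dependencies stay on 0.1.14, which is why the lockfile above now lists the two versions side by side. As a hedged illustration (not part of this diff), the basic API these crates rely on has the same shape in both releases:

```rust
// Minimal sketch of the unicode-width API, assuming the basic `width`
// methods are unchanged between 0.1.x and 0.2.0 (the releases mainly
// differ in the underlying width tables).
use unicode_width::{UnicodeWidthChar, UnicodeWidthStr};

fn main() {
    // Display width in terminal columns, not bytes or chars.
    assert_eq!("hello".width(), 5);
    assert_eq!('世'.width(), Some(2)); // East Asian wide character
}
```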


@@ -178,6 +178,7 @@
// tidy-alphabetical-start
#![cfg_attr(bootstrap, feature(const_mut_refs))]
#![cfg_attr(bootstrap, feature(const_refs_to_cell))]
#![cfg_attr(bootstrap, feature(const_refs_to_static))]
#![feature(abi_unadjusted)]
#![feature(adt_const_params)]
#![feature(allow_internal_unsafe)]
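
The added line enables the `const_refs_to_static` gate only when building with the bootstrap compiler, where that feature is not yet stable. As a rough, hypothetical illustration of what the gate governs (not code from this diff): it lets `const` initializers read from or borrow an immutable `static`.

```rust
// Hypothetical sketch: on compilers where `const_refs_to_static` is not yet
// stable this requires #![feature(const_refs_to_static)]; afterwards it
// compiles as-is.
static TABLE: [u8; 4] = [1, 2, 3, 4];

const FIRST: u8 = TABLE[0];          // read a static in a const context
const TABLE_REF: &[u8; 4] = &TABLE;  // borrow a static in a const context

fn main() {
    assert_eq!(FIRST, 1);
    assert_eq!(TABLE_REF.len(), 4);
}
```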


@@ -331,14 +331,14 @@ pub mod grapheme_extend {
#[rustfmt::skip]
pub mod lowercase {
const BITSET_CHUNKS_MAP: &'static [u8; 123] = &[
static BITSET_CHUNKS_MAP: [u8; 123] = [
14, 17, 0, 0, 9, 0, 0, 12, 13, 10, 0, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 4, 1, 0, 15, 0, 8, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 0,
3, 18, 0, 7,
];
const BITSET_INDEX_CHUNKS: &'static [[u8; 16]; 20] = &[
static BITSET_INDEX_CHUNKS: [[u8; 16]; 20] = [
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 61, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 14, 56, 0],
@@ -360,7 +360,7 @@ pub mod lowercase {
[16, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[65, 41, 55, 12, 77, 63, 18, 1, 7, 64, 76, 20, 73, 74, 4, 45],
];
const BITSET_CANONICAL: &'static [u64; 56] = &[
static BITSET_CANONICAL: [u64; 56] = [
0b0000000000000000000000000000000000000000000000000000000000000000,
0b1111111111111111110000000000000000000000000011111111111111111111,
0b1010101010101010101010101010101010101010101010101010100000000010,
@@ -418,7 +418,7 @@ pub mod lowercase {
0b1110011001010001001011010010101001001110001001000011000100101001,
0b1110101111000000000000000000000000001111111111111111111111111100,
];
const BITSET_MAPPING: &'static [(u8, u8); 22] = &[
static BITSET_MAPPING: [(u8, u8); 22] = [
(0, 64), (1, 188), (1, 186), (1, 183), (1, 176), (1, 109), (1, 124), (1, 126), (1, 66),
(1, 70), (1, 77), (2, 146), (2, 144), (2, 83), (3, 93), (3, 147), (3, 133), (4, 12), (4, 6),
(5, 187), (6, 78), (7, 132),
@@ -471,14 +471,14 @@ pub mod n {
#[rustfmt::skip]
pub mod uppercase {
const BITSET_CHUNKS_MAP: &'static [u8; 125] = &[
static BITSET_CHUNKS_MAP: [u8; 125] = [
12, 15, 6, 6, 0, 6, 6, 2, 4, 11, 6, 16, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 8, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 6, 6, 5, 6, 14, 6, 10, 6, 6, 1, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
6, 7, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 13, 6, 6,
6, 6, 9, 6, 3,
];
const BITSET_INDEX_CHUNKS: &'static [[u8; 16]; 17] = &[
static BITSET_INDEX_CHUNKS: [[u8; 16]; 17] = [
[44, 44, 5, 35, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 5, 1],
[44, 44, 5, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44],
[44, 44, 40, 44, 44, 44, 44, 44, 17, 17, 63, 17, 43, 29, 24, 23],
@@ -497,7 +497,7 @@ pub mod uppercase {
[58, 19, 2, 18, 10, 48, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44],
[58, 38, 17, 27, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44],
];
const BITSET_CANONICAL: &'static [u64; 44] = &[
static BITSET_CANONICAL: [u64; 44] = [
0b0000011111111111111111111111111000000000000000000000000000000000,
0b0000000000111111111111111111111111111111111111111111111111111111,
0b0101010101010101010101010101010101010101010101010101010000000001,
@@ -543,7 +543,7 @@ pub mod uppercase {
0b1111011111111111000000000000000000000000000000000000000000000000,
0b1111111100000000111111110000000000111111000000001111111100000000,
];
const BITSET_MAPPING: &'static [(u8, u8); 25] = &[
static BITSET_MAPPING: [(u8, u8); 25] = [
(0, 187), (0, 177), (0, 171), (0, 167), (0, 164), (0, 32), (0, 47), (0, 51), (0, 121),
(0, 117), (0, 109), (1, 150), (1, 148), (1, 142), (1, 134), (1, 131), (1, 64), (2, 164),
(2, 146), (2, 20), (3, 146), (3, 140), (3, 134), (4, 178), (4, 171),


@@ -0,0 +1,272 @@
use std::env;
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;
use std::sync::Arc;
use crate::common::{Config, Debugger};
pub(crate) fn configure_cdb(config: &Config) -> Option<Arc<Config>> {
config.cdb.as_ref()?;
Some(Arc::new(Config { debugger: Some(Debugger::Cdb), ..config.clone() }))
}
pub(crate) fn configure_gdb(config: &Config) -> Option<Arc<Config>> {
config.gdb_version?;
if config.matches_env("msvc") {
return None;
}
if config.remote_test_client.is_some() && !config.target.contains("android") {
println!(
"WARNING: debuginfo tests are not available when \
testing with remote"
);
return None;
}
if config.target.contains("android") {
println!(
"{} debug-info test uses tcp 5039 port.\
please reserve it",
config.target
);
// android debug-info test uses remote debugger so, we test 1 thread
// at once as they're all sharing the same TCP port to communicate
// over.
//
// we should figure out how to lift this restriction! (run them all
// on different ports allocated dynamically).
env::set_var("RUST_TEST_THREADS", "1");
}
Some(Arc::new(Config { debugger: Some(Debugger::Gdb), ..config.clone() }))
}
pub(crate) fn configure_lldb(config: &Config) -> Option<Arc<Config>> {
config.lldb_python_dir.as_ref()?;
if let Some(350) = config.lldb_version {
println!(
"WARNING: The used version of LLDB (350) has a \
known issue that breaks debuginfo tests. See \
issue #32520 for more information. Skipping all \
LLDB-based tests!",
);
return None;
}
Some(Arc::new(Config { debugger: Some(Debugger::Lldb), ..config.clone() }))
}
/// Returns `true` if the given target is an Android target for the
/// purposes of GDB testing.
pub(crate) fn is_android_gdb_target(target: &str) -> bool {
matches!(
&target[..],
"arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android"
)
}
/// Returns `true` if the given target is a MSVC target for the purposes of CDB testing.
fn is_pc_windows_msvc_target(target: &str) -> bool {
target.ends_with("-pc-windows-msvc")
}
fn find_cdb(target: &str) -> Option<OsString> {
if !(cfg!(windows) && is_pc_windows_msvc_target(target)) {
return None;
}
let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?;
let cdb_arch = if cfg!(target_arch = "x86") {
"x86"
} else if cfg!(target_arch = "x86_64") {
"x64"
} else if cfg!(target_arch = "aarch64") {
"arm64"
} else if cfg!(target_arch = "arm") {
"arm"
} else {
return None; // No compatible CDB.exe in the Windows 10 SDK
};
let mut path = PathBuf::new();
path.push(pf86);
path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too?
path.push(cdb_arch);
path.push(r"cdb.exe");
if !path.exists() {
return None;
}
Some(path.into_os_string())
}
/// Returns Path to CDB
pub(crate) fn analyze_cdb(
cdb: Option<String>,
target: &str,
) -> (Option<OsString>, Option<[u16; 4]>) {
let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target));
let mut version = None;
if let Some(cdb) = cdb.as_ref() {
if let Ok(output) = Command::new(cdb).arg("/version").output() {
if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
version = extract_cdb_version(&first_line);
}
}
}
(cdb, version)
}
pub(crate) fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> {
// Example full_version_line: "cdb version 10.0.18362.1"
let version = full_version_line.rsplit(' ').next()?;
let mut components = version.split('.');
let major: u16 = components.next().unwrap().parse().unwrap();
let minor: u16 = components.next().unwrap().parse().unwrap();
let patch: u16 = components.next().unwrap_or("0").parse().unwrap();
let build: u16 = components.next().unwrap_or("0").parse().unwrap();
Some([major, minor, patch, build])
}
/// Returns (Path to GDB, GDB Version)
pub(crate) fn analyze_gdb(
gdb: Option<String>,
target: &str,
android_cross_path: &PathBuf,
) -> (Option<String>, Option<u32>) {
#[cfg(not(windows))]
const GDB_FALLBACK: &str = "gdb";
#[cfg(windows)]
const GDB_FALLBACK: &str = "gdb.exe";
let fallback_gdb = || {
if is_android_gdb_target(target) {
let mut gdb_path = match android_cross_path.to_str() {
Some(x) => x.to_owned(),
None => panic!("cannot find android cross path"),
};
gdb_path.push_str("/bin/gdb");
gdb_path
} else {
GDB_FALLBACK.to_owned()
}
};
let gdb = match gdb {
None => fallback_gdb(),
Some(ref s) if s.is_empty() => fallback_gdb(), // may be empty if configure found no gdb
Some(ref s) => s.to_owned(),
};
let mut version_line = None;
if let Ok(output) = Command::new(&gdb).arg("--version").output() {
if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
version_line = Some(first_line.to_string());
}
}
let version = match version_line {
Some(line) => extract_gdb_version(&line),
None => return (None, None),
};
(Some(gdb), version)
}
pub(crate) fn extract_gdb_version(full_version_line: &str) -> Option<u32> {
let full_version_line = full_version_line.trim();
// GDB versions look like this: "major.minor.patch?.yyyymmdd?", with both
// of the ? sections being optional
// We will parse up to 3 digits for each component, ignoring the date
// We skip text in parentheses. This avoids accidentally parsing
// the openSUSE version, which looks like:
// GNU gdb (GDB; openSUSE Leap 15.0) 8.1
// This particular form is documented in the GNU coding standards:
// https://www.gnu.org/prep/standards/html_node/_002d_002dversion.html#g_t_002d_002dversion
let unbracketed_part = full_version_line.split('[').next().unwrap();
let mut splits = unbracketed_part.trim_end().rsplit(' ');
let version_string = splits.next().unwrap();
let mut splits = version_string.split('.');
let major = splits.next().unwrap();
let minor = splits.next().unwrap();
let patch = splits.next();
let major: u32 = major.parse().unwrap();
let (minor, patch): (u32, u32) = match minor.find(not_a_digit) {
None => {
let minor = minor.parse().unwrap();
let patch: u32 = match patch {
Some(patch) => match patch.find(not_a_digit) {
None => patch.parse().unwrap(),
Some(idx) if idx > 3 => 0,
Some(idx) => patch[..idx].parse().unwrap(),
},
None => 0,
};
(minor, patch)
}
// There is no patch version after minor-date (e.g. "4-2012").
Some(idx) => {
let minor = minor[..idx].parse().unwrap();
(minor, 0)
}
};
Some(((major * 1000) + minor) * 1000 + patch)
}
/// Returns LLDB version
pub(crate) fn extract_lldb_version(full_version_line: &str) -> Option<u32> {
// Extract the major LLDB version from the given version string.
// LLDB version strings are different for Apple and non-Apple platforms.
// The Apple variant looks like this:
//
// LLDB-179.5 (older versions)
// lldb-300.2.51 (new versions)
//
// We are only interested in the major version number, so this function
// will return `Some(179)` and `Some(300)` respectively.
//
// Upstream versions look like:
// lldb version 6.0.1
//
// There doesn't seem to be a way to correlate the Apple version
// with the upstream version, and since the tests were originally
// written against Apple versions, we make a fake Apple version by
// multiplying the first number by 100. This is a hack.
let full_version_line = full_version_line.trim();
if let Some(apple_ver) =
full_version_line.strip_prefix("LLDB-").or_else(|| full_version_line.strip_prefix("lldb-"))
{
if let Some(idx) = apple_ver.find(not_a_digit) {
let version: u32 = apple_ver[..idx].parse().unwrap();
return Some(version);
}
} else if let Some(lldb_ver) = full_version_line.strip_prefix("lldb version ") {
if let Some(idx) = lldb_ver.find(not_a_digit) {
let version: u32 = lldb_ver[..idx].parse().ok()?;
return Some(version * 100);
}
}
None
}
fn not_a_digit(c: char) -> bool {
!c.is_ascii_digit()
}
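
A small usage sketch of the helpers gathered in this new module; the call site is hypothetical, but the expected values follow directly from the parsing code above:

```rust
// Hypothetical usage of the version-extraction helpers defined above.
fn demo() {
    // GDB: "major.minor.patch" is packed as ((major * 1000) + minor) * 1000 + patch.
    assert_eq!(extract_gdb_version("GNU gdb (GDB) 8.2.1"), Some(8_002_001));
    // LLDB: Apple-style versions are returned as-is, upstream ones are scaled by 100.
    assert_eq!(extract_lldb_version("lldb-300.2.51"), Some(300));
    assert_eq!(extract_lldb_version("lldb version 6.0.1"), Some(600));
    // CDB: all four components are kept.
    assert_eq!(extract_cdb_version("cdb version 10.0.18362.1"), Some([10, 0, 18362, 1]));
}
```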


@@ -9,11 +9,11 @@ use std::process::Command;
use tracing::*;
use crate::common::{Config, Debugger, FailMode, Mode, PassMode};
use crate::debuggers::{extract_cdb_version, extract_gdb_version};
use crate::header::auxiliary::{AuxProps, parse_and_update_aux};
use crate::header::cfg::{MatchOutcome, parse_cfg_name_directive};
use crate::header::needs::CachedNeedsConditions;
use crate::util::static_regex;
use crate::{extract_cdb_version, extract_gdb_version};
pub(crate) mod auxiliary;
mod cfg;
@@ -57,9 +57,9 @@ impl EarlyProps {
&mut poisoned,
testfile,
rdr,
&mut |HeaderLine { directive: ln, .. }| {
&mut |DirectiveLine { directive: ln, .. }| {
parse_and_update_aux(config, ln, &mut props.aux);
config.parse_and_update_revisions(ln, &mut props.revisions);
config.parse_and_update_revisions(testfile, ln, &mut props.revisions);
},
);
@@ -344,7 +344,7 @@ impl TestProps {
&mut poisoned,
testfile,
file,
&mut |HeaderLine { header_revision, directive: ln, .. }| {
&mut |DirectiveLine { header_revision, directive: ln, .. }| {
if header_revision.is_some() && header_revision != test_revision {
return;
}
@@ -391,7 +391,7 @@ impl TestProps {
has_edition = true;
}
config.parse_and_update_revisions(ln, &mut self.revisions);
config.parse_and_update_revisions(testfile, ln, &mut self.revisions);
if let Some(flags) = config.parse_name_value_directive(ln, RUN_FLAGS) {
self.run_flags.extend(split_flags(&flags));
@@ -680,7 +680,7 @@ impl TestProps {
/// Extract an `(Option<line_revision>, directive)` directive from a line if comment is present.
///
/// See [`HeaderLine`] for a diagram.
/// See [`DirectiveLine`] for a diagram.
pub fn line_directive<'line>(
comment: &str,
original_line: &'line str,
@@ -738,17 +738,13 @@ const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] =
/// ```text
/// //@ compile-flags: -O
/// ^^^^^^^^^^^^^^^^^ directive
/// ^^^^^^^^^^^^^^^^^^^^^ original_line
///
/// //@ [foo] compile-flags: -O
/// ^^^ header_revision
/// ^^^^^^^^^^^^^^^^^ directive
/// ^^^^^^^^^^^^^^^^^^^^^^^^^^^ original_line
/// ```
struct HeaderLine<'ln> {
struct DirectiveLine<'ln> {
line_number: usize,
/// Raw line from the test file, including comment prefix and any revision.
original_line: &'ln str,
/// Some header directives start with a revision name in square brackets
/// (e.g. `[foo]`), and only apply to that revision of the test.
/// If present, this field contains the revision name (e.g. `foo`).
@@ -803,7 +799,7 @@ fn iter_header(
poisoned: &mut bool,
testfile: &Path,
rdr: impl Read,
it: &mut dyn FnMut(HeaderLine<'_>),
it: &mut dyn FnMut(DirectiveLine<'_>),
) {
if testfile.is_dir() {
return;
@@ -824,7 +820,7 @@ ];
];
// Process the extra implied directives, with a dummy line number of 0.
for directive in extra_directives {
it(HeaderLine { line_number: 0, original_line: "", header_revision: None, directive });
it(DirectiveLine { line_number: 0, header_revision: None, directive });
}
}
@@ -841,11 +837,6 @@ fn iter_header(
if rdr.read_line(&mut ln).unwrap() == 0 {
break;
}
// Assume that any directives will be found before the first
// module or function. This doesn't seem to be an optimization
// with a warm page cache. Maybe with a cold one.
let original_line = &ln;
let ln = ln.trim();
// Assume that any directives will be found before the first module or function. This
@@ -897,9 +888,8 @@ }
}
}
it(HeaderLine {
it(DirectiveLine {
line_number,
original_line,
header_revision,
directive: non_revisioned_directive_line,
});
@@ -907,12 +897,21 @@ }
}
impl Config {
fn parse_and_update_revisions(&self, line: &str, existing: &mut Vec<String>) {
fn parse_and_update_revisions(&self, testfile: &Path, line: &str, existing: &mut Vec<String>) {
if let Some(raw) = self.parse_name_value_directive(line, "revisions") {
if self.mode == Mode::RunMake {
panic!("`run-make` tests do not support revisions: {}", testfile.display());
}
let mut duplicates: HashSet<_> = existing.iter().cloned().collect();
for revision in raw.split_whitespace().map(|r| r.to_string()) {
if !duplicates.insert(revision.clone()) {
panic!("Duplicate revision: `{}` in line `{}`", revision, raw);
panic!(
"duplicate revision: `{}` in line `{}`: {}",
revision,
raw,
testfile.display()
);
}
existing.push(revision);
}
@@ -1286,13 +1285,14 @@ pub fn make_test_description<R: Read>(
let mut local_poisoned = false;
// Scan through the test file to handle `ignore-*`, `only-*`, and `needs-*` directives.
iter_header(
config.mode,
&config.suite,
&mut local_poisoned,
path,
src,
&mut |HeaderLine { header_revision, original_line, directive: ln, line_number }| {
&mut |DirectiveLine { header_revision, directive: ln, line_number }| {
if header_revision.is_some() && header_revision != test_revision {
return;
}
@@ -1317,17 +1317,7 @@ pub fn make_test_description<R: Read>(
};
}
if let Some((_, post)) = original_line.trim_start().split_once("//") {
let post = post.trim_start();
if post.starts_with("ignore-tidy") {
// Not handled by compiletest.
} else {
decision!(cfg::handle_ignore(config, ln));
}
} else {
decision!(cfg::handle_ignore(config, ln));
}
decision!(cfg::handle_ignore(config, ln));
decision!(cfg::handle_only(config, ln));
decision!(needs::handle_needs(&cache.needs, config, ln));
decision!(ignore_llvm(config, ln));
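
For context, `parse_and_update_revisions` consumes headers like the ones below; a hedged sketch of a revisioned UI test (illustrative only). The new checks panic if such a directive appears in a `run-make` test or if a revision name is repeated:

```rust
//@ revisions: opt noopt
//@ [opt] compile-flags: -O
//@ [noopt] compile-flags: -Copt-level=0
//@ check-pass

// The test is compiled once per revision; a `[rev]` prefix scopes a
// directive to that revision only.
fn main() {}
```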


@@ -423,7 +423,7 @@ fn test_extract_version_range() {
}
#[test]
#[should_panic(expected = "Duplicate revision: `rpass1` in line ` rpass1 rpass1`")]
#[should_panic(expected = "duplicate revision: `rpass1` in line ` rpass1 rpass1`")]
fn test_duplicate_revisions() {
let config: Config = cfg().build();
parse_rs(&config, "//@ revisions: rpass1 rpass1");


@@ -10,6 +10,7 @@ mod tests;
pub mod common;
pub mod compute_diff;
mod debuggers;
pub mod errors;
pub mod header;
mod json;
@@ -36,8 +37,8 @@ use walkdir::WalkDir;
use self::header::{EarlyProps, make_test_description};
use crate::common::{
Config, Debugger, Mode, PassMode, TestPaths, UI_EXTENSIONS, expected_output_path,
output_base_dir, output_relative_path,
Config, Mode, PassMode, TestPaths, UI_EXTENSIONS, expected_output_path, output_base_dir,
output_relative_path,
};
use crate::header::HeadersCache;
use crate::util::logv;
@@ -204,9 +205,11 @@ pub fn parse_config(args: Vec<String>) -> Config {
let target = opt_str2(matches.opt_str("target"));
let android_cross_path = opt_path(matches, "android-cross-path");
let (cdb, cdb_version) = analyze_cdb(matches.opt_str("cdb"), &target);
let (gdb, gdb_version) = analyze_gdb(matches.opt_str("gdb"), &target, &android_cross_path);
let lldb_version = matches.opt_str("lldb-version").as_deref().and_then(extract_lldb_version);
let (cdb, cdb_version) = debuggers::analyze_cdb(matches.opt_str("cdb"), &target);
let (gdb, gdb_version) =
debuggers::analyze_gdb(matches.opt_str("gdb"), &target, &android_cross_path);
let lldb_version =
matches.opt_str("lldb-version").as_deref().and_then(debuggers::extract_lldb_version);
let color = match matches.opt_str("color").as_deref() {
Some("auto") | None => ColorConfig::AutoColor,
Some("always") => ColorConfig::AlwaysColor,
@@ -443,9 +446,9 @@ pub fn run_tests(config: Arc<Config>) {
if let Mode::DebugInfo = config.mode {
// Debugging emscripten code doesn't make sense today
if !config.target.contains("emscripten") {
configs.extend(configure_cdb(&config));
configs.extend(configure_gdb(&config));
configs.extend(configure_lldb(&config));
configs.extend(debuggers::configure_cdb(&config));
configs.extend(debuggers::configure_gdb(&config));
configs.extend(debuggers::configure_lldb(&config));
}
} else {
configs.push(config.clone());
@@ -498,62 +501,6 @@ }
}
}
fn configure_cdb(config: &Config) -> Option<Arc<Config>> {
config.cdb.as_ref()?;
Some(Arc::new(Config { debugger: Some(Debugger::Cdb), ..config.clone() }))
}
fn configure_gdb(config: &Config) -> Option<Arc<Config>> {
config.gdb_version?;
if config.matches_env("msvc") {
return None;
}
if config.remote_test_client.is_some() && !config.target.contains("android") {
println!(
"WARNING: debuginfo tests are not available when \
testing with remote"
);
return None;
}
if config.target.contains("android") {
println!(
"{} debug-info test uses tcp 5039 port.\
please reserve it",
config.target
);
// android debug-info test uses remote debugger so, we test 1 thread
// at once as they're all sharing the same TCP port to communicate
// over.
//
// we should figure out how to lift this restriction! (run them all
// on different ports allocated dynamically).
env::set_var("RUST_TEST_THREADS", "1");
}
Some(Arc::new(Config { debugger: Some(Debugger::Gdb), ..config.clone() }))
}
fn configure_lldb(config: &Config) -> Option<Arc<Config>> {
config.lldb_python_dir.as_ref()?;
if let Some(350) = config.lldb_version {
println!(
"WARNING: The used version of LLDB (350) has a \
known issue that breaks debuginfo tests. See \
issue #32520 for more information. Skipping all \
LLDB-based tests!",
);
return None;
}
Some(Arc::new(Config { debugger: Some(Debugger::Lldb), ..config.clone() }))
}
pub fn test_opts(config: &Config) -> test::TestOpts {
if env::var("RUST_TEST_NOCAPTURE").is_ok() {
eprintln!(
@@ -981,212 +928,6 @@ fn make_test_closure(
}))
}
/// Returns `true` if the given target is an Android target for the
/// purposes of GDB testing.
fn is_android_gdb_target(target: &str) -> bool {
matches!(
&target[..],
"arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android"
)
}
/// Returns `true` if the given target is a MSVC target for the purposes of CDB testing.
fn is_pc_windows_msvc_target(target: &str) -> bool {
target.ends_with("-pc-windows-msvc")
}
fn find_cdb(target: &str) -> Option<OsString> {
if !(cfg!(windows) && is_pc_windows_msvc_target(target)) {
return None;
}
let pf86 = env::var_os("ProgramFiles(x86)").or_else(|| env::var_os("ProgramFiles"))?;
let cdb_arch = if cfg!(target_arch = "x86") {
"x86"
} else if cfg!(target_arch = "x86_64") {
"x64"
} else if cfg!(target_arch = "aarch64") {
"arm64"
} else if cfg!(target_arch = "arm") {
"arm"
} else {
return None; // No compatible CDB.exe in the Windows 10 SDK
};
let mut path = PathBuf::new();
path.push(pf86);
path.push(r"Windows Kits\10\Debuggers"); // We could check 8.1 etc. too?
path.push(cdb_arch);
path.push(r"cdb.exe");
if !path.exists() {
return None;
}
Some(path.into_os_string())
}
/// Returns Path to CDB
fn analyze_cdb(cdb: Option<String>, target: &str) -> (Option<OsString>, Option<[u16; 4]>) {
let cdb = cdb.map(OsString::from).or_else(|| find_cdb(target));
let mut version = None;
if let Some(cdb) = cdb.as_ref() {
if let Ok(output) = Command::new(cdb).arg("/version").output() {
if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
version = extract_cdb_version(&first_line);
}
}
}
(cdb, version)
}
fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> {
// Example full_version_line: "cdb version 10.0.18362.1"
let version = full_version_line.rsplit(' ').next()?;
let mut components = version.split('.');
let major: u16 = components.next().unwrap().parse().unwrap();
let minor: u16 = components.next().unwrap().parse().unwrap();
let patch: u16 = components.next().unwrap_or("0").parse().unwrap();
let build: u16 = components.next().unwrap_or("0").parse().unwrap();
Some([major, minor, patch, build])
}
/// Returns (Path to GDB, GDB Version)
fn analyze_gdb(
gdb: Option<String>,
target: &str,
android_cross_path: &PathBuf,
) -> (Option<String>, Option<u32>) {
#[cfg(not(windows))]
const GDB_FALLBACK: &str = "gdb";
#[cfg(windows)]
const GDB_FALLBACK: &str = "gdb.exe";
let fallback_gdb = || {
if is_android_gdb_target(target) {
let mut gdb_path = match android_cross_path.to_str() {
Some(x) => x.to_owned(),
None => panic!("cannot find android cross path"),
};
gdb_path.push_str("/bin/gdb");
gdb_path
} else {
GDB_FALLBACK.to_owned()
}
};
let gdb = match gdb {
None => fallback_gdb(),
Some(ref s) if s.is_empty() => fallback_gdb(), // may be empty if configure found no gdb
Some(ref s) => s.to_owned(),
};
let mut version_line = None;
if let Ok(output) = Command::new(&gdb).arg("--version").output() {
if let Some(first_line) = String::from_utf8_lossy(&output.stdout).lines().next() {
version_line = Some(first_line.to_string());
}
}
let version = match version_line {
Some(line) => extract_gdb_version(&line),
None => return (None, None),
};
(Some(gdb), version)
}
fn extract_gdb_version(full_version_line: &str) -> Option<u32> {
let full_version_line = full_version_line.trim();
// GDB versions look like this: "major.minor.patch?.yyyymmdd?", with both
// of the ? sections being optional
// We will parse up to 3 digits for each component, ignoring the date
// We skip text in parentheses. This avoids accidentally parsing
// the openSUSE version, which looks like:
// GNU gdb (GDB; openSUSE Leap 15.0) 8.1
// This particular form is documented in the GNU coding standards:
// https://www.gnu.org/prep/standards/html_node/_002d_002dversion.html#g_t_002d_002dversion
let unbracketed_part = full_version_line.split('[').next().unwrap();
let mut splits = unbracketed_part.trim_end().rsplit(' ');
let version_string = splits.next().unwrap();
let mut splits = version_string.split('.');
let major = splits.next().unwrap();
let minor = splits.next().unwrap();
let patch = splits.next();
let major: u32 = major.parse().unwrap();
let (minor, patch): (u32, u32) = match minor.find(not_a_digit) {
None => {
let minor = minor.parse().unwrap();
let patch: u32 = match patch {
Some(patch) => match patch.find(not_a_digit) {
None => patch.parse().unwrap(),
Some(idx) if idx > 3 => 0,
Some(idx) => patch[..idx].parse().unwrap(),
},
None => 0,
};
(minor, patch)
}
// There is no patch version after minor-date (e.g. "4-2012").
Some(idx) => {
let minor = minor[..idx].parse().unwrap();
(minor, 0)
}
};
Some(((major * 1000) + minor) * 1000 + patch)
}
/// Returns LLDB version
fn extract_lldb_version(full_version_line: &str) -> Option<u32> {
// Extract the major LLDB version from the given version string.
// LLDB version strings are different for Apple and non-Apple platforms.
// The Apple variant looks like this:
//
// LLDB-179.5 (older versions)
// lldb-300.2.51 (new versions)
//
// We are only interested in the major version number, so this function
// will return `Some(179)` and `Some(300)` respectively.
//
// Upstream versions look like:
// lldb version 6.0.1
//
// There doesn't seem to be a way to correlate the Apple version
// with the upstream version, and since the tests were originally
// written against Apple versions, we make a fake Apple version by
// multiplying the first number by 100. This is a hack.
let full_version_line = full_version_line.trim();
if let Some(apple_ver) =
full_version_line.strip_prefix("LLDB-").or_else(|| full_version_line.strip_prefix("lldb-"))
{
if let Some(idx) = apple_ver.find(not_a_digit) {
let version: u32 = apple_ver[..idx].parse().unwrap();
return Some(version);
}
} else if let Some(lldb_ver) = full_version_line.strip_prefix("lldb version ") {
if let Some(idx) = lldb_ver.find(not_a_digit) {
let version: u32 = lldb_ver[..idx].parse().ok()?;
return Some(version * 100);
}
}
None
}
fn not_a_digit(c: char) -> bool {
!c.is_ascii_digit()
}
fn check_overlapping_tests(found_paths: &HashSet<PathBuf>) {
let mut collisions = Vec::new();
for path in found_paths {


@@ -318,10 +318,29 @@ impl<'test> TestCx<'test> {
}
}
fn check_if_test_should_compile(&self, proc_res: &ProcRes, pm: Option<PassMode>) {
if self.should_compile_successfully(pm) {
fn check_if_test_should_compile(
&self,
fail_mode: Option<FailMode>,
pass_mode: Option<PassMode>,
proc_res: &ProcRes,
) {
if self.should_compile_successfully(pass_mode) {
if !proc_res.status.success() {
self.fatal_proc_rec("test compilation failed although it shouldn't!", proc_res);
match (fail_mode, pass_mode) {
(Some(FailMode::Build), Some(PassMode::Check)) => {
// A `build-fail` test needs to `check-pass`.
self.fatal_proc_rec(
"`build-fail` test is required to pass check build, but check build failed",
proc_res,
);
}
_ => {
self.fatal_proc_rec(
"test compilation failed although it shouldn't!",
proc_res,
);
}
}
}
} else {
if proc_res.status.success() {


@@ -9,8 +9,8 @@ use tracing::debug;
use super::debugger::DebuggerCommands;
use super::{Debugger, Emit, ProcRes, TestCx, Truncated, WillExecute};
use crate::common::Config;
use crate::debuggers::{extract_gdb_version, is_android_gdb_target};
use crate::util::logv;
use crate::{extract_gdb_version, is_android_gdb_target};
impl TestCx<'_> {
pub(super) fn run_debuginfo_test(&self) {


@@ -1,4 +1,4 @@
use super::{TestCx, WillExecute};
use super::{FailMode, TestCx, WillExecute};
use crate::errors;
impl TestCx<'_> {
@@ -96,7 +96,7 @@ impl TestCx<'_> {
fn run_cfail_test(&self) {
let pm = self.pass_mode();
let proc_res = self.compile_test(WillExecute::No, self.should_emit_metadata(pm));
self.check_if_test_should_compile(&proc_res, pm);
self.check_if_test_should_compile(Some(FailMode::Build), pm, &proc_res);
self.check_no_compiler_crash(&proc_res, self.props.should_ice);
let output_to_check = self.get_output(&proc_res);
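
`check_if_test_should_compile` now also receives the test's fail mode so it can special-case a `build-fail` test that already fails the check build. For context, a hypothetical `build-fail` test (not taken from this PR) is expected to pass the check-only build and fail only during the full build:

```rust
//@ build-fail

// The associated const in a generic impl is only evaluated when it is
// instantiated during monomorphization, so a check build succeeds and the
// error surfaces only in the full (codegen) build.
struct Fail<T>(T);

impl<T> Fail<T> {
    const C: () = panic!("post-mono failure");
}

fn trigger<T>() {
    let _ = Fail::<T>::C; // evaluated per instantiation of `trigger`
}

fn main() {
    trigger::<u32>();
}
```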


@@ -18,14 +18,14 @@ impl TestCx<'_> {
let pm = Some(PassMode::Check);
let proc_res =
self.compile_test_general(WillExecute::No, Emit::Metadata, pm, Vec::new());
self.check_if_test_should_compile(&proc_res, pm);
self.check_if_test_should_compile(self.props.fail_mode, pm, &proc_res);
}
let pm = self.pass_mode();
let should_run = self.should_run(pm);
let emit_metadata = self.should_emit_metadata(pm);
let proc_res = self.compile_test(should_run, emit_metadata);
self.check_if_test_should_compile(&proc_res, pm);
self.check_if_test_should_compile(self.props.fail_mode, pm, &proc_res);
if matches!(proc_res.truncated, Truncated::Yes)
&& !self.props.dont_check_compiler_stdout
&& !self.props.dont_check_compiler_stderr


@@ -1,5 +1,8 @@
use super::header::extract_llvm_version;
use super::*;
use std::ffi::OsString;
use crate::debuggers::{extract_gdb_version, extract_lldb_version};
use crate::header::extract_llvm_version;
use crate::is_test;
#[test]
fn test_extract_gdb_version() {


@@ -77,7 +77,7 @@ impl RawEmitter {
writeln!(
&mut self.file,
"const BITSET_CANONICAL: &'static [u64; {}] = &[{}];",
"static BITSET_CANONICAL: [u64; {}] = [{}];",
canonicalized.canonical_words.len(),
fmt_list(canonicalized.canonical_words.iter().map(|v| Bits(*v))),
)
@@ -85,7 +85,7 @@ self.bytes_used += 8 * canonicalized.canonical_words.len();
self.bytes_used += 8 * canonicalized.canonical_words.len();
writeln!(
&mut self.file,
"const BITSET_MAPPING: &'static [(u8, u8); {}] = &[{}];",
"static BITSET_MAPPING: [(u8, u8); {}] = [{}];",
canonicalized.canonicalized_words.len(),
fmt_list(&canonicalized.canonicalized_words),
)
@@ -139,7 +139,7 @@ impl RawEmitter {
writeln!(
&mut self.file,
"const BITSET_CHUNKS_MAP: &'static [u8; {}] = &[{}];",
"static BITSET_CHUNKS_MAP: [u8; {}] = [{}];",
chunk_indices.len(),
fmt_list(&chunk_indices),
)
@@ -147,7 +147,7 @@ self.bytes_used += chunk_indices.len();
self.bytes_used += chunk_indices.len();
writeln!(
&mut self.file,
"const BITSET_INDEX_CHUNKS: &'static [[u8; {}]; {}] = &[{}];",
"static BITSET_INDEX_CHUNKS: [[u8; {}]; {}] = [{}];",
chunk_length,
chunks.len(),
fmt_list(chunks.iter()),