Reformat Python code with ruff
parent 0b737a163e
commit 536516f949
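The commit is mechanical: the repository's Python sources were re-run through ruff's Black-compatible formatter (roughly `ruff format` with default settings — an assumption, since the exact invocation is not recorded in the commit itself). As a minimal before/after sketch of the conventions applied throughout the hunks below, built from fragments taken from this diff:

# Before reformatting: single quotes, missing spaces around "=", one space
# before inline comments, lowercase hex digits, over-long calls on one line.
NUM_CODEPOINTS=0x110000
uppers = [] # (upper, # items in lowers)
lower = i & 0xff

# After formatting (Black style; assumed defaults: 88-column limit, double
# quotes): spaced "=", two spaces before inline comments, uppercase hex
# digits, and calls past 88 columns broken to one argument per line with a
# trailing comma.
NUM_CODEPOINTS = 0x110000
uppers = []  # (upper, # items in lowers)
lower = i & 0xFF
o(
    "debug",
    "rust.debug",
    "enables debugging environment; does not affect optimization of bootstrapped code",
)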
@@ -9,7 +9,8 @@ import csv
 import os
 import subprocess

-NUM_CODEPOINTS=0x110000
+NUM_CODEPOINTS = 0x110000

+
 def to_ranges(iter):
     current = None
@@ -23,11 +24,15 @@ def to_ranges(iter):
     if current is not None:
         yield tuple(current)

+
 def get_escaped(codepoints):
     for c in codepoints:
-        if (c.class_ or "Cn") in "Cc Cf Cs Co Cn Zl Zp Zs".split() and c.value != ord(' '):
+        if (c.class_ or "Cn") in "Cc Cf Cs Co Cn Zl Zp Zs".split() and c.value != ord(
+            " "
+        ):
             yield c.value

+
 def get_file(f):
     try:
         return open(os.path.basename(f))
@@ -35,7 +40,9 @@ def get_file(f):
         subprocess.run(["curl", "-O", f], check=True)
         return open(os.path.basename(f))

-Codepoint = namedtuple('Codepoint', 'value class_')
+
+Codepoint = namedtuple("Codepoint", "value class_")
+

 def get_codepoints(f):
     r = csv.reader(f, delimiter=";")
@@ -66,13 +73,14 @@ def get_codepoints(f):
     for c in range(prev_codepoint + 1, NUM_CODEPOINTS):
         yield Codepoint(c, None)

+
 def compress_singletons(singletons):
-    uppers = [] # (upper, # items in lowers)
+    uppers = []  # (upper, # items in lowers)
     lowers = []

     for i in singletons:
         upper = i >> 8
-        lower = i & 0xff
+        lower = i & 0xFF
         if len(uppers) == 0 or uppers[-1][0] != upper:
             uppers.append((upper, 1))
         else:
@@ -82,10 +90,11 @@ def compress_singletons(singletons):

     return uppers, lowers

+
 def compress_normal(normal):
     # lengths 0x00..0x7f are encoded as 00, 01, ..., 7e, 7f
     # lengths 0x80..0x7fff are encoded as 80 80, 80 81, ..., ff fe, ff ff
-    compressed = [] # [truelen, (truelenaux), falselen, (falselenaux)]
+    compressed = []  # [truelen, (truelenaux), falselen, (falselenaux)]

     prev_start = 0
     for start, count in normal:
@@ -95,21 +104,22 @@ def compress_normal(normal):

         assert truelen < 0x8000 and falselen < 0x8000
         entry = []
-        if truelen > 0x7f:
+        if truelen > 0x7F:
             entry.append(0x80 | (truelen >> 8))
-            entry.append(truelen & 0xff)
+            entry.append(truelen & 0xFF)
         else:
-            entry.append(truelen & 0x7f)
-        if falselen > 0x7f:
+            entry.append(truelen & 0x7F)
+        if falselen > 0x7F:
             entry.append(0x80 | (falselen >> 8))
-            entry.append(falselen & 0xff)
+            entry.append(falselen & 0xFF)
         else:
-            entry.append(falselen & 0x7f)
+            entry.append(falselen & 0x7F)

         compressed.append(entry)

     return compressed

+
 def print_singletons(uppers, lowers, uppersname, lowersname):
     print("#[rustfmt::skip]")
     print("const {}: &[(u8, u8)] = &[".format(uppersname))
@@ -119,9 +129,12 @@ def print_singletons(uppers, lowers, uppersname, lowersname):
     print("#[rustfmt::skip]")
     print("const {}: &[u8] = &[".format(lowersname))
     for i in range(0, len(lowers), 8):
-        print(" {}".format(" ".join("{:#04x},".format(x) for x in lowers[i:i+8])))
+        print(
+            " {}".format(" ".join("{:#04x},".format(x) for x in lowers[i : i + 8]))
+        )
     print("];")

+
 def print_normal(normal, normalname):
     print("#[rustfmt::skip]")
     print("const {}: &[u8] = &[".format(normalname))
@@ -129,12 +142,13 @@ def print_normal(normal, normalname):
         print(" {}".format(" ".join("{:#04x},".format(i) for i in v)))
     print("];")

+
 def main():
     file = get_file("https://www.unicode.org/Public/UNIDATA/UnicodeData.txt")

     codepoints = get_codepoints(file)

-    CUTOFF=0x10000
+    CUTOFF = 0x10000
     singletons0 = []
     singletons1 = []
     normal0 = []
@@ -234,10 +248,11 @@ pub(crate) fn is_printable(x: char) -> bool {
     }\
 """)
     print()
-    print_singletons(singletons0u, singletons0l, 'SINGLETONS0U', 'SINGLETONS0L')
-    print_singletons(singletons1u, singletons1l, 'SINGLETONS1U', 'SINGLETONS1L')
-    print_normal(normal0, 'NORMAL0')
-    print_normal(normal1, 'NORMAL1')
+    print_singletons(singletons0u, singletons0l, "SINGLETONS0U", "SINGLETONS0L")
+    print_singletons(singletons1u, singletons1l, "SINGLETONS1U", "SINGLETONS1L")
+    print_normal(normal0, "NORMAL0")
+    print_normal(normal1, "NORMAL1")

+
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
File diff suppressed because it is too large
@@ -16,8 +16,9 @@ from shutil import rmtree
 bootstrap_dir = os.path.dirname(os.path.abspath(__file__))
 # For the import below, have Python search in src/bootstrap first.
 sys.path.insert(0, bootstrap_dir)
-import bootstrap # noqa: E402
-import configure # noqa: E402
+import bootstrap  # noqa: E402
+import configure  # noqa: E402

+
 def serialize_and_parse(configure_args, bootstrap_args=None):
     from io import StringIO
@@ -32,15 +33,20 @@ def serialize_and_parse(configure_args, bootstrap_args=None):

     try:
         import tomllib
+
         # Verify this is actually valid TOML.
         tomllib.loads(build.config_toml)
     except ImportError:
-        print("WARNING: skipping TOML validation, need at least python 3.11", file=sys.stderr)
+        print(
+            "WARNING: skipping TOML validation, need at least python 3.11",
+            file=sys.stderr,
+        )
     return build

+
 class VerifyTestCase(unittest.TestCase):
     """Test Case for verify"""

     def setUp(self):
         self.container = tempfile.mkdtemp()
         self.src = os.path.join(self.container, "src.txt")
@@ -68,14 +74,14 @@ class VerifyTestCase(unittest.TestCase):

 class ProgramOutOfDate(unittest.TestCase):
     """Test if a program is out of date"""

     def setUp(self):
         self.container = tempfile.mkdtemp()
         os.mkdir(os.path.join(self.container, "stage0"))
         self.build = bootstrap.RustBuild()
         self.build.date = "2017-06-15"
         self.build.build_dir = self.container
-        self.rustc_stamp_path = os.path.join(self.container, "stage0",
-                                             ".rustc-stamp")
+        self.rustc_stamp_path = os.path.join(self.container, "stage0", ".rustc-stamp")
         self.key = self.build.date + str(None)

     def tearDown(self):
@@ -97,11 +103,14 @@ class ProgramOutOfDate(unittest.TestCase):
         """Return False both dates match"""
         with open(self.rustc_stamp_path, "w") as rustc_stamp:
             rustc_stamp.write("2017-06-15None")
-        self.assertFalse(self.build.program_out_of_date(self.rustc_stamp_path, self.key))
+        self.assertFalse(
+            self.build.program_out_of_date(self.rustc_stamp_path, self.key)
+        )

+
 class ParseArgsInConfigure(unittest.TestCase):
     """Test if `parse_args` function in `configure.py` works properly"""

     @patch("configure.err")
     def test_unknown_args(self, err):
         # It should be print an error message if the argument doesn't start with '--'
@@ -148,28 +157,35 @@ class ParseArgsInConfigure(unittest.TestCase):

 class GenerateAndParseConfig(unittest.TestCase):
     """Test that we can serialize and deserialize a config.toml file"""

     def test_no_args(self):
         build = serialize_and_parse([])
-        self.assertEqual(build.get_toml("profile"), 'dist')
+        self.assertEqual(build.get_toml("profile"), "dist")
         self.assertIsNone(build.get_toml("llvm.download-ci-llvm"))

     def test_set_section(self):
         build = serialize_and_parse(["--set", "llvm.download-ci-llvm"])
-        self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), 'true')
+        self.assertEqual(build.get_toml("download-ci-llvm", section="llvm"), "true")

     def test_set_target(self):
         build = serialize_and_parse(["--set", "target.x86_64-unknown-linux-gnu.cc=gcc"])
-        self.assertEqual(build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), 'gcc')
+        self.assertEqual(
+            build.get_toml("cc", section="target.x86_64-unknown-linux-gnu"), "gcc"
+        )

     def test_set_top_level(self):
         build = serialize_and_parse(["--set", "profile=compiler"])
-        self.assertEqual(build.get_toml("profile"), 'compiler')
+        self.assertEqual(build.get_toml("profile"), "compiler")

     def test_set_codegen_backends(self):
         build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift"])
-        self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift']"), -1)
+        self.assertNotEqual(
+            build.config_toml.find("codegen-backends = ['cranelift']"), -1
+        )
         build = serialize_and_parse(["--set", "rust.codegen-backends=cranelift,llvm"])
-        self.assertNotEqual(build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1)
+        self.assertNotEqual(
+            build.config_toml.find("codegen-backends = ['cranelift', 'llvm']"), -1
+        )
         build = serialize_and_parse(["--enable-full-tools"])
         self.assertNotEqual(build.config_toml.find("codegen-backends = ['llvm']"), -1)

@@ -223,7 +239,7 @@ class BuildBootstrap(unittest.TestCase):
         self.assertTrue("--timings" in args)

     def test_warnings(self):
-        for toml_warnings in ['false', 'true', None]:
+        for toml_warnings in ["false", "true", None]:
             configure_args = []
             if toml_warnings is not None:
                 configure_args = ["--set", "rust.deny-warnings=" + toml_warnings]
@@ -6,11 +6,12 @@ from __future__ import absolute_import, division, print_function
 import shlex
 import sys
 import os

 rust_dir = os.path.dirname(os.path.abspath(__file__))
 rust_dir = os.path.dirname(rust_dir)
 rust_dir = os.path.dirname(rust_dir)
 sys.path.append(os.path.join(rust_dir, "src", "bootstrap"))
-import bootstrap # noqa: E402
+import bootstrap  # noqa: E402

+
 class Option(object):
@@ -32,26 +33,62 @@ def v(*args):
     options.append(Option(*args, value=True))


-o("debug", "rust.debug", "enables debugging environment; does not affect optimization of bootstrapped code")
+o(
+    "debug",
+    "rust.debug",
+    "enables debugging environment; does not affect optimization of bootstrapped code",
+)
 o("docs", "build.docs", "build standard library documentation")
 o("compiler-docs", "build.compiler-docs", "build compiler documentation")
 o("optimize-tests", "rust.optimize-tests", "build tests with optimizations")
 o("verbose-tests", "rust.verbose-tests", "enable verbose output when running tests")
-o("ccache", "llvm.ccache", "invoke gcc/clang via ccache to reuse object files between builds")
+o(
+    "ccache",
+    "llvm.ccache",
+    "invoke gcc/clang via ccache to reuse object files between builds",
+)
 o("sccache", None, "invoke gcc/clang via sccache to reuse object files between builds")
 o("local-rust", None, "use an installed rustc rather than downloading a snapshot")
 v("local-rust-root", None, "set prefix for local rust binary")
-o("local-rebuild", "build.local-rebuild", "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version")
-o("llvm-static-stdcpp", "llvm.static-libstdcpp", "statically link to libstdc++ for LLVM")
-o("llvm-link-shared", "llvm.link-shared", "prefer shared linking to LLVM (llvm-config --link-shared)")
+o(
+    "local-rebuild",
+    "build.local-rebuild",
+    "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version",
+)
+o(
+    "llvm-static-stdcpp",
+    "llvm.static-libstdcpp",
+    "statically link to libstdc++ for LLVM",
+)
+o(
+    "llvm-link-shared",
+    "llvm.link-shared",
+    "prefer shared linking to LLVM (llvm-config --link-shared)",
+)
 o("rpath", "rust.rpath", "build rpaths into rustc itself")
 o("codegen-tests", "rust.codegen-tests", "run the tests/codegen tests")
-o("ninja", "llvm.ninja", "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)")
+o(
+    "ninja",
+    "llvm.ninja",
+    "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)",
+)
 o("locked-deps", "build.locked-deps", "force Cargo.lock to be up to date")
 o("vendor", "build.vendor", "enable usage of vendored Rust crates")
-o("sanitizers", "build.sanitizers", "build the sanitizer runtimes (asan, dfsan, lsan, msan, tsan, hwasan)")
-o("dist-src", "rust.dist-src", "when building tarballs enables building a source tarball")
-o("cargo-native-static", "build.cargo-native-static", "static native libraries in cargo")
+o(
+    "sanitizers",
+    "build.sanitizers",
+    "build the sanitizer runtimes (asan, dfsan, lsan, msan, tsan, hwasan)",
+)
+o(
+    "dist-src",
+    "rust.dist-src",
+    "when building tarballs enables building a source tarball",
+)
+o(
+    "cargo-native-static",
+    "build.cargo-native-static",
+    "static native libraries in cargo",
+)
 o("profiler", "build.profiler", "build the profiler runtime")
 o("full-tools", None, "enable all tools")
 o("lld", "rust.lld", "build lld")
@@ -59,7 +96,11 @@ o("llvm-bitcode-linker", "rust.llvm-bitcode-linker", "build llvm bitcode linker"
 o("clang", "llvm.clang", "build clang")
 o("use-libcxx", "llvm.use-libcxx", "build LLVM with libc++")
 o("control-flow-guard", "rust.control-flow-guard", "Enable Control Flow Guard")
-o("patch-binaries-for-nix", "build.patch-binaries-for-nix", "whether patch binaries for usage with Nix toolchains")
+o(
+    "patch-binaries-for-nix",
+    "build.patch-binaries-for-nix",
+    "whether patch binaries for usage with Nix toolchains",
+)
 o("new-symbol-mangling", "rust.new-symbol-mangling", "use symbol-mangling-version v0")

 v("llvm-cflags", "llvm.cflags", "build LLVM with these extra compiler flags")
@@ -76,16 +117,48 @@ o("llvm-enzyme", "llvm.enzyme", "build LLVM with enzyme")
 o("llvm-offload", "llvm.offload", "build LLVM with gpu offload support")
 o("llvm-plugins", "llvm.plugins", "build LLVM with plugin interface")
 o("debug-assertions", "rust.debug-assertions", "build with debugging assertions")
-o("debug-assertions-std", "rust.debug-assertions-std", "build the standard library with debugging assertions")
+o(
+    "debug-assertions-std",
+    "rust.debug-assertions-std",
+    "build the standard library with debugging assertions",
+)
 o("overflow-checks", "rust.overflow-checks", "build with overflow checks")
-o("overflow-checks-std", "rust.overflow-checks-std", "build the standard library with overflow checks")
-o("llvm-release-debuginfo", "llvm.release-debuginfo", "build LLVM with debugger metadata")
+o(
+    "overflow-checks-std",
+    "rust.overflow-checks-std",
+    "build the standard library with overflow checks",
+)
+o(
+    "llvm-release-debuginfo",
+    "llvm.release-debuginfo",
+    "build LLVM with debugger metadata",
+)
 v("debuginfo-level", "rust.debuginfo-level", "debuginfo level for Rust code")
-v("debuginfo-level-rustc", "rust.debuginfo-level-rustc", "debuginfo level for the compiler")
-v("debuginfo-level-std", "rust.debuginfo-level-std", "debuginfo level for the standard library")
-v("debuginfo-level-tools", "rust.debuginfo-level-tools", "debuginfo level for the tools")
-v("debuginfo-level-tests", "rust.debuginfo-level-tests", "debuginfo level for the test suites run with compiletest")
-v("save-toolstates", "rust.save-toolstates", "save build and test status of external tools into this file")
+v(
+    "debuginfo-level-rustc",
+    "rust.debuginfo-level-rustc",
+    "debuginfo level for the compiler",
+)
+v(
+    "debuginfo-level-std",
+    "rust.debuginfo-level-std",
+    "debuginfo level for the standard library",
+)
+v(
+    "debuginfo-level-tools",
+    "rust.debuginfo-level-tools",
+    "debuginfo level for the tools",
+)
+v(
+    "debuginfo-level-tests",
+    "rust.debuginfo-level-tests",
+    "debuginfo level for the test suites run with compiletest",
+)
+v(
+    "save-toolstates",
+    "rust.save-toolstates",
+    "save build and test status of external tools into this file",
+)

 v("prefix", "install.prefix", "set installation prefix")
 v("localstatedir", "install.localstatedir", "local state directory")
@@ -102,50 +175,117 @@ v("llvm-config", None, "set path to llvm-config")
 v("llvm-filecheck", None, "set path to LLVM's FileCheck utility")
 v("python", "build.python", "set path to python")
 v("android-ndk", "build.android-ndk", "set path to Android NDK")
-v("musl-root", "target.x86_64-unknown-linux-musl.musl-root",
-  "MUSL root installation directory (deprecated)")
-v("musl-root-x86_64", "target.x86_64-unknown-linux-musl.musl-root",
-  "x86_64-unknown-linux-musl install directory")
-v("musl-root-i586", "target.i586-unknown-linux-musl.musl-root",
-  "i586-unknown-linux-musl install directory")
-v("musl-root-i686", "target.i686-unknown-linux-musl.musl-root",
-  "i686-unknown-linux-musl install directory")
-v("musl-root-arm", "target.arm-unknown-linux-musleabi.musl-root",
-  "arm-unknown-linux-musleabi install directory")
-v("musl-root-armhf", "target.arm-unknown-linux-musleabihf.musl-root",
-  "arm-unknown-linux-musleabihf install directory")
-v("musl-root-armv5te", "target.armv5te-unknown-linux-musleabi.musl-root",
-  "armv5te-unknown-linux-musleabi install directory")
-v("musl-root-armv7", "target.armv7-unknown-linux-musleabi.musl-root",
-  "armv7-unknown-linux-musleabi install directory")
-v("musl-root-armv7hf", "target.armv7-unknown-linux-musleabihf.musl-root",
-  "armv7-unknown-linux-musleabihf install directory")
-v("musl-root-aarch64", "target.aarch64-unknown-linux-musl.musl-root",
-  "aarch64-unknown-linux-musl install directory")
-v("musl-root-mips", "target.mips-unknown-linux-musl.musl-root",
-  "mips-unknown-linux-musl install directory")
-v("musl-root-mipsel", "target.mipsel-unknown-linux-musl.musl-root",
-  "mipsel-unknown-linux-musl install directory")
-v("musl-root-mips64", "target.mips64-unknown-linux-muslabi64.musl-root",
-  "mips64-unknown-linux-muslabi64 install directory")
-v("musl-root-mips64el", "target.mips64el-unknown-linux-muslabi64.musl-root",
-  "mips64el-unknown-linux-muslabi64 install directory")
-v("musl-root-riscv32gc", "target.riscv32gc-unknown-linux-musl.musl-root",
-  "riscv32gc-unknown-linux-musl install directory")
-v("musl-root-riscv64gc", "target.riscv64gc-unknown-linux-musl.musl-root",
-  "riscv64gc-unknown-linux-musl install directory")
-v("musl-root-loongarch64", "target.loongarch64-unknown-linux-musl.musl-root",
-  "loongarch64-unknown-linux-musl install directory")
-v("qemu-armhf-rootfs", "target.arm-unknown-linux-gnueabihf.qemu-rootfs",
-  "rootfs in qemu testing, you probably don't want to use this")
-v("qemu-aarch64-rootfs", "target.aarch64-unknown-linux-gnu.qemu-rootfs",
-  "rootfs in qemu testing, you probably don't want to use this")
-v("qemu-riscv64-rootfs", "target.riscv64gc-unknown-linux-gnu.qemu-rootfs",
-  "rootfs in qemu testing, you probably don't want to use this")
-v("experimental-targets", "llvm.experimental-targets",
-  "experimental LLVM targets to build")
+v(
+    "musl-root",
+    "target.x86_64-unknown-linux-musl.musl-root",
+    "MUSL root installation directory (deprecated)",
+)
+v(
+    "musl-root-x86_64",
+    "target.x86_64-unknown-linux-musl.musl-root",
+    "x86_64-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-i586",
+    "target.i586-unknown-linux-musl.musl-root",
+    "i586-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-i686",
+    "target.i686-unknown-linux-musl.musl-root",
+    "i686-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-arm",
+    "target.arm-unknown-linux-musleabi.musl-root",
+    "arm-unknown-linux-musleabi install directory",
+)
+v(
+    "musl-root-armhf",
+    "target.arm-unknown-linux-musleabihf.musl-root",
+    "arm-unknown-linux-musleabihf install directory",
+)
+v(
+    "musl-root-armv5te",
+    "target.armv5te-unknown-linux-musleabi.musl-root",
+    "armv5te-unknown-linux-musleabi install directory",
+)
+v(
+    "musl-root-armv7",
+    "target.armv7-unknown-linux-musleabi.musl-root",
+    "armv7-unknown-linux-musleabi install directory",
+)
+v(
+    "musl-root-armv7hf",
+    "target.armv7-unknown-linux-musleabihf.musl-root",
+    "armv7-unknown-linux-musleabihf install directory",
+)
+v(
+    "musl-root-aarch64",
+    "target.aarch64-unknown-linux-musl.musl-root",
+    "aarch64-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-mips",
+    "target.mips-unknown-linux-musl.musl-root",
+    "mips-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-mipsel",
+    "target.mipsel-unknown-linux-musl.musl-root",
+    "mipsel-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-mips64",
+    "target.mips64-unknown-linux-muslabi64.musl-root",
+    "mips64-unknown-linux-muslabi64 install directory",
+)
+v(
+    "musl-root-mips64el",
+    "target.mips64el-unknown-linux-muslabi64.musl-root",
+    "mips64el-unknown-linux-muslabi64 install directory",
+)
+v(
+    "musl-root-riscv32gc",
+    "target.riscv32gc-unknown-linux-musl.musl-root",
+    "riscv32gc-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-riscv64gc",
+    "target.riscv64gc-unknown-linux-musl.musl-root",
+    "riscv64gc-unknown-linux-musl install directory",
+)
+v(
+    "musl-root-loongarch64",
+    "target.loongarch64-unknown-linux-musl.musl-root",
+    "loongarch64-unknown-linux-musl install directory",
+)
+v(
+    "qemu-armhf-rootfs",
+    "target.arm-unknown-linux-gnueabihf.qemu-rootfs",
+    "rootfs in qemu testing, you probably don't want to use this",
+)
+v(
+    "qemu-aarch64-rootfs",
+    "target.aarch64-unknown-linux-gnu.qemu-rootfs",
+    "rootfs in qemu testing, you probably don't want to use this",
+)
+v(
+    "qemu-riscv64-rootfs",
+    "target.riscv64gc-unknown-linux-gnu.qemu-rootfs",
+    "rootfs in qemu testing, you probably don't want to use this",
+)
+v(
+    "experimental-targets",
+    "llvm.experimental-targets",
+    "experimental LLVM targets to build",
+)
 v("release-channel", "rust.channel", "the name of the release channel to build")
-v("release-description", "rust.description", "optional descriptive string for version output")
+v(
+    "release-description",
+    "rust.description",
+    "optional descriptive string for version output",
+)
 v("dist-compression-formats", None, "List of compression formats to use")

 # Used on systems where "cc" is unavailable
@@ -154,7 +294,11 @@ v("default-linker", "rust.default-linker", "the default linker")
 # Many of these are saved below during the "writing configuration" step
 # (others are conditionally saved).
 o("manage-submodules", "build.submodules", "let the build manage the git submodules")
-o("full-bootstrap", "build.full-bootstrap", "build three compilers instead of two (not recommended except for testing reproducible builds)")
+o(
+    "full-bootstrap",
+    "build.full-bootstrap",
+    "build three compilers instead of two (not recommended except for testing reproducible builds)",
+)
 o("extended", "build.extended", "build an extended rust tool set")

 v("bootstrap-cache-path", None, "use provided path for the bootstrap cache")
@@ -165,8 +309,16 @@ v("host", None, "List of GNUs ./configure syntax LLVM host triples")
 v("target", None, "List of GNUs ./configure syntax LLVM target triples")

 # Options specific to this configure script
-o("option-checking", None, "complain about unrecognized options in this configure script")
-o("verbose-configure", None, "don't truncate options when printing them in this configure script")
+o(
+    "option-checking",
+    None,
+    "complain about unrecognized options in this configure script",
+)
+o(
+    "verbose-configure",
+    None,
+    "don't truncate options when printing them in this configure script",
+)
 v("set", None, "set arbitrary key/value pairs in TOML configuration")


@@ -178,39 +330,42 @@ def err(msg):
     print("\nconfigure: ERROR: " + msg + "\n")
     sys.exit(1)

+
 def is_value_list(key):
     for option in options:
-        if option.name == key and option.desc.startswith('List of'):
+        if option.name == key and option.desc.startswith("List of"):
             return True
     return False

-if '--help' in sys.argv or '-h' in sys.argv:
-    print('Usage: ./configure [options]')
-    print('')
-    print('Options')
+
+if "--help" in sys.argv or "-h" in sys.argv:
+    print("Usage: ./configure [options]")
+    print("")
+    print("Options")
     for option in options:
-        if 'android' in option.name:
+        if "android" in option.name:
             # no one needs to know about these obscure options
             continue
         if option.value:
-            print('\t{:30} {}'.format('--{}=VAL'.format(option.name), option.desc))
+            print("\t{:30} {}".format("--{}=VAL".format(option.name), option.desc))
         else:
-            print('\t--enable-{:25} OR --disable-{}'.format(option.name, option.name))
-            print('\t\t' + option.desc)
-    print('')
-    print('This configure script is a thin configuration shim over the true')
-    print('configuration system, `config.toml`. You can explore the comments')
-    print('in `config.example.toml` next to this configure script to see')
-    print('more information about what each option is. Additionally you can')
-    print('pass `--set` as an argument to set arbitrary key/value pairs')
-    print('in the TOML configuration if desired')
-    print('')
-    print('Also note that all options which take `--enable` can similarly')
-    print('be passed with `--disable-foo` to forcibly disable the option')
+            print("\t--enable-{:25} OR --disable-{}".format(option.name, option.name))
+            print("\t\t" + option.desc)
+    print("")
+    print("This configure script is a thin configuration shim over the true")
+    print("configuration system, `config.toml`. You can explore the comments")
+    print("in `config.example.toml` next to this configure script to see")
+    print("more information about what each option is. Additionally you can")
+    print("pass `--set` as an argument to set arbitrary key/value pairs")
+    print("in the TOML configuration if desired")
+    print("")
+    print("Also note that all options which take `--enable` can similarly")
+    print("be passed with `--disable-foo` to forcibly disable the option")
     sys.exit(0)

 VERBOSE = False

+
 # Parse all command line arguments into one of these three lists, handling
 # boolean and value-based options separately
 def parse_args(args):
@@ -222,7 +377,7 @@ def parse_args(args):
     while i < len(args):
         arg = args[i]
         i += 1
-        if not arg.startswith('--'):
+        if not arg.startswith("--"):
             unknown_args.append(arg)
             continue

@@ -230,7 +385,7 @@ def parse_args(args):
         for option in options:
             value = None
             if option.value:
-                keyval = arg[2:].split('=', 1)
+                keyval = arg[2:].split("=", 1)
                 key = keyval[0]
                 if option.name != key:
                     continue
@@ -244,9 +399,9 @@ def parse_args(args):
                     need_value_args.append(arg)
                     continue
             else:
-                if arg[2:] == 'enable-' + option.name:
+                if arg[2:] == "enable-" + option.name:
                     value = True
-                elif arg[2:] == 'disable-' + option.name:
+                elif arg[2:] == "disable-" + option.name:
                     value = False
                 else:
                     continue
@@ -263,8 +418,9 @@ def parse_args(args):
     # NOTE: here and a few other places, we use [-1] to apply the *last* value
     # passed. But if option-checking is enabled, then the known_args loop will
     # also assert that options are only passed once.
-    option_checking = ('option-checking' not in known_args
-                       or known_args['option-checking'][-1][1])
+    option_checking = (
+        "option-checking" not in known_args or known_args["option-checking"][-1][1]
+    )
     if option_checking:
         if len(unknown_args) > 0:
             err("Option '" + unknown_args[0] + "' is not recognized")
@@ -272,18 +428,18 @@ def parse_args(args):
         err("Option '{0}' needs a value ({0}=val)".format(need_value_args[0]))

     global VERBOSE
-    VERBOSE = 'verbose-configure' in known_args
+    VERBOSE = "verbose-configure" in known_args

     config = {}

-    set('build.configure-args', args, config)
+    set("build.configure-args", args, config)
     apply_args(known_args, option_checking, config)
     return parse_example_config(known_args, config)


 def build(known_args):
-    if 'build' in known_args:
-        return known_args['build'][-1][1]
+    if "build" in known_args:
+        return known_args["build"][-1][1]
     return bootstrap.default_build_triple(verbose=False)


@@ -291,7 +447,7 @@ def set(key, value, config):
     if isinstance(value, list):
         # Remove empty values, which value.split(',') tends to generate and
         # replace single quotes for double quotes to ensure correct parsing.
-        value = [v.replace('\'', '"') for v in value if v]
+        value = [v.replace("'", '"') for v in value if v]

     s = "{:20} := {}".format(key, value)
     if len(s) < 70 or VERBOSE:
@@ -310,7 +466,7 @@ def set(key, value, config):
     for i, part in enumerate(parts):
         if i == len(parts) - 1:
             if is_value_list(part) and isinstance(value, str):
-                value = value.split(',')
+                value = value.split(",")
             arr[part] = value
         else:
             if part not in arr:
@@ -321,9 +477,9 @@ def set(key, value, config):
 def apply_args(known_args, option_checking, config):
     for key in known_args:
         # The `set` option is special and can be passed a bunch of times
-        if key == 'set':
+        if key == "set":
             for _option, value in known_args[key]:
-                keyval = value.split('=', 1)
+                keyval = value.split("=", 1)
                 if len(keyval) == 1 or keyval[1] == "true":
                     value = True
                 elif keyval[1] == "false":
@@ -348,50 +504,55 @@ def apply_args(known_args, option_checking, config):
         # that here.
         build_triple = build(known_args)

-        if option.name == 'sccache':
-            set('llvm.ccache', 'sccache', config)
-        elif option.name == 'local-rust':
-            for path in os.environ['PATH'].split(os.pathsep):
-                if os.path.exists(path + '/rustc'):
-                    set('build.rustc', path + '/rustc', config)
+        if option.name == "sccache":
+            set("llvm.ccache", "sccache", config)
+        elif option.name == "local-rust":
+            for path in os.environ["PATH"].split(os.pathsep):
+                if os.path.exists(path + "/rustc"):
+                    set("build.rustc", path + "/rustc", config)
                     break
-            for path in os.environ['PATH'].split(os.pathsep):
-                if os.path.exists(path + '/cargo'):
-                    set('build.cargo', path + '/cargo', config)
+            for path in os.environ["PATH"].split(os.pathsep):
+                if os.path.exists(path + "/cargo"):
+                    set("build.cargo", path + "/cargo", config)
                     break
-        elif option.name == 'local-rust-root':
-            set('build.rustc', value + '/bin/rustc', config)
-            set('build.cargo', value + '/bin/cargo', config)
-        elif option.name == 'llvm-root':
-            set('target.{}.llvm-config'.format(build_triple), value + '/bin/llvm-config', config)
-        elif option.name == 'llvm-config':
-            set('target.{}.llvm-config'.format(build_triple), value, config)
-        elif option.name == 'llvm-filecheck':
-            set('target.{}.llvm-filecheck'.format(build_triple), value, config)
-        elif option.name == 'tools':
-            set('build.tools', value.split(','), config)
-        elif option.name == 'bootstrap-cache-path':
-            set('build.bootstrap-cache-path', value, config)
-        elif option.name == 'codegen-backends':
-            set('rust.codegen-backends', value.split(','), config)
-        elif option.name == 'host':
-            set('build.host', value.split(','), config)
-        elif option.name == 'target':
-            set('build.target', value.split(','), config)
-        elif option.name == 'full-tools':
-            set('rust.codegen-backends', ['llvm'], config)
-            set('rust.lld', True, config)
-            set('rust.llvm-tools', True, config)
-            set('rust.llvm-bitcode-linker', True, config)
-            set('build.extended', True, config)
-        elif option.name in ['option-checking', 'verbose-configure']:
+        elif option.name == "local-rust-root":
+            set("build.rustc", value + "/bin/rustc", config)
+            set("build.cargo", value + "/bin/cargo", config)
+        elif option.name == "llvm-root":
+            set(
+                "target.{}.llvm-config".format(build_triple),
+                value + "/bin/llvm-config",
+                config,
+            )
+        elif option.name == "llvm-config":
+            set("target.{}.llvm-config".format(build_triple), value, config)
+        elif option.name == "llvm-filecheck":
+            set("target.{}.llvm-filecheck".format(build_triple), value, config)
+        elif option.name == "tools":
+            set("build.tools", value.split(","), config)
+        elif option.name == "bootstrap-cache-path":
+            set("build.bootstrap-cache-path", value, config)
+        elif option.name == "codegen-backends":
+            set("rust.codegen-backends", value.split(","), config)
+        elif option.name == "host":
+            set("build.host", value.split(","), config)
+        elif option.name == "target":
+            set("build.target", value.split(","), config)
+        elif option.name == "full-tools":
+            set("rust.codegen-backends", ["llvm"], config)
+            set("rust.lld", True, config)
+            set("rust.llvm-tools", True, config)
+            set("rust.llvm-bitcode-linker", True, config)
+            set("build.extended", True, config)
+        elif option.name in ["option-checking", "verbose-configure"]:
             # this was handled above
             pass
-        elif option.name == 'dist-compression-formats':
-            set('dist.compression-formats', value.split(','), config)
+        elif option.name == "dist-compression-formats":
+            set("dist.compression-formats", value.split(","), config)
         else:
             raise RuntimeError("unhandled option {}".format(option.name))


 # "Parse" the `config.example.toml` file into the various sections, and we'll
 # use this as a template of a `config.toml` to write out which preserves
 # all the various comments and whatnot.
@@ -406,20 +567,22 @@ def parse_example_config(known_args, config):
     targets = {}
     top_level_keys = []

-    with open(rust_dir + '/config.example.toml') as example_config:
+    with open(rust_dir + "/config.example.toml") as example_config:
         example_lines = example_config.read().split("\n")
         for line in example_lines:
             if cur_section is None:
-                if line.count('=') == 1:
-                    top_level_key = line.split('=')[0]
-                    top_level_key = top_level_key.strip(' #')
+                if line.count("=") == 1:
+                    top_level_key = line.split("=")[0]
+                    top_level_key = top_level_key.strip(" #")
                     top_level_keys.append(top_level_key)
-            if line.startswith('['):
+            if line.startswith("["):
                 cur_section = line[1:-1]
-                if cur_section.startswith('target'):
-                    cur_section = 'target'
-                elif '.' in cur_section:
-                    raise RuntimeError("don't know how to deal with section: {}".format(cur_section))
+                if cur_section.startswith("target"):
+                    cur_section = "target"
+                elif "." in cur_section:
+                    raise RuntimeError(
+                        "don't know how to deal with section: {}".format(cur_section)
+                    )
                 sections[cur_section] = [line]
                 section_order.append(cur_section)
             else:
@@ -428,22 +591,25 @@ def parse_example_config(known_args, config):
     # Fill out the `targets` array by giving all configured targets a copy of the
     # `target` section we just loaded from the example config
     configured_targets = [build(known_args)]
-    if 'build' in config:
-        if 'host' in config['build']:
-            configured_targets += config['build']['host']
-        if 'target' in config['build']:
-            configured_targets += config['build']['target']
-    if 'target' in config:
-        for target in config['target']:
+    if "build" in config:
+        if "host" in config["build"]:
+            configured_targets += config["build"]["host"]
+        if "target" in config["build"]:
+            configured_targets += config["build"]["target"]
+    if "target" in config:
+        for target in config["target"]:
             configured_targets.append(target)
     for target in configured_targets:
-        targets[target] = sections['target'][:]
+        targets[target] = sections["target"][:]
         # For `.` to be valid TOML, it needs to be quoted. But `bootstrap.py` doesn't use a proper TOML parser and fails to parse the target.
        # Avoid using quotes unless it's necessary.
-        targets[target][0] = targets[target][0].replace("x86_64-unknown-linux-gnu", "'{}'".format(target) if "." in target else target)
+        targets[target][0] = targets[target][0].replace(
+            "x86_64-unknown-linux-gnu",
+            "'{}'".format(target) if "." in target else target,
+        )

-    if 'profile' not in config:
-        set('profile', 'dist', config)
+    if "profile" not in config:
+        set("profile", "dist", config)
     configure_file(sections, top_level_keys, targets, config)
     return section_order, sections, targets

@@ -467,7 +633,7 @@ def to_toml(value):
         else:
             return "false"
     elif isinstance(value, list):
-        return '[' + ', '.join(map(to_toml, value)) + ']'
+        return "[" + ", ".join(map(to_toml, value)) + "]"
     elif isinstance(value, str):
         # Don't put quotes around numeric values
         if is_number(value):
@@ -475,9 +641,18 @@ def to_toml(value):
         else:
             return "'" + value + "'"
     elif isinstance(value, dict):
-        return "{" + ", ".join(map(lambda a: "{} = {}".format(to_toml(a[0]), to_toml(a[1])), value.items())) + "}"
+        return (
+            "{"
+            + ", ".join(
+                map(
+                    lambda a: "{} = {}".format(to_toml(a[0]), to_toml(a[1])),
+                    value.items(),
+                )
+            )
+            + "}"
+        )
     else:
-        raise RuntimeError('no toml')
+        raise RuntimeError("no toml")


 def configure_section(lines, config):
@@ -485,7 +660,7 @@ def configure_section(lines, config):
         value = config[key]
         found = False
         for i, line in enumerate(lines):
-            if not line.startswith('#' + key + ' = '):
+            if not line.startswith("#" + key + " = "):
                 continue
             found = True
             lines[i] = "{} = {}".format(key, to_toml(value))
@@ -501,7 +676,9 @@ def configure_section(lines, config):

 def configure_top_level_key(lines, top_level_key, value):
     for i, line in enumerate(lines):
-        if line.startswith('#' + top_level_key + ' = ') or line.startswith(top_level_key + ' = '):
+        if line.startswith("#" + top_level_key + " = ") or line.startswith(
+            top_level_key + " = "
+        ):
             lines[i] = "{} = {}".format(top_level_key, to_toml(value))
             return

@@ -512,11 +689,13 @@ def configure_top_level_key(lines, top_level_key, value):
 def configure_file(sections, top_level_keys, targets, config):
     for section_key, section_config in config.items():
         if section_key not in sections and section_key not in top_level_keys:
-            raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key))
+            raise RuntimeError(
+                "config key {} not in sections or top_level_keys".format(section_key)
+            )
         if section_key in top_level_keys:
             configure_top_level_key(sections[None], section_key, section_config)

-        elif section_key == 'target':
+        elif section_key == "target":
             for target in section_config:
                 configure_section(targets[target], section_config[target])
         else:
@@ -536,18 +715,19 @@ def write_uncommented(target, f):
             block = []
             is_comment = True
             continue
-        is_comment = is_comment and line.startswith('#')
+        is_comment = is_comment and line.startswith("#")
     return f


 def write_config_toml(writer, section_order, targets, sections):
     for section in section_order:
-        if section == 'target':
+        if section == "target":
             for target in targets:
                 writer = write_uncommented(targets[target], writer)
         else:
             writer = write_uncommented(sections[section], writer)


 def quit_if_file_exists(file):
     if os.path.isfile(file):
         msg = "Existing '{}' detected. Exiting".format(file)
@@ -559,9 +739,10 @@ def quit_if_file_exists(file):

     err(msg)

+
 if __name__ == "__main__":
     # If 'config.toml' already exists, exit the script at this point
-    quit_if_file_exists('config.toml')
+    quit_if_file_exists("config.toml")

     if "GITHUB_ACTIONS" in os.environ:
         print("::group::Configure the build")
@@ -575,13 +756,13 @@ if __name__ == "__main__":
     # order that we read it in.
     p("")
     p("writing `config.toml` in current directory")
-    with bootstrap.output('config.toml') as f:
+    with bootstrap.output("config.toml") as f:
         write_config_toml(f, section_order, targets, sections)

-    with bootstrap.output('Makefile') as f:
-        contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
+    with bootstrap.output("Makefile") as f:
+        contents = os.path.join(rust_dir, "src", "bootstrap", "mk", "Makefile.in")
         contents = open(contents).read()
-        contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + '/')
+        contents = contents.replace("$(CFG_SRC_DIR)", rust_dir + "/")
         contents = contents.replace("$(CFG_PYTHON)", sys.executable)
         f.write(contents)
@@ -40,12 +40,13 @@ import time
 # Python 3.3 changed the value of `sys.platform` on Linux from "linux2" to just
 # "linux". We check here with `.startswith` to keep compatibility with older
 # Python versions (especially Python 2.7).
-if sys.platform.startswith('linux'):
+if sys.platform.startswith("linux"):
+
     class State:
         def __init__(self):
-            with open('/proc/stat', 'r') as file:
+            with open("/proc/stat", "r") as file:
                 data = file.readline().split()
-            if data[0] != 'cpu':
+            if data[0] != "cpu":
                 raise Exception('did not start with "cpu"')
             self.user = int(data[1])
             self.nice = int(data[2])
@@ -69,10 +70,21 @@ if sys.platform.startswith('linux'):
             steal = self.steal - prev.steal
             guest = self.guest - prev.guest
             guest_nice = self.guest_nice - prev.guest_nice
-            total = user + nice + system + idle + iowait + irq + softirq + steal + guest + guest_nice
+            total = (
+                user
+                + nice
+                + system
+                + idle
+                + iowait
+                + irq
+                + softirq
+                + steal
+                + guest
+                + guest_nice
+            )
             return float(idle) / float(total) * 100

-elif sys.platform == 'win32':
+elif sys.platform == "win32":
     from ctypes.wintypes import DWORD
     from ctypes import Structure, windll, WinError, GetLastError, byref

@@ -104,9 +116,10 @@ elif sys.platform == 'win32':
             kernel = self.kernel - prev.kernel
             return float(idle) / float(user + kernel) * 100

-elif sys.platform == 'darwin':
+elif sys.platform == "darwin":
     from ctypes import *
-    libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
+
+    libc = cdll.LoadLibrary("/usr/lib/libc.dylib")

     class host_cpu_load_info_data_t(Structure):
         _fields_ = [("cpu_ticks", c_uint * 4)]
@@ -116,7 +129,7 @@ elif sys.platform == 'darwin':
         c_uint,
         c_int,
         POINTER(host_cpu_load_info_data_t),
-        POINTER(c_int)
+        POINTER(c_int),
     ]
     host_statistics.restype = c_int

@@ -124,13 +137,14 @@ elif sys.platform == 'darwin':
     CPU_STATE_SYSTEM = 1
     CPU_STATE_IDLE = 2
     CPU_STATE_NICE = 3

     class State:
         def __init__(self):
             stats = host_cpu_load_info_data_t()
-            count = c_int(4) # HOST_CPU_LOAD_INFO_COUNT
+            count = c_int(4)  # HOST_CPU_LOAD_INFO_COUNT
             err = libc.host_statistics(
                 libc.mach_host_self(),
-                c_int(3), # HOST_CPU_LOAD_INFO
+                c_int(3),  # HOST_CPU_LOAD_INFO
                 byref(stats),
                 byref(count),
             )
@@ -148,7 +162,7 @@ elif sys.platform == 'darwin':
             return float(idle) / float(user + system + idle + nice) * 100.0

 else:
-    print('unknown platform', sys.platform)
+    print("unknown platform", sys.platform)
     sys.exit(1)

 cur_state = State()
@@ -8,78 +8,79 @@ import tempfile

 from pathlib import Path

-TARGET_AARCH64 = 'aarch64-unknown-uefi'
-TARGET_I686 = 'i686-unknown-uefi'
-TARGET_X86_64 = 'x86_64-unknown-uefi'
+TARGET_AARCH64 = "aarch64-unknown-uefi"
+TARGET_I686 = "i686-unknown-uefi"
+TARGET_X86_64 = "x86_64-unknown-uefi"


 def run(*cmd, capture=False, check=True, env=None, timeout=None):
     """Print and run a command, optionally capturing the output."""
     cmd = [str(p) for p in cmd]
-    print(' '.join(cmd))
-    return subprocess.run(cmd,
-                          capture_output=capture,
-                          check=check,
-                          env=env,
-                          text=True,
-                          timeout=timeout)
+    print(" ".join(cmd))
+    return subprocess.run(
+        cmd, capture_output=capture, check=check, env=env, text=True, timeout=timeout
+    )


 def build_and_run(tmp_dir, target):
     if target == TARGET_AARCH64:
-        boot_file_name = 'bootaa64.efi'
-        ovmf_dir = Path('/usr/share/AAVMF')
-        ovmf_code = 'AAVMF_CODE.fd'
-        ovmf_vars = 'AAVMF_VARS.fd'
-        qemu = 'qemu-system-aarch64'
-        machine = 'virt'
-        cpu = 'cortex-a72'
+        boot_file_name = "bootaa64.efi"
+        ovmf_dir = Path("/usr/share/AAVMF")
+        ovmf_code = "AAVMF_CODE.fd"
+        ovmf_vars = "AAVMF_VARS.fd"
+        qemu = "qemu-system-aarch64"
+        machine = "virt"
+        cpu = "cortex-a72"
     elif target == TARGET_I686:
-        boot_file_name = 'bootia32.efi'
-        ovmf_dir = Path('/usr/share/OVMF')
-        ovmf_code = 'OVMF32_CODE_4M.secboot.fd'
-        ovmf_vars = 'OVMF32_VARS_4M.fd'
+        boot_file_name = "bootia32.efi"
+        ovmf_dir = Path("/usr/share/OVMF")
+        ovmf_code = "OVMF32_CODE_4M.secboot.fd"
+        ovmf_vars = "OVMF32_VARS_4M.fd"
         # The i686 target intentionally uses 64-bit qemu; the important
         # difference is that the OVMF code provides a 32-bit environment.
-        qemu = 'qemu-system-x86_64'
-        machine = 'q35'
-        cpu = 'qemu64'
+        qemu = "qemu-system-x86_64"
+        machine = "q35"
+        cpu = "qemu64"
     elif target == TARGET_X86_64:
-        boot_file_name = 'bootx64.efi'
-        ovmf_dir = Path('/usr/share/OVMF')
-        ovmf_code = 'OVMF_CODE.fd'
-        ovmf_vars = 'OVMF_VARS.fd'
-        qemu = 'qemu-system-x86_64'
-        machine = 'q35'
-        cpu = 'qemu64'
+        boot_file_name = "bootx64.efi"
+        ovmf_dir = Path("/usr/share/OVMF")
+        ovmf_code = "OVMF_CODE.fd"
+        ovmf_vars = "OVMF_VARS.fd"
+        qemu = "qemu-system-x86_64"
+        machine = "q35"
+        cpu = "qemu64"
     else:
-        raise KeyError('invalid target')
+        raise KeyError("invalid target")

-    host_artifacts = Path('/checkout/obj/build/x86_64-unknown-linux-gnu')
-    stage0 = host_artifacts / 'stage0/bin'
-    stage2 = host_artifacts / 'stage2/bin'
+    host_artifacts = Path("/checkout/obj/build/x86_64-unknown-linux-gnu")
+    stage0 = host_artifacts / "stage0/bin"
+    stage2 = host_artifacts / "stage2/bin"

     env = dict(os.environ)
-    env['PATH'] = '{}:{}:{}'.format(stage2, stage0, env['PATH'])
+    env["PATH"] = "{}:{}:{}".format(stage2, stage0, env["PATH"])

     # Copy the test create into `tmp_dir`.
-    test_crate = Path(tmp_dir) / 'uefi_qemu_test'
-    shutil.copytree('/uefi_qemu_test', test_crate)
+    test_crate = Path(tmp_dir) / "uefi_qemu_test"
+    shutil.copytree("/uefi_qemu_test", test_crate)

     # Build the UEFI executable.
-    run('cargo',
-        'build',
-        '--manifest-path',
-        test_crate / 'Cargo.toml',
-        '--target',
+    run(
+        "cargo",
+        "build",
+        "--manifest-path",
+        test_crate / "Cargo.toml",
+        "--target",
         target,
-        env=env)
+        env=env,
+    )

     # Create a mock EFI System Partition in a subdirectory.
-    esp = test_crate / 'esp'
-    boot = esp / 'efi/boot'
+    esp = test_crate / "esp"
+    boot = esp / "efi/boot"
     os.makedirs(boot, exist_ok=True)

     # Copy the executable into the ESP.
-    src_exe_path = test_crate / 'target' / target / 'debug/uefi_qemu_test.efi'
+    src_exe_path = test_crate / "target" / target / "debug/uefi_qemu_test.efi"
     shutil.copy(src_exe_path, boot / boot_file_name)
     print(src_exe_path, boot / boot_file_name)

@@ -89,37 +90,39 @@ def build_and_run(tmp_dir, target):

     # Make a writable copy of the vars file. aarch64 doesn't boot
     # correctly with read-only vars.
-    ovmf_rw_vars = Path(tmp_dir) / 'vars.fd'
+    ovmf_rw_vars = Path(tmp_dir) / "vars.fd"
     shutil.copy(ovmf_vars, ovmf_rw_vars)

     # Run the executable in QEMU and capture the output.
-    output = run(qemu,
-                 '-machine',
-                 machine,
-                 '-cpu',
-                 cpu,
-                 '-display',
-                 'none',
-                 '-serial',
-                 'stdio',
-                 '-drive',
-                 f'if=pflash,format=raw,readonly=on,file={ovmf_code}',
-                 '-drive',
-                 f'if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}',
-                 '-drive',
-                 f'format=raw,file=fat:rw:{esp}',
-                 capture=True,
-                 check=True,
-                 # Set a timeout to kill the VM in case something goes wrong.
-                 timeout=60).stdout
+    output = run(
+        qemu,
+        "-machine",
+        machine,
+        "-cpu",
+        cpu,
+        "-display",
+        "none",
+        "-serial",
+        "stdio",
+        "-drive",
+        f"if=pflash,format=raw,readonly=on,file={ovmf_code}",
+        "-drive",
+        f"if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}",
+        "-drive",
+        f"format=raw,file=fat:rw:{esp}",
+        capture=True,
+        check=True,
+        # Set a timeout to kill the VM in case something goes wrong.
+        timeout=60,
+    ).stdout

-    if 'Hello World!' in output:
-        print('VM produced expected output')
+    if "Hello World!" in output:
+        print("VM produced expected output")
     else:
-        print('unexpected VM output:')
-        print('---start---')
+        print("unexpected VM output:")
+        print("---start---")
         print(output)
-        print('---end---')
+        print("---end---")
         sys.exit(1)
@ -35,6 +35,7 @@ MIRROR_BUCKET = "rust-lang-ci-mirrors"
|
||||
MIRROR_BUCKET_REGION = "us-west-1"
|
||||
MIRROR_BASE_DIR = "rustc/android/"
|
||||
|
||||
|
||||
class Package:
|
||||
def __init__(self, path, url, sha1, deps=None):
|
||||
if deps is None:
|
||||
@ -53,18 +54,25 @@ class Package:
|
||||
sha1 = hashlib.sha1(f.read()).hexdigest()
|
||||
if sha1 != self.sha1:
|
||||
raise RuntimeError(
|
||||
"hash mismatch for package " + self.path + ": " +
|
||||
sha1 + " vs " + self.sha1 + " (known good)"
|
||||
"hash mismatch for package "
|
||||
+ self.path
|
||||
+ ": "
|
||||
+ sha1
|
||||
+ " vs "
|
||||
+ self.sha1
|
||||
+ " (known good)"
|
||||
)
|
||||
return file
|
||||
|
||||
def __repr__(self):
|
||||
return "<Package "+self.path+" at "+self.url+" (sha1="+self.sha1+")"
|
||||
return "<Package " + self.path + " at " + self.url + " (sha1=" + self.sha1 + ")"
|
||||
|
||||
|
||||
def fetch_url(url):
|
||||
page = urllib.request.urlopen(url)
|
||||
return page.read()
|
||||
|
||||
|
||||
def fetch_repository(base, repo_url):
|
||||
packages = {}
|
||||
root = ET.fromstring(fetch_url(base + repo_url))
|
||||
@ -92,12 +100,14 @@ def fetch_repository(base, repo_url):
|
||||
|
||||
return packages
|
||||
|
||||
|
||||
def fetch_repositories():
|
||||
packages = {}
|
||||
for repo in REPOSITORIES:
|
||||
packages.update(fetch_repository(BASE_REPOSITORY, repo))
|
||||
return packages
|
||||
|
||||
|
||||
class Lockfile:
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
@ -123,6 +133,7 @@ class Lockfile:
|
||||
for package in packages:
|
||||
f.write(package.path + " " + package.url + " " + package.sha1 + "\n")
|
||||
|
||||
|
||||
def cli_add_to_lockfile(args):
|
||||
lockfile = Lockfile(args.lockfile)
|
||||
packages = fetch_repositories()
|
||||
@ -130,28 +141,49 @@ def cli_add_to_lockfile(args):
|
||||
lockfile.add(packages, package)
|
||||
lockfile.save()
|
||||
|
||||
|
||||
def cli_update_mirror(args):
|
||||
lockfile = Lockfile(args.lockfile)
|
||||
for package in lockfile.packages.values():
|
||||
path = package.download(BASE_REPOSITORY)
|
||||
subprocess.run([
|
||||
"aws", "s3", "mv", path,
|
||||
"s3://" + MIRROR_BUCKET + "/" + MIRROR_BASE_DIR + package.url,
|
||||
"--profile=" + args.awscli_profile,
|
||||
], check=True)
|
||||
subprocess.run(
|
||||
[
|
||||
"aws",
|
||||
"s3",
|
||||
"mv",
|
||||
path,
|
||||
"s3://" + MIRROR_BUCKET + "/" + MIRROR_BASE_DIR + package.url,
|
||||
"--profile=" + args.awscli_profile,
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
|
||||
|
||||
def cli_install(args):
|
||||
lockfile = Lockfile(args.lockfile)
|
||||
for package in lockfile.packages.values():
|
||||
# Download the file from the mirror into a temp file
|
||||
url = "https://" + MIRROR_BUCKET + ".s3-" + MIRROR_BUCKET_REGION + \
|
||||
".amazonaws.com/" + MIRROR_BASE_DIR
|
||||
url = (
|
||||
"https://"
|
||||
+ MIRROR_BUCKET
|
||||
+ ".s3-"
|
||||
+ MIRROR_BUCKET_REGION
|
||||
+ ".amazonaws.com/"
|
||||
+ MIRROR_BASE_DIR
|
||||
)
|
||||
downloaded = package.download(url)
|
||||
# Extract the file in a temporary directory
|
||||
extract_dir = tempfile.mkdtemp()
|
||||
subprocess.run([
|
||||
"unzip", "-q", downloaded, "-d", extract_dir,
|
||||
], check=True)
|
||||
subprocess.run(
|
||||
[
|
||||
"unzip",
|
||||
"-q",
|
||||
downloaded,
|
||||
"-d",
|
||||
extract_dir,
|
||||
],
|
||||
check=True,
|
||||
)
|
||||
# Figure out the prefix used in the zip
|
||||
subdirs = [d for d in os.listdir(extract_dir) if not d.startswith(".")]
|
||||
if len(subdirs) != 1:
|
||||
@ -162,6 +194,7 @@ def cli_install(args):
|
||||
os.rename(os.path.join(extract_dir, subdirs[0]), dest)
|
||||
os.unlink(downloaded)
|
||||
|
||||
|
||||
def cli():
|
||||
parser = argparse.ArgumentParser()
|
||||
subparsers = parser.add_subparsers()
|
||||
@ -187,5 +220,6 @@ def cli():
|
||||
exit(1)
|
||||
args.func(args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cli()
|
||||
|
@ -588,7 +588,7 @@ class TestEnvironment:
"--repo-path",
self.repo_dir(),
"--repository",
self.TEST_REPO_NAME
self.TEST_REPO_NAME,
],
env=ffx_env,
stdout_handler=self.subprocess_logger.debug,
@ -619,9 +619,7 @@ class TestEnvironment:
# `facet` statement required for TCP testing via
# protocol `fuchsia.posix.socket.Provider`. See
# https://fuchsia.dev/fuchsia-src/development/testing/components/test_runner_framework?hl=en#legacy_non-hermetic_tests
CML_TEMPLATE: ClassVar[
str
] = """
CML_TEMPLATE: ClassVar[str] = """
{{
program: {{
runner: "elf_test_runner",
@ -994,7 +992,7 @@ class TestEnvironment:
"repository",
"server",
"stop",
self.TEST_REPO_NAME
self.TEST_REPO_NAME,
],
env=self.ffx_cmd_env(),
stdout_handler=self.subprocess_logger.debug,

@ -7,6 +7,7 @@ be executed on CI.
It reads job definitions from `src/ci/github-actions/jobs.yml`
and filters them based on the event that happened on CI.
"""

import dataclasses
import json
import logging
@ -94,7 +95,7 @@ def find_run_type(ctx: GitHubCtx) -> Optional[WorkflowRunType]:
try_build = ctx.ref in (
"refs/heads/try",
"refs/heads/try-perf",
"refs/heads/automation/bors/try"
"refs/heads/automation/bors/try",
)

# Unrolled branch from a rollup for testing perf
@ -135,11 +136,15 @@ def calculate_jobs(run_type: WorkflowRunType, job_data: Dict[str, Any]) -> List[
continue
jobs.append(job[0])
if unknown_jobs:
raise Exception(f"Custom job(s) `{unknown_jobs}` not found in auto jobs")
raise Exception(
f"Custom job(s) `{unknown_jobs}` not found in auto jobs"
)

return add_base_env(name_jobs(jobs, "try"), job_data["envs"]["try"])
elif isinstance(run_type, AutoRunType):
return add_base_env(name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"])
return add_base_env(
name_jobs(job_data["auto"], "auto"), job_data["envs"]["auto"]
)

return []

@ -161,7 +166,7 @@ def get_github_ctx() -> GitHubCtx:
event_name=event_name,
ref=os.environ["GITHUB_REF"],
repository=os.environ["GITHUB_REPOSITORY"],
commit_message=commit_message
commit_message=commit_message,
)

@ -19,6 +19,7 @@ $ python3 upload-build-metrics.py <path-to-CPU-usage-CSV>

`path-to-CPU-usage-CSV` is a path to a CSV generated by the `src/ci/cpu-usage-over-time.py` script.
"""

import argparse
import csv
import os
@ -31,7 +32,7 @@ from typing import List
def load_cpu_usage(path: Path) -> List[float]:
usage = []
with open(path) as f:
reader = csv.reader(f, delimiter=',')
reader = csv.reader(f, delimiter=",")
for row in reader:
# The log might contain incomplete rows or some Python exception
if len(row) == 2:
@ -50,25 +51,21 @@ def upload_datadog_measure(name: str, value: float):
print(f"Metric {name}: {value:.4f}")

datadog_cmd = "datadog-ci"
if os.getenv("GITHUB_ACTIONS") is not None and sys.platform.lower().startswith("win"):
if os.getenv("GITHUB_ACTIONS") is not None and sys.platform.lower().startswith(
"win"
):
# Due to weird interaction of MSYS2 and Python, we need to use an absolute path,
# and also specify the ".cmd" at the end. See https://github.com/rust-lang/rust/pull/125771.
datadog_cmd = "C:\\npm\\prefix\\datadog-ci.cmd"

subprocess.run([
datadog_cmd,
"measure",
"--level", "job",
"--measures", f"{name}:{value}"
],
check=False
subprocess.run(
[datadog_cmd, "measure", "--level", "job", "--measures", f"{name}:{value}"],
check=False,
)


if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="DataDog metric uploader"
)
parser = argparse.ArgumentParser(prog="DataDog metric uploader")
parser.add_argument("cpu-usage-history-csv")
args = parser.parse_args()

@ -13,6 +13,7 @@ i.e., within 0.5 ULP of the true value.
Adapted from Daniel Lemire's fast_float ``table_generation.py``,
available here: <https://github.com/fastfloat/fast_float/blob/main/script/table_generation.py>.
"""

from __future__ import print_function
from math import ceil, floor, log
from collections import deque
@ -34,6 +35,7 @@ STATIC_WARNING = """
// the final binary.
"""


def main():
min_exp = minimum_exponent(10)
max_exp = maximum_exponent(10)
@ -41,10 +43,10 @@ def main():

print(HEADER.strip())
print()
print('pub const SMALLEST_POWER_OF_FIVE: i32 = {};'.format(min_exp))
print('pub const LARGEST_POWER_OF_FIVE: i32 = {};'.format(max_exp))
print('pub const N_POWERS_OF_FIVE: usize = ', end='')
print('(LARGEST_POWER_OF_FIVE - SMALLEST_POWER_OF_FIVE + 1) as usize;')
print("pub const SMALLEST_POWER_OF_FIVE: i32 = {};".format(min_exp))
print("pub const LARGEST_POWER_OF_FIVE: i32 = {};".format(max_exp))
print("pub const N_POWERS_OF_FIVE: usize = ", end="")
print("(LARGEST_POWER_OF_FIVE - SMALLEST_POWER_OF_FIVE + 1) as usize;")
print()
print_proper_powers(min_exp, max_exp, bias)

@ -54,7 +56,7 @@ def minimum_exponent(base):


def maximum_exponent(base):
return floor(log(1.7976931348623157e+308, base))
return floor(log(1.7976931348623157e308, base))


def print_proper_powers(min_exp, max_exp, bias):
@ -64,46 +66,46 @@ def print_proper_powers(min_exp, max_exp, bias):
# 2^(2b)/(5^−q) with b=64 + int(math.ceil(log2(5^−q)))
powers = []
for q in range(min_exp, 0):
power5 = 5 ** -q
power5 = 5**-q
z = 0
while (1 << z) < power5:
z += 1
if q >= -27:
b = z + 127
c = 2 ** b // power5 + 1
c = 2**b // power5 + 1
powers.append((c, q))
else:
b = 2 * z + 2 * 64
c = 2 ** b // power5 + 1
c = 2**b // power5 + 1
# truncate
while c >= (1<<128):
while c >= (1 << 128):
c //= 2
powers.append((c, q))

# Add positive exponents
for q in range(0, max_exp + 1):
power5 = 5 ** q
power5 = 5**q
# move the most significant bit in position
while power5 < (1<<127):
while power5 < (1 << 127):
power5 *= 2
# *truncate*
while power5 >= (1<<128):
while power5 >= (1 << 128):
power5 //= 2
powers.append((power5, q))

# Print the powers.
print(STATIC_WARNING.strip())
print('#[rustfmt::skip]')
typ = '[(u64, u64); N_POWERS_OF_FIVE]'
print('pub static POWER_OF_FIVE_128: {} = ['.format(typ))
print("#[rustfmt::skip]")
typ = "[(u64, u64); N_POWERS_OF_FIVE]"
print("pub static POWER_OF_FIVE_128: {} = [".format(typ))
for c, exp in powers:
hi = '0x{:x}'.format(c // (1 << 64))
lo = '0x{:x}'.format(c % (1 << 64))
value = ' ({}, {}), '.format(hi, lo)
comment = '// {}^{}'.format(5, exp)
print(value.ljust(46, ' ') + comment)
print('];')
hi = "0x{:x}".format(c // (1 << 64))
lo = "0x{:x}".format(c % (1 << 64))
value = " ({}, {}), ".format(hi, lo)
comment = "// {}^{}".format(5, exp)
print(value.ljust(46, " ") + comment)
print("];")


if __name__ == '__main__':
if __name__ == "__main__":
main()

@ -1,6 +1,7 @@
# Add this folder to the python sys path; GDB Python-interpreter will now find modules in this path
import sys
from os import path

self_dir = path.dirname(path.realpath(__file__))
sys.path.append(self_dir)

@ -6,8 +6,11 @@ from gdb_providers import *
from rust_types import *


_gdb_version_matched = re.search('([0-9]+)\\.([0-9]+)', gdb.VERSION)
gdb_version = [int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else []
_gdb_version_matched = re.search("([0-9]+)\\.([0-9]+)", gdb.VERSION)
gdb_version = (
[int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else []
)


def register_printers(objfile):
objfile.pretty_printers.append(printer)

@ -21,7 +21,7 @@ def unwrap_unique_or_non_null(unique_or_nonnull):
# GDB 14 has a tag class that indicates that extension methods are ok
# to call. Use of this tag only requires that printers hide local
# attributes and methods by prefixing them with "_".
if hasattr(gdb, 'ValuePrinter'):
if hasattr(gdb, "ValuePrinter"):
printer_base = gdb.ValuePrinter
else:
printer_base = object
@ -98,7 +98,7 @@ class StdStrProvider(printer_base):


def _enumerate_array_elements(element_ptrs):
for (i, element_ptr) in enumerate(element_ptrs):
for i, element_ptr in enumerate(element_ptrs):
key = "[{}]".format(i)
element = element_ptr.dereference()

@ -173,7 +173,8 @@ class StdVecDequeProvider(printer_base):

def children(self):
return _enumerate_array_elements(
(self._data_ptr + ((self._head + index) % self._cap)) for index in xrange(self._size)
(self._data_ptr + ((self._head + index) % self._cap))
for index in xrange(self._size)
)

@staticmethod
@ -270,7 +271,9 @@ def children_of_btree_map(map):
# Yields each key/value pair in the node and in any child nodes.
def children_of_node(node_ptr, height):
def cast_to_internal(node):
internal_type_name = node.type.target().name.replace("LeafNode", "InternalNode", 1)
internal_type_name = node.type.target().name.replace(
"LeafNode", "InternalNode", 1
)
internal_type = gdb.lookup_type(internal_type_name)
return node.cast(internal_type.pointer())

@ -293,8 +296,16 @@ def children_of_btree_map(map):
# Avoid "Cannot perform pointer math on incomplete type" on zero-sized arrays.
key_type_size = keys.type.sizeof
val_type_size = vals.type.sizeof
key = keys[i]["value"]["value"] if key_type_size > 0 else gdb.parse_and_eval("()")
val = vals[i]["value"]["value"] if val_type_size > 0 else gdb.parse_and_eval("()")
key = (
keys[i]["value"]["value"]
if key_type_size > 0
else gdb.parse_and_eval("()")
)
val = (
vals[i]["value"]["value"]
if val_type_size > 0
else gdb.parse_and_eval("()")
)
yield key, val

if map["length"] > 0:
@ -352,7 +363,7 @@ class StdOldHashMapProvider(printer_base):
self._hashes = self._table["hashes"]
self._hash_uint_type = self._hashes.type
self._hash_uint_size = self._hashes.type.sizeof
self._modulo = 2 ** self._hash_uint_size
self._modulo = 2**self._hash_uint_size
self._data_ptr = self._hashes[ZERO_FIELD]["pointer"]

self._capacity_mask = int(self._table["capacity_mask"])
@ -382,8 +393,14 @@ class StdOldHashMapProvider(printer_base):

hashes = self._hash_uint_size * self._capacity
align = self._pair_type_size
len_rounded_up = (((((hashes + align) % self._modulo - 1) % self._modulo) & ~(
(align - 1) % self._modulo)) % self._modulo - hashes) % self._modulo
len_rounded_up = (
(
(((hashes + align) % self._modulo - 1) % self._modulo)
& ~((align - 1) % self._modulo)
)
% self._modulo
- hashes
) % self._modulo

pairs_offset = hashes + len_rounded_up
pairs_start = gdb.Value(start + pairs_offset).cast(self._pair_type.pointer())

@ -12,7 +12,8 @@ import os
import stat

TEST_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../test/ui/derives/'))
os.path.join(os.path.dirname(__file__), "../test/ui/derives/")
)

TEMPLATE = """\
// This file was auto-generated using 'src/etc/generate-deriving-span-tests.py'
@ -56,28 +57,33 @@ ENUM_TUPLE, ENUM_STRUCT, STRUCT_FIELDS, STRUCT_TUPLE = range(4)


def create_test_case(type, trait, super_traits, error_count):
string = [ENUM_STRING, ENUM_STRUCT_VARIANT_STRING, STRUCT_STRING, STRUCT_TUPLE_STRING][type]
all_traits = ','.join([trait] + super_traits)
super_traits = ','.join(super_traits)
error_deriving = '#[derive(%s)]' % super_traits if super_traits else ''
string = [
ENUM_STRING,
ENUM_STRUCT_VARIANT_STRING,
STRUCT_STRING,
STRUCT_TUPLE_STRING,
][type]
all_traits = ",".join([trait] + super_traits)
super_traits = ",".join(super_traits)
error_deriving = "#[derive(%s)]" % super_traits if super_traits else ""

errors = '\n'.join('//~%s ERROR' % ('^' * n) for n in range(error_count))
errors = "\n".join("//~%s ERROR" % ("^" * n) for n in range(error_count))
code = string.format(traits=all_traits, errors=errors)
return TEMPLATE.format(error_deriving=error_deriving, code=code)


def write_file(name, string):
test_file = os.path.join(TEST_DIR, 'derives-span-%s.rs' % name)
test_file = os.path.join(TEST_DIR, "derives-span-%s.rs" % name)

# set write permission if file exists, so it can be changed
if os.path.exists(test_file):
os.chmod(test_file, stat.S_IWUSR)

with open(test_file, 'w') as f:
with open(test_file, "w") as f:
f.write(string)

# mark file read-only
os.chmod(test_file, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
os.chmod(test_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)


ENUM = 1
@ -85,29 +91,31 @@ STRUCT = 2
ALL = STRUCT | ENUM

traits = {
'Default': (STRUCT, [], 1),
'FromPrimitive': (0, [], 0), # only works for C-like enums

'Decodable': (0, [], 0), # FIXME: quoting gives horrible spans
'Encodable': (0, [], 0), # FIXME: quoting gives horrible spans
"Default": (STRUCT, [], 1),
"FromPrimitive": (0, [], 0),  # only works for C-like enums
"Decodable": (0, [], 0),  # FIXME: quoting gives horrible spans
"Encodable": (0, [], 0),  # FIXME: quoting gives horrible spans
}

for (trait, supers, errs) in [('Clone', [], 1),
('PartialEq', [], 2),
('PartialOrd', ['PartialEq'], 1),
('Eq', ['PartialEq'], 1),
('Ord', ['Eq', 'PartialOrd', 'PartialEq'], 1),
('Debug', [], 1),
('Hash', [], 1)]:
for trait, supers, errs in [
("Clone", [], 1),
("PartialEq", [], 2),
("PartialOrd", ["PartialEq"], 1),
("Eq", ["PartialEq"], 1),
("Ord", ["Eq", "PartialOrd", "PartialEq"], 1),
("Debug", [], 1),
("Hash", [], 1),
]:
traits[trait] = (ALL, supers, errs)

for (trait, (types, super_traits, error_count)) in traits.items():
for trait, (types, super_traits, error_count) in traits.items():

def mk(ty, t=trait, st=super_traits, ec=error_count):
return create_test_case(ty, t, st, ec)

if types & ENUM:
write_file(trait + '-enum', mk(ENUM_TUPLE))
write_file(trait + '-enum-struct-variant', mk(ENUM_STRUCT))
write_file(trait + "-enum", mk(ENUM_TUPLE))
write_file(trait + "-enum-struct-variant", mk(ENUM_STRUCT))
if types & STRUCT:
write_file(trait + '-struct', mk(STRUCT_FIELDS))
write_file(trait + '-tuple-struct', mk(STRUCT_TUPLE))
write_file(trait + "-struct", mk(STRUCT_FIELDS))
write_file(trait + "-tuple-struct", mk(STRUCT_TUPLE))

@ -22,18 +22,16 @@ fn main() {
}
"""

test_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../test/ui/parser')
)
test_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../test/ui/parser"))

for kw in sys.argv[1:]:
test_file = os.path.join(test_dir, 'keyword-%s-as-identifier.rs' % kw)
test_file = os.path.join(test_dir, "keyword-%s-as-identifier.rs" % kw)

# set write permission if file exists, so it can be changed
if os.path.exists(test_file):
os.chmod(test_file, stat.S_IWUSR)

with open(test_file, 'wt') as f:
with open(test_file, "wt") as f:
f.write(template % (kw, kw, kw))

# mark file read-only

@ -127,6 +127,7 @@ import os.path
import re
import shlex
from collections import namedtuple

try:
from html.parser import HTMLParser
except ImportError:
@ -142,12 +143,28 @@ except ImportError:
from htmlentitydefs import name2codepoint

# "void elements" (no closing tag) from the HTML Standard section 12.1.2
VOID_ELEMENTS = {'area', 'base', 'br', 'col', 'embed', 'hr', 'img', 'input', 'keygen',
'link', 'menuitem', 'meta', 'param', 'source', 'track', 'wbr'}
VOID_ELEMENTS = {
"area",
"base",
"br",
"col",
"embed",
"hr",
"img",
"input",
"keygen",
"link",
"menuitem",
"meta",
"param",
"source",
"track",
"wbr",
}

# Python 2 -> 3 compatibility
try:
unichr # noqa: B018 FIXME: py2
unichr  # noqa: B018 FIXME: py2
except NameError:
unichr = chr

@ -158,18 +175,20 @@ channel = os.environ["DOC_RUST_LANG_ORG_CHANNEL"]
rust_test_path = None
bless = None


class CustomHTMLParser(HTMLParser):
"""simplified HTML parser.

this is possible because we are dealing with very regular HTML from
rustdoc; we only have to deal with i) void elements and ii) empty
attributes."""

def __init__(self, target=None):
HTMLParser.__init__(self)
self.__builder = target or ET.TreeBuilder()

def handle_starttag(self, tag, attrs):
attrs = {k: v or '' for k, v in attrs}
attrs = {k: v or "" for k, v in attrs}
self.__builder.start(tag, attrs)
if tag in VOID_ELEMENTS:
self.__builder.end(tag)
@ -178,7 +197,7 @@ class CustomHTMLParser(HTMLParser):
self.__builder.end(tag)

def handle_startendtag(self, tag, attrs):
attrs = {k: v or '' for k, v in attrs}
attrs = {k: v or "" for k, v in attrs}
self.__builder.start(tag, attrs)
self.__builder.end(tag)

@ -189,7 +208,7 @@ class CustomHTMLParser(HTMLParser):
self.__builder.data(unichr(name2codepoint[name]))

def handle_charref(self, name):
code = int(name[1:], 16) if name.startswith(('x', 'X')) else int(name, 10)
code = int(name[1:], 16) if name.startswith(("x", "X")) else int(name, 10)
self.__builder.data(unichr(code))

def close(self):
@ -197,7 +216,7 @@ class CustomHTMLParser(HTMLParser):
return self.__builder.close()


Command = namedtuple('Command', 'negated cmd args lineno context')
Command = namedtuple("Command", "negated cmd args lineno context")


class FailedCheck(Exception):
@ -216,17 +235,17 @@ def concat_multi_lines(f):
concatenated."""
lastline = None # set to the last line when the last line has a backslash
firstlineno = None
catenated = ''
catenated = ""
for lineno, line in enumerate(f):
line = line.rstrip('\r\n')
line = line.rstrip("\r\n")

# strip the common prefix from the current line if needed
if lastline is not None:
common_prefix = os.path.commonprefix([line, lastline])
line = line[len(common_prefix):].lstrip()
line = line[len(common_prefix) :].lstrip()

firstlineno = firstlineno or lineno
if line.endswith('\\'):
if line.endswith("\\"):
if lastline is None:
lastline = line[:-1]
catenated += line[:-1]
@ -234,10 +253,10 @@ def concat_multi_lines(f):
yield firstlineno, catenated + line
lastline = None
firstlineno = None
catenated = ''
catenated = ""

if lastline is not None:
print_err(lineno, line, 'Trailing backslash at the end of the file')
print_err(lineno, line, "Trailing backslash at the end of the file")


def get_known_directive_names():
@ -253,12 +272,12 @@ def get_known_directive_names():
"tools/compiletest/src/directive-list.rs",
),
"r",
encoding="utf8"
encoding="utf8",
) as fd:
content = fd.read()
return [
line.strip().replace('",', '').replace('"', '')
for line in content.split('\n')
line.strip().replace('",', "").replace('"', "")
for line in content.split("\n")
if filter_line(line)
]

@ -269,35 +288,42 @@ def get_known_directive_names():
# See <https://github.com/rust-lang/rust/issues/125813#issuecomment-2141953780>.
KNOWN_DIRECTIVE_NAMES = get_known_directive_names()

LINE_PATTERN = re.compile(r'''
LINE_PATTERN = re.compile(
r"""
//@\s+
(?P<negated>!?)(?P<cmd>[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*)
(?P<args>.*)$
''', re.X | re.UNICODE)
""",
re.X | re.UNICODE,
)


def get_commands(template):
with io.open(template, encoding='utf-8') as f:
with io.open(template, encoding="utf-8") as f:
for lineno, line in concat_multi_lines(f):
m = LINE_PATTERN.search(line)
if not m:
continue

cmd = m.group('cmd')
negated = (m.group('negated') == '!')
cmd = m.group("cmd")
negated = m.group("negated") == "!"
if not negated and cmd in KNOWN_DIRECTIVE_NAMES:
continue
args = m.group('args')
args = m.group("args")
if args and not args[:1].isspace():
print_err(lineno, line, 'Invalid template syntax')
print_err(lineno, line, "Invalid template syntax")
continue
try:
args = shlex.split(args)
except UnicodeEncodeError:
args = [arg.decode('utf-8') for arg in shlex.split(args.encode('utf-8'))]
args = [
arg.decode("utf-8") for arg in shlex.split(args.encode("utf-8"))
]
except Exception as exc:
raise Exception("line {}: {}".format(lineno + 1, exc)) from None
yield Command(negated=negated, cmd=cmd, args=args, lineno=lineno+1, context=line)
yield Command(
negated=negated, cmd=cmd, args=args, lineno=lineno + 1, context=line
)


def _flatten(node, acc):
@ -312,22 +338,24 @@ def _flatten(node, acc):
def flatten(node):
acc = []
_flatten(node, acc)
return ''.join(acc)
return "".join(acc)


def make_xml(text):
xml = ET.XML('<xml>%s</xml>' % text)
xml = ET.XML("<xml>%s</xml>" % text)
return xml


def normalize_xpath(path):
path = path.replace("{{channel}}", channel)
if path.startswith('//'):
return '.' + path # avoid warnings
elif path.startswith('.//'):
if path.startswith("//"):
return "." + path  # avoid warnings
elif path.startswith(".//"):
return path
else:
raise InvalidCheck('Non-absolute XPath is not supported due to implementation issues')
raise InvalidCheck(
"Non-absolute XPath is not supported due to implementation issues"
)


class CachedFiles(object):
@ -338,12 +366,12 @@ class CachedFiles(object):
self.last_path = None

def resolve_path(self, path):
if path != '-':
if path != "-":
path = os.path.normpath(path)
self.last_path = path
return path
elif self.last_path is None:
raise InvalidCheck('Tried to use the previous path in the first command')
raise InvalidCheck("Tried to use the previous path in the first command")
else:
return self.last_path

@ -356,10 +384,10 @@ class CachedFiles(object):
return self.files[path]

abspath = self.get_absolute_path(path)
if not(os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck('File does not exist {!r}'.format(path))
if not (os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck("File does not exist {!r}".format(path))

with io.open(abspath, encoding='utf-8') as f:
with io.open(abspath, encoding="utf-8") as f:
data = f.read()
self.files[path] = data
return data
@ -370,15 +398,15 @@ class CachedFiles(object):
return self.trees[path]

abspath = self.get_absolute_path(path)
if not(os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck('File does not exist {!r}'.format(path))
if not (os.path.exists(abspath) and os.path.isfile(abspath)):
raise FailedCheck("File does not exist {!r}".format(path))

with io.open(abspath, encoding='utf-8') as f:
with io.open(abspath, encoding="utf-8") as f:
try:
tree = ET.fromstringlist(f.readlines(), CustomHTMLParser())
except Exception as e:
raise RuntimeError( # noqa: B904 FIXME: py2
'Cannot parse an HTML file {!r}: {}'.format(path, e)
raise RuntimeError(  # noqa: B904 FIXME: py2
"Cannot parse an HTML file {!r}: {}".format(path, e)
)
self.trees[path] = tree
return self.trees[path]
@ -386,8 +414,8 @@ class CachedFiles(object):
def get_dir(self, path):
path = self.resolve_path(path)
abspath = self.get_absolute_path(path)
if not(os.path.exists(abspath) and os.path.isdir(abspath)):
raise FailedCheck('Directory does not exist {!r}'.format(path))
if not (os.path.exists(abspath) and os.path.isdir(abspath)):
raise FailedCheck("Directory does not exist {!r}".format(path))


def check_string(data, pat, regexp):
@ -397,8 +425,8 @@ def check_string(data, pat, regexp):
elif regexp:
return re.search(pat, data, flags=re.UNICODE) is not None
else:
data = ' '.join(data.split())
pat = ' '.join(pat.split())
data = " ".join(data.split())
pat = " ".join(pat.split())
return pat in data


@ -444,19 +472,19 @@ def get_tree_count(tree, path):


def check_snapshot(snapshot_name, actual_tree, normalize_to_text):
assert rust_test_path.endswith('.rs')
snapshot_path = '{}.{}.{}'.format(rust_test_path[:-3], snapshot_name, 'html')
assert rust_test_path.endswith(".rs")
snapshot_path = "{}.{}.{}".format(rust_test_path[:-3], snapshot_name, "html")
try:
with open(snapshot_path, 'r') as snapshot_file:
with open(snapshot_path, "r") as snapshot_file:
expected_str = snapshot_file.read().replace("{{channel}}", channel)
except FileNotFoundError:
if bless:
expected_str = None
else:
raise FailedCheck('No saved snapshot value') # noqa: B904 FIXME: py2
raise FailedCheck("No saved snapshot value")  # noqa: B904 FIXME: py2

if not normalize_to_text:
actual_str = ET.tostring(actual_tree).decode('utf-8')
actual_str = ET.tostring(actual_tree).decode("utf-8")
else:
actual_str = flatten(actual_tree)

@ -464,64 +492,66 @@ def check_snapshot(snapshot_name, actual_tree, normalize_to_text):
# 1. Is --bless
# 2. Are actual and expected tree different
# 3. Are actual and expected text different
if not expected_str \
or (not normalize_to_text and \
not compare_tree(make_xml(actual_str), make_xml(expected_str), stderr)) \
or (normalize_to_text and actual_str != expected_str):

if (
not expected_str
or (
not normalize_to_text
and not compare_tree(make_xml(actual_str), make_xml(expected_str), stderr)
)
or (normalize_to_text and actual_str != expected_str)
):
if bless:
with open(snapshot_path, 'w') as snapshot_file:
with open(snapshot_path, "w") as snapshot_file:
actual_str = actual_str.replace(channel, "{{channel}}")
snapshot_file.write(actual_str)
else:
print('--- expected ---\n')
print("--- expected ---\n")
print(expected_str)
print('\n\n--- actual ---\n')
print("\n\n--- actual ---\n")
print(actual_str)
print()
raise FailedCheck('Actual snapshot value is different than expected')
raise FailedCheck("Actual snapshot value is different than expected")


# Adapted from https://github.com/formencode/formencode/blob/3a1ba9de2fdd494dd945510a4568a3afeddb0b2e/formencode/doctest_xml_compare.py#L72-L120
def compare_tree(x1, x2, reporter=None):
if x1.tag != x2.tag:
if reporter:
reporter('Tags do not match: %s and %s' % (x1.tag, x2.tag))
reporter("Tags do not match: %s and %s" % (x1.tag, x2.tag))
return False
for name, value in x1.attrib.items():
if x2.attrib.get(name) != value:
if reporter:
reporter('Attributes do not match: %s=%r, %s=%r'
% (name, value, name, x2.attrib.get(name)))
reporter(
"Attributes do not match: %s=%r, %s=%r"
% (name, value, name, x2.attrib.get(name))
)
return False
for name in x2.attrib:
if name not in x1.attrib:
if reporter:
reporter('x2 has an attribute x1 is missing: %s'
% name)
reporter("x2 has an attribute x1 is missing: %s" % name)
return False
if not text_compare(x1.text, x2.text):
if reporter:
reporter('text: %r != %r' % (x1.text, x2.text))
reporter("text: %r != %r" % (x1.text, x2.text))
return False
if not text_compare(x1.tail, x2.tail):
if reporter:
reporter('tail: %r != %r' % (x1.tail, x2.tail))
reporter("tail: %r != %r" % (x1.tail, x2.tail))
return False
cl1 = list(x1)
cl2 = list(x2)
if len(cl1) != len(cl2):
if reporter:
reporter('children length differs, %i != %i'
% (len(cl1), len(cl2)))
reporter("children length differs, %i != %i" % (len(cl1), len(cl2)))
return False
i = 0
for c1, c2 in zip(cl1, cl2):
i += 1
if not compare_tree(c1, c2, reporter=reporter):
if reporter:
reporter('children %i do not match: %s'
% (i, c1.tag))
reporter("children %i do not match: %s" % (i, c1.tag))
return False
return True

@ -529,14 +559,14 @@ def compare_tree(x1, x2, reporter=None):
def text_compare(t1, t2):
if not t1 and not t2:
return True
if t1 == '*' or t2 == '*':
if t1 == "*" or t2 == "*":
return True
return (t1 or '').strip() == (t2 or '').strip()
return (t1 or "").strip() == (t2 or "").strip()


def stderr(*args):
if sys.version_info.major < 3:
file = codecs.getwriter('utf-8')(sys.stderr)
file = codecs.getwriter("utf-8")(sys.stderr)
else:
file = sys.stderr

@ -556,21 +586,25 @@ def print_err(lineno, context, err, message=None):

def get_nb_matching_elements(cache, c, regexp, stop_at_first):
tree = cache.get_tree(c.args[0])
pat, sep, attr = c.args[1].partition('/@')
pat, sep, attr = c.args[1].partition("/@")
if sep: # attribute
tree = cache.get_tree(c.args[0])
return check_tree_attr(tree, pat, attr, c.args[2], False)
else: # normalized text
pat = c.args[1]
if pat.endswith('/text()'):
if pat.endswith("/text()"):
pat = pat[:-7]
return check_tree_text(cache.get_tree(c.args[0]), pat, c.args[2], regexp, stop_at_first)
return check_tree_text(
cache.get_tree(c.args[0]), pat, c.args[2], regexp, stop_at_first
)


def check_files_in_folder(c, cache, folder, files):
files = files.strip()
if not files.startswith('[') or not files.endswith(']'):
raise InvalidCheck("Expected list as second argument of {} (ie '[]')".format(c.cmd))
if not files.startswith("[") or not files.endswith("]"):
raise InvalidCheck(
"Expected list as second argument of {} (ie '[]')".format(c.cmd)
)

folder = cache.get_absolute_path(folder)

@ -592,12 +626,18 @@ def check_files_in_folder(c, cache, folder, files):

error = 0
if len(files_set) != 0:
print_err(c.lineno, c.context, "Entries not found in folder `{}`: `{}`".format(
folder, files_set))
print_err(
c.lineno,
c.context,
"Entries not found in folder `{}`: `{}`".format(folder, files_set),
)
error += 1
if len(folder_set) != 0:
print_err(c.lineno, c.context, "Extra entries in folder `{}`: `{}`".format(
folder, folder_set))
print_err(
c.lineno,
c.context,
"Extra entries in folder `{}`: `{}`".format(folder, folder_set),
)
error += 1
return error == 0

@ -608,11 +648,11 @@ ERR_COUNT = 0
def check_command(c, cache):
try:
cerr = ""
if c.cmd in ['has', 'hasraw', 'matches', 'matchesraw']: # string test
regexp = c.cmd.startswith('matches')
if c.cmd in ["has", "hasraw", "matches", "matchesraw"]:  # string test
regexp = c.cmd.startswith("matches")

# has <path> = file existence
if len(c.args) == 1 and not regexp and 'raw' not in c.cmd:
if len(c.args) == 1 and not regexp and "raw" not in c.cmd:
try:
cache.get_file(c.args[0])
ret = True
@ -620,24 +660,24 @@ def check_command(c, cache):
cerr = str(err)
ret = False
# hasraw/matchesraw <path> <pat> = string test
elif len(c.args) == 2 and 'raw' in c.cmd:
elif len(c.args) == 2 and "raw" in c.cmd:
cerr = "`PATTERN` did not match"
ret = check_string(cache.get_file(c.args[0]), c.args[1], regexp)
# has/matches <path> <pat> <match> = XML tree test
elif len(c.args) == 3 and 'raw' not in c.cmd:
elif len(c.args) == 3 and "raw" not in c.cmd:
cerr = "`XPATH PATTERN` did not match"
ret = get_nb_matching_elements(cache, c, regexp, True) != 0
else:
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))

elif c.cmd == 'files': # check files in given folder
if len(c.args) != 2: # files <folder path> <file list>
elif c.cmd == "files":  # check files in given folder
if len(c.args) != 2:  # files <folder path> <file list>
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))
elif c.negated:
raise InvalidCheck("{} doesn't support negative check".format(c.cmd))
ret = check_files_in_folder(c, cache, c.args[0], c.args[1])

elif c.cmd == 'count': # count test
elif c.cmd == "count":  # count test
if len(c.args) == 3: # count <path> <pat> <count> = count test
expected = int(c.args[2])
found = get_tree_count(cache.get_tree(c.args[0]), c.args[1])
@ -649,15 +689,15 @@ def check_command(c, cache):
cerr = "Expected {} occurrences but found {}".format(expected, found)
ret = found == expected
else:
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))

elif c.cmd == 'snapshot': # snapshot test
elif c.cmd == "snapshot":  # snapshot test
if len(c.args) == 3: # snapshot <snapshot-name> <html-path> <xpath>
[snapshot_name, html_path, pattern] = c.args
tree = cache.get_tree(html_path)
xpath = normalize_xpath(pattern)
normalize_to_text = False
if xpath.endswith('/text()'):
if xpath.endswith("/text()"):
xpath = xpath[:-7]
normalize_to_text = True

@ -671,13 +711,15 @@ def check_command(c, cache):
cerr = str(err)
ret = False
elif len(subtrees) == 0:
raise FailedCheck('XPATH did not match')
raise FailedCheck("XPATH did not match")
else:
raise FailedCheck('Expected 1 match, but found {}'.format(len(subtrees)))
raise FailedCheck(
"Expected 1 match, but found {}".format(len(subtrees))
)
else:
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))

elif c.cmd == 'has-dir': # has-dir test
elif c.cmd == "has-dir":  # has-dir test
if len(c.args) == 1: # has-dir <path> = has-dir test
try:
cache.get_dir(c.args[0])
@ -686,22 +728,22 @@ def check_command(c, cache):
cerr = str(err)
ret = False
else:
raise InvalidCheck('Invalid number of {} arguments'.format(c.cmd))
raise InvalidCheck("Invalid number of {} arguments".format(c.cmd))

elif c.cmd == 'valid-html':
raise InvalidCheck('Unimplemented valid-html')
elif c.cmd == "valid-html":
raise InvalidCheck("Unimplemented valid-html")

elif c.cmd == 'valid-links':
raise InvalidCheck('Unimplemented valid-links')
elif c.cmd == "valid-links":
raise InvalidCheck("Unimplemented valid-links")

else:
raise InvalidCheck('Unrecognized {}'.format(c.cmd))
raise InvalidCheck("Unrecognized {}".format(c.cmd))

if ret == c.negated:
raise FailedCheck(cerr)

except FailedCheck as err:
message = '{}{} check failed'.format('!' if c.negated else '', c.cmd)
message = "{}{} check failed".format("!" if c.negated else "", c.cmd)
print_err(c.lineno, c.context, str(err), message)
except InvalidCheck as err:
print_err(c.lineno, c.context, str(err))
@ -713,18 +755,18 @@ def check(target, commands):
check_command(c, cache)


if __name__ == '__main__':
if __name__ == "__main__":
if len(sys.argv) not in [3, 4]:
stderr('Usage: {} <doc dir> <template> [--bless]'.format(sys.argv[0]))
stderr("Usage: {} <doc dir> <template> [--bless]".format(sys.argv[0]))
raise SystemExit(1)

rust_test_path = sys.argv[2]
if len(sys.argv) > 3 and sys.argv[3] == '--bless':
if len(sys.argv) > 3 and sys.argv[3] == "--bless":
bless = True
else:
# We only support `--bless` at the end of the arguments.
# This assert is to prevent silent failures.
assert '--bless' not in sys.argv
assert "--bless" not in sys.argv
bless = False
check(sys.argv[1], get_commands(rust_test_path))
if ERR_COUNT:

@ -45,7 +45,7 @@ def normalize_whitespace(s):

def breakpoint_callback(frame, bp_loc, dict):
"""This callback is registered with every breakpoint and makes sure that the
frame containing the breakpoint location is selected """
frame containing the breakpoint location is selected"""

# HACK(eddyb) print a newline to avoid continuing an unfinished line.
print("")
@ -79,7 +79,7 @@ def execute_command(command_interpreter, command):

if res.Succeeded():
if res.HasResult():
print(normalize_whitespace(res.GetOutput() or ''), end='\n')
print(normalize_whitespace(res.GetOutput() or ""), end="\n")

# If the command introduced any breakpoints, make sure to register
# them with the breakpoint
@ -89,20 +89,32 @@ def execute_command(command_interpreter, command):
breakpoint_id = new_breakpoints.pop()

if breakpoint_id in registered_breakpoints:
print_debug("breakpoint with id %s is already registered. Ignoring." %
str(breakpoint_id))
print_debug(
"breakpoint with id %s is already registered. Ignoring."
% str(breakpoint_id)
)
else:
print_debug("registering breakpoint callback, id = " + str(breakpoint_id))
callback_command = ("breakpoint command add -F breakpoint_callback " +
str(breakpoint_id))
print_debug(
"registering breakpoint callback, id = " + str(breakpoint_id)
)
callback_command = (
"breakpoint command add -F breakpoint_callback "
+ str(breakpoint_id)
)
command_interpreter.HandleCommand(callback_command, res)
if res.Succeeded():
print_debug("successfully registered breakpoint callback, id = " +
str(breakpoint_id))
print_debug(
"successfully registered breakpoint callback, id = "
+ str(breakpoint_id)
)
registered_breakpoints.add(breakpoint_id)
else:
print("Error while trying to register breakpoint callback, id = " +
str(breakpoint_id) + ", message = " + str(res.GetError()))
print(
"Error while trying to register breakpoint callback, id = "
+ str(breakpoint_id)
+ ", message = "
+ str(res.GetError())
)
else:
print(res.GetError())

@ -117,14 +129,16 @@ def start_breakpoint_listener(target):
try:
while True:
if listener.WaitForEvent(120, event):
if lldb.SBBreakpoint.EventIsBreakpointEvent(event) and \
lldb.SBBreakpoint.GetBreakpointEventTypeFromEvent(event) == \
lldb.eBreakpointEventTypeAdded:
if (
lldb.SBBreakpoint.EventIsBreakpointEvent(event)
and lldb.SBBreakpoint.GetBreakpointEventTypeFromEvent(event)
== lldb.eBreakpointEventTypeAdded
):
global new_breakpoints
breakpoint = lldb.SBBreakpoint.GetBreakpointFromEvent(event)
print_debug("breakpoint added, id = " + str(breakpoint.id))
new_breakpoints.append(breakpoint.id)
except BaseException: # explicitly catch ctrl+c/sysexit
except BaseException:  # explicitly catch ctrl+c/sysexit
print_debug("breakpoint listener shutting down")

# Start the listener and let it run as a daemon
@ -133,7 +147,9 @@ def start_breakpoint_listener(target):
listener_thread.start()

# Register the listener with the target
target.GetBroadcaster().AddListener(listener, lldb.SBTarget.eBroadcastBitBreakpointChanged)
target.GetBroadcaster().AddListener(
listener, lldb.SBTarget.eBroadcastBitBreakpointChanged
)


def start_watchdog():
@ -159,6 +175,7 @@ def start_watchdog():
watchdog_thread.daemon = True
watchdog_thread.start()


####################################################################################################
# ~main
####################################################################################################
@ -193,8 +210,14 @@ target_error = lldb.SBError()
target = debugger.CreateTarget(target_path, None, None, True, target_error)

if not target:
print("Could not create debugging target '" + target_path + "': " +
str(target_error) + ". Aborting.", file=sys.stderr)
print(
"Could not create debugging target '"
+ target_path
+ "': "
+ str(target_error)
+ ". Aborting.",
file=sys.stderr,
)
sys.exit(1)


@ -204,15 +227,19 @@ start_breakpoint_listener(target)
command_interpreter = debugger.GetCommandInterpreter()

try:
script_file = open(script_path, 'r')
script_file = open(script_path, "r")

for line in script_file:
command = line.strip()
if command == "run" or command == "r" or re.match("^process\s+launch.*", command):
if (
command == "run"
or command == "r"
or re.match("^process\s+launch.*", command)
):
# Before starting to run the program, let the thread sleep a bit, so all
# breakpoint added events can be processed
time.sleep(0.5)
if command != '':
if command != "":
execute_command(command_interpreter, command)

except IOError as e:

@ -1,7 +1,12 @@
|
||||
import sys
|
||||
|
||||
from lldb import SBData, SBError, eBasicTypeLong, eBasicTypeUnsignedLong, \
|
||||
eBasicTypeUnsignedChar
|
||||
from lldb import (
|
||||
SBData,
|
||||
SBError,
|
||||
eBasicTypeLong,
|
||||
eBasicTypeUnsignedLong,
|
||||
eBasicTypeUnsignedChar,
|
||||
)
|
||||
|
||||
# from lldb.formatters import Logger
|
||||
|
||||
@ -50,13 +55,17 @@ class ValueBuilder:
|
||||
def from_int(self, name, value):
|
||||
# type: (str, int) -> SBValue
|
||||
type = self.valobj.GetType().GetBasicType(eBasicTypeLong)
|
||||
data = SBData.CreateDataFromSInt64Array(self.endianness, self.pointer_size, [value])
|
||||
data = SBData.CreateDataFromSInt64Array(
|
||||
self.endianness, self.pointer_size, [value]
|
||||
)
|
||||
return self.valobj.CreateValueFromData(name, data, type)
|
||||
|
||||
def from_uint(self, name, value):
|
||||
# type: (str, int) -> SBValue
|
||||
type = self.valobj.GetType().GetBasicType(eBasicTypeUnsignedLong)
|
||||
data = SBData.CreateDataFromUInt64Array(self.endianness, self.pointer_size, [value])
|
||||
data = SBData.CreateDataFromUInt64Array(
|
||||
self.endianness, self.pointer_size, [value]
|
||||
)
|
||||
return self.valobj.CreateValueFromData(name, data, type)
|
||||
|
||||
|
||||
@ -127,13 +136,17 @@ class EmptySyntheticProvider:
|
||||
|
||||
def SizeSummaryProvider(valobj, dict):
|
||||
# type: (SBValue, dict) -> str
|
||||
return 'size=' + str(valobj.GetNumChildren())
|
||||
return "size=" + str(valobj.GetNumChildren())
|
||||
|
||||
|
||||
def vec_to_string(vec):
|
||||
length = vec.GetNumChildren()
|
||||
chars = [vec.GetChildAtIndex(i).GetValueAsUnsigned() for i in range(length)]
|
||||
return bytes(chars).decode(errors='replace') if PY3 else "".join(chr(char) for char in chars)
|
||||
return (
|
||||
bytes(chars).decode(errors="replace")
|
||||
if PY3
|
||||
else "".join(chr(char) for char in chars)
|
||||
)
|
||||
|
||||
|
||||
def StdStringSummaryProvider(valobj, dict):
|
||||
@ -172,7 +185,7 @@ def StdStrSummaryProvider(valobj, dict):
|
||||
error = SBError()
|
||||
process = data_ptr.GetProcess()
|
||||
data = process.ReadMemory(start, length, error)
|
||||
data = data.decode(encoding='UTF-8') if PY3 else data
|
||||
data = data.decode(encoding="UTF-8") if PY3 else data
|
||||
return '"%s"' % data
|
||||
|
||||
|
||||
@ -199,9 +212,9 @@ def StdPathSummaryProvider(valobj, dict):
|
||||
data = process.ReadMemory(start, length, error)
|
||||
if PY3:
|
||||
try:
|
||||
data = data.decode(encoding='UTF-8')
|
||||
data = data.decode(encoding="UTF-8")
|
||||
except UnicodeDecodeError:
|
||||
return '%r' % data
|
||||
return "%r" % data
|
||||
return '"%s"' % data
|
||||
|
||||
|
||||
@ -250,8 +263,10 @@ class StructSyntheticProvider:
|
||||
# type: () -> bool
|
||||
return True
|
||||
|
||||
|
||||
class ClangEncodedEnumProvider:
|
||||
"""Pretty-printer for 'clang-encoded' enums support implemented in LLDB"""
|
||||
|
||||
DISCRIMINANT_MEMBER_NAME = "$discr$"
|
||||
VALUE_MEMBER_NAME = "value"
|
||||
|
||||
@ -260,7 +275,7 @@ class ClangEncodedEnumProvider:
|
||||
self.update()
|
||||
|
||||
def has_children(self):
|
||||
return True
|
||||
return True
|
||||
|
||||
def num_children(self):
|
||||
if self.is_default:
|
||||
@ -276,25 +291,32 @@ class ClangEncodedEnumProvider:
|
||||
|
||||
def get_child_at_index(self, index):
|
||||
if index == 0:
|
||||
return self.variant.GetChildMemberWithName(ClangEncodedEnumProvider.VALUE_MEMBER_NAME)
|
||||
return self.variant.GetChildMemberWithName(
|
||||
ClangEncodedEnumProvider.VALUE_MEMBER_NAME
|
||||
)
|
||||
if index == 1:
|
||||
return self.variant.GetChildMemberWithName(
|
||||
ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME)
|
||||
|
||||
ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME
|
||||
)
|
||||
|
||||
def update(self):
|
||||
all_variants = self.valobj.GetChildAtIndex(0)
|
||||
index = self._getCurrentVariantIndex(all_variants)
|
||||
self.variant = all_variants.GetChildAtIndex(index)
|
||||
self.is_default = self.variant.GetIndexOfChildWithName(
|
||||
ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME) == -1
|
||||
self.is_default = (
|
||||
self.variant.GetIndexOfChildWithName(
|
||||
ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME
|
||||
)
|
||||
== -1
|
||||
)
|
||||
|
||||
def _getCurrentVariantIndex(self, all_variants):
|
||||
default_index = 0
|
||||
for i in range(all_variants.GetNumChildren()):
|
||||
variant = all_variants.GetChildAtIndex(i)
|
||||
discr = variant.GetChildMemberWithName(
|
||||
ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME)
|
||||
ClangEncodedEnumProvider.DISCRIMINANT_MEMBER_NAME
|
||||
)
|
||||
if discr.IsValid():
|
||||
discr_unsigned_value = discr.GetValueAsUnsigned()
|
||||
if variant.GetName() == f"$variant${discr_unsigned_value}":
|
||||
@ -303,6 +325,7 @@ class ClangEncodedEnumProvider:
|
||||
default_index = i
|
||||
return default_index
|
||||
|
||||
|
||||
class TupleSyntheticProvider:
|
||||
"""Pretty-printer for tuples and tuple enum variants"""
|
||||
|
||||
@ -336,7 +359,9 @@ class TupleSyntheticProvider:
|
||||
else:
|
||||
field = self.type.GetFieldAtIndex(index)
|
||||
element = self.valobj.GetChildMemberWithName(field.name)
|
||||
return self.valobj.CreateValueFromData(str(index), element.GetData(), element.GetType())
|
||||
return self.valobj.CreateValueFromData(
|
||||
str(index), element.GetData(), element.GetType()
|
||||
)
|
||||
|
||||
def update(self):
|
||||
# type: () -> None
|
||||
@ -373,7 +398,7 @@ class StdVecSyntheticProvider:
|
||||
|
||||
def get_child_index(self, name):
|
||||
# type: (str) -> int
|
||||
index = name.lstrip('[').rstrip(']')
|
||||
index = name.lstrip("[").rstrip("]")
|
||||
if index.isdigit():
|
||||
return int(index)
|
||||
else:
|
||||
@ -383,15 +408,21 @@ class StdVecSyntheticProvider:
|
||||
# type: (int) -> SBValue
|
||||
start = self.data_ptr.GetValueAsUnsigned()
|
||||
address = start + index * self.element_type_size
|
||||
element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
|
||||
element = self.data_ptr.CreateValueFromAddress(
|
||||
"[%s]" % index, address, self.element_type
|
||||
)
|
||||
return element
|
||||
|
||||
def update(self):
|
||||
# type: () -> None
|
||||
self.length = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned()
|
||||
self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner")
|
||||
self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName(
|
||||
"inner"
|
||||
)
|
||||
|
||||
self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr"))
|
||||
self.data_ptr = unwrap_unique_or_non_null(
|
||||
self.buf.GetChildMemberWithName("ptr")
|
||||
)
|
||||
|
||||
self.element_type = self.valobj.GetType().GetTemplateArgumentType(0)
|
||||
self.element_type_size = self.element_type.GetByteSize()
|
||||
@ -412,7 +443,7 @@ class StdSliceSyntheticProvider:
|
||||
|
||||
def get_child_index(self, name):
|
||||
# type: (str) -> int
|
||||
index = name.lstrip('[').rstrip(']')
|
||||
index = name.lstrip("[").rstrip("]")
|
||||
if index.isdigit():
|
||||
return int(index)
|
||||
else:
|
||||
@ -422,7 +453,9 @@ class StdSliceSyntheticProvider:
|
||||
# type: (int) -> SBValue
|
||||
start = self.data_ptr.GetValueAsUnsigned()
|
||||
address = start + index * self.element_type_size
|
||||
element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
|
||||
element = self.data_ptr.CreateValueFromAddress(
|
||||
"[%s]" % index, address, self.element_type
|
||||
)
|
||||
return element
|
||||
|
||||
def update(self):
|
||||
@ -457,7 +490,7 @@ class StdVecDequeSyntheticProvider:
|
||||
|
||||
def get_child_index(self, name):
|
||||
# type: (str) -> int
|
||||
index = name.lstrip('[').rstrip(']')
|
||||
index = name.lstrip("[").rstrip("]")
|
||||
if index.isdigit() and int(index) < self.size:
|
||||
return int(index)
|
||||
else:
|
||||
@ -467,20 +500,26 @@ class StdVecDequeSyntheticProvider:
|
||||
# type: (int) -> SBValue
|
||||
start = self.data_ptr.GetValueAsUnsigned()
|
||||
address = start + ((index + self.head) % self.cap) * self.element_type_size
|
||||
element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
|
||||
element = self.data_ptr.CreateValueFromAddress(
|
||||
"[%s]" % index, address, self.element_type
|
||||
)
|
||||
return element
|
||||
|
||||
def update(self):
|
||||
# type: () -> None
|
||||
self.head = self.valobj.GetChildMemberWithName("head").GetValueAsUnsigned()
|
||||
self.size = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned()
|
||||
self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner")
|
||||
self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName(
|
||||
"inner"
|
||||
)
|
||||
cap = self.buf.GetChildMemberWithName("cap")
|
||||
if cap.GetType().num_fields == 1:
|
||||
cap = cap.GetChildAtIndex(0)
|
||||
self.cap = cap.GetValueAsUnsigned()
|
||||
|
||||
self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr"))
|
||||
self.data_ptr = unwrap_unique_or_non_null(
|
||||
self.buf.GetChildMemberWithName("ptr")
|
||||
)
|
||||
|
||||
self.element_type = self.valobj.GetType().GetTemplateArgumentType(0)
|
||||
self.element_type_size = self.element_type.GetByteSize()
|
||||
@ -510,7 +549,7 @@ class StdOldHashMapSyntheticProvider:
|
||||
|
||||
def get_child_index(self, name):
|
||||
# type: (str) -> int
|
||||
index = name.lstrip('[').rstrip(']')
|
||||
index = name.lstrip("[").rstrip("]")
|
||||
if index.isdigit():
|
||||
return int(index)
|
||||
else:
|
||||
@ -525,8 +564,14 @@ class StdOldHashMapSyntheticProvider:
|
||||
hashes = self.hash_uint_size * self.capacity
|
||||
align = self.pair_type_size
|
||||
# See `libcore/alloc.rs:padding_needed_for`
|
||||
len_rounded_up = (((((hashes + align) % self.modulo - 1) % self.modulo) & ~(
|
||||
(align - 1) % self.modulo)) % self.modulo - hashes) % self.modulo
|
||||
len_rounded_up = (
|
||||
(
|
||||
(((hashes + align) % self.modulo - 1) % self.modulo)
|
||||
& ~((align - 1) % self.modulo)
|
||||
)
|
||||
% self.modulo
|
||||
- hashes
|
||||
) % self.modulo
|
||||
# len_rounded_up = ((hashes + align - 1) & ~(align - 1)) - hashes
|
||||
|
||||
pairs_offset = hashes + len_rounded_up
@ -535,12 +580,16 @@ class StdOldHashMapSyntheticProvider:
table_index = self.valid_indices[index]
idx = table_index & self.capacity_mask
address = pairs_start + idx * self.pair_type_size
element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.pair_type)
element = self.data_ptr.CreateValueFromAddress(
"[%s]" % index, address, self.pair_type
)
if self.show_values:
return element
else:
key = element.GetChildAtIndex(0)
return self.valobj.CreateValueFromData("[%s]" % index, key.GetData(), key.GetType())
return self.valobj.CreateValueFromData(
"[%s]" % index, key.GetData(), key.GetType()
)

def update(self):
# type: () -> None
@ -551,10 +600,12 @@ class StdOldHashMapSyntheticProvider:
self.hashes = self.table.GetChildMemberWithName("hashes")
self.hash_uint_type = self.hashes.GetType()
self.hash_uint_size = self.hashes.GetType().GetByteSize()
self.modulo = 2 ** self.hash_uint_size
self.modulo = 2**self.hash_uint_size
self.data_ptr = self.hashes.GetChildAtIndex(0).GetChildAtIndex(0)

self.capacity_mask = self.table.GetChildMemberWithName("capacity_mask").GetValueAsUnsigned()
self.capacity_mask = self.table.GetChildMemberWithName(
"capacity_mask"
).GetValueAsUnsigned()
self.capacity = (self.capacity_mask + 1) % self.modulo

marker = self.table.GetChildMemberWithName("marker").GetType() # type: SBType
@ -564,8 +615,9 @@ class StdOldHashMapSyntheticProvider:
self.valid_indices = []
for idx in range(self.capacity):
address = self.data_ptr.GetValueAsUnsigned() + idx * self.hash_uint_size
hash_uint = self.data_ptr.CreateValueFromAddress("[%s]" % idx, address,
self.hash_uint_type)
hash_uint = self.data_ptr.CreateValueFromAddress(
"[%s]" % idx, address, self.hash_uint_type
)
hash_ptr = hash_uint.GetChildAtIndex(0).GetChildAtIndex(0)
if hash_ptr.GetValueAsUnsigned() != 0:
self.valid_indices.append(idx)
@ -592,7 +644,7 @@ class StdHashMapSyntheticProvider:

def get_child_index(self, name):
# type: (str) -> int
index = name.lstrip('[').rstrip(']')
index = name.lstrip("[").rstrip("]")
if index.isdigit():
return int(index)
else:
@ -605,19 +657,25 @@ class StdHashMapSyntheticProvider:
if self.new_layout:
idx = -(idx + 1)
address = pairs_start + idx * self.pair_type_size
element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.pair_type)
element = self.data_ptr.CreateValueFromAddress(
"[%s]" % index, address, self.pair_type
)
if self.show_values:
return element
else:
key = element.GetChildAtIndex(0)
return self.valobj.CreateValueFromData("[%s]" % index, key.GetData(), key.GetType())
return self.valobj.CreateValueFromData(
"[%s]" % index, key.GetData(), key.GetType()
)

def update(self):
# type: () -> None
table = self.table()
inner_table = table.GetChildMemberWithName("table")

capacity = inner_table.GetChildMemberWithName("bucket_mask").GetValueAsUnsigned() + 1
capacity = (
inner_table.GetChildMemberWithName("bucket_mask").GetValueAsUnsigned() + 1
)
ctrl = inner_table.GetChildMemberWithName("ctrl").GetChildAtIndex(0)

self.size = inner_table.GetChildMemberWithName("items").GetValueAsUnsigned()
@ -630,16 +688,21 @@ class StdHashMapSyntheticProvider:
if self.new_layout:
self.data_ptr = ctrl.Cast(self.pair_type.GetPointerType())
else:
self.data_ptr = inner_table.GetChildMemberWithName("data").GetChildAtIndex(0)
self.data_ptr = inner_table.GetChildMemberWithName("data").GetChildAtIndex(
0
)

u8_type = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar)
u8_type_size = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar).GetByteSize()
u8_type_size = (
self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar).GetByteSize()
)

self.valid_indices = []
for idx in range(capacity):
address = ctrl.GetValueAsUnsigned() + idx * u8_type_size
value = ctrl.CreateValueFromAddress("ctrl[%s]" % idx, address,
u8_type).GetValueAsUnsigned()
value = ctrl.CreateValueFromAddress(
"ctrl[%s]" % idx, address, u8_type
).GetValueAsUnsigned()
is_present = value & 128 == 0
if is_present:
self.valid_indices.append(idx)
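
The `value & 128 == 0` test relies on hashbrown's control-byte encoding: a full bucket stores the top seven bits of the hash with the high bit clear, while the EMPTY (0xFF) and DELETED (0x80) sentinels have the high bit set. A small sketch with invented control bytes:

ctrl_bytes = [0x27, 0xFF, 0x51, 0x80, 0x0A, 0xFF, 0xFF, 0x33]  # hypothetical 8-bucket table
valid = [idx for idx, b in enumerate(ctrl_bytes) if b & 128 == 0]  # full buckets only
assert valid == [0, 2, 4, 7]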
@ -691,10 +754,16 @@ class StdRcSyntheticProvider:

self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value")

self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex(
0).GetChildMemberWithName("value")
self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex(
0).GetChildMemberWithName("value")
self.strong = (
self.ptr.GetChildMemberWithName("strong")
.GetChildAtIndex(0)
.GetChildMemberWithName("value")
)
self.weak = (
self.ptr.GetChildMemberWithName("weak")
.GetChildAtIndex(0)
.GetChildMemberWithName("value")
)

self.value_builder = ValueBuilder(valobj)

@ -772,7 +841,9 @@ class StdCellSyntheticProvider:
def StdRefSummaryProvider(valobj, dict):
# type: (SBValue, dict) -> str
borrow = valobj.GetChildMemberWithName("borrow").GetValueAsSigned()
return "borrow={}".format(borrow) if borrow >= 0 else "borrow_mut={}".format(-borrow)
return (
"borrow={}".format(borrow) if borrow >= 0 else "borrow_mut={}".format(-borrow)
)
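
StdRefSummaryProvider decodes RefCell's borrow flag: a non-negative count is the number of live shared borrows, while a negative count marks a mutable borrow. A small illustration with invented values:

def summarize_borrow(borrow):
    # borrow >= 0: shared borrow count; borrow < 0: mutably borrowed
    return "borrow={}".format(borrow) if borrow >= 0 else "borrow_mut={}".format(-borrow)

assert summarize_borrow(2) == "borrow=2"
assert summarize_borrow(-1) == "borrow_mut=1"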


class StdRefSyntheticProvider:
@ -785,11 +856,16 @@ class StdRefSyntheticProvider:
borrow = valobj.GetChildMemberWithName("borrow")
value = valobj.GetChildMemberWithName("value")
if is_cell:
self.borrow = borrow.GetChildMemberWithName("value").GetChildMemberWithName("value")
self.borrow = borrow.GetChildMemberWithName("value").GetChildMemberWithName(
"value"
)
self.value = value.GetChildMemberWithName("value")
else:
self.borrow = borrow.GetChildMemberWithName("borrow").GetChildMemberWithName(
"value").GetChildMemberWithName("value")
self.borrow = (
borrow.GetChildMemberWithName("borrow")
.GetChildMemberWithName("value")
.GetChildMemberWithName("value")
)
self.value = value.Dereference()

self.value_builder = ValueBuilder(valobj)
@ -832,7 +908,7 @@ def StdNonZeroNumberSummaryProvider(valobj, _dict):

# FIXME: Avoid printing as character literal,
# see https://github.com/llvm/llvm-project/issues/65076.
if inner_inner.GetTypeName() in ['char', 'unsigned char']:
return str(inner_inner.GetValueAsSigned())
if inner_inner.GetTypeName() in ["char", "unsigned char"]:
return str(inner_inner.GetValueAsSigned())
else:
return inner_inner.GetValue()
return inner_inner.GetValue()
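
Per the FIXME, LLDB would otherwise render char-typed NonZero payloads as character literals, so the provider forces a numeric rendering via GetValueAsSigned(). A sketch of the intended behavior, with invented values:

def render(type_name, as_signed, as_value):
    # char-like payloads print as numbers, everything else as-is
    if type_name in ["char", "unsigned char"]:
        return str(as_signed)
    return as_value

assert render("char", 65, "'A'") == "65"
assert render("int", 7, "7") == "7"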

@ -54,7 +54,7 @@ STD_REF_MUT_REGEX = re.compile(r"^(core::([a-z_]+::)+)RefMut<.+>$")
STD_REF_CELL_REGEX = re.compile(r"^(core::([a-z_]+::)+)RefCell<.+>$")
STD_NONZERO_NUMBER_REGEX = re.compile(r"^(core::([a-z_]+::)+)NonZero<.+>$")
STD_PATHBUF_REGEX = re.compile(r"^(std::([a-z_]+::)+)PathBuf$")
STD_PATH_REGEX = re.compile(r"^&(mut )?(std::([a-z_]+::)+)Path$")
STD_PATH_REGEX = re.compile(r"^&(mut )?(std::([a-z_]+::)+)Path$")

TUPLE_ITEM_REGEX = re.compile(r"__\d+$")

@ -84,6 +84,7 @@ STD_TYPE_TO_REGEX = {
RustType.STD_PATH: STD_PATH_REGEX,
}


def is_tuple_fields(fields):
# type: (list) -> bool
return all(TUPLE_ITEM_REGEX.match(str(field.name)) for field in fields)
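
is_tuple_fields treats a struct as a tuple when every field name looks machine-generated: TUPLE_ITEM_REGEX matches names of the `__0`, `__1`, ... form that the debug info uses for tuple fields. For example:

import re

TUPLE_ITEM_REGEX = re.compile(r"__\d+$")
assert all(TUPLE_ITEM_REGEX.match(name) for name in ["__0", "__1", "__2"])
assert not TUPLE_ITEM_REGEX.match("name")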

@ -14,6 +14,7 @@ import json
import datetime
import collections
import textwrap

try:
import urllib2
from urllib2 import HTTPError
@ -21,7 +22,7 @@ except ImportError:
import urllib.request as urllib2
from urllib.error import HTTPError
try:
import typing  # noqa: F401 FIXME: py2
import typing  # noqa: F401 FIXME: py2
except ImportError:
pass

@ -29,40 +30,41 @@ except ImportError:
# These should be collaborators of the rust-lang/rust repository (with at least
# read privileges on it). CI will fail otherwise.
MAINTAINERS = {
'book': {'carols10cents'},
'nomicon': {'frewsxcv', 'Gankra', 'JohnTitor'},
'reference': {'Havvy', 'matthewjasper', 'ehuss'},
'rust-by-example': {'marioidival'},
'embedded-book': {'adamgreig', 'andre-richter', 'jamesmunns', 'therealprof'},
'edition-guide': {'ehuss'},
'rustc-dev-guide': {'spastorino', 'amanjeev', 'JohnTitor'},
"book": {"carols10cents"},
"nomicon": {"frewsxcv", "Gankra", "JohnTitor"},
"reference": {"Havvy", "matthewjasper", "ehuss"},
"rust-by-example": {"marioidival"},
"embedded-book": {"adamgreig", "andre-richter", "jamesmunns", "therealprof"},
"edition-guide": {"ehuss"},
"rustc-dev-guide": {"spastorino", "amanjeev", "JohnTitor"},
}

LABELS = {
'book': ['C-bug'],
'nomicon': ['C-bug'],
'reference': ['C-bug'],
'rust-by-example': ['C-bug'],
'embedded-book': ['C-bug'],
'edition-guide': ['C-bug'],
'rustc-dev-guide': ['C-bug'],
"book": ["C-bug"],
"nomicon": ["C-bug"],
"reference": ["C-bug"],
"rust-by-example": ["C-bug"],
"embedded-book": ["C-bug"],
"edition-guide": ["C-bug"],
"rustc-dev-guide": ["C-bug"],
}

REPOS = {
'book': 'https://github.com/rust-lang/book',
'nomicon': 'https://github.com/rust-lang/nomicon',
'reference': 'https://github.com/rust-lang/reference',
'rust-by-example': 'https://github.com/rust-lang/rust-by-example',
'embedded-book': 'https://github.com/rust-embedded/book',
'edition-guide': 'https://github.com/rust-lang/edition-guide',
'rustc-dev-guide': 'https://github.com/rust-lang/rustc-dev-guide',
"book": "https://github.com/rust-lang/book",
"nomicon": "https://github.com/rust-lang/nomicon",
"reference": "https://github.com/rust-lang/reference",
"rust-by-example": "https://github.com/rust-lang/rust-by-example",
"embedded-book": "https://github.com/rust-embedded/book",
"edition-guide": "https://github.com/rust-lang/edition-guide",
"rustc-dev-guide": "https://github.com/rust-lang/rustc-dev-guide",
}


def load_json_from_response(resp):
# type: (typing.Any) -> typing.Any
content = resp.read()
if isinstance(content, bytes):
content_str = content.decode('utf-8')
content_str = content.decode("utf-8")
else:
print("Refusing to decode " + str(type(content)) + " to str")
return json.loads(content_str)
@ -70,11 +72,10 @@ def load_json_from_response(resp):

def read_current_status(current_commit, path):
# type: (str, str) -> typing.Mapping[str, typing.Any]
'''Reads build status of `current_commit` from content of `history/*.tsv`
'''
with open(path, 'r') as f:
"""Reads build status of `current_commit` from content of `history/*.tsv`"""
with open(path, "r") as f:
for line in f:
(commit, status) = line.split('\t', 1)
(commit, status) = line.split("\t", 1)
if commit == current_commit:
return json.loads(status)
return {}
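
Each history/*.tsv row is a commit hash, a tab, then a JSON blob of tool statuses; the first row matching `current_commit` wins, and an empty dict means the commit was not found. A hypothetical row for illustration:

line = 'abc123\t{"book": "test-pass", "nomicon": "build-fail"}\n'
commit, status = line.split("\t", 1)
assert commit == "abc123"
# json.loads(status) -> {"book": "test-pass", "nomicon": "build-fail"}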
@ -82,12 +83,12 @@ def read_current_status(current_commit, path):

def gh_url():
# type: () -> str
return os.environ['TOOLSTATE_ISSUES_API_URL']
return os.environ["TOOLSTATE_ISSUES_API_URL"]


def maybe_remove_mention(message):
# type: (str) -> str
if os.environ.get('TOOLSTATE_SKIP_MENTIONS') is not None:
if os.environ.get("TOOLSTATE_SKIP_MENTIONS") is not None:
return message.replace("@", "")
return message

@ -102,36 +103,45 @@ def issue(
github_token,
):
# type: (str, str, typing.Iterable[str], str, str, typing.List[str], str) -> None
'''Open an issue about the toolstate failure.'''
if status == 'test-fail':
status_description = 'has failing tests'
"""Open an issue about the toolstate failure."""
if status == "test-fail":
status_description = "has failing tests"
else:
status_description = 'no longer builds'
request = json.dumps({
'body': maybe_remove_mention(textwrap.dedent('''\
status_description = "no longer builds"
request = json.dumps(
{
"body": maybe_remove_mention(
textwrap.dedent("""\
Hello, this is your friendly neighborhood mergebot.
After merging PR {}, I observed that the tool {} {}.
A follow-up PR to the repository {} is needed to fix the fallout.

cc @{}, do you think you would have time to do the follow-up work?
If so, that would be great!
''').format(
relevant_pr_number, tool, status_description,
REPOS.get(tool), relevant_pr_user
)),
'title': '`{}` no longer builds after {}'.format(tool, relevant_pr_number),
'assignees': list(assignees),
'labels': labels,
})
print("Creating issue:\n{}".format(request))
response = urllib2.urlopen(urllib2.Request(
gh_url(),
request.encode(),
{
'Authorization': 'token ' + github_token,
'Content-Type': 'application/json',
""").format(
relevant_pr_number,
tool,
status_description,
REPOS.get(tool),
relevant_pr_user,
)
),
"title": "`{}` no longer builds after {}".format(tool, relevant_pr_number),
"assignees": list(assignees),
"labels": labels,
}
))
)
print("Creating issue:\n{}".format(request))
response = urllib2.urlopen(
urllib2.Request(
gh_url(),
request.encode(),
{
"Authorization": "token " + github_token,
"Content-Type": "application/json",
},
)
)
response.read()


@ -145,27 +155,26 @@ def update_latest(
github_token,
):
# type: (str, str, str, str, str, str, str) -> str
'''Updates `_data/latest.json` to match build result of the given commit.
'''
with open('_data/latest.json', 'r+') as f:
"""Updates `_data/latest.json` to match build result of the given commit."""
with open("_data/latest.json", "r+") as f:
latest = json.load(f, object_pairs_hook=collections.OrderedDict)

current_status = {
os_: read_current_status(current_commit, 'history/' + os_ + '.tsv')
for os_ in ['windows', 'linux']
os_: read_current_status(current_commit, "history/" + os_ + ".tsv")
for os_ in ["windows", "linux"]
}

slug = 'rust-lang/rust'
message = textwrap.dedent('''\
slug = "rust-lang/rust"
message = textwrap.dedent("""\
📣 Toolstate changed by {}!

Tested on commit {}@{}.
Direct link to PR: <{}>

''').format(relevant_pr_number, slug, current_commit, relevant_pr_url)
""").format(relevant_pr_number, slug, current_commit, relevant_pr_url)
anything_changed = False
for status in latest:
tool = status['tool']
tool = status["tool"]
changed = False
create_issue_for_status = None  # set to the status that caused the issue

@ -173,57 +182,70 @@ def update_latest(
old = status[os_]
new = s.get(tool, old)
status[os_] = new
maintainers = ' '.join('@'+name for name in MAINTAINERS.get(tool, ()))
maintainers = " ".join("@" + name for name in MAINTAINERS.get(tool, ()))
# comparing the strings, but they are ordered appropriately:
# "test-pass" > "test-fail" > "build-fail"
if new > old:
# things got fixed or at least the status quo improved
changed = True
message += '🎉 {} on {}: {} → {} (cc {}).\n' \
.format(tool, os_, old, new, maintainers)
message += "🎉 {} on {}: {} → {} (cc {}).\n".format(
tool, os_, old, new, maintainers
)
elif new < old:
# tests or builds are failing and were not failing before
changed = True
title = '💔 {} on {}: {} → {}' \
.format(tool, os_, old, new)
message += '{} (cc {}).\n' \
.format(title, maintainers)
title = "💔 {} on {}: {} → {}".format(tool, os_, old, new)
message += "{} (cc {}).\n".format(title, maintainers)
# See if we need to create an issue.
# Create issue if things no longer build.
# (No issue for mere test failures to avoid spurious issues.)
if new == 'build-fail':
if new == "build-fail":
create_issue_for_status = new
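
The ordering comment holds because these particular status strings happen to sort lexicographically in severity order, so plain string comparison is enough:

assert "build-fail" < "test-fail" < "test-pass"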

if create_issue_for_status is not None:
try:
issue(
tool, create_issue_for_status, MAINTAINERS.get(tool, ()),
relevant_pr_number, relevant_pr_user, LABELS.get(tool, []),
tool,
create_issue_for_status,
MAINTAINERS.get(tool, ()),
relevant_pr_number,
relevant_pr_user,
LABELS.get(tool, []),
github_token,
)
except HTTPError as e:
# network errors will simply end up not creating an issue, but that's better
# than failing the entire build job
print("HTTPError when creating issue for status regression: {0}\n{1!r}"
.format(e, e.read()))
print(
"HTTPError when creating issue for status regression: {0}\n{1!r}".format(
e, e.read()
)
)
except IOError as e:
print("I/O error when creating issue for status regression: {0}".format(e))
print(
"I/O error when creating issue for status regression: {0}".format(
e
)
)
except:
print("Unexpected error when creating issue for status regression: {0}"
.format(sys.exc_info()[0]))
print(
"Unexpected error when creating issue for status regression: {0}".format(
sys.exc_info()[0]
)
)
raise

if changed:
status['commit'] = current_commit
status['datetime'] = current_datetime
status["commit"] = current_commit
status["datetime"] = current_datetime
anything_changed = True

if not anything_changed:
return ''
return ""

f.seek(0)
f.truncate(0)
json.dump(latest, f, indent=4, separators=(',', ': '))
json.dump(latest, f, indent=4, separators=(",", ": "))
return message


@ -231,12 +253,12 @@ def update_latest(
# There are variables declared within that are implicitly global; it is unknown
# which ones precisely but at least this is true for `github_token`.
try:
if __name__ != '__main__':
if __name__ != "__main__":
exit(0)

cur_commit = sys.argv[1]
cur_datetime = datetime.datetime.now(datetime.timezone.utc).strftime(
'%Y-%m-%dT%H:%M:%SZ'
"%Y-%m-%dT%H:%M:%SZ"
)
cur_commit_msg = sys.argv[2]
save_message_to_path = sys.argv[3]
@ -244,21 +266,21 @@ try:

# assume that PR authors are also owners of the repo where the branch lives
relevant_pr_match = re.search(
r'Auto merge of #([0-9]+) - ([^:]+):[^,]+, r=(\S+)',
r"Auto merge of #([0-9]+) - ([^:]+):[^,]+, r=(\S+)",
cur_commit_msg,
)
if relevant_pr_match:
number = relevant_pr_match.group(1)
relevant_pr_user = relevant_pr_match.group(2)
relevant_pr_number = 'rust-lang/rust#' + number
relevant_pr_url = 'https://github.com/rust-lang/rust/pull/' + number
relevant_pr_number = "rust-lang/rust#" + number
relevant_pr_url = "https://github.com/rust-lang/rust/pull/" + number
pr_reviewer = relevant_pr_match.group(3)
else:
number = '-1'
relevant_pr_user = 'ghost'
relevant_pr_number = '<unknown PR>'
relevant_pr_url = '<unknown>'
pr_reviewer = 'ghost'
number = "-1"
relevant_pr_user = "ghost"
relevant_pr_number = "<unknown PR>"
relevant_pr_url = "<unknown>"
pr_reviewer = "ghost"

message = update_latest(
cur_commit,
@ -270,28 +292,30 @@ try:
github_token,
)
if not message:
print('<Nothing changed>')
print("<Nothing changed>")
sys.exit(0)

print(message)

if not github_token:
print('Dry run only, not committing anything')
print("Dry run only, not committing anything")
sys.exit(0)

with open(save_message_to_path, 'w') as f:
with open(save_message_to_path, "w") as f:
f.write(message)

# Write the toolstate comment on the PR as well.
issue_url = gh_url() + '/{}/comments'.format(number)
response = urllib2.urlopen(urllib2.Request(
issue_url,
json.dumps({'body': maybe_remove_mention(message)}).encode(),
{
'Authorization': 'token ' + github_token,
'Content-Type': 'application/json',
}
))
issue_url = gh_url() + "/{}/comments".format(number)
response = urllib2.urlopen(
urllib2.Request(
issue_url,
json.dumps({"body": maybe_remove_mention(message)}).encode(),
{
"Authorization": "token " + github_token,
"Content-Type": "application/json",
},
)
)
response.read()
except HTTPError as e:
print("HTTPError: %s\n%r" % (e, e.read()))

@ -1,5 +1,6 @@
import gdb


class PersonPrinter:
"Print a Person"

@ -11,6 +12,7 @@ class PersonPrinter:
def to_string(self):
return "{} is {} years old.".format(self.name, self.age)


def lookup(val):
lookup_tag = val.type.tag
if lookup_tag is None:
@ -20,4 +22,5 @@ def lookup(val):

return None


gdb.current_objfile().pretty_printers.append(lookup)
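
These files follow gdb's pretty-printer protocol: gdb calls each registered lookup function for every value, and the function returns a printer object or None to decline. The diff elides the middle of lookup; a hypothetical completion, purely for illustration (the tag comparison is an assumption, not the elided code):

def lookup(val):
    lookup_tag = val.type.tag
    if lookup_tag is None:
        return None
    if lookup_tag == "Person":  # assumed match; the real check is elided above
        return PersonPrinter(val)
    return None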

@ -1,5 +1,6 @@
import gdb


class PointPrinter:
"Print a Point"

@ -11,6 +12,7 @@ class PointPrinter:
def to_string(self):
return "({}, {})".format(self.x, self.y)


def lookup(val):
lookup_tag = val.type.tag
if lookup_tag is None:
@ -20,4 +22,5 @@ def lookup(val):

return None


gdb.current_objfile().pretty_printers.append(lookup)

@ -1,5 +1,6 @@
import gdb


class LinePrinter:
"Print a Line"

@ -11,6 +12,7 @@ class LinePrinter:
def to_string(self):
return "({}, {})".format(self.a, self.b)


def lookup(val):
lookup_tag = val.type.tag
if lookup_tag is None:
@ -20,4 +22,5 @@ def lookup(val):

return None


gdb.current_objfile().pretty_printers.append(lookup)

x.py
@ -6,7 +6,7 @@

# Parts of `bootstrap.py` use the `multiprocessing` module, so this entry point
# must use the normal `if __name__ == '__main__':` convention to avoid problems.
if __name__ == '__main__':
if __name__ == "__main__":
import os
import sys
import warnings
@ -32,14 +32,16 @@ if __name__ == '__main__':
# soft deprecation of old python versions
skip_check = os.environ.get("RUST_IGNORE_OLD_PYTHON") == "1"
if not skip_check and (major < 3 or (major == 3 and minor < 6)):
msg = cleandoc("""
msg = cleandoc(
"""
Using python {}.{} but >= 3.6 is recommended. Your python version
should continue to work for the near future, but this will
eventually change. If python >= 3.6 is not available on your system,
please file an issue to help us understand timelines.

This message can be suppressed by setting `RUST_IGNORE_OLD_PYTHON=1`
""".format(major, minor))
""".format(major, minor)
)
warnings.warn(msg, stacklevel=1)

rust_dir = os.path.dirname(os.path.abspath(__file__))
@ -47,4 +49,5 @@ if __name__ == '__main__':
sys.path.insert(0, os.path.join(rust_dir, "src", "bootstrap"))

import bootstrap

bootstrap.main()
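
The entry-point comment at the top of x.py matters because multiprocessing on spawn-based platforms re-imports the entry module in every worker process; without the guard, the script's top level would run again in each worker. A minimal standalone sketch of the pattern (the worker function is invented):

import multiprocessing


def square(n):  # hypothetical worker
    return n * n


if __name__ == "__main__":
    # Safe: the Pool is only created in the parent process, never in workers
    with multiprocessing.Pool(2) as pool:
        print(pool.map(square, [1, 2, 3]))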