from __future__ import absolute_import, division, print_function
import argparse
import contextlib
import datetime
import distutils.version
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile

from time import time

def support_xz():
    try:
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            temp_path = temp_file.name
        with tarfile.open(temp_path, "w:xz"):
            pass
        return True
    except tarfile.CompressionError:
        return False
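

# Note on support_xz() above: it probes xz support by attempting to create a
# throwaway ".tar.xz" archive. On interpreters without LZMA support (for
# example Python 2 without a backport), tarfile raises CompressionError and
# the callers below fall back to ".tar.gz" tarballs.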


def get(url, path, verbose=False, do_verify=True):
    suffix = '.sha256'
    sha_url = url + suffix
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_path = temp_file.name
    with tempfile.NamedTemporaryFile(suffix=suffix, delete=False) as sha_file:
        sha_path = sha_file.name

    try:
        if do_verify:
            download(sha_path, sha_url, False, verbose)
            if os.path.exists(path):
                if verify(path, sha_path, False):
                    if verbose:
                        print("using already-downloaded file", path)
                    return
                else:
                    if verbose:
                        print("ignoring already-downloaded file",
                              path, "due to failed verification")
                    os.unlink(path)
        download(temp_path, url, True, verbose)
        if do_verify and not verify(temp_path, sha_path, verbose):
            raise RuntimeError("failed verification")
        if verbose:
            print("moving {} to {}".format(temp_path, path))
        shutil.move(temp_path, path)
    finally:
        delete_if_present(sha_path, verbose)
        delete_if_present(temp_path, verbose)
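

# Illustrative use of get() above (URL and paths are hypothetical): it first
# downloads "<url>.sha256", reuses `path` if the existing file already
# verifies, and otherwise downloads into a temp file, verifies it, and moves
# it into place, e.g.
#   get("https://example.invalid/dist/rust-std.tar.xz", "build/cache/rust-std.tar.xz")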


def delete_if_present(path, verbose):
    """Remove the given file if present"""
    if os.path.isfile(path):
        if verbose:
            print("removing", path)
        os.unlink(path)


def download(path, url, probably_big, verbose):
    for _ in range(0, 4):
        try:
            _download(path, url, probably_big, verbose, True)
            return
        except RuntimeError:
            print("\nspurious failure, trying again")
    _download(path, url, probably_big, verbose, False)


def _download(path, url, probably_big, verbose, exception):
    if probably_big or verbose:
        print("downloading {}".format(url))
    # see https://serverfault.com/questions/301128/how-to-download
    if sys.platform == 'win32':
        run(["PowerShell.exe", "/nologo", "-Command",
             "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
             "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(url, path)],
            verbose=verbose,
            exception=exception)
    else:
        if probably_big or verbose:
            option = "-#"
        else:
            option = "-s"
        require(["curl", "--version"])
        run(["curl", option,
             "-y", "30", "-Y", "10",    # timeout if speed is < 10 bytes/sec for > 30 seconds
             "--connect-timeout", "30",  # timeout if cannot connect within 30 seconds
             "--retry", "3", "-Sf", "-o", path, url],
            verbose=verbose,
            exception=exception)


def verify(path, sha_path, verbose):
    """Check if the sha256 sum of the given path is valid"""
    if verbose:
        print("verifying", path)
    with open(path, "rb") as source:
        found = hashlib.sha256(source.read()).hexdigest()
    with open(sha_path, "r") as sha256sum:
        expected = sha256sum.readline().split()[0]
    verified = found == expected
    if not verified:
        print("invalid checksum:\n"
              "    found:    {}\n"
              "    expected: {}".format(found, expected))
    return verified
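

# The ".sha256" sidecar read by verify() above is expected to start with the
# hex digest as its first whitespace-separated token, e.g. (illustrative):
#   0123...cdef *rust-std-beta-x86_64-unknown-linux-gnu.tar.xz
# Only that first token is compared against the locally computed digest.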


def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
    """Unpack the given tarball file"""
    print("extracting", tarball)
    fname = os.path.basename(tarball).replace(tarball_suffix, "")
    with contextlib.closing(tarfile.open(tarball)) as tar:
        for member in tar.getnames():
            if "/" not in member:
                continue
            name = member.replace(fname + "/", "", 1)
            if match is not None and not name.startswith(match):
                continue
            name = name[len(match) + 1:]

            dst_path = os.path.join(dst, name)
            if verbose:
                print("  extracting", member)
            tar.extract(member, dst)
            src_path = os.path.join(dst, member)
            if os.path.isdir(src_path) and os.path.exists(dst_path):
                continue
            shutil.move(src_path, dst_path)
    shutil.rmtree(os.path.join(dst, fname))


def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
    """Run a child program in a new process"""
    if verbose:
        print("running: " + ' '.join(args))
    sys.stdout.flush()
    # Use Popen here instead of call() as it apparently allows powershell on
    # Windows to not lock up waiting for input presumably.
    ret = subprocess.Popen(args, **kwargs)
    code = ret.wait()
    if code != 0:
        err = "failed to run: " + ' '.join(args)
        if verbose or exception:
            raise RuntimeError(err)
        # For most failures, we definitely do want to print this error, or the user will have no
        # idea what went wrong. But when we've successfully built bootstrap and it failed, it will
        # have already printed an error above, so there's no need to print the exact command we're
        # running.
        if is_bootstrap:
            sys.exit(1)
        else:
            sys.exit(err)
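

# Sketch of how run() above is typically invoked elsewhere in this script
# (arguments illustrative): a failure either raises or exits depending on
# `exception`, e.g.
#   run(["git", "reset", "-q", "--hard"], cwd=module_path, verbose=True)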


def require(cmd, exit=True):
    '''Run a command, returning its output.

    On error, if `exit` is `True`, exit the process.
    Otherwise, return None.'''
    try:
        return subprocess.check_output(cmd).strip()
    except (subprocess.CalledProcessError, OSError) as exc:
        if not exit:
            return None
        print("error: unable to run `{}`: {}".format(' '.join(cmd), exc))
        print("Please make sure it's installed and in the path.")
        sys.exit(1)
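

# Example (illustrative) of require() above: require(["git", "--version"])
# returns the command's output as bytes (e.g. b"git version 2.31.1"), while a
# missing binary prints the "unable to run" hint and exits unless exit=False.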


def format_build_time(duration):
    """Return a nicer format for build time

    >>> format_build_time('300')
    '0:05:00'
    """
    return str(datetime.timedelta(seconds=int(duration)))


def default_build_triple(verbose):
    """Build triple as in LLVM"""
    # If the user already has a host build triple with an existing `rustc`
    # install, use their preference. This fixes most issues with Windows builds
    # being detected as GNU instead of MSVC.
    default_encoding = sys.getdefaultencoding()
    try:
        version = subprocess.check_output(["rustc", "--version", "--verbose"],
                                          stderr=subprocess.DEVNULL)
        version = version.decode(default_encoding)
        host = next(x for x in version.split('\n') if x.startswith("host: "))
        triple = host.split("host: ")[1]
        if verbose:
            print("detected default triple {}".format(triple))
        return triple
    except Exception as e:
        if verbose:
            print("rustup not detected: {}".format(e))
            print("falling back to auto-detect")

    required = sys.platform != 'win32'
    ostype = require(["uname", "-s"], exit=required)
    cputype = require(['uname', '-m'], exit=required)

    # If we do not have `uname`, assume Windows.
    if ostype is None or cputype is None:
        return 'x86_64-pc-windows-msvc'

    ostype = ostype.decode(default_encoding)
    cputype = cputype.decode(default_encoding)

    # The goal here is to come up with the same triple as LLVM would,
    # at least for the subset of platforms we're willing to target.
    ostype_mapper = {
        'Darwin': 'apple-darwin',
        'DragonFly': 'unknown-dragonfly',
        'FreeBSD': 'unknown-freebsd',
        'Haiku': 'unknown-haiku',
        'NetBSD': 'unknown-netbsd',
        'OpenBSD': 'unknown-openbsd'
    }

    # Consider the direct transformation first and then the special cases
    if ostype in ostype_mapper:
        ostype = ostype_mapper[ostype]
    elif ostype == 'Linux':
        os_from_sp = subprocess.check_output(
            ['uname', '-o']).strip().decode(default_encoding)
        if os_from_sp == 'Android':
            ostype = 'linux-android'
        else:
            ostype = 'unknown-linux-gnu'
    elif ostype == 'SunOS':
        ostype = 'pc-solaris'
        # On Solaris, uname -m will return a machine classification instead
        # of a cpu type, so uname -p is recommended instead. However, the
        # output from that option is too generic for our purposes (it will
        # always emit 'i386' on x86/amd64 systems). As such, isainfo -k
        # must be used instead.
        cputype = require(['isainfo', '-k']).decode(default_encoding)
        # sparc cpus have sun as a target vendor
        if 'sparc' in cputype:
            ostype = 'sun-solaris'
    elif ostype.startswith('MINGW'):
        # msys' `uname` does not print gcc configuration, but prints msys
        # configuration. so we cannot believe `uname -m`:
        # msys1 is always i686 and msys2 is always x86_64.
        # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
        # MINGW64 on x86_64.
        ostype = 'pc-windows-gnu'
        cputype = 'i686'
        if os.environ.get('MSYSTEM') == 'MINGW64':
            cputype = 'x86_64'
    elif ostype.startswith('MSYS'):
        ostype = 'pc-windows-gnu'
    elif ostype.startswith('CYGWIN_NT'):
        cputype = 'i686'
        if ostype.endswith('WOW64'):
            cputype = 'x86_64'
        ostype = 'pc-windows-gnu'
    elif sys.platform == 'win32':
        # Some Windows platforms might have a `uname` command that returns a
        # non-standard string (e.g. gnuwin32 tools returns `windows32`). In
        # these cases, fall back to using sys.platform.
        return 'x86_64-pc-windows-msvc'
    else:
        err = "unknown OS type: {}".format(ostype)
        sys.exit(err)

    if cputype == 'powerpc' and ostype == 'unknown-freebsd':
        cputype = subprocess.check_output(
            ['uname', '-p']).strip().decode(default_encoding)
    cputype_mapper = {
        'BePC': 'i686',
        'aarch64': 'aarch64',
        'amd64': 'x86_64',
        'arm64': 'aarch64',
        'i386': 'i686',
        'i486': 'i686',
        'i686': 'i686',
        'i786': 'i686',
        'powerpc': 'powerpc',
        'powerpc64': 'powerpc64',
        'powerpc64le': 'powerpc64le',
        'ppc': 'powerpc',
        'ppc64': 'powerpc64',
        'ppc64le': 'powerpc64le',
        's390x': 's390x',
        'x64': 'x86_64',
        'x86': 'i686',
        'x86-64': 'x86_64',
        'x86_64': 'x86_64'
    }

    # Consider the direct transformation first and then the special cases
    if cputype in cputype_mapper:
        cputype = cputype_mapper[cputype]
    elif cputype in {'xscale', 'arm'}:
        cputype = 'arm'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        elif ostype == 'unknown-freebsd':
            cputype = subprocess.check_output(
                ['uname', '-p']).strip().decode(default_encoding)
            ostype = 'unknown-freebsd'
    elif cputype == 'armv6l':
        cputype = 'arm'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        else:
            ostype += 'eabihf'
    elif cputype in {'armv7l', 'armv8l'}:
        cputype = 'armv7'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        else:
            ostype += 'eabihf'
    elif cputype == 'mips':
        if sys.byteorder == 'big':
            cputype = 'mips'
        elif sys.byteorder == 'little':
            cputype = 'mipsel'
        else:
            raise ValueError("unknown byteorder: {}".format(sys.byteorder))
    elif cputype == 'mips64':
        if sys.byteorder == 'big':
            cputype = 'mips64'
        elif sys.byteorder == 'little':
            cputype = 'mips64el'
        else:
            raise ValueError('unknown byteorder: {}'.format(sys.byteorder))
        # only the n64 ABI is supported, indicate it
        ostype += 'abi64'
    elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64':
        pass
    else:
        err = "unknown cpu type: {}".format(cputype)
        sys.exit(err)

    return "{}-{}".format(cputype, ostype)
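

# default_build_triple() above returns an LLVM-style triple such as
# "x86_64-unknown-linux-gnu" or "aarch64-apple-darwin" (examples only); the
# `rustc --version --verbose` probe is preferred so an existing rustup
# toolchain decides between MSVC and GNU on Windows.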


@contextlib.contextmanager
def output(filepath):
    tmp = filepath + '.tmp'
    with open(tmp, 'w') as f:
        yield f
    try:
        if os.path.exists(filepath):
            os.remove(filepath)  # PermissionError/OSError on Win32 if in use
    except OSError:
        shutil.copy2(tmp, filepath)
        os.remove(tmp)
        return
    os.rename(tmp, filepath)
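

# Usage sketch for output() above (path illustrative): the body writes to
# "<filepath>.tmp", which is then renamed over the target (or copied, on
# Windows, if the old file is still in use), e.g.
#   with output("build/stage0/.rustc-stamp") as stamp:
#       stamp.write(key)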


class Stage0Toolchain:
    def __init__(self, stage0_payload):
        self.date = stage0_payload["date"]
        self.version = stage0_payload["version"]

    def channel(self):
        return self.version + "-" + self.date
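

# Stage0Toolchain above wraps one entry of the stage0 metadata; an illustrative
# payload is {"date": "2021-08-01", "version": "beta"}, for which channel()
# would return "beta-2021-08-01".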


class RustBuild(object):
    """Provide all the methods required to build Rust"""

    def __init__(self):
        self.stage0_compiler = None
        self.stage0_rustfmt = None
        self._download_url = ''
        self.build = ''
        self.build_dir = ''
        self.clean = False
        self.config_toml = ''
        self.rust_root = ''
        self.use_locked_deps = ''
        self.use_vendored_sources = ''
        self.verbose = False
        self.git_version = None
        self.nix_deps_dir = None
        self.rustc_commit = None

    def download_toolchain(self, stage0=True, rustc_channel=None):
        """Fetch the build system for Rust, written in Rust

        This method will build a cache directory, then it will fetch the
        tarball which has the stage0 compiler used to then bootstrap the Rust
        compiler itself.

        Each downloaded tarball is extracted; after that, the script
        moves all the content to the right place.
        """
        if rustc_channel is None:
            rustc_channel = self.stage0_compiler.version
        bin_root = self.bin_root(stage0)

        key = self.stage0_compiler.date
        if not stage0:
            key += str(self.rustc_commit)
        if self.rustc(stage0).startswith(bin_root) and \
                (not os.path.exists(self.rustc(stage0)) or
                 self.program_out_of_date(self.rustc_stamp(stage0), key)):
            if os.path.exists(bin_root):
                shutil.rmtree(bin_root)
            tarball_suffix = '.tar.xz' if support_xz() else '.tar.gz'
            filename = "rust-std-{}-{}{}".format(
                rustc_channel, self.build, tarball_suffix)
            pattern = "rust-std-{}".format(self.build)
            self._download_component_helper(filename, pattern, tarball_suffix, stage0)
            filename = "rustc-{}-{}{}".format(rustc_channel, self.build,
                                              tarball_suffix)
            self._download_component_helper(filename, "rustc", tarball_suffix, stage0)
            # download-rustc doesn't need its own cargo, it can just use beta's.
            if stage0:
                filename = "cargo-{}-{}{}".format(rustc_channel, self.build,
                                                  tarball_suffix)
                self._download_component_helper(filename, "cargo", tarball_suffix)
                self.fix_bin_or_dylib("{}/bin/cargo".format(bin_root))
            else:
                filename = "rustc-dev-{}-{}{}".format(rustc_channel, self.build, tarball_suffix)
                self._download_component_helper(
                    filename, "rustc-dev", tarball_suffix, stage0
                )

            self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root))
            self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root))
            lib_dir = "{}/lib".format(bin_root)
            for lib in os.listdir(lib_dir):
                if lib.endswith(".so"):
                    self.fix_bin_or_dylib(os.path.join(lib_dir, lib))
            with output(self.rustc_stamp(stage0)) as rust_stamp:
                rust_stamp.write(key)

        if self.rustfmt() and self.rustfmt().startswith(bin_root) and (
            not os.path.exists(self.rustfmt())
            or self.program_out_of_date(
                self.rustfmt_stamp(),
                "" if self.stage0_rustfmt is None else self.stage0_rustfmt.channel()
            )
        ):
            if self.stage0_rustfmt is not None:
                tarball_suffix = '.tar.xz' if support_xz() else '.tar.gz'
                filename = "rustfmt-{}-{}{}".format(
                    self.stage0_rustfmt.version, self.build, tarball_suffix,
                )
                self._download_component_helper(
                    filename, "rustfmt-preview", tarball_suffix, key=self.stage0_rustfmt.date
                )
                self.fix_bin_or_dylib("{}/bin/rustfmt".format(bin_root))
                self.fix_bin_or_dylib("{}/bin/cargo-fmt".format(bin_root))
                with output(self.rustfmt_stamp()) as rustfmt_stamp:
                    rustfmt_stamp.write(self.stage0_rustfmt.channel())

        # Avoid downloading LLVM twice (once for stage0 and once for the master rustc)
        if self.downloading_llvm() and stage0:
            # We want the most recent LLVM submodule update to avoid downloading
            # LLVM more often than necessary.
            #
            # This git command finds that commit SHA, looking for bors-authored
            # merges that modified src/llvm-project or other relevant version
            # stamp files.
            #
            # This works even in a repository that has not yet initialized
            # submodules.
            top_level = subprocess.check_output([
                "git", "rev-parse", "--show-toplevel",
            ]).decode(sys.getdefaultencoding()).strip()
            llvm_sha = subprocess.check_output([
                "git", "rev-list", "--author=bors@rust-lang.org", "-n1",
                "--merges", "--first-parent", "HEAD",
                "--",
                "{}/src/llvm-project".format(top_level),
                "{}/src/bootstrap/download-ci-llvm-stamp".format(top_level),
                # the LLVM shared object file is named `LLVM-12-rust-{version}-nightly`
                "{}/src/version".format(top_level)
            ]).decode(sys.getdefaultencoding()).strip()
            llvm_assertions = self.get_toml('assertions', 'llvm') == 'true'
            llvm_root = self.llvm_root()
            llvm_lib = os.path.join(llvm_root, "lib")
            if self.program_out_of_date(self.llvm_stamp(), llvm_sha + str(llvm_assertions)):
                self._download_ci_llvm(llvm_sha, llvm_assertions)
                for binary in ["llvm-config", "FileCheck"]:
                    self.fix_bin_or_dylib(os.path.join(llvm_root, "bin", binary))
                for lib in os.listdir(llvm_lib):
                    if lib.endswith(".so"):
                        self.fix_bin_or_dylib(os.path.join(llvm_lib, lib))
                with output(self.llvm_stamp()) as llvm_stamp:
                    llvm_stamp.write(llvm_sha + str(llvm_assertions))

    def downloading_llvm(self):
        opt = self.get_toml('download-ci-llvm', 'llvm')
        # This is currently all tier 1 targets (since others may not have CI
        # artifacts)
        # https://doc.rust-lang.org/rustc/platform-support.html#tier-1
        supported_platforms = [
            "aarch64-unknown-linux-gnu",
            "i686-pc-windows-gnu",
            "i686-pc-windows-msvc",
            "i686-unknown-linux-gnu",
            "x86_64-unknown-linux-gnu",
            "x86_64-apple-darwin",
            "x86_64-pc-windows-gnu",
            "x86_64-pc-windows-msvc",
        ]
        return opt == "true" \
            or (opt == "if-available" and self.build in supported_platforms)
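
    # Illustrative config for downloading_llvm() above: with
    #   [llvm]
    #   download-ci-llvm = "if-available"
    # in config.toml, CI LLVM is used only when self.build is one of the
    # tier 1 platforms listed in supported_platforms.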

    def _download_component_helper(
        self, filename, pattern, tarball_suffix, stage0=True, key=None
    ):
        if key is None:
            if stage0:
                key = self.stage0_compiler.date
            else:
                key = self.rustc_commit
        cache_dst = os.path.join(self.build_dir, "cache")
        rustc_cache = os.path.join(cache_dst, key)
        if not os.path.exists(rustc_cache):
            os.makedirs(rustc_cache)

        if stage0:
            url = "{}/dist/{}".format(self._download_url, key)
        else:
            url = "https://ci-artifacts.rust-lang.org/rustc-builds/{}".format(self.rustc_commit)
        tarball = os.path.join(rustc_cache, filename)
        if not os.path.exists(tarball):
            get("{}/{}".format(url, filename), tarball, verbose=self.verbose, do_verify=stage0)
        unpack(tarball, tarball_suffix, self.bin_root(stage0), match=pattern, verbose=self.verbose)
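
    # _download_component_helper() above caches tarballs under
    # "<build_dir>/cache/<key>/<filename>", where <key> is the stage0 date for
    # beta components or the rustc commit hash for CI artifacts (illustrative
    # layout: build/cache/2021-08-01/cargo-beta-x86_64-unknown-linux-gnu.tar.xz).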

    def _download_ci_llvm(self, llvm_sha, llvm_assertions):
        cache_prefix = "llvm-{}-{}".format(llvm_sha, llvm_assertions)
        cache_dst = os.path.join(self.build_dir, "cache")
        rustc_cache = os.path.join(cache_dst, cache_prefix)
        if not os.path.exists(rustc_cache):
            os.makedirs(rustc_cache)

        url = "https://ci-artifacts.rust-lang.org/rustc-builds/{}".format(llvm_sha)
        if llvm_assertions:
            url = url.replace('rustc-builds', 'rustc-builds-alt')
        # ci-artifacts are only stored as .xz, not .gz
        if not support_xz():
            print("error: XZ support is required to download LLVM")
            print("help: consider disabling `download-ci-llvm` or using python3")
            exit(1)
        tarball_suffix = '.tar.xz'
        filename = "rust-dev-nightly-" + self.build + tarball_suffix
        tarball = os.path.join(rustc_cache, filename)
        if not os.path.exists(tarball):
            get("{}/{}".format(url, filename), tarball, verbose=self.verbose, do_verify=False)
        unpack(tarball, tarball_suffix, self.llvm_root(),
               match="rust-dev",
               verbose=self.verbose)

    def fix_bin_or_dylib(self, fname):
        """Modifies the interpreter section of 'fname' to fix the dynamic linker,
        or the RPATH section, to fix the dynamic library search path

        This method is only required on NixOS and uses the PatchELF utility to
        change the interpreter/RPATH of ELF executables.

        Please see https://nixos.org/patchelf.html for more information
        """
        default_encoding = sys.getdefaultencoding()
        try:
            ostype = subprocess.check_output(
                ['uname', '-s']).strip().decode(default_encoding)
        except subprocess.CalledProcessError:
            return
        except OSError as reason:
            if getattr(reason, 'winerror', None) is not None:
                return
            raise reason

        if ostype != "Linux":
            return

        # Use `/etc/os-release` instead of `/etc/NIXOS`.
        # The latter one does not exist on NixOS when using tmpfs as root.
        try:
            with open("/etc/os-release", "r") as f:
                if not any(line.strip() == "ID=nixos" for line in f):
                    return
        except FileNotFoundError:
            return
        if os.path.exists("/lib"):
            return

        # At this point we're pretty sure the user is running NixOS
        nix_os_msg = "info: you seem to be running NixOS. Attempting to patch"
        print(nix_os_msg, fname)

        # Only build `.nix-deps` once.
        nix_deps_dir = self.nix_deps_dir
        if not nix_deps_dir:
            # Run `nix-build` to "build" each dependency (which will likely reuse
            # the existing `/nix/store` copy, or at most download a pre-built copy).
            #
            # Importantly, we create a gc-root called `.nix-deps` in the `build/`
            # directory, but still reference the actual `/nix/store` path in the rpath,
            # as that makes the rpath significantly more robust against changes to
            # the location of `.nix-deps`.
            #
            # bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
            # zlib: Needed as a system dependency of `libLLVM-*.so`.
            # patchelf: Needed for patching ELF binaries (see doc comment above).
            nix_deps_dir = "{}/{}".format(self.build_dir, ".nix-deps")
            nix_expr = '''
            with (import <nixpkgs> {});
            symlinkJoin {
              name = "rust-stage0-dependencies";
              paths = [
                zlib
                patchelf
                stdenv.cc.bintools
              ];
            }
            '''
            try:
                subprocess.check_output([
                    "nix-build", "-E", nix_expr, "-o", nix_deps_dir,
                ])
            except subprocess.CalledProcessError as reason:
                print("warning: failed to call nix-build:", reason)
                return
            self.nix_deps_dir = nix_deps_dir

        patchelf = "{}/bin/patchelf".format(nix_deps_dir)
        rpath_entries = [
            # Relative default, all binary and dynamic libraries we ship
            # appear to have this (even when `../lib` is redundant).
            "$ORIGIN/../lib",
            os.path.join(os.path.realpath(nix_deps_dir), "lib")
        ]
        patchelf_args = ["--set-rpath", ":".join(rpath_entries)]
        if not fname.endswith(".so"):
            # Finally, set the correct .interp for binaries
            with open("{}/nix-support/dynamic-linker".format(nix_deps_dir)) as dynamic_linker:
                patchelf_args += ["--set-interpreter", dynamic_linker.read().rstrip()]

        try:
            subprocess.check_output([patchelf] + patchelf_args + [fname])
        except subprocess.CalledProcessError as reason:
            print("warning: failed to call patchelf:", reason)
            return
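
    # The patchelf invocation issued by fix_bin_or_dylib() above is roughly
    # (paths illustrative):
    #   .nix-deps/bin/patchelf --set-rpath '$ORIGIN/../lib:/nix/store/.../lib' \
    #       --set-interpreter /nix/store/.../ld-linux-x86-64.so.2 build/stage0/bin/rustc
    # with --set-interpreter omitted for shared objects (".so" files).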

    # If `download-rustc` is set, download the most recent commit with CI artifacts
    def maybe_download_ci_toolchain(self):
        # If `download-rustc` is not set, default to rebuilding.
        download_rustc = self.get_toml("download-rustc", section="rust")
        if download_rustc is None or download_rustc == "false":
            return None
        assert download_rustc == "true" or download_rustc == "if-unchanged", download_rustc

        # Handle running from a directory other than the top level
        rev_parse = ["git", "rev-parse", "--show-toplevel"]
        top_level = subprocess.check_output(rev_parse, universal_newlines=True).strip()
        compiler = "{}/compiler/".format(top_level)
        library = "{}/library/".format(top_level)

        # Look for a version to compare to based on the current commit.
        # Only commits merged by bors will have CI artifacts.
        merge_base = [
            "git", "rev-list", "--author=bors@rust-lang.org", "-n1",
            "--merges", "--first-parent", "HEAD"
        ]
        commit = subprocess.check_output(merge_base, universal_newlines=True).strip()

        # Warn if there were changes to the compiler or standard library since the ancestor commit.
        status = subprocess.call(["git", "diff-index", "--quiet", commit, "--", compiler, library])
        if status != 0:
            if download_rustc == "if-unchanged":
                return None
            print("warning: `download-rustc` is enabled, but there are changes to \
                compiler/ or library/")

        if self.verbose:
            print("using downloaded stage1 artifacts from CI (commit {})".format(commit))
        self.rustc_commit = commit
        # FIXME: support downloading artifacts from the beta channel
        self.download_toolchain(False, "nightly")

    def rustc_stamp(self, stage0):
        """Return the path for .rustc-stamp at the given stage

        >>> rb = RustBuild()
        >>> rb.build_dir = "build"
        >>> rb.rustc_stamp(True) == os.path.join("build", "stage0", ".rustc-stamp")
        True
        >>> rb.rustc_stamp(False) == os.path.join("build", "ci-rustc", ".rustc-stamp")
        True
        """
        return os.path.join(self.bin_root(stage0), '.rustc-stamp')

    def rustfmt_stamp(self):
        """Return the path for .rustfmt-stamp

        >>> rb = RustBuild()
        >>> rb.build_dir = "build"
        >>> rb.rustfmt_stamp() == os.path.join("build", "stage0", ".rustfmt-stamp")
        True
        """
        return os.path.join(self.bin_root(True), '.rustfmt-stamp')

    def llvm_stamp(self):
        """Return the path for .llvm-stamp

        >>> rb = RustBuild()
        >>> rb.build_dir = "build"
        >>> rb.llvm_stamp() == os.path.join("build", "ci-llvm", ".llvm-stamp")
        True
        """
        return os.path.join(self.llvm_root(), '.llvm-stamp')

    def program_out_of_date(self, stamp_path, key):
        """Check if the given program stamp is out of date"""
        if not os.path.exists(stamp_path) or self.clean:
            return True
        with open(stamp_path, 'r') as stamp:
            return key != stamp.read()

    def bin_root(self, stage0):
        """Return the binary root directory for the given stage

        >>> rb = RustBuild()
        >>> rb.build_dir = "build"
        >>> rb.bin_root(True) == os.path.join("build", "stage0")
        True
        >>> rb.bin_root(False) == os.path.join("build", "ci-rustc")
        True

        When the 'build' property is given, the result is a nested directory:

        >>> rb.build = "devel"
        >>> rb.bin_root(True) == os.path.join("build", "devel", "stage0")
        True
        """
        if stage0:
            subdir = "stage0"
        else:
            subdir = "ci-rustc"
        return os.path.join(self.build_dir, self.build, subdir)

    def llvm_root(self):
        """Return the CI LLVM root directory

        >>> rb = RustBuild()
        >>> rb.build_dir = "build"
        >>> rb.llvm_root() == os.path.join("build", "ci-llvm")
        True

        When the 'build' property is given, the result is a nested directory:

        >>> rb.build = "devel"
        >>> rb.llvm_root() == os.path.join("build", "devel", "ci-llvm")
        True
        """
        return os.path.join(self.build_dir, self.build, "ci-llvm")

    def get_toml(self, key, section=None):
        """Returns the value of the given key in config.toml, otherwise returns None

        >>> rb = RustBuild()
        >>> rb.config_toml = 'key1 = "value1"\\nkey2 = "value2"'
        >>> rb.get_toml("key2")
        'value2'

        If the key does not exist, the result is None:

        >>> rb.get_toml("key3") is None
        True

        Optionally also matches the section the key appears in

        >>> rb.config_toml = '[a]\\nkey = "value1"\\n[b]\\nkey = "value2"'
        >>> rb.get_toml('key', 'a')
        'value1'
        >>> rb.get_toml('key', 'b')
        'value2'
        >>> rb.get_toml('key', 'c') is None
        True

        >>> rb.config_toml = 'key1 = true'
        >>> rb.get_toml("key1")
        'true'
        """

        cur_section = None
        for line in self.config_toml.splitlines():
            section_match = re.match(r'^\s*\[(.*)\]\s*$', line)
            if section_match is not None:
                cur_section = section_match.group(1)

            match = re.match(r'^{}\s*=(.*)$'.format(key), line)
            if match is not None:
                value = match.group(1)
                if section is None or section == cur_section:
                    return self.get_string(value) or value.strip()
        return None

    def cargo(self):
        """Return config path for cargo"""
        return self.program_config('cargo')

    def rustc(self, stage0):
        """Return config path for rustc"""
        return self.program_config('rustc', stage0)

    def rustfmt(self):
        """Return config path for rustfmt"""
        if self.stage0_rustfmt is None:
            return None
        return self.program_config('rustfmt')

    def program_config(self, program, stage0=True):
        """Return config path for the given program at the given stage

        >>> rb = RustBuild()
        >>> rb.config_toml = 'rustc = "rustc"\\n'
        >>> rb.program_config('rustc')
        'rustc'
        >>> rb.config_toml = ''
        >>> cargo_path = rb.program_config('cargo', True)
        >>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(True),
        ... "bin", "cargo")
        True
        >>> cargo_path = rb.program_config('cargo', False)
        >>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(False),
        ... "bin", "cargo")
        True
        """
        config = self.get_toml(program)
        if config:
            return os.path.expanduser(config)
        return os.path.join(self.bin_root(stage0), "bin", "{}{}".format(
            program, self.exe_suffix()))

    @staticmethod
    def get_string(line):
        """Return the value between double quotes

        >>> RustBuild.get_string(' "devel" ')
        'devel'
        >>> RustBuild.get_string(" 'devel' ")
        'devel'
        >>> RustBuild.get_string('devel') is None
        True
        >>> RustBuild.get_string(' "devel ')
        ''
        """
        start = line.find('"')
        if start != -1:
            end = start + 1 + line[start + 1:].find('"')
            return line[start + 1:end]
        start = line.find('\'')
        if start != -1:
            end = start + 1 + line[start + 1:].find('\'')
            return line[start + 1:end]
        return None

    @staticmethod
    def exe_suffix():
        """Return a suffix for executables"""
        if sys.platform == 'win32':
            return '.exe'
        return ''

    def bootstrap_binary(self):
        """Return the path of the bootstrap binary

        >>> rb = RustBuild()
        >>> rb.build_dir = "build"
        >>> rb.bootstrap_binary() == os.path.join("build", "bootstrap",
        ... "debug", "bootstrap")
        True
        """
        return os.path.join(self.build_dir, "bootstrap", "debug", "bootstrap")

    def build_bootstrap(self):
        """Build bootstrap"""
        build_dir = os.path.join(self.build_dir, "bootstrap")
        if self.clean and os.path.exists(build_dir):
            shutil.rmtree(build_dir)
        env = os.environ.copy()
        # `CARGO_BUILD_TARGET` breaks bootstrap build.
        # See also: <https://github.com/rust-lang/rust/issues/70208>.
        if "CARGO_BUILD_TARGET" in env:
            del env["CARGO_BUILD_TARGET"]
        env["CARGO_TARGET_DIR"] = build_dir
        env["RUSTC"] = self.rustc(True)
        env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(True), "lib") + \
            (os.pathsep + env["LD_LIBRARY_PATH"]) \
            if "LD_LIBRARY_PATH" in env else ""
        env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(True), "lib") + \
            (os.pathsep + env["DYLD_LIBRARY_PATH"]) \
            if "DYLD_LIBRARY_PATH" in env else ""
        env["LIBRARY_PATH"] = os.path.join(self.bin_root(True), "lib") + \
            (os.pathsep + env["LIBRARY_PATH"]) \
            if "LIBRARY_PATH" in env else ""
        # preserve existing RUSTFLAGS
        env.setdefault("RUSTFLAGS", "")
        env["RUSTFLAGS"] += " -Cdebuginfo=2"

        build_section = "target.{}".format(self.build)
        target_features = []
        if self.get_toml("crt-static", build_section) == "true":
            target_features += ["+crt-static"]
        elif self.get_toml("crt-static", build_section) == "false":
            target_features += ["-crt-static"]
        if target_features:
            env["RUSTFLAGS"] += " -C target-feature=" + (",".join(target_features))
        target_linker = self.get_toml("linker", build_section)
        if target_linker is not None:
            env["RUSTFLAGS"] += " -C linker=" + target_linker
        env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes"
        env["RUSTFLAGS"] += " -Wsemicolon_in_expressions_from_macros"
        if self.get_toml("deny-warnings", "rust") != "false":
            env["RUSTFLAGS"] += " -Dwarnings"

        env["PATH"] = os.path.join(self.bin_root(True), "bin") + \
            os.pathsep + env["PATH"]
        if not os.path.isfile(self.cargo()):
            raise Exception("no cargo executable found at `{}`".format(
                self.cargo()))
        args = [self.cargo(), "build", "--manifest-path",
                os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
        for _ in range(1, self.verbose):
            args.append("--verbose")
        if self.use_locked_deps:
            args.append("--locked")
        if self.use_vendored_sources:
            args.append("--frozen")
        run(args, env=env, verbose=self.verbose)

    def build_triple(self):
        """Build triple as in LLVM

        Note that `default_build_triple` is moderately expensive,
        so use `self.build` where possible.
        """
        config = self.get_toml('build')
        if config:
            return config
        return default_build_triple(self.verbose)

    def check_submodule(self, module, slow_submodules):
        if not slow_submodules:
            checked_out = subprocess.Popen(["git", "rev-parse", "HEAD"],
                                           cwd=os.path.join(self.rust_root, module),
                                           stdout=subprocess.PIPE)
            return checked_out
        else:
            return None

    def update_submodule(self, module, checked_out, recorded_submodules):
        module_path = os.path.join(self.rust_root, module)

        if checked_out is not None:
            default_encoding = sys.getdefaultencoding()
            checked_out = checked_out.communicate()[0].decode(default_encoding).strip()
            if recorded_submodules[module] == checked_out:
                return

        print("Updating submodule", module)

        run(["git", "submodule", "-q", "sync", module],
            cwd=self.rust_root, verbose=self.verbose)

        update_args = ["git", "submodule", "update", "--init", "--recursive"]
        if self.git_version >= distutils.version.LooseVersion("2.11.0"):
            update_args.append("--progress")
        update_args.append(module)
        run(update_args, cwd=self.rust_root, verbose=self.verbose, exception=True)

        run(["git", "reset", "-q", "--hard"],
            cwd=module_path, verbose=self.verbose)
        run(["git", "clean", "-qdfx"],
            cwd=module_path, verbose=self.verbose)
def update_submodules(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Update submodules"""
|
2017-05-13 05:21:35 +00:00
|
|
|
if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \
|
2017-08-26 22:01:48 +00:00
|
|
|
self.get_toml('submodules') == "false":
|
2017-05-13 05:21:35 +00:00
|
|
|
return
|
2019-09-04 15:49:30 +00:00
|
|
|
|
2020-04-25 19:43:19 +00:00
|
|
|
default_encoding = sys.getdefaultencoding()
|
|
|
|
|
|
|
|
# check the existence and version of 'git' command
|
x.py: Give a more helpful error message if curl isn't installed
This also abstracts checking for a command into `require`.
Before:
```
Updating only changed submodules
Submodules updated in 0.01 seconds
Traceback (most recent call last):
File "./x.py", line 11, in <module>
bootstrap.main()
...
File "/home/joshua/src/rust/src/bootstrap/bootstrap.py", line 137, in run
ret = subprocess.Popen(args, **kwargs)
File "/usr/lib/python2.7/subprocess.py", line 394, in __init__
errread, errwrite)
File "/usr/lib/python2.7/subprocess.py", line 1047, in _execute_child
raise child_exception
OSError: [Errno 2] No such file or directory
```
After:
```
error: unable to run `curl --version`: [Errno 2] No such file or directory
Please make sure it's installed and in the path.
```
2020-05-03 00:54:28 +00:00
|
|
|
git_version_str = require(['git', '--version']).split()[2].decode(default_encoding)
|
|
|
|
self.git_version = distutils.version.LooseVersion(git_version_str)
|
2019-09-04 15:49:30 +00:00
|
|
|
|
2018-03-30 23:42:57 +00:00
|
|
|
slow_submodules = self.get_toml('fast-submodules') == "false"
|
2018-03-15 19:13:33 +00:00
|
|
|
start_time = time()
|
|
|
|
if slow_submodules:
|
2021-03-01 04:51:15 +00:00
|
|
|
print('Unconditionally updating submodules')
|
2018-03-15 19:13:33 +00:00
|
|
|
else:
|
|
|
|
print('Updating only changed submodules')
|
2017-05-22 07:04:34 +00:00
|
|
|
default_encoding = sys.getdefaultencoding()
|
2021-03-01 04:51:15 +00:00
|
|
|
# Only update submodules that are needed to build bootstrap. These are needed because Cargo
|
|
|
|
# currently requires everything in a workspace to be "locally present" when starting a
|
|
|
|
# build, and will give a hard error if any Cargo.toml files are missing.
|
|
|
|
# FIXME: Is there a way to avoid cloning these eagerly? Bootstrap itself doesn't need to
|
|
|
|
# share a workspace with any tools - maybe it could be excluded from the workspace?
|
|
|
|
# That will still require cloning the submodules the second you check the standard
|
|
|
|
# library, though...
|
|
|
|
# FIXME: Is there a way to avoid hard-coding the submodules required?
|
|
|
|
# WARNING: keep this in sync with the submodules hard-coded in bootstrap/lib.rs
|
|
|
|
submodules = [
|
|
|
|
"src/tools/rust-installer",
|
|
|
|
"src/tools/cargo",
|
|
|
|
"src/tools/rls",
|
|
|
|
"src/tools/miri",
|
|
|
|
"library/backtrace",
|
|
|
|
"library/stdarch"
|
|
|
|
]
|
rustc: Split Emscripten to a separate codegen backend
This commit introduces a separately compiled backend for Emscripten, avoiding
compiling the `JSBackend` target in the main LLVM codegen backend. This builds
on the foundation provided by #47671 to create a new codegen backend dedicated
solely to Emscripten, removing the `JSBackend` of the main codegen backend in
the process.
A new field was added to each target for this commit which specifies the backend
to use for translation, the default being `llvm` which is the main backend that
we use. The Emscripten targets specify an `emscripten` backend instead of the
main `llvm` one.
There's a whole bunch of consequences of this change, but I'll try to enumerate
them here:
* A *second* LLVM submodule was added in this commit. The main LLVM submodule
will soon start to drift from the Emscripten submodule, but currently they're
both at the same revision.
* Logic was added to rustbuild to *not* build the Emscripten backend by default.
This is gated behind a `--enable-emscripten` flag to the configure script. By
default users should neither check out the emscripten submodule nor compile
it.
* The `init_repo.sh` script was updated to fetch the Emscripten submodule from
GitHub the same way we do the main LLVM submodule (a tarball fetch).
* The Emscripten backend, turned off by default, is still turned on for a number
of targets on CI. We'll only be shipping an Emscripten backend with Tier 1
platforms, though. All cross-compiled platforms will not be receiving an
Emscripten backend yet.
This commit means that when you download the `rustc` package in Rustup for Tier
1 platforms you'll be receiving two trans backends, one for Emscripten and one
that's the general LLVM backend. If you never compile for Emscripten you'll
never use the Emscripten backend, so we may update this one day to only download
the Emscripten backend when you add the Emscripten target. For now though it's
just an extra 10MB gzip'd.
Closes #46819
        filtered_submodules = []
        submodules_names = []
        for module in submodules:
            check = self.check_submodule(module, slow_submodules)
            filtered_submodules.append((module, check))
            submodules_names.append(module)
        recorded = subprocess.Popen(["git", "ls-tree", "HEAD"] + submodules_names,
                                    cwd=self.rust_root, stdout=subprocess.PIPE)
        recorded = recorded.communicate()[0].decode(default_encoding).strip().splitlines()
        # { filename: hash }
        recorded_submodules = {}
        for data in recorded:
            # [mode, kind, hash, filename]
            data = data.split()
            recorded_submodules[data[3]] = data[2]
        for module in filtered_submodules:
            self.update_submodule(module[0], module[1], recorded_submodules)
        print("Submodules updated in %.2f seconds" % (time() - start_time))
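As a point of reference, each line emitted by `git ls-tree HEAD <paths>` has the form `<mode> <kind> <hash>\t<path>`, which is why splitting on whitespace and taking fields 2 and 3 yields the `{filename: hash}` map. An illustration with a made-up hash:
```python
sample_line = "160000 commit 0123456789abcdef0123456789abcdef01234567\tsrc/tools/cargo"  # made-up hash
fields = sample_line.split()           # ['160000', 'commit', '0123...4567', 'src/tools/cargo']
submodule_hash = {fields[3]: fields[2]}
print(submodule_hash)                  # {'src/tools/cargo': '0123456789abcdef0123456789abcdef01234567'}
```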

    def set_dist_environment(self, url):
        """Set download URL for normal environment"""
        if 'RUSTUP_DIST_SERVER' in os.environ:
            self._download_url = os.environ['RUSTUP_DIST_SERVER']
        else:
            self._download_url = url

    def check_vendored_status(self):
        """Check that vendoring is configured properly"""
        vendor_dir = os.path.join(self.rust_root, 'vendor')
        if 'SUDO_USER' in os.environ and not self.use_vendored_sources:
            if os.environ.get('USER') != os.environ['SUDO_USER']:
                self.use_vendored_sources = True
                print('info: looks like you are running this command under `sudo`')
                print('      and so in order to preserve your $HOME this will now')
                print('      use vendored sources by default.')
                if not os.path.exists(vendor_dir):
                    print('error: vendoring required, but vendor directory does not exist.')
                    print('       Run `cargo vendor` without sudo to initialize the '
                          'vendor directory.')
                    raise Exception("{} not found".format(vendor_dir))

        if self.use_vendored_sources:
            if not os.path.exists('.cargo'):
                os.makedirs('.cargo')
            with output('.cargo/config') as cargo_config:
                cargo_config.write(
                    "[source.crates-io]\n"
                    "replace-with = 'vendored-sources'\n"
                    "registry = 'https://example.com'\n"
                    "\n"
                    "[source.vendored-sources]\n"
                    "directory = '{}/vendor'\n"
                    .format(self.rust_root))
        else:
            if os.path.exists('.cargo'):
                shutil.rmtree('.cargo')

    def ensure_vendored(self):
        """Ensure that the vendored sources are available if needed"""
        vendor_dir = os.path.join(self.rust_root, 'vendor')
        # Note that this does not handle updating the vendored dependencies if
        # the rust git repository is updated. Normal development usually does
        # not use vendoring, so hopefully this isn't too much of a problem.
        if self.use_vendored_sources and not os.path.exists(vendor_dir):
            run([
                self.cargo(),
                "vendor",
                "--sync=./src/tools/rust-analyzer/Cargo.toml",
                "--sync=./compiler/rustc_codegen_cranelift/Cargo.toml",
            ], verbose=self.verbose, cwd=self.rust_root)

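For context, the `run()` call above amounts to invoking `cargo vendor` with two `--sync` manifests from the repository root. A rough rendering of the constructed command line, with `cargo` standing in for whatever `self.cargo()` resolves to:
```python
# Illustrative only: how the argument list above reads as a single command.
args = [
    "cargo",  # placeholder for the stage0 cargo path returned by self.cargo()
    "vendor",
    "--sync=./src/tools/rust-analyzer/Cargo.toml",
    "--sync=./compiler/rustc_codegen_cranelift/Cargo.toml",
]
print(" ".join(args))
```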
def bootstrap(help_triggered):
    """Configure, fetch, build and run the initial bootstrap"""

    # If the user is asking for help, let them know that the whole download-and-build
    # process has to happen before anything is printed out.
    if help_triggered:
        print("info: Downloading and building bootstrap before processing --help")
        print("      command. See src/bootstrap/README.md for help with common")
        print("      commands.")

    parser = argparse.ArgumentParser(description='Build rust')
    parser.add_argument('--config')
    parser.add_argument('--build')
    parser.add_argument('--clean', action='store_true')
    parser.add_argument('-v', '--verbose', action='count', default=0)

    args = [a for a in sys.argv if a != '-h' and a != '--help']
    args, _ = parser.parse_known_args(args)

    # Configure initial bootstrap
    build = RustBuild()
    build.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
    build.verbose = args.verbose
    build.clean = args.clean

    # Read from `RUST_BOOTSTRAP_CONFIG`, then `--config`, then fallback to `config.toml` (if it
    # exists).
    toml_path = os.getenv('RUST_BOOTSTRAP_CONFIG') or args.config
    if not toml_path and os.path.exists('config.toml'):
        toml_path = 'config.toml'

    if toml_path:
        if not os.path.exists(toml_path):
            toml_path = os.path.join(build.rust_root, toml_path)

        with open(toml_path) as config:
            build.config_toml = config.read()

    profile = build.get_toml('profile')
    if profile is not None:
        include_file = 'config.{}.toml'.format(profile)
        include_dir = os.path.join(build.rust_root, 'src', 'bootstrap', 'defaults')
        include_path = os.path.join(include_dir, include_file)
        # HACK: This works because `build.get_toml()` returns the first match it finds for a
        # specific key, so appending our defaults at the end allows the user to override them
        with open(include_path) as included_toml:
            build.config_toml += os.linesep + included_toml.read()
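As an aside, here is how that include path resolves for a hypothetical checkout and profile value; because `get_toml()` returns the first match, appending the defaults after the user's config means the user's earlier keys win:
```python
import os

rust_root = "/path/to/rust"    # hypothetical checkout location
profile = "library"            # hypothetical `profile` value from config.toml
include_file = 'config.{}.toml'.format(profile)
include_path = os.path.join(rust_root, 'src', 'bootstrap', 'defaults', include_file)
print(include_path)            # /path/to/rust/src/bootstrap/defaults/config.library.toml
```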

    config_verbose = build.get_toml('verbose', 'build')
    if config_verbose is not None:
        build.verbose = max(build.verbose, int(config_verbose))

    build.use_vendored_sources = build.get_toml('vendor', 'build') == 'true'

    build.use_locked_deps = build.get_toml('locked-deps', 'build') == 'true'

    build.check_vendored_status()

    build_dir = build.get_toml('build-dir', 'build') or 'build'
    build.build_dir = os.path.abspath(build_dir.replace("$ROOT", build.rust_root))

    with open(os.path.join(build.rust_root, "src", "stage0.json")) as f:
        data = json.load(f)
    build.stage0_compiler = Stage0Toolchain(data["compiler"])
    if data.get("rustfmt") is not None:
        build.stage0_rustfmt = Stage0Toolchain(data["rustfmt"])

    build.set_dist_environment(data["dist_server"])
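For orientation, the keys read above imply a `src/stage0.json` of roughly the following shape. The values, and the exact per-toolchain fields consumed by `Stage0Toolchain` (defined elsewhere in this file), are placeholders rather than real entries:
```python
# Hypothetical stage0.json contents, limited to the keys this function reads.
stage0 = {
    "dist_server": "https://example.com/dist",               # passed to set_dist_environment()
    "compiler": {"date": "YYYY-MM-DD", "version": "beta"},    # wrapped in Stage0Toolchain
    "rustfmt": {"date": "YYYY-MM-DD", "version": "nightly"},  # optional; may be absent
}
```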

    build.build = args.build or build.build_triple()
    build.update_submodules()

    # Fetch/build the bootstrap
    build.download_toolchain()
    # Download the master compiler if `download-rustc` is set
    build.maybe_download_ci_toolchain()
    sys.stdout.flush()
    build.ensure_vendored()
    build.build_bootstrap()
    sys.stdout.flush()

    # Run the bootstrap
    args = [build.bootstrap_binary()]
    args.extend(sys.argv[1:])
    env = os.environ.copy()
    env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
    env["BOOTSTRAP_PYTHON"] = sys.executable
    env["BUILD_DIR"] = build.build_dir
    env["RUSTC_BOOTSTRAP"] = '1'
    if toml_path:
        env["BOOTSTRAP_CONFIG"] = toml_path
    if build.rustc_commit is not None:
        env["BOOTSTRAP_DOWNLOAD_RUSTC"] = '1'
    run(args, env=env, verbose=build.verbose, is_bootstrap=True)
|
2016-04-13 18:18:35 +00:00
|
|
|
|
2017-05-19 11:16:29 +00:00
|
|
|
|
2017-03-03 02:27:07 +00:00
|
|
|
def main():
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Entry point for the bootstrap process"""
|
2017-03-03 02:27:07 +00:00
|
|
|
start_time = time()
|
2018-10-01 01:06:58 +00:00
|
|
|
|
|
|
|
# x.py help <cmd> ...
|
|
|
|
if len(sys.argv) > 1 and sys.argv[1] == 'help':
|
2019-02-23 18:57:25 +00:00
|
|
|
sys.argv = [sys.argv[0], '-h'] + sys.argv[2:]
|
2018-10-01 01:06:58 +00:00
|
|
|
|
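Concretely (an illustrative trace, not output the script produces), `./x.py help build` is rewritten before any parsing happens, so asking for `help <cmd>` behaves like passing `-h` alongside that subcommand:
```python
argv = ['./x.py', 'help', 'build']
argv = [argv[0], '-h'] + argv[2:]
print(argv)   # ['./x.py', '-h', 'build']
```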

    help_triggered = (
        '-h' in sys.argv) or ('--help' in sys.argv) or (len(sys.argv) == 1)
    try:
        bootstrap(help_triggered)
        if not help_triggered:
            print("Build completed successfully in {}".format(
                format_build_time(time() - start_time)))
    except (SystemExit, KeyboardInterrupt) as error:
        if hasattr(error, 'code') and isinstance(error.code, int):
            exit_code = error.code
        else:
            exit_code = 1
            print(error)
        if not help_triggered:
            print("Build completed unsuccessfully in {}".format(
                format_build_time(time() - start_time)))
        sys.exit(exit_code)


if __name__ == '__main__':
    main()