2015-11-19 23:20:12 +00:00
|
|
|
# Copyright 2015-2016 The Rust Project Developers. See the COPYRIGHT
|
|
|
|
# file at the top-level directory of this distribution and at
|
|
|
|
# http://rust-lang.org/COPYRIGHT.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
# option. This file may not be copied, modified, or distributed
|
|
|
|
# except according to those terms.
|
|
|
|
|
2017-10-09 15:16:18 +00:00
|
|
|
from __future__ import absolute_import, division, print_function
|
2015-11-19 23:20:12 +00:00
|
|
|
import argparse
|
|
|
|
import contextlib
|
2016-07-02 14:19:27 +00:00
|
|
|
import datetime
|
2016-04-14 02:10:42 +00:00
|
|
|
import hashlib
|
2015-11-19 23:20:12 +00:00
|
|
|
import os
|
2017-05-13 05:21:35 +00:00
|
|
|
import re
|
2015-11-19 23:20:12 +00:00
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import tarfile
|
2016-04-30 06:09:53 +00:00
|
|
|
import tempfile
|
|
|
|
|
2016-07-02 14:19:27 +00:00
|
|
|
from time import time
|
|
|
|
|
2015-11-19 23:20:12 +00:00
|
|
|
|
|
|
|
def get(url, path, verbose=False):
    """Download *url* into *path*, verifying it against its .sha256 sum.

    An existing file at *path* is reused when it already verifies; a
    fresh download that fails verification raises RuntimeError.  Both
    temporaries are removed on every exit path.
    """
    sha_suffix = '.sha256'
    checksum_url = url + sha_suffix
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        payload_tmp = tmp.name
    with tempfile.NamedTemporaryFile(suffix=sha_suffix, delete=False) as tmp:
        checksum_tmp = tmp.name

    try:
        download(checksum_tmp, checksum_url, False, verbose)
        if os.path.exists(path):
            # A previous download may already be valid; reuse it if so.
            if verify(path, checksum_tmp, False):
                if verbose:
                    print("using already-download file", path)
                return
            if verbose:
                print("ignoring already-download file",
                      path, "due to failed verification")
            os.unlink(path)
        download(payload_tmp, url, True, verbose)
        if not verify(payload_tmp, checksum_tmp, verbose):
            raise RuntimeError("failed verification")
        if verbose:
            print("moving {} to {}".format(payload_tmp, path))
        shutil.move(payload_tmp, path)
    finally:
        # Clean up both temporaries whether or not the download succeeded.
        delete_if_present(checksum_tmp, verbose)
        delete_if_present(payload_tmp, verbose)
|
2016-05-08 08:00:36 +00:00
|
|
|
|
|
|
|
|
2016-11-16 20:31:19 +00:00
|
|
|
def delete_if_present(path, verbose):
    """Remove the given file if present"""
    if not os.path.isfile(path):
        return
    if verbose:
        print("removing", path)
    os.unlink(path)
|
2016-04-30 06:09:53 +00:00
|
|
|
|
|
|
|
|
2016-11-16 20:31:19 +00:00
|
|
|
def download(path, url, probably_big, verbose):
    """Fetch *url* into *path*, retrying a few times on spurious failures.

    The first four attempts raise RuntimeError on failure (caught here);
    the final attempt lets `_download` terminate the process instead.
    """
    attempts = 4
    for _ in range(attempts):
        try:
            _download(path, url, probably_big, verbose, True)
            return
        except RuntimeError:
            print("\nspurious failure, trying again")
    # Last try: a failure here exits the process rather than raising.
    _download(path, url, probably_big, verbose, False)
|
|
|
|
|
|
|
|
|
|
|
|
def _download(path, url, probably_big, verbose, exception):
    """Single download attempt via PowerShell (Windows) or curl (elsewhere)."""
    if probably_big or verbose:
        print("downloading {}".format(url))
    # see http://serverfault.com/questions/301128/how-to-download
    if sys.platform == 'win32':
        command = ("(New-Object System.Net.WebClient)"
                   ".DownloadFile('{}', '{}')".format(url, path))
        run(["PowerShell.exe", "/nologo", "-Command", command],
            verbose=verbose,
            exception=exception)
        return
    # curl: progress bar for big/verbose downloads, silent otherwise.
    option = "-#" if (probably_big or verbose) else "-s"
    run(["curl", option, "--retry", "3", "-Sf", "-o", path, url],
        verbose=verbose,
        exception=exception)
|
2016-04-30 06:09:53 +00:00
|
|
|
|
|
|
|
|
2016-05-08 07:54:50 +00:00
|
|
|
def verify(path, sha_path, verbose):
    """Check if the sha256 sum of the given path is valid

    Returns True when the hex digest of *path* matches the first
    whitespace-separated token on the first line of *sha_path*
    (the format emitted by sha256sum).
    """
    if verbose:
        print("verifying", path)
    # Hash in fixed-size chunks so large tarballs are not read into
    # memory all at once.
    hasher = hashlib.sha256()
    with open(path, "rb") as source:
        for chunk in iter(lambda: source.read(1024 * 1024), b''):
            hasher.update(chunk)
    found = hasher.hexdigest()
    with open(sha_path, "r") as sha256sum:
        expected = sha256sum.readline().split()[0]
    verified = found == expected
    if not verified:
        print("invalid checksum:\n"
              "  found: {}\n"
              "  expected: {}".format(found, expected))
    return verified
|
2015-11-19 23:20:12 +00:00
|
|
|
|
2016-04-30 06:09:53 +00:00
|
|
|
|
2015-11-19 23:20:12 +00:00
|
|
|
def unpack(tarball, dst, verbose=False, match=None):
    """Unpack the given tarball file into *dst*.

    The tarball is expected to contain a single top-level directory named
    after the tarball itself (minus ".tar.gz"); that prefix is stripped so
    the contents land directly under *dst*.  When *match* is given, only
    members whose prefix-stripped path starts with *match* are extracted,
    and the *match* component is stripped as well.
    """
    print("extracting", tarball)
    # Top-level directory inside the archive, e.g. "rustc-beta-<triple>".
    fname = os.path.basename(tarball).replace(".tar.gz", "")
    # contextlib.closing guarantees the archive is closed even on error.
    with contextlib.closing(tarfile.open(tarball)) as tar:
        for member in tar.getnames():
            # Skip the top-level directory entry itself.
            if "/" not in member:
                continue
            name = member.replace(fname + "/", "", 1)
            if match is not None and not name.startswith(match):
                continue
            # NOTE(review): this slice would raise TypeError when match is
            # None; every caller in this file passes match — confirm before
            # relying on the default.
            name = name[len(match) + 1:]

            dst_path = os.path.join(dst, name)
            if verbose:
                print(" extracting", member)
            tar.extract(member, dst)
            src_path = os.path.join(dst, member)
            # Don't clobber an existing directory tree at the destination;
            # its children are moved individually on later iterations.
            if os.path.isdir(src_path) and os.path.exists(dst_path):
                continue
            shutil.move(src_path, dst_path)
    # Remove the now-drained unpacked top-level directory.
    shutil.rmtree(os.path.join(dst, fname))
|
|
|
|
|
2017-07-01 04:24:35 +00:00
|
|
|
|
2017-05-18 08:33:24 +00:00
|
|
|
def run(args, verbose=False, exception=False, **kwargs):
    """Spawn *args* as a child process and wait for it to finish.

    On a non-zero exit status, raises RuntimeError when *verbose* or
    *exception* is set; otherwise terminates via sys.exit with a message.
    """
    if verbose:
        print("running: " + ' '.join(args))
    sys.stdout.flush()
    # Use Popen here instead of call() as it apparently allows powershell on
    # Windows to not lock up waiting for input presumably.
    proc = subprocess.Popen(args, **kwargs)
    status = proc.wait()
    if status == 0:
        return
    err = "failed to run: " + ' '.join(args)
    if not (verbose or exception):
        sys.exit(err)
    raise RuntimeError(err)
|
2015-11-19 23:20:12 +00:00
|
|
|
|
2017-05-19 11:16:29 +00:00
|
|
|
|
2016-04-13 18:18:35 +00:00
|
|
|
def stage0_data(rust_root):
    """Parse src/stage0.txt into a {key: value} dictionary.

    Lines starting with '#' and blank lines are ignored; every other
    line must look like "key: value".
    """
    stage0_path = os.path.join(rust_root, "src/stage0.txt")
    data = {}
    with open(stage0_path, 'r') as stage0_file:
        for raw_line in stage0_file:
            if raw_line.startswith("#"):
                continue
            line = raw_line.rstrip()
            if not line:
                continue
            key, value = line.split(": ", 1)
            data[key] = value
    return data
|
2016-04-13 18:18:35 +00:00
|
|
|
|
2017-05-19 11:16:29 +00:00
|
|
|
|
2016-07-02 14:19:27 +00:00
|
|
|
def format_build_time(duration):
    """Return a human-readable H:MM:SS string for *duration* seconds.

    >>> format_build_time('300')
    '0:05:00'
    """
    seconds = int(duration)
    return str(datetime.timedelta(seconds=seconds))
|
|
|
|
|
2016-09-18 06:31:06 +00:00
|
|
|
|
2017-08-26 22:01:48 +00:00
|
|
|
def default_build_triple():
    """Build triple as in LLVM.

    Probes the host via `uname -s` / `uname -m` (plus OS-specific helpers)
    and maps the results onto a "<cpu>-<os>" triple.  Falls back to the
    MSVC triple on Windows when `uname` is unavailable.
    """
    default_encoding = sys.getdefaultencoding()
    try:
        ostype = subprocess.check_output(
            ['uname', '-s']).strip().decode(default_encoding)
        cputype = subprocess.check_output(
            ['uname', '-m']).strip().decode(default_encoding)
    except (subprocess.CalledProcessError, OSError):
        # No uname: assume a stock Windows environment.
        if sys.platform == 'win32':
            return 'x86_64-pc-windows-msvc'
        err = "uname not found"
        sys.exit(err)

    # The goal here is to come up with the same triple as LLVM would,
    # at least for the subset of platforms we're willing to target.
    ostype_mapper = {
        'Bitrig': 'unknown-bitrig',
        'Darwin': 'apple-darwin',
        'DragonFly': 'unknown-dragonfly',
        'FreeBSD': 'unknown-freebsd',
        'Haiku': 'unknown-haiku',
        'NetBSD': 'unknown-netbsd',
        'OpenBSD': 'unknown-openbsd'
    }

    # Consider the direct transformation first and then the special cases
    if ostype in ostype_mapper:
        ostype = ostype_mapper[ostype]
    elif ostype == 'Linux':
        # Distinguish Android from regular glibc Linux.
        os_from_sp = subprocess.check_output(
            ['uname', '-o']).strip().decode(default_encoding)
        if os_from_sp == 'Android':
            ostype = 'linux-android'
        else:
            ostype = 'unknown-linux-gnu'
    elif ostype == 'SunOS':
        ostype = 'sun-solaris'
        # On Solaris, uname -m will return a machine classification instead
        # of a cpu type, so uname -p is recommended instead. However, the
        # output from that option is too generic for our purposes (it will
        # always emit 'i386' on x86/amd64 systems). As such, isainfo -k
        # must be used instead.
        try:
            cputype = subprocess.check_output(
                ['isainfo', '-k']).strip().decode(default_encoding)
        except (subprocess.CalledProcessError, OSError):
            err = "isainfo not found"
            sys.exit(err)
    elif ostype.startswith('MINGW'):
        # msys' `uname` does not print gcc configuration, but prints msys
        # configuration. so we cannot believe `uname -m`:
        # msys1 is always i686 and msys2 is always x86_64.
        # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
        # MINGW64 on x86_64.
        ostype = 'pc-windows-gnu'
        cputype = 'i686'
        if os.environ.get('MSYSTEM') == 'MINGW64':
            cputype = 'x86_64'
    elif ostype.startswith('MSYS'):
        ostype = 'pc-windows-gnu'
    elif ostype.startswith('CYGWIN_NT'):
        # Cygwin reports "...WOW64" when a 32-bit shell runs on 64-bit.
        cputype = 'i686'
        if ostype.endswith('WOW64'):
            cputype = 'x86_64'
        ostype = 'pc-windows-gnu'
    else:
        err = "unknown OS type: {}".format(ostype)
        sys.exit(err)

    cputype_mapper = {
        'BePC': 'i686',
        'aarch64': 'aarch64',
        'amd64': 'x86_64',
        'arm64': 'aarch64',
        'i386': 'i686',
        'i486': 'i686',
        'i686': 'i686',
        'i786': 'i686',
        'powerpc': 'powerpc',
        'powerpc64': 'powerpc64',
        'powerpc64le': 'powerpc64le',
        'ppc': 'powerpc',
        'ppc64': 'powerpc64',
        'ppc64le': 'powerpc64le',
        's390x': 's390x',
        'x64': 'x86_64',
        'x86': 'i686',
        'x86-64': 'x86_64',
        'x86_64': 'x86_64'
    }

    # Consider the direct transformation first and then the special cases
    if cputype in cputype_mapper:
        cputype = cputype_mapper[cputype]
    elif cputype in {'xscale', 'arm'}:
        cputype = 'arm'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
    elif cputype == 'armv6l':
        cputype = 'arm'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        else:
            # Hard-float ABI for armv6 on non-Android Linux.
            ostype += 'eabihf'
    elif cputype in {'armv7l', 'armv8l'}:
        cputype = 'armv7'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        else:
            ostype += 'eabihf'
    elif cputype == 'mips':
        # MIPS endianness follows the host interpreter's byte order.
        if sys.byteorder == 'big':
            cputype = 'mips'
        elif sys.byteorder == 'little':
            cputype = 'mipsel'
        else:
            raise ValueError("unknown byteorder: {}".format(sys.byteorder))
    elif cputype == 'mips64':
        if sys.byteorder == 'big':
            cputype = 'mips64'
        elif sys.byteorder == 'little':
            cputype = 'mips64el'
        else:
            raise ValueError('unknown byteorder: {}'.format(sys.byteorder))
        # only the n64 ABI is supported, indicate it
        ostype += 'abi64'
    elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64':
        pass
    else:
        err = "unknown cpu type: {}".format(cputype)
        sys.exit(err)

    return "{}-{}".format(cputype, ostype)
|
|
|
|
|
2017-10-09 00:08:11 +00:00
|
|
|
|
2016-09-18 06:31:06 +00:00
|
|
|
class RustBuild(object):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Provide all the methods required to build Rust"""
|
|
|
|
    def __init__(self):
        # Release channel of the stage0 cargo tarball (set from stage0.txt).
        self.cargo_channel = ''
        # Date stamp of the stage0 snapshot to download.
        self.date = ''
        # Base URL the stage0 tarballs are fetched from.
        self._download_url = 'https://static.rust-lang.org'
        # Release channel of the stage0 rustc tarball.
        self.rustc_channel = ''
        # Host build triple (empty until configured).
        self.build = ''
        # Directory all build artifacts are placed under.
        self.build_dir = os.path.join(os.getcwd(), "build")
        # When True, stale downloads and bootstrap artifacts are removed.
        self.clean = False
        # Raw contents of config.toml (scanned line-by-line by get_toml()).
        self.config_toml = ''
        # Repository root, three directory levels up from this file.
        self.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
        # Truthy => pass --locked to cargo.
        self.use_locked_deps = ''
        # Truthy => pass --frozen to cargo (vendored crates.io sources).
        self.use_vendored_sources = ''
        # Verbosity level; values > 1 double cargo's --verbose flag.
        self.verbose = False
|
2017-05-19 11:16:29 +00:00
|
|
|
|
2016-04-13 18:18:35 +00:00
|
|
|
    def download_stage0(self):
        """Fetch the build system for Rust, written in Rust

        This method will build a cache directory, then it will fetch the
        tarball which has the stage0 compiler used to then bootstrap the Rust
        compiler itself.

        Each downloaded tarball is extracted, after that, the script
        will move all the content to the right place.
        """
        rustc_channel = self.rustc_channel
        cargo_channel = self.cargo_channel

        # Re-download rustc only when it lives inside our stage0 directory
        # (i.e. is not a user-provided toolchain) and is missing or stale.
        if self.rustc().startswith(self.bin_root()) and \
                (not os.path.exists(self.rustc()) or
                 self.program_out_of_date(self.rustc_stamp())):
            if os.path.exists(self.bin_root()):
                # Wipe the whole stage0 tree so no stale files survive.
                shutil.rmtree(self.bin_root())
            filename = "rust-std-{}-{}.tar.gz".format(
                rustc_channel, self.build)
            pattern = "rust-std-{}".format(self.build)
            self._download_stage0_helper(filename, pattern)

            filename = "rustc-{}-{}.tar.gz".format(rustc_channel, self.build)
            self._download_stage0_helper(filename, "rustc")
            # Patch the ELF interpreter for NixOS hosts (no-op elsewhere).
            self.fix_executable("{}/bin/rustc".format(self.bin_root()))
            self.fix_executable("{}/bin/rustdoc".format(self.bin_root()))
            # Record the snapshot date so later runs can detect staleness.
            with open(self.rustc_stamp(), 'w') as rust_stamp:
                rust_stamp.write(self.date)

            # This is required so that we don't mix incompatible MinGW
            # libraries/binaries that are included in rust-std with
            # the system MinGW ones.
            if "pc-windows-gnu" in self.build:
                filename = "rust-mingw-{}-{}.tar.gz".format(
                    rustc_channel, self.build)
                self._download_stage0_helper(filename, "rust-mingw")

        # Same staleness check for cargo, which is versioned independently.
        if self.cargo().startswith(self.bin_root()) and \
                (not os.path.exists(self.cargo()) or
                 self.program_out_of_date(self.cargo_stamp())):
            filename = "cargo-{}-{}.tar.gz".format(cargo_channel, self.build)
            self._download_stage0_helper(filename, "cargo")
            self.fix_executable("{}/bin/cargo".format(self.bin_root()))
            with open(self.cargo_stamp(), 'w') as cargo_stamp:
                cargo_stamp.write(self.date)
|
|
|
|
|
|
|
|
def _download_stage0_helper(self, filename, pattern):
|
|
|
|
cache_dst = os.path.join(self.build_dir, "cache")
|
|
|
|
rustc_cache = os.path.join(cache_dst, self.date)
|
|
|
|
if not os.path.exists(rustc_cache):
|
|
|
|
os.makedirs(rustc_cache)
|
|
|
|
|
|
|
|
url = "{}/dist/{}".format(self._download_url, self.date)
|
|
|
|
tarball = os.path.join(rustc_cache, filename)
|
|
|
|
if not os.path.exists(tarball):
|
|
|
|
get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
|
|
|
|
unpack(tarball, self.bin_root(), match=pattern, verbose=self.verbose)
|
2017-02-06 08:30:01 +00:00
|
|
|
|
2017-07-03 04:32:42 +00:00
|
|
|
    @staticmethod
    def fix_executable(fname):
        """Modifies the interpreter section of 'fname' to fix the dynamic linker

        This method is only required on NixOS and uses the PatchELF utility to
        change the dynamic linker of ELF executables.

        Please see https://nixos.org/patchelf.html for more information
        """
        default_encoding = sys.getdefaultencoding()
        try:
            ostype = subprocess.check_output(
                ['uname', '-s']).strip().decode(default_encoding)
        except subprocess.CalledProcessError:
            return
        except OSError as reason:
            # winerror is present only on Windows, where uname is simply
            # absent; any other OSError is a real failure.
            if getattr(reason, 'winerror', None) is not None:
                return
            raise reason

        if ostype != "Linux":
            return

        # NixOS detection: the marker file exists and there is no
        # conventional /lib (NixOS keeps libraries in the nix store).
        if not os.path.exists("/etc/NIXOS"):
            return
        if os.path.exists("/lib"):
            return

        # At this point we're pretty sure the user is running NixOS
        nix_os_msg = "info: you seem to be running NixOS. Attempting to patch"
        print(nix_os_msg, fname)

        try:
            interpreter = subprocess.check_output(
                ["patchelf", "--print-interpreter", fname])
            interpreter = interpreter.strip().decode(default_encoding)
        except subprocess.CalledProcessError as reason:
            print("warning: failed to call patchelf:", reason)
            return

        # Basename of the dynamic loader, e.g. "ld-linux-x86-64.so.2".
        loader = interpreter.split("/")[-1]

        # Ask a known-good system binary where its loader actually lives.
        try:
            ldd_output = subprocess.check_output(
                ['ldd', '/run/current-system/sw/bin/sh'])
            ldd_output = ldd_output.strip().decode(default_encoding)
        except subprocess.CalledProcessError as reason:
            print("warning: unable to call ldd:", reason)
            return

        for line in ldd_output.splitlines():
            libname = line.split()[0]
            if libname.endswith(loader):
                loader_path = libname[:len(libname) - len(loader)]
                break
        else:
            print("warning: unable to find the path to the dynamic linker")
            return

        correct_interpreter = loader_path + loader

        try:
            subprocess.check_output(
                ["patchelf", "--set-interpreter", correct_interpreter, fname])
        except subprocess.CalledProcessError as reason:
            print("warning: failed to call patchelf:", reason)
            return
|
|
|
|
|
2015-11-19 23:20:12 +00:00
|
|
|
def rustc_stamp(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Return the path for .rustc-stamp
|
|
|
|
|
|
|
|
>>> rb = RustBuild()
|
|
|
|
>>> rb.build_dir = "build"
|
|
|
|
>>> rb.rustc_stamp() == os.path.join("build", "stage0", ".rustc-stamp")
|
|
|
|
True
|
|
|
|
"""
|
2015-11-19 23:20:12 +00:00
|
|
|
return os.path.join(self.bin_root(), '.rustc-stamp')
|
|
|
|
|
|
|
|
def cargo_stamp(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Return the path for .cargo-stamp
|
2015-11-19 23:20:12 +00:00
|
|
|
|
2017-07-03 04:32:42 +00:00
|
|
|
>>> rb = RustBuild()
|
|
|
|
>>> rb.build_dir = "build"
|
|
|
|
>>> rb.cargo_stamp() == os.path.join("build", "stage0", ".cargo-stamp")
|
|
|
|
True
|
|
|
|
"""
|
|
|
|
return os.path.join(self.bin_root(), '.cargo-stamp')
|
2015-11-19 23:20:12 +00:00
|
|
|
|
2017-07-03 04:32:42 +00:00
|
|
|
def program_out_of_date(self, stamp_path):
|
|
|
|
"""Check if the given program stamp is out of date"""
|
|
|
|
if not os.path.exists(stamp_path) or self.clean:
|
2015-11-19 23:20:12 +00:00
|
|
|
return True
|
2017-07-03 04:32:42 +00:00
|
|
|
with open(stamp_path, 'r') as stamp:
|
|
|
|
return self.date != stamp.read()
|
2015-11-19 23:20:12 +00:00
|
|
|
|
|
|
|
def bin_root(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Return the binary root directory
|
|
|
|
|
|
|
|
>>> rb = RustBuild()
|
|
|
|
>>> rb.build_dir = "build"
|
|
|
|
>>> rb.bin_root() == os.path.join("build", "stage0")
|
|
|
|
True
|
|
|
|
|
|
|
|
When the 'build' property is given should be a nested directory:
|
|
|
|
|
|
|
|
>>> rb.build = "devel"
|
|
|
|
>>> rb.bin_root() == os.path.join("build", "devel", "stage0")
|
|
|
|
True
|
|
|
|
"""
|
2015-11-19 23:20:12 +00:00
|
|
|
return os.path.join(self.build_dir, self.build, "stage0")
|
|
|
|
|
|
|
|
def get_toml(self, key):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Returns the value of the given key in config.toml, otherwise returns None
|
|
|
|
|
|
|
|
>>> rb = RustBuild()
|
|
|
|
>>> rb.config_toml = 'key1 = "value1"\\nkey2 = "value2"'
|
|
|
|
>>> rb.get_toml("key2")
|
|
|
|
'value2'
|
|
|
|
|
|
|
|
If the key does not exists, the result is None:
|
|
|
|
|
2017-10-08 23:46:58 +00:00
|
|
|
>>> rb.get_toml("key3") is None
|
2017-07-03 04:32:42 +00:00
|
|
|
True
|
|
|
|
"""
|
2015-11-19 23:20:12 +00:00
|
|
|
for line in self.config_toml.splitlines():
|
2017-05-13 05:21:35 +00:00
|
|
|
match = re.match(r'^{}\s*=(.*)$'.format(key), line)
|
|
|
|
if match is not None:
|
|
|
|
value = match.group(1)
|
|
|
|
return self.get_string(value) or value.strip()
|
2015-11-19 23:20:12 +00:00
|
|
|
return None
|
|
|
|
|
|
|
|
def cargo(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Return config path for cargo"""
|
|
|
|
return self.program_config('cargo')
|
2015-11-19 23:20:12 +00:00
|
|
|
|
|
|
|
def rustc(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Return config path for rustc"""
|
|
|
|
return self.program_config('rustc')
|
|
|
|
|
|
|
|
def program_config(self, program):
|
|
|
|
"""Return config path for the given program
|
|
|
|
|
|
|
|
>>> rb = RustBuild()
|
|
|
|
>>> rb.config_toml = 'rustc = "rustc"\\n'
|
|
|
|
>>> rb.program_config('rustc')
|
|
|
|
'rustc'
|
|
|
|
>>> rb.config_toml = ''
|
|
|
|
>>> cargo_path = rb.program_config('cargo')
|
|
|
|
>>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(),
|
|
|
|
... "bin", "cargo")
|
|
|
|
True
|
|
|
|
"""
|
|
|
|
config = self.get_toml(program)
|
2015-11-19 23:20:12 +00:00
|
|
|
if config:
|
2017-10-18 21:22:32 +00:00
|
|
|
return os.path.expanduser(config)
|
2017-07-03 04:32:42 +00:00
|
|
|
return os.path.join(self.bin_root(), "bin", "{}{}".format(
|
|
|
|
program, self.exe_suffix()))
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def get_string(line):
|
|
|
|
"""Return the value between double quotes
|
|
|
|
|
|
|
|
>>> RustBuild.get_string(' "devel" ')
|
|
|
|
'devel'
|
|
|
|
"""
|
2015-11-19 23:20:12 +00:00
|
|
|
start = line.find('"')
|
2017-08-26 22:01:48 +00:00
|
|
|
if start != -1:
|
|
|
|
end = start + 1 + line[start + 1:].find('"')
|
|
|
|
return line[start + 1:end]
|
|
|
|
start = line.find('\'')
|
|
|
|
if start != -1:
|
|
|
|
end = start + 1 + line[start + 1:].find('\'')
|
|
|
|
return line[start + 1:end]
|
|
|
|
return None
|
2015-11-19 23:20:12 +00:00
|
|
|
|
2017-07-03 04:32:42 +00:00
|
|
|
@staticmethod
|
|
|
|
def exe_suffix():
|
|
|
|
"""Return a suffix for executables"""
|
2015-11-19 23:20:12 +00:00
|
|
|
if sys.platform == 'win32':
|
|
|
|
return '.exe'
|
2017-07-01 04:24:35 +00:00
|
|
|
return ''
|
2015-11-19 23:20:12 +00:00
|
|
|
|
2016-11-16 20:31:19 +00:00
|
|
|
def bootstrap_binary(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Return the path of the boostrap binary
|
|
|
|
|
|
|
|
>>> rb = RustBuild()
|
|
|
|
>>> rb.build_dir = "build"
|
|
|
|
>>> rb.bootstrap_binary() == os.path.join("build", "bootstrap",
|
|
|
|
... "debug", "bootstrap")
|
|
|
|
True
|
|
|
|
"""
|
|
|
|
return os.path.join(self.build_dir, "bootstrap", "debug", "bootstrap")
|
2016-11-16 20:31:19 +00:00
|
|
|
|
2015-11-19 23:20:12 +00:00
|
|
|
def build_bootstrap(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Build bootstrap"""
|
2016-05-31 20:24:28 +00:00
|
|
|
build_dir = os.path.join(self.build_dir, "bootstrap")
|
|
|
|
if self.clean and os.path.exists(build_dir):
|
|
|
|
shutil.rmtree(build_dir)
|
2015-11-19 23:20:12 +00:00
|
|
|
env = os.environ.copy()
|
2017-06-07 02:32:43 +00:00
|
|
|
env["RUSTC_BOOTSTRAP"] = '1'
|
2016-05-31 20:24:28 +00:00
|
|
|
env["CARGO_TARGET_DIR"] = build_dir
|
2015-11-19 23:20:12 +00:00
|
|
|
env["RUSTC"] = self.rustc()
|
2017-02-19 01:41:56 +00:00
|
|
|
env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
|
2017-05-19 11:16:29 +00:00
|
|
|
(os.pathsep + env["LD_LIBRARY_PATH"]) \
|
|
|
|
if "LD_LIBRARY_PATH" in env else ""
|
2017-02-19 01:41:56 +00:00
|
|
|
env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
|
2017-05-19 11:16:29 +00:00
|
|
|
(os.pathsep + env["DYLD_LIBRARY_PATH"]) \
|
|
|
|
if "DYLD_LIBRARY_PATH" in env else ""
|
2017-04-24 14:21:36 +00:00
|
|
|
env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
|
2017-05-19 11:16:29 +00:00
|
|
|
(os.pathsep + env["LIBRARY_PATH"]) \
|
|
|
|
if "LIBRARY_PATH" in env else ""
|
2018-01-15 17:44:00 +00:00
|
|
|
env["RUSTFLAGS"] = "-Cdebuginfo=2"
|
2015-11-19 23:20:12 +00:00
|
|
|
env["PATH"] = os.path.join(self.bin_root(), "bin") + \
|
2017-05-19 11:16:29 +00:00
|
|
|
os.pathsep + env["PATH"]
|
2016-11-16 23:02:56 +00:00
|
|
|
if not os.path.isfile(self.cargo()):
|
2017-07-03 04:32:42 +00:00
|
|
|
raise Exception("no cargo executable found at `{}`".format(
|
|
|
|
self.cargo()))
|
2016-11-01 20:46:38 +00:00
|
|
|
args = [self.cargo(), "build", "--manifest-path",
|
|
|
|
os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
|
2017-05-24 07:10:15 +00:00
|
|
|
if self.verbose:
|
|
|
|
args.append("--verbose")
|
|
|
|
if self.verbose > 1:
|
|
|
|
args.append("--verbose")
|
2017-02-10 20:59:40 +00:00
|
|
|
if self.use_locked_deps:
|
|
|
|
args.append("--locked")
|
2016-11-01 20:46:38 +00:00
|
|
|
if self.use_vendored_sources:
|
|
|
|
args.append("--frozen")
|
2017-05-24 07:11:10 +00:00
|
|
|
run(args, env=env, verbose=self.verbose)
|
2015-11-19 23:20:12 +00:00
|
|
|
|
|
|
|
def build_triple(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Build triple as in LLVM"""
|
2015-11-19 23:20:12 +00:00
|
|
|
config = self.get_toml('build')
|
2015-11-20 00:55:21 +00:00
|
|
|
if config:
|
|
|
|
return config
|
2017-08-26 22:01:48 +00:00
|
|
|
return default_build_triple()
|
2015-11-19 23:20:12 +00:00
|
|
|
|
2017-05-13 05:21:35 +00:00
|
|
|
def update_submodules(self):
|
2017-07-03 04:32:42 +00:00
|
|
|
"""Update submodules"""
|
2017-05-13 05:21:35 +00:00
|
|
|
if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \
|
2017-08-26 22:01:48 +00:00
|
|
|
self.get_toml('submodules') == "false":
|
2017-05-13 05:21:35 +00:00
|
|
|
return
|
|
|
|
print('Updating submodules')
|
2017-05-22 07:04:34 +00:00
|
|
|
default_encoding = sys.getdefaultencoding()
|
2017-09-15 17:12:14 +00:00
|
|
|
run(["git", "submodule", "-q", "sync"], cwd=self.rust_root, verbose=self.verbose)
|
2017-05-22 07:04:34 +00:00
|
|
|
submodules = [s.split(' ', 1)[1] for s in subprocess.check_output(
|
2017-07-01 04:24:35 +00:00
|
|
|
["git", "config", "--file",
|
|
|
|
os.path.join(self.rust_root, ".gitmodules"),
|
2017-05-22 07:04:34 +00:00
|
|
|
"--get-regexp", "path"]
|
|
|
|
).decode(default_encoding).splitlines()]
|
rustc: Split Emscripten to a separate codegen backend
This commit introduces a separately compiled backend for Emscripten, avoiding
compiling the `JSBackend` target in the main LLVM codegen backend. This builds
on the foundation provided by #47671 to create a new codegen backend dedicated
solely to Emscripten, removing the `JSBackend` of the main codegen backend in
the process.
A new field was added to each target for this commit which specifies the backend
to use for translation, the default being `llvm` which is the main backend that
we use. The Emscripten targets specify an `emscripten` backend instead of the
main `llvm` one.
There's a whole bunch of consequences of this change, but I'll try to enumerate
them here:
* A *second* LLVM submodule was added in this commit. The main LLVM submodule
will soon start to drift from the Emscripten submodule, but currently they're
both at the same revision.
* Logic was added to rustbuild to *not* build the Emscripten backend by default.
This is gated behind a `--enable-emscripten` flag to the configure script. By
default users should neither check out the emscripten submodule nor compile
it.
* The `init_repo.sh` script was updated to fetch the Emscripten submodule from
GitHub the same way we do the main LLVM submodule (a tarball fetch).
* The Emscripten backend, turned off by default, is still turned on for a number
of targets on CI. We'll only be shipping an Emscripten backend with Tier 1
platforms, though. All cross-compiled platforms will not be receiving an
Emscripten backend yet.
This commit means that when you download the `rustc` package in Rustup for Tier
1 platforms you'll be receiving two trans backends, one for Emscripten and one
that's the general LLVM backend. If you never compile for Emscripten you'll
never use the Emscripten backend, so we may update this one day to only download
the Emscripten backend when you add the Emscripten target. For now though it's
just an extra 10MB gzip'd.
Closes #46819
2018-01-24 16:22:34 +00:00
|
|
|
filtered_submodules = []
|
|
|
|
for module in submodules:
|
|
|
|
if module.endswith("llvm"):
|
|
|
|
if self.get_toml('llvm-config'):
|
|
|
|
continue
|
|
|
|
if module.endswith("llvm-emscripten"):
|
|
|
|
backends = self.get_toml('codegen-backends')
|
|
|
|
if backends is None or not 'emscripten' in backends:
|
|
|
|
continue
|
|
|
|
if module.endswith("jemalloc"):
|
|
|
|
if self.get_toml('use-jemalloc') == 'false':
|
|
|
|
continue
|
|
|
|
if self.get_toml('jemalloc'):
|
|
|
|
continue
|
rust: Import LLD for linking wasm objects
This commit imports the LLD project from LLVM to serve as the default linker for
the `wasm32-unknown-unknown` target. The `binaryen` submoule is consequently
removed along with "binaryen linker" support in rustc.
Moving to LLD brings with it a number of benefits for wasm code:
* LLD is itself an actual linker, so there's no need to compile all wasm code
with LTO any more. As a result builds should be *much* speedier as LTO is no
longer forcibly enabled for all builds of the wasm target.
* LLD is quickly becoming an "official solution" for linking wasm code together.
This, I believe at least, is intended to be the main supported linker for
native code and wasm moving forward. Picking up support early on should help
ensure that we can help LLD identify bugs and otherwise prove that it works
great for all our use cases!
* Improvements to the wasm toolchain are currently primarily focused around LLVM
and LLD (from what I can tell at least), so it's in general much better to be
on this bandwagon for bugfixes and new features.
* Historical "hacks" like `wasm-gc` will soon no longer be necessary, LLD
will [natively implement][gc] `--gc-sections` (better than `wasm-gc`!) which
means a postprocessor is no longer needed to show off Rust's "small wasm
binary size".
LLD is added in a pretty standard way to rustc right now. A new rustbuild target
was defined for building LLD, and this is executed when a compiler's sysroot is
being assembled. LLD is compiled against the LLVM that we've got in tree, which
means we're currently on the `release_60` branch, but this may get upgraded in
the near future!
LLD is placed into rustc's sysroot in a `bin` directory. This is similar to
where `gcc.exe` can be found on Windows. This directory is automatically added
to `PATH` whenever rustc executes the linker, allowing us to define a `WasmLd`
linker which implements the interface that `wasm-ld`, LLD's frontend, expects.
Like Emscripten the LLD target is currently only enabled for Tier 1 platforms,
notably OSX/Windows/Linux, and will need to be installed manually for compiling
to wasm on other platforms. LLD is by default turned off in rustbuild, and
requires a `config.toml` option to be enabled to turn it on.
Finally the unstable `#![wasm_import_memory]` attribute was also removed as LLD
has a native option for controlling this.
[gc]: https://reviews.llvm.org/D42511
2017-08-27 01:30:12 +00:00
|
|
|
if module.endswith("lld"):
|
|
|
|
config = self.get_toml('lld')
|
|
|
|
if config is None or config == 'false':
|
|
|
|
continue
|
rustc: Split Emscripten to a separate codegen backend
This commit introduces a separately compiled backend for Emscripten, avoiding
compiling the `JSBackend` target in the main LLVM codegen backend. This builds
on the foundation provided by #47671 to create a new codegen backend dedicated
solely to Emscripten, removing the `JSBackend` of the main codegen backend in
the process.
A new field was added to each target for this commit which specifies the backend
to use for translation, the default being `llvm` which is the main backend that
we use. The Emscripten targets specify an `emscripten` backend instead of the
main `llvm` one.
There's a whole bunch of consequences of this change, but I'll try to enumerate
them here:
* A *second* LLVM submodule was added in this commit. The main LLVM submodule
will soon start to drift from the Emscripten submodule, but currently they're
both at the same revision.
* Logic was added to rustbuild to *not* build the Emscripten backend by default.
This is gated behind a `--enable-emscripten` flag to the configure script. By
default users should neither check out the emscripten submodule nor compile
it.
* The `init_repo.sh` script was updated to fetch the Emscripten submodule from
GitHub the same way we do the main LLVM submodule (a tarball fetch).
* The Emscripten backend, turned off by default, is still turned on for a number
of targets on CI. We'll only be shipping an Emscripten backend with Tier 1
platforms, though. All cross-compiled platforms will not be receiving an
Emscripten backend yet.
This commit means that when you download the `rustc` package in Rustup for Tier
1 platforms you'll be receiving two trans backends, one for Emscripten and one
that's the general LLVM backend. If you never compile for Emscripten you'll
never use the Emscripten backend, so we may update this one day to only download
the Emscripten backend when you add the Emscripten target. For now though it's
just an extra 10MB gzip'd.
Closes #46819
2018-01-24 16:22:34 +00:00
|
|
|
filtered_submodules.append(module)
|
2017-05-19 11:36:49 +00:00
|
|
|
run(["git", "submodule", "update",
|
rustc: Split Emscripten to a separate codegen backend
This commit introduces a separately compiled backend for Emscripten, avoiding
compiling the `JSBackend` target in the main LLVM codegen backend. This builds
on the foundation provided by #47671 to create a new codegen backend dedicated
solely to Emscripten, removing the `JSBackend` of the main codegen backend in
the process.
A new field was added to each target for this commit which specifies the backend
to use for translation, the default being `llvm` which is the main backend that
we use. The Emscripten targets specify an `emscripten` backend instead of the
main `llvm` one.
There's a whole bunch of consequences of this change, but I'll try to enumerate
them here:
* A *second* LLVM submodule was added in this commit. The main LLVM submodule
will soon start to drift from the Emscripten submodule, but currently they're
both at the same revision.
* Logic was added to rustbuild to *not* build the Emscripten backend by default.
This is gated behind a `--enable-emscripten` flag to the configure script. By
default users should neither check out the emscripten submodule nor compile
it.
* The `init_repo.sh` script was updated to fetch the Emscripten submodule from
GitHub the same way we do the main LLVM submodule (a tarball fetch).
* The Emscripten backend, turned off by default, is still turned on for a number
of targets on CI. We'll only be shipping an Emscripten backend with Tier 1
platforms, though. All cross-compiled platforms will not be receiving an
Emscripten backend yet.
This commit means that when you download the `rustc` package in Rustup for Tier
1 platforms you'll be receiving two trans backends, one for Emscripten and one
that's the general LLVM backend. If you never compile for Emscripten you'll
never use the Emscripten backend, so we may update this one day to only download
the Emscripten backend when you add the Emscripten target. For now though it's
just an extra 10MB gzip'd.
Closes #46819
2018-01-24 16:22:34 +00:00
|
|
|
"--init", "--recursive"] + filtered_submodules,
|
2017-07-01 04:24:35 +00:00
|
|
|
cwd=self.rust_root, verbose=self.verbose)
|
2017-05-18 08:33:24 +00:00
|
|
|
run(["git", "submodule", "-q", "foreach", "git",
|
2017-07-01 04:24:35 +00:00
|
|
|
"reset", "-q", "--hard"],
|
|
|
|
cwd=self.rust_root, verbose=self.verbose)
|
2017-05-18 08:33:24 +00:00
|
|
|
run(["git", "submodule", "-q", "foreach", "git",
|
2017-07-01 04:24:35 +00:00
|
|
|
"clean", "-qdfx"],
|
|
|
|
cwd=self.rust_root, verbose=self.verbose)
|
2017-05-18 08:33:24 +00:00
|
|
|
|
2017-07-03 04:32:42 +00:00
|
|
|
def set_dev_environment(self):
|
|
|
|
"""Set download URL for development environment"""
|
|
|
|
self._download_url = 'https://dev-static.rust-lang.org'
|
|
|
|
|
2017-05-17 16:15:44 +00:00
|
|
|
|
2018-02-27 01:45:52 +00:00
|
|
|
def bootstrap(help_triggered):
    """Configure, fetch, build and run the initial bootstrap"""
    # The whole download-and-build of the bootstrap binary has to happen
    # before any help text can be produced, so tell the user up front why
    # --help is not instantaneous.
    if help_triggered:
        print("info: Downloading and building bootstrap before processing --help")
        print(" command. See src/bootstrap/README.md for help with common")
        print(" commands.")

    # Only a handful of flags matter to this stage; everything else in argv
    # is forwarded untouched to the compiled bootstrap binary at the end.
    parser = argparse.ArgumentParser(description='Build rust')
    parser.add_argument('--config')
    parser.add_argument('--build')
    parser.add_argument('--clean', action='store_true')
    parser.add_argument('-v', '--verbose', action='store_true')

    # Strip help flags so argparse does not exit early; help output is the
    # job of the real bootstrap binary.
    cleaned_argv = [arg for arg in sys.argv if arg not in ('-h', '--help')]
    args, _ = parser.parse_known_args(cleaned_argv)

    # Configure initial bootstrap
    build = RustBuild()
    build.verbose = args.verbose
    build.clean = args.clean

    # config.toml is optional; when it is absent the defaults stand.
    try:
        with open(args.config or 'config.toml') as config:
            build.config_toml = config.read()
    except (OSError, IOError):
        pass

    # Deliberately crude "TOML parsing": scan for literal `key = value`
    # lines rather than pulling in a real parser at this early stage.
    if '\nverbose = 2' in build.config_toml:
        build.verbose = 2
    elif '\nverbose = 1' in build.config_toml:
        build.verbose = 1

    build.use_vendored_sources = '\nvendor = true' in build.config_toml
    build.use_locked_deps = '\nlocked-deps = true' in build.config_toml

    # Under `sudo`, cargo would otherwise populate registry caches in
    # root's $HOME, so force vendored sources in that situation.
    running_under_sudo = ('SUDO_USER' in os.environ
                          and os.environ.get('USER') != os.environ['SUDO_USER'])
    if running_under_sudo and not build.use_vendored_sources:
        build.use_vendored_sources = True
        print('info: looks like you are running this command under `sudo`')
        print(' and so in order to preserve your $HOME this will now')
        print(' use vendored sources by default. Note that if this')
        print(' does not work you should run a normal build first')
        print(' before running a command like `sudo ./x.py install`')

    if build.use_vendored_sources:
        # Point cargo's crates.io source at the in-tree vendor directory.
        if not os.path.exists('.cargo'):
            os.makedirs('.cargo')
        with open('.cargo/config', 'w') as cargo_config:
            cargo_config.write("""
[source.crates-io]
replace-with = 'vendored-sources'
registry = 'https://example.com'

[source.vendored-sources]
directory = '{}/src/vendor'
""".format(build.rust_root))
    elif os.path.exists('.cargo'):
        # A stale .cargo/config would keep redirecting to vendored sources
        # after vendoring was turned off, so remove it.
        shutil.rmtree('.cargo')

    # Snapshot date and channels for the stage0 compiler/cargo, as recorded
    # for this checkout (parsed by the stage0_data helper).
    data = stage0_data(build.rust_root)
    build.date = data['date']
    build.rustc_channel = data['rustc']
    build.cargo_channel = data['cargo']

    if 'dev' in data:
        build.set_dev_environment()

    # No help text depends on submodules. This check saves ~1 minute of git
    # commands, even if all the submodules are present and downloaded!
    if not help_triggered:
        build.update_submodules()

    # Fetch/build the bootstrap
    build.build = args.build or build.build_triple()
    build.download_stage0()
    sys.stdout.flush()
    build.build_bootstrap()
    sys.stdout.flush()

    # Hand off to the freshly built bootstrap binary, forwarding the
    # original command line plus some environment describing this stage.
    run_args = [build.bootstrap_binary()]
    run_args.extend(sys.argv[1:])
    env = os.environ.copy()
    env["BUILD"] = build.build
    env["SRC"] = build.rust_root
    env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
    env["BOOTSTRAP_PYTHON"] = sys.executable
    run(run_args, env=env, verbose=build.verbose)
|
2016-04-13 18:18:35 +00:00
|
|
|
|
2017-05-19 11:16:29 +00:00
|
|
|
|
2017-03-03 02:27:07 +00:00
|
|
|
def main():
    """Entry point for the bootstrap process"""
    started_at = time()

    # A bare `./x.py` invocation is treated exactly like an explicit
    # request for help.
    help_triggered = (
        len(sys.argv) == 1 or '-h' in sys.argv or '--help' in sys.argv)

    try:
        bootstrap(help_triggered)
        if not help_triggered:
            print("Build completed successfully in {}".format(
                format_build_time(time() - started_at)))
    except (SystemExit, KeyboardInterrupt) as error:
        # SystemExit carries an int exit status in `code`; anything else
        # (KeyboardInterrupt, or a non-int code) maps to a generic 1.
        code = getattr(error, 'code', None)
        exit_code = code if isinstance(code, int) else 1
        print(error)
        if not help_triggered:
            print("Build completed unsuccessfully in {}".format(
                format_build_time(time() - started_at)))
        sys.exit(exit_code)
|
2016-07-02 14:19:27 +00:00
|
|
|
|
2017-07-01 04:24:35 +00:00
|
|
|
|
2016-04-13 18:18:35 +00:00
|
|
|
# Run the bootstrap entry point only when executed as a script, not when
# this module is imported (e.g. by other build tooling).
if __name__ == '__main__':
    main()
|