Directly import rust-installer submodule

This moves the rust-installer code to be directly hosted in
rust-lang/rust, since it's not used elsewhere and this makes it easier
to make and review changes without needing a separate upstream commit.
Mark Rousskov 2023-02-28 21:02:17 -05:00
parent e733ff797f
commit 8d9cef4709
39 changed files with 4370 additions and 4 deletions

.gitmodules vendored
@@ -1,6 +1,3 @@
[submodule "src/rust-installer"]
path = src/tools/rust-installer
url = https://github.com/rust-lang/rust-installer.git
[submodule "src/doc/nomicon"]
path = src/doc/nomicon
url = https://github.com/rust-lang/nomicon.git

src/tools/rust-installer
@@ -1 +0,0 @@
Subproject commit 31b4e313213dfaf62b2dd13a4da8176990929526

src/tools/rust-installer/.gitignore vendored Normal file
@@ -0,0 +1,5 @@
*~
tmp
target/
**/*.rs.bk
Cargo.lock

src/tools/rust-installer/Cargo.toml
@@ -0,0 +1,28 @@
[package]
authors = ["The Rust Project Developers"]
name = "installer"
version = "0.0.0"
edition = "2018"
[[bin]]
doc = false
name = "rust-installer"
path = "src/main.rs"
[dependencies]
anyhow = "1.0.19"
flate2 = "1.0.1"
rayon = "1.0"
tar = "0.4.13"
walkdir = "2"
xz2 = "0.1.4"
num_cpus = "1"
remove_dir_all = "0.5"
[dependencies.clap]
features = ["derive"]
version = "3.1"
[target."cfg(windows)".dependencies]
lazy_static = "1"
winapi = { version = "0.3", features = ["errhandlingapi", "handleapi", "ioapiset", "winerror", "winioctl", "winnt"] }

src/tools/rust-installer/README.md
@@ -0,0 +1,71 @@
[![Build Status](https://travis-ci.org/rust-lang/rust-installer.svg?branch=master)](https://travis-ci.org/rust-lang/rust-installer)
A generator for the install.sh script commonly used to install Rust in
Unix environments. It is used by Rust and Cargo, and is intended to be
used by a future combined installer of Rust + Cargo.
# Usage
```
./gen-installer.sh --product-name=Rust \
--rel-manifest-dir=rustlib \
--success-message=Rust-is-ready-to-roll. \
--image-dir=./install-image \
--work-dir=./temp \
--output-dir=./dist \
--non-installed-overlay=./overlay \
--package-name=rustc-nightly-i686-apple-darwin \
--component-name=rustc \
--legacy-manifest-dirs=rustlib \
--bulk-dirs=share/doc
```
Or, to just generate the script:
```
./gen-install-script.sh --product-name=Rust \
--rel-manifest-dir=rustlib \
--success-message=Rust-is-ready-to-roll. \
--output-script=install.sh \
--legacy-manifest-dirs=rustlib
```
*Note: the dashes in `success-message` are converted to spaces. The
script's argument handling is broken with spaces.*
To combine installers:
```
./combine-installers.sh --product-name=Rust \
--rel-manifest-dir=rustlib \
--success-message=Rust-is-ready-to-roll. \
--work-dir=./temp \
--output-dir=./dist \
--non-installed-overlay=./overlay \
--package-name=rustc-nightly-i686-apple-darwin \
--legacy-manifest-dirs=rustlib \
--input-tarballs=./rustc.tar.gz,cargo.tar.gz
```
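The scripts above are thin wrappers that `cargo run` the `installer` crate (see
`src/main.rs` and `src/generator.rs` in this commit). The same generate step can
also be driven from Rust through the library API; a minimal, hypothetical sketch
with placeholder names and paths:
```rust
// Sketch only: drives the Generator from src/generator.rs directly,
// assuming the `installer` crate is available as a dependency.
fn main() -> anyhow::Result<()> {
    let mut generator = installer::Generator::default();
    generator
        .product_name("Rust".to_string())
        .package_name("rustc-nightly-i686-apple-darwin".to_string())
        .component_name("rustc".to_string())
        .rel_manifest_dir("rustlib".to_string())
        .success_message("Rust is ready to roll.".to_string())
        .image_dir("./install-image".to_string())
        .work_dir("./temp".to_string())
        .output_dir("./dist".to_string());
    // With the default compression formats this produces
    // ./dist/rustc-nightly-i686-apple-darwin.tar.gz and .tar.xz.
    generator.run()
}
```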
# Future work
* Make install.sh not have to be customized; pull its data from a
config file.
* Be more resilient to installation failures, particularly if the disk
is full.
* Pre-install and post-uninstall scripts.
* Allow components to depend on or contradict other components.
* Sanity check that expected destination dirs (bin, lib, share) exist?
* Add --docdir flag. Is there a standard name for this?
* Remove empty directories on uninstall.
* Detect mismatches in --prefix, --mandir, etc. in follow-on
installs/uninstalls.
* Fix argument handling for spaces.
* Add --bindir.
# License
This software is distributed under the terms of both the MIT license
and the Apache License (Version 2.0), at your option.
See [LICENSE-APACHE](LICENSE-APACHE), [LICENSE-MIT](LICENSE-MIT) for details.

src/tools/rust-installer/combine-installers.sh
@@ -0,0 +1,24 @@
#!/bin/bash
# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ue
# Prints the absolute path of a directory to stdout
abs_path() {
local path="$1"
# Unset CDPATH because it causes havoc: it makes the destination unpredictable
# and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
# for good measure.
(unset CDPATH && cd "$path" > /dev/null && pwd)
}
src_dir="$(abs_path "$(dirname "$0")")"
cargo run --manifest-path="$src_dir/Cargo.toml" -- combine "$@"

src/tools/rust-installer/gen-install-script.sh
@@ -0,0 +1,24 @@
#!/bin/bash
# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ue
# Prints the absolute path of a directory to stdout
abs_path() {
local path="$1"
# Unset CDPATH because it causes havoc: it makes the destination unpredictable
# and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
# for good measure.
(unset CDPATH && cd "$path" > /dev/null && pwd)
}
src_dir="$(abs_path "$(dirname "$0")")"
cargo run --manifest-path="$src_dir/Cargo.toml" -- script "$@"

src/tools/rust-installer/gen-installer.sh
@@ -0,0 +1,24 @@
#!/bin/bash
# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ue
# Prints the absolute path of a directory to stdout
abs_path() {
local path="$1"
# Unset CDPATH because it causes havoc: it makes the destination unpredictable
# and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
# for good measure.
(unset CDPATH && cd "$path" > /dev/null && pwd)
}
src_dir="$(abs_path "$(dirname "$0")")"
cargo run --manifest-path="$src_dir/Cargo.toml" -- generate "$@"

src/tools/rust-installer/install-template.sh
File diff suppressed because it is too large

src/tools/rust-installer/make-tarballs.sh
@@ -0,0 +1,24 @@
#!/bin/sh
# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
set -ue
# Prints the absolute path of a directory to stdout
abs_path() {
local path="$1"
# Unset CDPATH because it causes havoc: it makes the destination unpredictable
# and triggers 'cd' to print the path to stdout. Route `cd`'s output to /dev/null
# for good measure.
(unset CDPATH && cd "$path" > /dev/null && pwd)
}
src_dir="$(abs_path "$(dirname "$0")")"
cargo run --manifest-path="$src_dir/Cargo.toml" -- tarball "$@"

src/tools/rust-installer/rust-installer-version
@@ -0,0 +1 @@
3

src/tools/rust-installer/src/combiner.rs
@@ -0,0 +1,161 @@
use super::Scripter;
use super::Tarballer;
use crate::{
compression::{CompressionFormat, CompressionFormats},
util::*,
};
use anyhow::{bail, Context, Result};
use std::io::{Read, Write};
use std::path::Path;
use tar::Archive;
actor! {
#[derive(Debug)]
pub struct Combiner {
/// The name of the product, for display.
#[clap(value_name = "NAME")]
product_name: String = "Product",
/// The name of the package tarball.
#[clap(value_name = "NAME")]
package_name: String = "package",
/// The directory under lib/ where the manifest lives.
#[clap(value_name = "DIR")]
rel_manifest_dir: String = "packagelib",
/// The string to print after successful installation.
#[clap(value_name = "MESSAGE")]
success_message: String = "Installed.",
/// Places to look for legacy manifests to uninstall.
#[clap(value_name = "DIRS")]
legacy_manifest_dirs: String = "",
/// Installers to combine.
#[clap(value_name = "FILE,FILE")]
input_tarballs: String = "",
/// Directory containing files that should not be installed.
#[clap(value_name = "DIR")]
non_installed_overlay: String = "",
/// The directory to do temporary work.
#[clap(value_name = "DIR")]
work_dir: String = "./workdir",
/// The location to put the final image and tarball.
#[clap(value_name = "DIR")]
output_dir: String = "./dist",
/// The formats used to compress the tarball
#[clap(value_name = "FORMAT", default_value_t)]
compression_formats: CompressionFormats,
}
}
impl Combiner {
/// Combines the installer tarballs.
pub fn run(self) -> Result<()> {
create_dir_all(&self.work_dir)?;
let package_dir = Path::new(&self.work_dir).join(&self.package_name);
if package_dir.exists() {
remove_dir_all(&package_dir)?;
}
create_dir_all(&package_dir)?;
// Merge each installer into the work directory of the new installer.
let components = create_new_file(package_dir.join("components"))?;
for input_tarball in self
.input_tarballs
.split(',')
.map(str::trim)
.filter(|s| !s.is_empty())
{
// Extract the input tarballs
let compression =
CompressionFormat::detect_from_path(input_tarball).ok_or_else(|| {
anyhow::anyhow!("couldn't figure out the format of {}", input_tarball)
})?;
Archive::new(compression.decode(input_tarball)?)
.unpack(&self.work_dir)
.with_context(|| {
format!(
"unable to extract '{}' into '{}'",
&input_tarball, self.work_dir
)
})?;
let pkg_name =
input_tarball.trim_end_matches(&format!(".tar.{}", compression.extension()));
let pkg_name = Path::new(pkg_name).file_name().unwrap();
let pkg_dir = Path::new(&self.work_dir).join(&pkg_name);
// Verify the version number.
let mut version = String::new();
open_file(pkg_dir.join("rust-installer-version"))
.and_then(|mut file| Ok(file.read_to_string(&mut version)?))
.with_context(|| format!("failed to read version in '{}'", input_tarball))?;
if version.trim().parse() != Ok(crate::RUST_INSTALLER_VERSION) {
bail!("incorrect installer version in {}", input_tarball);
}
// Copy components to the new combined installer.
let mut pkg_components = String::new();
open_file(pkg_dir.join("components"))
.and_then(|mut file| Ok(file.read_to_string(&mut pkg_components)?))
.with_context(|| format!("failed to read components in '{}'", input_tarball))?;
for component in pkg_components.split_whitespace() {
// All we need to do is copy the component directory. We could
// move it, but rustbuild wants to reuse the unpacked package
// dir for OS-specific installers on macOS and Windows.
let component_dir = package_dir.join(&component);
create_dir(&component_dir)?;
copy_recursive(&pkg_dir.join(&component), &component_dir)?;
// Merge the component name.
writeln!(&components, "{}", component).context("failed to write new components")?;
}
}
drop(components);
// Write the installer version.
let version = package_dir.join("rust-installer-version");
writeln!(
create_new_file(version)?,
"{}",
crate::RUST_INSTALLER_VERSION
)
.context("failed to write new installer version")?;
// Copy the overlay.
if !self.non_installed_overlay.is_empty() {
copy_recursive(self.non_installed_overlay.as_ref(), &package_dir)?;
}
// Generate the install script.
let output_script = package_dir.join("install.sh");
let mut scripter = Scripter::default();
scripter
.product_name(self.product_name)
.rel_manifest_dir(self.rel_manifest_dir)
.success_message(self.success_message)
.legacy_manifest_dirs(self.legacy_manifest_dirs)
.output_script(path_to_str(&output_script)?.into());
scripter.run()?;
// Make the tarballs.
create_dir_all(&self.output_dir)?;
let output = Path::new(&self.output_dir).join(&self.package_name);
let mut tarballer = Tarballer::default();
tarballer
.work_dir(self.work_dir)
.input(self.package_name)
.output(path_to_str(&output)?.into())
.compression_formats(self.compression_formats.clone());
tarballer.run()?;
Ok(())
}
}

src/tools/rust-installer/src/compression.rs
@@ -0,0 +1,203 @@
use anyhow::{Context, Error};
use flate2::{read::GzDecoder, write::GzEncoder};
use rayon::prelude::*;
use std::{convert::TryFrom, fmt, io::Read, io::Write, path::Path, str::FromStr};
use xz2::{read::XzDecoder, write::XzEncoder};
#[derive(Debug, Copy, Clone)]
pub enum CompressionFormat {
Gz,
Xz,
}
impl CompressionFormat {
pub(crate) fn detect_from_path(path: impl AsRef<Path>) -> Option<Self> {
match path.as_ref().extension().and_then(|e| e.to_str()) {
Some("gz") => Some(CompressionFormat::Gz),
Some("xz") => Some(CompressionFormat::Xz),
_ => None,
}
}
pub(crate) fn extension(&self) -> &'static str {
match self {
CompressionFormat::Gz => "gz",
CompressionFormat::Xz => "xz",
}
}
pub(crate) fn encode(&self, path: impl AsRef<Path>) -> Result<Box<dyn Encoder>, Error> {
let mut os = path.as_ref().as_os_str().to_os_string();
os.push(format!(".{}", self.extension()));
let path = Path::new(&os);
if path.exists() {
crate::util::remove_file(path)?;
}
let file = crate::util::create_new_file(path)?;
Ok(match self {
CompressionFormat::Gz => Box::new(GzEncoder::new(file, flate2::Compression::best())),
CompressionFormat::Xz => {
let mut filters = xz2::stream::Filters::new();
// the preset is overridden by the other options so it doesn't matter
let mut lzma_ops = xz2::stream::LzmaOptions::new_preset(9).unwrap();
// This sets the overall dictionary size, which is also how much memory (baseline)
// is needed for decompression.
lzma_ops.dict_size(64 * 1024 * 1024);
// Use the best match finder for compression ratio.
lzma_ops.match_finder(xz2::stream::MatchFinder::BinaryTree4);
lzma_ops.mode(xz2::stream::Mode::Normal);
// Set nice len to the maximum for best compression ratio
lzma_ops.nice_len(273);
// Set depth to a reasonable value; 0 means auto, 1000 is somewhat high but gives
// good results.
lzma_ops.depth(1000);
// 2 is the default and does well for most files
lzma_ops.position_bits(2);
// 0 is the default and does well for most files
lzma_ops.literal_position_bits(0);
// 3 is the default and does well for most files
lzma_ops.literal_context_bits(3);
filters.lzma2(&lzma_ops);
let compressor = XzEncoder::new_stream(
std::io::BufWriter::new(file),
xz2::stream::MtStreamBuilder::new()
.threads(1)
.filters(filters)
.encoder()
.unwrap(),
);
Box::new(compressor)
}
})
}
pub(crate) fn decode(&self, path: impl AsRef<Path>) -> Result<Box<dyn Read>, Error> {
let file = crate::util::open_file(path.as_ref())?;
Ok(match self {
CompressionFormat::Gz => Box::new(GzDecoder::new(file)),
CompressionFormat::Xz => Box::new(XzDecoder::new(file)),
})
}
}
/// This struct wraps Vec<CompressionFormat> in order to parse the value from the command line.
#[derive(Debug, Clone)]
pub struct CompressionFormats(Vec<CompressionFormat>);
impl TryFrom<&'_ str> for CompressionFormats {
type Error = Error;
fn try_from(value: &str) -> Result<Self, Self::Error> {
let mut parsed = Vec::new();
for format in value.split(',') {
match format.trim() {
"gz" => parsed.push(CompressionFormat::Gz),
"xz" => parsed.push(CompressionFormat::Xz),
other => anyhow::bail!("unknown compression format: {}", other),
}
}
Ok(CompressionFormats(parsed))
}
}
impl FromStr for CompressionFormats {
type Err = Error;
fn from_str(value: &str) -> Result<Self, Self::Err> {
Self::try_from(value)
}
}
impl fmt::Display for CompressionFormats {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for (i, format) in self.iter().enumerate() {
if i != 0 {
write!(f, ",")?;
}
fmt::Display::fmt(
match format {
CompressionFormat::Xz => "xz",
CompressionFormat::Gz => "gz",
},
f,
)?;
}
Ok(())
}
}
impl Default for CompressionFormats {
fn default() -> Self {
Self(vec![CompressionFormat::Gz, CompressionFormat::Xz])
}
}
impl CompressionFormats {
pub(crate) fn iter(&self) -> impl Iterator<Item = CompressionFormat> + '_ {
self.0.iter().map(|i| *i)
}
}
pub(crate) trait Encoder: Send + Write {
fn finish(self: Box<Self>) -> Result<(), Error>;
}
impl<W: Send + Write> Encoder for GzEncoder<W> {
fn finish(self: Box<Self>) -> Result<(), Error> {
GzEncoder::finish(*self).context("failed to finish .gz file")?;
Ok(())
}
}
impl<W: Send + Write> Encoder for XzEncoder<W> {
fn finish(self: Box<Self>) -> Result<(), Error> {
XzEncoder::finish(*self).context("failed to finish .xz file")?;
Ok(())
}
}
pub(crate) struct CombinedEncoder {
encoders: Vec<Box<dyn Encoder>>,
}
impl CombinedEncoder {
pub(crate) fn new(encoders: Vec<Box<dyn Encoder>>) -> Box<dyn Encoder> {
Box::new(Self { encoders })
}
}
impl Write for CombinedEncoder {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
self.write_all(buf)?;
Ok(buf.len())
}
fn write_all(&mut self, buf: &[u8]) -> std::io::Result<()> {
self.encoders
.par_iter_mut()
.map(|w| w.write_all(buf))
.collect::<std::io::Result<Vec<()>>>()?;
Ok(())
}
fn flush(&mut self) -> std::io::Result<()> {
self.encoders
.par_iter_mut()
.map(|w| w.flush())
.collect::<std::io::Result<Vec<()>>>()?;
Ok(())
}
}
impl Encoder for CombinedEncoder {
fn finish(self: Box<Self>) -> Result<(), Error> {
self.encoders
.into_par_iter()
.map(|e| e.finish())
.collect::<Result<Vec<()>, Error>>()?;
Ok(())
}
}
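The `TryFrom<&str>` / `FromStr` impls above parse the comma-separated
`--compression-formats` value, and `Display` round-trips it. A minimal test sketch
(illustration only, not part of this commit) that could live in a `#[cfg(test)]`
module of this file:
```rust
#[cfg(test)]
mod tests {
    use super::CompressionFormats;

    #[test]
    fn parses_comma_separated_formats() {
        // Whitespace around each entry is trimmed by the TryFrom impl.
        let formats: CompressionFormats = "gz, xz".parse().unwrap();
        // Display joins the parsed formats back with commas.
        assert_eq!(formats.to_string(), "gz,xz");
        // Anything other than "gz" or "xz" is rejected.
        assert!("zip".parse::<CompressionFormats>().is_err());
    }
}
```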

src/tools/rust-installer/src/generator.rs
@@ -0,0 +1,178 @@
use super::Scripter;
use super::Tarballer;
use crate::compression::CompressionFormats;
use crate::util::*;
use anyhow::{bail, format_err, Context, Result};
use std::collections::BTreeSet;
use std::io::Write;
use std::path::Path;
actor! {
#[derive(Debug)]
pub struct Generator {
/// The name of the product, for display
#[clap(value_name = "NAME")]
product_name: String = "Product",
/// The name of the component, distinct from other installed components
#[clap(value_name = "NAME")]
component_name: String = "component",
/// The name of the package tarball
#[clap(value_name = "NAME")]
package_name: String = "package",
/// The directory under lib/ where the manifest lives
#[clap(value_name = "DIR")]
rel_manifest_dir: String = "packagelib",
/// The string to print after successful installation
#[clap(value_name = "MESSAGE")]
success_message: String = "Installed.",
/// Places to look for legacy manifests to uninstall
#[clap(value_name = "DIRS")]
legacy_manifest_dirs: String = "",
/// Directory containing files that should not be installed
#[clap(value_name = "DIR")]
non_installed_overlay: String = "",
/// Path prefixes of directories that should be installed/uninstalled in bulk
#[clap(value_name = "DIRS")]
bulk_dirs: String = "",
/// The directory containing the installation medium
#[clap(value_name = "DIR")]
image_dir: String = "./install_image",
/// The directory to do temporary work
#[clap(value_name = "DIR")]
work_dir: String = "./workdir",
/// The location to put the final image and tarball
#[clap(value_name = "DIR")]
output_dir: String = "./dist",
/// The formats used to compress the tarball
#[clap(value_name = "FORMAT", default_value_t)]
compression_formats: CompressionFormats,
}
}
impl Generator {
/// Generates the actual installer tarball
pub fn run(self) -> Result<()> {
create_dir_all(&self.work_dir)?;
let package_dir = Path::new(&self.work_dir).join(&self.package_name);
if package_dir.exists() {
remove_dir_all(&package_dir)?;
}
// Copy the image and write the manifest
let component_dir = package_dir.join(&self.component_name);
create_dir_all(&component_dir)?;
copy_and_manifest(self.image_dir.as_ref(), &component_dir, &self.bulk_dirs)?;
// Write the component name
let components = package_dir.join("components");
writeln!(create_new_file(components)?, "{}", self.component_name)
.context("failed to write the component file")?;
// Write the installer version (only used by combine-installers.sh)
let version = package_dir.join("rust-installer-version");
writeln!(
create_new_file(version)?,
"{}",
crate::RUST_INSTALLER_VERSION
)
.context("failed to write new installer version")?;
// Copy the overlay
if !self.non_installed_overlay.is_empty() {
copy_recursive(self.non_installed_overlay.as_ref(), &package_dir)?;
}
// Generate the install script
let output_script = package_dir.join("install.sh");
let mut scripter = Scripter::default();
scripter
.product_name(self.product_name)
.rel_manifest_dir(self.rel_manifest_dir)
.success_message(self.success_message)
.legacy_manifest_dirs(self.legacy_manifest_dirs)
.output_script(path_to_str(&output_script)?.into());
scripter.run()?;
// Make the tarballs
create_dir_all(&self.output_dir)?;
let output = Path::new(&self.output_dir).join(&self.package_name);
let mut tarballer = Tarballer::default();
tarballer
.work_dir(self.work_dir)
.input(self.package_name)
.output(path_to_str(&output)?.into())
.compression_formats(self.compression_formats.clone());
tarballer.run()?;
Ok(())
}
}
/// Copies the `src` directory recursively to `dst`, writing `manifest.in` too.
fn copy_and_manifest(src: &Path, dst: &Path, bulk_dirs: &str) -> Result<()> {
let mut manifest = create_new_file(dst.join("manifest.in"))?;
let bulk_dirs: Vec<_> = bulk_dirs
.split(',')
.filter(|s| !s.is_empty())
.map(Path::new)
.collect();
let mut paths = BTreeSet::new();
copy_with_callback(src, dst, |path, file_type| {
// We need paths to be compatible with both Unix and Windows.
if path
.components()
.filter_map(|c| c.as_os_str().to_str())
.any(|s| s.contains('\\'))
{
bail!(
"rust-installer doesn't support '\\' in path components: {:?}",
path
);
}
// Normalize to Unix-style path separators.
let normalized_string;
let mut string = path.to_str().ok_or_else(|| {
format_err!(
"rust-installer doesn't support non-Unicode paths: {:?}",
path
)
})?;
if string.contains('\\') {
normalized_string = string.replace('\\', "/");
string = &normalized_string;
}
if file_type.is_dir() {
// Only manifest directories that are explicitly bulk.
if bulk_dirs.contains(&path) {
paths.insert(format!("dir:{}\n", string));
}
} else {
// Only manifest files that aren't under bulk directories.
if !bulk_dirs.iter().any(|d| path.starts_with(d)) {
paths.insert(format!("file:{}\n", string));
}
}
Ok(())
})?;
for path in paths {
manifest.write_all(path.as_bytes())?;
}
Ok(())
}

src/tools/rust-installer/src/lib.rs
@@ -0,0 +1,17 @@
#[macro_use]
mod util;
mod combiner;
mod compression;
mod generator;
mod scripter;
mod tarballer;
pub use crate::combiner::Combiner;
pub use crate::generator::Generator;
pub use crate::scripter::Scripter;
pub use crate::tarballer::Tarballer;
/// The installer version, output only to be used by combine-installers.sh.
/// (should match `SOURCE_DIRECTORY/rust-installer-version`)
pub const RUST_INSTALLER_VERSION: u32 = 3;

src/tools/rust-installer/src/main.rs
@@ -0,0 +1,27 @@
use anyhow::{Context, Result};
use clap::{self, Parser};
#[derive(Parser)]
struct CommandLine {
#[clap(subcommand)]
command: Subcommand,
}
#[derive(clap::Subcommand)]
enum Subcommand {
Generate(installer::Generator),
Combine(installer::Combiner),
Script(installer::Scripter),
Tarball(installer::Tarballer),
}
fn main() -> Result<()> {
let command_line = CommandLine::parse();
match command_line.command {
Subcommand::Combine(combiner) => combiner.run().context("failed to combine installers")?,
Subcommand::Generate(generator) => generator.run().context("failed to generate installer")?,
Subcommand::Script(scripter) => scripter.run().context("failed to generate installation script")?,
Subcommand::Tarball(tarballer) => tarballer.run().context("failed to generate tarballs")?,
}
Ok(())
}

src/tools/rust-installer/src/remove_dir_all.rs
@@ -0,0 +1,860 @@
#![allow(non_snake_case)]
use std::io;
use std::path::Path;
#[cfg(not(windows))]
pub fn remove_dir_all(path: &Path) -> io::Result<()> {
::std::fs::remove_dir_all(path)
}
#[cfg(windows)]
pub fn remove_dir_all(path: &Path) -> io::Result<()> {
win::remove_dir_all(path)
}
#[cfg(windows)]
mod win {
use winapi::ctypes::{c_uint, c_ushort};
use winapi::shared::minwindef::{BOOL, DWORD, FALSE, FILETIME, LPVOID};
use winapi::shared::winerror::{
ERROR_CALL_NOT_IMPLEMENTED, ERROR_INSUFFICIENT_BUFFER, ERROR_NO_MORE_FILES,
};
use winapi::um::errhandlingapi::{GetLastError, SetLastError};
use winapi::um::fileapi::{
CreateFileW, FindFirstFileW, FindNextFileW, GetFileInformationByHandle,
};
use winapi::um::fileapi::{BY_HANDLE_FILE_INFORMATION, CREATE_ALWAYS, CREATE_NEW};
use winapi::um::fileapi::{FILE_BASIC_INFO, FILE_RENAME_INFO, TRUNCATE_EXISTING};
use winapi::um::fileapi::{OPEN_ALWAYS, OPEN_EXISTING};
use winapi::um::handleapi::{CloseHandle, INVALID_HANDLE_VALUE};
use winapi::um::ioapiset::DeviceIoControl;
use winapi::um::libloaderapi::{GetModuleHandleW, GetProcAddress};
use winapi::um::minwinbase::{
FileBasicInfo, FileRenameInfo, FILE_INFO_BY_HANDLE_CLASS, WIN32_FIND_DATAW,
};
use winapi::um::winbase::SECURITY_SQOS_PRESENT;
use winapi::um::winbase::{
FILE_FLAG_BACKUP_SEMANTICS, FILE_FLAG_DELETE_ON_CLOSE, FILE_FLAG_OPEN_REPARSE_POINT,
};
use winapi::um::winioctl::FSCTL_GET_REPARSE_POINT;
use winapi::um::winnt::{DELETE, FILE_ATTRIBUTE_DIRECTORY, HANDLE, LPCWSTR};
use winapi::um::winnt::{FILE_ATTRIBUTE_READONLY, FILE_ATTRIBUTE_REPARSE_POINT};
use winapi::um::winnt::{FILE_GENERIC_WRITE, FILE_WRITE_DATA, GENERIC_READ, GENERIC_WRITE};
use winapi::um::winnt::{FILE_READ_ATTRIBUTES, FILE_WRITE_ATTRIBUTES};
use winapi::um::winnt::{FILE_SHARE_DELETE, FILE_SHARE_READ, FILE_SHARE_WRITE};
use winapi::um::winnt::{IO_REPARSE_TAG_MOUNT_POINT, IO_REPARSE_TAG_SYMLINK, LARGE_INTEGER};
use std::ffi::{OsStr, OsString};
use std::io;
use std::mem;
use std::os::windows::ffi::{OsStrExt, OsStringExt};
use std::path::{Path, PathBuf};
use std::ptr;
use std::sync::Arc;
pub fn remove_dir_all(path: &Path) -> io::Result<()> {
// On Windows it is not enough to just recursively remove the contents of a
// directory and then the directory itself. Deleting does not happen
// instantaneously, but is scheduled.
// To work around this, we move the file or directory to some `base_dir`
// right before deletion to avoid races.
//
// As `base_dir` we choose the parent dir of the directory we want to
// remove. We very probably have permission to create files here, as we
// already need write permission in this dir to delete the directory. And it
// should be on the same volume.
//
// To handle files with names like `CON` and `morse .. .`, and when a
// directory structure is so deep it needs long path names, the path is
// first converted to a `//?/`-path with `get_path()`.
//
// To make sure we don't leave a moved file lying around if the process
// crashes before we can delete the file, we do all operations on a file
// handle. By opening a file with `FILE_FLAG_DELETE_ON_CLOSE`, Windows will
// always delete the file when the handle closes.
//
// All files are renamed to be in the `base_dir`, and have their name
// changed to "rm-<counter>". After every rename the counter is increased.
// Rename should not overwrite possibly existing files in the base dir. So
// if it fails with `AlreadyExists`, we just increase the counter and try
// again.
//
// For read-only files and directories we first have to remove the read-only
// attribute before we can move or delete them. This also removes the
// attribute from possible hardlinks to the file, so just before closing we
// restore the read-only attribute.
//
// If 'path' points to a directory symlink or junction we should not
// recursively remove the target of the link, but only the link itself.
//
// Moving and deleting is guaranteed to succeed if we are able to open the
// file with `DELETE` permission. If others have the file open we only have
// `DELETE` permission if they have specified `FILE_SHARE_DELETE`. We can
// also delete the file now, but it will not disappear until all others have
// closed the file. But no-one can open the file after we have flagged it
// for deletion.
// Open the path once to get the canonical path, file type and attributes.
let (path, metadata) = {
let mut opts = OpenOptions::new();
opts.access_mode(FILE_READ_ATTRIBUTES);
opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS | FILE_FLAG_OPEN_REPARSE_POINT);
let file = File::open(path, &opts)?;
(get_path(&file)?, file.file_attr()?)
};
let mut ctx = RmdirContext {
base_dir: match path.parent() {
Some(dir) => dir,
None => {
return Err(io::Error::new(
io::ErrorKind::PermissionDenied,
"can't delete root directory",
))
}
},
readonly: metadata.perm().readonly(),
counter: 0,
};
let filetype = metadata.file_type();
if filetype.is_dir() {
remove_dir_all_recursive(path.as_ref(), &mut ctx)
} else if filetype.is_symlink_dir() {
remove_item(path.as_ref(), &mut ctx)
} else {
Err(io::Error::new(
io::ErrorKind::PermissionDenied,
"Not a directory",
))
}
}
fn readdir(p: &Path) -> io::Result<ReadDir> {
let root = p.to_path_buf();
let star = p.join("*");
let path = to_u16s(&star)?;
unsafe {
let mut wfd = mem::zeroed();
let find_handle = FindFirstFileW(path.as_ptr(), &mut wfd);
if find_handle != INVALID_HANDLE_VALUE {
Ok(ReadDir {
handle: FindNextFileHandle(find_handle),
root: Arc::new(root),
first: Some(wfd),
})
} else {
Err(io::Error::last_os_error())
}
}
}
struct RmdirContext<'a> {
base_dir: &'a Path,
readonly: bool,
counter: u64,
}
fn remove_dir_all_recursive(path: &Path, ctx: &mut RmdirContext) -> io::Result<()> {
let dir_readonly = ctx.readonly;
for child in readdir(path)? {
let child = child?;
let child_type = child.file_type()?;
ctx.readonly = child.metadata()?.perm().readonly();
if child_type.is_dir() {
remove_dir_all_recursive(&child.path(), ctx)?;
} else {
remove_item(&child.path().as_ref(), ctx)?;
}
}
ctx.readonly = dir_readonly;
remove_item(path, ctx)
}
fn remove_item(path: &Path, ctx: &mut RmdirContext) -> io::Result<()> {
if !ctx.readonly {
let mut opts = OpenOptions::new();
opts.access_mode(DELETE);
opts.custom_flags(
FILE_FLAG_BACKUP_SEMANTICS | // delete directory
FILE_FLAG_OPEN_REPARSE_POINT | // delete symlink
FILE_FLAG_DELETE_ON_CLOSE,
);
let file = File::open(path, &opts)?;
move_item(&file, ctx)
} else {
// remove read-only permission
set_perm(&path, FilePermissions::new())?;
// move and delete file, similar to !readonly.
// only the access mode is different.
let mut opts = OpenOptions::new();
opts.access_mode(DELETE | FILE_WRITE_ATTRIBUTES);
opts.custom_flags(
FILE_FLAG_BACKUP_SEMANTICS
| FILE_FLAG_OPEN_REPARSE_POINT
| FILE_FLAG_DELETE_ON_CLOSE,
);
let file = File::open(path, &opts)?;
move_item(&file, ctx)?;
// restore read-only flag just in case there are other hard links
let mut perm = FilePermissions::new();
perm.set_readonly(true);
let _ = file.set_perm(perm); // ignore if this fails
Ok(())
}
}
macro_rules! compat_fn {
($module:ident: $(
fn $symbol:ident($($argname:ident: $argtype:ty),*)
-> $rettype:ty {
$($body:expr);*
}
)*) => ($(
#[allow(unused_variables)]
unsafe fn $symbol($($argname: $argtype),*) -> $rettype {
use std::sync::atomic::{AtomicUsize, Ordering};
use std::mem;
use std::ffi::CString;
type F = unsafe extern "system" fn($($argtype),*) -> $rettype;
lazy_static! { static ref PTR: AtomicUsize = AtomicUsize::new(0);}
fn lookup(module: &str, symbol: &str) -> Option<usize> {
let mut module: Vec<u16> = module.encode_utf16().collect();
module.push(0);
let symbol = CString::new(symbol).unwrap();
unsafe {
let handle = GetModuleHandleW(module.as_ptr());
match GetProcAddress(handle, symbol.as_ptr()) as usize {
0 => None,
n => Some(n),
}
}
}
fn store_func(ptr: &AtomicUsize, module: &str, symbol: &str,
fallback: usize) -> usize {
let value = lookup(module, symbol).unwrap_or(fallback);
ptr.store(value, Ordering::SeqCst);
value
}
fn load() -> usize {
store_func(&PTR, stringify!($module), stringify!($symbol), fallback as usize)
}
unsafe extern "system" fn fallback($($argname: $argtype),*)
-> $rettype {
$($body);*
}
let addr = match PTR.load(Ordering::SeqCst) {
0 => load(),
n => n,
};
mem::transmute::<usize, F>(addr)($($argname),*)
}
)*)
}
compat_fn! {
kernel32:
fn GetFinalPathNameByHandleW(_hFile: HANDLE,
_lpszFilePath: LPCWSTR,
_cchFilePath: DWORD,
_dwFlags: DWORD) -> DWORD {
SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0
}
fn SetFileInformationByHandle(_hFile: HANDLE,
_FileInformationClass: FILE_INFO_BY_HANDLE_CLASS,
_lpFileInformation: LPVOID,
_dwBufferSize: DWORD) -> BOOL {
SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0
}
}
fn cvt(i: i32) -> io::Result<i32> {
if i == 0 {
Err(io::Error::last_os_error())
} else {
Ok(i)
}
}
fn to_u16s<S: AsRef<OsStr>>(s: S) -> io::Result<Vec<u16>> {
fn inner(s: &OsStr) -> io::Result<Vec<u16>> {
let mut maybe_result: Vec<u16> = s.encode_wide().collect();
if maybe_result.iter().any(|&u| u == 0) {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"strings passed to WinAPI cannot contain NULs",
));
}
maybe_result.push(0);
Ok(maybe_result)
}
inner(s.as_ref())
}
fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] {
match v.iter().position(|c| *c == 0) {
// don't include the 0
Some(i) => &v[..i],
None => v,
}
}
fn fill_utf16_buf<F1, F2, T>(mut f1: F1, f2: F2) -> io::Result<T>
where
F1: FnMut(*mut u16, DWORD) -> DWORD,
F2: FnOnce(&[u16]) -> T,
{
// Start off with a stack buf but then spill over to the heap if we end up
// needing more space.
let mut stack_buf = [0u16; 512];
let mut heap_buf = Vec::new();
unsafe {
let mut n = stack_buf.len();
loop {
let buf = if n <= stack_buf.len() {
&mut stack_buf[..]
} else {
let extra = n - heap_buf.len();
heap_buf.reserve(extra);
heap_buf.set_len(n);
&mut heap_buf[..]
};
// This function is typically called on Windows API functions which
// will return the correct length of the string, but these functions
// also return `0` on error. In some cases, however, the
// returned "correct length" may actually be 0!
//
// To handle this case we call `SetLastError` to reset it to 0 and
// then check it again if we get the "0 error value". If the "last
// error" is still 0 then we interpret it as a 0 length buffer and
// not an actual error.
SetLastError(0);
let k = match f1(buf.as_mut_ptr(), n as DWORD) {
0 if GetLastError() == 0 => 0,
0 => return Err(io::Error::last_os_error()),
n => n,
} as usize;
if k == n && GetLastError() == ERROR_INSUFFICIENT_BUFFER {
n *= 2;
} else if k >= n {
n = k;
} else {
return Ok(f2(&buf[..k]));
}
}
}
}
#[derive(Clone, PartialEq, Eq, Debug, Default)]
struct FilePermissions {
readonly: bool,
}
impl FilePermissions {
fn new() -> FilePermissions {
Default::default()
}
fn readonly(&self) -> bool {
self.readonly
}
fn set_readonly(&mut self, readonly: bool) {
self.readonly = readonly
}
}
#[derive(Clone)]
struct OpenOptions {
// generic
read: bool,
write: bool,
append: bool,
truncate: bool,
create: bool,
create_new: bool,
// system-specific
custom_flags: u32,
access_mode: Option<DWORD>,
attributes: DWORD,
share_mode: DWORD,
security_qos_flags: DWORD,
security_attributes: usize, // FIXME: should be a reference
}
impl OpenOptions {
fn new() -> OpenOptions {
OpenOptions {
// generic
read: false,
write: false,
append: false,
truncate: false,
create: false,
create_new: false,
// system-specific
custom_flags: 0,
access_mode: None,
share_mode: FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
attributes: 0,
security_qos_flags: 0,
security_attributes: 0,
}
}
fn custom_flags(&mut self, flags: u32) {
self.custom_flags = flags;
}
fn access_mode(&mut self, access_mode: u32) {
self.access_mode = Some(access_mode);
}
fn get_access_mode(&self) -> io::Result<DWORD> {
const ERROR_INVALID_PARAMETER: i32 = 87;
match (self.read, self.write, self.append, self.access_mode) {
(_, _, _, Some(mode)) => Ok(mode),
(true, false, false, None) => Ok(GENERIC_READ),
(false, true, false, None) => Ok(GENERIC_WRITE),
(true, true, false, None) => Ok(GENERIC_READ | GENERIC_WRITE),
(false, _, true, None) => Ok(FILE_GENERIC_WRITE & !FILE_WRITE_DATA),
(true, _, true, None) => Ok(GENERIC_READ | (FILE_GENERIC_WRITE & !FILE_WRITE_DATA)),
(false, false, false, None) => {
Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER))
}
}
}
fn get_creation_mode(&self) -> io::Result<DWORD> {
const ERROR_INVALID_PARAMETER: i32 = 87;
match (self.write, self.append) {
(true, false) => {}
(false, false) => {
if self.truncate || self.create || self.create_new {
return Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
}
}
(_, true) => {
if self.truncate && !self.create_new {
return Err(io::Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
}
}
}
Ok(match (self.create, self.truncate, self.create_new) {
(false, false, false) => OPEN_EXISTING,
(true, false, false) => OPEN_ALWAYS,
(false, true, false) => TRUNCATE_EXISTING,
(true, true, false) => CREATE_ALWAYS,
(_, _, true) => CREATE_NEW,
})
}
fn get_flags_and_attributes(&self) -> DWORD {
self.custom_flags
| self.attributes
| self.security_qos_flags
| if self.security_qos_flags != 0 {
SECURITY_SQOS_PRESENT
} else {
0
}
| if self.create_new {
FILE_FLAG_OPEN_REPARSE_POINT
} else {
0
}
}
}
struct File {
handle: Handle,
}
impl File {
fn open(path: &Path, opts: &OpenOptions) -> io::Result<File> {
let path = to_u16s(path)?;
let handle = unsafe {
CreateFileW(
path.as_ptr(),
opts.get_access_mode()?,
opts.share_mode,
opts.security_attributes as *mut _,
opts.get_creation_mode()?,
opts.get_flags_and_attributes(),
ptr::null_mut(),
)
};
if handle == INVALID_HANDLE_VALUE {
Err(io::Error::last_os_error())
} else {
Ok(File {
handle: Handle::new(handle),
})
}
}
fn file_attr(&self) -> io::Result<FileAttr> {
unsafe {
let mut info: BY_HANDLE_FILE_INFORMATION = mem::zeroed();
cvt(GetFileInformationByHandle(self.handle.raw(), &mut info))?;
let mut attr = FileAttr {
attributes: info.dwFileAttributes,
creation_time: info.ftCreationTime,
last_access_time: info.ftLastAccessTime,
last_write_time: info.ftLastWriteTime,
file_size: ((info.nFileSizeHigh as u64) << 32) | (info.nFileSizeLow as u64),
reparse_tag: 0,
};
if attr.is_reparse_point() {
let mut b = [0; MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
if let Ok((_, buf)) = self.reparse_point(&mut b) {
attr.reparse_tag = buf.ReparseTag;
}
}
Ok(attr)
}
}
fn set_attributes(&self, attr: DWORD) -> io::Result<()> {
let zero: LARGE_INTEGER = unsafe { mem::zeroed() };
let mut info = FILE_BASIC_INFO {
CreationTime: zero, // do not change
LastAccessTime: zero, // do not change
LastWriteTime: zero, // do not change
ChangeTime: zero, // do not change
FileAttributes: attr,
};
let size = mem::size_of_val(&info);
cvt(unsafe {
SetFileInformationByHandle(
self.handle.raw(),
FileBasicInfo,
&mut info as *mut _ as *mut _,
size as DWORD,
)
})?;
Ok(())
}
fn rename(&self, new: &Path, replace: bool) -> io::Result<()> {
// &self must be opened with DELETE permission
use std::iter;
#[cfg(target_arch = "x86")]
const STRUCT_SIZE: usize = 12;
#[cfg(target_arch = "x86_64")]
const STRUCT_SIZE: usize = 20;
// FIXME: check for internal NULs in 'new'
let mut data: Vec<u16> = iter::repeat(0u16)
.take(STRUCT_SIZE / 2)
.chain(new.as_os_str().encode_wide())
.collect();
data.push(0);
let size = data.len() * 2;
unsafe {
// Thanks to alignment guarantees on Windows this works
// (8 for 32-bit and 16 for 64-bit)
let info = data.as_mut_ptr() as *mut FILE_RENAME_INFO;
// The type of ReplaceIfExists is BOOL, but it actually expects a
// BOOLEAN. This means true is -1, not c::TRUE.
(*info).ReplaceIfExists = if replace { -1 } else { FALSE };
(*info).RootDirectory = ptr::null_mut();
(*info).FileNameLength = (size - STRUCT_SIZE) as DWORD;
cvt(SetFileInformationByHandle(
self.handle().raw(),
FileRenameInfo,
data.as_mut_ptr() as *mut _ as *mut _,
size as DWORD,
))?;
Ok(())
}
}
fn set_perm(&self, perm: FilePermissions) -> io::Result<()> {
let attr = self.file_attr()?.attributes;
if perm.readonly == (attr & FILE_ATTRIBUTE_READONLY != 0) {
Ok(())
} else if perm.readonly {
self.set_attributes(attr | FILE_ATTRIBUTE_READONLY)
} else {
self.set_attributes(attr & !FILE_ATTRIBUTE_READONLY)
}
}
fn handle(&self) -> &Handle {
&self.handle
}
fn reparse_point<'a>(
&self,
space: &'a mut [u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE],
) -> io::Result<(DWORD, &'a REPARSE_DATA_BUFFER)> {
unsafe {
let mut bytes = 0;
cvt({
DeviceIoControl(
self.handle.raw(),
FSCTL_GET_REPARSE_POINT,
ptr::null_mut(),
0,
space.as_mut_ptr() as *mut _,
space.len() as DWORD,
&mut bytes,
ptr::null_mut(),
)
})?;
Ok((bytes, &*(space.as_ptr() as *const REPARSE_DATA_BUFFER)))
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
enum FileType {
Dir,
File,
SymlinkFile,
SymlinkDir,
ReparsePoint,
MountPoint,
}
impl FileType {
fn new(attrs: DWORD, reparse_tag: DWORD) -> FileType {
match (
attrs & FILE_ATTRIBUTE_DIRECTORY != 0,
attrs & FILE_ATTRIBUTE_REPARSE_POINT != 0,
reparse_tag,
) {
(false, false, _) => FileType::File,
(true, false, _) => FileType::Dir,
(false, true, IO_REPARSE_TAG_SYMLINK) => FileType::SymlinkFile,
(true, true, IO_REPARSE_TAG_SYMLINK) => FileType::SymlinkDir,
(true, true, IO_REPARSE_TAG_MOUNT_POINT) => FileType::MountPoint,
(_, true, _) => FileType::ReparsePoint,
// Note: if a _file_ has a reparse tag of the type IO_REPARSE_TAG_MOUNT_POINT it is
// invalid, as junctions always have to be dirs. We set the filetype to ReparsePoint
// to indicate it is something symlink-like, but not something you can follow.
}
}
fn is_dir(&self) -> bool {
*self == FileType::Dir
}
fn is_symlink_dir(&self) -> bool {
*self == FileType::SymlinkDir || *self == FileType::MountPoint
}
}
impl DirEntry {
fn new(root: &Arc<PathBuf>, wfd: &WIN32_FIND_DATAW) -> Option<DirEntry> {
let first_bytes = &wfd.cFileName[0..3];
if first_bytes.starts_with(&[46, 0]) || first_bytes.starts_with(&[46, 46, 0]) {
None
} else {
Some(DirEntry {
root: root.clone(),
data: *wfd,
})
}
}
fn path(&self) -> PathBuf {
self.root.join(&self.file_name())
}
fn file_name(&self) -> OsString {
let filename = truncate_utf16_at_nul(&self.data.cFileName);
OsString::from_wide(filename)
}
fn file_type(&self) -> io::Result<FileType> {
Ok(FileType::new(
self.data.dwFileAttributes,
/* reparse_tag = */ self.data.dwReserved0,
))
}
fn metadata(&self) -> io::Result<FileAttr> {
Ok(FileAttr {
attributes: self.data.dwFileAttributes,
creation_time: self.data.ftCreationTime,
last_access_time: self.data.ftLastAccessTime,
last_write_time: self.data.ftLastWriteTime,
file_size: ((self.data.nFileSizeHigh as u64) << 32)
| (self.data.nFileSizeLow as u64),
reparse_tag: if self.data.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT != 0 {
// reserved unless this is a reparse point
self.data.dwReserved0
} else {
0
},
})
}
}
struct DirEntry {
root: Arc<PathBuf>,
data: WIN32_FIND_DATAW,
}
struct ReadDir {
handle: FindNextFileHandle,
root: Arc<PathBuf>,
first: Option<WIN32_FIND_DATAW>,
}
impl Iterator for ReadDir {
type Item = io::Result<DirEntry>;
fn next(&mut self) -> Option<io::Result<DirEntry>> {
if let Some(first) = self.first.take() {
if let Some(e) = DirEntry::new(&self.root, &first) {
return Some(Ok(e));
}
}
unsafe {
let mut wfd = mem::zeroed();
loop {
if FindNextFileW(self.handle.0, &mut wfd) == 0 {
if GetLastError() == ERROR_NO_MORE_FILES {
return None;
} else {
return Some(Err(io::Error::last_os_error()));
}
}
if let Some(e) = DirEntry::new(&self.root, &wfd) {
return Some(Ok(e));
}
}
}
}
}
#[derive(Clone)]
struct FileAttr {
attributes: DWORD,
creation_time: FILETIME,
last_access_time: FILETIME,
last_write_time: FILETIME,
file_size: u64,
reparse_tag: DWORD,
}
impl FileAttr {
fn perm(&self) -> FilePermissions {
FilePermissions {
readonly: self.attributes & FILE_ATTRIBUTE_READONLY != 0,
}
}
fn file_type(&self) -> FileType {
FileType::new(self.attributes, self.reparse_tag)
}
fn is_reparse_point(&self) -> bool {
self.attributes & FILE_ATTRIBUTE_REPARSE_POINT != 0
}
}
#[repr(C)]
struct REPARSE_DATA_BUFFER {
ReparseTag: c_uint,
ReparseDataLength: c_ushort,
Reserved: c_ushort,
rest: (),
}
const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024;
/// An owned container for `HANDLE` object, closing them on Drop.
///
/// All methods are inherited through a `Deref` impl to `RawHandle`
struct Handle(RawHandle);
use std::ops::Deref;
/// A wrapper type for `HANDLE` objects to give them proper Send/Sync inference
/// as well as Rust-y methods.
///
/// This does **not** drop the handle when it goes out of scope, use `Handle`
/// instead for that.
#[derive(Copy, Clone)]
struct RawHandle(HANDLE);
unsafe impl Send for RawHandle {}
unsafe impl Sync for RawHandle {}
impl Handle {
fn new(handle: HANDLE) -> Handle {
Handle(RawHandle::new(handle))
}
}
impl Deref for Handle {
type Target = RawHandle;
fn deref(&self) -> &RawHandle {
&self.0
}
}
impl Drop for Handle {
fn drop(&mut self) {
unsafe {
let _ = CloseHandle(self.raw());
}
}
}
impl RawHandle {
fn new(handle: HANDLE) -> RawHandle {
RawHandle(handle)
}
fn raw(&self) -> HANDLE {
self.0
}
}
struct FindNextFileHandle(HANDLE);
fn get_path(f: &File) -> io::Result<PathBuf> {
fill_utf16_buf(
|buf, sz| unsafe {
GetFinalPathNameByHandleW(f.handle.raw(), buf, sz, VOLUME_NAME_DOS)
},
|buf| PathBuf::from(OsString::from_wide(buf)),
)
}
fn move_item(file: &File, ctx: &mut RmdirContext) -> io::Result<()> {
let mut tmpname = ctx.base_dir.join(format! {"rm-{}", ctx.counter});
ctx.counter += 1;
// Try to rename the file. If it already exists, just retry with another
// filename.
while let Err(err) = file.rename(tmpname.as_ref(), false) {
if err.kind() != io::ErrorKind::AlreadyExists {
return Err(err);
};
tmpname = ctx.base_dir.join(format!("rm-{}", ctx.counter));
ctx.counter += 1;
}
Ok(())
}
fn set_perm(path: &Path, perm: FilePermissions) -> io::Result<()> {
let mut opts = OpenOptions::new();
opts.access_mode(FILE_READ_ATTRIBUTES | FILE_WRITE_ATTRIBUTES);
opts.custom_flags(FILE_FLAG_BACKUP_SEMANTICS);
let file = File::open(path, &opts)?;
file.set_perm(perm)
}
const VOLUME_NAME_DOS: DWORD = 0x0;
}

src/tools/rust-installer/src/scripter.rs
@@ -0,0 +1,68 @@
use crate::util::*;
use anyhow::{Context, Result};
use std::io::Write;
const TEMPLATE: &'static str = include_str!("../install-template.sh");
actor! {
#[derive(Debug)]
pub struct Scripter {
/// The name of the product, for display
#[clap(value_name = "NAME")]
product_name: String = "Product",
/// The directory under lib/ where the manifest lives
#[clap(value_name = "DIR")]
rel_manifest_dir: String = "manifestlib",
/// The string to print after successful installation
#[clap(value_name = "MESSAGE")]
success_message: String = "Installed.",
/// Places to look for legacy manifests to uninstall
#[clap(value_name = "DIRS")]
legacy_manifest_dirs: String = "",
/// The name of the output script
#[clap(value_name = "FILE")]
output_script: String = "install.sh",
}
}
impl Scripter {
/// Generates the actual installer script
pub fn run(self) -> Result<()> {
// Replace dashes in the product name with spaces (our arg handling botches spaces).
// TODO: still needed? Kept for compatibility for now.
let product_name = self.product_name.replace('-', " ");
// Replace dashes in the success message with spaces (our arg handling botches spaces)
// TODO: still needed? Kept for compatibility for now.
let success_message = self.success_message.replace('-', " ");
let script = TEMPLATE
.replace("%%TEMPLATE_PRODUCT_NAME%%", &sh_quote(&product_name))
.replace("%%TEMPLATE_REL_MANIFEST_DIR%%", &self.rel_manifest_dir)
.replace("%%TEMPLATE_SUCCESS_MESSAGE%%", &sh_quote(&success_message))
.replace(
"%%TEMPLATE_LEGACY_MANIFEST_DIRS%%",
&sh_quote(&self.legacy_manifest_dirs),
)
.replace(
"%%TEMPLATE_RUST_INSTALLER_VERSION%%",
&sh_quote(&crate::RUST_INSTALLER_VERSION),
);
create_new_executable(&self.output_script)?
.write_all(script.as_ref())
.with_context(|| format!("failed to write output script '{}'", self.output_script))?;
Ok(())
}
}
fn sh_quote<T: ToString>(s: &T) -> String {
// We'll single-quote the whole thing, so first replace single-quotes with
// '"'"' (leave quoting, double-quote one `'`, re-enter single-quoting)
format!("'{}'", s.to_string().replace('\'', r#"'"'"'"#))
}
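A small test sketch (illustration only, not part of this commit) showing what
`sh_quote` produces for a value containing a single quote:
```rust
#[cfg(test)]
mod tests {
    use super::sh_quote;

    #[test]
    fn quotes_embedded_single_quotes() {
        // it's  becomes  'it'"'"'s'  : leave single-quoting, emit a
        // double-quoted ', then re-enter single-quoting.
        assert_eq!(sh_quote(&"it's"), r#"'it'"'"'s'"#);
    }
}
```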

src/tools/rust-installer/src/tarballer.rs
@@ -0,0 +1,143 @@
use anyhow::{bail, Context, Result};
use std::fs::{read_link, symlink_metadata};
use std::io::{empty, BufWriter, Write};
use std::path::Path;
use tar::{Builder, Header};
use walkdir::WalkDir;
use crate::{
compression::{CombinedEncoder, CompressionFormats},
util::*,
};
actor! {
#[derive(Debug)]
pub struct Tarballer {
/// The input folder to be compressed.
#[clap(value_name = "NAME")]
input: String = "package",
/// The prefix of the tarballs.
#[clap(value_name = "PATH")]
output: String = "./dist",
/// The folder in which the input is to be found.
#[clap(value_name = "DIR")]
work_dir: String = "./workdir",
/// The formats used to compress the tarball.
#[clap(value_name = "FORMAT", default_value_t)]
compression_formats: CompressionFormats,
}
}
impl Tarballer {
/// Generates the actual tarballs
pub fn run(self) -> Result<()> {
let tarball_name = self.output.clone() + ".tar";
let encoder = CombinedEncoder::new(
self.compression_formats
.iter()
.map(|f| f.encode(&tarball_name))
.collect::<Result<Vec<_>>>()?,
);
// Sort files by their suffix, to group files with the same name from
// different locations (likely identical) and files with the same
// extension (likely containing similar data).
let (dirs, mut files) = get_recursive_paths(&self.work_dir, &self.input)
.context("failed to collect file paths")?;
files.sort_by(|a, b| a.bytes().rev().cmp(b.bytes().rev()));
// Write the tar into both encoded files. We write all directories
// first, so files may be directly created. (See rust-lang/rustup.rs#1092.)
let buf = BufWriter::with_capacity(1024 * 1024, encoder);
let mut builder = Builder::new(buf);
let pool = rayon::ThreadPoolBuilder::new()
.num_threads(2)
.build()
.unwrap();
pool.install(move || {
for path in dirs {
let src = Path::new(&self.work_dir).join(&path);
builder
.append_dir(&path, &src)
.with_context(|| format!("failed to tar dir '{}'", src.display()))?;
}
for path in files {
let src = Path::new(&self.work_dir).join(&path);
append_path(&mut builder, &src, &path)
.with_context(|| format!("failed to tar file '{}'", src.display()))?;
}
builder
.into_inner()
.context("failed to finish writing .tar stream")?
.into_inner()
.ok()
.unwrap()
.finish()?;
Ok(())
})
}
}
fn append_path<W: Write>(builder: &mut Builder<W>, src: &Path, path: &String) -> Result<()> {
let stat = symlink_metadata(src)?;
let mut header = Header::new_gnu();
header.set_metadata(&stat);
if stat.file_type().is_symlink() {
let link = read_link(src)?;
header.set_link_name(&link)?;
builder.append_data(&mut header, path, &mut empty())?;
} else {
if cfg!(windows) {
// Windows doesn't really have a mode, so `tar` never marks files executable.
// Use an extension whitelist to update files that usually should be so.
const EXECUTABLES: [&'static str; 4] = ["exe", "dll", "py", "sh"];
if let Some(ext) = src.extension().and_then(|s| s.to_str()) {
if EXECUTABLES.contains(&ext) {
let mode = header.mode()?;
header.set_mode(mode | 0o111);
}
}
}
let file = open_file(src)?;
builder.append_data(&mut header, path, &file)?;
}
Ok(())
}
/// Returns all `(directories, files)` under the source path.
fn get_recursive_paths<P, Q>(root: P, name: Q) -> Result<(Vec<String>, Vec<String>)>
where
P: AsRef<Path>,
Q: AsRef<Path>,
{
let root = root.as_ref();
let name = name.as_ref();
if !name.is_relative() && !name.starts_with(root) {
bail!(
"input '{}' is not in work dir '{}'",
name.display(),
root.display()
);
}
let mut dirs = vec![];
let mut files = vec![];
for entry in WalkDir::new(root.join(name)) {
let entry = entry?;
let path = entry.path().strip_prefix(root)?;
let path = path_to_str(&path)?;
if entry.file_type().is_dir() {
dirs.push(path.to_owned());
} else {
files.push(path.to_owned());
}
}
Ok((dirs, files))
}
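The suffix sort in `run` above is the non-obvious part: ordering paths by their
reversed bytes places identical file names from different directories, and files
sharing an extension, next to each other in the tar stream, which helps the
compressors. A sketch (not part of this commit) of what that comparator does:
```rust
#[test]
fn suffix_sort_groups_same_names_and_extensions() {
    let mut files = vec![
        "a/doc/index.html".to_string(),
        "a/lib/librustc.rlib".to_string(),
        "b/doc/index.html".to_string(),
    ];
    // The same comparator as Tarballer::run: sort by the reversed bytes of each path.
    files.sort_by(|a, b| a.bytes().rev().cmp(b.bytes().rev()));
    // The two index.html files end up adjacent, grouped away from the .rlib.
    assert_eq!(
        files,
        vec!["a/lib/librustc.rlib", "a/doc/index.html", "b/doc/index.html"]
    );
}
```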

src/tools/rust-installer/src/util.rs
@@ -0,0 +1,156 @@
use anyhow::{format_err, Context, Result};
use std::fs;
use std::path::Path;
use walkdir::WalkDir;
// Needed to set the script mode to executable.
#[cfg(unix)]
use std::os::unix::fs::OpenOptionsExt;
// FIXME: what about Windows? Are default ACLs executable?
#[cfg(unix)]
use std::os::unix::fs::symlink as symlink_file;
#[cfg(windows)]
use std::os::windows::fs::symlink_file;
/// Converts a `&Path` to a UTF-8 `&str`.
pub fn path_to_str(path: &Path) -> Result<&str> {
path.to_str()
.ok_or_else(|| format_err!("path is not valid UTF-8 '{}'", path.display()))
}
/// Wraps `fs::copy` with a nicer error message.
pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
if fs::symlink_metadata(&from)?.file_type().is_symlink() {
let link = fs::read_link(&from)?;
symlink_file(link, &to)?;
Ok(0)
} else {
let amt = fs::copy(&from, &to).with_context(|| {
format!(
"failed to copy '{}' to '{}'",
from.as_ref().display(),
to.as_ref().display()
)
})?;
Ok(amt)
}
}
/// Wraps `fs::create_dir` with a nicer error message.
pub fn create_dir<P: AsRef<Path>>(path: P) -> Result<()> {
fs::create_dir(&path)
.with_context(|| format!("failed to create dir '{}'", path.as_ref().display()))?;
Ok(())
}
/// Wraps `fs::create_dir_all` with a nicer error message.
pub fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
fs::create_dir_all(&path)
.with_context(|| format!("failed to create dir '{}'", path.as_ref().display()))?;
Ok(())
}
/// Wraps `fs::OpenOptions::create_new().open()` as executable, with a nicer error message.
pub fn create_new_executable<P: AsRef<Path>>(path: P) -> Result<fs::File> {
let mut options = fs::OpenOptions::new();
options.write(true).create_new(true);
#[cfg(unix)]
options.mode(0o755);
let file = options
.open(&path)
.with_context(|| format!("failed to create file '{}'", path.as_ref().display()))?;
Ok(file)
}
/// Wraps `fs::OpenOptions::create_new().open()`, with a nicer error message.
pub fn create_new_file<P: AsRef<Path>>(path: P) -> Result<fs::File> {
let file = fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(&path)
.with_context(|| format!("failed to create file '{}'", path.as_ref().display()))?;
Ok(file)
}
/// Wraps `fs::File::open()` with a nicer error message.
pub fn open_file<P: AsRef<Path>>(path: P) -> Result<fs::File> {
let file = fs::File::open(&path)
.with_context(|| format!("failed to open file '{}'", path.as_ref().display()))?;
Ok(file)
}
/// Wraps `remove_dir_all` with a nicer error message.
pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
remove_dir_all::remove_dir_all(path.as_ref())
.with_context(|| format!("failed to remove dir '{}'", path.as_ref().display()))?;
Ok(())
}
/// Wrap `fs::remove_file` with a nicer error message
pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {
fs::remove_file(path.as_ref())
.with_context(|| format!("failed to remove file '{}'", path.as_ref().display()))?;
Ok(())
}
/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
/// when this function is called.
pub fn copy_recursive(src: &Path, dst: &Path) -> Result<()> {
copy_with_callback(src, dst, |_, _| Ok(()))
}
/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
/// when this function is called. Invokes a callback for each path visited.
pub fn copy_with_callback<F>(src: &Path, dst: &Path, mut callback: F) -> Result<()>
where
F: FnMut(&Path, fs::FileType) -> Result<()>,
{
for entry in WalkDir::new(src).min_depth(1) {
let entry = entry?;
let file_type = entry.file_type();
let path = entry.path().strip_prefix(src)?;
let dst = dst.join(path);
if file_type.is_dir() {
create_dir(&dst)?;
} else {
copy(entry.path(), dst)?;
}
callback(&path, file_type)?;
}
Ok(())
}
macro_rules! actor_field_default {
() => { Default::default() };
(= $expr:expr) => { $expr.into() }
}
/// Creates an "actor" with default values, setters for all fields, and Clap parser support.
macro_rules! actor {
($( #[ $attr:meta ] )+ pub struct $name:ident {
$( $( #[ $field_attr:meta ] )+ $field:ident : $type:ty $(= $default:tt)*, )*
}) => {
$( #[ $attr ] )+
#[derive(clap::Args)]
pub struct $name {
$( $( #[ $field_attr ] )+ #[clap(long, $(default_value = $default)*)] $field : $type, )*
}
impl Default for $name {
fn default() -> $name {
$name {
$($field : actor_field_default!($(= $default)*), )*
}
}
}
impl $name {
$(pub fn $field(&mut self, value: $type) -> &mut Self {
self.$field = value;
self
})*
}
}
}
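Roughly, each `actor!` invocation expands to a clap `Args` struct whose fields become
`--long` flags, a `Default` impl built from the `= "literal"` defaults, and one
chainable setter per field. A hand-written sketch of that expansion for a single
field (`Example` is hypothetical, not part of this commit):
```rust
#[derive(Debug, clap::Args)]
pub struct Example {
    /// The name of the product, for display
    #[clap(value_name = "NAME")]
    #[clap(long, default_value = "Product")]
    product_name: String,
}

impl Default for Example {
    fn default() -> Example {
        Example { product_name: "Product".into() }
    }
}

impl Example {
    /// Chainable setter, as used by `scripter.product_name(...)` in generator.rs.
    pub fn product_name(&mut self, value: String) -> &mut Self {
        self.product_name = value;
        self
    }
}
```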

src/tools/rust-installer/test.sh Executable file

File diff suppressed because it is too large

@@ -0,0 +1 @@
rust

@@ -0,0 +1 @@
cargo

@@ -0,0 +1 @@
#!/bin/bogus

@@ -0,0 +1 @@
#!/bin/sh

@@ -0,0 +1 @@
#!/bin/sh

@@ -0,0 +1 @@
#!/bin/sh

@@ -0,0 +1 @@
#!/bin/sh
