Merge remote-tracking branch 'upstream/master' into hash-always-has-type

John Ericson 2020-07-16 14:58:53 +00:00
commit 5ea817dace
108 changed files with 8157 additions and 1845 deletions


@@ -10,15 +10,8 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: cachix/install-nix-action@v10
- run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings
macos_perf_test:
runs-on: macos-latest
steps:
- name: Disable syspolicy assessments
run: |
spctl --status
sudo spctl --master-disable
- uses: actions/checkout@v2
- uses: cachix/install-nix-action@v10
- run: nix-build release.nix --arg nix '{ outPath = ./.; revCount = 123; shortRev = "abcdefgh"; }' --arg systems '[ builtins.currentSystem ]' -A installerScript -A perlBindings
#- run: nix flake check
- run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)


@@ -11,6 +11,7 @@ makefiles = \
src/resolve-system-dependencies/local.mk \
scripts/local.mk \
corepkgs/local.mk \
misc/bash/local.mk \
misc/systemd/local.mk \
misc/launchd/local.mk \
misc/upstart/local.mk \


@@ -123,6 +123,7 @@ AC_PATH_PROG(flex, flex, false)
AC_PATH_PROG(bison, bison, false)
AC_PATH_PROG(dot, dot)
AC_PATH_PROG(lsof, lsof, lsof)
NEED_PROG(jq, jq)
AC_SUBST(coreutils, [$(dirname $(type -p cat))])

3
default.nix Normal file

@@ -0,0 +1,3 @@
(import (fetchTarball https://github.com/edolstra/flake-compat/archive/master.tar.gz) {
src = ./.;
}).defaultNix
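Note (illustration only, not part of this commit): this shim uses flake-compat so that plain nix-build / nix-shell keep working against flake.nix. A minimal usage sketch, assuming flake-compat's usual output layout:

  # hypothetical: build the flake's default package through the compatibility shim
  # nix-build -E '(import ./default.nix).defaultPackage.x86_64-linux'
  (import ./default.nix).defaultPackage.x86_64-linux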

26
flake.lock Normal file

@@ -0,0 +1,26 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1591633336,
"narHash": "sha256-oVXv4xAnDJB03LvZGbC72vSVlIbbJr8tpjEW5o/Fdek=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "70717a337f7ae4e486ba71a500367cad697e5f09",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-20.03-small",
"type": "indirect"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 6
}
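For orientation (not part of the diff): the lock file is plain JSON and is read with builtins.fromJSON, the same way src/libexpr/flake/call-flake.nix (later in this commit) consumes it. A minimal sketch, assuming it is evaluated from the repository root:

  let lock = builtins.fromJSON (builtins.readFile ./flake.lock);
  in lock.nodes.${lock.root}.inputs    # => { nixpkgs = "nixpkgs"; }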

443
flake.nix Normal file

@@ -0,0 +1,443 @@
{
description = "The purely functional package manager";
inputs.nixpkgs.url = "nixpkgs/nixos-20.03-small";
outputs = { self, nixpkgs }:
let
version = builtins.readFile ./.version + versionSuffix;
versionSuffix =
if officialRelease
then ""
else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified)}_${self.shortRev or "dirty"}";
officialRelease = false;
systems = [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ];
forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f system);
# Memoize nixpkgs for different platforms for efficiency.
nixpkgsFor = forAllSystems (system:
import nixpkgs {
inherit system;
overlays = [ self.overlay ];
}
);
commonDeps = pkgs: with pkgs; rec {
# Use "busybox-sandbox-shell" if present,
# if not (legacy) fallback and hope it's sufficient.
sh = pkgs.busybox-sandbox-shell or (busybox.override {
useMusl = true;
enableStatic = true;
enableMinimal = true;
extraConfig = ''
CONFIG_FEATURE_FANCY_ECHO y
CONFIG_FEATURE_SH_MATH y
CONFIG_FEATURE_SH_MATH_64 y
CONFIG_ASH y
CONFIG_ASH_OPTIMIZE_FOR_SIZE y
CONFIG_ASH_ALIAS y
CONFIG_ASH_BASH_COMPAT y
CONFIG_ASH_CMDCMD y
CONFIG_ASH_ECHO y
CONFIG_ASH_GETOPTS y
CONFIG_ASH_INTERNAL_GLOB y
CONFIG_ASH_JOB_CONTROL y
CONFIG_ASH_PRINTF y
CONFIG_ASH_TEST y
'';
});
configureFlags =
lib.optionals stdenv.isLinux [
"--with-sandbox-shell=${sh}/bin/busybox"
];
buildDeps =
[ bison
flex
libxml2
libxslt
docbook5
docbook_xsl_ns
autoconf-archive
autoreconfHook
curl
bzip2 xz brotli zlib editline
openssl pkgconfig sqlite
libarchive
boost
(if lib.versionAtLeast lib.version "20.03pre"
then nlohmann_json
else nlohmann_json.override { multipleHeaders = true; })
nlohmann_json
# Tests
git
mercurial
jq
gmock
]
++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
++ lib.optional (stdenv.isLinux || stdenv.isDarwin)
(aws-sdk-cpp.override {
apis = ["s3" "transfer"];
customMemoryManagement = false;
});
propagatedDeps =
[ (boehmgc.override { enableLargeConfig = true; })
];
perlDeps =
[ perl
perlPackages.DBDSQLite
];
};
in {
# A Nixpkgs overlay that overrides the 'nix' and
# 'nix.perl-bindings' packages.
overlay = final: prev: {
nix = with final; with commonDeps pkgs; (stdenv.mkDerivation {
name = "nix-${version}";
src = self;
VERSION_SUFFIX = versionSuffix;
outputs = [ "out" "dev" "doc" ];
buildInputs = buildDeps;
propagatedBuildInputs = propagatedDeps;
preConfigure =
''
# Copy libboost_context so we don't get all of Boost in our closure.
# https://github.com/NixOS/nixpkgs/issues/45462
mkdir -p $out/lib
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
rm -f $out/lib/*.a
${lib.optionalString stdenv.isLinux ''
chmod u+w $out/lib/*.so.*
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
''}
'';
configureFlags = configureFlags ++
[ "--sysconfdir=/etc" ];
enableParallelBuilding = true;
makeFlags = "profiledir=$(out)/etc/profile.d";
doCheck = true;
installFlags = "sysconfdir=$(out)/etc";
postInstall = ''
mkdir -p $doc/nix-support
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
'';
doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
separateDebugInfo = true;
}) // {
perl-bindings = with final; stdenv.mkDerivation {
name = "nix-perl-${version}";
src = self;
buildInputs =
[ autoconf-archive
autoreconfHook
nix
curl
bzip2
xz
pkgconfig
pkgs.perl
boost
]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium;
configureFlags = ''
--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
'';
enableParallelBuilding = true;
postUnpack = "sourceRoot=$sourceRoot/perl";
};
};
};
hydraJobs = {
# Binary package for various platforms.
build = nixpkgs.lib.genAttrs systems (system: nixpkgsFor.${system}.nix);
# Perl bindings for various platforms.
perlBindings = nixpkgs.lib.genAttrs systems (system: nixpkgsFor.${system}.nix.perl-bindings);
# Binary tarball for various platforms, containing a Nix store
# with the closure of 'nix' package, and the second half of
# the installation script.
binaryTarball = nixpkgs.lib.genAttrs systems (system:
with nixpkgsFor.${system};
let
installerClosureInfo = closureInfo { rootPaths = [ nix cacert ]; };
in
runCommand "nix-binary-tarball-${version}"
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
meta.description = "Distribution-independent Nix bootstrap binaries for ${system}";
}
''
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
--subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert}
if type -p shellcheck; then
# SC1090: Don't worry about not being able to find
# $nix/etc/profile.d/nix.sh
shellcheck --exclude SC1090 $TMPDIR/install
shellcheck $TMPDIR/install-darwin-multi-user.sh
shellcheck $TMPDIR/install-systemd-multi-user.sh
# SC1091: Don't panic about not being able to source
# /etc/profile
# SC2002: Ignore "useless cat" "error", when loading
# .reginfo, as the cat is a much cleaner
# implementation, even though it is "useless"
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
# root's home directory
shellcheck --external-sources \
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
fi
chmod +x $TMPDIR/install
chmod +x $TMPDIR/install-darwin-multi-user.sh
chmod +x $TMPDIR/install-systemd-multi-user.sh
chmod +x $TMPDIR/install-multi-user
dir=nix-${version}-${system}
fn=$out/$dir.tar.xz
mkdir -p $out/nix-support
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
tar cvfJ $fn \
--owner=0 --group=0 --mode=u+rw,uga+r \
--absolute-names \
--hard-dereference \
--transform "s,$TMPDIR/install,$dir/install," \
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
--transform "s,$NIX_STORE,$dir/store,S" \
$TMPDIR/install $TMPDIR/install-darwin-multi-user.sh \
$TMPDIR/install-systemd-multi-user.sh \
$TMPDIR/install-multi-user $TMPDIR/reginfo \
$(cat ${installerClosureInfo}/store-paths)
'');
# The first half of the installation script. This is uploaded
# to https://nixos.org/nix/install. It downloads the binary
# tarball for the user's system and calls the second half of the
# installation script.
installerScript =
with nixpkgsFor.x86_64-linux;
runCommand "installer-script"
{ buildInputs = [ nix ];
}
''
mkdir -p $out/nix-support
substitute ${./scripts/install.in} $out/install \
${pkgs.lib.concatMapStrings
(system: "--replace '@binaryTarball_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${self.hydraJobs.binaryTarball.${system}}/*.tar.xz) ")
[ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ]
} \
--replace '@nixVersion@' ${version}
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
'';
# Line coverage analysis.
coverage =
with nixpkgsFor.x86_64-linux;
with commonDeps pkgs;
releaseTools.coverageAnalysis {
name = "nix-coverage-${version}";
src = self;
enableParallelBuilding = true;
buildInputs = buildDeps ++ propagatedDeps;
dontInstall = false;
doInstallCheck = true;
lcovFilter = [ "*/boost/*" "*-tab.*" ];
# We call `dot', and even though we just use it to
# syntax-check generated dot files, it still requires some
# fonts. So provide those.
FONTCONFIG_FILE = texFunctions.fontsConf;
# To test building without precompiled headers.
makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ];
};
# System tests.
tests.remoteBuilds = import ./tests/remote-builds.nix {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
};
tests.nix-copy-closure = import ./tests/nix-copy-closure.nix {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
};
tests.githubFlakes = (import ./tests/github-flakes.nix rec {
system = "x86_64-linux";
inherit nixpkgs;
inherit (self) overlay;
});
tests.setuid = nixpkgs.lib.genAttrs
["i686-linux" "x86_64-linux"]
(system:
import ./tests/setuid.nix rec {
inherit nixpkgs system;
inherit (self) overlay;
});
# Test whether the binary tarball works in an Ubuntu system.
tests.binaryTarball =
with nixpkgsFor.x86_64-linux;
vmTools.runInLinuxImage (runCommand "nix-binary-tarball-test"
{ diskImage = vmTools.diskImages.ubuntu1204x86_64;
}
''
set -x
useradd -m alice
su - alice -c 'tar xf ${self.hydraJobs.binaryTarball.x86_64-linux}/*.tar.*'
mkdir /dest-nix
mount -o bind /dest-nix /nix # Provide a writable /nix.
chown alice /nix
su - alice -c '_NIX_INSTALLER_TEST=1 ./nix-*/install'
su - alice -c 'nix-store --verify'
su - alice -c 'PAGER= nix-store -qR ${self.hydraJobs.build.x86_64-linux}'
# Check whether 'nix upgrade-nix' works.
cat > /tmp/paths.nix <<EOF
{
x86_64-linux = "${self.hydraJobs.build.x86_64-linux}";
}
EOF
su - alice -c 'nix --experimental-features nix-command upgrade-nix -vvv --nix-store-paths-url file:///tmp/paths.nix'
(! [ -L /home/alice/.profile-1-link ])
su - alice -c 'PAGER= nix-store -qR ${self.hydraJobs.build.x86_64-linux}'
mkdir -p $out/nix-support
touch $out/nix-support/hydra-build-products
umount /nix
'');
/*
# Check whether we can still evaluate all of Nixpkgs.
tests.evalNixpkgs =
import (nixpkgs + "/pkgs/top-level/make-tarball.nix") {
# FIXME: fix pkgs/top-level/make-tarball.nix in NixOS to not require a revCount.
inherit nixpkgs;
pkgs = nixpkgsFor.x86_64-linux;
officialRelease = false;
};
# Check whether we can still evaluate NixOS.
tests.evalNixOS =
with nixpkgsFor.x86_64-linux;
runCommand "eval-nixos" { buildInputs = [ nix ]; }
''
export NIX_STATE_DIR=$TMPDIR
nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
--arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'
touch $out
'';
*/
};
checks = forAllSystems (system: {
binaryTarball = self.hydraJobs.binaryTarball.${system};
perlBindings = self.hydraJobs.perlBindings.${system};
});
packages = forAllSystems (system: {
inherit (nixpkgsFor.${system}) nix;
});
defaultPackage = forAllSystems (system: self.packages.${system}.nix);
devShell = forAllSystems (system:
with nixpkgsFor.${system};
with commonDeps pkgs;
stdenv.mkDerivation {
name = "nix";
buildInputs = buildDeps ++ propagatedDeps ++ perlDeps;
inherit configureFlags;
enableParallelBuilding = true;
installFlags = "sysconfdir=$(out)/etc";
shellHook =
''
export prefix=$(pwd)/inst
configureFlags+=" --prefix=$prefix"
PKG_CONFIG_PATH=$prefix/lib/pkgconfig:$PKG_CONFIG_PATH
PATH=$prefix/bin:$PATH
unset PYTHONPATH
'';
});
};
}
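A consumption sketch for the overlay defined above (illustration only; the flake path is a placeholder and builtins.getFlake requires the 'flakes' experimental feature):

  let
    nixFlake = builtins.getFlake "/path/to/this/repo";   # placeholder path, not defined by this commit
    pkgs = import <nixpkgs> { overlays = [ nixFlake.overlay ]; };
  in pkgs.nix    # the 'nix' package built from this source tree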


@@ -8,7 +8,7 @@ clean-files += Makefile.config
GLOBAL_CXXFLAGS += -Wno-deprecated-declarations
$(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \
$(foreach i, config.h $(wildcard src/lib*/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))
$(GCH) $(PCH): src/libutil/util.hh config.h

19
misc/bash/completion.sh Normal file

@@ -0,0 +1,19 @@
function _complete_nix {
local -a words
local cword cur
_get_comp_words_by_ref -n ':=&' words cword cur
local have_type
while IFS= read -r line; do
if [[ -z $have_type ]]; then
have_type=1
if [[ $line = filenames ]]; then
compopt -o filenames
fi
else
COMPREPLY+=("$line")
fi
done < <(NIX_GET_COMPLETIONS=$cword "${words[@]}")
__ltrim_colon_completions "$cur"
}
complete -F _complete_nix nix

1
misc/bash/local.mk Normal file

@@ -0,0 +1 @@
$(eval $(call install-file-as, $(d)/completion.sh, $(datarootdir)/bash-completion/completions/nix, 0644))


@@ -1,82 +0,0 @@
{ pkgs }:
with pkgs;
rec {
# Use "busybox-sandbox-shell" if present,
# if not (legacy) fallback and hope it's sufficient.
sh = pkgs.busybox-sandbox-shell or (busybox.override {
useMusl = true;
enableStatic = true;
enableMinimal = true;
extraConfig = ''
CONFIG_FEATURE_FANCY_ECHO y
CONFIG_FEATURE_SH_MATH y
CONFIG_FEATURE_SH_MATH_64 y
CONFIG_ASH y
CONFIG_ASH_OPTIMIZE_FOR_SIZE y
CONFIG_ASH_ALIAS y
CONFIG_ASH_BASH_COMPAT y
CONFIG_ASH_CMDCMD y
CONFIG_ASH_ECHO y
CONFIG_ASH_GETOPTS y
CONFIG_ASH_INTERNAL_GLOB y
CONFIG_ASH_JOB_CONTROL y
CONFIG_ASH_PRINTF y
CONFIG_ASH_TEST y
'';
});
configureFlags =
lib.optionals stdenv.isLinux [
"--with-sandbox-shell=${sh}/bin/busybox"
];
buildDeps =
[ bison
flex
libxml2
libxslt
docbook5
docbook_xsl_ns
autoconf-archive
autoreconfHook
curl
bzip2 xz brotli zlib editline
openssl pkgconfig sqlite
libarchive
boost
nlohmann_json
# Tests
git
mercurial
gmock
]
++ lib.optionals stdenv.isLinux [libseccomp utillinuxMinimal]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
++ lib.optional (stdenv.isLinux || stdenv.isDarwin)
((aws-sdk-cpp.override {
apis = ["s3" "transfer"];
customMemoryManagement = false;
}).overrideDerivation (args: {
/*
patches = args.patches or [] ++ [ (fetchpatch {
url = https://github.com/edolstra/aws-sdk-cpp/commit/3e07e1f1aae41b4c8b340735ff9e8c735f0c063f.patch;
sha256 = "1pij0v449p166f9l29x7ppzk8j7g9k9mp15ilh5qxp29c7fnvxy2";
}) ];
*/
}));
propagatedDeps =
[ (boehmgc.override { enableLargeConfig = true; })
];
perlDeps =
[ perl
perlPackages.DBDSQLite
];
}


@@ -1,303 +0,0 @@
{ nix ? builtins.fetchGit ./.
, nixpkgs ? builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-20.03-small.tar.gz
, officialRelease ? false
, systems ? [ "x86_64-linux" "i686-linux" "x86_64-darwin" "aarch64-linux" ]
}:
let
pkgs = import nixpkgs { system = builtins.currentSystem or "x86_64-linux"; };
version =
builtins.readFile ./.version
+ (if officialRelease then "" else "pre${toString nix.revCount}_${nix.shortRev}");
jobs = rec {
build = pkgs.lib.genAttrs systems (system:
let pkgs = import nixpkgs { inherit system; }; in
with pkgs;
with import ./release-common.nix { inherit pkgs; };
stdenv.mkDerivation {
name = "nix-${version}";
src = nix;
outputs = [ "out" "dev" "doc" ];
buildInputs = buildDeps;
propagatedBuildInputs = propagatedDeps;
preConfigure =
''
# Copy libboost_context so we don't get all of Boost in our closure.
# https://github.com/NixOS/nixpkgs/issues/45462
mkdir -p $out/lib
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
rm -f $out/lib/*.a
${lib.optionalString stdenv.isLinux ''
chmod u+w $out/lib/*.so.*
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
''}
(cd perl; autoreconf --install --force --verbose)
'';
configureFlags = configureFlags ++
[ "--sysconfdir=/etc" ];
enableParallelBuilding = true;
makeFlags = "profiledir=$(out)/etc/profile.d";
installFlags = "sysconfdir=$(out)/etc";
postInstall = ''
mkdir -p $doc/nix-support
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
'';
doCheck = true;
doInstallCheck = true;
installCheckFlags = "sysconfdir=$(out)/etc";
separateDebugInfo = true;
});
perlBindings = pkgs.lib.genAttrs systems (system:
let pkgs = import nixpkgs { inherit system; }; in with pkgs;
releaseTools.nixBuild {
name = "nix-perl-${version}";
src = nix;
buildInputs =
[ autoconf-archive
autoreconfHook
jobs.build.${system}
curl
bzip2
xz
pkgconfig
pkgs.perl
boost
]
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium;
configureFlags = ''
--with-dbi=${perlPackages.DBI}/${pkgs.perl.libPrefix}
--with-dbd-sqlite=${perlPackages.DBDSQLite}/${pkgs.perl.libPrefix}
'';
enableParallelBuilding = true;
postUnpack = "sourceRoot=$sourceRoot/perl";
});
binaryTarball = pkgs.lib.genAttrs systems (system:
with import nixpkgs { inherit system; };
let
toplevel = builtins.getAttr system jobs.build;
installerClosureInfo = closureInfo { rootPaths = [ toplevel cacert ]; };
in
runCommand "nix-binary-tarball-${version}"
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
meta.description = "Distribution-independent Nix bootstrap binaries for ${system}";
}
''
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
--subst-var-by nix ${toplevel} \
--subst-var-by cacert ${cacert}
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
--subst-var-by nix ${toplevel} \
--subst-var-by cacert ${cacert}
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
--subst-var-by nix ${toplevel} \
--subst-var-by cacert ${cacert}
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
--subst-var-by nix ${toplevel} \
--subst-var-by cacert ${cacert}
if type -p shellcheck; then
# SC1090: Don't worry about not being able to find
# $nix/etc/profile.d/nix.sh
shellcheck --exclude SC1090 $TMPDIR/install
shellcheck $TMPDIR/create-darwin-volume.sh
shellcheck $TMPDIR/install-darwin-multi-user.sh
shellcheck $TMPDIR/install-systemd-multi-user.sh
# SC1091: Don't panic about not being able to source
# /etc/profile
# SC2002: Ignore "useless cat" "error", when loading
# .reginfo, as the cat is a much cleaner
# implementation, even though it is "useless"
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
# root's home directory
shellcheck --external-sources \
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
fi
chmod +x $TMPDIR/install
chmod +x $TMPDIR/create-darwin-volume.sh
chmod +x $TMPDIR/install-darwin-multi-user.sh
chmod +x $TMPDIR/install-systemd-multi-user.sh
chmod +x $TMPDIR/install-multi-user
dir=nix-${version}-${system}
fn=$out/$dir.tar.xz
mkdir -p $out/nix-support
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
tar cvfJ $fn \
--owner=0 --group=0 --mode=u+rw,uga+r \
--absolute-names \
--hard-dereference \
--transform "s,$TMPDIR/install,$dir/install," \
--transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
--transform "s,$NIX_STORE,$dir/store,S" \
$TMPDIR/install \
$TMPDIR/create-darwin-volume.sh \
$TMPDIR/install-darwin-multi-user.sh \
$TMPDIR/install-systemd-multi-user.sh \
$TMPDIR/install-multi-user \
$TMPDIR/reginfo \
$(cat ${installerClosureInfo}/store-paths)
'');
coverage =
with pkgs;
with import ./release-common.nix { inherit pkgs; };
releaseTools.coverageAnalysis {
name = "nix-coverage-${version}";
src = nix;
enableParallelBuilding = true;
buildInputs = buildDeps ++ propagatedDeps;
dontInstall = false;
doInstallCheck = true;
lcovFilter = [ "*/boost/*" "*-tab.*" ];
# We call `dot', and even though we just use it to
# syntax-check generated dot files, it still requires some
# fonts. So provide those.
FONTCONFIG_FILE = texFunctions.fontsConf;
# To test building without precompiled headers.
makeFlagsArray = [ "PRECOMPILE_HEADERS=0" ];
};
# System tests.
tests.remoteBuilds = (import ./tests/remote-builds.nix rec {
inherit nixpkgs;
nix = build.x86_64-linux; system = "x86_64-linux";
});
tests.nix-copy-closure = (import ./tests/nix-copy-closure.nix rec {
inherit nixpkgs;
nix = build.x86_64-linux; system = "x86_64-linux";
});
tests.setuid = pkgs.lib.genAttrs
["i686-linux" "x86_64-linux"]
(system:
import ./tests/setuid.nix rec {
inherit nixpkgs;
nix = build.${system}; inherit system;
});
tests.binaryTarball =
with import nixpkgs { system = "x86_64-linux"; };
vmTools.runInLinuxImage (runCommand "nix-binary-tarball-test"
{ diskImage = vmTools.diskImages.ubuntu1204x86_64;
}
''
set -x
useradd -m alice
su - alice -c 'tar xf ${binaryTarball.x86_64-linux}/*.tar.*'
mkdir /dest-nix
mount -o bind /dest-nix /nix # Provide a writable /nix.
chown alice /nix
su - alice -c '_NIX_INSTALLER_TEST=1 ./nix-*/install'
su - alice -c 'nix-store --verify'
su - alice -c 'PAGER= nix-store -qR ${build.x86_64-linux}'
# Check whether 'nix upgrade-nix' works.
cat > /tmp/paths.nix <<EOF
{
x86_64-linux = "${build.x86_64-linux}";
}
EOF
su - alice -c 'nix --experimental-features nix-command upgrade-nix -vvv --nix-store-paths-url file:///tmp/paths.nix'
(! [ -L /home/alice/.profile-1-link ])
su - alice -c 'PAGER= nix-store -qR ${build.x86_64-linux}'
mkdir -p $out/nix-support
touch $out/nix-support/hydra-build-products
umount /nix
''); # */
/*
tests.evalNixpkgs =
import (nixpkgs + "/pkgs/top-level/make-tarball.nix") {
inherit nixpkgs;
inherit pkgs;
nix = build.x86_64-linux;
officialRelease = false;
};
tests.evalNixOS =
pkgs.runCommand "eval-nixos" { buildInputs = [ build.x86_64-linux ]; }
''
export NIX_STATE_DIR=$TMPDIR
nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
--arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'
touch $out
'';
*/
installerScript =
pkgs.runCommand "installer-script"
{ buildInputs = [ build.${builtins.currentSystem or "x86_64-linux"} ]; }
''
mkdir -p $out/nix-support
substitute ${./scripts/install.in} $out/install \
${pkgs.lib.concatMapStrings
(system: "--replace '@binaryTarball_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${binaryTarball.${system}}/*.tar.xz) ")
systems
} \
--replace '@nixVersion@' ${version}
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
'';
};
in jobs


@@ -1,25 +0,0 @@
{ useClang ? false }:
with import (builtins.fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-20.03-small.tar.gz) {};
with import ./release-common.nix { inherit pkgs; };
(if useClang then clangStdenv else stdenv).mkDerivation {
name = "nix";
buildInputs = buildDeps ++ propagatedDeps ++ perlDeps;
inherit configureFlags;
enableParallelBuilding = true;
installFlags = "sysconfdir=$(out)/etc";
shellHook =
''
export prefix=$(pwd)/inst
configureFlags+=" --prefix=$prefix"
PKG_CONFIG_PATH=$prefix/lib/pkgconfig:$PKG_CONFIG_PATH
PATH=$prefix/bin:$PATH
'';
}


@@ -4,6 +4,8 @@
#include "util.hh"
#include "eval.hh"
#include "fetchers.hh"
#include "registry.hh"
#include "flake/flakeref.hh"
#include "store-api.hh"
namespace nix {
@@ -31,6 +33,27 @@ MixEvalArgs::MixEvalArgs()
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
});
addFlag({
.longName = "impure",
.description = "allow access to mutable paths and repositories",
.handler = {[&]() {
evalSettings.pureEval = false;
}},
});
addFlag({
.longName = "override-flake",
.description = "override a flake registry value",
.labels = {"original-ref", "resolved-ref"},
.handler = {[&](std::string _from, std::string _to) {
auto from = parseFlakeRef(_from, absPath("."));
auto to = parseFlakeRef(_to, absPath("."));
fetchers::Attrs extraAttrs;
if (to.subdir != "") extraAttrs["dir"] = to.subdir;
fetchers::overrideRegistry(from.input, to.input, extraAttrs);
}}
});
}
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
@@ -53,7 +76,7 @@ Path lookupFileArg(EvalState & state, string s)
if (isUri(s)) {
return state.store->toRealPath(
fetchers::downloadTarball(
state.store, resolveUri(s), "source", false).storePath);
state.store, resolveUri(s), "source", false).first.storePath);
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
Path p = s.substr(1, s.size() - 2);
return state.findFile(p);

616
src/libexpr/eval-cache.cc Normal file

@@ -0,0 +1,616 @@
#include "eval-cache.hh"
#include "sqlite.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "store-api.hh"
namespace nix::eval_cache {
static const char * schema = R"sql(
create table if not exists Attributes (
parent integer not null,
name text,
type integer not null,
value text,
context text,
primary key (parent, name)
);
)sql";
struct AttrDb
{
std::atomic_bool failed{false};
struct State
{
SQLite db;
SQLiteStmt insertAttribute;
SQLiteStmt insertAttributeWithContext;
SQLiteStmt queryAttribute;
SQLiteStmt queryAttributes;
std::unique_ptr<SQLiteTxn> txn;
};
std::unique_ptr<Sync<State>> _state;
AttrDb(const Hash & fingerprint)
: _state(std::make_unique<Sync<State>>())
{
auto state(_state->lock());
Path cacheDir = getCacheDir() + "/nix/eval-cache-v2";
createDirs(cacheDir);
Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
state->db = SQLite(dbPath);
state->db.isCache();
state->db.exec(schema);
state->insertAttribute.create(state->db,
"insert or replace into Attributes(parent, name, type, value) values (?, ?, ?, ?)");
state->insertAttributeWithContext.create(state->db,
"insert or replace into Attributes(parent, name, type, value, context) values (?, ?, ?, ?, ?)");
state->queryAttribute.create(state->db,
"select rowid, type, value, context from Attributes where parent = ? and name = ?");
state->queryAttributes.create(state->db,
"select name from Attributes where parent = ?");
state->txn = std::make_unique<SQLiteTxn>(state->db);
}
~AttrDb()
{
try {
auto state(_state->lock());
if (!failed)
state->txn->commit();
state->txn.reset();
} catch (...) {
ignoreException();
}
}
template<typename F>
AttrId doSQLite(F && fun)
{
if (failed) return 0;
try {
return fun();
} catch (SQLiteError &) {
ignoreException();
failed = true;
return 0;
}
}
AttrId setAttrs(
AttrKey key,
const std::vector<Symbol> & attrs)
{
return doSQLite([&]()
{
auto state(_state->lock());
state->insertAttribute.use()
(key.first)
(key.second)
(AttrType::FullAttrs)
(0, false).exec();
AttrId rowId = state->db.getLastInsertedRowId();
assert(rowId);
for (auto & attr : attrs)
state->insertAttribute.use()
(rowId)
(attr)
(AttrType::Placeholder)
(0, false).exec();
return rowId;
});
}
AttrId setString(
AttrKey key,
std::string_view s,
const char * * context = nullptr)
{
return doSQLite([&]()
{
auto state(_state->lock());
if (context) {
std::string ctx;
for (const char * * p = context; *p; ++p) {
if (p != context) ctx.push_back(' ');
ctx.append(*p);
}
state->insertAttributeWithContext.use()
(key.first)
(key.second)
(AttrType::String)
(s)
(ctx).exec();
} else {
state->insertAttribute.use()
(key.first)
(key.second)
(AttrType::String)
(s).exec();
}
return state->db.getLastInsertedRowId();
});
}
AttrId setBool(
AttrKey key,
bool b)
{
return doSQLite([&]()
{
auto state(_state->lock());
state->insertAttribute.use()
(key.first)
(key.second)
(AttrType::Bool)
(b ? 1 : 0).exec();
return state->db.getLastInsertedRowId();
});
}
AttrId setPlaceholder(AttrKey key)
{
return doSQLite([&]()
{
auto state(_state->lock());
state->insertAttribute.use()
(key.first)
(key.second)
(AttrType::Placeholder)
(0, false).exec();
return state->db.getLastInsertedRowId();
});
}
AttrId setMissing(AttrKey key)
{
return doSQLite([&]()
{
auto state(_state->lock());
state->insertAttribute.use()
(key.first)
(key.second)
(AttrType::Missing)
(0, false).exec();
return state->db.getLastInsertedRowId();
});
}
AttrId setMisc(AttrKey key)
{
return doSQLite([&]()
{
auto state(_state->lock());
state->insertAttribute.use()
(key.first)
(key.second)
(AttrType::Misc)
(0, false).exec();
return state->db.getLastInsertedRowId();
});
}
AttrId setFailed(AttrKey key)
{
return doSQLite([&]()
{
auto state(_state->lock());
state->insertAttribute.use()
(key.first)
(key.second)
(AttrType::Failed)
(0, false).exec();
return state->db.getLastInsertedRowId();
});
}
std::optional<std::pair<AttrId, AttrValue>> getAttr(
AttrKey key,
SymbolTable & symbols)
{
auto state(_state->lock());
auto queryAttribute(state->queryAttribute.use()(key.first)(key.second));
if (!queryAttribute.next()) return {};
auto rowId = (AttrType) queryAttribute.getInt(0);
auto type = (AttrType) queryAttribute.getInt(1);
switch (type) {
case AttrType::Placeholder:
return {{rowId, placeholder_t()}};
case AttrType::FullAttrs: {
// FIXME: expensive, should separate this out.
std::vector<Symbol> attrs;
auto queryAttributes(state->queryAttributes.use()(rowId));
while (queryAttributes.next())
attrs.push_back(symbols.create(queryAttributes.getStr(0)));
return {{rowId, attrs}};
}
case AttrType::String: {
std::vector<std::pair<Path, std::string>> context;
if (!queryAttribute.isNull(3))
for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
context.push_back(decodeContext(s));
return {{rowId, string_t{queryAttribute.getStr(2), context}}};
}
case AttrType::Bool:
return {{rowId, queryAttribute.getInt(2) != 0}};
case AttrType::Missing:
return {{rowId, missing_t()}};
case AttrType::Misc:
return {{rowId, misc_t()}};
case AttrType::Failed:
return {{rowId, failed_t()}};
default:
throw Error("unexpected type in evaluation cache");
}
}
};
static std::shared_ptr<AttrDb> makeAttrDb(const Hash & fingerprint)
{
try {
return std::make_shared<AttrDb>(fingerprint);
} catch (SQLiteError &) {
ignoreException();
return nullptr;
}
}
EvalCache::EvalCache(
std::optional<std::reference_wrapper<const Hash>> useCache,
EvalState & state,
RootLoader rootLoader)
: db(useCache ? makeAttrDb(*useCache) : nullptr)
, state(state)
, rootLoader(rootLoader)
{
}
Value * EvalCache::getRootValue()
{
if (!value) {
debug("getting root value");
value = allocRootValue(rootLoader());
}
return *value;
}
std::shared_ptr<AttrCursor> EvalCache::getRoot()
{
return std::make_shared<AttrCursor>(ref(shared_from_this()), std::nullopt);
}
AttrCursor::AttrCursor(
ref<EvalCache> root,
Parent parent,
Value * value,
std::optional<std::pair<AttrId, AttrValue>> && cachedValue)
: root(root), parent(parent), cachedValue(std::move(cachedValue))
{
if (value)
_value = allocRootValue(value);
}
AttrKey AttrCursor::getKey()
{
if (!parent)
return {0, root->state.sEpsilon};
if (!parent->first->cachedValue) {
parent->first->cachedValue = root->db->getAttr(
parent->first->getKey(), root->state.symbols);
assert(parent->first->cachedValue);
}
return {parent->first->cachedValue->first, parent->second};
}
Value & AttrCursor::getValue()
{
if (!_value) {
if (parent) {
auto & vParent = parent->first->getValue();
root->state.forceAttrs(vParent);
auto attr = vParent.attrs->get(parent->second);
if (!attr)
throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());
_value = allocRootValue(attr->value);
} else
_value = allocRootValue(root->getRootValue());
}
return **_value;
}
std::vector<Symbol> AttrCursor::getAttrPath() const
{
if (parent) {
auto attrPath = parent->first->getAttrPath();
attrPath.push_back(parent->second);
return attrPath;
} else
return {};
}
std::vector<Symbol> AttrCursor::getAttrPath(Symbol name) const
{
auto attrPath = getAttrPath();
attrPath.push_back(name);
return attrPath;
}
std::string AttrCursor::getAttrPathStr() const
{
return concatStringsSep(".", getAttrPath());
}
std::string AttrCursor::getAttrPathStr(Symbol name) const
{
return concatStringsSep(".", getAttrPath(name));
}
Value & AttrCursor::forceValue()
{
debug("evaluating uncached attribute %s", getAttrPathStr());
auto & v = getValue();
try {
root->state.forceValue(v);
} catch (EvalError &) {
debug("setting '%s' to failed", getAttrPathStr());
if (root->db)
cachedValue = {root->db->setFailed(getKey()), failed_t()};
throw;
}
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
if (v.type == tString)
cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context), v.string.s};
else if (v.type == tPath)
cachedValue = {root->db->setString(getKey(), v.path), v.path};
else if (v.type == tBool)
cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean};
else if (v.type == tAttrs)
; // FIXME: do something?
else
cachedValue = {root->db->setMisc(getKey()), misc_t()};
}
return v;
}
std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)
{
if (root->db) {
if (!cachedValue)
cachedValue = root->db->getAttr(getKey(), root->state.symbols);
if (cachedValue) {
if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) {
for (auto & attr : *attrs)
if (attr == name)
return std::make_shared<AttrCursor>(root, std::make_pair(shared_from_this(), name));
return nullptr;
} else if (std::get_if<placeholder_t>(&cachedValue->second)) {
auto attr = root->db->getAttr({cachedValue->first, name}, root->state.symbols);
if (attr) {
if (std::get_if<missing_t>(&attr->second))
return nullptr;
else if (std::get_if<failed_t>(&attr->second))
throw EvalError("cached failure of attribute '%s'", getAttrPathStr(name));
else
return std::make_shared<AttrCursor>(root,
std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
}
// Incomplete attrset, so need to fall thru and
// evaluate to see whether 'name' exists
} else
return nullptr;
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
}
}
auto & v = forceValue();
if (v.type != tAttrs)
return nullptr;
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
auto attr = v.attrs->get(name);
if (!attr) {
if (root->db) {
if (!cachedValue)
cachedValue = {root->db->setPlaceholder(getKey()), placeholder_t()};
root->db->setMissing({cachedValue->first, name});
}
return nullptr;
}
std::optional<std::pair<AttrId, AttrValue>> cachedValue2;
if (root->db) {
if (!cachedValue)
cachedValue = {root->db->setPlaceholder(getKey()), placeholder_t()};
cachedValue2 = {root->db->setPlaceholder({cachedValue->first, name}), placeholder_t()};
}
return std::make_shared<AttrCursor>(
root, std::make_pair(shared_from_this(), name), attr->value, std::move(cachedValue2));
}
std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name)
{
return maybeGetAttr(root->state.symbols.create(name));
}
std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name)
{
auto p = maybeGetAttr(name);
if (!p)
throw Error("attribute '%s' does not exist", getAttrPathStr(name));
return p;
}
std::shared_ptr<AttrCursor> AttrCursor::getAttr(std::string_view name)
{
return getAttr(root->state.symbols.create(name));
}
std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath)
{
auto res = shared_from_this();
for (auto & attr : attrPath) {
res = res->maybeGetAttr(attr);
if (!res) return {};
}
return res;
}
std::string AttrCursor::getString()
{
if (root->db) {
if (!cachedValue)
cachedValue = root->db->getAttr(getKey(), root->state.symbols);
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto s = std::get_if<string_t>(&cachedValue->second)) {
debug("using cached string attribute '%s'", getAttrPathStr());
return s->first;
} else
throw TypeError("'%s' is not a string", getAttrPathStr());
}
}
auto & v = forceValue();
if (v.type != tString && v.type != tPath)
throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type));
return v.type == tString ? v.string.s : v.path;
}
string_t AttrCursor::getStringWithContext()
{
if (root->db) {
if (!cachedValue)
cachedValue = root->db->getAttr(getKey(), root->state.symbols);
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto s = std::get_if<string_t>(&cachedValue->second)) {
debug("using cached string attribute '%s'", getAttrPathStr());
return *s;
} else
throw TypeError("'%s' is not a string", getAttrPathStr());
}
}
auto & v = forceValue();
if (v.type == tString)
return {v.string.s, v.getContext()};
else if (v.type == tPath)
return {v.path, {}};
else
throw TypeError("'%s' is not a string but %s", getAttrPathStr(), showType(v.type));
}
bool AttrCursor::getBool()
{
if (root->db) {
if (!cachedValue)
cachedValue = root->db->getAttr(getKey(), root->state.symbols);
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto b = std::get_if<bool>(&cachedValue->second)) {
debug("using cached Boolean attribute '%s'", getAttrPathStr());
return *b;
} else
throw TypeError("'%s' is not a Boolean", getAttrPathStr());
}
}
auto & v = forceValue();
if (v.type != tBool)
throw TypeError("'%s' is not a Boolean", getAttrPathStr());
return v.boolean;
}
std::vector<Symbol> AttrCursor::getAttrs()
{
if (root->db) {
if (!cachedValue)
cachedValue = root->db->getAttr(getKey(), root->state.symbols);
if (cachedValue && !std::get_if<placeholder_t>(&cachedValue->second)) {
if (auto attrs = std::get_if<std::vector<Symbol>>(&cachedValue->second)) {
debug("using cached attrset attribute '%s'", getAttrPathStr());
return *attrs;
} else
throw TypeError("'%s' is not an attribute set", getAttrPathStr());
}
}
auto & v = forceValue();
if (v.type != tAttrs)
throw TypeError("'%s' is not an attribute set", getAttrPathStr());
std::vector<Symbol> attrs;
for (auto & attr : *getValue().attrs)
attrs.push_back(attr.name);
std::sort(attrs.begin(), attrs.end(), [](const Symbol & a, const Symbol & b) {
return (const string &) a < (const string &) b;
});
if (root->db)
cachedValue = {root->db->setAttrs(getKey(), attrs), attrs};
return attrs;
}
bool AttrCursor::isDerivation()
{
auto aType = maybeGetAttr("type");
return aType && aType->getString() == "derivation";
}
StorePath AttrCursor::forceDerivation()
{
auto aDrvPath = getAttr(root->state.sDrvPath);
auto drvPath = root->state.store->parseStorePath(aDrvPath->getString());
if (!root->state.store->isValidPath(drvPath) && !settings.readOnlyMode) {
/* The eval cache contains 'drvPath', but the actual path has
been garbage-collected. So force it to be regenerated. */
aDrvPath->forceValue();
if (!root->state.store->isValidPath(drvPath))
throw Error("don't know how to recreate store derivation '%s'!",
root->state.store->printStorePath(drvPath));
}
return drvPath;
}
}

121
src/libexpr/eval-cache.hh Normal file

@@ -0,0 +1,121 @@
#pragma once
#include "sync.hh"
#include "hash.hh"
#include "eval.hh"
#include <functional>
#include <variant>
namespace nix::eval_cache {
class AttrDb;
class AttrCursor;
class EvalCache : public std::enable_shared_from_this<EvalCache>
{
friend class AttrCursor;
std::shared_ptr<AttrDb> db;
EvalState & state;
typedef std::function<Value *()> RootLoader;
RootLoader rootLoader;
RootValue value;
Value * getRootValue();
public:
EvalCache(
std::optional<std::reference_wrapper<const Hash>> useCache,
EvalState & state,
RootLoader rootLoader);
std::shared_ptr<AttrCursor> getRoot();
};
enum AttrType {
Placeholder = 0,
FullAttrs = 1,
String = 2,
Missing = 3,
Misc = 4,
Failed = 5,
Bool = 6,
};
struct placeholder_t {};
struct missing_t {};
struct misc_t {};
struct failed_t {};
typedef uint64_t AttrId;
typedef std::pair<AttrId, Symbol> AttrKey;
typedef std::pair<std::string, std::vector<std::pair<Path, std::string>>> string_t;
typedef std::variant<
std::vector<Symbol>,
string_t,
placeholder_t,
missing_t,
misc_t,
failed_t,
bool
> AttrValue;
class AttrCursor : public std::enable_shared_from_this<AttrCursor>
{
friend class EvalCache;
ref<EvalCache> root;
typedef std::optional<std::pair<std::shared_ptr<AttrCursor>, Symbol>> Parent;
Parent parent;
RootValue _value;
std::optional<std::pair<AttrId, AttrValue>> cachedValue;
AttrKey getKey();
Value & getValue();
public:
AttrCursor(
ref<EvalCache> root,
Parent parent,
Value * value = nullptr,
std::optional<std::pair<AttrId, AttrValue>> && cachedValue = {});
std::vector<Symbol> getAttrPath() const;
std::vector<Symbol> getAttrPath(Symbol name) const;
std::string getAttrPathStr() const;
std::string getAttrPathStr(Symbol name) const;
std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name);
std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name);
std::shared_ptr<AttrCursor> getAttr(Symbol name);
std::shared_ptr<AttrCursor> getAttr(std::string_view name);
std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath);
std::string getString();
string_t getStringWithContext();
bool getBool();
std::vector<Symbol> getAttrs();
bool isDerivation();
Value & forceValue();
/* Force creation of the .drv file in the Nix store. */
StorePath forceDerivation();
};
}


@@ -199,6 +199,18 @@ string showType(const Value & v)
}
bool Value::isTrivial() const
{
return
type != tApp
&& type != tPrimOpApp
&& (type != tThunk
|| (dynamic_cast<ExprAttrs *>(thunk.expr)
&& ((ExprAttrs *) thunk.expr)->dynamicAttrs.empty())
|| dynamic_cast<ExprLambda *>(thunk.expr));
}
#if HAVE_BOEHMGC
/* Called when the Boehm GC runs out of memory. */
static void * oomHandler(size_t requested)
@@ -337,6 +349,9 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sOutputHashAlgo(symbols.create("outputHashAlgo"))
, sOutputHashMode(symbols.create("outputHashMode"))
, sRecurseForDerivations(symbols.create("recurseForDerivations"))
, sDescription(symbols.create("description"))
, sSelf(symbols.create("self"))
, sEpsilon(symbols.create(""))
, repair(NoRepair)
, store(store)
, baseEnv(allocEnv(128))
@@ -782,7 +797,7 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
}
void EvalState::evalFile(const Path & path_, Value & v)
void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
{
auto path = checkSourcePath(path_);
@@ -811,6 +826,11 @@ void EvalState::evalFile(const Path & path_, Value & v)
fileParseCache[path2] = e;
try {
// Enforce that 'flake.nix' is a direct attrset, not a
// computation.
if (mustBeTrivial &&
!(dynamic_cast<ExprAttrs *>(e)))
throw Error("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
addErrorTrace(e, "while evaluating the file '%1%':", path2);
@@ -1586,6 +1606,18 @@ string EvalState::forceString(Value & v, const Pos & pos)
}
/* Decode a context string !<name>!<path> into a pair <path,
name>. */
std::pair<string, string> decodeContext(std::string_view s)
{
if (s.at(0) == '!') {
size_t index = s.find("!", 1);
return {std::string(s.substr(index + 1)), std::string(s.substr(1, index - 1))};
} else
return {s.at(0) == '/' ? std::string(s) : std::string(s.substr(1)), ""};
}
void copyContext(const Value & v, PathSet & context)
{
if (v.string.context)
@@ -1594,6 +1626,17 @@ void copyContext(const Value & v, PathSet & context)
}
std::vector<std::pair<Path, std::string>> Value::getContext()
{
std::vector<std::pair<Path, std::string>> res;
assert(type == tString);
if (string.context)
for (const char * * p = string.context; *p; ++p)
res.push_back(decodeContext(*p));
return res;
}
string EvalState::forceString(Value & v, PathSet & context, const Pos & pos)
{
string s = forceString(v, pos);


@@ -4,13 +4,13 @@
#include "value.hh"
#include "nixexpr.hh"
#include "symbol-table.hh"
#include "hash.hh"
#include "config.hh"
#include <regex>
#include <map>
#include <optional>
#include <unordered_map>
#include <mutex>
namespace nix {
@@ -75,7 +75,8 @@ public:
sFile, sLine, sColumn, sFunctor, sToString,
sRight, sWrong, sStructuredAttrs, sBuilder, sArgs,
sOutputHash, sOutputHashAlgo, sOutputHashMode,
sRecurseForDerivations;
sRecurseForDerivations,
sDescription, sSelf, sEpsilon;
Symbol sDerivationNix;
/* If set, force copying files to the Nix store even if they
@@ -90,6 +91,7 @@ public:
const ref<Store> store;
private:
SrcToStore srcToStore;
@@ -152,8 +154,9 @@ public:
Expr * parseStdin();
/* Evaluate an expression read from the given file to normal
form. */
void evalFile(const Path & path, Value & v);
form. Optionally enforce that the top-level expression is
trivial (i.e. doesn't require arbitrary computation). */
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);
void resetFileCache();
@@ -330,7 +333,7 @@ string showType(const Value & v);
/* Decode a context string !<name>!<path> into a pair <path,
name>. */
std::pair<string, string> decodeContext(const string & s);
std::pair<string, string> decodeContext(std::string_view s);
/* If `path' refers to a directory, then append "/default.nix". */
Path resolveExprPath(Path path);


@@ -0,0 +1,56 @@
lockFileStr: rootSrc: rootSubdir:
let
lockFile = builtins.fromJSON lockFileStr;
allNodes =
builtins.mapAttrs
(key: node:
let
sourceInfo =
if key == lockFile.root
then rootSrc
else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
subdir = if key == lockFile.root then rootSubdir else node.locked.dir or "";
flake = import (sourceInfo + (if subdir != "" then "/" else "") + subdir + "/flake.nix");
inputs = builtins.mapAttrs
(inputName: inputSpec: allNodes.${resolveInput inputSpec})
(node.inputs or {});
# Resolve an input spec into a node name. An input spec is
# either a node name, or a 'follows' path from the root
# node.
resolveInput = inputSpec:
if builtins.isList inputSpec
then getInputByPath lockFile.root inputSpec
else inputSpec;
# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
# root node, returning the final node.
getInputByPath = nodeName: path:
if path == []
then nodeName
else
getInputByPath
# Since this could be a 'follows' input, call resolveInput.
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
(builtins.tail path);
outputs = flake.outputs (inputs // { self = result; });
result = outputs // sourceInfo // { inherit inputs; inherit outputs; inherit sourceInfo; };
in
if node.flake or true then
assert builtins.isFunction flake.outputs;
result
else
sourceInfo
)
lockFile.nodes;
in allNodes.${lockFile.root}
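The resolveInput/getInputByPath pair is what makes 'follows' work: a list-valued input spec is a path walked from the root node. A self-contained sketch with made-up nodes (hypothetical data, not this repository's lock file):

  let
    nodes = {
      root    = { inputs = { dwarffs = "dwarffs"; nixpkgs = "nixpkgs"; }; };
      dwarffs = { inputs = { nixpkgs = [ "nixpkgs" ]; }; };   # a 'follows' path
      nixpkgs = { };
    };
    resolveInput = inputSpec:
      if builtins.isList inputSpec
      then getInputByPath "root" inputSpec
      else inputSpec;
    getInputByPath = nodeName: path:
      if path == [] then nodeName
      else getInputByPath
        (resolveInput nodes.${nodeName}.inputs.${builtins.head path})
        (builtins.tail path);
  in resolveInput nodes.dwarffs.inputs.nixpkgs   # evaluates to "nixpkgs"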

609
src/libexpr/flake/flake.cc Normal file

@@ -0,0 +1,609 @@
#include "flake.hh"
#include "lockfile.hh"
#include "primops.hh"
#include "eval-inline.hh"
#include "store-api.hh"
#include "fetchers.hh"
#include "finally.hh"
namespace nix {
using namespace flake;
namespace flake {
typedef std::pair<Tree, FlakeRef> FetchedFlake;
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
static std::optional<FetchedFlake> lookupInFlakeCache(
const FlakeCache & flakeCache,
const FlakeRef & flakeRef)
{
// FIXME: inefficient.
for (auto & i : flakeCache) {
if (flakeRef == i.first) {
debug("mapping '%s' to previously seen input '%s' -> '%s",
flakeRef, i.first, i.second.second);
return i.second;
}
}
return std::nullopt;
}
static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
EvalState & state,
const FlakeRef & originalRef,
bool allowLookup,
FlakeCache & flakeCache)
{
auto fetched = lookupInFlakeCache(flakeCache, originalRef);
FlakeRef resolvedRef = originalRef;
if (!fetched) {
if (originalRef.input.isDirect()) {
fetched.emplace(originalRef.fetchTree(state.store));
} else {
if (allowLookup) {
resolvedRef = originalRef.resolve(state.store);
auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
flakeCache.push_back({resolvedRef, fetchedResolved.value()});
fetched.emplace(fetchedResolved.value());
}
else {
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
}
}
flakeCache.push_back({originalRef, fetched.value()});
}
auto [tree, lockedRef] = fetched.value();
debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef);
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
return {std::move(tree), resolvedRef, lockedRef};
}
static void expectType(EvalState & state, ValueType type,
Value & value, const Pos & pos)
{
if (value.type == tThunk && value.isTrivial())
state.forceValue(value, pos);
if (value.type != type)
throw Error("expected %s but got %s at %s",
showType(type), showType(value.type), pos);
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state, Value * value, const Pos & pos);
static FlakeInput parseFlakeInput(EvalState & state,
const std::string & inputName, Value * value, const Pos & pos)
{
expectType(state, tAttrs, *value, pos);
FlakeInput input;
auto sInputs = state.symbols.create("inputs");
auto sUrl = state.symbols.create("url");
auto sFlake = state.symbols.create("flake");
auto sFollows = state.symbols.create("follows");
fetchers::Attrs attrs;
std::optional<std::string> url;
for (nix::Attr attr : *(value->attrs)) {
try {
if (attr.name == sUrl) {
expectType(state, tString, *attr.value, *attr.pos);
url = attr.value->string.s;
attrs.emplace("url", *url);
} else if (attr.name == sFlake) {
expectType(state, tBool, *attr.value, *attr.pos);
input.isFlake = attr.value->boolean;
} else if (attr.name == sInputs) {
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos);
} else if (attr.name == sFollows) {
expectType(state, tString, *attr.value, *attr.pos);
input.follows = parseInputPath(attr.value->string.s);
} else {
state.forceValue(*attr.value);
if (attr.value->type == tString)
attrs.emplace(attr.name, attr.value->string.s);
else
throw TypeError("flake input attribute '%s' is %s while a string is expected",
attr.name, showType(*attr.value));
}
} catch (Error & e) {
e.addTrace(*attr.pos, hintfmt("in flake attribute '%s'", attr.name));
throw;
}
}
if (attrs.count("type"))
try {
input.ref = FlakeRef::fromAttrs(attrs);
} catch (Error & e) {
e.addTrace(pos, hintfmt("in flake input"));
throw;
}
else {
attrs.erase("url");
if (!attrs.empty())
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos);
if (url)
input.ref = parseFlakeRef(*url, {}, true);
}
if (!input.follows && !input.ref)
input.ref = FlakeRef::fromAttrs({{"type", "indirect"}, {"id", inputName}});
return input;
}
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state, Value * value, const Pos & pos)
{
std::map<FlakeId, FlakeInput> inputs;
expectType(state, tAttrs, *value, pos);
for (nix::Attr & inputAttr : *(*value).attrs) {
inputs.emplace(inputAttr.name,
parseFlakeInput(state,
inputAttr.name,
inputAttr.value,
*inputAttr.pos));
}
return inputs;
}
static Flake getFlake(
EvalState & state,
const FlakeRef & originalRef,
bool allowLookup,
FlakeCache & flakeCache)
{
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, allowLookup, flakeCache);
// Guard against symlink attacks.
auto flakeFile = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir + "/flake.nix");
if (!isInDir(flakeFile, sourceInfo.actualPath))
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
lockedRef, state.store->printStorePath(sourceInfo.storePath));
Flake flake {
.originalRef = originalRef,
.resolvedRef = resolvedRef,
.lockedRef = lockedRef,
.sourceInfo = std::make_shared<fetchers::Tree>(std::move(sourceInfo))
};
if (!pathExists(flakeFile))
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
Value vInfo;
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
expectType(state, tAttrs, vInfo, Pos(foFile, state.symbols.create(flakeFile), 0, 0));
auto sEdition = state.symbols.create("edition"); // FIXME: remove soon
if (vInfo.attrs->get(sEdition))
warn("flake '%s' has deprecated attribute 'edition'", lockedRef);
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, tString, *description->value, *description->pos);
flake.description = description->value->string.s;
}
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs->get(sInputs))
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos);
auto sOutputs = state.symbols.create("outputs");
if (auto outputs = vInfo.attrs->get(sOutputs)) {
expectType(state, tLambda, *outputs->value, *outputs->pos);
flake.vOutputs = allocRootValue(outputs->value);
if ((*flake.vOutputs)->lambda.fun->matchAttrs) {
for (auto & formal : (*flake.vOutputs)->lambda.fun->formals->formals) {
if (formal.name != state.sSelf)
flake.inputs.emplace(formal.name, FlakeInput {
.ref = parseFlakeRef(formal.name)
});
}
}
} else
throw Error("flake '%s' lacks attribute 'outputs'", lockedRef);
for (auto & attr : *vInfo.attrs) {
if (attr.name != sEdition &&
attr.name != state.sDescription &&
attr.name != sInputs &&
attr.name != sOutputs)
throw Error("flake '%s' has an unsupported attribute '%s', at %s",
lockedRef, attr.name, *attr.pos);
}
return flake;
}
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
{
FlakeCache flakeCache;
return getFlake(state, originalRef, allowLookup, flakeCache);
}
/* Compute an in-memory lock file for the specified top-level flake,
and optionally write it to file, if the flake is writable. */
LockedFlake lockFlake(
EvalState & state,
const FlakeRef & topRef,
const LockFlags & lockFlags)
{
settings.requireExperimentalFeature("flakes");
FlakeCache flakeCache;
auto flake = getFlake(state, topRef, lockFlags.useRegistries, flakeCache);
// FIXME: symlink attack
auto oldLockFile = LockFile::read(
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock");
debug("old lock file: %s", oldLockFile);
// FIXME: check whether all overrides are used.
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> overridesUsed, updatesUsed;
for (auto & i : lockFlags.inputOverrides)
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
LockFile newLockFile;
std::vector<FlakeRef> parents;
std::function<void(
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode)>
computeLocks;
computeLocks = [&](
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode)
{
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
/* Get the overrides (i.e. attributes of the form
'inputs.nixops.inputs.nixpkgs.url = ...'). */
// FIXME: check this
for (auto & [id, input] : flake.inputs) {
for (auto & [idOverride, inputOverride] : input.overrides) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
inputPath.push_back(idOverride);
overrides.insert_or_assign(inputPath, inputOverride);
}
}
/* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */
for (auto & [id, input2] : flakeInputs) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
auto inputPathS = printInputPath(inputPath);
debug("computing input '%s'", inputPathS);
/* Do we have an override for this input from one of the
ancestors? */
auto i = overrides.find(inputPath);
bool hasOverride = i != overrides.end();
if (hasOverride) overridesUsed.insert(inputPath);
auto & input = hasOverride ? i->second : input2;
/* Resolve 'follows' later (since it may refer to an input
path we haven't processed yet). */
if (input.follows) {
InputPath target;
if (hasOverride || input.absolute)
/* 'follows' from an override is relative to the
root of the graph. */
target = *input.follows;
else {
/* Otherwise, it's relative to the current flake. */
target = inputPathPrefix;
for (auto & i : *input.follows) target.push_back(i);
}
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
node->inputs.insert_or_assign(id, target);
continue;
}
assert(input.ref);
/* Do we have an entry in the existing lock file? And we
don't have a --update-input flag for this input? */
std::shared_ptr<LockedNode> oldLock;
updatesUsed.insert(inputPath);
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
if (auto oldLock2 = get(oldNode->inputs, id))
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
oldLock = *oldLock3;
if (oldLock
&& oldLock->originalRef == *input.ref
&& !hasOverride)
{
debug("keeping existing input '%s'", inputPathS);
/* Copy the input from the old lock since its flakeref
didn't change and there is no override from a
higher level flake. */
auto childNode = std::make_shared<LockedNode>(
oldLock->lockedRef, oldLock->originalRef, oldLock->isFlake);
node->inputs.insert_or_assign(id, childNode);
/* If we have an --update-input flag for an input
of this input, then we must fetch the flake to
update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
auto hasChildUpdate =
lb != lockFlags.inputUpdates.end()
&& lb->size() > inputPath.size()
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
if (hasChildUpdate) {
auto inputFlake = getFlake(
state, oldLock->lockedRef, false, flakeCache);
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock);
} else {
/* No need to fetch this flake; we can be
lazy. However, there may be new overrides on the
inputs of this flake, so we need to check
those. */
FlakeInputs fakeInputs;
for (auto & i : oldLock->inputs) {
if (auto lockedNode = std::get_if<0>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
.ref = (*lockedNode)->originalRef,
.isFlake = (*lockedNode)->isFlake,
});
} else if (auto follows = std::get_if<1>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
.follows = *follows,
.absolute = true
});
}
}
computeLocks(fakeInputs, childNode, inputPath, oldLock);
}
} else {
/* We need to create a new lock file entry. So fetch
this input. */
debug("creating new input '%s'", inputPathS);
if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
if (input.isFlake) {
auto inputFlake = getFlake(state, *input.ref, lockFlags.useRegistries, flakeCache);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
"original" field, rather than the
override. This ensures that the override isn't
nuked the next time we update the lock
file. That is, overrides are sticky unless you
use --no-write-lock-file. */
auto childNode = std::make_shared<LockedNode>(
inputFlake.lockedRef, input2.ref ? *input2.ref : *input.ref);
node->inputs.insert_or_assign(id, childNode);
/* Guard against circular flake imports. */
for (auto & parent : parents)
if (parent == *input.ref)
throw Error("found circular import of flake '%s'", parent);
parents.push_back(*input.ref);
Finally cleanup([&]() { parents.pop_back(); });
/* Recursively process the inputs of this
flake. Also, unless we already have this flake
in the top-level lock file, use this flake's
own lock file. */
computeLocks(
inputFlake.inputs, childNode, inputPath,
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root);
}
else {
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, lockFlags.useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
}
}
}
};
computeLocks(
flake.inputs, newLockFile.root, {},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root);
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
warn("the flag '--override-input %s %s' does not match any input",
printInputPath(i.first), i.second);
for (auto & i : lockFlags.inputUpdates)
if (!updatesUsed.count(i))
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
/* Check 'follows' inputs. */
newLockFile.check();
debug("new lock file: %s", newLockFile);
/* Check whether we need to / can write the new lock file. */
if (!(newLockFile == oldLockFile)) {
auto diff = LockFile::diff(oldLockFile, newLockFile);
if (lockFlags.writeLockFile) {
if (auto sourcePath = topRef.input.getSourcePath()) {
if (!newLockFile.isImmutable()) {
if (settings.warnDirty)
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
} else {
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
auto path = *sourcePath + "/" + relPath;
bool lockFileExists = pathExists(path);
if (lockFileExists) {
auto s = chomp(diff);
if (s.empty())
warn("updating lock file '%s'", path);
else
warn("updating lock file '%s':\n%s", path, s);
} else
warn("creating lock file '%s'", path);
newLockFile.write(path);
topRef.input.markChangedFile(
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
lockFlags.commitLockFile
? std::optional<std::string>(fmt("%s: %s\n\nFlake input changes:\n\n%s",
relPath, lockFileExists ? "Update" : "Add", diff))
: std::nullopt);
/* Rewriting the lockfile changed the top-level
repo, so we should re-read it. FIXME: we could
also just clear the 'rev' field... */
auto prevLockedRef = flake.lockedRef;
FlakeCache dummyCache;
flake = getFlake(state, topRef, lockFlags.useRegistries, dummyCache);
if (lockFlags.commitLockFile &&
flake.lockedRef.input.getRev() &&
prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
/* Make sure that we picked up the change,
i.e. the tree should usually be dirty
now. Corner case: we could have reverted from a
dirty to a clean tree! */
if (flake.lockedRef.input == prevLockedRef.input
&& !flake.lockedRef.input.isImmutable())
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
}
} else
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
} else
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
}
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
}
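Illustration (not part of this change): the hasChildUpdate test in computeLocks relies on std::set ordering InputPaths lexicographically, so lower_bound on the current path lands on the first candidate that could extend it. A minimal standalone sketch of that prefix test, with std::vector<std::string> standing in for InputPath and hypothetical names:

#include <algorithm>
#include <iostream>
#include <set>
#include <string>
#include <vector>

using InputPath = std::vector<std::string>;

// True if 'updates' contains a strict descendant of 'inputPath'
// (same leading elements, but longer), as in computeLocks.
bool hasChildUpdate(const std::set<InputPath> & updates, const InputPath & inputPath)
{
    auto lb = updates.lower_bound(inputPath);
    return lb != updates.end()
        && lb->size() > inputPath.size()
        && std::equal(inputPath.begin(), inputPath.end(), lb->begin());
}

int main()
{
    std::set<InputPath> updates = { { "nixops", "nixpkgs" } };
    std::cout << hasChildUpdate(updates, { "nixops" }) << "\n";            // 1
    std::cout << hasChildUpdate(updates, { "nixops", "nixpkgs" }) << "\n"; // 0 (not a strict descendant)
    std::cout << hasChildUpdate(updates, { "nixpkgs" }) << "\n";           // 0
}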
void callFlake(EvalState & state,
const LockedFlake & lockedFlake,
Value & vRes)
{
auto vLocks = state.allocValue();
auto vRootSrc = state.allocValue();
auto vRootSubdir = state.allocValue();
auto vTmp1 = state.allocValue();
auto vTmp2 = state.allocValue();
mkString(*vLocks, lockedFlake.lockFile.to_string());
emitTreeAttrs(state, *lockedFlake.flake.sourceInfo, lockedFlake.flake.lockedRef.input, *vRootSrc);
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
static RootValue vCallFlake = nullptr;
if (!vCallFlake) {
vCallFlake = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "call-flake.nix.gen.hh"
, "/"), **vCallFlake);
}
state.callFunction(**vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
throw Error("cannot call 'getFlake' on mutable flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);
callFlake(state,
lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
.useRegistries = !evalSettings.pureEval,
.allowMutable = !evalSettings.pureEval,
}),
v);
}
static RegisterPrimOp r2("__getFlake", 1, prim_getFlake, "flakes");
}
Fingerprint LockedFlake::getFingerprint() const
{
// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
fmt("%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),
lockFile));
}
Flake::~Flake() { }
}

111
src/libexpr/flake/flake.hh Normal file
View File

@ -0,0 +1,111 @@
#pragma once
#include "types.hh"
#include "flakeref.hh"
#include "lockfile.hh"
#include "value.hh"
namespace nix {
class EvalState;
namespace fetchers { struct Tree; }
namespace flake {
struct FlakeInput;
typedef std::map<FlakeId, FlakeInput> FlakeInputs;
struct FlakeInput
{
std::optional<FlakeRef> ref;
bool isFlake = true;
std::optional<InputPath> follows;
bool absolute = false; // whether 'follows' is resolved from the root of the input graph, not relative to this flake
FlakeInputs overrides;
};
struct Flake
{
FlakeRef originalRef;
FlakeRef resolvedRef;
FlakeRef lockedRef;
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
FlakeInputs inputs;
RootValue vOutputs;
~Flake();
};
Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
/* Fingerprint of a locked flake; used as a cache key. */
typedef Hash Fingerprint;
struct LockedFlake
{
Flake flake;
LockFile lockFile;
Fingerprint getFingerprint() const;
};
struct LockFlags
{
/* Whether to ignore the existing lock file, creating a new one
from scratch. */
bool recreateLockFile = false;
/* Whether to update the lock file at all. If set to false, if any
change to the lock file is needed (e.g. when an input has been
added to flake.nix), you get a fatal error. */
bool updateLockFile = true;
/* Whether to write the lock file to disk. If set to true, if
any changes to the lock file are needed and the flake is not
writable (i.e. is not a local Git working tree or similar), you
get a fatal error. If set to false, Nix will use the modified
lock file in memory only, without writing it to disk. */
bool writeLockFile = true;
/* Whether to use the registries to look up indirect flake
references like 'nixpkgs'. */
bool useRegistries = true;
/* Whether mutable flake references (i.e. those without a Git
revision or similar) without a corresponding lock are
allowed. Mutable flake references with a lock are always
allowed. */
bool allowMutable = true;
/* Whether to commit changes to flake.lock. */
bool commitLockFile = false;
/* Flake inputs to be overridden. */
std::map<InputPath, FlakeRef> inputOverrides;
/* Flake inputs to be updated. This means that any existing lock
for those inputs will be ignored. */
std::set<InputPath> inputUpdates;
};
LockedFlake lockFlake(
EvalState & state,
const FlakeRef & flakeRef,
const LockFlags & lockFlags);
void callFlake(
EvalState & state,
const LockedFlake & lockedFlake,
Value & v);
}
void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const fetchers::Input & input,
Value & v);
}
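Illustration (not part of this change): the flags above interact in lockFlake roughly as follows when the computed lock file differs from the existing one. This is a simplified standalone model with hypothetical types and an example path, not the real API:

#include <iostream>
#include <optional>
#include <string>

struct Flags {
    bool updateLockFile = true;
    bool writeLockFile = true;
};

// Mirrors the decision at the end of lockFlake(): given a changed lock
// file, is it written to disk, kept in memory, or rejected?
std::string decide(const Flags & flags,
    const std::optional<std::string> & sourcePath, // local checkout, if any
    bool newLockIsImmutable)
{
    if (!flags.writeLockFile)
        return "keep the modified lock file in memory only";
    if (!sourcePath)
        return "error: flake is not writable";
    if (!newLockIsImmutable)
        return "warn: not writing lock file (it has a mutable input)";
    if (!flags.updateLockFile)
        return "error: lock file changes are not allowed";
    return "write " + *sourcePath + "/flake.lock";
}

int main()
{
    Flags defaults;
    std::cout << decide(defaults, std::string("/home/alice/my-flake"), true) << "\n";

    Flags noUpdate;
    noUpdate.updateLockFile = false;
    std::cout << decide(noUpdate, std::string("/home/alice/my-flake"), true) << "\n";

    Flags noWrite;
    noWrite.writeLockFile = false;
    std::cout << decide(noWrite, std::nullopt, true) << "\n";
}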

View File

@ -0,0 +1,199 @@
#include "flakeref.hh"
#include "store-api.hh"
#include "url.hh"
#include "fetchers.hh"
#include "registry.hh"
namespace nix {
#if 0
// 'dir' path elements cannot start with a '.'. We also reject
// potentially dangerous characters like ';'.
const static std::string subDirElemRegex = "(?:[a-zA-Z0-9_-]+[a-zA-Z0-9._-]*)";
const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRegex + ")*";
#endif
std::string FlakeRef::to_string() const
{
auto url = input.toURL();
if (subdir != "")
url.query.insert_or_assign("dir", subdir);
return url.to_string();
}
fetchers::Attrs FlakeRef::toAttrs() const
{
auto attrs = input.toAttrs();
if (subdir != "")
attrs.emplace("dir", subdir);
return attrs;
}
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
{
str << flakeRef.to_string();
return str;
}
bool FlakeRef::operator ==(const FlakeRef & other) const
{
return input == other.input && subdir == other.subdir;
}
FlakeRef FlakeRef::resolve(ref<Store> store) const
{
auto [input2, extraAttrs] = lookupInRegistries(store, input);
return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir));
}
FlakeRef parseFlakeRef(
const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
{
auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir, allowMissing);
if (fragment != "")
throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
return flakeRef;
}
std::optional<FlakeRef> maybeParseFlakeRef(
const std::string & url, const std::optional<Path> & baseDir)
{
try {
return parseFlakeRef(url, baseDir);
} catch (Error &) {
return {};
}
}
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
{
using namespace fetchers;
static std::string fnRegex = "[0-9a-zA-Z-._~!$&'\"()*+,;=]+";
static std::regex pathUrlRegex(
"(/?" + fnRegex + "(?:/" + fnRegex + ")*/?)"
+ "(?:\\?(" + queryRegex + "))?"
+ "(?:#(" + queryRegex + "))?",
std::regex::ECMAScript);
static std::regex flakeRegex(
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
+ "(?:#(" + queryRegex + "))?",
std::regex::ECMAScript);
std::smatch match;
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
if (std::regex_match(url, match, flakeRegex)) {
auto parsedURL = ParsedURL{
.url = url,
.base = "flake:" + std::string(match[1]),
.scheme = "flake",
.authority = "",
.path = match[1],
};
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL), ""),
percentDecode(std::string(match[6])));
}
/* Check if 'url' is a path (either absolute or relative to
'baseDir'). If so, search upward to the root of the repo
(i.e. the directory containing .git). */
else if (std::regex_match(url, match, pathUrlRegex)) {
std::string path = match[1];
if (!baseDir && !hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
path = absPath(path, baseDir, true);
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
auto fragment = percentDecode(std::string(match[3]));
auto flakeRoot = path;
std::string subdir;
while (flakeRoot != "/") {
if (pathExists(flakeRoot + "/.git")) {
auto base = std::string("git+file://") + flakeRoot;
auto parsedURL = ParsedURL{
.url = base, // FIXME
.base = base,
.scheme = "git+file",
.authority = "",
.path = flakeRoot,
.query = decodeQuery(match[2]),
};
if (subdir != "") {
if (parsedURL.query.count("dir"))
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
parsedURL.query.insert_or_assign("dir", subdir);
}
if (pathExists(flakeRoot + "/.git/shallow"))
parsedURL.query.insert_or_assign("shallow", "1");
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
fragment);
}
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
flakeRoot = dirOf(flakeRoot);
}
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "path");
attrs.insert_or_assign("path", path);
return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment);
}
else {
auto parsedURL = parseURL(url);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
fragment);
}
}
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir)
{
try {
return parseFlakeRefWithFragment(url, baseDir);
} catch (Error & e) {
return {};
}
}
FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
{
auto attrs2(attrs);
attrs2.erase("dir");
return FlakeRef(
fetchers::Input::fromAttrs(std::move(attrs2)),
fetchers::maybeGetStrAttr(attrs, "dir").value_or(""));
}
std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{
auto [tree, lockedInput] = input.fetch(store);
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
}
}
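Illustration (not part of this change): the path branch of parseFlakeRefWithFragment climbs from the given directory to the enclosing Git repository, accumulating the traversed components as the 'dir' parameter. A standalone sketch of that climb using std::filesystem instead of Nix's path helpers (hypothetical function name):

#include <filesystem>
#include <iostream>
#include <optional>
#include <string>
#include <utility>

namespace fs = std::filesystem;

// Walk up from 'path' until a directory containing '.git' is found.
// Returns the repository root and the subdirectory of 'path' inside it.
std::optional<std::pair<fs::path, std::string>> findRepoRoot(fs::path path)
{
    std::string subdir;
    path = fs::absolute(path).lexically_normal();
    while (path != path.root_path()) {
        if (fs::exists(path / ".git"))
            return std::make_pair(path, subdir);
        subdir = path.filename().string() + (subdir.empty() ? "" : "/" + subdir);
        path = path.parent_path();
    }
    return std::nullopt; // not inside a Git repository
}

int main()
{
    if (auto res = findRepoRoot("."))
        std::cout << "root: " << res->first << ", dir: " << res->second << "\n";
    else
        std::cout << "not inside a Git working tree\n";
}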

View File

@ -0,0 +1,53 @@
#pragma once
#include "types.hh"
#include "hash.hh"
#include "fetchers.hh"
#include <variant>
namespace nix {
class Store;
typedef std::string FlakeId;
struct FlakeRef
{
fetchers::Input input;
Path subdir;
bool operator==(const FlakeRef & other) const;
FlakeRef(fetchers::Input && input, const Path & subdir)
: input(std::move(input)), subdir(subdir)
{ }
// FIXME: change to operator <<.
std::string to_string() const;
fetchers::Attrs toAttrs() const;
FlakeRef resolve(ref<Store> store) const;
static FlakeRef fromAttrs(const fetchers::Attrs & attrs);
std::pair<fetchers::Tree, FlakeRef> fetchTree(ref<Store> store) const;
};
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
FlakeRef parseFlakeRef(
const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);
std::optional<FlakeRef> maybeParseFlakeRef(
const std::string & url, const std::optional<Path> & baseDir = {});
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
const std::string & url, const std::optional<Path> & baseDir = {});
}

View File

@ -0,0 +1,338 @@
#include "lockfile.hh"
#include "store-api.hh"
#include <nlohmann/json.hpp>
namespace nix::flake {
FlakeRef getFlakeRef(
const nlohmann::json & json,
const char * attr,
const char * info)
{
auto i = json.find(attr);
if (i != json.end()) {
auto attrs = jsonToAttrs(*i);
// FIXME: remove when we drop support for version 5.
if (info) {
auto j = json.find(info);
if (j != json.end()) {
for (auto k : jsonToAttrs(*j))
attrs.insert_or_assign(k.first, k.second);
}
}
return FlakeRef::fromAttrs(attrs);
}
throw Error("attribute '%s' missing in lock file", attr);
}
LockedNode::LockedNode(const nlohmann::json & json)
: lockedRef(getFlakeRef(json, "locked", "info"))
, originalRef(getFlakeRef(json, "original", nullptr))
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
{
if (!lockedRef.input.isImmutable())
throw Error("lockfile contains mutable lock '%s'", attrsToJson(lockedRef.input.toAttrs()));
}
StorePath LockedNode::computeStorePath(Store & store) const
{
return lockedRef.input.computeStorePath(store);
}
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
auto pos = root;
if (!pos) return {};
for (auto & elem : path) {
if (auto i = get(pos->inputs, elem)) {
if (auto node = std::get_if<0>(&*i))
pos = *node;
else if (auto follows = std::get_if<1>(&*i)) {
pos = findInput(*follows);
if (!pos) return {};
}
} else
return {};
}
return pos;
}
LockFile::LockFile(const nlohmann::json & json, const Path & path)
{
auto version = json.value("version", 0);
if (version < 5 || version > 7)
throw Error("lock file '%s' has unsupported version %d", path, version);
std::unordered_map<std::string, std::shared_ptr<Node>> nodeMap;
std::function<void(Node & node, const nlohmann::json & jsonNode)> getInputs;
getInputs = [&](Node & node, const nlohmann::json & jsonNode)
{
if (jsonNode.find("inputs") == jsonNode.end()) return;
for (auto & i : jsonNode["inputs"].items()) {
if (i.value().is_array()) {
InputPath path;
for (auto & j : i.value())
path.push_back(j);
node.inputs.insert_or_assign(i.key(), path);
} else {
std::string inputKey = i.value();
auto k = nodeMap.find(inputKey);
if (k == nodeMap.end()) {
auto jsonNode2 = json["nodes"][inputKey];
auto input = std::make_shared<LockedNode>(jsonNode2);
k = nodeMap.insert_or_assign(inputKey, input).first;
getInputs(*input, jsonNode2);
}
if (auto child = std::dynamic_pointer_cast<LockedNode>(k->second))
node.inputs.insert_or_assign(i.key(), child);
else
// FIXME: replace by follows node
throw Error("lock file contains cycle to root node");
}
}
};
std::string rootKey = json["root"];
nodeMap.insert_or_assign(rootKey, root);
getInputs(*root, json["nodes"][rootKey]);
// FIXME: check that there are no cycles in version >= 7. Cycles
// between inputs are only possible using 'follows' indirections.
// Once we drop support for version <= 6, we can simplify the code
// a bit since we don't need to worry about cycles.
}
nlohmann::json LockFile::toJson() const
{
nlohmann::json nodes;
std::unordered_map<std::shared_ptr<const Node>, std::string> nodeKeys;
std::unordered_set<std::string> keys;
std::function<std::string(const std::string & key, std::shared_ptr<const Node> node)> dumpNode;
dumpNode = [&](std::string key, std::shared_ptr<const Node> node) -> std::string
{
auto k = nodeKeys.find(node);
if (k != nodeKeys.end())
return k->second;
if (!keys.insert(key).second) {
for (int n = 2; ; ++n) {
auto k = fmt("%s_%d", key, n);
if (keys.insert(k).second) {
key = k;
break;
}
}
}
nodeKeys.insert_or_assign(node, key);
auto n = nlohmann::json::object();
if (!node->inputs.empty()) {
auto inputs = nlohmann::json::object();
for (auto & i : node->inputs) {
if (auto child = std::get_if<0>(&i.second)) {
inputs[i.first] = dumpNode(i.first, *child);
} else if (auto follows = std::get_if<1>(&i.second)) {
auto arr = nlohmann::json::array();
for (auto & x : *follows)
arr.push_back(x);
inputs[i.first] = std::move(arr);
}
}
n["inputs"] = std::move(inputs);
}
if (auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(node)) {
n["original"] = fetchers::attrsToJson(lockedNode->originalRef.toAttrs());
n["locked"] = fetchers::attrsToJson(lockedNode->lockedRef.toAttrs());
if (!lockedNode->isFlake) n["flake"] = false;
}
nodes[key] = std::move(n);
return key;
};
nlohmann::json json;
json["version"] = 7;
json["root"] = dumpNode("root", root);
json["nodes"] = std::move(nodes);
return json;
}
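Illustration (not part of this change): dumpNode keeps node keys unique by appending a numeric suffix when an input name has already been used, so two distinct inputs both called 'nixpkgs' end up as 'nixpkgs' and 'nixpkgs_2'. The naming scheme in isolation (hypothetical function name):

#include <iostream>
#include <string>
#include <unordered_set>

// Return 'key' if unused, otherwise the first unused 'key_2', 'key_3', ...
std::string uniqueKey(std::unordered_set<std::string> & keys, std::string key)
{
    if (keys.insert(key).second) return key;
    for (int n = 2; ; ++n) {
        auto candidate = key + "_" + std::to_string(n);
        if (keys.insert(candidate).second) return candidate;
    }
}

int main()
{
    std::unordered_set<std::string> keys;
    std::cout << uniqueKey(keys, "nixpkgs") << "\n"; // nixpkgs
    std::cout << uniqueKey(keys, "nixpkgs") << "\n"; // nixpkgs_2
    std::cout << uniqueKey(keys, "nixpkgs") << "\n"; // nixpkgs_3
}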
std::string LockFile::to_string() const
{
return toJson().dump(2);
}
LockFile LockFile::read(const Path & path)
{
if (!pathExists(path)) return LockFile();
return LockFile(nlohmann::json::parse(readFile(path)), path);
}
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
{
stream << lockFile.toJson().dump(2);
return stream;
}
void LockFile::write(const Path & path) const
{
createDirs(dirOf(path));
writeFile(path, fmt("%s\n", *this));
}
bool LockFile::isImmutable() const
{
std::unordered_set<std::shared_ptr<const Node>> nodes;
std::function<void(std::shared_ptr<const Node> node)> visit;
visit = [&](std::shared_ptr<const Node> node)
{
if (!nodes.insert(node).second) return;
for (auto & i : node->inputs)
if (auto child = std::get_if<0>(&i.second))
visit(*child);
};
visit(root);
for (auto & i : nodes) {
if (i == root) continue;
auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
if (lockedNode && !lockedNode->lockedRef.input.isImmutable()) return false;
}
return true;
}
bool LockFile::operator ==(const LockFile & other) const
{
// FIXME: slow
return toJson() == other.toJson();
}
InputPath parseInputPath(std::string_view s)
{
InputPath path;
for (auto & elem : tokenizeString<std::vector<std::string>>(s, "/")) {
if (!std::regex_match(elem, flakeIdRegex))
throw UsageError("invalid flake input path element '%s'", elem);
path.push_back(elem);
}
return path;
}
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
{
std::unordered_set<std::shared_ptr<Node>> done;
std::map<InputPath, Node::Edge> res;
std::function<void(const InputPath & prefix, std::shared_ptr<Node> node)> recurse;
recurse = [&](const InputPath & prefix, std::shared_ptr<Node> node)
{
if (!done.insert(node).second) return;
for (auto &[id, input] : node->inputs) {
auto inputPath(prefix);
inputPath.push_back(id);
res.emplace(inputPath, input);
if (auto child = std::get_if<0>(&input))
recurse(inputPath, *child);
}
};
recurse({}, root);
return res;
}
std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
{
if (auto node = std::get_if<0>(&edge))
stream << "'" << (*node)->lockedRef << "'";
else if (auto follows = std::get_if<1>(&edge))
stream << fmt("follows '%s'", printInputPath(*follows));
return stream;
}
static bool equals(const Node::Edge & e1, const Node::Edge & e2)
{
if (auto n1 = std::get_if<0>(&e1))
if (auto n2 = std::get_if<0>(&e2))
return (*n1)->lockedRef == (*n2)->lockedRef;
if (auto f1 = std::get_if<1>(&e1))
if (auto f2 = std::get_if<1>(&e2))
return *f1 == *f2;
return false;
}
std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
{
auto oldFlat = oldLocks.getAllInputs();
auto newFlat = newLocks.getAllInputs();
auto i = oldFlat.begin();
auto j = newFlat.begin();
std::string res;
while (i != oldFlat.end() || j != newFlat.end()) {
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
res += fmt("* Added '%s': %s\n", printInputPath(j->first), j->second);
++j;
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
res += fmt("* Removed '%s'\n", printInputPath(i->first));
++i;
} else {
if (!equals(i->second, j->second)) {
res += fmt("* Updated '%s': %s -> %s\n",
printInputPath(i->first),
i->second,
j->second);
}
++i;
++j;
}
}
return res;
}
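Illustration (not part of this change): LockFile::diff walks the two sorted maps with a two-pointer merge, where keys only in the new map are additions, keys only in the old map are removals, and shared keys are compared for updates. The same pattern over plain std::map:

#include <iostream>
#include <map>
#include <string>

// Print added/removed/updated keys of two sorted maps, diff-style.
void diffMaps(const std::map<std::string, std::string> & oldM,
    const std::map<std::string, std::string> & newM)
{
    auto i = oldM.begin();
    auto j = newM.begin();
    while (i != oldM.end() || j != newM.end()) {
        if (j != newM.end() && (i == oldM.end() || i->first > j->first)) {
            std::cout << "* Added '" << j->first << "': " << j->second << "\n";
            ++j;
        } else if (i != oldM.end() && (j == newM.end() || i->first < j->first)) {
            std::cout << "* Removed '" << i->first << "'\n";
            ++i;
        } else {
            if (i->second != j->second)
                std::cout << "* Updated '" << i->first << "': "
                    << i->second << " -> " << j->second << "\n";
            ++i; ++j;
        }
    }
}

int main()
{
    diffMaps(
        { { "nixpkgs", "rev1" }, { "utils", "rev5" } },
        { { "nixpkgs", "rev2" }, { "home-manager", "rev9" } });
}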
void LockFile::check()
{
auto inputs = getAllInputs();
for (auto & [inputPath, input] : inputs) {
if (auto follows = std::get_if<1>(&input)) {
if (!follows->empty() && !get(inputs, *follows))
throw Error("input '%s' follows a non-existent input '%s'",
printInputPath(inputPath),
printInputPath(*follows));
}
}
}
void check();
std::string printInputPath(const InputPath & path)
{
return concatStringsSep("/", path);
}
}

View File

@ -0,0 +1,85 @@
#pragma once
#include "flakeref.hh"
#include <nlohmann/json_fwd.hpp>
namespace nix {
class Store;
struct StorePath;
}
namespace nix::flake {
using namespace fetchers;
typedef std::vector<FlakeId> InputPath;
struct LockedNode;
/* A node in the lock file. It has outgoing edges to other nodes (its
inputs). Only the root node has this type; all other nodes have
type LockedNode. */
struct Node : std::enable_shared_from_this<Node>
{
typedef std::variant<std::shared_ptr<LockedNode>, InputPath> Edge;
std::map<FlakeId, Edge> inputs;
virtual ~Node() { }
};
/* A non-root node in the lock file. */
struct LockedNode : Node
{
FlakeRef lockedRef, originalRef;
bool isFlake = true;
LockedNode(
const FlakeRef & lockedRef,
const FlakeRef & originalRef,
bool isFlake = true)
: lockedRef(lockedRef), originalRef(originalRef), isFlake(isFlake)
{ }
LockedNode(const nlohmann::json & json);
StorePath computeStorePath(Store & store) const;
};
struct LockFile
{
std::shared_ptr<Node> root = std::make_shared<Node>();
LockFile() {};
LockFile(const nlohmann::json & json, const Path & path);
nlohmann::json toJson() const;
std::string to_string() const;
static LockFile read(const Path & path);
void write(const Path & path) const;
bool isImmutable() const;
bool operator ==(const LockFile & other) const;
std::shared_ptr<Node> findInput(const InputPath & path);
std::map<InputPath, Node::Edge> getAllInputs() const;
static std::string diff(const LockFile & oldLocks, const LockFile & newLocks);
/* Check that every 'follows' input target exists. */
void check();
};
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile);
InputPath parseInputPath(std::string_view s);
std::string printInputPath(const InputPath & path);
}
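Illustration (not part of this change): each edge in this graph is either a concrete child node or a 'follows' redirection expressed as a path from the root, which LockFile::findInput chases. A minimal standalone model of that edge type, using std::get_if by type rather than by index and hypothetical names:

#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <variant>
#include <vector>

struct MiniNode;
using MiniPath = std::vector<std::string>;
// An edge either points at a concrete node or says "follow this path
// from the root instead", like Node::Edge.
using MiniEdge = std::variant<std::shared_ptr<MiniNode>, MiniPath>;

struct MiniNode {
    std::map<std::string, MiniEdge> inputs;
};

// Resolve 'path' starting at 'root', chasing 'follows' edges as needed.
std::shared_ptr<MiniNode> find(const std::shared_ptr<MiniNode> & root, const MiniPath & path)
{
    auto pos = root;
    for (auto & elem : path) {
        auto i = pos->inputs.find(elem);
        if (i == pos->inputs.end()) return nullptr;
        if (auto node = std::get_if<std::shared_ptr<MiniNode>>(&i->second))
            pos = *node;
        else if (auto follows = std::get_if<MiniPath>(&i->second)) {
            pos = find(root, *follows);
            if (!pos) return nullptr;
        }
    }
    return pos;
}

int main()
{
    auto root = std::make_shared<MiniNode>();
    auto nixpkgs = std::make_shared<MiniNode>();
    auto utils = std::make_shared<MiniNode>();
    root->inputs["nixpkgs"] = nixpkgs;
    root->inputs["utils"] = utils;
    // 'utils/nixpkgs' follows the top-level 'nixpkgs'.
    utils->inputs["nixpkgs"] = MiniPath{ "nixpkgs" };
    std::cout << (find(root, { "utils", "nixpkgs" }) == nixpkgs) << "\n"; // prints 1
}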

View File

@ -4,7 +4,12 @@ libexpr_NAME = libnixexpr
libexpr_DIR := $(d)
libexpr_SOURCES := $(wildcard $(d)/*.cc) $(wildcard $(d)/primops/*.cc) $(d)/lexer-tab.cc $(d)/parser-tab.cc
libexpr_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/primops/*.cc) \
$(wildcard $(d)/flake/*.cc) \
$(d)/lexer-tab.cc \
$(d)/parser-tab.cc
libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr
@ -34,4 +39,9 @@ dist-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh

View File

@ -719,7 +719,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
if (isUri(elem.second)) {
try {
res = { true, store->toRealPath(fetchers::downloadTarball(
store, resolveUri(elem.second), "source", false).storePath) };
store, resolveUri(elem.second), "source", false).first.storePath) };
} catch (FileTransferError & e) {
logWarning({
.name = "Entry download",

View File

@ -30,18 +30,6 @@ namespace nix {
*************************************************************/
/* Decode a context string !<name>!<path> into a pair <path,
name>. */
std::pair<string, string> decodeContext(const string & s)
{
if (s.at(0) == '!') {
size_t index = s.find("!", 1);
return std::pair<string, string>(string(s, index + 1), string(s, 1, index - 1));
} else
return std::pair<string, string>(s.at(0) == '/' ? s : string(s, 1), "");
}
InvalidPathError::InvalidPathError(const Path & path) :
EvalError("path '%s' is not valid", path), path(path) {}

View File

@ -62,23 +62,23 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
if (ref) attrs.insert_or_assign("ref", *ref);
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
if (fetchSubmodules) attrs.insert_or_assign("submodules", true);
auto input = fetchers::inputFromAttrs(attrs);
if (fetchSubmodules) attrs.insert_or_assign("submodules", fetchers::Explicit<bool>{true});
auto input = fetchers::Input::fromAttrs(std::move(attrs));
// FIXME: use name?
auto [tree, input2] = input->fetchTree(state.store);
auto [tree, input2] = input.fetch(state.store);
state.mkAttrs(v, 8);
auto storePath = state.store->printStorePath(tree.storePath);
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
auto rev2 = input2->getRev().value_or(Hash(htSHA1));
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
// Backward compatibility: set 'revCount' to 0 for a dirty tree.
mkInt(*state.allocAttr(v, state.symbols.create("revCount")),
tree.info.revCount.value_or(0));
input2.getRevCount().value_or(0));
mkBool(*state.allocAttr(v, state.symbols.create("submodules")), fetchSubmodules);
v.attrs->sort();

View File

@ -65,23 +65,23 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
if (ref) attrs.insert_or_assign("ref", *ref);
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
auto input = fetchers::inputFromAttrs(attrs);
auto input = fetchers::Input::fromAttrs(std::move(attrs));
// FIXME: use name
auto [tree, input2] = input->fetchTree(state.store);
auto [tree, input2] = input.fetch(state.store);
state.mkAttrs(v, 8);
auto storePath = state.store->printStorePath(tree.storePath);
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
if (input2->getRef())
mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef());
if (input2.getRef())
mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2.getRef());
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
auto rev2 = input2->getRev().value_or(Hash(htSHA1));
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
if (tree.info.revCount)
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
if (auto revCount = input2.getRevCount())
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
v.attrs->sort();
if (state.allowedPaths)

View File

@ -3,6 +3,7 @@
#include "store-api.hh"
#include "fetchers.hh"
#include "filetransfer.hh"
#include "registry.hh"
#include <ctime>
#include <iomanip>
@ -12,30 +13,37 @@ namespace nix {
void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
std::shared_ptr<const fetchers::Input> input,
const fetchers::Input & input,
Value & v)
{
assert(input.isImmutable());
state.mkAttrs(v, 8);
auto storePath = state.store->printStorePath(tree.storePath);
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
assert(tree.info.narHash);
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
tree.info.narHash->to_string(SRI, true));
// FIXME: support arbitrary input attributes.
if (input->getRev()) {
mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), input->getRev()->gitShortRev());
auto narHash = input.getNarHash();
assert(narHash);
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
narHash->to_string(SRI, true));
if (auto rev = input.getRev()) {
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
}
if (tree.info.revCount)
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.info.revCount);
if (auto revCount = input.getRevCount())
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
if (tree.info.lastModified)
mkString(*state.allocAttr(v, state.symbols.create("lastModified")),
fmt("%s", std::put_time(std::gmtime(&*tree.info.lastModified), "%Y%m%d%H%M%S")));
if (auto lastModified = input.getLastModified()) {
mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
mkString(*state.allocAttr(v, state.symbols.create("lastModifiedDate")),
fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
}
v.attrs->sort();
}
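Illustration (not part of this change): emitTreeAttrs now exposes both the numeric lastModified and a derived, human-readable lastModifiedDate. The formatting in isolation:

#include <ctime>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>

// Format a UNIX timestamp as 'lastModifiedDate' does: UTC, "%Y%m%d%H%M%S".
std::string lastModifiedDate(time_t lastModified)
{
    std::ostringstream out;
    out << std::put_time(std::gmtime(&lastModified), "%Y%m%d%H%M%S");
    return out.str();
}

int main()
{
    time_t t = 1591633336; // example timestamp (2020-06-08 UTC)
    std::cout << lastModifiedDate(t) << "\n";
}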
@ -44,7 +52,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
{
settings.requireExperimentalFeature("flakes");
std::shared_ptr<const fetchers::Input> input;
fetchers::Input input;
PathSet context;
state.forceValue(*args[0]);
@ -59,9 +67,11 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
if (attr.value->type == tString)
attrs.emplace(attr.name, attr.value->string.s);
else if (attr.value->type == tBool)
attrs.emplace(attr.name, attr.value->boolean);
attrs.emplace(attr.name, fetchers::Explicit<bool>{attr.value->boolean});
else if (attr.value->type == tInt)
attrs.emplace(attr.name, attr.value->integer);
else
throw TypeError("fetchTree argument '%s' is %s while a string or Boolean is expected",
throw TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
attr.name, showType(*attr.value));
}
@ -71,15 +81,17 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
.errPos = pos
});
input = fetchers::inputFromAttrs(attrs);
input = fetchers::Input::fromAttrs(std::move(attrs));
} else
input = fetchers::inputFromURL(state.coerceToString(pos, *args[0], context, false, false));
input = fetchers::Input::fromURL(state.coerceToString(pos, *args[0], context, false, false));
if (evalSettings.pureEval && !input->isImmutable())
throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input");
if (!evalSettings.pureEval && !input.isDirect())
input = lookupInRegistries(state.store, input).first;
// FIXME: use fetchOrSubstituteTree
auto [tree, input2] = input->fetchTree(state.store);
if (evalSettings.pureEval && !input.isImmutable())
throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input, at %s", pos);
auto [tree, input2] = input.fetch(state.store);
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
@ -136,7 +148,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
auto storePath =
unpack
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
auto path = state.store->toRealPath(storePath);

View File

@ -28,6 +28,12 @@ public:
return s == s2.s;
}
// FIXME: remove
bool operator == (std::string_view s2) const
{
return s->compare(s2) == 0;
}
bool operator != (const Symbol & s2) const
{
return s != s2.s;
@ -68,9 +74,10 @@ private:
Symbols symbols;
public:
Symbol create(const string & s)
Symbol create(std::string_view s)
{
std::pair<Symbols::iterator, bool> res = symbols.insert(s);
// FIXME: avoid allocation if 's' already exists in the symbol table.
std::pair<Symbols::iterator, bool> res = symbols.emplace(std::string(s));
return Symbol(&*res.first);
}

View File

@ -166,6 +166,13 @@ struct Value
{
return type == tList1 ? 1 : type == tList2 ? 2 : bigList.size;
}
/* Check whether forcing this value requires a trivial amount of
computation. In particular, function applications are
non-trivial. */
bool isTrivial() const;
std::vector<std::pair<Path, std::string>> getContext();
};

View File

@ -27,7 +27,7 @@ nlohmann::json attrsToJson(const Attrs & attrs)
{
nlohmann::json json;
for (auto & attr : attrs) {
if (auto v = std::get_if<int64_t>(&attr.second)) {
if (auto v = std::get_if<uint64_t>(&attr.second)) {
json[attr.first] = *v;
} else if (auto v = std::get_if<std::string>(&attr.second)) {
json[attr.first] = *v;
@ -55,16 +55,16 @@ std::string getStrAttr(const Attrs & attrs, const std::string & name)
return *s;
}
std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name)
std::optional<uint64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name)
{
auto i = attrs.find(name);
if (i == attrs.end()) return {};
if (auto v = std::get_if<int64_t>(&i->second))
if (auto v = std::get_if<uint64_t>(&i->second))
return *v;
throw Error("input attribute '%s' is not an integer", name);
}
int64_t getIntAttr(const Attrs & attrs, const std::string & name)
uint64_t getIntAttr(const Attrs & attrs, const std::string & name)
{
auto s = maybeGetIntAttr(attrs, name);
if (!s)
@ -76,8 +76,8 @@ std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & na
{
auto i = attrs.find(name);
if (i == attrs.end()) return {};
if (auto v = std::get_if<int64_t>(&i->second))
return *v;
if (auto v = std::get_if<Explicit<bool>>(&i->second))
return v->t;
throw Error("input attribute '%s' is not a Boolean", name);
}
@ -93,7 +93,7 @@ std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
{
std::map<std::string, std::string> query;
for (auto & attr : attrs) {
if (auto v = std::get_if<int64_t>(&attr.second)) {
if (auto v = std::get_if<uint64_t>(&attr.second)) {
query.insert_or_assign(attr.first, fmt("%d", *v));
} else if (auto v = std::get_if<std::string>(&attr.second)) {
query.insert_or_assign(attr.first, *v);

View File

@ -13,9 +13,14 @@ namespace nix::fetchers {
template<typename T>
struct Explicit {
T t;
bool operator ==(const Explicit<T> & other) const
{
return t == other.t;
}
};
typedef std::variant<std::string, int64_t, Explicit<bool>> Attr;
typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;
typedef std::map<std::string, Attr> Attrs;
Attrs jsonToAttrs(const nlohmann::json & json);
@ -26,9 +31,9 @@ std::optional<std::string> maybeGetStrAttr(const Attrs & attrs, const std::strin
std::string getStrAttr(const Attrs & attrs, const std::string & name);
std::optional<int64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name);
std::optional<uint64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name);
int64_t getIntAttr(const Attrs & attrs, const std::string & name);
uint64_t getIntAttr(const Attrs & attrs, const std::string & name);
std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & name);

View File

@ -5,71 +5,265 @@
namespace nix::fetchers {
std::unique_ptr<std::vector<std::unique_ptr<InputScheme>>> inputSchemes = nullptr;
std::unique_ptr<std::vector<std::shared_ptr<InputScheme>>> inputSchemes = nullptr;
void registerInputScheme(std::unique_ptr<InputScheme> && inputScheme)
void registerInputScheme(std::shared_ptr<InputScheme> && inputScheme)
{
if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::unique_ptr<InputScheme>>>();
if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::shared_ptr<InputScheme>>>();
inputSchemes->push_back(std::move(inputScheme));
}
std::unique_ptr<Input> inputFromURL(const ParsedURL & url)
Input Input::fromURL(const std::string & url)
{
return fromURL(parseURL(url));
}
static void fixupInput(Input & input)
{
// Check common attributes.
input.getType();
input.getRef();
if (input.getRev())
input.immutable = true;
input.getRevCount();
input.getLastModified();
if (input.getNarHash())
input.immutable = true;
}
Input Input::fromURL(const ParsedURL & url)
{
for (auto & inputScheme : *inputSchemes) {
auto res = inputScheme->inputFromURL(url);
if (res) return res;
if (res) {
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
}
}
throw Error("input '%s' is unsupported", url.url);
}
std::unique_ptr<Input> inputFromURL(const std::string & url)
Input Input::fromAttrs(Attrs && attrs)
{
return inputFromURL(parseURL(url));
}
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs)
{
auto attrs2(attrs);
attrs2.erase("narHash");
for (auto & inputScheme : *inputSchemes) {
auto res = inputScheme->inputFromAttrs(attrs2);
auto res = inputScheme->inputFromAttrs(attrs);
if (res) {
if (auto narHash = maybeGetStrAttr(attrs, "narHash"))
// FIXME: require SRI hash.
res->narHash = newHashAllowEmpty(*narHash, {});
return res;
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
}
}
throw Error("input '%s' is unsupported", attrsToJson(attrs));
Input input;
input.attrs = attrs;
fixupInput(input);
return input;
}
ParsedURL Input::toURL() const
{
if (!scheme)
throw Error("cannot show unsupported input '%s'", attrsToJson(attrs));
return scheme->toURL(*this);
}
std::string Input::to_string() const
{
return toURL().to_string();
}
Attrs Input::toAttrs() const
{
auto attrs = toAttrsInternal();
if (narHash)
attrs.emplace("narHash", narHash->to_string(SRI, true));
attrs.emplace("type", type());
return attrs;
}
std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store) const
bool Input::hasAllInfo() const
{
auto [tree, input] = fetchTreeInternal(store);
return getNarHash() && scheme && scheme->hasAllInfo(*this);
}
bool Input::operator ==(const Input & other) const
{
return attrs == other.attrs;
}
bool Input::contains(const Input & other) const
{
if (*this == other) return true;
auto other2(other);
other2.attrs.erase("ref");
other2.attrs.erase("rev");
if (*this == other2) return true;
return false;
}
std::pair<Tree, Input> Input::fetch(ref<Store> store) const
{
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJson(toAttrs()));
/* The tree may already be in the Nix store, or it could be
substituted (which is often faster than fetching from the
original source). So check that. */
if (hasAllInfo()) {
try {
auto storePath = computeStorePath(*store);
store->ensurePath(storePath);
debug("using substituted/cached input '%s' in '%s'",
to_string(), store->printStorePath(storePath));
auto actualPath = store->toRealPath(storePath);
return {fetchers::Tree(std::move(actualPath), std::move(storePath)), *this};
} catch (Error & e) {
debug("substitution of input '%s' failed: %s", to_string(), e.what());
}
}
auto [tree, input] = scheme->fetch(store, *this);
if (tree.actualPath == "")
tree.actualPath = store->toRealPath(tree.storePath);
if (!tree.info.narHash)
tree.info.narHash = store->queryPathInfo(tree.storePath)->narHash;
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash->to_string(SRI, true));
if (input->narHash)
assert(input->narHash == tree.info.narHash);
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash->to_string(SRI, true));
}
if (narHash && narHash != input->narHash)
throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(), tree.actualPath, narHash->to_string(SRI, true), input->narHash->to_string(SRI, true));
if (auto prevLastModified = getLastModified()) {
if (input.getLastModified() != prevLastModified)
throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
input.to_string(), *prevLastModified);
}
if (auto prevRevCount = getRevCount()) {
if (input.getRevCount() != prevRevCount)
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
input.to_string(), *prevRevCount);
}
input.immutable = true;
assert(input.hasAllInfo());
return {std::move(tree), input};
}
Input Input::applyOverrides(
std::optional<std::string> ref,
std::optional<Hash> rev) const
{
if (!scheme) return *this;
return scheme->applyOverrides(*this, ref, rev);
}
void Input::clone(const Path & destDir) const
{
assert(scheme);
scheme->clone(*this, destDir);
}
std::optional<Path> Input::getSourcePath() const
{
assert(scheme);
return scheme->getSourcePath(*this);
}
void Input::markChangedFile(
std::string_view file,
std::optional<std::string> commitMsg) const
{
assert(scheme);
return scheme->markChangedFile(*this, file, commitMsg);
}
StorePath Input::computeStorePath(Store & store) const
{
auto narHash = getNarHash();
if (!narHash)
throw Error("cannot compute store path for mutable input '%s'", to_string());
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
}
std::string Input::getType() const
{
return getStrAttr(attrs, "type");
}
std::optional<Hash> Input::getNarHash() const
{
if (auto s = maybeGetStrAttr(attrs, "narHash"))
// FIXME: require SRI hash.
return newHashAllowEmpty(*s, htSHA256);
return {};
}
std::optional<std::string> Input::getRef() const
{
if (auto s = maybeGetStrAttr(attrs, "ref"))
return *s;
return {};
}
std::optional<Hash> Input::getRev() const
{
if (auto s = maybeGetStrAttr(attrs, "rev"))
return Hash(*s, htSHA1);
return {};
}
std::optional<uint64_t> Input::getRevCount() const
{
if (auto n = maybeGetIntAttr(attrs, "revCount"))
return *n;
return {};
}
std::optional<time_t> Input::getLastModified() const
{
if (auto n = maybeGetIntAttr(attrs, "lastModified"))
return *n;
return {};
}
ParsedURL InputScheme::toURL(const Input & input)
{
throw Error("don't know how to convert input '%s' to a URL", attrsToJson(input.attrs));
}
Input InputScheme::applyOverrides(
const Input & input,
std::optional<std::string> ref,
std::optional<Hash> rev)
{
if (ref)
throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref);
if (rev)
throw Error("don't know how to set revision of input '%s' to '%s'", input.to_string(), rev->gitRev());
return input;
}
std::optional<Path> InputScheme::getSourcePath(const Input & input)
{
return {};
}
void InputScheme::markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg)
{
assert(false);
}
void InputScheme::clone(const Input & input, const Path & destDir)
{
throw Error("do not know how to clone input '%s'", input.to_string());
}
}
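Illustration (not part of this change): Input::fromURL and Input::fromAttrs dispatch over the list of registered InputScheme objects and take the first scheme that recognizes the reference. A stripped-down standalone model of that registry pattern, with hypothetical scheme classes:

#include <iostream>
#include <memory>
#include <optional>
#include <stdexcept>
#include <string>
#include <vector>

struct Scheme {
    virtual ~Scheme() = default;
    // Return a type tag if this scheme recognizes the URL, nullopt otherwise.
    virtual std::optional<std::string> fromURL(const std::string & url) = 0;
};

struct GitScheme : Scheme {
    std::optional<std::string> fromURL(const std::string & url) override
    {
        if (url.rfind("git+", 0) == 0) return std::string("git");
        return std::nullopt;
    }
};

struct TarballScheme : Scheme {
    std::optional<std::string> fromURL(const std::string & url) override
    {
        if (url.size() >= 7 && url.compare(url.size() - 7, 7, ".tar.gz") == 0)
            return std::string("tarball");
        return std::nullopt;
    }
};

// The first registered scheme that recognizes the URL wins.
std::string dispatch(const std::vector<std::shared_ptr<Scheme>> & schemes, const std::string & url)
{
    for (auto & scheme : schemes)
        if (auto res = scheme->fromURL(url))
            return *res;
    throw std::runtime_error("input '" + url + "' is unsupported");
}

int main()
{
    std::vector<std::shared_ptr<Scheme>> schemes = {
        std::make_shared<GitScheme>(),
        std::make_shared<TarballScheme>(),
    };
    std::cout << dispatch(schemes, "git+https://example.org/repo") << "\n";
    std::cout << dispatch(schemes, "https://example.org/src.tar.gz") << "\n";
}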

View File

@ -3,7 +3,6 @@
#include "types.hh"
#include "hash.hh"
#include "path.hh"
#include "tree-info.hh"
#include "attrs.hh"
#include "url.hh"
@ -13,73 +12,101 @@ namespace nix { class Store; }
namespace nix::fetchers {
struct Input;
struct Tree
{
Path actualPath;
StorePath storePath;
TreeInfo info;
Tree(Path && actualPath, StorePath && storePath) : actualPath(actualPath), storePath(std::move(storePath)) {}
};
struct Input : std::enable_shared_from_this<Input>
struct InputScheme;
struct Input
{
std::optional<Hash> narHash; // FIXME: implement
friend class InputScheme;
virtual std::string type() const = 0;
std::shared_ptr<InputScheme> scheme; // note: can be null
Attrs attrs;
bool immutable = false;
bool direct = true;
virtual ~Input() { }
public:
static Input fromURL(const std::string & url);
virtual bool operator ==(const Input & other) const { return false; }
static Input fromURL(const ParsedURL & url);
/* Check whether this is a "direct" input, that is, not
one that goes through a registry. */
virtual bool isDirect() const { return true; }
static Input fromAttrs(Attrs && attrs);
/* Check whether this is an "immutable" input, that is,
one that contains a commit hash or content hash. */
virtual bool isImmutable() const { return (bool) narHash; }
ParsedURL toURL() const;
virtual bool contains(const Input & other) const { return false; }
virtual std::optional<std::string> getRef() const { return {}; }
virtual std::optional<Hash> getRev() const { return {}; }
virtual ParsedURL toURL() const = 0;
std::string to_string() const
{
return toURL().to_string();
}
std::string to_string() const;
Attrs toAttrs() const;
std::pair<Tree, std::shared_ptr<const Input>> fetchTree(ref<Store> store) const;
/* Check whether this is a "direct" input, that is, not
one that goes through a registry. */
bool isDirect() const { return direct; }
private:
/* Check whether this is an "immutable" input, that is,
one that contains a commit hash or content hash. */
bool isImmutable() const { return immutable; }
virtual std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(ref<Store> store) const = 0;
bool hasAllInfo() const;
virtual Attrs toAttrsInternal() const = 0;
bool operator ==(const Input & other) const;
bool contains(const Input & other) const;
std::pair<Tree, Input> fetch(ref<Store> store) const;
Input applyOverrides(
std::optional<std::string> ref,
std::optional<Hash> rev) const;
void clone(const Path & destDir) const;
std::optional<Path> getSourcePath() const;
void markChangedFile(
std::string_view file,
std::optional<std::string> commitMsg) const;
StorePath computeStorePath(Store & store) const;
// Convenience functions for common attributes.
std::string getType() const;
std::optional<Hash> getNarHash() const;
std::optional<std::string> getRef() const;
std::optional<Hash> getRev() const;
std::optional<uint64_t> getRevCount() const;
std::optional<time_t> getLastModified() const;
};
struct InputScheme
{
virtual ~InputScheme() { }
virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0;
virtual std::unique_ptr<Input> inputFromURL(const ParsedURL & url) = 0;
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0;
virtual std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) = 0;
virtual ParsedURL toURL(const Input & input);
virtual bool hasAllInfo(const Input & input) = 0;
virtual Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
std::optional<Hash> rev);
virtual void clone(const Input & input, const Path & destDir);
virtual std::optional<Path> getSourcePath(const Input & input);
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
virtual std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) = 0;
};
std::unique_ptr<Input> inputFromURL(const ParsedURL & url);
std::unique_ptr<Input> inputFromURL(const std::string & url);
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs);
void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
struct DownloadFileResult
{
@ -94,7 +121,7 @@ DownloadFileResult downloadFile(
const std::string & name,
bool immutable);
Tree downloadTarball(
std::pair<Tree, time_t> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,

View File

@ -22,80 +22,152 @@ static bool isNotDotGitDirectory(const Path & path)
return not std::regex_match(path, gitDirRegex);
}
struct GitInput : Input
struct GitInputScheme : InputScheme
{
ParsedURL url;
std::optional<std::string> ref;
std::optional<Hash> rev;
bool shallow = false;
bool submodules = false;
GitInput(const ParsedURL & url) : url(url)
{ }
std::string type() const override { return "git"; }
bool operator ==(const Input & other) const override
std::optional<Input> inputFromURL(const ParsedURL & url) override
{
auto other2 = dynamic_cast<const GitInput *>(&other);
return
other2
&& url == other2->url
&& rev == other2->rev
&& ref == other2->ref;
}
if (url.scheme != "git" &&
url.scheme != "git+http" &&
url.scheme != "git+https" &&
url.scheme != "git+ssh" &&
url.scheme != "git+file") return {};
bool isImmutable() const override
{
return (bool) rev || narHash;
}
auto url2(url);
if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
url2.query.clear();
std::optional<std::string> getRef() const override { return ref; }
std::optional<Hash> getRev() const override { return rev; }
ParsedURL toURL() const override
{
ParsedURL url2(url);
if (url2.scheme != "git") url2.scheme = "git+" + url2.scheme;
if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
if (ref) url2.query.insert_or_assign("ref", *ref);
if (shallow) url2.query.insert_or_assign("shallow", "1");
return url2;
}
Attrs toAttrsInternal() const override
{
Attrs attrs;
attrs.emplace("url", url.to_string());
if (ref)
attrs.emplace("ref", *ref);
if (rev)
attrs.emplace("rev", rev->gitRev());
if (shallow)
attrs.emplace("shallow", true);
if (submodules)
attrs.emplace("submodules", true);
return attrs;
attrs.emplace("type", "git");
for (auto &[name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
else if (name == "shallow")
attrs.emplace(name, Explicit<bool> { value == "1" });
else
url2.query.emplace(name, value);
}
attrs.emplace("url", url2.to_string());
return inputFromAttrs(attrs);
}
std::pair<bool, std::string> getActualUrl() const
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
{
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash")
throw Error("unsupported Git input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
maybeGetBoolAttr(attrs, "shallow");
maybeGetBoolAttr(attrs, "submodules");
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (std::regex_search(*ref, badGitRefRegex))
throw BadURL("invalid Git branch/tag name '%s'", *ref);
}
Input input;
input.attrs = attrs;
return input;
}
ParsedURL toURL(const Input & input) override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme != "git") url.scheme = "git+" + url.scheme;
if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false))
url.query.insert_or_assign("shallow", "1");
return url;
}
bool hasAllInfo(const Input & input) override
{
bool maybeDirty = !input.getRef();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
return
maybeGetIntAttr(input.attrs, "lastModified")
&& (shallow || maybeDirty || maybeGetIntAttr(input.attrs, "revCount"));
}
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
std::optional<Hash> rev) override
{
auto res(input);
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) res.attrs.insert_or_assign("ref", *ref);
if (!res.getRef() && res.getRev())
throw Error("Git input '%s' has a commit hash but no branch/tag name", res.to_string());
return res;
}
void clone(const Input & input, const Path & destDir) override
{
auto [isLocal, actualUrl] = getActualUrl(input);
Strings args = {"clone"};
args.push_back(actualUrl);
if (auto ref = input.getRef()) {
args.push_back("--branch");
args.push_back(*ref);
}
if (input.getRev()) throw Error("cloning a specific revision is not implemented");
args.push_back(destDir);
runProgram("git", true, args);
}
std::optional<Path> getSourcePath(const Input & input) override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme == "file" && !input.getRef() && !input.getRev())
return url.path;
return {};
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
{
auto sourcePath = getSourcePath(input);
assert(sourcePath);
runProgram("git", true,
{ "-C", *sourcePath, "add", "--force", "--intent-to-add", "--", std::string(file) });
if (commitMsg)
runProgram("git", true,
{ "-C", *sourcePath, "commit", std::string(file), "-m", *commitMsg });
}
std::pair<bool, std::string> getActualUrl(const Input & input) const
{
// Don't clone file:// URIs (but otherwise treat them the
// same as remote URIs, i.e. don't use the working tree or
// HEAD).
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isLocal = url.scheme == "file" && !forceHttp;
return {isLocal, isLocal ? url.path : url.base};
}
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
auto name = "source";
auto input = std::make_shared<GitInput>(*this);
Input input(_input);
assert(!rev || rev->type == htSHA1);
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
std::string cacheType = "git";
if (shallow) cacheType += "-shallow";
@ -106,39 +178,35 @@ struct GitInput : Input
return Attrs({
{"type", cacheType},
{"name", name},
{"rev", input->rev->gitRev()},
{"rev", input.getRev()->gitRev()},
});
};
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
-> std::pair<Tree, std::shared_ptr<const Input>>
-> std::pair<Tree, Input>
{
assert(input->rev);
assert(!rev || rev == input->rev);
assert(input.getRev());
assert(!_input.getRev() || _input.getRev() == input.getRev());
if (!shallow)
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
return {
Tree {
.actualPath = store->toRealPath(storePath),
.storePath = std::move(storePath),
.info = TreeInfo {
.revCount = shallow ? std::nullopt : std::optional(getIntAttr(infoAttrs, "revCount")),
.lastModified = getIntAttr(infoAttrs, "lastModified"),
},
},
Tree(store->toRealPath(storePath), std::move(storePath)),
input
};
};
if (rev) {
if (input.getRev()) {
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
return makeResult(res->first, std::move(res->second));
}
auto [isLocal, actualUrl_] = getActualUrl();
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
// If this is a local directory and no ref or revision is
// given, then allow the use of an unclean working tree.
if (!input->ref && !input->rev && isLocal) {
if (!input.getRef() && !input.getRev() && isLocal) {
bool clean = false;
/* Check whether this repo has any commits. There are
@ -197,35 +265,35 @@ struct GitInput : Input
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
auto tree = Tree {
.actualPath = store->printStorePath(storePath),
.storePath = std::move(storePath),
.info = TreeInfo {
// FIXME: maybe we should use the timestamp of the last
// modified dirty file?
.lastModified = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0,
}
};
// FIXME: maybe we should use the timestamp of the last
// modified dirty file?
input.attrs.insert_or_assign(
"lastModified",
haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0);
return {std::move(tree), input};
return {
Tree(store->printStorePath(storePath), std::move(storePath)),
input
};
}
}
if (!input->ref) input->ref = isLocal ? readHead(actualUrl) : "master";
if (!input.getRef()) input.attrs.insert_or_assign("ref", isLocal ? readHead(actualUrl) : "master");
Attrs mutableAttrs({
{"type", cacheType},
{"name", name},
{"url", actualUrl},
{"ref", *input->ref},
{"ref", *input.getRef()},
});
Path repoDir;
if (isLocal) {
if (!input->rev)
input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
if (!input.getRev())
input.attrs.insert_or_assign("rev",
Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input.getRef() })), htSHA1).gitRev());
repoDir = actualUrl;
@ -233,8 +301,8 @@ struct GitInput : Input
if (auto res = getCache()->lookup(store, mutableAttrs)) {
auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
if (!rev || rev == rev2) {
input->rev = rev2;
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
}
}
@ -248,18 +316,18 @@ struct GitInput : Input
}
Path localRefFile =
input->ref->compare(0, 5, "refs/") == 0
? cacheDir + "/" + *input->ref
: cacheDir + "/refs/heads/" + *input->ref;
input.getRef()->compare(0, 5, "refs/") == 0
? cacheDir + "/" + *input.getRef()
: cacheDir + "/refs/heads/" + *input.getRef();
bool doFetch;
time_t now = time(0);
/* If a rev was specified, we need to fetch if it's not in the
repo. */
if (input->rev) {
if (input.getRev()) {
try {
runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input->rev->gitRev() });
runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input.getRev()->gitRev() });
doFetch = false;
} catch (ExecError & e) {
if (WIFEXITED(e.status)) {
@ -282,9 +350,10 @@ struct GitInput : Input
// FIXME: git stderr messes up our progress indicator, so
// we're using --quiet for now. Should process its stderr.
try {
auto fetchRef = input->ref->compare(0, 5, "refs/") == 0
? *input->ref
: "refs/heads/" + *input->ref;
auto ref = input.getRef();
auto fetchRef = ref->compare(0, 5, "refs/") == 0
? *ref
: "refs/heads/" + *ref;
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
@ -300,8 +369,8 @@ struct GitInput : Input
utimes(localRefFile.c_str(), times);
}
if (!input->rev)
input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
if (!input.getRev())
input.attrs.insert_or_assign("rev", Hash(chomp(readFile(localRefFile)), htSHA1).gitRev());
}
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
@ -311,7 +380,7 @@ struct GitInput : Input
// FIXME: check whether rev is an ancestor of ref.
printTalkative("using revision %s of repo '%s'", input->rev->gitRev(), actualUrl);
printTalkative("using revision %s of repo '%s'", input.getRev()->gitRev(), actualUrl);
/* Now that we know the ref, check again whether we have it in
the store. */
@ -333,7 +402,7 @@ struct GitInput : Input
runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
"--update-head-ok", "--", repoDir, "refs/*:refs/*" });
runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input->rev->gitRev() });
runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() });
runProgram("git", true, { "-C", tmpDir, "remote", "add", "origin", actualUrl });
runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" });
@ -342,7 +411,7 @@ struct GitInput : Input
// FIXME: should pipe this, or find some better way to extract a
// revision.
auto source = sinkToSource([&](Sink & sink) {
RunOptions gitOptions("git", { "-C", repoDir, "archive", input->rev->gitRev() });
RunOptions gitOptions("git", { "-C", repoDir, "archive", input.getRev()->gitRev() });
gitOptions.standardOut = &sink;
runProgram2(gitOptions);
});
@ -352,18 +421,18 @@ struct GitInput : Input
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input.getRev()->gitRev() }));
Attrs infoAttrs({
{"rev", input->rev->gitRev()},
{"rev", input.getRev()->gitRev()},
{"lastModified", lastModified},
});
if (!shallow)
infoAttrs.insert_or_assign("revCount",
std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input->rev->gitRev() })));
std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input.getRev()->gitRev() })));
if (!this->rev)
if (!_input.getRev())
getCache()->add(
store,
mutableAttrs,
@ -382,60 +451,6 @@ struct GitInput : Input
}
};
struct GitInputScheme : InputScheme
{
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
{
if (url.scheme != "git" &&
url.scheme != "git+http" &&
url.scheme != "git+https" &&
url.scheme != "git+ssh" &&
url.scheme != "git+file") return nullptr;
auto url2(url);
if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
url2.query.clear();
Attrs attrs;
attrs.emplace("type", "git");
for (auto &[name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
else
url2.query.emplace(name, value);
}
attrs.emplace("url", url2.to_string());
return inputFromAttrs(attrs);
}
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
{
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules")
throw Error("unsupported Git input attribute '%s'", name);
auto input = std::make_unique<GitInput>(parseURL(getStrAttr(attrs, "url")));
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (std::regex_search(*ref, badGitRefRegex))
throw BadURL("invalid Git branch/tag name '%s'", *ref);
input->ref = *ref;
}
if (auto rev = maybeGetStrAttr(attrs, "rev"))
input->rev = Hash(*rev, htSHA1);
input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false);
input->submodules = maybeGetBoolAttr(attrs, "submodules").value_or(false);
return input;
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
}
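As a side note on the attrs-based flow above: GitInputScheme::inputFromURL peels the fetcher-level query parameters (rev, ref, shallow) off the URL into the input's attribute map and leaves everything else on the URL it will actually fetch. Below is a minimal standalone sketch of that split, using plain std::map as a stand-in for the real ParsedURL::query and Attrs types; the example URL parameters are made up.
#include <iostream>
#include <map>
#include <string>
int main()
{
    // Stand-in for the query part of
    // "git+https://example.org/repo.git?ref=main&rev=...&foo=bar" (invented values).
    std::map<std::string, std::string> query = {
        {"ref", "main"},
        {"rev", "0123456789abcdef0123456789abcdef01234567"},
        {"foo", "bar"},
    };
    std::map<std::string, std::string> attrs = {{"type", "git"}};
    std::map<std::string, std::string> url2Query;
    // Same split as in inputFromURL: fetcher-level parameters become
    // input attributes, everything else stays on the URL to fetch.
    for (auto & [name, value] : query) {
        if (name == "rev" || name == "ref" || name == "shallow")
            attrs.emplace(name, value);
        else
            url2Query.emplace(name, value);
    }
    for (auto & [k, v] : attrs) std::cout << "attr  " << k << " = " << v << "\n";
    for (auto & [k, v] : url2Query) std::cout << "query " << k << " = " << v << "\n";
}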

View File

@ -8,81 +8,142 @@
namespace nix::fetchers {
std::regex ownerRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
std::regex repoRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
// A github or gitlab url
const static std::string urlRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
std::regex urlRegex(urlRegexS, std::regex::ECMAScript);
struct GitHubInput : Input
struct GitArchiveInputScheme : InputScheme
{
std::string owner;
std::string repo;
std::optional<std::string> ref;
std::optional<Hash> rev;
virtual std::string type() = 0;
std::string type() const override { return "github"; }
bool operator ==(const Input & other) const override
std::optional<Input> inputFromURL(const ParsedURL & url) override
{
auto other2 = dynamic_cast<const GitHubInput *>(&other);
return
other2
&& owner == other2->owner
&& repo == other2->repo
&& rev == other2->rev
&& ref == other2->ref;
if (url.scheme != type()) return {};
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
std::optional<Hash> rev;
std::optional<std::string> ref;
std::optional<std::string> host_url;
if (path.size() == 2) {
} else if (path.size() == 3) {
if (std::regex_match(path[2], revRegex))
rev = Hash(path[2], htSHA1);
else if (std::regex_match(path[2], refRegex))
ref = path[2];
else
throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
} else
throw BadURL("URL '%s' is invalid", url.url);
for (auto &[name, value] : url.query) {
if (name == "rev") {
if (rev)
throw BadURL("URL '%s' contains multiple commit hashes", url.url);
rev = Hash(value, htSHA1);
}
else if (name == "ref") {
if (!std::regex_match(value, refRegex))
throw BadURL("URL '%s' contains an invalid branch/tag name", url.url);
if (ref)
throw BadURL("URL '%s' contains multiple branch/tag names", url.url);
ref = value;
}
else if (name == "url") {
if (!std::regex_match(value, urlRegex))
throw BadURL("URL '%s' contains an invalid instance url", url.url);
host_url = value;
}
// FIXME: barf on unsupported attributes
}
if (ref && rev)
throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev());
Input input;
input.attrs.insert_or_assign("type", type());
input.attrs.insert_or_assign("owner", path[0]);
input.attrs.insert_or_assign("repo", path[1]);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref);
if (host_url) input.attrs.insert_or_assign("url", *host_url);
return input;
}
bool isImmutable() const override
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
{
return (bool) rev || narHash;
if (maybeGetStrAttr(attrs, "type") != type()) return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified")
throw Error("unsupported input attribute '%s'", name);
getStrAttr(attrs, "owner");
getStrAttr(attrs, "repo");
Input input;
input.attrs = attrs;
return input;
}
std::optional<std::string> getRef() const override { return ref; }
std::optional<Hash> getRev() const override { return rev; }
ParsedURL toURL() const override
ParsedURL toURL(const Input & input) override
{
auto owner = getStrAttr(input.attrs, "owner");
auto repo = getStrAttr(input.attrs, "repo");
auto ref = input.getRef();
auto rev = input.getRev();
auto path = owner + "/" + repo;
assert(!(ref && rev));
if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(Base16, false);
return ParsedURL {
.scheme = "github",
.scheme = type(),
.path = path,
};
}
Attrs toAttrsInternal() const override
bool hasAllInfo(const Input & input) override
{
Attrs attrs;
attrs.emplace("owner", owner);
attrs.emplace("repo", repo);
if (ref)
attrs.emplace("ref", *ref);
if (rev)
attrs.emplace("rev", rev->gitRev());
return attrs;
return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified");
}
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
std::optional<Hash> rev) override
{
auto rev = this->rev;
auto ref = this->ref.value_or("master");
if (!rev) {
auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
owner, repo, ref);
auto json = nlohmann::json::parse(
readFile(
store->toRealPath(
downloadFile(store, url, "source", false).storePath)));
rev = Hash(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev->gitRev());
auto input(_input);
if (rev && ref)
throw BadURL("cannot apply both a commit hash (%s) and a branch/tag name ('%s') to input '%s'",
rev->gitRev(), *ref, input.to_string());
if (rev) {
input.attrs.insert_or_assign("rev", rev->gitRev());
input.attrs.erase("ref");
}
if (ref) {
input.attrs.insert_or_assign("ref", *ref);
input.attrs.erase("rev");
}
return input;
}
auto input = std::make_shared<GitHubInput>(*this);
input->ref = {};
input->rev = *rev;
virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0;
virtual std::string getDownloadUrl(const Input & input) const = 0;
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
Input input(_input);
if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
auto rev = input.getRev();
if (!rev) rev = getRevFromRef(store, input);
input.attrs.erase("ref");
input.attrs.insert_or_assign("rev", rev->gitRev());
Attrs immutableAttrs({
{"type", "git-tarball"},
@ -90,36 +151,25 @@ struct GitHubInput : Input
});
if (auto res = getCache()->lookup(store, immutableAttrs)) {
input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
return {
Tree{
.actualPath = store->toRealPath(res->second),
.storePath = std::move(res->second),
.info = TreeInfo {
.lastModified = getIntAttr(res->first, "lastModified"),
},
},
Tree(store->toRealPath(res->second), std::move(res->second)),
input
};
}
// FIXME: use regular /archive URLs instead? api.github.com
// might have stricter rate limits.
auto url = getDownloadUrl(input);
auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
owner, repo, rev->to_string(Base16, false));
auto [tree, lastModified] = downloadTarball(store, url, "source", true);
std::string accessToken = settings.githubAccessToken.get();
if (accessToken != "")
url += "?access_token=" + accessToken;
auto tree = downloadTarball(store, url, "source", true);
input.attrs.insert_or_assign("lastModified", lastModified);
getCache()->add(
store,
immutableAttrs,
{
{"rev", rev->gitRev()},
{"lastModified", *tree.info.lastModified}
{"lastModified", lastModified}
},
tree.storePath,
true);
@ -128,68 +178,96 @@ struct GitHubInput : Input
}
};
struct GitHubInputScheme : InputScheme
struct GitHubInputScheme : GitArchiveInputScheme
{
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
std::string type() override { return "github"; }
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
if (url.scheme != "github") return nullptr;
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
auto input = std::make_unique<GitHubInput>();
if (path.size() == 2) {
} else if (path.size() == 3) {
if (std::regex_match(path[2], revRegex))
input->rev = Hash(path[2], htSHA1);
else if (std::regex_match(path[2], refRegex))
input->ref = path[2];
else
throw BadURL("in GitHub URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
} else
throw BadURL("GitHub URL '%s' is invalid", url.url);
for (auto &[name, value] : url.query) {
if (name == "rev") {
if (input->rev)
throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
input->rev = Hash(value, htSHA1);
}
else if (name == "ref") {
if (!std::regex_match(value, refRegex))
throw BadURL("GitHub URL '%s' contains an invalid branch/tag name", url.url);
if (input->ref)
throw BadURL("GitHub URL '%s' contains multiple branch/tag names", url.url);
input->ref = value;
}
}
if (input->ref && input->rev)
throw BadURL("GitHub URL '%s' contains both a commit hash and a branch/tag name", url.url);
input->owner = path[0];
input->repo = path[1];
return input;
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
auto json = nlohmann::json::parse(
readFile(
store->toRealPath(
downloadFile(store, url, "source", false).storePath)));
auto rev = Hash(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
std::string getDownloadUrl(const Input & input) const override
{
if (maybeGetStrAttr(attrs, "type") != "github") return {};
// FIXME: use regular /archive URLs instead? api.github.com
// might have stricter rate limits.
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
for (auto & [name, value] : attrs)
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev")
throw Error("unsupported GitHub input attribute '%s'", name);
std::string accessToken = settings.githubAccessToken.get();
if (accessToken != "")
url += "?access_token=" + accessToken;
auto input = std::make_unique<GitHubInput>();
input->owner = getStrAttr(attrs, "owner");
input->repo = getStrAttr(attrs, "repo");
input->ref = maybeGetStrAttr(attrs, "ref");
if (auto rev = maybeGetStrAttr(attrs, "rev"))
input->rev = Hash(*rev, htSHA1);
return input;
return url;
}
void clone(const Input & input, const Path & destDir) override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir);
}
};
struct GitLabInputScheme : GitArchiveInputScheme
{
std::string type() override { return "gitlab"; }
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
auto json = nlohmann::json::parse(
readFile(
store->toRealPath(
downloadFile(store, url, "source", false).storePath)));
auto rev = Hash(std::string(json[0]["id"]), htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
std::string getDownloadUrl(const Input & input) const override
{
// FIXME: This endpoint has a rate limit threshold of 5 requests per minute
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
/* # FIXME: add private token auth (`curl --header "PRIVATE-TOKEN: <your_access_token>"`)
std::string accessToken = settings.githubAccessToken.get();
if (accessToken != "")
url += "?access_token=" + accessToken;*/
return url;
}
void clone(const Input & input, const Path & destDir) override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
// FIXME: get username somewhere
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir);
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
static auto r2 = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); });
}
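For reference, the two archive schemes above differ mainly in how they compose their REST URLs before handing them to downloadFile/downloadTarball. The sketch below rebuilds the same URL shapes with std::ostringstream in place of nix's fmt(); the host, owner, repo and rev values are placeholders, not endpoints verified here.
#include <iostream>
#include <sstream>
#include <string>
// Stand-ins for the fmt() calls in getDownloadUrl(); the URL shapes are the
// ones used by the GitHub and GitLab schemes in this diff.
static std::string githubTarballUrl(const std::string & host,
    const std::string & owner, const std::string & repo, const std::string & rev)
{
    std::ostringstream s;
    s << "https://api." << host << "/repos/" << owner << "/" << repo << "/tarball/" << rev;
    return s.str();
}
static std::string gitlabTarballUrl(const std::string & host,
    const std::string & owner, const std::string & repo, const std::string & rev)
{
    std::ostringstream s;
    s << "https://" << host << "/api/v4/projects/" << owner << "%2F" << repo
      << "/repository/archive.tar.gz?sha=" << rev;
    return s.str();
}
int main()
{
    // Placeholder coordinates, for illustration only.
    std::cout << githubTarballUrl("github.com", "some-owner", "some-repo", "0123abcd") << "\n";
    std::cout << gitlabTarballUrl("gitlab.com", "some-owner", "some-repo", "0123abcd") << "\n";
}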

104
src/libfetchers/indirect.cc Normal file
View File

@ -0,0 +1,104 @@
#include "fetchers.hh"
namespace nix::fetchers {
std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
struct IndirectInputScheme : InputScheme
{
std::optional<Input> inputFromURL(const ParsedURL & url) override
{
if (url.scheme != "flake") return {};
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
std::optional<Hash> rev;
std::optional<std::string> ref;
if (path.size() == 1) {
} else if (path.size() == 2) {
if (std::regex_match(path[1], revRegex))
rev = Hash(path[1], htSHA1);
else if (std::regex_match(path[1], refRegex))
ref = path[1];
else
throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]);
} else if (path.size() == 3) {
if (!std::regex_match(path[1], refRegex))
throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url.url, path[1]);
ref = path[1];
if (!std::regex_match(path[2], revRegex))
throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
rev = Hash(path[2], htSHA1);
} else
throw BadURL("GitHub URL '%s' is invalid", url.url);
std::string id = path[0];
if (!std::regex_match(id, flakeRegex))
throw BadURL("'%s' is not a valid flake ID", id);
// FIXME: forbid query params?
Input input;
input.direct = false;
input.attrs.insert_or_assign("type", "indirect");
input.attrs.insert_or_assign("id", id);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref);
return input;
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
{
if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "id" && name != "ref" && name != "rev" && name != "narHash")
throw Error("unsupported indirect input attribute '%s'", name);
auto id = getStrAttr(attrs, "id");
if (!std::regex_match(id, flakeRegex))
throw BadURL("'%s' is not a valid flake ID", id);
Input input;
input.direct = false;
input.attrs = attrs;
return input;
}
ParsedURL toURL(const Input & input) override
{
ParsedURL url;
url.scheme = "flake";
url.path = getStrAttr(input.attrs, "id");
if (auto ref = input.getRef()) { url.path += '/'; url.path += *ref; };
if (auto rev = input.getRev()) { url.path += '/'; url.path += rev->gitRev(); };
return url;
}
bool hasAllInfo(const Input & input) override
{
return false;
}
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
std::optional<Hash> rev) override
{
auto input(_input);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref);
return input;
}
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
}
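A quick illustration of the ID validation above: indirect inputs only accept flake IDs matching flakeRegex. The standalone sketch below checks a few invented candidate IDs against the same pattern.
#include <iostream>
#include <regex>
#include <string>
int main()
{
    // Same pattern as flakeRegex above.
    std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
    for (std::string id : {"nixpkgs", "my-flake_2", "2bad", "also.bad"})
        std::cout << id << " -> "
                  << (std::regex_match(id, flakeRegex) ? "valid" : "invalid")
                  << " flake ID\n";
}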

View File

@ -10,76 +10,124 @@ using namespace std::string_literals;
namespace nix::fetchers {
struct MercurialInput : Input
struct MercurialInputScheme : InputScheme
{
ParsedURL url;
std::optional<std::string> ref;
std::optional<Hash> rev;
MercurialInput(const ParsedURL & url) : url(url)
{ }
std::string type() const override { return "hg"; }
bool operator ==(const Input & other) const override
std::optional<Input> inputFromURL(const ParsedURL & url) override
{
auto other2 = dynamic_cast<const MercurialInput *>(&other);
return
other2
&& url == other2->url
&& rev == other2->rev
&& ref == other2->ref;
if (url.scheme != "hg+http" &&
url.scheme != "hg+https" &&
url.scheme != "hg+ssh" &&
url.scheme != "hg+file") return {};
auto url2(url);
url2.scheme = std::string(url2.scheme, 3);
url2.query.clear();
Attrs attrs;
attrs.emplace("type", "hg");
for (auto &[name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
else
url2.query.emplace(name, value);
}
attrs.emplace("url", url2.to_string());
return inputFromAttrs(attrs);
}
bool isImmutable() const override
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
{
return (bool) rev || narHash;
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash")
throw Error("unsupported Mercurial input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (!std::regex_match(*ref, refRegex))
throw BadURL("invalid Mercurial branch/tag name '%s'", *ref);
}
Input input;
input.attrs = attrs;
return input;
}
std::optional<std::string> getRef() const override { return ref; }
std::optional<Hash> getRev() const override { return rev; }
ParsedURL toURL() const override
ParsedURL toURL(const Input & input) override
{
ParsedURL url2(url);
url2.scheme = "hg+" + url2.scheme;
if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
if (ref) url2.query.insert_or_assign("ref", *ref);
auto url = parseURL(getStrAttr(input.attrs, "url"));
url.scheme = "hg+" + url.scheme;
if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
return url;
}
Attrs toAttrsInternal() const override
bool hasAllInfo(const Input & input) override
{
Attrs attrs;
attrs.emplace("url", url.to_string());
if (ref)
attrs.emplace("ref", *ref);
if (rev)
attrs.emplace("rev", rev->gitRev());
return attrs;
// FIXME: ugly, need to distinguish between dirty and clean
// default trees.
return input.getRef() == "default" || maybeGetIntAttr(input.attrs, "revCount");
}
std::pair<bool, std::string> getActualUrl() const
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
std::optional<Hash> rev) override
{
auto res(input);
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) res.attrs.insert_or_assign("ref", *ref);
return res;
}
std::optional<Path> getSourcePath(const Input & input) override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme == "file" && !input.getRef() && !input.getRev())
return url.path;
return {};
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
{
auto sourcePath = getSourcePath(input);
assert(sourcePath);
// FIXME: shut up if file is already tracked.
runProgram("hg", true,
{ "add", *sourcePath + "/" + std::string(file) });
if (commitMsg)
runProgram("hg", true,
{ "commit", *sourcePath + "/" + std::string(file), "-m", *commitMsg });
}
std::pair<bool, std::string> getActualUrl(const Input & input) const
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isLocal = url.scheme == "file";
return {isLocal, isLocal ? url.path : url.base};
}
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
auto name = "source";
auto input = std::make_shared<MercurialInput>(*this);
Input input(_input);
auto [isLocal, actualUrl_] = getActualUrl();
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
// FIXME: return lastModified.
// FIXME: don't clone local repositories.
if (!input->ref && !input->rev && isLocal && pathExists(actualUrl + "/.hg")) {
if (!input.getRef() && !input.getRev() && isLocal && pathExists(actualUrl + "/.hg")) {
bool clean = runProgram("hg", true, { "status", "-R", actualUrl, "--modified", "--added", "--removed" }) == "";
@ -94,7 +142,7 @@ struct MercurialInput : Input
if (settings.warnDirty)
warn("Mercurial tree '%s' is unclean", actualUrl);
input->ref = chomp(runProgram("hg", true, { "branch", "-R", actualUrl }));
input.attrs.insert_or_assign("ref", chomp(runProgram("hg", true, { "branch", "-R", actualUrl })));
auto files = tokenizeString<std::set<std::string>>(
runProgram("hg", true, { "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
@ -116,60 +164,54 @@ struct MercurialInput : Input
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
return {Tree {
.actualPath = store->printStorePath(storePath),
.storePath = std::move(storePath),
}, input};
return {
Tree(store->printStorePath(storePath), std::move(storePath)),
input
};
}
}
if (!input->ref) input->ref = "default";
if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
auto getImmutableAttrs = [&]()
{
return Attrs({
{"type", "hg"},
{"name", name},
{"rev", input->rev->gitRev()},
{"rev", input.getRev()->gitRev()},
});
};
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
-> std::pair<Tree, std::shared_ptr<const Input>>
-> std::pair<Tree, Input>
{
assert(input->rev);
assert(!rev || rev == input->rev);
assert(input.getRev());
assert(!_input.getRev() || _input.getRev() == input.getRev());
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
return {
Tree{
.actualPath = store->toRealPath(storePath),
.storePath = std::move(storePath),
.info = TreeInfo {
.revCount = getIntAttr(infoAttrs, "revCount"),
},
},
Tree(store->toRealPath(storePath), std::move(storePath)),
input
};
};
if (input->rev) {
if (input.getRev()) {
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
return makeResult(res->first, std::move(res->second));
}
assert(input->rev || input->ref);
auto revOrRef = input->rev ? input->rev->gitRev() : *input->ref;
auto revOrRef = input.getRev() ? input.getRev()->gitRev() : *input.getRef();
Attrs mutableAttrs({
{"type", "hg"},
{"name", name},
{"url", actualUrl},
{"ref", *input->ref},
{"ref", *input.getRef()},
});
if (auto res = getCache()->lookup(store, mutableAttrs)) {
auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
if (!rev || rev == rev2) {
input->rev = rev2;
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
}
}
@ -178,10 +220,10 @@ struct MercurialInput : Input
/* If this is a commit hash that we already have, we don't
have to pull again. */
if (!(input->rev
if (!(input.getRev()
&& pathExists(cacheDir)
&& runProgram(
RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
RunOptions("hg", { "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })
.killStderr(true)).second == "1"))
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
@ -210,9 +252,9 @@ struct MercurialInput : Input
runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
assert(tokens.size() == 3);
input->rev = Hash(tokens[0], htSHA1);
input.attrs.insert_or_assign("rev", Hash(tokens[0], htSHA1).gitRev());
auto revCount = std::stoull(tokens[1]);
input->ref = tokens[2];
input.attrs.insert_or_assign("ref", tokens[2]);
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
return makeResult(res->first, std::move(res->second));
@ -220,18 +262,18 @@ struct MercurialInput : Input
Path tmpDir = createTempDir();
AutoDelete delTmpDir(tmpDir, true);
runProgram("hg", true, { "archive", "-R", cacheDir, "-r", input->rev->gitRev(), tmpDir });
runProgram("hg", true, { "archive", "-R", cacheDir, "-r", input.getRev()->gitRev(), tmpDir });
deletePath(tmpDir + "/.hg_archival.txt");
auto storePath = store->addToStore(name, tmpDir);
Attrs infoAttrs({
{"rev", input->rev->gitRev()},
{"rev", input.getRev()->gitRev()},
{"revCount", (int64_t) revCount},
});
if (!this->rev)
if (!_input.getRev())
getCache()->add(
store,
mutableAttrs,
@ -250,54 +292,6 @@ struct MercurialInput : Input
}
};
struct MercurialInputScheme : InputScheme
{
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
{
if (url.scheme != "hg+http" &&
url.scheme != "hg+https" &&
url.scheme != "hg+ssh" &&
url.scheme != "hg+file") return nullptr;
auto url2(url);
url2.scheme = std::string(url2.scheme, 3);
url2.query.clear();
Attrs attrs;
attrs.emplace("type", "hg");
for (auto &[name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
else
url2.query.emplace(name, value);
}
attrs.emplace("url", url2.to_string());
return inputFromAttrs(attrs);
}
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
{
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev")
throw Error("unsupported Mercurial input attribute '%s'", name);
auto input = std::make_unique<MercurialInput>(parseURL(getStrAttr(attrs, "url")));
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (!std::regex_match(*ref, refRegex))
throw BadURL("invalid Mercurial branch/tag name '%s'", *ref);
input->ref = *ref;
}
if (auto rev = maybeGetStrAttr(attrs, "rev"))
input->rev = Hash(*rev, htSHA1);
return input;
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
}
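The ref-to-rev resolution above relies on tokenizing the output of `hg log ... --template "{node} {rev} {branch}"` into exactly three fields. Here is a minimal standalone sketch of that step, with a made-up hg output line and std::istringstream in place of nix's tokenizeString.
#include <cassert>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
int main()
{
    // Hypothetical output of the hg log template command run in fetch();
    // the node hash, rev number and branch are invented.
    std::string out = "3a7c0b4f9d2e1c8a7b6d5e4f3a2b1c0d9e8f7a6b 42 default";
    std::vector<std::string> tokens;
    std::istringstream in(out);
    for (std::string t; in >> t; ) tokens.push_back(t);
    assert(tokens.size() == 3);
    std::cout << "rev      = " << tokens[0] << "\n"
              << "revCount = " << std::stoull(tokens[1]) << "\n"
              << "ref      = " << tokens[2] << "\n";
}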

View File

@ -3,65 +3,86 @@
namespace nix::fetchers {
struct PathInput : Input
struct PathInputScheme : InputScheme
{
Path path;
/* Allow the user to pass in "fake" tree info attributes. This is
useful for making a pinned tree work the same as the repository
from which it is exported
(e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
std::optional<Hash> rev;
std::optional<uint64_t> revCount;
std::optional<time_t> lastModified;
std::string type() const override { return "path"; }
std::optional<Hash> getRev() const override { return rev; }
bool operator ==(const Input & other) const override
std::optional<Input> inputFromURL(const ParsedURL & url) override
{
auto other2 = dynamic_cast<const PathInput *>(&other);
return
other2
&& path == other2->path
&& rev == other2->rev
&& revCount == other2->revCount
&& lastModified == other2->lastModified;
if (url.scheme != "path") return {};
if (url.authority && *url.authority != "")
throw Error("path URL '%s' should not have an authority ('%s')", url.url, *url.authority);
Input input;
input.attrs.insert_or_assign("type", "path");
input.attrs.insert_or_assign("path", url.path);
for (auto & [name, value] : url.query)
if (name == "rev" || name == "narHash")
input.attrs.insert_or_assign(name, value);
else if (name == "revCount" || name == "lastModified") {
uint64_t n;
if (!string2Int(value, n))
throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
input.attrs.insert_or_assign(name, n);
}
else
throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
return input;
}
bool isImmutable() const override
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
{
return (bool) narHash;
if (maybeGetStrAttr(attrs, "type") != "path") return {};
getStrAttr(attrs, "path");
for (auto & [name, value] : attrs)
/* Allow the user to pass in "fake" tree info
attributes. This is useful for making a pinned tree
work the same as the repository from which it is exported
(e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path")
// checked in Input::fromAttrs
;
else
throw Error("unsupported path input attribute '%s'", name);
Input input;
input.attrs = attrs;
return input;
}
ParsedURL toURL() const override
ParsedURL toURL(const Input & input) override
{
auto query = attrsToQuery(toAttrsInternal());
auto query = attrsToQuery(input.attrs);
query.erase("path");
query.erase("type");
return ParsedURL {
.scheme = "path",
.path = path,
.path = getStrAttr(input.attrs, "path"),
.query = query,
};
}
Attrs toAttrsInternal() const override
bool hasAllInfo(const Input & input) override
{
Attrs attrs;
attrs.emplace("path", path);
if (rev)
attrs.emplace("rev", rev->gitRev());
if (revCount)
attrs.emplace("revCount", *revCount);
if (lastModified)
attrs.emplace("lastModified", *lastModified);
return attrs;
return true;
}
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
std::optional<Path> getSourcePath(const Input & input) override
{
auto input = std::make_shared<PathInput>(*this);
return getStrAttr(input.attrs, "path");
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
{
// nothing to do
}
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
auto path = getStrAttr(input.attrs, "path");
// FIXME: check whether access to 'path' is allowed.
@ -74,72 +95,10 @@ struct PathInput : Input
// FIXME: try to substitute storePath.
storePath = store->addToStore("source", path);
return
{
Tree {
.actualPath = store->toRealPath(*storePath),
.storePath = std::move(*storePath),
.info = TreeInfo {
.revCount = revCount,
.lastModified = lastModified
}
},
input
};
}
};
struct PathInputScheme : InputScheme
{
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
{
if (url.scheme != "path") return nullptr;
auto input = std::make_unique<PathInput>();
input->path = url.path;
for (auto & [name, value] : url.query)
if (name == "rev")
input->rev = Hash(value, htSHA1);
else if (name == "revCount") {
uint64_t revCount;
if (!string2Int(value, revCount))
throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
input->revCount = revCount;
}
else if (name == "lastModified") {
time_t lastModified;
if (!string2Int(value, lastModified))
throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
input->lastModified = lastModified;
}
else
throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
return input;
}
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
{
if (maybeGetStrAttr(attrs, "type") != "path") return {};
auto input = std::make_unique<PathInput>();
input->path = getStrAttr(attrs, "path");
for (auto & [name, value] : attrs)
if (name == "rev")
input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1);
else if (name == "revCount")
input->revCount = getIntAttr(attrs, "revCount");
else if (name == "lastModified")
input->lastModified = getIntAttr(attrs, "lastModified");
else if (name == "type" || name == "path")
;
else
throw Error("unsupported path input attribute '%s'", name);
return input;
return {
Tree(store->toRealPath(*storePath), std::move(*storePath)),
input
};
}
};
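The path scheme above accepts "fake" tree-info parameters (rev, revCount, lastModified) on the URL, and the integer ones are validated with string2Int. Below is a standalone sketch of that validation, with a simple std::stoull-based stand-in for string2Int and an invented query.
#include <cstdint>
#include <iostream>
#include <map>
#include <optional>
#include <string>
// Rough stand-in for nix's string2Int(); returns nothing on parse failure.
static std::optional<uint64_t> string2Int(const std::string & s)
{
    try {
        size_t pos;
        uint64_t n = std::stoull(s, &pos);
        if (pos != s.size()) return std::nullopt;
        return n;
    } catch (...) { return std::nullopt; }
}
int main()
{
    // Hypothetical query of a pinned path input, in the spirit of the comment above.
    std::map<std::string, std::string> query = {
        {"lastModified", "1585388205"},
        {"revCount", "123"},
    };
    for (auto & [name, value] : query) {
        if (name == "revCount" || name == "lastModified") {
            auto n = string2Int(value);
            if (!n) { std::cerr << "invalid parameter '" << name << "'\n"; return 1; }
            std::cout << name << " = " << *n << "\n";
        }
    }
}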

212
src/libfetchers/registry.cc Normal file
View File

@ -0,0 +1,212 @@
#include "registry.hh"
#include "fetchers.hh"
#include "util.hh"
#include "globals.hh"
#include "store-api.hh"
#include <nlohmann/json.hpp>
namespace nix::fetchers {
std::shared_ptr<Registry> Registry::read(
const Path & path, RegistryType type)
{
auto registry = std::make_shared<Registry>(type);
if (!pathExists(path))
return std::make_shared<Registry>(type);
try {
auto json = nlohmann::json::parse(readFile(path));
auto version = json.value("version", 0);
if (version == 2) {
for (auto & i : json["flakes"]) {
auto toAttrs = jsonToAttrs(i["to"]);
Attrs extraAttrs;
auto j = toAttrs.find("dir");
if (j != toAttrs.end()) {
extraAttrs.insert(*j);
toAttrs.erase(j);
}
auto exact = i.find("exact");
registry->entries.push_back(
Entry {
.from = Input::fromAttrs(jsonToAttrs(i["from"])),
.to = Input::fromAttrs(std::move(toAttrs)),
.extraAttrs = extraAttrs,
.exact = exact != i.end() && exact.value()
});
}
}
else
throw Error("flake registry '%s' has unsupported version %d", path, version);
} catch (nlohmann::json::exception & e) {
warn("cannot parse flake registry '%s': %s", path, e.what());
} catch (Error & e) {
warn("cannot read flake registry '%s': %s", path, e.what());
}
return registry;
}
void Registry::write(const Path & path)
{
nlohmann::json arr;
for (auto & entry : entries) {
nlohmann::json obj;
obj["from"] = attrsToJson(entry.from.toAttrs());
obj["to"] = attrsToJson(entry.to.toAttrs());
if (!entry.extraAttrs.empty())
obj["to"].update(attrsToJson(entry.extraAttrs));
if (entry.exact)
obj["exact"] = true;
arr.emplace_back(std::move(obj));
}
nlohmann::json json;
json["version"] = 2;
json["flakes"] = std::move(arr);
createDirs(dirOf(path));
writeFile(path, json.dump(2));
}
void Registry::add(
const Input & from,
const Input & to,
const Attrs & extraAttrs)
{
entries.emplace_back(
Entry {
.from = from,
.to = to,
.extraAttrs = extraAttrs
});
}
void Registry::remove(const Input & input)
{
// FIXME: use C++20 std::erase.
for (auto i = entries.begin(); i != entries.end(); )
if (i->from == input)
i = entries.erase(i);
else
++i;
}
static Path getSystemRegistryPath()
{
return settings.nixConfDir + "/registry.json";
}
static std::shared_ptr<Registry> getSystemRegistry()
{
static auto systemRegistry =
Registry::read(getSystemRegistryPath(), Registry::System);
return systemRegistry;
}
Path getUserRegistryPath()
{
return getHome() + "/.config/nix/registry.json";
}
std::shared_ptr<Registry> getUserRegistry()
{
static auto userRegistry =
Registry::read(getUserRegistryPath(), Registry::User);
return userRegistry;
}
static std::shared_ptr<Registry> flagRegistry =
std::make_shared<Registry>(Registry::Flag);
std::shared_ptr<Registry> getFlagRegistry()
{
return flagRegistry;
}
void overrideRegistry(
const Input & from,
const Input & to,
const Attrs & extraAttrs)
{
flagRegistry->add(from, to, extraAttrs);
}
static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
{
static auto reg = [&]() {
auto path = settings.flakeRegistry.get();
if (!hasPrefix(path, "/")) {
auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
store2->addPermRoot(storePath, getCacheDir() + "/nix/flake-registry.json", true);
path = store->toRealPath(storePath);
}
return Registry::read(path, Registry::Global);
}();
return reg;
}
Registries getRegistries(ref<Store> store)
{
Registries registries;
registries.push_back(getFlagRegistry());
registries.push_back(getUserRegistry());
registries.push_back(getSystemRegistry());
registries.push_back(getGlobalRegistry(store));
return registries;
}
std::pair<Input, Attrs> lookupInRegistries(
ref<Store> store,
const Input & _input)
{
Attrs extraAttrs;
int n = 0;
Input input(_input);
restart:
n++;
if (n > 100) throw Error("cycle detected in flake registry for '%s'", input.to_string());
for (auto & registry : getRegistries(store)) {
// FIXME: O(n)
for (auto & entry : registry->entries) {
if (entry.exact) {
if (entry.from == input) {
input = entry.to;
extraAttrs = entry.extraAttrs;
goto restart;
}
} else {
if (entry.from.contains(input)) {
input = entry.to.applyOverrides(
!entry.from.getRef() && input.getRef() ? input.getRef() : std::optional<std::string>(),
!entry.from.getRev() && input.getRev() ? input.getRev() : std::optional<Hash>());
extraAttrs = entry.extraAttrs;
goto restart;
}
}
}
}
if (!input.isDirect())
throw Error("cannot find flake '%s' in the flake registries", input.to_string());
debug("looked up '%s' -> '%s'", _input.to_string(), input.to_string());
return {input, extraAttrs};
}
}
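For orientation, Registry::read above expects a version-2 JSON document of the shape { "version": 2, "flakes": [ { "from": ..., "to": ..., "exact": ... } ] }. The sketch below parses a made-up registry of that shape with nlohmann::json, mirroring the version check and the optional "exact" lookup; it is an illustration, not a specification of the registry format.
#include <iostream>
#include <nlohmann/json.hpp>
int main()
{
    // An invented registry document in the version-2 shape read by Registry::read().
    auto json = nlohmann::json::parse(R"({
        "version": 2,
        "flakes": [
            {
                "from": { "type": "indirect", "id": "nixpkgs" },
                "to":   { "type": "github", "owner": "NixOS", "repo": "nixpkgs" },
                "exact": false
            }
        ]
    })");
    if (json.value("version", 0) != 2) {
        std::cerr << "unsupported registry version\n";
        return 1;
    }
    for (auto & i : json["flakes"]) {
        auto exact = i.find("exact");
        std::cout << "from " << i["from"].dump()
                  << " -> " << i["to"].dump()
                  << (exact != i.end() && exact.value().get<bool>() ? " (exact)" : "")
                  << "\n";
    }
}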

View File

@ -0,0 +1,64 @@
#pragma once
#include "types.hh"
#include "fetchers.hh"
namespace nix { class Store; }
namespace nix::fetchers {
struct Registry
{
enum RegistryType {
Flag = 0,
User = 1,
System = 2,
Global = 3,
};
RegistryType type;
struct Entry
{
Input from, to;
Attrs extraAttrs;
bool exact = false;
};
std::vector<Entry> entries;
Registry(RegistryType type)
: type(type)
{ }
static std::shared_ptr<Registry> read(
const Path & path, RegistryType type);
void write(const Path & path);
void add(
const Input & from,
const Input & to,
const Attrs & extraAttrs);
void remove(const Input & input);
};
typedef std::vector<std::shared_ptr<Registry>> Registries;
std::shared_ptr<Registry> getUserRegistry();
Path getUserRegistryPath();
Registries getRegistries(ref<Store> store);
void overrideRegistry(
const Input & from,
const Input & to,
const Attrs & extraAttrs);
std::pair<Input, Attrs> lookupInRegistries(
ref<Store> store,
const Input & input);
}

View File

@ -105,7 +105,7 @@ DownloadFileResult downloadFile(
};
}
Tree downloadTarball(
std::pair<Tree, time_t> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
@ -120,12 +120,9 @@ Tree downloadTarball(
auto cached = getCache()->lookupExpired(store, inAttrs);
if (cached && !cached->expired)
return Tree {
.actualPath = store->toRealPath(cached->storePath),
.storePath = std::move(cached->storePath),
.info = TreeInfo {
.lastModified = getIntAttr(cached->infoAttrs, "lastModified"),
},
return {
Tree(store->toRealPath(cached->storePath), std::move(cached->storePath)),
getIntAttr(cached->infoAttrs, "lastModified")
};
auto res = downloadFile(store, url, name, immutable);
@ -160,117 +157,72 @@ Tree downloadTarball(
*unpackedStorePath,
immutable);
return Tree {
.actualPath = store->toRealPath(*unpackedStorePath),
.storePath = std::move(*unpackedStorePath),
.info = TreeInfo {
.lastModified = lastModified,
},
return {
Tree(store->toRealPath(*unpackedStorePath), std::move(*unpackedStorePath)),
lastModified,
};
}
struct TarballInput : Input
{
ParsedURL url;
std::optional<Hash> hash;
TarballInput(const ParsedURL & url) : url(url)
{ }
std::string type() const override { return "tarball"; }
bool operator ==(const Input & other) const override
{
auto other2 = dynamic_cast<const TarballInput *>(&other);
return
other2
&& to_string() == other2->to_string()
&& hash == other2->hash;
}
bool isImmutable() const override
{
return hash || narHash;
}
ParsedURL toURL() const override
{
auto url2(url);
// NAR hashes are preferred over file hashes since tar/zip files
// don't have a canonical representation.
if (narHash)
url2.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
else if (hash)
url2.query.insert_or_assign("hash", hash->to_string(SRI, true));
return url2;
}
Attrs toAttrsInternal() const override
{
Attrs attrs;
attrs.emplace("url", url.to_string());
if (hash)
attrs.emplace("hash", hash->to_string(SRI, true));
return attrs;
}
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
{
auto tree = downloadTarball(store, url.to_string(), "source", false);
auto input = std::make_shared<TarballInput>(*this);
input->narHash = store->queryPathInfo(tree.storePath)->narHash;
return {std::move(tree), input};
}
};
struct TarballInputScheme : InputScheme
{
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
std::optional<Input> inputFromURL(const ParsedURL & url) override
{
if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return nullptr;
if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return {};
if (!hasSuffix(url.path, ".zip")
&& !hasSuffix(url.path, ".tar")
&& !hasSuffix(url.path, ".tar.gz")
&& !hasSuffix(url.path, ".tar.xz")
&& !hasSuffix(url.path, ".tar.bz2"))
return nullptr;
auto input = std::make_unique<TarballInput>(url);
auto hash = input->url.query.find("hash");
if (hash != input->url.query.end()) {
// FIXME: require SRI hash.
input->hash = Hash(hash->second);
input->url.query.erase(hash);
}
auto narHash = input->url.query.find("narHash");
if (narHash != input->url.query.end()) {
// FIXME: require SRI hash.
input->narHash = Hash(narHash->second);
input->url.query.erase(narHash);
}
return {};
Input input;
input.attrs.insert_or_assign("type", "tarball");
input.attrs.insert_or_assign("url", url.to_string());
auto narHash = url.query.find("narHash");
if (narHash != url.query.end())
input.attrs.insert_or_assign("narHash", narHash->second);
return input;
}
std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
std::optional<Input> inputFromAttrs(const Attrs & attrs) override
{
if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "hash")
if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash")
throw Error("unsupported tarball input attribute '%s'", name);
auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
if (auto hash = maybeGetStrAttr(attrs, "hash"))
input->hash = newHashAllowEmpty(*hash, {});
Input input;
input.attrs = attrs;
//input.immutable = (bool) maybeGetStrAttr(input.attrs, "hash");
return input;
}
ParsedURL toURL(const Input & input) override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
// NAR hashes are preferred over file hashes since tar/zip files
// don't have a canonical representation.
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
/*
else if (auto hash = maybeGetStrAttr(input.attrs, "hash"))
url.query.insert_or_assign("hash", Hash(*hash).to_string(SRI, true));
*/
return url;
}
bool hasAllInfo(const Input & input) override
{
return true;
}
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false).first;
return {std::move(tree), input};
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
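The tarball scheme above only claims file/http/https URLs whose path ends in a known archive extension. A standalone sketch of that check follows, with a local hasSuffix stand-in for nix's helper and made-up paths.
#include <iostream>
#include <string>
// Stand-in for nix's hasSuffix(), mirroring the extension check
// in TarballInputScheme::inputFromURL.
static bool hasSuffix(const std::string & s, const std::string & suffix)
{
    return s.size() >= suffix.size()
        && s.compare(s.size() - suffix.size(), suffix.size(), suffix) == 0;
}
static bool looksLikeTarball(const std::string & path)
{
    for (auto & ext : {".zip", ".tar", ".tar.gz", ".tar.xz", ".tar.bz2"})
        if (hasSuffix(path, ext)) return true;
    return false;
}
int main()
{
    for (std::string p : {"/src/foo.tar.gz", "/src/foo.git", "/src/foo.zip"})
        std::cout << p << " -> "
                  << (looksLikeTarball(p) ? "tarball input" : "not a tarball input") << "\n";
}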

View File

@ -1,14 +0,0 @@
#include "tree-info.hh"
#include "store-api.hh"
#include <nlohmann/json.hpp>
namespace nix::fetchers {
StorePath TreeInfo::computeStorePath(Store & store) const
{
assert(narHash);
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
}
}

View File

@ -1,29 +0,0 @@
#pragma once
#include "path.hh"
#include "hash.hh"
#include <nlohmann/json_fwd.hpp>
namespace nix { class Store; }
namespace nix::fetchers {
struct TreeInfo
{
std::optional<Hash> narHash;
std::optional<uint64_t> revCount;
std::optional<time_t> lastModified;
bool operator ==(const TreeInfo & other) const
{
return
narHash == other.narHash
&& revCount == other.revCount
&& lastModified == other.lastModified;
}
StorePath computeStorePath(Store & store) const;
};
}

View File

@ -34,9 +34,19 @@ MixCommonArgs::MixCommonArgs(const string & programName)
try {
globalConfig.set(name, value);
} catch (UsageError & e) {
warn(e.what());
if (!completions)
warn(e.what());
}
}},
.completer = [](size_t index, std::string_view prefix) {
if (index == 0) {
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (auto & s : settings)
if (hasPrefix(s.first, prefix))
completions->insert(s.first);
}
}
});
addFlag({

View File

@ -1,12 +1,13 @@
#include "loggers.hh"
#include "progress-bar.hh"
#include "util.hh"
namespace nix {
LogFormat defaultLogFormat = LogFormat::raw;
LogFormat parseLogFormat(const std::string & logFormatStr) {
if (logFormatStr == "raw")
if (logFormatStr == "raw" || getEnv("NIX_GET_COMPLETIONS"))
return LogFormat::raw;
else if (logFormatStr == "raw-with-logs")
return LogFormat::rawWithLogs;

View File

@ -9,7 +9,7 @@ struct Package {
Path path;
bool active;
int priority;
Package(Path path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
Package(const Path & path, bool active, int priority) : path{path}, active{active}, priority{priority} {}
};
typedef std::vector<Package> Packages;

View File

@ -22,6 +22,7 @@
#include <queue>
#include <random>
#include <thread>
#include <regex>
using namespace std::string_literals;
@ -56,7 +57,7 @@ struct curlFileTransfer : public FileTransfer
Callback<FileTransferResult> callback;
CURL * req = 0;
bool active = false; // whether the handle has been added to the multi object
std::string status;
std::string statusMsg;
unsigned int attempt = 0;
@ -175,12 +176,13 @@ struct curlFileTransfer : public FileTransfer
size_t realSize = size * nmemb;
std::string line((char *) contents, realSize);
printMsg(lvlVomit, format("got header for '%s': %s") % request.uri % trim(line));
if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
static std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)", std::regex::extended | std::regex::icase);
std::smatch match;
if (std::regex_match(line, match, statusLine)) {
result.etag = "";
auto ss = tokenizeString<vector<string>>(line, " ");
status = ss.size() >= 2 ? ss[1] : "";
result.data = std::make_shared<std::string>();
result.bodySize = 0;
statusMsg = trim(match[1]);
acceptRanges = false;
encoding = "";
} else {
@ -194,7 +196,9 @@ struct curlFileTransfer : public FileTransfer
the expected ETag on a 200 response, then shut
down the connection because we already have the
data. */
if (result.etag == request.expectedETag && status == "200") {
long httpStatus = 0;
curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
if (result.etag == request.expectedETag && httpStatus == 200) {
debug(format("shutting down on 200 HTTP response with expected ETag"));
return 0;
}
@ -413,8 +417,8 @@ struct curlFileTransfer : public FileTransfer
? FileTransferError(Interrupted, fmt("%s of '%s' was interrupted", request.verb(), request.uri))
: httpStatus != 0
? FileTransferError(err,
fmt("unable to %s '%s': HTTP error %d",
request.verb(), request.uri, httpStatus)
fmt("unable to %s '%s': HTTP error %d ('%s')",
request.verb(), request.uri, httpStatus, statusMsg)
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
)
: FileTransferError(err,
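The header callback above now detects the start of a response by matching the status line with a POSIX-extended regex and keeps the trimmed status message for the error text. Here is a standalone sketch of that match, with a local trim() stand-in and a hypothetical header line.
#include <iostream>
#include <regex>
#include <string>
// Minimal trim(), standing in for nix's helper.
static std::string trim(const std::string & s)
{
    auto b = s.find_first_not_of(" \t\r\n");
    if (b == std::string::npos) return "";
    auto e = s.find_last_not_of(" \t\r\n");
    return s.substr(b, e - b + 1);
}
int main()
{
    // Same pattern as the statusLine regex in the diff above.
    static std::regex statusLine("HTTP/[^ ]+ +[0-9]+(.*)",
        std::regex::extended | std::regex::icase);
    std::string line = "HTTP/1.1 404 Not Found"; // a made-up status line
    std::smatch match;
    if (std::regex_match(line, match, statusLine))
        std::cout << "status message: '" << trim(match[1]) << "'\n";
}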

View File

@ -368,6 +368,9 @@ public:
Setting<size_t> narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size",
"Maximum size of NARs before spilling them to disk."};
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."};
};

View File

@ -594,7 +594,7 @@ uint64_t LocalStore::addValidPath(State & state,
(concatStringsSep(" ", info.sigs), !info.sigs.empty())
(renderContentAddress(info.ca), (bool) info.ca)
.exec();
uint64_t id = sqlite3_last_insert_rowid(state.db);
uint64_t id = state.db.getLastInsertedRowId();
/* If this is a derivation, then store the derivation outputs in
the database. This is useful for the garbage collector: it can

View File

@ -61,3 +61,6 @@ $(d)/build.cc:
clean-files += $(d)/schema.sql.gen.hh
$(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644))
$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))

View File

@ -61,6 +61,11 @@ void SQLite::exec(const std::string & stmt)
});
}
uint64_t SQLite::getLastInsertedRowId()
{
return sqlite3_last_insert_rowid(db);
}
void SQLiteStmt::create(sqlite3 * db, const string & sql)
{
checkInterrupt();
@ -95,10 +100,10 @@ SQLiteStmt::Use::~Use()
sqlite3_reset(stmt);
}
SQLiteStmt::Use & SQLiteStmt::Use::operator () (const std::string & value, bool notNull)
SQLiteStmt::Use & SQLiteStmt::Use::operator () (std::string_view value, bool notNull)
{
if (notNull) {
if (sqlite3_bind_text(stmt, curArg++, value.c_str(), -1, SQLITE_TRANSIENT) != SQLITE_OK)
if (sqlite3_bind_text(stmt, curArg++, value.data(), -1, SQLITE_TRANSIENT) != SQLITE_OK)
throwSQLiteError(stmt.db, "binding argument");
} else
bind();
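getLastInsertedRowId() above is a thin wrapper over sqlite3_last_insert_rowid, so callers such as LocalStore::addValidPath no longer reach into the raw handle. The standalone sketch below shows the underlying SQLite call on an in-memory database; it is illustrative only.
// Compile with: g++ example.cc -lsqlite3
#include <cstdint>
#include <iostream>
#include <sqlite3.h>
int main()
{
    sqlite3 * db = nullptr;
    if (sqlite3_open(":memory:", &db) != SQLITE_OK) return 1;
    sqlite3_exec(db, "create table t (x text)", nullptr, nullptr, nullptr);
    sqlite3_exec(db, "insert into t values ('hello')", nullptr, nullptr, nullptr);
    // This is all the new wrapper does with the handle it owns.
    uint64_t id = sqlite3_last_insert_rowid(db);
    std::cout << "last inserted rowid: " << id << "\n";
    sqlite3_close(db);
}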

View File

@ -26,6 +26,8 @@ struct SQLite
void isCache();
void exec(const std::string & stmt);
uint64_t getLastInsertedRowId();
};
/* RAII wrapper to create and destroy SQLite prepared statements. */
@ -54,7 +56,7 @@ struct SQLiteStmt
~Use();
/* Bind the next parameter. */
Use & operator () (const std::string & value, bool notNull = true);
Use & operator () (std::string_view value, bool notNull = true);
Use & operator () (const unsigned char * data, size_t len, bool notNull = true);
Use & operator () (int64_t value, bool notNull = true);
Use & bind(); // null
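SQLite::getLastInsertedRowId() above is a thin wrapper around sqlite3_last_insert_rowid(), which reports the rowid of the most recent INSERT on the connection. A hypothetical standalone sketch of the underlying call (table and values invented for the example):

#include <sqlite3.h>
#include <cstdio>

int main()
{
    sqlite3 * db = nullptr;
    if (sqlite3_open(":memory:", &db) != SQLITE_OK) return 1;
    sqlite3_exec(db, "CREATE TABLE ValidPaths (path TEXT)", nullptr, nullptr, nullptr);
    sqlite3_exec(db, "INSERT INTO ValidPaths VALUES ('/nix/store/example')", nullptr, nullptr, nullptr);
    // The rowid assigned by the INSERT is kept on the connection object.
    std::printf("last inserted rowid: %lld\n", (long long) sqlite3_last_insert_rowid(db));
    sqlite3_close(db);
    return 0;
}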

View File

@ -1,6 +1,8 @@
#include "args.hh"
#include "hash.hh"
#include <glob.h>
namespace nix {
void Args::addFlag(Flag && flag_)
@ -13,6 +15,20 @@ void Args::addFlag(Flag && flag_)
if (flag->shortName) shortFlags[flag->shortName] = flag;
}
bool pathCompletions = false;
std::shared_ptr<std::set<std::string>> completions;
std::string completionMarker = "___COMPLETE___";
std::optional<std::string> needsCompletion(std::string_view s)
{
if (!completions) return {};
auto i = s.find(completionMarker);
if (i != std::string::npos)
return std::string(s.begin(), i);
return {};
}
void Args::parseCmdline(const Strings & _cmdline)
{
Strings pendingArgs;
@ -20,6 +36,14 @@ void Args::parseCmdline(const Strings & _cmdline)
Strings cmdline(_cmdline);
if (auto s = getEnv("NIX_GET_COMPLETIONS")) {
size_t n = std::stoi(*s);
assert(n > 0 && n <= cmdline.size());
*std::next(cmdline.begin(), n - 1) += completionMarker;
completions = std::make_shared<decltype(completions)::element_type>();
verbosity = lvlError;
}
for (auto pos = cmdline.begin(); pos != cmdline.end(); ) {
auto arg = *pos;
@ -63,7 +87,7 @@ void Args::printHelp(const string & programName, std::ostream & out)
for (auto & exp : expectedArgs) {
std::cout << renderLabels({exp.label});
// FIXME: handle arity > 1
if (exp.arity == 0) std::cout << "...";
if (exp.handler.arity == ArityAny) std::cout << "...";
if (exp.optional) std::cout << "?";
}
std::cout << "\n";
@ -99,18 +123,32 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
auto process = [&](const std::string & name, const Flag & flag) -> bool {
++pos;
std::vector<std::string> args;
bool anyCompleted = false;
for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
if (flag.handler.arity == ArityAny) break;
throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
}
if (flag.completer)
if (auto prefix = needsCompletion(*pos)) {
anyCompleted = true;
flag.completer(n, *prefix);
}
args.push_back(*pos++);
}
flag.handler.fun(std::move(args));
if (!anyCompleted)
flag.handler.fun(std::move(args));
return true;
};
if (string(*pos, 0, 2) == "--") {
if (auto prefix = needsCompletion(*pos)) {
for (auto & [name, flag] : longFlags) {
if (!hiddenCategories.count(flag->category)
&& hasPrefix(name, std::string(*prefix, 2)))
completions->insert("--" + name);
}
}
auto i = longFlags.find(string(*pos, 2));
if (i == longFlags.end()) return false;
return process("--" + i->first, *i->second);
@ -123,6 +161,14 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
return process(std::string("-") + c, *i->second);
}
if (auto prefix = needsCompletion(*pos)) {
if (prefix == "-") {
completions->insert("--");
for (auto & [flag, _] : shortFlags)
completions->insert(std::string("-") + flag);
}
}
return false;
}
@ -138,12 +184,17 @@ bool Args::processArgs(const Strings & args, bool finish)
bool res = false;
if ((exp.arity == 0 && finish) ||
(exp.arity > 0 && args.size() == exp.arity))
if ((exp.handler.arity == ArityAny && finish) ||
(exp.handler.arity != ArityAny && args.size() == exp.handler.arity))
{
std::vector<std::string> ss;
for (auto & s : args) ss.push_back(s);
exp.handler(std::move(ss));
for (const auto &[n, s] : enumerate(args)) {
ss.push_back(s);
if (exp.completer)
if (auto prefix = needsCompletion(s))
exp.completer(n, *prefix);
}
exp.handler.fun(ss);
expectedArgs.pop_front();
res = true;
}
@ -154,6 +205,13 @@ bool Args::processArgs(const Strings & args, bool finish)
return res;
}
static void hashTypeCompleter(size_t index, std::string_view prefix)
{
for (auto & type : hashTypes)
if (hasPrefix(type, prefix))
completions->insert(type);
}
Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
{
return Flag {
@ -162,7 +220,8 @@ Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
.labels = {"hash-algo"},
.handler = {[ht](std::string s) {
*ht = parseHashType(s);
}}
}},
.completer = hashTypeCompleter
};
}
@ -174,10 +233,42 @@ Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<
.labels = {"hash-algo"},
.handler = {[oht](std::string s) {
*oht = std::optional<HashType> { parseHashType(s) };
}}
}},
.completer = hashTypeCompleter
};
}
static void completePath(std::string_view prefix, bool onlyDirs)
{
pathCompletions = true;
glob_t globbuf;
int flags = GLOB_NOESCAPE | GLOB_TILDE;
#ifdef GLOB_ONLYDIR
if (onlyDirs)
flags |= GLOB_ONLYDIR;
#endif
if (glob((std::string(prefix) + "*").c_str(), flags, nullptr, &globbuf) == 0) {
for (size_t i = 0; i < globbuf.gl_pathc; ++i) {
if (onlyDirs) {
auto st = lstat(globbuf.gl_pathv[i]);
if (!S_ISDIR(st.st_mode)) continue;
}
completions->insert(globbuf.gl_pathv[i]);
}
globfree(&globbuf);
}
}
void completePath(size_t, std::string_view prefix)
{
completePath(prefix, false);
}
void completeDir(size_t, std::string_view prefix)
{
completePath(prefix, true);
}
Strings argvToStrings(int argc, char * * argv)
{
Strings args;
@ -225,18 +316,26 @@ void Command::printHelp(const string & programName, std::ostream & out)
MultiCommand::MultiCommand(const Commands & commands)
: commands(commands)
{
expectedArgs.push_back(ExpectedArg{"command", 1, true, [=](std::vector<std::string> ss) {
assert(!command);
auto cmd = ss[0];
if (auto alias = get(deprecatedAliases, cmd)) {
warn("'%s' is a deprecated alias for '%s'", cmd, *alias);
cmd = *alias;
}
auto i = commands.find(cmd);
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", cmd);
command = {cmd, i->second()};
}});
expectArgs({
.label = "command",
.optional = true,
.handler = {[=](std::string s) {
assert(!command);
if (auto alias = get(deprecatedAliases, s)) {
warn("'%s' is a deprecated alias for '%s'", s, *alias);
s = *alias;
}
if (auto prefix = needsCompletion(s)) {
for (auto & [name, command] : commands)
if (hasPrefix(name, *prefix))
completions->insert(name);
}
auto i = commands.find(s);
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", s);
command = {s, i->second()};
}}
});
categories[Command::catDefault] = "Available commands";
}
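The completion machinery added above works without a separate grammar: when NIX_GET_COMPLETIONS=<n> is set, the parser appends the ___COMPLETE___ marker to the n-th word, and every handler that spots the marker records candidates instead of acting on the value. A compilable miniature of that idea (the word list and candidate set are invented for the sketch):

#include <iostream>
#include <optional>
#include <set>
#include <string>
#include <string_view>
#include <vector>

static const std::string marker = "___COMPLETE___";

// Return the prefix typed so far if this word carries the completion marker.
static std::optional<std::string> needsCompletion(std::string_view s)
{
    auto i = s.find(marker);
    if (i == std::string_view::npos) return std::nullopt;
    return std::string(s.substr(0, i));
}

int main()
{
    // Pretend the shell asked to complete the second word of "--hash-algo sha".
    std::vector<std::string> words = {"--hash-algo", "sha" + marker};
    std::set<std::string> candidates = {"md5", "sha1", "sha256", "sha512"};
    std::set<std::string> completions;

    for (auto & w : words)
        if (auto prefix = needsCompletion(w))
            for (auto & c : candidates)
                if (c.compare(0, prefix->size(), *prefix) == 0)
                    completions.insert(c);

    for (auto & c : completions)
        std::cout << c << "\n";          // prints sha1, sha256, sha512
}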

View File

@ -8,8 +8,6 @@
namespace nix {
MakeError(UsageError, Error);
enum HashType : char;
class Args
@ -28,61 +26,67 @@ protected:
static const size_t ArityAny = std::numeric_limits<size_t>::max();
struct Handler
{
std::function<void(std::vector<std::string>)> fun;
size_t arity;
Handler() {}
Handler(std::function<void(std::vector<std::string>)> && fun)
: fun(std::move(fun))
, arity(ArityAny)
{ }
Handler(std::function<void()> && handler)
: fun([handler{std::move(handler)}](std::vector<std::string>) { handler(); })
, arity(0)
{ }
Handler(std::function<void(std::string)> && handler)
: fun([handler{std::move(handler)}](std::vector<std::string> ss) {
handler(std::move(ss[0]));
})
, arity(1)
{ }
Handler(std::function<void(std::string, std::string)> && handler)
: fun([handler{std::move(handler)}](std::vector<std::string> ss) {
handler(std::move(ss[0]), std::move(ss[1]));
})
, arity(2)
{ }
Handler(std::vector<std::string> * dest)
: fun([=](std::vector<std::string> ss) { *dest = ss; })
, arity(ArityAny)
{ }
template<class T>
Handler(T * dest)
: fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
, arity(1)
{ }
template<class T>
Handler(T * dest, const T & val)
: fun([=](std::vector<std::string> ss) { *dest = val; })
, arity(0)
{ }
};
/* Flags. */
struct Flag
{
typedef std::shared_ptr<Flag> ptr;
struct Handler
{
std::function<void(std::vector<std::string>)> fun;
size_t arity;
Handler() {}
Handler(std::function<void(std::vector<std::string>)> && fun)
: fun(std::move(fun))
, arity(ArityAny)
{ }
Handler(std::function<void()> && handler)
: fun([handler{std::move(handler)}](std::vector<std::string>) { handler(); })
, arity(0)
{ }
Handler(std::function<void(std::string)> && handler)
: fun([handler{std::move(handler)}](std::vector<std::string> ss) {
handler(std::move(ss[0]));
})
, arity(1)
{ }
Handler(std::function<void(std::string, std::string)> && handler)
: fun([handler{std::move(handler)}](std::vector<std::string> ss) {
handler(std::move(ss[0]), std::move(ss[1]));
})
, arity(2)
{ }
template<class T>
Handler(T * dest)
: fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
, arity(1)
{ }
template<class T>
Handler(T * dest, const T & val)
: fun([=](std::vector<std::string> ss) { *dest = val; })
, arity(0)
{ }
};
std::string longName;
char shortName = 0;
std::string description;
std::string category;
Strings labels;
Handler handler;
std::function<void(size_t, std::string_view)> completer;
static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
@ -99,9 +103,9 @@ protected:
struct ExpectedArg
{
std::string label;
size_t arity; // 0 = any
bool optional;
std::function<void(std::vector<std::string>)> handler;
bool optional = false;
Handler handler;
std::function<void(size_t, std::string_view)> completer;
};
std::list<ExpectedArg> expectedArgs;
@ -175,20 +179,28 @@ public:
});
}
void expectArgs(ExpectedArg && arg)
{
expectedArgs.emplace_back(std::move(arg));
}
/* Expect a string argument. */
void expectArg(const std::string & label, string * dest, bool optional = false)
{
expectedArgs.push_back(ExpectedArg{label, 1, optional, [=](std::vector<std::string> ss) {
*dest = ss[0];
}});
expectArgs({
.label = label,
.optional = true,
.handler = {dest}
});
}
/* Expect 0 or more arguments. */
void expectArgs(const std::string & label, std::vector<std::string> * dest)
{
expectedArgs.push_back(ExpectedArg{label, 0, false, [=](std::vector<std::string> ss) {
*dest = std::move(ss);
}});
expectArgs({
.label = label,
.handler = {dest}
});
}
friend class MultiCommand;
@ -259,4 +271,13 @@ typedef std::vector<std::pair<std::string, std::string>> Table2;
void printTable(std::ostream & out, const Table2 & table);
extern std::shared_ptr<std::set<std::string>> completions;
extern bool pathCompletions;
std::optional<std::string> needsCompletion(std::string_view s);
void completePath(size_t, std::string_view prefix);
void completeDir(size_t, std::string_view prefix);
}
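For readers skimming the header: each Handler constructor adapts a typed callback to the generic std::vector<std::string> signature and fixes the arity that Args::processFlag/processArgs enforce. A compilable miniature with just two of the overloads (reduced from the declaration above, not a drop-in replacement):

#include <cassert>
#include <functional>
#include <iostream>
#include <limits>
#include <string>
#include <utility>
#include <vector>

struct Handler
{
    std::function<void(std::vector<std::string>)> fun;
    size_t arity = std::numeric_limits<size_t>::max();   // "any number of arguments"

    // A unary callback fixes the arity at 1.
    Handler(std::function<void(std::string)> && h)
        : fun([h{std::move(h)}](std::vector<std::string> ss) { h(std::move(ss[0])); })
        , arity(1)
    { }

    // A pointer to a string stores the single argument directly.
    Handler(std::string * dest)
        : fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
        , arity(1)
    { }
};

int main()
{
    std::string outLink;
    Handler h(&outLink);            // the parser would now collect exactly one argument
    assert(h.arity == 1);
    h.fun({"./result"});
    std::cout << outLink << "\n";   // prints "./result"
}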

View File

@ -191,6 +191,7 @@ public:
}
MakeError(Error, BaseError);
MakeError(UsageError, Error);
class SysError : public Error
{

View File

@ -25,6 +25,9 @@ static size_t regularHashSize(HashType type) {
abort();
}
std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
void Hash::init()
{
hashSize = regularHashSize(type);

View File

@ -18,6 +18,8 @@ const int sha1HashSize = 20;
const int sha256HashSize = 32;
const int sha512HashSize = 64;
extern std::set<std::string> hashTypes;
extern const string base32Chars;
enum Base : int { Base64, Base32, Base16, SRI };
@ -119,6 +121,7 @@ Hash compressHash(const Hash & hash, unsigned int newSize);
/* Parse a string representing a hash type. */
HashType parseHashType(std::string_view s);
/* Will return nothing on parse error */
std::optional<HashType> parseHashTypeOpt(std::string_view s);

View File

@ -23,6 +23,7 @@
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/wait.h>
#include <sys/time.h>
#include <sys/un.h>
#include <unistd.h>
@ -79,7 +80,7 @@ void replaceEnv(std::map<std::string, std::string> newEnv)
}
Path absPath(Path path, std::optional<Path> dir)
Path absPath(Path path, std::optional<Path> dir, bool resolveSymlinks)
{
if (path[0] != '/') {
if (!dir) {
@ -100,7 +101,7 @@ Path absPath(Path path, std::optional<Path> dir)
}
path = *dir + "/" + path;
}
return canonPath(path);
return canonPath(path, resolveSymlinks);
}
@ -345,7 +346,6 @@ void writeFile(const Path & path, Source & source, mode_t mode)
}
}
string readLine(int fd)
{
string s;
@ -581,20 +581,31 @@ Paths createDirs(const Path & path)
}
void createSymlink(const Path & target, const Path & link)
void createSymlink(const Path & target, const Path & link,
std::optional<time_t> mtime)
{
if (symlink(target.c_str(), link.c_str()))
throw SysError("creating symlink from '%1%' to '%2%'", link, target);
if (mtime) {
struct timeval times[2];
times[0].tv_sec = *mtime;
times[0].tv_usec = 0;
times[1].tv_sec = *mtime;
times[1].tv_usec = 0;
if (lutimes(link.c_str(), times))
throw SysError("setting time of symlink '%s'", link);
}
}
void replaceSymlink(const Path & target, const Path & link)
void replaceSymlink(const Path & target, const Path & link,
std::optional<time_t> mtime)
{
for (unsigned int n = 0; true; n++) {
Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
try {
createSymlink(target, tmp);
createSymlink(target, tmp, mtime);
} catch (SysError & e) {
if (e.errNo == EEXIST) continue;
throw;
@ -1006,12 +1017,14 @@ std::vector<char *> stringsToCharPtrs(const Strings & ss)
return res;
}
// Output = "standard out" output stream
string runProgram(Path program, bool searchPath, const Strings & args,
const std::optional<std::string> & input)
{
RunOptions opts(program, args);
opts.searchPath = searchPath;
// This allows you to refer to a program with a pathname relative to the
// PATH variable.
opts.input = input;
auto res = runProgram(opts);
@ -1022,6 +1035,7 @@ string runProgram(Path program, bool searchPath, const Strings & args,
return res.second;
}
// Output = error code + "standard out" output stream
std::pair<int, std::string> runProgram(const RunOptions & options_)
{
RunOptions options(options_);
@ -1094,6 +1108,8 @@ void runProgram2(const RunOptions & options)
if (options.searchPath)
execvp(options.program.c_str(), stringsToCharPtrs(args_).data());
// This allows you to refer to a program with a pathname relative
// to the PATH variable.
else
execv(options.program.c_str(), stringsToCharPtrs(args_).data());
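Context for the new mtime parameter: lutimes() is the symlink-aware variant of utimes(), so the timestamps land on the link itself rather than on whatever it points to. A hypothetical standalone sketch (the paths are invented; the target does not have to exist):

#include <cstdio>
#include <sys/time.h>
#include <unistd.h>

int main()
{
    const char * target = "/nix/var/nix/profiles/default";
    const char * link = "./example-link";

    if (symlink(target, link) != 0) { perror("symlink"); return 1; }

    struct timeval times[2];
    times[0].tv_sec = 1; times[0].tv_usec = 0;   // access time
    times[1].tv_sec = 1; times[1].tv_usec = 0;   // modification time
    if (lutimes(link, times) != 0) { perror("lutimes"); return 1; }

    std::puts("created ./example-link with a fixed timestamp");
    return 0;
}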

View File

@ -49,7 +49,9 @@ void clearEnv();
/* Return an absolutized path, resolving paths relative to the
specified directory, or the current directory otherwise. The path
is also canonicalised. */
Path absPath(Path path, std::optional<Path> dir = {});
Path absPath(Path path,
std::optional<Path> dir = {},
bool resolveSymlinks = false);
/* Canonicalise a path by removing all `.' or `..' components and
double or trailing slashes. Optionally resolves all symlink
@ -147,10 +149,12 @@ Path getDataDir();
Paths createDirs(const Path & path);
/* Create a symlink. */
void createSymlink(const Path & target, const Path & link);
void createSymlink(const Path & target, const Path & link,
std::optional<time_t> mtime = {});
/* Atomically create or replace a symlink. */
void replaceSymlink(const Path & target, const Path & link);
void replaceSymlink(const Path & target, const Path & link,
std::optional<time_t> mtime = {});
/* Wrappers arount read()/write() that read/write exactly the

46 src/nix/app.cc Normal file
View File

@ -0,0 +1,46 @@
#include "installables.hh"
#include "store-api.hh"
#include "eval-inline.hh"
#include "eval-cache.hh"
#include "names.hh"
namespace nix {
App Installable::toApp(EvalState & state)
{
auto [cursor, attrPath] = getCursor(state, true);
auto type = cursor->getAttr("type")->getString();
if (type == "app") {
auto [program, context] = cursor->getAttr("program")->getStringWithContext();
if (!state.store->isInStore(program))
throw Error("app program '%s' is not in the Nix store", program);
std::vector<StorePathWithOutputs> context2;
for (auto & [path, name] : context)
context2.push_back({state.store->parseStorePath(path), {name}});
return App {
.context = std::move(context2),
.program = program,
};
}
else if (type == "derivation") {
auto drvPath = cursor->forceDerivation();
auto outPath = cursor->getAttr(state.sOutPath)->getString();
auto outputName = cursor->getAttr(state.sOutputName)->getString();
auto name = cursor->getAttr(state.sName)->getString();
return App {
.context = { { drvPath, {outputName} } },
.program = outPath + "/bin/" + DrvName(name).name,
};
}
else
throw Error("attribute '%s' has unsupported type '%s'", attrPath, type);
}
}

View File

@ -1,3 +1,4 @@
#include "eval.hh"
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
@ -17,6 +18,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
.description = "path of the symlink to the build result",
.labels = {"path"},
.handler = {&outLink},
.completer = completePath
});
addFlag({
@ -44,14 +46,14 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixProfile
},
Example{
"To make a profile point at GNU Hello:",
"nix build --profile /tmp/profile nixpkgs.hello"
"nix build --profile /tmp/profile nixpkgs#hello"
},
};
}
void run(ref<Store> store) override
{
auto buildables = build(store, dryRun ? DryRun : Build, installables);
auto buildables = build(store, dryRun ? Realise::Nothing : Realise::Outputs, installables);
if (dryRun) return;

View File

@ -25,7 +25,11 @@ struct CmdCatStore : StoreCommand, MixCat
{
CmdCatStore()
{
expectArg("path", &path);
expectArgs({
.label = "path",
.handler = {&path},
.completer = completePath
});
}
std::string description() override
@ -47,7 +51,11 @@ struct CmdCatNar : StoreCommand, MixCat
CmdCatNar()
{
expectArg("nar", &narPath);
expectArgs({
.label = "nar",
.handler = {&narPath},
.completer = completePath
});
expectArg("path", &path);
}

View File

@ -63,7 +63,7 @@ void StorePathsCommand::run(ref<Store> store)
}
else {
for (auto & p : toStorePaths(store, realiseMode, installables))
for (auto & p : toStorePaths(store, realiseMode, operateOn, installables))
storePaths.push_back(p);
if (recursive) {
@ -80,7 +80,7 @@ void StorePathsCommand::run(ref<Store> store)
void StorePathCommand::run(ref<Store> store)
{
auto storePaths = toStorePaths(store, NoBuild, installables);
auto storePaths = toStorePaths(store, Realise::Nothing, operateOn, installables);
if (storePaths.size() != 1)
throw UsageError("this command requires exactly one store path");
@ -108,6 +108,7 @@ MixProfile::MixProfile()
.description = "profile to update",
.labels = {"path"},
.handler = {&profile},
.completer = completePath
});
}

View File

@ -4,12 +4,18 @@
#include "args.hh"
#include "common-eval-args.hh"
#include "path.hh"
#include "eval.hh"
#include "flake/lockfile.hh"
#include <optional>
namespace nix {
extern std::string programPath;
class EvalState;
struct Pos;
class Store;
static constexpr Command::Category catSecondary = 100;
static constexpr Command::Category catUtility = 101;
static constexpr Command::Category catNixInstallation = 102;
@ -27,28 +33,64 @@ private:
std::shared_ptr<Store> _store;
};
struct SourceExprCommand : virtual StoreCommand, MixEvalArgs
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
Path file;
ref<EvalState> getEvalState();
std::shared_ptr<EvalState> evalState;
};
struct MixFlakeOptions : virtual Args, EvalCommand
{
flake::LockFlags lockFlags;
MixFlakeOptions();
virtual std::optional<FlakeRef> getFlakeRefForCompletion()
{ return {}; }
};
/* How to handle derivations in commands that operate on store paths. */
enum class OperateOn {
/* Operate on the output path. */
Output,
/* Operate on the .drv path. */
Derivation
};
struct SourceExprCommand : virtual Args, MixFlakeOptions
{
std::optional<Path> file;
std::optional<std::string> expr;
// FIXME: move this; not all commands (e.g. 'nix run') use it.
OperateOn operateOn = OperateOn::Output;
SourceExprCommand();
/* Return a value representing the Nix expression from which we
are installing. This is either the file specified by --file,
or an attribute set constructed from $NIX_PATH, e.g. { nixpkgs
= import ...; bla = import ...; }. */
Value * getSourceExpr(EvalState & state);
std::vector<std::shared_ptr<Installable>> parseInstallables(
ref<Store> store, std::vector<std::string> ss);
ref<EvalState> getEvalState();
std::shared_ptr<Installable> parseInstallable(
ref<Store> store, const std::string & installable);
private:
virtual Strings getDefaultFlakeAttrPaths();
std::shared_ptr<EvalState> evalState;
virtual Strings getDefaultFlakeAttrPathPrefixes();
RootValue vSourceExpr;
void completeInstallable(std::string_view prefix);
};
enum RealiseMode { Build, NoBuild, DryRun };
enum class Realise {
/* Build the derivation. Postcondition: the
derivation outputs exist. */
Outputs,
/* Don't build the derivation. Postcondition: the store derivation
exists. */
Derivation,
/* Evaluate in dry-run mode. Postcondition: nothing. */
Nothing
};
/* A command that operates on a list of "installables", which can be
store paths, attribute paths, Nix expressions, etc. */
@ -56,15 +98,14 @@ struct InstallablesCommand : virtual Args, SourceExprCommand
{
std::vector<std::shared_ptr<Installable>> installables;
InstallablesCommand()
{
expectArgs("installables", &_installables);
}
InstallablesCommand();
void prepare() override;
virtual bool useDefaultInstallables() { return true; }
std::optional<FlakeRef> getFlakeRefForCompletion() override;
private:
std::vector<std::string> _installables;
@ -75,16 +116,18 @@ struct InstallableCommand : virtual Args, SourceExprCommand
{
std::shared_ptr<Installable> installable;
InstallableCommand()
{
expectArg("installable", &_installable);
}
InstallableCommand();
void prepare() override;
std::optional<FlakeRef> getFlakeRefForCompletion() override
{
return parseFlakeRef(_installable, absPath("."));
}
private:
std::string _installable;
std::string _installable{"."};
};
/* A command that operates on zero or more store paths. */
@ -97,7 +140,7 @@ private:
protected:
RealiseMode realiseMode = NoBuild;
Realise realiseMode = Realise::Derivation;
public:
@ -141,17 +184,15 @@ static RegisterCommand registerCommand(const std::string & name)
return RegisterCommand(name, [](){ return make_ref<T>(); });
}
std::shared_ptr<Installable> parseInstallable(
SourceExprCommand & cmd, ref<Store> store, const std::string & installable,
bool useDefaultInstallables);
Buildables build(ref<Store> store, RealiseMode mode,
Buildables build(ref<Store> store, Realise mode,
std::vector<std::shared_ptr<Installable>> installables);
std::set<StorePath> toStorePaths(ref<Store> store, RealiseMode mode,
std::set<StorePath> toStorePaths(ref<Store> store,
Realise mode, OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables);
StorePath toStorePath(ref<Store> store, RealiseMode mode,
StorePath toStorePath(ref<Store> store,
Realise mode, OperateOn operateOn,
std::shared_ptr<Installable> installable);
std::set<StorePath> toDerivations(ref<Store> store,
@ -194,4 +235,13 @@ struct MixEnvironment : virtual Args {
void setEnviron();
};
void completeFlakeRef(ref<Store> store, std::string_view prefix);
void completeFlakeRefWithFragment(
ref<EvalState> evalState,
flake::LockFlags lockFlags,
Strings attrPathPrefixes,
const Strings & defaultFlakeAttrPaths,
std::string_view prefix);
}

View File

@ -45,6 +45,8 @@ struct CmdCopy : StorePathsCommand
.description = "whether to try substitutes on the destination store (only supported by SSH)",
.handler = {&substitute, Substitute},
});
realiseMode = Realise::Outputs;
}
std::string description() override
@ -70,11 +72,11 @@ struct CmdCopy : StorePathsCommand
#ifdef ENABLE_S3
Example{
"To copy Hello to an S3 binary cache:",
"nix copy --to s3://my-bucket?region=eu-west-1 nixpkgs.hello"
"nix copy --to s3://my-bucket?region=eu-west-1 nixpkgs#hello"
},
Example{
"To copy Hello to an S3-compatible binary cache:",
"nix copy --to s3://my-bucket?region=eu-west-1&endpoint=example.com nixpkgs.hello"
"nix copy --to s3://my-bucket?region=eu-west-1&endpoint=example.com nixpkgs#hello"
},
#endif
};
@ -87,11 +89,16 @@ struct CmdCopy : StorePathsCommand
return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
}
void run(ref<Store> srcStore, StorePaths storePaths) override
void run(ref<Store> store) override
{
if (srcUri.empty() && dstUri.empty())
throw UsageError("you must pass '--from' and/or '--to'");
StorePathsCommand::run(store);
}
void run(ref<Store> srcStore, StorePaths storePaths) override
{
ref<Store> dstStore = dstUri.empty() ? openStore() : openStore(dstUri);
copyPaths(srcStore, dstStore, StorePathSet(storePaths.begin(), storePaths.end()),

View File

@ -130,7 +130,9 @@ StorePath getDerivationEnvironment(ref<Store> store, const StorePath & drvPath)
drvName += "-env";
for (auto & output : drv.outputs)
drv.env.erase(output.first);
drv.outputs = {{"out", DerivationOutput { .path = StorePath::dummy }}};
drv.env["out"] = "";
drv.env["_outputs_saved"] = drv.env["outputs"];
drv.env["outputs"] = "out";
drv.inputSrcs.insert(std::move(getEnvShPath));
Hash h = hashDerivationModulo(*store, drv, true);
@ -201,6 +203,11 @@ struct Common : InstallableCommand, MixProfile
out << "eval \"$shellHook\"\n";
}
Strings getDefaultFlakeAttrPaths() override
{
return {"devShell." + settings.thisSystem.get(), "defaultPackage." + settings.thisSystem.get()};
}
StorePath getShellOutPath(ref<Store> store)
{
auto path = installable->getStorePath();
@ -259,11 +266,15 @@ struct CmdDevelop : Common, MixEnvironment
return {
Example{
"To get the build environment of GNU hello:",
"nix develop nixpkgs.hello"
"nix develop nixpkgs#hello"
},
Example{
"To get the build environment of the default package of flake in the current directory:",
"nix develop"
},
Example{
"To store the build environment in a profile:",
"nix develop --profile /tmp/my-shell nixpkgs.hello"
"nix develop --profile /tmp/my-shell nixpkgs#hello"
},
Example{
"To use a build environment previously recorded in a profile:",
@ -294,12 +305,28 @@ struct CmdDevelop : Common, MixEnvironment
stopProgressBar();
auto shell = getEnv("SHELL").value_or("bash");
setEnviron();
// prevent garbage collection until shell exits
setenv("NIX_GCROOT", gcroot.data(), 1);
Path shell = "bash";
try {
auto state = getEvalState();
auto bashInstallable = std::make_shared<InstallableFlake>(
state,
std::move(installable->nixpkgsFlakeRef()),
Strings{"bashInteractive"},
Strings{"legacyPackages." + settings.thisSystem.get() + "."},
lockFlags);
shell = state->store->printStorePath(
toStorePath(state->store, Realise::Outputs, OperateOn::Output, bashInstallable)) + "/bin/bash";
} catch (Error &) {
ignoreException();
}
auto args = Strings{std::string(baseNameOf(shell)), "--rcfile", rcFilePath};
restoreAffinity();
@ -323,7 +350,7 @@ struct CmdPrintDevEnv : Common
return {
Example{
"To apply the build environment of GNU hello to the current shell:",
". <(nix print-dev-env nixpkgs.hello)"
". <(nix print-dev-env nixpkgs#hello)"
},
};
}

134 src/nix/diff-closures.cc Normal file
View File

@ -0,0 +1,134 @@
#include "command.hh"
#include "shared.hh"
#include "store-api.hh"
#include "common-args.hh"
#include "names.hh"
#include <regex>
using namespace nix;
struct Info
{
std::string outputName;
};
// name -> version -> store paths
typedef std::map<std::string, std::map<std::string, std::map<StorePath, Info>>> GroupedPaths;
GroupedPaths getClosureInfo(ref<Store> store, const StorePath & toplevel)
{
StorePathSet closure;
store->computeFSClosure({toplevel}, closure);
GroupedPaths groupedPaths;
for (auto & path : closure) {
/* Strip the output name. Unfortunately this is ambiguous (we
can't distinguish between output names like "bin" and
version suffixes like "unstable"). */
static std::regex regex("(.*)-([a-z]+|lib32|lib64)");
std::smatch match;
std::string name(path.name());
std::string outputName;
if (std::regex_match(name, match, regex)) {
name = match[1];
outputName = match[2];
}
DrvName drvName(name);
groupedPaths[drvName.name][drvName.version].emplace(path, Info { .outputName = outputName });
}
return groupedPaths;
}
std::string showVersions(const std::set<std::string> & versions)
{
if (versions.empty()) return "";
std::set<std::string> versions2;
for (auto & version : versions)
versions2.insert(version.empty() ? "ε" : version);
return concatStringsSep(", ", versions2);
}
struct CmdDiffClosures : SourceExprCommand
{
std::string _before, _after;
CmdDiffClosures()
{
expectArg("before", &_before);
expectArg("after", &_after);
}
std::string description() override
{
return "show what packages and versions were added and removed between two closures";
}
Category category() override { return catSecondary; }
Examples examples() override
{
return {
{
"To show what got added and removed between two versions of the NixOS system profile:",
"nix diff-closures /nix/var/nix/profiles/system-655-link /nix/var/nix/profiles/system-658-link",
},
};
}
void run(ref<Store> store) override
{
auto before = parseInstallable(store, _before);
auto beforePath = toStorePath(store, Realise::Outputs, operateOn, before);
auto after = parseInstallable(store, _after);
auto afterPath = toStorePath(store, Realise::Outputs, operateOn, after);
auto beforeClosure = getClosureInfo(store, beforePath);
auto afterClosure = getClosureInfo(store, afterPath);
std::set<std::string> allNames;
for (auto & [name, _] : beforeClosure) allNames.insert(name);
for (auto & [name, _] : afterClosure) allNames.insert(name);
for (auto & name : allNames) {
auto & beforeVersions = beforeClosure[name];
auto & afterVersions = afterClosure[name];
auto totalSize = [&](const std::map<std::string, std::map<StorePath, Info>> & versions)
{
uint64_t sum = 0;
for (auto & [_, paths] : versions)
for (auto & [path, _] : paths)
sum += store->queryPathInfo(path)->narSize;
return sum;
};
auto beforeSize = totalSize(beforeVersions);
auto afterSize = totalSize(afterVersions);
auto sizeDelta = (int64_t) afterSize - (int64_t) beforeSize;
auto showDelta = abs(sizeDelta) >= 8 * 1024;
std::set<std::string> removed, unchanged;
for (auto & [version, _] : beforeVersions)
if (!afterVersions.count(version)) removed.insert(version); else unchanged.insert(version);
std::set<std::string> added;
for (auto & [version, _] : afterVersions)
if (!beforeVersions.count(version)) added.insert(version);
if (showDelta || !removed.empty() || !added.empty()) {
std::vector<std::string> items;
if (!removed.empty() || !added.empty())
items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added)));
if (showDelta)
items.push_back(fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0));
std::cout << fmt("%s: %s\n", name, concatStringsSep(", ", items));
}
}
}
};
static auto r1 = registerCommand<CmdDiffClosures>("diff-closures");
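As the comment in getClosureInfo notes, splitting an output name off a store-path name is ambiguous: the trailing component is treated as an output name only when it is all lowercase letters (or lib32/lib64), so a suffix like "unstable" is indistinguishable from an output. A small standalone check of how the split behaves (the package names are invented):

#include <iostream>
#include <regex>
#include <string>

int main()
{
    static std::regex regex("(.*)-([a-z]+|lib32|lib64)");

    for (std::string name : {"openssl-1.1.1g-bin", "hello-2.10", "nixpkgs-unstable"}) {
        std::smatch match;
        std::string outputName;
        if (std::regex_match(name, match, regex)) {
            outputName = match[2];   // copy the submatches out before overwriting 'name'
            name = match[1];
        }
        std::cout << name << " / "
                  << (outputName.empty() ? "(no output name)" : outputName) << "\n";
    }
    // openssl-1.1.1g / bin
    // hello-2.10 / (no output name)
    // nixpkgs / unstable   <- the ambiguous case
}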

View File

@ -20,7 +20,7 @@ struct CmdEdit : InstallableCommand
return {
Example{
"To open the Nix expression of the GNU Hello package:",
"nix edit nixpkgs.hello"
"nix edit nixpkgs#hello"
},
};
}

View File

@ -12,10 +12,18 @@ using namespace nix;
struct CmdEval : MixJSON, InstallableCommand
{
bool raw = false;
std::optional<std::string> apply;
CmdEval()
{
mkFlag(0, "raw", "print strings unquoted", &raw);
addFlag({
.longName = "apply",
.description = "apply a function to each argument",
.labels = {"expr"},
.handler = {&apply},
});
}
std::string description() override
@ -26,21 +34,25 @@ struct CmdEval : MixJSON, InstallableCommand
Examples examples() override
{
return {
Example{
{
"To evaluate a Nix expression given on the command line:",
"nix eval '(1 + 2)'"
"nix eval --expr '1 + 2'"
},
Example{
{
"To evaluate a Nix expression from a file or URI:",
"nix eval -f channel:nixos-17.09 hello.name"
"nix eval -f ./my-nixpkgs hello.name"
},
Example{
{
"To get the current version of Nixpkgs:",
"nix eval --raw nixpkgs.lib.version"
"nix eval --raw nixpkgs#lib.version"
},
Example{
{
"To print the store path of the Hello package:",
"nix eval --raw nixpkgs.hello"
"nix eval --raw nixpkgs#hello"
},
{
"To get a list of checks in the 'nix' flake:",
"nix eval nix#checks.x86_64-linux --apply builtins.attrNames"
},
};
}
@ -57,6 +69,14 @@ struct CmdEval : MixJSON, InstallableCommand
auto v = installable->toValue(*state).first;
PathSet context;
if (apply) {
auto vApply = state->allocValue();
state->eval(state->parseExprFromString(*apply, absPath(".")), *vApply);
auto vRes = state->allocValue();
state->callFunction(*vApply, *v, *vRes, noPos);
v = vRes;
}
if (raw) {
stopProgressBar();
std::cout << state->coerceToString(noPos, *v, context);

955 src/nix/flake.cc Normal file
View File

@ -0,0 +1,955 @@
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "flake/flake.hh"
#include "get-drvs.hh"
#include "store-api.hh"
#include "derivations.hh"
#include "attr-path.hh"
#include "fetchers.hh"
#include "registry.hh"
#include "json.hh"
#include "eval-cache.hh"
#include <nlohmann/json.hpp>
#include <queue>
#include <iomanip>
using namespace nix;
using namespace nix::flake;
class FlakeCommand : virtual Args, public MixFlakeOptions
{
std::string flakeUrl = ".";
public:
FlakeCommand()
{
expectArgs({
.label = "flake-url",
.optional = true,
.handler = {&flakeUrl},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRef(getStore(), prefix);
}}
});
}
FlakeRef getFlakeRef()
{
return parseFlakeRef(flakeUrl, absPath(".")); //FIXME
}
Flake getFlake()
{
auto evalState = getEvalState();
return flake::getFlake(*evalState, getFlakeRef(), lockFlags.useRegistries);
}
LockedFlake lockFlake()
{
return flake::lockFlake(*getEvalState(), getFlakeRef(), lockFlags);
}
std::optional<FlakeRef> getFlakeRefForCompletion() override
{
return getFlakeRef();
}
};
static void printFlakeInfo(const Store & store, const Flake & flake)
{
logger->stdout("Resolved URL: %s", flake.resolvedRef.to_string());
logger->stdout("Locked URL: %s", flake.lockedRef.to_string());
if (flake.description)
logger->stdout("Description: %s", *flake.description);
logger->stdout("Path: %s", store.printStorePath(flake.sourceInfo->storePath));
if (auto rev = flake.lockedRef.input.getRev())
logger->stdout("Revision: %s", rev->to_string(Base16, false));
if (auto revCount = flake.lockedRef.input.getRevCount())
logger->stdout("Revisions: %s", *revCount);
if (auto lastModified = flake.lockedRef.input.getLastModified())
logger->stdout("Last modified: %s",
std::put_time(std::localtime(&*lastModified), "%F %T"));
}
static nlohmann::json flakeToJson(const Store & store, const Flake & flake)
{
nlohmann::json j;
if (flake.description)
j["description"] = *flake.description;
j["originalUrl"] = flake.originalRef.to_string();
j["original"] = attrsToJson(flake.originalRef.toAttrs());
j["resolvedUrl"] = flake.resolvedRef.to_string();
j["resolved"] = attrsToJson(flake.resolvedRef.toAttrs());
j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
j["locked"] = attrsToJson(flake.lockedRef.toAttrs());
if (auto rev = flake.lockedRef.input.getRev())
j["revision"] = rev->to_string(Base16, false);
if (auto revCount = flake.lockedRef.input.getRevCount())
j["revCount"] = *revCount;
if (auto lastModified = flake.lockedRef.input.getLastModified())
j["lastModified"] = *lastModified;
j["path"] = store.printStorePath(flake.sourceInfo->storePath);
return j;
}
struct CmdFlakeUpdate : FlakeCommand
{
std::string description() override
{
return "update flake lock file";
}
void run(nix::ref<nix::Store> store) override
{
/* Use --refresh by default for 'nix flake update'. */
settings.tarballTtl = 0;
lockFlake();
}
};
static void enumerateOutputs(EvalState & state, Value & vFlake,
std::function<void(const std::string & name, Value & vProvide, const Pos & pos)> callback)
{
state.forceAttrs(vFlake);
auto aOutputs = vFlake.attrs->get(state.symbols.create("outputs"));
assert(aOutputs);
state.forceAttrs(*aOutputs->value);
for (auto & attr : *aOutputs->value->attrs)
callback(attr.name, *attr.value, *attr.pos);
}
struct CmdFlakeInfo : FlakeCommand, MixJSON
{
std::string description() override
{
return "list info about a given flake";
}
void run(nix::ref<nix::Store> store) override
{
auto flake = getFlake();
if (json) {
auto json = flakeToJson(*store, flake);
logger->stdout("%s", json.dump());
} else
printFlakeInfo(*store, flake);
}
};
struct CmdFlakeListInputs : FlakeCommand, MixJSON
{
std::string description() override
{
return "list flake inputs";
}
void run(nix::ref<nix::Store> store) override
{
auto flake = lockFlake();
if (json)
logger->stdout("%s", flake.lockFile.toJson());
else {
logger->stdout("%s", flake.flake.lockedRef);
std::unordered_set<std::shared_ptr<Node>> visited;
std::function<void(const Node & node, const std::string & prefix)> recurse;
recurse = [&](const Node & node, const std::string & prefix)
{
for (const auto & [i, input] : enumerate(node.inputs)) {
bool last = i + 1 == node.inputs.size();
if (auto lockedNode = std::get_if<0>(&input.second)) {
logger->stdout("%s" ANSI_BOLD "%s" ANSI_NORMAL ": %s",
prefix + (last ? treeLast : treeConn), input.first,
*lockedNode ? (*lockedNode)->lockedRef : flake.flake.lockedRef);
bool firstVisit = visited.insert(*lockedNode).second;
if (firstVisit) recurse(**lockedNode, prefix + (last ? treeNull : treeLine));
} else if (auto follows = std::get_if<1>(&input.second)) {
logger->stdout("%s" ANSI_BOLD "%s" ANSI_NORMAL " follows input '%s'",
prefix + (last ? treeLast : treeConn), input.first,
printInputPath(*follows));
}
}
};
visited.insert(flake.lockFile.root);
recurse(*flake.lockFile.root, "");
}
}
};
struct CmdFlakeCheck : FlakeCommand
{
bool build = true;
CmdFlakeCheck()
{
addFlag({
.longName = "no-build",
.description = "do not build checks",
.handler = {&build, false}
});
}
std::string description() override
{
return "check whether the flake evaluates and run its tests";
}
void run(nix::ref<nix::Store> store) override
{
settings.readOnlyMode = !build;
auto state = getEvalState();
auto flake = lockFlake();
// FIXME: rewrite to use EvalCache.
auto checkSystemName = [&](const std::string & system, const Pos & pos) {
// FIXME: what's the format of "system"?
if (system.find('-') == std::string::npos)
throw Error("'%s' is not a valid system type, at %s", system, pos);
};
auto checkDerivation = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
auto drvInfo = getDerivation(*state, v, false);
if (!drvInfo)
throw Error("flake attribute '%s' is not a derivation", attrPath);
// FIXME: check meta attributes
return store->parseStorePath(drvInfo->queryDrvPath());
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the derivation '%s'", attrPath));
throw;
}
};
std::vector<StorePathWithOutputs> drvPaths;
auto checkApp = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
#if 0
// FIXME
auto app = App(*state, v);
for (auto & i : app.context) {
auto [drvPathS, outputName] = decodeContext(i);
store->parseStorePath(drvPathS);
}
#endif
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the app definition '%s'", attrPath));
throw;
}
};
auto checkOverlay = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
state->forceValue(v, pos);
if (v.type != tLambda || v.lambda.fun->matchAttrs || std::string(v.lambda.fun->arg) != "final")
throw Error("overlay does not take an argument named 'final'");
auto body = dynamic_cast<ExprLambda *>(v.lambda.fun->body);
if (!body || body->matchAttrs || std::string(body->arg) != "prev")
throw Error("overlay does not take an argument named 'prev'");
// FIXME: if we have a 'nixpkgs' input, use it to
// evaluate the overlay.
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the overlay '%s'", attrPath));
throw;
}
};
auto checkModule = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
state->forceValue(v, pos);
if (v.type == tLambda) {
if (!v.lambda.fun->matchAttrs || !v.lambda.fun->formals->ellipsis)
throw Error("module must match an open attribute set ('{ config, ... }')");
} else if (v.type == tAttrs) {
for (auto & attr : *v.attrs)
try {
state->forceValue(*attr.value, *attr.pos);
} catch (Error & e) {
e.addTrace(*attr.pos, hintfmt("while evaluating the option '%s'", attr.name));
throw;
}
} else
throw Error("module must be a function or an attribute set");
// FIXME: if we have a 'nixpkgs' input, use it to
// check the module.
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the NixOS module '%s'", attrPath));
throw;
}
};
std::function<void(const std::string & attrPath, Value & v, const Pos & pos)> checkHydraJobs;
checkHydraJobs = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
state->forceAttrs(v, pos);
if (state->isDerivation(v))
throw Error("jobset should not be a derivation at top-level");
for (auto & attr : *v.attrs) {
state->forceAttrs(*attr.value, *attr.pos);
if (!state->isDerivation(*attr.value))
checkHydraJobs(attrPath + "." + (std::string) attr.name,
*attr.value, *attr.pos);
}
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the Hydra jobset '%s'", attrPath));
throw;
}
};
auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
Activity act(*logger, lvlChatty, actUnknown,
fmt("checking NixOS configuration '%s'", attrPath));
Bindings & bindings(*state->allocBindings(0));
auto vToplevel = findAlongAttrPath(*state, "config.system.build.toplevel", bindings, v).first;
state->forceAttrs(*vToplevel, pos);
if (!state->isDerivation(*vToplevel))
throw Error("attribute 'config.system.build.toplevel' is not a derivation");
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the NixOS configuration '%s'", attrPath));
throw;
}
};
auto checkTemplate = [&](const std::string & attrPath, Value & v, const Pos & pos) {
try {
Activity act(*logger, lvlChatty, actUnknown,
fmt("checking template '%s'", attrPath));
state->forceAttrs(v, pos);
if (auto attr = v.attrs->get(state->symbols.create("path"))) {
if (attr->name == state->symbols.create("path")) {
PathSet context;
auto path = state->coerceToPath(*attr->pos, *attr->value, context);
if (!store->isInStore(path))
throw Error("template '%s' has a bad 'path' attribute");
// TODO: recursively check the flake in 'path'.
}
} else
throw Error("template '%s' lacks attribute 'path'", attrPath);
if (auto attr = v.attrs->get(state->symbols.create("description")))
state->forceStringNoCtx(*attr->value, *attr->pos);
else
throw Error("template '%s' lacks attribute 'description'", attrPath);
for (auto & attr : *v.attrs) {
std::string name(attr.name);
if (name != "path" && name != "description")
throw Error("template '%s' has unsupported attribute '%s'", attrPath, name);
}
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking the template '%s'", attrPath));
throw;
}
};
{
Activity act(*logger, lvlInfo, actUnknown, "evaluating flake");
auto vFlake = state->allocValue();
flake::callFlake(*state, flake, *vFlake);
enumerateOutputs(*state,
*vFlake,
[&](const std::string & name, Value & vOutput, const Pos & pos) {
Activity act(*logger, lvlChatty, actUnknown,
fmt("checking flake output '%s'", name));
try {
state->forceValue(vOutput, pos);
if (name == "checks") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {
checkSystemName(attr.name, *attr.pos);
state->forceAttrs(*attr.value, *attr.pos);
for (auto & attr2 : *attr.value->attrs) {
auto drvPath = checkDerivation(
fmt("%s.%s.%s", name, attr.name, attr2.name),
*attr2.value, *attr2.pos);
if ((std::string) attr.name == settings.thisSystem.get())
drvPaths.push_back({drvPath});
}
}
}
else if (name == "packages") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {
checkSystemName(attr.name, *attr.pos);
state->forceAttrs(*attr.value, *attr.pos);
for (auto & attr2 : *attr.value->attrs)
checkDerivation(
fmt("%s.%s.%s", name, attr.name, attr2.name),
*attr2.value, *attr2.pos);
}
}
else if (name == "apps") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {
checkSystemName(attr.name, *attr.pos);
state->forceAttrs(*attr.value, *attr.pos);
for (auto & attr2 : *attr.value->attrs)
checkApp(
fmt("%s.%s.%s", name, attr.name, attr2.name),
*attr2.value, *attr2.pos);
}
}
else if (name == "defaultPackage" || name == "devShell") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {
checkSystemName(attr.name, *attr.pos);
checkDerivation(
fmt("%s.%s", name, attr.name),
*attr.value, *attr.pos);
}
}
else if (name == "defaultApp") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {
checkSystemName(attr.name, *attr.pos);
checkApp(
fmt("%s.%s", name, attr.name),
*attr.value, *attr.pos);
}
}
else if (name == "legacyPackages") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs) {
checkSystemName(attr.name, *attr.pos);
// FIXME: do getDerivations?
}
}
else if (name == "overlay")
checkOverlay(name, vOutput, pos);
else if (name == "overlays") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs)
checkOverlay(fmt("%s.%s", name, attr.name),
*attr.value, *attr.pos);
}
else if (name == "nixosModule")
checkModule(name, vOutput, pos);
else if (name == "nixosModules") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs)
checkModule(fmt("%s.%s", name, attr.name),
*attr.value, *attr.pos);
}
else if (name == "nixosConfigurations") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs)
checkNixOSConfiguration(fmt("%s.%s", name, attr.name),
*attr.value, *attr.pos);
}
else if (name == "hydraJobs")
checkHydraJobs(name, vOutput, pos);
else if (name == "defaultTemplate")
checkTemplate(name, vOutput, pos);
else if (name == "templates") {
state->forceAttrs(vOutput, pos);
for (auto & attr : *vOutput.attrs)
checkTemplate(fmt("%s.%s", name, attr.name),
*attr.value, *attr.pos);
}
else
warn("unknown flake output '%s'", name);
} catch (Error & e) {
e.addTrace(pos, hintfmt("while checking flake output '%s'", name));
throw;
}
});
}
if (build && !drvPaths.empty()) {
Activity act(*logger, lvlInfo, actUnknown, "running flake checks");
store->buildPaths(drvPaths);
}
}
};
struct CmdFlakeInitCommon : virtual Args, EvalCommand
{
std::string templateUrl = "templates";
Path destDir;
const Strings attrsPathPrefixes{"templates."};
const LockFlags lockFlags{ .writeLockFile = false };
CmdFlakeInitCommon()
{
addFlag({
.longName = "template",
.shortName = 't',
.description = "the template to use",
.labels = {"template"},
.handler = {&templateUrl},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
attrsPathPrefixes,
{"defaultTemplate"},
prefix);
}}
});
}
void run(nix::ref<nix::Store> store) override
{
auto flakeDir = absPath(destDir);
auto evalState = getEvalState();
auto [templateFlakeRef, templateName] = parseFlakeRefWithFragment(templateUrl, absPath("."));
auto installable = InstallableFlake(
evalState, std::move(templateFlakeRef),
Strings{templateName == "" ? "defaultTemplate" : templateName},
Strings(attrsPathPrefixes), lockFlags);
auto [cursor, attrPath] = installable.getCursor(*evalState, true);
auto templateDir = cursor->getAttr("path")->getString();
assert(store->isInStore(templateDir));
std::vector<Path> files;
std::function<void(const Path & from, const Path & to)> copyDir;
copyDir = [&](const Path & from, const Path & to)
{
createDirs(to);
for (auto & entry : readDirectory(from)) {
auto from2 = from + "/" + entry.name;
auto to2 = to + "/" + entry.name;
auto st = lstat(from2);
if (S_ISDIR(st.st_mode))
copyDir(from2, to2);
else if (S_ISREG(st.st_mode)) {
auto contents = readFile(from2);
if (pathExists(to2)) {
auto contents2 = readFile(to2);
if (contents != contents2)
throw Error("refusing to overwrite existing file '%s'", to2);
} else
writeFile(to2, contents);
}
else if (S_ISLNK(st.st_mode)) {
auto target = readLink(from2);
if (pathExists(to2)) {
if (readLink(to2) != target)
throw Error("refusing to overwrite existing symlink '%s'", to2);
} else
createSymlink(target, to2);
}
else
throw Error("file '%s' has unsupported type", from2);
files.push_back(to2);
}
};
copyDir(templateDir, flakeDir);
if (pathExists(flakeDir + "/.git")) {
Strings args = { "-C", flakeDir, "add", "--intent-to-add", "--force", "--" };
for (auto & s : files) args.push_back(s);
runProgram("git", true, args);
}
}
};
struct CmdFlakeInit : CmdFlakeInitCommon
{
std::string description() override
{
return "create a flake in the current directory from a template";
}
Examples examples() override
{
return {
Example{
"To create a flake using the default template:",
"nix flake init"
},
Example{
"To see available templates:",
"nix flake show templates"
},
Example{
"To create a flake from a specific template:",
"nix flake init -t templates#nixos-container"
},
};
}
CmdFlakeInit()
{
destDir = ".";
}
};
struct CmdFlakeNew : CmdFlakeInitCommon
{
std::string description() override
{
return "create a flake in the specified directory from a template";
}
CmdFlakeNew()
{
expectArgs({
.label = "dest-dir",
.handler = {&destDir},
.completer = completePath
});
}
};
struct CmdFlakeClone : FlakeCommand
{
Path destDir;
std::string description() override
{
return "clone flake repository";
}
CmdFlakeClone()
{
addFlag({
.longName = "dest",
.shortName = 'f',
.description = "destination path",
.labels = {"path"},
.handler = {&destDir}
});
}
void run(nix::ref<nix::Store> store) override
{
if (destDir.empty())
throw Error("missing flag '--dest'");
getFlakeRef().resolve(store).input.clone(destDir);
}
};
struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
{
std::string dstUri;
CmdFlakeArchive()
{
addFlag({
.longName = "to",
.description = "URI of the destination Nix store",
.labels = {"store-uri"},
.handler = {&dstUri}
});
}
std::string description() override
{
return "copy a flake and all its inputs to a store";
}
Examples examples() override
{
return {
Example{
"To copy the dwarffs flake and its dependencies to a binary cache:",
"nix flake archive --to file:///tmp/my-cache dwarffs"
},
Example{
"To fetch the dwarffs flake and its dependencies to the local Nix store:",
"nix flake archive dwarffs"
},
Example{
"To print the store paths of the flake sources of NixOps without fetching them:",
"nix flake archive --json --dry-run nixops"
},
};
}
void run(nix::ref<nix::Store> store) override
{
auto flake = lockFlake();
auto jsonRoot = json ? std::optional<JSONObject>(std::cout) : std::nullopt;
StorePathSet sources;
sources.insert(flake.flake.sourceInfo->storePath);
if (jsonRoot)
jsonRoot->attr("path", store->printStorePath(flake.flake.sourceInfo->storePath));
// FIXME: use graph output, handle cycles.
std::function<void(const Node & node, std::optional<JSONObject> & jsonObj)> traverse;
traverse = [&](const Node & node, std::optional<JSONObject> & jsonObj)
{
auto jsonObj2 = jsonObj ? jsonObj->object("inputs") : std::optional<JSONObject>();
for (auto & [inputName, input] : node.inputs) {
if (auto inputNode = std::get_if<0>(&input)) {
auto jsonObj3 = jsonObj2 ? jsonObj2->object(inputName) : std::optional<JSONObject>();
auto storePath =
dryRun
? (*inputNode)->lockedRef.input.computeStorePath(*store)
: (*inputNode)->lockedRef.input.fetch(store).first.storePath;
if (jsonObj3)
jsonObj3->attr("path", store->printStorePath(storePath));
sources.insert(std::move(storePath));
traverse(**inputNode, jsonObj3);
}
}
};
traverse(*flake.lockFile.root, jsonRoot);
if (!dryRun && !dstUri.empty()) {
ref<Store> dstStore = dstUri.empty() ? openStore() : openStore(dstUri);
copyPaths(store, dstStore, sources);
}
}
};
struct CmdFlakeShow : FlakeCommand
{
bool showLegacy = false;
bool useEvalCache = true;
CmdFlakeShow()
{
addFlag({
.longName = "legacy",
.description = "show the contents of the 'legacyPackages' output",
.handler = {&showLegacy, true}
});
addFlag({
.longName = "no-eval-cache",
.description = "do not use the flake evaluation cache",
.handler = {[&]() { useEvalCache = false; }}
});
}
std::string description() override
{
return "show the outputs provided by a flake";
}
void run(nix::ref<nix::Store> store) override
{
auto state = getEvalState();
auto flake = std::make_shared<LockedFlake>(lockFlake());
std::function<void(eval_cache::AttrCursor & visitor, const std::vector<Symbol> & attrPath, const std::string & headerPrefix, const std::string & nextPrefix)> visit;
visit = [&](eval_cache::AttrCursor & visitor, const std::vector<Symbol> & attrPath, const std::string & headerPrefix, const std::string & nextPrefix)
{
Activity act(*logger, lvlInfo, actUnknown,
fmt("evaluating '%s'", concatStringsSep(".", attrPath)));
try {
auto recurse = [&]()
{
logger->stdout("%s", headerPrefix);
auto attrs = visitor.getAttrs();
for (const auto & [i, attr] : enumerate(attrs)) {
bool last = i + 1 == attrs.size();
auto visitor2 = visitor.getAttr(attr);
auto attrPath2(attrPath);
attrPath2.push_back(attr);
visit(*visitor2, attrPath2,
fmt(ANSI_GREEN "%s%s" ANSI_NORMAL ANSI_BOLD "%s" ANSI_NORMAL, nextPrefix, last ? treeLast : treeConn, attr),
nextPrefix + (last ? treeNull : treeLine));
}
};
auto showDerivation = [&]()
{
auto name = visitor.getAttr(state->sName)->getString();
/*
std::string description;
if (auto aMeta = visitor.maybeGetAttr("meta")) {
if (auto aDescription = aMeta->maybeGetAttr("description"))
description = aDescription->getString();
}
*/
logger->stdout("%s: %s '%s'",
headerPrefix,
attrPath.size() == 2 && attrPath[0] == "devShell" ? "development environment" :
attrPath.size() == 3 && attrPath[0] == "checks" ? "derivation" :
attrPath.size() >= 1 && attrPath[0] == "hydraJobs" ? "derivation" :
"package",
name);
};
if (attrPath.size() == 0
|| (attrPath.size() == 1 && (
attrPath[0] == "defaultPackage"
|| attrPath[0] == "devShell"
|| attrPath[0] == "nixosConfigurations"
|| attrPath[0] == "nixosModules"
|| attrPath[0] == "defaultApp"
|| attrPath[0] == "templates"))
|| ((attrPath.size() == 1 || attrPath.size() == 2)
&& (attrPath[0] == "checks"
|| attrPath[0] == "packages"
|| attrPath[0] == "apps"))
)
{
recurse();
}
else if (
(attrPath.size() == 2 && (attrPath[0] == "defaultPackage" || attrPath[0] == "devShell"))
|| (attrPath.size() == 3 && (attrPath[0] == "checks" || attrPath[0] == "packages"))
)
{
if (visitor.isDerivation())
showDerivation();
else
throw Error("expected a derivation");
}
else if (attrPath.size() > 0 && attrPath[0] == "hydraJobs") {
if (visitor.isDerivation())
showDerivation();
else
recurse();
}
else if (attrPath.size() > 0 && attrPath[0] == "legacyPackages") {
if (attrPath.size() == 1)
recurse();
else if (!showLegacy)
logger->stdout("%s: " ANSI_YELLOW "omitted" ANSI_NORMAL " (use '--legacy' to show)", headerPrefix);
else {
if (visitor.isDerivation())
showDerivation();
else if (attrPath.size() <= 2)
// FIXME: handle recurseIntoAttrs
recurse();
}
}
else if (
(attrPath.size() == 2 && attrPath[0] == "defaultApp") ||
(attrPath.size() == 3 && attrPath[0] == "apps"))
{
auto aType = visitor.maybeGetAttr("type");
if (!aType || aType->getString() != "app")
throw EvalError("not an app definition");
logger->stdout("%s: app", headerPrefix);
}
else if (
(attrPath.size() == 1 && attrPath[0] == "defaultTemplate") ||
(attrPath.size() == 2 && attrPath[0] == "templates"))
{
auto description = visitor.getAttr("description")->getString();
logger->stdout("%s: template: " ANSI_BOLD "%s" ANSI_NORMAL, headerPrefix, description);
}
else {
logger->stdout("%s: %s",
headerPrefix,
attrPath.size() == 1 && attrPath[0] == "overlay" ? "Nixpkgs overlay" :
attrPath.size() == 2 && attrPath[0] == "nixosConfigurations" ? "NixOS configuration" :
attrPath.size() == 2 && attrPath[0] == "nixosModules" ? "NixOS module" :
ANSI_YELLOW "unknown" ANSI_NORMAL);
}
} catch (EvalError & e) {
if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages"))
throw;
}
};
auto cache = openEvalCache(*state, flake, useEvalCache);
visit(*cache->getRoot(), {}, fmt(ANSI_BOLD "%s" ANSI_NORMAL, flake->flake.lockedRef), "");
}
};
struct CmdFlake : virtual MultiCommand, virtual Command
{
CmdFlake()
: MultiCommand({
{"update", []() { return make_ref<CmdFlakeUpdate>(); }},
{"info", []() { return make_ref<CmdFlakeInfo>(); }},
{"list-inputs", []() { return make_ref<CmdFlakeListInputs>(); }},
{"check", []() { return make_ref<CmdFlakeCheck>(); }},
{"init", []() { return make_ref<CmdFlakeInit>(); }},
{"new", []() { return make_ref<CmdFlakeNew>(); }},
{"clone", []() { return make_ref<CmdFlakeClone>(); }},
{"archive", []() { return make_ref<CmdFlakeArchive>(); }},
{"show", []() { return make_ref<CmdFlakeShow>(); }},
})
{
}
std::string description() override
{
return "manage Nix flakes";
}
void run() override
{
if (!command)
throw UsageError("'nix flake' requires a sub-command.");
settings.requireExperimentalFeature("flakes");
command->second->prepare();
command->second->run();
}
void printHelp(const string & programName, std::ostream & out) override
{
MultiCommand::printHelp(programName, out);
}
};
static auto r1 = registerCommand<CmdFlake>("flake");
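Both CmdFlakeListInputs and CmdFlakeShow above draw their trees the same way: for each entry the current line gets either a connector or a closing glyph, and the prefix passed down gains either a vertical bar or blank padding. A standalone sketch of that pattern (the glyphs are stand-ins; the real treeConn/treeLast/treeLine/treeNull constants live elsewhere in the Nix source):

#include <iostream>
#include <map>
#include <string>

const std::string treeConn = "├───", treeLast = "└───";
const std::string treeLine = "│   ", treeNull = "    ";

struct Node { std::map<std::string, Node> inputs; };

void recurse(const Node & node, const std::string & prefix)
{
    size_t i = 0;
    for (auto & [name, child] : node.inputs) {
        bool last = ++i == node.inputs.size();
        std::cout << prefix + (last ? treeLast : treeConn) << name << "\n";
        recurse(child, prefix + (last ? treeNull : treeLine));
    }
}

int main()
{
    Node root;
    root.inputs["flake-utils"].inputs["nixpkgs"];   // a nested input
    root.inputs["nixpkgs"];
    recurse(root, "");
}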

View File

@ -1,9 +1,18 @@
set -e
if [ -e .attrs.sh ]; then source .attrs.sh; fi
outputs=$_outputs_saved
for __output in $_outputs_saved; do
declare "$__output"="$out"
done
unset _outputs_saved __output
export IN_NIX_SHELL=impure
export dontAddDisableDepTrack=1
if [[ -n $stdenv ]]; then
source $stdenv/setup
fi
export > $out
set >> $out

View File

@ -31,7 +31,11 @@ struct CmdHash : Command
.labels({"modulus"})
.dest(&modulus);
#endif
expectArgs("paths", &paths);
expectArgs({
.label = "paths",
.handler = {&paths},
.completer = completePath
});
}
std::string description() override

View File

@ -1,3 +1,4 @@
#include "installables.hh"
#include "command.hh"
#include "attr-path.hh"
#include "common-eval-args.hh"
@ -7,11 +8,108 @@
#include "get-drvs.hh"
#include "store-api.hh"
#include "shared.hh"
#include "flake/flake.hh"
#include "eval-cache.hh"
#include "url.hh"
#include "registry.hh"
#include <regex>
#include <queue>
namespace nix {
void completeFlakeInputPath(
ref<EvalState> evalState,
const FlakeRef & flakeRef,
std::string_view prefix)
{
auto flake = flake::getFlake(*evalState, flakeRef, true);
for (auto & input : flake.inputs)
if (hasPrefix(input.first, prefix))
completions->insert(input.first);
}
MixFlakeOptions::MixFlakeOptions()
{
addFlag({
.longName = "recreate-lock-file",
.description = "recreate lock file from scratch",
.handler = {&lockFlags.recreateLockFile, true}
});
addFlag({
.longName = "no-update-lock-file",
.description = "do not allow any updates to the lock file",
.handler = {&lockFlags.updateLockFile, false}
});
addFlag({
.longName = "no-write-lock-file",
.description = "do not write the newly generated lock file",
.handler = {&lockFlags.writeLockFile, false}
});
addFlag({
.longName = "no-registries",
.description = "don't use flake registries",
.handler = {&lockFlags.useRegistries, false}
});
addFlag({
.longName = "commit-lock-file",
.description = "commit changes to the lock file",
.handler = {&lockFlags.commitLockFile, true}
});
addFlag({
.longName = "update-input",
.description = "update a specific flake input",
.labels = {"input-path"},
.handler = {[&](std::string s) {
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
}},
.completer = {[&](size_t, std::string_view prefix) {
if (auto flakeRef = getFlakeRefForCompletion())
completeFlakeInputPath(getEvalState(), *flakeRef, prefix);
}}
});
addFlag({
.longName = "override-input",
.description = "override a specific flake input (e.g. 'dwarffs/nixpkgs')",
.labels = {"input-path", "flake-url"},
.handler = {[&](std::string inputPath, std::string flakeRef) {
lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath),
parseFlakeRef(flakeRef, absPath(".")));
}}
});
addFlag({
.longName = "inputs-from",
.description = "use the inputs of the specified flake as registry entries",
.labels = {"flake-url"},
.handler = {[&](std::string flakeRef) {
auto evalState = getEvalState();
auto flake = flake::lockFlake(
*evalState,
parseFlakeRef(flakeRef, absPath(".")),
{ .writeLockFile = false });
for (auto & [inputName, input] : flake.lockFile.root->inputs) {
auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes
if (auto input3 = std::dynamic_pointer_cast<const flake::LockedNode>(input2)) {
overrideRegistry(
fetchers::Input::fromAttrs({{"type","indirect"}, {"id", inputName}}),
input3->lockedRef.input,
{});
}
}
}},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRef(getEvalState()->store, prefix);
}}
});
}
SourceExprCommand::SourceExprCommand()
{
@ -20,67 +118,152 @@ SourceExprCommand::SourceExprCommand()
.shortName = 'f',
.description = "evaluate FILE rather than the default",
.labels = {"file"},
.handler = {&file}
.handler = {&file},
.completer = completePath
});
addFlag({
.longName ="expr",
.description = "evaluate attributes from EXPR",
.labels = {"expr"},
.handler = {&expr}
});
addFlag({
.longName ="derivation",
.description = "operate on the store derivation rather than its outputs",
.handler = {&operateOn, OperateOn::Derivation},
});
}
Value * SourceExprCommand::getSourceExpr(EvalState & state)
Strings SourceExprCommand::getDefaultFlakeAttrPaths()
{
if (vSourceExpr) return *vSourceExpr;
auto sToplevel = state.symbols.create("_toplevel");
vSourceExpr = allocRootValue(state.allocValue());
if (file != "")
state.evalFile(lookupFileArg(state, file), **vSourceExpr);
else {
/* Construct the installation source from $NIX_PATH. */
auto searchPath = state.getSearchPath();
state.mkAttrs(**vSourceExpr, 1024);
mkBool(*state.allocAttr(**vSourceExpr, sToplevel), true);
std::unordered_set<std::string> seen;
auto addEntry = [&](const std::string & name) {
if (name == "") return;
if (!seen.insert(name).second) return;
Value * v1 = state.allocValue();
mkPrimOpApp(*v1, state.getBuiltin("findFile"), state.getBuiltin("nixPath"));
Value * v2 = state.allocValue();
mkApp(*v2, *v1, mkString(*state.allocValue(), name));
mkApp(*state.allocAttr(**vSourceExpr, state.symbols.create(name)),
state.getBuiltin("import"), *v2);
};
for (auto & i : searchPath)
/* Hack to handle channels. */
if (i.first.empty() && pathExists(i.second + "/manifest.nix")) {
for (auto & j : readDirectory(i.second))
if (j.name != "manifest.nix"
&& pathExists(fmt("%s/%s/default.nix", i.second, j.name)))
addEntry(j.name);
} else
addEntry(i.first);
(*vSourceExpr)->attrs->sort();
}
return *vSourceExpr;
return {"defaultPackage." + settings.thisSystem.get()};
}
ref<EvalState> SourceExprCommand::getEvalState()
Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
{
return {
// As a convenience, look for the attribute in
// 'outputs.packages'.
"packages." + settings.thisSystem.get() + ".",
// As a temporary hack until Nixpkgs is properly converted
// to provide a clean 'packages' set, look in 'legacyPackages'.
"legacyPackages." + settings.thisSystem.get() + "."
};
}
void SourceExprCommand::completeInstallable(std::string_view prefix)
{
if (file) return; // FIXME
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
getDefaultFlakeAttrPaths(),
prefix);
}
void completeFlakeRefWithFragment(
ref<EvalState> evalState,
flake::LockFlags lockFlags,
Strings attrPathPrefixes,
const Strings & defaultFlakeAttrPaths,
std::string_view prefix)
{
/* Look for flake output attributes that match the
prefix. */
try {
auto hash = prefix.find('#');
if (hash != std::string::npos) {
auto fragment = prefix.substr(hash + 1);
auto flakeRefS = std::string(prefix.substr(0, hash));
// FIXME: do tilde expansion.
auto flakeRef = parseFlakeRef(flakeRefS, absPath("."));
auto evalCache = openEvalCache(*evalState,
std::make_shared<flake::LockedFlake>(lockFlake(*evalState, flakeRef, lockFlags)),
true);
auto root = evalCache->getRoot();
/* Complete 'fragment' relative to all the
attrpath prefixes as well as the root of the
flake. */
attrPathPrefixes.push_back("");
for (auto & attrPathPrefixS : attrPathPrefixes) {
auto attrPathPrefix = parseAttrPath(*evalState, attrPathPrefixS);
auto attrPathS = attrPathPrefixS + std::string(fragment);
auto attrPath = parseAttrPath(*evalState, attrPathS);
std::string lastAttr;
if (!attrPath.empty() && !hasSuffix(attrPathS, ".")) {
lastAttr = attrPath.back();
attrPath.pop_back();
}
auto attr = root->findAlongAttrPath(attrPath);
if (!attr) continue;
for (auto & attr2 : attr->getAttrs()) {
if (hasPrefix(attr2, lastAttr)) {
auto attrPath2 = attr->getAttrPath(attr2);
/* Strip the attrpath prefix. */
attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size());
completions->insert(flakeRefS + "#" + concatStringsSep(".", attrPath2));
}
}
}
/* And add an empty completion for the default
attrpaths. */
if (fragment.empty()) {
for (auto & attrPath : defaultFlakeAttrPaths) {
auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath));
if (!attr) continue;
completions->insert(flakeRefS + "#");
}
}
}
} catch (Error & e) {
warn(e.msg());
}
completeFlakeRef(evalState->store, prefix);
}
ref<EvalState> EvalCommand::getEvalState()
{
if (!evalState)
evalState = std::make_shared<EvalState>(searchPath, getStore());
return ref<EvalState>(evalState);
}
void completeFlakeRef(ref<Store> store, std::string_view prefix)
{
if (prefix == "")
completions->insert(".");
completeDir(0, prefix);
/* Look for registry entries that match the prefix. */
for (auto & registry : fetchers::getRegistries(store)) {
for (auto & entry : registry->entries) {
auto from = entry.from.to_string();
if (!hasPrefix(prefix, "flake:") && hasPrefix(from, "flake:")) {
std::string from2(from, 6);
if (hasPrefix(from2, prefix))
completions->insert(from2);
} else {
if (hasPrefix(from, prefix))
completions->insert(from);
}
}
}
}
Buildable Installable::toBuildable()
{
auto buildables = toBuildables();
@ -89,6 +272,24 @@ Buildable Installable::toBuildable()
return std::move(buildables[0]);
}
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
Installable::getCursors(EvalState & state, bool useEvalCache)
{
auto evalCache =
std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
[&]() { return toValue(state).first; });
return {{evalCache->getRoot(), ""}};
}
std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
Installable::getCursor(EvalState & state, bool useEvalCache)
{
auto cursors = getCursors(state, useEvalCache);
if (cursors.empty())
throw Error("cannot find flake attribute '%s'", what());
return cursors[0];
}
struct InstallableStorePath : Installable
{
ref<Store> store;
@ -101,15 +302,24 @@ struct InstallableStorePath : Installable
Buildables toBuildables() override
{
std::map<std::string, StorePath> outputs;
outputs.insert_or_assign("out", storePath);
Buildable b{
.drvPath = storePath.isDerivation() ? storePath : std::optional<StorePath>(),
.outputs = std::move(outputs)
};
Buildables bs;
bs.push_back(std::move(b));
return bs;
if (storePath.isDerivation()) {
std::map<std::string, StorePath> outputs;
for (auto & [name, output] : store->readDerivation(storePath).outputs)
outputs.emplace(name, output.path);
return {
Buildable {
.drvPath = storePath,
.outputs = std::move(outputs)
}
};
} else {
return {
Buildable {
.drvPath = {},
.outputs = {{"out", storePath}}
}
};
}
}
std::optional<StorePath> getStorePath() override
@ -118,146 +328,325 @@ struct InstallableStorePath : Installable
}
};
struct InstallableValue : Installable
Buildables InstallableValue::toBuildables()
{
SourceExprCommand & cmd;
Buildables res;
InstallableValue(SourceExprCommand & cmd) : cmd(cmd) { }
StorePathSet drvPaths;
Buildables toBuildables() override
{
auto state = cmd.getEvalState();
for (auto & drv : toDerivations()) {
Buildable b{.drvPath = drv.drvPath};
drvPaths.insert(drv.drvPath);
auto v = toValue(*state).first;
auto outputName = drv.outputName;
if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(*b.drvPath));
Bindings & autoArgs = *cmd.getAutoArgs(*state);
b.outputs.emplace(outputName, drv.outPath);
DrvInfos drvs;
getDerivations(*state, *v, "", autoArgs, drvs, false);
Buildables res;
StorePathSet drvPaths;
for (auto & drv : drvs) {
Buildable b{.drvPath = state->store->parseStorePath(drv.queryDrvPath())};
drvPaths.insert(*b.drvPath);
auto outputName = drv.queryOutputName();
if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(*b.drvPath));
b.outputs.emplace(outputName, state->store->parseStorePath(drv.queryOutPath()));
res.push_back(std::move(b));
}
// Hack to recognize .all: if all drvs have the same drvPath,
// merge the buildables.
if (drvPaths.size() == 1) {
Buildable b{.drvPath = *drvPaths.begin()};
for (auto & b2 : res)
for (auto & output : b2.outputs)
b.outputs.insert_or_assign(output.first, output.second);
Buildables bs;
bs.push_back(std::move(b));
return bs;
} else
return res;
res.push_back(std::move(b));
}
};
struct InstallableExpr : InstallableValue
{
std::string text;
InstallableExpr(SourceExprCommand & cmd, const std::string & text)
: InstallableValue(cmd), text(text) { }
std::string what() override { return text; }
std::pair<Value *, Pos> toValue(EvalState & state) override
{
auto v = state.allocValue();
state.eval(state.parseExprFromString(text, absPath(".")), *v);
return {v, noPos};
}
};
// Hack to recognize .all: if all drvs have the same drvPath,
// merge the buildables.
if (drvPaths.size() == 1) {
Buildable b{.drvPath = *drvPaths.begin()};
for (auto & b2 : res)
for (auto & output : b2.outputs)
b.outputs.insert_or_assign(output.first, output.second);
Buildables bs;
bs.push_back(std::move(b));
return bs;
} else
return res;
}
struct InstallableAttrPath : InstallableValue
{
SourceExprCommand & cmd;
RootValue v;
std::string attrPath;
InstallableAttrPath(SourceExprCommand & cmd, const std::string & attrPath)
: InstallableValue(cmd), attrPath(attrPath)
InstallableAttrPath(ref<EvalState> state, SourceExprCommand & cmd, Value * v, const std::string & attrPath)
: InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
{ }
std::string what() override { return attrPath; }
std::pair<Value *, Pos> toValue(EvalState & state) override
{
auto source = cmd.getSourceExpr(state);
Bindings & autoArgs = *cmd.getAutoArgs(state);
auto v = findAlongAttrPath(state, attrPath, autoArgs, *source).first;
state.forceValue(*v);
return {v, noPos};
auto [vRes, pos] = findAlongAttrPath(state, attrPath, *cmd.getAutoArgs(state), **v);
state.forceValue(*vRes);
return {vRes, pos};
}
virtual std::vector<InstallableValue::DerivationInfo> toDerivations() override;
};
// FIXME: extend
std::string attrRegex = R"([A-Za-z_][A-Za-z0-9-_+]*)";
static std::regex attrPathRegex(fmt(R"(%1%(\.%1%)*)", attrRegex));
std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations()
{
auto v = toValue(*state).first;
static std::vector<std::shared_ptr<Installable>> parseInstallables(
SourceExprCommand & cmd, ref<Store> store, std::vector<std::string> ss, bool useDefaultInstallables)
Bindings & autoArgs = *cmd.getAutoArgs(*state);
DrvInfos drvInfos;
getDerivations(*state, *v, "", autoArgs, drvInfos, false);
std::vector<DerivationInfo> res;
for (auto & drvInfo : drvInfos) {
res.push_back({
state->store->parseStorePath(drvInfo.queryDrvPath()),
state->store->parseStorePath(drvInfo.queryOutPath()),
drvInfo.queryOutputName()
});
}
return res;
}
std::vector<std::string> InstallableFlake::getActualAttrPaths()
{
std::vector<std::string> res;
for (auto & prefix : prefixes)
res.push_back(prefix + *attrPaths.begin());
for (auto & s : attrPaths)
res.push_back(s);
return res;
}
Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake)
{
auto vFlake = state.allocValue();
callFlake(state, lockedFlake, *vFlake);
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
assert(aOutputs);
state.forceValue(*aOutputs->value);
return aOutputs->value;
}
ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake,
bool useEvalCache)
{
auto fingerprint = lockedFlake->getFingerprint();
return make_ref<nix::eval_cache::EvalCache>(
useEvalCache && evalSettings.pureEval
? std::optional { std::cref(fingerprint) }
: std::nullopt,
state,
[&state, lockedFlake]()
{
/* For testing whether the evaluation cache is
complete. */
if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0")
throw Error("not everything is cached, but evaluation is not allowed");
auto vFlake = state.allocValue();
flake::callFlake(state, *lockedFlake, *vFlake);
state.forceAttrs(*vFlake);
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
assert(aOutputs);
return aOutputs->value;
});
}
static std::string showAttrPaths(const std::vector<std::string> & paths)
{
std::string s;
for (const auto & [n, i] : enumerate(paths)) {
if (n > 0) s += n + 1 == paths.size() ? " or " : ", ";
s += '\''; s += i; s += '\'';
}
return s;
}
std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableFlake::toDerivation()
{
auto lockedFlake = getLockedFlake();
auto cache = openEvalCache(*state, lockedFlake, true);
auto root = cache->getRoot();
for (auto & attrPath : getActualAttrPaths()) {
auto attr = root->findAlongAttrPath(parseAttrPath(*state, attrPath));
if (!attr) continue;
if (!attr->isDerivation())
throw Error("flake output attribute '%s' is not a derivation", attrPath);
auto drvPath = attr->forceDerivation();
auto drvInfo = DerivationInfo{
std::move(drvPath),
state->store->parseStorePath(attr->getAttr(state->sOutPath)->getString()),
attr->getAttr(state->sOutputName)->getString()
};
return {attrPath, lockedFlake->flake.lockedRef, std::move(drvInfo)};
}
throw Error("flake '%s' does not provide attribute %s",
flakeRef, showAttrPaths(getActualAttrPaths()));
}
std::vector<InstallableValue::DerivationInfo> InstallableFlake::toDerivations()
{
std::vector<DerivationInfo> res;
res.push_back(std::get<2>(toDerivation()));
return res;
}
std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
{
auto lockedFlake = getLockedFlake();
auto vOutputs = getFlakeOutputs(state, *lockedFlake);
auto emptyArgs = state.allocBindings(0);
for (auto & attrPath : getActualAttrPaths()) {
try {
auto [v, pos] = findAlongAttrPath(state, attrPath, *emptyArgs, *vOutputs);
state.forceValue(*v);
return {v, pos};
} catch (AttrPathNotFound & e) {
}
}
throw Error("flake '%s' does not provide attribute %s",
flakeRef, showAttrPaths(getActualAttrPaths()));
}
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
InstallableFlake::getCursors(EvalState & state, bool useEvalCache)
{
auto evalCache = openEvalCache(state,
std::make_shared<flake::LockedFlake>(lockFlake(state, flakeRef, lockFlags)),
useEvalCache);
auto root = evalCache->getRoot();
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>> res;
for (auto & attrPath : getActualAttrPaths()) {
auto attr = root->findAlongAttrPath(parseAttrPath(state, attrPath));
if (attr) res.push_back({attr, attrPath});
}
return res;
}
std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
{
if (!_lockedFlake)
_lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlags));
return _lockedFlake;
}
FlakeRef InstallableFlake::nixpkgsFlakeRef() const
{
auto lockedFlake = getLockedFlake();
if (auto nixpkgsInput = lockedFlake->lockFile.findInput({"nixpkgs"})) {
if (auto lockedNode = std::dynamic_pointer_cast<const flake::LockedNode>(nixpkgsInput)) {
debug("using nixpkgs flake '%s'", lockedNode->lockedRef);
return std::move(lockedNode->lockedRef);
}
}
return Installable::nixpkgsFlakeRef();
}
std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
ref<Store> store, std::vector<std::string> ss)
{
std::vector<std::shared_ptr<Installable>> result;
if (ss.empty() && useDefaultInstallables) {
if (cmd.file == "")
cmd.file = ".";
ss = {""};
}
if (file || expr) {
if (file && expr)
throw UsageError("'--file' and '--expr' are exclusive");
for (auto & s : ss) {
// FIXME: backward compatibility hack
if (file) evalSettings.pureEval = false;
if (s.compare(0, 1, "(") == 0)
result.push_back(std::make_shared<InstallableExpr>(cmd, s));
auto state = getEvalState();
auto vFile = state->allocValue();
else if (s.find("/") != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(
store,
store->toStorePath(store->followLinksToStore(s)).first));
} catch (BadStorePath &) { }
if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else {
auto e = state->parseExprFromString(*expr, absPath("."));
state->eval(e, *vFile);
}
else if (s == "" || std::regex_match(s, attrPathRegex))
result.push_back(std::make_shared<InstallableAttrPath>(cmd, s));
for (auto & s : ss)
result.push_back(std::make_shared<InstallableAttrPath>(state, *this, vFile, s == "." ? "" : s));
else
throw UsageError("don't know what to do with argument '%s'", s);
} else {
for (auto & s : ss) {
std::exception_ptr ex;
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
getEvalState(), std::move(flakeRef),
fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
getDefaultFlakeAttrPathPrefixes(), lockFlags));
continue;
} catch (...) {
ex = std::current_exception();
}
if (s.find('/') != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
continue;
} catch (BadStorePath &) {
} catch (...) {
if (!ex)
ex = std::current_exception();
}
}
std::rethrow_exception(ex);
/*
throw Error(
pathExists(s)
? "path '%s' is not a flake or a store path"
: "don't know how to handle argument '%s'", s);
*/
}
}
return result;
}
std::shared_ptr<Installable> parseInstallable(
SourceExprCommand & cmd, ref<Store> store, const std::string & installable,
bool useDefaultInstallables)
std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
ref<Store> store, const std::string & installable)
{
auto installables = parseInstallables(cmd, store, {installable}, false);
auto installables = parseInstallables(store, {installable});
assert(installables.size() == 1);
return installables.front();
}
Buildables build(ref<Store> store, RealiseMode mode,
Buildables build(ref<Store> store, Realise mode,
std::vector<std::shared_ptr<Installable>> installables)
{
if (mode != Build)
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
Buildables buildables;
@ -278,33 +667,45 @@ Buildables build(ref<Store> store, RealiseMode mode,
}
}
if (mode == DryRun)
if (mode == Realise::Nothing)
printMissing(store, pathsToBuild, lvlError);
else if (mode == Build)
else if (mode == Realise::Outputs)
store->buildPaths(pathsToBuild);
return buildables;
}
StorePathSet toStorePaths(ref<Store> store, RealiseMode mode,
StorePathSet toStorePaths(ref<Store> store,
Realise mode, OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables)
{
StorePathSet outPaths;
for (auto & b : build(store, mode, installables))
for (auto & output : b.outputs)
outPaths.insert(output.second);
if (operateOn == OperateOn::Output) {
for (auto & b : build(store, mode, installables))
for (auto & output : b.outputs)
outPaths.insert(output.second);
} else {
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
for (auto & i : installables)
for (auto & b : i->toBuildables())
if (b.drvPath)
outPaths.insert(*b.drvPath);
}
return outPaths;
}
StorePath toStorePath(ref<Store> store, RealiseMode mode,
StorePath toStorePath(ref<Store> store,
Realise mode, OperateOn operateOn,
std::shared_ptr<Installable> installable)
{
auto paths = toStorePaths(store, mode, {installable});
auto paths = toStorePaths(store, mode, operateOn, {installable});
if (paths.size() != 1)
throw Error("argument '%s' should evaluate to one store path", installable->what());
throw Error("argument '%s' should evaluate to one store path", installable->what());
return *paths.begin();
}
@ -333,14 +734,51 @@ StorePathSet toDerivations(ref<Store> store,
return drvPaths;
}
InstallablesCommand::InstallablesCommand()
{
expectArgs({
.label = "installables",
.handler = {&_installables},
.completer = {[&](size_t, std::string_view prefix) {
completeInstallable(prefix);
}}
});
}
void InstallablesCommand::prepare()
{
installables = parseInstallables(*this, getStore(), _installables, useDefaultInstallables());
if (_installables.empty() && useDefaultInstallables())
// FIXME: commands like "nix install" should not have a
// default, probably.
_installables.push_back(".");
installables = parseInstallables(getStore(), _installables);
}
std::optional<FlakeRef> InstallablesCommand::getFlakeRefForCompletion()
{
if (_installables.empty()) {
if (useDefaultInstallables())
return parseFlakeRef(".", absPath("."));
return {};
}
return parseFlakeRef(_installables.front(), absPath("."));
}
InstallableCommand::InstallableCommand()
{
expectArgs({
.label = "installable",
.optional = true,
.handler = {&_installable},
.completer = {[&](size_t, std::string_view prefix) {
completeInstallable(prefix);
}}
});
}
void InstallableCommand::prepare()
{
installable = parseInstallable(*this, getStore(), _installable, false);
installable = parseInstallable(getStore(), _installable);
}
}

View File

@ -3,11 +3,17 @@
#include "util.hh"
#include "path.hh"
#include "eval.hh"
#include "flake/flake.hh"
#include <optional>
namespace nix {
struct DrvInfo;
struct SourceExprCommand;
namespace eval_cache { class EvalCache; class AttrCursor; }
struct Buildable
{
std::optional<StorePath> drvPath;
@ -16,19 +22,25 @@ struct Buildable
typedef std::vector<Buildable> Buildables;
struct App
{
std::vector<StorePathWithOutputs> context;
Path program;
// FIXME: add args, sandbox settings, metadata, ...
};
struct Installable
{
virtual ~Installable() { }
virtual std::string what() = 0;
virtual Buildables toBuildables()
{
throw Error("argument '%s' cannot be built", what());
}
virtual Buildables toBuildables() = 0;
Buildable toBuildable();
App toApp(EvalState & state);
virtual std::pair<Value *, Pos> toValue(EvalState & state)
{
throw Error("argument '%s' cannot be evaluated", what());
@ -40,6 +52,74 @@ struct Installable
{
return {};
}
virtual std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
getCursors(EvalState & state, bool useEvalCache);
std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>
getCursor(EvalState & state, bool useEvalCache);
virtual FlakeRef nixpkgsFlakeRef() const
{
return std::move(FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}}));
}
};
struct InstallableValue : Installable
{
ref<EvalState> state;
InstallableValue(ref<EvalState> state) : state(state) {}
struct DerivationInfo
{
StorePath drvPath;
StorePath outPath;
std::string outputName;
};
virtual std::vector<DerivationInfo> toDerivations() = 0;
Buildables toBuildables() override;
};
struct InstallableFlake : InstallableValue
{
FlakeRef flakeRef;
Strings attrPaths;
Strings prefixes;
const flake::LockFlags & lockFlags;
mutable std::shared_ptr<flake::LockedFlake> _lockedFlake;
InstallableFlake(ref<EvalState> state, FlakeRef && flakeRef,
Strings && attrPaths, Strings && prefixes, const flake::LockFlags & lockFlags)
: InstallableValue(state), flakeRef(flakeRef), attrPaths(attrPaths),
prefixes(prefixes), lockFlags(lockFlags)
{ }
std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
std::vector<std::string> getActualAttrPaths();
Value * getFlakeOutputs(EvalState & state, const flake::LockedFlake & lockedFlake);
std::tuple<std::string, FlakeRef, DerivationInfo> toDerivation();
std::vector<DerivationInfo> toDerivations() override;
std::pair<Value *, Pos> toValue(EvalState & state) override;
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
getCursors(EvalState & state, bool useEvalCache) override;
std::shared_ptr<flake::LockedFlake> getLockedFlake() const;
FlakeRef nixpkgsFlakeRef() const override;
};
ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake,
bool useEvalCache);
}

View File

@ -18,7 +18,7 @@ struct CmdLog : InstallableCommand
return {
Example{
"To get the build log of GNU Hello:",
"nix log nixpkgs.hello"
"nix log nixpkgs#hello"
},
Example{
"To get the build log of a specific path:",
@ -26,7 +26,7 @@ struct CmdLog : InstallableCommand
},
Example{
"To get a build log from a specific binary cache:",
"nix log --store https://cache.nixos.org nixpkgs.hello"
"nix log --store https://cache.nixos.org nixpkgs#hello"
},
};
}

View File

@ -85,7 +85,11 @@ struct CmdLsStore : StoreCommand, MixLs
{
CmdLsStore()
{
expectArg("path", &path);
expectArgs({
.label = "path",
.handler = {&path},
.completer = completePath
});
}
Examples examples() override
@ -117,7 +121,11 @@ struct CmdLsNar : Command, MixLs
CmdLsNar()
{
expectArg("nar", &narPath);
expectArgs({
.label = "nar",
.handler = {&narPath},
.completer = completePath
});
expectArg("path", &path);
}

View File

@ -7,7 +7,6 @@
#include "legacy.hh"
#include "shared.hh"
#include "store-api.hh"
#include "progress-bar.hh"
#include "filetransfer.hh"
#include "finally.hh"
#include "loggers.hh"
@ -69,7 +68,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
addFlag({
.longName = "help",
.description = "show usage information",
.handler = {[&]() { showHelpAndExit(); }},
.handler = {[&]() { if (!completions) showHelpAndExit(); }},
});
addFlag({
@ -97,7 +96,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
addFlag({
.longName = "version",
.description = "show version information",
.handler = {[&]() { printVersion(programName); }},
.handler = {[&]() { if (!completions) printVersion(programName); }},
});
addFlag({
@ -165,6 +164,7 @@ void mainWrapped(int argc, char * * argv)
verbosity = lvlWarn;
settings.verboseBuild = false;
evalSettings.pureEval = true;
setLogFormat("bar");
@ -172,7 +172,22 @@ void mainWrapped(int argc, char * * argv)
NixArgs args;
args.parseCmdline(argvToStrings(argc, argv));
Finally printCompletions([&]()
{
if (completions) {
std::cout << (pathCompletions ? "filenames\n" : "no-filenames\n");
for (auto & s : *completions)
std::cout << s << "\n";
}
});
try {
args.parseCmdline(argvToStrings(argc, argv));
} catch (UsageError &) {
if (!completions) throw;
}
if (completions) return;
initPlugins();
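The Finally block above defines a small line-oriented protocol: the first line is "filenames" or "no-filenames", and every following line is one completion. A rough bash consumer sketch (the NIX_GET_COMPLETIONS trigger and all names below are assumptions, not shown in this hunk):
_nix_complete_sketch() {
    # Assumption: setting NIX_GET_COMPLETIONS to the index of the word being
    # completed makes the argument parser print completions and exit.
    local output
    output=$(NIX_GET_COMPLETIONS=$COMP_CWORD "${COMP_WORDS[@]}" 2>/dev/null) || return
    local kind=${output%%$'\n'*}           # first line: filenames / no-filenames
    [[ $kind == filenames ]] && compopt -o filenames
    COMPREPLY=()
    while IFS= read -r line; do
        COMPREPLY+=("$line")
    done < <(tail -n +2 <<< "$output")     # remaining lines: the completions
}
complete -F _nix_complete_sketch nix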

View File

@ -10,7 +10,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
{
CmdMakeContentAddressable()
{
realiseMode = Build;
realiseMode = Realise::Outputs;
}
std::string description() override
@ -23,7 +23,7 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
return {
Example{
"To create a content-addressable representation of GNU Hello (but not its dependencies):",
"nix make-content-addressable nixpkgs.hello"
"nix make-content-addressable nixpkgs#hello"
},
Example{
"To compute a content-addressable representation of the current NixOS system closure:",

View File

@ -40,7 +40,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
},
Example{
"To show a package's closure size and all its dependencies with human readable sizes:",
"nix path-info -rsSh nixpkgs.rust"
"nix path-info -rsSh nixpkgs#rust"
},
Example{
"To check the existence of a path in a binary cache:",

428
src/nix/profile.cc Normal file
View File

@ -0,0 +1,428 @@
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
#include "derivations.hh"
#include "archive.hh"
#include "builtins/buildenv.hh"
#include "flake/flakeref.hh"
#include "../nix-env/user-env.hh"
#include <nlohmann/json.hpp>
#include <regex>
using namespace nix;
struct ProfileElementSource
{
FlakeRef originalRef;
// FIXME: record original attrpath.
FlakeRef resolvedRef;
std::string attrPath;
// FIXME: output names
};
struct ProfileElement
{
StorePathSet storePaths;
std::optional<ProfileElementSource> source;
bool active = true;
// FIXME: priority
};
struct ProfileManifest
{
std::vector<ProfileElement> elements;
ProfileManifest() { }
ProfileManifest(EvalState & state, const Path & profile)
{
auto manifestPath = profile + "/manifest.json";
if (pathExists(manifestPath)) {
auto json = nlohmann::json::parse(readFile(manifestPath));
auto version = json.value("version", 0);
if (version != 1)
throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version);
for (auto & e : json["elements"]) {
ProfileElement element;
for (auto & p : e["storePaths"])
element.storePaths.insert(state.store->parseStorePath((std::string) p));
element.active = e["active"];
if (e.value("uri", "") != "") {
element.source = ProfileElementSource{
parseFlakeRef(e["originalUri"]),
parseFlakeRef(e["uri"]),
e["attrPath"]
};
}
elements.emplace_back(std::move(element));
}
}
else if (pathExists(profile + "/manifest.nix")) {
// FIXME: needed because of pure mode; ugly.
if (state.allowedPaths) {
state.allowedPaths->insert(state.store->followLinksToStore(profile));
state.allowedPaths->insert(state.store->followLinksToStore(profile + "/manifest.nix"));
}
auto drvInfos = queryInstalled(state, state.store->followLinksToStore(profile));
for (auto & drvInfo : drvInfos) {
ProfileElement element;
element.storePaths = {state.store->parseStorePath(drvInfo.queryOutPath())};
elements.emplace_back(std::move(element));
}
}
}
std::string toJSON(Store & store) const
{
auto array = nlohmann::json::array();
for (auto & element : elements) {
auto paths = nlohmann::json::array();
for (auto & path : element.storePaths)
paths.push_back(store.printStorePath(path));
nlohmann::json obj;
obj["storePaths"] = paths;
obj["active"] = element.active;
if (element.source) {
obj["originalUri"] = element.source->originalRef.to_string();
obj["uri"] = element.source->resolvedRef.to_string();
obj["attrPath"] = element.source->attrPath;
}
array.push_back(obj);
}
nlohmann::json json;
json["version"] = 1;
json["elements"] = array;
return json.dump();
}
StorePath build(ref<Store> store)
{
auto tempDir = createTempDir();
StorePathSet references;
Packages pkgs;
for (auto & element : elements) {
for (auto & path : element.storePaths) {
if (element.active)
pkgs.emplace_back(store->printStorePath(path), true, 5);
references.insert(path);
}
}
buildProfile(tempDir, std::move(pkgs));
writeFile(tempDir + "/manifest.json", toJSON(*store));
/* Add the symlink tree to the store. */
StringSink sink;
dumpPath(tempDir, sink);
auto narHash = hashString(htSHA256, *sink.s);
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "profile", references));
info.references = std::move(references);
info.narHash = narHash;
info.narSize = sink.s->size();
info.ca = FixedOutputHash { .method = FileIngestionMethod::Recursive, .hash = *info.narHash };
auto source = StringSource { *sink.s };
store->addToStore(info, source);
return std::move(info.path);
}
};
struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
{
std::string description() override
{
return "install a package into a profile";
}
Examples examples() override
{
return {
Example{
"To install a package from Nixpkgs:",
"nix profile install nixpkgs#hello"
},
Example{
"To install a package from a specific branch of Nixpkgs:",
"nix profile install nixpkgs/release-19.09#hello"
},
Example{
"To install a package from a specific revision of Nixpkgs:",
"nix profile install nixpkgs/1028bb33859f8dfad7f98e1c8d185f3d1aaa7340#hello"
},
};
}
void run(ref<Store> store) override
{
ProfileManifest manifest(*getEvalState(), *profile);
std::vector<StorePathWithOutputs> pathsToBuild;
for (auto & installable : installables) {
if (auto installable2 = std::dynamic_pointer_cast<InstallableFlake>(installable)) {
auto [attrPath, resolvedRef, drv] = installable2->toDerivation();
ProfileElement element;
element.storePaths = {drv.outPath}; // FIXME
element.source = ProfileElementSource{
installable2->flakeRef,
resolvedRef,
attrPath,
};
pathsToBuild.push_back({drv.drvPath, StringSet{"out"}}); // FIXME
manifest.elements.emplace_back(std::move(element));
} else
throw Error("'nix profile install' does not support argument '%s'", installable->what());
}
store->buildPaths(pathsToBuild);
updateProfile(manifest.build(store));
}
};
class MixProfileElementMatchers : virtual Args
{
std::vector<std::string> _matchers;
public:
MixProfileElementMatchers()
{
expectArgs("elements", &_matchers);
}
typedef std::variant<size_t, Path, std::regex> Matcher;
std::vector<Matcher> getMatchers(ref<Store> store)
{
std::vector<Matcher> res;
for (auto & s : _matchers) {
size_t n;
if (string2Int(s, n))
res.push_back(n);
else if (store->isStorePath(s))
res.push_back(s);
else
res.push_back(std::regex(s, std::regex::extended | std::regex::icase));
}
return res;
}
bool matches(const Store & store, const ProfileElement & element, size_t pos, const std::vector<Matcher> & matchers)
{
for (auto & matcher : matchers) {
if (auto n = std::get_if<size_t>(&matcher)) {
if (*n == pos) return true;
} else if (auto path = std::get_if<Path>(&matcher)) {
if (element.storePaths.count(store.parseStorePath(*path))) return true;
} else if (auto regex = std::get_if<std::regex>(&matcher)) {
if (element.source
&& std::regex_match(element.source->attrPath, *regex))
return true;
}
}
return false;
}
};
struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElementMatchers
{
std::string description() override
{
return "remove packages from a profile";
}
Examples examples() override
{
return {
Example{
"To remove a package by attribute path:",
"nix profile remove packages.x86_64-linux.hello"
},
Example{
"To remove all packages:",
"nix profile remove '.*'"
},
Example{
"To remove a package by store path:",
"nix profile remove /nix/store/rr3y0c6zyk7kjjl8y19s4lsrhn4aiq1z-hello-2.10"
},
Example{
"To remove a package by position:",
"nix profile remove 3"
},
};
}
void run(ref<Store> store) override
{
ProfileManifest oldManifest(*getEvalState(), *profile);
auto matchers = getMatchers(store);
ProfileManifest newManifest;
for (size_t i = 0; i < oldManifest.elements.size(); ++i) {
auto & element(oldManifest.elements[i]);
if (!matches(*store, element, i, matchers))
newManifest.elements.push_back(std::move(element));
}
// FIXME: warn about unused matchers?
printInfo("removed %d packages, kept %d packages",
oldManifest.elements.size() - newManifest.elements.size(),
newManifest.elements.size());
updateProfile(newManifest.build(store));
}
};
struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProfileElementMatchers
{
std::string description() override
{
return "upgrade packages using their most recent flake";
}
Examples examples() override
{
return {
Example{
"To upgrade all packages that were installed using a mutable flake reference:",
"nix profile upgrade '.*'"
},
Example{
"To upgrade a specific package:",
"nix profile upgrade packages.x86_64-linux.hello"
},
};
}
void run(ref<Store> store) override
{
ProfileManifest manifest(*getEvalState(), *profile);
auto matchers = getMatchers(store);
// FIXME: code duplication
std::vector<StorePathWithOutputs> pathsToBuild;
for (size_t i = 0; i < manifest.elements.size(); ++i) {
auto & element(manifest.elements[i]);
if (element.source
&& !element.source->originalRef.input.isImmutable()
&& matches(*store, element, i, matchers))
{
Activity act(*logger, lvlChatty, actUnknown,
fmt("checking '%s' for updates", element.source->attrPath));
InstallableFlake installable(getEvalState(), FlakeRef(element.source->originalRef), {element.source->attrPath}, {}, lockFlags);
auto [attrPath, resolvedRef, drv] = installable.toDerivation();
if (element.source->resolvedRef == resolvedRef) continue;
printInfo("upgrading '%s' from flake '%s' to '%s'",
element.source->attrPath, element.source->resolvedRef, resolvedRef);
element.storePaths = {drv.outPath}; // FIXME
element.source = ProfileElementSource{
installable.flakeRef,
resolvedRef,
attrPath,
};
pathsToBuild.push_back({drv.drvPath, StringSet{"out"}}); // FIXME
}
}
store->buildPaths(pathsToBuild);
updateProfile(manifest.build(store));
}
};
struct CmdProfileInfo : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile
{
std::string description() override
{
return "list installed packages";
}
Examples examples() override
{
return {
Example{
"To show what packages are installed in the default profile:",
"nix profile info"
},
};
}
void run(ref<Store> store) override
{
ProfileManifest manifest(*getEvalState(), *profile);
for (size_t i = 0; i < manifest.elements.size(); ++i) {
auto & element(manifest.elements[i]);
logger->stdout("%d %s %s %s", i,
element.source ? element.source->originalRef.to_string() + "#" + element.source->attrPath : "-",
element.source ? element.source->resolvedRef.to_string() + "#" + element.source->attrPath : "-",
concatStringsSep(" ", store->printStorePathSet(element.storePaths)));
}
}
};
struct CmdProfile : virtual MultiCommand, virtual Command
{
CmdProfile()
: MultiCommand({
{"install", []() { return make_ref<CmdProfileInstall>(); }},
{"remove", []() { return make_ref<CmdProfileRemove>(); }},
{"upgrade", []() { return make_ref<CmdProfileUpgrade>(); }},
{"info", []() { return make_ref<CmdProfileInfo>(); }},
})
{ }
std::string description() override
{
return "manage Nix profiles";
}
void run() override
{
if (!command)
throw UsageError("'nix profile' requires a sub-command.");
command->second->prepare();
command->second->run();
}
void printHelp(const string & programName, std::ostream & out) override
{
MultiCommand::printHelp(programName, out);
}
};
static auto r1 = registerCommand<CmdProfile>("profile");
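Taken together these subcommands form a flake-based counterpart to nix-env. A brief usage sketch (hypothetical invocations; the behaviour noted in the comments follows from ProfileManifest and the matchers above):
nix profile install nixpkgs#hello     # records originalUri/uri/attrPath in $profile/manifest.json (version 1)
nix profile info                      # one line per element: position, original ref, locked ref, store paths
nix profile upgrade '.*'              # re-resolves elements whose original flake ref is mutable
nix profile remove '.*hello.*'        # matchers: position, store path, or attrpath regex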

150
src/nix/registry.cc Normal file
View File

@ -0,0 +1,150 @@
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "eval.hh"
#include "flake/flake.hh"
#include "store-api.hh"
#include "fetchers.hh"
#include "registry.hh"
using namespace nix;
using namespace nix::flake;
struct CmdRegistryList : StoreCommand
{
std::string description() override
{
return "list available Nix flakes";
}
void run(nix::ref<nix::Store> store) override
{
using namespace fetchers;
auto registries = getRegistries(store);
for (auto & registry : registries) {
for (auto & entry : registry->entries) {
// FIXME: format nicely
logger->stdout("%s %s %s",
registry->type == Registry::Flag ? "flags " :
registry->type == Registry::User ? "user " :
registry->type == Registry::System ? "system" :
"global",
entry.from.to_string(),
entry.to.to_string());
}
}
}
};
struct CmdRegistryAdd : MixEvalArgs, Command
{
std::string fromUrl, toUrl;
std::string description() override
{
return "add/replace flake in user flake registry";
}
CmdRegistryAdd()
{
expectArg("from-url", &fromUrl);
expectArg("to-url", &toUrl);
}
void run() override
{
auto fromRef = parseFlakeRef(fromUrl);
auto toRef = parseFlakeRef(toUrl);
fetchers::Attrs extraAttrs;
if (toRef.subdir != "") extraAttrs["dir"] = toRef.subdir;
auto userRegistry = fetchers::getUserRegistry();
userRegistry->remove(fromRef.input);
userRegistry->add(fromRef.input, toRef.input, extraAttrs);
userRegistry->write(fetchers::getUserRegistryPath());
}
};
struct CmdRegistryRemove : virtual Args, MixEvalArgs, Command
{
std::string url;
std::string description() override
{
return "remove flake from user flake registry";
}
CmdRegistryRemove()
{
expectArg("url", &url);
}
void run() override
{
auto userRegistry = fetchers::getUserRegistry();
userRegistry->remove(parseFlakeRef(url).input);
userRegistry->write(fetchers::getUserRegistryPath());
}
};
struct CmdRegistryPin : virtual Args, EvalCommand
{
std::string url;
std::string description() override
{
return "pin a flake to its current version in user flake registry";
}
CmdRegistryPin()
{
expectArg("url", &url);
}
void run(nix::ref<nix::Store> store) override
{
auto ref = parseFlakeRef(url);
auto userRegistry = fetchers::getUserRegistry();
userRegistry->remove(ref.input);
auto [tree, resolved] = ref.resolve(store).input.fetch(store);
fetchers::Attrs extraAttrs;
if (ref.subdir != "") extraAttrs["dir"] = ref.subdir;
userRegistry->add(ref.input, resolved, extraAttrs);
}
};
struct CmdRegistry : virtual MultiCommand, virtual Command
{
CmdRegistry()
: MultiCommand({
{"list", []() { return make_ref<CmdRegistryList>(); }},
{"add", []() { return make_ref<CmdRegistryAdd>(); }},
{"remove", []() { return make_ref<CmdRegistryRemove>(); }},
{"pin", []() { return make_ref<CmdRegistryPin>(); }},
})
{
}
std::string description() override
{
return "manage the flake registry";
}
Category category() override { return catSecondary; }
void run() override
{
if (!command)
throw UsageError("'nix registry' requires a sub-command.");
command->second->prepare();
command->second->run();
}
void printHelp(const string & programName, std::ostream & out) override
{
MultiCommand::printHelp(programName, out);
}
};
static auto r1 = registerCommand<CmdRegistry>("registry");
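A short usage sketch of the registry commands registered above (the flake IDs and URLs are placeholders, not taken from this change):
nix registry list                                      # "<flags|user|system|global> <from> <to>" per entry
nix registry add mynixpkgs github:NixOS/nixpkgs/nixos-20.03
nix registry pin nixpkgs                               # resolves the ref and records the locked revision
nix registry remove mynixpkgs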

View File

@ -760,7 +760,11 @@ struct CmdRepl : StoreCommand, MixEvalArgs
CmdRepl()
{
expectArgs("files", &files);
expectArgs({
.label = "files",
.handler = {&files},
.completer = completePath
});
}
std::string description() override
@ -780,6 +784,7 @@ struct CmdRepl : StoreCommand, MixEvalArgs
void run(ref<Store> store) override
{
evalSettings.pureEval = false;
auto repl = std::make_unique<NixRepl>(searchPath, openStore());
repl->autoArgs = getAutoArgs(*repl->state);
repl->mainLoop(files);

View File

@ -84,31 +84,30 @@ struct CmdShell : InstallablesCommand, RunCommon, MixEnvironment
{
return {
Example{
"To start a shell providing GNU Hello from NixOS 17.03:",
"nix shell -f channel:nixos-17.03 hello"
"To start a shell providing GNU Hello from NixOS 20.03:",
"nix shell nixpkgs/nixos-20.03#hello"
},
Example{
"To start a shell providing youtube-dl from your 'nixpkgs' channel:",
"nix shell nixpkgs.youtube-dl"
"nix shell nixpkgs#youtube-dl"
},
Example{
"To run GNU Hello:",
"nix shell nixpkgs.hello -c hello --greeting 'Hi everybody!'"
"nix shell nixpkgs#hello -c hello --greeting 'Hi everybody!'"
},
Example{
"To run GNU Hello in a chroot store:",
"nix shell --store ~/my-nix nixpkgs.hello -c hello"
"nix shell --store ~/my-nix nixpkgs#hello -c hello"
},
};
}
void run(ref<Store> store) override
{
auto outPaths = toStorePaths(store, Build, installables);
auto outPaths = toStorePaths(store, Realise::Outputs, OperateOn::Output, installables);
auto accessor = store->getFSAccessor();
std::unordered_set<StorePath> done;
std::queue<StorePath> todo;
for (auto & path : outPaths) todo.push(path);
@ -143,6 +142,67 @@ struct CmdShell : InstallablesCommand, RunCommon, MixEnvironment
static auto r1 = registerCommand<CmdShell>("shell");
struct CmdRun : InstallableCommand, RunCommon
{
std::vector<std::string> args;
CmdRun()
{
expectArgs({
.label = "args",
.handler = {&args},
.completer = completePath
});
}
std::string description() override
{
return "run a Nix application";
}
Examples examples() override
{
return {
Example{
"To run Blender:",
"nix run blender-bin"
},
};
}
Strings getDefaultFlakeAttrPaths() override
{
Strings res{"defaultApp." + settings.thisSystem.get()};
for (auto & s : SourceExprCommand::getDefaultFlakeAttrPaths())
res.push_back(s);
return res;
}
Strings getDefaultFlakeAttrPathPrefixes() override
{
Strings res{"apps." + settings.thisSystem.get() + ".", "packages"};
for (auto & s : SourceExprCommand::getDefaultFlakeAttrPathPrefixes())
res.push_back(s);
return res;
}
void run(ref<Store> store) override
{
auto state = getEvalState();
auto app = installable->toApp(*state);
state->store->buildPaths(app.context);
Strings allArgs{app.program};
for (auto & i : args) allArgs.push_back(i);
runProgram(store, app.program, allArgs);
}
};
static auto r2 = registerCommand<CmdRun>("run");
void chrootHelper(int argc, char * * argv)
{
int p = 1;

View File

@ -6,8 +6,9 @@
#include "get-drvs.hh"
#include "common-args.hh"
#include "json.hh"
#include "json-to-value.hh"
#include "shared.hh"
#include "eval-cache.hh"
#include "attr-path.hh"
#include <regex>
#include <fstream>
@ -25,33 +26,17 @@ std::string hilite(const std::string & s, const std::smatch & m, std::string pos
m.empty()
? s
: std::string(m.prefix())
+ ANSI_RED + std::string(m.str()) + postfix
+ ANSI_GREEN + std::string(m.str()) + postfix
+ std::string(m.suffix());
}
struct CmdSearch : SourceExprCommand, MixJSON
struct CmdSearch : InstallableCommand, MixJSON
{
std::vector<std::string> res;
bool writeCache = true;
bool useCache = true;
CmdSearch()
{
expectArgs("regex", &res);
addFlag({
.longName = "update-cache",
.shortName = 'u',
.description = "update the package search cache",
.handler = {[&]() { writeCache = true; useCache = false; }}
});
addFlag({
.longName = "no-cache",
.description = "do not use or update the package search cache",
.handler = {[&]() { writeCache = false; useCache = false; }}
});
}
std::string description() override
@ -63,24 +48,32 @@ struct CmdSearch : SourceExprCommand, MixJSON
{
return {
Example{
"To show all available packages:",
"To show all packages in the flake in the current directory:",
"nix search"
},
Example{
"To show any packages containing 'blender' in its name or description:",
"nix search blender"
"To show packages in the 'nixpkgs' flake containing 'blender' in its name or description:",
"nix search nixpkgs blender"
},
Example{
"To search for Firefox or Chromium:",
"nix search 'firefox|chromium'"
"nix search nixpkgs 'firefox|chromium'"
},
Example{
"To search for git and frontend or gui:",
"nix search git 'frontend|gui'"
"To search for packages containing 'git' and either 'frontend' or 'gui':",
"nix search nixpkgs git 'frontend|gui'"
}
};
}
Strings getDefaultFlakeAttrPaths() override
{
return {
"packages." + settings.thisSystem.get() + ".",
"legacyPackages." + settings.thisSystem.get() + "."
};
}
void run(ref<Store> store) override
{
settings.readOnlyMode = true;
@ -88,189 +81,107 @@ struct CmdSearch : SourceExprCommand, MixJSON
// Empty search string should match all packages
// Use "^" here instead of ".*" due to differences in resulting highlighting
// (see #1893 -- libc++ claims empty search string is not in POSIX grammar)
if (res.empty()) {
if (res.empty())
res.push_back("^");
}
std::vector<std::regex> regexes;
regexes.reserve(res.size());
for (auto &re : res) {
for (auto & re : res)
regexes.push_back(std::regex(re, std::regex::extended | std::regex::icase));
}
auto state = getEvalState();
auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr;
auto sToplevel = state->symbols.create("_toplevel");
auto sRecurse = state->symbols.create("recurseForDerivations");
uint64_t results = 0;
bool fromCache = false;
std::map<std::string, std::string> results;
std::function<void(Value *, std::string, bool, JSONObject *)> doExpr;
doExpr = [&](Value * v, std::string attrPath, bool toplevel, JSONObject * cache) {
debug("at attribute '%s'", attrPath);
std::function<void(eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath)> visit;
visit = [&](eval_cache::AttrCursor & cursor, const std::vector<Symbol> & attrPath)
{
Activity act(*logger, lvlInfo, actUnknown,
fmt("evaluating '%s'", concatStringsSep(".", attrPath)));
try {
uint found = 0;
auto recurse = [&]()
{
for (const auto & attr : cursor.getAttrs()) {
auto cursor2 = cursor.getAttr(attr);
auto attrPath2(attrPath);
attrPath2.push_back(attr);
visit(*cursor2, attrPath2);
}
};
state->forceValue(*v);
if (cursor.isDerivation()) {
size_t found = 0;
if (v->type == tLambda && toplevel) {
Value * v2 = state->allocValue();
state->autoCallFunction(*state->allocBindings(1), *v, *v2);
v = v2;
state->forceValue(*v);
}
DrvName name(cursor.getAttr("name")->getString());
if (state->isDerivation(*v)) {
auto aMeta = cursor.maybeGetAttr("meta");
auto aDescription = aMeta ? aMeta->maybeGetAttr("description") : nullptr;
auto description = aDescription ? aDescription->getString() : "";
std::replace(description.begin(), description.end(), '\n', ' ');
auto attrPath2 = concatStringsSep(".", attrPath);
DrvInfo drv(*state, attrPath, v->attrs);
std::string description;
std::smatch attrPathMatch;
std::smatch descriptionMatch;
std::smatch nameMatch;
std::string name;
DrvName parsed(drv.queryName());
for (auto &regex : regexes) {
std::regex_search(attrPath, attrPathMatch, regex);
name = parsed.name;
std::regex_search(name, nameMatch, regex);
description = drv.queryMetaString("description");
std::replace(description.begin(), description.end(), '\n', ' ');
for (auto & regex : regexes) {
std::regex_search(attrPath2, attrPathMatch, regex);
std::regex_search(name.name, nameMatch, regex);
std::regex_search(description, descriptionMatch, regex);
if (!attrPathMatch.empty()
|| !nameMatch.empty()
|| !descriptionMatch.empty())
{
found++;
}
}
if (found == res.size()) {
results++;
if (json) {
auto jsonElem = jsonOut->object(attrPath);
jsonElem.attr("pkgName", parsed.name);
jsonElem.attr("version", parsed.version);
auto jsonElem = jsonOut->object(attrPath2);
jsonElem.attr("pname", name.name);
jsonElem.attr("version", name.version);
jsonElem.attr("description", description);
} else {
auto name = hilite(parsed.name, nameMatch, "\e[0;2m")
+ std::string(parsed.fullName, parsed.name.length());
results[attrPath] = fmt(
"* %s (%s)\n %s\n",
wrap("\e[0;1m", hilite(attrPath, attrPathMatch, "\e[0;1m")),
wrap("\e[0;2m", hilite(name, nameMatch, "\e[0;2m")),
hilite(description, descriptionMatch, ANSI_NORMAL));
}
}
if (cache) {
cache->attr("type", "derivation");
cache->attr("name", drv.queryName());
cache->attr("system", drv.querySystem());
if (description != "") {
auto meta(cache->object("meta"));
meta.attr("description", description);
auto name2 = hilite(name.name, nameMatch, "\e[0;2m");
if (results > 1) logger->stdout("");
logger->stdout(
"* %s%s",
wrap("\e[0;1m", hilite(attrPath2, attrPathMatch, "\e[0;1m")),
name.version != "" ? " (" + name.version + ")" : "");
if (description != "")
logger->stdout(
" %s", hilite(description, descriptionMatch, ANSI_NORMAL));
}
}
}
else if (v->type == tAttrs) {
else if (
attrPath.size() == 0
|| (attrPath[0] == "legacyPackages" && attrPath.size() <= 2)
|| (attrPath[0] == "packages" && attrPath.size() <= 2))
recurse();
if (!toplevel) {
auto attrs = v->attrs;
Bindings::iterator j = attrs->find(sRecurse);
if (j == attrs->end() || !state->forceBool(*j->value, *j->pos)) {
debug("skip attribute '%s'", attrPath);
return;
}
}
bool toplevel2 = false;
if (!fromCache) {
Bindings::iterator j = v->attrs->find(sToplevel);
toplevel2 = j != v->attrs->end() && state->forceBool(*j->value, *j->pos);
}
for (auto & i : *v->attrs) {
auto cache2 =
cache ? std::make_unique<JSONObject>(cache->object(i.name)) : nullptr;
doExpr(i.value,
attrPath == "" ? (std::string) i.name : attrPath + "." + (std::string) i.name,
toplevel2 || fromCache, cache2 ? cache2.get() : nullptr);
}
else if (attrPath[0] == "legacyPackages" && attrPath.size() > 2) {
auto attr = cursor.maybeGetAttr(state->sRecurseForDerivations);
if (attr && attr->getBool())
recurse();
}
} catch (AssertionError & e) {
} catch (Error & e) {
if (!toplevel) {
e.addTrace(std::nullopt, "While evaluating the attribute '%s'", attrPath);
} catch (EvalError & e) {
if (!(attrPath.size() > 0 && attrPath[0] == "legacyPackages"))
throw;
}
}
};
Path jsonCacheFileName = getCacheDir() + "/nix/package-search.json";
for (auto & [cursor, prefix] : installable->getCursors(*state, true))
visit(*cursor, parseAttrPath(*state, prefix));
if (useCache && pathExists(jsonCacheFileName)) {
warn("using cached results; pass '-u' to update the cache");
Value vRoot;
parseJSON(*state, readFile(jsonCacheFileName), vRoot);
fromCache = true;
doExpr(&vRoot, "", true, nullptr);
}
else {
createDirs(dirOf(jsonCacheFileName));
Path tmpFile = fmt("%s.tmp.%d", jsonCacheFileName, getpid());
std::ofstream jsonCacheFile;
try {
// iostream considered harmful
jsonCacheFile.exceptions(std::ofstream::failbit);
jsonCacheFile.open(tmpFile);
auto cache = writeCache ? std::make_unique<JSONObject>(jsonCacheFile, false) : nullptr;
doExpr(getSourceExpr(*state), "", true, cache.get());
} catch (std::exception &) {
/* Fun fact: catching std::ios::failure does not work
due to C++11 ABI shenanigans.
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=66145 */
if (!jsonCacheFile)
throw Error("error writing to %s", tmpFile);
throw;
}
if (writeCache && rename(tmpFile.c_str(), jsonCacheFileName.c_str()) == -1)
throw SysError("cannot rename '%s' to '%s'", tmpFile, jsonCacheFileName);
}
if (!json && results.size() == 0)
if (!json && !results)
throw Error("no results for the given search term(s)!");
RunPager pager;
for (auto el : results) std::cout << el.second << "\n";
}
};
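The rewritten command walks a flake's packages through the eval cache instead of the old on-disk JSON cache. A usage sketch following the examples above:
nix search nixpkgs blender              # each regex may match the attrpath, pname, or description
nix search nixpkgs 'firefox|chromium'   # multiple regexes must all match (AND across arguments)
nix search --json nixpkgs hello         # per-attrpath objects with pname, version, description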

View File

@ -33,7 +33,7 @@ struct CmdShowDerivation : InstallablesCommand
return {
Example{
"To show the store derivation that results from evaluating the Hello package:",
"nix show-derivation nixpkgs.hello"
"nix show-derivation nixpkgs#hello"
},
Example{
"To show the full derivation graph (if available) that produced your NixOS system:",

View File

@ -105,7 +105,8 @@ struct CmdSignPaths : StorePathsCommand
.shortName = 'k',
.description = "file containing the secret signing key",
.labels = {"file"},
.handler = {&secretKeyFile}
.handler = {&secretKeyFile},
.completer = completePath
});
}

View File

@ -55,15 +55,15 @@ struct CmdWhyDepends : SourceExprCommand
return {
Example{
"To show one path through the dependency graph leading from Hello to Glibc:",
"nix why-depends nixpkgs.hello nixpkgs.glibc"
"nix why-depends nixpkgs#hello nixpkgs#glibc"
},
Example{
"To show all files and paths in the dependency graph leading from Thunderbird to libX11:",
"nix why-depends --all nixpkgs.thunderbird nixpkgs.xorg.libX11"
"nix why-depends --all nixpkgs#thunderbird nixpkgs#xorg.libX11"
},
Example{
"To show why Glibc depends on itself:",
"nix why-depends nixpkgs.glibc nixpkgs.glibc"
"nix why-depends nixpkgs#glibc nixpkgs#glibc"
},
};
}
@ -72,17 +72,19 @@ struct CmdWhyDepends : SourceExprCommand
void run(ref<Store> store) override
{
auto package = parseInstallable(*this, store, _package, false);
auto packagePath = toStorePath(store, Build, package);
auto dependency = parseInstallable(*this, store, _dependency, false);
auto dependencyPath = toStorePath(store, NoBuild, dependency);
auto package = parseInstallable(store, _package);
auto packagePath = toStorePath(store, Realise::Outputs, operateOn, package);
auto dependency = parseInstallable(store, _dependency);
auto dependencyPath = toStorePath(store, Realise::Derivation, operateOn, dependency);
auto dependencyPathHash = dependencyPath.hashPart();
StorePathSet closure;
store->computeFSClosure({packagePath}, closure, false, false);
if (!closure.count(dependencyPath)) {
printError("'%s' does not depend on '%s'", package->what(), dependency->what());
printError("'%s' does not depend on '%s'",
store->printStorePath(packagePath),
store->printStorePath(dependencyPath));
return;
}

View File

@ -31,44 +31,44 @@ rev2=$(git -C $repo rev-parse HEAD)
# Fetch a worktree
unset _NIX_FORCE_HTTP
path0=$(nix eval --raw "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath")
path0=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath")
export _NIX_FORCE_HTTP=1
[[ $(tail -n 1 $path0/hello) = "hello" ]]
# Fetch the default branch.
path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath")
[[ $(cat $path/hello) = world ]]
# In pure eval mode, fetchGit without a revision should fail.
[[ $(nix eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))") = world ]]
(! nix eval --pure-eval --raw "(builtins.readFile (fetchGit file://$repo + \"/hello\"))")
[[ $(nix eval --impure --raw --expr "builtins.readFile (fetchGit file://$repo + \"/hello\")") = world ]]
(! nix eval --raw --expr "builtins.readFile (fetchGit file://$repo + \"/hello\")")
# Fetch using an explicit revision hash.
path2=$(nix eval --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path2 ]]
# In pure eval mode, fetchGit with a revision should succeed.
[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]]
[[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]]
# Fetch again. This should be cached.
mv $repo ${repo}-tmp
path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath")
[[ $path = $path2 ]]
[[ $(nix eval "(builtins.fetchGit file://$repo).revCount") = 2 ]]
[[ $(nix eval --raw "(builtins.fetchGit file://$repo).rev") = $rev2 ]]
[[ $(nix eval --impure --expr "(builtins.fetchGit file://$repo).revCount") = 2 ]]
[[ $(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).rev") = $rev2 ]]
# Fetching with an explicit hash should succeed.
path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path2 ]]
path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev1\"; }).outPath")
path2=$(nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev1\"; }).outPath")
[[ $(cat $path2/hello) = utrecht ]]
mv ${repo}-tmp $repo
# Using a clean working tree should produce the same result.
path2=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
[[ $path = $path2 ]]
# Using an unclean tree should yield the tracked but uncommitted changes.
@@ -80,26 +80,26 @@ git -C $repo add dir1/foo
git -C $repo rm hello
unset _NIX_FORCE_HTTP
path2=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
[ ! -e $path2/hello ]
[ ! -e $path2/bar ]
[ ! -e $path2/dir2/bar ]
[ ! -e $path2/.git ]
[[ $(cat $path2/dir1/foo) = foo ]]
[[ $(nix eval --raw "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]]
[[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]]
# ... unless we're using an explicit ref or rev.
path3=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath")
path3=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath")
[[ $path = $path3 ]]
path3=$(nix eval --raw "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; }).outPath")
path3=$(nix eval --raw --expr "(builtins.fetchGit { url = $repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path3 ]]
# Committing should not affect the store path.
git -C $repo commit -m 'Bla3' -a
path4=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchGit file://$repo).outPath")
path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchGit file://$repo).outPath")
[[ $path2 = $path4 ]]
# tarball-ttl should be ignored if we specify a rev
@@ -107,32 +107,32 @@ echo delft > $repo/hello
git -C $repo add hello
git -C $repo commit -m 'Bla4'
rev3=$(git -C $repo rev-parse HEAD)
nix eval --tarball-ttl 3600 "(builtins.fetchGit { url = $repo; rev = \"$rev3\"; })" >/dev/null
nix eval --tarball-ttl 3600 --expr "builtins.fetchGit { url = $repo; rev = \"$rev3\"; }" >/dev/null
# Update 'path' to reflect latest master
path=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath")
# Check behavior when non-master branch is used
git -C $repo checkout $rev2 -b dev
echo dev > $repo/hello
# File URI uses dirty tree unless specified otherwise
path2=$(nix eval --raw "(builtins.fetchGit file://$repo).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath")
[ $(cat $path2/hello) = dev ]
# Using local path with branch other than 'master' should work when clean or dirty
path3=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
path3=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
# (check dirty-tree handling was used)
[[ $(nix eval --raw "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]]
[[ $(nix eval --impure --raw --expr "(builtins.fetchGit $repo).rev") = 0000000000000000000000000000000000000000 ]]
# Committing shouldn't change store path, or switch to using 'master'
git -C $repo commit -m 'Bla5' -a
path4=$(nix eval --raw "(builtins.fetchGit $repo).outPath")
path4=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
[[ $(cat $path4/hello) = dev ]]
[[ $path3 = $path4 ]]
# Confirm same as 'dev' branch
path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
[[ $path3 = $path5 ]]
@@ -141,19 +141,18 @@ rm -rf $TEST_HOME/.cache/nix
# Try again, but without 'git' on PATH. This should fail.
NIX=$(command -v nix)
# This should fail
(! PATH= $NIX eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath" )
(! PATH= $NIX eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath" )
# Try again, with 'git' available. This should work.
path5=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
path5=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = $repo; ref = \"dev\"; }).outPath")
[[ $path3 = $path5 ]]
# Fetching a shallow repo shouldn't work by default, because we can't
# return a revCount.
git clone --depth 1 file://$repo $TEST_ROOT/shallow
(! nix eval --raw "(builtins.fetchGit { url = $TEST_ROOT/shallow; ref = \"dev\"; }).outPath")
(! nix eval --impure --raw --expr "(builtins.fetchGit { url = $TEST_ROOT/shallow; ref = \"dev\"; }).outPath")
# But you can request a shallow clone, which won't return a revCount.
path6=$(nix eval --raw "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).outPath")
path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).outPath")
[[ $path3 = $path6 ]]
[[ $(nix eval "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]]
[[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]]
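
The bulk of this test's churn follows the new `nix eval` command-line interface: expressions are now passed via an explicit --expr flag rather than as a bare positional string, evaluation is pure by default (so fetching an unlocked, mutable source needs --impure), and --refresh replaces --tarball-ttl 0 for cache busting. A minimal before/after sketch of the pattern repeated above, reusing the test's $repo and $rev2 variables:

# Old CLI: positional expression, impure by default, TTL-based cache busting
nix eval --raw "(builtins.fetchGit file://$repo).outPath"
nix eval --tarball-ttl 0 --raw "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath"

# New CLI: explicit --expr, pure by default (--impure opts out), --refresh to force a re-fetch
nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath"
nix eval --refresh --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev2\"; }).outPath"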

View File

@@ -19,7 +19,7 @@ echo utrecht > "$repo"/hello
git -C "$repo" add hello
git -C "$repo" commit -m 'Bla1'
path=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath")
path=$(nix eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = \"master\"; }).outPath")
# Test various combinations of ref names
# (taken from the git project)
@@ -42,7 +42,7 @@ valid_ref() {
{ set +x; printf >&2 '\n>>>>>>>>>> valid_ref %s\b <<<<<<<<<<\n' $(printf %s "$1" | sed -n -e l); set -x; }
git check-ref-format --branch "$1" >/dev/null
git -C "$repo" branch "$1" master >/dev/null
path1=$(nix eval --raw "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath")
path1=$(nix eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath")
[[ $path1 = $path ]]
git -C "$repo" branch -D "$1" >/dev/null
}
@@ -56,7 +56,7 @@ invalid_ref() {
else
(! git check-ref-format --branch "$1" >/dev/null 2>&1)
fi
nix --debug eval --raw "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath" 2>&1 | grep 'invalid Git branch/tag name' >/dev/null
nix --debug eval --raw --impure --expr "(builtins.fetchGit { url = $repo; ref = ''$1''; }).outPath" 2>&1 | grep 'invalid Git branch/tag name' >/dev/null
}

View File

@@ -38,18 +38,18 @@ git -C $rootRepo commit -m "Add submodule"
rev=$(git -C $rootRepo rev-parse HEAD)
r1=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath")
r2=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = false; }).outPath")
r3=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
r1=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath")
r2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = false; }).outPath")
r3=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
[[ $r1 == $r2 ]]
[[ $r2 != $r3 ]]
r4=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; }).outPath")
r5=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = false; }).outPath")
r6=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
r7=$(nix eval --raw "(builtins.fetchGit { url = $rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
r8=$(nix eval --raw "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
r4=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; }).outPath")
r5=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = false; }).outPath")
r6=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
r7=$(nix eval --raw --expr "(builtins.fetchGit { url = $rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
r8=$(nix eval --raw --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
[[ $r1 == $r4 ]]
[[ $r4 == $r5 ]]
@@ -57,19 +57,19 @@ r8=$(nix eval --raw "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submo
[[ $r6 == $r7 ]]
[[ $r7 == $r8 ]]
have_submodules=$(nix eval "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; }).submodules")
have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; }).submodules")
[[ $have_submodules == false ]]
have_submodules=$(nix eval "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = false; }).submodules")
have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = false; }).submodules")
[[ $have_submodules == false ]]
have_submodules=$(nix eval "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).submodules")
have_submodules=$(nix eval --expr "(builtins.fetchGit { url = $rootRepo; rev = \"$rev\"; submodules = true; }).submodules")
[[ $have_submodules == true ]]
pathWithoutSubmodules=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath")
pathWithSubmodules=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
pathWithSubmodulesAgain=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
pathWithSubmodulesAgainWithRef=$(nix eval --raw "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
pathWithoutSubmodules=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; }).outPath")
pathWithSubmodules=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
pathWithSubmodulesAgain=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; rev = \"$rev\"; submodules = true; }).outPath")
pathWithSubmodulesAgainWithRef=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$rootRepo; ref = \"master\"; rev = \"$rev\"; submodules = true; }).outPath")
# The resulting store path cannot be the same.
[[ $pathWithoutSubmodules != $pathWithSubmodules ]]
@@ -91,7 +91,7 @@ test "$(find "$pathWithSubmodules" -name .git)" = ""
# Git repos without submodules can be fetched with submodules = true.
subRev=$(git -C $subRepo rev-parse HEAD)
noSubmoduleRepoBaseline=$(nix eval --raw "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; }).outPath")
noSubmoduleRepo=$(nix eval --raw "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; submodules = true; }).outPath")
noSubmoduleRepoBaseline=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; }).outPath")
noSubmoduleRepo=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$subRepo; rev = \"$subRev\"; submodules = true; }).outPath")
[[ $noSubmoduleRepoBaseline == $noSubmoduleRepo ]]

View File

@@ -9,7 +9,7 @@ clearStore
repo=$TEST_ROOT/hg
rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix/hg
rm -rf $repo ${repo}-tmp $TEST_HOME/.cache/nix
hg init $repo
echo '[ui]' >> $repo/.hg/hgrc
@@ -26,43 +26,43 @@ hg commit --cwd $repo -m 'Bla2'
rev2=$(hg log --cwd $repo -r tip --template '{node}')
# Fetch the default branch.
path=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath")
path=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath")
[[ $(cat $path/hello) = world ]]
# In pure eval mode, fetchMercurial without a revision should fail.
[[ $(nix eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") = world ]]
(! nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))")
[[ $(nix eval --impure --raw --expr "(builtins.readFile (fetchMercurial file://$repo + \"/hello\"))") = world ]]
(! nix eval --raw --expr "builtins.readFile (fetchMercurial file://$repo + \"/hello\")")
# Fetch using an explicit revision hash.
path2=$(nix eval --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path2 ]]
# In pure eval mode, fetchMercurial with a revision should succeed.
[[ $(nix eval --pure-eval --raw "(builtins.readFile (fetchMercurial { url = file://$repo; rev = \"$rev2\"; } + \"/hello\"))") = world ]]
[[ $(nix eval --raw --expr "builtins.readFile (fetchMercurial { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]]
# Fetch again. This should be cached.
mv $repo ${repo}-tmp
path2=$(nix eval --raw "(builtins.fetchMercurial file://$repo).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).outPath")
[[ $path = $path2 ]]
[[ $(nix eval --raw "(builtins.fetchMercurial file://$repo).branch") = default ]]
[[ $(nix eval "(builtins.fetchMercurial file://$repo).revCount") = 1 ]]
[[ $(nix eval --raw "(builtins.fetchMercurial file://$repo).rev") = $rev2 ]]
[[ $(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).branch") = default ]]
[[ $(nix eval --impure --expr "(builtins.fetchMercurial file://$repo).revCount") = 1 ]]
[[ $(nix eval --impure --raw --expr "(builtins.fetchMercurial file://$repo).rev") = $rev2 ]]
# But with TTL 0, it should fail.
(! nix eval --tarball-ttl 0 "(builtins.fetchMercurial file://$repo)")
(! nix eval --impure --refresh --expr "builtins.fetchMercurial file://$repo")
# Fetching with an explicit hash should succeed.
path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath")
path2=$(nix eval --refresh --raw --expr "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev2\"; }).outPath")
[[ $path = $path2 ]]
path2=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev1\"; }).outPath")
path2=$(nix eval --refresh --raw --expr "(builtins.fetchMercurial { url = file://$repo; rev = \"$rev1\"; }).outPath")
[[ $(cat $path2/hello) = utrecht ]]
mv ${repo}-tmp $repo
# Using a clean working tree should produce the same result.
path2=$(nix eval --raw "(builtins.fetchMercurial $repo).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial $repo).outPath")
[[ $path = $path2 ]]
# Using an unclean tree should yield the tracked but uncommitted changes.
@@ -73,21 +73,21 @@ echo bar > $repo/dir2/bar
hg add --cwd $repo dir1/foo
hg rm --cwd $repo hello
path2=$(nix eval --raw "(builtins.fetchMercurial $repo).outPath")
path2=$(nix eval --impure --raw --expr "(builtins.fetchMercurial $repo).outPath")
[ ! -e $path2/hello ]
[ ! -e $path2/bar ]
[ ! -e $path2/dir2/bar ]
[ ! -e $path2/.hg ]
[[ $(cat $path2/dir1/foo) = foo ]]
[[ $(nix eval --raw "(builtins.fetchMercurial $repo).rev") = 0000000000000000000000000000000000000000 ]]
[[ $(nix eval --impure --raw --expr "(builtins.fetchMercurial $repo).rev") = 0000000000000000000000000000000000000000 ]]
# ... unless we're using an explicit rev.
path3=$(nix eval --raw "(builtins.fetchMercurial { url = $repo; rev = \"default\"; }).outPath")
# ... unless we're using an explicit ref.
path3=$(nix eval --impure --raw --expr "(builtins.fetchMercurial { url = $repo; rev = \"default\"; }).outPath")
[[ $path = $path3 ]]
# Committing should not affect the store path.
hg commit --cwd $repo -m 'Bla3'
path4=$(nix eval --tarball-ttl 0 --raw "(builtins.fetchMercurial file://$repo).outPath")
path4=$(nix eval --impure --refresh --raw --expr "(builtins.fetchMercurial file://$repo).outPath")
[[ $path2 = $path4 ]]

721
tests/flakes.sh Normal file
View File

@@ -0,0 +1,721 @@
source common.sh
if [[ -z $(type -p git) ]]; then
echo "Git not installed; skipping flake tests"
exit 99
fi
if [[ -z $(type -p hg) ]]; then
echo "Mercurial not installed; skipping flake tests"
exit 99
fi
clearStore
rm -rf $TEST_HOME/.cache $TEST_HOME/.config
registry=$TEST_ROOT/registry.json
flake1Dir=$TEST_ROOT/flake1
flake2Dir=$TEST_ROOT/flake2
flake3Dir=$TEST_ROOT/flake3
flake4Dir=$TEST_ROOT/flake4
flake5Dir=$TEST_ROOT/flake5
flake6Dir=$TEST_ROOT/flake6
flake7Dir=$TEST_ROOT/flake7
templatesDir=$TEST_ROOT/templates
nonFlakeDir=$TEST_ROOT/nonFlake
flakeA=$TEST_ROOT/flakeA
flakeB=$TEST_ROOT/flakeB
for repo in $flake1Dir $flake2Dir $flake3Dir $flake7Dir $templatesDir $nonFlakeDir $flakeA $flakeB; do
rm -rf $repo $repo.tmp
mkdir $repo
git -C $repo init
git -C $repo config user.email "foobar@example.com"
git -C $repo config user.name "Foobar"
done
cat > $flake1Dir/flake.nix <<EOF
{
description = "Bla bla";
outputs = inputs: rec {
packages.$system.foo = import ./simple.nix;
defaultPackage.$system = packages.$system.foo;
# To test "nix flake init".
legacyPackages.x86_64-linux.hello = import ./simple.nix;
};
}
EOF
cp ./simple.nix ./simple.builder.sh ./config.nix $flake1Dir/
git -C $flake1Dir add flake.nix simple.nix simple.builder.sh config.nix
git -C $flake1Dir commit -m 'Initial'
cat > $flake2Dir/flake.nix <<EOF
{
description = "Fnord";
outputs = { self, flake1 }: rec {
packages.$system.bar = flake1.packages.$system.foo;
};
}
EOF
git -C $flake2Dir add flake.nix
git -C $flake2Dir commit -m 'Initial'
cat > $flake3Dir/flake.nix <<EOF
{
description = "Fnord";
outputs = { self, flake2 }: rec {
packages.$system.xyzzy = flake2.packages.$system.bar;
checks = {
xyzzy = packages.$system.xyzzy;
};
};
}
EOF
git -C $flake3Dir add flake.nix
git -C $flake3Dir commit -m 'Initial'
cat > $nonFlakeDir/README.md <<EOF
FNORD
EOF
git -C $nonFlakeDir add README.md
git -C $nonFlakeDir commit -m 'Initial'
cat > $registry <<EOF
{
"version": 2,
"flakes": [
{ "from": {
"type": "indirect",
"id": "flake1"
},
"to": {
"type": "git",
"url": "file://$flake1Dir"
}
},
{ "from": {
"type": "indirect",
"id": "flake2"
},
"to": {
"type": "git",
"url": "file://$flake2Dir"
}
},
{ "from": {
"type": "indirect",
"id": "flake3"
},
"to": {
"type": "git",
"url": "file://$flake3Dir"
}
},
{ "from": {
"type": "indirect",
"id": "flake4"
},
"to": {
"type": "indirect",
"id": "flake3"
}
},
{ "from": {
"type": "indirect",
"id": "flake5"
},
"to": {
"type": "hg",
"url": "file://$flake5Dir"
}
},
{ "from": {
"type": "indirect",
"id": "nixpkgs"
},
"to": {
"type": "indirect",
"id": "flake1"
}
},
{ "from": {
"type": "indirect",
"id": "templates"
},
"to": {
"type": "git",
"url": "file://$templatesDir"
}
}
]
}
EOF
# Test 'nix registry list'.
[[ $(nix registry list | wc -l) == 7 ]]
# Test 'nix flake info'.
nix flake info flake1 | grep -q 'URL: .*flake1.*'
# Test 'nix flake info' on a local flake.
(cd $flake1Dir && nix flake info) | grep -q 'URL: .*flake1.*'
(cd $flake1Dir && nix flake info .) | grep -q 'URL: .*flake1.*'
nix flake info $flake1Dir | grep -q 'URL: .*flake1.*'
# Test 'nix flake info --json'.
json=$(nix flake info flake1 --json | jq .)
[[ $(echo "$json" | jq -r .description) = 'Bla bla' ]]
[[ -d $(echo "$json" | jq -r .path) ]]
[[ $(echo "$json" | jq -r .lastModified) = $(git -C $flake1Dir log -n1 --format=%ct) ]]
hash1=$(echo "$json" | jq -r .revision)
echo -n '# foo' >> $flake1Dir/flake.nix
git -C $flake1Dir commit -a -m 'Foo'
hash2=$(nix flake info flake1 --json --refresh | jq -r .revision)
[[ $hash1 != $hash2 ]]
# Test 'nix build' on a flake.
nix build -o $TEST_ROOT/result flake1#foo
[[ -e $TEST_ROOT/result/hello ]]
# Test defaultPackage.
nix build -o $TEST_ROOT/result flake1
[[ -e $TEST_ROOT/result/hello ]]
nix build -o $TEST_ROOT/result $flake1Dir
nix build -o $TEST_ROOT/result git+file://$flake1Dir
# Check that store symlinks inside a flake are not interpreted as flakes.
nix build -o $flake1Dir/result git+file://$flake1Dir
nix path-info $flake1Dir/result
# 'getFlake' on a mutable flakeref should fail in pure mode, but succeed in impure mode.
(! nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"$flake1Dir\").defaultPackage.$system")
nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"$flake1Dir\").defaultPackage.$system" --impure
# 'getFlake' on an immutable flakeref should succeed even in pure mode.
nix build -o $TEST_ROOT/result --expr "(builtins.getFlake \"git+file://$flake1Dir?rev=$hash2\").defaultPackage.$system"
# Building a flake with an unlocked dependency should fail in pure mode.
(! nix build -o $TEST_ROOT/result flake2#bar --no-registries)
(! nix eval --expr "builtins.getFlake \"$flake2Dir\"")
# But should succeed in impure mode.
(! nix build -o $TEST_ROOT/result flake2#bar --impure)
nix build -o $TEST_ROOT/result flake2#bar --impure --no-write-lock-file
# Building a local flake with an unlocked dependency should fail with --no-update-lock-file.
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
# But it should succeed without that flag.
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-write-lock-file
nix build -o $TEST_ROOT/result $flake2Dir#bar --no-update-lock-file 2>&1 | grep 'requires lock file changes'
nix build -o $TEST_ROOT/result $flake2Dir#bar --commit-lock-file
[[ -e $flake2Dir/flake.lock ]]
[[ -z $(git -C $flake2Dir diff master) ]]
# Rerunning the build should not change the lockfile.
nix build -o $TEST_ROOT/result $flake2Dir#bar
[[ -z $(git -C $flake2Dir diff master) ]]
# Building with a lockfile should not require a fetch of the registry.
nix build -o $TEST_ROOT/result --flake-registry file:///no-registry.json $flake2Dir#bar --refresh
nix build -o $TEST_ROOT/result --no-registries $flake2Dir#bar --refresh
# Updating the flake should not change the lockfile.
nix flake update $flake2Dir
[[ -z $(git -C $flake2Dir diff master) ]]
# Now we should be able to build the flake in pure mode.
nix build -o $TEST_ROOT/result flake2#bar
# Or without a registry.
nix build -o $TEST_ROOT/result --no-registries git+file://$flake2Dir#bar --refresh
# Test whether indirect dependencies work.
nix build -o $TEST_ROOT/result $flake3Dir#xyzzy
git -C $flake3Dir add flake.lock
# Add dependency to flake3.
rm $flake3Dir/flake.nix
cat > $flake3Dir/flake.nix <<EOF
{
description = "Fnord";
outputs = { self, flake1, flake2 }: rec {
packages.$system.xyzzy = flake2.packages.$system.bar;
packages.$system."sth sth" = flake1.packages.$system.foo;
};
}
EOF
git -C $flake3Dir add flake.nix
git -C $flake3Dir commit -m 'Update flake.nix'
# Check whether `nix build` works with an incomplete lockfile
nix build -o $TEST_ROOT/result $flake3Dir#"sth sth"
nix build -o $TEST_ROOT/result $flake3Dir#"sth%20sth"
# Check whether it saved the lockfile
(! [[ -z $(git -C $flake3Dir diff master) ]])
git -C $flake3Dir add flake.lock
git -C $flake3Dir commit -m 'Add lockfile'
# Test whether registry caching works.
nix registry list --flake-registry file://$registry | grep -q flake3
mv $registry $registry.tmp
nix-store --gc
nix registry list --flake-registry file://$registry --refresh | grep -q flake3
mv $registry.tmp $registry
# Test whether flakes are registered as GC roots for offline use.
# FIXME: use tarballs rather than git.
rm -rf $TEST_HOME/.cache
nix-store --gc # get rid of copies in the store to ensure they get fetched to our git cache
_NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result git+file://$flake2Dir#bar
mv $flake1Dir $flake1Dir.tmp
mv $flake2Dir $flake2Dir.tmp
nix-store --gc
_NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result git+file://$flake2Dir#bar
_NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result git+file://$flake2Dir#bar --refresh
mv $flake1Dir.tmp $flake1Dir
mv $flake2Dir.tmp $flake2Dir
# Add nonFlakeInputs to flake3.
rm $flake3Dir/flake.nix
cat > $flake3Dir/flake.nix <<EOF
{
inputs = {
flake1 = {};
flake2 = {};
nonFlake = {
url = git+file://$nonFlakeDir;
flake = false;
};
};
description = "Fnord";
outputs = inputs: rec {
packages.$system.xyzzy = inputs.flake2.packages.$system.bar;
packages.$system.sth = inputs.flake1.packages.$system.foo;
packages.$system.fnord =
with import ./config.nix;
mkDerivation {
inherit system;
name = "fnord";
buildCommand = ''
cat \${inputs.nonFlake}/README.md > \$out
'';
};
};
}
EOF
cp ./config.nix $flake3Dir
git -C $flake3Dir add flake.nix config.nix
git -C $flake3Dir commit -m 'Add nonFlakeInputs'
# Check whether `nix build` works with a lockfile which is missing a
# nonFlakeInputs.
nix build -o $TEST_ROOT/result $flake3Dir#sth --commit-lock-file
nix build -o $TEST_ROOT/result flake3#fnord
[[ $(cat $TEST_ROOT/result) = FNORD ]]
# Check whether flake input fetching is lazy: flake3#sth does not
# depend on flake2, so this shouldn't fail.
rm -rf $TEST_HOME/.cache
clearStore
mv $flake2Dir $flake2Dir.tmp
mv $nonFlakeDir $nonFlakeDir.tmp
nix build -o $TEST_ROOT/result flake3#sth
(! nix build -o $TEST_ROOT/result flake3#xyzzy)
(! nix build -o $TEST_ROOT/result flake3#fnord)
mv $flake2Dir.tmp $flake2Dir
mv $nonFlakeDir.tmp $nonFlakeDir
nix build -o $TEST_ROOT/result flake3#xyzzy flake3#fnord
# Test doing multiple `lookupFlake`s
nix build -o $TEST_ROOT/result flake4#xyzzy
# Test 'nix flake update' and --override-flake.
nix flake update $flake3Dir
[[ -z $(git -C $flake3Dir diff master) ]]
nix flake update $flake3Dir --recreate-lock-file --override-flake flake2 nixpkgs
[[ ! -z $(git -C $flake3Dir diff master) ]]
# Make branch "removeXyzzy" where flake3 doesn't have xyzzy anymore
git -C $flake3Dir checkout -b removeXyzzy
rm $flake3Dir/flake.nix
cat > $flake3Dir/flake.nix <<EOF
{
inputs = {
nonFlake = {
url = "$nonFlakeDir";
flake = false;
};
};
description = "Fnord";
outputs = { self, flake1, flake2, nonFlake }: rec {
packages.$system.sth = flake1.packages.$system.foo;
packages.$system.fnord =
with import ./config.nix;
mkDerivation {
inherit system;
name = "fnord";
buildCommand = ''
cat \${nonFlake}/README.md > \$out
'';
};
};
}
EOF
git -C $flake3Dir add flake.nix
git -C $flake3Dir commit -m 'Remove packages.xyzzy'
git -C $flake3Dir checkout master
# Test whether fuzzy-matching works for IsAlias
(! nix build -o $TEST_ROOT/result flake4/removeXyzzy#xyzzy)
# Test whether fuzzy-matching works for IsGit
(! nix build -o $TEST_ROOT/result flake4/removeXyzzy#xyzzy)
nix build -o $TEST_ROOT/result flake4/removeXyzzy#sth
# Testing the nix CLI
nix registry add flake1 flake3
[[ $(nix registry list | wc -l) == 8 ]]
nix registry pin flake1
[[ $(nix registry list | wc -l) == 8 ]]
nix registry remove flake1
[[ $(nix registry list | wc -l) == 7 ]]
# Test 'nix flake init'.
cat > $templatesDir/flake.nix <<EOF
{
description = "Some templates";
outputs = { self }: {
templates = {
trivial = {
path = ./trivial;
description = "A trivial flake";
};
};
defaultTemplate = self.templates.trivial;
};
}
EOF
mkdir $templatesDir/trivial
cat > $templatesDir/trivial/flake.nix <<EOF
{
description = "A flake for building Hello World";
outputs = { self, nixpkgs }: {
packages.x86_64-linux.hello = nixpkgs.legacyPackages.x86_64-linux.hello;
defaultPackage.x86_64-linux = self.packages.x86_64-linux.hello;
};
}
EOF
git -C $templatesDir add flake.nix trivial/flake.nix
git -C $templatesDir commit -m 'Initial'
nix flake check templates
nix flake show templates
(cd $flake7Dir && nix flake init)
(cd $flake7Dir && nix flake init) # check idempotence
git -C $flake7Dir add flake.nix
nix flake check $flake7Dir
nix flake show $flake7Dir
git -C $flake7Dir commit -a -m 'Initial'
# Test 'nix flake new'.
rm -rf $flake6Dir
nix flake new -t templates#trivial $flake6Dir
nix flake new -t templates#trivial $flake6Dir # check idempotence
nix flake check $flake6Dir
# Test 'nix flake clone'.
rm -rf $TEST_ROOT/flake1-v2
nix flake clone flake1 --dest $TEST_ROOT/flake1-v2
[ -e $TEST_ROOT/flake1-v2/flake.nix ]
# More 'nix flake check' tests.
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
overlay = final: prev: {
};
};
}
EOF
nix flake check $flake3Dir
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
overlay = finalll: prev: {
};
};
}
EOF
(! nix flake check $flake3Dir)
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModules.foo = {
a.b.c = 123;
foo = true;
};
};
}
EOF
nix flake check $flake3Dir
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModules.foo = {
a.b.c = 123;
foo = assert false; true;
};
};
}
EOF
(! nix flake check $flake3Dir)
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModule = { config, pkgs, ... }: {
a.b.c = 123;
};
};
}
EOF
nix flake check $flake3Dir
cat > $flake3Dir/flake.nix <<EOF
{
outputs = { flake1, self }: {
nixosModule = { config, pkgs }: {
a.b.c = 123;
};
};
}
EOF
(! nix flake check $flake3Dir)
# Test 'follows' inputs.
cat > $flake3Dir/flake.nix <<EOF
{
inputs.foo = {
type = "indirect";
id = "flake1";
};
inputs.bar.follows = "foo";
outputs = { self, foo, bar }: {
};
}
EOF
nix flake update $flake3Dir
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["foo"]' ]]
cat > $flake3Dir/flake.nix <<EOF
{
inputs.bar.follows = "flake2/flake1";
outputs = { self, flake2, bar }: {
};
}
EOF
nix flake update $flake3Dir
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2","flake1"]' ]]
cat > $flake3Dir/flake.nix <<EOF
{
inputs.bar.follows = "flake2";
outputs = { self, flake2, bar }: {
};
}
EOF
nix flake update $flake3Dir
[[ $(jq -c .nodes.root.inputs.bar $flake3Dir/flake.lock) = '["flake2"]' ]]
# Test overriding inputs of inputs.
cat > $flake3Dir/flake.nix <<EOF
{
inputs.flake2.inputs.flake1 = {
type = "git";
url = file://$flake7Dir;
};
outputs = { self, flake2 }: {
};
}
EOF
nix flake update $flake3Dir
[[ $(jq .nodes.flake1.locked.url $flake3Dir/flake.lock) =~ flake7 ]]
cat > $flake3Dir/flake.nix <<EOF
{
inputs.flake2.inputs.flake1.follows = "foo";
inputs.foo.url = git+file://$flake7Dir;
outputs = { self, flake2 }: {
};
}
EOF
nix flake update $flake3Dir --recreate-lock-file
[[ $(jq -c .nodes.flake2.inputs.flake1 $flake3Dir/flake.lock) =~ '["foo"]' ]]
[[ $(jq .nodes.foo.locked.url $flake3Dir/flake.lock) =~ flake7 ]]
# Test Mercurial flakes.
rm -rf $flake5Dir
hg init $flake5Dir
cat > $flake5Dir/flake.nix <<EOF
{
outputs = { self, flake1 }: {
defaultPackage.$system = flake1.defaultPackage.$system;
expr = assert builtins.pathExists ./flake.lock; 123;
};
}
EOF
hg add $flake5Dir/flake.nix
hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Initial commit'
nix build -o $TEST_ROOT/result hg+file://$flake5Dir
[[ -e $TEST_ROOT/result/hello ]]
(! nix flake info --json hg+file://$flake5Dir | jq -e -r .revision)
nix eval hg+file://$flake5Dir#expr
nix eval hg+file://$flake5Dir#expr
(! nix eval hg+file://$flake5Dir#expr --no-allow-dirty)
(! nix flake info --json hg+file://$flake5Dir | jq -e -r .revision)
hg commit --config ui.username=foobar@example.org $flake5Dir -m 'Add lock file'
nix flake info --json hg+file://$flake5Dir --refresh | jq -e -r .revision
nix flake info --json hg+file://$flake5Dir
[[ $(nix flake info --json hg+file://$flake5Dir | jq -e -r .revCount) = 1 ]]
nix build -o $TEST_ROOT/result hg+file://$flake5Dir --no-registries --no-allow-dirty
# Test tarball flakes
tar cfz $TEST_ROOT/flake.tar.gz -C $TEST_ROOT --exclude .hg flake5
nix build -o $TEST_ROOT/result file://$TEST_ROOT/flake.tar.gz
# Building with a tarball URL containing a SRI hash should also work.
url=$(nix flake info --json file://$TEST_ROOT/flake.tar.gz | jq -r .url)
[[ $url =~ sha256- ]]
nix build -o $TEST_ROOT/result $url
# Building with an incorrect SRI hash should fail.
nix build -o $TEST_ROOT/result "file://$TEST_ROOT/flake.tar.gz?narHash=sha256-qQ2Zz4DNHViCUrp6gTS7EE4+RMqFQtUfWF2UNUtJKS0=" 2>&1 | grep 'NAR hash mismatch'
# Test --override-input.
git -C $flake3Dir reset --hard
nix flake update $flake3Dir --override-input flake2/flake1 flake5 -vvvvv
[[ $(jq .nodes.flake1_2.locked.url $flake3Dir/flake.lock) =~ flake5 ]]
nix flake update $flake3Dir --override-input flake2/flake1 flake1
[[ $(jq -r .nodes.flake1_2.locked.rev $flake3Dir/flake.lock) =~ $hash2 ]]
nix flake update $flake3Dir --override-input flake2/flake1 flake1/master/$hash1
[[ $(jq -r .nodes.flake1_2.locked.rev $flake3Dir/flake.lock) =~ $hash1 ]]
# Test --update-input.
nix flake update $flake3Dir
[[ $(jq -r .nodes.flake1_2.locked.rev $flake3Dir/flake.lock) = $hash1 ]]
nix flake update $flake3Dir --update-input flake2/flake1
[[ $(jq -r .nodes.flake1_2.locked.rev $flake3Dir/flake.lock) =~ $hash2 ]]
# Test 'nix flake list-inputs'.
[[ $(nix flake list-inputs $flake3Dir | wc -l) == 5 ]]
nix flake list-inputs $flake3Dir --json | jq .
# Test circular flake dependencies.
cat > $flakeA/flake.nix <<EOF
{
inputs.b.url = git+file://$flakeB;
inputs.b.inputs.a.follows = "/";
outputs = { self, nixpkgs, b }: {
foo = 123 + b.bar;
xyzzy = 1000;
};
}
EOF
git -C $flakeA add flake.nix
cat > $flakeB/flake.nix <<EOF
{
inputs.a.url = git+file://$flakeA;
outputs = { self, nixpkgs, a }: {
bar = 456 + a.xyzzy;
};
}
EOF
git -C $flakeB add flake.nix
git -C $flakeB commit -a -m 'Foo'
[[ $(nix eval $flakeA#foo) = 1579 ]]
[[ $(nix eval $flakeA#foo) = 1579 ]]
sed -i $flakeB/flake.nix -e 's/456/789/'
git -C $flakeB commit -a -m 'Foo'
[[ $(nix eval --update-input b $flakeA#foo) = 1912 ]]
# Test list-inputs with circular dependencies
nix flake list-inputs $flakeA
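
For reference, the registry and lock-file handling exercised throughout this test boils down to a handful of invocations. This is only a summary of commands that already appear above, with /path/to/flake3 standing in for the $flake3Dir checkout:

# User-registry manipulation
nix registry add flake1 flake3      # alias one indirect id to another
nix registry pin flake1             # pin the alias to its current revision
nix registry remove flake1          # drop the user-registry entry again

# Lock-file manipulation for a local flake checkout
nix flake update /path/to/flake3                                   # write or verify flake.lock
nix flake update /path/to/flake3 --update-input flake2/flake1      # refresh a single transitive input
nix flake update /path/to/flake3 --override-input flake2/flake1 flake1
nix flake list-inputs /path/to/flake3                              # show the resolved input tree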

View File

@@ -59,11 +59,11 @@ with import ./config.nix; mkDerivation {
EOF
)
nix build -v -o $TEST_ROOT/result-A -L "($expr)" \
nix build --impure -v -o $TEST_ROOT/result-A -L --expr "$expr" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid1=$!
nix build -v -o $TEST_ROOT/result-B -L "($expr2)" \
nix build --impure -v -o $TEST_ROOT/result-B -L --expr "$expr2" \
--min-free 1000 --max-free 2000 --min-free-check-interval 1 &
pid2=$!

148
tests/github-flakes.nix Normal file
View File

@@ -0,0 +1,148 @@
{ nixpkgs, system, overlay }:
with import (nixpkgs + "/nixos/lib/testing.nix") {
inherit system;
extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ];
};
let
# Generate a fake root CA and a fake github.com certificate.
cert = pkgs.runCommand "cert" { buildInputs = [ pkgs.openssl ]; }
''
mkdir -p $out
openssl genrsa -out ca.key 2048
openssl req -new -x509 -days 36500 -key ca.key \
-subj "/C=NL/ST=Denial/L=Springfield/O=Dis/CN=Root CA" -out $out/ca.crt
openssl req -newkey rsa:2048 -nodes -keyout $out/server.key \
-subj "/C=CN/ST=Denial/L=Springfield/O=Dis/CN=github.com" -out server.csr
openssl x509 -req -extfile <(printf "subjectAltName=DNS:api.github.com,DNS:github.com,DNS:raw.githubusercontent.com") \
-days 36500 -in server.csr -CA $out/ca.crt -CAkey ca.key -CAcreateserial -out $out/server.crt
'';
registry = pkgs.writeTextFile {
name = "registry";
text = ''
{
"flakes": [
{
"from": {
"type": "indirect",
"id": "nixpkgs"
},
"to": {
"type": "github",
"owner": "NixOS",
"repo": "nixpkgs"
}
}
],
"version": 2
}
'';
destination = "/flake-registry.json";
};
api = pkgs.runCommand "nixpkgs-flake" {}
''
mkdir -p $out/tarball
dir=NixOS-nixpkgs-${nixpkgs.shortRev}
cp -prd ${nixpkgs} $dir
# Set the correct timestamp in the tarball.
find $dir -print0 | xargs -0 touch -t ${builtins.substring 0 12 nixpkgs.lastModifiedDate}.${builtins.substring 12 2 nixpkgs.lastModifiedDate} --
tar cfz $out/tarball/${nixpkgs.rev} $dir --hard-dereference
mkdir -p $out/commits
echo '{"sha": "${nixpkgs.rev}"}' > $out/commits/HEAD
'';
in
makeTest (
{
nodes =
{ # Impersonate github.com and api.github.com.
github =
{ config, pkgs, ... }:
{ networking.firewall.allowedTCPPorts = [ 80 443 ];
services.httpd.enable = true;
services.httpd.adminAddr = "foo@example.org";
services.httpd.extraConfig = ''
ErrorLog syslog:local6
'';
services.httpd.virtualHosts."github.com" =
{ forceSSL = true;
sslServerKey = "${cert}/server.key";
sslServerCert = "${cert}/server.crt";
servedDirs =
[ { urlPath = "/NixOS/flake-registry/raw/master";
dir = registry;
}
];
};
services.httpd.virtualHosts."api.github.com" =
{ forceSSL = true;
sslServerKey = "${cert}/server.key";
sslServerCert = "${cert}/server.crt";
servedDirs =
[ { urlPath = "/repos/NixOS/nixpkgs";
dir = api;
}
];
};
};
client =
{ config, lib, pkgs, nodes, ... }:
{ virtualisation.writableStore = true;
virtualisation.diskSize = 2048;
virtualisation.pathsInNixDB = [ pkgs.hello pkgs.fuse ];
virtualisation.memorySize = 4096;
nix.binaryCaches = lib.mkForce [ ];
nix.extraOptions = "experimental-features = nix-command flakes";
environment.systemPackages = [ pkgs.jq ];
networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} =
[ "github.com" "api.github.com" "raw.githubusercontent.com" ];
security.pki.certificateFiles = [ "${cert}/ca.crt" ];
};
};
testScript = { nodes }:
''
use POSIX qw(strftime);
startAll;
$github->waitForUnit("httpd.service");
$client->succeed("curl -v https://github.com/ >&2");
$client->succeed("nix registry list | grep nixpkgs");
$client->succeed("nix flake info nixpkgs --json | jq -r .revision") eq "${nixpkgs.rev}\n"
or die "revision mismatch";
$client->succeed("nix registry pin nixpkgs");
$client->succeed("nix flake info nixpkgs --tarball-ttl 0 >&2");
# Shut down the web server. The flake should be cached on the client.
$github->succeed("systemctl stop httpd.service");
my $date = $client->succeed("nix flake info nixpkgs --json | jq -M .lastModified");
strftime("%Y%m%d%H%M%S", gmtime($date)) eq "${nixpkgs.lastModifiedDate}" or die "time mismatch";
$client->succeed("nix build nixpkgs#hello");
# The build shouldn't fail even with --tarball-ttl 0 (the server
# being down should not be a fatal error).
$client->succeed("nix build nixpkgs#fuse --tarball-ttl 0");
'';
})
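
The VM test above impersonates GitHub with a throwaway CA and two Apache virtual hosts. Roughly, these are the endpoints it fakes; the URLs are reconstructed from the servedDirs mappings and runCommand outputs above and are shown only as an illustration of what the client resolves inside the VM (the fake CA is trusted via security.pki.certificateFiles, so plain curl works, as the test's own `curl -v https://github.com/` check shows):

# Global flake registry, served by the github.com virtual host
curl https://github.com/NixOS/flake-registry/raw/master/flake-registry.json

# Commit metadata and source tarball, served by the api.github.com virtual host
curl https://api.github.com/repos/NixOS/nixpkgs/commits/HEAD        # -> {"sha": "<nixpkgs.rev>"}
curl -L "https://api.github.com/repos/NixOS/nixpkgs/tarball/<rev>"  # -> nixpkgs source tarball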

View File

@@ -19,6 +19,7 @@ keep-derivations = false
sandbox = false
experimental-features = nix-command flakes
gc-reserved-space = 0
flake-registry = $TEST_ROOT/registry.json
include nix.conf.extra
EOF

View File

@@ -31,13 +31,14 @@ nix_tests = \
nix-copy-ssh.sh \
post-hook.sh \
function-trace.sh \
recursive.sh
recursive.sh \
flakes.sh
# parallel.sh
install-tests += $(foreach x, $(nix_tests), tests/$(x))
tests-environment = NIX_REMOTE= $(bash) -e
clean-files += $(d)/common.sh
clean-files += $(d)/common.sh $(d)/config.nix
test-deps += tests/common.sh tests/config.nix tests/plugins/libplugintest.$(SO_EXT)

View File

@@ -1,8 +1,11 @@
# Test nix-copy-closure.
{ nixpkgs, system, nix }:
{ nixpkgs, system, overlay }:
with import (nixpkgs + "/nixos/lib/testing.nix") { inherit system; };
with import (nixpkgs + "/nixos/lib/testing.nix") {
inherit system;
extraConfigurations = [ { nixpkgs.overlays = [ overlay ]; } ];
};
makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in {
@@ -11,7 +14,6 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in {
{ config, lib, pkgs, ... }:
{ virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgA ];
nix.package = nix;
nix.binaryCaches = lib.mkForce [ ];
};
@@ -20,7 +22,6 @@ makeTest (let pkgA = pkgs.cowsay; pkgB = pkgs.wget; pkgC = pkgs.hello; in {
{ services.openssh.enable = true;
virtualisation.writableStore = true;
virtualisation.pathsInNixDB = [ pkgB pkgC ];
nix.package = nix;
};
};

Some files were not shown because too many files have changed in this diff.