Merge remote-tracking branch 'upstream/master' into lfs

This commit is contained in:
Leandro Reina 2025-01-10 18:33:03 +01:00
commit b08b7bee4e
101 changed files with 2148 additions and 395 deletions

View File

@ -8,7 +8,7 @@ permissions: read-all
jobs: jobs:
eval: eval:
runs-on: ubuntu-latest runs-on: ubuntu-24.04
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
@ -20,8 +20,15 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: [ubuntu-latest, macos-latest] include:
runs-on: ${{ matrix.os }} - scenario: on ubuntu
runs-on: ubuntu-24.04
os: linux
- scenario: on macos
runs-on: macos-14
os: darwin
name: tests ${{ matrix.scenario }}
runs-on: ${{ matrix.runs-on }}
timeout-minutes: 60 timeout-minutes: 60
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -37,7 +44,7 @@ jobs:
# Since ubuntu 23.10, unprivileged user namespaces are no longer allowed to map to the root user: # Since ubuntu 23.10, unprivileged user namespaces are no longer allowed to map to the root user:
# https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces # https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces
- run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 - run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
if: matrix.os == 'ubuntu-latest' if: matrix.os == 'linux'
- run: scripts/build-checks - run: scripts/build-checks
- run: scripts/prepare-installer-for-github-actions - run: scripts/prepare-installer-for-github-actions
- name: Upload installer tarball - name: Upload installer tarball
@ -51,8 +58,15 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: [ubuntu-latest, macos-latest] include:
runs-on: ${{ matrix.os }} - scenario: on ubuntu
runs-on: ubuntu-24.04
os: linux
- scenario: on macos
runs-on: macos-14
os: darwin
name: installer test ${{ matrix.scenario }}
runs-on: ${{ matrix.runs-on }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Download installer tarball - name: Download installer tarball
@ -68,9 +82,9 @@ jobs:
install_url: 'http://localhost:8126/install' install_url: 'http://localhost:8126/install'
install_options: "--tarball-url-prefix http://localhost:8126/" install_options: "--tarball-url-prefix http://localhost:8126/"
- run: sudo apt install fish zsh - run: sudo apt install fish zsh
if: matrix.os == 'ubuntu-latest' if: matrix.os == 'linux'
- run: brew install fish - run: brew install fish
if: matrix.os == 'macos-latest' if: matrix.os == 'darwin'
- run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval" - run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval" - run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
- run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval" - run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
@ -86,7 +100,7 @@ jobs:
permissions: permissions:
contents: none contents: none
name: Check Docker secrets present for installer tests name: Check Docker secrets present for installer tests
runs-on: ubuntu-latest runs-on: ubuntu-24.04
outputs: outputs:
docker: ${{ steps.secret.outputs.docker }} docker: ${{ steps.secret.outputs.docker }}
steps: steps:
@ -106,7 +120,7 @@ jobs:
needs.check_secrets.outputs.docker == 'true' && needs.check_secrets.outputs.docker == 'true' &&
github.event_name == 'push' && github.event_name == 'push' &&
github.ref_name == 'master' github.ref_name == 'master'
runs-on: ubuntu-latest runs-on: ubuntu-24.04
steps: steps:
- name: Check for secrets - name: Check for secrets
id: secret id: secret
@ -158,7 +172,7 @@ jobs:
docker push $IMAGE_ID:master docker push $IMAGE_ID:master
vm_tests: vm_tests:
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: DeterminateSystems/nix-installer-action@main - uses: DeterminateSystems/nix-installer-action@main
@ -173,7 +187,7 @@ jobs:
flake_regressions: flake_regressions:
needs: vm_tests needs: vm_tests
runs-on: ubuntu-22.04 runs-on: ubuntu-24.04
steps: steps:
- name: Checkout nix - name: Checkout nix
uses: actions/checkout@v4 uses: actions/checkout@v4

View File

@ -15,7 +15,7 @@ permissions:
jobs: jobs:
labels: labels:
runs-on: ubuntu-latest runs-on: ubuntu-24.04
if: github.repository_owner == 'NixOS' if: github.repository_owner == 'NixOS'
steps: steps:
- uses: actions/labeler@v5 - uses: actions/labeler@v5

View File

@ -2,10 +2,10 @@ queue_rules:
- name: default - name: default
# all required tests need to go here # all required tests need to go here
merge_conditions: merge_conditions:
- check-success=tests (macos-latest) - check-success=tests on macos
- check-success=tests (ubuntu-latest) - check-success=tests on ubuntu
- check-success=installer_test (macos-latest) - check-success=installer test on macos
- check-success=installer_test (ubuntu-latest) - check-success=installer test on ubuntu
- check-success=vm_tests - check-success=vm_tests
batch_size: 5 batch_size: 5

View File

@ -3,7 +3,7 @@
, meson , meson
, ninja , ninja
, lowdown , lowdown-unsandboxed
, mdbook , mdbook
, mdbook-linkcheck , mdbook-linkcheck
, jq , jq
@ -42,7 +42,7 @@ mkMesonDerivation (finalAttrs: {
passthru.externalNativeBuildInputs = [ passthru.externalNativeBuildInputs = [
meson meson
ninja ninja
(lib.getBin lowdown) (lib.getBin lowdown-unsandboxed)
mdbook mdbook
mdbook-linkcheck mdbook-linkcheck
jq jq

View File

@ -0,0 +1,8 @@
---
synopsis: "`nix-instantiate --eval` now supports `--raw`"
prs: [12119]
---
The `nix-instantiate --eval` command now supports a `--raw` flag. When it is used,
the evaluation result must be a string, which is printed verbatim, without
quotation marks or escaping.
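As a minimal sketch of the difference (the example expression is illustrative, not taken from the release note):

```shell-session
$ nix-instantiate --eval --expr '"Hello, world!"'
"Hello, world!"
$ nix-instantiate --eval --raw --expr '"Hello, world!"'
Hello, world!
```

Since `--raw` also omits the trailing newline, the result is convenient to capture directly in shell variables.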

View File

@ -62,6 +62,15 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto
This is the equivalent of invoking [`nix-env --delete-generations <period>`](@docroot@/command-ref/nix-env/delete-generations.md#generations-time) on each found profile. This is the equivalent of invoking [`nix-env --delete-generations <period>`](@docroot@/command-ref/nix-env/delete-generations.md#generations-time) on each found profile.
See the documentation of that command for additional information about the *period* argument. See the documentation of that command for additional information about the *period* argument.
- <span id="opt-max-freed">[`--max-freed`](#opt-max-freed)</span> *bytes*
<!-- duplication from https://github.com/NixOS/nix/blob/442a2623e48357ff72c77bb11cf2cf06d94d2f90/doc/manual/source/command-ref/nix-store/gc.md?plain=1#L39-L44 -->
Keep deleting paths until at least *bytes* bytes have been deleted,
then stop. The argument *bytes* can be followed by the
multiplicative suffix `K`, `M`, `G` or `T`, denoting KiB, MiB, GiB
or TiB units.
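A usage sketch, assuming the surrounding page is the `nix-collect-garbage` manual (the `1G` figure is just an example):

```shell-session
$ nix-collect-garbage --max-freed 1G
```

This stops deleting unreachable store paths once at least 1 GiB has been freed.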
{{#include ./opt-common.md}} {{#include ./opt-common.md}}
{{#include ./env-common.md}} {{#include ./env-common.md}}

View File

@ -84,7 +84,7 @@ When using public key authentication, you can avoid typing the passphrase with `
> Copy GNU Hello from a remote machine using a known store path, and run it: > Copy GNU Hello from a remote machine using a known store path, and run it:
> >
> ```shell-session > ```shell-session
> $ storePath="$(nix-instantiate --eval '<nixpkgs>' -I nixpkgs=channel:nixpkgs-unstable -A hello.outPath | tr -d '"')" > $ storePath="$(nix-instantiate --eval --raw '<nixpkgs>' -I nixpkgs=channel:nixpkgs-unstable -A hello.outPath)"
> $ nix-copy-closure --from alice@itchy.example.org "$storePath" > $ nix-copy-closure --from alice@itchy.example.org "$storePath"
> $ "$storePath"/bin/hello > $ "$storePath"/bin/hello
> Hello, world! > Hello, world!

View File

@ -5,7 +5,7 @@
# Synopsis # Synopsis
`nix-instantiate` `nix-instantiate`
[`--parse` | `--eval` [`--strict`] [`--json`] [`--xml`] ] [`--parse` | `--eval` [`--strict`] [`--raw` | `--json` | `--xml`] ]
[`--read-write-mode`] [`--read-write-mode`]
[`--arg` *name* *value*] [`--arg` *name* *value*]
[{`--attr`| `-A`} *attrPath*] [{`--attr`| `-A`} *attrPath*]
@ -102,6 +102,11 @@ standard input.
> This option can cause non-termination, because lazy data > This option can cause non-termination, because lazy data
> structures can be infinitely large. > structures can be infinitely large.
- `--raw`
When used with `--eval`, the evaluation result must be a string,
which is printed verbatim, without quoting, escaping or trailing newline.
- `--json` - `--json`
When used with `--eval`, print the resulting value as a JSON When used with `--eval`, print the resulting value as a JSON

View File

@ -21,6 +21,9 @@ This operation has the following options:
Use recursive instead of flat hashing mode, used when adding Use recursive instead of flat hashing mode, used when adding
directories to the store. directories to the store.
*paths* that refer to symlinks are not dereferenced, but added to the store
as symlinks with the same target.
{{#include ./opt-common.md}} {{#include ./opt-common.md}}
{{#include ../opt-common.md}} {{#include ../opt-common.md}}

View File

@ -11,6 +11,9 @@
The operation `--add` adds the specified paths to the Nix store. It The operation `--add` adds the specified paths to the Nix store. It
prints the resulting paths in the Nix store on standard output. prints the resulting paths in the Nix store on standard output.
*paths* that refer to symlinks are not dereferenced, but added to the store
as symlinks with the same target.
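A small illustration, with the store hash elided (the file names are assumed examples):

```shell-session
$ ln -s /etc/hosts mylink
$ nix-store --add mylink
/nix/store/...-mylink
$ readlink /nix/store/...-mylink
/etc/hosts
```

The resulting store object is itself a symlink pointing at `/etc/hosts`; the contents of `/etc/hosts` are not copied into the store.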
{{#include ./opt-common.md}} {{#include ./opt-common.md}}
{{#include ../opt-common.md}} {{#include ../opt-common.md}}

View File

@ -19,10 +19,11 @@ nix-build -E '(import ./.).packages.${builtins.currentSystem}.nix.doc'
or or
```console ```console
nix build .#nix^doc nix build .#nix-manual
``` ```
and open `./result-doc/share/doc/nix/manual/index.html`. and open `./result/share/doc/nix/manual/index.html`.
To build the manual incrementally, [enter the development shell](./building.md) and run: To build the manual incrementally, [enter the development shell](./building.md) and run:

View File

@ -297,7 +297,7 @@ Creating a Cachix cache for your installer tests and adding its authorisation to
- `armv7l-linux` - `armv7l-linux`
- `x86_64-darwin` - `x86_64-darwin`
- The `installer_test` job (which runs on `ubuntu-latest` and `macos-latest`) will try to install Nix with the cached installer and run a trivial Nix command. - The `installer_test` job (which runs on `ubuntu-24.04` and `macos-14`) will try to install Nix with the cached installer and run a trivial Nix command.
### One-time setup ### One-time setup

View File

@ -3,11 +3,11 @@
"flake-compat": { "flake-compat": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1696426674, "lastModified": 1733328505,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=", "narHash": "sha256-NeCCThCEP3eCl2l/+27kNNK7QrwZB1IJCrXfrbv5oqU=",
"owner": "edolstra", "owner": "edolstra",
"repo": "flake-compat", "repo": "flake-compat",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33", "rev": "ff81ac966bb2cae68946d5ed5fc4994f96d0ffec",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -23,11 +23,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1719994518, "lastModified": 1733312601,
"narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=", "narHash": "sha256-4pDvzqnegAfRkPwO3wmwBhVi/Sye1mzps0zHWYnP88c=",
"owner": "hercules-ci", "owner": "hercules-ci",
"repo": "flake-parts", "repo": "flake-parts",
"rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7", "rev": "205b12d8b7cd4802fbcb8e8ef6a0f1408781a4f9",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -48,11 +48,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1721042469, "lastModified": 1734279981,
"narHash": "sha256-6FPUl7HVtvRHCCBQne7Ylp4p+dpP3P/OYuzjztZ4s70=", "narHash": "sha256-NdaCraHPp8iYMWzdXAt5Nv6sA3MUzlCiGiR586TCwo0=",
"owner": "cachix", "owner": "cachix",
"repo": "git-hooks.nix", "repo": "git-hooks.nix",
"rev": "f451c19376071a90d8c58ab1a953c6e9840527fd", "rev": "aa9f40c906904ebd83da78e7f328cd8aeaeae785",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -61,35 +61,18 @@
"type": "github" "type": "github"
} }
}, },
"libgit2": {
"flake": false,
"locked": {
"lastModified": 1715853528,
"narHash": "sha256-J2rCxTecyLbbDdsyBWn9w7r3pbKRMkI9E7RvRgAqBdY=",
"owner": "libgit2",
"repo": "libgit2",
"rev": "36f7e21ad757a3dacc58cf7944329da6bc1d6e96",
"type": "github"
},
"original": {
"owner": "libgit2",
"ref": "v1.8.1",
"repo": "libgit2",
"type": "github"
}
},
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1723688146, "lastModified": 1734359947,
"narHash": "sha256-sqLwJcHYeWLOeP/XoLwAtYjr01TISlkOfz+NG82pbdg=", "narHash": "sha256-1Noao/H+N8nFB4Beoy8fgwrcOQLVm9o4zKW1ODaqK9E=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "c3d4ac725177c030b1e289015989da2ad9d56af0", "rev": "48d12d5e70ee91fe8481378e540433a7303dbf6a",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "NixOS", "owner": "NixOS",
"ref": "nixos-24.05", "ref": "release-24.11",
"repo": "nixpkgs", "repo": "nixpkgs",
"type": "github" "type": "github"
} }
@ -131,7 +114,6 @@
"flake-compat": "flake-compat", "flake-compat": "flake-compat",
"flake-parts": "flake-parts", "flake-parts": "flake-parts",
"git-hooks-nix": "git-hooks-nix", "git-hooks-nix": "git-hooks-nix",
"libgit2": "libgit2",
"nixpkgs": "nixpkgs", "nixpkgs": "nixpkgs",
"nixpkgs-23-11": "nixpkgs-23-11", "nixpkgs-23-11": "nixpkgs-23-11",
"nixpkgs-regression": "nixpkgs-regression" "nixpkgs-regression": "nixpkgs-regression"

View File

@ -1,11 +1,11 @@
{ {
description = "The purely functional package manager"; description = "The purely functional package manager";
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05"; inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-24.11";
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446"; inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
inputs.libgit2 = { url = "github:libgit2/libgit2/v1.8.1"; flake = false; };
# dev tooling # dev tooling
inputs.flake-parts.url = "github:hercules-ci/flake-parts"; inputs.flake-parts.url = "github:hercules-ci/flake-parts";
@ -18,7 +18,7 @@
inputs.git-hooks-nix.inputs.flake-compat.follows = ""; inputs.git-hooks-nix.inputs.flake-compat.follows = "";
inputs.git-hooks-nix.inputs.gitignore.follows = ""; inputs.git-hooks-nix.inputs.gitignore.follows = "";
outputs = inputs@{ self, nixpkgs, nixpkgs-regression, libgit2, ... }: outputs = inputs@{ self, nixpkgs, nixpkgs-regression, ... }:
let let
@ -36,7 +36,8 @@
"armv6l-unknown-linux-gnueabihf" "armv6l-unknown-linux-gnueabihf"
"armv7l-unknown-linux-gnueabihf" "armv7l-unknown-linux-gnueabihf"
"riscv64-unknown-linux-gnu" "riscv64-unknown-linux-gnu"
"x86_64-unknown-netbsd" # Disabled because of https://github.com/NixOS/nixpkgs/issues/344423
# "x86_64-unknown-netbsd"
"x86_64-unknown-freebsd" "x86_64-unknown-freebsd"
"x86_64-w64-mingw32" "x86_64-w64-mingw32"
]; ];
@ -163,7 +164,6 @@
if prev.stdenv.hostPlatform.system == "i686-linux" if prev.stdenv.hostPlatform.system == "i686-linux"
then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; }) then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; })
else prev.pre-commit; else prev.pre-commit;
}; };
in { in {

View File

@ -1,66 +0,0 @@
# Ensure that this bug is not present in the C++ toolchain we are using.
#
# URL for bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431
#
# The test program is from that issue, with only a slight modification
# to set an exit status instead of printing strings.
AC_DEFUN([ENSURE_NO_GCC_BUG_80431],
[
AC_MSG_CHECKING([that GCC bug 80431 is fixed])
AC_LANG_PUSH(C++)
AC_RUN_IFELSE(
[AC_LANG_PROGRAM(
[[
#include <cstdio>
static bool a = true;
static bool b = true;
struct Options { };
struct Option
{
Option(Options * options)
{
a = false;
}
~Option()
{
b = false;
}
};
struct MyOptions : Options { };
struct MyOptions2 : virtual MyOptions
{
Option foo{this};
};
]],
[[
{
MyOptions2 opts;
}
return (a << 1) | b;
]])],
[status_80431=0],
[status_80431=$?],
[status_80431=''])
AC_LANG_POP(C++)
AS_CASE([$status_80431],
[''],[
AC_MSG_RESULT(cannot check because cross compiling)
AC_MSG_NOTICE(assume we are bug free)
],
[0],[
AC_MSG_RESULT(yes)
],
[2],[
AC_MSG_RESULT(no)
AC_MSG_ERROR(Cannot build Nix with C++ compiler with this bug)
],
[
AC_MSG_RESULT(unexpected result $status_80431: not expected failure with bug, ignoring)
])
])

View File

@ -356,6 +356,7 @@
''^src/libutil/util\.cc$'' ''^src/libutil/util\.cc$''
''^src/libutil/util\.hh$'' ''^src/libutil/util\.hh$''
''^src/libutil/variant-wrapper\.hh$'' ''^src/libutil/variant-wrapper\.hh$''
''^src/libutil/widecharwidth/widechar_width\.h$'' # vendored source
''^src/libutil/windows/file-descriptor\.cc$'' ''^src/libutil/windows/file-descriptor\.cc$''
''^src/libutil/windows/file-path\.cc$'' ''^src/libutil/windows/file-path\.cc$''
''^src/libutil/windows/processes\.cc$'' ''^src/libutil/windows/processes\.cc$''

View File

@ -16,7 +16,3 @@ add_project_arguments(
'-Wno-deprecated-declarations', '-Wno-deprecated-declarations',
language : 'cpp', language : 'cpp',
) )
if get_option('buildtype') not in ['debug']
add_project_arguments('-O3', language : 'cpp')
endif

View File

@ -66,6 +66,21 @@ let
mesonLayer = finalAttrs: prevAttrs: mesonLayer = finalAttrs: prevAttrs:
{ {
# NOTE:
# As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26,
# `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default.
# More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype.
mesonBuildType = "release";
# NOTE:
# Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the
# guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10.
# For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable.
preConfigure = prevAttrs.preConfigure or "" + ''
case "$mesonBuildType" in
release|minsize) appendToVar mesonFlags "-Db_lto=true" ;;
*) appendToVar mesonFlags "-Db_lto=false" ;;
esac
'';
nativeBuildInputs = [ nativeBuildInputs = [
pkgs.buildPackages.meson pkgs.buildPackages.meson
pkgs.buildPackages.ninja pkgs.buildPackages.ninja
@ -114,14 +129,6 @@ scope: {
requiredSystemFeatures = [ ]; requiredSystemFeatures = [ ];
}; };
libseccomp = pkgs.libseccomp.overrideAttrs (_: rec {
version = "2.5.5";
src = pkgs.fetchurl {
url = "https://github.com/seccomp/libseccomp/releases/download/v${version}/libseccomp-${version}.tar.gz";
hash = "sha256-JIosik2bmFiqa69ScSw0r+/PnJ6Ut23OAsHJqiX7M3U=";
};
});
boehmgc = pkgs.boehmgc.override { boehmgc = pkgs.boehmgc.override {
enableLargeConfig = true; enableLargeConfig = true;
}; };
@ -140,8 +147,6 @@ scope: {
}); });
libgit2 = pkgs.libgit2.overrideAttrs (attrs: { libgit2 = pkgs.libgit2.overrideAttrs (attrs: {
src = inputs.libgit2;
version = inputs.libgit2.lastModifiedDate;
cmakeFlags = attrs.cmakeFlags or [] cmakeFlags = attrs.cmakeFlags or []
++ [ "-DUSE_SSH=exec" ]; ++ [ "-DUSE_SSH=exec" ];
nativeBuildInputs = attrs.nativeBuildInputs or [] nativeBuildInputs = attrs.nativeBuildInputs or []
@ -169,36 +174,6 @@ scope: {
]; ];
}); });
busybox-sandbox-shell = pkgs.busybox-sandbox-shell or (pkgs.busybox.override {
useMusl = true;
enableStatic = true;
enableMinimal = true;
extraConfig = ''
CONFIG_FEATURE_FANCY_ECHO y
CONFIG_FEATURE_SH_MATH y
CONFIG_FEATURE_SH_MATH_64 y
CONFIG_ASH y
CONFIG_ASH_OPTIMIZE_FOR_SIZE y
CONFIG_ASH_ALIAS y
CONFIG_ASH_BASH_COMPAT y
CONFIG_ASH_CMDCMD y
CONFIG_ASH_ECHO y
CONFIG_ASH_GETOPTS y
CONFIG_ASH_INTERNAL_GLOB y
CONFIG_ASH_JOB_CONTROL y
CONFIG_ASH_PRINTF y
CONFIG_ASH_TEST y
'';
});
# TODO change in Nixpkgs, Windows works fine. First commit of
# https://github.com/NixOS/nixpkgs/pull/322977 backported will fix.
toml11 = pkgs.toml11.overrideAttrs (old: {
meta.platforms = lib.platforms.all;
});
inherit resolvePath filesetToSource; inherit resolvePath filesetToSource;
mkMesonDerivation = mkMesonDerivation =

View File

@ -16,13 +16,25 @@ static std::string doRenderMarkdownToTerminal(std::string_view markdown)
{ {
int windowWidth = getWindowSize().second; int windowWidth = getWindowSize().second;
struct lowdown_opts opts #if HAVE_LOWDOWN_1_4
{ struct lowdown_opts_term opts_term {
.type = LOWDOWN_TERM,
.maxdepth = 20,
.cols = (size_t) std::max(windowWidth - 5, 60), .cols = (size_t) std::max(windowWidth - 5, 60),
.hmargin = 0, .hmargin = 0,
.vmargin = 0, .vmargin = 0,
};
#endif
struct lowdown_opts opts
{
.type = LOWDOWN_TERM,
#if HAVE_LOWDOWN_1_4
.term = opts_term,
#endif
.maxdepth = 20,
#if !HAVE_LOWDOWN_1_4
.cols = (size_t) std::max(windowWidth - 5, 60),
.hmargin = 0,
.vmargin = 0,
#endif
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES, .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
.oflags = LOWDOWN_TERM_NOLINK, .oflags = LOWDOWN_TERM_NOLINK,
}; };

View File

@ -4,8 +4,6 @@ project('nix-cmd', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',
@ -36,6 +34,8 @@ deps_public += nlohmann_json
lowdown = dependency('lowdown', version : '>= 0.9.0', required : get_option('markdown')) lowdown = dependency('lowdown', version : '>= 0.9.0', required : get_option('markdown'))
deps_private += lowdown deps_private += lowdown
configdata.set('HAVE_LOWDOWN', lowdown.found().to_int()) configdata.set('HAVE_LOWDOWN', lowdown.found().to_int())
# The API changed slightly around terminal initialization.
configdata.set('HAVE_LOWDOWN_1_4', lowdown.version().version_compare('>= 1.4.0').to_int())
readline_flavor = get_option('readline-flavor') readline_flavor = get_option('readline-flavor')
if readline_flavor == 'editline' if readline_flavor == 'editline'

View File

@ -4,8 +4,6 @@ project('nix-expr-c', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-expr-test-support', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-expr-tests', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -3185,12 +3185,16 @@ std::ostream & operator << (std::ostream & str, const ExternalValueBase & v) {
return v.print(str); return v.print(str);
} }
void forceNoNullByte(std::string_view s) void forceNoNullByte(std::string_view s, std::function<Pos()> pos)
{ {
if (s.find('\0') != s.npos) { if (s.find('\0') != s.npos) {
using namespace std::string_view_literals; using namespace std::string_view_literals;
auto str = replaceStrings(std::string(s), "\0"sv, ""sv); auto str = replaceStrings(std::string(s), "\0"sv, ""sv);
throw Error("input string '%s' cannot be represented as Nix string because it contains null bytes", str); Error error("input string '%s' cannot be represented as Nix string because it contains null bytes", str);
if (pos) {
error.atPos(pos());
}
throw error;
} }
} }

View File

@ -41,16 +41,18 @@ namespace nix {
// we make use of the fact that the parser receives a private copy of the input // we make use of the fact that the parser receives a private copy of the input
// string and can munge around in it. // string and can munge around in it.
static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length) // getting the position is expensive and thus it is implemented lazily.
static StringToken unescapeStr(char * const s, size_t length, std::function<Pos()> && pos)
{ {
char * result = s; bool noNullByte = true;
char * t = s; char * t = s;
char c;
// the input string is terminated with *two* NULs, so we can safely take // the input string is terminated with *two* NULs, so we can safely take
// *one* character after the one being checked against. // *one* character after the one being checked against.
while ((c = *s++)) { for (size_t i = 0; i < length; t++) {
char c = s[i++];
noNullByte &= c != '\0';
if (c == '\\') { if (c == '\\') {
c = *s++; c = s[i++];
if (c == 'n') *t = '\n'; if (c == 'n') *t = '\n';
else if (c == 'r') *t = '\r'; else if (c == 'r') *t = '\r';
else if (c == 't') *t = '\t'; else if (c == 't') *t = '\t';
@ -59,12 +61,14 @@ static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
else if (c == '\r') { else if (c == '\r') {
/* Normalise CR and CR/LF into LF. */ /* Normalise CR and CR/LF into LF. */
*t = '\n'; *t = '\n';
if (*s == '\n') s++; /* cr/lf */ if (s[i] == '\n') i++; /* cr/lf */
} }
else *t = c; else *t = c;
t++;
} }
return {result, size_t(t - result)}; if (!noNullByte) {
forceNoNullByte({s, size_t(t - s)}, std::move(pos));
}
return {s, size_t(t - s)};
} }
static void requireExperimentalFeature(const ExperimentalFeature & feature, const Pos & pos) static void requireExperimentalFeature(const ExperimentalFeature & feature, const Pos & pos)
@ -175,7 +179,7 @@ or { return OR_KW; }
/* It is impossible to match strings ending with '$' with one /* It is impossible to match strings ending with '$' with one
regex because trailing contexts are only valid at the end regex because trailing contexts are only valid at the end
of a rule. (A sane but undocumented limitation.) */ of a rule. (A sane but undocumented limitation.) */
yylval->str = unescapeStr(state->symbols, yytext, yyleng); yylval->str = unescapeStr(yytext, yyleng, [&]() { return state->positions[CUR_POS]; });
return STR; return STR;
} }
<STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } <STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
@ -191,6 +195,7 @@ or { return OR_KW; }
\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; } \'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; }
<IND_STRING>([^\$\']|\$[^\{\']|\'[^\'\$])+ { <IND_STRING>([^\$\']|\$[^\{\']|\'[^\'\$])+ {
yylval->str = {yytext, (size_t) yyleng, true}; yylval->str = {yytext, (size_t) yyleng, true};
forceNoNullByte(yylval->str, [&]() { return state->positions[CUR_POS]; });
return IND_STR; return IND_STR;
} }
<IND_STRING>\'\'\$ | <IND_STRING>\'\'\$ |
@ -203,7 +208,7 @@ or { return OR_KW; }
return IND_STR; return IND_STR;
} }
<IND_STRING>\'\'\\{ANY} { <IND_STRING>\'\'\\{ANY} {
yylval->str = unescapeStr(state->symbols, yytext + 2, yyleng - 2); yylval->str = unescapeStr(yytext + 2, yyleng - 2, [&]() { return state->positions[CUR_POS]; });
return IND_STR; return IND_STR;
} }
<IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } <IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }

View File

@ -4,8 +4,6 @@ project('nix-expr', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -2045,7 +2045,7 @@ static RegisterPrimOp primop_readFileType({
.args = {"p"}, .args = {"p"},
.doc = R"( .doc = R"(
Determine the directory entry type of a filesystem node, being Determine the directory entry type of a filesystem node, being
one of "directory", "regular", "symlink", or "unknown". one of `"directory"`, `"regular"`, `"symlink"`, or `"unknown"`.
)", )",
.fun = prim_readFileType, .fun = prim_readFileType,
}); });
@ -4059,7 +4059,7 @@ static RegisterPrimOp primop_toString({
}); });
/* `substring start len str' returns the substring of `str' starting /* `substring start len str' returns the substring of `str' starting
at character position `min(start, stringLength str)' inclusive and at byte position `min(start, stringLength str)' inclusive and
ending at `min(start + len, stringLength str)'. `start' must be ending at `min(start + len, stringLength str)'. `start' must be
non-negative. */ non-negative. */
static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v) static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v)
@ -4098,7 +4098,7 @@ static RegisterPrimOp primop_substring({
.name = "__substring", .name = "__substring",
.args = {"start", "len", "s"}, .args = {"start", "len", "s"},
.doc = R"( .doc = R"(
Return the substring of *s* from character position *start* Return the substring of *s* from byte position *start*
(zero-based) up to but not including *start + len*. If *start* is (zero-based) up to but not including *start + len*. If *start* is
greater than the length of the string, an empty string is returned. greater than the length of the string, an empty string is returned.
If *start + len* lies beyond the end of the string or *len* is `-1`, If *start + len* lies beyond the end of the string or *len* is `-1`,
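To make the byte-based indexing concrete, a quick evaluation sketch (the example strings are illustrative):

```shell-session
$ nix-instantiate --eval --expr 'builtins.substring 4 3 "The Nix store"'
"Nix"
$ nix-instantiate --eval --expr 'builtins.substring 0 100 "short"'
"short"
```

Positions and lengths count bytes, so a multi-byte UTF-8 character occupies more than one position; with plain ASCII strings, as here, bytes and characters coincide.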

View File

@ -108,7 +108,11 @@ json printValueAsJSON(EvalState & state, bool strict,
void printValueAsJSON(EvalState & state, bool strict, void printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore) Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore)
{ {
str << printValueAsJSON(state, strict, v, pos, context, copyToStore); try {
str << printValueAsJSON(state, strict, v, pos, context, copyToStore);
} catch (nlohmann::json::exception & e) {
throw JSONSerializationError("JSON serialization error: %s", e.what());
}
} }
json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,

View File

@ -16,4 +16,7 @@ nlohmann::json printValueAsJSON(EvalState & state, bool strict,
void printValueAsJSON(EvalState & state, bool strict, void printValueAsJSON(EvalState & state, bool strict,
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore = true); Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore = true);
MakeError(JSONSerializationError, Error);
} }

View File

@ -510,6 +510,6 @@ typedef std::shared_ptr<Value *> RootValue;
RootValue allocRootValue(Value * v); RootValue allocRootValue(Value * v);
void forceNoNullByte(std::string_view s); void forceNoNullByte(std::string_view s, std::function<Pos()> = nullptr);
} }

View File

@ -4,8 +4,6 @@ project('nix-fetchers-tests', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -66,7 +66,7 @@ Input Input::fromURL(
} }
} }
throw Error("input '%s' is unsupported", url.url); throw Error("input '%s' is unsupported", url);
} }
Input Input::fromAttrs(const Settings & settings, Attrs && attrs) Input Input::fromAttrs(const Settings & settings, Attrs && attrs)

View File

@ -434,7 +434,7 @@ struct GitInputScheme : InputScheme
auto url = parseURL(getStrAttr(input.attrs, "url")); auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git"); bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
repoInfo.isLocal = url.scheme == "file" && !forceHttp && !isBareRepository; repoInfo.isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
repoInfo.url = repoInfo.isLocal ? url.path : url.base; repoInfo.url = repoInfo.isLocal ? url.path : url.to_string();
// If this is a local directory and no ref or revision is // If this is a local directory and no ref or revision is
// given, then allow the use of an unclean working tree. // given, then allow the use of an unclean working tree.

View File

@ -50,7 +50,7 @@ struct GitArchiveInputScheme : InputScheme
else if (std::regex_match(path[2], refRegex)) else if (std::regex_match(path[2], refRegex))
ref = path[2]; ref = path[2];
else else
throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]); throw BadURL("in URL '%s', '%s' is not a commit hash or branch/tag name", url, path[2]);
} else if (size > 3) { } else if (size > 3) {
std::string rs; std::string rs;
for (auto i = std::next(path.begin(), 2); i != path.end(); i++) { for (auto i = std::next(path.begin(), 2); i != path.end(); i++) {
@ -63,34 +63,34 @@ struct GitArchiveInputScheme : InputScheme
if (std::regex_match(rs, refRegex)) { if (std::regex_match(rs, refRegex)) {
ref = rs; ref = rs;
} else { } else {
throw BadURL("in URL '%s', '%s' is not a branch/tag name", url.url, rs); throw BadURL("in URL '%s', '%s' is not a branch/tag name", url, rs);
} }
} else if (size < 2) } else if (size < 2)
throw BadURL("URL '%s' is invalid", url.url); throw BadURL("URL '%s' is invalid", url);
for (auto &[name, value] : url.query) { for (auto &[name, value] : url.query) {
if (name == "rev") { if (name == "rev") {
if (rev) if (rev)
throw BadURL("URL '%s' contains multiple commit hashes", url.url); throw BadURL("URL '%s' contains multiple commit hashes", url);
rev = Hash::parseAny(value, HashAlgorithm::SHA1); rev = Hash::parseAny(value, HashAlgorithm::SHA1);
} }
else if (name == "ref") { else if (name == "ref") {
if (!std::regex_match(value, refRegex)) if (!std::regex_match(value, refRegex))
throw BadURL("URL '%s' contains an invalid branch/tag name", url.url); throw BadURL("URL '%s' contains an invalid branch/tag name", url);
if (ref) if (ref)
throw BadURL("URL '%s' contains multiple branch/tag names", url.url); throw BadURL("URL '%s' contains multiple branch/tag names", url);
ref = value; ref = value;
} }
else if (name == "host") { else if (name == "host") {
if (!std::regex_match(value, hostRegex)) if (!std::regex_match(value, hostRegex))
throw BadURL("URL '%s' contains an invalid instance host", url.url); throw BadURL("URL '%s' contains an invalid instance host", url);
host_url = value; host_url = value;
} }
// FIXME: barf on unsupported attributes // FIXME: barf on unsupported attributes
} }
if (ref && rev) if (ref && rev)
throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev()); throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url, *ref, rev->gitRev());
Input input{settings}; Input input{settings};
input.attrs.insert_or_assign("type", std::string { schemeName() }); input.attrs.insert_or_assign("type", std::string { schemeName() });

View File

@ -26,16 +26,16 @@ struct IndirectInputScheme : InputScheme
else if (std::regex_match(path[1], refRegex)) else if (std::regex_match(path[1], refRegex))
ref = path[1]; ref = path[1];
else else
throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]); throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url, path[1]);
} else if (path.size() == 3) { } else if (path.size() == 3) {
if (!std::regex_match(path[1], refRegex)) if (!std::regex_match(path[1], refRegex))
throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url.url, path[1]); throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url, path[1]);
ref = path[1]; ref = path[1];
if (!std::regex_match(path[2], revRegex)) if (!std::regex_match(path[2], revRegex))
throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]); throw BadURL("in flake URL '%s', '%s' is not a commit hash", url, path[2]);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1); rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
} else } else
throw BadURL("GitHub URL '%s' is invalid", url.url); throw BadURL("GitHub URL '%s' is invalid", url);
std::string id = path[0]; std::string id = path[0];
if (!std::regex_match(id, flakeRegex)) if (!std::regex_match(id, flakeRegex))

View File

@ -161,7 +161,7 @@ struct MercurialInputScheme : InputScheme
{ {
auto url = parseURL(getStrAttr(input.attrs, "url")); auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isLocal = url.scheme == "file"; bool isLocal = url.scheme == "file";
return {isLocal, isLocal ? url.path : url.base}; return {isLocal, isLocal ? url.path : url.to_string()};
} }
StorePath fetchToStore(ref<Store> store, Input & input) const StorePath fetchToStore(ref<Store> store, Input & input) const

View File

@ -4,8 +4,6 @@ project('nix-fetchers', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -14,7 +14,7 @@ struct PathInputScheme : InputScheme
if (url.scheme != "path") return {}; if (url.scheme != "path") return {};
if (url.authority && *url.authority != "") if (url.authority && *url.authority != "")
throw Error("path URL '%s' should not have an authority ('%s')", url.url, *url.authority); throw Error("path URL '%s' should not have an authority ('%s')", url, *url.authority);
Input input{settings}; Input input{settings};
input.attrs.insert_or_assign("type", "path"); input.attrs.insert_or_assign("type", "path");
@ -27,10 +27,10 @@ struct PathInputScheme : InputScheme
if (auto n = string2Int<uint64_t>(value)) if (auto n = string2Int<uint64_t>(value))
input.attrs.insert_or_assign(name, *n); input.attrs.insert_or_assign(name, *n);
else else
throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name); throw Error("path URL '%s' has invalid parameter '%s'", url, name);
} }
else else
throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name); throw Error("path URL '%s' has unsupported parameter '%s'", url, name);
return input; return input;
} }

View File

@ -4,8 +4,6 @@ project('nix-flake-c', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -7,18 +7,60 @@ namespace nix {
/* ----------- tests for flake/flakeref.hh --------------------------------------------------*/ /* ----------- tests for flake/flakeref.hh --------------------------------------------------*/
/* ---------------------------------------------------------------------------- TEST(parseFlakeRef, path) {
* to_string experimentalFeatureSettings.experimentalFeatures.get().insert(Xp::Flakes);
* --------------------------------------------------------------------------*/
fetchers::Settings fetchSettings;
{
auto s = "/foo/bar";
auto flakeref = parseFlakeRef(fetchSettings, s);
ASSERT_EQ(flakeref.to_string(), "path:/foo/bar");
}
{
auto s = "/foo/bar?revCount=123&rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
auto flakeref = parseFlakeRef(fetchSettings, s);
ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?rev=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa&revCount=123");
}
{
auto s = "/foo/bar?xyzzy=123";
EXPECT_THROW(
parseFlakeRef(fetchSettings, s),
Error);
}
{
auto s = "/foo/bar#bla";
EXPECT_THROW(
parseFlakeRef(fetchSettings, s),
Error);
}
{
auto s = "/foo/bar#bla";
auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s);
ASSERT_EQ(flakeref.to_string(), "path:/foo/bar");
ASSERT_EQ(fragment, "bla");
}
{
auto s = "/foo/bar?revCount=123#bla";
auto [flakeref, fragment] = parseFlakeRefWithFragment(fetchSettings, s);
ASSERT_EQ(flakeref.to_string(), "path:/foo/bar?revCount=123");
ASSERT_EQ(fragment, "bla");
}
}
TEST(to_string, doesntReencodeUrl) { TEST(to_string, doesntReencodeUrl) {
fetchers::Settings fetchSettings; fetchers::Settings fetchSettings;
auto s = "http://localhost:8181/test/+3d.tar.gz"; auto s = "http://localhost:8181/test/+3d.tar.gz";
auto flakeref = parseFlakeRef(fetchSettings, s); auto flakeref = parseFlakeRef(fetchSettings, s);
auto parsed = flakeref.to_string(); auto unparsed = flakeref.to_string();
auto expected = "http://localhost:8181/test/%2B3d.tar.gz"; auto expected = "http://localhost:8181/test/%2B3d.tar.gz";
ASSERT_EQ(parsed, expected); ASSERT_EQ(unparsed, expected);
} }
} }

View File

@ -4,8 +4,6 @@ project('nix-flake-tests', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -89,23 +89,16 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
bool allowMissing, bool allowMissing,
bool isFlake) bool isFlake)
{ {
std::string path = url; static std::regex pathFlakeRegex(
std::string fragment = ""; R"(([^?#]*)(\?([^#]*))?(#(.*))?)",
std::map<std::string, std::string> query; std::regex::ECMAScript);
auto pathEnd = url.find_first_of("#?");
auto fragmentStart = pathEnd; std::smatch match;
if (pathEnd != std::string::npos && url[pathEnd] == '?') { auto succeeds = std::regex_match(url, match, pathFlakeRegex);
fragmentStart = url.find("#"); assert(succeeds);
} auto path = match[1].str();
if (pathEnd != std::string::npos) { auto query = decodeQuery(match[3]);
path = url.substr(0, pathEnd); auto fragment = percentDecode(match[5].str());
}
if (fragmentStart != std::string::npos) {
fragment = percentDecode(url.substr(fragmentStart+1));
}
if (pathEnd != std::string::npos && fragmentStart != std::string::npos && url[pathEnd] == '?') {
query = decodeQuery(url.substr(pathEnd + 1, fragmentStart - pathEnd - 1));
}
if (baseDir) { if (baseDir) {
/* Check if 'url' is a path (either absolute or relative /* Check if 'url' is a path (either absolute or relative
@ -159,11 +152,7 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
while (flakeRoot != "/") { while (flakeRoot != "/") {
if (pathExists(flakeRoot + "/.git")) { if (pathExists(flakeRoot + "/.git")) {
auto base = std::string("git+file://") + flakeRoot;
auto parsedURL = ParsedURL{ auto parsedURL = ParsedURL{
.url = base, // FIXME
.base = base,
.scheme = "git+file", .scheme = "git+file",
.authority = "", .authority = "",
.path = flakeRoot, .path = flakeRoot,
@ -194,11 +183,13 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
path = canonPath(path + "/" + getOr(query, "dir", "")); path = canonPath(path + "/" + getOr(query, "dir", ""));
} }
fetchers::Attrs attrs; return fromParsedURL(fetchSettings, {
attrs.insert_or_assign("type", "path"); .scheme = "path",
attrs.insert_or_assign("path", path); .authority = "",
.path = path,
return std::make_pair(FlakeRef(fetchers::Input::fromAttrs(fetchSettings, std::move(attrs)), ""), fragment); .query = query,
.fragment = fragment
}, isFlake);
} }
/** /**
@ -220,8 +211,6 @@ static std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
if (std::regex_match(url, match, flakeRegex)) { if (std::regex_match(url, match, flakeRegex)) {
auto parsedURL = ParsedURL{ auto parsedURL = ParsedURL{
.url = url,
.base = "flake:" + match.str(1),
.scheme = "flake", .scheme = "flake",
.authority = "", .authority = "",
.path = match[1], .path = match[1],

View File

@ -4,8 +4,6 @@ project('nix-flake', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-main-c', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-main', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-store-c', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-store-test-support', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-store-tests', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -701,6 +701,7 @@ Goal::Co DerivationGoal::tryToBuild()
if (buildMode != bmCheck && allValid) { if (buildMode != bmCheck && allValid) {
debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath)); debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath));
outputLocks.setDeletion(true); outputLocks.setDeletion(true);
outputLocks.unlock();
co_return done(BuildResult::AlreadyValid, std::move(validOutputs)); co_return done(BuildResult::AlreadyValid, std::move(validOutputs));
} }

View File

@ -784,7 +784,7 @@ struct curlFileTransfer : public FileTransfer
auto s3Res = s3Helper.getObject(bucketName, key); auto s3Res = s3Helper.getObject(bucketName, key);
FileTransferResult res; FileTransferResult res;
if (!s3Res.data) if (!s3Res.data)
throw FileTransferError(NotFound, "S3 object '%s' does not exist", request.uri); throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri);
res.data = std::move(*s3Res.data); res.data = std::move(*s3Res.data);
res.urls.push_back(request.uri); res.urls.push_back(request.uri);
callback(std::move(res)); callback(std::move(res));

View File

@ -4,8 +4,6 @@ project('nix-store', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
'localstatedir=/nix/var', 'localstatedir=/nix/var',
], ],

View File

@ -3,7 +3,7 @@
#include "signals.hh" #include "signals.hh"
#include "util.hh" #include "util.hh"
#ifdef WIN32 #ifdef _WIN32
# include <errhandlingapi.h> # include <errhandlingapi.h>
# include <fileapi.h> # include <fileapi.h>
# include <windows.h> # include <windows.h>

View File

@ -4,8 +4,6 @@ project('nix-util-c', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -4,8 +4,6 @@ project('nix-util-test-support', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -261,4 +261,18 @@ TEST(pathExists, bogusPathDoesNotExist)
{ {
ASSERT_FALSE(pathExists("/schnitzel/darmstadt/pommes")); ASSERT_FALSE(pathExists("/schnitzel/darmstadt/pommes"));
} }
/* ----------------------------------------------------------------------------
* makeParentCanonical
* --------------------------------------------------------------------------*/
TEST(makeParentCanonical, noParent)
{
ASSERT_EQ(makeParentCanonical("file"), absPath(std::filesystem::path("file")));
}
TEST(makeParentCanonical, root)
{
ASSERT_EQ(makeParentCanonical("/"), "/");
}
} }

View File

@ -4,8 +4,6 @@ project('nix-util-tests', 'cpp',
'cpp_std=c++2a', 'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level # TODO(Qyriad): increase the warning level
'warning_level=1', 'warning_level=1',
'debug=true',
'optimization=2',
'errorlogs=true', # Please print logs for tests that fail 'errorlogs=true', # Please print logs for tests that fail
], ],
meson_version : '>= 1.1', meson_version : '>= 1.1',

View File

@ -55,6 +55,10 @@ TEST(filterANSIEscapes, utf8)
ASSERT_EQ(filterANSIEscapes("fóóbär", true, 3), "fóó"); ASSERT_EQ(filterANSIEscapes("fóóbär", true, 3), "fóó");
ASSERT_EQ(filterANSIEscapes("f€€bär", true, 4), "f€€b"); ASSERT_EQ(filterANSIEscapes("f€€bär", true, 4), "f€€b");
ASSERT_EQ(filterANSIEscapes("f𐍈𐍈bär", true, 4), "f𐍈𐍈b"); ASSERT_EQ(filterANSIEscapes("f𐍈𐍈bär", true, 4), "f𐍈𐍈b");
ASSERT_EQ(filterANSIEscapes("f🔍bar", true, 6), "f🔍bar");
ASSERT_EQ(filterANSIEscapes("f🔍bar", true, 3), "f🔍");
ASSERT_EQ(filterANSIEscapes("f🔍bar", true, 2), "f");
ASSERT_EQ(filterANSIEscapes("foo\u0301", true, 3), "foó");
} }
TEST(filterANSIEscapes, osc8) TEST(filterANSIEscapes, osc8)

View File

@ -20,24 +20,11 @@ namespace nix {
} }
std::ostream& operator<<(std::ostream& os, const ParsedURL& p) {
return os << "\n"
<< "url: " << p.url << "\n"
<< "base: " << p.base << "\n"
<< "scheme: " << p.scheme << "\n"
<< "authority: " << p.authority.value() << "\n"
<< "path: " << p.path << "\n"
<< "query: " << print_map(p.query) << "\n"
<< "fragment: " << p.fragment << "\n";
}
TEST(parseURL, parsesSimpleHttpUrl) { TEST(parseURL, parsesSimpleHttpUrl) {
auto s = "http://www.example.org/file.tar.gz"; auto s = "http://www.example.org/file.tar.gz";
auto parsed = parseURL(s); auto parsed = parseURL(s);
ParsedURL expected { ParsedURL expected {
.url = "http://www.example.org/file.tar.gz",
.base = "http://www.example.org/file.tar.gz",
.scheme = "http", .scheme = "http",
.authority = "www.example.org", .authority = "www.example.org",
.path = "/file.tar.gz", .path = "/file.tar.gz",
@ -53,8 +40,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "https://www.example.org/file.tar.gz",
- .base = "https://www.example.org/file.tar.gz",
.scheme = "https",
.authority = "www.example.org",
.path = "/file.tar.gz",
@@ -70,8 +55,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "https://www.example.org/file.tar.gz",
- .base = "https://www.example.org/file.tar.gz",
.scheme = "https",
.authority = "www.example.org",
.path = "/file.tar.gz",
@@ -87,8 +70,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "http://www.example.org/file.tar.gz",
- .base = "http://www.example.org/file.tar.gz",
.scheme = "http",
.authority = "www.example.org",
.path = "/file.tar.gz",
@@ -104,8 +85,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "file+https://www.example.org/video.mp4",
- .base = "https://www.example.org/video.mp4",
.scheme = "file+https",
.authority = "www.example.org",
.path = "/video.mp4",
@@ -126,8 +105,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "http://127.0.0.1:8080/file.tar.gz",
- .base = "https://127.0.0.1:8080/file.tar.gz",
.scheme = "http",
.authority = "127.0.0.1:8080",
.path = "/file.tar.gz",
@@ -143,8 +120,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
- .base = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
.scheme = "http",
.authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
.path = "",
@@ -161,8 +136,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
- .base = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
.scheme = "http",
.authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
.path = "",
@@ -185,8 +158,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "http://user:pass@www.example.org/file.tar.gz",
- .base = "http://user:pass@www.example.org/file.tar.gz",
.scheme = "http",
.authority = "user:pass@www.example.org:8080",
.path = "/file.tar.gz",
@@ -203,8 +174,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "",
- .base = "",
.scheme = "file",
.authority = "",
.path = "/none/of//your/business",
@@ -228,8 +197,6 @@ namespace nix {
auto parsed = parseURL(s);
ParsedURL expected {
- .url = "ftp://ftp.nixos.org/downloads/nixos.iso",
- .base = "ftp://ftp.nixos.org/downloads/nixos.iso",
.scheme = "ftp",
.authority = "ftp.nixos.org",
.path = "/downloads/nixos.iso",


@@ -262,6 +262,7 @@ public:
operator const T &() const { return value; }
operator T &() { return value; }
const T & get() const { return value; }
+ T & get() { return value; }
template<typename U>
bool operator ==(const U & v2) const { return value == v2; }
template<typename U>


@@ -331,7 +331,7 @@ void syncParent(const Path & path)
void recursiveSync(const Path & path)
{
- /* If it's a file, just fsync and return. */
+ /* If it's a file or symlink, just fsync and return. */
auto st = lstat(path);
if (S_ISREG(st.st_mode)) {
AutoCloseFD fd = toDescriptor(open(path.c_str(), O_RDONLY, 0));
@@ -339,7 +339,8 @@ void recursiveSync(const Path & path)
throw SysError("opening file '%1%'", path);
fd.fsync();
return;
- }
+ } else if (S_ISLNK(st.st_mode))
+ return;
/* Otherwise, perform a depth-first traversal of the directory and
fsync all the files. */
@@ -384,7 +385,7 @@ static void _deletePath(Descriptor parentfd, const fs::path & path, uint64_t & b
if (fstatat(parentfd, name.c_str(), &st,
AT_SYMLINK_NOFOLLOW) == -1) {
if (errno == ENOENT) return;
- throw SysError("getting status of '%1%'", path);
+ throw SysError("getting status of %1%", path);
}
if (!S_ISDIR(st.st_mode)) {
@@ -416,15 +417,15 @@ static void _deletePath(Descriptor parentfd, const fs::path & path, uint64_t & b
const auto PERM_MASK = S_IRUSR | S_IWUSR | S_IXUSR;
if ((st.st_mode & PERM_MASK) != PERM_MASK) {
if (fchmodat(parentfd, name.c_str(), st.st_mode | PERM_MASK, 0) == -1)
- throw SysError("chmod '%1%'", path);
+ throw SysError("chmod %1%", path);
}
int fd = openat(parentfd, path.c_str(), O_RDONLY);
if (fd == -1)
- throw SysError("opening directory '%1%'", path);
+ throw SysError("opening directory %1%", path);
AutoCloseDir dir(fdopendir(fd));
if (!dir)
- throw SysError("opening directory '%1%'", path);
+ throw SysError("opening directory %1%", path);
struct dirent * dirent;
while (errno = 0, dirent = readdir(dir.get())) { /* sic */
@@ -433,13 +434,13 @@ static void _deletePath(Descriptor parentfd, const fs::path & path, uint64_t & b
if (childName == "." || childName == "..") continue;
_deletePath(dirfd(dir.get()), path + "/" + childName, bytesFreed);
}
- if (errno) throw SysError("reading directory '%1%'", path);
+ if (errno) throw SysError("reading directory %1%", path);
}
int flags = S_ISDIR(st.st_mode) ? AT_REMOVEDIR : 0;
if (unlinkat(parentfd, name.c_str(), flags) == -1) {
if (errno == ENOENT) return;
- throw SysError("cannot unlink '%1%'", path);
+ throw SysError("cannot unlink %1%", path);
}
#else
// TODO implement
@@ -765,4 +766,19 @@ bool isExecutableFileAmbient(const fs::path & exe) {
) == 0;
}
std::filesystem::path makeParentCanonical(const std::filesystem::path & rawPath)
{
std::filesystem::path path(absPath(rawPath));;
try {
auto parent = path.parent_path();
if (parent == path) {
// `path` is a root directory => trivially canonical
return parent;
}
return std::filesystem::canonical(parent) / path.filename();
} catch (fs::filesystem_error & e) {
throw SysError("canonicalising parent path of '%1%'", path);
}
}
} // namespace nix


@@ -143,6 +143,23 @@ inline bool symlink_exists(const std::filesystem::path & path) {
} // namespace fs
/**
* Canonicalize a path except for the last component.
*
* This is useful for getting the canonical location of a symlink.
*
* Consider the case where `foo/l` is a symlink. `canonical("foo/l")` will
* resolve the symlink `l` to its target.
* `makeParentCanonical("foo/l")` will not resolve the symlink `l` to its target,
* but does ensure that the returned parent part of the path, `foo` is resolved
* to `canonical("foo")`, and can therefore be retrieved without traversing any
* symlinks.
*
* If a relative path is passed, it will be made absolute, so that the parent
* can always be canonicalized.
*/
std::filesystem::path makeParentCanonical(const std::filesystem::path & path);
/**
* A version of pathExists that returns false on a permission error.
* Useful for inferring default paths across directories that might not
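As a quick illustration of the distinction drawn in the doc comment above, here is a minimal sketch; it is not part of the diff, and the concrete paths and the `nix::` qualification are assumptions for the example:

    #include <filesystem>
    #include "file-system.hh"

    void example()
    {
        // Suppose "foo" is a directory reached through symlinked parents,
        // and "foo/l" is itself a symlink.
        std::filesystem::path p = "foo/l";

        // canonical() resolves every component, including the final symlink.
        auto resolved = std::filesystem::canonical(p);

        // makeParentCanonical() resolves only the parent, so the result still
        // ends in the symlink "l" and can be lstat'ed, copied, or added to the
        // store as a symlink.
        auto kept = nix::makeParentCanonical(p); // canonical("foo") / "l"
    }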


@@ -4,8 +4,6 @@ project('nix-util', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.1',
@@ -108,6 +106,8 @@ deps_private += cpuid
nlohmann_json = dependency('nlohmann_json', version : '>= 3.9')
deps_public += nlohmann_json
+ cxx = meson.get_compiler('cpp')
config_h = configure_file(
configuration : configdata,
output : 'config-util.hh',
@@ -168,6 +168,10 @@ sources = files(
)
include_dirs = [include_directories('.')]
+ if not cxx.has_header('widechar_width.h', required : false)
+ # use vendored widechar_width.h
+ include_dirs += include_directories('./widecharwidth')
+ endif
headers = [config_h] + files(
'abstract-setting-to-json.hh',

@@ -29,6 +29,7 @@ mkMesonLibrary (finalAttrs: {
./nix-meson-build-support
../../.version
./.version
+ ./widecharwidth
./meson.build
./meson.options
./linux/meson.build


@@ -43,13 +43,25 @@ struct PosixSourceAccessor : virtual SourceAccessor
std::optional<std::filesystem::path> getPhysicalPath(const CanonPath & path) override;
/**
- * Create a `PosixSourceAccessor` and `CanonPath` corresponding to
+ * Create a `PosixSourceAccessor` and `SourcePath` corresponding to
* some native path.
*
* The `PosixSourceAccessor` is rooted as far up the tree as
* possible (e.g. on Windows it could be scoped to a drive like
* `C:\`). This allows more `..` parent accessing to work.
*
+ * @note When `path` is trusted user input, canonicalize it using
+ * `std::filesystem::canonical`, `makeParentCanonical`, `std::filesystem::weakly_canonical`, etc.,
+ * as appropriate for the use case. At least weak canonicalization is
+ * required for the `SourcePath` to do anything useful at the location it
+ * points to.
+ *
+ * @note A canonicalizing behavior is not built into `createAtRoot` so that
+ * callers do not accidentally introduce symlink-related security vulnerabilities.
+ * Furthermore, `createAtRoot` does not know whether the file pointed to by
+ * `path` should be resolved if it is itself a symlink. In other words,
+ * `createAtRoot` cannot decide between the aforementioned `canonical`, `makeParentCanonical`, etc. for its callers.
+ *
* See
* [`std::filesystem::path::root_path`](https://en.cppreference.com/w/cpp/filesystem/path/root_path)
* and
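Following the notes above, a minimal sketch of the calling pattern this diff adopts in the CLI commands; the `openUserPath` wrapper is made up for the example and the include names are assumptions:

    #include "file-system.hh"
    #include "posix-source-accessor.hh"

    using namespace nix;

    // Resolve the directories leading up to a user-supplied path, but keep the
    // last component as given, so a symlink argument stays a symlink.
    SourcePath openUserPath(const std::filesystem::path & path)
    {
        return PosixSourceAccessor::createAtRoot(makeParentCanonical(path));
    }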


@@ -11,6 +11,53 @@
# include <sys/ioctl.h>
#endif
#include <unistd.h>
#include <widechar_width.h>
namespace {
inline std::pair<int, size_t> charWidthUTF8Helper(std::string_view s)
{
size_t bytes = 1;
uint32_t ch = s[0];
uint32_t max = 1U << 7;
if ((ch & 0x80U) == 0U) {
} else if ((ch & 0xe0U) == 0xc0U) {
ch &= 0x1fU;
bytes = 2;
max = 1U << 11;
} else if ((ch & 0xf0U) == 0xe0U) {
ch &= 0x0fU;
bytes = 3;
max = 1U << 16;
} else if ((ch & 0xf8U) == 0xf0U) {
ch &= 0x07U;
bytes = 4;
max = 0x110000U;
} else {
return {bytes, bytes}; // invalid UTF-8 start byte
}
for (size_t i = 1; i < bytes; i++) {
if (i < s.size() && (s[i] & 0xc0) == 0x80) {
ch = (ch << 6) | (s[i] & 0x3f);
} else {
return {i, i}; // invalid UTF-8 encoding; assume one character per byte
}
}
int width = bytes; // in case of overlong encoding
if (ch < max) {
width = widechar_wcwidth(ch);
if (width == widechar_ambiguous) {
width = 1; // just a guess...
} else if (width == widechar_widened_in_9) {
width = 2;
} else if (width < 0) {
width = 0;
}
}
return {width, bytes};
}
}
namespace nix {
@@ -30,7 +77,7 @@ std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int w
size_t w = 0;
auto i = s.begin();
- while (w < (size_t) width && i != s.end()) {
+ while (i != s.end()) {
if (*i == '\e') {
std::string e;
@@ -61,10 +108,12 @@ std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int w
}
else if (*i == '\t') {
- i++; t += ' '; w++;
- while (w < (size_t) width && w % 8) {
- t += ' '; w++;
- }
+ do {
+ if (++w > (size_t) width)
+ return t;
+ t += ' ';
+ } while (w % 8);
+ i++;
}
else if (*i == '\r' || *i == '\a')
@@ -72,35 +121,18 @@ std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int w
i++;
else {
- w++;
- // Copy one UTF-8 character.
- if ((*i & 0xe0) == 0xc0) {
- t += *i++;
- if (i != s.end() && ((*i & 0xc0) == 0x80)) t += *i++;
- } else if ((*i & 0xf0) == 0xe0) {
- t += *i++;
- if (i != s.end() && ((*i & 0xc0) == 0x80)) {
- t += *i++;
- if (i != s.end() && ((*i & 0xc0) == 0x80)) t += *i++;
- }
- } else if ((*i & 0xf8) == 0xf0) {
- t += *i++;
- if (i != s.end() && ((*i & 0xc0) == 0x80)) {
- t += *i++;
- if (i != s.end() && ((*i & 0xc0) == 0x80)) {
- t += *i++;
- if (i != s.end() && ((*i & 0xc0) == 0x80)) t += *i++;
- }
- }
- } else
- t += *i++;
+ auto [chWidth, bytes] = charWidthUTF8Helper({i, s.end()});
+ w += chWidth;
+ if (w > (size_t) width) {
+ break;
+ }
+ t += {i, i + bytes};
+ i += bytes;
}
}
return t;
}
//////////////////////////////////////////////////////////////////////
static Sync<std::pair<unsigned short, unsigned short>> windowSize{{0, 0}};
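For a rough sense of what the new helper added above returns, a hypothetical check placed inside terminal.cc (where the helper lives in an anonymous namespace); the widths shown assume the usual widechar_wcwidth classifications and are illustrative rather than guaranteed:

    // charWidthUTF8Helper returns {width in terminal columns, bytes consumed}.
    void charWidthExamples()
    {
        auto ascii  = charWidthUTF8Helper("a");       // {1, 1}: one byte, one column
        auto eacute = charWidthUTF8Helper("\u00e9");  // {1, 2}: 'é' is two bytes, one column
        auto kanji  = charWidthUTF8Helper("\u6f22");  // {2, 3}: '漢' is three bytes, two columns
        auto bogus  = charWidthUTF8Helper("\xff");    // {1, 1}: invalid start byte, one column per byte
    }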


@@ -22,7 +22,6 @@ ParsedURL parseURL(const std::string & url)
std::smatch match;
if (std::regex_match(url, match, uriRegex)) {
- auto & base = match[1];
std::string scheme = match[2];
auto authority = match[3].matched
? std::optional<std::string>(match[3]) : std::nullopt;
@@ -40,8 +39,6 @@ ParsedURL parseURL(const std::string & url)
path = "/";
return ParsedURL{
- .url = url,
- .base = base,
.scheme = scheme,
.authority = authority,
.path = percentDecode(path),
@@ -136,6 +133,12 @@ std::string ParsedURL::to_string() const
+ (fragment.empty() ? "" : "#" + percentEncode(fragment));
}
+ std::ostream & operator << (std::ostream & os, const ParsedURL & url)
+ {
+ os << url.to_string();
+ return os;
+ }
bool ParsedURL::operator ==(const ParsedURL & other) const noexcept
{
return


@@ -7,9 +7,6 @@ namespace nix {
struct ParsedURL
{
- std::string url;
- /// URL without query/fragment
- std::string base;
std::string scheme;
std::optional<std::string> authority;
std::string path;
@@ -26,6 +23,8 @@ struct ParsedURL
ParsedURL canonicalise();
};
+ std::ostream & operator << (std::ostream & os, const ParsedURL & url);
MakeError(BadURL, Error);
std::string percentDecode(std::string_view in);


@@ -0,0 +1,4 @@
widecharwidth - wcwidth implementation
Written in 2018 by ridiculous_fish
To the extent possible under law, the author(s) have dedicated all copyright and related and neighboring rights to this software to the public domain worldwide. This software is distributed without any warranty.
You should have received a copy of the CC0 Public Domain Dedication along with this software. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.

File diff suppressed because it is too large


@@ -1,6 +1,6 @@
#include "environment-variables.hh"
- #ifdef WIN32
+ #ifdef _WIN32
# include "processenv.h"
namespace nix {


@@ -5,7 +5,7 @@
#include "windows-error.hh"
#include "file-path.hh"
- #ifdef WIN32
+ #ifdef _WIN32
#include <fileapi.h>
#include <error.h>
#include <namedpipeapi.h>


@@ -1,6 +1,6 @@
#include "file-system.hh"
- #ifdef WIN32
+ #ifdef _WIN32
namespace nix {
Descriptor openDirectory(const std::filesystem::path & path)


@@ -1,4 +1,4 @@
- #ifdef WIN32
+ #ifdef _WIN32
# include <ioapiset.h>
# include "windows-error.hh"


@@ -7,7 +7,7 @@
#include "file-path-impl.hh"
#include "util.hh"
- #ifdef WIN32
+ #ifdef _WIN32
namespace nix {


@@ -23,7 +23,7 @@
#include <sys/types.h>
#include <unistd.h>
- #ifdef WIN32
+ #ifdef _WIN32
#define WIN32_LEAN_AND_MEAN
#include <windows.h>


@@ -4,7 +4,7 @@
#include "file-system.hh"
#include "windows-error.hh"
- #ifdef WIN32
+ #ifdef _WIN32
#define WIN32_LEAN_AND_MEAN
#include <windows.h>


@@ -1,7 +1,7 @@
#include "windows-async-pipe.hh"
#include "windows-error.hh"
- #ifdef WIN32
+ #ifdef _WIN32
namespace nix::windows {


@@ -2,7 +2,7 @@
///@file
#include "file-descriptor.hh"
- #ifdef WIN32
+ #ifdef _WIN32
namespace nix::windows {


@@ -1,6 +1,6 @@
#include "windows-error.hh"
- #ifdef WIN32
+ #ifdef _WIN32
#include <error.h>
#define WIN32_LEAN_AND_MEAN
#include <windows.h>


@@ -1,7 +1,7 @@
#pragma once
///@file
- #ifdef WIN32
+ #ifdef _WIN32
#include <errhandlingapi.h>
#include "error.hh"


@@ -24,7 +24,7 @@ static Path gcRoot;
static int rootNr = 0;
- enum OutputKind { okPlain, okXML, okJSON };
+ enum OutputKind { okPlain, okRaw, okXML, okJSON };
void processExpr(EvalState & state, const Strings & attrPaths,
bool parseOnly, bool strict, Bindings & autoArgs,
@@ -50,7 +50,11 @@ void processExpr(EvalState & state, const Strings & attrPaths,
vRes = v;
else
state.autoCallFunction(autoArgs, v, vRes);
- if (output == okXML)
+ if (output == okRaw)
+ std::cout << *state.coerceToString(noPos, vRes, context, "while generating the nix-instantiate output");
+ // We intentionally don't output a newline here. The default PS1 for Bash in NixOS starts with a newline
+ // and other interactive shells like Zsh are smart enough to print a missing newline before the prompt.
+ else if (output == okXML)
printValueAsXML(state, strict, location, vRes, std::cout, context, noPos);
else if (output == okJSON) {
printValueAsJSON(state, strict, vRes, v.determinePos(noPos), std::cout, context);
@@ -132,6 +136,8 @@ static int main_nix_instantiate(int argc, char * * argv)
gcRoot = getArg(*arg, arg, end);
else if (*arg == "--indirect")
;
+ else if (*arg == "--raw")
+ outputKind = okRaw;
else if (*arg == "--xml")
outputKind = okXML;
else if (*arg == "--json")


@@ -183,9 +183,9 @@ static void opAdd(Strings opFlags, Strings opArgs)
if (!opFlags.empty()) throw UsageError("unknown flag");
for (auto & i : opArgs) {
- auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i);
+ auto sourcePath = PosixSourceAccessor::createAtRoot(makeParentCanonical(i));
cout << fmt("%s\n", store->printStorePath(store->addToStore(
- std::string(baseNameOf(i)), {accessor, canonPath})));
+ std::string(baseNameOf(i)), sourcePath)));
}
}
@@ -207,10 +207,10 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
opArgs.pop_front();
for (auto & i : opArgs) {
- auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i);
+ auto sourcePath = PosixSourceAccessor::createAtRoot(makeParentCanonical(i));
std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(
baseNameOf(i),
- {accessor, canonPath},
+ sourcePath,
method,
hashAlgo).path));
}


@@ -37,13 +37,13 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{
if (!namePart) namePart = baseNameOf(path);
- auto [accessor, path2] = PosixSourceAccessor::createAtRoot(path);
+ auto sourcePath = PosixSourceAccessor::createAtRoot(makeParentCanonical(path));
auto storePath = dryRun
? store->computeStorePath(
- *namePart, {accessor, path2}, caMethod, hashAlgo, {}).first
+ *namePart, sourcePath, caMethod, hashAlgo, {}).first
: store->addToStoreSlow(
- *namePart, {accessor, path2}, caMethod, hashAlgo, {}).path;
+ *namePart, sourcePath, caMethod, hashAlgo, {}).path;
logger->cout("%s", store->printStorePath(storePath));
}


@@ -938,7 +938,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
}
continue;
} else
- createSymlink(target, to2);
+ createSymlink(target, os_string_to_string(PathViewNG { to2 }));
}
else
throw Error("file '%s' has unsupported type", from2);


@@ -87,18 +87,35 @@ struct CmdHashBase : Command
return std::make_unique<HashSink>(hashAlgo);
};
- auto path2 = PosixSourceAccessor::createAtRoot(path);
+ auto makeSourcePath = [&]() -> SourcePath {
+ return PosixSourceAccessor::createAtRoot(makeParentCanonical(path));
+ };
Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++
switch (mode) {
case FileIngestionMethod::Flat:
+ {
+ // While usually we could use the same code as for NixArchive,
+ // the Flat method needs to support FIFOs, such as those
+ // produced by bash process substitution, e.g.:
+ // nix hash --mode flat <(echo hi)
+ // Also, symlink semantics are unambiguous in the flat case,
+ // so we don't need to go low-level or reject symlink `path`s.
+ auto hashSink = makeSink();
+ readFile(path, *hashSink);
+ h = hashSink->finish().first;
+ break;
+ }
case FileIngestionMethod::NixArchive:
{
+ auto sourcePath = makeSourcePath();
auto hashSink = makeSink();
- dumpPath(path2, *hashSink, (FileSerialisationMethod) mode);
+ dumpPath(sourcePath, *hashSink, (FileSerialisationMethod) mode);
h = hashSink->finish().first;
break;
}
case FileIngestionMethod::Git: {
+ auto sourcePath = makeSourcePath();
std::function<git::DumpHook> hook;
hook = [&](const SourcePath & path) -> git::TreeEntry {
auto hashSink = makeSink();
@@ -109,7 +126,7 @@ struct CmdHashBase : Command
.hash = hash,
};
};
- h = hook(path2).hash;
+ h = hook(sourcePath).hash;
break;
}
}


@@ -4,8 +4,6 @@ project('nix', 'cpp',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
'localstatedir=/nix/var',
],


@@ -29,6 +29,47 @@ echo "$hash2"
test "$hash1" = "sha256:$hash2"
# The contents can be accessed through a symlink, and this symlink has no effect on the hash
# https://github.com/NixOS/nix/issues/11941
test_issue_11941() {
local expected actual
mkdir -p "$TEST_ROOT/foo/bar" && ln -s "$TEST_ROOT/foo" "$TEST_ROOT/foo-link"
# legacy
expected=$(nix-store --add-fixed --recursive sha256 "$TEST_ROOT/foo/bar")
actual=$(nix-store --add-fixed --recursive sha256 "$TEST_ROOT/foo-link/bar")
[[ "$expected" == "$actual" ]]
actual=$(nix-store --add "$TEST_ROOT/foo-link/bar")
[[ "$expected" == "$actual" ]]
# nix store add
actual=$(nix store add --hash-algo sha256 --mode nar "$TEST_ROOT/foo/bar")
[[ "$expected" == "$actual" ]]
# cleanup
rm -r "$TEST_ROOT/foo" "$TEST_ROOT/foo-link"
}
test_issue_11941
# A symlink is added to the store as a symlink, not as a copy of the target
test_add_symlink() {
ln -s /bin "$TEST_ROOT/my-bin"
# legacy
path=$(nix-store --add-fixed --recursive sha256 "$TEST_ROOT/my-bin")
[[ "$(readlink "$path")" == /bin ]]
path=$(nix-store --add "$TEST_ROOT/my-bin")
[[ "$(readlink "$path")" == /bin ]]
# nix store add
path=$(nix store add --hash-algo sha256 --mode nar "$TEST_ROOT/my-bin")
[[ "$(readlink "$path")" == /bin ]]
# cleanup
rm "$TEST_ROOT/my-bin"
}
test_add_symlink
#### New style commands
clearStoreIfPossible


@@ -1,5 +1,5 @@
{
int = 123;
- str = "foo";
+ str = "foo\nbar";
attr.foo = "bar";
}


@@ -16,8 +16,8 @@ EOF
nix eval --expr 'assert 1 + 2 == 3; true'
[[ $(nix eval int -f "./eval.nix") == 123 ]]
- [[ $(nix eval str -f "./eval.nix") == '"foo"' ]]
- [[ $(nix eval str --raw -f "./eval.nix") == 'foo' ]]
+ [[ $(nix eval str -f "./eval.nix") == '"foo\nbar"' ]]
+ [[ $(nix eval str --raw -f "./eval.nix") == $'foo\nbar' ]]
[[ "$(nix eval attr -f "./eval.nix")" == '{ foo = "bar"; }' ]]
[[ $(nix eval attr --json -f "./eval.nix") == '{"foo":"bar"}' ]]
[[ $(nix eval int -f - < "./eval.nix") == 123 ]]
@@ -28,7 +28,8 @@ nix eval --expr 'assert 1 + 2 == 3; true'
nix-instantiate --eval -E 'assert 1 + 2 == 3; true'
[[ $(nix-instantiate -A int --eval "./eval.nix") == 123 ]]
- [[ $(nix-instantiate -A str --eval "./eval.nix") == '"foo"' ]]
+ [[ $(nix-instantiate -A str --eval "./eval.nix") == '"foo\nbar"' ]]
+ [[ $(nix-instantiate -A str --raw --eval "./eval.nix") == $'foo\nbar' ]]
[[ "$(nix-instantiate -A attr --eval "./eval.nix")" == '{ foo = "bar"; }' ]]
[[ $(nix-instantiate -A attr --eval --json "./eval.nix") == '{"foo":"bar"}' ]]
[[ $(nix-instantiate -A int --eval - < "./eval.nix") == 123 ]]


@@ -63,3 +63,16 @@ flakeref=git+file://$rootRepo\?submodules=1\&dir=submodule
echo '"foo"' > "$rootRepo"/submodule/sub.nix
[[ $(nix eval --json "$flakeref#sub" ) = '"foo"' ]]
[[ $(nix flake metadata --json "$flakeref" | jq -r .locked.rev) = null ]]
# Test that `nix flake metadata` parses `submodule` correctly.
cat > "$rootRepo"/flake.nix <<EOF
{
outputs = { self }: {
};
}
EOF
git -C "$rootRepo" add flake.nix
git -C "$rootRepo" commit -m "Add flake.nix"
storePath=$(nix flake metadata --json "$rootRepo?submodules=1" | jq -r .path)
[[ -e "$storePath/submodule" ]]


@@ -92,3 +92,32 @@ try2 md5 "20f3ffe011d4cfa7d72bfabef7882836"
rm "$TEST_ROOT/hash-path/hello"
ln -s x "$TEST_ROOT/hash-path/hello"
try2 md5 "f78b733a68f5edbdf9413899339eaa4a"
# Flat mode supports process substitution
h=$(nix hash path --mode flat --type sha256 --base32 <(printf "SMASH THE STATE"))
[[ 0d9n3r2i4m1zgy0wpqbsyabsfzgs952066bfp8gwvcg4mkr4r5g8 == "$h" ]]
# Flat mode supports process substitution (hash file)
h=$(nix hash file --type sha256 --base32 <(printf "SMASH THE STATE"))
[[ 0d9n3r2i4m1zgy0wpqbsyabsfzgs952066bfp8gwvcg4mkr4r5g8 == "$h" ]]
# Symlinks in the ancestry are ok and don't affect the result
mkdir -p "$TEST_ROOT/simple" "$TEST_ROOT/try/to/mess/with/it"
echo hi > "$TEST_ROOT/simple/hi"
ln -s "$TEST_ROOT/simple" "$TEST_ROOT/try/to/mess/with/it/simple-link"
h=$(nix hash path --type sha256 --base32 "$TEST_ROOT/simple/hi")
[[ 1xmr8jicvzszfzpz46g37mlpvbzjl2wpwvl2b05psipssyp1sm8h == "$h" ]]
h=$(nix hash path --type sha256 --base32 "$TEST_ROOT/try/to/mess/with/it/simple-link/hi")
[[ 1xmr8jicvzszfzpz46g37mlpvbzjl2wpwvl2b05psipssyp1sm8h == "$h" ]]
# nix hash --mode nar does not canonicalize a symlink argument.
# Otherwise it can't generate a NAR whose root is a symlink.
# If you want to follow the symlink, pass $(realpath -s ...) instead.
ln -s /non-existent-48cujwe8ndf4as0bne "$TEST_ROOT/symlink-to-nowhere"
h=$(nix hash path --mode nar --type sha256 --base32 "$TEST_ROOT/symlink-to-nowhere")
[[ 1bl5ry3x1fcbwgr5c2x50bn572iixh4j1p6ax5isxly2ddgn8pbp == "$h" ]] # manually verified hash
if [[ -e /bin ]]; then
ln -s /bin "$TEST_ROOT/symlink-to-bin"
h=$(nix hash path --mode nar --type sha256 --base32 "$TEST_ROOT/symlink-to-bin")
[[ 0z2mdmkd43l0ijdxfbj1y8vzli15yh9b09n3a3rrygmjshbyypsw == "$h" ]] # manually verified hash
fi

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,8 @@
error:
… while calling the 'toJSON' builtin
at /pwd/lang/eval-fail-toJSON-non-utf-8.nix:1:1:
1| builtins.toJSON "_invalid UTF-8: ÿ_"
| ^
2|
error: JSON serialization error: [json.exception.type_error.316] invalid UTF-8 byte at index 16: 0xFF


@@ -0,0 +1 @@
builtins.toJSON "_invalid UTF-8: ÿ_"


@@ -4,8 +4,6 @@ project('nix-functional-tests',
'cpp_std=c++2a',
# TODO(Qyriad): increase the warning level
'warning_level=1',
- 'debug=true',
- 'optimization=2',
'errorlogs=true', # Please print logs for tests that fail
],
meson_version : '>= 1.3',


@@ -26,6 +26,8 @@ let
# Evaluate VMs faster
documentation.enable = false;
+ # this links against nix and might break with our git version.
+ system.tools.nixos-option.enable = false;
};
_module.args.nixpkgs = nixpkgs;
_module.args.system = system;
@@ -157,6 +159,8 @@ in
functional_root = runNixOSTestFor "x86_64-linux" ./functional/as-root.nix;
+ functional_symlinked-home = runNixOSTestFor "x86_64-linux" ./functional/symlinked-home.nix;
user-sandboxing = runNixOSTestFor "x86_64-linux" ./user-sandboxing;
s3-binary-cache-store = runNixOSTestFor "x86_64-linux" ./s3-binary-cache-store.nix;


@@ -0,0 +1,36 @@
/**
This test runs the functional tests on a NixOS system where the home directory
is symlinked to another location.
The purpose of this test is to find cases where Nix uses low-level operations
that don't support symlinks on paths that include them.
It is not a substitute for more intricate, use case-specific tests, but helps
catch common issues.
*/
# TODO: add symlinked tmpdir
{ ... }:
{
name = "functional-tests-on-nixos_user_symlinked-home";
imports = [ ./common.nix ];
nodes.machine = {
users.users.alice = { isNormalUser = true; };
};
testScript = ''
machine.wait_for_unit("multi-user.target")
with subtest("prepare symlinked home"):
machine.succeed("""
(
set -x
mv /home/alice /home/alice.real
ln -s alice.real /home/alice
) 1>&2
""")
machine.succeed("""
su --login --command "run-test-suite" alice >&2
""")
'';
}

Some files were not shown because too many files have changed in this diff.