Mirror of https://github.com/NixOS/nix.git, synced 2025-04-16 06:08:03 +00:00.
Apply clang-format universally.
* It is tough to contribute to a project that doesn't use a formatter.
* It is extra hard to contribute to a project which has configured the formatter, but ignores it for some files.
* Code formatting makes it harder to hide obscure / weird bugs by accident or on purpose.

Let's rip the bandaid off? Note that PRs currently in flight should be able to be merged relatively easily by applying `clang-format` to their tip prior to merge.
parent c58be744cc
commit c01a35bb1c
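For illustration only (the struct and functions below are hypothetical and not part of the Nix code base; they just mirror the shapes touched in the diff), this is roughly the kind of rewrite clang-format performs in the hunks that follow: long hand-wrapped conditions are joined onto one line, the opening brace moves onto the `if` line, and declarator spacing such as `char * * argv` becomes `char ** argv`. The behaviour of the code is unchanged; only the layout differs.

#include <set>
#include <string>

// Hypothetical example type; not from the Nix sources.
struct Machine
{
    bool enabled = true;
    std::set<std::string> systemTypes;
};

// Before: hand-wrapped condition, brace on its own line after the condition.
static bool wouldBuildOn(const Machine & m, const std::string & neededSystem)
{
    if (m.enabled &&
        m.systemTypes.count(neededSystem) > 0)
    {
        return true;
    }
    return false;
}

// After clang-format: same behaviour, condition joined, brace kept on the `if` line.
static bool wouldBuildOnFormatted(const Machine & m, const std::string & neededSystem)
{
    if (m.enabled && m.systemTypes.count(neededSystem) > 0) {
        return true;
    }
    return false;
}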
@@ -13,371 +13,377 @@
// - values are redirection targets relative to the current path.
const redirects = {
|
||||
"index.html": {
|
||||
"part-advanced-topics": "advanced-topics/index.html",
|
||||
"chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
|
||||
"chap-diff-hook": "advanced-topics/diff-hook.html",
|
||||
"check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
|
||||
"chap-distributed-builds": "command-ref/conf-file.html#conf-builders",
|
||||
"chap-post-build-hook": "advanced-topics/post-build-hook.html",
|
||||
"chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
|
||||
"chap-writing-nix-expressions": "language/index.html",
|
||||
"part-command-ref": "command-ref/index.html",
|
||||
"conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
|
||||
"conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
|
||||
"conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
|
||||
"conf-allowed-users": "command-ref/conf-file.html#conf-allowed-users",
|
||||
"conf-auto-optimise-store": "command-ref/conf-file.html#conf-auto-optimise-store",
|
||||
"conf-binary-cache-public-keys": "command-ref/conf-file.html#conf-binary-cache-public-keys",
|
||||
"conf-binary-caches": "command-ref/conf-file.html#conf-binary-caches",
|
||||
"conf-build-compress-log": "command-ref/conf-file.html#conf-build-compress-log",
|
||||
"conf-build-cores": "command-ref/conf-file.html#conf-build-cores",
|
||||
"conf-build-extra-chroot-dirs": "command-ref/conf-file.html#conf-build-extra-chroot-dirs",
|
||||
"conf-build-extra-sandbox-paths": "command-ref/conf-file.html#conf-build-extra-sandbox-paths",
|
||||
"conf-build-fallback": "command-ref/conf-file.html#conf-build-fallback",
|
||||
"conf-build-max-jobs": "command-ref/conf-file.html#conf-build-max-jobs",
|
||||
"conf-build-max-log-size": "command-ref/conf-file.html#conf-build-max-log-size",
|
||||
"conf-build-max-silent-time": "command-ref/conf-file.html#conf-build-max-silent-time",
|
||||
"conf-build-timeout": "command-ref/conf-file.html#conf-build-timeout",
|
||||
"conf-build-use-chroot": "command-ref/conf-file.html#conf-build-use-chroot",
|
||||
"conf-build-use-sandbox": "command-ref/conf-file.html#conf-build-use-sandbox",
|
||||
"conf-build-use-substitutes": "command-ref/conf-file.html#conf-build-use-substitutes",
|
||||
"conf-build-users-group": "command-ref/conf-file.html#conf-build-users-group",
|
||||
"conf-builders": "command-ref/conf-file.html#conf-builders",
|
||||
"conf-builders-use-substitutes": "command-ref/conf-file.html#conf-builders-use-substitutes",
|
||||
"conf-compress-build-log": "command-ref/conf-file.html#conf-compress-build-log",
|
||||
"conf-connect-timeout": "command-ref/conf-file.html#conf-connect-timeout",
|
||||
"conf-cores": "command-ref/conf-file.html#conf-cores",
|
||||
"conf-diff-hook": "command-ref/conf-file.html#conf-diff-hook",
|
||||
"conf-env-keep-derivations": "command-ref/conf-file.html#conf-env-keep-derivations",
|
||||
"conf-extra-binary-caches": "command-ref/conf-file.html#conf-extra-binary-caches",
|
||||
"conf-extra-platforms": "command-ref/conf-file.html#conf-extra-platforms",
|
||||
"conf-extra-sandbox-paths": "command-ref/conf-file.html#conf-extra-sandbox-paths",
|
||||
"conf-extra-substituters": "command-ref/conf-file.html#conf-extra-substituters",
|
||||
"conf-fallback": "command-ref/conf-file.html#conf-fallback",
|
||||
"conf-fsync-metadata": "command-ref/conf-file.html#conf-fsync-metadata",
|
||||
"conf-gc-keep-derivations": "command-ref/conf-file.html#conf-gc-keep-derivations",
|
||||
"conf-gc-keep-outputs": "command-ref/conf-file.html#conf-gc-keep-outputs",
|
||||
"conf-hashed-mirrors": "command-ref/conf-file.html#conf-hashed-mirrors",
|
||||
"conf-http-connections": "command-ref/conf-file.html#conf-http-connections",
|
||||
"conf-keep-build-log": "command-ref/conf-file.html#conf-keep-build-log",
|
||||
"conf-keep-derivations": "command-ref/conf-file.html#conf-keep-derivations",
|
||||
"conf-keep-env-derivations": "command-ref/conf-file.html#conf-keep-env-derivations",
|
||||
"conf-keep-outputs": "command-ref/conf-file.html#conf-keep-outputs",
|
||||
"conf-max-build-log-size": "command-ref/conf-file.html#conf-max-build-log-size",
|
||||
"conf-max-free": "command-ref/conf-file.html#conf-max-free",
|
||||
"conf-max-jobs": "command-ref/conf-file.html#conf-max-jobs",
|
||||
"conf-max-silent-time": "command-ref/conf-file.html#conf-max-silent-time",
|
||||
"conf-min-free": "command-ref/conf-file.html#conf-min-free",
|
||||
"conf-narinfo-cache-negative-ttl": "command-ref/conf-file.html#conf-narinfo-cache-negative-ttl",
|
||||
"conf-narinfo-cache-positive-ttl": "command-ref/conf-file.html#conf-narinfo-cache-positive-ttl",
|
||||
"conf-netrc-file": "command-ref/conf-file.html#conf-netrc-file",
|
||||
"conf-plugin-files": "command-ref/conf-file.html#conf-plugin-files",
|
||||
"conf-post-build-hook": "command-ref/conf-file.html#conf-post-build-hook",
|
||||
"conf-pre-build-hook": "command-ref/conf-file.html#conf-pre-build-hook",
|
||||
"conf-require-sigs": "command-ref/conf-file.html#conf-require-sigs",
|
||||
"conf-restrict-eval": "command-ref/conf-file.html#conf-restrict-eval",
|
||||
"conf-run-diff-hook": "command-ref/conf-file.html#conf-run-diff-hook",
|
||||
"conf-sandbox": "command-ref/conf-file.html#conf-sandbox",
|
||||
"conf-sandbox-dev-shm-size": "command-ref/conf-file.html#conf-sandbox-dev-shm-size",
|
||||
"conf-sandbox-paths": "command-ref/conf-file.html#conf-sandbox-paths",
|
||||
"conf-secret-key-files": "command-ref/conf-file.html#conf-secret-key-files",
|
||||
"conf-show-trace": "command-ref/conf-file.html#conf-show-trace",
|
||||
"conf-stalled-download-timeout": "command-ref/conf-file.html#conf-stalled-download-timeout",
|
||||
"conf-substitute": "command-ref/conf-file.html#conf-substitute",
|
||||
"conf-substituters": "command-ref/conf-file.html#conf-substituters",
|
||||
"conf-system": "command-ref/conf-file.html#conf-system",
|
||||
"conf-system-features": "command-ref/conf-file.html#conf-system-features",
|
||||
"conf-tarball-ttl": "command-ref/conf-file.html#conf-tarball-ttl",
|
||||
"conf-timeout": "command-ref/conf-file.html#conf-timeout",
|
||||
"conf-trace-function-calls": "command-ref/conf-file.html#conf-trace-function-calls",
|
||||
"conf-trusted-binary-caches": "command-ref/conf-file.html#conf-trusted-binary-caches",
|
||||
"conf-trusted-public-keys": "command-ref/conf-file.html#conf-trusted-public-keys",
|
||||
"conf-trusted-substituters": "command-ref/conf-file.html#conf-trusted-substituters",
|
||||
"conf-trusted-users": "command-ref/conf-file.html#conf-trusted-users",
|
||||
"extra-sandbox-paths": "command-ref/conf-file.html#extra-sandbox-paths",
|
||||
"sec-conf-file": "command-ref/conf-file.html",
|
||||
"env-NIX_PATH": "command-ref/env-common.html#env-NIX_PATH",
|
||||
"env-common": "command-ref/env-common.html",
|
||||
"envar-remote": "command-ref/env-common.html#env-NIX_REMOTE",
|
||||
"sec-common-env": "command-ref/env-common.html",
|
||||
"ch-files": "command-ref/files.html",
|
||||
"ch-main-commands": "command-ref/main-commands.html",
|
||||
"opt-out-link": "command-ref/nix-build.html#opt-out-link",
|
||||
"sec-nix-build": "command-ref/nix-build.html",
|
||||
"sec-nix-channel": "command-ref/nix-channel.html",
|
||||
"sec-nix-collect-garbage": "command-ref/nix-collect-garbage.html",
|
||||
"sec-nix-copy-closure": "command-ref/nix-copy-closure.html",
|
||||
"sec-nix-daemon": "command-ref/nix-daemon.html",
|
||||
"refsec-nix-env-install-examples": "command-ref/nix-env.html#examples",
|
||||
"rsec-nix-env-install": "command-ref/nix-env.html#operation---install",
|
||||
"rsec-nix-env-set": "command-ref/nix-env.html#operation---set",
|
||||
"rsec-nix-env-set-flag": "command-ref/nix-env.html#operation---set-flag",
|
||||
"rsec-nix-env-upgrade": "command-ref/nix-env.html#operation---upgrade",
|
||||
"sec-nix-env": "command-ref/nix-env.html",
|
||||
"ssec-version-comparisons": "command-ref/nix-env.html#versions",
|
||||
"sec-nix-hash": "command-ref/nix-hash.html",
|
||||
"sec-nix-instantiate": "command-ref/nix-instantiate.html",
|
||||
"sec-nix-prefetch-url": "command-ref/nix-prefetch-url.html",
|
||||
"sec-nix-shell": "command-ref/nix-shell.html",
|
||||
"ssec-nix-shell-shebang": "command-ref/nix-shell.html#use-as-a--interpreter",
|
||||
"nixref-queries": "command-ref/nix-store.html#queries",
|
||||
"opt-add-root": "command-ref/nix-store.html#opt-add-root",
|
||||
"refsec-nix-store-dump": "command-ref/nix-store.html#operation---dump",
|
||||
"refsec-nix-store-export": "command-ref/nix-store.html#operation---export",
|
||||
"refsec-nix-store-import": "command-ref/nix-store.html#operation---import",
|
||||
"refsec-nix-store-query": "command-ref/nix-store.html#operation---query",
|
||||
"refsec-nix-store-verify": "command-ref/nix-store.html#operation---verify",
|
||||
"rsec-nix-store-gc": "command-ref/nix-store.html#operation---gc",
|
||||
"rsec-nix-store-generate-binary-cache-key": "command-ref/nix-store.html#operation---generate-binary-cache-key",
|
||||
"rsec-nix-store-realise": "command-ref/nix-store.html#operation---realise",
|
||||
"rsec-nix-store-serve": "command-ref/nix-store.html#operation---serve",
|
||||
"sec-nix-store": "command-ref/nix-store.html",
|
||||
"opt-I": "command-ref/opt-common.html#opt-I",
|
||||
"opt-attr": "command-ref/opt-common.html#opt-attr",
|
||||
"opt-common": "command-ref/opt-common.html",
|
||||
"opt-cores": "command-ref/opt-common.html#opt-cores",
|
||||
"opt-log-format": "command-ref/opt-common.html#opt-log-format",
|
||||
"opt-max-jobs": "command-ref/opt-common.html#opt-max-jobs",
|
||||
"opt-max-silent-time": "command-ref/opt-common.html#opt-max-silent-time",
|
||||
"opt-timeout": "command-ref/opt-common.html#opt-timeout",
|
||||
"sec-common-options": "command-ref/opt-common.html",
|
||||
"ch-utilities": "command-ref/utilities.html",
|
||||
"chap-hacking": "development/building.html",
|
||||
"adv-attr-allowSubstitutes": "language/advanced-attributes.html#adv-attr-allowSubstitutes",
|
||||
"adv-attr-allowedReferences": "language/advanced-attributes.html#adv-attr-allowedReferences",
|
||||
"adv-attr-allowedRequisites": "language/advanced-attributes.html#adv-attr-allowedRequisites",
|
||||
"adv-attr-disallowedReferences": "language/advanced-attributes.html#adv-attr-disallowedReferences",
|
||||
"adv-attr-disallowedRequisites": "language/advanced-attributes.html#adv-attr-disallowedRequisites",
|
||||
"adv-attr-exportReferencesGraph": "language/advanced-attributes.html#adv-attr-exportReferencesGraph",
|
||||
"adv-attr-impureEnvVars": "language/advanced-attributes.html#adv-attr-impureEnvVars",
|
||||
"adv-attr-outputHash": "language/advanced-attributes.html#adv-attr-outputHash",
|
||||
"adv-attr-outputHashAlgo": "language/advanced-attributes.html#adv-attr-outputHashAlgo",
|
||||
"adv-attr-outputHashMode": "language/advanced-attributes.html#adv-attr-outputHashMode",
|
||||
"adv-attr-passAsFile": "language/advanced-attributes.html#adv-attr-passAsFile",
|
||||
"adv-attr-preferLocalBuild": "language/advanced-attributes.html#adv-attr-preferLocalBuild",
|
||||
"fixed-output-drvs": "language/advanced-attributes.html#adv-attr-outputHash",
|
||||
"sec-advanced-attributes": "language/advanced-attributes.html",
|
||||
"builtin-abort": "language/builtins.html#builtins-abort",
|
||||
"builtin-add": "language/builtins.html#builtins-add",
|
||||
"builtin-all": "language/builtins.html#builtins-all",
|
||||
"builtin-any": "language/builtins.html#builtins-any",
|
||||
"builtin-attrNames": "language/builtins.html#builtins-attrNames",
|
||||
"builtin-attrValues": "language/builtins.html#builtins-attrValues",
|
||||
"builtin-baseNameOf": "language/builtins.html#builtins-baseNameOf",
|
||||
"builtin-bitAnd": "language/builtins.html#builtins-bitAnd",
|
||||
"builtin-bitOr": "language/builtins.html#builtins-bitOr",
|
||||
"builtin-bitXor": "language/builtins.html#builtins-bitXor",
|
||||
"builtin-builtins": "language/builtins.html#builtins-builtins",
|
||||
"builtin-compareVersions": "language/builtins.html#builtins-compareVersions",
|
||||
"builtin-concatLists": "language/builtins.html#builtins-concatLists",
|
||||
"builtin-concatStringsSep": "language/builtins.html#builtins-concatStringsSep",
|
||||
"builtin-currentSystem": "language/builtins.html#builtins-currentSystem",
|
||||
"builtin-deepSeq": "language/builtins.html#builtins-deepSeq",
|
||||
"builtin-derivation": "language/builtins.html#builtins-derivation",
|
||||
"builtin-dirOf": "language/builtins.html#builtins-dirOf",
|
||||
"builtin-div": "language/builtins.html#builtins-div",
|
||||
"builtin-elem": "language/builtins.html#builtins-elem",
|
||||
"builtin-elemAt": "language/builtins.html#builtins-elemAt",
|
||||
"builtin-fetchGit": "language/builtins.html#builtins-fetchGit",
|
||||
"builtin-fetchTarball": "language/builtins.html#builtins-fetchTarball",
|
||||
"builtin-fetchurl": "language/builtins.html#builtins-fetchurl",
|
||||
"builtin-filterSource": "language/builtins.html#builtins-filterSource",
|
||||
"builtin-foldl-prime": "language/builtins.html#builtins-foldl-prime",
|
||||
"builtin-fromJSON": "language/builtins.html#builtins-fromJSON",
|
||||
"builtin-functionArgs": "language/builtins.html#builtins-functionArgs",
|
||||
"builtin-genList": "language/builtins.html#builtins-genList",
|
||||
"builtin-getAttr": "language/builtins.html#builtins-getAttr",
|
||||
"builtin-getEnv": "language/builtins.html#builtins-getEnv",
|
||||
"builtin-hasAttr": "language/builtins.html#builtins-hasAttr",
|
||||
"builtin-hashFile": "language/builtins.html#builtins-hashFile",
|
||||
"builtin-hashString": "language/builtins.html#builtins-hashString",
|
||||
"builtin-head": "language/builtins.html#builtins-head",
|
||||
"builtin-import": "language/builtins.html#builtins-import",
|
||||
"builtin-intersectAttrs": "language/builtins.html#builtins-intersectAttrs",
|
||||
"builtin-isAttrs": "language/builtins.html#builtins-isAttrs",
|
||||
"builtin-isBool": "language/builtins.html#builtins-isBool",
|
||||
"builtin-isFloat": "language/builtins.html#builtins-isFloat",
|
||||
"builtin-isFunction": "language/builtins.html#builtins-isFunction",
|
||||
"builtin-isInt": "language/builtins.html#builtins-isInt",
|
||||
"builtin-isList": "language/builtins.html#builtins-isList",
|
||||
"builtin-isNull": "language/builtins.html#builtins-isNull",
|
||||
"builtin-isString": "language/builtins.html#builtins-isString",
|
||||
"builtin-length": "language/builtins.html#builtins-length",
|
||||
"builtin-lessThan": "language/builtins.html#builtins-lessThan",
|
||||
"builtin-listToAttrs": "language/builtins.html#builtins-listToAttrs",
|
||||
"builtin-map": "language/builtins.html#builtins-map",
|
||||
"builtin-match": "language/builtins.html#builtins-match",
|
||||
"builtin-mul": "language/builtins.html#builtins-mul",
|
||||
"builtin-parseDrvName": "language/builtins.html#builtins-parseDrvName",
|
||||
"builtin-path": "language/builtins.html#builtins-path",
|
||||
"builtin-pathExists": "language/builtins.html#builtins-pathExists",
|
||||
"builtin-placeholder": "language/builtins.html#builtins-placeholder",
|
||||
"builtin-readDir": "language/builtins.html#builtins-readDir",
|
||||
"builtin-readFile": "language/builtins.html#builtins-readFile",
|
||||
"builtin-removeAttrs": "language/builtins.html#builtins-removeAttrs",
|
||||
"builtin-replaceStrings": "language/builtins.html#builtins-replaceStrings",
|
||||
"builtin-seq": "language/builtins.html#builtins-seq",
|
||||
"builtin-sort": "language/builtins.html#builtins-sort",
|
||||
"builtin-split": "language/builtins.html#builtins-split",
|
||||
"builtin-splitVersion": "language/builtins.html#builtins-splitVersion",
|
||||
"builtin-stringLength": "language/builtins.html#builtins-stringLength",
|
||||
"builtin-sub": "language/builtins.html#builtins-sub",
|
||||
"builtin-substring": "language/builtins.html#builtins-substring",
|
||||
"builtin-tail": "language/builtins.html#builtins-tail",
|
||||
"builtin-throw": "language/builtins.html#builtins-throw",
|
||||
"builtin-toFile": "language/builtins.html#builtins-toFile",
|
||||
"builtin-toJSON": "language/builtins.html#builtins-toJSON",
|
||||
"builtin-toPath": "language/builtins.html#builtins-toPath",
|
||||
"builtin-toString": "language/builtins.html#builtins-toString",
|
||||
"builtin-toXML": "language/builtins.html#builtins-toXML",
|
||||
"builtin-trace": "language/builtins.html#builtins-trace",
|
||||
"builtin-tryEval": "language/builtins.html#builtins-tryEval",
|
||||
"builtin-typeOf": "language/builtins.html#builtins-typeOf",
|
||||
"ssec-builtins": "language/builtins.html",
|
||||
"attr-system": "language/derivations.html#attr-system",
|
||||
"ssec-derivation": "language/derivations.html",
|
||||
"ch-expression-language": "language/index.html",
|
||||
"sec-constructs": "language/syntax.html",
|
||||
"sect-let-language": "language/syntax.html#let-expressions",
|
||||
"ss-functions": "language/syntax.html#functions",
|
||||
"sec-language-operators": "language/operators.html",
|
||||
"table-operators": "language/operators.html",
|
||||
"ssec-values": "language/types.html",
|
||||
"gloss-closure": "glossary.html#gloss-closure",
|
||||
"gloss-derivation": "glossary.html#gloss-derivation",
|
||||
"gloss-deriver": "glossary.html#gloss-deriver",
|
||||
"gloss-nar": "glossary.html#gloss-nar",
|
||||
"gloss-output-path": "glossary.html#gloss-output-path",
|
||||
"gloss-profile": "glossary.html#gloss-profile",
|
||||
"gloss-reachable": "glossary.html#gloss-reachable",
|
||||
"gloss-reference": "glossary.html#gloss-reference",
|
||||
"gloss-substitute": "glossary.html#gloss-substitute",
|
||||
"gloss-user-env": "glossary.html#gloss-user-env",
|
||||
"gloss-validity": "glossary.html#gloss-validity",
|
||||
"part-glossary": "glossary.html",
|
||||
"sec-building-source": "installation/building-source.html",
|
||||
"ch-env-variables": "installation/env-variables.html",
|
||||
"sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
|
||||
"sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
|
||||
"sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
|
||||
"chap-installation": "installation/index.html",
|
||||
"ch-installing-binary": "installation/installing-binary.html",
|
||||
"sect-macos-installation": "installation/installing-binary.html#macos-installation",
|
||||
"sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
|
||||
"sect-macos-installation-encrypted-volume": "installation/installing-binary.html#macos-installation",
|
||||
"sect-macos-installation-recommended-notes": "installation/installing-binary.html#macos-installation",
|
||||
"sect-macos-installation-symlink": "installation/installing-binary.html#macos-installation",
|
||||
"sect-multi-user-installation": "installation/installing-binary.html#multi-user-installation",
|
||||
"sect-nix-install-binary-tarball": "installation/installing-binary.html#installing-from-a-binary-tarball",
|
||||
"sect-nix-install-pinned-version-url": "installation/installing-binary.html#installing-a-pinned-nix-version-from-a-url",
|
||||
"sect-single-user-installation": "installation/installing-binary.html#single-user-installation",
|
||||
"ch-installing-source": "installation/installing-source.html",
|
||||
"ssec-multi-user": "installation/multi-user.html",
|
||||
"ch-nix-security": "installation/nix-security.html",
|
||||
"sec-obtaining-source": "installation/obtaining-source.html",
|
||||
"sec-prerequisites-source": "installation/prerequisites-source.html",
|
||||
"sec-single-user": "installation/single-user.html",
|
||||
"ch-supported-platforms": "installation/supported-platforms.html",
|
||||
"ch-upgrading-nix": "installation/upgrading.html",
|
||||
"ch-about-nix": "introduction.html",
|
||||
"chap-introduction": "introduction.html",
|
||||
"ch-basic-package-mgmt": "package-management/basic-package-mgmt.html",
|
||||
"ssec-binary-cache-substituter": "package-management/binary-cache-substituter.html",
|
||||
"sec-channels": "command-ref/nix-channel.html",
|
||||
"ssec-copy-closure": "command-ref/nix-copy-closure.html",
|
||||
"sec-garbage-collection": "package-management/garbage-collection.html",
|
||||
"ssec-gc-roots": "package-management/garbage-collector-roots.html",
|
||||
"chap-package-management": "package-management/index.html",
|
||||
"sec-profiles": "package-management/profiles.html",
|
||||
"ssec-s3-substituter": "store/types/s3-substituter.html",
|
||||
"ssec-s3-substituter-anonymous-reads": "store/types/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
|
||||
"ssec-s3-substituter-authenticated-reads": "store/types/s3-substituter.html#authenticated-reads-to-your-s3-binary-cache",
|
||||
"ssec-s3-substituter-authenticated-writes": "store/types/s3-substituter.html#authenticated-writes-to-your-s3-compatible-binary-cache",
|
||||
"sec-sharing-packages": "package-management/sharing-packages.html",
|
||||
"ssec-ssh-substituter": "package-management/ssh-substituter.html",
|
||||
"chap-quick-start": "quick-start.html",
|
||||
"sec-relnotes": "release-notes/index.html",
|
||||
"ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
|
||||
"ch-relnotes-0.10": "release-notes/rl-0.10.html",
|
||||
"ssec-relnotes-0.11": "release-notes/rl-0.11.html",
|
||||
"ssec-relnotes-0.12": "release-notes/rl-0.12.html",
|
||||
"ssec-relnotes-0.13": "release-notes/rl-0.13.html",
|
||||
"ssec-relnotes-0.14": "release-notes/rl-0.14.html",
|
||||
"ssec-relnotes-0.15": "release-notes/rl-0.15.html",
|
||||
"ssec-relnotes-0.16": "release-notes/rl-0.16.html",
|
||||
"ch-relnotes-0.5": "release-notes/rl-0.5.html",
|
||||
"ch-relnotes-0.6": "release-notes/rl-0.6.html",
|
||||
"ch-relnotes-0.7": "release-notes/rl-0.7.html",
|
||||
"ch-relnotes-0.8.1": "release-notes/rl-0.8.1.html",
|
||||
"ch-relnotes-0.8": "release-notes/rl-0.8.html",
|
||||
"ch-relnotes-0.9.1": "release-notes/rl-0.9.1.html",
|
||||
"ch-relnotes-0.9.2": "release-notes/rl-0.9.2.html",
|
||||
"ch-relnotes-0.9": "release-notes/rl-0.9.html",
|
||||
"ssec-relnotes-1.0": "release-notes/rl-1.0.html",
|
||||
"ssec-relnotes-1.1": "release-notes/rl-1.1.html",
|
||||
"ssec-relnotes-1.10": "release-notes/rl-1.10.html",
|
||||
"ssec-relnotes-1.11.10": "release-notes/rl-1.11.10.html",
|
||||
"ssec-relnotes-1.11": "release-notes/rl-1.11.html",
|
||||
"ssec-relnotes-1.2": "release-notes/rl-1.2.html",
|
||||
"ssec-relnotes-1.3": "release-notes/rl-1.3.html",
|
||||
"ssec-relnotes-1.4": "release-notes/rl-1.4.html",
|
||||
"ssec-relnotes-1.5.1": "release-notes/rl-1.5.1.html",
|
||||
"ssec-relnotes-1.5.2": "release-notes/rl-1.5.2.html",
|
||||
"ssec-relnotes-1.5": "release-notes/rl-1.5.html",
|
||||
"ssec-relnotes-1.6.1": "release-notes/rl-1.6.1.html",
|
||||
"ssec-relnotes-1.6.0": "release-notes/rl-1.6.html",
|
||||
"ssec-relnotes-1.7": "release-notes/rl-1.7.html",
|
||||
"ssec-relnotes-1.8": "release-notes/rl-1.8.html",
|
||||
"ssec-relnotes-1.9": "release-notes/rl-1.9.html",
|
||||
"ssec-relnotes-2.0": "release-notes/rl-2.0.html",
|
||||
"ssec-relnotes-2.1": "release-notes/rl-2.1.html",
|
||||
"ssec-relnotes-2.2": "release-notes/rl-2.2.html",
|
||||
"ssec-relnotes-2.3": "release-notes/rl-2.3.html",
|
||||
},
|
||||
"language/types.html": {
|
||||
"simple-values": "#primitives",
|
||||
"lists": "#list",
|
||||
"strings": "#string",
|
||||
"attribute-sets": "#attribute-set",
|
||||
"type-number": "#type-int",
|
||||
},
|
||||
"language/syntax.html": {
|
||||
"scoping-rules": "scoping.html",
|
||||
"string-literal": "string-literals.html",
|
||||
},
|
||||
"language/derivations.md": {
|
||||
"builder-execution": "store/drv/building.md#builder-execution",
|
||||
},
|
||||
"installation/installing-binary.html": {
|
||||
"linux": "uninstall.html#linux",
|
||||
"macos": "uninstall.html#macos",
|
||||
"uninstalling": "uninstall.html",
|
||||
},
|
||||
"development/building.html": {
|
||||
"nix-with-flakes": "#building-nix-with-flakes",
|
||||
"classic-nix": "#building-nix",
|
||||
"running-tests": "testing.html#running-tests",
|
||||
"unit-tests": "testing.html#unit-tests",
|
||||
"functional-tests": "testing.html#functional-tests",
|
||||
"debugging-failing-functional-tests": "testing.html#debugging-failing-functional-tests",
|
||||
"integration-tests": "testing.html#integration-tests",
|
||||
"installer-tests": "testing.html#installer-tests",
|
||||
"one-time-setup": "testing.html#one-time-setup",
|
||||
"using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
|
||||
"characterization-testing": "testing.html#characterisation-testing-unit",
|
||||
"add-a-release-note": "contributing.html#add-a-release-note",
|
||||
"add-an-entry": "contributing.html#add-an-entry",
|
||||
"build-process": "contributing.html#build-process",
|
||||
"reverting": "contributing.html#reverting",
|
||||
"branches": "contributing.html#branches",
|
||||
},
|
||||
"glossary.html": {
|
||||
"gloss-local-store": "store/types/local-store.html",
|
||||
"package-attribute-set": "#package",
|
||||
"gloss-chroot-store": "store/types/local-store.html",
|
||||
"gloss-content-addressed-derivation": "#gloss-content-addressing-derivation",
|
||||
},
};
// the following code matches the current page's URL against the set of redirects.

@@ -418,8 +424,11 @@ let segments = document.location.pathname.split('/');
let file = segments.pop();

// normalize file name
if (file === '') { file = "index.html"; }
else if (!file.endsWith('.html')) { file = file + '.html'; }
if (file === '') {
file = "index.html";
} else if (!file.endsWith('.html')) {
file = file + '.html';
}

segments.push(file);

@@ -453,8 +462,8 @@ const anchor = document.location.hash.substring(1);

const redirect = redirects[path];
if (redirect) {
const target = redirect[anchor];
if (target) {
document.location.href = target;
}
const target = redirect[anchor];
if (target) {
document.location.href = target;
}
}
@@ -49,15 +49,15 @@
#include <unistd.h>

#ifndef _WIN32
# include <grp.h>
# include <netdb.h>
# include <pwd.h>
# include <sys/resource.h>
# include <sys/select.h>
# include <sys/socket.h>
# include <sys/utsname.h>
# include <sys/wait.h>
# include <termios.h>
# include <grp.h>
# include <netdb.h>
# include <pwd.h>
# include <sys/resource.h>
# include <sys/select.h>
# include <sys/socket.h>
# include <sys/utsname.h>
# include <sys/wait.h>
# include <termios.h>
#endif

#include <nlohmann/json.hpp>
@@ -6,7 +6,7 @@
#include <tuple>
#include <iomanip>
#ifdef __APPLE__
#include <sys/time.h>
# include <sys/time.h>
#endif

#include "nix/store/machines.hh"

@@ -26,8 +26,7 @@
using namespace nix;
using std::cin;

static void handleAlarm(int sig) {
}
static void handleAlarm(int sig) {}

std::string escapeUri(std::string uri)
{
@@ -42,13 +41,15 @@ static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri.render()), slot), true);
}

static bool allSupportedLocally(Store & store, const std::set<std::string>& requiredFeatures) {
static bool allSupportedLocally(Store & store, const std::set<std::string> & requiredFeatures)
{
for (auto & feature : requiredFeatures)
if (!store.systemFeatures.get().count(feature)) return false;
if (!store.systemFeatures.get().count(feature))
return false;
return true;
}

static int main_build_remote(int argc, char * * argv)
static int main_build_remote(int argc, char ** argv)
{
{
logger = makeJSONLogger(getStandardError());

@@ -85,7 +86,7 @@ static int main_build_remote(int argc, char * * argv)
that gets cleared on reboot, but it wouldn't work on macOS. */
auto currentLoadName = "/current-load";
if (auto localStore = store.dynamic_pointer_cast<LocalFSStore>())
currentLoad = std::string { localStore->stateDir } + currentLoadName;
currentLoad = std::string{localStore->stateDir} + currentLoadName;
else
currentLoad = settings.nixStateDir + currentLoadName;
@@ -107,8 +108,11 @@ static int main_build_remote(int argc, char * * argv)

try {
auto s = readString(source);
if (s != "try") return 0;
} catch (EndOfFile &) { return 0; }
if (s != "try")
return 0;
} catch (EndOfFile &) {
return 0;
}

auto amWilling = readInt(source);
auto neededSystem = readString(source);

@@ -117,10 +121,10 @@ static int main_build_remote(int argc, char * * argv)

/* It would be possible to build locally after some builds clear out,
so don't show the warning now: */
bool couldBuildLocally = maxBuildJobs > 0
&& ( neededSystem == settings.thisSystem
|| settings.extraPlatforms.get().count(neededSystem) > 0)
&& allSupportedLocally(*store, requiredFeatures);
bool couldBuildLocally =
maxBuildJobs > 0
&& (neededSystem == settings.thisSystem || settings.extraPlatforms.get().count(neededSystem) > 0)
&& allSupportedLocally(*store, requiredFeatures);
/* It's possible to build this locally right now: */
bool canBuildLocally = amWilling && couldBuildLocally;
@@ -139,11 +143,8 @@ static int main_build_remote(int argc, char * * argv)
for (auto & m : machines) {
debug("considering building on remote machine '%s'", m.storeUri.render());

if (m.enabled &&
m.systemSupported(neededSystem) &&
m.allSupported(requiredFeatures) &&
m.mandatoryMet(requiredFeatures))
{
if (m.enabled && m.systemSupported(neededSystem) && m.allSupported(requiredFeatures)
&& m.mandatoryMet(requiredFeatures)) {
rightType = true;
AutoCloseFD free;
uint64_t load = 0;

@@ -185,8 +186,7 @@ static int main_build_remote(int argc, char * * argv)
if (!bestSlotLock) {
if (rightType && !canBuildLocally)
std::cerr << "# postpone\n";
else
{
else {
// build the hint template.
std::string errorText =
"Failed to find a machine for remote build!\n"

@@ -205,16 +205,11 @@ static int main_build_remote(int argc, char * * argv)
drvstr = "<unknown>";

auto error = HintFmt::fromFormatString(errorText);
error
% drvstr
% neededSystem
% concatStringsSep<StringSet>(", ", requiredFeatures)
error % drvstr % neededSystem % concatStringsSep<StringSet>(", ", requiredFeatures)
% machines.size();

for (auto & m : machines)
error
% concatStringsSep<StringSet>(", ", m.systemTypes)
% m.maxJobs
error % concatStringsSep<StringSet>(", ", m.systemTypes) % m.maxJobs
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
@ -242,9 +237,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||
sshStore->connect();
|
||||
} catch (std::exception & e) {
|
||||
auto msg = chomp(drainFD(5, false));
|
||||
printError("cannot build on '%s': %s%s",
|
||||
storeUri, e.what(),
|
||||
msg.empty() ? "" : ": " + msg);
|
||||
printError("cannot build on '%s': %s%s", storeUri, e.what(), msg.empty() ? "" : ": " + msg);
|
||||
bestMachine->enabled = false;
|
||||
continue;
|
||||
}
|
||||
@ -253,7 +246,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||
}
|
||||
}
|
||||
|
||||
connected:
|
||||
connected:
|
||||
close(5);
|
||||
|
||||
assert(sshStore);
|
||||
@ -265,13 +258,14 @@ connected:
|
||||
|
||||
AutoCloseFD uploadLock;
|
||||
{
|
||||
auto setUpdateLock = [&](auto && fileName){
|
||||
auto setUpdateLock = [&](auto && fileName) {
|
||||
uploadLock = openLockFile(currentLoad + "/" + escapeUri(fileName) + ".upload-lock", true);
|
||||
};
|
||||
try {
|
||||
setUpdateLock(storeUri);
|
||||
} catch (SysError & e) {
|
||||
if (e.errNo != ENAMETOOLONG) throw;
|
||||
if (e.errNo != ENAMETOOLONG)
|
||||
throw;
|
||||
// Try again hashing the store URL so we have a shorter path
|
||||
auto h = hashString(HashAlgorithm::MD5, storeUri);
|
||||
setUpdateLock(h.to_string(HashFormat::Base64, false));
|
||||
@ -315,7 +309,7 @@ connected:
|
||||
//
|
||||
// This condition mirrors that: that code enforces the "rules" outlined there;
|
||||
// we do the best we can given those "rules".
|
||||
if (trustedOrLegacy || drv.type().isCA()) {
|
||||
if (trustedOrLegacy || drv.type().isCA()) {
|
||||
// Hijack the inputs paths of the derivation to include all
|
||||
// the paths that come from the `inputDrvs` set. We don’t do
|
||||
// that for the derivations whose `inputDrvs` is empty
|
||||
@ -330,28 +324,26 @@ connected:
|
||||
optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv);
|
||||
auto & result = *optResult;
|
||||
if (!result.success())
|
||||
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
|
||||
throw Error(
|
||||
"build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
|
||||
} else {
|
||||
copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute);
|
||||
auto res = sshStore->buildPathsWithResults({
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(*drvPath),
|
||||
.outputs = OutputsSpec::All {},
|
||||
}
|
||||
});
|
||||
copyClosure(*store, *sshStore, StorePathSet{*drvPath}, NoRepair, NoCheckSigs, substitute);
|
||||
auto res = sshStore->buildPathsWithResults({DerivedPath::Built{
|
||||
.drvPath = makeConstantStorePathRef(*drvPath),
|
||||
.outputs = OutputsSpec::All{},
|
||||
}});
|
||||
// One path to build should produce exactly one build result
|
||||
assert(res.size() == 1);
|
||||
optResult = std::move(res[0]);
|
||||
}
|
||||
|
||||
|
||||
auto outputHashes = staticOutputHashes(*store, drv);
|
||||
std::set<Realisation> missingRealisations;
|
||||
StorePathSet missingPaths;
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) && !drv.type().hasKnownOutputPaths()) {
|
||||
for (auto & outputName : wantedOutputs) {
|
||||
auto thisOutputHash = outputHashes.at(outputName);
|
||||
auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
|
||||
auto thisOutputId = DrvOutput{thisOutputHash, outputName};
|
||||
if (!store->queryRealisation(thisOutputId)) {
|
||||
debug("missing output %s", outputName);
|
||||
assert(optResult);
|
||||
|
@@ -10,23 +10,13 @@
namespace nix {

// Custom implementation to avoid `ref` ptr equality
GENERATE_CMP_EXT(
,
std::strong_ordering,
SingleBuiltPathBuilt,
*me->drvPath,
me->output);
GENERATE_CMP_EXT(, std::strong_ordering, SingleBuiltPathBuilt, *me->drvPath, me->output);

// Custom implementation to avoid `ref` ptr equality

// TODO no `GENERATE_CMP_EXT` because no `std::set::operator<=>` on
// Darwin, per header.
GENERATE_EQUAL(
,
BuiltPathBuilt ::,
BuiltPathBuilt,
*me->drvPath,
me->outputs);
GENERATE_EQUAL(, BuiltPathBuilt ::, BuiltPathBuilt, *me->drvPath, me->outputs);

StorePath SingleBuiltPath::outPath() const
{
@@ -34,8 +24,8 @@ StorePath SingleBuiltPath::outPath() const
overloaded{
[](const SingleBuiltPath::Opaque & p) { return p.path; },
[](const SingleBuiltPath::Built & b) { return b.output.second; },
}, raw()
);
},
raw());
}

StorePathSet BuiltPath::outPaths() const
@@ -49,13 +39,13 @@ StorePathSet BuiltPath::outPaths() const
res.insert(path);
return res;
},
}, raw()
);
},
raw());
}

SingleDerivedPath::Built SingleBuiltPath::Built::discardOutputPath() const
{
return SingleDerivedPath::Built {
return SingleDerivedPath::Built{
.drvPath = make_ref<SingleDerivedPath>(drvPath->discardOutputPath()),
.output = output.first,
};
@@ -65,14 +55,10 @@ SingleDerivedPath SingleBuiltPath::discardOutputPath() const
{
return std::visit(
overloaded{
[](const SingleBuiltPath::Opaque & p) -> SingleDerivedPath {
return p;
},
[](const SingleBuiltPath::Built & b) -> SingleDerivedPath {
return b.discardOutputPath();
},
}, raw()
);
[](const SingleBuiltPath::Opaque & p) -> SingleDerivedPath { return p; },
[](const SingleBuiltPath::Built & b) -> SingleDerivedPath { return b.discardOutputPath(); },
},
raw());
}

nlohmann::json BuiltPath::Built::toJSON(const StoreDirConfig & store) const
@@ -97,16 +83,12 @@ nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) cons

nlohmann::json SingleBuiltPath::toJSON(const StoreDirConfig & store) const
{
return std::visit([&](const auto & buildable) {
return buildable.toJSON(store);
}, raw());
return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw());
}

nlohmann::json BuiltPath::toJSON(const StoreDirConfig & store) const
{
return std::visit([&](const auto & buildable) {
return buildable.toJSON(store);
}, raw());
return std::visit([&](const auto & buildable) { return buildable.toJSON(store); }, raw());
}

RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
@@ -116,20 +98,18 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
overloaded{
[&](const BuiltPath::Opaque & p) { res.insert(p.path); },
[&](const BuiltPath::Built & p) {
auto drvHashes =
staticOutputHashes(store, store.readDerivation(p.drvPath->outPath()));
for (auto& [outputName, outputPath] : p.outputs) {
if (experimentalFeatureSettings.isEnabled(
Xp::CaDerivations)) {
auto drvHashes = staticOutputHashes(store, store.readDerivation(p.drvPath->outPath()));
for (auto & [outputName, outputPath] : p.outputs) {
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
auto drvOutput = get(drvHashes, outputName);
if (!drvOutput)
throw Error(
"the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
store.printStorePath(p.drvPath->outPath()), outputName);
auto thisRealisation = store.queryRealisation(
DrvOutput{*drvOutput, outputName});
assert(thisRealisation); // We’ve built it, so we must
// have the realisation
store.printStorePath(p.drvPath->outPath()),
outputName);
auto thisRealisation = store.queryRealisation(DrvOutput{*drvOutput, outputName});
assert(thisRealisation); // We’ve built it, so we must
// have the realisation
res.insert(*thisRealisation);
} else {
res.insert(outputPath);

@@ -18,13 +18,15 @@

namespace nix {

namespace fs { using namespace std::filesystem; }
namespace fs {
using namespace std::filesystem;
}

fetchers::Settings fetchSettings;

static GlobalConfig::Register rFetchSettings(&fetchSettings);

EvalSettings evalSettings {
EvalSettings evalSettings{
settings.readOnlyMode,
{
{
@@ -32,10 +34,11 @@ EvalSettings evalSettings {
[](EvalState & state, std::string_view rest) {
experimentalFeatureSettings.require(Xp::Flakes);
// FIXME `parseFlakeRef` should take a `std::string_view`.
auto flakeRef = parseFlakeRef(fetchSettings, std::string { rest }, {}, true, false);
auto flakeRef = parseFlakeRef(fetchSettings, std::string{rest}, {}, true, false);
debug("fetching flake search path element '%s''", rest);
auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store);
auto storePath = nix::fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName());
auto storePath =
nix::fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName());
state.allowPath(storePath);
return state.storePath(storePath);
},
@@ -45,17 +48,14 @@ EvalSettings evalSettings {

static GlobalConfig::Register rEvalSettings(&evalSettings);

flake::Settings flakeSettings;

static GlobalConfig::Register rFlakeSettings(&flakeSettings);

CompatibilitySettings compatibilitySettings {};
CompatibilitySettings compatibilitySettings{};

static GlobalConfig::Register rCompatibilitySettings(&compatibilitySettings);

MixEvalArgs::MixEvalArgs()
{
addFlag({
@@ -63,7 +63,9 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "expr"},
.handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr{expr}}); }},
.handler = {[&](std::string name, std::string expr) {
autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr{expr}});
}},
});

addFlag({
@@ -71,7 +73,9 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the string *string* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "string"},
.handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString{s}}); }},
.handler = {[&](std::string name, std::string s) {
autoArgs.insert_or_assign(name, AutoArg{AutoArgString{s}});
}},
});

addFlag({
@@ -79,7 +83,9 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the contents of file *path* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "path"},
.handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile{path}}); }},
.handler = {[&](std::string name, std::string path) {
autoArgs.insert_or_assign(name, AutoArg{AutoArgFile{path}});
}},
.completer = completePath,
});

@@ -103,18 +109,14 @@ MixEvalArgs::MixEvalArgs()
)",
.category = category,
.labels = {"path"},
.handler = {[&](std::string s) {
lookupPath.elements.emplace_back(LookupPath::Elem::parse(s));
}},
.handler = {[&](std::string s) { lookupPath.elements.emplace_back(LookupPath::Elem::parse(s)); }},
});

addFlag({
.longName = "impure",
.description = "Allow access to mutable paths and repositories.",
.category = category,
.handler = {[&]() {
evalSettings.pureEval = false;
}},
.handler = {[&]() { evalSettings.pureEval = false; }},
});

addFlag({
@@ -126,7 +128,8 @@ MixEvalArgs::MixEvalArgs()
auto from = parseFlakeRef(fetchSettings, _from, fs::current_path().string());
auto to = parseFlakeRef(fetchSettings, _to, fs::current_path().string());
fetchers::Attrs extraAttrs;
if (to.subdir != "") extraAttrs["dir"] = to.subdir;
if (to.subdir != "")
extraAttrs["dir"] = to.subdir;
fetchers::overrideRegistry(from.input, to.input, extraAttrs);
}},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
@@ -137,7 +140,7 @@ MixEvalArgs::MixEvalArgs()
addFlag({
.longName = "eval-store",
.description =
R"(
R"(
The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format)
to use for evaluation, i.e. to store derivations (`.drv` files) and inputs referenced by them.
)",
@@ -152,20 +155,21 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
auto res = state.buildBindings(autoArgs.size());
for (auto & [name, arg] : autoArgs) {
auto v = state.allocValue();
std::visit(overloaded {
[&](const AutoArgExpr & arg) {
state.mkThunk_(*v, state.parseExprFromString(arg.expr, compatibilitySettings.nixShellShebangArgumentsRelativeToScript ? state.rootPath(absPath(getCommandBaseDir())) : state.rootPath(".")));
},
[&](const AutoArgString & arg) {
v->mkString(arg.s);
},
[&](const AutoArgFile & arg) {
v->mkString(readFile(arg.path.string()));
},
[&](const AutoArgStdin & arg) {
v->mkString(readFile(STDIN_FILENO));
}
}, arg);
std::visit(
overloaded{
[&](const AutoArgExpr & arg) {
state.mkThunk_(
*v,
state.parseExprFromString(
arg.expr,
compatibilitySettings.nixShellShebangArgumentsRelativeToScript
? state.rootPath(absPath(getCommandBaseDir()))
: state.rootPath(".")));
},
[&](const AutoArgString & arg) { v->mkString(arg.s); },
[&](const AutoArgFile & arg) { v->mkString(readFile(arg.path.string())); },
[&](const AutoArgStdin & arg) { v->mkString(readFile(STDIN_FILENO)); }},
arg);
res.insert(state.symbols.create(name), v);
}
return res.finish();
@@ -174,10 +178,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
{
if (EvalSettings::isPseudoUrl(s)) {
auto accessor = fetchers::downloadTarball(
state.store,
state.fetchSettings,
EvalSettings::resolvePseudoUrl(s));
auto accessor = fetchers::downloadTarball(state.store, state.fetchSettings, EvalSettings::resolvePseudoUrl(s));
auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy);
return state.storePath(storePath);
}
@@ -186,7 +187,8 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas
experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(fetchSettings, std::string(s.substr(6)), {}, true, false);
auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store);
auto storePath = nix::fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName());
auto storePath =
nix::fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName());
state.allowPath(storePath);
return state.storePath(storePath);
}

@@ -11,11 +11,9 @@ Strings editorFor(const SourcePath & file, uint32_t line)
throw Error("cannot open '%s' in an editor because it has no physical path", file);
auto editor = getEnv("EDITOR").value_or("cat");
auto args = tokenizeString<Strings>(editor);
if (line > 0 && (
editor.find("emacs") != std::string::npos ||
editor.find("nano") != std::string::npos ||
editor.find("vim") != std::string::npos ||
editor.find("kak") != std::string::npos))
if (line > 0
&& (editor.find("emacs") != std::string::npos || editor.find("nano") != std::string::npos
|| editor.find("vim") != std::string::npos || editor.find("kak") != std::string::npos))
args.push_back(fmt("+%d", line));
args.push_back(path->string());
return args;

@@ -8,7 +8,8 @@ namespace nix {

struct SingleBuiltPath;

struct SingleBuiltPathBuilt {
struct SingleBuiltPathBuilt
{
ref<SingleBuiltPath> drvPath;
std::pair<std::string, StorePath> output;

@@ -18,26 +19,25 @@ struct SingleBuiltPathBuilt {
static SingleBuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;

bool operator ==(const SingleBuiltPathBuilt &) const noexcept;
std::strong_ordering operator <=>(const SingleBuiltPathBuilt &) const noexcept;
bool operator==(const SingleBuiltPathBuilt &) const noexcept;
std::strong_ordering operator<=>(const SingleBuiltPathBuilt &) const noexcept;
};

using _SingleBuiltPathRaw = std::variant<
DerivedPathOpaque,
SingleBuiltPathBuilt
>;
using _SingleBuiltPathRaw = std::variant<DerivedPathOpaque, SingleBuiltPathBuilt>;

struct SingleBuiltPath : _SingleBuiltPathRaw {
struct SingleBuiltPath : _SingleBuiltPathRaw
{
using Raw = _SingleBuiltPathRaw;
using Raw::Raw;

using Opaque = DerivedPathOpaque;
using Built = SingleBuiltPathBuilt;

bool operator == (const SingleBuiltPath &) const = default;
auto operator <=> (const SingleBuiltPath &) const = default;
bool operator==(const SingleBuiltPath &) const = default;
auto operator<=>(const SingleBuiltPath &) const = default;

inline const Raw & raw() const {
inline const Raw & raw() const
{
return static_cast<const Raw &>(*this);
}

@@ -51,7 +51,7 @@ struct SingleBuiltPath : _SingleBuiltPathRaw {

static inline ref<SingleBuiltPath> staticDrv(StorePath drvPath)
{
return make_ref<SingleBuiltPath>(SingleBuiltPath::Opaque { drvPath });
return make_ref<SingleBuiltPath>(SingleBuiltPath::Opaque{drvPath});
}

/**
@@ -59,40 +59,40 @@ static inline ref<SingleBuiltPath> staticDrv(StorePath drvPath)
*
* See 'BuiltPath' for more an explanation.
*/
struct BuiltPathBuilt {
struct BuiltPathBuilt
{
ref<SingleBuiltPath> drvPath;
std::map<std::string, StorePath> outputs;

bool operator == (const BuiltPathBuilt &) const noexcept;
bool operator==(const BuiltPathBuilt &) const noexcept;
// TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet.
//std::strong_ordering operator <=> (const BuiltPathBuilt &) const noexcept;
// std::strong_ordering operator <=> (const BuiltPathBuilt &) const noexcept;

std::string to_string(const StoreDirConfig & store) const;
static BuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
};

using _BuiltPathRaw = std::variant<
DerivedPath::Opaque,
BuiltPathBuilt
>;
using _BuiltPathRaw = std::variant<DerivedPath::Opaque, BuiltPathBuilt>;

/**
* A built path. Similar to a DerivedPath, but enriched with the corresponding
* output path(s).
*/
struct BuiltPath : _BuiltPathRaw {
struct BuiltPath : _BuiltPathRaw
{
using Raw = _BuiltPathRaw;
using Raw::Raw;

using Opaque = DerivedPathOpaque;
using Built = BuiltPathBuilt;

bool operator == (const BuiltPath &) const = default;
bool operator==(const BuiltPath &) const = default;
// TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet.
//auto operator <=> (const BuiltPath &) const = default;
// auto operator <=> (const BuiltPath &) const = default;

inline const Raw & raw() const {
inline const Raw & raw() const
{
return static_cast<const Raw &>(*this);
}

@@ -12,7 +12,9 @@ namespace nix {

class Store;

namespace fetchers { struct Settings; }
namespace fetchers {
struct Settings;
}

class EvalState;
struct EvalSettings;
@@ -20,7 +22,9 @@ struct CompatibilitySettings;
class Bindings;
struct SourcePath;

namespace flake { struct Settings; }
namespace flake {
struct Settings;
}

/**
* @todo Get rid of global setttings variables
@@ -55,10 +59,20 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
std::optional<std::string> evalStoreUrl;

private:
struct AutoArgExpr { std::string expr; };
struct AutoArgString { std::string s; };
struct AutoArgFile { std::filesystem::path path; };
struct AutoArgStdin { };
struct AutoArgExpr
{
std::string expr;
};
struct AutoArgString
{
std::string s;
};
struct AutoArgFile
{
std::filesystem::path path;
};
struct AutoArgStdin
{};

using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile, AutoArgStdin>;

@@ -39,7 +39,10 @@ class InstallableAttrPath : public InstallableValue
const std::string & attrPath,
ExtendedOutputsSpec extendedOutputsSpec);

std::string what() const override { return attrPath; };
std::string what() const override
{
return attrPath;
};

std::pair<Value *, PosIdx> toValue(EvalState & state) override;

@@ -11,8 +11,10 @@ struct InstallableDerivedPath : Installable
DerivedPath derivedPath;

InstallableDerivedPath(ref<Store> store, DerivedPath && derivedPath)
: store(store), derivedPath(std::move(derivedPath))
{ }
: store(store)
, derivedPath(std::move(derivedPath))
{
}

std::string what() const override;

@@ -20,10 +22,8 @@ struct InstallableDerivedPath : Installable

std::optional<StorePath> getStorePath() override;

static InstallableDerivedPath parse(
ref<Store> store,
std::string_view prefix,
ExtendedOutputsSpec extendedOutputsSpec);
static InstallableDerivedPath
parse(ref<Store> store, std::string_view prefix, ExtendedOutputsSpec extendedOutputsSpec);
};

}

@@ -18,7 +18,8 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue
/**
* Extra struct to get around C++ designated initializer limitations
*/
struct Flake {
struct Flake
{
FlakeRef originalRef;
FlakeRef lockedRef;
};
@@ -26,8 +27,10 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue
Flake flake;

ExtraPathInfoFlake(Value && v, Flake && f)
: ExtraPathInfoValue(std::move(v)), flake(std::move(f))
{ }
: ExtraPathInfoValue(std::move(v))
, flake(std::move(f))
{
}
};

struct InstallableFlake : InstallableValue
@@ -49,7 +52,10 @@ struct InstallableFlake : InstallableValue
Strings prefixes,
const flake::LockFlags & lockFlags);

std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
std::string what() const override
{
return flakeRef.to_string() + "#" + *attrPaths.begin();
}

std::vector<std::string> getActualAttrPaths();

@@ -61,8 +67,7 @@ struct InstallableFlake : InstallableValue
* Get a cursor to every attrpath in getActualAttrPaths() that
* exists. However if none exists, throw an exception.
*/
std::vector<ref<eval_cache::AttrCursor>>
getCursors(EvalState & state) override;
std::vector<ref<eval_cache::AttrCursor>> getCursors(EvalState & state) override;

std::shared_ptr<flake::LockedFlake> getLockedFlake() const;

@@ -79,11 +84,9 @@ struct InstallableFlake : InstallableValue
*/
static inline FlakeRef defaultNixpkgsFlakeRef()
{
return FlakeRef::fromAttrs(fetchSettings, {{"type","indirect"}, {"id", "nixpkgs"}});
return FlakeRef::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", "nixpkgs"}});
}

ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake);
ref<eval_cache::EvalCache> openEvalCache(EvalState & state, std::shared_ptr<flake::LockedFlake> lockedFlake);

}

@@ -9,7 +9,10 @@ namespace nix {
struct PackageInfo;
struct SourceExprCommand;

namespace eval_cache { class EvalCache; class AttrCursor; }
namespace eval_cache {
class EvalCache;
class AttrCursor;
}

struct App
{
@@ -36,7 +39,8 @@ struct ExtraPathInfoValue : ExtraPathInfo
/**
* Extra struct to get around C++ designated initializer limitations
*/
struct Value {
struct Value
{
/**
* An optional priority for use with "build envs". See Package
*/
@@ -60,7 +64,8 @@ struct ExtraPathInfoValue : ExtraPathInfo

ExtraPathInfoValue(Value && v)
: value(std::move(v))
{ }
{
}

virtual ~ExtraPathInfoValue() = default;
};
@@ -73,9 +78,12 @@ struct InstallableValue : Installable
{
ref<EvalState> state;

InstallableValue(ref<EvalState> state) : state(state) {}
InstallableValue(ref<EvalState> state)
: state(state)
{
}

virtual ~InstallableValue() { }
virtual ~InstallableValue() {}

virtual std::pair<Value *, PosIdx> toValue(EvalState & state) = 0;

@@ -84,15 +92,13 @@ struct InstallableValue : Installable
* However if none exists, throw exception instead of returning
* empty vector.
*/
virtual std::vector<ref<eval_cache::AttrCursor>>
getCursors(EvalState & state);
virtual std::vector<ref<eval_cache::AttrCursor>> getCursors(EvalState & state);

/**
* Get the first and most preferred cursor this Installable could
* refer to, or throw an exception if none exists.
*/
virtual ref<eval_cache::AttrCursor>
getCursor(EvalState & state);
virtual ref<eval_cache::AttrCursor> getCursor(EvalState & state);

UnresolvedApp toApp(EvalState & state);

@@ -115,7 +121,8 @@ protected:
* @result A derived path (with empty info, for now) if the value
* matched the above criteria.
*/
std::optional<DerivedPathWithInfo> trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx);
std::optional<DerivedPathWithInfo>
trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx);
};

}

@@ -112,7 +112,7 @@ typedef std::vector<ref<Installable>> Installables;
*/
struct Installable
{
virtual ~Installable() { }
virtual ~Installable() {}

/**
* What Installable is this?
@@ -168,37 +168,19 @@ struct Installable
BuildMode bMode = bmNormal);

static std::set<StorePath> toStorePathSet(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const Installables & installables);
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, const Installables & installables);

static std::vector<StorePath> toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const Installables & installables);
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, const Installables & installables);

static StorePath toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
ref<Installable> installable);
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, ref<Installable> installable);

static std::set<StorePath> toDerivations(
ref<Store> store,
const Installables & installables,
bool useDeriver = false);
static std::set<StorePath>
toDerivations(ref<Store> store, const Installables & installables, bool useDeriver = false);

static BuiltPaths toBuiltPaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const Installables & installables);
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, const Installables & installables);
};

}

@@ -7,7 +7,7 @@

namespace nix {

typedef std::function<void(int, char * *)> MainFunction;
typedef std::function<void(int, char **)> MainFunction;

struct RegisterLegacyCommand
{
@@ -16,7 +16,8 @@ struct RegisterLegacyCommand

RegisterLegacyCommand(const std::string & name, MainFunction fun)
{
if (!commands) commands = new Commands;
if (!commands)
commands = new Commands;
(*commands)[name] = fun;
}
};

@@ -11,7 +11,8 @@ namespace nix {
namespace detail {
/** Provides the completion hooks for the repl, without exposing its complete
* internals. */
struct ReplCompleterMixin {
struct ReplCompleterMixin
{
virtual StringSet completePrefix(const std::string & prefix) = 0;
};
};
@@ -29,7 +30,7 @@ public:
virtual Guard init(detail::ReplCompleterMixin * repl) = 0;
/** Returns a boolean of whether the interacter got EOF */
virtual bool getLine(std::string & input, ReplPromptType promptType) = 0;
virtual ~ReplInteracter(){};
virtual ~ReplInteracter() {};
};

class ReadlineLikeInteracter : public virtual ReplInteracter

@@ -12,12 +12,12 @@ struct AbstractNixRepl

AbstractNixRepl(ref<EvalState> state)
: state(state)
{ }
{
}

virtual ~AbstractNixRepl()
{ }
virtual ~AbstractNixRepl() {}

typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
typedef std::vector<std::pair<Value *, std::string>> AnnotatedValues;

using RunNix = void(Path program, const Strings & args, const std::optional<std::string> & input);

@@ -33,9 +33,7 @@ struct AbstractNixRepl
std::function<AnnotatedValues()> getValues,
RunNix * runNix = nullptr);

static ReplExitStatus runSimple(
ref<EvalState> evalState,
const ValMap & extraEnv);
static ReplExitStatus runSimple(ref<EvalState> evalState, const ValMap & extraEnv);

virtual void initEnv() = 0;

@@ -35,7 +35,8 @@ InstallableAttrPath::InstallableAttrPath(
, v(allocRootValue(v))
, attrPath(attrPath)
, extendedOutputsSpec(std::move(extendedOutputsSpec))
{ }
{
}

std::pair<Value *, PosIdx> InstallableAttrPath::toValue(EvalState & state)
{
@@ -48,12 +49,9 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
{
auto [v, pos] = toValue(*state);

if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths(
*v,
pos,
fmt("while evaluating the attribute '%s'", attrPath)))
{
return { *derivedPathWithInfo };
if (std::optional derivedPathWithInfo =
trySinglePathToDerivedPaths(*v, pos, fmt("while evaluating the attribute '%s'", attrPath))) {
return {*derivedPathWithInfo};
}

Bindings & autoArgs = *cmd.getAutoArgs(*state);
@@ -70,19 +68,19 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
if (!drvPath)
throw Error("'%s' is not a derivation", what());

auto newOutputs = std::visit(overloaded {
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;
for (auto & output : packageInfo.queryOutputs(false, true))
outputsToInstall.insert(output.first);
if (outputsToInstall.empty())
outputsToInstall.insert("out");
return OutputsSpec::Names { std::move(outputsToInstall) };
auto newOutputs = std::visit(
overloaded{
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;
for (auto & output : packageInfo.queryOutputs(false, true))
outputsToInstall.insert(output.first);
if (outputsToInstall.empty())
outputsToInstall.insert("out");
return OutputsSpec::Names{std::move(outputsToInstall)};
},
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { return e; },
},
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw);
extendedOutputsSpec.raw);

auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs);

@@ -93,11 +91,12 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
DerivedPathsWithInfo res;
for (auto & [drvPath, outputs] : byDrvPath)
res.push_back({
.path = DerivedPath::Built {
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = outputs,
},
.info = make_ref<ExtraPathInfoValue>(ExtraPathInfoValue::Value {
.path =
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = outputs,
},
.info = make_ref<ExtraPathInfoValue>(ExtraPathInfoValue::Value{
.extendedOutputsSpec = outputs,
/* FIXME: reconsider backwards compatibility above
so we can fill in this info. */
@@ -115,8 +114,10 @@ InstallableAttrPath InstallableAttrPath::parse(
ExtendedOutputsSpec extendedOutputsSpec)
{
return {
state, cmd, v,
prefix == "." ? "" : std::string { prefix },
state,
cmd,
v,
prefix == "." ? "" : std::string{prefix},
std::move(extendedOutputsSpec),
};
}

@@ -21,32 +21,32 @@ std::optional<StorePath> InstallableDerivedPath::getStorePath()
return derivedPath.getBaseStorePath();
}

InstallableDerivedPath InstallableDerivedPath::parse(
ref<Store> store,
std::string_view prefix,
ExtendedOutputsSpec extendedOutputsSpec)
InstallableDerivedPath
InstallableDerivedPath::parse(ref<Store> store, std::string_view prefix, ExtendedOutputsSpec extendedOutputsSpec)
{
auto derivedPath = std::visit(overloaded {
// If the user did not use ^, we treat the output more
// liberally: we accept a symlink chain or an actual
// store path.
[&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
auto storePath = store->followLinksToStorePath(prefix);
return DerivedPath::Opaque {
.path = std::move(storePath),
};
auto derivedPath = std::visit(
overloaded{
// If the user did not use ^, we treat the output more
// liberally: we accept a symlink chain or an actual
// store path.
[&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
auto storePath = store->followLinksToStorePath(prefix);
return DerivedPath::Opaque{
.path = std::move(storePath),
};
},
// If the user did use ^, we just do exactly what is written.
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
auto drv = make_ref<SingleDerivedPath>(SingleDerivedPath::parse(*store, prefix));
drvRequireExperiment(*drv);
return DerivedPath::Built{
.drvPath = std::move(drv),
.outputs = outputSpec,
};
},
},
// If the user did use ^, we just do exactly what is written.
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
auto drv = make_ref<SingleDerivedPath>(SingleDerivedPath::parse(*store, prefix));
drvRequireExperiment(*drv);
return DerivedPath::Built {
.drvPath = std::move(drv),
.outputs = outputSpec,
};
},
}, extendedOutputsSpec.raw);
return InstallableDerivedPath {
extendedOutputsSpec.raw);
return InstallableDerivedPath{
store,
std::move(derivedPath),
};

@@ -28,8 +28,8 @@ namespace nix {
std::vector<std::string> InstallableFlake::getActualAttrPaths()
{
std::vector<std::string> res;
if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")){
attrPaths.front().erase(0,1);
if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")) {
attrPaths.front().erase(0, 1);
res.push_back(attrPaths.front());
return res;
}
@@ -47,8 +47,11 @@ static std::string showAttrPaths(const std::vector<std::string> & paths)
{
std::string s;
for (const auto & [n, i] : enumerate(paths)) {
if (n > 0) s += n + 1 == paths.size() ? " or " : ", ";
s += '\''; s += i; s += '\'';
if (n > 0)
s += n + 1 == paths.size() ? " or " : ", ";
s += '\'';
s += i;
s += '\'';
}
return s;
}
@@ -62,12 +65,12 @@ InstallableFlake::InstallableFlake(
Strings attrPaths,
Strings prefixes,
const flake::LockFlags & lockFlags)
: InstallableValue(state),
flakeRef(flakeRef),
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
prefixes(fragment == "" ? Strings{} : prefixes),
extendedOutputsSpec(std::move(extendedOutputsSpec)),
lockFlags(lockFlags)
: InstallableValue(state)
, flakeRef(flakeRef)
, attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment})
, prefixes(fragment == "" ? Strings{} : prefixes)
, extendedOutputsSpec(std::move(extendedOutputsSpec))
, lockFlags(lockFlags)
{
if (cmd && cmd->getAutoArgs(*state)->size())
throw UsageError("'--arg' and '--argstr' are incompatible with flakes");
@@ -87,18 +90,14 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
auto v = attr->forceValue();

if (std::optional derivedPathWithInfo = trySinglePathToDerivedPaths(
v,
noPos,
fmt("while evaluating the flake output attribute '%s'", attrPath)))
{
return { *derivedPathWithInfo };
v, noPos, fmt("while evaluating the flake output attribute '%s'", attrPath))) {
return {*derivedPathWithInfo};
} else {
throw Error(
"expected flake output attribute '%s' to be a derivation or path but found %s: %s",
attrPath,
showType(v),
ValuePrinter(*this->state, v, errorPrintOptions)
);
ValuePrinter(*this->state, v, errorPrintOptions));
}
}

@@ -113,39 +112,40 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
}

return {{
.path = DerivedPath::Built {
.drvPath = makeConstantStorePathRef(std::move(drvPath)),
.outputs = std::visit(overloaded {
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
if (aOutputSpecified->getBool()) {
if (auto aOutputName = attr->maybeGetAttr("outputName"))
outputsToInstall = { aOutputName->getString() };
}
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
for (auto & s : aOutputsToInstall->getListOfStrings())
outputsToInstall.insert(s);
}
.path =
DerivedPath::Built{
.drvPath = makeConstantStorePathRef(std::move(drvPath)),
.outputs = std::visit(
overloaded{
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;
if (auto aOutputSpecified = attr->maybeGetAttr(state->sOutputSpecified)) {
if (aOutputSpecified->getBool()) {
if (auto aOutputName = attr->maybeGetAttr("outputName"))
outputsToInstall = {aOutputName->getString()};
}
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
if (auto aOutputsToInstall = aMeta->maybeGetAttr("outputsToInstall"))
for (auto & s : aOutputsToInstall->getListOfStrings())
outputsToInstall.insert(s);
}

if (outputsToInstall.empty())
outputsToInstall.insert("out");
if (outputsToInstall.empty())
outputsToInstall.insert("out");

return OutputsSpec::Names { std::move(outputsToInstall) };
},
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw),
},
return OutputsSpec::Names{std::move(outputsToInstall)};
},
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec { return e; },
},
extendedOutputsSpec.raw),
},
.info = make_ref<ExtraPathInfoFlake>(
ExtraPathInfoValue::Value {
ExtraPathInfoValue::Value{
.priority = priority,
.attrPath = attrPath,
.extendedOutputsSpec = extendedOutputsSpec,
},
ExtraPathInfoFlake::Flake {
ExtraPathInfoFlake::Flake{
.originalRef = flakeRef,
.lockedRef = getLockedFlake()->flake.lockedRef,
}),
@@ -157,8 +157,7 @@ std::pair<Value *, PosIdx> InstallableFlake::toValue(EvalState & state)
return {&getCursor(state)->forceValue(), noPos};
}

std::vector<ref<eval_cache::AttrCursor>>
InstallableFlake::getCursors(EvalState & state)
std::vector<ref<eval_cache::AttrCursor>> InstallableFlake::getCursors(EvalState & state)
{
auto evalCache = openEvalCache(state, getLockedFlake());

@@ -181,11 +180,7 @@ InstallableFlake::getCursors(EvalState & state)
}

if (res.size() == 0)
throw Error(
suggestions,
"flake '%s' does not provide attribute %s",
flakeRef,
showAttrPaths(attrPaths));
throw Error(suggestions, "flake '%s' does not provide attribute %s", flakeRef, showAttrPaths(attrPaths));

return res;
}
@@ -196,8 +191,8 @@ std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
flake::LockFlags lockFlagsApplyConfig = lockFlags;
// FIXME why this side effect?
lockFlagsApplyConfig.applyNixConfig = true;
_lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(
flakeSettings, *state, flakeRef, lockFlagsApplyConfig));
_lockedFlake =
std::make_shared<flake::LockedFlake>(lockFlake(flakeSettings, *state, flakeRef, lockFlagsApplyConfig));
}
return _lockedFlake;
}

@@ -4,17 +4,14 @@

namespace nix {

std::vector<ref<eval_cache::AttrCursor>>
InstallableValue::getCursors(EvalState & state)
std::vector<ref<eval_cache::AttrCursor>> InstallableValue::getCursors(EvalState & state)
{
auto evalCache =
std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state,
[&]() { return toValue(state).first; });
std::make_shared<nix::eval_cache::EvalCache>(std::nullopt, state, [&]() { return toValue(state).first; });
return {evalCache->getRoot()};
}

ref<eval_cache::AttrCursor>
InstallableValue::getCursor(EvalState & state)
ref<eval_cache::AttrCursor> InstallableValue::getCursor(EvalState & state)
{
/* Although getCursors should return at least one element, in case it doesn't,
bound check to avoid an undefined behavior for vector[0] */
@@ -39,30 +36,32 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
auto castedInstallable = installable.dynamic_pointer_cast<InstallableValue>();
if (!castedInstallable)
throw nonValueInstallable(*installable);
return ref { castedInstallable };
return ref{castedInstallable};
}

std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
std::optional<DerivedPathWithInfo>
InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
{
if (v.type() == nPath) {
auto storePath = fetchToStore(*state->store, v.path(), FetchMode::Copy);
return {{
.path = DerivedPath::Opaque {
.path = std::move(storePath),
},
.path =
DerivedPath::Opaque{
.path = std::move(storePath),
},
.info = make_ref<ExtraPathInfo>(),
}};
}

else if (v.type() == nString) {
return {{
.path = DerivedPath::fromSingle(
state->coerceToSingleDerivedPath(pos, v, errorCtx)),
.path = DerivedPath::fromSingle(state->coerceToSingleDerivedPath(pos, v, errorCtx)),
.info = make_ref<ExtraPathInfo>(),
}};
}

else return std::nullopt;
else
return std::nullopt;
}

}

@@ -31,7 +31,9 @@

namespace nix {

namespace fs { using namespace std::filesystem; }
namespace fs {
using namespace std::filesystem;
}

void completeFlakeInputAttrPath(
AddCompletions & completions,
@@ -63,7 +65,8 @@ MixFlakeOptions::MixFlakeOptions()
.category = category,
.handler = {[&]() {
lockFlags.recreateLockFile = true;
warn("'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead.");
warn(
"'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead.");
}},
});

@@ -160,9 +163,7 @@ MixFlakeOptions::MixFlakeOptions()
.description = "Write the given lock file instead of `flake.lock` within the top-level flake.",
.category = category,
.labels = {"flake-lock-path"},
.handler = {[&](std::string lockFilePath) {
lockFlags.outputLockFilePath = lockFilePath;
}},
.handler = {[&](std::string lockFilePath) { lockFlags.outputLockFilePath = lockFilePath; }},
.completer = completePath,
});

@@ -177,12 +178,12 @@ MixFlakeOptions::MixFlakeOptions()
flakeSettings,
*evalState,
parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir())),
{ .writeLockFile = false });
{.writeLockFile = false});
for (auto & [inputName, input] : flake.lockFile.root->inputs) {
auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes
if (auto input3 = std::dynamic_pointer_cast<const flake::LockedNode>(input2)) {
overrideRegistry(
fetchers::Input::fromAttrs(fetchSettings, {{"type","indirect"}, {"id", inputName}}),
fetchers::Input::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", inputName}}),
input3->lockedRef.input,
{});
}
@@ -211,7 +212,8 @@ SourceExprCommand::SourceExprCommand()

addFlag({
.longName = "expr",
.description = "Interpret [*installables*](@docroot@/command-ref/new-cli/nix.md#installables) as attribute paths relative to the Nix expression *expr*.",
.description =
"Interpret [*installables*](@docroot@/command-ref/new-cli/nix.md#installables) as attribute paths relative to the Nix expression *expr*.",
.category = installablesCategory,
.labels = {"expr"},
.handler = {&expr},
@@ -222,32 +224,26 @@ MixReadOnlyOption::MixReadOnlyOption()
{
addFlag({
.longName = "read-only",
.description =
"Do not instantiate each evaluated derivation. "
"This improves performance, but can cause errors when accessing "
"store paths of derivations during evaluation.",
.description = "Do not instantiate each evaluated derivation. "
"This improves performance, but can cause errors when accessing "
"store paths of derivations during evaluation.",
.handler = {&settings.readOnlyMode, true},
});
}

Strings SourceExprCommand::getDefaultFlakeAttrPaths()
{
return {
"packages." + settings.thisSystem.get() + ".default",
"defaultPackage." + settings.thisSystem.get()
};
return {"packages." + settings.thisSystem.get() + ".default", "defaultPackage." + settings.thisSystem.get()};
}

Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
{
return {
// As a convenience, look for the attribute in
// 'outputs.packages'.
"packages." + settings.thisSystem.get() + ".",
// As a temporary hack until Nixpkgs is properly converted
// to provide a clean 'packages' set, look in 'legacyPackages'.
"legacyPackages." + settings.thisSystem.get() + "."
};
return {// As a convenience, look for the attribute in
// 'outputs.packages'.
"packages." + settings.thisSystem.get() + ".",
// As a temporary hack until Nixpkgs is properly converted
// to provide a clean 'packages' set, look in 'legacyPackages'.
"legacyPackages." + settings.thisSystem.get() + "."};
}

Args::CompleterClosure SourceExprCommand::getCompleteInstallable()
@@ -265,10 +261,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s

evalSettings.pureEval = false;
auto state = getEvalState();
auto e =
state->parseExprFromFile(
resolveExprPath(
lookupFileArg(*state, *file)));
auto e = state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, *file)));

Value root;
state->eval(e, root);
@@ -287,7 +280,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s
}

auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
Value &v1(*v);
Value & v1(*v);
state->forceValue(v1, pos);
Value v2;
state->autoCallFunction(*autoArgs, v1, v2);
@@ -312,7 +305,7 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s
getDefaultFlakeAttrPaths(),
prefix);
}
} catch (EvalError&) {
} catch (EvalError &) {
// Don't want eval errors to mess-up with the completion engine, so let's just swallow them
}
}
@@ -336,7 +329,7 @@ void completeFlakeRefWithFragment(

auto fragment = prefix.substr(hash + 1);
std::string prefixRoot = "";
if (fragment.starts_with(".")){
if (fragment.starts_with(".")) {
fragment = fragment.substr(1);
prefixRoot = ".";
}
@@ -345,13 +338,13 @@ void completeFlakeRefWithFragment(
// TODO: ideally this would use the command base directory instead of assuming ".".
auto flakeRef = parseFlakeRef(fetchSettings, expandTilde(flakeRefS), fs::current_path().string());

auto evalCache = openEvalCache(*evalState,
std::make_shared<flake::LockedFlake>(lockFlake(
flakeSettings, *evalState, flakeRef, lockFlags)));
auto evalCache = openEvalCache(
*evalState,
std::make_shared<flake::LockedFlake>(lockFlake(flakeSettings, *evalState, flakeRef, lockFlags)));

auto root = evalCache->getRoot();

if (prefixRoot == "."){
if (prefixRoot == ".") {
attrPathPrefixes.clear();
}
/* Complete 'fragment' relative to all the
@@ -371,7 +364,8 @@ void completeFlakeRefWithFragment(
}

auto attr = root->findAlongAttrPath(attrPath);
if (!attr) continue;
if (!attr)
continue;

for (auto & attr2 : (*attr)->getAttrs()) {
if (hasPrefix(evalState->symbols[attr2], lastAttr)) {
@@ -379,7 +373,9 @@ void completeFlakeRefWithFragment(
/* Strip the attrpath prefix. */
attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size());
// FIXME: handle names with dots
completions.add(flakeRefS + "#" + prefixRoot + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
completions.add(
flakeRefS + "#" + prefixRoot
+ concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
}
}
}
@@ -389,7 +385,8 @@ void completeFlakeRefWithFragment(
if (fragment.empty()) {
for (auto & attrPath : defaultFlakeAttrPaths) {
auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath));
if (!attr) continue;
if (!attr)
continue;
completions.add(flakeRefS + "#" + prefixRoot);
}
}
@@ -429,14 +426,12 @@ DerivedPathWithInfo Installable::toDerivedPath()
{
auto buildables = toDerivedPaths();
if (buildables.size() != 1)
throw Error("installable '%s' evaluates to %d derivations, where only one is expected", what(), buildables.size());
throw Error(
"installable '%s' evaluates to %d derivations, where only one is expected", what(), buildables.size());
return std::move(buildables[0]);
}

static StorePath getDeriver(
ref<Store> store,
const Installable & i,
const StorePath & drvPath)
static StorePath getDeriver(ref<Store> store, const Installable & i, const StorePath & drvPath)
{
auto derivers = store->queryValidDerivers(drvPath);
if (derivers.empty())
@@ -445,35 +440,35 @@ static StorePath getDeriver(
return *derivers.begin();
}

ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake)
ref<eval_cache::EvalCache> openEvalCache(EvalState & state, std::shared_ptr<flake::LockedFlake> lockedFlake)
{
auto fingerprint = evalSettings.useEvalCache && evalSettings.pureEval
? lockedFlake->getFingerprint(state.store, state.fetchSettings)
: std::nullopt;
auto rootLoader = [&state, lockedFlake]()
{
/* For testing whether the evaluation cache is
complete. */
if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0")
throw Error("not everything is cached, but evaluation is not allowed");
? lockedFlake->getFingerprint(state.store, state.fetchSettings)
: std::nullopt;
auto rootLoader = [&state, lockedFlake]() {
/* For testing whether the evaluation cache is
complete. */
if (getEnv("NIX_ALLOW_EVAL").value_or("1") == "0")
throw Error("not everything is cached, but evaluation is not allowed");

auto vFlake = state.allocValue();
flake::callFlake(state, *lockedFlake, *vFlake);
auto vFlake = state.allocValue();
flake::callFlake(state, *lockedFlake, *vFlake);

state.forceAttrs(*vFlake, noPos, "while parsing cached flake data");
state.forceAttrs(*vFlake, noPos, "while parsing cached flake data");

auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs"));
assert(aOutputs);
auto aOutputs = vFlake->attrs()->get(state.symbols.create("outputs"));
assert(aOutputs);

return aOutputs->value;
};
return aOutputs->value;
};

if (fingerprint) {
auto search = state.evalCaches.find(fingerprint.value());
if (search == state.evalCaches.end()) {
search = state.evalCaches.emplace(fingerprint.value(), make_ref<nix::eval_cache::EvalCache>(fingerprint, state, rootLoader)).first;
search =
state.evalCaches
.emplace(fingerprint.value(), make_ref<nix::eval_cache::EvalCache>(fingerprint, state, rootLoader))
.first;
}
return search->second;
} else {
@@ -481,8 +476,7 @@ ref<eval_cache::EvalCache> openEvalCache(
}
}

Installables SourceExprCommand::parseInstallables(
ref<Store> store, std::vector<std::string> ss)
Installables SourceExprCommand::parseInstallables(ref<Store> store, std::vector<std::string> ss)
{
Installables result;

@@ -491,7 +485,8 @@ Installables SourceExprCommand::parseInstallables(
throw UsageError("'--file' and '--expr' are exclusive");

// FIXME: backward compatibility hack
if (file) evalSettings.pureEval = false;
if (file)
evalSettings.pureEval = false;

auto state = getEvalState();
auto vFile = state->allocValue();
@@ -499,12 +494,10 @@ Installables SourceExprCommand::parseInstallables(
if (file == "-") {
auto e = state->parseStdin();
state->eval(e, *vFile);
}
else if (file) {
} else if (file) {
auto dir = absPath(getCommandBaseDir());
state->evalFile(lookupFileArg(*state, *file, &dir), *vFile);
}
else {
} else {
Path dir = absPath(getCommandBaseDir());
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
state->eval(e, *vFile);
@@ -513,9 +506,8 @@ Installables SourceExprCommand::parseInstallables(
for (auto & s : ss) {
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(s);
result.push_back(
make_ref<InstallableAttrPath>(
InstallableAttrPath::parse(
state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
|
||||
make_ref<InstallableAttrPath>(InstallableAttrPath::parse(
|
||||
state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
|
||||
}
|
||||
|
||||
} else {
|
||||
@ -530,8 +522,9 @@ Installables SourceExprCommand::parseInstallables(
|
||||
|
||||
if (prefix.find('/') != std::string::npos) {
|
||||
try {
|
||||
result.push_back(make_ref<InstallableDerivedPath>(
|
||||
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
|
||||
result.push_back(
|
||||
make_ref<InstallableDerivedPath>(
|
||||
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
|
||||
continue;
|
||||
} catch (BadStorePath &) {
|
||||
} catch (...) {
|
||||
@ -541,9 +534,10 @@ Installables SourceExprCommand::parseInstallables(
|
||||
}
|
||||
|
||||
try {
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(
|
||||
fetchSettings, std::string { prefix }, absPath(getCommandBaseDir()));
|
||||
result.push_back(make_ref<InstallableFlake>(
|
||||
auto [flakeRef, fragment] =
|
||||
parseFlakeRefWithFragment(fetchSettings, std::string{prefix}, absPath(getCommandBaseDir()));
|
||||
result.push_back(
|
||||
make_ref<InstallableFlake>(
|
||||
this,
|
||||
getEvalState(),
|
||||
std::move(flakeRef),
|
||||
@ -564,8 +558,7 @@ Installables SourceExprCommand::parseInstallables(
|
||||
return result;
|
||||
}
|
||||
|
||||
ref<Installable> SourceExprCommand::parseInstallable(
|
||||
ref<Store> store, const std::string & installable)
|
||||
ref<Installable> SourceExprCommand::parseInstallable(ref<Store> store, const std::string & installable)
|
||||
{
|
||||
auto installables = parseInstallables(store, {installable});
|
||||
assert(installables.size() == 1);
|
||||
@ -576,20 +569,18 @@ static SingleBuiltPath getBuiltPath(ref<Store> evalStore, ref<Store> store, cons
|
||||
{
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[&](const SingleDerivedPath::Opaque & bo) -> SingleBuiltPath {
|
||||
return SingleBuiltPath::Opaque { bo.path };
|
||||
},
|
||||
[&](const SingleDerivedPath::Opaque & bo) -> SingleBuiltPath { return SingleBuiltPath::Opaque{bo.path}; },
|
||||
[&](const SingleDerivedPath::Built & bfd) -> SingleBuiltPath {
|
||||
auto drvPath = getBuiltPath(evalStore, store, *bfd.drvPath);
|
||||
// Resolving this instead of `bfd` will yield the same result, but avoid duplicative work.
|
||||
SingleDerivedPath::Built truncatedBfd {
|
||||
SingleDerivedPath::Built truncatedBfd{
|
||||
.drvPath = makeConstantStorePathRef(drvPath.outPath()),
|
||||
.output = bfd.output,
|
||||
};
|
||||
auto outputPath = resolveDerivedPath(*store, truncatedBfd, &*evalStore);
|
||||
return SingleBuiltPath::Built {
|
||||
return SingleBuiltPath::Built{
|
||||
.drvPath = make_ref<SingleBuiltPath>(std::move(drvPath)),
|
||||
.output = { bfd.output, outputPath },
|
||||
.output = {bfd.output, outputPath},
|
||||
};
|
||||
},
|
||||
},
|
||||
@ -597,11 +588,7 @@ static SingleBuiltPath getBuiltPath(ref<Store> evalStore, ref<Store> store, cons
|
||||
}
|
||||
|
||||
std::vector<BuiltPathWithResult> Installable::build(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
const Installables & installables,
|
||||
BuildMode bMode)
|
||||
ref<Store> evalStore, ref<Store> store, Realise mode, const Installables & installables, BuildMode bMode)
|
||||
{
|
||||
std::vector<BuiltPathWithResult> res;
|
||||
for (auto & [_, builtPathWithResult] : build2(evalStore, store, mode, installables, bMode))
|
||||
@ -609,9 +596,7 @@ std::vector<BuiltPathWithResult> Installable::build(
|
||||
return res;
|
||||
}
|
||||
|
||||
static void throwBuildErrors(
|
||||
std::vector<KeyedBuildResult> & buildResults,
|
||||
const Store & store)
|
||||
static void throwBuildErrors(std::vector<KeyedBuildResult> & buildResults, const Store & store)
|
||||
{
|
||||
std::vector<KeyedBuildResult> failed;
|
||||
for (auto & buildResult : buildResults) {
|
||||
@ -628,10 +613,11 @@ static void throwBuildErrors(
|
||||
StringSet failedPaths;
|
||||
for (; failedResult != failed.end(); failedResult++) {
|
||||
if (!failedResult->errorMsg.empty()) {
|
||||
logError(ErrorInfo{
|
||||
.level = lvlError,
|
||||
.msg = failedResult->errorMsg,
|
||||
});
|
||||
logError(
|
||||
ErrorInfo{
|
||||
.level = lvlError,
|
||||
.msg = failedResult->errorMsg,
|
||||
});
|
||||
}
|
||||
failedPaths.insert(failedResult->path.to_string(store));
|
||||
}
|
||||
@ -641,11 +627,7 @@ static void throwBuildErrors(
|
||||
}
|
||||
|
||||
std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build2(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
const Installables & installables,
|
||||
BuildMode bMode)
|
||||
ref<Store> evalStore, ref<Store> store, Realise mode, const Installables & installables, BuildMode bMode)
|
||||
{
|
||||
if (mode == Realise::Nothing)
|
||||
settings.readOnlyMode = true;
|
||||
@ -676,22 +658,25 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
|
||||
|
||||
for (auto & path : pathsToBuild) {
|
||||
for (auto & aux : backmap[path]) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Built {
|
||||
.drvPath = make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
|
||||
.outputs = outputs,
|
||||
},
|
||||
.info = aux.info}});
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
|
||||
res.push_back(
|
||||
{aux.installable,
|
||||
{.path =
|
||||
BuiltPath::Built{
|
||||
.drvPath =
|
||||
make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
|
||||
.outputs = outputs,
|
||||
},
|
||||
.info = aux.info}});
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back({aux.installable, {.path = BuiltPath::Opaque{bo.path}, .info = aux.info}});
|
||||
},
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Opaque { bo.path },
|
||||
.info = aux.info}});
|
||||
},
|
||||
}, path.raw());
|
||||
path.raw());
|
||||
}
|
||||
}
|
||||
|
||||
@ -705,26 +690,30 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
|
||||
throwBuildErrors(buildResults, *store);
|
||||
for (auto & buildResult : buildResults) {
|
||||
for (auto & aux : backmap[buildResult.path]) {
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
std::map<std::string, StorePath> outputs;
|
||||
for (auto & [outputName, realisation] : buildResult.builtOutputs)
|
||||
outputs.emplace(outputName, realisation.outPath);
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Built {
|
||||
.drvPath = make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
|
||||
.outputs = outputs,
|
||||
},
|
||||
.info = aux.info,
|
||||
.result = buildResult}});
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
std::map<std::string, StorePath> outputs;
|
||||
for (auto & [outputName, realisation] : buildResult.builtOutputs)
|
||||
outputs.emplace(outputName, realisation.outPath);
|
||||
res.push_back(
|
||||
{aux.installable,
|
||||
{.path =
|
||||
BuiltPath::Built{
|
||||
.drvPath =
|
||||
make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
|
||||
.outputs = outputs,
|
||||
},
|
||||
.info = aux.info,
|
||||
.result = buildResult}});
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back(
|
||||
{aux.installable,
|
||||
{.path = BuiltPath::Opaque{bo.path}, .info = aux.info, .result = buildResult}});
|
||||
},
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back({aux.installable, {
|
||||
.path = BuiltPath::Opaque { bo.path },
|
||||
.info = aux.info,
|
||||
.result = buildResult}});
|
||||
},
|
||||
}, buildResult.path.raw());
|
||||
buildResult.path.raw());
|
||||
}
|
||||
}
|
||||
|
||||
@ -739,11 +728,7 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
|
||||
}
|
||||
|
||||
BuiltPaths Installable::toBuiltPaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
OperateOn operateOn,
|
||||
const Installables & installables)
|
||||
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, const Installables & installables)
|
||||
{
|
||||
if (operateOn == OperateOn::Output) {
|
||||
BuiltPaths res;
|
||||
@ -762,10 +747,7 @@ BuiltPaths Installable::toBuiltPaths(
|
||||
}
|
||||
|
||||
StorePathSet Installable::toStorePathSet(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
const Installables & installables)
|
||||
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, const Installables & installables)
|
||||
{
|
||||
StorePathSet outPaths;
|
||||
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
|
||||
@ -776,10 +758,7 @@ StorePathSet Installable::toStorePathSet(
|
||||
}
|
||||
|
||||
StorePaths Installable::toStorePaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
const Installables & installables)
|
||||
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, const Installables & installables)
|
||||
{
|
||||
StorePaths outPaths;
|
||||
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
|
||||
@ -790,10 +769,7 @@ StorePaths Installable::toStorePaths(
|
||||
}
|
||||
|
||||
StorePath Installable::toStorePath(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
ref<Installable> installable)
|
||||
ref<Store> evalStore, ref<Store> store, Realise mode, OperateOn operateOn, ref<Installable> installable)
|
||||
{
|
||||
auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable});
|
||||
|
||||
@ -803,28 +779,23 @@ StorePath Installable::toStorePath(
|
||||
return *paths.begin();
|
||||
}
|
||||
|
||||
StorePathSet Installable::toDerivations(
|
||||
ref<Store> store,
|
||||
const Installables & installables,
|
||||
bool useDeriver)
|
||||
StorePathSet Installable::toDerivations(ref<Store> store, const Installables & installables, bool useDeriver)
|
||||
{
|
||||
StorePathSet drvPaths;
|
||||
|
||||
for (const auto & i : installables)
|
||||
for (const auto & b : i->toDerivedPaths())
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
drvPaths.insert(
|
||||
bo.path.isDerivation()
|
||||
? bo.path
|
||||
: useDeriver
|
||||
? getDeriver(store, *i, bo.path)
|
||||
: throw Error("argument '%s' did not evaluate to a derivation", i->what()));
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
drvPaths.insert(
|
||||
bo.path.isDerivation() ? bo.path
|
||||
: useDeriver ? getDeriver(store, *i, bo.path)
|
||||
: throw Error("argument '%s' did not evaluate to a derivation", i->what()));
|
||||
},
|
||||
[&](const DerivedPath::Built & bfd) { drvPaths.insert(resolveDerivedPath(*store, *bfd.drvPath)); },
|
||||
},
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
drvPaths.insert(resolveDerivedPath(*store, *bfd.drvPath));
|
||||
},
|
||||
}, b.path.raw());
|
||||
b.path.raw());
|
||||
|
||||
return drvPaths;
|
||||
}
|
||||
@ -859,10 +830,7 @@ std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
|
||||
std::vector<FlakeRef> res;
|
||||
res.reserve(rawInstallables.size());
|
||||
for (const auto & i : rawInstallables)
|
||||
res.push_back(parseFlakeRefWithFragment(
|
||||
fetchSettings,
|
||||
expandTilde(i),
|
||||
absPath(getCommandBaseDir())).first);
|
||||
res.push_back(parseFlakeRefWithFragment(fetchSettings, expandTilde(i), absPath(getCommandBaseDir())).first);
|
||||
return res;
|
||||
}
|
||||
|
||||
@ -881,12 +849,7 @@ void RawInstallablesCommand::run(ref<Store> store)
|
||||
|
||||
std::vector<FlakeRef> InstallableCommand::getFlakeRefsForCompletion()
|
||||
{
|
||||
return {
|
||||
parseFlakeRefWithFragment(
|
||||
fetchSettings,
|
||||
expandTilde(_installable),
|
||||
absPath(getCommandBaseDir())).first
|
||||
};
|
||||
return {parseFlakeRefWithFragment(fetchSettings, expandTilde(_installable), absPath(getCommandBaseDir())).first};
|
||||
}
|
||||
|
||||
void InstallablesCommand::run(ref<Store> store, std::vector<std::string> && rawInstallables)
@ -18,25 +18,24 @@ static std::string doRenderMarkdownToTerminal(std::string_view markdown)
{
int windowWidth = getWindowSize().second;

#if HAVE_LOWDOWN_1_4
struct lowdown_opts_term opts_term {
# if HAVE_LOWDOWN_1_4
struct lowdown_opts_term opts_term{
.cols = (size_t) std::max(windowWidth - 5, 60),
.hmargin = 0,
.vmargin = 0,
};
#endif
struct lowdown_opts opts
{
# endif
struct lowdown_opts opts{
.type = LOWDOWN_TERM,
#if HAVE_LOWDOWN_1_4
# if HAVE_LOWDOWN_1_4
.term = opts_term,
#endif
# endif
.maxdepth = 20,
#if !HAVE_LOWDOWN_1_4
# if !HAVE_LOWDOWN_1_4
.cols = (size_t) std::max(windowWidth - 5, 60),
.hmargin = 0,
.vmargin = 0,
#endif
# endif
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
.oflags = LOWDOWN_TERM_NOLINK,
};
@ -1,7 +1,6 @@
#include "nix/cmd/misc-store-flags.hh"

namespace nix::flag
{
namespace nix::flag {

static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
{
@ -15,27 +14,23 @@ static void hashFormatCompleter(AddCompletions & completions, size_t index, std:
|
||||
Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf)
|
||||
{
|
||||
assert(*hf == nix::HashFormat::SRI);
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[hf](std::string s) {
|
||||
*hf = parseHashFormat(s);
|
||||
}},
|
||||
.completer = hashFormatCompleter,
|
||||
return Args::Flag{
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[hf](std::string s) { *hf = parseHashFormat(s); }},
|
||||
.completer = hashFormatCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag hashFormatOpt(std::string && longName, std::optional<HashFormat> * ohf)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[ohf](std::string s) {
|
||||
*ohf = std::optional<HashFormat>{parseHashFormat(s)};
|
||||
}},
|
||||
.completer = hashFormatCompleter,
|
||||
return Args::Flag{
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[ohf](std::string s) { *ohf = std::optional<HashFormat>{parseHashFormat(s)}; }},
|
||||
.completer = hashFormatCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
@ -48,34 +43,31 @@ static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::s
|
||||
|
||||
Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`).",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[ha](std::string s) {
|
||||
*ha = parseHashAlgo(s);
|
||||
}},
|
||||
.completer = hashAlgoCompleter,
|
||||
return Args::Flag{
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`).",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[ha](std::string s) { *ha = parseHashAlgo(s); }},
|
||||
.completer = hashAlgoCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag hashAlgoOpt(std::string && longName, std::optional<HashAlgorithm> * oha)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = std::move(longName),
|
||||
.description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[oha](std::string s) {
|
||||
*oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};
|
||||
}},
|
||||
.completer = hashAlgoCompleter,
|
||||
return Args::Flag{
|
||||
.longName = std::move(longName),
|
||||
.description =
|
||||
"Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[oha](std::string s) { *oha = std::optional<HashAlgorithm>{parseHashAlgo(s)}; }},
|
||||
.completer = hashAlgoCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag fileIngestionMethod(FileIngestionMethod * method)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = "mode",
|
||||
return Args::Flag{
|
||||
.longName = "mode",
|
||||
// FIXME indentation carefully made for context, this is messed up.
|
||||
.description = R"(
|
||||
How to compute the hash of the input.
|
||||
@ -92,16 +84,14 @@ Args::Flag fileIngestionMethod(FileIngestionMethod * method)
|
||||
it to the hash function.
|
||||
)",
|
||||
.labels = {"file-ingestion-method"},
|
||||
.handler = {[method](std::string s) {
|
||||
*method = parseFileIngestionMethod(s);
|
||||
}},
|
||||
.handler = {[method](std::string s) { *method = parseFileIngestionMethod(s); }},
|
||||
};
|
||||
}
|
||||
|
||||
Args::Flag contentAddressMethod(ContentAddressMethod * method)
|
||||
{
|
||||
return Args::Flag {
|
||||
.longName = "mode",
|
||||
return Args::Flag{
|
||||
.longName = "mode",
|
||||
// FIXME indentation carefully made for context, this is messed up.
|
||||
.description = R"(
|
||||
How to compute the content-address of the store object.
|
||||
@ -126,9 +116,7 @@ Args::Flag contentAddressMethod(ContentAddressMethod * method)
|
||||
for regular usage prefer `nar` and `flat`.
|
||||
)",
|
||||
.labels = {"content-address-method"},
|
||||
.handler = {[method](std::string s) {
|
||||
*method = ContentAddressMethod::parse(s);
|
||||
}},
|
||||
.handler = {[method](std::string s) { *method = ContentAddressMethod::parse(s); }},
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -3,8 +3,8 @@
|
||||
#include <cstdio>
|
||||
|
||||
#if USE_READLINE
|
||||
#include <readline/history.h>
|
||||
#include <readline/readline.h>
|
||||
# include <readline/history.h>
|
||||
# include <readline/readline.h>
|
||||
#else
|
||||
// editline < 1.15.2 don't wrap their API for C++ usage
|
||||
// (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461).
|
||||
@ -12,7 +12,7 @@
|
||||
// For compatibility with these versions, we wrap the API here
|
||||
// (wrapping multiple times on newer versions is no problem).
|
||||
extern "C" {
|
||||
#include <editline.h>
|
||||
# include <editline.h>
|
||||
}
|
||||
#endif
|
||||
|
||||
@ -183,8 +183,7 @@ bool ReadlineLikeInteracter::getLine(std::string & input, ReplPromptType promptT
|
||||
// editline doesn't echo the input to the output when non-interactive, unlike readline
|
||||
// this results in a different behavior when running tests. The echoing is
|
||||
// quite useful for reading the test output, so we add it here.
|
||||
if (auto e = getEnv("_NIX_TEST_REPL_ECHO"); s && e && *e == "1")
|
||||
{
|
||||
if (auto e = getEnv("_NIX_TEST_REPL_ECHO"); s && e && *e == "1") {
|
||||
#if !USE_READLINE
|
||||
// This is probably not right for multi-line input, but we don't use that
|
||||
// in the characterisation tests, so it's fine.
|
||||
|
@ -54,10 +54,7 @@ enum class ProcessLineResult {
|
||||
PromptAgain,
|
||||
};
|
||||
|
||||
struct NixRepl
|
||||
: AbstractNixRepl
|
||||
, detail::ReplCompleterMixin
|
||||
, gc
|
||||
struct NixRepl : AbstractNixRepl, detail::ReplCompleterMixin, gc
|
||||
{
|
||||
size_t debugTraceIndex;
|
||||
|
||||
@ -76,8 +73,12 @@ struct NixRepl
|
||||
|
||||
std::unique_ptr<ReplInteracter> interacter;
|
||||
|
||||
NixRepl(const LookupPath & lookupPath, nix::ref<Store> store,ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues, RunNix * runNix);
|
||||
NixRepl(
|
||||
const LookupPath & lookupPath,
|
||||
nix::ref<Store> store,
|
||||
ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues,
|
||||
RunNix * runNix);
|
||||
virtual ~NixRepl() = default;
|
||||
|
||||
ReplExitStatus mainLoop() override;
|
||||
@ -97,20 +98,22 @@ struct NixRepl
|
||||
void evalString(std::string s, Value & v);
|
||||
void loadDebugTraceEnv(DebugTrace & dt);
|
||||
|
||||
void printValue(std::ostream & str,
|
||||
Value & v,
|
||||
unsigned int maxDepth = std::numeric_limits<unsigned int>::max())
|
||||
void printValue(std::ostream & str, Value & v, unsigned int maxDepth = std::numeric_limits<unsigned int>::max())
|
||||
{
|
||||
// Hide the progress bar during printing because it might interfere
|
||||
auto suspension = logger->suspend();
|
||||
::nix::printValue(*state, str, v, PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
.derivationPaths = true,
|
||||
.maxDepth = maxDepth,
|
||||
.prettyIndent = 2,
|
||||
.errors = ErrorPrintBehavior::ThrowTopLevel,
|
||||
});
|
||||
::nix::printValue(
|
||||
*state,
|
||||
str,
|
||||
v,
|
||||
PrintOptions{
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
.derivationPaths = true,
|
||||
.maxDepth = maxDepth,
|
||||
.prettyIndent = 2,
|
||||
.errors = ErrorPrintBehavior::ThrowTopLevel,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
@ -118,13 +121,17 @@ std::string removeWhitespace(std::string s)
|
||||
{
|
||||
s = chomp(s);
|
||||
size_t n = s.find_first_not_of(" \n\r\t");
|
||||
if (n != std::string::npos) s = std::string(s, n);
|
||||
if (n != std::string::npos)
|
||||
s = std::string(s, n);
|
||||
return s;
|
||||
}
|
||||
|
||||
|
||||
NixRepl::NixRepl(const LookupPath & lookupPath, nix::ref<Store> store, ref<EvalState> state,
|
||||
std::function<NixRepl::AnnotatedValues()> getValues, RunNix * runNix)
|
||||
NixRepl::NixRepl(
|
||||
const LookupPath & lookupPath,
|
||||
nix::ref<Store> store,
|
||||
ref<EvalState> state,
|
||||
std::function<NixRepl::AnnotatedValues()> getValues,
|
||||
RunNix * runNix)
|
||||
: AbstractNixRepl(state)
|
||||
, debugTraceIndex(0)
|
||||
, getValues(getValues)
|
||||
@ -180,7 +187,8 @@ ReplExitStatus NixRepl::mainLoop()
|
||||
auto suspension = logger->suspend();
|
||||
// When continuing input from previous lines, don't print a prompt, just align to the same
|
||||
// number of chars as the prompt.
|
||||
if (!interacter->getLine(input, input.empty() ? ReplPromptType::ReplPrompt : ReplPromptType::ContinuationPrompt)) {
|
||||
if (!interacter->getLine(
|
||||
input, input.empty() ? ReplPromptType::ReplPrompt : ReplPromptType::ContinuationPrompt)) {
|
||||
// Ctrl-D should exit the debugger.
|
||||
state->debugStop = false;
|
||||
logger->cout("");
|
||||
@ -192,14 +200,14 @@ ReplExitStatus NixRepl::mainLoop()
|
||||
}
|
||||
try {
|
||||
switch (processLine(input)) {
|
||||
case ProcessLineResult::Quit:
|
||||
return ReplExitStatus::QuitAll;
|
||||
case ProcessLineResult::Continue:
|
||||
return ReplExitStatus::Continue;
|
||||
case ProcessLineResult::PromptAgain:
|
||||
break;
|
||||
default:
|
||||
unreachable();
|
||||
case ProcessLineResult::Quit:
|
||||
return ReplExitStatus::QuitAll;
|
||||
case ProcessLineResult::Continue:
|
||||
return ReplExitStatus::Continue;
|
||||
case ProcessLineResult::PromptAgain:
|
||||
break;
|
||||
default:
|
||||
unreachable();
|
||||
}
|
||||
} catch (ParseError & e) {
|
||||
if (e.msg().find("unexpected end of file") != std::string::npos) {
|
||||
@ -207,7 +215,7 @@ ReplExitStatus NixRepl::mainLoop()
|
||||
// input without clearing the input so far.
|
||||
continue;
|
||||
} else {
|
||||
printMsg(lvlError, e.msg());
|
||||
printMsg(lvlError, e.msg());
|
||||
}
|
||||
} catch (EvalError & e) {
|
||||
printMsg(lvlError, e.msg());
|
||||
@ -257,7 +265,8 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
|
||||
/* This is a variable name; look it up in the current scope. */
|
||||
StringSet::iterator i = varNames.lower_bound(cur);
|
||||
while (i != varNames.end()) {
|
||||
if (i->substr(0, cur.size()) != cur) break;
|
||||
if (i->substr(0, cur.size()) != cur)
|
||||
break;
|
||||
completions.insert(prev + *i);
|
||||
i++;
|
||||
}
|
||||
@ -276,11 +285,15 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
|
||||
Expr * e = parseString(expr);
|
||||
Value v;
|
||||
e->eval(*state, *env, v);
|
||||
state->forceAttrs(v, noPos, "while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)");
|
||||
state->forceAttrs(
|
||||
v,
|
||||
noPos,
|
||||
"while evaluating an attrset for the purpose of completion (this error should not be displayed; file an issue?)");
|
||||
|
||||
for (auto & i : *v.attrs()) {
|
||||
std::string_view name = state->symbols[i.name];
|
||||
if (name.substr(0, cur2.size()) != cur2) continue;
|
||||
if (name.substr(0, cur2.size()) != cur2)
|
||||
continue;
|
||||
completions.insert(concatStrings(prev, expr, ".", name));
|
||||
}
|
||||
|
||||
@ -298,24 +311,23 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
|
||||
return completions;
|
||||
}
|
||||
|
||||
|
||||
// FIXME: DRY and match or use the parser
|
||||
static bool isVarName(std::string_view s)
|
||||
{
|
||||
if (s.size() == 0) return false;
|
||||
if (s.size() == 0)
|
||||
return false;
|
||||
char c = s[0];
|
||||
if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false;
|
||||
if ((c >= '0' && c <= '9') || c == '-' || c == '\'')
|
||||
return false;
|
||||
for (auto & i : s)
|
||||
if (!((i >= 'a' && i <= 'z') ||
|
||||
(i >= 'A' && i <= 'Z') ||
|
||||
(i >= '0' && i <= '9') ||
|
||||
i == '_' || i == '-' || i == '\''))
|
||||
if (!((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z') || (i >= '0' && i <= '9') || i == '_' || i == '-'
|
||||
|| i == '\''))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
StorePath NixRepl::getDerivationPath(Value & v) {
|
||||
StorePath NixRepl::getDerivationPath(Value & v)
|
||||
{
|
||||
auto packageInfo = getDerivation(*state, v, false);
|
||||
if (!packageInfo)
|
||||
throw Error("expression does not evaluate to a derivation, so I can't build it");
|
||||
@ -354,52 +366,49 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
if (line[0] == ':') {
|
||||
size_t p = line.find_first_of(" \n\r\t");
|
||||
command = line.substr(0, p);
|
||||
if (p != std::string::npos) arg = removeWhitespace(line.substr(p));
|
||||
if (p != std::string::npos)
|
||||
arg = removeWhitespace(line.substr(p));
|
||||
} else {
|
||||
arg = line;
|
||||
}
|
||||
|
||||
if (command == ":?" || command == ":help") {
|
||||
// FIXME: convert to Markdown, include in the 'nix repl' manpage.
|
||||
std::cout
|
||||
<< "The following commands are available:\n"
|
||||
<< "\n"
|
||||
<< " <expr> Evaluate and print expression\n"
|
||||
<< " <x> = <expr> Bind expression to variable\n"
|
||||
<< " :a, :add <expr> Add attributes from resulting set to scope\n"
|
||||
<< " :b <expr> Build a derivation\n"
|
||||
<< " :bl <expr> Build a derivation, creating GC roots in the\n"
|
||||
<< " working directory\n"
|
||||
<< " :e, :edit <expr> Open package or function in $EDITOR\n"
|
||||
<< " :i <expr> Build derivation, then install result into\n"
|
||||
<< " current profile\n"
|
||||
<< " :l, :load <path> Load Nix expression and add it to scope\n"
|
||||
<< " :lf, :load-flake <ref> Load Nix flake and add it to scope\n"
|
||||
<< " :p, :print <expr> Evaluate and print expression recursively\n"
|
||||
<< " Strings are printed directly, without escaping.\n"
|
||||
<< " :q, :quit Exit nix-repl\n"
|
||||
<< " :r, :reload Reload all files\n"
|
||||
<< " :sh <expr> Build dependencies of derivation, then start\n"
|
||||
<< " nix-shell\n"
|
||||
<< " :t <expr> Describe result of evaluation\n"
|
||||
<< " :u <expr> Build derivation, then start nix-shell\n"
|
||||
<< " :doc <expr> Show documentation of a builtin function\n"
|
||||
<< " :log <expr> Show logs for a derivation\n"
|
||||
<< " :te, :trace-enable [bool] Enable, disable or toggle showing traces for\n"
|
||||
<< " errors\n"
|
||||
<< " :?, :help Brings up this help menu\n"
|
||||
;
|
||||
std::cout << "The following commands are available:\n"
|
||||
<< "\n"
|
||||
<< " <expr> Evaluate and print expression\n"
|
||||
<< " <x> = <expr> Bind expression to variable\n"
|
||||
<< " :a, :add <expr> Add attributes from resulting set to scope\n"
|
||||
<< " :b <expr> Build a derivation\n"
|
||||
<< " :bl <expr> Build a derivation, creating GC roots in the\n"
|
||||
<< " working directory\n"
|
||||
<< " :e, :edit <expr> Open package or function in $EDITOR\n"
|
||||
<< " :i <expr> Build derivation, then install result into\n"
|
||||
<< " current profile\n"
|
||||
<< " :l, :load <path> Load Nix expression and add it to scope\n"
|
||||
<< " :lf, :load-flake <ref> Load Nix flake and add it to scope\n"
|
||||
<< " :p, :print <expr> Evaluate and print expression recursively\n"
|
||||
<< " Strings are printed directly, without escaping.\n"
|
||||
<< " :q, :quit Exit nix-repl\n"
|
||||
<< " :r, :reload Reload all files\n"
|
||||
<< " :sh <expr> Build dependencies of derivation, then start\n"
|
||||
<< " nix-shell\n"
|
||||
<< " :t <expr> Describe result of evaluation\n"
|
||||
<< " :u <expr> Build derivation, then start nix-shell\n"
|
||||
<< " :doc <expr> Show documentation of a builtin function\n"
|
||||
<< " :log <expr> Show logs for a derivation\n"
|
||||
<< " :te, :trace-enable [bool] Enable, disable or toggle showing traces for\n"
|
||||
<< " errors\n"
|
||||
<< " :?, :help Brings up this help menu\n";
|
||||
if (state->debugRepl) {
|
||||
std::cout
|
||||
<< "\n"
|
||||
<< " Debug mode commands\n"
|
||||
<< " :env Show env stack\n"
|
||||
<< " :bt, :backtrace Show trace stack\n"
|
||||
<< " :st Show current trace\n"
|
||||
<< " :st <idx> Change to another trace in the stack\n"
|
||||
<< " :c, :continue Go until end of program, exception, or builtins.break\n"
|
||||
<< " :s, :step Go one step\n"
|
||||
;
|
||||
std::cout << "\n"
|
||||
<< " Debug mode commands\n"
|
||||
<< " :env Show env stack\n"
|
||||
<< " :bt, :backtrace Show trace stack\n"
|
||||
<< " :st Show current trace\n"
|
||||
<< " :st <idx> Change to another trace in the stack\n"
|
||||
<< " :c, :continue Go until end of program, exception, or builtins.break\n"
|
||||
<< " :s, :step Go one step\n";
|
||||
}
|
||||
|
||||
}
|
||||
@ -424,17 +433,18 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
try {
|
||||
// change the DebugTrace index.
|
||||
debugTraceIndex = stoi(arg);
|
||||
} catch (...) { }
|
||||
} catch (...) {
|
||||
}
|
||||
|
||||
for (const auto & [idx, i] : enumerate(state->debugTraces)) {
|
||||
if (idx == debugTraceIndex) {
|
||||
std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": ";
|
||||
showDebugTrace(std::cout, state->positions, i);
|
||||
std::cout << std::endl;
|
||||
printEnvBindings(*state, i.expr, i.env);
|
||||
loadDebugTraceEnv(i);
|
||||
break;
|
||||
}
|
||||
if (idx == debugTraceIndex) {
|
||||
std::cout << "\n" << ANSI_BLUE << idx << ANSI_NORMAL << ": ";
|
||||
showDebugTrace(std::cout, state->positions, i);
|
||||
std::cout << std::endl;
|
||||
printEnvBindings(*state, i.expr, i.env);
|
||||
loadDebugTraceEnv(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -474,7 +484,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
Value v;
|
||||
evalString(arg, v);
|
||||
|
||||
const auto [path, line] = [&] () -> std::pair<SourcePath, uint32_t> {
|
||||
const auto [path, line] = [&]() -> std::pair<SourcePath, uint32_t> {
|
||||
if (v.type() == nPath || v.type() == nString) {
|
||||
NixStringContext context;
|
||||
auto path = state->coerceToPath(noPos, v, context, "while evaluating the filename to edit");
|
||||
@ -498,7 +508,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
|
||||
// runProgram redirects stdout to a StringSink,
|
||||
// using runProgram2 to allow editors to display their UI
|
||||
runProgram2(RunOptions { .program = editor, .lookupPath = true, .args = args , .isInteractive = true });
|
||||
runProgram2(RunOptions{.program = editor, .lookupPath = true, .args = args, .isInteractive = true});
|
||||
|
||||
// Reload right after exiting the editor
|
||||
state->resetFileCache();
|
||||
@ -529,9 +539,9 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
|
||||
if (command == ":b" || command == ":bl") {
|
||||
state->store->buildPaths({
|
||||
DerivedPath::Built {
|
||||
DerivedPath::Built{
|
||||
.drvPath = makeConstantStorePathRef(drvPath),
|
||||
.outputs = OutputsSpec::All { },
|
||||
.outputs = OutputsSpec::All{},
|
||||
},
|
||||
});
|
||||
auto drv = state->store->readDerivation(drvPath);
|
||||
@ -550,9 +560,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
runNix("nix-env", {"-i", drvPathRaw});
|
||||
} else if (command == ":log") {
|
||||
settings.readOnlyMode = true;
|
||||
Finally roModeReset([&]() {
|
||||
settings.readOnlyMode = false;
|
||||
});
|
||||
Finally roModeReset([&]() { settings.readOnlyMode = false; });
|
||||
auto subs = getDefaultSubstituters();
|
||||
|
||||
subs.push_front(state->store);
|
||||
@ -575,7 +583,8 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!foundLog) throw Error("build log of '%s' is not available", drvPathRaw);
|
||||
if (!foundLog)
|
||||
throw Error("build log of '%s' is not available", drvPathRaw);
|
||||
} else {
|
||||
runNix("nix-shell", {drvPathRaw});
|
||||
}
|
||||
@ -638,9 +647,8 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
for (auto & arg : args)
|
||||
arg = "*" + arg + "*";
|
||||
|
||||
markdown +=
|
||||
"**Synopsis:** `builtins." + (std::string) (*doc->name) + "` "
|
||||
+ concatStringsSep(" ", args) + "\n\n";
|
||||
markdown += "**Synopsis:** `builtins." + (std::string) (*doc->name) + "` " + concatStringsSep(" ", args)
|
||||
+ "\n\n";
|
||||
}
|
||||
|
||||
markdown += stripIndentation(doc->doc);
|
||||
@ -681,11 +689,8 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
||||
else {
|
||||
size_t p = line.find('=');
|
||||
std::string name;
|
||||
if (p != std::string::npos &&
|
||||
p < line.size() &&
|
||||
line[p + 1] != '=' &&
|
||||
isVarName(name = removeWhitespace(line.substr(0, p))))
|
||||
{
|
||||
if (p != std::string::npos && p < line.size() && line[p + 1] != '='
|
||||
&& isVarName(name = removeWhitespace(line.substr(0, p)))) {
|
||||
Expr * e = parseString(line.substr(p + 1));
|
||||
Value & v(*state->allocValue());
|
||||
v.mkThunk(env, e);
|
||||
@ -730,9 +735,13 @@ void NixRepl::loadFlake(const std::string & flakeRefS)
|
||||
|
||||
Value v;
|
||||
|
||||
flake::callFlake(*state,
|
||||
flake::lockFlake(flakeSettings, *state, flakeRef,
|
||||
flake::LockFlags {
|
||||
flake::callFlake(
|
||||
*state,
|
||||
flake::lockFlake(
|
||||
flakeSettings,
|
||||
*state,
|
||||
flakeRef,
|
||||
flake::LockFlags{
|
||||
.updateLockFile = false,
|
||||
.useRegistries = !evalSettings.pureEval,
|
||||
.allowUnlocked = !evalSettings.pureEval,
|
||||
@ -741,7 +750,6 @@ void NixRepl::loadFlake(const std::string & flakeRefS)
|
||||
addAttrsToScope(v);
|
||||
}
|
||||
|
||||
|
||||
void NixRepl::initEnv()
|
||||
{
|
||||
env = &state->allocEnv(envSize);
|
||||
@ -754,7 +762,6 @@ void NixRepl::initEnv()
|
||||
varNames.emplace(state->symbols[i.first]);
|
||||
}
|
||||
|
||||
|
||||
void NixRepl::reloadFiles()
|
||||
{
|
||||
initEnv();
|
||||
@ -762,7 +769,6 @@ void NixRepl::reloadFiles()
|
||||
loadFiles();
|
||||
}
|
||||
|
||||
|
||||
void NixRepl::loadFiles()
|
||||
{
|
||||
Strings old = loadedFiles;
|
||||
@ -779,10 +785,12 @@ void NixRepl::loadFiles()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void NixRepl::addAttrsToScope(Value & attrs)
|
||||
{
|
||||
state->forceAttrs(attrs, [&]() { return attrs.determinePos(noPos); }, "while evaluating an attribute set to be merged in the global scope");
|
||||
state->forceAttrs(
|
||||
attrs,
|
||||
[&]() { return attrs.determinePos(noPos); },
|
||||
"while evaluating an attribute set to be merged in the global scope");
|
||||
if (displ + attrs.attrs()->size() >= envSize)
|
||||
throw Error("environment full; cannot add more variables");
|
||||
|
||||
@ -796,7 +804,6 @@ void NixRepl::addAttrsToScope(Value & attrs)
|
||||
notice("Added %1% variables.", attrs.attrs()->size());
|
||||
}
|
||||
|
||||
|
||||
void NixRepl::addVarToScope(const Symbol name, Value & v)
|
||||
{
|
||||
if (displ >= envSize)
|
||||
@ -809,13 +816,11 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)
|
||||
varNames.emplace(state->symbols[name]);
|
||||
}
|
||||
|
||||
|
||||
Expr * NixRepl::parseString(std::string s)
|
||||
{
|
||||
return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
void NixRepl::evalString(std::string s, Value & v)
|
||||
{
|
||||
Expr * e = parseString(s);
|
||||
@ -823,46 +828,39 @@ void NixRepl::evalString(std::string s, Value & v)
|
||||
state->forceValue(v, v.determinePos(noPos));
|
||||
}
|
||||
|
||||
|
||||
void NixRepl::runNix(Path program, const Strings & args, const std::optional<std::string> & input)
|
||||
{
|
||||
if (runNixPtr)
|
||||
(*runNixPtr)(program, args, input);
|
||||
else
|
||||
throw Error("Cannot run '%s' because no method of calling the Nix CLI was provided. This is a configuration problem pertaining to how this program was built. See Nix 2.25 release notes", program);
|
||||
throw Error(
|
||||
"Cannot run '%s' because no method of calling the Nix CLI was provided. This is a configuration problem pertaining to how this program was built. See Nix 2.25 release notes",
|
||||
program);
|
||||
}
|
||||
|
||||
|
||||
std::unique_ptr<AbstractNixRepl> AbstractNixRepl::create(
|
||||
const LookupPath & lookupPath, nix::ref<Store> store, ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues, RunNix * runNix)
|
||||
const LookupPath & lookupPath,
|
||||
nix::ref<Store> store,
|
||||
ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues,
|
||||
RunNix * runNix)
|
||||
{
|
||||
return std::make_unique<NixRepl>(
|
||||
lookupPath,
|
||||
std::move(store),
|
||||
state,
|
||||
getValues,
|
||||
runNix
|
||||
);
|
||||
return std::make_unique<NixRepl>(lookupPath, std::move(store), state, getValues, runNix);
|
||||
}
|
||||
|
||||
|
||||
ReplExitStatus AbstractNixRepl::runSimple(
|
||||
ref<EvalState> evalState,
|
||||
const ValMap & extraEnv)
|
||||
ReplExitStatus AbstractNixRepl::runSimple(ref<EvalState> evalState, const ValMap & extraEnv)
|
||||
{
|
||||
auto getValues = [&]()->NixRepl::AnnotatedValues{
|
||||
auto getValues = [&]() -> NixRepl::AnnotatedValues {
|
||||
NixRepl::AnnotatedValues values;
|
||||
return values;
|
||||
};
|
||||
LookupPath lookupPath = {};
|
||||
auto repl = std::make_unique<NixRepl>(
|
||||
lookupPath,
|
||||
openStore(),
|
||||
evalState,
|
||||
getValues,
|
||||
/*runNix=*/nullptr
|
||||
);
|
||||
lookupPath,
|
||||
openStore(),
|
||||
evalState,
|
||||
getValues,
|
||||
/*runNix=*/nullptr);
|
||||
|
||||
repl->initEnv();
|
||||
|
||||
|
@ -31,13 +31,11 @@
|
||||
* @param init Function that takes a T* and returns the initializer for T
|
||||
* @return Pointer to allocated and initialized object
|
||||
*/
|
||||
template <typename T, typename F>
|
||||
template<typename T, typename F>
|
||||
static T * unsafe_new_with_self(F && init)
|
||||
{
|
||||
// Allocate
|
||||
void * p = ::operator new(
|
||||
sizeof(T),
|
||||
static_cast<std::align_val_t>(alignof(T)));
|
||||
void * p = ::operator new(sizeof(T), static_cast<std::align_val_t>(alignof(T)));
|
||||
// Initialize with placement new
|
||||
return new (p) T(init(static_cast<T *>(p)));
|
||||
}
|
||||
@ -86,12 +84,13 @@ nix_err nix_value_call(nix_c_context * context, EvalState * state, Value * fn, n
|
||||
NIXC_CATCH_ERRS
|
||||
}
|
||||
|
||||
nix_err nix_value_call_multi(nix_c_context * context, EvalState * state, nix_value * fn, size_t nargs, nix_value ** args, nix_value * value)
|
||||
nix_err nix_value_call_multi(
|
||||
nix_c_context * context, EvalState * state, nix_value * fn, size_t nargs, nix_value ** args, nix_value * value)
|
||||
{
|
||||
if (context)
|
||||
context->last_err_code = NIX_OK;
|
||||
try {
|
||||
state->state.callFunction(fn->value, {(nix::Value * *) args, nargs}, value->value, nix::noPos);
|
||||
state->state.callFunction(fn->value, {(nix::Value **) args, nargs}, value->value, nix::noPos);
|
||||
state->state.forceValue(value->value, nix::noPos);
|
||||
}
|
||||
NIXC_CATCH_ERRS
|
||||
@ -152,7 +151,8 @@ nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_buil
|
||||
NIXC_CATCH_ERRS
|
||||
}
|
||||
|
||||
nix_err nix_eval_state_builder_set_lookup_path(nix_c_context * context, nix_eval_state_builder * builder, const char ** lookupPath_c)
|
||||
nix_err nix_eval_state_builder_set_lookup_path(
|
||||
nix_c_context * context, nix_eval_state_builder * builder, const char ** lookupPath_c)
|
||||
{
|
||||
if (context)
|
||||
context->last_err_code = NIX_OK;
|
||||
@ -175,11 +175,7 @@ EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder
|
||||
return EvalState{
|
||||
.fetchSettings = std::move(builder->fetchSettings),
|
||||
.settings = std::move(builder->settings),
|
||||
.state = nix::EvalState(
|
||||
builder->lookupPath,
|
||||
builder->store,
|
||||
self->fetchSettings,
|
||||
self->settings),
|
||||
.state = nix::EvalState(builder->lookupPath, builder->store, self->fetchSettings, self->settings),
|
||||
};
|
||||
});
|
||||
}
|
||||
@ -195,11 +191,10 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath_c
|
||||
if (nix_eval_state_builder_load(context, builder) != NIX_OK)
|
||||
return nullptr;
|
||||
|
||||
if (nix_eval_state_builder_set_lookup_path(context, builder, lookupPath_c)
|
||||
!= NIX_OK)
|
||||
if (nix_eval_state_builder_set_lookup_path(context, builder, lookupPath_c) != NIX_OK)
|
||||
return nullptr;
|
||||
|
||||
auto *state = nix_eval_state_build(context, builder);
|
||||
auto * state = nix_eval_state_build(context, builder);
|
||||
nix_eval_state_builder_free(builder);
|
||||
return state;
|
||||
}
|
||||
@ -274,11 +269,11 @@ nix_err nix_gc_decref(nix_c_context * context, const void *)
|
||||
void nix_gc_now() {}
|
||||
#endif
|
||||
|
||||
nix_err nix_value_incref(nix_c_context * context, nix_value *x)
|
||||
nix_err nix_value_incref(nix_c_context * context, nix_value * x)
|
||||
{
|
||||
return nix_gc_incref(context, (const void *) x);
|
||||
}
|
||||
nix_err nix_value_decref(nix_c_context * context, nix_value *x)
|
||||
nix_err nix_value_decref(nix_c_context * context, nix_value * x)
|
||||
{
|
||||
return nix_gc_decref(context, (const void *) x);
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ class NixCExternalValue : public nix::ExternalValueBase
|
||||
public:
|
||||
NixCExternalValue(NixCExternalValueDesc & desc, void * v)
|
||||
: desc(desc)
|
||||
, v(v){};
|
||||
, v(v) {};
|
||||
void * get_ptr()
|
||||
{
|
||||
return v;
|
||||
@ -155,11 +155,17 @@ public:
|
||||
}
|
||||
nix_string_context ctx{context};
|
||||
desc.printValueAsXML(
|
||||
v, (EvalState *) &state, strict, location, &doc, &ctx, &drvsSeen,
|
||||
v,
|
||||
(EvalState *) &state,
|
||||
strict,
|
||||
location,
|
||||
&doc,
|
||||
&ctx,
|
||||
&drvsSeen,
|
||||
*reinterpret_cast<const uint32_t *>(&pos));
|
||||
}
|
||||
|
||||
virtual ~NixCExternalValue() override{};
|
||||
virtual ~NixCExternalValue() override {};
|
||||
};
|
||||
|
||||
ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v)
|
||||
|
@ -16,141 +16,158 @@
|
||||
#include "nix/store/tests/libstore.hh"
|
||||
|
||||
namespace nix {
|
||||
class LibExprTest : public LibStoreTest {
|
||||
public:
|
||||
static void SetUpTestSuite() {
|
||||
LibStoreTest::SetUpTestSuite();
|
||||
initGC();
|
||||
}
|
||||
|
||||
protected:
|
||||
LibExprTest()
|
||||
: LibStoreTest()
|
||||
, state({}, store, fetchSettings, evalSettings, nullptr)
|
||||
{
|
||||
evalSettings.nixPath = {};
|
||||
}
|
||||
Value eval(std::string input, bool forceValue = true) {
|
||||
Value v;
|
||||
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
|
||||
assert(e);
|
||||
state.eval(e, v);
|
||||
if (forceValue)
|
||||
state.forceValue(v, noPos);
|
||||
return v;
|
||||
}
|
||||
|
||||
Value * maybeThunk(std::string input, bool forceValue = true) {
|
||||
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
|
||||
assert(e);
|
||||
return e->maybeThunk(state, state.baseEnv);
|
||||
}
|
||||
|
||||
Symbol createSymbol(const char * value) {
|
||||
return state.symbols.create(value);
|
||||
}
|
||||
|
||||
bool readOnlyMode = true;
|
||||
fetchers::Settings fetchSettings{};
|
||||
EvalSettings evalSettings{readOnlyMode};
|
||||
EvalState state;
|
||||
};
|
||||
|
||||
MATCHER(IsListType, "") {
|
||||
return arg != nList;
|
||||
class LibExprTest : public LibStoreTest
|
||||
{
|
||||
public:
|
||||
static void SetUpTestSuite()
|
||||
{
|
||||
LibStoreTest::SetUpTestSuite();
|
||||
initGC();
|
||||
}
|
||||
|
||||
MATCHER(IsList, "") {
|
||||
return arg.type() == nList;
|
||||
protected:
|
||||
LibExprTest()
|
||||
: LibStoreTest()
|
||||
, state({}, store, fetchSettings, evalSettings, nullptr)
|
||||
{
|
||||
evalSettings.nixPath = {};
|
||||
}
|
||||
Value eval(std::string input, bool forceValue = true)
|
||||
{
|
||||
Value v;
|
||||
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
|
||||
assert(e);
|
||||
state.eval(e, v);
|
||||
if (forceValue)
|
||||
state.forceValue(v, noPos);
|
||||
return v;
|
||||
}
|
||||
|
||||
MATCHER(IsString, "") {
|
||||
return arg.type() == nString;
|
||||
Value * maybeThunk(std::string input, bool forceValue = true)
|
||||
{
|
||||
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
|
||||
assert(e);
|
||||
return e->maybeThunk(state, state.baseEnv);
|
||||
}
|
||||
|
||||
MATCHER(IsNull, "") {
|
||||
return arg.type() == nNull;
|
||||
Symbol createSymbol(const char * value)
|
||||
{
|
||||
return state.symbols.create(value);
|
||||
}
|
||||
|
||||
MATCHER(IsThunk, "") {
|
||||
return arg.type() == nThunk;
|
||||
}
|
||||
bool readOnlyMode = true;
|
||||
fetchers::Settings fetchSettings{};
|
||||
EvalSettings evalSettings{readOnlyMode};
|
||||
EvalState state;
|
||||
};
|
||||
|
||||
MATCHER(IsAttrs, "") {
|
||||
return arg.type() == nAttrs;
|
||||
}
|
||||
MATCHER(IsListType, "")
|
||||
{
|
||||
return arg != nList;
|
||||
}
|
||||
|
||||
MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) {
|
||||
if (arg.type() != nString) {
|
||||
MATCHER(IsList, "")
|
||||
{
|
||||
return arg.type() == nList;
|
||||
}
|
||||
|
||||
MATCHER(IsString, "")
|
||||
{
|
||||
return arg.type() == nString;
|
||||
}
|
||||
|
||||
MATCHER(IsNull, "")
|
||||
{
|
||||
return arg.type() == nNull;
|
||||
}
|
||||
|
||||
MATCHER(IsThunk, "")
|
||||
{
|
||||
return arg.type() == nThunk;
|
||||
}
|
||||
|
||||
MATCHER(IsAttrs, "")
|
||||
{
|
||||
return arg.type() == nAttrs;
|
||||
}
|
||||
|
||||
MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s))
|
||||
{
|
||||
if (arg.type() != nString) {
|
||||
return false;
|
||||
}
|
||||
return std::string_view(arg.c_str()) == s;
|
||||
}
|
||||
|
||||
MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v))
|
||||
{
|
||||
if (arg.type() != nInt) {
|
||||
return false;
|
||||
}
|
||||
return arg.integer().value == v;
|
||||
}
|
||||
|
||||
MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v))
|
||||
{
|
||||
if (arg.type() != nFloat) {
|
||||
return false;
|
||||
}
|
||||
return arg.fpoint() == v;
|
||||
}
|
||||
|
||||
MATCHER(IsTrue, "")
|
||||
{
|
||||
if (arg.type() != nBool) {
|
||||
return false;
|
||||
}
|
||||
return arg.boolean() == true;
|
||||
}
|
||||
|
||||
MATCHER(IsFalse, "")
|
||||
{
|
||||
if (arg.type() != nBool) {
|
||||
return false;
|
||||
}
|
||||
return arg.boolean() == false;
|
||||
}
|
||||
|
||||
MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p))
|
||||
{
|
||||
if (arg.type() != nPath) {
|
||||
*result_listener << "Expected a path got " << arg.type();
|
||||
return false;
|
||||
} else {
|
||||
auto path = arg.path();
|
||||
if (path.path != CanonPath(p)) {
|
||||
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path;
|
||||
return false;
|
||||
}
|
||||
return std::string_view(arg.c_str()) == s;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) {
|
||||
if (arg.type() != nInt) {
|
||||
return false;
|
||||
}
|
||||
return arg.integer().value == v;
|
||||
MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n))
|
||||
{
|
||||
if (arg.type() != nList) {
|
||||
*result_listener << "Expected list got " << arg.type();
|
||||
return false;
|
||||
} else if (arg.listSize() != (size_t) n) {
|
||||
*result_listener << "Expected as list of size " << n << " got " << arg.listSize();
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) {
|
||||
if (arg.type() != nFloat) {
|
||||
return false;
|
||||
}
|
||||
return arg.fpoint() == v;
|
||||
MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n))
|
||||
{
|
||||
if (arg.type() != nAttrs) {
|
||||
*result_listener << "Expected set got " << arg.type();
|
||||
return false;
|
||||
} else if (arg.attrs()->size() != (size_t) n) {
|
||||
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs()->size();
|
||||
return false;
|
||||
}
|
||||
|
||||
MATCHER(IsTrue, "") {
|
||||
if (arg.type() != nBool) {
|
||||
return false;
|
||||
}
|
||||
return arg.boolean() == true;
|
||||
}
|
||||
|
||||
MATCHER(IsFalse, "") {
|
||||
if (arg.type() != nBool) {
|
||||
return false;
|
||||
}
|
||||
return arg.boolean() == false;
|
||||
}
|
||||
|
||||
MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
|
||||
if (arg.type() != nPath) {
|
||||
*result_listener << "Expected a path got " << arg.type();
|
||||
return false;
|
||||
} else {
|
||||
auto path = arg.path();
|
||||
if (path.path != CanonPath(p)) {
|
||||
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) {
|
||||
if (arg.type() != nList) {
|
||||
*result_listener << "Expected list got " << arg.type();
|
||||
return false;
|
||||
} else if (arg.listSize() != (size_t)n) {
|
||||
*result_listener << "Expected as list of size " << n << " got " << arg.listSize();
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
|
||||
if (arg.type() != nAttrs) {
|
||||
*result_listener << "Expected set got " << arg.type();
|
||||
return false;
|
||||
} else if (arg.attrs()->size() != (size_t) n) {
|
||||
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs()->size();
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
} /* namespace nix */
|
||||
|
@ -9,22 +9,26 @@ namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
template<>
|
||||
struct Arbitrary<NixStringContextElem::Opaque> {
|
||||
struct Arbitrary<NixStringContextElem::Opaque>
|
||||
{
|
||||
static Gen<NixStringContextElem::Opaque> arbitrary();
|
||||
};
|
||||
|
||||
template<>
|
||||
struct Arbitrary<NixStringContextElem::Built> {
|
||||
struct Arbitrary<NixStringContextElem::Built>
|
||||
{
|
||||
static Gen<NixStringContextElem::Built> arbitrary();
|
||||
};
|
||||
|
||||
template<>
|
||||
struct Arbitrary<NixStringContextElem::DrvDeep> {
|
||||
struct Arbitrary<NixStringContextElem::DrvDeep>
|
||||
{
|
||||
static Gen<NixStringContextElem::DrvDeep> arbitrary();
|
||||
};
|
||||
|
||||
template<>
|
||||
struct Arbitrary<NixStringContextElem> {
|
||||
struct Arbitrary<NixStringContextElem>
|
||||
{
|
||||
static Gen<NixStringContextElem> arbitrary();
|
||||
};
|
||||
|
||||
|
@@ -8,36 +8,30 @@
namespace nix {

// Testing of trivial expressions
class DerivedPathExpressionTest : public LibExprTest {};
class DerivedPathExpressionTest : public LibExprTest
{};

// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is
// no a real fixture.
//
// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args
TEST_F(DerivedPathExpressionTest, force_init)
{
}
TEST_F(DerivedPathExpressionTest, force_init) {}

#ifndef COVERAGE

RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_opaque_path_round_trip,
(const SingleDerivedPath::Opaque & o))
RC_GTEST_FIXTURE_PROP(DerivedPathExpressionTest, prop_opaque_path_round_trip, (const SingleDerivedPath::Opaque & o))
{
auto * v = state.allocValue();
state.mkStorePathString(o.path, *v);
auto d = state.coerceToSingleDerivedPath(noPos, *v, "");
RC_ASSERT(SingleDerivedPath { o } == d);
RC_ASSERT(SingleDerivedPath{o} == d);
}

// TODO use DerivedPath::Built for parameter once it supports a single output
// path only.

RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_derived_path_built_placeholder_round_trip,
(const SingleDerivedPath::Built & b))
DerivedPathExpressionTest, prop_derived_path_built_placeholder_round_trip, (const SingleDerivedPath::Built & b))
{
/**
* We set these in tests rather than the regular globals so we don't have
@@ -49,7 +43,7 @@ RC_GTEST_FIXTURE_PROP(
auto * v = state.allocValue();
state.mkOutputString(*v, b, std::nullopt, mockXpSettings);
auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "", mockXpSettings);
RC_ASSERT(SingleDerivedPath { b } == d);
RC_ASSERT(SingleDerivedPath{b} == d);
}

RC_GTEST_FIXTURE_PROP(
@@ -63,7 +57,7 @@ RC_GTEST_FIXTURE_PROP(
auto * v = state.allocValue();
state.mkOutputString(*v, b, outPath, mockXpSettings);
auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "", mockXpSettings);
RC_ASSERT(SingleDerivedPath { b } == d);
RC_ASSERT(SingleDerivedPath{b} == d);
}

#endif

File diff suppressed because it is too large
@@ -6,7 +6,8 @@

namespace nix {

TEST(nix_isAllowedURI, http_example_com) {
TEST(nix_isAllowedURI, http_example_com)
{
Strings allowed;
allowed.push_back("http://example.com");

@@ -20,7 +21,8 @@ TEST(nix_isAllowedURI, http_example_com) {
ASSERT_FALSE(isAllowedURI("http://example.org/foo", allowed));
}

TEST(nix_isAllowedURI, http_example_com_foo) {
TEST(nix_isAllowedURI, http_example_com_foo)
{
Strings allowed;
allowed.push_back("http://example.com/foo");

@@ -34,7 +36,8 @@ TEST(nix_isAllowedURI, http_example_com_foo) {
// ASSERT_TRUE(isAllowedURI("http://example.com/foo?ok=1", allowed));
}

TEST(nix_isAllowedURI, http) {
TEST(nix_isAllowedURI, http)
{
Strings allowed;
allowed.push_back("http://");

@@ -48,7 +51,8 @@ TEST(nix_isAllowedURI, http) {
ASSERT_FALSE(isAllowedURI("http:foo", allowed));
}

TEST(nix_isAllowedURI, https) {
TEST(nix_isAllowedURI, https)
{
Strings allowed;
allowed.push_back("https://");

@@ -58,7 +62,8 @@ TEST(nix_isAllowedURI, https) {
ASSERT_FALSE(isAllowedURI("http://example.com/https:", allowed));
}

TEST(nix_isAllowedURI, absolute_path) {
TEST(nix_isAllowedURI, absolute_path)
{
Strings allowed;
allowed.push_back("/var/evil"); // bad idea

@@ -76,7 +81,8 @@ TEST(nix_isAllowedURI, absolute_path) {
ASSERT_FALSE(isAllowedURI("http://example.com//var/evil/foo", allowed));
}

TEST(nix_isAllowedURI, file_url) {
TEST(nix_isAllowedURI, file_url)
{
Strings allowed;
allowed.push_back("file:///var/evil"); // bad idea

@@ -103,7 +109,8 @@ TEST(nix_isAllowedURI, file_url) {
ASSERT_FALSE(isAllowedURI("file://", allowed));
}

TEST(nix_isAllowedURI, github_all) {
TEST(nix_isAllowedURI, github_all)
{
Strings allowed;
allowed.push_back("github:");
ASSERT_TRUE(isAllowedURI("github:", allowed));
@@ -117,7 +124,8 @@ TEST(nix_isAllowedURI, github_all) {
ASSERT_FALSE(isAllowedURI("github", allowed));
}

TEST(nix_isAllowedURI, github_org) {
TEST(nix_isAllowedURI, github_org)
{
Strings allowed;
allowed.push_back("github:foo");
ASSERT_FALSE(isAllowedURI("github:", allowed));
@@ -130,7 +138,8 @@ TEST(nix_isAllowedURI, github_org) {
ASSERT_FALSE(isAllowedURI("file:///github:foo/bar/archive/master.tar.gz", allowed));
}

TEST(nix_isAllowedURI, non_scheme_colon) {
TEST(nix_isAllowedURI, non_scheme_colon)
{
Strings allowed;
allowed.push_back("https://foo/bar:");
ASSERT_TRUE(isAllowedURI("https://foo/bar:", allowed));
@@ -138,16 +147,19 @@ TEST(nix_isAllowedURI, non_scheme_colon) {
ASSERT_FALSE(isAllowedURI("https://foo/bar:baz", allowed));
}

class EvalStateTest : public LibExprTest {};
class EvalStateTest : public LibExprTest
{};

TEST_F(EvalStateTest, getBuiltins_ok) {
TEST_F(EvalStateTest, getBuiltins_ok)
{
auto evaled = maybeThunk("builtins");
auto & builtins = state.getBuiltins();
ASSERT_TRUE(builtins.type() == nAttrs);
ASSERT_EQ(evaled, &builtins);
}

TEST_F(EvalStateTest, getBuiltin_ok) {
TEST_F(EvalStateTest, getBuiltin_ok)
{
auto & builtin = state.getBuiltin("toString");
ASSERT_TRUE(builtin.type() == nFunction);
// FIXME
@@ -157,7 +169,8 @@ TEST_F(EvalStateTest, getBuiltin_ok) {
ASSERT_EQ(state.forceBool(builtin2, noPos, "in unit test"), true);
}

TEST_F(EvalStateTest, getBuiltin_fail) {
TEST_F(EvalStateTest, getBuiltin_fail)
{
ASSERT_THROW(state.getBuiltin("nonexistent"), EvalError);
}

@@ -4,65 +4,75 @@
namespace nix {
// Testing the conversion to JSON

class JSONValueTest : public LibExprTest {
protected:
std::string getJSONValue(Value& value) {
std::stringstream ss;
NixStringContext ps;
printValueAsJSON(state, true, value, noPos, ss, ps);
return ss.str();
}
};

TEST_F(JSONValueTest, null) {
Value v;
v.mkNull();
ASSERT_EQ(getJSONValue(v), "null");
class JSONValueTest : public LibExprTest
{
protected:
std::string getJSONValue(Value & value)
{
std::stringstream ss;
NixStringContext ps;
printValueAsJSON(state, true, value, noPos, ss, ps);
return ss.str();
}
};

TEST_F(JSONValueTest, BoolFalse) {
Value v;
v.mkBool(false);
ASSERT_EQ(getJSONValue(v),"false");
}
TEST_F(JSONValueTest, null)
{
Value v;
v.mkNull();
ASSERT_EQ(getJSONValue(v), "null");
}

TEST_F(JSONValueTest, BoolTrue) {
Value v;
v.mkBool(true);
ASSERT_EQ(getJSONValue(v), "true");
}
TEST_F(JSONValueTest, BoolFalse)
{
Value v;
v.mkBool(false);
ASSERT_EQ(getJSONValue(v), "false");
}

TEST_F(JSONValueTest, IntPositive) {
Value v;
v.mkInt(100);
ASSERT_EQ(getJSONValue(v), "100");
}
TEST_F(JSONValueTest, BoolTrue)
{
Value v;
v.mkBool(true);
ASSERT_EQ(getJSONValue(v), "true");
}

TEST_F(JSONValueTest, IntNegative) {
Value v;
v.mkInt(-100);
ASSERT_EQ(getJSONValue(v), "-100");
}
TEST_F(JSONValueTest, IntPositive)
{
Value v;
v.mkInt(100);
ASSERT_EQ(getJSONValue(v), "100");
}

TEST_F(JSONValueTest, String) {
Value v;
v.mkString("test");
ASSERT_EQ(getJSONValue(v), "\"test\"");
}
TEST_F(JSONValueTest, IntNegative)
{
Value v;
v.mkInt(-100);
ASSERT_EQ(getJSONValue(v), "-100");
}

TEST_F(JSONValueTest, StringQuotes) {
Value v;
TEST_F(JSONValueTest, String)
{
Value v;
v.mkString("test");
ASSERT_EQ(getJSONValue(v), "\"test\"");
}

v.mkString("test\"");
ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
}
TEST_F(JSONValueTest, StringQuotes)
{
Value v;

// The dummy store doesn't support writing files. Fails with this exception message:
// C++ exception with description "error: operation 'addToStoreFromDump' is
// not supported by store 'dummy'" thrown in the test body.
TEST_F(JSONValueTest, DISABLED_Path) {
Value v;
v.mkPath(state.rootPath(CanonPath("/test")));
ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
}
v.mkString("test\"");
ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
}

// The dummy store doesn't support writing files. Fails with this exception message:
// C++ exception with description "error: operation 'addToStoreFromDump' is
// not supported by store 'dummy'" thrown in the test body.
TEST_F(JSONValueTest, DISABLED_Path)
{
Value v;
v.mkPath(state.rootPath(CanonPath("/test")));
ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
}
} /* namespace nix */

@@ -5,7 +5,8 @@

using namespace nix;

int main (int argc, char **argv) {
int main(int argc, char ** argv)
{
if (argc > 1 && std::string_view(argv[1]) == "__build-remote") {
printError("test-build-remote: not supported in libexpr unit tests");
return 1;
@@ -14,25 +15,26 @@ int main (int argc, char **argv) {
// Disable build hook. We won't be testing remote builds in these unit tests. If we do, fix the above build hook.
settings.buildHook = {};

#ifdef __linux__ // should match the conditional around sandboxBuildDir declaration.
#ifdef __linux__ // should match the conditional around sandboxBuildDir declaration.

// When building and testing nix within the host's Nix sandbox, our store dir will be located in the host's sandboxBuildDir, e.g.:
// Host
// When building and testing nix within the host's Nix sandbox, our store dir will be located in the host's
// sandboxBuildDir, e.g.: Host
// storeDir = /nix/store
// sandboxBuildDir = /build
// This process
// storeDir = /build/foo/bar/store
// sandboxBuildDir = /build
// However, we have a rule that the store dir must not be inside the storeDir, so we need to pick a different sandboxBuildDir.
// However, we have a rule that the store dir must not be inside the storeDir, so we need to pick a different
// sandboxBuildDir.
settings.sandboxBuildDir = "/test-build-dir-instead-of-usual-build-dir";
#endif
#endif

#ifdef __APPLE__
#ifdef __APPLE__
// Avoid this error, when already running in a sandbox:
// sandbox-exec: sandbox_apply: Operation not permitted
settings.sandboxMode = smDisabled;
setEnv("_NIX_TEST_NO_SANDBOX", "1");
#endif
#endif

// For pipe operator tests in trivial.cc
experimentalFeatureSettings.set("experimental-features", "pipe-operators");

File diff suppressed because it is too large
@ -5,86 +5,98 @@
|
||||
|
||||
namespace nix {
|
||||
|
||||
TEST(LookupPathElem, parse_justPath) {
|
||||
TEST(LookupPathElem, parse_justPath)
|
||||
{
|
||||
ASSERT_EQ(
|
||||
LookupPath::Elem::parse("foo"),
|
||||
(LookupPath::Elem {
|
||||
.prefix = LookupPath::Prefix { .s = "" },
|
||||
.path = LookupPath::Path { .s = "foo" },
|
||||
(LookupPath::Elem{
|
||||
.prefix = LookupPath::Prefix{.s = ""},
|
||||
.path = LookupPath::Path{.s = "foo"},
|
||||
}));
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, parse_emptyPrefix) {
|
||||
TEST(LookupPathElem, parse_emptyPrefix)
|
||||
{
|
||||
ASSERT_EQ(
|
||||
LookupPath::Elem::parse("=foo"),
|
||||
(LookupPath::Elem {
|
||||
.prefix = LookupPath::Prefix { .s = "" },
|
||||
.path = LookupPath::Path { .s = "foo" },
|
||||
(LookupPath::Elem{
|
||||
.prefix = LookupPath::Prefix{.s = ""},
|
||||
.path = LookupPath::Path{.s = "foo"},
|
||||
}));
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, parse_oneEq) {
|
||||
TEST(LookupPathElem, parse_oneEq)
|
||||
{
|
||||
ASSERT_EQ(
|
||||
LookupPath::Elem::parse("foo=bar"),
|
||||
(LookupPath::Elem {
|
||||
.prefix = LookupPath::Prefix { .s = "foo" },
|
||||
.path = LookupPath::Path { .s = "bar" },
|
||||
(LookupPath::Elem{
|
||||
.prefix = LookupPath::Prefix{.s = "foo"},
|
||||
.path = LookupPath::Path{.s = "bar"},
|
||||
}));
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, parse_twoEqs) {
|
||||
TEST(LookupPathElem, parse_twoEqs)
|
||||
{
|
||||
ASSERT_EQ(
|
||||
LookupPath::Elem::parse("foo=bar=baz"),
|
||||
(LookupPath::Elem {
|
||||
.prefix = LookupPath::Prefix { .s = "foo" },
|
||||
.path = LookupPath::Path { .s = "bar=baz" },
|
||||
(LookupPath::Elem{
|
||||
.prefix = LookupPath::Prefix{.s = "foo"},
|
||||
.path = LookupPath::Path{.s = "bar=baz"},
|
||||
}));
|
||||
}
|
||||
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_justPath) {
|
||||
LookupPath::Prefix prefix { .s = "" };
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional { "any/thing" });
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_justPath)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = ""};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional{"any/thing"});
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix1) {
|
||||
LookupPath::Prefix prefix { .s = "foo" };
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix1)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "foo"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX"), std::nullopt);
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix2) {
|
||||
LookupPath::Prefix prefix { .s = "foo" };
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_misleadingPrefix2)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "foo"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX/bar"), std::nullopt);
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_partialPrefix) {
|
||||
LookupPath::Prefix prefix { .s = "fooX" };
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_partialPrefix)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "fooX"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::nullopt);
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_exactPrefix) {
|
||||
LookupPath::Prefix prefix { .s = "foo" };
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional { "" });
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_exactPrefix)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "foo"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional{""});
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_multiKey) {
|
||||
LookupPath::Prefix prefix { .s = "foo/bar" };
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "baz" });
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_multiKey)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "foo/bar"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional{"baz"});
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_trailingSlash) {
|
||||
LookupPath::Prefix prefix { .s = "foo" };
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional { "" });
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_trailingSlash)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "foo"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional{""});
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_trailingDoubleSlash) {
|
||||
LookupPath::Prefix prefix { .s = "foo" };
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional { "/" });
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_trailingDoubleSlash)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "foo"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional{"/"});
|
||||
}
|
||||
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_trailingPath) {
|
||||
LookupPath::Prefix prefix { .s = "foo" };
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "bar/baz" });
|
||||
TEST(LookupPathElem, suffixIfPotentialMatch_trailingPath)
|
||||
{
|
||||
LookupPath::Prefix prefix{.s = "foo"};
|
||||
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional{"bar/baz"});
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -1,181 +1,202 @@
|
||||
#include "nix/expr/tests/libexpr.hh"
|
||||
|
||||
namespace nix {
|
||||
// Testing of trivial expressions
|
||||
class TrivialExpressionTest : public LibExprTest {};
|
||||
// Testing of trivial expressions
|
||||
class TrivialExpressionTest : public LibExprTest
|
||||
{};
|
||||
|
||||
TEST_F(TrivialExpressionTest, true) {
|
||||
auto v = eval("true");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, true)
|
||||
{
|
||||
auto v = eval("true");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, false) {
|
||||
auto v = eval("false");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, false)
|
||||
{
|
||||
auto v = eval("false");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, null) {
|
||||
auto v = eval("null");
|
||||
ASSERT_THAT(v, IsNull());
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, null)
|
||||
{
|
||||
auto v = eval("null");
|
||||
ASSERT_THAT(v, IsNull());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, 1) {
|
||||
auto v = eval("1");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, 1)
|
||||
{
|
||||
auto v = eval("1");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, 1plus1) {
|
||||
auto v = eval("1+1");
|
||||
ASSERT_THAT(v, IsIntEq(2));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, 1plus1)
|
||||
{
|
||||
auto v = eval("1+1");
|
||||
ASSERT_THAT(v, IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, minus1) {
|
||||
auto v = eval("-1");
|
||||
ASSERT_THAT(v, IsIntEq(-1));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, minus1)
|
||||
{
|
||||
auto v = eval("-1");
|
||||
ASSERT_THAT(v, IsIntEq(-1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, 1minus1) {
|
||||
auto v = eval("1-1");
|
||||
ASSERT_THAT(v, IsIntEq(0));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, 1minus1)
|
||||
{
|
||||
auto v = eval("1-1");
|
||||
ASSERT_THAT(v, IsIntEq(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, lambdaAdd) {
|
||||
auto v = eval("let add = a: b: a + b; in add 1 2");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, lambdaAdd)
|
||||
{
|
||||
auto v = eval("let add = a: b: a + b; in add 1 2");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, list) {
|
||||
auto v = eval("[]");
|
||||
ASSERT_THAT(v, IsListOfSize(0));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, list)
|
||||
{
|
||||
auto v = eval("[]");
|
||||
ASSERT_THAT(v, IsListOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, attrs) {
|
||||
auto v = eval("{}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, attrs)
|
||||
{
|
||||
auto v = eval("{}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, float) {
|
||||
auto v = eval("1.234");
|
||||
ASSERT_THAT(v, IsFloatEq(1.234));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, float)
|
||||
{
|
||||
auto v = eval("1.234");
|
||||
ASSERT_THAT(v, IsFloatEq(1.234));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, updateAttrs) {
|
||||
auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
auto a = v.attrs()->find(createSymbol("a"));
|
||||
ASSERT_NE(a, nullptr);
|
||||
ASSERT_THAT(*a->value, IsIntEq(3));
|
||||
TEST_F(TrivialExpressionTest, updateAttrs)
|
||||
{
|
||||
auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
auto a = v.attrs()->find(createSymbol("a"));
|
||||
ASSERT_NE(a, nullptr);
|
||||
ASSERT_THAT(*a->value, IsIntEq(3));
|
||||
|
||||
auto b = v.attrs()->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(2));
|
||||
}
|
||||
auto b = v.attrs()->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, hasAttrOpFalse) {
|
||||
auto v = eval("{} ? a");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, hasAttrOpFalse)
|
||||
{
|
||||
auto v = eval("{} ? a");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, hasAttrOpTrue) {
|
||||
auto v = eval("{ a = 123; } ? a");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, hasAttrOpTrue)
|
||||
{
|
||||
auto v = eval("{ a = 123; } ? a");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, withFound) {
|
||||
auto v = eval("with { a = 23; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(23));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, withFound)
|
||||
{
|
||||
auto v = eval("with { a = 23; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(23));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, withNotFound) {
|
||||
ASSERT_THROW(eval("with {}; a"), Error);
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, withNotFound)
|
||||
{
|
||||
ASSERT_THROW(eval("with {}; a"), Error);
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, withOverride) {
|
||||
auto v = eval("with { a = 23; }; with { a = 42; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(42));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, withOverride)
|
||||
{
|
||||
auto v = eval("with { a = 23; }; with { a = 42; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(42));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, letOverWith) {
|
||||
auto v = eval("let a = 23; in with { a = 1; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(23));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, letOverWith)
|
||||
{
|
||||
auto v = eval("let a = 23; in with { a = 1; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(23));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, multipleLet) {
|
||||
auto v = eval("let a = 23; in let a = 42; in a");
|
||||
ASSERT_THAT(v, IsIntEq(42));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, multipleLet)
|
||||
{
|
||||
auto v = eval("let a = 23; in let a = 42; in a");
|
||||
ASSERT_THAT(v, IsIntEq(42));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgs) {
|
||||
auto v = eval("({ a ? 123 }: a) {}");
|
||||
ASSERT_THAT(v, IsIntEq(123));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgs)
|
||||
{
|
||||
auto v = eval("({ a ? 123 }: a) {}");
|
||||
ASSERT_THAT(v, IsIntEq(123));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) {
|
||||
auto v = eval("({ a ? 123 }: a) { a = 5; }");
|
||||
ASSERT_THAT(v, IsIntEq(5));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride)
|
||||
{
|
||||
auto v = eval("({ a ? 123 }: a) { a = 5; }");
|
||||
ASSERT_THAT(v, IsIntEq(5));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) {
|
||||
auto v = eval("({ a ? 123 }@args: args) {}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack)
|
||||
{
|
||||
auto v = eval("({ a ? 123 }@args: args) {}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) {
|
||||
auto v = eval("(args@{ a ? 123 }: args) {}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront)
|
||||
{
|
||||
auto v = eval("(args@{ a ? 123 }: args) {}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, assertThrows) {
|
||||
ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, assertThrows)
|
||||
{
|
||||
ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, assertPassed) {
|
||||
auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
|
||||
ASSERT_THAT(v, IsIntEq(123));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, assertPassed)
|
||||
{
|
||||
auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
|
||||
ASSERT_THAT(v, IsIntEq(123));
|
||||
}
|
||||
|
||||
class AttrSetMergeTrvialExpressionTest :
|
||||
public TrivialExpressionTest,
|
||||
public testing::WithParamInterface<const char*>
|
||||
{};
|
||||
class AttrSetMergeTrvialExpressionTest : public TrivialExpressionTest, public testing::WithParamInterface<const char *>
|
||||
{};
|
||||
|
||||
TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) {
|
||||
// Usually Nix rejects duplicate keys in an attrset but it does allow
|
||||
// so if it is an attribute set that contains disjoint sets of keys.
|
||||
// The below is equivalent to `{a.b = 1; a.c = 2; }`.
|
||||
// The attribute set `a` will be a Thunk at first as the attribuets
|
||||
// have to be merged (or otherwise computed) and that is done in a lazy
|
||||
// manner.
|
||||
TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy)
|
||||
{
|
||||
// Usually Nix rejects duplicate keys in an attrset but it does allow
|
||||
// so if it is an attribute set that contains disjoint sets of keys.
|
||||
// The below is equivalent to `{a.b = 1; a.c = 2; }`.
|
||||
// The attribute set `a` will be a Thunk at first as the attribuets
|
||||
// have to be merged (or otherwise computed) and that is done in a lazy
|
||||
// manner.
|
||||
|
||||
auto expr = GetParam();
|
||||
auto v = eval(expr);
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
auto expr = GetParam();
|
||||
auto v = eval(expr);
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
|
||||
auto a = v.attrs()->find(createSymbol("a"));
|
||||
ASSERT_NE(a, nullptr);
|
||||
auto a = v.attrs()->find(createSymbol("a"));
|
||||
ASSERT_NE(a, nullptr);
|
||||
|
||||
ASSERT_THAT(*a->value, IsThunk());
|
||||
state.forceValue(*a->value, noPos);
|
||||
ASSERT_THAT(*a->value, IsThunk());
|
||||
state.forceValue(*a->value, noPos);
|
||||
|
||||
ASSERT_THAT(*a->value, IsAttrsOfSize(2));
|
||||
ASSERT_THAT(*a->value, IsAttrsOfSize(2));
|
||||
|
||||
auto b = a->value->attrs()->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(1));
|
||||
auto b = a->value->attrs()->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(1));
|
||||
|
||||
auto c = a->value->attrs()->find(createSymbol("c"));
|
||||
ASSERT_NE(c, nullptr);
|
||||
ASSERT_THAT(*c->value, IsIntEq(2));
|
||||
}
|
||||
auto c = a->value->attrs()->find(createSymbol("c"));
|
||||
ASSERT_NE(c, nullptr);
|
||||
ASSERT_THAT(*c->value, IsIntEq(2));
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
attrsetMergeLazy,
|
||||
AttrSetMergeTrvialExpressionTest,
|
||||
testing::Values(
|
||||
"{ a.b = 1; a.c = 2; }",
|
||||
"{ a = { b = 1; }; a = { c = 2; }; }"
|
||||
)
|
||||
);
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
attrsetMergeLazy,
|
||||
AttrSetMergeTrvialExpressionTest,
|
||||
testing::Values("{ a.b = 1; a.c = 2; }", "{ a = { b = 1; }; a = { c = 2; }; }"));
|
||||
|
||||
// The following macros ultimately define 48 tests (16 variations on three
|
||||
// templates). Each template tests an expression that can be written in 2^4
|
||||
@ -199,28 +220,34 @@ namespace nix {
|
||||
// expanded.
|
||||
#define X_EXPAND_IF0(k, v) k "." v
|
||||
#define X_EXPAND_IF1(k, v) k " = { " v " };"
|
||||
#define X4(w, x, y, z) \
|
||||
TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) { \
|
||||
auto v = eval("{ a.b = { c = 1; d = 2; }; } == { " \
|
||||
X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \
|
||||
X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \
|
||||
ASSERT_THAT(v, IsTrue()); \
|
||||
}; \
|
||||
TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) { \
|
||||
ASSERT_THROW(eval("{ " \
|
||||
X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \
|
||||
X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "c = 2;")) " }"), Error); \
|
||||
}; \
|
||||
TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) { \
|
||||
auto v = eval("{ b = { c = 1; d = 2; }; } == (let " \
|
||||
X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " \
|
||||
X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \
|
||||
ASSERT_THAT(v, IsTrue()); \
|
||||
#define X4(w, x, y, z) \
|
||||
TEST_F(TrivialExpressionTest, nestedAttrsetMerge##w##x##y##z) \
|
||||
{ \
|
||||
auto v = eval( \
|
||||
"{ a.b = { c = 1; d = 2; }; } == { " X_EXPAND_IF##w( \
|
||||
"a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " }"); \
|
||||
ASSERT_THAT(v, IsTrue()); \
|
||||
}; \
|
||||
TEST_F(TrivialExpressionTest, nestedAttrsetMergeDup##w##x##y##z) \
|
||||
{ \
|
||||
ASSERT_THROW( \
|
||||
eval( \
|
||||
"{ " X_EXPAND_IF##w("a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y( \
|
||||
"a", X_EXPAND_IF##z("b", "c = 2;")) " }"), \
|
||||
Error); \
|
||||
}; \
|
||||
TEST_F(TrivialExpressionTest, nestedAttrsetMergeLet##w##x##y##z) \
|
||||
{ \
|
||||
auto v = eval( \
|
||||
"{ b = { c = 1; d = 2; }; } == (let " X_EXPAND_IF##w( \
|
||||
"a", X_EXPAND_IF##x("b", "c = 1;")) " " X_EXPAND_IF##y("a", X_EXPAND_IF##z("b", "d = 2;")) " in a)"); \
|
||||
ASSERT_THAT(v, IsTrue()); \
|
||||
};
|
||||
#define X3(...) X4(__VA_ARGS__, 0) X4(__VA_ARGS__, 1)
|
||||
#define X2(...) X3(__VA_ARGS__, 0) X3(__VA_ARGS__, 1)
|
||||
#define X1(...) X2(__VA_ARGS__, 0) X2(__VA_ARGS__, 1)
|
||||
X1(0) X1(1)
|
||||
X1(0)
|
||||
X1(1)
|
||||
#undef X_EXPAND_IF0
|
||||
#undef X_EXPAND_IF1
|
||||
#undef X1
|
||||
@ -228,74 +255,88 @@ namespace nix {
|
||||
#undef X3
|
||||
#undef X4
|
||||
|
||||
TEST_F(TrivialExpressionTest, functor) {
|
||||
auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
|
||||
ASSERT_THAT(v, IsIntEq(15));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, functor)
|
||||
{
|
||||
auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
|
||||
ASSERT_THAT(v, IsIntEq(15));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, forwardPipe) {
|
||||
auto v = eval("1 |> builtins.add 2 |> builtins.mul 3");
|
||||
ASSERT_THAT(v, IsIntEq(9));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, forwardPipe)
|
||||
{
|
||||
auto v = eval("1 |> builtins.add 2 |> builtins.mul 3");
|
||||
ASSERT_THAT(v, IsIntEq(9));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, backwardPipe) {
|
||||
auto v = eval("builtins.add 1 <| builtins.mul 2 <| 3");
|
||||
ASSERT_THAT(v, IsIntEq(7));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, backwardPipe)
|
||||
{
|
||||
auto v = eval("builtins.add 1 <| builtins.mul 2 <| 3");
|
||||
ASSERT_THAT(v, IsIntEq(7));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, forwardPipeEvaluationOrder) {
|
||||
auto v = eval("1 |> null |> (x: 2)");
|
||||
ASSERT_THAT(v, IsIntEq(2));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, forwardPipeEvaluationOrder)
|
||||
{
|
||||
auto v = eval("1 |> null |> (x: 2)");
|
||||
ASSERT_THAT(v, IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, backwardPipeEvaluationOrder) {
|
||||
auto v = eval("(x: 1) <| null <| 2");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, backwardPipeEvaluationOrder)
|
||||
{
|
||||
auto v = eval("(x: 1) <| null <| 2");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, differentPipeOperatorsDoNotAssociate) {
|
||||
ASSERT_THROW(eval("(x: 1) <| 2 |> (x: 3)"), ParseError);
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, differentPipeOperatorsDoNotAssociate)
|
||||
{
|
||||
ASSERT_THROW(eval("(x: 1) <| 2 |> (x: 3)"), ParseError);
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, differentPipeOperatorsParensLeft) {
|
||||
auto v = eval("((x: 1) <| 2) |> (x: 3)");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, differentPipeOperatorsParensLeft)
|
||||
{
|
||||
auto v = eval("((x: 1) <| 2) |> (x: 3)");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, differentPipeOperatorsParensRight) {
|
||||
auto v = eval("(x: 1) <| (2 |> (x: 3))");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, differentPipeOperatorsParensRight)
|
||||
{
|
||||
auto v = eval("(x: 1) <| (2 |> (x: 3))");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, forwardPipeLowestPrecedence) {
|
||||
auto v = eval("false -> true |> (x: !x)");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, forwardPipeLowestPrecedence)
|
||||
{
|
||||
auto v = eval("false -> true |> (x: !x)");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, backwardPipeLowestPrecedence) {
|
||||
auto v = eval("(x: !x) <| false -> true");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, backwardPipeLowestPrecedence)
|
||||
{
|
||||
auto v = eval("(x: !x) <| false -> true");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, forwardPipeStrongerThanElse) {
|
||||
auto v = eval("if true then 1 else 2 |> 3");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, forwardPipeStrongerThanElse)
|
||||
{
|
||||
auto v = eval("if true then 1 else 2 |> 3");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, backwardPipeStrongerThanElse) {
|
||||
auto v = eval("if true then 1 else 2 <| 3");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, backwardPipeStrongerThanElse)
|
||||
{
|
||||
auto v = eval("if true then 1 else 2 <| 3");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, bindOr) {
|
||||
auto v = eval("{ or = 1; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
auto b = v.attrs()->find(createSymbol("or"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(1));
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, bindOr)
|
||||
{
|
||||
auto v = eval("{ or = 1; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
auto b = v.attrs()->find(createSymbol("or"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, orCantBeUsed) {
|
||||
ASSERT_THROW(eval("let or = 1; in or"), Error);
|
||||
}
|
||||
TEST_F(TrivialExpressionTest, orCantBeUsed)
|
||||
{
|
||||
ASSERT_THROW(eval("let or = 1; in or"), Error);
|
||||
}
|
||||
} /* namespace nix */
|
||||
|
@ -10,46 +10,42 @@ namespace nix {
|
||||
|
||||
// Test a few cases of invalid string context elements.
|
||||
|
||||
TEST(NixStringContextElemTest, empty_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse(""),
|
||||
BadNixStringContextElem);
|
||||
TEST(NixStringContextElemTest, empty_invalid)
|
||||
{
|
||||
EXPECT_THROW(NixStringContextElem::parse(""), BadNixStringContextElem);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, single_bang_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("!"),
|
||||
BadNixStringContextElem);
|
||||
TEST(NixStringContextElemTest, single_bang_invalid)
|
||||
{
|
||||
EXPECT_THROW(NixStringContextElem::parse("!"), BadNixStringContextElem);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, double_bang_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("!!/"),
|
||||
BadStorePath);
|
||||
TEST(NixStringContextElemTest, double_bang_invalid)
|
||||
{
|
||||
EXPECT_THROW(NixStringContextElem::parse("!!/"), BadStorePath);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, eq_slash_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("=/"),
|
||||
BadStorePath);
|
||||
TEST(NixStringContextElemTest, eq_slash_invalid)
|
||||
{
|
||||
EXPECT_THROW(NixStringContextElem::parse("=/"), BadStorePath);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, slash_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("/"),
|
||||
BadStorePath);
|
||||
TEST(NixStringContextElemTest, slash_invalid)
|
||||
{
|
||||
EXPECT_THROW(NixStringContextElem::parse("/"), BadStorePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for
|
||||
* `NixStringContextElem::Opaque`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, opaque) {
|
||||
TEST(NixStringContextElemTest, opaque)
|
||||
{
|
||||
std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
|
||||
auto elem = NixStringContextElem::parse(opaque);
|
||||
auto * p = std::get_if<NixStringContextElem::Opaque>(&elem.raw);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->path, StorePath { opaque });
|
||||
ASSERT_EQ(p->path, StorePath{opaque});
|
||||
ASSERT_EQ(elem.to_string(), opaque);
|
||||
}
|
||||
|
||||
@ -57,12 +53,13 @@ TEST(NixStringContextElemTest, opaque) {
|
||||
* Round trip (string <-> data structure) test for
|
||||
* `NixStringContextElem::DrvDeep`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, drvDeep) {
|
||||
TEST(NixStringContextElemTest, drvDeep)
|
||||
{
|
||||
std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(drvDeep);
|
||||
auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem.raw);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) });
|
||||
ASSERT_EQ(p->drvPath, StorePath{drvDeep.substr(1)});
|
||||
ASSERT_EQ(elem.to_string(), drvDeep);
|
||||
}
|
||||
|
||||
@ -70,15 +67,18 @@ TEST(NixStringContextElemTest, drvDeep) {
|
||||
* Round trip (string <-> data structure) test for a simpler
|
||||
* `NixStringContextElem::Built`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, built_opaque) {
|
||||
TEST(NixStringContextElemTest, built_opaque)
|
||||
{
|
||||
std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(built);
|
||||
auto * p = std::get_if<NixStringContextElem::Built>(&elem.raw);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->output, "foo");
|
||||
ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
|
||||
.path = StorePath { built.substr(5) },
|
||||
}));
|
||||
ASSERT_EQ(
|
||||
*p->drvPath,
|
||||
((SingleDerivedPath) SingleDerivedPath::Opaque{
|
||||
.path = StorePath{built.substr(5)},
|
||||
}));
|
||||
ASSERT_EQ(elem.to_string(), built);
|
||||
}
|
||||
|
||||
@ -86,7 +86,8 @@ TEST(NixStringContextElemTest, built_opaque) {
|
||||
* Round trip (string <-> data structure) test for a more complex,
|
||||
* inductive `NixStringContextElem::Built`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, built_built) {
|
||||
TEST(NixStringContextElemTest, built_built)
|
||||
{
|
||||
/**
|
||||
* We set these in tests rather than the regular globals so we don't have
|
||||
* to worry about race conditions if the tests run concurrently.
|
||||
@ -102,9 +103,11 @@ TEST(NixStringContextElemTest, built_built) {
|
||||
auto * drvPath = std::get_if<SingleDerivedPath::Built>(&*p->drvPath);
|
||||
ASSERT_TRUE(drvPath);
|
||||
ASSERT_EQ(drvPath->output, "bar");
|
||||
ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
|
||||
.path = StorePath { built.substr(9) },
|
||||
}));
|
||||
ASSERT_EQ(
|
||||
*drvPath->drvPath,
|
||||
((SingleDerivedPath) SingleDerivedPath::Opaque{
|
||||
.path = StorePath{built.substr(9)},
|
||||
}));
|
||||
ASSERT_EQ(elem.to_string(), built);
|
||||
}
|
||||
|
||||
@ -112,17 +115,15 @@ TEST(NixStringContextElemTest, built_built) {
|
||||
* Without the right experimental features enabled, we cannot parse a
|
||||
* complex inductive string context element.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, built_built_xp) {
|
||||
TEST(NixStringContextElemTest, built_built_xp)
|
||||
{
|
||||
ASSERT_THROW(
|
||||
NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature);
|
||||
NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature);
|
||||
}
|
||||
|
||||
#ifndef COVERAGE
|
||||
|
||||
RC_GTEST_PROP(
|
||||
NixStringContextElemTest,
|
||||
prop_round_rip,
|
||||
(const NixStringContextElem & o))
|
||||
RC_GTEST_PROP(NixStringContextElemTest, prop_round_rip, (const NixStringContextElem & o))
|
||||
{
|
||||
ExperimentalFeatureSettings xpSettings;
|
||||
xpSettings.set("experimental-features", "dynamic-derivations");
|
||||
|
@ -106,14 +106,11 @@ TEST_F(ValuePrintingTests, vApp)
|
||||
|
||||
TEST_F(ValuePrintingTests, vLambda)
|
||||
{
|
||||
Env env {
|
||||
.up = nullptr,
|
||||
.values = { }
|
||||
};
|
||||
Env env{.up = nullptr, .values = {}};
|
||||
PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
|
||||
auto posIdx = state.positions.add(origin, 0);
|
||||
auto body = ExprInt(0);
|
||||
auto formals = Formals {};
|
||||
auto formals = Formals{};
|
||||
|
||||
ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body);
|
||||
|
||||
@ -130,9 +127,7 @@ TEST_F(ValuePrintingTests, vLambda)
|
||||
TEST_F(ValuePrintingTests, vPrimOp)
|
||||
{
|
||||
Value vPrimOp;
|
||||
PrimOp primOp{
|
||||
.name = "puppy"
|
||||
};
|
||||
PrimOp primOp{.name = "puppy"};
|
||||
vPrimOp.mkPrimOp(&primOp);
|
||||
|
||||
test(vPrimOp, "«primop puppy»");
|
||||
@ -140,9 +135,7 @@ TEST_F(ValuePrintingTests, vPrimOp)
|
||||
|
||||
TEST_F(ValuePrintingTests, vPrimOpApp)
|
||||
{
|
||||
PrimOp primOp{
|
||||
.name = "puppy"
|
||||
};
|
||||
PrimOp primOp{.name = "puppy"};
|
||||
Value vPrimOp;
|
||||
vPrimOp.mkPrimOp(&primOp);
|
||||
|
||||
@ -220,10 +213,13 @@ TEST_F(ValuePrintingTests, depthAttrs)
|
||||
Value vNested;
|
||||
vNested.mkAttrs(builder2.finish());
|
||||
|
||||
test(vNested, "{ nested = { ... }; one = 1; two = 2; }", PrintOptions { .maxDepth = 1 });
|
||||
test(vNested, "{ nested = { nested = { ... }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 2 });
|
||||
test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 3 });
|
||||
test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 4 });
|
||||
test(vNested, "{ nested = { ... }; one = 1; two = 2; }", PrintOptions{.maxDepth = 1});
|
||||
test(
|
||||
vNested,
|
||||
"{ nested = { nested = { ... }; one = 1; two = 2; }; one = 1; two = 2; }",
|
||||
PrintOptions{.maxDepth = 2});
|
||||
test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions{.maxDepth = 3});
|
||||
test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions{.maxDepth = 4});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, depthList)
|
||||
@ -256,11 +252,11 @@ TEST_F(ValuePrintingTests, depthList)
|
||||
Value vList;
|
||||
vList.mkList(list);
|
||||
|
||||
test(vList, "[ 1 2 { ... } ]", PrintOptions { .maxDepth = 1 });
|
||||
test(vList, "[ 1 2 { nested = { ... }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 2 });
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 3 });
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 4 });
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 5 });
|
||||
test(vList, "[ 1 2 { ... } ]", PrintOptions{.maxDepth = 1});
|
||||
test(vList, "[ 1 2 { nested = { ... }; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 2});
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 3});
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 4});
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions{.maxDepth = 5});
|
||||
}
|
||||
|
||||
struct StringPrintingTests : LibExprTest
|
||||
@ -272,9 +268,7 @@ struct StringPrintingTests : LibExprTest
|
||||
v.mkString(literal);
|
||||
|
||||
std::stringstream out;
|
||||
printValue(state, out, v, PrintOptions {
|
||||
.maxStringLength = maxLength
|
||||
});
|
||||
printValue(state, out, v, PrintOptions{.maxStringLength = maxLength});
|
||||
ASSERT_EQ(out.str(), expected);
|
||||
}
|
||||
};
|
||||
@ -305,15 +299,9 @@ TEST_F(ValuePrintingTests, attrsTypeFirst)
|
||||
Value vAttrs;
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
|
||||
test(vAttrs,
|
||||
"{ type = \"puppy\"; apple = \"apple\"; }",
|
||||
PrintOptions {
|
||||
.maxAttrs = 100
|
||||
});
|
||||
test(vAttrs, "{ type = \"puppy\"; apple = \"apple\"; }", PrintOptions{.maxAttrs = 100});
|
||||
|
||||
test(vAttrs,
|
||||
"{ apple = \"apple\"; type = \"puppy\"; }",
|
||||
PrintOptions { });
|
||||
test(vAttrs, "{ apple = \"apple\"; type = \"puppy\"; }", PrintOptions{});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsInt)
|
||||
@ -321,11 +309,7 @@ TEST_F(ValuePrintingTests, ansiColorsInt)
|
||||
Value v;
|
||||
v.mkInt(10);
|
||||
|
||||
test(v,
|
||||
ANSI_CYAN "10" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(v, ANSI_CYAN "10" ANSI_NORMAL, PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsFloat)
|
||||
@ -333,11 +317,7 @@ TEST_F(ValuePrintingTests, ansiColorsFloat)
|
||||
Value v;
|
||||
v.mkFloat(1.6);
|
||||
|
||||
test(v,
|
||||
ANSI_CYAN "1.6" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(v, ANSI_CYAN "1.6" ANSI_NORMAL, PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsBool)
|
||||
@ -345,11 +325,7 @@ TEST_F(ValuePrintingTests, ansiColorsBool)
|
||||
Value v;
|
||||
v.mkBool(true);
|
||||
|
||||
test(v,
|
||||
ANSI_CYAN "true" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(v, ANSI_CYAN "true" ANSI_NORMAL, PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsString)
|
||||
@ -357,11 +333,7 @@ TEST_F(ValuePrintingTests, ansiColorsString)
|
||||
Value v;
|
||||
v.mkString("puppy");
|
||||
|
||||
test(v,
|
||||
ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(v, ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsStringElided)
|
||||
@ -369,12 +341,10 @@ TEST_F(ValuePrintingTests, ansiColorsStringElided)
|
||||
Value v;
|
||||
v.mkString("puppy");
|
||||
|
||||
test(v,
|
||||
ANSI_MAGENTA "\"pup\" " ANSI_FAINT "«2 bytes elided»" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true,
|
||||
.maxStringLength = 3
|
||||
});
|
||||
test(
|
||||
v,
|
||||
ANSI_MAGENTA "\"pup\" " ANSI_FAINT "«2 bytes elided»" ANSI_NORMAL,
|
||||
PrintOptions{.ansiColors = true, .maxStringLength = 3});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsPath)
|
||||
@ -382,11 +352,7 @@ TEST_F(ValuePrintingTests, ansiColorsPath)
|
||||
Value v;
|
||||
v.mkPath(state.rootPath(CanonPath("puppy")));
|
||||
|
||||
test(v,
|
||||
ANSI_GREEN "/puppy" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(v, ANSI_GREEN "/puppy" ANSI_NORMAL, PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsNull)
|
||||
@ -394,11 +360,7 @@ TEST_F(ValuePrintingTests, ansiColorsNull)
|
||||
Value v;
|
||||
v.mkNull();
|
||||
|
||||
test(v,
|
||||
ANSI_CYAN "null" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(v, ANSI_CYAN "null" ANSI_NORMAL, PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsAttrs)
|
||||
@ -416,11 +378,10 @@ TEST_F(ValuePrintingTests, ansiColorsAttrs)
|
||||
Value vAttrs;
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
|
||||
test(vAttrs,
|
||||
"{ one = " ANSI_CYAN "1" ANSI_NORMAL "; two = " ANSI_CYAN "2" ANSI_NORMAL "; }",
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(
|
||||
vAttrs,
|
||||
"{ one = " ANSI_CYAN "1" ANSI_NORMAL "; two = " ANSI_CYAN "2" ANSI_NORMAL "; }",
|
||||
PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsDerivation)
|
||||
@ -434,20 +395,15 @@ TEST_F(ValuePrintingTests, ansiColorsDerivation)
|
||||
Value vAttrs;
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
|
||||
test(vAttrs,
|
||||
ANSI_GREEN "«derivation»" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
.derivationPaths = true
|
||||
});
|
||||
test(
|
||||
vAttrs,
|
||||
ANSI_GREEN "«derivation»" ANSI_NORMAL,
|
||||
PrintOptions{.ansiColors = true, .force = true, .derivationPaths = true});
|
||||
|
||||
test(vAttrs,
|
||||
"{ type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL "; }",
|
||||
PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true
|
||||
});
|
||||
test(
|
||||
vAttrs,
|
||||
"{ type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL "; }",
|
||||
PrintOptions{.ansiColors = true, .force = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsError)
|
||||
@ -458,14 +414,13 @@ TEST_F(ValuePrintingTests, ansiColorsError)
|
||||
Value vError;
|
||||
vError.mkApp(&throw_, &message);
|
||||
|
||||
test(vError,
|
||||
ANSI_RED
|
||||
"«error: uh oh!»"
|
||||
ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
});
|
||||
test(
|
||||
vError,
|
||||
ANSI_RED "«error: uh oh!»" ANSI_NORMAL,
|
||||
PrintOptions{
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsDerivationError)
|
||||
@ -486,30 +441,20 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError)
|
||||
Value vAttrs;
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
|
||||
test(vAttrs,
|
||||
"{ drvPath = "
|
||||
ANSI_RED
|
||||
"«error: uh oh!»"
|
||||
ANSI_NORMAL
|
||||
"; type = "
|
||||
ANSI_MAGENTA
|
||||
"\"derivation\""
|
||||
ANSI_NORMAL
|
||||
"; }",
|
||||
PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true
|
||||
});
|
||||
test(
|
||||
vAttrs,
|
||||
"{ drvPath = " ANSI_RED "«error: uh oh!»" ANSI_NORMAL "; type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL
|
||||
"; }",
|
||||
PrintOptions{.ansiColors = true, .force = true});
|
||||
|
||||
test(vAttrs,
|
||||
ANSI_RED
|
||||
"«error: uh oh!»"
|
||||
ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
.derivationPaths = true,
|
||||
});
|
||||
test(
|
||||
vAttrs,
|
||||
ANSI_RED "«error: uh oh!»" ANSI_NORMAL,
|
||||
PrintOptions{
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
.derivationPaths = true,
|
||||
});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsAssert)
|
||||
@ -523,12 +468,7 @@ TEST_F(ValuePrintingTests, ansiColorsAssert)
|
||||
Value v;
|
||||
state.mkThunk_(v, &expr);
|
||||
|
||||
test(v,
|
||||
ANSI_RED "«error: assertion 'false' failed»" ANSI_NORMAL,
|
||||
PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true
|
||||
});
|
||||
test(v, ANSI_RED "«error: assertion 'false' failed»" ANSI_NORMAL, PrintOptions{.ansiColors = true, .force = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsList)
|
||||
@ -545,77 +485,51 @@ TEST_F(ValuePrintingTests, ansiColorsList)
|
||||
Value vList;
|
||||
vList.mkList(list);
|
||||
|
||||
test(vList,
|
||||
"[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_CYAN "2" ANSI_NORMAL " " ANSI_MAGENTA "«nullptr»" ANSI_NORMAL " ]",
|
||||
PrintOptions {
|
||||
.ansiColors = true
|
||||
});
|
||||
test(
|
||||
vList,
|
||||
"[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_CYAN "2" ANSI_NORMAL " " ANSI_MAGENTA "«nullptr»" ANSI_NORMAL " ]",
|
||||
PrintOptions{.ansiColors = true});
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, ansiColorsLambda)
|
||||
{
|
||||
Env env {
|
||||
.up = nullptr,
|
||||
.values = { }
|
||||
};
|
||||
Env env{.up = nullptr, .values = {}};
|
||||
PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
|
||||
auto posIdx = state.positions.add(origin, 0);
|
||||
auto body = ExprInt(0);
|
||||
auto formals = Formals {};
|
||||
auto formals = Formals{};
|
||||
|
||||
ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body);

Value vLambda;
vLambda.mkLambda(&env, &eLambda);

test(vLambda,
ANSI_BLUE "«lambda @ «none»:1:1»" ANSI_NORMAL,
PrintOptions {
.ansiColors = true,
.force = true
});
test(vLambda, ANSI_BLUE "«lambda @ «none»:1:1»" ANSI_NORMAL, PrintOptions{.ansiColors = true, .force = true});

eLambda.setName(createSymbol("puppy"));

test(vLambda,
ANSI_BLUE "«lambda puppy @ «none»:1:1»" ANSI_NORMAL,
PrintOptions {
.ansiColors = true,
.force = true
});
test(vLambda, ANSI_BLUE "«lambda puppy @ «none»:1:1»" ANSI_NORMAL, PrintOptions{.ansiColors = true, .force = true});
}

TEST_F(ValuePrintingTests, ansiColorsPrimOp)
{
PrimOp primOp{
.name = "puppy"
};
PrimOp primOp{.name = "puppy"};
Value v;
v.mkPrimOp(&primOp);

test(v,
ANSI_BLUE "«primop puppy»" ANSI_NORMAL,
PrintOptions {
.ansiColors = true
});
test(v, ANSI_BLUE "«primop puppy»" ANSI_NORMAL, PrintOptions{.ansiColors = true});
}

TEST_F(ValuePrintingTests, ansiColorsPrimOpApp)
{
PrimOp primOp{
.name = "puppy"
};
PrimOp primOp{.name = "puppy"};
Value vPrimOp;
vPrimOp.mkPrimOp(&primOp);

Value v;
v.mkPrimOpApp(&vPrimOp, nullptr);

test(v,
ANSI_BLUE "«partially applied primop puppy»" ANSI_NORMAL,
PrintOptions {
.ansiColors = true
});
test(v, ANSI_BLUE "«partially applied primop puppy»" ANSI_NORMAL, PrintOptions{.ansiColors = true});
}

TEST_F(ValuePrintingTests, ansiColorsThunk)
@ -623,11 +537,7 @@ TEST_F(ValuePrintingTests, ansiColorsThunk)
Value v;
v.mkThunk(nullptr, nullptr);

test(v,
ANSI_MAGENTA "«thunk»" ANSI_NORMAL,
PrintOptions {
.ansiColors = true
});
test(v, ANSI_MAGENTA "«thunk»" ANSI_NORMAL, PrintOptions{.ansiColors = true});
}

TEST_F(ValuePrintingTests, ansiColorsBlackhole)
@ -635,11 +545,7 @@ TEST_F(ValuePrintingTests, ansiColorsBlackhole)
Value v;
v.mkBlackhole();

test(v,
ANSI_RED "«potential infinite recursion»" ANSI_NORMAL,
PrintOptions {
.ansiColors = true
});
test(v, ANSI_RED "«potential infinite recursion»" ANSI_NORMAL, PrintOptions{.ansiColors = true});
}

TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated)
@ -656,11 +562,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated)
Value vAttrs;
vAttrs.mkAttrs(builder.finish());

test(vAttrs,
"{ a = { }; b = " ANSI_MAGENTA "«repeated»" ANSI_NORMAL "; }",
PrintOptions {
.ansiColors = true
});
test(vAttrs, "{ a = { }; b = " ANSI_MAGENTA "«repeated»" ANSI_NORMAL "; }", PrintOptions{.ansiColors = true});
}

TEST_F(ValuePrintingTests, ansiColorsListRepeated)
@ -676,11 +578,7 @@ TEST_F(ValuePrintingTests, ansiColorsListRepeated)
Value vList;
vList.mkList(list);

test(vList,
"[ { } " ANSI_MAGENTA "«repeated»" ANSI_NORMAL " ]",
PrintOptions {
.ansiColors = true
});
test(vList, "[ { } " ANSI_MAGENTA "«repeated»" ANSI_NORMAL " ]", PrintOptions{.ansiColors = true});
}

TEST_F(ValuePrintingTests, listRepeated)
@ -696,12 +594,8 @@ TEST_F(ValuePrintingTests, listRepeated)
Value vList;
vList.mkList(list);

test(vList, "[ { } «repeated» ]", PrintOptions { });
test(vList,
"[ { } { } ]",
PrintOptions {
.trackRepeated = false
});
test(vList, "[ { } «repeated» ]", PrintOptions{});
test(vList, "[ { } { } ]", PrintOptions{.trackRepeated = false});
}

TEST_F(ValuePrintingTests, ansiColorsAttrsElided)
@ -719,12 +613,10 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided)
Value vAttrs;
vAttrs.mkAttrs(builder.finish());

test(vAttrs,
"{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL " }",
PrintOptions {
.ansiColors = true,
.maxAttrs = 1
});
test(
vAttrs,
"{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL " }",
PrintOptions{.ansiColors = true, .maxAttrs = 1});

Value vThree;
vThree.mkInt(3);
@ -732,12 +624,10 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided)
builder.insert(state.symbols.create("three"), &vThree);
vAttrs.mkAttrs(builder.finish());

test(vAttrs,
"{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }",
PrintOptions {
.ansiColors = true,
.maxAttrs = 1
});
test(
vAttrs,
"{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }",
PrintOptions{.ansiColors = true, .maxAttrs = 1});
}

TEST_F(ValuePrintingTests, ansiColorsListElided)
@ -751,37 +641,33 @@ TEST_F(ValuePrintingTests, ansiColorsListElided)
vTwo.mkInt(2);

{
auto list = state.buildList(2);
list.elems[0] = &vOne;
list.elems[1] = &vTwo;
Value vList;
vList.mkList(list);
auto list = state.buildList(2);
list.elems[0] = &vOne;
list.elems[1] = &vTwo;
Value vList;
vList.mkList(list);

test(vList,
"[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]",
PrintOptions {
.ansiColors = true,
.maxListItems = 1
});
test(
vList,
"[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]",
PrintOptions{.ansiColors = true, .maxListItems = 1});
}

Value vThree;
vThree.mkInt(3);

{
auto list = state.buildList(3);
list.elems[0] = &vOne;
list.elems[1] = &vTwo;
list.elems[2] = &vThree;
Value vList;
vList.mkList(list);
auto list = state.buildList(3);
list.elems[0] = &vOne;
list.elems[1] = &vTwo;
list.elems[2] = &vThree;
Value vList;
vList.mkList(list);

test(vList,
"[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]",
PrintOptions {
.ansiColors = true,
.maxListItems = 1
});
test(
vList,
"[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]",
PrintOptions{.ansiColors = true, .maxListItems = 1});
}
}
@ -1,10 +1,8 @@
#include "nix/expr/attr-path.hh"
#include "nix/expr/eval-inline.hh"

namespace nix {

static Strings parseAttrPath(std::string_view s)
{
Strings res;
@ -19,18 +17,19 @@ static Strings parseAttrPath(std::string_view s)
while (1) {
if (i == s.end())
throw ParseError("missing closing quote in selection path '%1%'", s);
if (*i == '"') break;
if (*i == '"')
break;
cur.push_back(*i++);
}
} else
cur.push_back(*i);
++i;
}
if (!cur.empty()) res.push_back(cur);
if (!cur.empty())
res.push_back(cur);
return res;
}

std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s)
{
std::vector<Symbol> res;
@ -39,9 +38,8 @@ std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s)
return res;
}

std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::string & attrPath,
Bindings & autoArgs, Value & vIn)
std::pair<Value *, PosIdx>
findAlongAttrPath(EvalState & state, const std::string & attrPath, Bindings & autoArgs, Value & vIn)
{
Strings tokens = parseAttrPath(attrPath);

@ -65,10 +63,12 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
if (!attrIndex) {

if (v->type() != nAttrs)
state.error<TypeError>(
"the expression selected by the selection path '%1%' should be a set but is %2%",
attrPath,
showType(*v)).debugThrow();
state
.error<TypeError>(
"the expression selected by the selection path '%1%' should be a set but is %2%",
attrPath,
showType(*v))
.debugThrow();
if (attr.empty())
throw Error("empty attribute name in selection path '%1%'", attrPath);

@ -79,7 +79,8 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
attrNames.insert(std::string(state.symbols[attr.name]));

auto suggestions = Suggestions::bestMatches(attrNames, attr);
throw AttrPathNotFound(suggestions, "attribute '%1%' in selection path '%2%' not found", attr, attrPath);
throw AttrPathNotFound(
suggestions, "attribute '%1%' in selection path '%2%' not found", attr, attrPath);
}
v = &*a->value;
pos = a->pos;
@ -88,23 +89,23 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
else {

if (!v->isList())
state.error<TypeError>(
"the expression selected by the selection path '%1%' should be a list but is %2%",
attrPath,
showType(*v)).debugThrow();
state
.error<TypeError>(
"the expression selected by the selection path '%1%' should be a list but is %2%",
attrPath,
showType(*v))
.debugThrow();
if (*attrIndex >= v->listSize())
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);

v = v->listElems()[*attrIndex];
pos = noPos;
}

}

return {v, pos};
}

std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v, std::string what)
{
Value * v2;
@ -118,17 +119,17 @@ std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v
// FIXME: is it possible to extract the Pos object instead of doing this
// toString + parsing?
NixStringContext context;
auto path = state.coerceToPath(noPos, *v2, context, "while evaluating the 'meta.position' attribute of a derivation");
auto path =
state.coerceToPath(noPos, *v2, context, "while evaluating the 'meta.position' attribute of a derivation");

auto fn = path.path.abs();

auto fail = [fn]() {
throw ParseError("cannot parse 'meta.position' attribute '%s'", fn);
};
auto fail = [fn]() { throw ParseError("cannot parse 'meta.position' attribute '%s'", fn); };

try {
auto colon = fn.rfind(':');
if (colon == std::string::npos) fail();
if (colon == std::string::npos)
fail();
auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos));
return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno};
} catch (std::invalid_argument & e) {
@ -137,5 +138,4 @@ std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v
}
}

}
@ -3,11 +3,8 @@

#include <algorithm>

namespace nix {

/* Allocate a new array of attributes for an attribute set with a specific
capacity. The space is implicitly reserved after the Bindings
structure. */
@ -22,7 +19,6 @@ Bindings * EvalState::allocBindings(size_t capacity)
return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity);
}

Value & BindingsBuilder::alloc(Symbol name, PosIdx pos)
{
auto value = state.allocValue();
@ -30,24 +26,21 @@ Value & BindingsBuilder::alloc(Symbol name, PosIdx pos)
return *value;
}

Value & BindingsBuilder::alloc(std::string_view name, PosIdx pos)
{
return alloc(state.symbols.create(name), pos);
}

void Bindings::sort()
{
if (size_) std::sort(begin(), end());
if (size_)
std::sort(begin(), end());
}

Value & Value::mkAttrs(BindingsBuilder & bindings)
{
mkAttrs(bindings.finish());
return *this;
}

}
@ -11,8 +11,10 @@ namespace nix::eval_cache {

CachedEvalError::CachedEvalError(ref<AttrCursor> cursor, Symbol attr)
: EvalError(cursor->root->state, "cached failure of attribute '%s'", cursor->getAttrPathStr(attr))
, cursor(cursor), attr(attr)
{ }
, cursor(cursor)
, attr(attr)
{
}

void CachedEvalError::force()
{
@ -25,7 +27,8 @@ void CachedEvalError::force()
}

// Shouldn't happen.
throw EvalError(state, "evaluation of cached failed attribute '%s' unexpectedly succeeded", cursor->getAttrPathStr(attr));
throw EvalError(
state, "evaluation of cached failed attribute '%s' unexpectedly succeeded", cursor->getAttrPathStr(attr));
}

static const char * schema = R"sql(
@ -59,10 +62,7 @@ struct AttrDb

SymbolTable & symbols;

AttrDb(
const StoreDirConfig & cfg,
const Hash & fingerprint,
SymbolTable & symbols)
AttrDb(const StoreDirConfig & cfg, const Hash & fingerprint, SymbolTable & symbols)
: cfg(cfg)
, _state(std::make_unique<Sync<State>>())
, symbols(symbols)
@ -78,17 +78,16 @@ struct AttrDb
state->db.isCache();
state->db.exec(schema);

state->insertAttribute.create(state->db,
"insert or replace into Attributes(parent, name, type, value) values (?, ?, ?, ?)");
state->insertAttribute.create(
state->db, "insert or replace into Attributes(parent, name, type, value) values (?, ?, ?, ?)");

state->insertAttributeWithContext.create(state->db,
"insert or replace into Attributes(parent, name, type, value, context) values (?, ?, ?, ?, ?)");
state->insertAttributeWithContext.create(
state->db, "insert or replace into Attributes(parent, name, type, value, context) values (?, ?, ?, ?, ?)");

state->queryAttribute.create(state->db,
"select rowid, type, value, context from Attributes where parent = ? and name = ?");
state->queryAttribute.create(
state->db, "select rowid, type, value, context from Attributes where parent = ? and name = ?");

state->queryAttributes.create(state->db,
"select name from Attributes where parent = ?");
state->queryAttributes.create(state->db, "select name from Attributes where parent = ?");

state->txn = std::make_unique<SQLiteTxn>(state->db);
}
@ -108,7 +107,8 @@ struct AttrDb
template<typename F>
AttrId doSQLite(F && fun)
{
if (failed) return 0;
if (failed)
return 0;
try {
return fun();
} catch (SQLiteError &) {
@ -118,116 +118,76 @@ struct AttrDb
}
}

AttrId setAttrs(
AttrKey key,
const std::vector<Symbol> & attrs)
AttrId setAttrs(AttrKey key, const std::vector<Symbol> & attrs)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::FullAttrs)
(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::FullAttrs) (0, false).exec();

AttrId rowId = state->db.getLastInsertedRowId();
assert(rowId);

for (auto & attr : attrs)
state->insertAttribute.use()
(rowId)
(symbols[attr])
(AttrType::Placeholder)
(0, false).exec();
state->insertAttribute.use()(rowId)(symbols[attr])(AttrType::Placeholder) (0, false).exec();

return rowId;
});
}

AttrId setString(
AttrKey key,
std::string_view s,
const char * * context = nullptr)
AttrId setString(AttrKey key, std::string_view s, const char ** context = nullptr)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

if (context) {
std::string ctx;
for (const char * * p = context; *p; ++p) {
if (p != context) ctx.push_back(' ');
for (const char ** p = context; *p; ++p) {
if (p != context)
ctx.push_back(' ');
ctx.append(*p);
}
state->insertAttributeWithContext.use()
(key.first)
(symbols[key.second])
(AttrType::String)
(s)
(ctx).exec();
state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String) (s) (ctx)
.exec();
} else {
state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::String)
(s).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::String) (s).exec();
}

return state->db.getLastInsertedRowId();
});
}

AttrId setBool(
AttrKey key,
bool b)
AttrId setBool(AttrKey key, bool b)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::Bool)
(b ? 1 : 0).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Bool) (b ? 1 : 0).exec();

return state->db.getLastInsertedRowId();
});
}

AttrId setInt(
AttrKey key,
int n)
AttrId setInt(AttrKey key, int n)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::Int)
(n).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Int) (n).exec();

return state->db.getLastInsertedRowId();
});
}

AttrId setListOfStrings(
AttrKey key,
const std::vector<std::string> & l)
AttrId setListOfStrings(AttrKey key, const std::vector<std::string> & l)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::ListOfStrings)
(dropEmptyInitThenConcatStringsSep("\t", l)).exec();
state->insertAttribute
.use()(key.first)(symbols[key.second])(
AttrType::ListOfStrings) (dropEmptyInitThenConcatStringsSep("\t", l))
.exec();

return state->db.getLastInsertedRowId();
});
@ -235,15 +195,10 @@ struct AttrDb

AttrId setPlaceholder(AttrKey key)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::Placeholder)
(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Placeholder) (0, false).exec();

return state->db.getLastInsertedRowId();
});
@ -251,15 +206,10 @@ struct AttrDb

AttrId setMissing(AttrKey key)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::Missing)
(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Missing) (0, false).exec();

return state->db.getLastInsertedRowId();
});
@ -267,15 +217,10 @@ struct AttrDb

AttrId setMisc(AttrKey key)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::Misc)
(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Misc) (0, false).exec();

return state->db.getLastInsertedRowId();
});
@ -283,15 +228,10 @@ struct AttrDb

AttrId setFailed(AttrKey key)
{
return doSQLite([&]()
{
return doSQLite([&]() {
auto state(_state->lock());

state->insertAttribute.use()
(key.first)
(symbols[key.second])
(AttrType::Failed)
(0, false).exec();
state->insertAttribute.use()(key.first)(symbols[key.second])(AttrType::Failed) (0, false).exec();

return state->db.getLastInsertedRowId();
});
@ -302,51 +242,49 @@ struct AttrDb
auto state(_state->lock());

auto queryAttribute(state->queryAttribute.use()(key.first)(symbols[key.second]));
if (!queryAttribute.next()) return {};
if (!queryAttribute.next())
return {};

auto rowId = (AttrId) queryAttribute.getInt(0);
auto type = (AttrType) queryAttribute.getInt(1);

switch (type) {
case AttrType::Placeholder:
return {{rowId, placeholder_t()}};
case AttrType::FullAttrs: {
// FIXME: expensive, should separate this out.
std::vector<Symbol> attrs;
auto queryAttributes(state->queryAttributes.use()(rowId));
while (queryAttributes.next())
attrs.emplace_back(symbols.create(queryAttributes.getStr(0)));
return {{rowId, attrs}};
}
case AttrType::String: {
NixStringContext context;
if (!queryAttribute.isNull(3))
for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
context.insert(NixStringContextElem::parse(s));
return {{rowId, string_t{queryAttribute.getStr(2), context}}};
}
case AttrType::Bool:
return {{rowId, queryAttribute.getInt(2) != 0}};
case AttrType::Int:
return {{rowId, int_t{NixInt{queryAttribute.getInt(2)}}}};
case AttrType::ListOfStrings:
return {{rowId, tokenizeString<std::vector<std::string>>(queryAttribute.getStr(2), "\t")}};
case AttrType::Missing:
return {{rowId, missing_t()}};
case AttrType::Misc:
return {{rowId, misc_t()}};
case AttrType::Failed:
return {{rowId, failed_t()}};
default:
throw Error("unexpected type in evaluation cache");
case AttrType::Placeholder:
return {{rowId, placeholder_t()}};
case AttrType::FullAttrs: {
// FIXME: expensive, should separate this out.
std::vector<Symbol> attrs;
auto queryAttributes(state->queryAttributes.use()(rowId));
while (queryAttributes.next())
attrs.emplace_back(symbols.create(queryAttributes.getStr(0)));
return {{rowId, attrs}};
}
case AttrType::String: {
NixStringContext context;
if (!queryAttribute.isNull(3))
for (auto & s : tokenizeString<std::vector<std::string>>(queryAttribute.getStr(3), ";"))
context.insert(NixStringContextElem::parse(s));
return {{rowId, string_t{queryAttribute.getStr(2), context}}};
}
case AttrType::Bool:
return {{rowId, queryAttribute.getInt(2) != 0}};
case AttrType::Int:
return {{rowId, int_t{NixInt{queryAttribute.getInt(2)}}}};
case AttrType::ListOfStrings:
return {{rowId, tokenizeString<std::vector<std::string>>(queryAttribute.getStr(2), "\t")}};
case AttrType::Missing:
return {{rowId, missing_t()}};
case AttrType::Misc:
return {{rowId, misc_t()}};
case AttrType::Failed:
return {{rowId, failed_t()}};
default:
throw Error("unexpected type in evaluation cache");
}
}
};

static std::shared_ptr<AttrDb> makeAttrDb(
const StoreDirConfig & cfg,
const Hash & fingerprint,
SymbolTable & symbols)
static std::shared_ptr<AttrDb> makeAttrDb(const StoreDirConfig & cfg, const Hash & fingerprint, SymbolTable & symbols)
{
try {
return std::make_shared<AttrDb>(cfg, fingerprint, symbols);
@ -357,9 +295,7 @@ static std::shared_ptr<AttrDb> makeAttrDb(
}

EvalCache::EvalCache(
std::optional<std::reference_wrapper<const Hash>> useCache,
EvalState & state,
RootLoader rootLoader)
std::optional<std::reference_wrapper<const Hash>> useCache, EvalState & state, RootLoader rootLoader)
: db(useCache ? makeAttrDb(*state.store, *useCache, state.symbols) : nullptr)
, state(state)
, rootLoader(rootLoader)
@ -381,11 +317,10 @@ ref<AttrCursor> EvalCache::getRoot()
}

AttrCursor::AttrCursor(
ref<EvalCache> root,
Parent parent,
Value * value,
std::optional<std::pair<AttrId, AttrValue>> && cachedValue)
: root(root), parent(parent), cachedValue(std::move(cachedValue))
ref<EvalCache> root, Parent parent, Value * value, std::optional<std::pair<AttrId, AttrValue>> && cachedValue)
: root(root)
, parent(parent)
, cachedValue(std::move(cachedValue))
{
if (value)
_value = allocRootValue(value);
@ -470,13 +405,11 @@ Value & AttrCursor::forceValue()

if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
if (v.type() == nString)
cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()),
string_t{v.c_str(), {}}};
cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()), string_t{v.c_str(), {}}};
else if (v.type() == nPath) {
auto path = v.path().path;
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
}
else if (v.type() == nBool)
} else if (v.type() == nBool)
cachedValue = {root->db->setBool(getKey(), v.boolean()), v.boolean()};
else if (v.type() == nInt)
cachedValue = {root->db->setInt(getKey(), v.integer().value), int_t{v.integer()}};
@ -518,14 +451,14 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)
else if (std::get_if<failed_t>(&attr->second))
throw CachedEvalError(ref(shared_from_this()), name);
else
return std::make_shared<AttrCursor>(root,
std::make_pair(ref(shared_from_this()), name), nullptr, std::move(attr));
return std::make_shared<AttrCursor>(
root, std::make_pair(ref(shared_from_this()), name), nullptr, std::move(attr));
}
// Incomplete attrset, so need to fall thru and
// evaluate to see whether 'name' exists
} else
return nullptr;
//error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
// error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
}
}

@ -533,7 +466,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name)

if (v.type() != nAttrs)
return nullptr;
//error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
// error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();

auto attr = v.attrs()->get(name);

@ -618,17 +551,15 @@ string_t AttrCursor::getStringWithContext()
if (auto s = std::get_if<string_t>(&cachedValue->second)) {
bool valid = true;
for (auto & c : s->second) {
const StorePath & path = std::visit(overloaded {
[&](const NixStringContextElem::DrvDeep & d) -> const StorePath & {
return d.drvPath;
const StorePath & path = std::visit(
overloaded{
[&](const NixStringContextElem::DrvDeep & d) -> const StorePath & { return d.drvPath; },
[&](const NixStringContextElem::Built & b) -> const StorePath & {
return b.drvPath->getBaseStorePath();
},
[&](const NixStringContextElem::Opaque & o) -> const StorePath & { return o.path; },
},
[&](const NixStringContextElem::Built & b) -> const StorePath & {
return b.drvPath->getBaseStorePath();
},
[&](const NixStringContextElem::Opaque & o) -> const StorePath & {
return o.path;
},
}, c.raw);
c.raw);
if (!root->state.store->isValidPath(path)) {
valid = false;
break;
@ -649,8 +580,7 @@ string_t AttrCursor::getStringWithContext()
NixStringContext context;
copyContext(v, context);
return {v.c_str(), std::move(context)};
}
else if (v.type() == nPath)
} else if (v.type() == nPath)
return {v.path().to_string(), {}};
else
root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr(), showType(v)).debugThrow();
@ -722,7 +652,8 @@ std::vector<std::string> AttrCursor::getListOfStrings()
std::vector<std::string> res;

for (auto & elem : v.listItems())
res.push_back(std::string(root->state.forceStringNoCtx(*elem, noPos, "while evaluating an attribute for caching")));
res.push_back(
std::string(root->state.forceStringNoCtx(*elem, noPos, "while evaluating an attribute for caching")));

if (root->db)
cachedValue = {root->db->setListOfStrings(getKey(), res), res};
@ -778,8 +709,8 @@ StorePath AttrCursor::forceDerivation()
been garbage-collected. So force it to be regenerated. */
aDrvPath->forceValue();
if (!root->state.store->isValidPath(drvPath))
throw Error("don't know how to recreate store derivation '%s'!",
root->state.store->printStorePath(drvPath));
throw Error(
"don't know how to recreate store derivation '%s'!", root->state.store->printStorePath(drvPath));
}
return drvPath;
}
@ -44,12 +44,13 @@ EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrame(const Env & env, const Expr
// NOTE: This is abusing side-effects.
// TODO: check compatibility with nested debugger calls.
// TODO: What side-effects??
error.state.debugTraces.push_front(DebugTrace{
.pos = expr.getPos(),
.expr = expr,
.env = env,
.hint = HintFmt("Fake frame for debugging purposes"),
.isError = true});
error.state.debugTraces.push_front(
DebugTrace{
.pos = expr.getPos(),
.expr = expr,
.env = env,
.hint = HintFmt("Fake frame for debugging purposes"),
.isError = true});
return *this;
}

@ -96,7 +97,8 @@ template<class T>
void EvalErrorBuilder<T>::panic()
{
logError(error.info());
printError("This is a bug! An unexpected condition occurred, causing the Nix evaluator to have to stop. If you could share a reproducible example or a core dump, please open an issue at https://github.com/NixOS/nix/issues");
printError(
"This is a bug! An unexpected condition occurred, causing the Nix evaluator to have to stop. If you could share a reproducible example or a core dump, please open an issue at https://github.com/NixOS/nix/issues");
abort();
}
@ -19,12 +19,14 @@ Strings EvalSettings::parseNixPath(const std::string & s)
auto start2 = p;

while (p != s.end() && *p != ':') {
if (*p == '=') start2 = p + 1;
if (*p == '=')
start2 = p + 1;
++p;
}

if (p == s.end()) {
if (p != start) res.push_back(std::string(start, p));
if (p != start)
res.push_back(std::string(start, p));
break;
}

@ -32,10 +34,12 @@ Strings EvalSettings::parseNixPath(const std::string & s)
auto prefix = std::string(start2, s.end());
if (EvalSettings::isPseudoUrl(prefix) || hasPrefix(prefix, "flake:")) {
++p;
while (p != s.end() && *p != ':') ++p;
while (p != s.end() && *p != ':')
++p;
}
res.push_back(std::string(start, p));
if (p == s.end()) break;
if (p == s.end())
break;
}

++p;
@ -75,11 +79,14 @@ Strings EvalSettings::getDefaultNixPath()

bool EvalSettings::isPseudoUrl(std::string_view s)
{
if (s.compare(0, 8, "channel:") == 0) return true;
if (s.compare(0, 8, "channel:") == 0)
return true;
size_t pos = s.find("://");
if (pos == std::string::npos) return false;
if (pos == std::string::npos)
return false;
std::string scheme(s, 0, pos);
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git"
|| scheme == "s3" || scheme == "ssh";
}

std::string EvalSettings::resolvePseudoUrl(std::string_view url)
@ -98,9 +105,7 @@ const std::string & EvalSettings::getCurrentSystem() const

Path getNixDefExpr()
{
return settings.useXDGBaseDirectories
? getStateDir() + "/defexpr"
: getHome() + "/.nix-defexpr";
return settings.useXDGBaseDirectories ? getStateDir() + "/defexpr" : getHome() + "/.nix-defexpr";
}

} // namespace nix
src/libexpr/eval.cc (1171): file diff suppressed because it is too large
@ -3,13 +3,16 @@

namespace nix {

FunctionCallTrace::FunctionCallTrace(const Pos & pos) : pos(pos) {
FunctionCallTrace::FunctionCallTrace(const Pos & pos)
: pos(pos)
{
auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
printMsg(lvlInfo, "function-trace entered %1% at %2%", pos, ns.count());
}

FunctionCallTrace::~FunctionCallTrace() {
FunctionCallTrace::~FunctionCallTrace()
{
auto duration = std::chrono::high_resolution_clock::now().time_since_epoch();
auto ns = std::chrono::duration_cast<std::chrono::nanoseconds>(duration);
printMsg(lvlInfo, "function-trace exited %1% at %2%", pos, ns.count());
@ -7,18 +7,19 @@
#include <cstring>
#include <regex>

namespace nix {

PackageInfo::PackageInfo(EvalState & state, std::string attrPath, const Bindings * attrs)
: state(&state), attrs(attrs), attrPath(std::move(attrPath))
: state(&state)
, attrs(attrs)
, attrPath(std::move(attrPath))
{
}

PackageInfo::PackageInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs)
: state(&state), attrs(nullptr), attrPath("")
: state(&state)
, attrs(nullptr)
, attrPath("")
{
auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs);

@ -31,10 +32,7 @@ PackageInfo::PackageInfo(EvalState & state, ref<Store> store, const std::string
if (selectedOutputs.size() > 1)
throw Error("building more than one derivation output is not supported, in '%s'", drvPathWithOutputs);

outputName =
selectedOutputs.empty()
? getOr(drv.env, "outputName", "out")
: *selectedOutputs.begin();
outputName = selectedOutputs.empty() ? getOr(drv.env, "outputName", "out") : *selectedOutputs.begin();

auto i = drv.outputs.find(outputName);
if (i == drv.outputs.end())
@ -44,34 +42,36 @@ PackageInfo::PackageInfo(EvalState & state, ref<Store> store, const std::string
outPath = {output.path(*store, drv.name, outputName)};
}

std::string PackageInfo::queryName() const
{
if (name == "" && attrs) {
auto i = attrs->find(state->sName);
if (i == attrs->end()) state->error<TypeError>("derivation name missing").debugThrow();
if (i == attrs->end())
state->error<TypeError>("derivation name missing").debugThrow();
name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation");
}
return name;
}

std::string PackageInfo::querySystem() const
{
if (system == "" && attrs) {
auto i = attrs->find(state->sSystem);
system = i == attrs->end() ? "unknown" : state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation");
system =
i == attrs->end()
? "unknown"
: state->forceStringNoCtx(*i->value, i->pos, "while evaluating the 'system' attribute of a derivation");
}
return system;
}

std::optional<StorePath> PackageInfo::queryDrvPath() const
{
if (!drvPath && attrs) {
if (auto i = attrs->get(state->sDrvPath)) {
NixStringContext context;
auto found = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the 'drvPath' attribute of a derivation");
auto found = state->coerceToStorePath(
i->pos, *i->value, context, "while evaluating the 'drvPath' attribute of a derivation");
try {
found.requireDerivation();
} catch (Error & e) {
@ -85,7 +85,6 @@ std::optional<StorePath> PackageInfo::queryDrvPath() const
return drvPath.value_or(std::nullopt);
}

StorePath PackageInfo::requireDrvPath() const
{
if (auto drvPath = queryDrvPath())
@ -93,21 +92,20 @@ StorePath PackageInfo::requireDrvPath() const
throw Error("derivation does not contain a 'drvPath' attribute");
}

StorePath PackageInfo::queryOutPath() const
{
if (!outPath && attrs) {
auto i = attrs->find(state->sOutPath);
NixStringContext context;
if (i != attrs->end())
outPath = state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the output path of a derivation");
outPath = state->coerceToStorePath(
i->pos, *i->value, context, "while evaluating the output path of a derivation");
}
if (!outPath)
throw UnimplementedError("CA derivations are not yet supported");
return *outPath;
}

PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall)
{
if (outputs.empty()) {
@ -118,19 +116,25 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT

/* For each output... */
for (auto elem : i->value->listItems()) {
std::string output(state->forceStringNoCtx(*elem, i->pos, "while evaluating the name of an output of a derivation"));
std::string output(
state->forceStringNoCtx(*elem, i->pos, "while evaluating the name of an output of a derivation"));

if (withPaths) {
/* Evaluate the corresponding set. */
auto out = attrs->get(state->symbols.create(output));
if (!out) continue; // FIXME: throw error?
if (!out)
continue; // FIXME: throw error?
state->forceAttrs(*out->value, i->pos, "while evaluating an output of a derivation");

/* And evaluate its ‘outPath’ attribute. */
auto outPath = out->value->attrs()->get(state->sOutPath);
if (!outPath) continue; // FIXME: throw error?
if (!outPath)
continue; // FIXME: throw error?
NixStringContext context;
outputs.emplace(output, state->coerceToStorePath(outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation"));
outputs.emplace(
output,
state->coerceToStorePath(
outPath->pos, *outPath->value, context, "while evaluating an output path of a derivation"));
} else
outputs.emplace(output, std::nullopt);
}
@ -142,7 +146,8 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT
return outputs;

const Attr * i;
if (attrs && (i = attrs->get(state->sOutputSpecified)) && state->forceBool(*i->value, i->pos, "while evaluating the 'outputSpecified' attribute of a derivation")) {
if (attrs && (i = attrs->get(state->sOutputSpecified))
&& state->forceBool(*i->value, i->pos, "while evaluating the 'outputSpecified' attribute of a derivation")) {
Outputs result;
auto out = outputs.find(queryOutputName());
if (out == outputs.end())
@ -154,95 +159,103 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT
else {
/* Check for `meta.outputsToInstall` and return `outputs` reduced to that. */
const Value * outTI = queryMeta("outputsToInstall");
if (!outTI) return outputs;
if (!outTI)
return outputs;
auto errMsg = Error("this derivation has bad 'meta.outputsToInstall'");
/* ^ this shows during `nix-env -i` right under the bad derivation */
if (!outTI->isList()) throw errMsg;
/* ^ this shows during `nix-env -i` right under the bad derivation */
if (!outTI->isList())
throw errMsg;
Outputs result;
for (auto elem : outTI->listItems()) {
if (elem->type() != nString) throw errMsg;
if (elem->type() != nString)
throw errMsg;
auto out = outputs.find(elem->c_str());
if (out == outputs.end()) throw errMsg;
if (out == outputs.end())
throw errMsg;
result.insert(*out);
}
return result;
}
}

std::string PackageInfo::queryOutputName() const
{
if (outputName == "" && attrs) {
auto i = attrs->get(state->sOutputName);
outputName = i ? state->forceStringNoCtx(*i->value, noPos, "while evaluating the output name of a derivation") : "";
outputName =
i ? state->forceStringNoCtx(*i->value, noPos, "while evaluating the output name of a derivation") : "";
}
return outputName;
}

const Bindings * PackageInfo::getMeta()
{
if (meta) return meta;
if (!attrs) return 0;
if (meta)
return meta;
if (!attrs)
return 0;
auto a = attrs->get(state->sMeta);
if (!a) return 0;
if (!a)
return 0;
state->forceAttrs(*a->value, a->pos, "while evaluating the 'meta' attribute of a derivation");
meta = a->value->attrs();
return meta;
}

StringSet PackageInfo::queryMetaNames()
{
StringSet res;
if (!getMeta()) return res;
if (!getMeta())
return res;
for (auto & i : *meta)
res.emplace(state->symbols[i.name]);
return res;
}

bool PackageInfo::checkMeta(Value & v)
{
state->forceValue(v, v.determinePos(noPos));
if (v.type() == nList) {
for (auto elem : v.listItems())
if (!checkMeta(*elem)) return false;
if (!checkMeta(*elem))
return false;
return true;
}
else if (v.type() == nAttrs) {
if (v.attrs()->get(state->sOutPath)) return false;
} else if (v.type() == nAttrs) {
if (v.attrs()->get(state->sOutPath))
return false;
for (auto & i : *v.attrs())
if (!checkMeta(*i.value)) return false;
if (!checkMeta(*i.value))
return false;
return true;
}
else return v.type() == nInt || v.type() == nBool || v.type() == nString ||
v.type() == nFloat;
} else
return v.type() == nInt || v.type() == nBool || v.type() == nString || v.type() == nFloat;
}

Value * PackageInfo::queryMeta(const std::string & name)
{
if (!getMeta()) return 0;
if (!getMeta())
return 0;
auto a = meta->get(state->symbols.create(name));
if (!a || !checkMeta(*a->value)) return 0;
if (!a || !checkMeta(*a->value))
return 0;
return a->value;
}

std::string PackageInfo::queryMetaString(const std::string & name)
{
Value * v = queryMeta(name);
if (!v || v->type() != nString) return "";
if (!v || v->type() != nString)
return "";
return v->c_str();
}

NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def)
{
Value * v = queryMeta(name);
if (!v) return def;
if (v->type() == nInt) return v->integer();
if (!v)
return def;
if (v->type() == nInt)
return v->integer();
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
integer meta fields. */
@ -255,8 +268,10 @@ NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def)
NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def)
{
Value * v = queryMeta(name);
if (!v) return def;
if (v->type() == nFloat) return v->fpoint();
if (!v)
return def;
if (v->type() == nFloat)
return v->fpoint();
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
float meta fields. */
@ -266,22 +281,24 @@ NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def)
return def;
}

bool PackageInfo::queryMetaBool(const std::string & name, bool def)
{
Value * v = queryMeta(name);
if (!v) return def;
if (v->type() == nBool) return v->boolean();
if (!v)
return def;
if (v->type() == nBool)
return v->boolean();
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
Boolean meta fields. */
if (v->string_view() == "true") return true;
if (v->string_view() == "false") return false;
if (v->string_view() == "true")
return true;
if (v->string_view() == "false")
return false;
}
return def;
}

void PackageInfo::setMeta(const std::string & name, Value * v)
{
getMeta();
@ -291,30 +308,35 @@ void PackageInfo::setMeta(const std::string & name, Value * v)
for (auto i : *meta)
if (i.name != sym)
attrs.insert(i);
if (v) attrs.insert(sym, v);
if (v)
attrs.insert(sym, v);
meta = attrs.finish();
}

/* Cache for already considered attrsets. */
typedef std::set<const Bindings *> Done;

/* Evaluate value `v'. If it evaluates to a set of type `derivation',
then put information about it in `drvs' (unless it's already in `done').
The result boolean indicates whether it makes sense
for the caller to recursively search for derivations in `v'. */
static bool getDerivation(EvalState & state, Value & v,
const std::string & attrPath, PackageInfos & drvs, Done & done,
static bool getDerivation(
EvalState & state,
Value & v,
const std::string & attrPath,
PackageInfos & drvs,
Done & done,
bool ignoreAssertionFailures)
{
try {
state.forceValue(v, v.determinePos(noPos));
if (!state.isDerivation(v)) return true;
if (!state.isDerivation(v))
return true;

/* Remove spurious duplicates (e.g., a set like `rec { x =
derivation {...}; y = x;}'. */
if (!done.insert(v.attrs()).second) return false;
if (!done.insert(v.attrs()).second)
return false;

PackageInfo drv(state, attrPath, v.attrs());

@ -325,42 +347,44 @@ static bool getDerivation(EvalState & state, Value & v,
return false;

} catch (AssertionError & e) {
if (ignoreAssertionFailures) return false;
if (ignoreAssertionFailures)
return false;
throw;
}
}

std::optional<PackageInfo> getDerivation(EvalState & state, Value & v,
bool ignoreAssertionFailures)
std::optional<PackageInfo> getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures)
{
Done done;
PackageInfos drvs;
getDerivation(state, v, "", drvs, done, ignoreAssertionFailures);
if (drvs.size() != 1) return {};
if (drvs.size() != 1)
return {};
return std::move(drvs.front());
}

static std::string addToPath(const std::string & s1, std::string_view s2)
{
return s1.empty() ? std::string(s2) : s1 + "." + s2;
}

static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*");

static void getDerivations(EvalState & state, Value & vIn,
const std::string & pathPrefix, Bindings & autoArgs,
PackageInfos & drvs, Done & done,
static void getDerivations(
EvalState & state,
Value & vIn,
const std::string & pathPrefix,
Bindings & autoArgs,
PackageInfos & drvs,
Done & done,
bool ignoreAssertionFailures)
{
Value v;
state.autoCallFunction(autoArgs, vIn, v);

/* Process the expression. */
if (!getDerivation(state, v, pathPrefix, drvs, done, ignoreAssertionFailures)) ;
if (!getDerivation(state, v, pathPrefix, drvs, done, ignoreAssertionFailures))
;

else if (v.type() == nAttrs) {

@ -388,8 +412,11 @@ static void getDerivations(EvalState & state, Value & vIn,
`recurseForDerivations = true' attribute. */
if (i->value->type() == nAttrs) {
auto j = i->value->attrs()->get(state.sRecurseForDerivations);
if (j && state.forceBool(*j->value, j->pos, "while evaluating the attribute `recurseForDerivations`"))
getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
if (j
&& state.forceBool(
*j->value, j->pos, "while evaluating the attribute `recurseForDerivations`"))
getDerivations(
state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
}
}
} catch (Error & e) {
@ -411,13 +438,16 @@ static void getDerivations(EvalState & state, Value & vIn,
state.error<TypeError>("expression does not evaluate to a derivation (or a set or list of those)").debugThrow();
}

void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures)
void getDerivations(
EvalState & state,
Value & v,
const std::string & pathPrefix,
Bindings & autoArgs,
PackageInfos & drvs,
bool ignoreAssertionFailures)
{
Done done;
getDerivations(state, v, pathPrefix, autoArgs, drvs, done, ignoreAssertionFailures);
}

}
@ -11,11 +11,8 @@ namespace nix {
MakeError(AttrPathNotFound, Error);
MakeError(NoPositionInfo, Error);

std::pair<Value *, PosIdx> findAlongAttrPath(
EvalState & state,
const std::string & attrPath,
Bindings & autoArgs,
Value & vIn);
std::pair<Value *, PosIdx>
findAlongAttrPath(EvalState & state, const std::string & attrPath, Bindings & autoArgs, Value & vIn);

/**
* Heuristic to find the filename and lineno or a nix value.
@ -8,7 +8,6 @@

namespace nix {

class EvalState;
struct Value;

@ -25,15 +24,18 @@ struct Attr
PosIdx pos;
Value * value;
Attr(Symbol name, Value * value, PosIdx pos = noPos)
: name(name), pos(pos), value(value) { };
Attr() { };
auto operator <=> (const Attr & a) const
: name(name)
, pos(pos)
, value(value) {};
Attr() {};
auto operator<=>(const Attr & a) const
{
return name <=> a.name;
}
};

static_assert(sizeof(Attr) == 2 * sizeof(uint32_t) + sizeof(Value *),
static_assert(
sizeof(Attr) == 2 * sizeof(uint32_t) + sizeof(Value *),
"performance of the evaluator is highly sensitive to the size of Attr. "
"avoid introducing any padding into Attr if at all possible, and do not "
"introduce new fields that need not be present for almost every instance.");
@ -54,13 +56,23 @@ private:
size_t size_, capacity_;
Attr attrs[0];

Bindings(size_t capacity) : size_(0), capacity_(capacity) { }
Bindings(size_t capacity)
: size_(0)
, capacity_(capacity)
{
}
Bindings(const Bindings & bindings) = delete;

public:
size_t size() const { return size_; }
size_t size() const
{
return size_;
}

bool empty() const { return !size_; }
bool empty() const
{
return !size_;
}

typedef Attr * iterator;

@ -76,7 +88,8 @@ public:
{
Attr key(name, 0);
const_iterator i = std::lower_bound(begin(), end(), key);
if (i != end() && i->name == name) return i;
if (i != end() && i->name == name)
return i;
return end();
}

@ -84,15 +97,28 @@ public:
{
Attr key(name, 0);
const_iterator i = std::lower_bound(begin(), end(), key);
if (i != end() && i->name == name) return &*i;
if (i != end() && i->name == name)
return &*i;
return nullptr;
}

iterator begin() { return &attrs[0]; }
iterator end() { return &attrs[size_]; }
iterator begin()
{
return &attrs[0];
}
iterator end()
{
return &attrs[size_];
}

const_iterator begin() const { return &attrs[0]; }
const_iterator end() const { return &attrs[size_]; }
const_iterator begin() const
{
return &attrs[0];
}
const_iterator end() const
{
return &attrs[size_];
}

Attr & operator[](size_t pos)
{
@ -106,7 +132,10 @@ public:

void sort();

size_t capacity() const { return capacity_; }
size_t capacity() const
{
return capacity_;
}

/**
* Returns the attributes in lexicographically sorted order.
@ -143,8 +172,10 @@ public:
EvalState & state;

BindingsBuilder(EvalState & state, Bindings * bindings)
: bindings(bindings), state(state)
{ }
: bindings(bindings)
, state(state)
{
}

void insert(Symbol name, Value * value, PosIdx pos = noPos)
{
@ -43,10 +43,7 @@ class EvalCache : public std::enable_shared_from_this<EvalCache>

public:

EvalCache(
std::optional<std::reference_wrapper<const Hash>> useCache,
EvalState & state,
RootLoader rootLoader);
EvalCache(std::optional<std::reference_wrapper<const Hash>> useCache, EvalState & state, RootLoader rootLoader);

ref<AttrCursor> getRoot();
};
@ -63,11 +60,18 @@ enum AttrType {
Int = 8,
};

struct placeholder_t {};
struct missing_t {};
struct misc_t {};
struct failed_t {};
struct int_t { NixInt x; };
struct placeholder_t
{};
struct missing_t
{};
struct misc_t
{};
struct failed_t
{};
struct int_t
{
NixInt x;
};
typedef uint64_t AttrId;
typedef std::pair<AttrId, Symbol> AttrKey;
typedef std::pair<std::string, NixStringContext> string_t;
@ -81,8 +85,8 @@ typedef std::variant<
failed_t,
bool,
int_t,
std::vector<std::string>
> AttrValue;
std::vector<std::string>>
AttrValue;

class AttrCursor : public std::enable_shared_from_this<AttrCursor>
{
@ -23,11 +23,11 @@ inline void * allocBytes(size_t n)
#else
p = calloc(n, 1);
#endif
if (!p) throw std::bad_alloc();
if (!p)
throw std::bad_alloc();
return p;
}

[[gnu::always_inline]]
Value * EvalState::allocValue()
{
@ -38,7 +38,8 @@ Value * EvalState::allocValue()
have to explicitly clear the first word of every object we take. */
if (!*valueAllocCache) {
*valueAllocCache = GC_malloc_many(sizeof(Value));
if (!*valueAllocCache) throw std::bad_alloc();
if (!*valueAllocCache)
throw std::bad_alloc();
}

/* GC_NEXT is a convenience macro for accessing the first word of an object.
@ -54,7 +55,6 @@ Value * EvalState::allocValue()
return (Value *) p;
}

[[gnu::always_inline]]
Env & EvalState::allocEnv(size_t size)
{
@ -68,7 +68,8 @@ Env & EvalState::allocEnv(size_t size)
/* see allocValue for explanations. */
if (!*env1AllocCache) {
*env1AllocCache = GC_malloc_many(sizeof(Env) + sizeof(Value *));
if (!*env1AllocCache) throw std::bad_alloc();
if (!*env1AllocCache)
throw std::bad_alloc();
}

void * p = *env1AllocCache;
@ -84,7 +85,6 @@ Env & EvalState::allocEnv(size_t size)
return *env;
}

[[gnu::always_inline]]
void EvalState::forceValue(Value & v, const PosIdx pos)
{
@ -94,7 +94,7 @@ void EvalState::forceValue(Value & v, const PosIdx pos)
Expr * expr = v.payload.thunk.expr;
try {
v.mkBlackhole();
//checkInterrupt();
// checkInterrupt();
if (env) [[likely]]
expr->eval(*this, *env, v);
else
@ -104,50 +104,43 @@ void EvalState::forceValue(Value & v, const PosIdx pos)
tryFixupBlackHolePos(v, pos);
throw;
}
}
else if (v.isApp())
} else if (v.isApp())
callFunction(*v.payload.app.left, *v.payload.app.right, v, pos);
}

[[gnu::always_inline]]
inline void EvalState::forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx)
{
forceAttrs(v, [&]() { return pos; }, errorCtx);
}

template <typename Callable>
template<typename Callable>
[[gnu::always_inline]]
inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
{
PosIdx pos = getPos();
forceValue(v, pos);
if (v.type() != nAttrs) {
error<TypeError>(
"expected a set but found %1%: %2%",
showType(v),
ValuePrinter(*this, v, errorPrintOptions)
).withTrace(pos, errorCtx).debugThrow();
error<TypeError>("expected a set but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions))
.withTrace(pos, errorCtx)
.debugThrow();
}
}

[[gnu::always_inline]]
inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
{
forceValue(v, pos);
if (!v.isList()) {
error<TypeError>(
"expected a list but found %1%: %2%",
showType(v),
ValuePrinter(*this, v, errorPrintOptions)
).withTrace(pos, errorCtx).debugThrow();
error<TypeError>("expected a list but found %1%: %2%", showType(v), ValuePrinter(*this, v, errorPrintOptions))
.withTrace(pos, errorCtx)
.debugThrow();
}
}

[[gnu::always_inline]]
inline CallDepth EvalState::addCallDepth(const PosIdx pos) {
inline CallDepth EvalState::addCallDepth(const PosIdx pos)
{
if (callDepth > settings.maxCallDepth)
error<EvalBaseError>("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow();
@ -73,7 +73,9 @@ struct EvalSettings : Config
|
||||
)"};
|
||||
|
||||
Setting<Strings> nixPath{
|
||||
this, {}, "nix-path",
|
||||
this,
|
||||
{},
|
||||
"nix-path",
|
||||
R"(
|
||||
List of search paths to use for [lookup path](@docroot@/language/constructs/lookup-path.md) resolution.
|
||||
This setting determines the value of
|
||||
@ -106,10 +108,14 @@ struct EvalSettings : Config
|
||||
> If [restricted evaluation](@docroot@/command-ref/conf-file.md#conf-restrict-eval) is enabled, the default value is empty.
|
||||
>
|
||||
> If [pure evaluation](#conf-pure-eval) is enabled, `builtins.nixPath` *always* evaluates to the empty list `[ ]`.
|
||||
)", {}, false};
|
||||
)",
|
||||
{},
|
||||
false};
|
||||
|
||||
Setting<std::string> currentSystem{
|
||||
this, "", "eval-system",
|
||||
this,
|
||||
"",
|
||||
"eval-system",
|
||||
R"(
|
||||
This option defines
|
||||
[`builtins.currentSystem`](@docroot@/language/builtins.md#builtins-currentSystem)
|
||||
@ -129,7 +135,9 @@ struct EvalSettings : Config
|
||||
const std::string & getCurrentSystem() const;
|
||||
|
||||
Setting<bool> restrictEval{
|
||||
this, false, "restrict-eval",
|
||||
this,
|
||||
false,
|
||||
"restrict-eval",
|
||||
R"(
|
||||
If set to `true`, the Nix evaluator will not allow access to any
|
||||
files outside of
|
||||
@ -138,7 +146,10 @@ struct EvalSettings : Config
|
||||
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
|
||||
)"};
|
||||
|
||||
Setting<bool> pureEval{this, false, "pure-eval",
Setting<bool> pureEval{
this,
false,
"pure-eval",
R"(
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:

@ -148,11 +159,12 @@ struct EvalSettings : Config
- [`builtins.currentTime`](@docroot@/language/builtins.md#builtins-currentTime)
- [`builtins.nixPath`](@docroot@/language/builtins.md#builtins-nixPath)
- [`builtins.storePath`](@docroot@/language/builtins.md#builtins-storePath)
)"
};
)"};
|
||||
|
||||
Setting<bool> enableImportFromDerivation{
|
||||
this, true, "allow-import-from-derivation",
|
||||
this,
|
||||
true,
|
||||
"allow-import-from-derivation",
|
||||
R"(
|
||||
By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md).
|
||||
|
||||
@ -162,7 +174,10 @@ struct EvalSettings : Config
|
||||
regardless of the state of the store.
|
||||
)"};
|
||||
|
||||
Setting<Strings> allowedUris{this, {}, "allowed-uris",
Setting<Strings> allowedUris{
this,
{},
"allowed-uris",
R"(
A list of URI prefixes to which access is allowed in restricted
evaluation mode. For example, when set to
@ -175,7 +190,10 @@ struct EvalSettings : Config
- or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme.
)"};
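The hunk above elides most of the match-rule list for `allowed-uris`, so only the scheme rule is visible. A rough, hedged sketch of the kind of check that description implies (the literal-prefix behaviour is assumed, and the function name is illustrative):

    #include <string>
    #include <string_view>
    #include <vector>

    // Rough sketch only: a "https:"-style entry matches any URI of that scheme;
    // other entries are treated here as literal prefixes of the URI.
    static bool uriIsAllowed(std::string_view uri, const std::vector<std::string> & allowedUris)
    {
        for (std::string_view prefix : allowedUris) {
            if (uri.size() >= prefix.size() && uri.substr(0, prefix.size()) == prefix)
                return true;
        }
        return false;
    }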
|
||||
|
||||
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
|
||||
Setting<bool> traceFunctionCalls{
|
||||
this,
|
||||
false,
|
||||
"trace-function-calls",
|
||||
R"(
|
||||
If set to `true`, the Nix evaluator will trace every function call.
|
||||
Nix will print a log message at the "vomit" level for every function
|
||||
@ -193,26 +211,38 @@ struct EvalSettings : Config
|
||||
`flamegraph.pl`.
|
||||
)"};
|
||||
|
||||
Setting<bool> useEvalCache{this, true, "eval-cache",
|
||||
Setting<bool> useEvalCache{
|
||||
this,
|
||||
true,
|
||||
"eval-cache",
|
||||
R"(
|
||||
Whether to use the flake evaluation cache.
|
||||
Certain commands won't have to evaluate when invoked for the second time with a particular version of a flake.
|
||||
Intermediate results are not cached.
|
||||
)"};
|
||||
|
||||
Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
|
||||
Setting<bool> ignoreExceptionsDuringTry{
|
||||
this,
|
||||
false,
|
||||
"ignore-try",
|
||||
R"(
|
||||
If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in
|
||||
debug mode (using the --debugger flag). By default the debugger will pause on all exceptions.
|
||||
)"};
|
||||
|
||||
Setting<bool> traceVerbose{this, false, "trace-verbose",
|
||||
Setting<bool> traceVerbose{
|
||||
this,
|
||||
false,
|
||||
"trace-verbose",
|
||||
"Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
|
||||
|
||||
Setting<unsigned int> maxCallDepth{this, 10000, "max-call-depth",
|
||||
"The maximum function call depth to allow before erroring."};
|
||||
Setting<unsigned int> maxCallDepth{
|
||||
this, 10000, "max-call-depth", "The maximum function call depth to allow before erroring."};
|
||||
|
||||
Setting<bool> builtinsTraceDebugger{this, false, "debugger-on-trace",
|
||||
Setting<bool> builtinsTraceDebugger{
|
||||
this,
|
||||
false,
|
||||
"debugger-on-trace",
|
||||
R"(
|
||||
If set to true and the `--debugger` flag is given, the following functions
|
||||
will enter the debugger like [`builtins.break`](@docroot@/language/builtins.md#builtins-break).
|
||||
@ -225,7 +255,10 @@ struct EvalSettings : Config
|
||||
This is useful for debugging warnings in third-party Nix code.
|
||||
)"};
|
||||
|
||||
Setting<bool> builtinsDebuggerOnWarn{this, false, "debugger-on-warn",
|
||||
Setting<bool> builtinsDebuggerOnWarn{
|
||||
this,
|
||||
false,
|
||||
"debugger-on-warn",
|
||||
R"(
|
||||
If set to true and the `--debugger` flag is given, [`builtins.warn`](@docroot@/language/builtins.md#builtins-warn)
|
||||
will enter the debugger like [`builtins.break`](@docroot@/language/builtins.md#builtins-break).
|
||||
@ -235,7 +268,10 @@ struct EvalSettings : Config
|
||||
Use [`debugger-on-trace`](#conf-debugger-on-trace) to also enter the debugger on legacy warnings that are logged with [`builtins.trace`](@docroot@/language/builtins.md#builtins-trace).
|
||||
)"};
|
||||
|
||||
Setting<bool> builtinsAbortOnWarn{this, false, "abort-on-warn",
|
||||
Setting<bool> builtinsAbortOnWarn{
|
||||
this,
|
||||
false,
|
||||
"abort-on-warn",
|
||||
R"(
|
||||
If set to true, [`builtins.warn`](@docroot@/language/builtins.md#builtins-warn) will throw an error when logging a warning.
|
||||
|
||||
|
@ -33,7 +33,9 @@ namespace nix {
|
||||
constexpr size_t maxPrimOpArity = 8;
|
||||
|
||||
class Store;
|
||||
namespace fetchers { struct Settings; }
|
||||
namespace fetchers {
|
||||
struct Settings;
|
||||
}
|
||||
struct EvalSettings;
|
||||
class EvalState;
|
||||
class StorePath;
|
||||
@ -41,28 +43,32 @@ struct SingleDerivedPath;
|
||||
enum RepairFlag : bool;
|
||||
struct MemorySourceAccessor;
|
||||
namespace eval_cache {
|
||||
class EvalCache;
|
||||
class EvalCache;
|
||||
}
|
||||
|
||||
/**
* Increments a count on construction and decrements on destruction.
*/
class CallDepth {
size_t & count;
class CallDepth
{
size_t & count;

public:
CallDepth(size_t & count) : count(count) {
++count;
}
~CallDepth() {
--count;
}
CallDepth(size_t & count)
: count(count)
{
++count;
}
~CallDepth()
{
--count;
}
};
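`CallDepth` is a plain RAII counter: a recursive entry point keeps one instance alive on its stack frame and the count falls back automatically on every exit path, including exceptions. A small self-contained sketch of the same pattern together with a depth limit in the spirit of the `max-call-depth` check shown earlier (generic names, not Nix's actual call sites):

    #include <cstddef>
    #include <stdexcept>

    struct DepthGuard {
        size_t & count;
        DepthGuard(size_t & count) : count(count) { ++count; }
        ~DepthGuard() { --count; }
    };

    static size_t depth = 0;

    static void recurse(int n)
    {
        if (depth > 10000) // plays the role of the max-call-depth check above
            throw std::runtime_error("stack overflow; max-call-depth exceeded");
        DepthGuard guard(depth); // decremented on every return path
        if (n > 0)
            recurse(n - 1);
    }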
|
||||
|
||||
/**
|
||||
* Function that implements a primop.
|
||||
*/
|
||||
using PrimOpFun = void(EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
||||
using PrimOpFun = void(EvalState & state, const PosIdx pos, Value ** args, Value & v);
|
||||
|
||||
/**
|
||||
* Info about a primitive operation, and its implementation
|
||||
@ -147,7 +153,9 @@ struct Constant
|
||||
bool impureOnly = false;
|
||||
};
|
||||
|
||||
typedef std::map<std::string, Value *, std::less<std::string>, traceable_allocator<std::pair<const std::string, Value *> > > ValMap;
|
||||
typedef std::
|
||||
map<std::string, Value *, std::less<std::string>, traceable_allocator<std::pair<const std::string, Value *>>>
|
||||
ValMap;
|
||||
|
||||
typedef std::unordered_map<PosIdx, DocComment> DocCommentMap;
|
||||
|
||||
@ -157,23 +165,25 @@ struct Env
|
||||
Value * values[0];
|
||||
};
|
||||
|
||||
void printEnvBindings(const EvalState &es, const Expr & expr, const Env & env);
|
||||
void printEnvBindings(const EvalState & es, const Expr & expr, const Env & env);
|
||||
void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env, int lvl = 0);
|
||||
|
||||
std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & env);
|
||||
|
||||
void copyContext(const Value & v, NixStringContext & context, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
void copyContext(
|
||||
const Value & v,
|
||||
NixStringContext & context,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
std::string printValue(EvalState & state, Value & v);
|
||||
std::ostream & operator << (std::ostream & os, const ValueType t);
|
||||
|
||||
std::ostream & operator<<(std::ostream & os, const ValueType t);
|
||||
|
||||
struct RegexCache;
|
||||
|
||||
std::shared_ptr<RegexCache> makeRegexCache();
|
||||
|
||||
struct DebugTrace {
|
||||
struct DebugTrace
|
||||
{
|
||||
/* WARNING: Converting PosIdx -> Pos should be done with extra care. This is
|
||||
due to the fact that operator[] of PosTable is incredibly expensive. */
|
||||
std::variant<Pos, PosIdx> pos;
|
||||
@ -206,19 +216,11 @@ public:
|
||||
SymbolTable symbols;
|
||||
PosTable positions;
|
||||
|
||||
const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
|
||||
sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
|
||||
sFile, sLine, sColumn, sFunctor, sToString,
|
||||
sRight, sWrong, sStructuredAttrs,
|
||||
sAllowedReferences, sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites,
|
||||
sMaxSize, sMaxClosureSize,
|
||||
sBuilder, sArgs,
|
||||
sContentAddressed, sImpure,
|
||||
sOutputHash, sOutputHashAlgo, sOutputHashMode,
|
||||
sRecurseForDerivations,
|
||||
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
|
||||
sPrefix,
|
||||
sOutputSpecified;
|
||||
const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue, sSystem, sOverrides, sOutputs, sOutputName,
|
||||
sIgnoreNulls, sFile, sLine, sColumn, sFunctor, sToString, sRight, sWrong, sStructuredAttrs, sAllowedReferences,
|
||||
sAllowedRequisites, sDisallowedReferences, sDisallowedRequisites, sMaxSize, sMaxClosureSize, sBuilder, sArgs,
|
||||
sContentAddressed, sImpure, sOutputHash, sOutputHashAlgo, sOutputHashMode, sRecurseForDerivations, sDescription,
|
||||
sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath, sPrefix, sOutputSpecified;
|
||||
|
||||
const Expr::AstSymbols exprSymbols;
|
||||
|
||||
@ -303,19 +305,20 @@ public:
|
||||
/**
|
||||
* Debugger
|
||||
*/
|
||||
ReplExitStatus (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
|
||||
ReplExitStatus (*debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
|
||||
bool debugStop;
|
||||
bool inDebugger = false;
|
||||
int trylevel;
|
||||
std::list<DebugTrace> debugTraces;
|
||||
std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;
|
||||
std::map<const Expr *, const std::shared_ptr<const StaticEnv>> exprEnvs;
|
||||
const std::shared_ptr<const StaticEnv> getStaticEnv(const Expr & expr) const
|
||||
{
|
||||
auto i = exprEnvs.find(&expr);
|
||||
if (i != exprEnvs.end())
|
||||
return i->second;
|
||||
else
|
||||
return std::shared_ptr<const StaticEnv>();;
|
||||
return std::shared_ptr<const StaticEnv>();
|
||||
;
|
||||
}
|
||||
|
||||
/** Whether a debug repl can be started. If `false`, `runDebugRepl(error)` will return without starting a repl. */
|
||||
@ -334,7 +337,8 @@ public:
|
||||
|
||||
template<class T, typename... Args>
|
||||
[[nodiscard, gnu::noinline]]
|
||||
EvalErrorBuilder<T> & error(const Args & ... args) {
|
||||
EvalErrorBuilder<T> & error(const Args &... args)
|
||||
{
|
||||
// `EvalErrorBuilder::debugThrow` performs the corresponding `delete`.
|
||||
return *new EvalErrorBuilder<T>(*this, args...);
|
||||
}
|
||||
@ -353,13 +357,25 @@ private:
|
||||
/**
|
||||
* A cache from path names to parse trees.
|
||||
*/
|
||||
typedef std::unordered_map<SourcePath, Expr *, std::hash<SourcePath>, std::equal_to<SourcePath>, traceable_allocator<std::pair<const SourcePath, Expr *>>> FileParseCache;
|
||||
typedef std::unordered_map<
|
||||
SourcePath,
|
||||
Expr *,
|
||||
std::hash<SourcePath>,
|
||||
std::equal_to<SourcePath>,
|
||||
traceable_allocator<std::pair<const SourcePath, Expr *>>>
|
||||
FileParseCache;
|
||||
FileParseCache fileParseCache;
|
||||
|
||||
/**
|
||||
* A cache from path names to values.
|
||||
*/
|
||||
typedef std::unordered_map<SourcePath, Value, std::hash<SourcePath>, std::equal_to<SourcePath>, traceable_allocator<std::pair<const SourcePath, Value>>> FileEvalCache;
|
||||
typedef std::unordered_map<
|
||||
SourcePath,
|
||||
Value,
|
||||
std::hash<SourcePath>,
|
||||
std::equal_to<SourcePath>,
|
||||
traceable_allocator<std::pair<const SourcePath, Value>>>
|
||||
FileEvalCache;
|
||||
FileEvalCache fileEvalCache;
|
||||
|
||||
/**
|
||||
@ -399,7 +415,10 @@ public:
|
||||
std::shared_ptr<Store> buildStore = nullptr);
|
||||
~EvalState();
|
||||
|
||||
LookupPath getLookupPath() { return lookupPath; }
|
||||
LookupPath getLookupPath()
|
||||
{
|
||||
return lookupPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a `SourcePath` that refers to `path` in the root
|
||||
@ -480,9 +499,7 @@ public:
|
||||
*
|
||||
* If it is not found, return `std::nullopt`.
|
||||
*/
|
||||
std::optional<SourcePath> resolveLookupPathPath(
|
||||
const LookupPath::Path & elem,
|
||||
bool initAccessControl = false);
|
||||
std::optional<SourcePath> resolveLookupPathPath(const LookupPath::Path & elem, bool initAccessControl = false);
|
||||
|
||||
/**
|
||||
* Evaluate an expression to normal form
|
||||
@ -524,7 +541,7 @@ public:
|
||||
|
||||
void forceAttrs(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
template <typename Callable>
|
||||
template<typename Callable>
|
||||
inline void forceAttrs(Value & v, Callable getPos, std::string_view errorCtx);
|
||||
|
||||
inline void forceList(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
@ -533,15 +550,20 @@ public:
|
||||
*/
|
||||
void forceFunction(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
std::string_view forceString(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
std::string_view forceString(
|
||||
Value & v,
|
||||
NixStringContext & context,
|
||||
const PosIdx pos,
|
||||
std::string_view errorCtx,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
template<typename... Args>
|
||||
[[gnu::noinline]]
|
||||
void addErrorTrace(Error & e, const Args & ... formatArgs) const;
|
||||
void addErrorTrace(Error & e, const Args &... formatArgs) const;
|
||||
template<typename... Args>
|
||||
[[gnu::noinline]]
|
||||
void addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const;
|
||||
void addErrorTrace(Error & e, const PosIdx pos, const Args &... formatArgs) const;
|
||||
|
||||
public:
|
||||
/**
|
||||
@ -550,8 +572,8 @@ public:
|
||||
*/
|
||||
bool isDerivation(Value & v);
|
||||
|
||||
std::optional<std::string> tryAttrsToString(const PosIdx pos, Value & v,
|
||||
NixStringContext & context, bool coerceMore = false, bool copyToStore = true);
|
||||
std::optional<std::string> tryAttrsToString(
|
||||
const PosIdx pos, Value & v, NixStringContext & context, bool coerceMore = false, bool copyToStore = true);
|
||||
|
||||
/**
|
||||
* String coercion.
|
||||
@ -561,9 +583,13 @@ public:
|
||||
* booleans and lists to a string. If `copyToStore` is set,
|
||||
* referenced paths are copied to the Nix store as a side effect.
|
||||
*/
|
||||
BackedStringView coerceToString(const PosIdx pos, Value & v, NixStringContext & context,
|
||||
BackedStringView coerceToString(
|
||||
const PosIdx pos,
|
||||
Value & v,
|
||||
NixStringContext & context,
|
||||
std::string_view errorCtx,
|
||||
bool coerceMore = false, bool copyToStore = true,
|
||||
bool coerceMore = false,
|
||||
bool copyToStore = true,
|
||||
bool canonicalizePath = true);
|
||||
|
||||
StorePath copyPathToStore(NixStringContext & context, const SourcePath & path);
|
||||
@ -585,7 +611,11 @@ public:
|
||||
/**
|
||||
* Part of `coerceToSingleDerivedPath()` without any store IO which is exposed for unit testing only.
|
||||
*/
|
||||
std::pair<SingleDerivedPath, std::string_view> coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
std::pair<SingleDerivedPath, std::string_view> coerceToSingleDerivedPathUnchecked(
|
||||
const PosIdx pos,
|
||||
Value & v,
|
||||
std::string_view errorCtx,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
* Coerce to `SingleDerivedPath`.
|
||||
@ -625,7 +655,13 @@ public:
|
||||
/**
|
||||
* Internal primops not exposed to the user.
|
||||
*/
|
||||
std::unordered_map<std::string, Value *, std::hash<std::string>, std::equal_to<std::string>, traceable_allocator<std::pair<const std::string, Value *>>> internalPrimOps;
|
||||
std::unordered_map<
|
||||
std::string,
|
||||
Value *,
|
||||
std::hash<std::string>,
|
||||
std::equal_to<std::string>,
|
||||
traceable_allocator<std::pair<const std::string, Value *>>>
|
||||
internalPrimOps;
|
||||
|
||||
/**
|
||||
* Name and documentation about every constant.
|
||||
@ -699,7 +735,8 @@ private:
|
||||
std::shared_ptr<StaticEnv> & staticEnv);
|
||||
|
||||
/**
|
||||
* Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack.
|
||||
* Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run
|
||||
* out of system stack.
|
||||
*/
|
||||
size_t callDepth = 0;
|
||||
|
||||
@ -762,7 +799,7 @@ public:
|
||||
/**
|
||||
* Return a boolean `Value *` without allocating.
|
||||
*/
|
||||
Value *getBool(bool b);
|
||||
Value * getBool(bool b);
|
||||
|
||||
void mkThunk_(Value & v, Expr * expr);
|
||||
void mkPos(Value & v, PosIdx pos);
|
||||
@ -806,9 +843,7 @@ public:
|
||||
*
|
||||
* A combination of `mkStorePathString` and `mkOutputString`.
|
||||
*/
|
||||
void mkSingleDerivedPathString(
|
||||
const SingleDerivedPath & p,
|
||||
Value & v);
|
||||
void mkSingleDerivedPathString(const SingleDerivedPath & p, Value & v);
|
||||
|
||||
void concatLists(Value & v, size_t nrLists, Value * const * lists, const PosIdx pos, std::string_view errorCtx);
|
||||
|
||||
@ -839,22 +874,22 @@ public:
* @param[out] maybePaths if not nullptr, all built or referenced store paths will be added to this set
* @return a mapping from the placeholders used to construct the associated value to their final store path.
*/
[[nodiscard]] StringMap realiseContext(const NixStringContext & context, StorePathSet * maybePaths = nullptr, bool isIFD = true);
[[nodiscard]] StringMap
realiseContext(const NixStringContext & context, StorePathSet * maybePaths = nullptr, bool isIFD = true);

/**
* Realise the given string with context, and return the string with outputs instead of downstream output placeholders.
* Realise the given string with context, and return the string with outputs instead of downstream output
* placeholders.
* @param[in] str the string to realise
* @param[out] paths all referenced store paths will be added to this set
* @return the realised string
* @throw EvalError if the value is not a string, path or derivation (see `coerceToString`)
*/
std::string realiseString(Value & str, StorePathSet * storePathsOutMaybe, bool isIFD = true, const PosIdx pos = noPos);
std::string
realiseString(Value & str, StorePathSet * storePathsOutMaybe, bool isIFD = true, const PosIdx pos = noPos);
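Going by the declaration and doc comment alone, a caller that wants both the realised string and the store paths it depends on might look like the following. This is only a usage sketch; the surrounding `EvalState` plumbing (`state`, `strValue`, `pos`) and error handling are assumed, not shown here:

    // Illustrative call site, based only on the signature documented above.
    StorePathSet paths;
    std::string s = state.realiseString(strValue, &paths, /* isIFD */ true, pos);
    // `s` now contains concrete store paths instead of placeholders,
    // and `paths` lists everything that was built or substituted for it.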
|
||||
|
||||
/* Call the binary path filter predicate used builtins.path etc. */
|
||||
bool callPathFilter(
|
||||
Value * filterFun,
|
||||
const SourcePath & path,
|
||||
PosIdx pos);
|
||||
bool callPathFilter(Value * filterFun, const SourcePath & path, PosIdx pos);
|
||||
|
||||
DocComment getDocCommentForPos(PosIdx pos);
|
||||
|
||||
@ -873,8 +908,7 @@ private:
|
||||
* Like `mkSingleDerivedPathStringRaw` but just creates a raw string
|
||||
* Value, which would also have a string context.
|
||||
*/
|
||||
std::string mkSingleDerivedPathStringRaw(
|
||||
const SingleDerivedPath & p);
|
||||
std::string mkSingleDerivedPathStringRaw(const SingleDerivedPath & p);
|
||||
|
||||
unsigned long nrEnvs = 0;
|
||||
unsigned long nrValuesInEnvs = 0;
|
||||
@ -911,15 +945,16 @@ private:
|
||||
friend struct ExprFloat;
|
||||
friend struct ExprPath;
|
||||
friend struct ExprSelect;
|
||||
friend void prim_getAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
||||
friend void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
||||
friend void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
||||
friend void prim_getAttr(EvalState & state, const PosIdx pos, Value ** args, Value & v);
|
||||
friend void prim_match(EvalState & state, const PosIdx pos, Value ** args, Value & v);
|
||||
friend void prim_split(EvalState & state, const PosIdx pos, Value ** args, Value & v);
|
||||
|
||||
friend struct Value;
|
||||
friend class ListBuilder;
|
||||
};
|
||||
|
||||
struct DebugTraceStacker {
|
||||
struct DebugTraceStacker
|
||||
{
|
||||
DebugTraceStacker(EvalState & evalState, DebugTrace t);
|
||||
~DebugTraceStacker()
|
||||
{
|
||||
|
@ -9,13 +9,13 @@ namespace nix {
|
||||
/**
|
||||
* A GC compatible vector that may used a reserved portion of `nItems` on the stack instead of allocating on the heap.
|
||||
*/
|
||||
template <typename T, size_t nItems>
|
||||
template<typename T, size_t nItems>
|
||||
using SmallVector = boost::container::small_vector<T, nItems, traceable_allocator<T>>;
|
||||
|
||||
/**
|
||||
* A vector of value pointers. See `SmallVector`.
|
||||
*/
|
||||
template <size_t nItems>
|
||||
template<size_t nItems>
|
||||
using SmallValueVector = SmallVector<Value *, nItems>;
|
||||
|
||||
/**
|
||||
@ -23,7 +23,7 @@ using SmallValueVector = SmallVector<Value *, nItems>;
|
||||
*
|
||||
* See also `SmallValueVector`.
|
||||
*/
|
||||
template <size_t nItems>
|
||||
template<size_t nItems>
|
||||
using SmallTemporaryValueVector = SmallVector<Value, nItems>;
|
||||
|
||||
}
|
||||
|
@ -7,7 +7,6 @@
|
||||
#include <string>
|
||||
#include <map>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
@ -33,7 +32,7 @@ private:
|
||||
*/
|
||||
bool failed = false;
|
||||
|
||||
const Bindings * attrs = nullptr, * meta = nullptr;
|
||||
const Bindings *attrs = nullptr, *meta = nullptr;
|
||||
|
||||
const Bindings * getMeta();
|
||||
|
||||
@ -45,7 +44,8 @@ public:
|
||||
*/
|
||||
std::string attrPath;
|
||||
|
||||
PackageInfo(EvalState & state) : state(&state) { };
|
||||
PackageInfo(EvalState & state)
|
||||
: state(&state) {};
|
||||
PackageInfo(EvalState & state, std::string attrPath, const Bindings * attrs);
|
||||
PackageInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs);
|
||||
|
||||
@ -74,28 +74,43 @@ public:
|
||||
MetaValue queryMetaInfo(EvalState & state, const string & name) const;
|
||||
*/
|
||||
|
||||
void setName(const std::string & s) { name = s; }
|
||||
void setDrvPath(StorePath path) { drvPath = {{std::move(path)}}; }
|
||||
void setOutPath(StorePath path) { outPath = {{std::move(path)}}; }
|
||||
void setName(const std::string & s)
|
||||
{
|
||||
name = s;
|
||||
}
|
||||
void setDrvPath(StorePath path)
|
||||
{
|
||||
drvPath = {{std::move(path)}};
|
||||
}
|
||||
void setOutPath(StorePath path)
|
||||
{
|
||||
outPath = {{std::move(path)}};
|
||||
}
|
||||
|
||||
void setFailed() { failed = true; };
|
||||
bool hasFailed() { return failed; };
|
||||
void setFailed()
|
||||
{
|
||||
failed = true;
|
||||
};
|
||||
bool hasFailed()
|
||||
{
|
||||
return failed;
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
typedef std::list<PackageInfo, traceable_allocator<PackageInfo>> PackageInfos;
|
||||
|
||||
|
||||
/**
|
||||
* If value `v` denotes a derivation, return a PackageInfo object
|
||||
* describing it. Otherwise return nothing.
|
||||
*/
|
||||
std::optional<PackageInfo> getDerivation(EvalState & state,
|
||||
Value & v, bool ignoreAssertionFailures);
|
||||
std::optional<PackageInfo> getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures);
|
||||
|
||||
void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
|
||||
Bindings & autoArgs, PackageInfos & drvs,
|
||||
void getDerivations(
|
||||
EvalState & state,
|
||||
Value & v,
|
||||
const std::string & pathPrefix,
|
||||
Bindings & autoArgs,
|
||||
PackageInfos & drvs,
|
||||
bool ignoreAssertionFailures);
|
||||
|
||||
|
||||
}
|
||||
|
@ -19,7 +19,8 @@ struct StaticEnv
struct Value;

/**
* A documentation comment, in the sense of [RFC 145](https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md)
* A documentation comment, in the sense of [RFC
* 145](https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md)
*
* Note that this does not implement the following:
* - argument attribute names ("formals"): TBD
@ -34,7 +35,8 @@ struct Value;
* `f: g: final: prev: <...>`. The parameters `final` and `prev` are part
* of the overlay concept, while distracting from the function's purpose.
*/
struct DocComment {
struct DocComment
{

/**
* Start of the comment, including the opening, ie `/` and `**`.
@ -53,10 +55,12 @@ struct DocComment {
* therefore baking optionality into it is also useful, to avoiding the memory
* overhead of `std::optional`.
*/
operator bool() const { return static_cast<bool>(begin); }
operator bool() const
{
return static_cast<bool>(begin);
}

std::string getInnerText(const PosTable & positions) const;

};
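Because optionality is baked into `DocComment` itself, as the comment above explains, a default-constructed instance converts to `false` and callers can test for presence without `std::optional`. A hedged usage sketch, assuming the `DocComment` and `PosTable` declarations shown in this header (the function is invented for illustration):

    #include <iostream>

    // Print a doc string if one is attached; do nothing for the "absent" default value.
    void printDocString(const PosTable & positions, const DocComment & doc)
    {
        if (!doc) // default-constructed DocComment: no comment attached
            return;
        std::cout << doc.getInnerText(positions) << "\n";
    }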
|
||||
|
||||
/**
|
||||
@ -66,52 +70,63 @@ struct AttrName
|
||||
{
|
||||
Symbol symbol;
|
||||
Expr * expr = nullptr;
|
||||
AttrName(Symbol s) : symbol(s) {};
|
||||
AttrName(Expr * e) : expr(e) {};
|
||||
AttrName(Symbol s)
|
||||
: symbol(s) {};
|
||||
AttrName(Expr * e)
|
||||
: expr(e) {};
|
||||
};
|
||||
|
||||
typedef std::vector<AttrName> AttrPath;
|
||||
|
||||
std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath);
|
||||
|
||||
|
||||
/* Abstract syntax of Nix expressions. */
|
||||
|
||||
struct Expr
|
||||
{
|
||||
struct AstSymbols {
|
||||
struct AstSymbols
|
||||
{
|
||||
Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body;
|
||||
};
|
||||
|
||||
|
||||
static unsigned long nrExprs;
|
||||
Expr() {
|
||||
Expr()
|
||||
{
|
||||
nrExprs++;
|
||||
}
|
||||
virtual ~Expr() { };
|
||||
virtual ~Expr() {};
|
||||
virtual void show(const SymbolTable & symbols, std::ostream & str) const;
|
||||
virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
||||
virtual void eval(EvalState & state, Env & env, Value & v);
|
||||
virtual Value * maybeThunk(EvalState & state, Env & env);
|
||||
virtual void setName(Symbol name);
|
||||
virtual void setDocComment(DocComment docComment) { };
|
||||
virtual PosIdx getPos() const { return noPos; }
|
||||
virtual void setDocComment(DocComment docComment) {};
|
||||
virtual PosIdx getPos() const
|
||||
{
|
||||
return noPos;
|
||||
}
|
||||
|
||||
// These are temporary methods to be used only in parser.y
|
||||
virtual void resetCursedOr() { };
|
||||
virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) { };
|
||||
virtual void resetCursedOr() {};
|
||||
virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) {};
|
||||
};
|
||||
|
||||
#define COMMON_METHODS \
|
||||
#define COMMON_METHODS \
|
||||
void show(const SymbolTable & symbols, std::ostream & str) const override; \
|
||||
void eval(EvalState & state, Env & env, Value & v) override; \
|
||||
void eval(EvalState & state, Env & env, Value & v) override; \
|
||||
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override;
|
||||
|
||||
struct ExprInt : Expr
|
||||
{
|
||||
Value v;
|
||||
ExprInt(NixInt n) { v.mkInt(n); };
|
||||
ExprInt(NixInt::Inner n) { v.mkInt(n); };
|
||||
ExprInt(NixInt n)
|
||||
{
|
||||
v.mkInt(n);
|
||||
};
|
||||
ExprInt(NixInt::Inner n)
|
||||
{
|
||||
v.mkInt(n);
|
||||
};
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
COMMON_METHODS
|
||||
};
|
||||
@ -119,7 +134,10 @@ struct ExprInt : Expr
|
||||
struct ExprFloat : Expr
|
||||
{
|
||||
Value v;
|
||||
ExprFloat(NixFloat nf) { v.mkFloat(nf); };
|
||||
ExprFloat(NixFloat nf)
|
||||
{
|
||||
v.mkFloat(nf);
|
||||
};
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
COMMON_METHODS
|
||||
};
|
||||
@ -128,7 +146,11 @@ struct ExprString : Expr
|
||||
{
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprString(std::string &&s) : s(std::move(s)) { v.mkString(this->s.data()); };
|
||||
ExprString(std::string && s)
|
||||
: s(std::move(s))
|
||||
{
|
||||
v.mkString(this->s.data());
|
||||
};
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
COMMON_METHODS
|
||||
};
|
||||
@ -138,7 +160,9 @@ struct ExprPath : Expr
|
||||
ref<SourceAccessor> accessor;
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprPath(ref<SourceAccessor> accessor, std::string s) : accessor(accessor), s(std::move(s))
|
||||
ExprPath(ref<SourceAccessor> accessor, std::string s)
|
||||
: accessor(accessor)
|
||||
, s(std::move(s))
|
||||
{
|
||||
v.mkPath(&*accessor, this->s.c_str());
|
||||
}
|
||||
@ -170,10 +194,16 @@ struct ExprVar : Expr
|
||||
Level level = 0;
|
||||
Displacement displ = 0;
|
||||
|
||||
ExprVar(Symbol name) : name(name) { };
|
||||
ExprVar(const PosIdx & pos, Symbol name) : pos(pos), name(name) { };
|
||||
ExprVar(Symbol name)
|
||||
: name(name) {};
|
||||
ExprVar(const PosIdx & pos, Symbol name)
|
||||
: pos(pos)
|
||||
, name(name) {};
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
PosIdx getPos() const override { return pos; }
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
@ -184,7 +214,8 @@ struct ExprVar : Expr
|
||||
*/
|
||||
struct ExprInheritFrom : ExprVar
|
||||
{
|
||||
ExprInheritFrom(PosIdx pos, Displacement displ): ExprVar(pos, {})
|
||||
ExprInheritFrom(PosIdx pos, Displacement displ)
|
||||
: ExprVar(pos, {})
|
||||
{
|
||||
this->level = 0;
|
||||
this->displ = displ;
|
||||
@ -197,11 +228,24 @@ struct ExprInheritFrom : ExprVar
|
||||
struct ExprSelect : Expr
|
||||
{
|
||||
PosIdx pos;
|
||||
Expr * e, * def;
|
||||
Expr *e, *def;
|
||||
AttrPath attrPath;
|
||||
ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
|
||||
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def)
|
||||
: pos(pos)
|
||||
, e(e)
|
||||
, def(def)
|
||||
, attrPath(std::move(attrPath)) {};
|
||||
ExprSelect(const PosIdx & pos, Expr * e, Symbol name)
|
||||
: pos(pos)
|
||||
, e(e)
|
||||
, def(0)
|
||||
{
|
||||
attrPath.push_back(AttrName(name));
|
||||
};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate the `a.b.c` part of `a.b.c.d`. This exists mostly for the purpose of :doc in the repl.
|
||||
@ -209,7 +253,8 @@ struct ExprSelect : Expr
|
||||
* @param[out] attrs The attribute set that should contain the last attribute name (if it exists).
|
||||
* @return The last attribute name in `attrPath`
|
||||
*
|
||||
* @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not exist.
|
||||
* @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not
|
||||
* exist.
|
||||
*/
|
||||
Symbol evalExceptFinalSelect(EvalState & state, Env & env, Value & attrs);
|
||||
|
||||
@ -220,8 +265,13 @@ struct ExprOpHasAttr : Expr
|
||||
{
|
||||
Expr * e;
|
||||
AttrPath attrPath;
|
||||
ExprOpHasAttr(Expr * e, AttrPath attrPath) : e(e), attrPath(std::move(attrPath)) { };
|
||||
PosIdx getPos() const override { return e->getPos(); }
|
||||
ExprOpHasAttr(Expr * e, AttrPath attrPath)
|
||||
: e(e)
|
||||
, attrPath(std::move(attrPath)) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return e->getPos();
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
@ -229,7 +279,8 @@ struct ExprAttrs : Expr
|
||||
{
|
||||
bool recursive;
|
||||
PosIdx pos;
|
||||
struct AttrDef {
|
||||
struct AttrDef
|
||||
{
|
||||
enum class Kind {
|
||||
/** `attr = expr;` */
|
||||
Plain,
|
||||
@ -244,8 +295,10 @@ struct ExprAttrs : Expr
|
||||
PosIdx pos;
|
||||
Displacement displ = 0; // displacement
|
||||
AttrDef(Expr * e, const PosIdx & pos, Kind kind = Kind::Plain)
|
||||
: kind(kind), e(e), pos(pos) { };
|
||||
AttrDef() { };
|
||||
: kind(kind)
|
||||
, e(e)
|
||||
, pos(pos) {};
|
||||
AttrDef() {};
|
||||
|
||||
template<typename T>
|
||||
const T & chooseByKind(const T & plain, const T & inherited, const T & inheritedFrom) const
|
||||
@ -264,21 +317,29 @@ struct ExprAttrs : Expr
|
||||
typedef std::map<Symbol, AttrDef> AttrDefs;
|
||||
AttrDefs attrs;
|
||||
std::unique_ptr<std::vector<Expr *>> inheritFromExprs;
|
||||
struct DynamicAttrDef {
|
||||
Expr * nameExpr, * valueExpr;
|
||||
struct DynamicAttrDef
|
||||
{
|
||||
Expr *nameExpr, *valueExpr;
|
||||
PosIdx pos;
|
||||
DynamicAttrDef(Expr * nameExpr, Expr * valueExpr, const PosIdx & pos)
|
||||
: nameExpr(nameExpr), valueExpr(valueExpr), pos(pos) { };
|
||||
: nameExpr(nameExpr)
|
||||
, valueExpr(valueExpr)
|
||||
, pos(pos) {};
|
||||
};
|
||||
typedef std::vector<DynamicAttrDef> DynamicAttrDefs;
|
||||
DynamicAttrDefs dynamicAttrs;
|
||||
ExprAttrs(const PosIdx &pos) : recursive(false), pos(pos) { };
|
||||
ExprAttrs() : recursive(false) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
ExprAttrs(const PosIdx & pos)
|
||||
: recursive(false)
|
||||
, pos(pos) {};
|
||||
ExprAttrs()
|
||||
: recursive(false) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
COMMON_METHODS
|
||||
|
||||
std::shared_ptr<const StaticEnv> bindInheritSources(
|
||||
EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
||||
std::shared_ptr<const StaticEnv> bindInheritSources(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
||||
Env * buildInheritFromEnv(EvalState & state, Env & up);
|
||||
void showBindings(const SymbolTable & symbols, std::ostream & str) const;
|
||||
};
|
||||
@ -286,7 +347,7 @@ struct ExprAttrs : Expr
|
||||
struct ExprList : Expr
|
||||
{
|
||||
std::vector<Expr *> elems;
|
||||
ExprList() { };
|
||||
ExprList() {};
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
|
||||
@ -311,19 +372,18 @@ struct Formals

bool has(Symbol arg) const
{
auto it = std::lower_bound(formals.begin(), formals.end(), arg,
[] (const Formal & f, const Symbol & sym) { return f.name < sym; });
auto it = std::lower_bound(
formals.begin(), formals.end(), arg, [](const Formal & f, const Symbol & sym) { return f.name < sym; });
return it != formals.end() && it->name == arg;
}

std::vector<Formal> lexicographicOrder(const SymbolTable & symbols) const
{
std::vector<Formal> result(formals.begin(), formals.end());
std::sort(result.begin(), result.end(),
[&] (const Formal & a, const Formal & b) {
std::string_view sa = symbols[a.name], sb = symbols[b.name];
return sa < sb;
});
std::sort(result.begin(), result.end(), [&](const Formal & a, const Formal & b) {
std::string_view sa = symbols[a.name], sb = symbols[b.name];
return sa < sb;
});
return result;
}
};
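`Formals::has` relies on the formals being kept sorted by name so membership can be tested with a single `std::lower_bound` using an asymmetric comparator. The same idea in a self-contained form, with generic types standing in for Nix's `Formal`/`Symbol`:

    #include <algorithm>
    #include <string>
    #include <vector>

    struct Param { std::string name; int pos; };

    // Assumes `params` is kept sorted by `name`, as validateFormals does for formals.
    static bool hasParam(const std::vector<Param> & params, const std::string & name)
    {
        auto it = std::lower_bound(
            params.begin(), params.end(), name,
            [](const Param & p, const std::string & n) { return p.name < n; });
        return it != params.end() && it->name == name;
    }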
|
||||
@ -338,17 +398,26 @@ struct ExprLambda : Expr
|
||||
DocComment docComment;
|
||||
|
||||
ExprLambda(PosIdx pos, Symbol arg, Formals * formals, Expr * body)
|
||||
: pos(pos), arg(arg), formals(formals), body(body)
|
||||
{
|
||||
};
|
||||
: pos(pos)
|
||||
, arg(arg)
|
||||
, formals(formals)
|
||||
, body(body) {};
|
||||
ExprLambda(PosIdx pos, Formals * formals, Expr * body)
|
||||
: pos(pos), formals(formals), body(body)
|
||||
: pos(pos)
|
||||
, formals(formals)
|
||||
, body(body)
|
||||
{
|
||||
}
|
||||
void setName(Symbol name) override;
|
||||
std::string showNamePos(const EvalState & state) const;
|
||||
inline bool hasFormals() const { return formals != nullptr; }
|
||||
PosIdx getPos() const override { return pos; }
|
||||
inline bool hasFormals() const
|
||||
{
|
||||
return formals != nullptr;
|
||||
}
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
virtual void setDocComment(DocComment docComment) override;
|
||||
COMMON_METHODS
|
||||
};
|
||||
@ -360,12 +429,23 @@ struct ExprCall : Expr
|
||||
PosIdx pos;
|
||||
std::optional<PosIdx> cursedOrEndPos; // used during parsing to warn about https://github.com/NixOS/nix/issues/11118
|
||||
ExprCall(const PosIdx & pos, Expr * fun, std::vector<Expr *> && args)
|
||||
: fun(fun), args(args), pos(pos), cursedOrEndPos({})
|
||||
{ }
|
||||
: fun(fun)
|
||||
, args(args)
|
||||
, pos(pos)
|
||||
, cursedOrEndPos({})
|
||||
{
|
||||
}
|
||||
ExprCall(const PosIdx & pos, Expr * fun, std::vector<Expr *> && args, PosIdx && cursedOrEndPos)
|
||||
: fun(fun), args(args), pos(pos), cursedOrEndPos(cursedOrEndPos)
|
||||
{ }
|
||||
PosIdx getPos() const override { return pos; }
|
||||
: fun(fun)
|
||||
, args(args)
|
||||
, pos(pos)
|
||||
, cursedOrEndPos(cursedOrEndPos)
|
||||
{
|
||||
}
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
virtual void resetCursedOr() override;
|
||||
virtual void warnIfCursedOr(const SymbolTable & symbols, const PosTable & positions) override;
|
||||
COMMON_METHODS
|
||||
@ -375,90 +455,132 @@ struct ExprLet : Expr
|
||||
{
|
||||
ExprAttrs * attrs;
|
||||
Expr * body;
|
||||
ExprLet(ExprAttrs * attrs, Expr * body) : attrs(attrs), body(body) { };
|
||||
ExprLet(ExprAttrs * attrs, Expr * body)
|
||||
: attrs(attrs)
|
||||
, body(body) {};
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
struct ExprWith : Expr
|
||||
{
|
||||
PosIdx pos;
|
||||
Expr * attrs, * body;
|
||||
Expr *attrs, *body;
|
||||
size_t prevWith;
|
||||
ExprWith * parentWith;
|
||||
ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
ExprWith(const PosIdx & pos, Expr * attrs, Expr * body)
|
||||
: pos(pos)
|
||||
, attrs(attrs)
|
||||
, body(body) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
struct ExprIf : Expr
|
||||
{
|
||||
PosIdx pos;
|
||||
Expr * cond, * then, * else_;
|
||||
ExprIf(const PosIdx & pos, Expr * cond, Expr * then, Expr * else_) : pos(pos), cond(cond), then(then), else_(else_) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
Expr *cond, *then, *else_;
|
||||
ExprIf(const PosIdx & pos, Expr * cond, Expr * then, Expr * else_)
|
||||
: pos(pos)
|
||||
, cond(cond)
|
||||
, then(then)
|
||||
, else_(else_) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
struct ExprAssert : Expr
|
||||
{
|
||||
PosIdx pos;
|
||||
Expr * cond, * body;
|
||||
ExprAssert(const PosIdx & pos, Expr * cond, Expr * body) : pos(pos), cond(cond), body(body) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
Expr *cond, *body;
|
||||
ExprAssert(const PosIdx & pos, Expr * cond, Expr * body)
|
||||
: pos(pos)
|
||||
, cond(cond)
|
||||
, body(body) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
struct ExprOpNot : Expr
|
||||
{
|
||||
Expr * e;
|
||||
ExprOpNot(Expr * e) : e(e) { };
|
||||
PosIdx getPos() const override { return e->getPos(); }
|
||||
ExprOpNot(Expr * e)
|
||||
: e(e) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return e->getPos();
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
#define MakeBinOp(name, s) \
|
||||
struct name : Expr \
|
||||
{ \
|
||||
PosIdx pos; \
|
||||
Expr * e1, * e2; \
|
||||
name(Expr * e1, Expr * e2) : e1(e1), e2(e2) { }; \
|
||||
name(const PosIdx & pos, Expr * e1, Expr * e2) : pos(pos), e1(e1), e2(e2) { }; \
|
||||
void show(const SymbolTable & symbols, std::ostream & str) const override \
|
||||
{ \
|
||||
str << "("; e1->show(symbols, str); str << " " s " "; e2->show(symbols, str); str << ")"; \
|
||||
} \
|
||||
#define MakeBinOp(name, s) \
|
||||
struct name : Expr \
|
||||
{ \
|
||||
PosIdx pos; \
|
||||
Expr *e1, *e2; \
|
||||
name(Expr * e1, Expr * e2) \
|
||||
: e1(e1) \
|
||||
, e2(e2) {}; \
|
||||
name(const PosIdx & pos, Expr * e1, Expr * e2) \
|
||||
: pos(pos) \
|
||||
, e1(e1) \
|
||||
, e2(e2) {}; \
|
||||
void show(const SymbolTable & symbols, std::ostream & str) const override \
|
||||
{ \
|
||||
str << "("; \
|
||||
e1->show(symbols, str); \
|
||||
str << " " s " "; \
|
||||
e2->show(symbols, str); \
|
||||
str << ")"; \
|
||||
} \
|
||||
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override \
|
||||
{ \
|
||||
e1->bindVars(es, env); e2->bindVars(es, env); \
|
||||
} \
|
||||
void eval(EvalState & state, Env & env, Value & v) override; \
|
||||
PosIdx getPos() const override { return pos; } \
|
||||
{ \
|
||||
e1->bindVars(es, env); \
|
||||
e2->bindVars(es, env); \
|
||||
} \
|
||||
void eval(EvalState & state, Env & env, Value & v) override; \
|
||||
PosIdx getPos() const override \
|
||||
{ \
|
||||
return pos; \
|
||||
} \
|
||||
};
|
||||
|
||||
MakeBinOp(ExprOpEq, "==")
|
||||
MakeBinOp(ExprOpNEq, "!=")
|
||||
MakeBinOp(ExprOpAnd, "&&")
|
||||
MakeBinOp(ExprOpOr, "||")
|
||||
MakeBinOp(ExprOpImpl, "->")
|
||||
MakeBinOp(ExprOpUpdate, "//")
|
||||
MakeBinOp(ExprOpConcatLists, "++")
|
||||
MakeBinOp(ExprOpEq, "==") MakeBinOp(ExprOpNEq, "!=") MakeBinOp(ExprOpAnd, "&&") MakeBinOp(ExprOpOr, "||")
|
||||
MakeBinOp(ExprOpImpl, "->") MakeBinOp(ExprOpUpdate, "//") MakeBinOp(ExprOpConcatLists, "++")
|
||||
|
||||
struct ExprConcatStrings : Expr
|
||||
struct ExprConcatStrings : Expr
|
||||
{
|
||||
PosIdx pos;
|
||||
bool forceString;
|
||||
std::vector<std::pair<PosIdx, Expr *>> * es;
|
||||
ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector<std::pair<PosIdx, Expr *>> * es)
|
||||
: pos(pos), forceString(forceString), es(es) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
: pos(pos)
|
||||
, forceString(forceString)
|
||||
, es(es) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
struct ExprPos : Expr
|
||||
{
|
||||
PosIdx pos;
|
||||
ExprPos(const PosIdx & pos) : pos(pos) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
ExprPos(const PosIdx & pos)
|
||||
: pos(pos) {};
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
return pos;
|
||||
}
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
@ -473,7 +595,6 @@ struct ExprBlackHole : Expr
|
||||
|
||||
extern ExprBlackHole eBlackHole;
|
||||
|
||||
|
||||
/* Static environments are used to map variable names onto (level,
|
||||
displacement) pairs used to obtain the value of the variable at
|
||||
runtime. */
|
||||
@ -495,8 +616,9 @@ struct StaticEnv
|
||||
|
||||
void sort()
|
||||
{
|
||||
std::stable_sort(vars.begin(), vars.end(),
|
||||
[](const Vars::value_type & a, const Vars::value_type & b) { return a.first < b.first; });
|
||||
std::stable_sort(vars.begin(), vars.end(), [](const Vars::value_type & a, const Vars::value_type & b) {
|
||||
return a.first < b.first;
|
||||
});
|
||||
}
|
||||
|
||||
void deduplicate()
@ -504,7 +626,8 @@ struct StaticEnv
auto it = vars.begin(), jt = it, end = vars.end();
while (jt != end) {
*it = *jt++;
while (jt != end && it->first == jt->first) *it = *jt++;
while (jt != end && it->first == jt->first)
*it = *jt++;
it++;
}
vars.erase(it, end);
@ -514,10 +637,10 @@ struct StaticEnv
{
Vars::value_type key(name, 0);
auto i = std::lower_bound(vars.begin(), vars.end(), key);
if (i != vars.end() && i->first == name) return i;
if (i != vars.end() && i->first == name)
return i;
return vars.end();
}
};
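Note that `deduplicate` keeps the last element of each run of equal keys: every duplicate overwrites `*it` before `it` advances, so a later binding wins over an earlier one. A standalone rendering of the same two-pointer pass over a sorted vector of pairs:

    #include <string>
    #include <utility>
    #include <vector>

    // Collapse runs of equal keys in a sorted vector, keeping the last value of each run.
    static void dedupKeepLast(std::vector<std::pair<std::string, int>> & vars)
    {
        auto it = vars.begin(), jt = it, end = vars.end();
        while (jt != end) {
            *it = *jt++;
            while (jt != end && it->first == jt->first)
                *it = *jt++; // later entries overwrite earlier ones
            it++;
        }
        vars.erase(it, end);
    }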
|
||||
|
||||
|
||||
}
|
||||
|
@ -17,7 +17,10 @@ struct StringToken
|
||||
const char * p;
|
||||
size_t l;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return {p, l}; }
|
||||
operator std::string_view() const
|
||||
{
|
||||
return {p, l};
|
||||
}
|
||||
};
|
||||
|
||||
// This type must be trivially copyable; see YYLTYPE_IS_TRIVIAL in parser.y.
|
||||
@ -29,12 +32,14 @@ struct ParserLocation
|
||||
// backup to recover from yyless(0)
|
||||
int stashedBeginOffset, stashedEndOffset;
|
||||
|
||||
void stash() {
|
||||
void stash()
|
||||
{
|
||||
stashedBeginOffset = beginOffset;
|
||||
stashedEndOffset = endOffset;
|
||||
}
|
||||
|
||||
void unstash() {
|
||||
void unstash()
|
||||
{
|
||||
beginOffset = stashedBeginOffset;
|
||||
endOffset = stashedEndOffset;
|
||||
}
|
||||
@ -87,32 +92,30 @@ struct ParserState
|
||||
|
||||
void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
|
||||
void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);
|
||||
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc);
|
||||
void addAttr(
|
||||
ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc);
|
||||
void addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def);
|
||||
Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {});
|
||||
Expr * stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es);
|
||||
Expr * stripIndentation(const PosIdx pos, std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es);
|
||||
PosIdx at(const ParserLocation & loc);
|
||||
};
|
||||
|
||||
inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = HintFmt("attribute '%1%' already defined at %2%",
|
||||
showAttrPath(symbols, attrPath), positions[prevPos]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
throw ParseError(
|
||||
{.msg = HintFmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]),
|
||||
.pos = positions[pos]});
|
||||
}
|
||||
|
||||
inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
throw ParseError(
|
||||
{.msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]),
|
||||
.pos = positions[pos]});
|
||||
}
|
||||
|
||||
inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc)
|
||||
inline void ParserState::addAttr(
|
||||
ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc)
|
||||
{
|
||||
AttrPath::iterator i;
|
||||
// All attrpaths have at least one attr
|
||||
@ -159,7 +162,8 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, const
|
||||
* Precondition: attrPath is used for error messages and should already contain
|
||||
* symbol as its last element.
|
||||
*/
|
||||
inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def)
|
||||
inline void
|
||||
ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def)
|
||||
{
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
@ -189,12 +193,14 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const S
|
||||
attrPath.pop_back();
|
||||
}
|
||||
ae->attrs.clear();
|
||||
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(),
|
||||
jAttrs->dynamicAttrs.insert(
|
||||
jAttrs->dynamicAttrs.end(),
|
||||
std::make_move_iterator(ae->dynamicAttrs.begin()),
|
||||
std::make_move_iterator(ae->dynamicAttrs.end()));
|
||||
ae->dynamicAttrs.clear();
|
||||
if (ae->inheritFromExprs) {
|
||||
jAttrs->inheritFromExprs->insert(jAttrs->inheritFromExprs->end(),
|
||||
jAttrs->inheritFromExprs->insert(
|
||||
jAttrs->inheritFromExprs->end(),
|
||||
std::make_move_iterator(ae->inheritFromExprs->begin()),
|
||||
std::make_move_iterator(ae->inheritFromExprs->end()));
|
||||
ae->inheritFromExprs = nullptr;
|
||||
@ -211,10 +217,9 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath & attrPath, const S
|
||||
|
||||
inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg)
|
||||
{
|
||||
std::sort(formals->formals.begin(), formals->formals.end(),
|
||||
[] (const auto & a, const auto & b) {
|
||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||
});
|
||||
std::sort(formals->formals.begin(), formals->formals.end(), [](const auto & a, const auto & b) {
|
||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||
});
|
||||
|
||||
std::optional<std::pair<Symbol, PosIdx>> duplicate;
|
||||
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
||||
@ -224,24 +229,22 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym
|
||||
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
||||
}
|
||||
if (duplicate)
|
||||
throw ParseError({
|
||||
.msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]),
|
||||
.pos = positions[duplicate->second]
|
||||
});
|
||||
throw ParseError(
|
||||
{.msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]),
|
||||
.pos = positions[duplicate->second]});
|
||||
|
||||
if (arg && formals->has(arg))
|
||||
throw ParseError({
|
||||
.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
throw ParseError(
|
||||
{.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos]});
|
||||
|
||||
return formals;
|
||||
}
|
||||
|
||||
inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
|
||||
inline Expr *
|
||||
ParserState::stripIndentation(const PosIdx pos, std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
|
||||
{
|
||||
if (es.empty()) return new ExprString("");
|
||||
if (es.empty())
|
||||
return new ExprString("");
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
@ -255,7 +258,8 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
if (curIndent < minIndent)
|
||||
minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
@ -269,7 +273,8 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
curIndent = 0;
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
if (curIndent < minIndent)
|
||||
minIndent = curIndent;
|
||||
}
|
||||
} else if (str->p[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
@ -284,20 +289,19 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (Expr * e) {
|
||||
const auto trimExpr = [&](Expr * e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->emplace_back(i->first, e);
|
||||
};
|
||||
const auto trimString = [&] (const StringToken & t) {
|
||||
const auto trimString = [&](const StringToken & t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (t.p[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += t.p[j];
|
||||
}
|
||||
else if (t.p[j] == '\n') {
|
||||
} else if (t.p[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
} else {
|
||||
@ -307,7 +311,8 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
}
|
||||
} else {
|
||||
s2 += t.p[j];
|
||||
if (t.p[j] == '\n') atStartOfLine = true;
|
||||
if (t.p[j] == '\n')
|
||||
atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
@ -325,20 +330,20 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
}
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
std::visit(overloaded { trimExpr, trimString }, i->second);
std::visit(overloaded{trimExpr, trimString}, i->second);
}

// If there is nothing at all, return the empty string directly.
// This also ensures that equivalent empty strings result in the same ast, which is helpful when testing formatters.
if (es2->size() == 0) {
auto *const result = new ExprString("");
auto * const result = new ExprString("");
delete es2;
return result;
}

/* If this is a single string, then don't do a concatenation. */
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
auto *const result = (*es2)[0].second;
auto * const result = (*es2)[0].second;
delete es2;
return result;
}
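The hunks above belong to `ParserState::stripIndentation`, which first scans an indented string for the smallest leading-whitespace width of any non-blank line and then drops that many spaces from every line. A compact, self-contained sketch of that first pass, operating on a plain `std::string` rather than the parser's token stream and antiquotations:

    #include <algorithm>
    #include <cstddef>
    #include <limits>
    #include <sstream>
    #include <string>

    // Return the smallest indentation of any line that contains non-whitespace.
    static size_t minIndentation(const std::string & s)
    {
        size_t minIndent = std::numeric_limits<size_t>::max();
        std::istringstream in(s);
        std::string line;
        while (std::getline(in, line)) {
            size_t i = line.find_first_not_of(' ');
            if (i == std::string::npos)
                continue; // whitespace-only lines do not count, as noted above
            minIndent = std::min(minIndent, i);
        }
        return minIndent == std::numeric_limits<size_t>::max() ? 0 : minIndent;
    }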
|
||||
|
@ -44,12 +44,12 @@ struct RegisterPrimOp
|
||||
/**
|
||||
* Load a ValueInitializer from a DSO and return whatever it initializes
|
||||
*/
|
||||
void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
||||
void prim_importNative(EvalState & state, const PosIdx pos, Value ** args, Value & v);
|
||||
|
||||
/**
|
||||
* Execute a program and parse its output
|
||||
*/
|
||||
void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v);
|
||||
void prim_exec(EvalState & state, const PosIdx pos, Value ** args, Value & v);
|
||||
|
||||
void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column);
|
||||
|
||||
|
@ -15,10 +15,6 @@ namespace nix {
* See: https://github.com/NixOS/nix/issues/9730
*/
void printAmbiguous(
Value &v,
const SymbolTable &symbols,
std::ostream &str,
std::set<const void *> *seen,
int depth);
Value & v, const SymbolTable & symbols, std::ostream & str, std::set<const void *> * seen, int depth);

}

@ -110,7 +110,7 @@ struct PrintOptions
* `PrintOptions` for unknown and therefore potentially large values in error messages,
* to avoid printing "too much" output.
*/
static PrintOptions errorPrintOptions = PrintOptions {
static PrintOptions errorPrintOptions = PrintOptions{
.ansiColors = true,
.maxDepth = 10,
.maxAttrs = 10,
@ -26,10 +26,12 @@ struct Value;
* @param s The logical string
*/
std::ostream & printLiteralString(std::ostream & o, std::string_view s);
inline std::ostream & printLiteralString(std::ostream & o, const char * s) {
inline std::ostream & printLiteralString(std::ostream & o, const char * s)
{
return printLiteralString(o, std::string_view(s));
}
inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) {
inline std::ostream & printLiteralString(std::ostream & o, const std::string & s)
{
return printLiteralString(o, std::string_view(s));
}

@ -60,27 +62,31 @@ bool isReservedKeyword(const std::string_view str);
|
||||
*/
|
||||
std::ostream & printIdentifier(std::ostream & o, std::string_view s);
|
||||
|
||||
void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {});
|
||||
void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions{});
|
||||
|
||||
/**
|
||||
* A partially-applied form of `printValue` which can be formatted using `<<`
|
||||
* without allocating an intermediate string.
|
||||
*/
|
||||
class ValuePrinter {
|
||||
friend std::ostream & operator << (std::ostream & output, const ValuePrinter & printer);
|
||||
class ValuePrinter
|
||||
{
|
||||
friend std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer);
|
||||
private:
|
||||
EvalState & state;
|
||||
Value & value;
|
||||
PrintOptions options;
|
||||
|
||||
public:
|
||||
ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions {})
|
||||
: state(state), value(value), options(options) { }
|
||||
ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions{})
|
||||
: state(state)
|
||||
, value(value)
|
||||
, options(options)
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer);
|
||||
|
||||
|
||||
/**
|
||||
* `ValuePrinter` does its own ANSI formatting, so we don't color it
|
||||
* magenta.
|
||||
|
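Aside (not part of the diff): the hunk above reformats `ValuePrinter`, which the header describes as a partially-applied form of `printValue` that can be streamed with `<<` without allocating an intermediate string. A minimal hedged sketch of how such a wrapper would typically be used follows; the wrapper function, the header names, and the use of `std::cerr` are assumptions, while `ValuePrinter`, `errorPrintOptions`, `EvalState`, and `Value` come from the hunks above.

```cpp
#include <iostream>

// Assumed header names for the declarations shown in the hunks above.
#include "eval.hh"
#include "print.hh"

// Hedged sketch: stream a value straight into a diagnostic message. The
// operator<< declared above does the rendering, so no temporary std::string
// is built for the printed value.
void reportValue(nix::EvalState & state, nix::Value & v)
{
    std::cerr << "while inspecting " << nix::ValuePrinter(state, v, nix::errorPrintOptions) << "\n";
}
```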
@ -23,10 +23,13 @@ class SymbolStr
private:
const std::string * s;

explicit SymbolStr(const std::string & symbol): s(&symbol) {}
explicit SymbolStr(const std::string & symbol)
: s(&symbol)
{
}

public:
bool operator == (std::string_view s2) const
bool operator==(std::string_view s2) const
{
return *s == s2;
}
@ -36,12 +39,12 @@ public:
|
||||
return s->c_str();
|
||||
}
|
||||
|
||||
operator const std::string_view () const
|
||||
operator const std::string_view() const
|
||||
{
|
||||
return *s;
|
||||
}
|
||||
|
||||
friend std::ostream & operator <<(std::ostream & os, const SymbolStr & symbol);
|
||||
friend std::ostream & operator<<(std::ostream & os, const SymbolStr & symbol);
|
||||
|
||||
bool empty() const
|
||||
{
|
||||
@ -61,15 +64,30 @@ class Symbol
|
||||
private:
|
||||
uint32_t id;
|
||||
|
||||
explicit Symbol(uint32_t id): id(id) {}
|
||||
explicit Symbol(uint32_t id)
|
||||
: id(id)
|
||||
{
|
||||
}
|
||||
|
||||
public:
|
||||
Symbol() : id(0) {}
|
||||
Symbol()
|
||||
: id(0)
|
||||
{
|
||||
}
|
||||
|
||||
explicit operator bool() const { return id > 0; }
|
||||
explicit operator bool() const
|
||||
{
|
||||
return id > 0;
|
||||
}
|
||||
|
||||
auto operator<=>(const Symbol other) const { return id <=> other.id; }
|
||||
bool operator==(const Symbol other) const { return id == other.id; }
|
||||
auto operator<=>(const Symbol other) const
|
||||
{
|
||||
return id <=> other.id;
|
||||
}
|
||||
bool operator==(const Symbol other) const
|
||||
{
|
||||
return id == other.id;
|
||||
}
|
||||
|
||||
friend class std::hash<Symbol>;
|
||||
};
|
||||
@ -97,7 +115,8 @@ public:
|
||||
// on the original implementation using unordered_set
|
||||
// FIXME: make this thread-safe.
|
||||
auto it = symbols.find(s);
|
||||
if (it != symbols.end()) return Symbol(it->second.second + 1);
|
||||
if (it != symbols.end())
|
||||
return Symbol(it->second.second + 1);
|
||||
|
||||
const auto & [rawSym, idx] = store.add(std::string(s));
|
||||
symbols.emplace(rawSym, std::make_pair(&rawSym, idx));
|
||||
|
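Aside (not part of the diff): the symbol-table hunk above ends with the interning fast path (`symbols.find` followed by `Symbol(it->second.second + 1)`). A hedged sketch of how interned symbols behave is below; the member name `create`, the indexing operator returning a `SymbolStr`, and the header name are assumptions inferred from the surrounding hunks, not shown verbatim here.

```cpp
#include <cassert>
#include <iostream>

#include "symbol-table.hh" // assumed header for SymbolTable / Symbol / SymbolStr

// Hedged sketch: interning the same spelling twice yields the same Symbol,
// and indexing the table (assumed operator[]) recovers a printable SymbolStr.
void internExample(nix::SymbolTable & symbols)
{
    nix::Symbol a = symbols.create("foo"); // `create` is the assumed interning entry point
    nix::Symbol b = symbols.create("foo");
    assert(a == b);                        // Symbol::operator== is shown in the hunks above
    std::cout << symbols[a] << "\n";       // uses the SymbolStr operator<< reformatted in this commit
}
```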
@ -10,12 +10,17 @@
|
||||
|
||||
namespace nix {
|
||||
|
||||
nlohmann::json printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore = true);
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore = true);
|
||||
nlohmann::json printValueAsJSON(
|
||||
EvalState & state, bool strict, Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore = true);
|
||||
|
||||
void printValueAsJSON(
|
||||
EvalState & state,
|
||||
bool strict,
|
||||
Value & v,
|
||||
const PosIdx pos,
|
||||
std::ostream & str,
|
||||
NixStringContext & context,
|
||||
bool copyToStore = true);
|
||||
|
||||
MakeError(JSONSerializationError, Error);
|
||||
|
||||
|
@ -9,7 +9,13 @@
|
||||
|
||||
namespace nix {
|
||||
|
||||
void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos);
|
||||
void printValueAsXML(
|
||||
EvalState & state,
|
||||
bool strict,
|
||||
bool location,
|
||||
Value & v,
|
||||
std::ostream & out,
|
||||
NixStringContext & context,
|
||||
const PosIdx pos);
|
||||
|
||||
}
|
||||
|
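Aside (not part of the diff): the declarations above re-wrap the two `printValueAsJSON` overloads without changing their signatures. A hedged sketch of calling the `nlohmann::json`-returning overload follows; `noPos` appears elsewhere in this commit, while the wrapper function, header names, and the choice of `strict = true` / `copyToStore = false` are assumptions.

```cpp
#include <nlohmann/json.hpp>

#include "eval.hh"          // assumed headers providing EvalState, Value, NixStringContext
#include "value-to-json.hh"

// Hedged sketch: serialize an evaluated value, collecting any string context
// it carries rather than copying paths to the store.
nlohmann::json toJSON(nix::EvalState & state, nix::Value & v)
{
    nix::NixStringContext context;
    return nix::printValueAsJSON(state, /*strict=*/true, v, nix::noPos, context, /*copyToStore=*/false);
}
```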
@ -18,7 +18,6 @@ namespace nix {
struct Value;
class BindingsBuilder;

typedef enum {
tUninitialized = 0,
tInt = 1,
@ -44,19 +43,7 @@ typedef enum {
* grouping together implementation details like tList*, different function
* types, and types in non-normal form (so thunks and co.)
*/
typedef enum {
nThunk,
nInt,
nFloat,
nBool,
nString,
nPath,
nNull,
nAttrs,
nList,
nFunction,
nExternal
} ValueType;
typedef enum { nThunk, nInt, nFloat, nBool, nString, nPath, nNull, nAttrs, nList, nFunction, nExternal } ValueType;

class Bindings;
|
||||
struct Env;
|
||||
@ -81,15 +68,15 @@ using NixFloat = double;
|
||||
*/
|
||||
class ExternalValueBase
|
||||
{
|
||||
friend std::ostream & operator << (std::ostream & str, const ExternalValueBase & v);
|
||||
friend std::ostream & operator<<(std::ostream & str, const ExternalValueBase & v);
|
||||
friend class Printer;
|
||||
protected:
|
||||
protected:
|
||||
/**
|
||||
* Print out the value
|
||||
*/
|
||||
virtual std::ostream & print(std::ostream & str) const = 0;
|
||||
|
||||
public:
|
||||
public:
|
||||
/**
|
||||
* Return a simple string describing the type
|
||||
*/
|
||||
@ -104,41 +91,44 @@ class ExternalValueBase
|
||||
* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
|
||||
* error.
|
||||
*/
|
||||
virtual std::string coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;
|
||||
virtual std::string coerceToString(
|
||||
EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;
|
||||
|
||||
/**
|
||||
* Compare to another value of the same type. Defaults to uncomparable,
|
||||
* i.e. always false.
|
||||
*/
|
||||
virtual bool operator ==(const ExternalValueBase & b) const noexcept;
|
||||
virtual bool operator==(const ExternalValueBase & b) const noexcept;
|
||||
|
||||
/**
|
||||
* Print the value as JSON. Defaults to unconvertable, i.e. throws an error
|
||||
*/
|
||||
virtual nlohmann::json printValueAsJSON(EvalState & state, bool strict,
|
||||
NixStringContext & context, bool copyToStore = true) const;
|
||||
virtual nlohmann::json
|
||||
printValueAsJSON(EvalState & state, bool strict, NixStringContext & context, bool copyToStore = true) const;
|
||||
|
||||
/**
|
||||
* Print the value as XML. Defaults to unevaluated
|
||||
*/
|
||||
virtual void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
|
||||
virtual void printValueAsXML(
|
||||
EvalState & state,
|
||||
bool strict,
|
||||
bool location,
|
||||
XMLWriter & doc,
|
||||
NixStringContext & context,
|
||||
PathSet & drvsSeen,
|
||||
const PosIdx pos) const;
|
||||
|
||||
virtual ~ExternalValueBase()
|
||||
{
|
||||
};
|
||||
virtual ~ExternalValueBase() {};
|
||||
};
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const ExternalValueBase & v);
|
||||
|
||||
std::ostream & operator<<(std::ostream & str, const ExternalValueBase & v);
|
||||
|
||||
class ListBuilder
|
||||
{
|
||||
const size_t size;
|
||||
Value * inlineElems[2] = {nullptr, nullptr};
|
||||
public:
|
||||
Value * * elems;
|
||||
Value ** elems;
|
||||
ListBuilder(EvalState & state, size_t size);
|
||||
|
||||
// NOTE: Can be noexcept because we are just copying integral values and
|
||||
@ -147,22 +137,28 @@ public:
|
||||
: size(x.size)
|
||||
, inlineElems{x.inlineElems[0], x.inlineElems[1]}
|
||||
, elems(size <= 2 ? inlineElems : x.elems)
|
||||
{ }
|
||||
{
|
||||
}
|
||||
|
||||
Value * & operator [](size_t n)
|
||||
Value *& operator[](size_t n)
|
||||
{
|
||||
return elems[n];
|
||||
}
|
||||
|
||||
typedef Value * * iterator;
|
||||
typedef Value ** iterator;
|
||||
|
||||
iterator begin() { return &elems[0]; }
|
||||
iterator end() { return &elems[size]; }
|
||||
iterator begin()
|
||||
{
|
||||
return &elems[0];
|
||||
}
|
||||
iterator end()
|
||||
{
|
||||
return &elems[size];
|
||||
}
|
||||
|
||||
friend struct Value;
|
||||
};
|
||||
|
||||
|
||||
struct Value
|
||||
{
|
||||
private:
|
||||
@ -172,21 +168,36 @@ private:
|
||||
|
||||
public:
|
||||
|
||||
void print(EvalState &state, std::ostream &str, PrintOptions options = PrintOptions {});
|
||||
void print(EvalState & state, std::ostream & str, PrintOptions options = PrintOptions{});
|
||||
|
||||
// Functions needed to distinguish the type
|
||||
// These should be removed eventually, by putting the functionality that's
|
||||
// needed by callers into methods of this type
|
||||
|
||||
// type() == nThunk
|
||||
inline bool isThunk() const { return internalType == tThunk; };
|
||||
inline bool isApp() const { return internalType == tApp; };
|
||||
inline bool isThunk() const
|
||||
{
|
||||
return internalType == tThunk;
|
||||
};
|
||||
inline bool isApp() const
|
||||
{
|
||||
return internalType == tApp;
|
||||
};
|
||||
inline bool isBlackhole() const;
|
||||
|
||||
// type() == nFunction
|
||||
inline bool isLambda() const { return internalType == tLambda; };
|
||||
inline bool isPrimOp() const { return internalType == tPrimOp; };
|
||||
inline bool isPrimOpApp() const { return internalType == tPrimOpApp; };
|
||||
inline bool isLambda() const
|
||||
{
|
||||
return internalType == tLambda;
|
||||
};
|
||||
inline bool isPrimOp() const
|
||||
{
|
||||
return internalType == tPrimOp;
|
||||
};
|
||||
inline bool isPrimOpApp() const
|
||||
{
|
||||
return internalType == tPrimOpApp;
|
||||
};
|
||||
|
||||
/**
|
||||
* Strings in the evaluator carry a so-called `context` which
|
||||
@ -210,26 +221,31 @@ public:
|
||||
|
||||
* For canonicity, the store paths should be in sorted order.
|
||||
*/
|
||||
struct StringWithContext {
|
||||
struct StringWithContext
|
||||
{
|
||||
const char * c_str;
|
||||
const char * * context; // must be in sorted order
|
||||
const char ** context; // must be in sorted order
|
||||
};
|
||||
|
||||
struct Path {
|
||||
struct Path
|
||||
{
|
||||
SourceAccessor * accessor;
|
||||
const char * path;
|
||||
};
|
||||
|
||||
struct ClosureThunk {
|
||||
struct ClosureThunk
|
||||
{
|
||||
Env * env;
|
||||
Expr * expr;
|
||||
};
|
||||
|
||||
struct FunctionApplicationThunk {
|
||||
Value * left, * right;
|
||||
struct FunctionApplicationThunk
|
||||
{
|
||||
Value *left, *right;
|
||||
};
|
||||
|
||||
struct Lambda {
|
||||
struct Lambda
|
||||
{
|
||||
Env * env;
|
||||
ExprLambda * fun;
|
||||
};
|
||||
@ -244,7 +260,8 @@ public:
|
||||
Path path;
|
||||
|
||||
Bindings * attrs;
|
||||
struct {
|
||||
struct
|
||||
{
|
||||
size_t size;
|
||||
Value * const * elems;
|
||||
} bigList;
|
||||
@ -270,18 +287,35 @@ public:
|
||||
inline ValueType type(bool invalidIsThunk = false) const
|
||||
{
|
||||
switch (internalType) {
|
||||
case tUninitialized: break;
|
||||
case tInt: return nInt;
|
||||
case tBool: return nBool;
|
||||
case tString: return nString;
|
||||
case tPath: return nPath;
|
||||
case tNull: return nNull;
|
||||
case tAttrs: return nAttrs;
|
||||
case tList1: case tList2: case tListN: return nList;
|
||||
case tLambda: case tPrimOp: case tPrimOpApp: return nFunction;
|
||||
case tExternal: return nExternal;
|
||||
case tFloat: return nFloat;
|
||||
case tThunk: case tApp: return nThunk;
|
||||
case tUninitialized:
|
||||
break;
|
||||
case tInt:
|
||||
return nInt;
|
||||
case tBool:
|
||||
return nBool;
|
||||
case tString:
|
||||
return nString;
|
||||
case tPath:
|
||||
return nPath;
|
||||
case tNull:
|
||||
return nNull;
|
||||
case tAttrs:
|
||||
return nAttrs;
|
||||
case tList1:
|
||||
case tList2:
|
||||
case tListN:
|
||||
return nList;
|
||||
case tLambda:
|
||||
case tPrimOp:
|
||||
case tPrimOpApp:
|
||||
return nFunction;
|
||||
case tExternal:
|
||||
return nExternal;
|
||||
case tFloat:
|
||||
return nFloat;
|
||||
case tThunk:
|
||||
case tApp:
|
||||
return nThunk;
|
||||
}
|
||||
if (invalidIsThunk)
|
||||
return nThunk;
|
||||
@ -312,17 +346,17 @@ public:
|
||||
|
||||
inline void mkInt(NixInt n)
|
||||
{
|
||||
finishValue(tInt, { .integer = n });
|
||||
finishValue(tInt, {.integer = n});
|
||||
}
|
||||
|
||||
inline void mkBool(bool b)
|
||||
{
|
||||
finishValue(tBool, { .boolean = b });
|
||||
finishValue(tBool, {.boolean = b});
|
||||
}
|
||||
|
||||
inline void mkString(const char * s, const char * * context = 0)
|
||||
inline void mkString(const char * s, const char ** context = 0)
|
||||
{
|
||||
finishValue(tString, { .string = { .c_str = s, .context = context } });
|
||||
finishValue(tString, {.string = {.c_str = s, .context = context}});
|
||||
}
|
||||
|
||||
void mkString(std::string_view s);
|
||||
@ -341,7 +375,7 @@ public:
|
||||
|
||||
inline void mkPath(SourceAccessor * accessor, const char * path)
|
||||
{
|
||||
finishValue(tPath, { .path = { .accessor = accessor, .path = path } });
|
||||
finishValue(tPath, {.path = {.accessor = accessor, .path = path}});
|
||||
}
|
||||
|
||||
inline void mkNull()
|
||||
@ -351,7 +385,7 @@ public:
|
||||
|
||||
inline void mkAttrs(Bindings * a)
|
||||
{
|
||||
finishValue(tAttrs, { .attrs = a });
|
||||
finishValue(tAttrs, {.attrs = a});
|
||||
}
|
||||
|
||||
Value & mkAttrs(BindingsBuilder & bindings);
|
||||
@ -359,26 +393,26 @@ public:
|
||||
void mkList(const ListBuilder & builder)
|
||||
{
|
||||
if (builder.size == 1)
|
||||
finishValue(tList1, { .smallList = { builder.inlineElems[0] } });
|
||||
finishValue(tList1, {.smallList = {builder.inlineElems[0]}});
|
||||
else if (builder.size == 2)
|
||||
finishValue(tList2, { .smallList = { builder.inlineElems[0], builder.inlineElems[1] } });
|
||||
finishValue(tList2, {.smallList = {builder.inlineElems[0], builder.inlineElems[1]}});
|
||||
else
|
||||
finishValue(tListN, { .bigList = { .size = builder.size, .elems = builder.elems } });
|
||||
finishValue(tListN, {.bigList = {.size = builder.size, .elems = builder.elems}});
|
||||
}
|
||||
|
||||
inline void mkThunk(Env * e, Expr * ex)
|
||||
{
|
||||
finishValue(tThunk, { .thunk = { .env = e, .expr = ex } });
|
||||
finishValue(tThunk, {.thunk = {.env = e, .expr = ex}});
|
||||
}
|
||||
|
||||
inline void mkApp(Value * l, Value * r)
|
||||
{
|
||||
finishValue(tApp, { .app = { .left = l, .right = r } });
|
||||
finishValue(tApp, {.app = {.left = l, .right = r}});
|
||||
}
|
||||
|
||||
inline void mkLambda(Env * e, ExprLambda * f)
|
||||
{
|
||||
finishValue(tLambda, { .lambda = { .env = e, .fun = f } });
|
||||
finishValue(tLambda, {.lambda = {.env = e, .fun = f}});
|
||||
}
|
||||
|
||||
inline void mkBlackhole();
|
||||
@ -387,7 +421,7 @@ public:
|
||||
|
||||
inline void mkPrimOpApp(Value * l, Value * r)
|
||||
{
|
||||
finishValue(tPrimOpApp, { .primOpApp = { .left = l, .right = r } });
|
||||
finishValue(tPrimOpApp, {.primOpApp = {.left = l, .right = r}});
|
||||
}
|
||||
|
||||
/**
|
||||
@ -397,12 +431,12 @@ public:
|
||||
|
||||
inline void mkExternal(ExternalValueBase * e)
|
||||
{
|
||||
finishValue(tExternal, { .external = e });
|
||||
finishValue(tExternal, {.external = e});
|
||||
}
|
||||
|
||||
inline void mkFloat(NixFloat n)
|
||||
{
|
||||
finishValue(tFloat, { .fpoint = n });
|
||||
finishValue(tFloat, {.fpoint = n});
|
||||
}
|
||||
|
||||
bool isList() const
|
||||
@ -444,8 +478,7 @@ public:
|
||||
{
|
||||
assert(internalType == tPath);
|
||||
return SourcePath(
|
||||
ref(payload.path.accessor->shared_from_this()),
|
||||
CanonPath(CanonPath::unchecked_t(), payload.path.path));
|
||||
ref(payload.path.accessor->shared_from_this()), CanonPath(CanonPath::unchecked_t(), payload.path.path));
|
||||
}
|
||||
|
||||
std::string_view string_view() const
|
||||
@ -460,36 +493,47 @@ public:
|
||||
return payload.string.c_str;
|
||||
}
|
||||
|
||||
const char * * context() const
|
||||
const char ** context() const
|
||||
{
|
||||
return payload.string.context;
|
||||
}
|
||||
|
||||
ExternalValueBase * external() const
|
||||
{ return payload.external; }
|
||||
{
|
||||
return payload.external;
|
||||
}
|
||||
|
||||
const Bindings * attrs() const
|
||||
{ return payload.attrs; }
|
||||
{
|
||||
return payload.attrs;
|
||||
}
|
||||
|
||||
const PrimOp * primOp() const
|
||||
{ return payload.primOp; }
|
||||
{
|
||||
return payload.primOp;
|
||||
}
|
||||
|
||||
bool boolean() const
|
||||
{ return payload.boolean; }
|
||||
{
|
||||
return payload.boolean;
|
||||
}
|
||||
|
||||
NixInt integer() const
|
||||
{ return payload.integer; }
|
||||
{
|
||||
return payload.integer;
|
||||
}
|
||||
|
||||
NixFloat fpoint() const
|
||||
{ return payload.fpoint; }
|
||||
{
|
||||
return payload.fpoint;
|
||||
}
|
||||
};
extern ExprBlackHole eBlackHole;
|
||||
|
||||
bool Value::isBlackhole() const
|
||||
{
|
||||
return internalType == tThunk && payload.thunk.expr == (Expr*) &eBlackHole;
|
||||
return internalType == tThunk && payload.thunk.expr == (Expr *) &eBlackHole;
|
||||
}
|
||||
|
||||
void Value::mkBlackhole()
|
||||
@ -497,11 +541,16 @@ void Value::mkBlackhole()
|
||||
mkThunk(nullptr, (Expr *) &eBlackHole);
|
||||
}
|
||||
|
||||
|
||||
typedef std::vector<Value *, traceable_allocator<Value *>> ValueVector;
|
||||
typedef std::unordered_map<Symbol, Value *, std::hash<Symbol>, std::equal_to<Symbol>, traceable_allocator<std::pair<const Symbol, Value *>>> ValueMap;
|
||||
typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector>>> ValueVectorMap;
|
||||
|
||||
typedef std::unordered_map<
|
||||
Symbol,
|
||||
Value *,
|
||||
std::hash<Symbol>,
|
||||
std::equal_to<Symbol>,
|
||||
traceable_allocator<std::pair<const Symbol, Value *>>>
|
||||
ValueMap;
|
||||
typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector>>>
|
||||
ValueVectorMap;
|
||||
|
||||
/**
|
||||
* A value allocated in traceable memory.
|
||||
|
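Aside (not part of the diff): the `Value` hunks above reformat the `mk*` helpers and the `ListBuilder` accessors without changing behaviour. A hedged sketch of how they fit together is below; `allocValue()` on `EvalState` is an assumption (it is not shown in these hunks), everything else is taken from the declarations above.

```cpp
#include "eval.hh" // assumed header providing EvalState, Value, ListBuilder

// Hedged sketch: build the two-element list [ true null ] using the
// ListBuilder constructor, its operator[], and Value::mkList shown above.
void mkExampleList(nix::EvalState & state, nix::Value & out)
{
    nix::ListBuilder list(state, 2);
    nix::Value * a = state.allocValue(); // allocValue() is an assumption, not part of this diff
    nix::Value * b = state.allocValue();
    a->mkBool(true);
    b->mkNull();
    list[0] = a;
    list[1] = b;
    out.mkList(list);
}
```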
@ -15,7 +15,7 @@ public:
|
||||
std::string_view raw;
|
||||
|
||||
template<typename... Args>
|
||||
BadNixStringContextElem(std::string_view raw_, const Args & ... args)
|
||||
BadNixStringContextElem(std::string_view raw_, const Args &... args)
|
||||
: Error("")
|
||||
{
|
||||
raw = raw_;
|
||||
@ -24,7 +24,8 @@ public:
|
||||
}
|
||||
};
|
||||
|
||||
struct NixStringContextElem {
|
||||
struct NixStringContextElem
|
||||
{
|
||||
/**
|
||||
* Plain opaque path to some store object.
|
||||
*
|
||||
@ -41,7 +42,8 @@ struct NixStringContextElem {
|
||||
*
|
||||
* Encoded in the form `=<drvPath>`.
|
||||
*/
|
||||
struct DrvDeep {
|
||||
struct DrvDeep
|
||||
{
|
||||
StorePath drvPath;
|
||||
|
||||
GENERATE_CMP(DrvDeep, me->drvPath);
|
||||
@ -54,11 +56,7 @@ struct NixStringContextElem {
|
||||
*/
|
||||
using Built = SingleDerivedPath::Built;
|
||||
|
||||
using Raw = std::variant<
|
||||
Opaque,
|
||||
DrvDeep,
|
||||
Built
|
||||
>;
|
||||
using Raw = std::variant<Opaque, DrvDeep, Built>;
|
||||
|
||||
Raw raw;
|
||||
|
||||
@ -74,9 +72,8 @@ struct NixStringContextElem {
|
||||
*
|
||||
* @param xpSettings Stop-gap to avoid globals during unit tests.
|
||||
*/
|
||||
static NixStringContextElem parse(
|
||||
std::string_view s,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
static NixStringContextElem
|
||||
parse(std::string_view s, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
std::string to_string() const;
|
||||
};
|
||||
|
||||
|
@ -12,8 +12,10 @@ namespace nix {
|
||||
|
||||
// for more information, refer to
|
||||
// https://github.com/nlohmann/json/blob/master/include/nlohmann/detail/input/json_sax.hpp
|
||||
class JSONSax : nlohmann::json_sax<json> {
|
||||
class JSONState {
|
||||
class JSONSax : nlohmann::json_sax<json>
|
||||
{
|
||||
class JSONState
|
||||
{
|
||||
protected:
|
||||
std::unique_ptr<JSONState> parent;
|
||||
RootValue v;
|
||||
@ -22,8 +24,14 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||
{
|
||||
throw std::logic_error("tried to close toplevel json parser state");
|
||||
}
|
||||
explicit JSONState(std::unique_ptr<JSONState> && p) : parent(std::move(p)) {}
|
||||
explicit JSONState(Value * v) : v(allocRootValue(v)) {}
|
||||
explicit JSONState(std::unique_ptr<JSONState> && p)
|
||||
: parent(std::move(p))
|
||||
{
|
||||
}
|
||||
explicit JSONState(Value * v)
|
||||
: v(allocRootValue(v))
|
||||
{
|
||||
}
|
||||
JSONState(JSONState & p) = delete;
|
||||
Value & value(EvalState & state)
|
||||
{
|
||||
@ -35,7 +43,8 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||
virtual void add() {}
|
||||
};
|
||||
|
||||
class JSONObjectState : public JSONState {
|
||||
class JSONObjectState : public JSONState
|
||||
{
|
||||
using JSONState::JSONState;
|
||||
ValueMap attrs;
|
||||
std::unique_ptr<JSONState> resolve(EvalState & state) override
|
||||
@ -46,7 +55,10 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||
parent->value(state).mkAttrs(attrs2);
|
||||
return std::move(parent);
|
||||
}
|
||||
void add() override { v = nullptr; }
|
||||
void add() override
|
||||
{
|
||||
v = nullptr;
|
||||
}
|
||||
public:
|
||||
void key(string_t & name, EvalState & state)
|
||||
{
|
||||
@ -55,7 +67,8 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||
}
|
||||
};
|
||||
|
||||
class JSONListState : public JSONState {
|
||||
class JSONListState : public JSONState
|
||||
{
|
||||
ValueVector values;
|
||||
std::unique_ptr<JSONState> resolve(EvalState & state) override
|
||||
{
|
||||
@ -65,12 +78,14 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||
parent->value(state).mkList(list);
|
||||
return std::move(parent);
|
||||
}
|
||||
void add() override {
|
||||
void add() override
|
||||
{
|
||||
values.push_back(*v);
|
||||
v = nullptr;
|
||||
}
|
||||
public:
|
||||
JSONListState(std::unique_ptr<JSONState> && p, std::size_t reserve) : JSONState(std::move(p))
|
||||
JSONListState(std::unique_ptr<JSONState> && p, std::size_t reserve)
|
||||
: JSONState(std::move(p))
|
||||
{
|
||||
values.reserve(reserve);
|
||||
}
|
||||
@ -80,7 +95,9 @@ class JSONSax : nlohmann::json_sax<json> {
|
||||
std::unique_ptr<JSONState> rs;
|
||||
|
||||
public:
|
||||
JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {};
|
||||
JSONSax(EvalState & state, Value & v)
|
||||
: state(state)
|
||||
, rs(new JSONState(&v)) {};
|
||||
|
||||
bool null() override
|
||||
{
|
||||
@ -130,7 +147,7 @@ public:
|
||||
}
|
||||
|
||||
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
|
||||
bool binary(binary_t&) override
|
||||
bool binary(binary_t &) override
|
||||
{
|
||||
// This function ought to be unreachable
|
||||
assert(false);
|
||||
@ -146,27 +163,30 @@ public:
|
||||
|
||||
bool key(string_t & name) override
|
||||
{
|
||||
dynamic_cast<JSONObjectState*>(rs.get())->key(name, state);
|
||||
dynamic_cast<JSONObjectState *>(rs.get())->key(name, state);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool end_object() override {
|
||||
bool end_object() override
|
||||
{
|
||||
rs = rs->resolve(state);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool end_array() override {
|
||||
bool end_array() override
|
||||
{
|
||||
return end_object();
|
||||
}
|
||||
|
||||
bool start_array(size_t len) override {
|
||||
rs = std::make_unique<JSONListState>(std::move(rs),
|
||||
len != std::numeric_limits<size_t>::max() ? len : 128);
|
||||
bool start_array(size_t len) override
|
||||
{
|
||||
rs = std::make_unique<JSONListState>(std::move(rs), len != std::numeric_limits<size_t>::max() ? len : 128);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) override {
|
||||
bool parse_error(std::size_t, const std::string &, const nlohmann::detail::exception & ex) override
|
||||
{
|
||||
throw JSONParseError("%s", ex.what());
|
||||
}
|
||||
};
|
||||
|
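Aside (not part of the diff): the `JSONSax` handler above is internal to `json-to-value.cc`, so the following is only a hedged sketch of how such a `json_sax` implementation is typically driven from within the same translation unit; `nlohmann::json::sax_parse` is the real nlohmann entry point, while the wrapper function itself is an assumption.

```cpp
// Hedged sketch (same translation unit, inside namespace nix, as the JSONSax
// class above): feed a JSON document through the SAX handler so that the
// resulting attribute sets, lists, and scalars land in `v`.
static bool parseJSONIntoValue(EvalState & state, const std::string & json, Value & v)
{
    JSONSax handler(state, v);
    return nlohmann::json::sax_parse(json, &handler);
}
```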
@ -17,7 +17,7 @@ ExprBlackHole eBlackHole;
|
||||
|
||||
// FIXME: remove, because *symbols* are abstract and do not have a single
|
||||
// textual representation; see printIdentifier()
|
||||
std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol)
|
||||
std::ostream & operator<<(std::ostream & str, const SymbolStr & symbol)
|
||||
{
|
||||
std::string_view s = symbol;
|
||||
return printIdentifier(str, s);
|
||||
@ -76,7 +76,8 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
|
||||
{
|
||||
typedef const decltype(attrs)::value_type * Attr;
|
||||
std::vector<Attr> sorted;
|
||||
for (auto & i : attrs) sorted.push_back(&i);
|
||||
for (auto & i : attrs)
|
||||
sorted.push_back(&i);
|
||||
std::sort(sorted.begin(), sorted.end(), [&](Attr a, Attr b) {
|
||||
std::string_view sa = symbols[a->first], sb = symbols[b->first];
|
||||
return sa < sb;
|
||||
@ -102,14 +103,16 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
|
||||
}
|
||||
if (!inherits.empty()) {
|
||||
str << "inherit";
|
||||
for (auto sym : inherits) str << " " << symbols[sym];
|
||||
for (auto sym : inherits)
|
||||
str << " " << symbols[sym];
|
||||
str << "; ";
|
||||
}
|
||||
for (const auto & [from, syms] : inheritsFrom) {
|
||||
str << "inherit (";
|
||||
(*inheritFromExprs)[from]->show(symbols, str);
|
||||
str << ")";
|
||||
for (auto sym : syms) str << " " << symbols[sym];
|
||||
for (auto sym : syms)
|
||||
str << " " << symbols[sym];
|
||||
str << "; ";
|
||||
}
|
||||
for (auto & i : sorted) {
|
||||
@ -130,7 +133,8 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
|
||||
|
||||
void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
if (recursive) str << "rec ";
|
||||
if (recursive)
|
||||
str << "rec ";
|
||||
str << "{ ";
|
||||
showBindings(symbols, str);
|
||||
str << "}";
|
||||
@ -157,7 +161,10 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
// same expression being printed in two different ways depending on its
|
||||
// context. always use lexicographic ordering to avoid this.
|
||||
for (auto & i : formals->lexicographicOrder(symbols)) {
|
||||
if (first) first = false; else str << ", ";
|
||||
if (first)
|
||||
first = false;
|
||||
else
|
||||
str << ", ";
|
||||
str << symbols[i.name];
|
||||
if (i.def) {
|
||||
str << " ? ";
|
||||
@ -165,13 +172,16 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
}
|
||||
}
|
||||
if (formals->ellipsis) {
|
||||
if (!first) str << ", ";
|
||||
if (!first)
|
||||
str << ", ";
|
||||
str << "...";
|
||||
}
|
||||
str << " }";
|
||||
if (arg) str << " @ ";
|
||||
if (arg)
|
||||
str << " @ ";
|
||||
}
|
||||
if (arg) str << symbols[arg];
|
||||
if (arg)
|
||||
str << symbols[arg];
|
||||
str << ": ";
|
||||
body->show(symbols, str);
|
||||
str << ")";
|
||||
@ -182,7 +192,7 @@ void ExprCall::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
str << '(';
|
||||
fun->show(symbols, str);
|
||||
for (auto e : args) {
|
||||
str << ' ';
|
||||
str << ' ';
|
||||
e->show(symbols, str);
|
||||
}
|
||||
str << ')';
|
||||
@ -237,7 +247,10 @@ void ExprConcatStrings::show(const SymbolTable & symbols, std::ostream & str) co
|
||||
bool first = true;
|
||||
str << "(";
|
||||
for (auto & i : *es) {
|
||||
if (first) first = false; else str << " + ";
|
||||
if (first)
|
||||
first = false;
|
||||
else
|
||||
str << " + ";
|
||||
i.second->show(symbols, str);
|
||||
}
|
||||
str << ")";
|
||||
@ -248,13 +261,15 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
str << "__curPos";
|
||||
}
std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
||||
{
|
||||
std::ostringstream out;
|
||||
bool first = true;
|
||||
for (auto & i : attrPath) {
|
||||
if (!first) out << '.'; else first = false;
|
||||
if (!first)
|
||||
out << '.';
|
||||
else
|
||||
first = false;
|
||||
if (i.symbol)
|
||||
out << symbols[i.symbol];
|
||||
else {
|
||||
@ -266,7 +281,6 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
||||
return out.str();
|
||||
}
|
||||
|
||||
|
||||
/* Computing levels/displacements for variables. */
|
||||
|
||||
void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
@ -312,7 +326,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
||||
int withLevel = -1;
|
||||
for (curEnv = env.get(), level = 0; curEnv; curEnv = curEnv->up.get(), level++) {
|
||||
if (curEnv->isWith) {
|
||||
if (withLevel == -1) withLevel = level;
|
||||
if (withLevel == -1)
|
||||
withLevel = level;
|
||||
} else {
|
||||
auto i = curEnv->find(name);
|
||||
if (i != curEnv->vars.end()) {
|
||||
@ -327,10 +342,7 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
||||
enclosing `with'. If there is no `with', then we can issue an
|
||||
"undefined variable" error now. */
|
||||
if (withLevel == -1)
|
||||
es.error<UndefinedVarError>(
|
||||
"undefined variable '%1%'",
|
||||
es.symbols[name]
|
||||
).atPos(pos).debugThrow();
|
||||
es.error<UndefinedVarError>("undefined variable '%1%'", es.symbols[name]).atPos(pos).debugThrow();
|
||||
for (auto * e = env.get(); e && !fromWith; e = e->up.get())
|
||||
fromWith = e->isWith;
|
||||
this->level = withLevel;
|
||||
@ -348,7 +360,8 @@ void ExprSelect::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
e->bindVars(es, env);
|
||||
if (def) def->bindVars(es, env);
|
||||
if (def)
|
||||
def->bindVars(es, env);
|
||||
for (auto & i : attrPath)
|
||||
if (!i.symbol)
|
||||
i.expr->bindVars(es, env);
|
||||
@ -365,8 +378,8 @@ void ExprOpHasAttr::bindVars(EvalState & es, const std::shared_ptr<const StaticE
|
||||
i.expr->bindVars(es, env);
|
||||
}
|
||||
|
||||
std::shared_ptr<const StaticEnv> ExprAttrs::bindInheritSources(
|
||||
EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
std::shared_ptr<const StaticEnv>
|
||||
ExprAttrs::bindInheritSources(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
if (!inheritFromExprs)
|
||||
return nullptr;
|
||||
@ -392,7 +405,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
if (recursive) {
|
||||
auto newEnv = [&] () -> std::shared_ptr<const StaticEnv> {
|
||||
auto newEnv = [&]() -> std::shared_ptr<const StaticEnv> {
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env, attrs.size());
|
||||
|
||||
Displacement displ = 0;
|
||||
@ -411,8 +424,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
||||
i.nameExpr->bindVars(es, newEnv);
|
||||
i.valueExpr->bindVars(es, newEnv);
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
auto inheritFromEnv = bindInheritSources(es, env);
|
||||
|
||||
for (auto & i : attrs)
|
||||
@ -439,14 +451,13 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
auto newEnv = std::make_shared<StaticEnv>(
|
||||
nullptr, env,
|
||||
(hasFormals() ? formals->formals.size() : 0) +
|
||||
(!arg ? 0 : 1));
|
||||
auto newEnv =
|
||||
std::make_shared<StaticEnv>(nullptr, env, (hasFormals() ? formals->formals.size() : 0) + (!arg ? 0 : 1));
|
||||
|
||||
Displacement displ = 0;
|
||||
|
||||
if (arg) newEnv->vars.emplace_back(arg, displ++);
|
||||
if (arg)
|
||||
newEnv->vars.emplace_back(arg, displ++);
|
||||
|
||||
if (hasFormals()) {
|
||||
for (auto & i : formals->formals)
|
||||
@ -455,7 +466,8 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
||||
newEnv->sort();
|
||||
|
||||
for (auto & i : formals->formals)
|
||||
if (i.def) i.def->bindVars(es, newEnv);
|
||||
if (i.def)
|
||||
i.def->bindVars(es, newEnv);
|
||||
}
|
||||
|
||||
body->bindVars(es, newEnv);
|
||||
@ -473,7 +485,7 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
||||
|
||||
void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
auto newEnv = [&] () -> std::shared_ptr<const StaticEnv> {
|
||||
auto newEnv = [&]() -> std::shared_ptr<const StaticEnv> {
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env, attrs->attrs.size());
|
||||
|
||||
Displacement displ = 0;
|
||||
@ -562,13 +574,9 @@ void ExprPos::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
}
|
||||
|
||||
|
||||
/* Storing function names. */
|
||||
|
||||
void Expr::setName(Symbol name)
|
||||
{
|
||||
}
|
||||
|
||||
void Expr::setName(Symbol name) {}
|
||||
|
||||
void ExprLambda::setName(Symbol name)
|
||||
{
|
||||
@ -576,16 +584,14 @@ void ExprLambda::setName(Symbol name)
|
||||
body->setName(name);
|
||||
}
|
||||
|
||||
|
||||
std::string ExprLambda::showNamePos(const EvalState & state) const
|
||||
{
|
||||
std::string id(name
|
||||
? concatStrings("'", state.symbols[name], "'")
|
||||
: "anonymous function");
|
||||
std::string id(name ? concatStrings("'", state.symbols[name], "'") : "anonymous function");
|
||||
return fmt("%1% at %2%", id, state.positions[pos]);
|
||||
}
|
||||
|
||||
void ExprLambda::setDocComment(DocComment docComment) {
|
||||
void ExprLambda::setDocComment(DocComment docComment)
|
||||
{
|
||||
// RFC 145 specifies that the innermost doc comment wins.
|
||||
// See https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md#ambiguous-placement
|
||||
if (!this->docComment) {
|
||||
@ -606,11 +612,12 @@ void ExprLambda::setDocComment(DocComment docComment) {
|
||||
size_t SymbolTable::totalSize() const
|
||||
{
|
||||
size_t n = 0;
|
||||
dump([&] (const std::string & s) { n += s.size(); });
|
||||
dump([&](const std::string & s) { n += s.size(); });
|
||||
return n;
|
||||
}
|
||||
|
||||
std::string DocComment::getInnerText(const PosTable & positions) const {
|
||||
std::string DocComment::getInnerText(const PosTable & positions) const
|
||||
{
|
||||
auto beginPos = positions[begin];
|
||||
auto endPos = positions[end];
|
||||
auto docCommentStr = beginPos.getSnippetUpTo(endPos).value_or("");
|
||||
@ -628,8 +635,6 @@ std::string DocComment::getInnerText(const PosTable & positions) const {
|
||||
return docStr;
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* ‘Cursed or’ handling.
|
||||
*
|
||||
* In parser.y, every use of expr_select in a production must call one of the
|
||||
@ -647,10 +652,13 @@ void ExprCall::warnIfCursedOr(const SymbolTable & symbols, const PosTable & posi
|
||||
{
|
||||
if (cursedOrEndPos.has_value()) {
|
||||
std::ostringstream out;
|
||||
out << "at " << positions[pos] << ": "
|
||||
out << "at " << positions[pos]
|
||||
<< ": "
|
||||
"This expression uses `or` as an identifier in a way that will change in a future Nix release.\n"
|
||||
"Wrap this entire expression in parentheses to preserve its current meaning:\n"
|
||||
" (" << positions[pos].getSnippetUpTo(positions[*cursedOrEndPos]).value_or("could not read expression") << ")\n"
|
||||
" ("
|
||||
<< positions[pos].getSnippetUpTo(positions[*cursedOrEndPos]).value_or("could not read expression")
|
||||
<< ")\n"
|
||||
"Give feedback at https://github.com/NixOS/nix/pull/11121";
|
||||
warn(out.str());
|
||||
}
|
||||
|
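Aside (not part of the diff): the `show()` implementations above only gain line breaks from the formatter. A hedged sketch of rendering an AST node back to text with them follows; the wrapper function and the use of `std::ostringstream` are assumptions, while `show(const SymbolTable &, std::ostream &)` and `EvalState::symbols` appear in the hunks of this commit.

```cpp
#include <iostream>
#include <sstream>

#include "eval.hh"    // assumed headers providing EvalState, Expr, SymbolTable
#include "nixexpr.hh"

// Hedged sketch: pretty-print an expression via the show() overloads above.
void dumpExpr(nix::EvalState & state, nix::Expr & e)
{
    std::ostringstream out;
    e.show(state.symbols, out);
    std::cout << out.str() << "\n";
}
```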
File diff suppressed because it is too large
@ -5,10 +5,11 @@
|
||||
|
||||
namespace nix {
|
||||
|
||||
static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_unsafeDiscardStringContext(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext");
|
||||
auto s = state.coerceToString(
|
||||
pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardStringContext");
|
||||
v.mkString(*s);
|
||||
}
|
||||
|
||||
@ -21,18 +22,17 @@ static RegisterPrimOp primop_unsafeDiscardStringContext({
|
||||
.fun = prim_unsafeDiscardStringContext,
|
||||
});
|
||||
|
||||
|
||||
static void prim_hasContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_hasContext(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
NixStringContext context;
|
||||
state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.hasContext");
|
||||
v.mkBool(!context.empty());
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_hasContext({
|
||||
.name = "__hasContext",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
static RegisterPrimOp primop_hasContext(
|
||||
{.name = "__hasContext",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
Return `true` if string *s* has a non-empty context.
|
||||
The context can be obtained with
|
||||
[`getContext`](#builtins-getContext).
|
||||
@ -50,21 +50,18 @@ static RegisterPrimOp primop_hasContext({
|
||||
> else { ${name} = meta; }
|
||||
> ```
|
||||
)",
|
||||
.fun = prim_hasContext
|
||||
});
|
||||
.fun = prim_hasContext});
|
||||
|
||||
|
||||
static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");
|
||||
auto s = state.coerceToString(
|
||||
pos, *args[0], context, "while evaluating the argument passed to builtins.unsafeDiscardOutputDependency");
|
||||
|
||||
NixStringContext context2;
|
||||
for (auto && c : context) {
|
||||
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c.raw)) {
|
||||
context2.emplace(NixStringContextElem::Opaque {
|
||||
.path = ptr->drvPath
|
||||
});
|
||||
context2.emplace(NixStringContextElem::Opaque{.path = ptr->drvPath});
|
||||
} else {
|
||||
/* Can reuse original item */
|
||||
context2.emplace(std::move(c).raw);
|
||||
@ -74,10 +71,10 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
|
||||
v.mkString(*s, context2);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_unsafeDiscardOutputDependency({
|
||||
.name = "__unsafeDiscardOutputDependency",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
static RegisterPrimOp primop_unsafeDiscardOutputDependency(
|
||||
{.name = "__unsafeDiscardOutputDependency",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
Create a copy of the given string where every
|
||||
[derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep)
|
||||
string context element is turned into a
|
||||
@ -94,58 +91,58 @@ static RegisterPrimOp primop_unsafeDiscardOutputDependency({
|
||||
|
||||
[`builtins.addDrvOutputDependencies`]: #builtins-addDrvOutputDependencies
|
||||
)",
|
||||
.fun = prim_unsafeDiscardOutputDependency
|
||||
});
|
||||
.fun = prim_unsafeDiscardOutputDependency});
|
||||
|
||||
|
||||
static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.addDrvOutputDependencies");
|
||||
auto s = state.coerceToString(
|
||||
pos, *args[0], context, "while evaluating the argument passed to builtins.addDrvOutputDependencies");
|
||||
|
||||
auto contextSize = context.size();
|
||||
auto contextSize = context.size();
|
||||
if (contextSize != 1) {
|
||||
state.error<EvalError>(
|
||||
"context of string '%s' must have exactly one element, but has %d",
|
||||
*s,
|
||||
contextSize
|
||||
).atPos(pos).debugThrow();
|
||||
state.error<EvalError>("context of string '%s' must have exactly one element, but has %d", *s, contextSize)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
}
|
||||
NixStringContext context2 {
|
||||
(NixStringContextElem { std::visit(overloaded {
|
||||
[&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep {
|
||||
if (!c.path.isDerivation()) {
|
||||
state.error<EvalError>(
|
||||
"path '%s' is not a derivation",
|
||||
state.store->printStorePath(c.path)
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
return NixStringContextElem::DrvDeep {
|
||||
.drvPath = c.path,
|
||||
};
|
||||
NixStringContext context2{
|
||||
(NixStringContextElem{std::visit(
|
||||
overloaded{
|
||||
[&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep {
|
||||
if (!c.path.isDerivation()) {
|
||||
state.error<EvalError>("path '%s' is not a derivation", state.store->printStorePath(c.path))
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
}
|
||||
return NixStringContextElem::DrvDeep{
|
||||
.drvPath = c.path,
|
||||
};
|
||||
},
|
||||
[&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep {
|
||||
state
|
||||
.error<EvalError>(
|
||||
"`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'",
|
||||
c.output)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
},
|
||||
[&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
|
||||
/* Reuse original item because we want this to be idempotent. */
|
||||
/* FIXME: Suspicious move out of const. This is actually a copy, so the comment
|
||||
above does not make much sense. */
|
||||
return std::move(c);
|
||||
},
|
||||
},
|
||||
[&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep {
|
||||
state.error<EvalError>(
|
||||
"`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'",
|
||||
c.output
|
||||
).atPos(pos).debugThrow();
|
||||
},
|
||||
[&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
|
||||
/* Reuse original item because we want this to be idempotent. */
|
||||
/* FIXME: Suspicious move out of const. This is actually a copy, so the comment
|
||||
above does not make much sense. */
|
||||
return std::move(c);
|
||||
},
|
||||
}, context.begin()->raw) }),
|
||||
context.begin()->raw)}),
|
||||
};
|
||||
|
||||
v.mkString(*s, context2);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_addDrvOutputDependencies({
|
||||
.name = "__addDrvOutputDependencies",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
static RegisterPrimOp primop_addDrvOutputDependencies(
|
||||
{.name = "__addDrvOutputDependencies",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
Create a copy of the given string where a single
|
||||
[constant](@docroot@/language/string-context.md#string-context-element-constant)
|
||||
string context element is turned into a
|
||||
@ -159,9 +156,7 @@ static RegisterPrimOp primop_addDrvOutputDependencies({
|
||||
|
||||
This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency).
|
||||
)",
|
||||
.fun = prim_addDrvOutputDependencies
|
||||
});
|
||||
|
||||
.fun = prim_addDrvOutputDependencies});
|
||||
|
||||
/* Extract the context of a string as a structured Nix value.
|
||||
|
||||
@ -182,9 +177,10 @@ static RegisterPrimOp primop_addDrvOutputDependencies({
|
||||
Note that for a given path any combination of the above attributes
|
||||
may be present.
|
||||
*/
|
||||
static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_getContext(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
struct ContextInfo {
|
||||
struct ContextInfo
|
||||
{
|
||||
bool path = false;
|
||||
bool allOutputs = false;
|
||||
Strings outputs;
|
||||
@ -193,20 +189,18 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
|
||||
state.forceString(*args[0], context, pos, "while evaluating the argument passed to builtins.getContext");
|
||||
auto contextInfos = std::map<StorePath, ContextInfo>();
|
||||
for (auto && i : context) {
|
||||
std::visit(overloaded {
|
||||
[&](NixStringContextElem::DrvDeep && d) {
|
||||
contextInfos[std::move(d.drvPath)].allOutputs = true;
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](NixStringContextElem::DrvDeep && d) { contextInfos[std::move(d.drvPath)].allOutputs = true; },
|
||||
[&](NixStringContextElem::Built && b) {
|
||||
// FIXME should eventually show string context as is, no
|
||||
// resolving here.
|
||||
auto drvPath = resolveDerivedPath(*state.store, *b.drvPath);
|
||||
contextInfos[std::move(drvPath)].outputs.emplace_back(std::move(b.output));
|
||||
},
|
||||
[&](NixStringContextElem::Opaque && o) { contextInfos[std::move(o.path)].path = true; },
|
||||
},
|
||||
[&](NixStringContextElem::Built && b) {
|
||||
// FIXME should eventually show string context as is, no
|
||||
// resolving here.
|
||||
auto drvPath = resolveDerivedPath(*state.store, *b.drvPath);
|
||||
contextInfos[std::move(drvPath)].outputs.emplace_back(std::move(b.output));
|
||||
},
|
||||
[&](NixStringContextElem::Opaque && o) {
|
||||
contextInfos[std::move(o.path)].path = true;
|
||||
},
|
||||
}, ((NixStringContextElem &&) i).raw);
|
||||
((NixStringContextElem &&) i).raw);
|
||||
}
|
||||
|
||||
auto attrs = state.buildBindings(contextInfos.size());
|
||||
@ -231,10 +225,10 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_getContext({
|
||||
.name = "__getContext",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
static RegisterPrimOp primop_getContext(
|
||||
{.name = "__getContext",
|
||||
.args = {"s"},
|
||||
.doc = R"(
|
||||
Return the string context of *s*.
|
||||
|
||||
The string context tracks references to derivations within a string.
|
||||
@ -253,19 +247,18 @@ static RegisterPrimOp primop_getContext({
|
||||
{ "/nix/store/arhvjaf6zmlyn8vh8fgn55rpwnxq0n7l-a.drv" = { outputs = [ "out" ]; }; }
|
||||
```
|
||||
)",
|
||||
.fun = prim_getContext
|
||||
});
|
||||
|
||||
.fun = prim_getContext});
|
||||
|
||||
/* Append the given context to a given string.
|
||||
|
||||
See the commentary above getContext for details of the
|
||||
context representation.
|
||||
*/
|
||||
static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_appendContext(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
NixStringContext context;
|
||||
auto orig = state.forceString(*args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext");
|
||||
auto orig = state.forceString(
|
||||
*args[0], context, noPos, "while evaluating the first argument passed to builtins.appendContext");
|
||||
|
||||
state.forceAttrs(*args[1], pos, "while evaluating the second argument passed to builtins.appendContext");
|
||||
|
||||
@ -274,10 +267,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
||||
for (auto & i : *args[1]->attrs()) {
|
||||
const auto & name = state.symbols[i.name];
|
||||
if (!state.store->isStorePath(name))
|
||||
state.error<EvalError>(
|
||||
"context key '%s' is not a store path",
|
||||
name
|
||||
).atPos(i.pos).debugThrow();
|
||||
state.error<EvalError>("context key '%s' is not a store path", name).atPos(i.pos).debugThrow();
|
||||
auto namePath = state.store->parseStorePath(name);
|
||||
if (!settings.readOnlyMode)
|
||||
state.store->ensurePath(namePath);
|
||||
@ -285,39 +275,46 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
||||
|
||||
if (auto attr = i.value->attrs()->get(sPath)) {
|
||||
if (state.forceBool(*attr->value, attr->pos, "while evaluating the `path` attribute of a string context"))
|
||||
context.emplace(NixStringContextElem::Opaque {
|
||||
.path = namePath,
|
||||
});
|
||||
context.emplace(
|
||||
NixStringContextElem::Opaque{
|
||||
.path = namePath,
|
||||
});
|
||||
}
|
||||
|
||||
if (auto attr = i.value->attrs()->get(sAllOutputs)) {
|
||||
if (state.forceBool(*attr->value, attr->pos, "while evaluating the `allOutputs` attribute of a string context")) {
|
||||
if (state.forceBool(
|
||||
*attr->value, attr->pos, "while evaluating the `allOutputs` attribute of a string context")) {
|
||||
if (!isDerivation(name)) {
|
||||
state.error<EvalError>(
|
||||
"tried to add all-outputs context of %s, which is not a derivation, to a string",
|
||||
name
|
||||
).atPos(i.pos).debugThrow();
|
||||
state
|
||||
.error<EvalError>(
|
||||
"tried to add all-outputs context of %s, which is not a derivation, to a string", name)
|
||||
.atPos(i.pos)
|
||||
.debugThrow();
|
||||
}
|
||||
context.emplace(NixStringContextElem::DrvDeep {
|
||||
.drvPath = namePath,
|
||||
});
|
||||
context.emplace(
|
||||
NixStringContextElem::DrvDeep{
|
||||
.drvPath = namePath,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (auto attr = i.value->attrs()->get(state.sOutputs)) {
|
||||
state.forceList(*attr->value, attr->pos, "while evaluating the `outputs` attribute of a string context");
|
||||
if (attr->value->listSize() && !isDerivation(name)) {
|
||||
state.error<EvalError>(
|
||||
"tried to add derivation output context of %s, which is not a derivation, to a string",
|
||||
name
|
||||
).atPos(i.pos).debugThrow();
|
||||
state
|
||||
.error<EvalError>(
|
||||
"tried to add derivation output context of %s, which is not a derivation, to a string", name)
|
||||
.atPos(i.pos)
|
||||
.debugThrow();
|
||||
}
|
||||
for (auto elem : attr->value->listItems()) {
|
||||
auto outputName = state.forceStringNoCtx(*elem, attr->pos, "while evaluating an output name within a string context");
|
||||
context.emplace(NixStringContextElem::Built {
|
||||
.drvPath = makeConstantStorePathRef(namePath),
|
||||
.output = std::string { outputName },
|
||||
});
|
||||
auto outputName =
|
||||
state.forceStringNoCtx(*elem, attr->pos, "while evaluating an output name within a string context");
|
||||
context.emplace(
|
||||
NixStringContextElem::Built{
|
||||
.drvPath = makeConstantStorePathRef(namePath),
|
||||
.output = std::string{outputName},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -325,10 +322,6 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
||||
v.mkString(orig, context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_appendContext({
|
||||
.name = "__appendContext",
|
||||
.arity = 2,
|
||||
.fun = prim_appendContext
|
||||
});
|
||||
static RegisterPrimOp primop_appendContext({.name = "__appendContext", .arity = 2, .fun = prim_appendContext});
|
||||
|
||||
}
|
||||
|
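Aside (not part of the diff): the context primops above repeatedly dispatch on `NixStringContextElem` with `std::visit` and the `overloaded` helper. The bare pattern in isolation is sketched below; the function, the header names, and the printed strings are assumptions, while the variant alternatives (`Opaque`, `DrvDeep`, `Built`) and the `raw` member come from the hunks in this commit.

```cpp
#include <iostream>
#include <variant>

#include "value/context.hh" // assumed header for NixStringContextElem
#include "util.hh"          // assumed home of the `overloaded` visitor helper

// Hedged sketch: classify one string-context element, mirroring the visitor
// style used by prim_getContext above.
void describeContextElem(const nix::NixStringContextElem & elem, std::ostream & out)
{
    std::visit(
        nix::overloaded{
            [&](const nix::NixStringContextElem::Opaque &) { out << "plain store path\n"; },
            [&](const nix::NixStringContextElem::DrvDeep &) { out << "all outputs of a derivation\n"; },
            [&](const nix::NixStringContextElem::Built & b) { out << "derivation output '" << b.output << "'\n"; },
        },
        elem.raw);
}
```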
@ -15,29 +15,35 @@ namespace nix {
|
||||
* @param toPathMaybe Path to write the rewritten path to. If empty, the error shows the actual path.
|
||||
* @param v Return `Value`
|
||||
*/
|
||||
static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, const std::optional<StorePath> & toPathMaybe, Value &v) {
|
||||
static void runFetchClosureWithRewrite(
|
||||
EvalState & state,
|
||||
const PosIdx pos,
|
||||
Store & fromStore,
|
||||
const StorePath & fromPath,
|
||||
const std::optional<StorePath> & toPathMaybe,
|
||||
Value & v)
|
||||
{
|
||||
|
||||
// establish toPath or throw
|
||||
|
||||
if (!toPathMaybe || !state.store->isValidPath(*toPathMaybe)) {
|
||||
auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath);
|
||||
if (toPathMaybe && *toPathMaybe != rewrittenPath)
|
||||
throw Error({
|
||||
.msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
|
||||
state.store->printStorePath(fromPath),
|
||||
state.store->printStorePath(rewrittenPath),
|
||||
state.store->printStorePath(*toPathMaybe)),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
throw Error(
|
||||
{.msg = HintFmt(
|
||||
"rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
|
||||
state.store->printStorePath(fromPath),
|
||||
state.store->printStorePath(rewrittenPath),
|
||||
state.store->printStorePath(*toPathMaybe)),
|
||||
.pos = state.positions[pos]});
|
||||
if (!toPathMaybe)
|
||||
throw Error({
|
||||
.msg = HintFmt(
|
||||
"rewriting '%s' to content-addressed form yielded '%s'\n"
|
||||
"Use this value for the 'toPath' attribute passed to 'fetchClosure'",
|
||||
state.store->printStorePath(fromPath),
|
||||
state.store->printStorePath(rewrittenPath)),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
throw Error(
|
||||
{.msg = HintFmt(
|
||||
"rewriting '%s' to content-addressed form yielded '%s'\n"
|
||||
"Use this value for the 'toPath' attribute passed to 'fetchClosure'",
|
||||
state.store->printStorePath(fromPath),
|
||||
state.store->printStorePath(rewrittenPath)),
|
||||
.pos = state.positions[pos]});
|
||||
}
|
||||
|
||||
const auto & toPath = *toPathMaybe;
|
||||
@ -49,13 +55,12 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
|
||||
if (!resultInfo->isContentAddressed(*state.store)) {
|
||||
// We don't perform the rewriting when outPath already exists, as an optimisation.
|
||||
// However, we can quickly detect a mistake if the toPath is input addressed.
|
||||
throw Error({
|
||||
.msg = HintFmt(
|
||||
"The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n"
|
||||
"Set 'toPath' to an empty string to make Nix report the correct content-addressed path.",
|
||||
state.store->printStorePath(toPath)),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
throw Error(
|
||||
{.msg = HintFmt(
|
||||
"The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n"
|
||||
"Set 'toPath' to an empty string to make Nix report the correct content-addressed path.",
|
||||
state.store->printStorePath(toPath)),
|
||||
.pos = state.positions[pos]});
|
||||
}
|
||||
|
||||
state.mkStorePathString(toPath, v);
|
||||
@ -64,24 +69,25 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
/**
* Fetch the closure and make sure it's content addressed.
*/
static void runFetchClosureWithContentAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) {
static void runFetchClosureWithContentAddressedPath(
EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v)
{

if (!state.store->isValidPath(fromPath))
copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath });
copyClosure(fromStore, *state.store, RealisedPath::Set{fromPath});

auto info = state.store->queryPathInfo(fromPath);

if (!info->isContentAddressed(*state.store)) {
throw Error({
.msg = HintFmt(
"The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n"
"If you do intend to fetch an input-addressed store path, add\n\n"
" inputAddressed = true;\n\n"
"to the 'fetchClosure' arguments.\n\n"
"Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.",
state.store->printStorePath(fromPath)),
.pos = state.positions[pos]
});
throw Error(
{.msg = HintFmt(
"The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n"
"If you do intend to fetch an input-addressed store path, add\n\n"
" inputAddressed = true;\n\n"
"to the 'fetchClosure' arguments.\n\n"
"Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.",
state.store->printStorePath(fromPath)),
.pos = state.positions[pos]});
}

state.mkStorePathString(fromPath, v);
@ -90,21 +96,22 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos
/**
* Fetch the closure and make sure it's input addressed.
*/
static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) {
static void runFetchClosureWithInputAddressedPath(
EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v)
{

if (!state.store->isValidPath(fromPath))
copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath });
copyClosure(fromStore, *state.store, RealisedPath::Set{fromPath});

auto info = state.store->queryPathInfo(fromPath);

if (info->isContentAddressed(*state.store)) {
throw Error({
.msg = HintFmt(
"The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n"
"Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed",
state.store->printStorePath(fromPath)),
.pos = state.positions[pos]
});
throw Error(
{.msg = HintFmt(
"The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n"
"Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed",
state.store->printStorePath(fromPath)),
.pos = state.positions[pos]});
}

state.mkStorePathString(fromPath, v);
@ -112,7 +119,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId

typedef std::optional<StorePath> StorePathOrGap;

static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v)
static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value ** args, Value & v)
{
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.fetchClosure");

@ -136,67 +143,58 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
state.forceValue(*attr.value, attr.pos);
bool isEmptyString = attr.value->type() == nString && attr.value->string_view() == "";
if (isEmptyString) {
toPath = StorePathOrGap {};
}
else {
toPath = StorePathOrGap{};
} else {
NixStringContext context;
toPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint());
}
}

else if (attrName == "fromStore")
fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos,
attrHint());
fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos, attrHint());

else if (attrName == "inputAddressed")
inputAddressedMaybe = state.forceBool(*attr.value, attr.pos, attrHint());

else
throw Error({
.msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
.pos = state.positions[pos]
});
throw Error(
{.msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
.pos = state.positions[pos]});
}

if (!fromPath)
throw Error({
.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
.pos = state.positions[pos]
});
throw Error(
{.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
.pos = state.positions[pos]});

bool inputAddressed = inputAddressedMaybe.value_or(false);

if (inputAddressed) {
if (toPath)
throw Error({
.msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
"inputAddressed",
"toPath"),
.pos = state.positions[pos]
});
throw Error(
{.msg = HintFmt(
"attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
"inputAddressed",
"toPath"),
.pos = state.positions[pos]});
}

if (!fromStoreUrl)
throw Error({
.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
.pos = state.positions[pos]
});
throw Error(
{.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
.pos = state.positions[pos]});

auto parsedURL = parseURL(*fromStoreUrl);

if (parsedURL.scheme != "http" &&
parsedURL.scheme != "https" &&
!(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file"))
throw Error({
.msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"),
.pos = state.positions[pos]
});
if (parsedURL.scheme != "http" && parsedURL.scheme != "https"
&& !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file"))
throw Error(
{.msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"), .pos = state.positions[pos]});

if (!parsedURL.query.empty())
throw Error({
.msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
.pos = state.positions[pos]
});
throw Error(
{.msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
.pos = state.positions[pos]});

auto fromStore = openStore(parsedURL.to_string());

@ -8,7 +8,7 @@
|
||||
|
||||
namespace nix {
|
||||
|
||||
static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
std::string url;
|
||||
std::optional<Hash> rev;
|
||||
@ -23,31 +23,46 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
||||
for (auto & attr : *args[0]->attrs()) {
|
||||
std::string_view n(state.symbols[attr.name]);
|
||||
if (n == "url")
|
||||
url = state.coerceToString(attr.pos, *attr.value, context,
|
||||
"while evaluating the `url` attribute passed to builtins.fetchMercurial",
|
||||
false, false).toOwned();
|
||||
url = state
|
||||
.coerceToString(
|
||||
attr.pos,
|
||||
*attr.value,
|
||||
context,
|
||||
"while evaluating the `url` attribute passed to builtins.fetchMercurial",
|
||||
false,
|
||||
false)
|
||||
.toOwned();
|
||||
else if (n == "rev") {
|
||||
// Ugly: unlike fetchGit, here the "rev" attribute can
|
||||
// be both a revision or a branch/tag name.
|
||||
auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
|
||||
auto value = state.forceStringNoCtx(
|
||||
*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
|
||||
if (std::regex_match(value.begin(), value.end(), revRegex))
|
||||
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
|
||||
else
|
||||
ref = value;
|
||||
}
|
||||
else if (n == "name")
|
||||
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial");
|
||||
} else if (n == "name")
|
||||
name = state.forceStringNoCtx(
|
||||
*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial");
|
||||
else
|
||||
state.error<EvalError>("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow();
|
||||
state.error<EvalError>("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name])
|
||||
.atPos(attr.pos)
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
if (url.empty())
|
||||
state.error<EvalError>("'url' argument required").atPos(pos).debugThrow();
|
||||
|
||||
} else
|
||||
url = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the first argument passed to builtins.fetchMercurial",
|
||||
false, false).toOwned();
|
||||
url = state
|
||||
.coerceToString(
|
||||
pos,
|
||||
*args[0],
|
||||
context,
|
||||
"while evaluating the first argument passed to builtins.fetchMercurial",
|
||||
false,
|
||||
false)
|
||||
.toOwned();
|
||||
|
||||
// FIXME: git externals probably can be used to bypass the URI
|
||||
// whitelist. Ah well.
|
||||
@ -60,8 +75,10 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
||||
attrs.insert_or_assign("type", "hg");
|
||||
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
|
||||
attrs.insert_or_assign("name", std::string(name));
|
||||
if (ref) attrs.insert_or_assign("ref", *ref);
|
||||
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
|
||||
if (ref)
|
||||
attrs.insert_or_assign("ref", *ref);
|
||||
if (rev)
|
||||
attrs.insert_or_assign("rev", rev->gitRev());
|
||||
auto input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs));
|
||||
|
||||
auto [storePath, input2] = input.fetchToStore(state.store);
|
||||
@ -82,10 +99,6 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
||||
state.allowPath(storePath);
|
||||
}
|
||||
|
||||
static RegisterPrimOp r_fetchMercurial({
|
||||
.name = "fetchMercurial",
|
||||
.arity = 1,
|
||||
.fun = prim_fetchMercurial
|
||||
});
|
||||
static RegisterPrimOp r_fetchMercurial({.name = "fetchMercurial", .arity = 1, .fun = prim_fetchMercurial});
|
||||
|
||||
}
|
||||
|
@ -37,8 +37,7 @@ void emitTreeAttrs(
|
||||
attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true));
|
||||
|
||||
if (input.getType() == "git")
|
||||
attrs.alloc("submodules").mkBool(
|
||||
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
|
||||
attrs.alloc("submodules").mkBool(fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
|
||||
|
||||
if (!forceDirty) {
|
||||
|
||||
@ -56,7 +55,6 @@ void emitTreeAttrs(
|
||||
attrs.alloc("revCount").mkInt(*revCount);
|
||||
else if (emptyRevFallback)
|
||||
attrs.alloc("revCount").mkInt(0);
|
||||
|
||||
}
|
||||
|
||||
if (auto dirtyRev = fetchers::maybeGetStrAttr(input.attrs, "dirtyRev")) {
|
||||
@ -66,14 +64,14 @@ void emitTreeAttrs(
|
||||
|
||||
if (auto lastModified = input.getLastModified()) {
|
||||
attrs.alloc("lastModified").mkInt(*lastModified);
|
||||
attrs.alloc("lastModifiedDate").mkString(
|
||||
fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
|
||||
attrs.alloc("lastModifiedDate").mkString(fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
|
||||
}
|
||||
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
|
||||
struct FetchTreeParams {
|
||||
struct FetchTreeParams
|
||||
{
|
||||
bool emptyRevFallback = false;
|
||||
bool allowNameArgument = false;
|
||||
bool isFetchGit = false;
|
||||
@ -81,17 +79,14 @@ struct FetchTreeParams {
|
||||
};
|
||||
|
||||
static void fetchTree(
|
||||
EvalState & state,
|
||||
const PosIdx pos,
|
||||
Value * * args,
|
||||
Value & v,
|
||||
const FetchTreeParams & params = FetchTreeParams{}
|
||||
) {
|
||||
fetchers::Input input { state.fetchSettings };
|
||||
EvalState & state, const PosIdx pos, Value ** args, Value & v, const FetchTreeParams & params = FetchTreeParams{})
|
||||
{
|
||||
fetchers::Input input{state.fetchSettings};
|
||||
NixStringContext context;
|
||||
std::optional<std::string> type;
|
||||
auto fetcher = params.isFetchGit ? "fetchGit" : "fetchTree";
|
||||
if (params.isFetchGit) type = "git";
|
||||
if (params.isFetchGit)
|
||||
type = "git";
|
||||
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
@ -102,47 +97,55 @@ static void fetchTree(
|
||||
|
||||
if (auto aType = args[0]->attrs()->get(state.sType)) {
|
||||
if (type)
|
||||
state.error<EvalError>(
|
||||
"unexpected argument 'type'"
|
||||
).atPos(pos).debugThrow();
|
||||
type = state.forceStringNoCtx(*aType->value, aType->pos,
|
||||
fmt("while evaluating the `type` argument passed to '%s'", fetcher));
|
||||
state.error<EvalError>("unexpected argument 'type'").atPos(pos).debugThrow();
|
||||
type = state.forceStringNoCtx(
|
||||
*aType->value, aType->pos, fmt("while evaluating the `type` argument passed to '%s'", fetcher));
|
||||
} else if (!type)
|
||||
state.error<EvalError>(
|
||||
"argument 'type' is missing in call to '%s'", fetcher
|
||||
).atPos(pos).debugThrow();
|
||||
state.error<EvalError>("argument 'type' is missing in call to '%s'", fetcher).atPos(pos).debugThrow();
|
||||
|
||||
attrs.emplace("type", type.value());
|
||||
|
||||
for (auto & attr : *args[0]->attrs()) {
|
||||
if (attr.name == state.sType) continue;
|
||||
if (attr.name == state.sType)
|
||||
continue;
|
||||
state.forceValue(*attr.value, attr.pos);
|
||||
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
||||
auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned();
|
||||
attrs.emplace(state.symbols[attr.name],
|
||||
params.isFetchGit && state.symbols[attr.name] == "url"
|
||||
? fixGitURL(s)
|
||||
: s);
|
||||
}
|
||||
else if (attr.value->type() == nBool)
|
||||
attrs.emplace(
|
||||
state.symbols[attr.name],
|
||||
params.isFetchGit && state.symbols[attr.name] == "url" ? fixGitURL(s) : s);
|
||||
} else if (attr.value->type() == nBool)
|
||||
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean()});
|
||||
else if (attr.value->type() == nInt) {
|
||||
auto intValue = attr.value->integer().value;
|
||||
|
||||
if (intValue < 0)
|
||||
state.error<EvalError>("negative value given for '%s' argument '%s': %d", fetcher, state.symbols[attr.name], intValue).atPos(pos).debugThrow();
|
||||
state
|
||||
.error<EvalError>(
|
||||
"negative value given for '%s' argument '%s': %d",
|
||||
fetcher,
|
||||
state.symbols[attr.name],
|
||||
intValue)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
|
||||
attrs.emplace(state.symbols[attr.name], uint64_t(intValue));
|
||||
} else if (state.symbols[attr.name] == "publicKeys") {
|
||||
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
|
||||
}
|
||||
else
|
||||
state.error<TypeError>("argument '%s' to '%s' is %s while a string, Boolean or integer is expected",
|
||||
state.symbols[attr.name], fetcher, showType(*attr.value)).debugThrow();
|
||||
attrs.emplace(
|
||||
state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
|
||||
} else
|
||||
state
|
||||
.error<TypeError>(
|
||||
"argument '%s' to '%s' is %s while a string, Boolean or integer is expected",
|
||||
state.symbols[attr.name],
|
||||
fetcher,
|
||||
showType(*attr.value))
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
|
||||
if (params.isFetchGit && !attrs.contains("exportIgnore")
|
||||
&& (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
|
||||
attrs.emplace("exportIgnore", Explicit<bool>{true});
|
||||
}
|
||||
|
||||
@ -153,29 +156,38 @@ static void fetchTree(
|
||||
|
||||
if (!params.allowNameArgument)
|
||||
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
|
||||
state.error<EvalError>(
|
||||
"argument 'name' isn’t supported in call to '%s'", fetcher
|
||||
).atPos(pos).debugThrow();
|
||||
state.error<EvalError>("argument 'name' isn’t supported in call to '%s'", fetcher)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
|
||||
input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs));
|
||||
} else {
|
||||
auto url = state.coerceToString(pos, *args[0], context,
|
||||
fmt("while evaluating the first argument passed to '%s'", fetcher),
|
||||
false, false).toOwned();
|
||||
auto url = state
|
||||
.coerceToString(
|
||||
pos,
|
||||
*args[0],
|
||||
context,
|
||||
fmt("while evaluating the first argument passed to '%s'", fetcher),
|
||||
false,
|
||||
false)
|
||||
.toOwned();
|
||||
|
||||
if (params.isFetchGit) {
|
||||
fetchers::Attrs attrs;
|
||||
attrs.emplace("type", "git");
|
||||
attrs.emplace("url", fixGitURL(url));
|
||||
if (!attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
|
||||
if (!attrs.contains("exportIgnore")
|
||||
&& (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
|
||||
attrs.emplace("exportIgnore", Explicit<bool>{true});
|
||||
}
|
||||
input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs));
|
||||
} else {
|
||||
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
|
||||
state.error<EvalError>(
|
||||
"passing a string argument to '%s' requires the 'flakes' experimental feature", fetcher
|
||||
).atPos(pos).debugThrow();
|
||||
state
|
||||
.error<EvalError>(
|
||||
"passing a string argument to '%s' requires the 'flakes' experimental feature", fetcher)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
input = fetchers::Input::fromURL(state.fetchSettings, url);
|
||||
}
|
||||
}
|
||||
@ -190,9 +202,11 @@ static void fetchTree(
|
||||
"This is deprecated since such inputs are verifiable but may not be reproducible.",
|
||||
input.to_string());
|
||||
else
|
||||
state.error<EvalError>(
|
||||
"in pure evaluation mode, '%s' will not fetch unlocked input '%s'",
|
||||
fetcher, input.to_string()).atPos(pos).debugThrow();
|
||||
state
|
||||
.error<EvalError>(
|
||||
"in pure evaluation mode, '%s' will not fetch unlocked input '%s'", fetcher, input.to_string())
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
state.checkURI(input.toURLString());
|
||||
@ -211,9 +225,9 @@ static void fetchTree(
|
||||
emitTreeAttrs(state, storePath, input2, v, params.emptyRevFallback, false);
|
||||
}
|
||||
|
||||
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
fetchTree(state, pos, args, v, { });
|
||||
fetchTree(state, pos, args, v, {});
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_fetchTree({
|
||||
@ -446,7 +460,7 @@ static RegisterPrimOp primop_fetchTree({
|
||||
.experimentalFeature = Xp::FetchTree,
|
||||
});
|
||||
|
||||
void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
fetchTree(state, pos, args, v, {.isFinal = true});
|
||||
}
|
||||
@ -458,8 +472,14 @@ static RegisterPrimOp primop_fetchFinalTree({
|
||||
.internal = true,
|
||||
});
|
||||
|
||||
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
|
||||
const std::string & who, bool unpack, std::string name)
|
||||
static void fetch(
|
||||
EvalState & state,
|
||||
const PosIdx pos,
|
||||
Value ** args,
|
||||
Value & v,
|
||||
const std::string & who,
|
||||
bool unpack,
|
||||
std::string name)
|
||||
{
|
||||
std::optional<std::string> url;
|
||||
std::optional<Hash> expectedHash;
|
||||
@ -476,19 +496,20 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
||||
if (n == "url")
|
||||
url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
|
||||
else if (n == "sha256")
|
||||
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
|
||||
expectedHash = newHashAllowEmpty(
|
||||
state.forceStringNoCtx(
|
||||
*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"),
|
||||
HashAlgorithm::SHA256);
|
||||
else if (n == "name") {
|
||||
nameAttrPassed = true;
|
||||
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
|
||||
}
|
||||
else
|
||||
state.error<EvalError>("unsupported argument '%s' to '%s'", n, who)
|
||||
.atPos(pos).debugThrow();
|
||||
name = state.forceStringNoCtx(
|
||||
*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
|
||||
} else
|
||||
state.error<EvalError>("unsupported argument '%s' to '%s'", n, who).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
if (!url)
|
||||
state.error<EvalError>(
|
||||
"'url' argument required").atPos(pos).debugThrow();
|
||||
state.error<EvalError>("'url' argument required").atPos(pos).debugThrow();
|
||||
} else
|
||||
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
|
||||
|
||||
@ -504,27 +525,41 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
||||
checkName(name);
|
||||
} catch (BadStorePathName & e) {
|
||||
auto resolution =
|
||||
nameAttrPassed ? HintFmt("Please change the value for the 'name' attribute passed to '%s', so that it can create a valid store path.", who) :
|
||||
isArgAttrs ? HintFmt("Please add a valid 'name' attribute to the argument for '%s', so that it can create a valid store path.", who) :
|
||||
HintFmt("Please pass an attribute set with 'url' and 'name' attributes to '%s', so that it can create a valid store path.", who);
|
||||
nameAttrPassed
|
||||
? HintFmt(
|
||||
"Please change the value for the 'name' attribute passed to '%s', so that it can create a valid store path.",
|
||||
who)
|
||||
: isArgAttrs
|
||||
? HintFmt(
|
||||
"Please add a valid 'name' attribute to the argument for '%s', so that it can create a valid store path.",
|
||||
who)
|
||||
: HintFmt(
|
||||
"Please pass an attribute set with 'url' and 'name' attributes to '%s', so that it can create a valid store path.",
|
||||
who);
|
||||
|
||||
state.error<EvalError>(
|
||||
std::string("invalid store path name when fetching URL '%s': %s. %s"), *url, Uncolored(e.message()), Uncolored(resolution.str()))
|
||||
.atPos(pos).debugThrow();
|
||||
state
|
||||
.error<EvalError>(
|
||||
std::string("invalid store path name when fetching URL '%s': %s. %s"),
|
||||
*url,
|
||||
Uncolored(e.message()),
|
||||
Uncolored(resolution.str()))
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
if (state.settings.pureEval && !expectedHash)
|
||||
state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow();
|
||||
state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
|
||||
// early exit if pinned and already in the store
|
||||
if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
|
||||
auto expectedPath = state.store->makeFixedOutputPath(
|
||||
name,
|
||||
FixedOutputInfo {
|
||||
FixedOutputInfo{
|
||||
.method = unpack ? FileIngestionMethod::NixArchive : FileIngestionMethod::Flat,
|
||||
.hash = *expectedHash,
|
||||
.references = {}
|
||||
});
|
||||
.references = {}});
|
||||
|
||||
if (state.store->isValidPath(expectedPath)) {
|
||||
state.allowAndSetStorePathString(expectedPath, v);
|
||||
@ -534,34 +569,32 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
||||
|
||||
// TODO: fetching may fail, yet the path may be substitutable.
|
||||
// https://github.com/NixOS/nix/issues/4313
|
||||
auto storePath =
|
||||
unpack
|
||||
? fetchToStore(
|
||||
*state.store,
|
||||
fetchers::downloadTarball(state.store, state.fetchSettings, *url),
|
||||
FetchMode::Copy,
|
||||
name)
|
||||
: fetchers::downloadFile(state.store, *url, name).storePath;
|
||||
auto storePath = unpack ? fetchToStore(
|
||||
*state.store,
|
||||
fetchers::downloadTarball(state.store, state.fetchSettings, *url),
|
||||
FetchMode::Copy,
|
||||
name)
|
||||
: fetchers::downloadFile(state.store, *url, name).storePath;
|
||||
|
||||
if (expectedHash) {
|
||||
auto hash = unpack
|
||||
? state.store->queryPathInfo(storePath)->narHash
|
||||
: hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
|
||||
auto hash = unpack ? state.store->queryPathInfo(storePath)->narHash
|
||||
: hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
|
||||
if (hash != *expectedHash) {
|
||||
state.error<EvalError>(
|
||||
"hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
|
||||
*url,
|
||||
expectedHash->to_string(HashFormat::Nix32, true),
|
||||
hash.to_string(HashFormat::Nix32, true)
|
||||
).withExitStatus(102)
|
||||
.debugThrow();
|
||||
state
|
||||
.error<EvalError>(
|
||||
"hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
|
||||
*url,
|
||||
expectedHash->to_string(HashFormat::Nix32, true),
|
||||
hash.to_string(HashFormat::Nix32, true))
|
||||
.withExitStatus(102)
|
||||
.debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
state.allowAndSetStorePathString(storePath, v);
|
||||
}
|
||||
|
||||
static void prim_fetchurl(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_fetchurl(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
fetch(state, pos, args, v, "fetchurl", false, "");
|
||||
}
|
||||
@ -587,7 +620,7 @@ static RegisterPrimOp primop_fetchurl({
|
||||
.fun = prim_fetchurl,
|
||||
});
|
||||
|
||||
static void prim_fetchTarball(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_fetchTarball(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
fetch(state, pos, args, v, "fetchTarball", true, "source");
|
||||
}
|
||||
@ -637,14 +670,10 @@ static RegisterPrimOp primop_fetchTarball({
|
||||
.fun = prim_fetchTarball,
|
||||
});
|
||||
|
||||
static void prim_fetchGit(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
static void prim_fetchGit(EvalState & state, const PosIdx pos, Value ** args, Value & v)
|
||||
{
|
||||
fetchTree(state, pos, args, v,
|
||||
FetchTreeParams {
|
||||
.emptyRevFallback = true,
|
||||
.allowNameArgument = true,
|
||||
.isFetchGit = true
|
||||
});
|
||||
fetchTree(
|
||||
state, pos, args, v, FetchTreeParams{.emptyRevFallback = true, .allowNameArgument = true, .isFetchGit = true});
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_fetchGit({
|
||||
|
@ -7,7 +7,7 @@
|
||||
|
||||
namespace nix {
|
||||
|
||||
static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, Value & val)
|
||||
static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Value & val)
|
||||
{
|
||||
auto toml = state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.fromTOML");
|
||||
|
||||
@ -16,75 +16,75 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
std::function<void(Value &, toml::value)> visit;
|
||||
|
||||
visit = [&](Value & v, toml::value t) {
|
||||
switch (t.type()) {
|
||||
case toml::value_t::table: {
|
||||
auto table = toml::get<toml::table>(t);
|
||||
|
||||
switch(t.type())
|
||||
{
|
||||
case toml::value_t::table:
|
||||
{
|
||||
auto table = toml::get<toml::table>(t);
|
||||
size_t size = 0;
|
||||
for (auto & i : table) {
|
||||
(void) i;
|
||||
size++;
|
||||
}
|
||||
|
||||
size_t size = 0;
|
||||
for (auto & i : table) { (void) i; size++; }
|
||||
auto attrs = state.buildBindings(size);
|
||||
|
||||
auto attrs = state.buildBindings(size);
|
||||
for (auto & elem : table) {
|
||||
forceNoNullByte(elem.first);
|
||||
visit(attrs.alloc(elem.first), elem.second);
|
||||
}
|
||||
|
||||
for(auto & elem : table) {
|
||||
forceNoNullByte(elem.first);
|
||||
visit(attrs.alloc(elem.first), elem.second);
|
||||
}
|
||||
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
break;;
|
||||
case toml::value_t::array:
|
||||
{
|
||||
auto array = toml::get<std::vector<toml::value>>(t);
|
||||
|
||||
auto list = state.buildList(array.size());
|
||||
for (const auto & [n, v] : enumerate(list))
|
||||
visit(*(v = state.allocValue()), array[n]);
|
||||
v.mkList(list);
|
||||
}
|
||||
break;;
|
||||
case toml::value_t::boolean:
|
||||
v.mkBool(toml::get<bool>(t));
|
||||
break;;
|
||||
case toml::value_t::integer:
|
||||
v.mkInt(toml::get<int64_t>(t));
|
||||
break;;
|
||||
case toml::value_t::floating:
|
||||
v.mkFloat(toml::get<NixFloat>(t));
|
||||
break;;
|
||||
case toml::value_t::string:
|
||||
{
|
||||
auto s = toml::get<std::string_view>(t);
|
||||
forceNoNullByte(s);
|
||||
v.mkString(s);
|
||||
}
|
||||
break;;
|
||||
case toml::value_t::local_datetime:
|
||||
case toml::value_t::offset_datetime:
|
||||
case toml::value_t::local_date:
|
||||
case toml::value_t::local_time:
|
||||
{
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) {
|
||||
auto attrs = state.buildBindings(2);
|
||||
attrs.alloc("_type").mkString("timestamp");
|
||||
std::ostringstream s;
|
||||
s << t;
|
||||
auto str = toView(s);
|
||||
forceNoNullByte(str);
|
||||
attrs.alloc("value").mkString(str);
|
||||
v.mkAttrs(attrs);
|
||||
} else {
|
||||
throw std::runtime_error("Dates and times are not supported");
|
||||
}
|
||||
}
|
||||
break;;
|
||||
case toml::value_t::empty:
|
||||
v.mkNull();
|
||||
break;;
|
||||
v.mkAttrs(attrs);
|
||||
} break;
|
||||
;
|
||||
case toml::value_t::array: {
|
||||
auto array = toml::get<std::vector<toml::value>>(t);
|
||||
|
||||
auto list = state.buildList(array.size());
|
||||
for (const auto & [n, v] : enumerate(list))
|
||||
visit(*(v = state.allocValue()), array[n]);
|
||||
v.mkList(list);
|
||||
} break;
|
||||
;
|
||||
case toml::value_t::boolean:
|
||||
v.mkBool(toml::get<bool>(t));
|
||||
break;
|
||||
;
|
||||
case toml::value_t::integer:
|
||||
v.mkInt(toml::get<int64_t>(t));
|
||||
break;
|
||||
;
|
||||
case toml::value_t::floating:
|
||||
v.mkFloat(toml::get<NixFloat>(t));
|
||||
break;
|
||||
;
|
||||
case toml::value_t::string: {
|
||||
auto s = toml::get<std::string_view>(t);
|
||||
forceNoNullByte(s);
|
||||
v.mkString(s);
|
||||
} break;
|
||||
;
|
||||
case toml::value_t::local_datetime:
|
||||
case toml::value_t::offset_datetime:
|
||||
case toml::value_t::local_date:
|
||||
case toml::value_t::local_time: {
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::ParseTomlTimestamps)) {
|
||||
auto attrs = state.buildBindings(2);
|
||||
attrs.alloc("_type").mkString("timestamp");
|
||||
std::ostringstream s;
|
||||
s << t;
|
||||
auto str = toView(s);
|
||||
forceNoNullByte(str);
|
||||
attrs.alloc("value").mkString(str);
|
||||
v.mkAttrs(attrs);
|
||||
} else {
|
||||
throw std::runtime_error("Dates and times are not supported");
|
||||
}
|
||||
} break;
|
||||
;
|
||||
case toml::value_t::empty:
|
||||
v.mkNull();
|
||||
break;
|
||||
;
|
||||
}
|
||||
};
|
||||
|
||||
@ -95,10 +95,10 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
|
||||
}
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_fromTOML({
|
||||
.name = "fromTOML",
|
||||
.args = {"e"},
|
||||
.doc = R"(
|
||||
static RegisterPrimOp primop_fromTOML(
|
||||
{.name = "fromTOML",
|
||||
.args = {"e"},
|
||||
.doc = R"(
|
||||
Convert a TOML string to a Nix value. For example,
|
||||
|
||||
```nix
|
||||
@ -112,7 +112,6 @@ static RegisterPrimOp primop_fromTOML({
|
||||
|
||||
returns the value `{ s = "a"; table = { y = 2; }; x = 1; }`.
|
||||
)",
|
||||
.fun = prim_fromTOML
|
||||
});
|
||||
.fun = prim_fromTOML});
|
||||
|
||||
}
|
||||
|
@ -7,11 +7,7 @@ namespace nix {
|
||||
|
||||
// See: https://github.com/NixOS/nix/issues/9730
|
||||
void printAmbiguous(
|
||||
Value &v,
|
||||
const SymbolTable &symbols,
|
||||
std::ostream &str,
|
||||
std::set<const void *> *seen,
|
||||
int depth)
|
||||
Value & v, const SymbolTable & symbols, std::ostream & str, std::set<const void *> * seen, int depth)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
|
@ -28,9 +28,7 @@ void printElided(
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
|
||||
std::ostream &
|
||||
printLiteralString(std::ostream & str, const std::string_view string, size_t maxLength, bool ansiColors)
|
||||
std::ostream & printLiteralString(std::ostream & str, const std::string_view string, size_t maxLength, bool ansiColors)
|
||||
{
|
||||
size_t charsPrinted = 0;
|
||||
if (ansiColors)
|
||||
@ -43,12 +41,18 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max
|
||||
return str;
|
||||
}
|
||||
|
||||
if (*i == '\"' || *i == '\\') str << "\\" << *i;
|
||||
else if (*i == '\n') str << "\\n";
|
||||
else if (*i == '\r') str << "\\r";
|
||||
else if (*i == '\t') str << "\\t";
|
||||
else if (*i == '$' && *(i+1) == '{') str << "\\" << *i;
|
||||
else str << *i;
|
||||
if (*i == '\"' || *i == '\\')
|
||||
str << "\\" << *i;
|
||||
else if (*i == '\n')
|
||||
str << "\\n";
|
||||
else if (*i == '\r')
|
||||
str << "\\r";
|
||||
else if (*i == '\t')
|
||||
str << "\\t";
|
||||
else if (*i == '$' && *(i + 1) == '{')
|
||||
str << "\\" << *i;
|
||||
else
|
||||
str << *i;
|
||||
charsPrinted++;
|
||||
}
|
||||
str << "\"";
|
||||
@ -57,14 +61,12 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max
|
||||
return str;
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printLiteralString(std::ostream & str, const std::string_view string)
|
||||
std::ostream & printLiteralString(std::ostream & str, const std::string_view string)
|
||||
{
|
||||
return printLiteralString(str, string, std::numeric_limits<size_t>::max(), false);
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printLiteralBool(std::ostream & str, bool boolean)
|
||||
std::ostream & printLiteralBool(std::ostream & str, bool boolean)
|
||||
{
|
||||
str << (boolean ? "true" : "false");
|
||||
return str;
|
||||
@ -80,13 +82,12 @@ printLiteralBool(std::ostream & str, bool boolean)
|
||||
bool isReservedKeyword(const std::string_view str)
|
||||
{
|
||||
static const std::unordered_set<std::string_view> reservedKeywords = {
|
||||
"if", "then", "else", "assert", "with", "let", "in", "rec", "inherit"
|
||||
};
|
||||
"if", "then", "else", "assert", "with", "let", "in", "rec", "inherit"};
|
||||
return reservedKeywords.contains(str);
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printIdentifier(std::ostream & str, std::string_view s) {
|
||||
std::ostream & printIdentifier(std::ostream & str, std::string_view s)
|
||||
{
|
||||
if (s.empty())
|
||||
str << "\"\"";
|
||||
else if (isReservedKeyword(s))
|
||||
@ -98,10 +99,8 @@ printIdentifier(std::ostream & str, std::string_view s) {
|
||||
return str;
|
||||
}
|
||||
for (auto c : s)
|
||||
if (!((c >= 'a' && c <= 'z') ||
|
||||
(c >= 'A' && c <= 'Z') ||
|
||||
(c >= '0' && c <= '9') ||
|
||||
c == '_' || c == '\'' || c == '-')) {
|
||||
if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '\''
|
||||
|| c == '-')) {
|
||||
printLiteralString(str, s);
|
||||
return str;
|
||||
}
|
||||
@ -112,21 +111,22 @@ printIdentifier(std::ostream & str, std::string_view s) {
|
||||
|
||||
static bool isVarName(std::string_view s)
|
||||
{
|
||||
if (s.size() == 0) return false;
|
||||
if (isReservedKeyword(s)) return false;
|
||||
if (s.size() == 0)
|
||||
return false;
|
||||
if (isReservedKeyword(s))
|
||||
return false;
|
||||
char c = s[0];
|
||||
if ((c >= '0' && c <= '9') || c == '-' || c == '\'') return false;
|
||||
if ((c >= '0' && c <= '9') || c == '-' || c == '\'')
|
||||
return false;
|
||||
for (auto & i : s)
|
||||
if (!((i >= 'a' && i <= 'z') ||
|
||||
(i >= 'A' && i <= 'Z') ||
|
||||
(i >= '0' && i <= '9') ||
|
||||
i == '_' || i == '-' || i == '\''))
|
||||
if (!((i >= 'a' && i <= 'z') || (i >= 'A' && i <= 'Z') || (i >= '0' && i <= '9') || i == '_' || i == '-'
|
||||
|| i == '\''))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printAttributeName(std::ostream & str, std::string_view name) {
|
||||
std::ostream & printAttributeName(std::ostream & str, std::string_view name)
|
||||
{
|
||||
if (isVarName(name))
|
||||
str << name;
|
||||
else
|
||||
@ -134,7 +134,7 @@ printAttributeName(std::ostream & str, std::string_view name) {
|
||||
return str;
|
||||
}
|
||||
|
||||
bool isImportantAttrName(const std::string& attrName)
|
||||
bool isImportantAttrName(const std::string & attrName)
|
||||
{
|
||||
return attrName == "type" || attrName == "_type";
|
||||
}
|
||||
@ -144,12 +144,11 @@ typedef std::pair<std::string, Value *> AttrPair;
|
||||
struct ImportantFirstAttrNameCmp
|
||||
{
|
||||
|
||||
bool operator()(const AttrPair& lhs, const AttrPair& rhs) const
|
||||
bool operator()(const AttrPair & lhs, const AttrPair & rhs) const
|
||||
{
|
||||
auto lhsIsImportant = isImportantAttrName(lhs.first);
|
||||
auto rhsIsImportant = isImportantAttrName(rhs.first);
|
||||
return std::forward_as_tuple(!lhsIsImportant, lhs.first)
|
||||
< std::forward_as_tuple(!rhsIsImportant, rhs.first);
|
||||
return std::forward_as_tuple(!lhsIsImportant, lhs.first) < std::forward_as_tuple(!rhsIsImportant, rhs.first);
|
||||
}
|
||||
};
|
||||
|
||||
@ -275,7 +274,8 @@ private:
|
||||
std::optional<StorePath> storePath;
|
||||
if (auto i = v.attrs()->get(state.sDrvPath)) {
|
||||
NixStringContext context;
|
||||
storePath = state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation");
|
||||
storePath =
|
||||
state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation");
|
||||
}
|
||||
|
||||
/* This unfortunately breaks printing nested values because of
|
||||
@ -499,10 +499,10 @@ private:
|
||||
output << ANSI_NORMAL;
|
||||
} else if (v.isThunk() || v.isApp()) {
|
||||
if (options.ansiColors)
|
||||
output << ANSI_MAGENTA;
|
||||
output << ANSI_MAGENTA;
|
||||
output << "«thunk»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
output << ANSI_NORMAL;
|
||||
} else {
|
||||
unreachable();
|
||||
}
|
||||
@ -593,8 +593,7 @@ private:
|
||||
}
|
||||
} catch (Error & e) {
|
||||
if (options.errors == ErrorPrintBehavior::Throw
|
||||
|| (options.errors == ErrorPrintBehavior::ThrowTopLevel
|
||||
&& depth == 0)) {
|
||||
|| (options.errors == ErrorPrintBehavior::ThrowTopLevel && depth == 0)) {
|
||||
throw;
|
||||
}
|
||||
printError_(e);
|
||||
@ -603,7 +602,11 @@ private:
|
||||
|
||||
public:
|
||||
Printer(std::ostream & output, EvalState & state, PrintOptions options)
|
||||
: output(output), state(state), options(options) { }
|
||||
: output(output)
|
||||
, state(state)
|
||||
, options(options)
|
||||
{
|
||||
}
|
||||
|
||||
void print(Value & v)
|
||||
{
|
||||
@ -636,8 +639,8 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer)
|
||||
template<>
|
||||
HintFmt & HintFmt::operator%(const ValuePrinter & value)
|
||||
{
|
||||
fmt % value;
|
||||
return *this;
|
||||
fmt % value;
|
||||
return *this;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -2,8 +2,7 @@

namespace nix {

std::optional<std::string_view> LookupPath::Prefix::suffixIfPotentialMatch(
std::string_view path) const
std::optional<std::string_view> LookupPath::Prefix::suffixIfPotentialMatch(std::string_view path) const
{
auto n = s.size();

@ -21,29 +20,25 @@ std::optional<std::string_view> LookupPath::Prefix::suffixIfPotentialMatch(
}

/* Skip next path separator. */
return {
path.substr(needSeparator ? n + 1 : n)
};
return {path.substr(needSeparator ? n + 1 : n)};
}

LookupPath::Elem LookupPath::Elem::parse(std::string_view rawElem)
{
size_t pos = rawElem.find('=');

return LookupPath::Elem {
.prefix = Prefix {
.s = pos == std::string::npos
? std::string { "" }
: std::string { rawElem.substr(0, pos) },
},
.path = Path {
.s = std::string { rawElem.substr(pos + 1) },
},
return LookupPath::Elem{
.prefix =
Prefix{
.s = pos == std::string::npos ? std::string{""} : std::string{rawElem.substr(0, pos)},
},
.path =
Path{
.s = std::string{rawElem.substr(pos + 1)},
},
};
}

LookupPath LookupPath::parse(const Strings & rawElems)
{
LookupPath res;
@ -7,107 +7,108 @@
|
||||
#include <iomanip>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
||||
namespace nix {
|
||||
using json = nlohmann::json;
|
||||
// TODO: rename. It doesn't print.
|
||||
json printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore)
|
||||
json printValueAsJSON(
|
||||
EvalState & state, bool strict, Value & v, const PosIdx pos, NixStringContext & context, bool copyToStore)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (strict) state.forceValue(v, pos);
|
||||
if (strict)
|
||||
state.forceValue(v, pos);
|
||||
|
||||
json out;
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
case nInt:
|
||||
out = v.integer().value;
|
||||
break;
|
||||
case nInt:
|
||||
out = v.integer().value;
|
||||
break;
|
||||
|
||||
case nBool:
|
||||
out = v.boolean();
|
||||
break;
|
||||
case nBool:
|
||||
out = v.boolean();
|
||||
break;
|
||||
|
||||
case nString:
|
||||
copyContext(v, context);
|
||||
out = v.c_str();
|
||||
break;
|
||||
case nString:
|
||||
copyContext(v, context);
|
||||
out = v.c_str();
|
||||
break;
|
||||
|
||||
case nPath:
|
||||
if (copyToStore)
|
||||
out = state.store->printStorePath(
|
||||
state.copyPathToStore(context, v.path()));
|
||||
else
|
||||
out = v.path().path.abs();
|
||||
break;
|
||||
case nPath:
|
||||
if (copyToStore)
|
||||
out = state.store->printStorePath(state.copyPathToStore(context, v.path()));
|
||||
else
|
||||
out = v.path().path.abs();
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
// already initialized as null
|
||||
break;
|
||||
case nNull:
|
||||
// already initialized as null
|
||||
break;
|
||||
|
||||
case nAttrs: {
|
||||
auto maybeString = state.tryAttrsToString(pos, v, context, false, false);
|
||||
if (maybeString) {
|
||||
out = *maybeString;
|
||||
break;
|
||||
}
|
||||
if (auto i = v.attrs()->get(state.sOutPath))
|
||||
return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore);
|
||||
else {
|
||||
out = json::object();
|
||||
for (auto & a : v.attrs()->lexicographicOrder(state.symbols)) {
|
||||
try {
|
||||
out.emplace(state.symbols[a->name], printValueAsJSON(state, strict, *a->value, a->pos, context, copyToStore));
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[a->pos],
|
||||
HintFmt("while evaluating attribute '%1%'", state.symbols[a->name]));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
}
|
||||
case nAttrs: {
|
||||
auto maybeString = state.tryAttrsToString(pos, v, context, false, false);
|
||||
if (maybeString) {
|
||||
out = *maybeString;
|
||||
break;
|
||||
}
|
||||
|
||||
case nList: {
|
||||
out = json::array();
|
||||
int i = 0;
|
||||
for (auto elem : v.listItems()) {
|
||||
if (auto i = v.attrs()->get(state.sOutPath))
|
||||
return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore);
|
||||
else {
|
||||
out = json::object();
|
||||
for (auto & a : v.attrs()->lexicographicOrder(state.symbols)) {
|
||||
try {
|
||||
out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
|
||||
out.emplace(
|
||||
state.symbols[a->name],
|
||||
printValueAsJSON(state, strict, *a->value, a->pos, context, copyToStore));
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos],
|
||||
HintFmt("while evaluating list element at index %1%", i));
|
||||
e.addTrace(
|
||||
state.positions[a->pos], HintFmt("while evaluating attribute '%1%'", state.symbols[a->name]));
|
||||
throw;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
break;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case nExternal:
|
||||
return v.external()->printValueAsJSON(state, strict, context, copyToStore);
|
||||
break;
|
||||
case nList: {
|
||||
out = json::array();
|
||||
int i = 0;
|
||||
for (auto elem : v.listItems()) {
|
||||
try {
|
||||
out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], HintFmt("while evaluating list element at index %1%", i));
|
||||
throw;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case nFloat:
|
||||
out = v.fpoint();
|
||||
break;
|
||||
case nExternal:
|
||||
return v.external()->printValueAsJSON(state, strict, context, copyToStore);
|
||||
break;
|
||||
|
||||
case nThunk:
|
||||
case nFunction:
|
||||
state.error<TypeError>(
|
||||
"cannot convert %1% to JSON",
|
||||
showType(v)
|
||||
)
|
||||
.atPos(v.determinePos(pos))
|
||||
.debugThrow();
|
||||
case nFloat:
|
||||
out = v.fpoint();
|
||||
break;
|
||||
|
||||
case nThunk:
|
||||
case nFunction:
|
||||
state.error<TypeError>("cannot convert %1% to JSON", showType(v)).atPos(v.determinePos(pos)).debugThrow();
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, const PosIdx pos, std::ostream & str, NixStringContext & context, bool copyToStore)
|
||||
void printValueAsJSON(
|
||||
EvalState & state,
|
||||
bool strict,
|
||||
Value & v,
|
||||
const PosIdx pos,
|
||||
std::ostream & str,
|
||||
NixStringContext & context,
|
||||
bool copyToStore)
|
||||
{
|
||||
try {
|
||||
str << printValueAsJSON(state, strict, v, pos, context, copyToStore);
|
||||
@ -116,12 +117,10 @@ void printValueAsJSON(EvalState & state, bool strict,
|
||||
}
|
||||
}
|
||||
|
||||
json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
|
||||
NixStringContext & context, bool copyToStore) const
|
||||
json ExternalValueBase::printValueAsJSON(
|
||||
EvalState & state, bool strict, NixStringContext & context, bool copyToStore) const
|
||||
{
|
||||
state.error<TypeError>("cannot convert %1% to JSON", showType())
|
||||
.debugThrow();
|
||||
state.error<TypeError>("cannot convert %1% to JSON", showType()).debugThrow();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
@ -5,10 +5,8 @@
|
||||
|
||||
#include <cstdlib>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
static XMLAttrs singletonAttrs(const std::string & name, std::string_view value)
|
||||
{
|
||||
XMLAttrs attrs;
|
||||
@ -16,12 +14,16 @@ static XMLAttrs singletonAttrs(const std::string & name, std::string_view value)
|
||||
return attrs;
|
||||
}
|
||||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
|
||||
static void printValueAsXML(
|
||||
EvalState & state,
|
||||
bool strict,
|
||||
bool location,
|
||||
Value & v,
|
||||
XMLWriter & doc,
|
||||
NixStringContext & context,
|
||||
PathSet & drvsSeen,
|
||||
const PosIdx pos);
|
||||
|
||||
|
||||
static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos)
|
||||
{
|
||||
if (auto path = std::get_if<SourcePath>(&pos.origin))
|
||||
@ -30,142 +32,167 @@ static void posToXML(EvalState & state, XMLAttrs & xmlAttrs, const Pos & pos)
|
||||
xmlAttrs["column"] = fmt("%1%", pos.column);
|
||||
}
|
||||
|
||||
|
||||
static void showAttrs(EvalState & state, bool strict, bool location,
|
||||
const Bindings & attrs, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen)
|
||||
static void showAttrs(
|
||||
EvalState & state,
|
||||
bool strict,
|
||||
bool location,
|
||||
const Bindings & attrs,
|
||||
XMLWriter & doc,
|
||||
NixStringContext & context,
|
||||
PathSet & drvsSeen)
|
||||
{
|
||||
StringSet names;
|
||||
|
||||
for (auto & a : attrs.lexicographicOrder(state.symbols)) {
|
||||
XMLAttrs xmlAttrs;
|
||||
xmlAttrs["name"] = state.symbols[a->name];
|
||||
if (location && a->pos) posToXML(state, xmlAttrs, state.positions[a->pos]);
|
||||
if (location && a->pos)
|
||||
posToXML(state, xmlAttrs, state.positions[a->pos]);
|
||||
|
||||
XMLOpenElement _(doc, "attr", xmlAttrs);
|
||||
printValueAsXML(state, strict, location,
|
||||
*a->value, doc, context, drvsSeen, a->pos);
|
||||
printValueAsXML(state, strict, location, *a->value, doc, context, drvsSeen, a->pos);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
|
||||
static void printValueAsXML(
|
||||
EvalState & state,
|
||||
bool strict,
|
||||
bool location,
|
||||
Value & v,
|
||||
XMLWriter & doc,
|
||||
NixStringContext & context,
|
||||
PathSet & drvsSeen,
|
||||
const PosIdx pos)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (strict) state.forceValue(v, pos);
|
||||
if (strict)
|
||||
state.forceValue(v, pos);
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
case nInt:
|
||||
doc.writeEmptyElement("int", singletonAttrs("value", fmt("%1%", v.integer())));
|
||||
break;
|
||||
case nInt:
|
||||
doc.writeEmptyElement("int", singletonAttrs("value", fmt("%1%", v.integer())));
|
||||
break;
|
||||
|
||||
case nBool:
|
||||
doc.writeEmptyElement("bool", singletonAttrs("value", v.boolean() ? "true" : "false"));
|
||||
break;
|
||||
case nBool:
|
||||
doc.writeEmptyElement("bool", singletonAttrs("value", v.boolean() ? "true" : "false"));
|
||||
break;
|
||||
|
||||
case nString:
|
||||
/* !!! show the context? */
|
||||
copyContext(v, context);
|
||||
doc.writeEmptyElement("string", singletonAttrs("value", v.c_str()));
|
||||
break;
|
||||
case nString:
|
||||
/* !!! show the context? */
|
||||
copyContext(v, context);
|
||||
doc.writeEmptyElement("string", singletonAttrs("value", v.c_str()));
|
||||
break;
|
||||
|
||||
case nPath:
|
||||
doc.writeEmptyElement("path", singletonAttrs("value", v.path().to_string()));
|
||||
break;
|
||||
case nPath:
|
||||
doc.writeEmptyElement("path", singletonAttrs("value", v.path().to_string()));
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
doc.writeEmptyElement("null");
|
||||
break;
|
||||
case nNull:
|
||||
doc.writeEmptyElement("null");
|
||||
break;
|
||||
|
||||
case nAttrs:
|
||||
if (state.isDerivation(v)) {
|
||||
XMLAttrs xmlAttrs;
|
||||
|
||||
Path drvPath;
|
||||
if (auto a = v.attrs()->get(state.sDrvPath)) {
|
||||
if (strict) state.forceValue(*a->value, a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["drvPath"] = drvPath = a->value->c_str();
|
||||
}
|
||||
|
||||
if (auto a = v.attrs()->get(state.sOutPath)) {
|
||||
if (strict) state.forceValue(*a->value, a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["outPath"] = a->value->c_str();
|
||||
}
|
||||
|
||||
XMLOpenElement _(doc, "derivation", xmlAttrs);
|
||||
|
||||
if (drvPath != "" && drvsSeen.insert(drvPath).second)
|
||||
showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen);
|
||||
else
|
||||
doc.writeEmptyElement("repeated");
|
||||
}
|
||||
|
||||
else {
|
||||
XMLOpenElement _(doc, "attrs");
|
||||
showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen);
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case nList: {
|
||||
XMLOpenElement _(doc, "list");
|
||||
for (auto v2 : v.listItems())
|
||||
printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos);
|
||||
break;
|
||||
}
|
||||
|
||||
case nFunction: {
|
||||
if (!v.isLambda()) {
|
||||
// FIXME: Serialize primops and primopapps
|
||||
doc.writeEmptyElement("unevaluated");
|
||||
break;
|
||||
}
|
||||
case nAttrs:
|
||||
if (state.isDerivation(v)) {
|
||||
XMLAttrs xmlAttrs;
|
||||
if (location) posToXML(state, xmlAttrs, state.positions[v.payload.lambda.fun->pos]);
|
||||
XMLOpenElement _(doc, "function", xmlAttrs);
|
||||
|
||||
if (v.payload.lambda.fun->hasFormals()) {
|
||||
XMLAttrs attrs;
|
||||
if (v.payload.lambda.fun->arg) attrs["name"] = state.symbols[v.payload.lambda.fun->arg];
|
||||
if (v.payload.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";
|
||||
XMLOpenElement _(doc, "attrspat", attrs);
|
||||
for (auto & i : v.payload.lambda.fun->formals->lexicographicOrder(state.symbols))
|
||||
doc.writeEmptyElement("attr", singletonAttrs("name", state.symbols[i.name]));
|
||||
} else
|
||||
doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.payload.lambda.fun->arg]));
|
||||
Path drvPath;
|
||||
if (auto a = v.attrs()->get(state.sDrvPath)) {
|
||||
if (strict)
|
||||
state.forceValue(*a->value, a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["drvPath"] = drvPath = a->value->c_str();
|
||||
}
|
||||
|
||||
break;
|
||||
if (auto a = v.attrs()->get(state.sOutPath)) {
|
||||
if (strict)
|
||||
state.forceValue(*a->value, a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["outPath"] = a->value->c_str();
|
||||
}
|
||||
|
||||
XMLOpenElement _(doc, "derivation", xmlAttrs);
|
||||
|
||||
if (drvPath != "" && drvsSeen.insert(drvPath).second)
|
||||
showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen);
|
||||
else
|
||||
doc.writeEmptyElement("repeated");
|
||||
}
|
||||
|
||||
case nExternal:
|
||||
v.external()->printValueAsXML(state, strict, location, doc, context, drvsSeen, pos);
|
||||
break;
|
||||
else {
|
||||
XMLOpenElement _(doc, "attrs");
|
||||
showAttrs(state, strict, location, *v.attrs(), doc, context, drvsSeen);
|
||||
}
|
||||
|
||||
case nFloat:
|
||||
doc.writeEmptyElement("float", singletonAttrs("value", fmt("%1%", v.fpoint())));
|
||||
break;
|
||||
break;
|
||||
|
||||
case nThunk:
|
||||
case nList: {
    XMLOpenElement _(doc, "list");
    for (auto v2 : v.listItems())
        printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos);
    break;
}

case nFunction: {
    if (!v.isLambda()) {
        // FIXME: Serialize primops and primopapps
        doc.writeEmptyElement("unevaluated");
        break;
    }
    XMLAttrs xmlAttrs;
    if (location)
        posToXML(state, xmlAttrs, state.positions[v.payload.lambda.fun->pos]);
    XMLOpenElement _(doc, "function", xmlAttrs);

    if (v.payload.lambda.fun->hasFormals()) {
        XMLAttrs attrs;
        if (v.payload.lambda.fun->arg)
            attrs["name"] = state.symbols[v.payload.lambda.fun->arg];
        if (v.payload.lambda.fun->formals->ellipsis)
            attrs["ellipsis"] = "1";
        XMLOpenElement _(doc, "attrspat", attrs);
        for (auto & i : v.payload.lambda.fun->formals->lexicographicOrder(state.symbols))
            doc.writeEmptyElement("attr", singletonAttrs("name", state.symbols[i.name]));
    } else
        doc.writeEmptyElement("varpat", singletonAttrs("name", state.symbols[v.payload.lambda.fun->arg]));

    break;
}

case nExternal:
    v.external()->printValueAsXML(state, strict, location, doc, context, drvsSeen, pos);
    break;

case nFloat:
    doc.writeEmptyElement("float", singletonAttrs("value", fmt("%1%", v.fpoint())));
    break;

case nThunk:
    doc.writeEmptyElement("unevaluated");
}
}

void ExternalValueBase::printValueAsXML(EvalState & state, bool strict,
    bool location, XMLWriter & doc, NixStringContext & context, PathSet & drvsSeen,
void ExternalValueBase::printValueAsXML(
    EvalState & state,
    bool strict,
    bool location,
    XMLWriter & doc,
    NixStringContext & context,
    PathSet & drvsSeen,
    const PosIdx pos) const
{
    doc.writeEmptyElement("unevaluated");
}

void printValueAsXML(EvalState & state, bool strict, bool location,
    Value & v, std::ostream & out, NixStringContext & context, const PosIdx pos)
void printValueAsXML(
    EvalState & state,
    bool strict,
    bool location,
    Value & v,
    std::ostream & out,
    NixStringContext & context,
    const PosIdx pos)
{
    XMLWriter doc(true, out);
    XMLOpenElement root(doc, "expr");

@@ -173,5 +200,4 @@ void printValueAsXML(EvalState & state, bool strict, bool location,
    printValueAsXML(state, strict, location, v, doc, context, drvsSeen, pos);
}

}

@@ -5,9 +5,7 @@

namespace nix {

NixStringContextElem NixStringContextElem::parse(
    std::string_view s0,
    const ExperimentalFeatureSettings & xpSettings)
NixStringContextElem NixStringContextElem::parse(std::string_view s0, const ExperimentalFeatureSettings & xpSettings)
{
    std::string_view s = s0;

@@ -16,16 +14,16 @@ NixStringContextElem NixStringContextElem::parse(
    // Case on whether there is a '!'
    size_t index = s.find("!");
    if (index == std::string_view::npos) {
        return SingleDerivedPath::Opaque {
            .path = StorePath { s },
        return SingleDerivedPath::Opaque{
            .path = StorePath{s},
        };
    } else {
        std::string output { s.substr(0, index) };
        std::string output{s.substr(0, index)};
        // Advance string to parse after the '!'
        s = s.substr(index + 1);
        auto drv = make_ref<SingleDerivedPath>(parseRest());
        drvRequireExperiment(*drv, xpSettings);
        return SingleDerivedPath::Built {
        return SingleDerivedPath::Built{
            .drvPath = std::move(drv),
            .output = std::move(output),
        };
@@ -33,8 +31,7 @@ NixStringContextElem NixStringContextElem::parse(
    };

    if (s.size() == 0) {
        throw BadNixStringContextElem(s0,
            "String context element should never be an empty string");
        throw BadNixStringContextElem(s0, "String context element should never be an empty string");
    }

    switch (s.at(0)) {
@@ -44,28 +41,23 @@ NixStringContextElem NixStringContextElem::parse(

        // Find *second* '!'
        if (s.find("!") == std::string_view::npos) {
            throw BadNixStringContextElem(s0,
                "String content element beginning with '!' should have a second '!'");
            throw BadNixStringContextElem(s0, "String content element beginning with '!' should have a second '!'");
        }

        return std::visit(
            [&](auto x) -> NixStringContextElem { return std::move(x); },
            parseRest());
        return std::visit([&](auto x) -> NixStringContextElem { return std::move(x); }, parseRest());
    }
    case '=': {
        return NixStringContextElem::DrvDeep {
            .drvPath = StorePath { s.substr(1) },
        return NixStringContextElem::DrvDeep{
            .drvPath = StorePath{s.substr(1)},
        };
    }
    default: {
        // Ensure no '!'
        if (s.find("!") != std::string_view::npos) {
            throw BadNixStringContextElem(s0,
                "String content element not beginning with '!' should not have a second '!'");
            throw BadNixStringContextElem(
                s0, "String content element not beginning with '!' should not have a second '!'");
        }
        return std::visit(
            [&](auto x) -> NixStringContextElem { return std::move(x); },
            parseRest());
        return std::visit([&](auto x) -> NixStringContextElem { return std::move(x); }, parseRest());
    }
    }
}

@@ -76,31 +68,31 @@ std::string NixStringContextElem::to_string() const

    std::function<void(const SingleDerivedPath &)> toStringRest;
    toStringRest = [&](auto & p) {
        std::visit(overloaded {
            [&](const SingleDerivedPath::Opaque & o) {
                res += o.path.to_string();
        std::visit(
            overloaded{
                [&](const SingleDerivedPath::Opaque & o) { res += o.path.to_string(); },
                [&](const SingleDerivedPath::Built & o) {
                    res += o.output;
                    res += '!';
                    toStringRest(*o.drvPath);
                },
            },
            [&](const SingleDerivedPath::Built & o) {
                res += o.output;
                res += '!';
                toStringRest(*o.drvPath);
            },
        }, p.raw());
            p.raw());
    };

    std::visit(overloaded {
        [&](const NixStringContextElem::Built & b) {
            res += '!';
            toStringRest(b);
    std::visit(
        overloaded{
            [&](const NixStringContextElem::Built & b) {
                res += '!';
                toStringRest(b);
            },
            [&](const NixStringContextElem::Opaque & o) { toStringRest(o); },
            [&](const NixStringContextElem::DrvDeep & d) {
                res += '=';
                res += d.drvPath.to_string();
            },
        },
        [&](const NixStringContextElem::Opaque & o) {
            toStringRest(o);
        },
        [&](const NixStringContextElem::DrvDeep & d) {
            res += '=';
            res += d.drvPath.to_string();
        },
    }, raw);
        raw);

    return res;
}

@@ -13,39 +13,41 @@ class PublicKeyTest : public CharacterizationTest
    std::filesystem::path unitTestData = getUnitTestData() / "public-key";

public:
    std::filesystem::path goldenMaster(std::string_view testStem) const override {
    std::filesystem::path goldenMaster(std::string_view testStem) const override
    {
        return unitTestData / testStem;
    }
};

#define TEST_JSON(FIXTURE, NAME, VAL) \
    TEST_F(FIXTURE, PublicKey_ ## NAME ## _from_json) { \
        readTest(#NAME ".json", [&](const auto & encoded_) { \
            fetchers::PublicKey expected { VAL }; \
            fetchers::PublicKey got = nlohmann::json::parse(encoded_); \
            ASSERT_EQ(got, expected); \
        }); \
    } \
    \
    TEST_F(FIXTURE, PublicKey_ ## NAME ## _to_json) { \
        writeTest(#NAME ".json", [&]() -> json { \
            return nlohmann::json(fetchers::PublicKey { VAL }); \
        }, [](const auto & file) { \
            return json::parse(readFile(file)); \
        }, [](const auto & file, const auto & got) { \
            return writeFile(file, got.dump(2) + "\n"); \
        }); \
#define TEST_JSON(FIXTURE, NAME, VAL) \
    TEST_F(FIXTURE, PublicKey_##NAME##_from_json) \
    { \
        readTest(#NAME ".json", [&](const auto & encoded_) { \
            fetchers::PublicKey expected{VAL}; \
            fetchers::PublicKey got = nlohmann::json::parse(encoded_); \
            ASSERT_EQ(got, expected); \
        }); \
    } \
    \
    TEST_F(FIXTURE, PublicKey_##NAME##_to_json) \
    { \
        writeTest( \
            #NAME ".json", \
            [&]() -> json { return nlohmann::json(fetchers::PublicKey{VAL}); }, \
            [](const auto & file) { return json::parse(readFile(file)); }, \
            [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \
    }

TEST_JSON(PublicKeyTest, simple, (fetchers::PublicKey { .type = "ssh-rsa", .key = "ABCDE" }))
TEST_JSON(PublicKeyTest, simple, (fetchers::PublicKey{.type = "ssh-rsa", .key = "ABCDE"}))

TEST_JSON(PublicKeyTest, defaultType, fetchers::PublicKey { .key = "ABCDE" })
TEST_JSON(PublicKeyTest, defaultType, fetchers::PublicKey{.key = "ABCDE"})

#undef TEST_JSON

TEST_F(PublicKeyTest, PublicKey_noRoundTrip_from_json) {
TEST_F(PublicKeyTest, PublicKey_noRoundTrip_from_json)
{
    readTest("noRoundTrip.json", [&](const auto & encoded_) {
        fetchers::PublicKey expected = { .type = "ssh-ed25519", .key = "ABCDE" };
        fetchers::PublicKey expected = {.type = "ssh-ed25519", .key = "ABCDE"};
        fetchers::PublicKey got = nlohmann::json::parse(encoded_);
        ASSERT_EQ(got, expected);
    });

@@ -15,7 +15,7 @@ Attrs jsonToAttrs(const nlohmann::json & json)
        else if (i.value().is_string())
            attrs.emplace(i.key(), i.value().get<std::string>());
        else if (i.value().is_boolean())
            attrs.emplace(i.key(), Explicit<bool> { i.value().get<bool>() });
            attrs.emplace(i.key(), Explicit<bool>{i.value().get<bool>()});
        else
            throw Error("unsupported input attribute type in lock file");
    }
@@ -33,7 +33,8 @@ nlohmann::json attrsToJSON(const Attrs & attrs)
            json[attr.first] = *v;
        } else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
            json[attr.first] = v->t;
        } else unreachable();
        } else
            unreachable();
    }
    return json;
}
@@ -41,7 +42,8 @@ nlohmann::json attrsToJSON(const Attrs & attrs)
std::optional<std::string> maybeGetStrAttr(const Attrs & attrs, const std::string & name)
{
    auto i = attrs.find(name);
    if (i == attrs.end()) return {};
    if (i == attrs.end())
        return {};
    if (auto v = std::get_if<std::string>(&i->second))
        return *v;
    throw Error("input attribute '%s' is not a string %s", name, attrsToJSON(attrs).dump());
@@ -58,7 +60,8 @@ std::string getStrAttr(const Attrs & attrs, const std::string & name)
std::optional<uint64_t> maybeGetIntAttr(const Attrs & attrs, const std::string & name)
{
    auto i = attrs.find(name);
    if (i == attrs.end()) return {};
    if (i == attrs.end())
        return {};
    if (auto v = std::get_if<uint64_t>(&i->second))
        return *v;
    throw Error("input attribute '%s' is not an integer", name);
@@ -75,7 +78,8 @@ uint64_t getIntAttr(const Attrs & attrs, const std::string & name)
std::optional<bool> maybeGetBoolAttr(const Attrs & attrs, const std::string & name)
{
    auto i = attrs.find(name);
    if (i == attrs.end()) return {};
    if (i == attrs.end())
        return {};
    if (auto v = std::get_if<Explicit<bool>>(&i->second))
        return v->t;
    throw Error("input attribute '%s' is not a Boolean", name);
@@ -99,7 +103,8 @@ std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
        query.insert_or_assign(attr.first, *v);
    } else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
        query.insert_or_assign(attr.first, v->t ? "1" : "0");
    } else unreachable();
    } else
        unreachable();
    }
    return query;
}

@ -43,46 +43,37 @@ struct CacheImpl : Cache
|
||||
state->db.isCache();
|
||||
state->db.exec(schema);
|
||||
|
||||
state->upsert.create(state->db,
|
||||
"insert or replace into Cache(domain, key, value, timestamp) values (?, ?, ?, ?)");
|
||||
state->upsert.create(
|
||||
state->db, "insert or replace into Cache(domain, key, value, timestamp) values (?, ?, ?, ?)");
|
||||
|
||||
state->lookup.create(state->db,
|
||||
"select value, timestamp from Cache where domain = ? and key = ?");
|
||||
state->lookup.create(state->db, "select value, timestamp from Cache where domain = ? and key = ?");
|
||||
}
|
||||
|
||||
void upsert(
|
||||
const Key & key,
|
||||
const Attrs & value) override
|
||||
void upsert(const Key & key, const Attrs & value) override
|
||||
{
|
||||
_state.lock()->upsert.use()
|
||||
(key.first)
|
||||
(attrsToJSON(key.second).dump())
|
||||
(attrsToJSON(value).dump())
|
||||
(time(0)).exec();
|
||||
_state.lock()
|
||||
->upsert.use()(key.first)(attrsToJSON(key.second).dump())(attrsToJSON(value).dump())(time(0))
|
||||
.exec();
|
||||
}
|
||||
|
||||
std::optional<Attrs> lookup(
|
||||
const Key & key) override
|
||||
std::optional<Attrs> lookup(const Key & key) override
|
||||
{
|
||||
if (auto res = lookupExpired(key))
|
||||
return std::move(res->value);
|
||||
return {};
|
||||
}
|
||||
|
||||
std::optional<Attrs> lookupWithTTL(
|
||||
const Key & key) override
|
||||
std::optional<Attrs> lookupWithTTL(const Key & key) override
|
||||
{
|
||||
if (auto res = lookupExpired(key)) {
|
||||
if (!res->expired)
|
||||
return std::move(res->value);
|
||||
debug("ignoring expired cache entry '%s:%s'",
|
||||
key.first, attrsToJSON(key.second).dump());
|
||||
debug("ignoring expired cache entry '%s:%s'", key.first, attrsToJSON(key.second).dump());
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
std::optional<Result> lookupExpired(
|
||||
const Key & key) override
|
||||
std::optional<Result> lookupExpired(const Key & key) override
|
||||
{
|
||||
auto state(_state.lock());
|
||||
|
||||
@ -99,17 +90,13 @@ struct CacheImpl : Cache
|
||||
|
||||
debug("using cache entry '%s:%s' -> '%s'", key.first, keyJSON, valueJSON);
|
||||
|
||||
return Result {
|
||||
return Result{
|
||||
.expired = settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0),
|
||||
.value = jsonToAttrs(nlohmann::json::parse(valueJSON)),
|
||||
};
|
||||
}
|
||||
|
||||
void upsert(
|
||||
Key key,
|
||||
Store & store,
|
||||
Attrs value,
|
||||
const StorePath & storePath) override
|
||||
void upsert(Key key, Store & store, Attrs value, const StorePath & storePath) override
|
||||
{
|
||||
/* Add the store prefix to the cache key to handle multiple
|
||||
store prefixes. */
|
||||
@ -120,14 +107,13 @@ struct CacheImpl : Cache
|
||||
upsert(key, value);
|
||||
}
|
||||
|
||||
std::optional<ResultWithStorePath> lookupStorePath(
|
||||
Key key,
|
||||
Store & store) override
|
||||
std::optional<ResultWithStorePath> lookupStorePath(Key key, Store & store) override
|
||||
{
|
||||
key.second.insert_or_assign("store", store.storeDir);
|
||||
|
||||
auto res = lookupExpired(key);
|
||||
if (!res) return std::nullopt;
|
||||
if (!res)
|
||||
return std::nullopt;
|
||||
|
||||
auto storePathS = getStrAttr(res->value, "storePath");
|
||||
res->value.erase("storePath");
|
||||
@ -137,14 +123,16 @@ struct CacheImpl : Cache
|
||||
store.addTempRoot(res2.storePath);
|
||||
if (!store.isValidPath(res2.storePath)) {
|
||||
// FIXME: we could try to substitute 'storePath'.
|
||||
debug("ignoring disappeared cache entry '%s:%s' -> '%s'",
|
||||
debug(
|
||||
"ignoring disappeared cache entry '%s:%s' -> '%s'",
|
||||
key.first,
|
||||
attrsToJSON(key.second).dump(),
|
||||
store.printStorePath(res2.storePath));
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
debug("using cache entry '%s:%s' -> '%s', '%s'",
|
||||
debug(
|
||||
"using cache entry '%s:%s' -> '%s', '%s'",
|
||||
key.first,
|
||||
attrsToJSON(key.second).dump(),
|
||||
attrsToJSON(res2.value).dump(),
|
||||
@ -153,9 +141,7 @@ struct CacheImpl : Cache
|
||||
return res2;
|
||||
}
|
||||
|
||||
std::optional<ResultWithStorePath> lookupStorePathWithTTL(
|
||||
Key key,
|
||||
Store & store) override
|
||||
std::optional<ResultWithStorePath> lookupStorePathWithTTL(Key key, Store & store) override
|
||||
{
|
||||
auto res = lookupStorePath(std::move(key), store);
|
||||
return res && !res->expired ? res : std::nullopt;
|
||||
|
@ -2,8 +2,6 @@
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
Settings::Settings()
|
||||
{
|
||||
}
|
||||
Settings::Settings() {}
|
||||
|
||||
}
|
||||
|
@ -4,18 +4,11 @@
|
||||
namespace nix {
|
||||
|
||||
fetchers::Cache::Key makeFetchToStoreCacheKey(
|
||||
const std::string &name,
|
||||
const std::string &fingerprint,
|
||||
ContentAddressMethod method,
|
||||
const std::string &path)
|
||||
const std::string & name, const std::string & fingerprint, ContentAddressMethod method, const std::string & path)
|
||||
{
|
||||
return fetchers::Cache::Key{"fetchToStore", {
|
||||
{"name", name},
|
||||
{"fingerprint", fingerprint},
|
||||
{"method", std::string{method.render()}},
|
||||
{"path", path}
|
||||
}};
|
||||
|
||||
return fetchers::Cache::Key{
|
||||
"fetchToStore",
|
||||
{{"name", name}, {"fingerprint", fingerprint}, {"method", std::string{method.render()}}, {"path", path}}};
|
||||
}
|
||||
|
||||
StorePath fetchToStore(
|
||||
@ -41,17 +34,17 @@ StorePath fetchToStore(
|
||||
} else
|
||||
debug("source path '%s' is uncacheable", path);
|
||||
|
||||
Activity act(*logger, lvlChatty, actUnknown,
|
||||
Activity act(
|
||||
*logger,
|
||||
lvlChatty,
|
||||
actUnknown,
|
||||
fmt(mode == FetchMode::DryRun ? "hashing '%s'" : "copying '%s' to the store", path));
|
||||
|
||||
auto filter2 = filter ? *filter : defaultPathFilter;
|
||||
|
||||
auto storePath =
|
||||
mode == FetchMode::DryRun
|
||||
? store.computeStorePath(
|
||||
name, path, method, HashAlgorithm::SHA256, {}, filter2).first
|
||||
: store.addToStore(
|
||||
name, path, method, HashAlgorithm::SHA256, {}, filter2, repair);
|
||||
auto storePath = mode == FetchMode::DryRun
|
||||
? store.computeStorePath(name, path, method, HashAlgorithm::SHA256, {}, filter2).first
|
||||
: store.addToStore(name, path, method, HashAlgorithm::SHA256, {}, filter2, repair);
|
||||
|
||||
debug(mode == FetchMode::DryRun ? "hashed '%s'" : "copied '%s' to '%s'", path, store.printStorePath(storePath));
|
||||
|
||||
|
@ -24,7 +24,8 @@ void registerInputScheme(std::shared_ptr<InputScheme> && inputScheme)
|
||||
inputSchemes->insert_or_assign(schemeName, std::move(inputScheme));
|
||||
}
|
||||
|
||||
nlohmann::json dumpRegisterInputSchemeInfo() {
|
||||
nlohmann::json dumpRegisterInputSchemeInfo()
|
||||
{
|
||||
using nlohmann::json;
|
||||
|
||||
auto res = json::object();
|
||||
@ -37,9 +38,7 @@ nlohmann::json dumpRegisterInputSchemeInfo() {
|
||||
return res;
|
||||
}
|
||||
|
||||
Input Input::fromURL(
|
||||
const Settings & settings,
|
||||
const std::string & url, bool requireTree)
|
||||
Input Input::fromURL(const Settings & settings, const std::string & url, bool requireTree)
|
||||
{
|
||||
return fromURL(settings, parseURL(url), requireTree);
|
||||
}
|
||||
@ -53,9 +52,7 @@ static void fixupInput(Input & input)
|
||||
input.getLastModified();
|
||||
}
|
||||
|
||||
Input Input::fromURL(
|
||||
const Settings & settings,
|
||||
const ParsedURL & url, bool requireTree)
|
||||
Input Input::fromURL(const Settings & settings, const ParsedURL & url, bool requireTree)
|
||||
{
|
||||
for (auto & [_, inputScheme] : *inputSchemes) {
|
||||
auto res = inputScheme->inputFromURL(settings, url, requireTree);
|
||||
@ -84,7 +81,7 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
|
||||
// but not all of them. Doing this is to support those other
|
||||
// operations which are supposed to be robust on
|
||||
// unknown/uninterpretable inputs.
|
||||
Input input { settings };
|
||||
Input input{settings};
|
||||
input.attrs = attrs;
|
||||
fixupInput(input);
|
||||
return input;
|
||||
@ -95,7 +92,8 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
|
||||
i == inputSchemes->end() ? nullptr : i->second;
|
||||
});
|
||||
|
||||
if (!inputScheme) return raw();
|
||||
if (!inputScheme)
|
||||
return raw();
|
||||
|
||||
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
|
||||
|
||||
@ -106,7 +104,8 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
|
||||
throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName);
|
||||
|
||||
auto res = inputScheme->inputFromAttrs(settings, attrs);
|
||||
if (!res) return raw();
|
||||
if (!res)
|
||||
return raw();
|
||||
res->scheme = inputScheme;
|
||||
fixupInput(*res);
|
||||
return std::move(*res);
|
||||
@ -114,9 +113,11 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
|
||||
|
||||
std::optional<std::string> Input::getFingerprint(ref<Store> store) const
|
||||
{
|
||||
if (!scheme) return std::nullopt;
|
||||
if (!scheme)
|
||||
return std::nullopt;
|
||||
|
||||
if (cachedFingerprint) return *cachedFingerprint;
|
||||
if (cachedFingerprint)
|
||||
return *cachedFingerprint;
|
||||
|
||||
auto fingerprint = scheme->getFingerprint(store, *this);
|
||||
|
||||
@ -171,18 +172,20 @@ Attrs Input::toAttrs() const
|
||||
return attrs;
|
||||
}
|
||||
|
||||
bool Input::operator ==(const Input & other) const noexcept
|
||||
bool Input::operator==(const Input & other) const noexcept
|
||||
{
|
||||
return attrs == other.attrs;
|
||||
}
|
||||
|
||||
bool Input::contains(const Input & other) const
|
||||
{
|
||||
if (*this == other) return true;
|
||||
if (*this == other)
|
||||
return true;
|
||||
auto other2(other);
|
||||
other2.attrs.erase("ref");
|
||||
other2.attrs.erase("rev");
|
||||
if (*this == other2) return true;
|
||||
if (*this == other2)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -235,7 +238,8 @@ void Input::checkLocks(Input specified, Input & result)
|
||||
for (auto & field : specified.attrs) {
|
||||
auto field2 = result.attrs.find(field.first);
|
||||
if (field2 != result.attrs.end() && field.second != field2->second)
|
||||
throw Error("mismatch in field '%s' of input '%s', got '%s'",
|
||||
throw Error(
|
||||
"mismatch in field '%s' of input '%s', got '%s'",
|
||||
field.first,
|
||||
attrsToJSON(specified.attrs),
|
||||
attrsToJSON(result.attrs));
|
||||
@ -249,30 +253,38 @@ void Input::checkLocks(Input specified, Input & result)
|
||||
if (auto prevNarHash = specified.getNarHash()) {
|
||||
if (result.getNarHash() != prevNarHash) {
|
||||
if (result.getNarHash())
|
||||
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'",
|
||||
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), result.getNarHash()->to_string(HashFormat::SRI, true));
|
||||
throw Error(
|
||||
(unsigned int) 102,
|
||||
"NAR hash mismatch in input '%s', expected '%s' but got '%s'",
|
||||
specified.to_string(),
|
||||
prevNarHash->to_string(HashFormat::SRI, true),
|
||||
result.getNarHash()->to_string(HashFormat::SRI, true));
|
||||
else
|
||||
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none",
|
||||
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true));
|
||||
throw Error(
|
||||
(unsigned int) 102,
|
||||
"NAR hash mismatch in input '%s', expected '%s' but got none",
|
||||
specified.to_string(),
|
||||
prevNarHash->to_string(HashFormat::SRI, true));
|
||||
}
|
||||
}
|
||||
|
||||
if (auto prevLastModified = specified.getLastModified()) {
|
||||
if (result.getLastModified() != prevLastModified)
|
||||
throw Error("'lastModified' attribute mismatch in input '%s', expected %d, got %d",
|
||||
result.to_string(), *prevLastModified, result.getLastModified().value_or(-1));
|
||||
throw Error(
|
||||
"'lastModified' attribute mismatch in input '%s', expected %d, got %d",
|
||||
result.to_string(),
|
||||
*prevLastModified,
|
||||
result.getLastModified().value_or(-1));
|
||||
}
|
||||
|
||||
if (auto prevRev = specified.getRev()) {
|
||||
if (result.getRev() != prevRev)
|
||||
throw Error("'rev' attribute mismatch in input '%s', expected %s",
|
||||
result.to_string(), prevRev->gitRev());
|
||||
throw Error("'rev' attribute mismatch in input '%s', expected %s", result.to_string(), prevRev->gitRev());
|
||||
}
|
||||
|
||||
if (auto prevRevCount = specified.getRevCount()) {
|
||||
if (result.getRevCount() != prevRevCount)
|
||||
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
|
||||
result.to_string(), *prevRevCount);
|
||||
throw Error("'revCount' attribute mismatch in input '%s', expected %d", result.to_string(), *prevRevCount);
|
||||
}
|
||||
}
|
||||
|
||||
@ -316,8 +328,7 @@ std::pair<ref<SourceAccessor>, Input> Input::getAccessorUnchecked(ref<Store> sto
|
||||
|
||||
store->ensurePath(storePath);
|
||||
|
||||
debug("using substituted/cached input '%s' in '%s'",
|
||||
to_string(), store->printStorePath(storePath));
|
||||
debug("using substituted/cached input '%s' in '%s'", to_string(), store->printStorePath(storePath));
|
||||
|
||||
auto accessor = makeStorePathAccessor(store, storePath);
|
||||
|
||||
@ -339,11 +350,10 @@ std::pair<ref<SourceAccessor>, Input> Input::getAccessorUnchecked(ref<Store> sto
|
||||
return {accessor, std::move(result)};
|
||||
}
|
||||
|
||||
Input Input::applyOverrides(
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) const
|
||||
Input Input::applyOverrides(std::optional<std::string> ref, std::optional<Hash> rev) const
|
||||
{
|
||||
if (!scheme) return *this;
|
||||
if (!scheme)
|
||||
return *this;
|
||||
return scheme->applyOverrides(*this, ref, rev);
|
||||
}
|
||||
|
||||
@ -359,10 +369,7 @@ std::optional<std::filesystem::path> Input::getSourcePath() const
|
||||
return scheme->getSourcePath(*this);
|
||||
}
|
||||
|
||||
void Input::putFile(
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const
|
||||
void Input::putFile(const CanonPath & path, std::string_view contents, std::optional<std::string> commitMsg) const
|
||||
{
|
||||
assert(scheme);
|
||||
return scheme->putFile(*this, path, contents, commitMsg);
|
||||
@ -378,11 +385,13 @@ StorePath Input::computeStorePath(Store & store) const
|
||||
auto narHash = getNarHash();
|
||||
if (!narHash)
|
||||
throw Error("cannot compute store path for unlocked input '%s'", to_string());
|
||||
return store.makeFixedOutputPath(getName(), FixedOutputInfo {
|
||||
.method = FileIngestionMethod::NixArchive,
|
||||
.hash = *narHash,
|
||||
.references = {},
|
||||
});
|
||||
return store.makeFixedOutputPath(
|
||||
getName(),
|
||||
FixedOutputInfo{
|
||||
.method = FileIngestionMethod::NixArchive,
|
||||
.hash = *narHash,
|
||||
.references = {},
|
||||
});
|
||||
}
|
||||
|
||||
std::string Input::getType() const
|
||||
@ -415,7 +424,7 @@ std::optional<Hash> Input::getRev() const
|
||||
if (auto s = maybeGetStrAttr(attrs, "rev")) {
|
||||
try {
|
||||
hash = Hash::parseAnyPrefixed(*s);
|
||||
} catch (BadHash &e) {
|
||||
} catch (BadHash & e) {
|
||||
// Default to sha1 for backwards compatibility with existing
|
||||
// usages (e.g. `builtins.fetchTree` calls or flake inputs).
|
||||
hash = Hash::parseAny(*s, HashAlgorithm::SHA1);
|
||||
@ -444,10 +453,7 @@ ParsedURL InputScheme::toURL(const Input & input) const
|
||||
throw Error("don't know how to convert input '%s' to a URL", attrsToJSON(input.attrs));
|
||||
}
|
||||
|
||||
Input InputScheme::applyOverrides(
|
||||
const Input & input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) const
|
||||
Input InputScheme::applyOverrides(const Input & input, std::optional<std::string> ref, std::optional<Hash> rev) const
|
||||
{
|
||||
if (ref)
|
||||
throw Error("don't know how to set branch/tag name of input '%s' to '%s'", input.to_string(), *ref);
|
||||
@ -462,10 +468,7 @@ std::optional<std::filesystem::path> InputScheme::getSourcePath(const Input & in
|
||||
}
|
||||
|
||||
void InputScheme::putFile(
|
||||
const Input & input,
|
||||
const CanonPath & path,
|
||||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const
|
||||
const Input & input, const CanonPath & path, std::string_view contents, std::optional<std::string> commitMsg) const
|
||||
{
|
||||
throw Error("input '%s' does not support modifying file '%s'", input.to_string(), path);
|
||||
}
|
||||
@ -480,7 +483,7 @@ std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
|
||||
return {};
|
||||
}
|
||||
|
||||
std::string publicKeys_to_string(const std::vector<PublicKey>& publicKeys)
|
||||
std::string publicKeys_to_string(const std::vector<PublicKey> & publicKeys)
|
||||
{
|
||||
return ((nlohmann::json) publicKeys).dump();
|
||||
}
|
||||
@ -495,7 +498,7 @@ using namespace nix;
|
||||
|
||||
fetchers::PublicKey adl_serializer<fetchers::PublicKey>::from_json(const json & json)
|
||||
{
|
||||
fetchers::PublicKey res = { };
|
||||
fetchers::PublicKey res = {};
|
||||
if (auto type = optionalValueAt(json, "type"))
|
||||
res.type = getString(*type);
|
||||
|
||||
|
@ -50,9 +50,8 @@ std::string FilteringSourceAccessor::showPath(const CanonPath & path)
|
||||
void FilteringSourceAccessor::checkAccess(const CanonPath & path)
|
||||
{
|
||||
if (!isAllowed(path))
|
||||
throw makeNotAllowedError
|
||||
? makeNotAllowedError(path)
|
||||
: RestrictedPathError("access to path '%s' is forbidden", showPath(path));
|
||||
throw makeNotAllowedError ? makeNotAllowedError(path)
|
||||
: RestrictedPathError("access to path '%s' is forbidden", showPath(path));
|
||||
}
|
||||
|
||||
struct AllowListSourceAccessorImpl : AllowListSourceAccessor
|
||||
@ -68,13 +67,12 @@ struct AllowListSourceAccessorImpl : AllowListSourceAccessor
|
||||
: AllowListSourceAccessor(SourcePath(next), std::move(makeNotAllowedError))
|
||||
, allowedPrefixes(std::move(allowedPrefixes))
|
||||
, allowedPaths(std::move(allowedPaths))
|
||||
{ }
|
||||
{
|
||||
}
|
||||
|
||||
bool isAllowed(const CanonPath & path) override
|
||||
{
|
||||
return
|
||||
allowedPaths.contains(path)
|
||||
|| path.isAllowed(allowedPrefixes);
|
||||
return allowedPaths.contains(path) || path.isAllowed(allowedPrefixes);
|
||||
}
|
||||
|
||||
void allowPrefix(CanonPath prefix) override
|
||||
@ -90,16 +88,14 @@ ref<AllowListSourceAccessor> AllowListSourceAccessor::create(
|
||||
MakeNotAllowedError && makeNotAllowedError)
|
||||
{
|
||||
return make_ref<AllowListSourceAccessorImpl>(
|
||||
next,
|
||||
std::move(allowedPrefixes),
|
||||
std::move(allowedPaths),
|
||||
std::move(makeNotAllowedError));
|
||||
next, std::move(allowedPrefixes), std::move(allowedPaths), std::move(makeNotAllowedError));
|
||||
}
|
||||
|
||||
bool CachingFilteringSourceAccessor::isAllowed(const CanonPath & path)
|
||||
{
|
||||
auto i = cache.find(path);
|
||||
if (i != cache.end()) return i->second;
|
||||
if (i != cache.end())
|
||||
return i->second;
|
||||
auto res = isAllowedUncached(path);
|
||||
cache.emplace(path, res);
|
||||
return res;
|
||||
|
@ -36,23 +36,24 @@
|
||||
|
||||
namespace std {
|
||||
|
||||
template<> struct hash<git_oid>
|
||||
template<>
|
||||
struct hash<git_oid>
|
||||
{
|
||||
size_t operator()(const git_oid & oid) const
|
||||
{
|
||||
return * (size_t *) oid.id;
|
||||
return *(size_t *) oid.id;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const git_oid & oid)
|
||||
std::ostream & operator<<(std::ostream & str, const git_oid & oid)
|
||||
{
|
||||
str << git_oid_tostr_s(&oid);
|
||||
return str;
|
||||
}
|
||||
|
||||
bool operator == (const git_oid & oid1, const git_oid & oid2)
|
||||
bool operator==(const git_oid & oid1, const git_oid & oid2)
|
||||
{
|
||||
return git_oid_equal(&oid1, &oid2);
|
||||
}
|
||||
@ -80,9 +81,9 @@ typedef std::unique_ptr<git_indexer, Deleter<git_indexer_free>> Indexer;
|
||||
|
||||
Hash toHash(const git_oid & oid)
|
||||
{
|
||||
#ifdef GIT_EXPERIMENTAL_SHA256
|
||||
#ifdef GIT_EXPERIMENTAL_SHA256
|
||||
assert(oid.type == GIT_OID_SHA1);
|
||||
#endif
|
||||
#endif
|
||||
Hash hash(HashAlgorithm::SHA1);
|
||||
memcpy(hash.hash, oid.id, hash.hashSize);
|
||||
return hash;
|
||||
@ -116,7 +117,7 @@ template<typename T>
|
||||
T peelObject(git_object * obj, git_object_t type)
|
||||
{
|
||||
T obj2;
|
||||
if (git_object_peel((git_object * *) (typename T::pointer *) Setter(obj2), obj, type)) {
|
||||
if (git_object_peel((git_object **) (typename T::pointer *) Setter(obj2), obj, type)) {
|
||||
auto err = git_error_last();
|
||||
throw Error("peeling Git object '%s': %s", *git_object_id(obj), err->message);
|
||||
}
|
||||
@ -127,7 +128,7 @@ template<typename T>
|
||||
T dupObject(typename T::pointer obj)
|
||||
{
|
||||
T obj2;
|
||||
if (git_object_dup((git_object * *) (typename T::pointer *) Setter(obj2), (git_object *) obj))
|
||||
if (git_object_dup((git_object **) (typename T::pointer *) Setter(obj2), (git_object *) obj))
|
||||
throw Error("duplicating object '%s': %s", *git_object_id((git_object *) obj), git_error_last()->message);
|
||||
return obj2;
|
||||
}
|
||||
@ -146,21 +147,22 @@ static Object peelToTreeOrBlob(git_object * obj)
|
||||
return peelObject<Object>(obj, GIT_OBJECT_TREE);
|
||||
}
|
||||
|
||||
struct PackBuilderContext {
|
||||
struct PackBuilderContext
|
||||
{
|
||||
std::exception_ptr exception;
|
||||
|
||||
void handleException(const char * activity, int errCode)
|
||||
{
|
||||
switch (errCode) {
|
||||
case GIT_OK:
|
||||
break;
|
||||
case GIT_EUSER:
|
||||
if (!exception)
|
||||
panic("PackBuilderContext::handleException: user error, but exception was not set");
|
||||
case GIT_OK:
|
||||
break;
|
||||
case GIT_EUSER:
|
||||
if (!exception)
|
||||
panic("PackBuilderContext::handleException: user error, but exception was not set");
|
||||
|
||||
std::rethrow_exception(exception);
|
||||
default:
|
||||
throw Error("%s: %i, %s", Uncolored(activity), errCode, git_error_last()->message);
|
||||
std::rethrow_exception(exception);
|
||||
default:
|
||||
throw Error("%s: %i, %s", Uncolored(activity), errCode, git_error_last()->message);
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -170,9 +172,9 @@ extern "C" {
|
||||
/**
|
||||
* A `git_packbuilder_progress` implementation that aborts the pack building if needed.
|
||||
*/
|
||||
static int packBuilderProgressCheckInterrupt(int stage, uint32_t current, uint32_t total, void *payload)
|
||||
static int packBuilderProgressCheckInterrupt(int stage, uint32_t current, uint32_t total, void * payload)
|
||||
{
|
||||
PackBuilderContext & args = * (PackBuilderContext *) payload;
|
||||
PackBuilderContext & args = *(PackBuilderContext *) payload;
|
||||
try {
|
||||
checkInterrupt();
|
||||
return GIT_OK;
|
||||
@ -185,11 +187,12 @@ static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuil
|
||||
|
||||
} // extern "C"
|
||||
|
||||
static void initRepoAtomically(std::filesystem::path &path, bool bare)
|
||||
static void initRepoAtomically(std::filesystem::path & path, bool bare)
|
||||
{
|
||||
if (pathExists(path.string())) return;
|
||||
if (pathExists(path.string()))
|
||||
return;
|
||||
|
||||
Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() }));
|
||||
Path tmpDir = createTempDir(os_string_to_string(PathViewNG{std::filesystem::path(path).parent_path()}));
|
||||
AutoDelete delTmpDir(tmpDir, true);
|
||||
Repository tmpRepo;
|
||||
|
||||
@ -203,8 +206,7 @@ static void initRepoAtomically(std::filesystem::path &path, bool bare)
|
||||
// `path` may be attempted to be deleted by s::f::rename, in which case the code is:
|
||||
|| e.code() == std::errc::directory_not_empty) {
|
||||
return;
|
||||
}
|
||||
else
|
||||
} else
|
||||
throw SysError("moving temporary git repository from %s to %s", tmpDir, path);
|
||||
}
|
||||
// we successfully moved the repository, so the temporary directory no longer exists.
|
||||
@ -248,16 +250,17 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
throw Error("adding mempack backend to Git object database: %s", git_error_last()->message);
|
||||
}
|
||||
|
||||
operator git_repository * ()
|
||||
operator git_repository *()
|
||||
{
|
||||
return repo.get();
|
||||
}
|
||||
|
||||
void flush() override {
|
||||
void flush() override
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
git_buf buf = GIT_BUF_INIT;
|
||||
Finally _disposeBuf { [&] { git_buf_dispose(&buf); } };
|
||||
Finally _disposeBuf{[&] { git_buf_dispose(&buf); }};
|
||||
PackBuilder packBuilder;
|
||||
PackBuilderContext packBuilderContext;
|
||||
git_packbuilder_new(Setter(packBuilder), *this);
|
||||
@ -265,14 +268,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
git_packbuilder_set_threads(packBuilder.get(), 0 /* autodetect */);
|
||||
|
||||
packBuilderContext.handleException(
|
||||
"preparing packfile",
|
||||
git_mempack_write_thin_pack(mempack_backend, packBuilder.get())
|
||||
);
|
||||
"preparing packfile", git_mempack_write_thin_pack(mempack_backend, packBuilder.get()));
|
||||
checkInterrupt();
|
||||
packBuilderContext.handleException(
|
||||
"writing packfile",
|
||||
git_packbuilder_write_buf(&buf, packBuilder.get())
|
||||
);
|
||||
packBuilderContext.handleException("writing packfile", git_packbuilder_write_buf(&buf, packBuilder.get()));
|
||||
checkInterrupt();
|
||||
|
||||
std::string repo_path = std::string(git_repository_path(repo.get()));
|
||||
@ -317,12 +315,16 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
todo.push(peelObject<Commit>(lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT));
|
||||
|
||||
while (auto commit = pop(todo)) {
|
||||
if (!done.insert(*git_commit_id(commit->get())).second) continue;
|
||||
if (!done.insert(*git_commit_id(commit->get())).second)
|
||||
continue;
|
||||
|
||||
for (size_t n = 0; n < git_commit_parentcount(commit->get()); ++n) {
|
||||
git_commit * parent;
|
||||
if (git_commit_parent(&parent, commit->get(), n))
|
||||
throw Error("getting parent of Git commit '%s': %s", *git_commit_id(commit->get()), git_error_last()->message);
|
||||
throw Error(
|
||||
"getting parent of Git commit '%s': %s",
|
||||
*git_commit_id(commit->get()),
|
||||
git_error_last()->message);
|
||||
todo.push(Commit(parent));
|
||||
}
|
||||
}
|
||||
@ -372,7 +374,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
while (true) {
|
||||
git_config_entry * entry = nullptr;
|
||||
if (auto err = git_config_next(&entry, it.get())) {
|
||||
if (err == GIT_ITEROVER) break;
|
||||
if (err == GIT_ITEROVER)
|
||||
break;
|
||||
throw Error("iterating over .gitmodules: %s", git_error_last()->message);
|
||||
}
|
||||
entries.emplace(entry->name + 10, entry->value);
|
||||
@ -381,14 +384,16 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
std::vector<Submodule> result;
|
||||
|
||||
for (auto & [key, value] : entries) {
|
||||
if (!hasSuffix(key, ".path")) continue;
|
||||
if (!hasSuffix(key, ".path"))
|
||||
continue;
|
||||
std::string key2(key, 0, key.size() - 5);
|
||||
auto path = CanonPath(value);
|
||||
result.push_back(Submodule {
|
||||
.path = path,
|
||||
.url = entries[key2 + ".url"],
|
||||
.branch = entries[key2 + ".branch"],
|
||||
});
|
||||
result.push_back(
|
||||
Submodule{
|
||||
.path = path,
|
||||
.url = entries[key2 + ".url"],
|
||||
.branch = entries[key2 + ".branch"],
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
@ -414,11 +419,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
|
||||
/* Get all tracked files and determine whether the working
|
||||
directory is dirty. */
|
||||
std::function<int(const char * path, unsigned int statusFlags)> statusCallback = [&](const char * path, unsigned int statusFlags)
|
||||
{
|
||||
if (!(statusFlags & GIT_STATUS_INDEX_DELETED) &&
|
||||
!(statusFlags & GIT_STATUS_WT_DELETED))
|
||||
{
|
||||
std::function<int(const char * path, unsigned int statusFlags)> statusCallback = [&](const char * path,
|
||||
unsigned int statusFlags) {
|
||||
if (!(statusFlags & GIT_STATUS_INDEX_DELETED) && !(statusFlags & GIT_STATUS_WT_DELETED)) {
|
||||
info.files.insert(CanonPath(path));
|
||||
if (statusFlags != GIT_STATUS_CURRENT)
|
||||
info.dirtyFiles.insert(CanonPath(path));
|
||||
@ -474,7 +477,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
|
||||
Object obj;
|
||||
if (auto errCode = git_object_lookup(Setter(obj), *this, &oid, GIT_OBJECT_ANY)) {
|
||||
if (errCode == GIT_ENOTFOUND) return false;
|
||||
if (errCode == GIT_ENOTFOUND)
|
||||
return false;
|
||||
auto err = git_error_last();
|
||||
throw Error("getting Git object '%s': %s", oid, err->message);
|
||||
}
|
||||
@ -485,15 +489,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
/**
|
||||
* A 'GitSourceAccessor' with no regard for export-ignore or any other transformations.
|
||||
*/
|
||||
ref<GitSourceAccessor> getRawAccessor(
|
||||
const Hash & rev,
|
||||
bool smudgeLfs = false);
|
||||
ref<GitSourceAccessor> getRawAccessor(const Hash & rev, bool smudgeLfs = false);
|
||||
|
||||
ref<SourceAccessor> getAccessor(
|
||||
const Hash & rev,
|
||||
bool exportIgnore,
|
||||
std::string displayPrefix,
|
||||
bool smudgeLfs = false) override;
|
||||
ref<SourceAccessor>
|
||||
getAccessor(const Hash & rev, bool exportIgnore, std::string displayPrefix, bool smudgeLfs = false) override;
|
||||
|
||||
ref<SourceAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override;
|
||||
|
||||
@ -509,7 +508,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
static int transferProgressCallback(const git_indexer_progress * stats, void * payload)
|
||||
{
|
||||
auto act = (Activity *) payload;
|
||||
act->result(resFetchStatus,
|
||||
act->result(
|
||||
resFetchStatus,
|
||||
fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB",
|
||||
stats->received_objects,
|
||||
stats->total_objects,
|
||||
@ -519,14 +519,12 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
return getInterrupted() ? -1 : 0;
|
||||
}
|
||||
|
||||
void fetch(
|
||||
const std::string & url,
|
||||
const std::string & refspec,
|
||||
bool shallow) override
|
||||
void fetch(const std::string & url, const std::string & refspec, bool shallow) override
|
||||
{
|
||||
Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url));
|
||||
|
||||
// TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that)
|
||||
// TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support
|
||||
// that)
|
||||
// then use code that was removed in this commit (see blame)
|
||||
|
||||
auto dir = this->path;
|
||||
@ -535,55 +533,52 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
append(gitArgs, {"--depth", "1"});
|
||||
append(gitArgs, {std::string("--"), url, refspec});
|
||||
|
||||
runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.lookupPath = true,
|
||||
// FIXME: git stderr messes up our progress indicator, so
|
||||
// we're using --quiet for now. Should process its stderr.
|
||||
.args = gitArgs,
|
||||
.input = {},
|
||||
.isInteractive = true
|
||||
});
|
||||
runProgram(
|
||||
RunOptions{
|
||||
.program = "git",
|
||||
.lookupPath = true,
|
||||
// FIXME: git stderr messes up our progress indicator, so
|
||||
// we're using --quiet for now. Should process its stderr.
|
||||
.args = gitArgs,
|
||||
.input = {},
|
||||
.isInteractive = true});
|
||||
}
|
||||
|
||||
void verifyCommit(
|
||||
const Hash & rev,
|
||||
const std::vector<fetchers::PublicKey> & publicKeys) override
|
||||
void verifyCommit(const Hash & rev, const std::vector<fetchers::PublicKey> & publicKeys) override
|
||||
{
|
||||
// Create ad-hoc allowedSignersFile and populate it with publicKeys
|
||||
auto allowedSignersFile = createTempFile().second;
|
||||
std::string allowedSigners;
|
||||
for (const fetchers::PublicKey & k : publicKeys) {
|
||||
if (k.type != "ssh-dsa"
|
||||
&& k.type != "ssh-ecdsa"
|
||||
&& k.type != "ssh-ecdsa-sk"
|
||||
&& k.type != "ssh-ed25519"
|
||||
&& k.type != "ssh-ed25519-sk"
|
||||
&& k.type != "ssh-rsa")
|
||||
throw Error("Unknown key type '%s'.\n"
|
||||
if (k.type != "ssh-dsa" && k.type != "ssh-ecdsa" && k.type != "ssh-ecdsa-sk" && k.type != "ssh-ed25519"
|
||||
&& k.type != "ssh-ed25519-sk" && k.type != "ssh-rsa")
|
||||
throw Error(
|
||||
"Unknown key type '%s'.\n"
|
||||
"Please use one of\n"
|
||||
"- ssh-dsa\n"
|
||||
" ssh-ecdsa\n"
|
||||
" ssh-ecdsa-sk\n"
|
||||
" ssh-ed25519\n"
|
||||
" ssh-ed25519-sk\n"
|
||||
" ssh-rsa", k.type);
|
||||
" ssh-rsa",
|
||||
k.type);
|
||||
allowedSigners += "* " + k.type + " " + k.key + "\n";
|
||||
}
|
||||
writeFile(allowedSignersFile, allowedSigners);
|
||||
|
||||
// Run verification command
|
||||
auto [status, output] = runProgram(RunOptions {
|
||||
auto [status, output] = runProgram(
|
||||
RunOptions{
|
||||
.program = "git",
|
||||
.args = {
|
||||
"-c",
|
||||
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
|
||||
"-C", path.string(),
|
||||
"verify-commit",
|
||||
rev.gitRev()
|
||||
},
|
||||
.args =
|
||||
{"-c",
|
||||
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
|
||||
"-C",
|
||||
path.string(),
|
||||
"verify-commit",
|
||||
rev.gitRev()},
|
||||
.mergeStderrToStdout = true,
|
||||
});
|
||||
});
|
||||
|
||||
/* Evaluate result through status code and checking if public
|
||||
key fingerprints appear on stderr. This is neccessary
|
||||
@ -591,7 +586,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
commit being signed by gpg keys that are present in the
|
||||
users key agent. */
|
||||
std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
|
||||
for (const fetchers::PublicKey & k : publicKeys){
|
||||
for (const fetchers::PublicKey & k : publicKeys) {
|
||||
// Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
|
||||
std::string keyDecoded;
|
||||
try {
|
||||
@ -599,8 +594,9 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while decoding public key '%s' used for git signature", k.key);
|
||||
}
|
||||
auto fingerprint = trim(hashString(HashAlgorithm::SHA256, keyDecoded).to_string(nix::HashFormat::Base64, false), "=");
|
||||
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
|
||||
auto fingerprint =
|
||||
trim(hashString(HashAlgorithm::SHA256, keyDecoded).to_string(nix::HashFormat::Base64, false), "=");
|
||||
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+");
|
||||
re += "(" + escaped_fingerprint + ")";
|
||||
}
|
||||
re += "]";
|
||||
@ -621,7 +617,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
|
||||
auto narHash = accessor->hashPath(CanonPath::root);
|
||||
|
||||
fetchers::getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}}));
|
||||
fetchers::getCache()->upsert(
|
||||
cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}}));
|
||||
|
||||
return narHash;
|
||||
}
|
||||
@ -675,8 +672,9 @@ struct GitSourceAccessor : SourceAccessor
|
||||
if (lfsFetch->shouldFetch(path)) {
|
||||
StringSink s;
|
||||
try {
|
||||
auto contents = std::string((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get()));
|
||||
lfsFetch->fetch(contents, path, s, [&s](uint64_t size){ s.s.reserve(size); });
|
||||
auto contents =
|
||||
std::string((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get()));
|
||||
lfsFetch->fetch(contents, path, s, [&s](uint64_t size) { s.s.reserve(size); });
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while smudging git-lfs file '%s'", path);
|
||||
throw;
|
||||
@ -701,7 +699,7 @@ struct GitSourceAccessor : SourceAccessor
|
||||
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||
{
|
||||
if (path.isRoot())
|
||||
return Stat { .type = git_object_type(root.get()) == GIT_OBJECT_TREE ? tDirectory : tRegular };
|
||||
return Stat{.type = git_object_type(root.get()) == GIT_OBJECT_TREE ? tDirectory : tRegular};
|
||||
|
||||
auto entry = lookup(path);
|
||||
if (!entry)
|
||||
@ -710,20 +708,20 @@ struct GitSourceAccessor : SourceAccessor
|
||||
auto mode = git_tree_entry_filemode(entry);
|
||||
|
||||
if (mode == GIT_FILEMODE_TREE)
|
||||
return Stat { .type = tDirectory };
|
||||
return Stat{.type = tDirectory};
|
||||
|
||||
else if (mode == GIT_FILEMODE_BLOB)
|
||||
return Stat { .type = tRegular };
|
||||
return Stat{.type = tRegular};
|
||||
|
||||
else if (mode == GIT_FILEMODE_BLOB_EXECUTABLE)
|
||||
return Stat { .type = tRegular, .isExecutable = true };
|
||||
return Stat{.type = tRegular, .isExecutable = true};
|
||||
|
||||
else if (mode == GIT_FILEMODE_LINK)
|
||||
return Stat { .type = tSymlink };
|
||||
return Stat{.type = tSymlink};
|
||||
|
||||
else if (mode == GIT_FILEMODE_COMMIT)
|
||||
// Treat submodules as an empty directory.
|
||||
return Stat { .type = tDirectory };
|
||||
return Stat{.type = tDirectory};
|
||||
|
||||
else
|
||||
throw Error("file '%s' has an unsupported Git file type");
|
||||
@ -731,24 +729,23 @@ struct GitSourceAccessor : SourceAccessor
|
||||
|
||||
DirEntries readDirectory(const CanonPath & path) override
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](Tree tree) {
|
||||
DirEntries res;
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[&](Tree tree) {
|
||||
DirEntries res;
|
||||
|
||||
auto count = git_tree_entrycount(tree.get());
|
||||
auto count = git_tree_entrycount(tree.get());
|
||||
|
||||
for (size_t n = 0; n < count; ++n) {
|
||||
auto entry = git_tree_entry_byindex(tree.get(), n);
|
||||
// FIXME: add to cache
|
||||
res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{});
|
||||
}
|
||||
for (size_t n = 0; n < count; ++n) {
|
||||
auto entry = git_tree_entry_byindex(tree.get(), n);
|
||||
// FIXME: add to cache
|
||||
res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{});
|
||||
}
|
||||
|
||||
return res;
|
||||
},
|
||||
[&](Submodule) {
|
||||
return DirEntries();
|
||||
}
|
||||
}, getTree(path));
|
||||
return res;
|
||||
},
|
||||
[&](Submodule) { return DirEntries(); }},
|
||||
getTree(path));
|
||||
}
|
||||
|
||||
std::string readLink(const CanonPath & path) override
|
||||
@ -776,15 +773,18 @@ struct GitSourceAccessor : SourceAccessor
|
||||
git_tree_entry * lookup(const CanonPath & path)
|
||||
{
|
||||
auto i = lookupCache.find(path);
|
||||
if (i != lookupCache.end()) return i->second.get();
|
||||
if (i != lookupCache.end())
|
||||
return i->second.get();
|
||||
|
||||
auto parent = path.parent();
|
||||
if (!parent) return nullptr;
|
||||
if (!parent)
|
||||
return nullptr;
|
||||
|
||||
auto name = path.baseName().value();
|
||||
|
||||
auto parentTree = lookupTree(*parent);
|
||||
if (!parentTree) return nullptr;
|
||||
if (!parentTree)
|
||||
return nullptr;
|
||||
|
||||
auto count = git_tree_entrycount(parentTree->get());
|
||||
|
||||
@ -826,7 +826,7 @@ struct GitSourceAccessor : SourceAccessor
|
||||
return std::nullopt;
|
||||
|
||||
Tree tree;
|
||||
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
|
||||
if (git_tree_entry_to_object((git_object **) (git_tree **) Setter(tree), *repo, entry))
|
||||
throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
|
||||
|
||||
return tree;
|
||||
@ -840,7 +840,8 @@ struct GitSourceAccessor : SourceAccessor
|
||||
return entry;
|
||||
}
|
||||
|
||||
struct Submodule { };
|
||||
struct Submodule
|
||||
{};
|
||||
|
||||
std::variant<Tree, Submodule> getTree(const CanonPath & path)
|
||||
{
|
||||
@ -860,7 +861,7 @@ struct GitSourceAccessor : SourceAccessor
|
||||
throw Error("'%s' is not a directory", showPath(path));
|
||||
|
||||
Tree tree;
|
||||
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
|
||||
if (git_tree_entry_to_object((git_object **) (git_tree **) Setter(tree), *repo, entry))
|
||||
throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
|
||||
|
||||
return tree;
|
||||
@ -871,16 +872,12 @@ struct GitSourceAccessor : SourceAccessor
|
||||
if (!expectSymlink && git_object_type(root.get()) == GIT_OBJECT_BLOB)
|
||||
return dupObject<Blob>((git_blob *) &*root);
|
||||
|
||||
auto notExpected = [&]()
|
||||
{
|
||||
throw Error(
|
||||
expectSymlink
|
||||
? "'%s' is not a symlink"
|
||||
: "'%s' is not a regular file",
|
||||
showPath(path));
|
||||
auto notExpected = [&]() {
|
||||
throw Error(expectSymlink ? "'%s' is not a symlink" : "'%s' is not a regular file", showPath(path));
|
||||
};
|
||||
|
||||
if (path.isRoot()) notExpected();
|
||||
if (path.isRoot())
|
||||
notExpected();
|
||||
|
||||
auto entry = need(path);
|
||||
|
||||
@ -897,26 +894,31 @@ struct GitSourceAccessor : SourceAccessor
|
||||
}
|
||||
|
||||
Blob blob;
|
||||
if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *repo, entry))
|
||||
if (git_tree_entry_to_object((git_object **) (git_blob **) Setter(blob), *repo, entry))
|
||||
throw Error("looking up file '%s': %s", showPath(path), git_error_last()->message);
|
||||
|
||||
return blob;
|
||||
}
|
||||
};
|
||||
|
||||
struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor {
|
||||
struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor
|
||||
{
|
||||
ref<GitRepoImpl> repo;
|
||||
std::optional<Hash> rev;
|
||||
|
||||
GitExportIgnoreSourceAccessor(ref<GitRepoImpl> repo, ref<SourceAccessor> next, std::optional<Hash> rev)
|
||||
: CachingFilteringSourceAccessor(next, [&](const CanonPath & path) {
|
||||
return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path));
|
||||
})
|
||||
: CachingFilteringSourceAccessor(
|
||||
next,
|
||||
[&](const CanonPath & path) {
|
||||
return RestrictedPathError(
|
||||
fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path));
|
||||
})
|
||||
, repo(repo)
|
||||
, rev(rev)
|
||||
{ }
|
||||
{
|
||||
}
|
||||
|
||||
bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut)
|
||||
bool gitAttrGet(const CanonPath & path, const char * attrName, const char *& valueOut)
|
||||
{
|
||||
const char * pathCStr = path.rel_c_str();
|
||||
|
||||
@ -926,27 +928,16 @@ struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor {
|
||||
// TODO: test that gitattributes from global and system are not used
|
||||
// (ie more or less: home and etc - both of them!)
|
||||
opts.flags = GIT_ATTR_CHECK_INCLUDE_COMMIT | GIT_ATTR_CHECK_NO_SYSTEM;
|
||||
return git_attr_get_ext(
|
||||
&valueOut,
|
||||
*repo,
|
||||
&opts,
|
||||
pathCStr,
|
||||
attrName
|
||||
);
|
||||
}
|
||||
else {
|
||||
return git_attr_get_ext(&valueOut, *repo, &opts, pathCStr, attrName);
|
||||
} else {
|
||||
return git_attr_get(
|
||||
&valueOut,
|
||||
*repo,
|
||||
GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM,
|
||||
pathCStr,
|
||||
attrName);
|
||||
&valueOut, *repo, GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM, pathCStr, attrName);
|
||||
}
|
||||
}
|
||||
|
||||
bool isExportIgnored(const CanonPath & path)
|
||||
{
|
||||
const char *exportIgnoreEntry = nullptr;
|
||||
const char * exportIgnoreEntry = nullptr;
|
||||
|
||||
// GIT_ATTR_CHECK_INDEX_ONLY:
|
||||
// > It will use index only for creating archives or for a bare repo
|
||||
@ -957,8 +948,7 @@ struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor {
|
||||
return false;
|
||||
else
|
||||
throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// Official git will silently reject export-ignore lines that have
|
||||
// values. We do the same.
|
||||
return GIT_ATTR_IS_TRUE(exportIgnoreEntry);
|
||||
@ -969,7 +959,6 @@ struct GitExportIgnoreSourceAccessor : CachingFilteringSourceAccessor {
|
||||
{
|
||||
return !isExportIgnored(path);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
@ -989,26 +978,25 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
const git_tree_entry * entry;
|
||||
Tree prevTree = nullptr;
|
||||
|
||||
if (!pendingDirs.empty() &&
|
||||
(entry = git_treebuilder_get(pendingDirs.back().builder.get(), name.c_str())))
|
||||
{
|
||||
if (!pendingDirs.empty() && (entry = git_treebuilder_get(pendingDirs.back().builder.get(), name.c_str()))) {
|
||||
/* Clone a tree that we've already finished. This happens
|
||||
if a tarball has directory entries that are not
|
||||
contiguous. */
|
||||
if (git_tree_entry_type(entry) != GIT_OBJECT_TREE)
|
||||
throw Error("parent of '%s' is not a directory", name);
|
||||
|
||||
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(prevTree), *repo, entry))
|
||||
if (git_tree_entry_to_object((git_object **) (git_tree **) Setter(prevTree), *repo, entry))
|
||||
throw Error("looking up parent of '%s': %s", name, git_error_last()->message);
|
||||
}
|
||||
|
||||
git_treebuilder * b;
|
||||
if (git_treebuilder_new(&b, *repo, prevTree.get()))
|
||||
throw Error("creating a tree builder: %s", git_error_last()->message);
|
||||
pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) });
|
||||
pendingDirs.push_back({.name = std::move(name), .builder = TreeBuilder(b)});
|
||||
};
|
||||
|
||||
GitFileSystemObjectSinkImpl(ref<GitRepoImpl> repo) : repo(repo)
|
||||
GitFileSystemObjectSinkImpl(ref<GitRepoImpl> repo)
|
||||
: repo(repo)
|
||||
{
|
||||
pushBuilder("");
|
||||
}
|
||||
@ -1055,34 +1043,34 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
{
|
||||
std::span<const std::string> pathComponents2{pathComponents};
|
||||
|
||||
updateBuilders(
|
||||
isDir
|
||||
? pathComponents2
|
||||
: pathComponents2.first(pathComponents2.size() - 1));
|
||||
updateBuilders(isDir ? pathComponents2 : pathComponents2.first(pathComponents2.size() - 1));
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void createRegularFile(
|
||||
const CanonPath & path,
|
||||
std::function<void(CreateRegularFileSink &)> func) override
|
||||
void createRegularFile(const CanonPath & path, std::function<void(CreateRegularFileSink &)> func) override
|
||||
{
|
||||
auto pathComponents = tokenizeString<std::vector<std::string>>(path.rel(), "/");
|
||||
if (!prepareDirs(pathComponents, false)) return;
|
||||
if (!prepareDirs(pathComponents, false))
|
||||
return;
|
||||
|
||||
git_writestream * stream = nullptr;
|
||||
if (git_blob_create_from_stream(&stream, *repo, nullptr))
|
||||
throw Error("creating a blob stream object: %s", git_error_last()->message);
|
||||
|
||||
struct CRF : CreateRegularFileSink {
|
||||
struct CRF : CreateRegularFileSink
|
||||
{
|
||||
const CanonPath & path;
|
||||
GitFileSystemObjectSinkImpl & back;
|
||||
git_writestream * stream;
|
||||
bool executable = false;
|
||||
CRF(const CanonPath & path, GitFileSystemObjectSinkImpl & back, git_writestream * stream)
|
||||
: path(path), back(back), stream(stream)
|
||||
{}
|
||||
void operator () (std::string_view data) override
|
||||
: path(path)
|
||||
, back(back)
|
||||
, stream(stream)
|
||||
{
|
||||
}
|
||||
void operator()(std::string_view data) override
|
||||
{
|
||||
if (stream->write(stream, data.data(), data.size()))
|
||||
throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message);
|
||||
@ -1091,17 +1079,14 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
{
|
||||
executable = true;
|
||||
}
|
||||
} crf { path, *this, stream };
|
||||
} crf{path, *this, stream};
|
||||
func(crf);
|
||||
|
||||
git_oid oid;
|
||||
if (git_blob_create_from_stream_commit(&oid, stream))
|
||||
throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message);
|
||||
|
||||
addToTree(*pathComponents.rbegin(), oid,
|
||||
crf.executable
|
||||
? GIT_FILEMODE_BLOB_EXECUTABLE
|
||||
: GIT_FILEMODE_BLOB);
|
||||
addToTree(*pathComponents.rbegin(), oid, crf.executable ? GIT_FILEMODE_BLOB_EXECUTABLE : GIT_FILEMODE_BLOB);
|
||||
}
|
||||
|
||||
void createDirectory(const CanonPath & path) override
|
||||
@ -1113,7 +1098,8 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
void createSymlink(const CanonPath & path, const std::string & target) override
|
||||
{
|
||||
auto pathComponents = tokenizeString<std::vector<std::string>>(path.rel(), "/");
|
||||
if (!prepareDirs(pathComponents, false)) return;
|
||||
if (!prepareDirs(pathComponents, false))
|
||||
return;
|
||||
|
||||
git_oid oid;
|
||||
if (git_blob_create_from_buffer(&oid, *repo, target.c_str(), target.size()))
|
||||
@ -1128,7 +1114,8 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
for (auto & c : path)
|
||||
pathComponents.emplace_back(c);
|
||||
|
||||
if (!prepareDirs(pathComponents, false)) return;
|
||||
if (!prepareDirs(pathComponents, false))
|
||||
return;
|
||||
|
||||
// We can't just look up the path from the start of the root, since
|
||||
// some parent directories may not have finished yet, so we compute
|
||||
@ -1172,9 +1159,7 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
|
||||
assert(entry);
|
||||
|
||||
addToTree(*pathComponents.rbegin(),
|
||||
*git_tree_entry_id(entry),
|
||||
git_tree_entry_filemode(entry));
|
||||
addToTree(*pathComponents.rbegin(), *git_tree_entry_id(entry), git_tree_entry_filemode(entry));
|
||||
}
|
||||
|
||||
Hash flush() override
|
||||
@ -1189,19 +1174,14 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
||||
}
|
||||
};
|
||||
|
||||
ref<GitSourceAccessor> GitRepoImpl::getRawAccessor(
|
||||
const Hash & rev,
|
||||
bool smudgeLfs)
|
||||
ref<GitSourceAccessor> GitRepoImpl::getRawAccessor(const Hash & rev, bool smudgeLfs)
|
||||
{
|
||||
auto self = ref<GitRepoImpl>(shared_from_this());
|
||||
return make_ref<GitSourceAccessor>(self, rev, smudgeLfs);
|
||||
}
|
||||
|
||||
ref<SourceAccessor> GitRepoImpl::getAccessor(
|
||||
const Hash & rev,
|
||||
bool exportIgnore,
|
||||
std::string displayPrefix,
|
||||
bool smudgeLfs)
|
||||
ref<SourceAccessor>
|
||||
GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore, std::string displayPrefix, bool smudgeLfs)
|
||||
{
|
||||
auto self = ref<GitRepoImpl>(shared_from_this());
|
||||
ref<GitSourceAccessor> rawGitAccessor = getRawAccessor(rev, smudgeLfs);
|
||||
@ -1212,16 +1192,17 @@ ref<SourceAccessor> GitRepoImpl::getAccessor(
|
||||
return rawGitAccessor;
|
||||
}
|
||||
|
||||
ref<SourceAccessor> GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError)
|
||||
ref<SourceAccessor>
|
||||
GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError)
|
||||
{
|
||||
auto self = ref<GitRepoImpl>(shared_from_this());
|
||||
ref<SourceAccessor> fileAccessor =
|
||||
AllowListSourceAccessor::create(
|
||||
makeFSSourceAccessor(path),
|
||||
std::set<CanonPath>{ wd.files },
|
||||
// Always allow access to the root, but not its children.
|
||||
std::unordered_set<CanonPath>{CanonPath::root},
|
||||
std::move(makeNotAllowedError)).cast<SourceAccessor>();
|
||||
ref<SourceAccessor> fileAccessor = AllowListSourceAccessor::create(
|
||||
makeFSSourceAccessor(path),
|
||||
std::set<CanonPath>{wd.files},
|
||||
// Always allow access to the root, but not its children.
|
||||
std::unordered_set<CanonPath>{CanonPath::root},
|
||||
std::move(makeNotAllowedError))
|
||||
.cast<SourceAccessor>();
|
||||
if (exportIgnore)
|
||||
return make_ref<GitExportIgnoreSourceAccessor>(self, fileAccessor, std::nullopt);
|
||||
else
|
||||
@ -1239,7 +1220,8 @@ std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules
|
||||
CanonPath modulesFile(".gitmodules");
|
||||
|
||||
auto accessor = getAccessor(rev, exportIgnore, "");
|
||||
if (!accessor->pathExists(modulesFile)) return {};
|
||||
if (!accessor->pathExists(modulesFile))
|
||||
return {};
|
||||
|
||||
/* Parse it and get the revision of each submodule. */
|
||||
auto configS = accessor->readFile(modulesFile);
|
||||
@ -1279,7 +1261,8 @@ GitRepo::WorkdirInfo GitRepo::getCachedWorkdirInfo(const std::filesystem::path &
|
||||
{
|
||||
auto cache(_cache.lock());
|
||||
auto i = cache->find(path);
|
||||
if (i != cache->end()) return i->second;
|
||||
if (i != cache->end())
|
||||
return i->second;
|
||||
}
|
||||
auto workdirInfo = GitRepo::openRepo(path)->getWorkdirInfo();
|
||||
_cache.lock()->emplace(path, workdirInfo);
|
||||
|
@ -43,10 +43,8 @@ bool isCacheFileWithinTtl(time_t now, const struct stat & st)
|
||||
|
||||
Path getCachePath(std::string_view key, bool shallow)
|
||||
{
|
||||
return getCacheDir()
|
||||
+ "/gitv3/"
|
||||
+ hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false)
|
||||
+ (shallow ? "-shallow" : "");
|
||||
return getCacheDir() + "/gitv3/" + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false)
|
||||
+ (shallow ? "-shallow" : "");
|
||||
}
|
||||
|
||||
// Returns the name of the HEAD branch.
|
||||
@ -58,24 +56,26 @@ Path getCachePath(std::string_view key, bool shallow)
|
||||
// ...
|
||||
std::optional<std::string> readHead(const Path & path)
|
||||
{
|
||||
auto [status, output] = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
// FIXME: use 'HEAD' to avoid returning all refs
|
||||
.args = {"ls-remote", "--symref", path},
|
||||
.isInteractive = true,
|
||||
});
|
||||
if (status != 0) return std::nullopt;
|
||||
auto [status, output] = runProgram(
|
||||
RunOptions{
|
||||
.program = "git",
|
||||
// FIXME: use 'HEAD' to avoid returning all refs
|
||||
.args = {"ls-remote", "--symref", path},
|
||||
.isInteractive = true,
|
||||
});
|
||||
if (status != 0)
|
||||
return std::nullopt;
|
||||
|
||||
std::string_view line = output;
|
||||
line = line.substr(0, line.find("\n"));
|
||||
if (const auto parseResult = git::parseLsRemoteLine(line); parseResult && parseResult->reference == "HEAD") {
|
||||
switch (parseResult->kind) {
|
||||
case git::LsRemoteRefLine::Kind::Symbolic:
|
||||
debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path);
|
||||
break;
|
||||
case git::LsRemoteRefLine::Kind::Object:
|
||||
debug("resolved HEAD rev '%s' for repo '%s'", parseResult->target, path);
|
||||
break;
|
||||
case git::LsRemoteRefLine::Kind::Symbolic:
|
||||
debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path);
|
||||
break;
|
||||
case git::LsRemoteRefLine::Kind::Object:
|
||||
debug("resolved HEAD rev '%s' for repo '%s'", parseResult->target, path);
|
||||
break;
|
||||
}
|
||||
return parseResult->target;
|
||||
}
|
||||
@ -87,15 +87,15 @@ bool storeCachedHead(const std::string & actualUrl, bool shallow, const std::str
|
||||
{
|
||||
Path cacheDir = getCachePath(actualUrl, shallow);
|
||||
try {
|
||||
runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef });
|
||||
} catch (ExecError &e) {
|
||||
runProgram("git", true, {"-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef});
|
||||
} catch (ExecError & e) {
|
||||
if (
|
||||
#ifndef WIN32 // TODO abstract over exit status handling on Windows
|
||||
!WIFEXITED(e.status)
|
||||
#else
|
||||
e.status != 0
|
||||
#endif
|
||||
)
|
||||
)
|
||||
throw;
|
||||
|
||||
return false;
|
||||
@ -116,17 +116,15 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl, bool sh
|
||||
std::optional<std::string> cachedRef;
|
||||
if (stat(headRefFile.c_str(), &st) == 0) {
|
||||
cachedRef = readHead(cacheDir);
|
||||
if (cachedRef != std::nullopt &&
|
||||
*cachedRef != gitInitialBranch &&
|
||||
isCacheFileWithinTtl(now, st))
|
||||
{
|
||||
if (cachedRef != std::nullopt && *cachedRef != gitInitialBranch && isCacheFileWithinTtl(now, st)) {
|
||||
debug("using cached HEAD ref '%s' for repo '%s'", *cachedRef, actualUrl);
|
||||
return cachedRef;
|
||||
}
|
||||
}
|
||||
|
||||
auto ref = readHead(actualUrl);
|
||||
if (ref) return ref;
|
||||
if (ref)
|
||||
return ref;
|
||||
|
||||
if (cachedRef) {
|
||||
// If the cached git ref is expired in fetch() below, and the 'git fetch'
|
||||
@ -152,28 +150,26 @@ std::vector<PublicKey> getPublicKeys(const Attrs & attrs)
|
||||
}
|
||||
}
|
||||
if (attrs.contains("publicKey"))
|
||||
publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")});
|
||||
publicKeys.push_back(
|
||||
PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"), getStrAttr(attrs, "publicKey")});
|
||||
return publicKeys;
|
||||
}
|
||||
|
||||
} // end namespace
|
||||
} // end namespace
|
||||
|
||||
static const Hash nullRev{HashAlgorithm::SHA1};
|
||||
|
||||
struct GitInputScheme : InputScheme
|
||||
{
|
||||
std::optional<Input> inputFromURL(
|
||||
const Settings & settings,
|
||||
const ParsedURL & url, bool requireTree) const override
|
||||
std::optional<Input> inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override
|
||||
{
|
||||
if (url.scheme != "git" &&
|
||||
url.scheme != "git+http" &&
|
||||
url.scheme != "git+https" &&
|
||||
url.scheme != "git+ssh" &&
|
||||
url.scheme != "git+file") return {};
|
||||
if (url.scheme != "git" && url.scheme != "git+http" && url.scheme != "git+https" && url.scheme != "git+ssh"
|
||||
&& url.scheme != "git+file")
|
||||
return {};
|
||||
|
||||
auto url2(url);
|
||||
if (hasPrefix(url2.scheme, "git+")) url2.scheme = std::string(url2.scheme, 4);
|
||||
if (hasPrefix(url2.scheme, "git+"))
|
||||
url2.scheme = std::string(url2.scheme, 4);
|
||||
url2.query.clear();
|
||||
|
||||
Attrs attrs;
|
||||
@ -182,8 +178,10 @@ struct GitInputScheme : InputScheme
|
||||
for (auto & [name, value] : url.query) {
|
||||
if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys")
|
||||
attrs.emplace(name, value);
|
||||
else if (name == "shallow" || name == "submodules" || name == "lfs" || name == "exportIgnore" || name == "allRefs" || name == "verifyCommit")
|
||||
attrs.emplace(name, Explicit<bool> { value == "1" });
|
||||
else if (
|
||||
name == "shallow" || name == "submodules" || name == "lfs" || name == "exportIgnore"
|
||||
|| name == "allRefs" || name == "verifyCommit")
|
||||
attrs.emplace(name, Explicit<bool>{value == "1"});
|
||||
else
|
||||
url2.query.emplace(name, value);
|
||||
}
|
||||
@ -193,7 +191,6 @@ struct GitInputScheme : InputScheme
|
||||
return inputFromAttrs(settings, attrs);
|
||||
}
|
||||
|
||||
|
||||
std::string_view schemeName() const override
|
||||
{
|
||||
return "git";
|
||||
@ -223,15 +220,10 @@ struct GitInputScheme : InputScheme
|
||||
};
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(
|
||||
const Settings & settings,
|
||||
const Attrs & attrs) const override
|
||||
std::optional<Input> inputFromAttrs(const Settings & settings, const Attrs & attrs) const override
|
||||
{
|
||||
for (auto & [name, _] : attrs)
|
||||
if (name == "verifyCommit"
|
||||
|| name == "keytype"
|
||||
|| name == "publicKey"
|
||||
|| name == "publicKeys")
|
||||
if (name == "verifyCommit" || name == "keytype" || name == "publicKey" || name == "publicKeys")
|
||||
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||
|
||||
maybeGetBoolAttr(attrs, "verifyCommit");
|
||||
@ -255,9 +247,12 @@ struct GitInputScheme : InputScheme
|
||||
ParsedURL toURL(const Input & input) const override
|
||||
{
|
||||
auto url = parseURL(getStrAttr(input.attrs, "url"));
|
||||
if (url.scheme != "git") url.scheme = "git+" + url.scheme;
|
||||
if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
|
||||
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
|
||||
if (url.scheme != "git")
|
||||
url.scheme = "git+" + url.scheme;
|
||||
if (auto rev = input.getRev())
|
||||
url.query.insert_or_assign("rev", rev->gitRev());
|
||||
if (auto ref = input.getRef())
|
||||
url.query.insert_or_assign("ref", *ref);
|
||||
if (getShallowAttr(input))
|
||||
url.query.insert_or_assign("shallow", "1");
|
||||
if (getLfsAttr(input))
|
||||
@ -272,20 +267,18 @@ struct GitInputScheme : InputScheme
|
||||
if (publicKeys.size() == 1) {
|
||||
url.query.insert_or_assign("keytype", publicKeys.at(0).type);
|
||||
url.query.insert_or_assign("publicKey", publicKeys.at(0).key);
|
||||
}
|
||||
else if (publicKeys.size() > 1)
|
||||
} else if (publicKeys.size() > 1)
|
||||
url.query.insert_or_assign("publicKeys", publicKeys_to_string(publicKeys));
|
||||
return url;
|
||||
}
|
||||
|
||||
Input applyOverrides(
|
||||
const Input & input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) const override
|
||||
Input applyOverrides(const Input & input, std::optional<std::string> ref, std::optional<Hash> rev) const override
|
||||
{
|
||||
auto res(input);
|
||||
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
if (ref) res.attrs.insert_or_assign("ref", *ref);
|
||||
if (rev)
|
||||
res.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
if (ref)
|
||||
res.attrs.insert_or_assign("ref", *ref);
|
||||
if (!res.getRef() && res.getRev())
|
||||
throw Error("Git input '%s' has a commit hash but no branch/tag name", res.to_string());
|
||||
return res;
|
||||
@ -304,7 +297,8 @@ struct GitInputScheme : InputScheme
|
||||
args.push_back(*ref);
|
||||
}
|
||||
|
||||
if (input.getRev()) throw UnimplementedError("cloning a specific revision is not implemented");
|
||||
if (input.getRev())
|
||||
throw UnimplementedError("cloning a specific revision is not implemented");
|
||||
|
||||
args.push_back(destDir);
|
||||
|
||||
@ -325,14 +319,23 @@ struct GitInputScheme : InputScheme
|
||||
auto repoInfo = getRepoInfo(input);
|
||||
auto repoPath = repoInfo.getPath();
|
||||
if (!repoPath)
|
||||
throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
|
||||
throw Error(
|
||||
"cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
|
||||
|
||||
writeFile(*repoPath / path.rel(), contents);
|
||||
|
||||
auto result = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.args = {"-C", repoPath->string(), "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())},
|
||||
});
|
||||
auto result = runProgram(
|
||||
RunOptions{
|
||||
.program = "git",
|
||||
.args =
|
||||
{"-C",
|
||||
repoPath->string(),
|
||||
"--git-dir",
|
||||
repoInfo.gitDir,
|
||||
"check-ignore",
|
||||
"--quiet",
|
||||
std::string(path.rel())},
|
||||
});
|
||||
auto exitCode =
|
||||
#ifndef WIN32 // TODO abstract over exit status handling on Windows
|
||||
WEXITSTATUS(result.first)
|
||||
@ -343,15 +346,32 @@ struct GitInputScheme : InputScheme
|
||||
|
||||
if (exitCode != 0) {
|
||||
// The path is not `.gitignore`d, we can add the file.
|
||||
runProgram("git", true,
|
||||
{ "-C", repoPath->string(), "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
|
||||
|
||||
runProgram(
|
||||
"git",
|
||||
true,
|
||||
{"-C",
|
||||
repoPath->string(),
|
||||
"--git-dir",
|
||||
repoInfo.gitDir,
|
||||
"add",
|
||||
"--intent-to-add",
|
||||
"--",
|
||||
std::string(path.rel())});
|
||||
|
||||
if (commitMsg) {
|
||||
// Pause the logger to allow for user input (such as a gpg passphrase) in `git commit`
|
||||
auto suspension = logger->suspend();
|
||||
runProgram("git", true,
|
||||
{ "-C", repoPath->string(), "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-F", "-" },
|
||||
runProgram(
|
||||
"git",
|
||||
true,
|
||||
{"-C",
|
||||
repoPath->string(),
|
||||
"--git-dir",
|
||||
repoInfo.gitDir,
|
||||
"commit",
|
||||
std::string(path.rel()),
|
||||
"-F",
|
||||
"-"},
|
||||
*commitMsg);
|
||||
}
|
||||
}
|
||||
@ -370,12 +390,10 @@ struct GitInputScheme : InputScheme
|
||||
std::string locationToArg() const
|
||||
{
|
||||
return std::visit(
|
||||
overloaded {
|
||||
[&](const std::filesystem::path & path)
|
||||
{ return path.string(); },
|
||||
[&](const ParsedURL & url)
|
||||
{ return url.to_string(); }
|
||||
}, location);
|
||||
overloaded{
|
||||
[&](const std::filesystem::path & path) { return path.string(); },
|
||||
[&](const ParsedURL & url) { return url.to_string(); }},
|
||||
location);
|
||||
}
|
||||
|
||||
std::optional<std::filesystem::path> getPath() const
|
||||
@ -427,10 +445,11 @@ struct GitInputScheme : InputScheme
|
||||
|
||||
RepoInfo getRepoInfo(const Input & input) const
|
||||
{
|
||||
auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
|
||||
{
|
||||
auto checkHashAlgorithm = [&](const std::optional<Hash> & hash) {
|
||||
if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
|
||||
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
|
||||
throw Error(
|
||||
"Hash '%s' is not supported by Git. Supported types are sha1 and sha256.",
|
||||
hash->to_string(HashFormat::Base16, true));
|
||||
};
|
||||
|
||||
if (auto rev = input.getRev())
|
||||
@ -505,7 +524,8 @@ struct GitInputScheme : InputScheme
|
||||
if (auto revCountAttrs = cache->lookup(key))
|
||||
return getIntAttr(*revCountAttrs, "revCount");
|
||||
|
||||
Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.locationToArg()));
|
||||
Activity act(
|
||||
*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.locationToArg()));
|
||||
|
||||
auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev);
|
||||
|
||||
@ -517,12 +537,10 @@ struct GitInputScheme : InputScheme
|
||||
std::string getDefaultRef(const RepoInfo & repoInfo, bool shallow) const
|
||||
{
|
||||
auto head = std::visit(
|
||||
overloaded {
|
||||
[&](const std::filesystem::path & path)
|
||||
{ return GitRepo::openRepo(path)->getWorkdirRef(); },
|
||||
[&](const ParsedURL & url)
|
||||
{ return readHeadCached(url.to_string(), shallow); }
|
||||
}, repoInfo.location);
|
||||
overloaded{
|
||||
[&](const std::filesystem::path & path) { return GitRepo::openRepo(path)->getWorkdirRef(); },
|
||||
[&](const ParsedURL & url) { return readHeadCached(url.to_string(), shallow); }},
|
||||
repoInfo.location);
|
||||
if (!head) {
|
||||
warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.locationToArg());
|
||||
return "master";
|
||||
@ -556,14 +574,13 @@ struct GitInputScheme : InputScheme
|
||||
if (input.getRev() && repo)
|
||||
repo->verifyCommit(*input.getRev(), publicKeys);
|
||||
else
|
||||
throw Error("commit verification is required for Git repository '%s', but it's dirty", input.to_string());
|
||||
throw Error(
|
||||
"commit verification is required for Git repository '%s', but it's dirty", input.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
std::pair<ref<SourceAccessor>, Input> getAccessorFromCommit(
|
||||
ref<Store> store,
|
||||
RepoInfo & repoInfo,
|
||||
Input && input) const
|
||||
std::pair<ref<SourceAccessor>, Input>
|
||||
getAccessorFromCommit(ref<Store> store, RepoInfo & repoInfo, Input && input) const
|
||||
{
|
||||
assert(!repoInfo.workdirInfo.isDirty);
|
||||
|
||||
@ -594,10 +611,7 @@ struct GitInputScheme : InputScheme
|
||||
// We need to set the origin so resolving submodule URLs works
|
||||
repo->setRemote("origin", repoUrl.to_string());
|
||||
|
||||
auto localRefFile =
|
||||
ref.compare(0, 5, "refs/") == 0
|
||||
? cacheDir / ref
|
||||
: cacheDir / "refs/heads" / ref;
|
||||
auto localRefFile = ref.compare(0, 5, "refs/") == 0 ? cacheDir / ref : cacheDir / "refs/heads" / ref;
|
||||
|
||||
bool doFetch;
|
||||
time_t now = time(0);
|
||||
@ -613,30 +627,27 @@ struct GitInputScheme : InputScheme
|
||||
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
||||
git fetch to update the local ref to the remote ref. */
|
||||
struct stat st;
|
||||
doFetch = stat(localRefFile.string().c_str(), &st) != 0 ||
|
||||
!isCacheFileWithinTtl(now, st);
|
||||
doFetch = stat(localRefFile.string().c_str(), &st) != 0 || !isCacheFileWithinTtl(now, st);
|
||||
}
|
||||
}
|
||||
|
||||
if (doFetch) {
|
||||
bool shallow = getShallowAttr(input);
|
||||
try {
|
||||
auto fetchRef =
|
||||
getAllRefsAttr(input)
|
||||
? "refs/*:refs/*"
|
||||
: input.getRev()
|
||||
? input.getRev()->gitRev()
|
||||
: ref.compare(0, 5, "refs/") == 0
|
||||
? fmt("%1%:%1%", ref)
|
||||
: ref == "HEAD"
|
||||
? ref
|
||||
: fmt("%1%:%1%", "refs/heads/" + ref);
|
||||
auto fetchRef = getAllRefsAttr(input) ? "refs/*:refs/*"
|
||||
: input.getRev() ? input.getRev()->gitRev()
|
||||
: ref.compare(0, 5, "refs/") == 0 ? fmt("%1%:%1%", ref)
|
||||
: ref == "HEAD" ? ref
|
||||
: fmt("%1%:%1%", "refs/heads/" + ref);
|
||||
|
||||
repo->fetch(repoUrl.to_string(), fetchRef, shallow);
|
||||
} catch (Error & e) {
|
||||
if (!std::filesystem::exists(localRefFile)) throw;
|
||||
if (!std::filesystem::exists(localRefFile))
|
||||
throw;
|
||||
logError(e.info());
|
||||
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.locationToArg());
|
||||
warn(
|
||||
"could not update local clone of Git repository '%s'; continuing with the most recent version",
|
||||
repoInfo.locationToArg());
|
||||
}
|
||||
|
||||
try {
|
||||
@ -653,16 +664,17 @@ struct GitInputScheme : InputScheme
|
||||
if (!repo->hasObject(*rev))
|
||||
throw Error(
|
||||
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
|
||||
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
|
||||
ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
|
||||
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
|
||||
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the " ANSI_BOLD
|
||||
"ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD "allRefs = true;" ANSI_NORMAL
|
||||
" to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
|
||||
rev->gitRev(),
|
||||
ref,
|
||||
repoInfo.locationToArg());
|
||||
} else
|
||||
input.attrs.insert_or_assign("rev", repo->resolveRef(ref).gitRev());
|
||||
|
||||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
||||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in
|
||||
// the remainder
|
||||
}
|
||||
|
||||
auto repo = GitRepo::openRepo(repoDir);
|
||||
@ -670,7 +682,9 @@ struct GitInputScheme : InputScheme
|
||||
auto isShallow = repo->isShallow();
|
||||
|
||||
if (isShallow && !getShallowAttr(input))
|
||||
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.locationToArg());
|
||||
throw Error(
|
||||
"'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified",
|
||||
repoInfo.locationToArg());
|
||||
|
||||
// FIXME: check whether rev is an ancestor of ref?
|
||||
|
||||
@ -682,8 +696,7 @@ struct GitInputScheme : InputScheme
|
||||
});
|
||||
|
||||
if (!getShallowAttr(input))
|
||||
infoAttrs.insert_or_assign("revCount",
|
||||
getRevCount(repoInfo, repoDir, rev));
|
||||
infoAttrs.insert_or_assign("revCount", getRevCount(repoInfo, repoDir, rev));
|
||||
|
||||
printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.locationToArg());
|
||||
|
||||
@ -701,21 +714,25 @@ struct GitInputScheme : InputScheme
|
||||
|
||||
for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev, exportIgnore)) {
|
||||
auto resolved = repo->resolveSubmoduleUrl(submodule.url);
|
||||
debug("Git submodule %s: %s %s %s -> %s",
|
||||
submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved);
|
||||
debug(
|
||||
"Git submodule %s: %s %s %s -> %s",
|
||||
submodule.path,
|
||||
submodule.url,
|
||||
submodule.branch,
|
||||
submoduleRev.gitRev(),
|
||||
resolved);
|
||||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "git");
|
||||
attrs.insert_or_assign("url", resolved);
|
||||
if (submodule.branch != "")
|
||||
attrs.insert_or_assign("ref", submodule.branch);
|
||||
attrs.insert_or_assign("rev", submoduleRev.gitRev());
|
||||
attrs.insert_or_assign("exportIgnore", Explicit<bool>{ exportIgnore });
|
||||
attrs.insert_or_assign("submodules", Explicit<bool>{ true });
|
||||
attrs.insert_or_assign("lfs", Explicit<bool>{ smudgeLfs });
|
||||
attrs.insert_or_assign("allRefs", Explicit<bool>{ true });
|
||||
attrs.insert_or_assign("exportIgnore", Explicit<bool>{exportIgnore});
|
||||
attrs.insert_or_assign("submodules", Explicit<bool>{true});
|
||||
attrs.insert_or_assign("lfs", Explicit<bool>{smudgeLfs});
|
||||
attrs.insert_or_assign("allRefs", Explicit<bool>{true});
|
||||
auto submoduleInput = fetchers::Input::fromAttrs(*input.settings, std::move(attrs));
|
||||
auto [submoduleAccessor, submoduleInput2] =
|
||||
submoduleInput.getAccessor(store);
|
||||
auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store);
|
||||
submoduleAccessor->setPathDisplay("«" + submoduleInput.to_string() + "»");
|
||||
mounts.insert_or_assign(submodule.path, submoduleAccessor);
|
||||
}
|
||||
@ -734,10 +751,8 @@ struct GitInputScheme : InputScheme
|
||||
return {accessor, std::move(input)};
|
||||
}
|
||||
|
||||
std::pair<ref<SourceAccessor>, Input> getAccessorFromWorkdir(
|
||||
ref<Store> store,
|
||||
RepoInfo & repoInfo,
|
||||
Input && input) const
|
||||
std::pair<ref<SourceAccessor>, Input>
|
||||
getAccessorFromWorkdir(ref<Store> store, RepoInfo & repoInfo, Input && input) const
|
||||
{
|
||||
auto repoPath = repoInfo.getPath().value();
|
||||
|
||||
@ -751,9 +766,7 @@ struct GitInputScheme : InputScheme
|
||||
auto exportIgnore = getExportIgnoreAttr(input);
|
||||
|
||||
ref<SourceAccessor> accessor =
|
||||
repo->getAccessor(repoInfo.workdirInfo,
|
||||
exportIgnore,
|
||||
makeNotAllowedError(repoPath));
|
||||
repo->getAccessor(repoInfo.workdirInfo, exportIgnore, makeNotAllowedError(repoPath));
|
||||
|
||||
/* If the repo has submodules, return a mounted input accessor
|
||||
consisting of the accessor for the top-level repo and the
|
||||
@ -766,14 +779,13 @@ struct GitInputScheme : InputScheme
|
||||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "git");
|
||||
attrs.insert_or_assign("url", submodulePath.string());
|
||||
attrs.insert_or_assign("exportIgnore", Explicit<bool>{ exportIgnore });
|
||||
attrs.insert_or_assign("submodules", Explicit<bool>{ true });
|
||||
attrs.insert_or_assign("exportIgnore", Explicit<bool>{exportIgnore});
|
||||
attrs.insert_or_assign("submodules", Explicit<bool>{true});
|
||||
// TODO: fall back to getAccessorFromCommit-like fetch when submodules aren't checked out
|
||||
// attrs.insert_or_assign("allRefs", Explicit<bool>{ true });
|
||||
|
||||
auto submoduleInput = fetchers::Input::fromAttrs(*input.settings, std::move(attrs));
|
||||
auto [submoduleAccessor, submoduleInput2] =
|
||||
submoduleInput.getAccessor(store);
|
||||
auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store);
|
||||
submoduleAccessor->setPathDisplay("«" + submoduleInput.to_string() + "»");
|
||||
|
||||
/* If the submodule is dirty, mark this repo dirty as
|
||||
@ -798,18 +810,15 @@ struct GitInputScheme : InputScheme
|
||||
auto rev = repoInfo.workdirInfo.headRev.value_or(nullRev);
|
||||
|
||||
input.attrs.insert_or_assign("rev", rev.gitRev());
|
||||
input.attrs.insert_or_assign("revCount",
|
||||
rev == nullRev ? 0 : getRevCount(repoInfo, repoPath, rev));
|
||||
input.attrs.insert_or_assign("revCount", rev == nullRev ? 0 : getRevCount(repoInfo, repoPath, rev));
|
||||
|
||||
verifyCommit(input, repo);
|
||||
} else {
|
||||
repoInfo.warnDirty(*input.settings);
|
||||
|
||||
if (repoInfo.workdirInfo.headRev) {
|
||||
input.attrs.insert_or_assign("dirtyRev",
|
||||
repoInfo.workdirInfo.headRev->gitRev() + "-dirty");
|
||||
input.attrs.insert_or_assign("dirtyShortRev",
|
||||
repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty");
|
||||
input.attrs.insert_or_assign("dirtyRev", repoInfo.workdirInfo.headRev->gitRev() + "-dirty");
|
||||
input.attrs.insert_or_assign("dirtyShortRev", repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty");
|
||||
}
|
||||
|
||||
verifyCommit(input, nullptr);
|
||||
@ -817,9 +826,7 @@ struct GitInputScheme : InputScheme
|
||||
|
||||
input.attrs.insert_or_assign(
|
||||
"lastModified",
|
||||
repoInfo.workdirInfo.headRev
|
||||
? getLastModified(repoInfo, repoPath, *repoInfo.workdirInfo.headRev)
|
||||
: 0);
|
||||
repoInfo.workdirInfo.headRev ? getLastModified(repoInfo, repoPath, *repoInfo.workdirInfo.headRev) : 0);
|
||||
|
||||
return {accessor, std::move(input)};
|
||||
}
|
||||
@ -830,8 +837,7 @@ struct GitInputScheme : InputScheme
|
||||
|
||||
auto repoInfo = getRepoInfo(input);
|
||||
|
||||
if (getExportIgnoreAttr(input)
|
||||
&& getSubmodulesAttr(input)) {
|
||||
if (getExportIgnoreAttr(input) && getSubmodulesAttr(input)) {
|
||||
/* In this situation, we don't have a git CLI behavior that we can copy.
|
||||
`git archive` does not support submodules, so it is unclear whether
|
||||
rules from the parent should affect the submodule or not.
|
||||
@ -840,26 +846,26 @@ struct GitInputScheme : InputScheme
|
||||
throw UnimplementedError("exportIgnore and submodules are not supported together yet");
|
||||
}
|
||||
|
||||
auto [accessor, final] =
|
||||
input.getRef() || input.getRev() || !repoInfo.getPath()
|
||||
? getAccessorFromCommit(store, repoInfo, std::move(input))
|
||||
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
|
||||
auto [accessor, final] = input.getRef() || input.getRev() || !repoInfo.getPath()
|
||||
? getAccessorFromCommit(store, repoInfo, std::move(input))
|
||||
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
|
||||
|
||||
return {accessor, std::move(final)};
|
||||
}
|
||||
|
||||
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
|
||||
{
|
||||
auto makeFingerprint = [&](const Hash & rev)
|
||||
{
|
||||
return rev.gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : "") + (getLfsAttr(input) ? ";l" : "");
|
||||
auto makeFingerprint = [&](const Hash & rev) {
|
||||
return rev.gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : "")
|
||||
+ (getLfsAttr(input) ? ";l" : "");
|
||||
};
|
||||
|
||||
if (auto rev = input.getRev())
|
||||
return makeFingerprint(*rev);
|
||||
else {
|
||||
auto repoInfo = getRepoInfo(input);
|
||||
if (auto repoPath = repoInfo.getPath(); repoPath && repoInfo.workdirInfo.headRev && repoInfo.workdirInfo.submodules.empty()) {
|
||||
if (auto repoPath = repoInfo.getPath();
|
||||
repoPath && repoInfo.workdirInfo.headRev && repoInfo.workdirInfo.submodules.empty()) {
|
||||
/* Calculate a fingerprint that takes into account the
|
||||
deleted and modified/added files. */
|
||||
HashSink hashSink{HashAlgorithm::SHA512};
|
||||
@ -873,7 +879,7 @@ struct GitInputScheme : InputScheme
|
||||
writeString(file.abs(), hashSink);
|
||||
}
|
||||
return makeFingerprint(*repoInfo.workdirInfo.headRev)
|
||||
+ ";d=" + hashSink.finish().first.to_string(HashFormat::Base16, false);
|
||||
+ ";d=" + hashSink.finish().first.to_string(HashFormat::Base16, false);
|
||||
}
|
||||
return std::nullopt;
|
||||
}
|
||||
|
@ -29,13 +29,14 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
|
||||
|
||||
struct GitArchiveInputScheme : InputScheme
|
||||
{
|
||||
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
|
||||
virtual std::optional<std::pair<std::string, std::string>>
|
||||
accessHeaderFromToken(const std::string & token) const = 0;
|
||||
|
||||
std::optional<Input> inputFromURL(
|
||||
const fetchers::Settings & settings,
|
||||
const ParsedURL & url, bool requireTree) const override
|
||||
std::optional<Input>
|
||||
inputFromURL(const fetchers::Settings & settings, const ParsedURL & url, bool requireTree) const override
|
||||
{
|
||||
if (url.scheme != schemeName()) return {};
|
||||
if (url.scheme != schemeName())
|
||||
return {};
|
||||
|
||||
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
|
||||
|
||||
@ -68,20 +69,18 @@ struct GitArchiveInputScheme : InputScheme
|
||||
} else if (size < 2)
|
||||
throw BadURL("URL '%s' is invalid", url);
|
||||
|
||||
for (auto &[name, value] : url.query) {
|
||||
for (auto & [name, value] : url.query) {
|
||||
if (name == "rev") {
|
||||
if (rev)
|
||||
throw BadURL("URL '%s' contains multiple commit hashes", url);
|
||||
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
|
||||
}
|
||||
else if (name == "ref") {
|
||||
} else if (name == "ref") {
|
||||
if (!std::regex_match(value, refRegex))
|
||||
throw BadURL("URL '%s' contains an invalid branch/tag name", url);
|
||||
if (ref)
|
||||
throw BadURL("URL '%s' contains multiple branch/tag names", url);
|
||||
ref = value;
|
||||
}
|
||||
else if (name == "host") {
|
||||
} else if (name == "host") {
|
||||
if (!std::regex_match(value, hostRegex))
|
||||
throw BadURL("URL '%s' contains an invalid instance host", url);
|
||||
host_url = value;
|
||||
@ -93,12 +92,15 @@ struct GitArchiveInputScheme : InputScheme
|
||||
throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url, *ref, rev->gitRev());
|
||||
|
||||
Input input{settings};
|
||||
input.attrs.insert_or_assign("type", std::string { schemeName() });
|
||||
input.attrs.insert_or_assign("type", std::string{schemeName()});
|
||||
input.attrs.insert_or_assign("owner", path[0]);
|
||||
input.attrs.insert_or_assign("repo", path[1]);
|
||||
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
if (ref) input.attrs.insert_or_assign("ref", *ref);
|
||||
if (host_url) input.attrs.insert_or_assign("host", *host_url);
|
||||
if (rev)
|
||||
input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
if (ref)
|
||||
input.attrs.insert_or_assign("ref", *ref);
|
||||
if (host_url)
|
||||
input.attrs.insert_or_assign("host", *host_url);
|
||||
|
||||
auto narHash = url.query.find("narHash");
|
||||
if (narHash != url.query.end())
|
||||
@ -121,9 +123,7 @@ struct GitArchiveInputScheme : InputScheme
|
||||
};
|
||||
}
|
||||
|
||||
std::optional<Input> inputFromAttrs(
|
||||
const fetchers::Settings & settings,
|
||||
const Attrs & attrs) const override
|
||||
std::optional<Input> inputFromAttrs(const fetchers::Settings & settings, const Attrs & attrs) const override
|
||||
{
|
||||
getStrAttr(attrs, "owner");
|
||||
getStrAttr(attrs, "repo");
|
||||
@ -141,10 +141,12 @@ struct GitArchiveInputScheme : InputScheme
|
||||
auto rev = input.getRev();
|
||||
auto path = owner + "/" + repo;
|
||||
assert(!(ref && rev));
|
||||
if (ref) path += "/" + *ref;
|
||||
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
|
||||
auto url = ParsedURL {
|
||||
.scheme = std::string { schemeName() },
|
||||
if (ref)
|
||||
path += "/" + *ref;
|
||||
if (rev)
|
||||
path += "/" + rev->to_string(HashFormat::Base16, false);
|
||||
auto url = ParsedURL{
|
||||
.scheme = std::string{schemeName()},
|
||||
.path = path,
|
||||
};
|
||||
if (auto narHash = input.getNarHash())
|
||||
@ -155,15 +157,15 @@ struct GitArchiveInputScheme : InputScheme
|
||||
return url;
|
||||
}
|
||||
|
||||
Input applyOverrides(
|
||||
const Input & _input,
|
||||
std::optional<std::string> ref,
|
||||
std::optional<Hash> rev) const override
|
||||
Input applyOverrides(const Input & _input, std::optional<std::string> ref, std::optional<Hash> rev) const override
|
||||
{
|
||||
auto input(_input);
|
||||
if (rev && ref)
|
||||
throw BadURL("cannot apply both a commit hash (%s) and a branch/tag name ('%s') to input '%s'",
|
||||
rev->gitRev(), *ref, input.to_string());
|
||||
throw BadURL(
|
||||
"cannot apply both a commit hash (%s) and a branch/tag name ('%s') to input '%s'",
|
||||
rev->gitRev(),
|
||||
*ref,
|
||||
input.to_string());
|
||||
if (rev) {
|
||||
input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
input.attrs.erase("ref");
|
||||
@ -176,22 +178,18 @@ struct GitArchiveInputScheme : InputScheme
|
||||
}
|
||||
|
||||
// Search for the longest possible match starting from the begining and ending at either the end or a path segment.
|
||||
std::optional<std::string> getAccessToken(const fetchers::Settings & settings, const std::string & host, const std::string & url) const override
|
||||
std::optional<std::string> getAccessToken(
|
||||
const fetchers::Settings & settings, const std::string & host, const std::string & url) const override
|
||||
{
|
||||
auto tokens = settings.accessTokens.get();
|
||||
std::string answer;
|
||||
size_t answer_match_len = 0;
|
||||
if(! url.empty()) {
|
||||
if (!url.empty()) {
|
||||
for (auto & token : tokens) {
|
||||
auto first = url.find(token.first);
|
||||
if (
|
||||
first != std::string::npos
|
||||
&& token.first.length() > answer_match_len
|
||||
&& first == 0
|
||||
&& url.substr(0,token.first.length()) == token.first
|
||||
&& (url.length() == token.first.length() || url[token.first.length()] == '/')
|
||||
)
|
||||
{
|
||||
if (first != std::string::npos && token.first.length() > answer_match_len && first == 0
|
||||
&& url.substr(0, token.first.length()) == token.first
|
||||
&& (url.length() == token.first.length() || url[token.first.length()] == '/')) {
|
||||
answer = token.second;
|
||||
answer_match_len = token.first.length();
|
||||
}
|
||||
@ -204,21 +202,17 @@ struct GitArchiveInputScheme : InputScheme
|
||||
return {};
|
||||
}
|
||||
|
||||
Headers makeHeadersWithAuthTokens(
|
||||
const fetchers::Settings & settings,
|
||||
const std::string & host,
|
||||
const Input & input) const
|
||||
Headers
|
||||
makeHeadersWithAuthTokens(const fetchers::Settings & settings, const std::string & host, const Input & input) const
|
||||
{
|
||||
auto owner = getStrAttr(input.attrs, "owner");
|
||||
auto repo = getStrAttr(input.attrs, "repo");
|
||||
auto hostAndPath = fmt( "%s/%s/%s", host, owner, repo);
|
||||
auto hostAndPath = fmt("%s/%s/%s", host, owner, repo);
|
||||
return makeHeadersWithAuthTokens(settings, host, hostAndPath);
|
||||
}
|
||||
|
||||
Headers makeHeadersWithAuthTokens(
|
||||
const fetchers::Settings & settings,
|
||||
const std::string & host,
|
||||
const std::string & hostAndPath) const
|
||||
const fetchers::Settings & settings, const std::string & host, const std::string & hostAndPath) const
|
||||
{
|
||||
Headers headers;
|
||||
auto accessToken = getAccessToken(settings, host, hostAndPath);
|
||||
@ -250,7 +244,8 @@ struct GitArchiveInputScheme : InputScheme
|
||||
|
||||
std::pair<Input, TarballInfo> downloadArchive(ref<Store> store, Input input) const
|
||||
{
|
||||
if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
|
||||
if (!maybeGetStrAttr(input.attrs, "ref"))
|
||||
input.attrs.insert_or_assign("ref", "HEAD");
|
||||
|
||||
std::optional<Hash> upstreamTreeHash;
|
||||
|
||||
@ -275,7 +270,7 @@ struct GitArchiveInputScheme : InputScheme
|
||||
auto treeHash = getRevAttr(*treeHashAttrs, "treeHash");
|
||||
auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified");
|
||||
if (getTarballCache()->hasObject(treeHash))
|
||||
return {std::move(input), TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }};
|
||||
return {std::move(input), TarballInfo{.treeHash = treeHash, .lastModified = (time_t) lastModified}};
|
||||
else
|
||||
debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev());
|
||||
}
|
||||
@ -290,10 +285,10 @@ struct GitArchiveInputScheme : InputScheme
|
||||
getFileTransfer()->download(std::move(req), sink);
|
||||
});
|
||||
|
||||
auto act = std::make_unique<Activity>(*logger, lvlInfo, actUnknown,
|
||||
fmt("unpacking '%s' into the Git cache", input.to_string()));
|
||||
auto act = std::make_unique<Activity>(
|
||||
*logger, lvlInfo, actUnknown, fmt("unpacking '%s' into the Git cache", input.to_string()));
|
||||
|
||||
TarArchive archive { *source };
|
||||
TarArchive archive{*source};
|
||||
auto tarballCache = getTarballCache();
|
||||
auto parseSink = tarballCache->getFileSystemObjectSink();
|
||||
auto lastModified = unpackTarfileToSink(archive, *parseSink);
|
||||
@ -301,22 +296,20 @@ struct GitArchiveInputScheme : InputScheme
|
||||
|
||||
act.reset();
|
||||
|
||||
TarballInfo tarballInfo {
|
||||
.treeHash = tarballCache->dereferenceSingletonDirectory(tree),
|
||||
.lastModified = lastModified
|
||||
};
|
||||
TarballInfo tarballInfo{
|
||||
.treeHash = tarballCache->dereferenceSingletonDirectory(tree), .lastModified = lastModified};
|
||||
|
||||
cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}});
|
||||
cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}});
|
||||
|
||||
#if 0
|
||||
#if 0
|
||||
if (upstreamTreeHash != tarballInfo.treeHash)
|
||||
warn(
|
||||
"Git tree hash mismatch for revision '%s' of '%s': "
|
||||
"expected '%s', got '%s'. "
|
||||
"This can happen if the Git repository uses submodules.",
|
||||
rev->gitRev(), input.to_string(), upstreamTreeHash->gitRev(), tarballInfo.treeHash.gitRev());
|
||||
#endif
|
||||
#endif
|
||||
|
||||
return {std::move(input), tarballInfo};
|
||||
}
|
||||
@ -325,15 +318,12 @@ struct GitArchiveInputScheme : InputScheme
|
||||
{
|
||||
auto [input, tarballInfo] = downloadArchive(store, _input);
|
||||
|
||||
#if 0
|
||||
#if 0
|
||||
input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev());
|
||||
#endif
|
||||
#endif
|
||||
input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified));
|
||||
|
||||
auto accessor = getTarballCache()->getAccessor(
|
||||
tarballInfo.treeHash,
|
||||
false,
|
||||
"«" + input.to_string() + "»");
|
||||
auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false, "«" + input.to_string() + "»");
|
||||
|
||||
return {accessor, input};
|
||||
}
|
||||
@ -345,8 +335,7 @@ struct GitArchiveInputScheme : InputScheme
|
||||
locking. FIXME: in the future, we may want to require a Git
|
||||
tree hash instead of a NAR hash. */
|
||||
return input.getRev().has_value()
|
||||
&& (input.settings->trustTarballsFromGitForges ||
|
||||
input.getNarHash().has_value());
|
||||
&& (input.settings->trustTarballsFromGitForges || input.getNarHash().has_value());
|
||||
}
|
||||
|
||||
std::optional<ExperimentalFeature> experimentalFeature() const override
|
||||
@ -365,7 +354,10 @@ struct GitArchiveInputScheme : InputScheme
|
||||
|
||||
struct GitHubInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
std::string_view schemeName() const override { return "github"; }
|
||||
std::string_view schemeName() const override
|
||||
{
|
||||
return "github";
|
||||
}
|
||||
|
||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||
{
|
||||
@ -397,22 +389,20 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
auto host = getHost(input);
|
||||
auto url = fmt(
|
||||
host == "github.com"
|
||||
? "https://api.%s/repos/%s/%s/commits/%s"
|
||||
: "https://%s/api/v3/repos/%s/%s/commits/%s",
|
||||
host, getOwner(input), getRepo(input), *input.getRef());
|
||||
host == "github.com" ? "https://api.%s/repos/%s/%s/commits/%s" : "https://%s/api/v3/repos/%s/%s/commits/%s",
|
||||
host,
|
||||
getOwner(input),
|
||||
getRepo(input),
|
||||
*input.getRef());
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
|
||||
|
||||
auto json = nlohmann::json::parse(
|
||||
readFile(
|
||||
store->toRealPath(
|
||||
downloadFile(store, url, "source", headers).storePath)));
|
||||
auto json =
|
||||
nlohmann::json::parse(readFile(store->toRealPath(downloadFile(store, url, "source", headers).storePath)));
|
||||
|
||||
return RefInfo {
|
||||
.rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1),
|
||||
.treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, HashAlgorithm::SHA1)
|
||||
};
|
||||
return RefInfo{
|
||||
.rev = Hash::parseAny(std::string{json["sha"]}, HashAlgorithm::SHA1),
|
||||
.treeHash = Hash::parseAny(std::string{json["commit"]["tree"]["sha"]}, HashAlgorithm::SHA1)};
|
||||
}
|
||||
|
||||
DownloadUrl getDownloadUrl(const Input & input) const override
|
||||
@ -423,24 +413,20 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
||||
|
||||
// If we have no auth headers then we default to the public archive
|
||||
// urls so we do not run into rate limits.
|
||||
const auto urlFmt =
|
||||
host != "github.com"
|
||||
? "https://%s/api/v3/repos/%s/%s/tarball/%s"
|
||||
: headers.empty()
|
||||
? "https://%s/%s/%s/archive/%s.tar.gz"
|
||||
: "https://api.%s/repos/%s/%s/tarball/%s";
|
||||
const auto urlFmt = host != "github.com" ? "https://%s/api/v3/repos/%s/%s/tarball/%s"
|
||||
: headers.empty() ? "https://%s/%s/%s/archive/%s.tar.gz"
|
||||
: "https://api.%s/repos/%s/%s/tarball/%s";
|
||||
|
||||
const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
|
||||
input.getRev()->to_string(HashFormat::Base16, false));
|
||||
const auto url =
|
||||
fmt(urlFmt, host, getOwner(input), getRepo(input), input.getRev()->to_string(HashFormat::Base16, false));
|
||||
|
||||
return DownloadUrl { url, headers };
|
||||
return DownloadUrl{url, headers};
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) const override
|
||||
{
|
||||
auto host = getHost(input);
|
||||
Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s.git",
|
||||
host, getOwner(input), getRepo(input)))
|
||||
Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s.git", host, getOwner(input), getRepo(input)))
|
||||
.applyOverrides(input.getRef(), input.getRev())
|
||||
.clone(destDir);
|
||||
}
|
||||
@ -448,7 +434,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
|
||||
|
||||
struct GitLabInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
std::string_view schemeName() const override { return "gitlab"; }
|
||||
std::string_view schemeName() const override
|
||||
{
|
||||
return "gitlab";
|
||||
}
|
||||
|
||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||
{
|
||||
@ -462,32 +451,33 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
||||
auto fldsplit = token.find_first_of(':');
|
||||
// n.b. C++20 would allow: if (token.starts_with("OAuth2:")) ...
|
||||
if ("OAuth2" == token.substr(0, fldsplit))
|
||||
return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit+1)));
|
||||
return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit + 1)));
|
||||
if ("PAT" == token.substr(0, fldsplit))
|
||||
return std::make_pair("Private-token", token.substr(fldsplit+1));
|
||||
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
|
||||
return std::make_pair(token.substr(0,fldsplit), token.substr(fldsplit+1));
|
||||
return std::make_pair("Private-token", token.substr(fldsplit + 1));
|
||||
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
|
||||
return std::make_pair(token.substr(0, fldsplit), token.substr(fldsplit + 1));
|
||||
}
|
||||
|
||||
RefInfo getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
|
||||
// See rate limiting note below
|
||||
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
|
||||
auto url =
|
||||
fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
|
||||
host,
|
||||
getStrAttr(input.attrs, "owner"),
|
||||
getStrAttr(input.attrs, "repo"),
|
||||
*input.getRef());
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
|
||||
|
||||
auto json = nlohmann::json::parse(
|
||||
readFile(
|
||||
store->toRealPath(
|
||||
downloadFile(store, url, "source", headers).storePath)));
|
||||
auto json =
|
||||
nlohmann::json::parse(readFile(store->toRealPath(downloadFile(store, url, "source", headers).storePath)));
|
||||
|
||||
if (json.is_array() && json.size() >= 1 && json[0]["id"] != nullptr) {
|
||||
return RefInfo {
|
||||
.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
|
||||
};
|
||||
} if (json.is_array() && json.size() == 0) {
|
||||
return RefInfo{.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)};
|
||||
}
|
||||
if (json.is_array() && json.size() == 0) {
|
||||
throw Error("No commits returned by GitLab API -- does the git ref really exist?");
|
||||
} else {
|
||||
throw Error("Unexpected response received from GitLab: %s", json);
|
||||
@ -502,20 +492,24 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
||||
// is 10 reqs/sec/ip-addr. See
|
||||
// https://docs.gitlab.com/ee/user/gitlab_com/index.html#gitlabcom-specific-rate-limits
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
|
||||
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||
input.getRev()->to_string(HashFormat::Base16, false));
|
||||
auto url =
|
||||
fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
|
||||
host,
|
||||
getStrAttr(input.attrs, "owner"),
|
||||
getStrAttr(input.attrs, "repo"),
|
||||
input.getRev()->to_string(HashFormat::Base16, false));
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
|
||||
return DownloadUrl { url, headers };
|
||||
return DownloadUrl{url, headers};
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
|
||||
// FIXME: get username somewhere
|
||||
Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s.git",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
|
||||
Input::fromURL(
|
||||
*input.settings,
|
||||
fmt("git+https://%s/%s/%s.git", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
|
||||
.applyOverrides(input.getRef(), input.getRev())
|
||||
.clone(destDir);
|
||||
}
|
||||
@@ -523,7 +517,10 @@ struct GitLabInputScheme : GitArchiveInputScheme

struct SourceHutInputScheme : GitArchiveInputScheme
{
std::string_view schemeName() const override { return "sourcehut"; }
std::string_view schemeName() const override
{
return "sourcehut";
}

std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@@ -543,15 +540,14 @@ struct SourceHutInputScheme : GitArchiveInputScheme
auto ref = *input.getRef();

auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
auto base_url = fmt("https://%s/%s/%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"));
auto base_url =
fmt("https://%s/%s/%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"));

Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);

std::string refUri;
if (ref == "HEAD") {
auto file = store->toRealPath(
downloadFile(store, fmt("%s/HEAD", base_url), "source", headers).storePath);
auto file = store->toRealPath(downloadFile(store, fmt("%s/HEAD", base_url), "source", headers).storePath);
std::ifstream is(file);
std::string line;
getline(is, line);
@@ -566,13 +562,12 @@ struct SourceHutInputScheme : GitArchiveInputScheme
}
std::regex refRegex(refUri);

auto file = store->toRealPath(
downloadFile(store, fmt("%s/info/refs", base_url), "source", headers).storePath);
auto file = store->toRealPath(downloadFile(store, fmt("%s/info/refs", base_url), "source", headers).storePath);
std::ifstream is(file);

std::string line;
std::optional<std::string> id;
while(!id && getline(is, line)) {
while (!id && getline(is, line)) {
auto parsedLine = git::parseLsRemoteLine(line);
if (parsedLine && parsedLine->reference && std::regex_match(*parsedLine->reference, refRegex))
id = parsedLine->target;
@@ -581,27 +576,29 @@ struct SourceHutInputScheme : GitArchiveInputScheme
if (!id)
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);

return RefInfo {
.rev = Hash::parseAny(*id, HashAlgorithm::SHA1)
};
return RefInfo{.rev = Hash::parseAny(*id, HashAlgorithm::SHA1)};
}

DownloadUrl getDownloadUrl(const Input & input) const override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(HashFormat::Base16, false));
auto url =
fmt("https://%s/%s/%s/archive/%s.tar.gz",
host,
getStrAttr(input.attrs, "owner"),
getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(HashFormat::Base16, false));

Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
return DownloadUrl { url, headers };
return DownloadUrl{url, headers};
}

void clone(const Input & input, const Path & destDir) const override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
Input::fromURL(
*input.settings,
fmt("git+https://%s/%s/%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}

@@ -12,7 +12,7 @@ namespace nix::fetchers {
*/
struct Cache
{
virtual ~Cache() { }
virtual ~Cache() {}

/**
* A domain is a partition of the key/value cache for a particular
@@ -28,22 +28,18 @@ struct Cache
/**
* Add a key/value pair to the cache.
*/
virtual void upsert(
const Key & key,
const Attrs & value) = 0;
virtual void upsert(const Key & key, const Attrs & value) = 0;

/**
* Look up a key with infinite TTL.
*/
virtual std::optional<Attrs> lookup(
const Key & key) = 0;
virtual std::optional<Attrs> lookup(const Key & key) = 0;

/**
* Look up a key. Return nothing if its TTL has exceeded
* `settings.tarballTTL`.
*/
virtual std::optional<Attrs> lookupWithTTL(
const Key & key) = 0;
virtual std::optional<Attrs> lookupWithTTL(const Key & key) = 0;

struct Result
{
@@ -55,19 +51,14 @@ struct Cache
* Look up a key. Return a bool denoting whether its TTL has
* exceeded `settings.tarballTTL`.
*/
virtual std::optional<Result> lookupExpired(
const Key & key) = 0;
virtual std::optional<Result> lookupExpired(const Key & key) = 0;

/**
* Insert a cache entry that has a store path associated with
* it. Such cache entries are always considered stale if the
* associated store path is invalid.
*/
virtual void upsert(
Key key,
Store & store,
Attrs value,
const StorePath & storePath) = 0;
virtual void upsert(Key key, Store & store, Attrs value, const StorePath & storePath) = 0;

struct ResultWithStorePath : Result
{
@@ -78,17 +69,13 @@ struct Cache
* Look up a store path in the cache. The returned store path will
* be valid, but it may be expired.
*/
virtual std::optional<ResultWithStorePath> lookupStorePath(
Key key,
Store & store) = 0;
virtual std::optional<ResultWithStorePath> lookupStorePath(Key key, Store & store) = 0;

/**
* Look up a store path in the cache. Return nothing if its TTL
* has exceeded `settings.tarballTTL`.
*/
virtual std::optional<ResultWithStorePath> lookupStorePathWithTTL(
Key key,
Store & store) = 0;
virtual std::optional<ResultWithStorePath> lookupStorePathWithTTL(Key key, Store & store) = 0;
};

ref<Cache> getCache();

@@ -15,7 +15,10 @@ struct Settings : public Config
{
Settings();

Setting<StringMap> accessTokens{this, {}, "access-tokens",
Setting<StringMap> accessTokens{
this,
{},
"access-tokens",
R"(
Access tokens used to access protected GitHub, GitLab, or
other locations requiring token-based authentication.
@@ -66,11 +69,9 @@ struct Settings : public Config
value.
)"};

Setting<bool> allowDirty{this, true, "allow-dirty",
"Whether to allow dirty Git/Mercurial trees."};
Setting<bool> allowDirty{this, true, "allow-dirty", "Whether to allow dirty Git/Mercurial trees."};

Setting<bool> warnDirty{this, true, "warn-dirty",
"Whether to warn about dirty Git/Mercurial trees."};
Setting<bool> warnDirty{this, true, "warn-dirty", "Whether to warn about dirty Git/Mercurial trees."};

Setting<bool> allowDirtyLocks{
this,
@@ -89,7 +90,9 @@ struct Settings : public Config
Xp::Flakes};

Setting<bool> trustTarballsFromGitForges{
this, true, "trust-tarballs-from-git-forges",
this,
true,
"trust-tarballs-from-git-forges",
R"(
If enabled (the default), Nix will consider tarballs from
GitHub and similar Git forges to be locked if a Git revision
@@ -103,13 +106,18 @@ struct Settings : public Config
e.g. `github:NixOS/patchelf/7c2f768bf9601268a4e71c2ebe91e2011918a70f?narHash=sha256-PPXqKY2hJng4DBVE0I4xshv/vGLUskL7jl53roB8UdU%3D`.
)"};

Setting<std::string> flakeRegistry{this, "https://channels.nixos.org/flake-registry.json", "flake-registry",
Setting<std::string> flakeRegistry{
this,
"https://channels.nixos.org/flake-registry.json",
"flake-registry",
R"(
Path or URI of the global flake registry.

When empty, disables the global flake registry.
)",
{}, true, Xp::Flakes};
{},
true,
Xp::Flakes};
};

}

@@ -13,7 +13,11 @@

#include "nix/util/ref.hh"

namespace nix { class Store; class StorePath; struct SourceAccessor; }
namespace nix {
class Store;
class StorePath;
struct SourceAccessor;
}

namespace nix::fetchers {

@@ -36,7 +40,8 @@ struct Input

Input(const Settings & settings)
: settings{&settings}
{ }
{
}

std::shared_ptr<InputScheme> scheme; // note: can be null
Attrs attrs;
@@ -52,22 +57,16 @@ public:
*
* The URL indicate which sort of fetcher, and provides information to that fetcher.
*/
static Input fromURL(
const Settings & settings,
const std::string & url, bool requireTree = true);
static Input fromURL(const Settings & settings, const std::string & url, bool requireTree = true);

static Input fromURL(
const Settings & settings,
const ParsedURL & url, bool requireTree = true);
static Input fromURL(const Settings & settings, const ParsedURL & url, bool requireTree = true);

/**
* Create an `Input` from a an `Attrs`.
*
* The URL indicate which sort of fetcher, and provides information to that fetcher.
*/
static Input fromAttrs(
const Settings & settings,
Attrs && attrs);
static Input fromAttrs(const Settings & settings, Attrs && attrs);

ParsedURL toURL() const;

@@ -108,9 +107,9 @@ public:
*/
bool isFinal() const;

bool operator ==(const Input & other) const noexcept;
bool operator==(const Input & other) const noexcept;

bool operator <(const Input & other) const
bool operator<(const Input & other) const
{
return attrs < other.attrs;
}
@@ -149,9 +148,7 @@ private:

public:

Input applyOverrides(
std::optional<std::string> ref,
std::optional<Hash> rev) const;
Input applyOverrides(std::optional<std::string> ref, std::optional<Hash> rev) const;

void clone(const Path & destDir) const;

@@ -161,10 +158,7 @@ public:
* Write a file to this input, for input types that support
* writing. Optionally commit the change (for e.g. Git inputs).
*/
void putFile(
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const;
void putFile(const CanonPath & path, std::string_view contents, std::optional<std::string> commitMsg) const;

std::string getName() const;

@@ -200,16 +194,12 @@ public:
*/
struct InputScheme
{
virtual ~InputScheme()
{ }
virtual ~InputScheme() {}

virtual std::optional<Input> inputFromURL(
const Settings & settings,
const ParsedURL & url, bool requireTree) const = 0;
virtual std::optional<Input>
inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const = 0;

virtual std::optional<Input> inputFromAttrs(
const Settings & settings,
const Attrs & attrs) const = 0;
virtual std::optional<Input> inputFromAttrs(const Settings & settings, const Attrs & attrs) const = 0;

/**
* What is the name of the scheme?
@@ -231,10 +221,7 @@ struct InputScheme

virtual ParsedURL toURL(const Input & input) const;

virtual Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
std::optional<Hash> rev) const;
virtual Input applyOverrides(const Input & input, std::optional<std::string> ref, std::optional<Hash> rev) const;

virtual void clone(const Input & input, const Path & destDir) const;

@@ -254,19 +241,30 @@ struct InputScheme
virtual std::optional<ExperimentalFeature> experimentalFeature() const;

virtual bool isDirect(const Input & input) const
{ return true; }
{
return true;
}

virtual std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const
{ return std::nullopt; }
{
return std::nullopt;
}

virtual bool isLocked(const Input & input) const
{ return false; }
{
return false;
}

virtual std::optional<std::string> isRelative(const Input & input) const
{ return std::nullopt; }
{
return std::nullopt;
}

virtual std::optional<std::string> getAccessToken(const fetchers::Settings & settings, const std::string & host, const std::string & url) const
{ return {};}
virtual std::optional<std::string>
getAccessToken(const fetchers::Settings & settings, const std::string & host, const std::string & url) const
{
return {};
}
};

void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
@@ -278,10 +276,10 @@ struct PublicKey
std::string type = "ssh-ed25519";
std::string key;

auto operator <=>(const PublicKey &) const = default;
auto operator<=>(const PublicKey &) const = default;
};

std::string publicKeys_to_string(const std::vector<PublicKey>&);
std::string publicKeys_to_string(const std::vector<PublicKey> &);

}

@@ -5,7 +5,9 @@

namespace nix {

namespace fetchers { struct PublicKey; }
namespace fetchers {
struct PublicKey;
}

/**
* A sink that writes into a Git repository. Note that nothing may be written
@@ -21,8 +23,7 @@ struct GitFileSystemObjectSink : ExtendedFileSystemObjectSink

struct GitRepo
{
virtual ~GitRepo()
{ }
virtual ~GitRepo() {}

static ref<GitRepo> openRepo(const std::filesystem::path & path, bool create = false, bool bare = false);

@@ -86,30 +87,23 @@ struct GitRepo

virtual bool hasObject(const Hash & oid) = 0;

virtual ref<SourceAccessor> getAccessor(
const Hash & rev,
bool exportIgnore,
std::string displayPrefix,
bool smudgeLfs = false) = 0;
virtual ref<SourceAccessor>
getAccessor(const Hash & rev, bool exportIgnore, std::string displayPrefix, bool smudgeLfs = false) = 0;

virtual ref<SourceAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0;
virtual ref<SourceAccessor>
getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0;

virtual ref<GitFileSystemObjectSink> getFileSystemObjectSink() = 0;

virtual void flush() = 0;

virtual void fetch(
const std::string & url,
const std::string & refspec,
bool shallow) = 0;
virtual void fetch(const std::string & url, const std::string & refspec, bool shallow) = 0;

/**
* Verify that commit `rev` is signed by one of the keys in
* `publicKeys`. Throw an error if it isn't.
*/
virtual void verifyCommit(
const Hash & rev,
const std::vector<fetchers::PublicKey> & publicKeys) = 0;
virtual void verifyCommit(const Hash & rev, const std::vector<fetchers::PublicKey> & publicKeys) = 0;

/**
* Given a Git tree hash, compute the hash of its NAR
@@ -131,8 +125,11 @@ ref<GitRepo> getTarballCache();
template<auto del>
struct Deleter
{
template <typename T>
void operator()(T * p) const { del(p); };
template<typename T>
void operator()(T * p) const
{
del(p);
};
};

// A helper to ensure that we don't leak objects returned by libgit2.
@@ -142,11 +139,21 @@ struct Setter
T & t;
typename T::pointer p = nullptr;

Setter(T & t) : t(t) { }
Setter(T & t)
: t(t)
{
}

~Setter() { if (p) t = T(p); }
~Setter()
{
if (p)
t = T(p);
}

operator typename T::pointer * () { return &p; }
operator typename T::pointer *()
{
return &p;
}
};

}

@@ -4,7 +4,9 @@
#include "nix/util/types.hh"
#include "nix/fetchers/fetchers.hh"

namespace nix { class Store; }
namespace nix {
class Store;
}

namespace nix::fetchers {

@@ -34,18 +36,14 @@ struct Registry
Registry(const Settings & settings, RegistryType type)
: settings{settings}
, type{type}
{ }
{
}

static std::shared_ptr<Registry> read(
const Settings & settings,
const Path & path, RegistryType type);
static std::shared_ptr<Registry> read(const Settings & settings, const Path & path, RegistryType type);

void write(const Path & path);

void add(
const Input & from,
const Input & to,
const Attrs & extraAttrs);
void add(const Input & from, const Input & to, const Attrs & extraAttrs);

void remove(const Input & input);
};
@@ -60,10 +58,7 @@ Path getUserRegistryPath();

Registries getRegistries(const Settings & settings, ref<Store> store);

void overrideRegistry(
const Input & from,
const Input & to,
const Attrs & extraAttrs);
void overrideRegistry(const Input & from, const Input & to, const Attrs & extraAttrs);

using RegistryFilter = std::function<bool(Registry::RegistryType)>;

@@ -71,9 +66,6 @@ using RegistryFilter = std::function<bool(Registry::RegistryType)>;
* Rewrite a flakeref using the registries. If `filter` is set, only
* use the registries for which the filter function returns true.
*/
std::pair<Input, Attrs> lookupInRegistries(
ref<Store> store,
const Input & input,
const RegistryFilter & filter = {});
std::pair<Input, Attrs> lookupInRegistries(ref<Store> store, const Input & input, const RegistryFilter & filter = {});

}

@@ -24,11 +24,8 @@ struct DownloadFileResult
std::optional<std::string> immutableUrl;
};

DownloadFileResult downloadFile(
ref<Store> store,
const std::string & url,
const std::string & name,
const Headers & headers = {});
DownloadFileResult
downloadFile(ref<Store> store, const std::string & url, const std::string & name, const Headers & headers = {});

struct DownloadTarballResult
{
@@ -42,9 +39,6 @@ struct DownloadTarballResult
* Download and import a tarball into the Git cache. The result is the
* Git tree hash of the root directory.
*/
ref<SourceAccessor> downloadTarball(
ref<Store> store,
const Settings & settings,
const std::string & url);
ref<SourceAccessor> downloadTarball(ref<Store> store, const Settings & settings, const std::string & url);

}

@@ -8,11 +8,10 @@ std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);

struct IndirectInputScheme : InputScheme
{
std::optional<Input> inputFromURL(
const Settings & settings,
const ParsedURL & url, bool requireTree) const override
std::optional<Input> inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != "flake") return {};
if (url.scheme != "flake")
return {};

auto path = tokenizeString<std::vector<std::string>>(url.path, "/");

@@ -46,8 +45,10 @@ struct IndirectInputScheme : InputScheme
Input input{settings};
input.attrs.insert_or_assign("type", "indirect");
input.attrs.insert_or_assign("id", id);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref);
if (rev)
input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref)
input.attrs.insert_or_assign("ref", *ref);

return input;
}
@@ -67,9 +68,7 @@ struct IndirectInputScheme : InputScheme
};
}

std::optional<Input> inputFromAttrs(
const Settings & settings,
const Attrs & attrs) const override
std::optional<Input> inputFromAttrs(const Settings & settings, const Attrs & attrs) const override
{
auto id = getStrAttr(attrs, "id");
if (!std::regex_match(id, flakeRegex))
@@ -85,19 +84,24 @@ struct IndirectInputScheme : InputScheme
ParsedURL url;
url.scheme = "flake";
url.path = getStrAttr(input.attrs, "id");
if (auto ref = input.getRef()) { url.path += '/'; url.path += *ref; };
if (auto rev = input.getRev()) { url.path += '/'; url.path += rev->gitRev(); };
if (auto ref = input.getRef()) {
url.path += '/';
url.path += *ref;
};
if (auto rev = input.getRev()) {
url.path += '/';
url.path += rev->gitRev();
};
return url;
}

Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
std::optional<Hash> rev) const override
Input applyOverrides(const Input & _input, std::optional<std::string> ref, std::optional<Hash> rev) const override
{
auto input(_input);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref);
if (rev)
input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref)
input.attrs.insert_or_assign("ref", *ref);
return input;
}

@@ -112,7 +116,9 @@ struct IndirectInputScheme : InputScheme
}

bool isDirect(const Input & input) const override
{ return false; }
{
return false;
}
};

static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });

@@ -21,12 +21,7 @@ static RunOptions hgOptions(const Strings & args)
// Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
env["HGPLAIN"] = "";

return {
.program = "hg",
.lookupPath = true,
.args = args,
.environment = env
};
return {.program = "hg", .lookupPath = true, .args = args, .environment = env};
}

// runProgram wrapper that uses hgOptions instead of stock RunOptions.
@@ -45,14 +40,10 @@ static std::string runHg(const Strings & args, const std::optional<std::string>

struct MercurialInputScheme : InputScheme
{
std::optional<Input> inputFromURL(
const Settings & settings,
const ParsedURL & url, bool requireTree) const override
std::optional<Input> inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != "hg+http" &&
url.scheme != "hg+https" &&
url.scheme != "hg+ssh" &&
url.scheme != "hg+file") return {};
if (url.scheme != "hg+http" && url.scheme != "hg+https" && url.scheme != "hg+ssh" && url.scheme != "hg+file")
return {};

auto url2(url);
url2.scheme = std::string(url2.scheme, 3);
@@ -61,7 +52,7 @@ struct MercurialInputScheme : InputScheme
Attrs attrs;
attrs.emplace("type", "hg");

for (auto &[name, value] : url.query) {
for (auto & [name, value] : url.query) {
if (name == "rev" || name == "ref")
attrs.emplace(name, value);
else
@@ -90,9 +81,7 @@ struct MercurialInputScheme : InputScheme
};
}

std::optional<Input> inputFromAttrs(
const Settings & settings,
const Attrs & attrs) const override
std::optional<Input> inputFromAttrs(const Settings & settings, const Attrs & attrs) const override
{
parseURL(getStrAttr(attrs, "url"));

@@ -110,19 +99,20 @@ struct MercurialInputScheme : InputScheme
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
url.scheme = "hg+" + url.scheme;
if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
if (auto rev = input.getRev())
url.query.insert_or_assign("rev", rev->gitRev());
if (auto ref = input.getRef())
url.query.insert_or_assign("ref", *ref);
return url;
}

Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
std::optional<Hash> rev) const override
Input applyOverrides(const Input & input, std::optional<std::string> ref, std::optional<Hash> rev) const override
{
auto res(input);
if (rev) res.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) res.attrs.insert_or_assign("ref", *ref);
if (rev)
res.attrs.insert_or_assign("rev", rev->gitRev());
if (ref)
res.attrs.insert_or_assign("ref", *ref);
return res;
}

@@ -142,19 +132,20 @@ struct MercurialInputScheme : InputScheme
{
auto [isLocal, repoPath] = getActualUrl(input);
if (!isLocal)
throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string());
throw Error(
"cannot commit '%s' to Mercurial repository '%s' because it's not a working tree",
path,
input.to_string());

auto absPath = CanonPath(repoPath) / path;

writeFile(absPath.abs(), contents);

// FIXME: shut up if file is already tracked.
runHg(
{ "add", absPath.abs() });
runHg({"add", absPath.abs()});

if (commitMsg)
runHg(
{ "commit", absPath.abs(), "-m", *commitMsg });
runHg({"commit", absPath.abs(), "-m", *commitMsg});
}

std::pair<bool, std::string> getActualUrl(const Input & input) const
@@ -179,7 +170,7 @@ struct MercurialInputScheme : InputScheme

if (!input.getRef() && !input.getRev() && isLocal && pathExists(actualUrl + "/.hg")) {

bool clean = runHg({ "status", "-R", actualUrl, "--modified", "--added", "--removed" }) == "";
bool clean = runHg({"status", "-R", actualUrl, "--modified", "--added", "--removed"}) == "";

if (!clean) {

@@ -192,10 +183,11 @@ struct MercurialInputScheme : InputScheme
if (input.settings->warnDirty)
warn("Mercurial tree '%s' is unclean", actualUrl);

input.attrs.insert_or_assign("ref", chomp(runHg({ "branch", "-R", actualUrl })));
input.attrs.insert_or_assign("ref", chomp(runHg({"branch", "-R", actualUrl})));

auto files = tokenizeString<std::set<std::string>>(
runHg({ "status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
runHg({"status", "-R", actualUrl, "--clean", "--modified", "--added", "--no-status", "--print0"}),
"\0"s);

Path actualPath(absPath(actualUrl));

@@ -217,29 +209,28 @@ struct MercurialInputScheme : InputScheme
auto storePath = store->addToStore(
input.getName(),
{getFSSourceAccessor(), CanonPath(actualPath)},
ContentAddressMethod::Raw::NixArchive, HashAlgorithm::SHA256, {},
ContentAddressMethod::Raw::NixArchive,
HashAlgorithm::SHA256,
{},
filter);

return storePath;
}
}

if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
if (!input.getRef())
input.attrs.insert_or_assign("ref", "default");

auto revInfoKey = [&](const Hash & rev)
{
auto revInfoKey = [&](const Hash & rev) {
if (rev.algo != HashAlgorithm::SHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", rev.to_string(HashFormat::Base16, true));
throw Error(
"Hash '%s' is not supported by Mercurial. Only sha1 is supported.",
rev.to_string(HashFormat::Base16, true));

return Cache::Key{"hgRev", {
{"store", store->storeDir},
{"name", name},
{"rev", input.getRev()->gitRev()}
}};
return Cache::Key{"hgRev", {{"store", store->storeDir}, {"name", name}, {"rev", input.getRev()->gitRev()}}};
};

auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
{
auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath {
assert(input.getRev());
assert(!origRev || origRev == input.getRev());
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
@@ -247,10 +238,7 @@ struct MercurialInputScheme : InputScheme
};

/* Check the cache for the most recent rev for this URL/ref. */
Cache::Key refToRevKey{"hgRefToRev", {
{"url", actualUrl},
{"ref", *input.getRef()}
}};
Cache::Key refToRevKey{"hgRefToRev", {{"url", actualUrl}, {"ref", *input.getRef()}}};

if (!input.getRev()) {
if (auto res = getCache()->lookupWithTTL(refToRevKey))
@@ -263,43 +251,47 @@ struct MercurialInputScheme : InputScheme
return makeResult(res->value, res->storePath);
}

Path cacheDir = fmt("%s/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));
Path cacheDir =
fmt("%s/hg/%s",
getCacheDir(),
hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));

/* If this is a commit hash that we already have, we don't
have to pull again. */
if (!(input.getRev()
&& pathExists(cacheDir)
&& runProgram(hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })).second == "1"))
{
if (!(input.getRev() && pathExists(cacheDir)
&& runProgram(hgOptions({"log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1"}))
.second
== "1")) {
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));

if (pathExists(cacheDir)) {
try {
runHg({ "pull", "-R", cacheDir, "--", actualUrl });
}
catch (ExecError & e) {
runHg({"pull", "-R", cacheDir, "--", actualUrl});
} catch (ExecError & e) {
auto transJournal = cacheDir + "/.hg/store/journal";
/* hg throws "abandoned transaction" error only if this file exists */
if (pathExists(transJournal)) {
runHg({ "recover", "-R", cacheDir });
runHg({ "pull", "-R", cacheDir, "--", actualUrl });
runHg({"recover", "-R", cacheDir});
runHg({"pull", "-R", cacheDir, "--", actualUrl});
} else {
throw ExecError(e.status, "'hg pull' %s", statusToString(e.status));
}
}
} else {
createDirs(dirOf(cacheDir));
runHg({ "clone", "--noupdate", "--", actualUrl, cacheDir });
runHg({"clone", "--noupdate", "--", actualUrl, cacheDir});
}
}

/* Fetch the remote rev or ref. */
auto tokens = tokenizeString<std::vector<std::string>>(
runHg({
"log", "-R", cacheDir,
"-r", input.getRev() ? input.getRev()->gitRev() : *input.getRef(),
"--template", "{node} {rev} {branch}"
}));
auto tokens = tokenizeString<std::vector<std::string>>(runHg(
{"log",
"-R",
cacheDir,
"-r",
input.getRev() ? input.getRev()->gitRev() : *input.getRef(),
"--template",
"{node} {rev} {branch}"}));
assert(tokens.size() == 3);

auto rev = Hash::parseAny(tokens[0], HashAlgorithm::SHA1);
@@ -315,7 +307,7 @@ struct MercurialInputScheme : InputScheme
Path tmpDir = createTempDir();
AutoDelete delTmpDir(tmpDir, true);

runHg({ "archive", "-R", cacheDir, "-r", rev.gitRev(), tmpDir });
runHg({"archive", "-R", cacheDir, "-r", rev.gitRev(), tmpDir});

deletePath(tmpDir + "/.hg_archival.txt");

@@ -9,11 +9,10 @@ namespace nix::fetchers {

struct PathInputScheme : InputScheme
{
std::optional<Input> inputFromURL(
const Settings & settings,
const ParsedURL & url, bool requireTree) const override
std::optional<Input> inputFromURL(const Settings & settings, const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != "path") return {};
if (url.scheme != "path")
return {};

if (url.authority && *url.authority != "")
throw Error("path URL '%s' should not have an authority ('%s')", url, *url.authority);
@@ -30,8 +29,7 @@ struct PathInputScheme : InputScheme
input.attrs.insert_or_assign(name, *n);
else
throw Error("path URL '%s' has invalid parameter '%s'", url, name);
}
else
} else
throw Error("path URL '%s' has unsupported parameter '%s'", url, name);

return input;
@@ -58,9 +56,7 @@ struct PathInputScheme : InputScheme
};
}

std::optional<Input> inputFromAttrs(
const Settings & settings,
const Attrs & attrs) const override
std::optional<Input> inputFromAttrs(const Settings & settings, const Attrs & attrs) const override
{
getStrAttr(attrs, "path");

@@ -75,7 +71,7 @@ struct PathInputScheme : InputScheme
query.erase("path");
query.erase("type");
query.erase("__final");
return ParsedURL {
return ParsedURL{
.scheme = "path",
.path = getStrAttr(input.attrs, "path"),
.query = query,
@@ -138,9 +134,8 @@ struct PathInputScheme : InputScheme
time_t mtime = 0;
if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) {
// FIXME: try to substitute storePath.
auto src = sinkToSource([&](Sink & sink) {
mtime = dumpPathAndGetMtime(absPath.string(), sink, defaultPathFilter);
});
auto src = sinkToSource(
[&](Sink & sink) { mtime = dumpPathAndGetMtime(absPath.string(), sink, defaultPathFilter); });
storePath = store->addToStoreFromDump(*src, "source");
}

@@ -10,9 +10,7 @@

namespace nix::fetchers {

std::shared_ptr<Registry> Registry::read(
const Settings & settings,
const Path & path, RegistryType type)
std::shared_ptr<Registry> Registry::read(const Settings & settings, const Path & path, RegistryType type)
{
auto registry = std::make_shared<Registry>(settings, type);

@@ -36,12 +34,11 @@ std::shared_ptr<Registry> Registry::read(
}
auto exact = i.find("exact");
registry->entries.push_back(
Entry {
Entry{
.from = Input::fromAttrs(settings, jsonToAttrs(i["from"])),
.to = Input::fromAttrs(settings, std::move(toAttrs)),
.extraAttrs = extraAttrs,
.exact = exact != i.end() && exact.value()
});
.exact = exact != i.end() && exact.value()});
}
}

@@ -79,17 +76,9 @@ void Registry::write(const Path & path)
writeFile(path, json.dump(2));
}

void Registry::add(
const Input & from,
const Input & to,
const Attrs & extraAttrs)
void Registry::add(const Input & from, const Input & to, const Attrs & extraAttrs)
{
entries.emplace_back(
Entry {
.from = from,
.to = to,
.extraAttrs = extraAttrs
});
entries.emplace_back(Entry{.from = from, .to = to, .extraAttrs = extraAttrs});
}

void Registry::remove(const Input & input)
@@ -106,8 +95,7 @@ static Path getSystemRegistryPath()

static std::shared_ptr<Registry> getSystemRegistry(const Settings & settings)
{
static auto systemRegistry =
Registry::read(settings, getSystemRegistryPath(), Registry::System);
static auto systemRegistry = Registry::read(settings, getSystemRegistryPath(), Registry::System);
return systemRegistry;
}

@@ -118,29 +106,23 @@ Path getUserRegistryPath()

std::shared_ptr<Registry> getUserRegistry(const Settings & settings)
{
static auto userRegistry =
Registry::read(settings, getUserRegistryPath(), Registry::User);
static auto userRegistry = Registry::read(settings, getUserRegistryPath(), Registry::User);
return userRegistry;
}

std::shared_ptr<Registry> getCustomRegistry(const Settings & settings, const Path & p)
{
static auto customRegistry =
Registry::read(settings, p, Registry::Custom);
static auto customRegistry = Registry::read(settings, p, Registry::Custom);
return customRegistry;
}

std::shared_ptr<Registry> getFlagRegistry(const Settings & settings)
{
static auto flagRegistry =
std::make_shared<Registry>(settings, Registry::Flag);
static auto flagRegistry = std::make_shared<Registry>(settings, Registry::Flag);
return flagRegistry;
}

void overrideRegistry(
const Input & from,
const Input & to,
const Attrs & extraAttrs)
void overrideRegistry(const Input & from, const Input & to, const Attrs & extraAttrs)
{
getFlagRegistry(*from.settings)->add(from, to, extraAttrs);
}
@@ -176,22 +158,21 @@ Registries getRegistries(const Settings & settings, ref<Store> store)
return registries;
}

std::pair<Input, Attrs> lookupInRegistries(
ref<Store> store,
const Input & _input,
const RegistryFilter & filter)
std::pair<Input, Attrs> lookupInRegistries(ref<Store> store, const Input & _input, const RegistryFilter & filter)
{
Attrs extraAttrs;
int n = 0;
Input input(_input);

restart:
restart:

n++;
if (n > 100) throw Error("cycle detected in flake registry for '%s'", input.to_string());
if (n > 100)
throw Error("cycle detected in flake registry for '%s'", input.to_string());

for (auto & registry : getRegistries(*input.settings, store)) {
if (filter && !filter(registry->type)) continue;
if (filter && !filter(registry->type))
continue;
// FIXME: O(n)
for (auto & entry : registry->entries) {
if (entry.exact) {
Some files were not shown because too many files have changed in this diff.