lib.fetchers: add hash-normalization helpers (#342072)
commit 45b95421fd

lib/fetchers.nix (174 changed lines)
@@ -1,6 +1,17 @@
# snippets that can be shared by multiple fetchers (pkgs/build-support)
{ lib }:
-{
+let
+  commonH = hashTypes: rec {
+    hashNames = [ "hash" ] ++ hashTypes;
+    hashSet = lib.genAttrs hashNames (lib.const {});
+  };
+
+  fakeH = {
+    hash = lib.fakeHash;
+    sha256 = lib.fakeSha256;
+    sha512 = lib.fakeSha512;
+  };
+in rec {

  proxyImpureEnvVars = [
    # We borrow these environment variables from the caller to allow
@@ -14,4 +25,165 @@
    "NIX_SSL_CERT_FILE"
  ];

  /**
    Converts an attrset containing one of `hash`, `sha256` or `sha512`,
    into one containing `outputHash{,Algo}` as accepted by `mkDerivation`.

    An appropriate “fake hash” is substituted when the hash value is `""`,
    as is the [convention for fetchers](#sec-pkgs-fetchers-updating-source-hashes-fakehash-method).

    All other attributes in the set remain as-is.

    # Example

    ```nix
    normalizeHash { } { hash = ""; foo = "bar"; }
    =>
    {
      outputHash = lib.fakeHash;
      outputHashAlgo = null;
      foo = "bar";
    }
    ```

    ```nix
    normalizeHash { } { sha256 = lib.fakeSha256; }
    =>
    {
      outputHash = lib.fakeSha256;
      outputHashAlgo = "sha256";
    }
    ```

    ```nix
    normalizeHash { } { sha512 = lib.fakeSha512; }
    =>
    {
      outputHash = lib.fakeSha512;
      outputHashAlgo = "sha512";
    }
    ```

    # Type
    ```
    normalizeHash :: { hashTypes :: List String, required :: Bool } -> AttrSet -> AttrSet
    ```

    # Arguments

    hashTypes
    : the set of attribute names accepted as hash inputs, in addition to `hash`

    required
    : whether to throw if no hash was present in the input; otherwise returns the original input, unmodified
  */
  normalizeHash = {
    hashTypes ? [ "sha256" ],
    required ? true,
  }:
  let
    inherit (lib) concatMapStringsSep head tail throwIf;
    inherit (lib.attrsets) attrsToList intersectAttrs removeAttrs optionalAttrs;

    inherit (commonH hashTypes) hashNames hashSet;
  in
  args:
  if args ? "outputHash" then
    args
  else
    let
      # The argument hash, as a {name, value} pair
      h =
        # All hashes passed in arguments (possibly 0 or >1) as a list of {name, value} pairs
        let hashesAsNVPairs = attrsToList (intersectAttrs hashSet args); in
        if hashesAsNVPairs == [] then
          throwIf required "fetcher called without `hash`" null
        else if tail hashesAsNVPairs != [] then
          throw "fetcher called with mutually-incompatible arguments: ${concatMapStringsSep ", " (a: a.name) hashesAsNVPairs}"
        else
          head hashesAsNVPairs
      ;
    in
    removeAttrs args hashNames // (optionalAttrs (h != null) {
      outputHashAlgo = if h.name == "hash" then null else h.name;
      outputHash =
        if h.value == "" then
          fakeH.${h.name} or (throw "no “fake hash” defined for ${h.name}")
        else
          h.value;
    })
  ;

  /**
    Wraps a function which accepts `outputHash{,Algo}` into one which accepts `hash` or `sha{256,512}`

    # Example
    ```nix
    withNormalizedHash { hashTypes = [ "sha256" "sha512" ]; } (
      { outputHash, outputHashAlgo, ... }:
      ...
    )
    ```
    is a function which accepts one of `hash`, `sha256`, or `sha512` (or the original's `outputHash` and `outputHashAlgo`).

    Its `functionArgs` metadata only lists `hash` as a parameter, optional iff. `outputHash` was an optional parameter of
    the original function. `sha256`, `sha512`, `outputHash`, or `outputHashAlgo` are not mentioned in the `functionArgs`
    metadata.

    # Type
    ```
    withNormalizedHash :: { hashTypes :: List String } -> (AttrSet -> T) -> (AttrSet -> T)
    ```

    # Arguments

    hashTypes
    : the set of attribute names accepted as hash inputs, in addition to `hash`
    : they must correspond to a valid value for `outputHashAlgo`, currently one of: `md5`, `sha1`, `sha256`, or `sha512`.

    f
    : the function to be wrapped

    ::: {.note}
    In nixpkgs, `mkDerivation` rejects MD5 `outputHash`es, and SHA-1 is being deprecated.

    As such, there is no reason to add `md5` to `hashTypes`, and
    `sha1` should only ever be included for backwards compatibility.
    :::

    # Output

    `withNormalizedHash { inherit hashTypes; } f` is functionally equivalent to
    ```nix
    args: f (normalizeHash {
      inherit hashTypes;
      required = !(lib.functionArgs f).outputHash;
    } args)
    ```

    However, `withNormalizedHash` preserves `functionArgs` metadata insofar as possible,
    and is implemented somewhat more efficiently.
  */
  withNormalizedHash = {
    hashTypes ? [ "sha256" ]
  }: fetcher:
  let
    inherit (lib.attrsets) genAttrs intersectAttrs removeAttrs;
    inherit (lib.trivial) const functionArgs setFunctionArgs;

    inherit (commonH hashTypes) hashSet;
    fArgs = functionArgs fetcher;

    normalize = normalizeHash {
      inherit hashTypes;
      required = !fArgs.outputHash;
    };
  in
  # The o.g. fetcher must *only* accept outputHash and outputHashAlgo
  assert fArgs ? outputHash && fArgs ? outputHashAlgo;
  assert intersectAttrs fArgs hashSet == {};

  setFunctionArgs
    (args: fetcher (normalize args))
    (removeAttrs fArgs [ "outputHash" "outputHashAlgo" ] // { hash = fArgs.outputHash; });
}
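To make the intended call pattern concrete, here is a minimal sketch of a fetcher adopting `withNormalizedHash`. Everything in it is hypothetical (the fetcher, its `url` argument, the `example-fetch` name, and the trivial build logic); only the helper itself comes from the diff above.

```nix
# Hypothetical example only: a toy fixed-output "fetcher" wrapped so that
# callers may pass `hash` (or `sha256`) instead of outputHash/outputHashAlgo.
{ lib, stdenvNoCC }:
lib.fetchers.withNormalizedHash { } (
  { url, outputHash, outputHashAlgo }:
  stdenvNoCC.mkDerivation {
    name = "example-fetch";
    inherit url;
    # Fixed-output derivation attributes, as produced by normalizeHash
    inherit outputHash outputHashAlgo;
    outputHashMode = "recursive";
    # Real fetch logic (network access, certificates, ...) would go here.
    buildCommand = "mkdir $out";
  }
)
```

A caller of such a wrapper can pass `hash = ""` while updating a package and get `lib.fakeHash` substituted, per the convention documented above.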
lib/tests/fetchers.nix (new file, 165 lines)
@@ -0,0 +1,165 @@
let
  lib = import ./..;

  inherit (lib)
    fakeHash
    fakeSha256
    fakeSha512
    flip
    functionArgs
    runTests
    ;
  inherit (lib.fetchers) normalizeHash withNormalizedHash;

  testingThrow = expr: {
    expr = with builtins; tryEval (seq expr "didn't throw");
    expected = {
      success = false;
      value = false;
    };
  };

  # hashes of empty
  sri256 = "sha256-d6xi4mKdjkX2JFicDIv5niSzpyI0m/Hnm8GGAIU04kY=";
  sri512 = "sha512-AXFyVo7jiZ5we10fxZ5E9qfPjSfqkizY2apCzORKFVYZaNhCIVbooY+J4cYST00ztLf0EjivIBPPdtIYFUMfzQ==";

  unionOfDisjoints = lib.foldl lib.attrsets.unionOfDisjoint { };

  genTests = n: f: {
    "test${n}AlreadyNormalized" = {
      expr = f { } {
        outputHash = "";
        outputHashAlgo = "md42";
      };
      expected = {
        outputHash = "";
        outputHashAlgo = "md42";
      };
    };

    "test${n}EmptySha256" = {
      expr = f { } { sha256 = ""; };
      expected = {
        outputHash = fakeSha256;
        outputHashAlgo = "sha256";
      };
    };

    "test${n}EmptySha512" = {
      expr = f { hashTypes = [ "sha512" ]; } { sha512 = ""; };
      expected = {
        outputHash = fakeSha512;
        outputHashAlgo = "sha512";
      };
    };

    "test${n}EmptyHash" = {
      expr = f { } { hash = ""; };
      expected = {
        outputHash = fakeHash;
        outputHashAlgo = null;
      };
    };

    "test${n}Sri256" = {
      expr = f { } { hash = sri256; };
      expected = {
        outputHash = sri256;
        outputHashAlgo = null;
      };
    };

    "test${n}Sri512" = {
      expr = f { } { hash = sri512; };
      expected = {
        outputHash = sri512;
        outputHashAlgo = null;
      };
    };

    "test${n}PreservesAttrs" = {
      expr = f { } {
        hash = "aaaa";
        destination = "Earth";
      };
      expected = {
        outputHash = "aaaa";
        outputHashAlgo = null;
        destination = "Earth";
      };
    };

    "test${n}RejectsSha1ByDefault" = testingThrow (f { } { sha1 = ""; });
    "test${n}RejectsSha512ByDefault" = testingThrow (f { } { sha512 = ""; });

    "test${n}ThrowsOnMissing" = testingThrow (f { } { gibi = false; });
  };
in
runTests (unionOfDisjoints [
  (genTests "NormalizeHash" normalizeHash)
  (genTests "WithNormalized" (
    flip withNormalizedHash ({ outputHash, outputHashAlgo, ... }@args: args)
  ))
  {
    testNormalizeNotRequiredEquivalent = {
      expr = normalizeHash { required = false; } {
        hash = "";
        prof = "shadoko";
      };
      expected = normalizeHash { } {
        hash = "";
        prof = "shadoko";
      };
    };

    testNormalizeNotRequiredPassthru = {
      expr = normalizeHash { required = false; } { "ga bu" = "zo meu"; };
      expected."ga bu" = "zo meu";
    };

    testOptionalArg = {
      expr = withNormalizedHash { } (
        {
          outputHash ? "",
          outputHashAlgo ? null,
          ...
        }@args:
        args
      ) { author = "Jacques Rouxel"; };
      expected.author = "Jacques Rouxel";
    };

    testOptionalArgMetadata = {
      expr = functionArgs (
        withNormalizedHash { } (
          {
            outputHash ? "",
            outputHashAlgo ? null,
          }:
          { }
        )
      );
      expected.hash = true;
    };

    testPreservesArgsMetadata = {
      expr = functionArgs (
        withNormalizedHash { } (
          {
            outputHash,
            outputHashAlgo,
            pumping ? true,
          }:
          { }
        )
      );
      expected = {
        hash = false;
        pumping = true;
      };
    };

    testRejectsMissingHashArg = testingThrow (withNormalizedHash { } ({ outputHashAlgo }: { }));
    testRejectsMissingAlgoArg = testingThrow (withNormalizedHash { } ({ outputHash }: { }));
  }
])
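For context (not part of the diff): `lib.runTests` returns only the failing cases, so this file is expected to evaluate to the empty list, which is what lets the hunk below simply add `(import ./fetchers.nix)` to the test runner's `buildInputs`. A rough sketch of that expectation, assuming it is evaluated from within `lib/tests`:

```nix
# Sketch only: a passing suite means runTests produced no failure entries.
let failures = import ./fetchers.nix;
in assert failures == [ ]; "lib.fetchers tests passed"
```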
@@ -17,6 +17,7 @@
pkgs.runCommand "nixpkgs-lib-tests-nix-${nix.version}" {
  buildInputs = [
    (import ./check-eval.nix)
+    (import ./fetchers.nix)
    (import ./maintainers.nix {
      inherit pkgs;
      lib = import ../.;
@@ -64,7 +64,7 @@ let
      ;
  };
  fBuildAttrs = fArgs // buildAttrs;
-  fFetchAttrs = fArgs // removeAttrs fetchAttrs [ "sha256" ];
+  fFetchAttrs = fArgs // removeAttrs fetchAttrs [ "hash" "sha256" ];

  bazelCmd = { cmd, additionalFlags, targets, targetRunFlags ? [ ] }:
    lib.optionalString (targets != [ ]) ''
      # See footnote called [USER and BAZEL_USE_CPP_ONLY_TOOLCHAIN variables]

@@ -197,8 +197,10 @@ stdenv.mkDerivation (fBuildAttrs // {
    dontFixup = true;
    allowedRequisites = [];

-    outputHashAlgo = "sha256";
-    outputHash = fetchAttrs.sha256;
+    inherit (lib.fetchers.normalizeHash { hashTypes = [ "sha256" ]; } fetchAttrs)
+      outputHash
+      outputHashAlgo
+    ;
  });

  nativeBuildInputs = fBuildAttrs.nativeBuildInputs or [] ++ [ (bazel.override { enableNixHacks = true; }) ];
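To illustrate the replacement above: `normalizeHash { hashTypes = [ "sha256" ]; }` maps whichever of `hash`/`sha256` the caller set onto the fixed-output attributes, so the `inherit` picks up the right pair. A sketch with a hypothetical SRI value:

```nix
# Hypothetical hash value, following the behaviour documented in lib/fetchers.nix:
lib.fetchers.normalizeHash { hashTypes = [ "sha256" ]; } {
  hash = "sha256-AAAA...";   # an SRI hash, as fetchAttrs now uses `hash`
}
# => { outputHash = "sha256-AAAA..."; outputHashAlgo = null; }
```

If a caller still sets `sha256` instead, the same call returns `outputHashAlgo = "sha256"`, which keeps existing `fetchAttrs` working.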
@@ -10,8 +10,9 @@
  appendShort = lib.optionalString ((builtins.match "[a-f0-9]*" rev) != null) "-${short}";
in "${if matched == null then base else builtins.head matched}${appendShort}";
in
-lib.makeOverridable (
-  { url, rev ? "HEAD", sha256 ? "", hash ? "", leaveDotGit ? deepClone
+lib.makeOverridable (lib.fetchers.withNormalizedHash { } (
+  { url, rev ? "HEAD", leaveDotGit ? deepClone
+  , outputHash ? lib.fakeHash, outputHashAlgo ? null
  , fetchSubmodules ? true, deepClone ? false
  , branchName ? null
  , sparseCheckout ? []

@@ -56,9 +57,7 @@ lib.makeOverridable (
assert deepClone -> leaveDotGit;
assert nonConeMode -> (sparseCheckout != []);

-if hash != "" && sha256 != "" then
-  throw "Only one of sha256 or hash can be set"
-else if builtins.isString sparseCheckout then
+if builtins.isString sparseCheckout then
  # Changed to throw on 2023-06-04
  throw "Please provide directories/patterns for sparse checkout as a list of strings. Passing a (multi-line) string is not supported any more."
else

@@ -70,14 +69,8 @@ stdenvNoCC.mkDerivation {
  nativeBuildInputs = [ git cacert ]
    ++ lib.optionals fetchLFS [ git-lfs ];

-  outputHashAlgo = if hash != "" then null else "sha256";
+  inherit outputHash outputHashAlgo;
  outputHashMode = "recursive";
-  outputHash = if hash != "" then
-    hash
-  else if sha256 != "" then
-    sha256
-  else
-    lib.fakeSha256;

  # git-sparse-checkout(1) says:
  # > When the --stdin option is provided, the directories or patterns are read

@@ -105,4 +98,4 @@ stdenvNoCC.mkDerivation {
    gitRepoUrl = url;
  };
}
-)
+))
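After this conversion, the fetcher above (going by its arguments, nixpkgs' `fetchgit`) still supports the fake-hash workflow; a sketch of a typical call, with a placeholder URL and rev:

```nix
# Placeholder URL/rev; with hash = "" the wrapper substitutes lib.fakeHash,
# so the first build fails with a hash mismatch that reports the real hash.
fetchgit {
  url = "https://example.org/some/repo.git";
  rev = "0123456789abcdef0123456789abcdef01234567";
  hash = "";
}
```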
@@ -1,83 +1,83 @@
{ lib, stdenvNoCC, gitRepo, cacert, copyPathsToStore }:
+lib.fetchers.withNormalizedHash { } (
+  { name, manifest, rev ? "HEAD", outputHash, outputHashAlgo
+  # Optional parameters:
+  , repoRepoURL ? "", repoRepoRev ? "", referenceDir ? "", manifestName ? ""
+  , localManifests ? [], createMirror ? false, useArchive ? false
+  }:

-{ name, manifest, rev ? "HEAD", sha256
-# Optional parameters:
-, repoRepoURL ? "", repoRepoRev ? "", referenceDir ? "", manifestName ? ""
-, localManifests ? [], createMirror ? false, useArchive ? false
-}:
+  assert repoRepoRev != "" -> repoRepoURL != "";
+  assert createMirror -> !useArchive;

-assert repoRepoRev != "" -> repoRepoURL != "";
-assert createMirror -> !useArchive;
+  let
+    inherit (lib)
+      concatMapStringsSep
+      concatStringsSep
+      fetchers
+      optionalString
+      ;

-let
-  inherit (lib)
-    concatMapStringsSep
-    concatStringsSep
-    fetchers
-    optionalString
-    ;
+    extraRepoInitFlags = [
+      (optionalString (repoRepoURL != "") "--repo-url=${repoRepoURL}")
+      (optionalString (repoRepoRev != "") "--repo-branch=${repoRepoRev}")
+      (optionalString (referenceDir != "") "--reference=${referenceDir}")
+      (optionalString (manifestName != "") "--manifest-name=${manifestName}")
+    ];

-  extraRepoInitFlags = [
-    (optionalString (repoRepoURL != "") "--repo-url=${repoRepoURL}")
-    (optionalString (repoRepoRev != "") "--repo-branch=${repoRepoRev}")
-    (optionalString (referenceDir != "") "--reference=${referenceDir}")
-    (optionalString (manifestName != "") "--manifest-name=${manifestName}")
-  ];
+    repoInitFlags = [
+      "--manifest-url=${manifest}"
+      "--manifest-branch=${rev}"
+      "--depth=1"
+      (optionalString createMirror "--mirror")
+      (optionalString useArchive "--archive")
+    ] ++ extraRepoInitFlags;

-  repoInitFlags = [
-    "--manifest-url=${manifest}"
-    "--manifest-branch=${rev}"
-    "--depth=1"
-    (optionalString createMirror "--mirror")
-    (optionalString useArchive "--archive")
-  ] ++ extraRepoInitFlags;
+    local_manifests = copyPathsToStore localManifests;

-  local_manifests = copyPathsToStore localManifests;
+  in stdenvNoCC.mkDerivation {
+    inherit name;

-in stdenvNoCC.mkDerivation {
-  inherit name;
+    inherit cacert manifest rev repoRepoURL repoRepoRev referenceDir; # TODO

-  inherit cacert manifest rev repoRepoURL repoRepoRev referenceDir; # TODO
+    inherit outputHash outputHashAlgo;
+    outputHashMode = "recursive";

-  outputHashAlgo = "sha256";
-  outputHashMode = "recursive";
-  outputHash = sha256;
+    preferLocalBuild = true;
+    enableParallelBuilding = true;

-  preferLocalBuild = true;
-  enableParallelBuilding = true;
+    impureEnvVars = fetchers.proxyImpureEnvVars ++ [
+      "GIT_PROXY_COMMAND" "SOCKS_SERVER"
+    ];

-  impureEnvVars = fetchers.proxyImpureEnvVars ++ [
-    "GIT_PROXY_COMMAND" "SOCKS_SERVER"
-  ];
+    nativeBuildInputs = [ gitRepo cacert ];

-  nativeBuildInputs = [ gitRepo cacert ];
+    GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";

-  GIT_SSL_CAINFO = "${cacert}/etc/ssl/certs/ca-bundle.crt";
+    buildCommand = ''
+      # Path must be absolute (e.g. for GnuPG: ~/.repoconfig/gnupg/pubring.kbx)
+      export HOME="$(pwd)"

-  buildCommand = ''
-    # Path must be absolute (e.g. for GnuPG: ~/.repoconfig/gnupg/pubring.kbx)
-    export HOME="$(pwd)"
+      mkdir $out
+      cd $out

-    mkdir $out
-    cd $out
+      mkdir .repo
+      ${optionalString (local_manifests != []) ''
+        mkdir .repo/local_manifests
+        for local_manifest in ${concatMapStringsSep " " toString local_manifests}; do
+          cp $local_manifest .repo/local_manifests/$(stripHash $local_manifest)
+        done
+      ''}

-    mkdir .repo
-    ${optionalString (local_manifests != []) ''
-      mkdir .repo/local_manifests
-      for local_manifest in ${concatMapStringsSep " " toString local_manifests}; do
-        cp $local_manifest .repo/local_manifests/$(stripHash $local_manifest)
-      done
-    ''}
+      repo init ${concatStringsSep " " repoInitFlags}
+      repo sync --jobs=$NIX_BUILD_CORES --current-branch

-    repo init ${concatStringsSep " " repoInitFlags}
-    repo sync --jobs=$NIX_BUILD_CORES --current-branch

+      # TODO: The git-index files (and probably the files in .repo as well) have
+      # different contents each time and will therefore change the final hash
+      # (i.e. creating a mirror probably won't work).
+      ${optionalString (!createMirror) ''
+        rm -rf .repo
+        find -type d -name '.git' -prune -exec rm -rf {} +
+      ''}
+    '';
+  }

-    # TODO: The git-index files (and probably the files in .repo as well) have
-    # different contents each time and will therefore change the final hash
-    # (i.e. creating a mirror probably won't work).
-    ${optionalString (!createMirror) ''
-      rm -rf .repo
-      find -type d -name '.git' -prune -exec rm -rf {} +
-    ''}
-  '';
-}
+)
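One observable consequence of wrapping this fetcher (per the `withNormalizedHash` documentation above) is that its argument metadata now advertises `hash` rather than `outputHash`. A sketch only, assuming the wrapped function is bound as nixpkgs' `fetchRepoProject`:

```nix
# Sketch: `hash` is now listed (required, since outputHash has no default here),
# while outputHash/outputHashAlgo no longer appear in the metadata, even though
# passing them directly still works via normalizeHash's passthrough.
{
  acceptsHash = (lib.functionArgs fetchRepoProject) ? hash;             # expected: true
  listsOutputHash = (lib.functionArgs fetchRepoProject) ? outputHash;   # expected: false
}
```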
@@ -33,7 +33,7 @@ stdenv.mkDerivation (finalAttrs: {
    name = "amdvlk-src";
    manifest = "https://github.com/GPUOpen-Drivers/AMDVLK.git";
    rev = "refs/tags/v-${finalAttrs.version}";
-    sha256 = "1Svdr93ShjhaWJUTLn5y1kBM4hHey1dUVDiHqFIKgrU=";
+    hash = "sha256-1Svdr93ShjhaWJUTLn5y1kBM4hHey1dUVDiHqFIKgrU=";
  };

  buildInputs =
@@ -26,7 +26,7 @@ buildBazelPackage rec {
  ];

  fetchAttrs = {
-    sha256 = "sha256-Qm6Ng9cXvKx043P7qyNHyyMvdGK9aNarX1ZKeCp3mgY=";
+    hash = "sha256-Qm6Ng9cXvKx043P7qyNHyyMvdGK9aNarX1ZKeCp3mgY=";
  };

  nativeBuildInputs = [ jdk ];
@@ -19,7 +19,7 @@ buildBazelPackage rec {

  LIBTOOL = lib.optionalString stdenv.isDarwin "${cctools}/bin/libtool";

-  fetchAttrs.sha256 = "sha256-WOBlZ0XNrl5UxIaSDxZeOfzS2a8ZkrKdTLKHBDC9UNQ=";
+  fetchAttrs.hash = "sha256-WOBlZ0XNrl5UxIaSDxZeOfzS2a8ZkrKdTLKHBDC9UNQ=";

  buildAttrs.installPhase = ''
    mkdir -p $out/bin
@@ -37,7 +37,7 @@ buildBazelPackage rec {
  ];

  fetchAttrs = {
-    sha256 = "sha256-bKASgc5KftCWtMvJkGA4nweBAtgdnyC9uXIJxPjKYS0=";
+    hash = "sha256-bKASgc5KftCWtMvJkGA4nweBAtgdnyC9uXIJxPjKYS0=";
  };

  nativeBuildInputs = [