add nix-prefetch-zip
parent 15166ca5c2
commit a6a729b207
pkgs/tools/package-management/nix-prefetch-scripts/default.nix

@@ -1,4 +1,4 @@
-{ stdenv, makeWrapper, git, subversion, mercurial, bazaar, cvs }:
+{ stdenv, makeWrapper, git, subversion, mercurial, bazaar, cvs, unzip, curl }:

 stdenv.mkDerivation {
   name = "nix-prefetch-scripts";
@@ -11,9 +11,12 @@ stdenv.mkDerivation {
     function copyScript {
       local name=nix-prefetch-$1;
       local src=$2;
-      local exe=$3/bin;
       cp $src $out/bin/$name;
-      wrapProgram $out/bin/$name --suffix PATH : "$exe"
+      for dep in ''${@:3}; do
+        local exe=$dep/bin;
+        local wrapArgs="$wrapArgs --suffix PATH : $exe"
+      done
+      wrapProgram $out/bin/$name $wrapArgs
     }

     copyScript "hg" ${../../../build-support/fetchhg/nix-prefetch-hg} ${mercurial}
@@ -21,6 +24,7 @@ stdenv.mkDerivation {
     copyScript "svn" ${../../../build-support/fetchsvn/nix-prefetch-svn} ${subversion}
     copyScript "bzr" ${../../../build-support/fetchbzr/nix-prefetch-bzr} ${bazaar}
     copyScript "cvs" ${../../../build-support/fetchcvs/nix-prefetch-cvs} ${cvs}
+    copyScript "zip" ${./nix-prefetch-zip} ${unzip} ${curl}
   '';

   meta = with stdenv.lib; {
@@ -30,4 +34,4 @@ stdenv.mkDerivation {
     # Quicker to build than to download, I hope
     hydraPlatforms = [];
   };
 }
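The new copyScript body relies on ''${@:3}, the Nix-escaped form of ${@:3}, to loop over every store path passed after the script and turn each one into a --suffix PATH flag for wrapProgram. A rough, purely illustrative sketch of what the added zip line does at build time; the /nix/store paths are placeholders, not outputs of this commit:

# Illustrative expansion of: copyScript "zip" ${./nix-prefetch-zip} ${unzip} ${curl}
# Store paths are placeholders; $out is the nix-prefetch-scripts output.
cp /nix/store/...-nix-prefetch-zip $out/bin/nix-prefetch-zip
wrapProgram $out/bin/nix-prefetch-zip \
  --suffix PATH : /nix/store/...-unzip/bin \
  --suffix PATH : /nix/store/...-curl/bin
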
pkgs/tools/package-management/nix-prefetch-scripts/nix-prefetch-zip (new executable file, 147 lines)

@@ -0,0 +1,147 @@
#! /usr/bin/env bash
# bash (not plain sh) is required: the script uses shopt below.
set -e

usage(){
  echo >&2 "syntax: nix-prefetch-zip [options]

Options:
  --url url           The URL of the archive to fetch.
  --name name         The name to use for the store path (defaults to \`basename \$url\`).
  --hash hash         The hash of the unpacked archive.
  --hash-type type    Use the specified cryptographic hash algorithm, which can be one of md5, sha1, and sha256.
  --base32            Print/accept the hash in a base-32 representation rather than hexadecimal.
  --leave-root        Keep the root directory of the archive.
  --help              Show this help text.
"
  exit 1
}

argi=0
argfun=""
for arg; do
  if test -z "$argfun"; then
    case $arg in
      --url) argfun=set_url;;
      --name) argfun=set_name;;
      --hash) argfun=set_expHash;;
      --hash-type) argfun=set_hashType;;
      --base32) hashFormat="--base32";;
      --leave-root) leaveRoot=true;;
      --help) usage;;
      *)
        echo "Unexpected argument: $arg" >&2
        usage
        ;;
    esac
  else
    case $argfun in
      set_*)
        var=$(echo $argfun | sed 's,^set_,,')
        eval "$var=\$arg"
        ;;
    esac
    argfun=""
  fi
done

if [ -z "$url" ]; then
  echo "Error: No --url flag given" >&2
  usage
fi

if [ -z "$name" ]; then
  name=$(basename "$url")
fi

if test -z "$hashType"; then
  hashType=sha256
fi

tmp=$(mktemp -d 2>/dev/null || mktemp -d -t "$$")
trap "rm -rf \"\$tmp\"" EXIT

TMPDIR=$tmp/unpacked/$name
mkdir -p $TMPDIR
downloadedFile=$tmp/$name

unpackFile() {
  local curSrc="$1"

  case "$curSrc" in
    *.tar.xz | *.tar.lzma)
      # Don't rely on tar knowing about .xz.
      xz -d < $curSrc | tar xf -
      ;;
    *.tar | *.tar.* | *.tgz | *.tbz2)
      # GNU tar can automatically select the decompression method
      # (info "(tar) gzip").
      tar xf $curSrc
      ;;
    *.zip)
      unzip -qq $curSrc
      ;;
    *)
      echo "source archive $curSrc has unknown type" >&2
      exit 1
      ;;
  esac
}

# If the hash was given, a file with that hash may already be in the
# store.
if test -n "$expHash"; then
  finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" "$name")
  if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
    finalPath=
  fi
  hash=$expHash
fi

# If we don't know the hash or a path with that hash doesn't exist,
# download the file and add it to the store.
if test -z "$finalPath"; then
  curl="curl \
    --location --max-redirs 20 \
    --disable-epsv \
    --insecure"

  if ! $curl --fail "$url" --output "$downloadedFile"; then
    echo "error: could not download $url" >&2
    exit 1
  fi

  cd $TMPDIR
  unpackFile "$downloadedFile"

  # FIXME: handle zip files that contain a single regular file.
  if [ -z "$leaveRoot" ]; then
    shopt -s dotglob
    if [ $(ls -d $TMPDIR/* | wc -l) != 1 ]; then
      echo "error: zip file must contain a single directory."
      exit 1
    fi
    fn=$(cd "$TMPDIR" && echo *)
    mv $TMPDIR/$fn/* "$TMPDIR/"
    rmdir "$TMPDIR/$fn"
  fi

  # Compute the hash.
  hash=$(nix-hash --type $hashType $hashFormat $TMPDIR)
  if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi

  # Add the downloaded file to the Nix store.
  finalPath=$(nix-store --add-fixed --recursive "$hashType" $TMPDIR)

  if test -n "$expHash" -a "$expHash" != "$hash"; then
    echo "hash mismatch for URL \`$url'"
    exit 1
  fi
fi

if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi

echo $hash

if test -n "$PRINT_PATH"; then
  echo $finalPath
fi
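
For reference, a hypothetical session with the new script might look like the following; the URL, hash, and store path are made-up placeholders, and QUIET and PRINT_PATH behave as in the code above:

# Hypothetical invocation (all values are placeholders, not real output):
$ PRINT_PATH=1 nix-prefetch-zip --url https://example.org/project.zip
hash is <sha256-hex>              # progress message on stderr (suppressed when QUIET is set)
<sha256-hex>                      # hash of the unpacked tree, printed on stdout
/nix/store/<hash>-project.zip     # printed because PRINT_PATH is set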