2005-12-05 14:11:09 +00:00
|
|
|
# Builder script for fetching a file: downloads one of $urls (after
# mirror:// expansion below) into the output path $out.

source $stdenv/setup

# Defines the per-site mirror lists as shell variables, one list per
# mirror:// site name — consumed by the mirror:// expansion below.
source $mirrorsFile
# Curl flags to handle redirects, not use EPSV, handle cookies for
# servers to need them during redirects, and work on SSL without a
# certificate (this isn't a security problem because we check the
# cryptographic hash of the output anyway).
#
# $curlOpts adds per-derivation flags and NIX_CURL_FLAGS adds
# user/environment flags (presumably set by the caller — confirm).
# NOTE: $curl is deliberately expanded unquoted at every call site so
# this string undergoes word splitting into separate arguments.
curl="curl \
 --location --max-redirs 20 \
 --retry 3 \
 --disable-epsv \
 --cookie-jar cookies \
 --insecure \
 $curlOpts \
 $NIX_CURL_FLAGS"
2014-05-08 12:57:20 +00:00
|
|
|
# By default curl writes straight to the output path $out.  When
# $downloadToTemp is set, download into a temporary file instead
# (presumably so postFetch can transform it before it reaches $out —
# confirm against callers).
downloadedFile="$out"
if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
2007-08-23 15:22:30 +00:00
|
|
|
# Attempt one download of $url into $downloadedFile via $curl.
# Sets the global `success` to 1 on success, leaves it empty otherwise.
# Curl exit code 18 ("partial file") is retried with `-C -` so an
# interrupted transfer is resumed instead of restarted.
tryDownload() {
    local url="$1"
    echo
    header "trying $url"

    success=
    local status=18
    while [ "$status" -eq 18 ]; do
        # Keep the curl call as an `if` condition so a failure does
        # not abort the whole script.
        if $curl -C - --fail "$url" --output "$downloadedFile"; then
            success=1
            break
        else
            status=$?
        fi
    done
}
# Finalize a successful download: restore globbing, optionally mark
# the downloaded file executable, run the postFetch hook, and exit
# the builder with success.
finish() {
    set +o noglob

    if [[ $executable == "1" ]]; then
        # Quoted: $downloadedFile may live under a TMPDIR containing
        # whitespace (fixes the unquoted chmod of the original).
        chmod +x "$downloadedFile"
    fi

    runHook postFetch
    exit 0
}
2007-08-27 12:44:01 +00:00
|
|
|
# Try to fetch the file by content hash from the "hashed mirrors",
# which serve files under <mirror>/<hashAlgo>/<hash>.  Each candidate
# is probed with a cheap HEAD request first; on a hit the file is
# downloaded and finish() is called (which exits the script).
tryHashedMirrors() {
    # Allow the environment to override the mirror list.
    if [ -n "$NIX_HASHED_MIRRORS" ]; then
        hashedMirrors="$NIX_HASHED_MIRRORS"
    fi

    for mirror in $hashedMirrors; do
        url="$mirror/$outputHashAlgo/$outputHash"

        # Probe with HEAD; the HTTP status code lands in the file
        # `code`, curl's diagnostics in the file `log`.
        if ! $curl --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
            --fail --silent --show-error --head "$url" \
            --write-out "%{http_code}" --output /dev/null > code 2> log; then
            # Be quiet about 404 errors, which we interpret as the
            # file not being present on this particular mirror.
            if [ "$(cat code)" != 404 ]; then
                echo "error checking the existence of $url:"
                cat log
            fi
            continue
        fi

        tryDownload "$url"
        if [ -n "$success" ]; then finish; fi
    done
}
2009-05-08 17:00:33 +00:00
|
|
|
# URL list may contain ?. No glob expansion for that, please
set -o noglob

# Expand each mirror://<site>/<path> entry in $urls into one concrete
# URL per mirror of <site>; plain URLs pass through unchanged.
urls2=
for url in $urls; do
    if test "${url:0:9}" != "mirror://"; then
        urls2="$urls2 $url"
    else
        # Split "<site>/<path>" on the first slash only: replace it
        # with a space, round-trip through the temp file `split`, and
        # read the two fields back.
        url2="${url:9}"; echo "${url2/\// }" > split; read site fileName < split
        #varName="mirror_$site"
        varName="$site" # !!! danger of name clash, fix this
        # ${!varName} is indirect expansion: the value of the shell
        # variable named after the site (set by $mirrorsFile above).
        if test -z "${!varName}"; then
            echo "warning: unknown mirror:// site \`$site'"
        else
            mirrors=${!varName}

            # Allow command-line override by setting NIX_MIRRORS_$site.
            varName="NIX_MIRRORS_$site"
            if test -n "${!varName}"; then mirrors="${!varName}"; fi

            for url3 in $mirrors; do
                urls2="$urls2 $url3$fileName";
            done
        fi
    fi
done
urls="$urls2"

# Restore globbing settings
set +o noglob
2008-07-23 16:04:10 +00:00
|
|
|
# Introspection mode: just write the fully expanded URL list to $out
# and stop, without downloading anything.
if test -n "$showURLs"; then
    # Quote the redirection target so an output path containing
    # whitespace cannot be word-split (the original left $out unquoted).
    echo "$urls" > "$out"
    exit 0
fi
2007-08-27 12:44:01 +00:00
|
|
|
# When preferHashedMirrors is set, try the content-addressed mirrors
# before any of the regular URLs.
if test -n "$preferHashedMirrors"; then
    tryHashedMirrors
fi
2009-05-08 17:00:33 +00:00
|
|
|
# URL list may contain ?. No glob expansion for that, please
set -o noglob

# Main loop: try each candidate URL in order.  tryDownload sets the
# global `success`; finish() exits the script on the first hit.
success=
for url in $urls; do
    tryDownload "$url"
    if test -n "$success"; then finish; fi
done

# Restore globbing settings
set +o noglob
2007-08-27 12:44:01 +00:00
|
|
|
# Fall back to the hashed mirrors last, unless they were already
# tried first above.
if test -z "$preferHashedMirrors"; then
    tryHashedMirrors
fi
2005-02-22 15:23:56 +00:00
|
|
|
|
2007-08-23 15:22:30 +00:00
|
|
|
# Every source failed: report on stderr (it is a diagnostic, not
# build output) and fail the build.
echo "error: cannot download $name from any mirror" >&2
exit 1