Merge pull request #11677 from DeterminateSystems/fix-s3-crash

builtins.fetchurl: Fix segfault on s3:// URLs
Eelco Dolstra 2024-10-14 13:55:49 +02:00 committed by GitHub
commit 71c2d82302
4 changed files with 11 additions and 0 deletions
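
Why this crashed: the hunks below indicate that the s3:// code path in the file transfer layer never populated FileTransferResult::urls, while downloadFile() reads *res.urls.rbegin(); dereferencing rbegin() of an empty std::vector is undefined behavior and segfaults in practice. A minimal standalone sketch of that failure mode (not Nix code):

    #include <cassert>
    #include <string>
    #include <vector>

    int main()
    {
        std::vector<std::string> urls;        // stands in for FileTransferResult::urls

        // urls.rbegin() == urls.rend() here, so dereferencing it is
        // undefined behavior -- typically a segfault:
        // auto last = *urls.rbegin();

        // The fix: make sure the producer records at least one URL ...
        urls.push_back("s3://my-cache/nix-cache-info");
        // ... and assert the invariant where it is relied upon:
        assert(!urls.empty());
        auto last = *urls.rbegin();           // well-defined now
        return last.empty() ? 1 : 0;
    }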

@@ -90,6 +90,7 @@ DownloadFileResult downloadFile(
     /* Cache metadata for all URLs in the redirect chain. */
     for (auto & url : res.urls) {
         key.second.insert_or_assign("url", url);
+        assert(!res.urls.empty());
         infoAttrs.insert_or_assign("url", *res.urls.rbegin());
         getCache()->upsert(key, *store, infoAttrs, *storePath);
     }
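
Note that because the new assert sits inside the range-for over res.urls, it can only run when the vector is non-empty; it documents the invariant behind *res.urls.rbegin() rather than guarding the crashing path itself, which is closed by the res.urls.push_back(request.uri) change in the next diff.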

@@ -759,12 +759,17 @@ struct curlFileTransfer : public FileTransfer
             S3Helper s3Helper(profile, region, scheme, endpoint);

+            Activity act(*logger, lvlTalkative, actFileTransfer,
+                fmt("downloading '%s'", request.uri),
+                {request.uri}, request.parentAct);
+
             // FIXME: implement ETag
             auto s3Res = s3Helper.getObject(bucketName, key);

             FileTransferResult res;
             if (!s3Res.data)
                 throw FileTransferError(NotFound, "S3 object '%s' does not exist", request.uri);
             res.data = std::move(*s3Res.data);
+            res.urls.push_back(request.uri);
             callback(std::move(res));
 #else
             throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
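
The single line res.urls.push_back(request.uri) is the substantive fix: the S3 branch now records the request URI in res.urls, matching what the HTTP branch gets from its redirect tracking, so downstream readers of the URL chain never see an empty vector. The added Activity also gives s3:// downloads the same "downloading ..." progress reporting as other transfers. A hedged sketch of the consumer-side pattern this protects (the helper name is made up for illustration):

    #include <cassert>
    #include <string>
    #include <vector>

    // 'urls' is assumed to hold the request URL plus any redirects, in order.
    std::string effectiveUrl(const std::vector<std::string> & urls)
    {
        assert(!urls.empty());   // the same invariant asserted in the first hunk
        return urls.back();      // last entry = URL after following redirects
    }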

@@ -9,6 +9,7 @@
 #include "globals.hh"
 #include "compression.hh"
 #include "filetransfer.hh"
+#include "signals.hh"

 #include <aws/core/Aws.h>
 #include <aws/core/VersionConfig.h>

@@ -117,6 +118,7 @@ class RetryStrategy : public Aws::Client::DefaultRetryStrategy
 {
     bool ShouldRetry(const Aws::Client::AWSError<Aws::Client::CoreErrors>& error, long attemptedRetries) const override
     {
+        checkInterrupt();
         auto retry = Aws::Client::DefaultRetryStrategy::ShouldRetry(error, attemptedRetries);
         if (retry)
             printError("AWS error '%s' (%s), will retry in %d ms",
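
checkInterrupt() is Nix's cooperative cancellation check; calling it at the top of ShouldRetry lets Ctrl-C abort an S3 transfer between retry attempts instead of the client sleeping through the whole backoff schedule. The general shape of the pattern as a standalone sketch (assumed semantics: the check throws when an interrupt is pending; tryOnce and the backoff are stand-ins):

    #include <atomic>
    #include <chrono>
    #include <functional>
    #include <stdexcept>
    #include <thread>

    std::atomic<bool> interrupted{false};    // set from a signal handler

    void checkInterruptSketch()
    {
        // Throw to unwind out of whatever loop the thread is stuck in.
        if (interrupted) throw std::runtime_error("interrupted by the user");
    }

    void retryWithBackoff(const std::function<bool()> & tryOnce, int maxAttempts)
    {
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            checkInterruptSketch();          // stay responsive between attempts
            if (tryOnce()) return;
            std::this_thread::sleep_for(std::chrono::milliseconds(100 << attempt));
        }
        throw std::runtime_error("all retries failed");
    }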

@@ -51,6 +51,9 @@ in {
         server.succeed("${env} nix copy --to '${storeUrl}' ${pkgA}")

+        # Test fetchurl on s3:// URLs while we're at it.
+        client.succeed("${env} nix eval --impure --expr 'builtins.fetchurl { name = \"foo\"; url = \"s3://my-cache/nix-cache-info?endpoint=http://server:9000&region=eu-west-1\"; }'")
+
         # Copy a package from the binary cache.
         client.fail("nix path-info ${pkgA}")
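
The regression test evaluates builtins.fetchurl against an s3:// URL end to end inside the existing S3 binary cache NixOS test; before this fix that evaluation crashed the client. The --impure flag is needed because the fetchurl call supplies no expected hash, which pure evaluation would reject.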