Mirror of https://github.com/NixOS/nixpkgs.git (synced 2025-02-07 12:44:20 +00:00)
Commit 28e280ab56
@@ -47,11 +47,11 @@ let
 in
 stdenv.mkDerivation (finalAttrs: {
   pname = "go";
-  version = "1.22.5";
+  version = "1.22.6";

   src = fetchurl {
     url = "https://go.dev/dl/go${finalAttrs.version}.src.tar.gz";
-    hash = "sha256-rJxyPyJJaa7mJLw0/TTJ4T8qIS11xxyAfeZEu0bhEvY=";
+    hash = "sha256-nkjZnVGYgleZF9gYnBfpjDc84lq667mHcuKScIiZKlE=";
   };

   strictDeps = true;
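Each version bump in this commit (Go here, and OpenLDAP, Django, and PostgreSQL further down) also swaps the source hash for the new tarball. The "sha256-..." strings are SRI hashes: the raw SHA-256 digest of the file, base64-encoded and prefixed with "sha256-". A small, hypothetical Python helper (not part of this commit) that reproduces such a hash from an already-downloaded tarball:

import base64
import hashlib

def sri_sha256(path: str) -> str:
    """Return the Nix-style SRI hash ("sha256-<base64 digest>") of a file."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Read in chunks so large tarballs don't need to fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return "sha256-" + base64.b64encode(digest.digest()).decode("ascii")

# Example (hypothetical local file name):
# print(sri_sha256("go1.22.6.src.tar.gz"))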
@@ -2,6 +2,7 @@
 , lib
 , stdenv
 , fetchurl
+, fetchpatch
 , atk
 , buildPackages
 , cairo
@@ -63,6 +64,12 @@ stdenv.mkDerivation (finalAttrs: {
   patches = [
     ./patches/2.0-immodules.cache.patch
     ./patches/gtk2-theme-paths.patch
+    (fetchpatch {
+      # https://gitlab.gnome.org/GNOME/gtk/-/issues/6786
+      name = "CVE-2024-6655.patch";
+      url = "https://gitlab.gnome.org/GNOME/gtk/-/commit/3bbf0b6176d42836d23c36a6ac410e807ec0a7a7.patch";
+      hash = "sha256-mstOPk9NNpUwScrdEbvGhmAv8jlds3SBdj53T0q33vM=";
+    })
   ] ++ lib.optionals stdenv.isDarwin [
     ./patches/2.0-gnome_bugzilla_557780_306776_freeciv_darwin.patch
     ./patches/2.0-darwin-x11.patch
@@ -17,11 +17,11 @@

 stdenv.mkDerivation rec {
   pname = "openldap";
-  version = "2.6.7";
+  version = "2.6.8";

   src = fetchurl {
     url = "https://www.openldap.org/software/download/OpenLDAP/openldap-release/${pname}-${version}.tgz";
-    hash = "sha256-zXdfYlyUTteKPaGKA7A7CO6nPIqryXtBuzNumhCVSTA=";
+    hash = "sha256-SJaTI+lOO+OwPGoTKULcun741UXyrTVAFwkBn2lsPE4=";
   };

   # TODO: separate "out" and "bin"
@@ -44,7 +44,7 @@

 buildPythonPackage rec {
   pname = "django";
-  version = "4.2.14";
+  version = "4.2.15";
   format = "pyproject";

   disabled = pythonOlder "3.8";
@@ -53,7 +53,7 @@ buildPythonPackage rec {
     owner = "django";
     repo = "django";
     rev = "refs/tags/${version}";
-    hash = "sha256-Nt+dVIikfe7gJY6/qJzkolkXeSFNBCbCEMSaKYmvCz4=";
+    hash = "sha256-SWENMUsTgP3X3EvFiTgpKCZO0/KaZ1x1stSyp2kM/P4=";
   };

   patches =
pkgs/development/python-modules/twisted/CVE-2024-41671.patch (new file, 250 lines)
@@ -0,0 +1,250 @@
From 1cc35b0189eea0687da4d72fbfd187305b5022ab Mon Sep 17 00:00:00 2001
From: Adi Roiban <adiroiban@gmail.com>
Date: Mon, 29 Jul 2024 14:27:23 +0100
Subject: [PATCH 1/2] Merge commit from fork

Address GHSA-c8m8-j448-xjx7
---
 src/twisted/web/http.py           |  21 +++--
 src/twisted/web/test/test_http.py | 126 ++++++++++++++++++++++++++----
 2 files changed, 126 insertions(+), 21 deletions(-)

diff --git a/src/twisted/web/http.py b/src/twisted/web/http.py
index 1c598380ac..3b784f5e3c 100644
--- a/src/twisted/web/http.py
+++ b/src/twisted/web/http.py
@@ -2000,16 +2000,21 @@ class _ChunkedTransferDecoder:
         @returns: C{False}, as there is either insufficient data to continue,
             or no data remains.
         """
-        if (
-            self._receivedTrailerHeadersSize + len(self._buffer)
-            > self._maxTrailerHeadersSize
-        ):
-            raise _MalformedChunkedDataError("Trailer headers data is too long.")
-
         eolIndex = self._buffer.find(b"\r\n", self._start)

         if eolIndex == -1:
             # Still no end of network line marker found.
+            #
+            # Check if we've run up against the trailer size limit: if the next
+            # read contains the terminating CRLF then we'll have this many bytes
+            # of trailers (including the CRLFs).
+            minTrailerSize = (
+                self._receivedTrailerHeadersSize
+                + len(self._buffer)
+                + (1 if self._buffer.endswith(b"\r") else 2)
+            )
+            if minTrailerSize > self._maxTrailerHeadersSize:
+                raise _MalformedChunkedDataError("Trailer headers data is too long.")
             # Continue processing more data.
             return False

@@ -2019,6 +2024,8 @@ class _ChunkedTransferDecoder:
         del self._buffer[0 : eolIndex + 2]
         self._start = 0
         self._receivedTrailerHeadersSize += eolIndex + 2
+        if self._receivedTrailerHeadersSize > self._maxTrailerHeadersSize:
+            raise _MalformedChunkedDataError("Trailer headers data is too long.")
         return True

     # eolIndex in this part of code is equal to 0
@@ -2342,8 +2349,8 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):
         self.__header = line

     def _finishRequestBody(self, data):
-        self.allContentReceived()
         self._dataBuffer.append(data)
+        self.allContentReceived()

     def _maybeChooseTransferDecoder(self, header, data):
         """
diff --git a/src/twisted/web/test/test_http.py b/src/twisted/web/test/test_http.py
index 33d0a49fca..815854bccb 100644
--- a/src/twisted/web/test/test_http.py
+++ b/src/twisted/web/test/test_http.py
@@ -135,7 +135,7 @@ class DummyHTTPHandler(http.Request):
         data = self.content.read()
         length = self.getHeader(b"content-length")
         if length is None:
-            length = networkString(str(length))
+            length = str(length).encode()
         request = b"'''\n" + length + b"\n" + data + b"'''\n"
         self.setResponseCode(200)
         self.setHeader(b"Request", self.uri)
@@ -563,17 +563,23 @@ class HTTP0_9Tests(HTTP1_0Tests):

 class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
     """
-    Tests that multiple pipelined requests with bodies are correctly buffered.
+    Pipelined requests get buffered and executed in the order received,
+    not processed in parallel.
     """

     requests = (
         b"POST / HTTP/1.1\r\n"
         b"Content-Length: 10\r\n"
         b"\r\n"
-        b"0123456789POST / HTTP/1.1\r\n"
-        b"Content-Length: 10\r\n"
-        b"\r\n"
         b"0123456789"
+        # Chunk encoded request.
+        b"POST / HTTP/1.1\r\n"
+        b"Transfer-Encoding: chunked\r\n"
+        b"\r\n"
+        b"a\r\n"
+        b"0123456789\r\n"
+        b"0\r\n"
+        b"\r\n"
     )

     expectedResponses = [
@@ -590,14 +596,16 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
             b"Request: /",
             b"Command: POST",
             b"Version: HTTP/1.1",
-            b"Content-Length: 21",
-            b"'''\n10\n0123456789'''\n",
+            b"Content-Length: 23",
+            b"'''\nNone\n0123456789'''\n",
         ),
     ]

-    def test_noPipelining(self):
+    def test_stepwiseTinyTube(self):
         """
-        Test that pipelined requests get buffered, not processed in parallel.
+        Imitate a slow connection that delivers one byte at a time.
+        The request handler (L{DelayedHTTPHandler}) is puppeted to
+        step through the handling of each request.
         """
         b = StringTransport()
         a = http.HTTPChannel()
@@ -606,10 +614,9 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
         # one byte at a time, to stress it.
         for byte in iterbytes(self.requests):
             a.dataReceived(byte)
-        value = b.value()

         # So far only one request should have been dispatched.
-        self.assertEqual(value, b"")
+        self.assertEqual(b.value(), b"")
         self.assertEqual(1, len(a.requests))

         # Now, process each request one at a time.
@@ -618,8 +625,95 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
             request = a.requests[0].original
             request.delayedProcess()

-        value = b.value()
-        self.assertResponseEquals(value, self.expectedResponses)
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_stepwiseDumpTruck(self):
+        """
+        Imitate a fast connection where several pipelined
+        requests arrive in a single read. The request handler
+        (L{DelayedHTTPHandler}) is puppeted to step through the
+        handling of each request.
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DelayedHTTPHandlerProxy
+        a.makeConnection(b)
+
+        a.dataReceived(self.requests)
+
+        # So far only one request should have been dispatched.
+        self.assertEqual(b.value(), b"")
+        self.assertEqual(1, len(a.requests))
+
+        # Now, process each request one at a time.
+        while a.requests:
+            self.assertEqual(1, len(a.requests))
+            request = a.requests[0].original
+            request.delayedProcess()
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateTinyTube(self):
+        """
+        Imitate a slow connection that delivers one byte at a time.
+
+        (L{DummyHTTPHandler}) immediately responds, but no more
+        than one
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy  # "sync"
+        a.makeConnection(b)
+
+        # one byte at a time, to stress it.
+        for byte in iterbytes(self.requests):
+            a.dataReceived(byte)
+            # There is never more than one request dispatched at a time:
+            self.assertLessEqual(len(a.requests), 1)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateDumpTruck(self):
+        """
+        Imitate a fast connection where several pipelined
+        requests arrive in a single read. The request handler
+        (L{DummyHTTPHandler}) immediately responds.
+
+        This doesn't check the at-most-one pending request
+        invariant but exercises otherwise uncovered code paths.
+        See GHSA-c8m8-j448-xjx7.
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy
+        a.makeConnection(b)
+
+        # All bytes at once to ensure there's stuff to buffer.
+        a.dataReceived(self.requests)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses)
+
+    def test_immediateABiggerTruck(self):
+        """
+        Imitate a fast connection where a so many pipelined
+        requests arrive in a single read that backpressure is indicated.
+        The request handler (L{DummyHTTPHandler}) immediately responds.
+
+        This doesn't check the at-most-one pending request
+        invariant but exercises otherwise uncovered code paths.
+        See GHSA-c8m8-j448-xjx7.
+
+        @see: L{http.HTTPChannel._optimisticEagerReadSize}
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandlerProxy
+        a.makeConnection(b)
+
+        overLimitCount = a._optimisticEagerReadSize // len(self.requests) * 10
+        a.dataReceived(self.requests * overLimitCount)
+
+        self.assertResponseEquals(b.value(), self.expectedResponses * overLimitCount)

     def test_pipeliningReadLimit(self):
         """
@@ -1522,7 +1616,11 @@ class ChunkedTransferEncodingTests(unittest.TestCase):
             lambda b: None,  # pragma: nocov
         )
         p._maxTrailerHeadersSize = 10
-        p.dataReceived(b"3\r\nabc\r\n0\r\n0123456789")
+        # 9 bytes are received so far, in 2 packets.
+        # For now, all is ok.
+        p.dataReceived(b"3\r\nabc\r\n0\r\n01234567")
+        p.dataReceived(b"\r")
+        # Once the 10th byte is received, the processing fails.
         self.assertRaises(
             http._MalformedChunkedDataError,
             p.dataReceived,
--
2.45.2
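One part of the CVE-2024-41671 fix above moves the trailer-size check into the branch that is still waiting for a CRLF, computing a minTrailerSize so an overlong trailer can no longer slip past the limit in a single read. A rough standalone sketch of that guard (simplified, hypothetical names; not Twisted's actual _ChunkedTransferDecoder):

class TrailerLimitExceeded(Exception):
    """Raised when buffered trailer data would exceed the configured limit."""

def check_trailer_limit(received_size: int, buffer: bytes, max_size: int) -> None:
    # Only relevant while no full CRLF-terminated trailer line is buffered yet.
    if buffer.find(b"\r\n") == -1:
        # Smallest possible total trailer size once the terminating CRLF
        # arrives (mirrors the minTrailerSize expression in the hunk above).
        min_trailer_size = received_size + len(buffer) + (1 if buffer.endswith(b"\r") else 2)
        if min_trailer_size > max_size:
            raise TrailerLimitExceeded("Trailer headers data is too long.")

# With a 10-byte limit: 8 buffered bytes could still fit (8 + 2 == 10) ...
check_trailer_limit(0, b"01234567", 10)
# ... but 9 bytes without a trailing "\r" cannot (9 + 2 == 11 > 10).
try:
    check_trailer_limit(0, b"012345678", 10)
except TrailerLimitExceeded as exc:
    print(exc)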
pkgs/development/python-modules/twisted/CVE-2024-41810.patch (new file, 84 lines)
@@ -0,0 +1,84 @@
From 6df3fd0be944b763046829edf5fd46b6b4a42303 Mon Sep 17 00:00:00 2001
From: Adi Roiban <adiroiban@gmail.com>
Date: Mon, 29 Jul 2024 14:28:03 +0100
Subject: [PATCH 2/2] Merge commit from fork

Added HTML output encoding the "URL" parameter of the "redirectTo" function
---
 src/twisted/web/_template_util.py |  2 +-
 src/twisted/web/test/test_util.py | 39 ++++++++++++++++++++++++++++++-
 2 files changed, 39 insertions(+), 2 deletions(-)

diff --git a/src/twisted/web/_template_util.py b/src/twisted/web/_template_util.py
index 230c33f3e8..7266079ac2 100644
--- a/src/twisted/web/_template_util.py
+++ b/src/twisted/web/_template_util.py
@@ -92,7 +92,7 @@ def redirectTo(URL: bytes, request: IRequest) -> bytes:
     </body>
 </html>
 """ % {
-        b"url": URL
+        b"url": escape(URL.decode("utf-8")).encode("utf-8")
     }
     return content

diff --git a/src/twisted/web/test/test_util.py b/src/twisted/web/test/test_util.py
index 1e763009ca..9847dcbb8b 100644
--- a/src/twisted/web/test/test_util.py
+++ b/src/twisted/web/test/test_util.py
@@ -5,7 +5,6 @@
 Tests for L{twisted.web.util}.
 """

-
 import gc

 from twisted.internet import defer
@@ -64,6 +63,44 @@ class RedirectToTests(TestCase):
         targetURL = "http://target.example.com/4321"
         self.assertRaises(TypeError, redirectTo, targetURL, request)

+    def test_legitimateRedirect(self):
+        """
+        Legitimate URLs are fully interpolated in the `redirectTo` response body without transformation
+        """
+        request = DummyRequest([b""])
+        html = redirectTo(b"https://twisted.org/", request)
+        expected = b"""
+<html>
+    <head>
+        <meta http-equiv=\"refresh\" content=\"0;URL=https://twisted.org/\">
+    </head>
+    <body bgcolor=\"#FFFFFF\" text=\"#000000\">
+        <a href=\"https://twisted.org/\">click here</a>
+    </body>
+</html>
+"""
+        self.assertEqual(html, expected)
+
+    def test_maliciousRedirect(self):
+        """
+        Malicious URLs are HTML-escaped before interpolating them in the `redirectTo` response body
+        """
+        request = DummyRequest([b""])
+        html = redirectTo(
+            b'https://twisted.org/"><script>alert(document.location)</script>', request
+        )
+        expected = b"""
+<html>
+    <head>
+        <meta http-equiv=\"refresh\" content=\"0;URL=https://twisted.org/&quot;&gt;&lt;script&gt;alert(document.location)&lt;/script&gt;\">
+    </head>
+    <body bgcolor=\"#FFFFFF\" text=\"#000000\">
+        <a href=\"https://twisted.org/&quot;&gt;&lt;script&gt;alert(document.location)&lt;/script&gt;\">click here</a>
+    </body>
+</html>
+"""
+        self.assertEqual(html, expected)
+

 class ParentRedirectTests(SynchronousTestCase):
     """
--
2.45.2
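The one-line change to _template_util.py above HTML-escapes the redirect target before interpolating it into the meta-refresh page. A minimal standard-library illustration of the same idea (hypothetical template and function names; not Twisted's actual redirectTo):

from html import escape

META_REFRESH = """\
<html>
    <head>
        <meta http-equiv="refresh" content="0;URL={url}">
    </head>
    <body>
        <a href="{url}">click here</a>
    </body>
</html>
"""

def redirect_page(url: bytes) -> bytes:
    # Escape <, >, & and quotes so an attacker-controlled URL cannot break
    # out of the attribute and inject markup (the CVE-2024-41810 issue).
    safe = escape(url.decode("utf-8"))
    return META_REFRESH.format(url=safe).encode("utf-8")

print(redirect_page(b'https://example.org/"><script>alert(1)</script>').decode())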
@@ -73,6 +73,12 @@ buildPythonPackage rec {
       url = "https://github.com/mweinelt/twisted/commit/e69e652de671aac0abf5c7e6c662fc5172758c5a.patch";
       hash = "sha256-LmvKUTViZoY/TPBmSlx4S9FbJNZfB5cxzn/YcciDmoI=";
     })
+
+    # https://github.com/twisted/twisted/security/advisories/GHSA-cf56-g6w6-pqq2
+    ./CVE-2024-41671.patch
+
+    # https://github.com/twisted/twisted/security/advisories/GHSA-c8m8-j448-xjx7
+    ./CVE-2024-41810.patch
   ];

   __darwinAllowLocalNetworking = true;
pkgs/misc/cups/2.4.8-CVE-2024-35235.patch (new file, 86 lines)
@@ -0,0 +1,86 @@
Based on upstream ff1f8a623e090dee8a8aadf12a6a4b25efac143d, adjusted to
apply to 2.4.8

diff --git a/cups/http-addr.c b/cups/http-addr.c
index 6aeeb8074..73a6b2f37 100644
--- a/cups/http-addr.c
+++ b/cups/http-addr.c
@@ -206,27 +206,30 @@ httpAddrListen(http_addr_t *addr, /* I - Address to bind to */
    * Remove any existing domain socket file...
    */

-    unlink(addr->un.sun_path);
-
-   /*
-    * Save the current umask and set it to 0 so that all users can access
-    * the domain socket...
-    */
-
-    mask = umask(0);
+    // Remove any existing domain socket file...
+    if ((status = unlink(addr->un.sun_path)) < 0)
+    {
+      DEBUG_printf(("1httpAddrListen: Unable to unlink \"%s\": %s", addr->un.sun_path, strerror(errno)));

-   /*
-    * Bind the domain socket...
-    */
+      if (errno == ENOENT)
+        status = 0;
+    }

-    status = bind(fd, (struct sockaddr *)addr, (socklen_t)httpAddrLength(addr));
+    if (!status)
+    {
+      // Save the current umask and set it to 0 so that all users can access
+      // the domain socket...
+      mask = umask(0);

-   /*
-    * Restore the umask and fix permissions...
-    */
+      // Bind the domain socket...
+      if ((status = bind(fd, (struct sockaddr *)addr, (socklen_t)httpAddrLength(addr))) < 0)
+      {
+        DEBUG_printf(("1httpAddrListen: Unable to bind domain socket \"%s\": %s", addr->un.sun_path, strerror(errno)));
+      }

-    umask(mask);
-    chmod(addr->un.sun_path, 0140777);
+      // Restore the umask...
+      umask(mask);
+    }
   }
   else
 #endif /* AF_LOCAL */
diff --git a/scheduler/conf.c b/scheduler/conf.c
index defca78aa..ebf8ca8cc 100644
--- a/scheduler/conf.c
+++ b/scheduler/conf.c
@@ -3083,6 +3083,26 @@ read_cupsd_conf(cups_file_t *fp) /* I - File to read from */
     cupsd_listener_t *lis; /* New listeners array */


+   /*
+    * If we are launched on-demand, do not use domain sockets from the config
+    * file. Also check that the domain socket path is not too long...
+    */
+
+#ifdef HAVE_ONDEMAND
+    if (*value == '/' && OnDemand)
+    {
+      if (strcmp(value, CUPS_DEFAULT_DOMAINSOCKET))
+        cupsdLogMessage(CUPSD_LOG_INFO, "Ignoring %s address %s at line %d - only using domain socket from launchd/systemd.", line, value, linenum);
+      continue;
+    }
+#endif // HAVE_ONDEMAND
+
+    if (*value == '/' && strlen(value) > (sizeof(addr->addr.un.sun_path) - 1))
+    {
+      cupsdLogMessage(CUPSD_LOG_INFO, "Ignoring %s address %s at line %d - too long.", line, value, linenum);
+      continue;
+    }
+
     /*
      * Get the address list...
      */
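The scheduler/conf.c hunk above makes cupsd ignore Listen directives whose domain-socket path would not fit into sun_path. The same limit is easy to observe from Python; a small sketch (hypothetical path; the exact limit is platform-dependent, commonly around 108 bytes on Linux):

import socket

s = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
try:
    # A path far longer than sun_path can hold; bind() rejects it up front.
    s.bind("/tmp/" + "x" * 200 + ".sock")
except OSError as exc:
    print("rejected:", exc)  # e.g. "AF_UNIX path too long"
finally:
    s.close()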
@@ -32,6 +32,10 @@ stdenv.mkDerivation rec {

   outputs = [ "out" "lib" "dev" "man" ];

+  patches = [
+    ./2.4.8-CVE-2024-35235.patch
+  ];
+
   postPatch = ''
     substituteInPlace cups/testfile.c \
       --replace 'cupsFileFind("cat", "/bin' 'cupsFileFind("cat", "${coreutils}/bin'
@@ -1,4 +1,4 @@
 import ./generic.nix {
-  version = "12.19";
-  hash = "sha256-YX495Swi6CL09X0B1bIkBQPhmKnsyvWYqFEQm9GOb7s=";
+  version = "12.20";
+  hash = "sha256-LVQ68wCf7H/VrzX3pwyVCF0+72tQjlF6qUk+mbFenqk=";
 }
@@ -1,6 +1,6 @@
 import ./generic.nix {
-  version = "13.15";
-  hash = "sha256-Qu3UFURtM7jCQr520a0FdTGyJksuhpOTObcHXG5OySU=";
+  version = "13.16";
+  hash = "sha256-ycu7YSnwIyggSCgGa7N4XACoXIyo/TKcKopTwfXNiGU=";
   muslPatches = {
     disable-test-collate-icu-utf8 = {
       url = "https://git.alpinelinux.org/aports/plain/main/postgresql13/disable-test-collate.icu.utf8.patch?id=69faa146ec9fff3b981511068f17f9e629d4688b";
@@ -1,6 +1,6 @@
 import ./generic.nix {
-  version = "14.12";
-  hash = "sha256-YRjQj53cwb2Dzyt8x007WDvc7C835iRaisADuPqoCSM=";
+  version = "14.13";
+  hash = "sha256-Wao8S0lasmqexp860KAijFHw/m+s82NN+tTRGX1hOlY=";
   muslPatches = {
     disable-test-collate-icu-utf8 = {
       url = "https://git.alpinelinux.org/aports/plain/main/postgresql14/disable-test-collate.icu.utf8.patch?id=56999e6d0265ceff5c5239f85fdd33e146f06cb7";
@@ -1,4 +1,4 @@
 import ./generic.nix {
-  version = "15.7";
-  hash = "sha256-pG/klIWrY4Xjnau7tlT10wSSBvds1pXiJCaHKVIJmPc=";
+  version = "15.8";
+  hash = "sha256-RANRX5pp7rPv68mPMLjGlhIr/fiV6Ss7I/W452nty2o=";
 }
@@ -1,4 +1,4 @@
 import ./generic.nix {
-  version = "16.3";
-  hash = "sha256-Mxlj1dPcTK9CFqBJ+kC2bWvLjHMGFYWUEblRh2TmBYU=";
+  version = "16.4";
+  hash = "sha256-lxdm1kWqc+k7nvTjvkQgG09FtUdwlbBJElQD+fM4bW8=";
 }
@@ -19,7 +19,7 @@ let
 , version, hash, muslPatches ? {}

   # for tests
-, testers, nixosTests
+, testers

   # JIT
 , jitSupport
@@ -119,7 +119,6 @@ let
       src = ./patches/locale-binary-path.patch;
       locale = "${if stdenv.isDarwin then darwin.adv_cmds else lib.getBin stdenv.cc.libc}/bin/locale";
     })

   ] ++ lib.optionals stdenv'.hostPlatform.isMusl (
     # Using fetchurl instead of fetchpatch on purpose: https://github.com/NixOS/nixpkgs/issues/240141
     map fetchurl (lib.attrValues muslPatches)
@@ -56,15 +56,12 @@ stdenv.mkDerivation rec {

   patches = upstreamPatches ++ [
     ./pgrp-pipe-5.patch
-    (fetchurl {
-      name = "fix-static.patch";
-      url = "https://cgit.freebsd.org/ports/plain/shells/bash/files/patch-configure?id=3e147a1f594751a68fea00a28090d0792bee0b51";
-      sha256 = "XHFMQ6eXTReNoywdETyrfQEv1rKF8+XFbQZP4YoVKFk=";
-    })
     # Apply parallel build fix pending upstream inclusion:
     # https://savannah.gnu.org/patch/index.php?10373
     # Had to fetch manually to workaround -p0 default.
     ./parallel.patch
+    # Fix `pop_var_context: head of shell_variables not a function context`.
+    ./fix-pop-var-context-error.patch
   ];

   configureFlags = [
@@ -27,4 +27,10 @@ patch: [
   (patch "024" "1hq23djqbr7s9y2324jq9mxr5bwdkmgizn3zgpchbsqp054k85cp")
   (patch "025" "0x9hc4silzl4d3zw4p43i5dm7w86k50j47f87lracwfgwy3z8f2i")
   (patch "026" "1b1fhm1dsi67r8ip17s0xvx2qq31fsxc1g9n3r931dd0k9a1zvln")
+  (patch "027" "0fdbhvs9dkf4knncifh98a76q4gylhyvfrffq5p9q3ag5q58jap1")
+  (patch "028" "1hdacd6sssjshmry1sscdnxxfb2r51bvdyghlfjaqgc9l85phhk0")
+  (patch "029" "11wrlb20w6v89b96krg0gwxipwhvrda6rq1y9f972m32gsrsqp0j")
+  (patch "030" "13v9fqgim082dmvkslsr0hs793yzhsij2s91mjswsfhj1qip7zy3")
+  (patch "031" "15d7rddj6spwc1fy997lxx6zvzq0zbxgf2h20mhi4wgp5nzbglf2")
+  (patch "032" "05ia6yf32hjprmyyxqawhgckxs3684ikfx8xg08zfgx9xkd7g73v")
 ]
pkgs/shells/bash/fix-pop-var-context-error.patch (new file, 17 lines)
@@ -0,0 +1,17 @@
Excerpted from <https://git.savannah.gnu.org/cgit/bash.git/commit/?h=devel&id=055a4552c901e43e6a6007f5bc664a33698dbf85>.

Original author: Chet Ramey <chet.ramey@case.edu>

--- variables.c
+++ variables.c
@@ -5413,7 +5413,9 @@ pop_var_context ()
   vcxt = shell_variables;
   if (vc_isfuncenv (vcxt) == 0)
     {
-      internal_error (_("pop_var_context: head of shell_variables not a function context"));
+      /* If we haven't flushed all of the local contexts already, flag an error */
+      if (shell_variables != global_variables || variable_context > 0)
+        internal_error (_("pop_var_context: head of shell_variables not a function context"));
       return;
     }