Merge pull request #335112 from dotlambda/python3Packages.incremental

python312Packages.twisted: 24.3.0 -> 24.7.0
Robert Schütz 2024-08-19 09:52:51 -07:00 committed by GitHub
commit e876aa2677
9 changed files with 73 additions and 361 deletions

@@ -34,6 +34,11 @@ buildPythonPackage rec {
hash = "sha256-1v58I9WOyyrp9n+qdvVeMZ3EObqP/06XCOZYS0nEvPU=";
};
postPatch = ''
substituteInPlace requirements_setup.txt \
--replace-fail "==" ">="
'';
build-system = [
incremental
setuptools
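
The postPatch hook above loosens the exact pins in requirements_setup.txt, presumably so that the newer incremental (24.7.2) still satisfies them. A minimal Python sketch of what the substitution amounts to; the file contents shown here are hypothetical, not taken from the package:

# Illustrative only: hypothetical requirements_setup.txt contents before the hook runs.
pinned = "incremental==22.10.0\nsetuptools==69.0.0\n"
# substituteInPlace --replace-fail "==" ">=" performs exactly this replacement
# (and fails the build if no "==" occurrence is found).
relaxed = pinned.replace("==", ">=")
print(relaxed)
# incremental>=22.10.0
# setuptools>=69.0.0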

@@ -6,6 +6,7 @@
fetchFromGitHub,
incremental,
pythonOlder,
pytest-asyncio,
pytestCheckHook,
setuptools,
}:
@@ -24,7 +25,15 @@ buildPythonPackage rec {
hash = "sha256-pN/F4Rdov06sm1yfJQEzmWyujWVeVU+bNGGkgnN4jYw=";
};
build-system = [ setuptools ];
postPatch = ''
substituteInPlace requirements_setup.txt \
--replace-fail "==" ">="
'';
build-system = [
incremental
setuptools
];
dependencies = [
aiohttp
@@ -33,14 +42,10 @@ buildPythonPackage rec {
nativeCheckInputs = [
aioresponses
pytest-asyncio
pytestCheckHook
];
disabledTests = [
# AssertionError, https://github.com/timmo001/aiolyric/issues/61
"test_priority"
];
pythonImportsCheck = [ "aiolyric" ];
meta = with lib; {

@@ -6,6 +6,7 @@
buildPythonPackage,
django,
fetchFromGitHub,
fetchpatch2,
hypothesis,
pytest-asyncio,
pytestCheckHook,
@@ -28,6 +29,15 @@ buildPythonPackage rec {
hash = "sha256-RAK2CaKKVmVIv1MBK+9xyADOrHq664MQOry4KaGTNCw=";
};
patches = [
# https://github.com/django/daphne/pull/526
(fetchpatch2 {
name = "fix-tests-with-Twisted-24.7.0.patch";
url = "https://github.com/django/daphne/commit/0370c7a0937011d5345b14d484ec171d3ae9f875.patch";
hash = "sha256-/3d2pRcEtGvINGHRQF3RZ8IVIETSZb6rhf+ZHUFSQQo=";
})
];
postPatch = ''
substituteInPlace setup.cfg \
--replace-fail "pytest-runner" ""

@@ -1,23 +1,36 @@
{
lib,
buildPythonPackage,
fetchPypi,
click,
fetchFromGitHub,
lib,
pythonOlder,
setuptools,
tomli,
twisted,
}:
let
incremental = buildPythonPackage rec {
pname = "incremental";
version = "22.10.0";
format = "setuptools";
version = "24.7.2";
pyproject = true;
src = fetchPypi {
inherit pname version;
hash = "sha256-kS/uteD34BiOb0IkHS9FAALhG7wJN8ZYZQRYVMJMC9A=";
src = fetchFromGitHub {
owner = "twisted";
repo = "incremental";
rev = "refs/tags/incremental-${version}";
hash = "sha256-5MlIKUaBUwLTet23Rjd2Opf5e54LcHuZDowcGon0lOE=";
};
propagatedBuildInputs = [ click ];
# From upstream's pyproject.toml:
# "Keep this aligned with the project dependencies."
build-system = dependencies;
dependencies = [ setuptools ] ++ lib.optionals (pythonOlder "3.11") [ tomli ];
optional-dependencies = {
scripts = [ click ];
};
# escape infinite recursion with twisted
doCheck = false;
@@ -36,11 +49,12 @@ let
pythonImportsCheck = [ "incremental" ];
meta = with lib; {
meta = {
changelog = "https://github.com/twisted/incremental/blob/${src.rev}/NEWS.rst";
homepage = "https://github.com/twisted/incremental";
description = "Incremental is a small library that versions your Python projects";
license = licenses.mit;
maintainers = [ ];
description = "Small library that versions your Python projects";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ dotlambda ];
};
};
in
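
incremental 24.7.2 switches to a pyproject-based build whose build requirements are the same list as its runtime dependencies, which is why build-system simply reuses dependencies above. For orientation, a small usage sketch of the library itself; nothing below comes from the diff and the project name is a placeholder:

# incremental provides a structured Version type that projects such as Twisted
# use for their version metadata.
from incremental import Version

v = Version("SomeProject", 24, 7, 2)          # placeholder project name
print(v.public())                             # "24.7.2"
print(v > Version("SomeProject", 24, 3, 0))   # True: versions of the same project compare numerically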

@@ -24,7 +24,15 @@ buildPythonPackage rec {
hash = "sha256-ZcTSf7UejEUqQo0qEXP3fWjZYRx0a3ZBNVkwS2dL3Yk=";
};
build-system = [ setuptools ];
postPatch = ''
substituteInPlace requirements_setup.txt \
--replace-fail "==" ">="
'';
build-system = [
incremental
setuptools
];
nativeBuildInputs = [ incremental ];

@@ -1,250 +0,0 @@
From 1cc35b0189eea0687da4d72fbfd187305b5022ab Mon Sep 17 00:00:00 2001
From: Adi Roiban <adiroiban@gmail.com>
Date: Mon, 29 Jul 2024 14:27:23 +0100
Subject: [PATCH 1/2] Merge commit from fork
Address GHSA-c8m8-j448-xjx7
---
src/twisted/web/http.py | 21 +++--
src/twisted/web/test/test_http.py | 126 ++++++++++++++++++++++++++----
2 files changed, 126 insertions(+), 21 deletions(-)
diff --git a/src/twisted/web/http.py b/src/twisted/web/http.py
index 1c598380ac..3b784f5e3c 100644
--- a/src/twisted/web/http.py
+++ b/src/twisted/web/http.py
@@ -2000,16 +2000,21 @@ class _ChunkedTransferDecoder:
@returns: C{False}, as there is either insufficient data to continue,
or no data remains.
"""
- if (
- self._receivedTrailerHeadersSize + len(self._buffer)
- > self._maxTrailerHeadersSize
- ):
- raise _MalformedChunkedDataError("Trailer headers data is too long.")
-
eolIndex = self._buffer.find(b"\r\n", self._start)
if eolIndex == -1:
# Still no end of network line marker found.
+ #
+ # Check if we've run up against the trailer size limit: if the next
+ # read contains the terminating CRLF then we'll have this many bytes
+ # of trailers (including the CRLFs).
+ minTrailerSize = (
+ self._receivedTrailerHeadersSize
+ + len(self._buffer)
+ + (1 if self._buffer.endswith(b"\r") else 2)
+ )
+ if minTrailerSize > self._maxTrailerHeadersSize:
+ raise _MalformedChunkedDataError("Trailer headers data is too long.")
# Continue processing more data.
return False
@@ -2019,6 +2024,8 @@ class _ChunkedTransferDecoder:
del self._buffer[0 : eolIndex + 2]
self._start = 0
self._receivedTrailerHeadersSize += eolIndex + 2
+ if self._receivedTrailerHeadersSize > self._maxTrailerHeadersSize:
+ raise _MalformedChunkedDataError("Trailer headers data is too long.")
return True
# eolIndex in this part of code is equal to 0
@@ -2342,8 +2349,8 @@ class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):
self.__header = line
def _finishRequestBody(self, data):
- self.allContentReceived()
self._dataBuffer.append(data)
+ self.allContentReceived()
def _maybeChooseTransferDecoder(self, header, data):
"""
diff --git a/src/twisted/web/test/test_http.py b/src/twisted/web/test/test_http.py
index 33d0a49fca..815854bccb 100644
--- a/src/twisted/web/test/test_http.py
+++ b/src/twisted/web/test/test_http.py
@@ -135,7 +135,7 @@ class DummyHTTPHandler(http.Request):
data = self.content.read()
length = self.getHeader(b"content-length")
if length is None:
- length = networkString(str(length))
+ length = str(length).encode()
request = b"'''\n" + length + b"\n" + data + b"'''\n"
self.setResponseCode(200)
self.setHeader(b"Request", self.uri)
@@ -563,17 +563,23 @@ class HTTP0_9Tests(HTTP1_0Tests):
class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
"""
- Tests that multiple pipelined requests with bodies are correctly buffered.
+ Pipelined requests get buffered and executed in the order received,
+ not processed in parallel.
"""
requests = (
b"POST / HTTP/1.1\r\n"
b"Content-Length: 10\r\n"
b"\r\n"
- b"0123456789POST / HTTP/1.1\r\n"
- b"Content-Length: 10\r\n"
- b"\r\n"
b"0123456789"
+ # Chunk encoded request.
+ b"POST / HTTP/1.1\r\n"
+ b"Transfer-Encoding: chunked\r\n"
+ b"\r\n"
+ b"a\r\n"
+ b"0123456789\r\n"
+ b"0\r\n"
+ b"\r\n"
)
expectedResponses = [
@@ -590,14 +596,16 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
b"Request: /",
b"Command: POST",
b"Version: HTTP/1.1",
- b"Content-Length: 21",
- b"'''\n10\n0123456789'''\n",
+ b"Content-Length: 23",
+ b"'''\nNone\n0123456789'''\n",
),
]
- def test_noPipelining(self):
+ def test_stepwiseTinyTube(self):
"""
- Test that pipelined requests get buffered, not processed in parallel.
+ Imitate a slow connection that delivers one byte at a time.
+ The request handler (L{DelayedHTTPHandler}) is puppeted to
+ step through the handling of each request.
"""
b = StringTransport()
a = http.HTTPChannel()
@@ -606,10 +614,9 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
# one byte at a time, to stress it.
for byte in iterbytes(self.requests):
a.dataReceived(byte)
- value = b.value()
# So far only one request should have been dispatched.
- self.assertEqual(value, b"")
+ self.assertEqual(b.value(), b"")
self.assertEqual(1, len(a.requests))
# Now, process each request one at a time.
@@ -618,8 +625,95 @@ class PipeliningBodyTests(unittest.TestCase, ResponseTestMixin):
request = a.requests[0].original
request.delayedProcess()
- value = b.value()
- self.assertResponseEquals(value, self.expectedResponses)
+ self.assertResponseEquals(b.value(), self.expectedResponses)
+
+ def test_stepwiseDumpTruck(self):
+ """
+ Imitate a fast connection where several pipelined
+ requests arrive in a single read. The request handler
+ (L{DelayedHTTPHandler}) is puppeted to step through the
+ handling of each request.
+ """
+ b = StringTransport()
+ a = http.HTTPChannel()
+ a.requestFactory = DelayedHTTPHandlerProxy
+ a.makeConnection(b)
+
+ a.dataReceived(self.requests)
+
+ # So far only one request should have been dispatched.
+ self.assertEqual(b.value(), b"")
+ self.assertEqual(1, len(a.requests))
+
+ # Now, process each request one at a time.
+ while a.requests:
+ self.assertEqual(1, len(a.requests))
+ request = a.requests[0].original
+ request.delayedProcess()
+
+ self.assertResponseEquals(b.value(), self.expectedResponses)
+
+ def test_immediateTinyTube(self):
+ """
+ Imitate a slow connection that delivers one byte at a time.
+
+ (L{DummyHTTPHandler}) immediately responds, but no more
+ than one request is dispatched at a time.
+ """
+ b = StringTransport()
+ a = http.HTTPChannel()
+ a.requestFactory = DummyHTTPHandlerProxy # "sync"
+ a.makeConnection(b)
+
+ # one byte at a time, to stress it.
+ for byte in iterbytes(self.requests):
+ a.dataReceived(byte)
+ # There is never more than one request dispatched at a time:
+ self.assertLessEqual(len(a.requests), 1)
+
+ self.assertResponseEquals(b.value(), self.expectedResponses)
+
+ def test_immediateDumpTruck(self):
+ """
+ Imitate a fast connection where several pipelined
+ requests arrive in a single read. The request handler
+ (L{DummyHTTPHandler}) immediately responds.
+
+ This doesn't check the at-most-one pending request
+ invariant but exercises otherwise uncovered code paths.
+ See GHSA-c8m8-j448-xjx7.
+ """
+ b = StringTransport()
+ a = http.HTTPChannel()
+ a.requestFactory = DummyHTTPHandlerProxy
+ a.makeConnection(b)
+
+ # All bytes at once to ensure there's stuff to buffer.
+ a.dataReceived(self.requests)
+
+ self.assertResponseEquals(b.value(), self.expectedResponses)
+
+ def test_immediateABiggerTruck(self):
+ """
+ Imitate a fast connection where so many pipelined
+ requests arrive in a single read that backpressure is indicated.
+ The request handler (L{DummyHTTPHandler}) immediately responds.
+
+ This doesn't check the at-most-one pending request
+ invariant but exercises otherwise uncovered code paths.
+ See GHSA-c8m8-j448-xjx7.
+
+ @see: L{http.HTTPChannel._optimisticEagerReadSize}
+ """
+ b = StringTransport()
+ a = http.HTTPChannel()
+ a.requestFactory = DummyHTTPHandlerProxy
+ a.makeConnection(b)
+
+ overLimitCount = a._optimisticEagerReadSize // len(self.requests) * 10
+ a.dataReceived(self.requests * overLimitCount)
+
+ self.assertResponseEquals(b.value(), self.expectedResponses * overLimitCount)
def test_pipeliningReadLimit(self):
"""
@@ -1522,7 +1616,11 @@ class ChunkedTransferEncodingTests(unittest.TestCase):
lambda b: None, # pragma: nocov
)
p._maxTrailerHeadersSize = 10
- p.dataReceived(b"3\r\nabc\r\n0\r\n0123456789")
+ # 9 bytes are received so far, in 2 packets.
+ # For now, all is ok.
+ p.dataReceived(b"3\r\nabc\r\n0\r\n01234567")
+ p.dataReceived(b"\r")
+ # Once the 10th byte is received, the processing fails.
self.assertRaises(
http._MalformedChunkedDataError,
p.dataReceived,
--
2.45.2
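
The patch file deleted above carried the pipelining and chunked-transfer fixes that Twisted 24.7.0 now ships, which is why it can be dropped. Its key change makes _ChunkedTransferDecoder bound the trailer headers before a CRLF has been seen, treating a lone trailing CR as potentially half of the terminating CRLF. A standalone sketch of that arithmetic, re-derived here for illustration (this is not Twisted code, and the byte strings are made up):

def would_reject(received_so_far: int, buffer: bytes, max_size: int = 10) -> bool:
    # If the very next read supplied the terminating CRLF, the trailers would
    # still occupy at least this many bytes (CRLFs included).
    min_trailer_size = received_so_far + len(buffer) + (1 if buffer.endswith(b"\r") else 2)
    return min_trailer_size > max_size

print(would_reject(0, b"01234567"))     # False: 8 + 2 = 10, still within the limit
print(would_reject(0, b"01234567\r"))   # False: 9 + 1 = 10, the CR may be half of the final CRLF
print(would_reject(0, b"01234567\rX"))  # True: 10 + 2 = 12, the limit can no longer be met

This mirrors the adjusted test above: with _maxTrailerHeadersSize = 10, eight trailer bytes plus a lone CR are still accepted, and only the next byte makes processing fail.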

@@ -1,84 +0,0 @@
From 6df3fd0be944b763046829edf5fd46b6b4a42303 Mon Sep 17 00:00:00 2001
From: Adi Roiban <adiroiban@gmail.com>
Date: Mon, 29 Jul 2024 14:28:03 +0100
Subject: [PATCH 2/2] Merge commit from fork
Added HTML output encoding the "URL" parameter of the "redirectTo" function
---
src/twisted/web/_template_util.py | 2 +-
src/twisted/web/test/test_util.py | 39 ++++++++++++++++++++++++++++++-
2 files changed, 39 insertions(+), 2 deletions(-)
diff --git a/src/twisted/web/_template_util.py b/src/twisted/web/_template_util.py
index 230c33f3e8..7266079ac2 100644
--- a/src/twisted/web/_template_util.py
+++ b/src/twisted/web/_template_util.py
@@ -92,7 +92,7 @@ def redirectTo(URL: bytes, request: IRequest) -> bytes:
</body>
</html>
""" % {
- b"url": URL
+ b"url": escape(URL.decode("utf-8")).encode("utf-8")
}
return content
diff --git a/src/twisted/web/test/test_util.py b/src/twisted/web/test/test_util.py
index 1e763009ca..9847dcbb8b 100644
--- a/src/twisted/web/test/test_util.py
+++ b/src/twisted/web/test/test_util.py
@@ -5,7 +5,6 @@
Tests for L{twisted.web.util}.
"""
-
import gc
from twisted.internet import defer
@@ -64,6 +63,44 @@ class RedirectToTests(TestCase):
targetURL = "http://target.example.com/4321"
self.assertRaises(TypeError, redirectTo, targetURL, request)
+ def test_legitimateRedirect(self):
+ """
+ Legitimate URLs are fully interpolated in the `redirectTo` response body without transformation
+ """
+ request = DummyRequest([b""])
+ html = redirectTo(b"https://twisted.org/", request)
+ expected = b"""
+<html>
+ <head>
+ <meta http-equiv=\"refresh\" content=\"0;URL=https://twisted.org/\">
+ </head>
+ <body bgcolor=\"#FFFFFF\" text=\"#000000\">
+ <a href=\"https://twisted.org/\">click here</a>
+ </body>
+</html>
+"""
+ self.assertEqual(html, expected)
+
+ def test_maliciousRedirect(self):
+ """
+ Malicious URLs are HTML-escaped before interpolating them in the `redirectTo` response body
+ """
+ request = DummyRequest([b""])
+ html = redirectTo(
+ b'https://twisted.org/"><script>alert(document.location)</script>', request
+ )
+ expected = b"""
+<html>
+ <head>
+ <meta http-equiv=\"refresh\" content=\"0;URL=https://twisted.org/&quot;&gt;&lt;script&gt;alert(document.location)&lt;/script&gt;\">
+ </head>
+ <body bgcolor=\"#FFFFFF\" text=\"#000000\">
+ <a href=\"https://twisted.org/&quot;&gt;&lt;script&gt;alert(document.location)&lt;/script&gt;\">click here</a>
+ </body>
+</html>
+"""
+ self.assertEqual(html, expected)
+
class ParentRedirectTests(SynchronousTestCase):
"""
--
2.45.2
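
The second deleted patch adds HTML output encoding for the URL parameter of twisted.web.util.redirectTo, a fix that is likewise included in Twisted 24.7.0. A minimal sketch of the escaping step using the standard library; this mirrors the one-line change above but is not the Twisted implementation:

from html import escape

def escaped_url(url: bytes) -> bytes:
    # HTML-escape the URL before it is interpolated into the redirect page,
    # so a crafted URL cannot break out of the attribute and inject markup.
    return escape(url.decode("utf-8")).encode("utf-8")

print(escaped_url(b'https://twisted.org/"><script>alert(document.location)</script>'))
# b'https://twisted.org/&quot;&gt;&lt;script&gt;alert(document.location)&lt;/script&gt;'

The output matches the byte string that test_maliciousRedirect expects to find in the generated page.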

@@ -55,7 +55,7 @@
buildPythonPackage rec {
pname = "twisted";
version = "24.3.0";
version = "24.7.0";
format = "pyproject";
disabled = pythonOlder "3.6";
@@ -63,7 +63,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
extension = "tar.gz";
hash = "sha256-azi27Ocpa14SLJ6xfaLuqz2YoZj1DKnv0A+wPltP1K4=";
hash = "sha256-WmAUfwRBh6En7H2pbRcNSbzOUMb9NvWU5g9Fh+/005Q=";
};
patches = [
@@ -73,12 +73,6 @@ buildPythonPackage rec {
url = "https://github.com/mweinelt/twisted/commit/e69e652de671aac0abf5c7e6c662fc5172758c5a.patch";
hash = "sha256-LmvKUTViZoY/TPBmSlx4S9FbJNZfB5cxzn/YcciDmoI=";
})
# https://github.com/twisted/twisted/security/advisories/GHSA-cf56-g6w6-pqq2
./CVE-2024-41671.patch
# https://github.com/twisted/twisted/security/advisories/GHSA-c8m8-j448-xjx7
./CVE-2024-41810.patch
];
__darwinAllowLocalNetworking = true;
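
With the bump to 24.7.0 the two vendored security patches are removed, since that release already contains the corresponding fixes. A quick sanity-check sketch, assuming a Python environment built from the updated expression (not part of the diff):

import twisted

# 24.7.0 ships the pipelining/chunked-transfer and redirectTo fixes upstream,
# so the local CVE-2024-41671 / CVE-2024-41810 patches no longer need to be applied.
assert twisted.__version__ == "24.7.0", twisted.__version__
print("Twisted", twisted.__version__)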

@@ -4,6 +4,7 @@
automat,
buildPythonPackage,
cryptography,
fetchpatch2,
fetchPypi,
geoip,
incremental,
@@ -27,6 +28,15 @@ buildPythonPackage rec {
hash = "sha256-cfha6T121yZRAFnJ7XTmCLxaXJ99EDhTtJ5BQoBAai8=";
};
patches = [
# https://github.com/meejah/txtorcon/pull/400
(fetchpatch2 {
name = "twisted-24.7.0-fixes.patch";
url = "https://github.com/meejah/txtorcon/commit/88b5dc2971514babd36d837c93550715dea44b09.patch";
hash = "sha256-O7kFZw+y1PHJRcMdxCczy8UZd3ruLhjLMxh2tcawWI4=";
})
];
propagatedBuildInputs = [
cryptography
incremental