diff --git a/fix-CVE-2025-13836.patch b/fix-CVE-2025-13836.patch
new file mode 100644
index 0000000000000000000000000000000000000000..99442eb55a2a4633239661883955831e0ad221b3
--- /dev/null
+++ b/fix-CVE-2025-13836.patch
@@ -0,0 +1,155 @@
+From 4f2bc24b750a82d3b439f174e7717fc09820bfeb Mon Sep 17 00:00:00 2001
+From: Serhiy Storchaka
+Date: Mon, 1 Dec 2025 17:26:07 +0200
+Subject: [PATCH] gh-119451: Fix a potential denial of service in http.client
+ (GH-119454)
+
+Reading the whole body of the HTTP response could cause OOM if
+the Content-Length value is too large even if the server does not send
+a large amount of data. Now the HTTP client reads large data by chunks,
+therefore the amount of consumed memory is proportional to the amount
+of sent data.
+(cherry picked from commit 5a4c4a033a4a54481be6870aa1896fad732555b5)
+
+Co-authored-by: Serhiy Storchaka
+---
+ Lib/http/client.py                            | 28 ++++++--
+ Lib/test/test_httplib.py                      | 66 +++++++++++++++++++
+ ...-05-23-11-47-48.gh-issue-119451.qkJe9-.rst |  5 ++
+ 3 files changed, 95 insertions(+), 4 deletions(-)
+ create mode 100644 Misc/NEWS.d/next/Security/2024-05-23-11-47-48.gh-issue-119451.qkJe9-.rst
+
+diff --git a/Lib/http/client.py b/Lib/http/client.py
+index 91ee1b470cfd47..c977612732afbc 100644
+--- a/Lib/http/client.py
++++ b/Lib/http/client.py
+@@ -111,6 +111,11 @@
+ _MAXLINE = 65536
+ _MAXHEADERS = 100
+ 
++# Data larger than this will be read in chunks, to prevent extreme
++# overallocation.
++_MIN_READ_BUF_SIZE = 1 << 20
++
++
+ # Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2)
+ #
+ # VCHAR          = %x21-7E
+@@ -635,10 +640,25 @@ def _safe_read(self, amt):
+         reading. If the bytes are truly not available (due to EOF), then the
+         IncompleteRead exception can be used to detect the problem.
+         """
+-        data = self.fp.read(amt)
+-        if len(data) < amt:
+-            raise IncompleteRead(data, amt-len(data))
+-        return data
++        cursize = min(amt, _MIN_READ_BUF_SIZE)
++        data = self.fp.read(cursize)
++        if len(data) >= amt:
++            return data
++        if len(data) < cursize:
++            raise IncompleteRead(data, amt - len(data))
++
++        data = io.BytesIO(data)
++        data.seek(0, 2)
++        while True:
++            # This is a geometric increase in read size (never more than
++            # doubling the current length of data per loop iteration).
++            delta = min(cursize, amt - cursize)
++            data.write(self.fp.read(delta))
++            if data.tell() >= amt:
++                return data.getvalue()
++            cursize += delta
++            if data.tell() < cursize:
++                raise IncompleteRead(data.getvalue(), amt - data.tell())
+ 
+     def _safe_readinto(self, b):
+         """Same as _safe_read, but for reading into a buffer."""
+diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
+index 8b9d49ec094813..55363413b3b140 100644
+--- a/Lib/test/test_httplib.py
++++ b/Lib/test/test_httplib.py
+@@ -1390,6 +1390,72 @@ def run_server():
+             thread.join()
+         self.assertEqual(result, b"proxied data\n")
+ 
++    def test_large_content_length(self):
++        serv = socket.create_server((HOST, 0))
++        self.addCleanup(serv.close)
++
++        def run_server():
++            [conn, address] = serv.accept()
++            with conn:
++                while conn.recv(1024):
++                    conn.sendall(
++                        b"HTTP/1.1 200 Ok\r\n"
++                        b"Content-Length: %d\r\n"
++                        b"\r\n" % size)
++                    conn.sendall(b'A' * (size//3))
++                    conn.sendall(b'B' * (size - size//3))
++
++        thread = threading.Thread(target=run_server)
++        thread.start()
++        self.addCleanup(thread.join, 1.0)
++
++        conn = client.HTTPConnection(*serv.getsockname())
++        try:
++            for w in range(15, 27):
++                size = 1 << w
++                conn.request("GET", "/")
++                with conn.getresponse() as response:
++                    self.assertEqual(len(response.read()), size)
++        finally:
++            conn.close()
++            thread.join(1.0)
++
++    def test_large_content_length_truncated(self):
++        serv = socket.create_server((HOST, 0))
++        self.addCleanup(serv.close)
++
++        def run_server():
++            while True:
++                [conn, address] = serv.accept()
++                with conn:
++                    conn.recv(1024)
++                    if not size:
++                        break
++                    conn.sendall(
++                        b"HTTP/1.1 200 Ok\r\n"
++                        b"Content-Length: %d\r\n"
++                        b"\r\n"
++                        b"Text" % size)
++
++        thread = threading.Thread(target=run_server)
++        thread.start()
++        self.addCleanup(thread.join, 1.0)
++
++        conn = client.HTTPConnection(*serv.getsockname())
++        try:
++            for w in range(18, 65):
++                size = 1 << w
++                conn.request("GET", "/")
++                with conn.getresponse() as response:
++                    self.assertRaises(client.IncompleteRead, response.read)
++                conn.close()
++        finally:
++            conn.close()
++            size = 0
++            conn.request("GET", "/")
++            conn.close()
++            thread.join(1.0)
++
+     def test_putrequest_override_domain_validation(self):
+         """
+         It should be possible to override the default validation
+diff --git a/Misc/NEWS.d/next/Security/2024-05-23-11-47-48.gh-issue-119451.qkJe9-.rst b/Misc/NEWS.d/next/Security/2024-05-23-11-47-48.gh-issue-119451.qkJe9-.rst
+new file mode 100644
+index 00000000000000..6d6f25cd2f8bf7
+--- /dev/null
++++ b/Misc/NEWS.d/next/Security/2024-05-23-11-47-48.gh-issue-119451.qkJe9-.rst
+@@ -0,0 +1,5 @@
++Fix a potential memory denial of service in the :mod:`http.client` module.
++When connecting to a malicious server, it could cause
++an arbitrary amount of memory to be allocated.
++This could have led to symptoms including a :exc:`MemoryError`, swapping, out
++of memory (OOM) killed processes or containers, or even system crashes.
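
For context on the first patch: the idea it applies -- never allocating the full Content-Length up front, growing the buffer geometrically, and bailing out on a short read -- can be illustrated outside the diff. The following is a minimal standalone sketch, not the patched stdlib code; the function name read_exact, the use of EOFError, and the 1 MiB floor are illustrative assumptions only.

import io

_CHUNK_FLOOR = 1 << 20  # assumed 1 MiB floor before chunked reading kicks in

def read_exact(fp, amt):
    # Read exactly amt bytes without preallocating amt bytes up front.
    cursize = min(amt, _CHUNK_FLOOR)
    data = fp.read(cursize)
    if len(data) >= amt:
        return data                      # small payload: one read is enough
    if len(data) < cursize:
        raise EOFError(f"expected {amt} bytes, got {len(data)}")

    buf = io.BytesIO(data)
    buf.seek(0, 2)                       # position at the end for appending
    while True:
        delta = min(cursize, amt - cursize)   # at most doubles the buffer
        buf.write(fp.read(delta))
        if buf.tell() >= amt:
            return buf.getvalue()
        cursize += delta
        if buf.tell() < cursize:         # short read: sender stopped early
            raise EOFError(f"expected {amt} bytes, got {buf.tell()}")

# A stream that advertises a huge length but delivers only a few bytes now
# costs at most one bounded buffer before the short read is detected:
try:
    read_exact(io.BytesIO(b"tiny body"), 1 << 30)
except EOFError as exc:
    print(exc)

With this shape, memory use is proportional to the bytes actually received rather than to the attacker-controlled Content-Length value, which is the property the CVE-2025-13836 fix restores.
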
diff --git a/fix-CVE-2025-13837.patch b/fix-CVE-2025-13837.patch
new file mode 100644
index 0000000000000000000000000000000000000000..607fa92d6ba71ef182eb30ca2b698f43f27fbd7e
--- /dev/null
+++ b/fix-CVE-2025-13837.patch
@@ -0,0 +1,164 @@
+From 694922cf40aa3a28f898b5f5ee08b71b4922df70 Mon Sep 17 00:00:00 2001
+From: Serhiy Storchaka
+Date: Mon, 1 Dec 2025 17:28:15 +0200
+Subject: [PATCH] gh-119342: Fix a potential denial of service in plistlib
+ (GH-119343)
+
+Reading a specially prepared small Plist file could cause OOM because file's
+read(n) preallocates a bytes object for reading the specified amount of
+data. Now plistlib reads large data by chunks, therefore the upper limit of
+consumed memory is proportional to the size of the input file.
+
+
+---
+ Lib/plistlib.py                               | 31 +++++++++------
+ Lib/test/test_plistlib.py                     | 38 +++++++++++++++++--
+ ...31.gh-issue-119342.BTFj4Z.rst\342\200\216" |  5 +++
+ 3 files changed, 59 insertions(+), 15 deletions(-)
+ create mode 100644 "Misc/NEWS.d/next/Security/2024-05-21-22-11-31.gh-issue-119342.BTFj4Z.rst\342\200\216"
+
+diff --git a/Lib/plistlib.py b/Lib/plistlib.py
+index 53e718f..8f218f2 100644
+--- a/Lib/plistlib.py
++++ b/Lib/plistlib.py
+@@ -73,6 +73,9 @@ from xml.parsers.expat import ParserCreate
+ PlistFormat = enum.Enum('PlistFormat', 'FMT_XML FMT_BINARY', module=__name__)
+ globals().update(PlistFormat.__members__)
+ 
++# Data larger than this will be read in chunks, to prevent extreme
++# overallocation.
++_MIN_READ_BUF_SIZE = 1 << 20
+ 
+ class UID:
+     def __init__(self, data):
+@@ -498,13 +501,25 @@ class _BinaryPlistParser:
+             return struct.unpack(f, self._fp.read(s))[0]
+ 
+         return tokenL
++
++    def _read(self, size):
++        cursize = min(size, _MIN_READ_BUF_SIZE)
++        data = self._fp.read(cursize)
++        while True:
++            if len(data) != cursize:
++                raise InvalidFileException
++            if cursize == size:
++                return data
++            delta = min(cursize, size - cursize)
++            data += self._fp.read(delta)
++            cursize += delta
+ 
+     def _read_ints(self, n, size):
+-        data = self._fp.read(size * n)
++        data = self._read(size * n)
+         if size in _BINARY_FORMAT:
+             return struct.unpack(f'>{n}{_BINARY_FORMAT[size]}', data)
+         else:
+-            if not size or len(data) != size * n:
++            if not size:
+                 raise InvalidFileException()
+             return tuple(int.from_bytes(data[i: i + size], 'big')
+                          for i in range(0, size * n, size))
+@@ -561,22 +576,16 @@ class _BinaryPlistParser:
+ 
+         elif tokenH == 0x40:  # data
+             s = self._get_size(tokenL)
+-            result = self._fp.read(s)
+-            if len(result) != s:
+-                raise InvalidFileException()
++            result = self._read(s)
+ 
+         elif tokenH == 0x50:  # ascii string
+             s = self._get_size(tokenL)
+-            data = self._fp.read(s)
+-            if len(data) != s:
+-                raise InvalidFileException()
++            data = self._read(s)
+             result = data.decode('ascii')
+ 
+         elif tokenH == 0x60:  # unicode string
+             s = self._get_size(tokenL) * 2
+-            data = self._fp.read(s)
+-            if len(data) != s:
+-                raise InvalidFileException()
++            data = self._read(s)
+             result = data.decode('utf-16be')
+ 
+         elif tokenH == 0x80:  # UID
+diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py
+index 6b45744..a156025 100644
+--- a/Lib/test/test_plistlib.py
++++ b/Lib/test/test_plistlib.py
+@@ -840,9 +840,8 @@ class TestPlistlib(unittest.TestCase):
+ 
+ 
+ class TestBinaryPlistlib(unittest.TestCase):
+-
+-    @staticmethod
+-    def decode(*objects, offset_size=1, ref_size=1):
++
++    def build(self, *objects, offset_size=1, ref_size=1):
+         data = [b'bplist00']
+         offset = 8
+         offsets = []
+@@ -854,7 +853,11 @@ class TestBinaryPlistlib(unittest.TestCase):
+                            len(objects), 0, offset)
+         data.extend(offsets)
+         data.append(tail)
+-        return plistlib.loads(b''.join(data), fmt=plistlib.FMT_BINARY)
++        return b''.join(data)
++
++    def decode(self, *objects, offset_size=1, ref_size=1):
++        data = self.build(*objects, offset_size=offset_size, ref_size=ref_size)
++        return plistlib.loads(data, fmt=plistlib.FMT_BINARY)
+ 
+     def test_nonstandard_refs_size(self):
+         # Issue #21538: Refs and offsets are 24-bit integers
+@@ -962,6 +965,33 @@
+             with self.assertRaises(plistlib.InvalidFileException):
+                 plistlib.loads(b'bplist00' + data, fmt=plistlib.FMT_BINARY)
+ 
++    def test_truncated_large_data(self):
++        self.addCleanup(os_helper.unlink, os_helper.TESTFN)
++        def check(data):
++            with open(os_helper.TESTFN, 'wb') as f:
++                f.write(data)
++            # buffered file
++            with open(os_helper.TESTFN, 'rb') as f:
++                with self.assertRaises(plistlib.InvalidFileException):
++                    plistlib.load(f, fmt=plistlib.FMT_BINARY)
++            # unbuffered file
++            with open(os_helper.TESTFN, 'rb', buffering=0) as f:
++                with self.assertRaises(plistlib.InvalidFileException):
++                    plistlib.load(f, fmt=plistlib.FMT_BINARY)
++        for w in range(20, 64):
++            s = 1 << w
++            # data
++            check(self.build(b'\x4f\x13' + s.to_bytes(8, 'big')))
++            # ascii string
++            check(self.build(b'\x5f\x13' + s.to_bytes(8, 'big')))
++            # unicode string
++            check(self.build(b'\x6f\x13' + s.to_bytes(8, 'big')))
++            # array
++            check(self.build(b'\xaf\x13' + s.to_bytes(8, 'big')))
++            # dict
++            check(self.build(b'\xdf\x13' + s.to_bytes(8, 'big')))
++            # number of objects
++            check(b'bplist00' + struct.pack('>6xBBQQQ', 1, 1, s, 0, 8))
+ 
+ class TestKeyedArchive(unittest.TestCase):
+     def test_keyed_archive_data(self):
+diff --git "a/Misc/NEWS.d/next/Security/2024-05-21-22-11-31.gh-issue-119342.BTFj4Z.rst\342\200\216" "b/Misc/NEWS.d/next/Security/2024-05-21-22-11-31.gh-issue-119342.BTFj4Z.rst\342\200\216"
+new file mode 100644
+index 0000000..04fd8fa
+--- /dev/null
++++ "b/Misc/NEWS.d/next/Security/2024-05-21-22-11-31.gh-issue-119342.BTFj4Z.rst\342\200\216"
+@@ -0,0 +1,5 @@
++Fix a potential memory denial of service in the :mod:`plistlib` module.
++When reading a Plist file received from untrusted source, it could cause
++an arbitrary amount of memory to be allocated.
++This could have led to symptoms including a :exc:`MemoryError`, swapping, out
++of memory (OOM) killed processes or containers, or even system crashes.
+-- 
+2.47.3
+
diff --git a/python3.spec b/python3.spec
index aacb38efc2d7cf3822609213aab4dde212a069e5..5563871567c4f3eef1b5aa568f7754f76246b864 100644
--- a/python3.spec
+++ b/python3.spec
@@ -1,4 +1,4 @@
-%define anolis_release 11
+%define anolis_release 12
 %global pybasever 3.11
 
 # pybasever without the dot:
@@ -275,6 +275,10 @@ Patch1020: CVE-2024-6232.patch
 # https://github.com/python/cpython/commit/8b8e68d3dc95f454f58fdd8aac10848facb1491d
 Patch1021: 1021-bugfix-for-CVE-2025-8291.patch
 # https://github.com/python/cpython/commit/8b8e68d3dc95f454f58fdd8aac10848facb1491d
 Patch1022: 1022-add-patch-to-fix-CVE-2025-6075.patch
+# https://github.com/python/cpython/commit/694922cf40aa3a28f898b5f5ee08b71b4922df70
+Patch1023: fix-CVE-2025-13837.patch
+# https://github.com/python/cpython/commit/5a4c4a033a4a54481be6870aa1896fad732555b5
+Patch1024: fix-CVE-2025-13836.patch
 # ==========================================
 # Descriptions, and metadata for subpackages
@@ -1547,6 +1551,9 @@ CheckPython optimized
 
 # ======================================================
 %changelog
+* Mon Dec 15 2025 lzq11122 - 3.11.6-12
+- Add patches to fix CVE-2025-13837 and CVE-2025-13836
+
 * Tue Nov 11 2025 lzq11122 - 3.11.6-11
 - Add patch to fix CVE-2025-6075
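
The plistlib patch above (CVE-2025-13837) applies the same chunked strategy to length fields read from an untrusted binary plist. A standalone sketch of that read-and-validate loop follows; the names read_sized, TruncatedInput, and the 1 MiB floor are stand-ins chosen for illustration, not the real _BinaryPlistParser internals.

import io

_CHUNK_FLOOR = 1 << 20  # assumed 1 MiB floor, mirroring the patch's constant

class TruncatedInput(Exception):
    # Stand-in for plistlib.InvalidFileException in this sketch.
    pass

def read_sized(fp, size):
    # Read a payload whose size comes from an untrusted length field,
    # growing the buffer by at most its current length per pass and
    # aborting as soon as a read comes up short.
    cursize = min(size, _CHUNK_FLOOR)
    data = fp.read(cursize)
    while True:
        if len(data) != cursize:
            raise TruncatedInput
        if cursize == size:
            return data
        delta = min(cursize, size - cursize)
        data += fp.read(delta)
        cursize += delta

# A 4-byte file that claims a multi-gigabyte payload fails fast and cheaply:
try:
    read_sized(io.BytesIO(b"Text"), 8 << 30)
except TruncatedInput:
    print("truncated input detected without a large allocation")

Because the declared size is validated incrementally, a tiny malicious file can no longer force the parser to preallocate gigabytes, which is the behaviour the new test_truncated_large_data test in the patch exercises.
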