From a48f2abda079afb3deda64340715864bb4119f90 Mon Sep 17 00:00:00 2001 From: Treadgold Date: Sun, 1 Feb 2026 09:14:05 +1300 Subject: [PATCH 1/2] IteratorByteStream: split large iterator chunks into 64 KB pieces --- httpx/_content.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/httpx/_content.py b/httpx/_content.py index 6f479a0885..dc4442caa8 100644 --- a/httpx/_content.py +++ b/httpx/_content.py @@ -59,10 +59,14 @@ def __iter__(self) -> Iterator[bytes]: yield chunk chunk = self._stream.read(self.CHUNK_SIZE) else: - # Otherwise iterate. + # Otherwise iterate, splitting large chunks. for part in self._stream: - yield part - + # Split large chunks into CHUNK_SIZE pieces + offset = 0 + while offset < len(part): + chunk_size = min(self.CHUNK_SIZE, len(part) - offset) + yield part[offset : offset + chunk_size] + offset += chunk_size class AsyncIteratorByteStream(AsyncByteStream): CHUNK_SIZE = 65_536 From fc2d503b4aa2dbda9180046fef63e9e8e7d04516 Mon Sep 17 00:00:00 2001 From: Treadgold Date: Sun, 1 Feb 2026 09:45:47 +1300 Subject: [PATCH 2/2] IteratorByteStream: split large iterator chunks into 64KB pieces --- httpx/_content.py | 1 + tests/test_content.py | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/httpx/_content.py b/httpx/_content.py index dc4442caa8..f9d2c2e872 100644 --- a/httpx/_content.py +++ b/httpx/_content.py @@ -68,6 +68,7 @@ def __iter__(self) -> Iterator[bytes]: yield part[offset : offset + chunk_size] offset += chunk_size + class AsyncIteratorByteStream(AsyncByteStream): CHUNK_SIZE = 65_536 diff --git a/tests/test_content.py b/tests/test_content.py index 9bfe983722..07b686c7cb 100644 --- a/tests/test_content.py +++ b/tests/test_content.py @@ -516,3 +516,28 @@ def test_allow_nan_false(): ValueError, match="Out of range float values are not JSON compliant" ): httpx.Response(200, json=data_with_inf) + + +@pytest.mark.anyio +async def test_iterator_content_splits_large_chunks(): + # Generator 
yielding a single large chunk (100 KB)
+    large_chunk = b"a" * 102_400  # 100 KB
+
+    def gen() -> typing.Iterator[bytes]:
+        yield large_chunk
+
+    # Pass generator to Request (internally uses IteratorByteStream)
+    request = httpx.Request("POST", "https://www.example.org", content=gen())
+
+    # Annotate as Iterable[bytes] to make mypy happy
+    sync_stream: typing.Iterable[bytes] = request.stream  # type: ignore
+
+    # Collect chunks
+    chunks = list(sync_stream)
+
+    # Each chunk must be <= 64 KB
+    for chunk in chunks:
+        assert len(chunk) <= 64 * 1024
+
+    # Total content matches original
+    assert b"".join(chunks) == large_chunk