9 changes: 7 additions & 2 deletions httpx/_content.py
@@ -59,9 +59,14 @@ def __iter__(self) -> Iterator[bytes]:
             yield chunk
             chunk = self._stream.read(self.CHUNK_SIZE)
         else:
-            # Otherwise iterate.
+            # Otherwise iterate, splitting large chunks.
             for part in self._stream:
-                yield part
+                # Split large chunks into CHUNK_SIZE pieces.
+                offset = 0
+                while offset < len(part):
+                    chunk_size = min(self.CHUNK_SIZE, len(part) - offset)
+                    yield part[offset : offset + chunk_size]
+                    offset += chunk_size
 
 
 class AsyncIteratorByteStream(AsyncByteStream):
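For reference, a minimal standalone sketch of the splitting loop this hunk adds. The 65_536-byte CHUNK_SIZE matches IteratorByteStream.CHUNK_SIZE in httpx; the split_chunks name and the demo values are illustrative only, not part of the PR:

from typing import Iterable, Iterator

CHUNK_SIZE = 65_536  # matches IteratorByteStream.CHUNK_SIZE (64 KB)

def split_chunks(stream: Iterable[bytes]) -> Iterator[bytes]:
    # Re-yield each part in pieces of at most CHUNK_SIZE bytes.
    for part in stream:
        offset = 0
        while offset < len(part):
            size = min(CHUNK_SIZE, len(part) - offset)
            yield part[offset : offset + size]
            offset += size

# A single 100 KB part comes back as a 64 KB chunk plus a 36 KB remainder.
chunks = list(split_chunks([b"a" * 102_400]))
assert [len(c) for c in chunks] == [65_536, 36_864]
assert b"".join(chunks) == b"a" * 102_400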
25 changes: 25 additions & 0 deletions tests/test_content.py
@@ -516,3 +516,28 @@ def test_allow_nan_false():
         ValueError, match="Out of range float values are not JSON compliant"
     ):
         httpx.Response(200, json=data_with_inf)
+
+
+def test_iterator_content_splits_large_chunks() -> None:
+    # Generator yielding a single large chunk (100 KB).
+    large_chunk = b"a" * 102_400  # 100 KB
+
+    def gen() -> typing.Iterator[bytes]:
+        yield large_chunk
+
+    # Pass the generator to Request (internally wrapped in IteratorByteStream).
+    # Any method/URL works here; these are placeholder test values.
+    request = httpx.Request("POST", "https://www.example.com", content=gen())
+
+    # Cast to Iterable[bytes] to keep mypy happy.
+    sync_stream: typing.Iterable[bytes] = request.stream  # type: ignore
+
+    # Collect the chunks.
+    chunks = list(sync_stream)
+
+    # Each chunk must be at most 64 KB (IteratorByteStream.CHUNK_SIZE).
+    for chunk in chunks:
+        assert len(chunk) <= 64 * 1024
+
+    # The reassembled content matches the original.
+    assert b"".join(chunks) == large_chunk
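Note that the hunk above only touches the synchronous IteratorByteStream; AsyncIteratorByteStream appears in this diff only as unchanged context. Purely as an illustration (an assumption, not code from the PR), the same splitting applied to an async byte stream would look like:

import asyncio
from typing import AsyncIterable, AsyncIterator

CHUNK_SIZE = 65_536  # same 64 KB bound as the sync path

async def split_chunks_async(stream: AsyncIterable[bytes]) -> AsyncIterator[bytes]:
    # Hypothetical async counterpart of the splitting loop above.
    async for part in stream:
        offset = 0
        while offset < len(part):
            size = min(CHUNK_SIZE, len(part) - offset)
            yield part[offset : offset + size]
            offset += size

async def main() -> None:
    async def gen() -> AsyncIterator[bytes]:
        yield b"a" * 102_400  # one 100 KB part

    sizes = [len(chunk) async for chunk in split_chunks_async(gen())]
    assert sizes == [65_536, 36_864]

asyncio.run(main())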