diff --git a/.github/.release-please-manifest.json b/.github/.release-please-manifest.json
index b21ec8097..1d17cc812 100644
--- a/.github/.release-please-manifest.json
+++ b/.github/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "4.14.1"
+ ".": "4.14.2"
}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index db72f034b..7ce24ff24 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
# Changelog
+## [4.14.2](https://github.com/testcontainers/testcontainers-python/compare/testcontainers-v4.14.1...testcontainers-v4.14.2) (2026-03-18)
+
+
+### Features
+
+* **kafka:** allow configurable listener name and security protocol ([#966](https://github.com/testcontainers/testcontainers-python/issues/966)) ([44dd40b](https://github.com/testcontainers/testcontainers-python/commit/44dd40b48c3a5020b487bae5d460124d9e594ac3))
+
## [4.14.1](https://github.com/testcontainers/testcontainers-python/compare/testcontainers-v4.14.0...testcontainers-v4.14.1) (2026-01-31)
diff --git a/Makefile b/Makefile
index 19cd33a1d..680b5d038 100644
--- a/Makefile
+++ b/Makefile
@@ -20,7 +20,10 @@ build: ## Build the python package
tests: ${TESTS} ## Run tests for each package
${TESTS}: %/tests:
- uv run coverage run --parallel -m pytest -v $*/tests
+ uv run coverage run --parallel -m pytest -v $*/tests
+
+quick-core-tests: ## Run core tests excluding long_running
+ uv run coverage run --parallel -m pytest -v -m "not long_running" core/tests
coverage: ## Target to combine and report coverage.
uv run coverage combine
@@ -61,7 +64,7 @@ clean-all: clean ## Remove all generated files and reset the local virtual envir
rm -rf .venv
# Targets that do not generate file-level artifacts.
-.PHONY: clean docs doctests image tests ${TESTS}
+.PHONY: clean docs doctests image tests quick-core-tests ${TESTS}
# Implements this pattern for autodocumenting Makefiles:
diff --git a/core/README.rst b/core/README.rst
index 7403d2665..8fc2aa16e 100644
--- a/core/README.rst
+++ b/core/README.rst
@@ -16,6 +16,8 @@ Testcontainers Core
.. autoclass:: testcontainers.core.wait_strategies.WaitStrategy
+.. autoclass:: testcontainers.core.transferable.Transferable
+
.. raw:: html
@@ -49,3 +51,20 @@ Using `DockerContainer` and `DockerImage` to create a container:
The `DockerImage` class is used to build the image from the specified path and tag.
The `DockerContainer` class is then used to create a container from the image.
+
+Copying a file from disk into a container:
+
+.. doctest::
+
+ >>> import tempfile
+ >>> from pathlib import Path
+ >>> from testcontainers.core.container import DockerContainer
+
+ >>> with tempfile.TemporaryDirectory() as tmp:
+ ... my_file = Path(tmp) / "my_file.txt"
+ ... _ = my_file.write_text("file content")
+ ... with DockerContainer("bash", command="sleep infinity") as container:
+ ... container.copy_into_container(my_file, "/tmp/my_file.txt")
+ ... result = container.exec("cat /tmp/my_file.txt")
+ ... result.output
+ b'file content'
diff --git a/core/testcontainers/core/config.py b/core/testcontainers/core/config.py
index cca5d65af..c9cd8c21e 100644
--- a/core/testcontainers/core/config.py
+++ b/core/testcontainers/core/config.py
@@ -88,6 +88,8 @@ def read_tc_properties() -> dict[str, str]:
@dataclass
class TestcontainersConfiguration:
+ __test__ = False
+
def _render_bool(self, env_name: str, prop_name: str) -> bool:
env_val = environ.get(env_name, None)
if env_val is not None:
diff --git a/core/testcontainers/core/container.py b/core/testcontainers/core/container.py
index cf61a85bf..09a980b28 100644
--- a/core/testcontainers/core/container.py
+++ b/core/testcontainers/core/container.py
@@ -1,5 +1,8 @@
import contextlib
+import io
+import pathlib
import sys
+import tarfile
from os import PathLike
from socket import socket
from types import TracebackType
@@ -18,6 +21,7 @@
from testcontainers.core.exceptions import ContainerConnectException, ContainerStartException
from testcontainers.core.labels import LABEL_SESSION_ID, SESSION_ID
from testcontainers.core.network import Network
+from testcontainers.core.transferable import Transferable, TransferSpec, build_transfer_tar
from testcontainers.core.utils import is_arm, setup_logger
from testcontainers.core.wait_strategies import LogMessageWaitStrategy
from testcontainers.core.waiting_utils import WaitStrategy
@@ -69,6 +73,7 @@ def __init__(
network: Optional[Network] = None,
network_aliases: Optional[list[str]] = None,
_wait_strategy: Optional[WaitStrategy] = None,
+ transferables: Optional[list[TransferSpec]] = None,
**kwargs: Any,
) -> None:
self.env = env or {}
@@ -82,6 +87,8 @@ def __init__(
for vol in volumes:
self.with_volume_mapping(*vol)
+ self.tmpfs: dict[str, str] = {}
+
self.image = image
self._docker = DockerClient(**(docker_client_kw or {}))
self._container: Optional[Container] = None
@@ -98,6 +105,11 @@ def __init__(
self._kwargs = kwargs
self._wait_strategy: Optional[WaitStrategy] = _wait_strategy
+ self._transferable_specs: list[TransferSpec] = []
+ if transferables:
+ for t in transferables:
+ self.with_copy_into_container(*t)
+
def with_env(self, key: str, value: str) -> Self:
self.env[key] = value
return self
@@ -198,6 +210,7 @@ def start(self) -> Self:
ports=cast("dict[int, Optional[int]]", self.ports),
name=self._name,
volumes=self.volumes,
+ tmpfs=self.tmpfs,
**{**network_kwargs, **self._kwargs},
)
@@ -205,6 +218,10 @@ def start(self) -> Self:
self._wait_strategy.wait_until_ready(self)
logger.info("Container started: %s", self._container.short_id)
+
+ for t in self._transferable_specs:
+ self._transfer_into_container(*t)
+
return self
def stop(self, force: bool = True, delete_volume: bool = True) -> None:
@@ -270,6 +287,16 @@ def with_volume_mapping(self, host: Union[str, PathLike[str]], container: str, m
self.volumes[str(host)] = mapping
return self
+ def with_tmpfs_mount(self, container_path: str, size: Optional[str] = None) -> Self:
+ """Mount a tmpfs volume on the container.
+
+ :param container_path: Container path to mount tmpfs on (e.g., '/data')
+ :param size: Optional size limit (e.g., '256m', '1g'). If None, unbounded.
+ :return: Self for chaining
+ """
+ self.tmpfs[container_path] = size or ""
+ return self
+
def get_wrapped_container(self) -> "Container":
return self._container
@@ -305,6 +332,35 @@ def _configure(self) -> None:
# placeholder if subclasses want to define this and use the default start method
pass
+ def with_copy_into_container(
+ self, transferable: Transferable, destination_in_container: str, mode: int = 0o644
+ ) -> Self:
+ self._transferable_specs.append((transferable, destination_in_container, mode))
+ return self
+
+ def copy_into_container(self, transferable: Transferable, destination_in_container: str, mode: int = 0o644) -> None:
+ return self._transfer_into_container(transferable, destination_in_container, mode)
+
+ def _transfer_into_container(self, transferable: Transferable, destination_in_container: str, mode: int) -> None:
+ if not self._container:
+ raise ContainerStartException("Container must be started before transferring files")
+
+ data = build_transfer_tar(transferable, destination_in_container, mode)
+ if not self._container.put_archive(path="/", data=data):
+ raise OSError(f"Failed to put archive into container at {destination_in_container}")
+
+ def copy_from_container(self, source_in_container: str, destination_on_host: pathlib.Path) -> None:
+ if not self._container:
+ raise ContainerStartException("Container must be started before copying files")
+
+ tar_stream, _ = self._container.get_archive(source_in_container)
+
+ with tarfile.open(fileobj=io.BytesIO(b"".join(tar_stream))) as tar:
+ for member in tar.getmembers():
+ extracted = tar.extractfile(member)
+ if extracted is not None:
+ destination_on_host.write_bytes(extracted.read())
+
class Reaper:
"""
diff --git a/core/testcontainers/core/generic.py b/core/testcontainers/core/generic.py
index 591a4a8a8..1410321ee 100644
--- a/core/testcontainers/core/generic.py
+++ b/core/testcontainers/core/generic.py
@@ -29,6 +29,8 @@
class DbContainer(DockerContainer):
"""
**DEPRECATED (for removal)**
+ Please use database-specific container classes or `SqlContainer`
+ (``from testcontainers.generic.sql import SqlContainer``) instead.
Generic database container.
"""
diff --git a/core/testcontainers/core/transferable.py b/core/testcontainers/core/transferable.py
new file mode 100644
index 000000000..1f90f8e07
--- /dev/null
+++ b/core/testcontainers/core/transferable.py
@@ -0,0 +1,33 @@
+import io
+import pathlib
+import tarfile
+from typing import Union
+
+Transferable = Union[bytes, pathlib.Path]
+
+TransferSpec = Union[tuple[Transferable, str], tuple[Transferable, str, int]]
+
+
+def build_transfer_tar(transferable: Transferable, destination: str, mode: int = 0o644) -> bytes:
+ """Build a tar archive containing the transferable, ready for put_archive(path="/")."""
+ buf = io.BytesIO()
+ with tarfile.open(fileobj=buf, mode="w") as tar:
+ if isinstance(transferable, bytes):
+ info = tarfile.TarInfo(name=destination)
+ info.size = len(transferable)
+ info.mode = mode
+ tar.addfile(info, io.BytesIO(transferable))
+ elif isinstance(transferable, pathlib.Path):
+ if transferable.is_file():
+ info = tarfile.TarInfo(name=destination)
+ info.size = transferable.stat().st_size
+ info.mode = mode
+ with transferable.open("rb") as f:
+ tar.addfile(info, f)
+ elif transferable.is_dir():
+ tar.add(str(transferable), arcname=f"{destination.rstrip('/')}/{transferable.name}")
+ else:
+ raise TypeError(f"Path {transferable} is neither a file nor directory")
+ else:
+ raise TypeError("source must be bytes or Path")
+ return buf.getvalue()
diff --git a/core/tests/test_compose.py b/core/tests/test_compose.py
index 755b8b17b..ee39ec0c0 100644
--- a/core/tests/test_compose.py
+++ b/core/tests/test_compose.py
@@ -1,3 +1,4 @@
+import logging
import subprocess
from pathlib import Path
from re import split
@@ -150,7 +151,7 @@ def test_compose_logs():
assert not line or container.Service in next(iter(line.split("|")))
-def test_compose_volumes():
+def test_compose_volumes(caplog):
_file_in_volume = "/var/lib/example/data/hello"
volumes = DockerCompose(context=FIXTURES / "basic_volume", keep_volumes=True)
with volumes:
@@ -167,8 +168,11 @@ def test_compose_volumes():
assert "hello" in stdout
# third time we expect the file to be missing
- with volumes, pytest.raises(subprocess.CalledProcessError):
- volumes.exec_in_container(["cat", _file_in_volume], "alpine")
+ with caplog.at_level(
+ logging.CRITICAL, logger="testcontainers.compose.compose"
+ ): # suppress expected error logs about missing volume
+ with volumes, pytest.raises(subprocess.CalledProcessError):
+ volumes.exec_in_container(["cat", _file_in_volume], "alpine")
# noinspection HttpUrlsUsage
diff --git a/core/tests/test_docker_in_docker.py b/core/tests/test_docker_in_docker.py
index 43ec020c0..ada83c5ff 100644
--- a/core/tests/test_docker_in_docker.py
+++ b/core/tests/test_docker_in_docker.py
@@ -179,6 +179,7 @@ def test_find_host_network_in_dood() -> None:
assert DockerClient().find_host_network() == os.environ[EXPECTED_NETWORK_VAR]
+@pytest.mark.long_running
@pytest.mark.skipif(
is_mac(),
reason="Docker socket mounting and container networking do not work reliably on Docker Desktop for macOS",
@@ -219,6 +220,7 @@ def test_dood(python_testcontainer_image: str) -> None:
assert status["StatusCode"] == 0
+@pytest.mark.long_running
@pytest.mark.skipif(
is_mac(),
reason="Docker socket mounting and container networking do not work reliably on Docker Desktop for macOS",
diff --git a/core/tests/test_ryuk.py b/core/tests/test_ryuk.py
index 0321f1a9c..ed3bdd7ce 100644
--- a/core/tests/test_ryuk.py
+++ b/core/tests/test_ryuk.py
@@ -1,4 +1,4 @@
-from time import sleep
+from time import sleep, perf_counter
import pytest
from pytest import MonkeyPatch
@@ -12,6 +12,27 @@
from testcontainers.core.waiting_utils import wait_for_logs
+def _wait_for_container_removed(client: DockerClient, container_id: str, timeout: float = 30) -> None:
+ """Poll until a container is fully removed (raises NotFound)."""
+ start = perf_counter()
+ while perf_counter() - start < timeout:
+ try:
+ client.containers.get(container_id)
+ except NotFound:
+ return
+ sleep(0.5)
+
+ try:
+ c = client.containers.get(container_id)
+ name = c.name
+ status = c.status
+ started_at = c.attrs.get("State", {}).get("StartedAt", "unknown")
+ detail = f"name={name}, status={status}, started_at={started_at}"
+ except NotFound:
+ detail = "container disappeared just after timeout"
+ raise TimeoutError(f"Container {container_id} was not removed within {timeout}s ({detail})")
+
+
@pytest.mark.skipif(
is_mac(),
reason="Ryuk container reaping is unreliable on Docker Desktop for macOS due to VM-based container lifecycle handling",
@@ -39,8 +60,11 @@ def test_wait_for_reaper(monkeypatch: MonkeyPatch):
assert rs
rs.close()
- sleep(0.6) # Sleep until Ryuk reaps all dangling containers. 0.5 extra seconds for good measure.
+ # Ryuk will reap containers then auto-remove itself.
+ # Wait for the reaper container to disappear and once it's gone, all labeled containers are guaranteed reaped.
+ _wait_for_container_removed(docker_client, reaper_id)
+ # Verify both containers were reaped
with pytest.raises(NotFound):
docker_client.containers.get(container_id)
with pytest.raises(NotFound):
diff --git a/core/tests/test_transferable.py b/core/tests/test_transferable.py
new file mode 100644
index 000000000..992f163af
--- /dev/null
+++ b/core/tests/test_transferable.py
@@ -0,0 +1,153 @@
+from pathlib import Path
+
+import pytest
+from testcontainers.core.container import DockerContainer
+from testcontainers.core.transferable import Transferable, TransferSpec, build_transfer_tar
+
+import io
+import tarfile
+from typing import Any
+
+
+def test_build_transfer_tar_from_bytes():
+ data = b"hello world"
+ tar_bytes = build_transfer_tar(data, "/tmp/my_file")
+
+ with tarfile.open(fileobj=io.BytesIO(tar_bytes)) as tar:
+ members = tar.getmembers()
+ assert len(members) == 1
+ assert members[0].name == "/tmp/my_file"
+ assert members[0].size == len(data)
+ assert members[0].mode == 0o644
+ extracted = tar.extractfile(members[0])
+ assert extracted is not None
+ assert extracted.read() == data
+
+
+def test_build_transfer_tar_from_file(tmp_path: Path):
+ my_file = tmp_path / "my_file"
+ my_file.write_bytes(b"file content")
+
+ tar_bytes = build_transfer_tar(my_file, "/dest/my_file", mode=0o755)
+
+ with tarfile.open(fileobj=io.BytesIO(tar_bytes)) as tar:
+ members = tar.getmembers()
+ assert len(members) == 1
+ assert members[0].name == "/dest/my_file"
+ assert members[0].mode == 0o755
+ extracted = tar.extractfile(members[0])
+ assert extracted is not None
+ assert extracted.read() == b"file content"
+
+
+def test_build_transfer_tar_from_directory(tmp_path: Path):
+ source_dir = tmp_path / "my_dir"
+ source_dir.mkdir()
+ (source_dir / "a.txt").write_bytes(b"aaa")
+
+ tar_bytes = build_transfer_tar(source_dir, "/dest")
+
+ with tarfile.open(fileobj=io.BytesIO(tar_bytes)) as tar:
+ names = tar.getnames()
+ assert any("my_dir" in n for n in names)
+ assert any("a.txt" in n for n in names)
+
+
+def test_build_transfer_tar_rejects_invalid_type():
+ with pytest.raises(TypeError, match="source must be bytes or Path"):
+ invalid: Any = 123
+ build_transfer_tar(invalid, "/tmp/bad")
+
+
+def test_build_transfer_tar_rejects_nonexistent_path(tmp_path: Path):
+ bad_path = tmp_path / "does_not_exist"
+ with pytest.raises(TypeError, match="neither a file nor directory"):
+ build_transfer_tar(bad_path, "/tmp/bad")
+
+
+@pytest.fixture(name="transferable", params=(bytes, Path))
+def copy_sources_fixture(request, tmp_path: Path):
+ """
+ Provide source argument for tests of copy_into_container
+ """
+ raw_data = b"hello world"
+ if request.param is bytes:
+ return raw_data
+ elif request.param is Path:
+ my_file = tmp_path / "my_file"
+ my_file.write_bytes(raw_data)
+ return my_file
+ pytest.fail("Invalid type")
+
+
+def test_copy_into_container_at_runtime(transferable: Transferable):
+ destination_in_container = "/tmp/my_file"
+
+ with DockerContainer("bash", command="sleep infinity") as container:
+ container.copy_into_container(transferable, destination_in_container)
+ result = container.exec(f"cat {destination_in_container}")
+
+ assert result.exit_code == 0
+ assert result.output == b"hello world"
+
+
+def test_copy_into_container_at_startup(transferable: Transferable):
+ destination_in_container = "/tmp/my_file"
+
+ container = DockerContainer("bash", command="sleep infinity")
+ container.with_copy_into_container(transferable, destination_in_container)
+
+ with container:
+ result = container.exec(f"cat {destination_in_container}")
+
+ assert result.exit_code == 0
+ assert result.output == b"hello world"
+
+
+def test_copy_into_container_via_initializer(transferable: Transferable):
+ destination_in_container = "/tmp/my_file"
+ transferables: list[TransferSpec] = [(transferable, destination_in_container, 0o644)]
+
+ with DockerContainer("bash", command="sleep infinity", transferables=transferables) as container:
+ result = container.exec(f"cat {destination_in_container}")
+
+ assert result.exit_code == 0
+ assert result.output == b"hello world"
+
+
+def test_copy_file_from_container(tmp_path: Path):
+ file_in_container = "/tmp/foo.txt"
+ destination_on_host = tmp_path / "foo.txt"
+ assert not destination_on_host.is_file()
+
+ with DockerContainer("bash", command="sleep infinity") as container:
+ result = container.exec(f'bash -c "echo -n hello world > {file_in_container}"')
+ assert result.exit_code == 0
+ container.copy_from_container(file_in_container, destination_on_host)
+
+ assert destination_on_host.is_file()
+ assert destination_on_host.read_text() == "hello world"
+
+
+def test_copy_directory_into_container(tmp_path: Path):
+ source_dir = tmp_path / "my_directory"
+ source_dir.mkdir()
+ my_file = source_dir / "my_file"
+ my_file.write_bytes(b"hello world")
+
+ destination_in_container = "/tmp/my_destination_directory"
+
+ with DockerContainer("bash", command="sleep infinity") as container:
+ container.copy_into_container(source_dir, destination_in_container)
+ result = container.exec(f"ls {destination_in_container}")
+
+ assert result.exit_code == 0
+ assert result.output == b"my_directory\n"
+
+ result = container.exec(f"ls {destination_in_container}/my_directory")
+ assert result.exit_code == 0
+ assert result.output == b"my_file\n"
+
+ result = container.exec(f"cat {destination_in_container}/my_directory/my_file")
+ assert result.exit_code == 0
+ assert result.output == b"hello world"
diff --git a/core/tests/test_wait_strategies.py b/core/tests/test_wait_strategies.py
index da62f1fbe..20f0e2c2c 100644
--- a/core/tests/test_wait_strategies.py
+++ b/core/tests/test_wait_strategies.py
@@ -1,3 +1,4 @@
+import logging
import re
import time
from datetime import timedelta
@@ -528,7 +529,7 @@ def test_file_exists_wait_strategy_initialization(self, file_path):
@patch("pathlib.Path.is_file")
@patch("time.time")
@patch("time.sleep")
- def test_wait_until_ready(self, mock_sleep, mock_time, mock_is_file, file_exists, expected_behavior):
+ def test_wait_until_ready(self, mock_sleep, mock_time, mock_is_file, file_exists, expected_behavior, caplog):
strategy = FileExistsWaitStrategy("/tmp/test.txt").with_startup_timeout(1)
mock_container = Mock()
@@ -547,7 +548,8 @@ def test_wait_until_ready(self, mock_sleep, mock_time, mock_is_file, file_exists
mock_is_file.assert_called()
else:
with pytest.raises(TimeoutError, match="File.*did not exist within.*seconds"):
- strategy.wait_until_ready(mock_container)
+ with caplog.at_level(logging.CRITICAL, logger="testcontainers.core.wait_strategies"):
+ strategy.wait_until_ready(mock_container)
class TestCompositeWaitStrategy:
@@ -615,7 +617,7 @@ def test_wait_until_ready_all_strategies_succeed(self):
strategy2.wait_until_ready.assert_called_once_with(mock_container)
strategy3.wait_until_ready.assert_called_once_with(mock_container)
- def test_wait_until_ready_first_strategy_fails(self):
+ def test_wait_until_ready_first_strategy_fails(self, caplog):
"""Test that execution stops when first strategy fails."""
strategy1 = Mock()
strategy2 = Mock()
@@ -628,7 +630,8 @@ def test_wait_until_ready_first_strategy_fails(self):
strategy1.wait_until_ready.side_effect = TimeoutError("First strategy failed")
with pytest.raises(TimeoutError, match="First strategy failed"):
- composite.wait_until_ready(mock_container)
+ with caplog.at_level(logging.CRITICAL, logger="testcontainers.core.wait_strategies"):
+ composite.wait_until_ready(mock_container)
# Only first strategy should be called
strategy1.wait_until_ready.assert_called_once_with(mock_container)
diff --git a/docs/features/wait_strategies.md b/docs/features/wait_strategies.md
index 3bb42eb69..a8b1351ae 100644
--- a/docs/features/wait_strategies.md
+++ b/docs/features/wait_strategies.md
@@ -4,19 +4,17 @@ Testcontainers-Python provides several strategies to wait for containers to be r
## Basic Wait Strategy
-The simplest way to wait for a container is using the `wait_container_is_ready` decorator:
+The simplest way to wait for a container is using a structured wait strategy:
```python
-from testcontainers.core.waiting_utils import wait_container_is_ready
+from testcontainers.core.wait_strategies import HttpWaitStrategy
class MyContainer(DockerContainer):
- @wait_container_is_ready()
def _connect(self):
- # Your connection logic here
- pass
+ HttpWaitStrategy(8080).wait_until_ready(self)
```
-This decorator will retry the method until it succeeds or times out. By default, it will retry for 120 seconds with a 1-second interval between attempts.
+The strategy will retry until it succeeds or times out. By default, it will retry for 120 seconds with a 1-second interval between attempts.
## Log-based Waiting
diff --git a/docs/modules/valkey.md b/docs/modules/valkey.md
new file mode 100644
index 000000000..fbfea9ed7
--- /dev/null
+++ b/docs/modules/valkey.md
@@ -0,0 +1,23 @@
+# Valkey
+
+Since testcontainers-python :material-tag: v4.14.0
+
+## Introduction
+
+The Testcontainers module for Valkey.
+
+## Adding this module to your project dependencies
+
+Please run the following command to add the Valkey module to your python dependencies:
+
+```bash
+pip install testcontainers[valkey]
+```
+
+## Usage example
+
+<!--codeinclude-->
+
+[Creating a Valkey container](../../modules/valkey/example_basic.py)
+
+<!--/codeinclude-->
diff --git a/mkdocs.yml b/mkdocs.yml
index aca8281b7..0a31629a2 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -72,6 +72,7 @@ nav:
- modules/redis.md
- modules/scylla.md
- modules/trino.md
+ - modules/valkey.md
- modules/weaviate.md
- modules/aws.md
- modules/azurite.md
diff --git a/modules/azurite/testcontainers/azurite/py.typed b/modules/azurite/testcontainers/azurite/py.typed
new file mode 100644
index 000000000..8b1378917
--- /dev/null
+++ b/modules/azurite/testcontainers/azurite/py.typed
@@ -0,0 +1 @@
+
diff --git a/modules/generic/README.rst b/modules/generic/README.rst
index 4497ec922..4b7281121 100644
--- a/modules/generic/README.rst
+++ b/modules/generic/README.rst
@@ -9,6 +9,7 @@ FastAPI container that is using :code:`ServerContainer`
>>> from testcontainers.generic import ServerContainer
>>> from testcontainers.core.waiting_utils import wait_for_logs
+ >>> from testcontainers.core.image import DockerImage
>>> with DockerImage(path="./modules/generic/tests/samples/fastapi", tag="fastapi-test:latest") as image:
... with ServerContainer(port=80, image=image) as fastapi_server:
@@ -50,3 +51,39 @@ A more advance use-case, where we are using a FastAPI container that is using Re
... response = client.get(f"/get/{test_data['key']}")
... assert response.status_code == 200, "Failed to get data"
... assert response.json() == {"key": test_data["key"], "value": test_data["value"]}
+
+.. autoclass:: testcontainers.generic.SqlContainer
+.. title:: testcontainers.generic.SqlContainer
+
+Postgres container that is using :code:`SqlContainer`
+
+.. doctest::
+
+ >>> from testcontainers.generic import SqlContainer
+ >>> from testcontainers.generic.providers.sql_connection_wait_strategy import SqlAlchemyConnectWaitStrategy
+ >>> from sqlalchemy import text
+ >>> import sqlalchemy
+
+ >>> class CustomPostgresContainer(SqlContainer):
+ ... def __init__(self, image="postgres:15-alpine",
+ ... port=5432, username="test", password="test", dbname="test"):
+ ... super().__init__(image=image, wait_strategy=SqlAlchemyConnectWaitStrategy())
+ ... self.port_to_expose = port
+ ... self.username = username
+ ... self.password = password
+ ... self.dbname = dbname
+ ... def get_connection_url(self) -> str:
+ ... host = self.get_container_host_ip()
+ ... port = self.get_exposed_port(self.port_to_expose)
+ ... return f"postgresql://{self.username}:{self.password}@{host}:{port}/{self.dbname}"
+ ... def _configure(self) -> None:
+ ... self.with_exposed_ports(self.port_to_expose)
+ ... self.with_env("POSTGRES_USER", self.username)
+ ... self.with_env("POSTGRES_PASSWORD", self.password)
+ ... self.with_env("POSTGRES_DB", self.dbname)
+
+ >>> with CustomPostgresContainer() as postgres:
+ ... engine = sqlalchemy.create_engine(postgres.get_connection_url())
+ ... with engine.connect() as conn:
+ ... result = conn.execute(text("SELECT 1"))
+ ... assert result.scalar() == 1
diff --git a/modules/generic/testcontainers/generic/__init__.py b/modules/generic/testcontainers/generic/__init__.py
index f239a80c6..ce6610a3c 100644
--- a/modules/generic/testcontainers/generic/__init__.py
+++ b/modules/generic/testcontainers/generic/__init__.py
@@ -1 +1,2 @@
from .server import ServerContainer # noqa: F401
+from .sql import SqlContainer # noqa: F401
diff --git a/modules/generic/testcontainers/generic/providers/__init__.py b/modules/generic/testcontainers/generic/providers/__init__.py
new file mode 100644
index 000000000..5b5eb95a2
--- /dev/null
+++ b/modules/generic/testcontainers/generic/providers/__init__.py
@@ -0,0 +1 @@
+from .sql_connection_wait_strategy import SqlAlchemyConnectWaitStrategy # noqa: F401
diff --git a/modules/generic/testcontainers/generic/providers/sql_connection_wait_strategy.py b/modules/generic/testcontainers/generic/providers/sql_connection_wait_strategy.py
new file mode 100644
index 000000000..bad46c743
--- /dev/null
+++ b/modules/generic/testcontainers/generic/providers/sql_connection_wait_strategy.py
@@ -0,0 +1,48 @@
+# This module provides a wait strategy for SQL database connectivity testing using SQLAlchemy.
+# It includes handling for transient exceptions and connection retries.
+
+import logging
+
+from testcontainers.core.waiting_utils import WaitStrategy, WaitStrategyTarget
+
+logger = logging.getLogger(__name__)
+
+ADDITIONAL_TRANSIENT_ERRORS = []
+try:
+ from sqlalchemy.exc import DBAPIError
+
+ ADDITIONAL_TRANSIENT_ERRORS.append(DBAPIError)
+except ImportError:
+ logger.debug("SQLAlchemy not available, skipping DBAPIError handling")
+
+
+class SqlAlchemyConnectWaitStrategy(WaitStrategy):
+ """Wait strategy for database connectivity testing using SQLAlchemy."""
+
+ def __init__(self):
+ super().__init__()
+ self.with_transient_exceptions(TimeoutError, ConnectionError, *ADDITIONAL_TRANSIENT_ERRORS)
+
+ def wait_until_ready(self, container: WaitStrategyTarget) -> None:
+ """Test database connectivity with retry logic until success or timeout."""
+ if not hasattr(container, "get_connection_url"):
+ raise AttributeError(f"Container {container} must have a get_connection_url method")
+
+ try:
+ import sqlalchemy
+ except ImportError as e:
+ raise ImportError("SQLAlchemy is required for database containers") from e
+
+ def _test_connection() -> bool:
+ """Test database connection, returning True if successful."""
+ engine = sqlalchemy.create_engine(container.get_connection_url())
+ try:
+ with engine.connect():
+ logger.info("Database connection successful")
+ return True
+ finally:
+ engine.dispose()
+
+ result = self._poll(_test_connection)
+ if not result:
+ raise TimeoutError(f"Database connection failed after {self._startup_timeout}s timeout")
diff --git a/modules/generic/testcontainers/generic/server.py b/modules/generic/testcontainers/generic/server.py
index 61e9c5eb9..efbd343a9 100644
--- a/modules/generic/testcontainers/generic/server.py
+++ b/modules/generic/testcontainers/generic/server.py
@@ -1,15 +1,11 @@
from typing import Union
-from urllib.error import HTTPError, URLError
-from urllib.request import urlopen
import httpx
from testcontainers.core.container import DockerContainer
from testcontainers.core.exceptions import ContainerStartException
from testcontainers.core.image import DockerImage
-from testcontainers.core.waiting_utils import wait_container_is_ready
-
-# This comment can be removed (Used for testing)
+from testcontainers.core.wait_strategies import HttpWaitStrategy
class ServerContainer(DockerContainer):
@@ -42,19 +38,9 @@ def __init__(self, port: int, image: Union[str, DockerImage]) -> None:
self.internal_port = port
self.with_exposed_ports(self.internal_port)
- @wait_container_is_ready(HTTPError, URLError)
def _connect(self) -> None:
- # noinspection HttpUrlsUsage
- url = self._create_connection_url()
- try:
- with urlopen(url) as r:
- assert b"" in r.read()
- except HTTPError as e:
- # 404 is expected, as the server may not have the specific endpoint we are looking for
- if e.code == 404:
- pass
- else:
- raise
+ strategy = HttpWaitStrategy(self.internal_port).for_status_code(404)
+ strategy.wait_until_ready(self)
def get_api_url(self) -> str:
raise NotImplementedError
diff --git a/modules/generic/testcontainers/generic/sql.py b/modules/generic/testcontainers/generic/sql.py
new file mode 100644
index 000000000..c7ed755ed
--- /dev/null
+++ b/modules/generic/testcontainers/generic/sql.py
@@ -0,0 +1,139 @@
+import logging
+from typing import Any, Optional
+from urllib.parse import quote, urlencode
+
+from testcontainers.core.container import DockerContainer
+from testcontainers.core.exceptions import ContainerStartException
+from testcontainers.core.waiting_utils import WaitStrategy
+
+logger = logging.getLogger(__name__)
+
+
+class SqlContainer(DockerContainer):
+ """
+ Generic SQL database container providing common functionality.
+
+ This class can serve as a base for database-specific container implementations.
+ It provides connection management, URL construction, and basic lifecycle methods.
+ Database connection readiness is automatically handled by the provided wait strategy.
+
+ Note: `SqlAlchemyConnectWaitStrategy` from `sql_connection_wait_strategy` is a provided wait strategy for SQL databases.
+ """
+
+ def __init__(self, image: str, wait_strategy: WaitStrategy, **kwargs):
+ """
+        Initialize SqlContainer with the given (required) wait strategy.
+
+ Args:
+ image: Docker image name
+ wait_strategy: Wait strategy for SQL database connectivity
+ **kwargs: Additional arguments passed to DockerContainer
+ """
+ super().__init__(image, **kwargs)
+ self.wait_strategy = wait_strategy
+
+ def _create_connection_url(
+ self,
+ dialect: str,
+ username: str,
+ password: str,
+ host: Optional[str] = None,
+ port: Optional[int] = None,
+ dbname: Optional[str] = None,
+ query_params: Optional[dict[str, str]] = None,
+ **kwargs: Any,
+ ) -> str:
+ """
+ Create a database connection URL.
+
+ Args:
+ dialect: Database dialect (e.g., 'postgresql', 'mysql')
+ username: Database username
+ password: Database password
+ host: Database host (defaults to container host)
+ port: Database port
+ dbname: Database name
+ query_params: Additional query parameters for the URL
+            **kwargs: Additional parameters passed through (currently unused; reserved for future use)
+
+ Returns:
+ str: Formatted database connection URL
+
+ Raises:
+            TypeError: If port is None and cannot be resolved to an exposed port
+ ContainerStartException: If container is not started
+ """
+
+ if self._container is None:
+ raise ContainerStartException("Container has not been started")
+
+ host = host or self.get_container_host_ip()
+ exposed_port = self.get_exposed_port(port)
+ quoted_password = quote(password, safe="")
+ quoted_username = quote(username, safe="")
+ url = f"{dialect}://{quoted_username}:{quoted_password}@{host}:{exposed_port}"
+
+ if dbname:
+ quoted_dbname = quote(dbname, safe="")
+ url = f"{url}/{quoted_dbname}"
+
+ if query_params:
+ query_string = urlencode(query_params)
+ url = f"{url}?{query_string}"
+
+ return url
+
+ def start(self) -> "SqlContainer":
+ """
+ Start the database container and perform initialization.
+
+ Returns:
+ SqlContainer: Self for method chaining
+
+ Raises:
+ ContainerStartException: If container fails to start
+ Exception: If configuration, seed transfer, or connection fails
+ """
+ logger.info(f"Starting database container: {self.image}")
+
+ try:
+ self._configure()
+ self.waiting_for(self.wait_strategy)
+ super().start()
+ self._transfer_seed()
+ logger.info("Database container started successfully")
+ except Exception as e:
+ logger.error(f"Failed to start database container: {e}")
+ raise
+
+ return self
+
+ def _configure(self) -> None:
+ """
+ Configure the database container before starting.
+
+ Raises:
+ NotImplementedError: Must be implemented by subclasses
+ """
+ raise NotImplementedError("Subclasses must implement _configure()")
+
+ def _transfer_seed(self) -> None:
+ """
+ Transfer seed data to the database container.
+
+ This method can be overridden by subclasses to provide
+ database-specific seeding functionality.
+ """
+ logger.debug("No seed data to transfer")
+
+ def get_connection_url(self) -> str:
+ """
+ Get the database connection URL.
+
+ Returns:
+ str: Database connection URL
+
+ Raises:
+ NotImplementedError: Must be implemented by subclasses
+ """
+ raise NotImplementedError("Subclasses must implement get_connection_url()")
diff --git a/modules/generic/tests/test_generic.py b/modules/generic/tests/test_server.py
similarity index 100%
rename from modules/generic/tests/test_generic.py
rename to modules/generic/tests/test_server.py
diff --git a/modules/generic/tests/test_sql.py b/modules/generic/tests/test_sql.py
new file mode 100644
index 000000000..69fff2427
--- /dev/null
+++ b/modules/generic/tests/test_sql.py
@@ -0,0 +1,238 @@
+import pytest
+from unittest.mock import patch
+
+from testcontainers.core.exceptions import ContainerStartException
+from testcontainers.generic.sql import SqlContainer
+from testcontainers.generic.providers.sql_connection_wait_strategy import SqlAlchemyConnectWaitStrategy
+
+
+class SimpleSqlContainer(SqlContainer):
+ """Simple concrete implementation for testing."""
+
+ def __init__(self, image: str = "postgres:13"):
+ super().__init__(image, wait_strategy=SqlAlchemyConnectWaitStrategy())
+ self.username = "testuser"
+ self.password = "testpass"
+ self.dbname = "testdb"
+ self.port = 5432
+
+ def get_connection_url(self) -> str:
+ return self._create_connection_url(
+ dialect="postgresql", username=self.username, password=self.password, port=self.port, dbname=self.dbname
+ )
+
+ def _configure(self) -> None:
+ self.with_env("POSTGRES_USER", self.username)
+ self.with_env("POSTGRES_PASSWORD", self.password)
+ self.with_env("POSTGRES_DB", self.dbname)
+ self.with_exposed_ports(self.port)
+
+
+class TestSqlContainer:
+ def test_abstract_methods_raise_not_implemented(self):
+ container = SqlContainer("test:latest", SqlAlchemyConnectWaitStrategy())
+
+ with pytest.raises(NotImplementedError):
+ container.get_connection_url()
+
+ with pytest.raises(NotImplementedError):
+ container._configure()
+
+ def test_transfer_seed_default_behavior(self):
+ container = SqlContainer("test:latest", SqlAlchemyConnectWaitStrategy())
+ # Should not raise an exception
+ container._transfer_seed()
+
+ def test_connection_url_creation_basic(self):
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {})() # Simple mock
+ container.get_container_host_ip = lambda: "localhost"
+ container.get_exposed_port = lambda port: port
+
+ url = container._create_connection_url(dialect="postgresql", username="user", password="pass", port=5432)
+
+ assert url == "postgresql://user:pass@localhost:5432"
+
+ def test_connection_url_with_database_name(self):
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {})()
+ container.get_container_host_ip = lambda: "localhost"
+ container.get_exposed_port = lambda port: port
+
+ url = container._create_connection_url(
+ dialect="postgresql", username="user", password="pass", port=5432, dbname="mydb"
+ )
+
+ assert url == "postgresql://user:pass@localhost:5432/mydb"
+
+ def test_connection_url_with_special_characters(self):
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {})()
+ container.get_container_host_ip = lambda: "localhost"
+ container.get_exposed_port = lambda port: port
+
+ url = container._create_connection_url(
+ dialect="postgresql", username="user@domain", password="p@ss/word", port=5432
+ )
+
+ # Check that special characters are URL encoded
+ assert "user%40domain" in url
+ assert "p%40ss%2Fword" in url
+
+ def test_connection_url_with_query_params(self):
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {})()
+ container.get_container_host_ip = lambda: "localhost"
+ container.get_exposed_port = lambda port: port
+
+ url = container._create_connection_url(
+ dialect="postgresql",
+ username="user",
+ password="pass",
+ port=5432,
+ query_params={"ssl": "require", "timeout": "30"},
+ )
+
+ assert "?" in url
+ assert "ssl=require" in url
+ assert "timeout=30" in url
+
+ def test_connection_url_type_errors(self):
+ """Test that _create_connection_url raises TypeError with invalid types"""
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {"id": "test-id"})()
+
+ # Mock get_exposed_port to simulate what happens with None port
+ with patch.object(container, "get_exposed_port") as mock_get_port:
+ # Simulate the TypeError that would occur when int(None) is called
+ mock_get_port.side_effect = TypeError(
+ "int() argument must be a string, a bytes-like object or a real number, not 'NoneType'"
+ )
+
+ with pytest.raises(TypeError, match="int\\(\\) argument must be a string"):
+ container._create_connection_url("postgresql", "user", "pass", port=None)
+
+ def test_connection_url_container_not_started(self):
+ container = SimpleSqlContainer()
+ container._container = None
+
+ with pytest.raises(ContainerStartException, match="Container has not been started"):
+ container._create_connection_url("postgresql", "user", "pass", port=5432)
+
+ def test_container_configuration(self):
+ container = SimpleSqlContainer("postgres:13")
+
+ # Test that configuration sets up environment
+ container._configure()
+
+ assert container.env["POSTGRES_USER"] == "testuser"
+ assert container.env["POSTGRES_PASSWORD"] == "testpass"
+ assert container.env["POSTGRES_DB"] == "testdb"
+
+ def test_concrete_container_connection_url(self):
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {})()
+ container.get_container_host_ip = lambda: "localhost"
+ container.get_exposed_port = lambda port: 5432
+
+ url = container.get_connection_url()
+
+ assert url.startswith("postgresql://")
+ assert "testuser" in url
+ assert "testpass" in url
+ assert "testdb" in url
+ assert "localhost:5432" in url
+
+ def test_container_inheritance(self):
+ container = SimpleSqlContainer()
+
+ assert isinstance(container, SqlContainer)
+ assert hasattr(container, "get_connection_url")
+ assert hasattr(container, "_configure")
+ assert hasattr(container, "_transfer_seed")
+ assert hasattr(container, "start")
+
+ def test_additional_transient_errors_list(self):
+ from testcontainers.generic.providers.sql_connection_wait_strategy import ADDITIONAL_TRANSIENT_ERRORS
+
+ assert isinstance(ADDITIONAL_TRANSIENT_ERRORS, list)
+ # List may be empty if SQLAlchemy not available, or contain DBAPIError if it is
+
+ def test_empty_password_handling(self):
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {})()
+ container.get_container_host_ip = lambda: "localhost"
+ container.get_exposed_port = lambda port: port
+
+ url = container._create_connection_url(dialect="postgresql", username="user", password="", port=5432)
+
+ assert url == "postgresql://user:@localhost:5432"
+
+ def test_unicode_characters_in_credentials(self):
+ container = SimpleSqlContainer()
+ container._container = type("MockContainer", (), {})()
+ container.get_container_host_ip = lambda: "localhost"
+ container.get_exposed_port = lambda port: port
+
+ url = container._create_connection_url(
+ dialect="postgresql", username="usér", password="päss", port=5432, dbname="tëstdb"
+ )
+
+ assert "us%C3%A9r" in url
+ assert "p%C3%A4ss" in url
+ assert "t%C3%ABstdb" in url
+
+ def test_start_postgres_container_integration(self):
+ """Integration test that actually starts a PostgreSQL container."""
+ container = SimpleSqlContainer()
+
+ # This will start the container and test the connection
+ container.start()
+
+ # Verify the container is running
+ assert container._container is not None
+
+ # Test that we can get a connection URL
+ url = container.get_connection_url()
+ assert url.startswith("postgresql://")
+ assert "testuser" in url
+ assert "testdb" in url
+
+ # Verify environment variables are set
+ assert container.env["POSTGRES_USER"] == "testuser"
+ assert container.env["POSTGRES_PASSWORD"] == "testpass"
+ assert container.env["POSTGRES_DB"] == "testdb"
+
+ # check logs
+ logs = container.get_logs()
+ assert "database system is ready to accept connections" in logs[0].decode("utf-8").lower()
+
+ def test_sql_postgres_container_integration(self):
+ """Integration test for SqlContainer with PostgreSQL."""
+ container = SimpleSqlContainer()
+
+ # This will start the container and test the connection
+ container.start()
+
+ # Verify the container is running
+ assert container._container is not None
+
+ # Test that we can get a connection URL
+ url = container.get_connection_url()
+
+ # check sql operations
+ import sqlalchemy
+
+ engine = sqlalchemy.create_engine(url)
+ with engine.connect() as conn:
+ # Create a test table
+ conn.execute(
+ sqlalchemy.text("CREATE TABLE IF NOT EXISTS test_table (id SERIAL PRIMARY KEY, name VARCHAR(50));")
+ )
+ # Insert a test record
+ conn.execute(sqlalchemy.text("INSERT INTO test_table (name) VALUES ('test_name');"))
+ # Query the test record
+ result = conn.execute(sqlalchemy.text("SELECT name FROM test_table WHERE name='test_name';"))
+ fetched = result.fetchone()
+ assert fetched is not None
+ assert fetched[0] == "test_name"
diff --git a/modules/kafka/testcontainers/kafka/_redpanda.py b/modules/kafka/testcontainers/kafka/_redpanda.py
index b49957d03..a8adc0e03 100644
--- a/modules/kafka/testcontainers/kafka/_redpanda.py
+++ b/modules/kafka/testcontainers/kafka/_redpanda.py
@@ -1,11 +1,12 @@
import os.path
+import re
import tarfile
import time
from io import BytesIO
from textwrap import dedent
from testcontainers.core.container import DockerContainer
-from testcontainers.core.waiting_utils import wait_for_logs
+from testcontainers.core.wait_strategies import LogMessageWaitStrategy
class RedpandaContainer(DockerContainer):
@@ -34,6 +35,7 @@ def __init__(
self.redpanda_port = 9092
self.schema_registry_port = 8081
self.with_exposed_ports(self.redpanda_port, self.schema_registry_port)
+ self.wait_for: re.Pattern[str] = re.compile(r".*Started Kafka API server.*")
def get_bootstrap_server(self) -> str:
host = self.get_container_host_ip()
@@ -70,7 +72,9 @@ def start(self, timeout=10) -> "RedpandaContainer":
self.with_command(command)
super().start()
self.tc_start()
- wait_for_logs(self, r".*Started Kafka API server.*", timeout=timeout)
+ wait_strategy = LogMessageWaitStrategy(self.wait_for)
+ wait_strategy.with_startup_timeout(timeout)
+ wait_strategy.wait_until_ready(self)
return self
def create_file(self, content: bytes, path: str) -> None:
diff --git a/modules/kafka/tests/test_redpanda.py b/modules/kafka/tests/test_redpanda.py
index 7cee9fa8a..93f108a71 100644
--- a/modules/kafka/tests/test_redpanda.py
+++ b/modules/kafka/tests/test_redpanda.py
@@ -13,7 +13,7 @@ def test_redpanda_producer_consumer():
produce_and_consume_message(container)
-@pytest.mark.parametrize("version", ["v23.1.13", "v23.3.10"])
+@pytest.mark.parametrize("version", ["v23.1.13", "v25.3.6"])
def test_redpanda_confluent_version(version):
with RedpandaContainer(image=f"docker.redpanda.com/redpandadata/redpanda:{version}") as container:
produce_and_consume_message(container)
diff --git a/modules/keycloak/testcontainers/keycloak/__init__.py b/modules/keycloak/testcontainers/keycloak/__init__.py
index 21ffc4231..044ba0b2b 100644
--- a/modules/keycloak/testcontainers/keycloak/__init__.py
+++ b/modules/keycloak/testcontainers/keycloak/__init__.py
@@ -78,12 +78,22 @@ def _configure(self) -> None:
def get_url(self) -> str:
host = self.get_container_host_ip()
port = self.get_exposed_port(self.port)
- return f"http://{host}:{port}"
+
+ if "KC_HTTP_RELATIVE_PATH" in self.env:
+ path = self.env.get("KC_HTTP_RELATIVE_PATH", "").strip("/")
+ return f"http://{host}:{port}/{path}/"
+ else:
+ return f"http://{host}:{port}"
def get_management_url(self) -> str:
host = self.get_container_host_ip()
port = self.get_exposed_port(self.management_port)
- return f"http://{host}:{port}"
+
+ if "KC_HTTP_MANAGEMENT_RELATIVE_PATH" in self.env:
+ path = self.env.get("KC_HTTP_MANAGEMENT_RELATIVE_PATH", "").strip("/")
+ return f"http://{host}:{port}/{path}/"
+ else:
+ return f"http://{host}:{port}"
@wait_container_is_ready(requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout)
def _readiness_probe(self) -> None:
diff --git a/modules/keycloak/tests/test_keycloak.py b/modules/keycloak/tests/test_keycloak.py
index 24f533d11..4df0ca9c9 100644
--- a/modules/keycloak/tests/test_keycloak.py
+++ b/modules/keycloak/tests/test_keycloak.py
@@ -2,7 +2,14 @@
from testcontainers.keycloak import KeycloakContainer
-@pytest.mark.parametrize("image_version", ["26.0.0", "25.0", "24.0.1", "18.0"])
+@pytest.mark.parametrize("image_version", ["26.4.0", "26.0.0", "25.0", "24.0.1", "18.0"])
def test_docker_run_keycloak(image_version: str):
with KeycloakContainer(f"quay.io/keycloak/keycloak:{image_version}") as keycloak_admin:
assert keycloak_admin.get_client().users_count() == 1
+
+
+def test_docker_run_keycloak_with_management_relative_path():
+ with KeycloakContainer("quay.io/keycloak/keycloak:26.4.0").with_env(
+ "KC_HTTP_MANAGEMENT_RELATIVE_PATH", "/some/deeply/nested/path"
+ ) as keycloak_admin:
+ assert keycloak_admin.get_client().users_count() == 1
diff --git a/modules/mqtt/testcontainers/mqtt/__init__.py b/modules/mqtt/testcontainers/mqtt/__init__.py
index 54a2d87ac..854ec21f8 100644
--- a/modules/mqtt/testcontainers/mqtt/__init__.py
+++ b/modules/mqtt/testcontainers/mqtt/__init__.py
@@ -121,6 +121,10 @@ def start(self, configfile: Optional[str] = None) -> Self:
# default config file
configfile = Path(__file__).parent / MosquittoContainer.CONFIG_FILE
self.with_volume_mapping(configfile, "/mosquitto/config/mosquitto.conf")
+        # since version 2.1.1 (2026-02-04), which fixed a PUID/PGID issue, the container needs a writable data directory;
+        # mounting /data as tmpfs provides one and keeps tests fast
+ self.with_tmpfs_mount("/data")
+
# if self.password:
# # TODO: add authentication
# pass
diff --git a/modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf b/modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf
index 13728cec0..b3c67048f 100644
--- a/modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf
+++ b/modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf
@@ -1,7 +1,6 @@
# see https://mosquitto.org/man/mosquitto-conf-5.html
-protocol mqtt
-user root
+listener 1883
log_dest stdout
allow_anonymous true
@@ -14,7 +13,4 @@ log_timestamp_format %Y-%m-%d %H:%M:%S
persistence true
persistence_location /data/
-listener 1883
-protocol mqtt
-
sys_interval 1
diff --git a/modules/mqtt/tests/test_mosquitto.py b/modules/mqtt/tests/test_mosquitto.py
index 63ce7fcd9..1e058103c 100644
--- a/modules/mqtt/tests/test_mosquitto.py
+++ b/modules/mqtt/tests/test_mosquitto.py
@@ -2,7 +2,7 @@
from testcontainers.mqtt import MosquittoContainer
-VERSIONS = ["1.6.15", "2.0.18"]
+VERSIONS = ["1.6.15", "2.0.18", "2.1.2-alpine"]
@pytest.mark.parametrize("version", VERSIONS)
diff --git a/modules/sftp/testcontainers/sftp/__init__.py b/modules/sftp/testcontainers/sftp/__init__.py
index 0e073ea1a..1f39b9b83 100644
--- a/modules/sftp/testcontainers/sftp/__init__.py
+++ b/modules/sftp/testcontainers/sftp/__init__.py
@@ -20,7 +20,7 @@
from cryptography.hazmat.primitives.asymmetric import rsa
from testcontainers.core.container import DockerContainer
-from testcontainers.core.waiting_utils import wait_for_logs
+from testcontainers.core.wait_strategies import LogMessageWaitStrategy
if TYPE_CHECKING:
from typing_extensions import Self
@@ -265,8 +265,9 @@ def _configure(self) -> None:
self.with_exposed_ports(self.port)
def start(self) -> Self:
+ strategy = LogMessageWaitStrategy(f".*Server listening on 0.0.0.0 port {self.port}.*")
+ self.waiting_for(strategy)
super().start()
- wait_for_logs(self, f".*Server listening on 0.0.0.0 port {self.port}.*")
return self
def get_exposed_sftp_port(self) -> int:
diff --git a/modules/valkey/README.rst b/modules/valkey/README.rst
new file mode 100644
index 000000000..abe0c74e1
--- /dev/null
+++ b/modules/valkey/README.rst
@@ -0,0 +1,2 @@
+.. autoclass:: testcontainers.valkey.ValkeyContainer
+.. title:: testcontainers.valkey.ValkeyContainer
diff --git a/modules/valkey/example_basic.py b/modules/valkey/example_basic.py
new file mode 100644
index 000000000..593a729b8
--- /dev/null
+++ b/modules/valkey/example_basic.py
@@ -0,0 +1,78 @@
+from glide import GlideClient, NodeAddress
+
+from testcontainers.valkey import ValkeyContainer
+
+
+def basic_example():
+ with ValkeyContainer() as valkey_container:
+ # Get connection parameters
+ host = valkey_container.get_host()
+ port = valkey_container.get_exposed_port()
+ connection_url = valkey_container.get_connection_url()
+
+ print(f"Valkey connection URL: {connection_url}")
+ print(f"Host: {host}, Port: {port}")
+
+ # Connect using Glide client
+ client = GlideClient([NodeAddress(host, port)])
+
+ # PING command
+ pong = client.ping()
+ print(f"PING response: {pong}")
+
+ # SET command
+ client.set("key", "value")
+ print("SET response: OK")
+
+ # GET command
+ value = client.get("key")
+ print(f"GET response: {value}")
+
+ client.close()
+
+
+def password_example():
+ with ValkeyContainer().with_password("mypassword") as valkey_container:
+ host = valkey_container.get_host()
+ port = valkey_container.get_exposed_port()
+ connection_url = valkey_container.get_connection_url()
+
+ print(f"\nValkey with password connection URL: {connection_url}")
+
+ # Connect using Glide client with password
+ client = GlideClient([NodeAddress(host, port)], password="mypassword")
+
+ # PING after auth
+ pong = client.ping()
+ print(f"PING response: {pong}")
+
+ client.close()
+
+
+def version_example():
+ # Using specific version
+ with ValkeyContainer().with_image_tag("8.0") as valkey_container:
+ print(f"\nUsing image: {valkey_container.image}")
+ connection_url = valkey_container.get_connection_url()
+ print(f"Connection URL: {connection_url}")
+
+
+def bundle_example():
+ # Using bundle with all modules (JSON, Bloom, Search, etc.)
+ with ValkeyContainer().with_bundle() as valkey_container:
+ print(f"\nUsing bundle image: {valkey_container.image}")
+ host = valkey_container.get_host()
+ port = valkey_container.get_exposed_port()
+
+ # Connect using Glide client
+ client = GlideClient([NodeAddress(host, port)])
+ pong = client.ping()
+ print(f"PING response: {pong}")
+ client.close()
+
+
+if __name__ == "__main__":
+ basic_example()
+ password_example()
+ version_example()
+ bundle_example()
diff --git a/modules/valkey/testcontainers/valkey/__init__.py b/modules/valkey/testcontainers/valkey/__init__.py
new file mode 100644
index 000000000..1ee0c243c
--- /dev/null
+++ b/modules/valkey/testcontainers/valkey/__init__.py
@@ -0,0 +1,114 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from typing import Optional
+
+from testcontainers.core.container import DockerContainer
+from testcontainers.core.wait_strategies import ExecWaitStrategy
+
+
+class ValkeyContainer(DockerContainer):
+ """
+ Valkey container.
+
+ """
+
+ def __init__(self, image: str = "valkey/valkey:latest", port: int = 6379, **kwargs) -> None:
+ super().__init__(image, **kwargs)
+ self.port = port
+ self.password: Optional[str] = None
+ self.with_exposed_ports(self.port)
+
+ def with_password(self, password: str) -> "ValkeyContainer":
+ """
+ Configure authentication for Valkey.
+
+ Args:
+ password: Password for Valkey authentication.
+
+ Returns:
+ self: Container instance for method chaining.
+ """
+ self.password = password
+ self.with_command(["valkey-server", "--requirepass", password])
+ return self
+
+ def with_image_tag(self, tag: str) -> "ValkeyContainer":
+ """
+ Specify Valkey version.
+
+ Args:
+            tag: Image tag (e.g., '8.0', 'latest'); use with_bundle() to switch to the bundle image.
+
+ Returns:
+ self: Container instance for method chaining.
+ """
+ base_image = self.image.split(":")[0]
+ self.image = f"{base_image}:{tag}"
+ return self
+
+ def with_bundle(self) -> "ValkeyContainer":
+ """
+ Enable all modules by switching to valkey-bundle image.
+
+ Returns:
+ self: Container instance for method chaining.
+ """
+ self.image = self.image.replace("valkey/valkey", "valkey/valkey-bundle")
+ return self
+
+ def get_connection_url(self) -> str:
+ """
+ Get connection URL for Valkey.
+
+ Returns:
+ url: Connection URL in format valkey://[:password@]host:port
+ """
+ host = self.get_host()
+ port = self.get_exposed_port()
+ if self.password:
+ return f"valkey://:{self.password}@{host}:{port}"
+ return f"valkey://{host}:{port}"
+
+ def get_host(self) -> str:
+ """
+ Get container host.
+
+ Returns:
+ host: Container host IP.
+ """
+ return self.get_container_host_ip()
+
+ def get_exposed_port(self) -> int:
+ """
+ Get mapped port.
+
+ Returns:
+ port: Exposed port number.
+ """
+ return int(super().get_exposed_port(self.port))
+
+ def start(self) -> "ValkeyContainer":
+ """
+ Start the container and wait for it to be ready.
+
+ Returns:
+ self: Started container instance.
+ """
+ if self.password:
+ self.waiting_for(ExecWaitStrategy(["valkey-cli", "-a", self.password, "ping"]))
+ else:
+ self.waiting_for(ExecWaitStrategy(["valkey-cli", "ping"]))
+
+ super().start()
+ return self
diff --git a/modules/valkey/tests/test_valkey.py b/modules/valkey/tests/test_valkey.py
new file mode 100644
index 000000000..6a2c76b1a
--- /dev/null
+++ b/modules/valkey/tests/test_valkey.py
@@ -0,0 +1,79 @@
+import socket
+
+from testcontainers.valkey import ValkeyContainer
+
+
+def test_docker_run_valkey():
+ with ValkeyContainer() as valkey:
+ host = valkey.get_host()
+ port = valkey.get_exposed_port()
+
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.connect((host, port))
+ s.sendall(b"*1\r\n$4\r\nPING\r\n")
+ response = s.recv(1024)
+ assert b"+PONG" in response
+
+
+def test_docker_run_valkey_with_password():
+ with ValkeyContainer().with_password("mypass") as valkey:
+ host = valkey.get_host()
+ port = valkey.get_exposed_port()
+
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.connect((host, port))
+ # Authenticate
+ s.sendall(b"*2\r\n$4\r\nAUTH\r\n$6\r\nmypass\r\n")
+ auth_response = s.recv(1024)
+ assert b"+OK" in auth_response
+
+ # Test SET command
+ s.sendall(b"*3\r\n$3\r\nSET\r\n$5\r\nhello\r\n$5\r\nworld\r\n")
+ set_response = s.recv(1024)
+ assert b"+OK" in set_response
+
+ # Test GET command
+ s.sendall(b"*2\r\n$3\r\nGET\r\n$5\r\nhello\r\n")
+ get_response = s.recv(1024)
+ assert b"world" in get_response
+
+
+def test_get_connection_url():
+ with ValkeyContainer() as valkey:
+ url = valkey.get_connection_url()
+ assert url.startswith("valkey://")
+ assert str(valkey.get_exposed_port()) in url
+
+
+def test_get_connection_url_with_password():
+ with ValkeyContainer().with_password("secret") as valkey:
+ url = valkey.get_connection_url()
+ assert url.startswith("valkey://:secret@")
+ assert str(valkey.get_exposed_port()) in url
+
+
+def test_with_image_tag():
+ container = ValkeyContainer().with_image_tag("8.0")
+ assert "valkey/valkey:8.0" in container.image
+
+
+def test_with_bundle():
+ container = ValkeyContainer().with_bundle()
+ assert container.image == "valkey/valkey-bundle:latest"
+
+
+def test_with_bundle_and_tag():
+ container = ValkeyContainer().with_bundle().with_image_tag("9.0")
+ assert container.image == "valkey/valkey-bundle:9.0"
+
+
+def test_bundle_starts():
+ with ValkeyContainer().with_bundle() as valkey:
+ host = valkey.get_host()
+ port = valkey.get_exposed_port()
+
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.connect((host, port))
+ s.sendall(b"*1\r\n$4\r\nPING\r\n")
+ response = s.recv(1024)
+ assert b"+PONG" in response
diff --git a/pyproject.toml b/pyproject.toml
index f983a2e3e..5314989ce 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "testcontainers"
-version = "4.14.1" # auto-incremented by release-please
+version = "4.14.2" # auto-incremented by release-please
description = "Python library for throwaway instances of anything that can run in a Docker container"
readme = "README.md"
requires-python = ">=3.10"
@@ -62,6 +62,7 @@ elasticsearch = []
generic = [
"httpx",
"redis>=7",
+ "sqlalchemy",
]
test_module_import = ["httpx"]
google = [
@@ -98,8 +99,9 @@ rabbitmq = ["pika>=1"]
redis = ["redis>=7"]
registry = ["bcrypt>=5"]
selenium = ["selenium>=4"]
-scylla = ["cassandra-driver>=3"]
+scylla = ["cassandra-driver>=3; python_version < '3.14'"]
sftp = ["cryptography"]
+valkey = []
vault = []
weaviate = ["weaviate-client>=4"]
chroma = ["chromadb-client>=1"]
@@ -120,7 +122,7 @@ test = [
"psycopg2-binary==2.9.11",
"pg8000==1.31.5",
"psycopg>=3",
- "cassandra-driver>=3",
+ "cassandra-driver>=3; python_version < '3.14'",
"kafka-python-ng>=2",
"hvac>=2; python_version < '4.0'",
"pymilvus>=2",
@@ -217,6 +219,7 @@ packages = [
"modules/selenium/testcontainers",
"modules/scylla/testcontainers",
"modules/trino/testcontainers",
+ "modules/valkey/testcontainers",
"modules/vault/testcontainers",
"modules/weaviate/testcontainers",
]
@@ -266,6 +269,7 @@ dev-mode-dirs = [
"modules/selenium",
"modules/scylla",
"modules/trino",
+ "modules/valkey",
"modules/vault",
"modules/weaviate",
]
@@ -277,6 +281,7 @@ log_cli = true
log_cli_level = "INFO"
markers = [
"inside_docker_check: mark test to be used to validate DinD/DooD is working as expected",
+ "long_running: mark test as very long running",
]
filterwarnings = [
"ignore:The @wait_container_is_ready decorator is deprecated.*:DeprecationWarning",
diff --git a/uv.lock b/uv.lock
index 5b0f9e1cc..22c671f37 100644
--- a/uv.lock
+++ b/uv.lock
@@ -483,7 +483,7 @@ name = "cassandra-driver"
version = "3.29.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "geomet" },
+ { name = "geomet", marker = "python_full_version < '3.14'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/47/4e0fbdf02a7a418997f16f59feba26937d9973b979d3f23d79fbd8f6186f/cassandra_driver-3.29.3.tar.gz", hash = "sha256:ff6b82ee4533f6fd4474d833e693b44b984f58337173ee98ed76bce08721a636", size = 294612, upload-time = "2025-10-22T15:15:01.335Z" }
wheels = [
@@ -1243,7 +1243,7 @@ name = "geomet"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "click" },
+ { name = "click", marker = "python_full_version < '3.14'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2a/8c/dde022aa6747b114f6b14a7392871275dea8867e2bd26cddb80cc6d66620/geomet-1.1.0.tar.gz", hash = "sha256:51e92231a0ef6aaa63ac20c443377ba78a303fd2ecd179dc3567de79f3c11605", size = 28732, upload-time = "2023-11-14T15:43:36.764Z" }
wheels = [
@@ -4896,7 +4896,7 @@ wheels = [
[[package]]
name = "testcontainers"
-version = "4.14.1"
+version = "4.14.2"
source = { editable = "." }
dependencies = [
{ name = "docker" },
@@ -4933,6 +4933,7 @@ db2 = [
generic = [
{ name = "httpx" },
{ name = "redis" },
+ { name = "sqlalchemy" },
]
google = [
{ name = "google-cloud-datastore" },
@@ -5002,7 +5003,7 @@ registry = [
{ name = "bcrypt" },
]
scylla = [
- { name = "cassandra-driver" },
+ { name = "cassandra-driver", marker = "python_full_version < '3.14'" },
]
selenium = [
{ name = "selenium" },
@@ -5023,7 +5024,7 @@ weaviate = [
[package.dev-dependencies]
dev = [
{ name = "anyio" },
- { name = "cassandra-driver" },
+ { name = "cassandra-driver", marker = "python_full_version < '3.14'" },
{ name = "hvac", marker = "python_full_version < '4'" },
{ name = "kafka-python-ng" },
{ name = "mkdocs" },
@@ -5071,7 +5072,7 @@ lint = [
]
test = [
{ name = "anyio" },
- { name = "cassandra-driver" },
+ { name = "cassandra-driver", marker = "python_full_version < '3.14'" },
{ name = "hvac", marker = "python_full_version < '4'" },
{ name = "kafka-python-ng" },
{ name = "paho-mqtt" },
@@ -5097,7 +5098,7 @@ requires-dist = [
{ name = "bcrypt", marker = "extra == 'registry'", specifier = ">=5" },
{ name = "boto3", marker = "extra == 'aws'", specifier = ">=1" },
{ name = "boto3", marker = "extra == 'localstack'", specifier = ">=1" },
- { name = "cassandra-driver", marker = "extra == 'scylla'", specifier = ">=3" },
+ { name = "cassandra-driver", marker = "python_full_version < '3.14' and extra == 'scylla'", specifier = ">=3" },
{ name = "chromadb-client", marker = "extra == 'chroma'", specifier = ">=1" },
{ name = "clickhouse-driver", marker = "extra == 'clickhouse'" },
{ name = "cryptography", marker = "extra == 'mailpit'" },
@@ -5132,6 +5133,7 @@ requires-dist = [
{ name = "redis", marker = "extra == 'redis'", specifier = ">=7" },
{ name = "selenium", marker = "extra == 'selenium'", specifier = ">=4" },
{ name = "sqlalchemy", marker = "extra == 'db2'", specifier = ">=2" },
+ { name = "sqlalchemy", marker = "extra == 'generic'" },
{ name = "sqlalchemy", marker = "extra == 'mssql'", specifier = ">=2" },
{ name = "sqlalchemy", marker = "extra == 'mysql'", specifier = ">=2" },
{ name = "sqlalchemy", marker = "extra == 'oracle'", specifier = ">=2" },
@@ -5147,7 +5149,7 @@ provides-extras = ["arangodb", "aws", "azurite", "cassandra", "clickhouse", "cos
[package.metadata.requires-dev]
dev = [
{ name = "anyio", specifier = ">=4" },
- { name = "cassandra-driver", specifier = ">=3" },
+ { name = "cassandra-driver", marker = "python_full_version < '3.14'", specifier = ">=3" },
{ name = "hvac", marker = "python_full_version < '4'", specifier = ">=2" },
{ name = "kafka-python-ng", specifier = ">=2" },
{ name = "mkdocs", specifier = ">=1.5.3,<2.0.0" },
@@ -5193,7 +5195,7 @@ lint = [
]
test = [
{ name = "anyio", specifier = ">=4" },
- { name = "cassandra-driver", specifier = ">=3" },
+ { name = "cassandra-driver", marker = "python_full_version < '3.14'", specifier = ">=3" },
{ name = "hvac", marker = "python_full_version < '4'", specifier = ">=2" },
{ name = "kafka-python-ng", specifier = ">=2" },
{ name = "paho-mqtt", specifier = ">=2" },