From 74350bf33ab8c46b87c4f38df18701b9361e63fb Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:00:57 +0100
Subject: [PATCH 01/15] Switches config from files to environment variables
Removes all support for .conf config files and migrates server, documentation,
and tests to use environment variables for all runtime configuration. Updates
the server to build its config from the environment, simplifies Docker and
local setup, and cleans up obsolete config references in docs and code.
Modernizes the API and test suite for better portability and deployment.
---
README.md | 67 +++++++-------
asgi.py | 12 +--
default.conf | 17 ----
docker-compose.yml | 16 +++-
docs/configuration.rst | 199 +++++++++++++++++-----------------------
docs/faqs.rst | 4 +-
docs/gettingstarted.rst | 146 ++++++-----------------------
docs/installation.rst | 40 ++++----
fq_server/__init__.py | 6 +-
fq_server/server.py | 151 ++++++++++++++++++++++++++----
pyproject.toml | 3 +-
tests/test.conf | 23 -----
tests/test_routes.py | 160 ++++++++++++++++++++------------
uv.lock | 10 +-
14 files changed, 418 insertions(+), 436 deletions(-)
delete mode 100644 default.conf
delete mode 100644 tests/test.conf
diff --git a/README.md b/README.md
index 945756e..d5cb229 100644
--- a/README.md
+++ b/README.md
@@ -4,24 +4,22 @@
Flowdacity Queue Server
=======================
-An async HTTP API for the [Flowdacity Queue (FQ)](https://github.com/flowdacity/flowdacity-queue) core, built with Starlette and Uvicorn. It keeps the original SHARQ behavior (leaky-bucket rate limiting and dynamic queues) while modernizing the stack.
+An async HTTP API for [Flowdacity Queue (FQ)](https://github.com/flowdacity/flowdacity-queue), built with Starlette and Uvicorn.
## Prerequisites
- Python 3.12+
-- Redis 7+ reachable from the server
-- A Flowdacity Queue config file (see `default.conf` for a starter)
+- Redis 7+
## Installation
-Clone the repo and install the package plus dev tools (uses [`uv`](https://github.com/astral-sh/uv) by default):
+This project currently pins `flowdacity-queue` to the upstream `v1.0.0` Git tag because that version is tagged in Git but not yet published on PyPI.
```bash
uv sync --group dev
-# or: uv pip install --system .
```
-If you prefer pip/venv without `uv`:
+If you prefer a virtualenv without `uv`:
```bash
python -m venv .venv
@@ -32,29 +30,45 @@ pip install pytest pytest-cov
## Configuration
-- Point the server at your FQ config via `FQ_CONFIG` (defaults to `./default.conf`).
-- `default.conf` defines three sections:
- - `[fq]` queue behavior (intervals, requeue limits).
- - `[fq-server]` host/port for the HTTP server (used by Docker/local defaults).
- - `[redis]` connection details for your Redis instance.
-- Copy and tweak as needed:
+The server reads all queue and Redis settings from environment variables. No config file is required.
+
+| Variable | Default | Description |
+| --- | --- | --- |
+| `FQ_JOB_EXPIRE_INTERVAL` | `1000` | Milliseconds before a dequeued job is considered expired. |
+| `FQ_JOB_REQUEUE_INTERVAL` | `1000` | Milliseconds between expired-job requeue passes. |
+| `FQ_DEFAULT_JOB_REQUEUE_LIMIT` | `-1` | Default retry limit. `-1` retries forever. |
+| `FQ_ENABLE_REQUEUE_SCRIPT` | `true` | Enables the background requeue loop. |
+| `FQ_REDIS_DB` | `0` | Redis database number. |
+| `FQ_REDIS_KEY_PREFIX` | `fq_server` | Prefix used for Redis keys. |
+| `FQ_REDIS_CONN_TYPE` | `tcp_sock` | Redis connection type: `tcp_sock` or `unix_sock`. |
+| `FQ_REDIS_HOST` | `127.0.0.1` | Redis host for TCP connections. |
+| `FQ_REDIS_PORT` | `6379` | Redis port for TCP connections. |
+| `FQ_REDIS_PASSWORD` | empty | Redis password. |
+| `FQ_REDIS_CLUSTERED` | `false` | Enables Redis Cluster mode. |
+| `FQ_REDIS_UNIX_SOCKET_PATH` | `/tmp/redis.sock` | Redis socket path when `FQ_REDIS_CONN_TYPE=unix_sock`. |
+| `PORT` | `8300` | Uvicorn port used by the container and local examples. |
+
+Boolean env vars are case-insensitive and accept `1`, `0`, `true`, `false`, `yes`, `no`, `on`, or `off`.
+
+## Run locally
+
+Start Redis:
```bash
-cp default.conf local.conf
-# edit local.conf to match your Redis host/port/password
+make redis-up
```
-## Run the server locally
+Run the API:
```bash
-# ensure Redis is running (make redis starts a container)
-make redis
-
-# start the ASGI server
-FQ_CONFIG=./local.conf uv run uvicorn asgi:app --host 0.0.0.0 --port 8080
+PORT=8080 \
+FQ_REDIS_HOST=127.0.0.1 \
+uv run uvicorn asgi:app --host 0.0.0.0 --port 8080
```
-Docker Compose is also available:
+## Docker
+
+`docker-compose.yml` passes the queue settings through environment variables, so no config file is mounted:
```bash
docker compose up --build
@@ -63,34 +77,23 @@ docker compose up --build
## API quick start
```bash
-# health
curl http://127.0.0.1:8080/
-# enqueue a job
curl -X POST http://127.0.0.1:8080/enqueue/sms/user42/ \
-H "Content-Type: application/json" \
-d '{"job_id":"job-1","payload":{"message":"hi"},"interval":1000}'
-# dequeue
curl http://127.0.0.1:8080/dequeue/sms/
-# mark finished
curl -X POST http://127.0.0.1:8080/finish/sms/user42/job-1/
-# metrics
curl http://127.0.0.1:8080/metrics/
curl http://127.0.0.1:8080/metrics/sms/user42/
```
-All endpoints return JSON; failures surface as HTTP 4xx/5xx with a `status` field in the body.
-
## Testing
-Redis must be available. With dev deps installed:
-
```bash
-uv run pytest
-# or
make test
```
diff --git a/asgi.py b/asgi.py
index 8c613ff..09fb5a3 100644
--- a/asgi.py
+++ b/asgi.py
@@ -1,19 +1,9 @@
# Copyright (c) 2025 Flowdacity Team. See LICENSE.txt for details.
# ASGI application entrypoint for Flowdacity Queue (FQ) Server
-import os
from fq_server import setup_server
-# read config path from env variable, use default if not set
-fq_config_path = os.environ.get("FQ_CONFIG")
-if fq_config_path is None:
- print(
- "Warning: FQ_CONFIG environment variable not set. Using default config path './default.conf'."
- )
- fq_config_path = "./default.conf"
-fq_config_path = os.path.abspath(fq_config_path)
-
-server = setup_server(fq_config_path)
+server = setup_server()
# ASGI app exposed for Uvicorn/Hypercorn
app = server.app
diff --git a/default.conf b/default.conf
deleted file mode 100644
index c48d148..0000000
--- a/default.conf
+++ /dev/null
@@ -1,17 +0,0 @@
-[fq]
-job_expire_interval : 1000
-job_requeue_interval : 1000
-default_job_requeue_limit : -1
-enable_requeue_script : true
-
-[redis]
-db : 0
-key_prefix : fq_server
-conn_type : tcp_sock
-;; unix connection settings
-unix_socket_path : /tmp/redis.sock
-;; tcp connection settings
-port : 6379
-host : redis
-password :
-clustered : false
diff --git a/docker-compose.yml b/docker-compose.yml
index c5f0320..a40f222 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,12 +2,20 @@ services:
app:
build: .
environment:
- - FQ_CONFIG=/app/default.conf
- - PORT=8080
+ PORT: 8080
+ FQ_JOB_EXPIRE_INTERVAL: 1000
+ FQ_JOB_REQUEUE_INTERVAL: 1000
+ FQ_DEFAULT_JOB_REQUEUE_LIMIT: -1
+ FQ_ENABLE_REQUEUE_SCRIPT: "true"
+ FQ_REDIS_DB: 0
+ FQ_REDIS_KEY_PREFIX: fq_server
+ FQ_REDIS_CONN_TYPE: tcp_sock
+ FQ_REDIS_HOST: redis
+ FQ_REDIS_PORT: 6379
+ FQ_REDIS_PASSWORD: ""
+ FQ_REDIS_CLUSTERED: "false"
ports:
- "8080:8080"
- volumes:
- - ./default.conf:/app/default.conf:ro
depends_on:
redis:
condition: service_healthy
diff --git a/docs/configuration.rst b/docs/configuration.rst
index 53831b1..6bdea11 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -2,120 +2,85 @@
Configuration
=============
-The SHARQ configuration file is minimal and has three sections.
-
-* `Sharq Section <#id1>`_
-* `Sharq Server Section <#id2>`_
-* `Redis Section <#id3>`_
-
-
-fq section
-~~~~~~~~~~~~~
-
-This section contains the configurations specific to the SHARQ core.
-
-job\_expire\_interval
-^^^^^^^^^^^^^^^^^^^^^
-
-``job_expire_interval`` is the number of milliseconds after which any job
-not marked as finished will expire. All expired jobs are scheduled for re-queueing.
-
-job\_requeue\_interval
-^^^^^^^^^^^^^^^^^^^^^^
-
-``job_requeue_interval`` is the number of milliseconds to wait between
-two clean up processes. A clean up re-queues all the expired jobs back into their
-respective queues.
-
-fq-server section
-~~~~~~~~~~~~~~~~~~~~
-
-This section contains the configurations specific to the SHARQ Server.
-
-host
-^^^^
-
-``host`` is IP address to which the SHARQ Server should bind to.
-
-port
-^^^^
-
-``port`` is where the SHARQ Server should listen for requests.
-
-workers
-^^^^^^^
-
-SHARQ Server internally uses `Gunicorn `_ as the server. The ``workers`` parameter specifies the number of Gunicorn workers to spawn when the server starts. More details on this can be found in the `Gunicorn docs `_.
-
-accesslog
-^^^^^^^^^
-
-Location for the SHARQ Server to write its access logs.
-
-redis section
-~~~~~~~~~~~~~
-
-This section contains the configurations specific to Redis.
-
-db
-^^
-
-The Redis database number to which the SHARQ Server should connect.
-
-key\_prefix
-^^^^^^^^^^^
-
-Every key used by the SHARQ Server in Redis will start with this prefix.
-
-conn\_type
-^^^^^^^^^^
-
-Specifies how the SHARQ Server should connect to Redis. If Redis is in
-the same machine as the SHARQ Server, then connecting via unix socket (*unix_sock*)
-is recommended.
-
-If Redis is on a remote machine, set ``conn\_type`` to *tcp_sock*.
-
-unix\_socket\_path
-^^^^^^^^^^^^^^^^^^
-
-Absolute path of the unix socket created by Redis. This has to be set in
-case the ``conn\_type`` is set to *unix_sock*.
-
-port
-^^^^
-
-Port where Redis listens for connections.
-
-host
-^^^^
-
-IP address or FQDN of Redis.
-
-
-A Sample Configuration File
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-A sample configuration file looks like this. You can also get this configuration file from the `Github repository `_.
-
-.. code-block:: ini
-
- [fq]
- job_expire_interval : 1000 ; in milliseconds
- job_requeue_interval : 1000 ; in milliseconds
-
- [fq-server]
- host : 127.0.0.1
- port : 8080
- workers : 1 ; optional
- accesslog : /tmp/fq.log ; optional
-
- [redis]
- db : 0
- key_prefix : fq_server
- conn_type : tcp_sock ; or unix_sock
- ;; unix connection settings
- unix_socket_path : /tmp/redis.sock
- ;; tcp connection settings
- port : 6379
- host : 127.0.0.1
+Flowdacity Queue Server reads its runtime configuration from environment variables.
+
+Queue settings
+--------------
+
+``FQ_JOB_EXPIRE_INTERVAL``
+ Milliseconds after which a dequeued job is considered expired.
+
+``FQ_JOB_REQUEUE_INTERVAL``
+ Milliseconds between requeue passes for expired jobs.
+
+``FQ_DEFAULT_JOB_REQUEUE_LIMIT``
+ Default retry limit for jobs. ``-1`` means retry forever.
+
+``FQ_ENABLE_REQUEUE_SCRIPT``
+ Enables or disables the background requeue loop.
+
+Redis settings
+--------------
+
+``FQ_REDIS_DB``
+ Redis database number.
+
+``FQ_REDIS_KEY_PREFIX``
+ Prefix used for Redis keys created by the queue.
+
+``FQ_REDIS_CONN_TYPE``
+ Redis connection type. Supported values are ``tcp_sock`` and ``unix_sock``.
+
+``FQ_REDIS_HOST``
+ Redis host for TCP connections.
+
+``FQ_REDIS_PORT``
+ Redis port for TCP connections.
+
+``FQ_REDIS_PASSWORD``
+ Redis password. Leave empty when authentication is not required.
+
+``FQ_REDIS_CLUSTERED``
+ Enables Redis Cluster mode when set to a truthy value.
+
+``FQ_REDIS_UNIX_SOCKET_PATH``
+ Redis unix socket path when ``FQ_REDIS_CONN_TYPE=unix_sock``.
+
+Defaults
+--------
+
+.. list-table::
+ :header-rows: 1
+
+ * - Variable
+ - Default
+ * - ``FQ_JOB_EXPIRE_INTERVAL``
+ - ``1000``
+ * - ``FQ_JOB_REQUEUE_INTERVAL``
+ - ``1000``
+ * - ``FQ_DEFAULT_JOB_REQUEUE_LIMIT``
+ - ``-1``
+ * - ``FQ_ENABLE_REQUEUE_SCRIPT``
+ - ``true``
+ * - ``FQ_REDIS_DB``
+ - ``0``
+ * - ``FQ_REDIS_KEY_PREFIX``
+ - ``fq_server``
+ * - ``FQ_REDIS_CONN_TYPE``
+ - ``tcp_sock``
+ * - ``FQ_REDIS_HOST``
+ - ``127.0.0.1``
+ * - ``FQ_REDIS_PORT``
+ - ``6379``
+ * - ``FQ_REDIS_PASSWORD``
+ - empty
+ * - ``FQ_REDIS_CLUSTERED``
+ - ``false``
+ * - ``FQ_REDIS_UNIX_SOCKET_PATH``
+ - ``/tmp/redis.sock``
+
+Boolean values
+--------------
+
+Boolean environment variables are case-insensitive and accept ``1``, ``0``,
+``true``, ``false``, ``yes``, ``no``, ``on``, and ``off``.
diff --git a/docs/faqs.rst b/docs/faqs.rst
index 496c74e..dc6a5a3 100644
--- a/docs/faqs.rst
+++ b/docs/faqs.rst
@@ -55,13 +55,13 @@ A simple SHARQ worker polls for the jobs in a loop. The `Python snippet `_ for instructions.
+Run Redis locally, then start the API with environment variables.
-The **fq-server** command is minimal and accepts a SHARQ configuration file. To get started quickly, fetch the `SHARQ sample configuration file `_. Refer to the `configuration section `_ for more details.
-
-Running the SHARQ Server
-------------------------
-
-The SHARQ Server can be started with the following command.
+Start Redis
+-----------
::
- fq-server --config fq.conf
-
+ make redis-up
-This will run the SHARQ Server in the foreground with the following output.
+Start the server
+----------------
::
- ___ _ ___ ___
- / __| |_ __ _ _ _ / _ \ / __| ___ _ ___ _____ _ _
- \__ \ ' \/ _` | '_| (_) | \__ \/ -_) '_\ V / -_) '_|
- |___/_||_\__,_|_| \__\_\ |___/\___|_| \_/\___|_|
-
- Version: 0.1.0
-
- Listening on: 127.0.0.1:8080
-
-
-Ensure the SHARQ Server has started up correctly by making an HTTP GET request to the server root.
+ PORT=8080 \
+ FQ_REDIS_HOST=127.0.0.1 \
+ uv run uvicorn asgi:app --host 0.0.0.0 --port 8080
+Check the root endpoint
+-----------------------
.. code-block:: bash
curl http://127.0.0.1:8080/
- {
- "message": "Hello, FQ!"
- }
-
-SHARQ Workflow
+Queue workflow
--------------
-Before using SHARQ, understand its workflow as summarized in the following points:
-
-* **Enqueue** a job into the queue with parameters like ``queue_type``, ``queue_id``, ``interval``, etc. The interval parameter specifies the rate limit of the queue into which the job is being enqueued.
-* **Dequeue** a job from the queue by specifying the ``queue_type``. The dequeue is non-blocking. This means that dequeue succeeds only if there is any job ready to be dequeued (based on the rate limit) from any of the queues of the type specified by ``queue_type``.
-* Once the job has be dequeued, it is the responsibility of the worker to mark the job as successfully complete by making a **Finish** request. If the SHARQ Server does not receive a finish request within a preset interval, it re-queues the job back into the queue. This enables the SHARQ Server to make this job available to the workers, on future dequeue requests.
-
-Now that you have understood the basic workflow of SHARQ Server, go ahead and try out the SHARQ API as shown below.
-
+* Enqueue a job with ``queue_type``, ``queue_id``, ``job_id``, ``interval``, and ``payload``.
+* Dequeue work by queue type.
+* Finish a dequeued job after processing it successfully.
+* Expired jobs are requeued automatically based on ``FQ_JOB_EXPIRE_INTERVAL`` and
+ ``FQ_JOB_REQUEUE_INTERVAL``.
-SHARQ API Examples
-------------------
+Examples
+--------
Enqueue
```````
-The enqueue API will push the job into the SHARQ Server. Enqueue comes with a lot of parameters which makes SHARQ flexible. A typical enqueue request looks like this:
-
.. code-block:: bash
- curl -H "Accept: application/json" \
- -H "Content-type: application/json" \
- -X POST -d ' {"job_id": "b81c07a7-5bba-4790-ab40-a061994088c1", "interval": 1000, "payload": {"message": "hello, world"}}' \
- http://localhost:8080/enqueue/sms/1/
-
-
-Here is a break down of the above request. To translate the `cURL `_ request in normal English, an HTTP POST request is made to the url ``http://localhost:8080/enqueue/sms/1/`` with a JSON payload in the request body. The JSON payload is as follows:
-
-.. code-block:: python
-
- {
- "job_id": "b81c07a7-5bba-4790-ab40-a061994088c1",
- "interval": 1000,
- "payload": {"message": "hello, world"}
- }
-
-The url is of the form: ``http://hostname:port/enqueue///``.
-
-Each queue is uniquely identified by the ``queue_type`` and ``queue_id`` pair. Any job sent to this url will be pushed into this specific queue. Each job pushed into this queue is identified by the ``job_id``. The ``queue_type`` and ``queue_id`` pair has to be universally unique but the ``job_id`` can be unique at a queue level.
-
-The ``interval`` parameter is of pivotal importance in SHARQ. It is this parameter which defines the rate limit of the queue. Each queue identified by the ``queue_type``, ``queue_id`` pair can be set a rate limit (the inverse of interval). For example, if the queue has to be rate limited at 1 request per second, the ``interval`` has to be set to 1000 (in milliseconds).
-
-The ``payload`` is a JSON formatted blob which is the actual content that is being queued. This can be any message which has to be transmitted in the queue.
-
-When the enqueue request succeeds, the SHARQ Server responds with an HTTP status 201 and a message saying:
-
-.. code-block:: python
-
- {
- "status": "queued"
- }
-
-
-A simple Python snippet to illustrate this using the `Requests Python module `_ can be found `here `_.
+ curl -X POST http://127.0.0.1:8080/enqueue/sms/user42/ \
+ -H "Content-Type: application/json" \
+ -d '{"job_id":"job-1","payload":{"message":"hello, world"},"interval":1000}'
Dequeue
```````
-The dequeue API will pull the job from the SHARQ Server. The dequeue request will look for jobs in a particular ``queue_type``. Depending on whether any queue (with ``queue_id``) of that ``queue_type`` is ready to be dequeued (based on the rate limit set while enqueuing), the SHARQ Server returns a job or returns a dequeue failure.
-
-A simple successful dequeue request looks like this:
-
.. code-block:: bash
- curl http://localhost:8080/dequeue/sms/
-
-Here, *sms* is the ``queue_type``. The above request is trying to dequeue a job from any of the queues of type *sms*. If the job is ready, the SHARQ Server responds with an HTTP status 200 and the following content:
-
-.. code-block:: python
-
- {
- "job_id": "b81c07a7-5bba-4790-ab40-a061994088c1",
- "payload": {
- "message": "hello, world"
- },
- "queue_id": "1",
- "status": "success"
- }
-
-**NOTE:**
-
-* It is important to note that dequeue does not actually remove the job from the SHARQ Server. Internally, SHARQ changes the state of this job from *pending* to *active* when a dequeue happens. Every dequeue has to be accompanied with a finish request to mark the job as successfully completed. This notifies the SHARQ Server to remove the job completely. If a finish request is not received by SHARQ within a specific time after a successful dequeue, SHARQ assumes the job as failed (marks it as *expired*) and re-queues it back into the queue. This time interval for which the SHARQ Server waits before marking the job as *expired* is called the ``job_expire_interval``. This parameter can be set in the configuration file.
-* As the dequeue request is non-blocking, it is a common pattern to make the dequeue request in a loop. The SHARQ Server returns a HTTP status 200 on success and a 404 on failure.
-
-A simple Python snippet to illustrate a simple SHARQ worker using the `Requests Python module `_ can be found `here `_.
-
+ curl http://127.0.0.1:8080/dequeue/sms/
Finish
``````
-The finish API will mark any dequeued job as successfully completed. This notifies the SHARQ Server to remove the job from its system as the job has been acknowledged by the worker as successfully completed.
-
-A finish request will look like this:
-
.. code-block:: bash
- curl -X POST http://localhost:8080/finish/sms/1/b81c07a7-5bba-4790-ab40-a061994088c1/
-
-The above request example makes a finish request to the SHARQ Server with ``job_id`` *b81c07a7-5bba-4790-ab40-a061994088c1* belonging to the ``queue_id`` *1* and of ``queue_type`` *sms*. So, the finish request is of the form ``http://hostname:port/finish////``.
-
-The SHARQ Server responds with a status code of 200 and the following message when the finish request succeeds:
+ curl -X POST http://127.0.0.1:8080/finish/sms/user42/job-1/
-.. code-block:: python
-
- {
- "status": "success"
- }
-
-
-A simple Python snippet to illustrate a minimal but complete SHARQ worker with finish using the `Requests Python module `_ can be found `here `_.
+Metrics
+```````
-The SHARQ Server waits for the finish request after a dequeue for a specified time interval before marking the job as *expired* and further re-queueing the job back into the queue. Any job which gets a finish request within this interval will be marked as *successful* and removed from the SHARQ Server. This wait interval can be set in the configuration file. The ``job_expire_interval`` in the configuration file, specifies the time interval which the SHARQ Server waits for a dequeue request, from the worker, before marking a job as *expired* (ready to be re-queued back). The ``job_requeue_interval`` in the configuration file, specifies the time interval between two clean up operations on the SHARQ Server. A clean up operation is the process of re-queuing all jobs that are marked as *expired*.
+.. code-block:: bash
-The SHARQ Server contains an `Internal API `_ to update the rates of queues in real time. It also contains a `Metrics API `_ to get basic information such as the queue length, list of active queues, and so on. Check out the `API Reference `_ section for more details.
+ curl http://127.0.0.1:8080/metrics/
+ curl http://127.0.0.1:8080/metrics/sms/user42/
diff --git a/docs/installation.rst b/docs/installation.rst
index 534800f..ee58f71 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -2,37 +2,33 @@
Installation
============
-Installing from PyPI
---------------------
+Requirements
+------------
-SHARQ requires `Redis `_ which can be downloaded `here `_. SHARQ can be installed from `PyPI `_ using `pip `_.
+* Python 3.12+
+* Redis 7+
-::
-
- pip install fqserver
-
-
-Once the SHARQ Server is installed, head over to the `getting started section `_ to try out the API.
-
-Installing from Github
-----------------------
-
-Get the source code from the `SHARQ Github repository `_.
+Install with uv
+---------------
::
- git clone https://github.com/plivo/fq-server.git
+ uv sync --group dev
+This project currently pins ``flowdacity-queue`` to the upstream ``v1.0.0`` Git tag.
-Build the package from the source.
+Install with pip
+----------------
::
- make build
-
+ python -m venv .venv
+ source .venv/bin/activate
+ pip install -e .
+ pip install pytest pytest-cov
-Install the package.
-
-::
+Next steps
+----------
- make install
+Continue with the :doc:`getting started guide <gettingstarted>` to run Redis,
+set environment variables, and start the server.
diff --git a/fq_server/__init__.py b/fq_server/__init__.py
index 1cf1a62..b99669a 100644
--- a/fq_server/__init__.py
+++ b/fq_server/__init__.py
@@ -1,4 +1,4 @@
-from .server import FQServer, setup_server
+from .server import FQServer, build_config_from_env, setup_server
-__version__ = '0.1.0'
-__all__ = ['FQServer', 'setup_server']
\ No newline at end of file
+__version__ = "0.1.0"
+__all__ = ["FQServer", "build_config_from_env", "setup_server"]
diff --git a/fq_server/server.py b/fq_server/server.py
index a8d122f..3bbd599 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -3,12 +3,13 @@
# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
import asyncio
-import configparser
+import os
import traceback
import ujson as json
-from redis.exceptions import LockError
+from collections.abc import Mapping
from contextlib import asynccontextmanager, suppress
from fq import FQ
+from redis.exceptions import LockError
from starlette.applications import Starlette
from starlette.requests import Request
@@ -16,20 +17,130 @@
from starlette.routing import Route
+DEFAULT_FQ_ENV_CONFIG = {
+ "fq": {
+ "job_expire_interval": 1000,
+ "job_requeue_interval": 1000,
+ "default_job_requeue_limit": -1,
+ "enable_requeue_script": True,
+ },
+ "redis": {
+ "db": 0,
+ "key_prefix": "fq_server",
+ "conn_type": "tcp_sock",
+ "host": "127.0.0.1",
+ "port": 6379,
+ "password": "",
+ "clustered": False,
+ "unix_socket_path": "/tmp/redis.sock",
+ },
+}
+
+
+def _coerce_bool(value: str, env_var: str) -> bool:
+ normalized = value.strip().lower()
+ if normalized in {"1", "true", "yes", "on"}:
+ return True
+ if normalized in {"0", "false", "no", "off"}:
+ return False
+ raise ValueError(
+ f"Invalid boolean value for {env_var}: {value!r}. "
+ "Use one of: 1, 0, true, false, yes, no, on, off."
+ )
+
+
+def _get_env_int(
+ env: Mapping[str, str], env_var: str, default: int, *, allow_empty: bool = True
+) -> int:
+ value = env.get(env_var)
+ if value is None or (allow_empty and value == ""):
+ return default
+
+ try:
+ return int(value)
+ except ValueError as exc:
+ raise ValueError(
+ f"Invalid integer value for {env_var}: {value!r}."
+ ) from exc
+
+
+def _get_env_bool(env: Mapping[str, str], env_var: str, default: bool) -> bool:
+ value = env.get(env_var)
+ if value is None or value == "":
+ return default
+ return _coerce_bool(value, env_var)
+
+
+def _copy_config(config: Mapping[str, Mapping[str, object]]) -> dict[str, dict[str, object]]:
+ normalized = {}
+ for section_name, section_values in config.items():
+ if not isinstance(section_values, Mapping):
+ raise TypeError(f"Config section {section_name!r} must be a mapping.")
+ normalized[str(section_name)] = {
+ str(option): value for option, value in section_values.items()
+ }
+ return normalized
+
+
+def build_config_from_env(
+ env: Mapping[str, str] | None = None,
+) -> dict[str, dict[str, object]]:
+ """Build the FQ/FQ server configuration from environment variables."""
+ env_map = os.environ if env is None else env
+
+ config = _copy_config(DEFAULT_FQ_ENV_CONFIG)
+ config["fq"]["job_expire_interval"] = _get_env_int(
+ env_map, "FQ_JOB_EXPIRE_INTERVAL", config["fq"]["job_expire_interval"]
+ )
+ config["fq"]["job_requeue_interval"] = _get_env_int(
+ env_map, "FQ_JOB_REQUEUE_INTERVAL", config["fq"]["job_requeue_interval"]
+ )
+ config["fq"]["default_job_requeue_limit"] = _get_env_int(
+ env_map,
+ "FQ_DEFAULT_JOB_REQUEUE_LIMIT",
+ config["fq"]["default_job_requeue_limit"],
+ )
+ config["fq"]["enable_requeue_script"] = _get_env_bool(
+ env_map,
+ "FQ_ENABLE_REQUEUE_SCRIPT",
+ config["fq"]["enable_requeue_script"],
+ )
+
+ config["redis"]["db"] = _get_env_int(
+ env_map, "FQ_REDIS_DB", config["redis"]["db"]
+ )
+ config["redis"]["key_prefix"] = env_map.get(
+ "FQ_REDIS_KEY_PREFIX", config["redis"]["key_prefix"]
+ )
+ config["redis"]["conn_type"] = env_map.get(
+ "FQ_REDIS_CONN_TYPE", config["redis"]["conn_type"]
+ )
+ config["redis"]["host"] = env_map.get("FQ_REDIS_HOST", config["redis"]["host"])
+ config["redis"]["port"] = _get_env_int(
+ env_map, "FQ_REDIS_PORT", config["redis"]["port"]
+ )
+ config["redis"]["password"] = env_map.get(
+ "FQ_REDIS_PASSWORD", config["redis"]["password"]
+ )
+ config["redis"]["clustered"] = _get_env_bool(
+ env_map, "FQ_REDIS_CLUSTERED", config["redis"]["clustered"]
+ )
+ config["redis"]["unix_socket_path"] = env_map.get(
+ "FQ_REDIS_UNIX_SOCKET_PATH", config["redis"]["unix_socket_path"]
+ )
+ return config
+
+
class FQServer(object):
"""Defines a HTTP based API on top of FQ and
exposes the app to run the server (Starlette).
"""
- def __init__(self, config_path: str):
- """Load the FQ config and define the routes."""
- # read the configs required by fq-server.
- self.config = configparser.ConfigParser()
- files_read = self.config.read(config_path)
- if not files_read:
- raise FileNotFoundError(f"Config file not found: {config_path}")
- # pass the config file to configure the FQ core.
- self.queue = FQ(config_path)
+ def __init__(self, config: Mapping[str, Mapping[str, object]]):
+ """Load the FQ config mapping and define the routes."""
+ self.config = _copy_config(config)
+ self.queue = FQ(self.config)
+ self._requeue_task: asyncio.Task | None = None
# Starlette app with routes and startup hook
self.app = Starlette(
@@ -42,7 +153,7 @@ def __init__(self, config_path: str):
# ------------------------------------------------------------------
async def requeue(self):
"""Loop endlessly and requeue expired jobs (no lock)."""
- job_requeue_interval = float(self.config.get("fq", "job_requeue_interval"))
+ job_requeue_interval = float(self.config["fq"]["job_requeue_interval"])
while True:
try:
await self.queue.requeue()
@@ -53,12 +164,11 @@ async def requeue(self):
async def requeue_with_lock(self):
"""Loop endlessly and requeue expired jobs, but with a distributed lock."""
- enable_requeue_script = self.config.get("fq", "enable_requeue_script")
- if enable_requeue_script == "false":
+ if not self.config["fq"].get("enable_requeue_script", True):
print("requeue script disabled")
return
- job_requeue_interval = float(self.config.get("fq", "job_requeue_interval"))
+ job_requeue_interval = float(self.config["fq"]["job_requeue_interval"])
print("start requeue loop: job_requeue_interval = %f" % (job_requeue_interval))
@@ -95,6 +205,7 @@ async def _lifespan(self, app: Starlette):
self._requeue_task.cancel()
with suppress(asyncio.CancelledError):
await self._requeue_task
+ await self.queue.close()
# ------------------------------------------------------------------
# Routes definition
@@ -396,7 +507,11 @@ async def _view_clear_queue(self, request: Request):
# ----------------------------------------------------------------------
# Setup helpers to create and configure the server
# ----------------------------------------------------------------------
-def setup_server(config_path: str) -> FQServer:
+def setup_server(
+ config: Mapping[str, Mapping[str, object]] | None = None,
+ *,
+ env: Mapping[str, str] | None = None,
+) -> FQServer:
"""Configure FQ server and return the server instance."""
- server = FQServer(config_path)
- return server
+ server_config = build_config_from_env(env) if config is None else _copy_config(config)
+ return FQServer(server_config)
diff --git a/pyproject.toml b/pyproject.toml
index fed8980..e8826ea 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,7 +5,7 @@ description = "An API queuing server based on the Flowdacity Queue (FQ) library.
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
- "flowdacity-queue>=0.1.0",
+ "flowdacity-queue @ git+https://github.com/flowdacity/flowdacity-queue.git@v1.0.0",
"httpx>=0.28.1",
"msgpack>=1.1.2",
"redis[hiredis]>=7.1.0",
@@ -33,7 +33,6 @@ include = [
"fq_server",
"README.md",
"LICENSE.txt",
- "default.conf",
"asgi.py",
"docs",
"Makefile",
diff --git a/tests/test.conf b/tests/test.conf
deleted file mode 100644
index a513d2b..0000000
--- a/tests/test.conf
+++ /dev/null
@@ -1,23 +0,0 @@
-[fq]
-job_expire_interval : 1000
-job_requeue_interval : 1000
-default_job_requeue_limit : -1
-enable_requeue_script : true
-
-[fq-server]
-host : 0.0.0.0
-port : 8080
-workers : 1
-accesslog : /tmp/fq.log
-
-[redis]
-db : 0
-key_prefix : fq_server_test
-conn_type : tcp_sock
-;; unix connection settings
-unix_socket_path : /tmp/redis.sock
-;; tcp connection settings
-port : 6379
-host : localhost
-password :
-clustered : false
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 4cf8533..107c121 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -1,62 +1,95 @@
-# tests/test_routes.py
-
# -*- coding: utf-8 -*-
# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
-import os
-import unittest
import asyncio
+import unittest
+
import ujson as json
-from unittest.mock import AsyncMock, patch
from httpx import AsyncClient, ASGITransport
from starlette.types import ASGIApp
-from fq_server import setup_server
-from fq_server.server import FQServer
+from unittest.mock import AsyncMock, patch
+
+from fq_server import build_config_from_env, setup_server
+
+
+def build_test_config():
+ return {
+ "fq": {
+ "job_expire_interval": 1000,
+ "job_requeue_interval": 1000,
+ "default_job_requeue_limit": -1,
+ "enable_requeue_script": True,
+ },
+ "redis": {
+ "db": 0,
+ "key_prefix": "fq_server_test",
+ "conn_type": "tcp_sock",
+ "host": "127.0.0.1",
+ "port": 6379,
+ "password": "",
+ "clustered": False,
+ "unix_socket_path": "/tmp/redis.sock",
+ },
+ }
class FQConfigTestCase(unittest.TestCase):
"""Tests for configuration validation."""
- def test_missing_fq_config_env_var(self):
- """Test that missing FQ_CONFIG environment variable uses default config."""
- # Ensure FQ_CONFIG is not set
- env_backup = os.environ.pop("FQ_CONFIG", None)
- try:
- # Capture stdout to verify warning message
- from io import StringIO
- import sys
- captured_output = StringIO()
- sys.stdout = captured_output
-
- # Re-import asgi module to trigger the check
- import importlib
- import asgi
- importlib.reload(asgi)
-
- sys.stdout = sys.__stdout__
-
- # Verify warning was printed
- output = captured_output.getvalue()
- self.assertIn("FQ_CONFIG", output)
- self.assertIn("default config path", output)
- finally:
- # Restore the environment variable if it was set
- if env_backup is not None:
- os.environ["FQ_CONFIG"] = env_backup
- sys.stdout = sys.__stdout__
-
- def test_config_file_not_found(self):
- """Test that non-existent config file raises FileNotFoundError."""
- with self.assertRaises(FileNotFoundError) as context:
- FQServer("/nonexistent/path/to/config.conf")
- self.assertIn("Config file not found", str(context.exception))
+ def test_build_config_from_env_defaults(self):
+ config = build_config_from_env({})
+ self.assertEqual(config["fq"]["job_expire_interval"], 1000)
+ self.assertEqual(config["fq"]["job_requeue_interval"], 1000)
+ self.assertEqual(config["fq"]["default_job_requeue_limit"], -1)
+ self.assertTrue(config["fq"]["enable_requeue_script"])
+ self.assertEqual(config["redis"]["host"], "127.0.0.1")
+ self.assertEqual(config["redis"]["port"], 6379)
+ self.assertEqual(config["redis"]["key_prefix"], "fq_server")
+
+ def test_build_config_from_env_overrides(self):
+ config = build_config_from_env(
+ {
+ "FQ_JOB_EXPIRE_INTERVAL": "5000",
+ "FQ_JOB_REQUEUE_INTERVAL": "6000",
+ "FQ_DEFAULT_JOB_REQUEUE_LIMIT": "5",
+ "FQ_ENABLE_REQUEUE_SCRIPT": "false",
+ "FQ_REDIS_DB": "2",
+ "FQ_REDIS_KEY_PREFIX": "custom_prefix",
+ "FQ_REDIS_CONN_TYPE": "unix_sock",
+ "FQ_REDIS_HOST": "redis.internal",
+ "FQ_REDIS_PORT": "6380",
+ "FQ_REDIS_PASSWORD": "secret",
+ "FQ_REDIS_CLUSTERED": "true",
+ "FQ_REDIS_UNIX_SOCKET_PATH": "/var/run/redis.sock",
+ }
+ )
+ self.assertEqual(config["fq"]["job_expire_interval"], 5000)
+ self.assertEqual(config["fq"]["job_requeue_interval"], 6000)
+ self.assertEqual(config["fq"]["default_job_requeue_limit"], 5)
+ self.assertFalse(config["fq"]["enable_requeue_script"])
+ self.assertEqual(config["redis"]["db"], 2)
+ self.assertEqual(config["redis"]["key_prefix"], "custom_prefix")
+ self.assertEqual(config["redis"]["conn_type"], "unix_sock")
+ self.assertEqual(config["redis"]["host"], "redis.internal")
+ self.assertEqual(config["redis"]["port"], 6380)
+ self.assertEqual(config["redis"]["password"], "secret")
+ self.assertTrue(config["redis"]["clustered"])
+ self.assertEqual(
+ config["redis"]["unix_socket_path"], "/var/run/redis.sock"
+ )
+
+ def test_build_config_from_env_rejects_invalid_values(self):
+ with self.assertRaisesRegex(ValueError, "FQ_REDIS_PORT"):
+ build_config_from_env({"FQ_REDIS_PORT": "redis"})
+
+ with self.assertRaisesRegex(ValueError, "FQ_ENABLE_REQUEUE_SCRIPT"):
+ build_config_from_env({"FQ_ENABLE_REQUEUE_SCRIPT": "maybe"})
class FQServerTestCase(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self):
# build server and Starlette app
- config_path = os.path.join(os.path.dirname(__file__), "test.conf")
- server = setup_server(config_path)
+ server = setup_server(build_test_config())
self.server = server
self.app: ASGIApp = server.app
@@ -76,6 +109,7 @@ async def asyncTearDown(self):
# flush redis after each test
await self.r.flushdb()
await self.client.aclose()
+ await self.queue.close()
async def test_root(self):
response = await self.client.get("/")
@@ -498,16 +532,16 @@ async def test_requeue_exception_handling(self):
async def test_requeue_with_lock_disabled(self):
"""Test requeue_with_lock when requeue is disabled."""
server = self.server
-
- # Mock config to disable requeue
- with patch.object(server.config, "get", return_value="false"):
- requeue_task = asyncio.create_task(server.requeue_with_lock())
-
- # Should return immediately (task completes)
- await asyncio.sleep(0.1)
-
- # Task should be done (returned, not cancelled)
- self.assertTrue(requeue_task.done())
+
+ server.config["fq"]["enable_requeue_script"] = False
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+
+ # Should return immediately (task completes)
+ await asyncio.sleep(0.1)
+
+ # Task should be done (returned, not cancelled)
+ self.assertTrue(requeue_task.done())
+ server.config["fq"]["enable_requeue_script"] = True
async def test_requeue_with_lock_lock_error(self):
"""Test requeue_with_lock when lock acquisition fails with LockError."""
@@ -572,8 +606,7 @@ class FQServerLifespanTestCase(unittest.IsolatedAsyncioTestCase):
async def test_lifespan_startup_shutdown(self):
"""Test lifespan startup and graceful shutdown."""
- config_path = os.path.join(os.path.dirname(__file__), "test.conf")
- server = setup_server(config_path)
+ server = setup_server(build_test_config())
# Simulate startup
app = server.app
@@ -599,18 +632,22 @@ async def test_lifespan_startup_shutdown(self):
async def test_lifespan_initializes_queue(self):
"""Test that lifespan calls queue.initialize()."""
- config_path = os.path.join(os.path.dirname(__file__), "test.conf")
- server = setup_server(config_path)
-
+ server = setup_server(build_test_config())
+
# Stub out both queue.initialize and the background requeue task to make
# startup/shutdown deterministic and avoid hitting an uninitialized queue.
- with patch.object(server.queue, "initialize", new_callable=AsyncMock) as mock_init, \
- patch.object(server, "requeue_with_lock", new_callable=AsyncMock):
+ with patch.object(
+ server.queue, "initialize", new_callable=AsyncMock
+ ) as mock_init, patch.object(
+ server.queue, "close", new_callable=AsyncMock
+ ) as mock_close, patch.object(
+ server, "requeue_with_lock", new_callable=AsyncMock
+ ):
lifespan_cm = server._lifespan(server.app)
await lifespan_cm.__aenter__()
-
+
mock_init.assert_called_once()
-
+
# Cleanup
if server._requeue_task is not None and not server._requeue_task.done():
server._requeue_task.cancel()
@@ -619,6 +656,7 @@ async def test_lifespan_initializes_queue(self):
except asyncio.CancelledError:
# Expected if the requeue task is cancelled during shutdown
pass
+ mock_close.assert_called_once()
diff --git a/uv.lock b/uv.lock
index 1d218d8..7f98a21 100644
--- a/uv.lock
+++ b/uv.lock
@@ -121,16 +121,12 @@ wheels = [
[[package]]
name = "flowdacity-queue"
-version = "0.1.2"
-source = { registry = "https://pypi.org/simple" }
+version = "1.0.0"
+source = { git = "https://github.com/flowdacity/flowdacity-queue.git?rev=v1.0.0#261362734fb2126ffd95e3e33739a0dc79c8aa81" }
dependencies = [
{ name = "msgpack" },
{ name = "redis", extra = ["hiredis"] },
]
-sdist = { url = "https://files.pythonhosted.org/packages/14/15/e9c00312dbb237f4508a94a1b3fd46459ffda3065c2d92276f1d788f494e/flowdacity_queue-0.1.2.tar.gz", hash = "sha256:4b872ca778a796c6db12652cf771342433dfd4dee846dc79a781c2bd18565ab1", size = 11487, upload-time = "2026-01-26T11:20:43.124Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/2c/a0/89d6be8592a750e370fbf25c0b9f3c8b8cfb0de7d0ae32b325db11047f6c/flowdacity_queue-0.1.2-py3-none-any.whl", hash = "sha256:da75e9707eb660b98b5a5ed522899da1496b6c8feb2e2fb3cf7f557c6da1c79d", size = 15347, upload-time = "2026-01-26T11:20:41.653Z" },
-]
[[package]]
name = "flowdacity-queue-server"
@@ -154,7 +150,7 @@ dev = [
[package.metadata]
requires-dist = [
- { name = "flowdacity-queue", specifier = ">=0.1.0" },
+ { name = "flowdacity-queue", git = "https://github.com/flowdacity/flowdacity-queue.git?rev=v1.0.0" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "msgpack", specifier = ">=1.1.2" },
{ name = "redis", extras = ["hiredis"], specifier = ">=7.1.0" },
From 6ee8ef9afe292773b0e1c5f37fe35a19f68d9735 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:07:22 +0100
Subject: [PATCH 02/15] Refactor documentation and code for Flowdacity Queue
Server
- Updated README.md to clarify accepted boolean environment variable values.
- Modified Makefile to reflect correct project names and paths.
- Changed layout.html to link to the correct GitHub repository.
- Revised apireference.rst to accurately describe the Flowdacity Queue Server API.
- Adjusted configuration.rst to specify boolean environment variable values.
- Enhanced contributing.rst with updated repository links and local workflow instructions.
- Improved faqs.rst to reflect the Flowdacity Queue Server's features and usage.
- Updated index.rst to provide a clearer introduction to the Flowdacity Queue Server.
- Expanded internals.rst to describe the architecture and background requeue loop.
- Revised license.rst to include current copyright information.
- Modified server.py to enforce stricter boolean value checks.
- Updated test_routes.py to include additional tests for environment variable validation.
---
README.md | 2 +-
docs/Makefile | 8 +-
docs/_templates/layout.html | 2 +-
docs/apireference.rst | 333 +++++++++++-------------------------
docs/conf.py | 303 ++++++--------------------------
docs/configuration.rst | 5 +-
docs/contributing.rst | 27 ++-
docs/faqs.rst | 133 +++++++-------
docs/index.rst | 16 +-
docs/internals.rst | 39 ++++-
docs/license.rst | 6 +-
fq_server/server.py | 8 +-
tests/test_routes.py | 5 +-
13 files changed, 299 insertions(+), 588 deletions(-)
diff --git a/README.md b/README.md
index d5cb229..ac85a2d 100644
--- a/README.md
+++ b/README.md
@@ -48,7 +48,7 @@ The server reads all queue and Redis settings from environment variables. No con
| `FQ_REDIS_UNIX_SOCKET_PATH` | `/tmp/redis.sock` | Redis socket path when `FQ_REDIS_CONN_TYPE=unix_sock`. |
| `PORT` | `8300` | Uvicorn port used by the container and local examples. |
-Boolean env vars accept `1`, `0`, `true`, `false`, `yes`, `no`, `on`, or `off`.
+Boolean env vars accept only `true` or `false`.
## Run locally
diff --git a/docs/Makefile b/docs/Makefile
index d17e2da..9d3692d 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -85,17 +85,17 @@ qthelp:
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
- @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/fqserver.qhcp"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/flowdacity-queue-server.qhcp"
@echo "To view the help file:"
- @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/fqserver.qhc"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/flowdacity-queue-server.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
- @echo "# mkdir -p $$HOME/.local/share/devhelp/fqserver"
- @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/fqserver"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/flowdacity-queue-server"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/flowdacity-queue-server"
@echo "# devhelp"
epub:
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index e6f69ff..ed6a0ef 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -1,5 +1,5 @@
{% extends "!layout.html" %}
{%- block extrahead %}
-
+
{% endblock %}
diff --git a/docs/apireference.rst b/docs/apireference.rst
index d8489f6..8c3ef01 100644
--- a/docs/apireference.rst
+++ b/docs/apireference.rst
@@ -2,353 +2,218 @@
API Reference
=============
-This section contains a very brief introduction to the SHARQ API. If you are looking to get started with SHARQ, refer to the `getting started section `_ before reading this.
+This document describes the HTTP API exposed by Flowdacity Queue Server. All
+documented routes include a trailing slash.
-Enqueue
-~~~~~~~
-
-Enqueue a job into the SHARQ Server.
+Root
+~~~~
::
- POST /enqueue///
+ GET /
-**Request (Raw JSON POST data):**
+Response:
-::
+.. code-block:: json
{
- "job_id": "b81c07a7-5bba-4790-ab40-a061994088c1",
- "interval": 1000,
- "payload": {"hello": "world"}
+ "message": "Hello, FQS!"
}
-**Response (success):**
-
-Status Code: 201
+Enqueue
+~~~~~~~
::
- {
- "status": "queued"
- }
-
-**Response (bad request):**
+ POST /enqueue/<queue_type>/<queue_id>/
-Status Code: 400
+Request body:
-::
+.. code-block:: json
{
- "message": "`queue_type` is a mandatory parameter",
- "status": "failure"
+ "job_id": "job-1",
+ "interval": 1000,
+ "payload": {
+ "message": "hello, world"
+ }
}
-**cURL Example:**
+Optional request fields:
-::
+* ``requeue_limit``: override the default retry limit for the job.
+* ``payload.max_queued_length``: reject the enqueue with HTTP ``429`` if the queue
+ already contains at least that many jobs.
+
+Success response:
- $curl -H "Accept: application/json" \
- -H "Content-type: application/json" \
- -X POST -d ' {"job_id": "b81c07a7-5bba-4790-ab40-a061994088c1", "interval": 1000, "payload": {"hello": "world"}}' \
- http://localhost:8080/enqueue/sms/1/
+* HTTP ``201``
+* Body:
+.. code-block:: json
+
+ {
+ "status": "queued"
+ }
Dequeue
~~~~~~~
-Dequeue a job from the SHARQ Server.
-
::
+ GET /dequeue/
GET /dequeue/<queue_type>/
-**Response (success):**
+``/dequeue/`` uses the default queue type ``default``.
-Status Code: 200
+Success response:
-::
+* HTTP ``200``
+* Body:
+
+.. code-block:: json
{
- "job_id": "b81c07a7-5bba-4790-ab40-a061994088c1",
+ "status": "success",
+ "queue_id": "user42",
+ "job_id": "job-1",
"payload": {
- "hello": "world"
+ "message": "hello, world"
},
- "queue_id": "1",
- "status": "success"
- }
-
-**Response (queue has no job ready):**
-
-Status Code: 404
-
-::
-
- {
- "status": "failure"
+ "requeues_remaining": -1,
+ "current_queue_length": 0
}
-**Response (bad request):** Status Code: 400
+If no job is ready, the server returns HTTP ``404`` with:
-::
+.. code-block:: json
{
- "message": "`queue_type` has an invalid value.",
- "status": "failure"
+ "status": "failure"
}
-**cURL Example:**
-
-::
-
- curl http://localhost:8080/dequeue/sms/
-
Finish
~~~~~~
-Mark a dequeued job as finished.
-
::
POST /finish/<queue_type>/<queue_id>/<job_id>/
-**Response (success):**
+Success response:
-Status Code: 200
+* HTTP ``200``
+* Body:
-::
+.. code-block:: json
{
"status": "success"
}
-**Response (job was not found):**
-
-Status Code: 404
-
-::
-
- {
- "status": "failure"
- }
-
-**Response (bad request):**
-
-Status Code: 400
-
-::
-
- {
- "message": "`queue_id` is a mandatory parameter",
- "status": "failure"
- }
-
-**cURL Example:**
-
-::
-
- curl -X POST http://localhost:8080/finish/sms/1/b81c07a7-5bba-4790-ab40-a061994088c1/
+If the active job is not found, the server returns HTTP ``404``.
Interval
~~~~~~~~
-Updates the interval (and effectively the rate) of any queue. The interval has to be specified in the request body, in the JSON format as shown below:
-
::
POST /interval/<queue_type>/<queue_id>/
-**Request (Raw JSON POST data):**
+Request body:
-::
+.. code-block:: json
{
- "interval": 1000
+ "interval": 5000
}
-**Response (success):**
+Success response:
-Status Code: 200
-
-::
+.. code-block:: json
{
"status": "success"
}
-**Response (queue was not found):**
-
-Status Code: 404
-
-::
-
- {
- "status": "failure"
- }
-
-**Response (bad request):**
-
-Status Code: 400
-
-::
-
- {
- "message": "`interval` has an invalid value.",
- "status": "failure"
- }
-
-**cURL Example:**
-
-::
-
- curl -H "Accept: application/json" \
- -H "Content-type: application/json" \
- -X POST -d ' {"interval": 5000}' \
- http://localhost:8080/interval/sms/1/
-
+If the queue does not exist, the server returns HTTP ``404``.
Metrics
~~~~~~~
-The Metrics API enables getting basic metrics from the SHARQ Server.
-
-Global Metrics
-^^^^^^^^^^^^^^
-
-Fetches metrics on a global level (the consolidated metrics of all queues in SHARQ) from the SHARQ Server. The response to the API request, contains the enqueue and dequeue counts which show the number of enqueues and dequeues in each minute over a period of 10 minutes.
+Global metrics:
::
GET /metrics/
-**Response (success):**
+Response fields:
-Status Code: 200
+* ``queue_types``
+* ``enqueue_counts``
+* ``dequeue_counts``
+* ``status``
-::
-
- {
- "dequeue_counts": {
- "1406200290000": 0,
- "1406200344000": 0,
- "1406200392000": 0,
- "1406200434000": 0,
- "1406200470000": 0,
- "1406200500000": 0,
- "1406200524000": 0,
- "1406200542000": 0,
- "1406200554000": 0,
- "1406200560000": 0
- },
- "enqueue_counts": {
- "1406200290000": 0,
- "1406200344000": 0,
- "1406200392000": 0,
- "1406200434000": 0,
- "1406200470000": 0,
- "1406200500000": 0,
- "1406200524000": 0,
- "1406200542000": 0,
- "1406200554000": 0,
- "1406200560000": 0
- },
- "queue_types": [
- "sms"
- ],
- "status": "success"
- }
-
-**cURL Example:**
-
-::
-
- curl http://localhost:8080/metrics/
-
-List Queue Ids
-^^^^^^^^^^^^^^
-
-Lists all the queues of a particular queue type in the SHARQ Server.
+Queue IDs for a queue type:
::
GET /metrics/<queue_type>/
-**Response (success):**
-
-Status Code: 200
-
-::
+Response fields:
- {
- "queue_ids": [
- "1"
- ],
- "status": "success"
- }
+* ``queue_ids``
+* ``status``
-**cURL Example:**
+Queue-specific metrics:
::
- curl http://localhost:8080/metrics/sms/
+ GET /metrics/<queue_type>/<queue_id>/
+
+Response fields:
-Queue Specific Metrics
-^^^^^^^^^^^^^^^^^^^^^^
+* ``queue_length``
+* ``enqueue_counts``
+* ``dequeue_counts``
+* ``status``
-Fetches metrics specific to a particular queue of a specific queue type. The response to the API request contains the enqueue and dequeue counts for each minute over a 10 minute period. The response also contains the length of the queue at that particular point in time.
+Delete Queue
+~~~~~~~~~~~~
::
- GET /metrics///
+ DELETE /deletequeue/<queue_type>/<queue_id>/
-**Response (success):**
+Optional request body:
-Status Code: 200
-
-::
+.. code-block:: json
{
- "dequeue_counts": {
- "1406200590000": 0,
- "1406200644000": 0,
- "1406200692000": 0,
- "1406200734000": 0,
- "1406200770000": 0,
- "1406200800000": 0,
- "1406200824000": 0,
- "1406200842000": 0,
- "1406200854000": 0,
- "1406200860000": 0
- },
- "enqueue_counts": {
- "1406200590000": 0,
- "1406200644000": 0,
- "1406200692000": 0,
- "1406200734000": 0,
- "1406200770000": 0,
- "1406200800000": 0,
- "1406200824000": 0,
- "1406200842000": 0,
- "1406200854000": 0,
- "1406200860000": 0
- },
- "queue_length": 3,
- "status": "success"
+ "purge_all": true
}
-**Response (bad request):**
+This removes queued jobs for the target queue. When ``purge_all`` is ``true``,
+related payload and interval metadata are removed as well.
-Status Code: 400
+Deep Status
+~~~~~~~~~~~
::
+ GET /deepstatus/
+
+If Redis is reachable and writable, the server returns:
+
+.. code-block:: json
+
{
- "message": "`queue_id` should be accompanied by `queue_type`.",
- "status": "failure"
+ "status": "success"
}
-**cURL Example:**
-
-::
+Common failures
+~~~~~~~~~~~~~~~
- curl http://localhost:8080/metrics/sms/1/
+* HTTP ``400``: invalid route parameters, invalid JSON, or invalid FQ arguments.
+* HTTP ``404``: no job ready to dequeue or target queue/job was not found.
+* HTTP ``429``: enqueue rejected because ``payload.max_queued_length`` was reached.
+* HTTP ``500``: backend health check failed during ``/deepstatus/``.
diff --git a/docs/conf.py b/docs/conf.py
index 745603d..340bb89 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,279 +1,74 @@
# -*- coding: utf-8 -*-
-#
-# fq server documentation build configuration file, created by
-# sphinx-quickstart on Mon Sep 22 17:57:32 2014.
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
import alabaster
-import sphinx_rtd_theme
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
extensions = [
- 'sphinx.ext.autodoc',
- 'sphinx.ext.ifconfig',
- 'sphinx.ext.viewcode',
- 'alabaster'
+ "sphinx.ext.autodoc",
+ "sphinx.ext.ifconfig",
+ "sphinx.ext.viewcode",
+ "alabaster",
]
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'SHARQ Server'
-copyright = u'2014, Plivo Team'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = '0.2.0'
-# The full version, including alpha/beta/rc tags.
-release = '0.2.0'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#language = None
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ['_build']
-
-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
+templates_path = ["_templates"]
+source_suffix = ".rst"
+master_doc = "index"
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+project = "Flowdacity Queue Server"
+copyright = "2025, Flowdacity Development Team"
+version = "0.1.0"
+release = "0.1.0"
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+exclude_patterns = ["_build"]
+pygments_style = "sphinx"
-# If true, keep warnings as "system message" paragraphs in the built documents.
-#keep_warnings = False
-
-
-# -- Options for HTML output ----------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-# html_theme = 'sphinx_rtd_theme'
-html_theme = 'alabaster'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
+html_theme = "alabaster"
+html_theme_path = [alabaster.get_path()]
html_theme_options = {
- 'logo': 'logo.png',
- 'github_user': 'plivo',
- 'github_repo': 'fq-server',
+ "logo": "logo.png",
+ "github_user": "flowdacity",
+ "github_repo": "flowdacity-queue-server",
+ "description": "Async HTTP API for Flowdacity Queue",
}
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
-html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
-html_theme_path = [alabaster.get_path()]
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# " v documentation".
-#html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-#html_logo = None
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-#html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-#html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-#html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+html_static_path = ["_static"]
html_sidebars = {
- '**': [
- 'about.html', 'navigation.html', 'searchbox.html',
- ]
+ "**": [
+ "about.html",
+ "navigation.html",
+ "searchbox.html",
+ ]
}
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
+htmlhelp_basename = "flowdacityqueueserverdoc"
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'fqserverdoc'
-
-
-# -- Options for LaTeX output ---------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
latex_documents = [
- ('index', 'fqserver.tex', u'fq server Documentation',
- u'Plivo Team', 'manual'),
+ (
+ "index",
+ "flowdacity-queue-server.tex",
+ "Flowdacity Queue Server Documentation",
+ "Flowdacity Development Team",
+ "manual",
+ ),
]
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
man_pages = [
- ('index', 'fqserver', u'fq server Documentation',
- [u'Plivo Team'], 1)
+ (
+ "index",
+ "flowdacity-queue-server",
+ "Flowdacity Queue Server Documentation",
+ ["Flowdacity Development Team"],
+ 1,
+ )
]
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
texinfo_documents = [
- ('index', 'fqserver', u'fq server Documentation',
- u'Plivo Team', 'fqserver', 'One line description of project.',
- 'Miscellaneous'),
+ (
+ "index",
+ "flowdacity-queue-server",
+ "Flowdacity Queue Server Documentation",
+ "Flowdacity Development Team",
+ "flowdacity-queue-server",
+ "Async HTTP API for Flowdacity Queue.",
+ "Miscellaneous",
+ ),
]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-#texinfo_no_detailmenu = False
diff --git a/docs/configuration.rst b/docs/configuration.rst
index 6bdea11..c921b36 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -41,7 +41,7 @@ Redis settings
Redis password. Leave empty when authentication is not required.
``FQ_REDIS_CLUSTERED``
- Enables Redis Cluster mode when set to a truthy value.
+ Enables Redis Cluster mode when set to ``true``.
``FQ_REDIS_UNIX_SOCKET_PATH``
Redis unix socket path when ``FQ_REDIS_CONN_TYPE=unix_sock``.
@@ -82,5 +82,4 @@ Defaults
Boolean values
--------------
-Boolean environment variables accept ``1``, ``0``, ``true``, ``false``, ``yes``,
-``no``, ``on``, and ``off``.
+Boolean environment variables accept only ``true`` and ``false``.
diff --git a/docs/contributing.rst b/docs/contributing.rst
index ba46218..f612727 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -2,17 +2,28 @@
Contributing
============
-SHARQ is open source and released under the permissive `MIT License `_. No software is complete and bugfree. If you feel something can be improved in SHARQ or would like to report bugs, feel free to do so. Pull requests are always welcome!
+Flowdacity Queue Server is open source and released under the permissive
+`MIT License `_. Issues and pull requests are welcome.
-SHARQ consists of two components architecturally and with respect to codebases.
+Repositories
+------------
-1. The `SHARQ Core `_ which implements the core functionality of SHARQ which is rate limiting.
-2. The `SHARQ Server `_ which exposes an HTTP interface via `Flask `_ & `Gevent `_.
+Development is split across two repositories:
-The core rate limiting algorithm is implemented in Lua. The detailed explanation of the algorithm with the implementation details and the `Redis `_ data structures can be found in `The Internals `_ section.
+1. Flowdacity Queue Server: https://github.com/flowdacity/flowdacity-queue-server
+2. Flowdacity Queue core: https://github.com/flowdacity/flowdacity-queue
+Local workflow
+--------------
-**Github Repository Links:**
+1. Install dependencies with ``uv sync --group dev``.
+2. Start Redis with ``make redis-up``.
+3. Run tests with ``make test``.
+4. Update docs and tests when behavior changes.
-* https://github.com/plivo/fq-server
-* https://github.com/plivo/fq
+What to include in a change
+---------------------------
+
+* Tests for new behavior or regressions.
+* Documentation updates for API or configuration changes.
+* Clear reproduction details when reporting a bug.
diff --git a/docs/faqs.rst b/docs/faqs.rst
index dc6a5a3..4e0a72a 100644
--- a/docs/faqs.rst
+++ b/docs/faqs.rst
@@ -1,101 +1,98 @@
-===========================
-Frequently Asked Questions
-===========================
+==========================
+Frequently Asked Questions
+==========================
-When should I use SHARQ?
-========================
+When should I use Flowdacity Queue Server?
+==========================================
-If you want the flexibility to dynamically create queues and update their rate limits in real time without making any configuration changes, then you need SHARQ.
+Use it when you want HTTP access to Flowdacity Queue from workers or services that
+should not link directly against the Python FQ library. It is especially useful when
+you need dynamic queues, per-queue rate limits, and Redis-backed retry/requeue behavior.
How do I set a rate limit for a queue?
======================================
-The rate limit of a queue (which is the inverse of an interval), can be set while making an enqueue request. Each enqueue request requires the ``interval`` parameter which defines the rate limit of the queue. For example, if the queue has to be rate limited at 1 request per second, the ``interval`` has to be set to 1000 (in milliseconds). Refer to the `Getting Started `_ section to know more about how to set the interval while making an enqueue request.
+Set the ``interval`` field when enqueueing a job. The interval is in milliseconds and
+represents the minimum gap between successful dequeues for the queue.
-How do I change the rate limit of a queue?
-==========================================
+How do I change the rate limit of an existing queue?
+====================================================
-Once the rate limit has been set for a queue during the enqueue operation, it can be changed using the `Interval API `_.
+Call the `Interval API `_:
-How do I write a SHARQ worker for processing jobs from the SHARQ Server?
-========================================================================
+.. code-block:: bash
-A simple SHARQ worker polls for the jobs in a loop. The `Python snippet `_ below shows how to structure a minimal worker:
+ curl -X POST http://127.0.0.1:8080/interval/sms/user42/ \
+ -H "Content-Type: application/json" \
+ -d '{"interval": 5000}'
-.. code-block:: python
-
- import time
- import json
- import requests
+How do I write a worker that processes jobs from the server?
+============================================================
- while True:
- # dequeue the job from the queue of type `sms`
- try:
- response = requests.get('http://localhost:8080/dequeue/sms/')
- if response.status_code == 200:
- # successful dequeue.
- r = json.loads(response.text)
- print r['payload'] # process the payload here.
- queue_id = r['queue_id']
- job_id = r['job_id']
- # mark the job as completed successfully by
- # sending a finish request.
- requests.post(
- 'http://localhost:8080/finish/sms/%s/%s/' % (
- queue_id, job_id))
- elif response.status_code == 404:
- # no job found (either queue is empty or none
- # of the jobs are ready yet).
- time.sleep(1) # wait for a second before retrying if needed.
- except Exception as e:
- print "Something went wrong!"
- time.sleep(5) # retry after 5 seconds.
+Any HTTP client can be used. A minimal Python example with ``httpx`` looks like this:
+.. code-block:: python
-How do I configure the time of expiry of a job?
-===============================================
+ import time
-Any job which is dequeued by the worker has to be acknowledged with a finish request within a specific time period, to mark that job as successfully processed. The job which does not get a finish request within this period will be marked as *expired* by the SHARQ Server. This time period is controlled by the ``FQ_JOB_EXPIRE_INTERVAL`` environment variable.
+ import httpx
-How do I configure when the expired jobs should be re-queued?
-=============================================================
+ with httpx.Client(base_url="http://127.0.0.1:8080") as client:
+ while True:
+ response = client.get("/dequeue/sms/")
-All expired jobs in the SHARQ Server will be re-queued back into their respective queues during the *clean up* process. The time interval between two clean ups is controlled by the ``FQ_JOB_REQUEUE_INTERVAL`` environment variable.
+ if response.status_code == 200:
+ job = response.json()
+ print(job["payload"])
+ client.post(
+ f"/finish/sms/{job['queue_id']}/{job['job_id']}/"
+ )
+ continue
-Is there a way to run the SHARQ Server using uWSGI?
-===================================================
+ if response.status_code == 404:
+ time.sleep(1)
+ continue
-Yes! By default the SHARQ Server uses `Gunicorn `_ internally. If you want to use `uWSGI `_ or any other server based on WSGI, you can do so by running ``wsgi.py`` provided in the source files `available on Github `_. For optimal performance, it is recommended to use uWSGI with `Nginx `_. More details can be found in the `uWSGI documentation `_.
+ raise RuntimeError(response.text)
-How do I know the number of jobs in any queue in real time?
-===========================================================
+How do I configure job expiry and requeue timing?
+=================================================
-The `Metrics API `_ lets you query the SHARQ Server for details like number of jobs, per minute enqueue & dequeue rates, and so on. Read the `API Reference `_ section for more details.
+Use environment variables:
-How do I get a list of all queues in the SHARQ Server?
-======================================================
+* ``FQ_JOB_EXPIRE_INTERVAL`` controls how long a dequeued job can remain active
+ before it is considered expired.
+* ``FQ_JOB_REQUEUE_INTERVAL`` controls how often expired jobs are scanned and
+ placed back onto their queues.
-The `Metrics API `_ lets you query the SHARQ Server for details like number of jobs, per minute enqueue & dequeue rates, and so on. Read the `API Reference `_ section for more details.
+How do I inspect queue depth and throughput?
+============================================
-How do I check the status of a job in real time?
-================================================
+Use the `Metrics API `_. It provides:
-This feature is not yet available in SHARQ but will be implemented in the future.
+* Global queue types plus enqueue/dequeue counts.
+* Queue IDs for a specific queue type.
+* Queue length and per-minute counters for a specific queue.
-Where can I find the source code of SHARQ?
-==========================================
+How do I clear a queue?
+=======================
-The SHARQ code base is split into two components - the core component and the server component. You can find it here:
+Call the ``DELETE /deletequeue/<queue_type>/<queue_id>/`` endpoint. If you want to
+remove related payload and interval metadata as well, send ``{"purge_all": true}``
+in the request body.
-**Github Repository Links:**
+Where is the source code?
+=========================
-* The SHARQ Core - https://github.com/plivo/fq
-* The SHARQ Server - https://github.com/plivo/fq-server
+The codebase is split across two repositories:
-Read the `Contributing `_ section for more details.
+* Flowdacity Queue Server: https://github.com/flowdacity/flowdacity-queue-server
+* Flowdacity Queue core: https://github.com/flowdacity/flowdacity-queue
-I just found a bug. How do I report it?
-=======================================
+How do I report a bug or contribute a fix?
+==========================================
-Read the `Contributing `_ section for more details.
+Open an issue or pull request in the server repository and include reproduction
+steps, Redis details, and any failing requests or tests when possible. The
+`Contributing `_ section covers the local development workflow.
diff --git a/docs/index.rst b/docs/index.rst
index fe76a00..700de7c 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,13 +1,15 @@
-Welcome to SHARQ!
-=================
+Flowdacity Queue Server
+=======================
-SHARQ is a flexible, open source, rate limited queuing system. Based on the `Leaky Bucket Algorithm `_, `SHARQ `_ lets you create queues dynamically and update their rate limits in real time.
+Flowdacity Queue Server is an async HTTP API for `Flowdacity Queue (FQ) <https://github.com/flowdacity/flowdacity-queue>`_.
+It runs on Starlette and Uvicorn, stores queue state in Redis through the FQ core,
+and exposes HTTP endpoints for enqueueing, dequeueing, finishing, requeueing, and
+inspecting jobs.
-SHARQ consists of two components - the core component and the server component. The SHARQ core is built on `Redis `_, using Python and Lua, and the SHARQ Server is built using `Flask `_ and `Gevent `_ and talks HTTP.
+The server is configured entirely through environment variables and is designed to
+fit containerized deployments without mounted config files.
-SHARQ is released under the permissive `MIT License `_ and is `available on Github `_!
-
-To learn more about SHARQ, check out the `getting started section `_.
+To learn more, start with the `getting started guide `_.
.. toctree::
:maxdepth: 2
diff --git a/docs/internals.rst b/docs/internals.rst
index c7a6676..dffe758 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -2,4 +2,41 @@
The Internals
=============
-Coming soon... In the mean time, you can go through `the source code on Github `_
+Flowdacity Queue Server has two main layers:
+
+* The `Flowdacity Queue core <https://github.com/flowdacity/flowdacity-queue>`_,
+ which manages queue state in Redis and executes Lua scripts for queue operations.
+* The HTTP server in this repository, which maps REST endpoints to FQ operations,
+ loads configuration from environment variables, and runs the background requeue loop.
+
+Architecture
+------------
+
+1. A request reaches the Starlette app in ``fq_server.server``.
+2. The handler validates the route and request body, then calls the matching FQ method.
+3. FQ uses Redis data structures and Lua scripts to mutate queue state atomically.
+4. The server translates the FQ result into an HTTP status code and JSON response.
+
+Background requeue loop
+-----------------------
+
+During startup, the server initializes the FQ client and starts a background task
+that periodically calls ``queue.requeue()``. A Redis distributed lock is used so
+multiple server instances do not requeue expired jobs at the same time.
+
+The loop is controlled by:
+
+* ``FQ_ENABLE_REQUEUE_SCRIPT``
+* ``FQ_JOB_REQUEUE_INTERVAL``
+
+Shutdown
+--------
+
+On shutdown, the server cancels the background requeue task and closes the Redis
+client cleanly.
+
+Related repositories
+--------------------
+
+* Server: https://github.com/flowdacity/flowdacity-queue-server
+* Core queue library: https://github.com/flowdacity/flowdacity-queue
diff --git a/docs/license.rst b/docs/license.rst
index 7f3c87c..c0884a2 100644
--- a/docs/license.rst
+++ b/docs/license.rst
@@ -2,14 +2,16 @@
License
=======
-SHARQ Server is released under the `MIT License `_.
-
+Flowdacity Queue Server is released under the `MIT License <https://opensource.org/licenses/MIT>`_.
+The project inherits historical copyright notices from its upstream origins and
+also includes current Flowdacity copyright.
::
The MIT License (MIT)
Copyright (c) 2014 Plivo Inc
+ Copyright (c) 2025 Flowdacity Development Team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/fq_server/server.py b/fq_server/server.py
index 3bbd599..d9d832a 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -39,13 +39,13 @@
def _coerce_bool(value: str, env_var: str) -> bool:
normalized = value.strip().lower()
- if normalized in {"1", "true", "yes", "on"}:
+ if normalized == "true":
return True
- if normalized in {"0", "false", "no", "off"}:
+ if normalized == "false":
return False
raise ValueError(
f"Invalid boolean value for {env_var}: {value!r}. "
- "Use one of: 1, 0, true, false, yes, no, on, off."
+ "Use either 'true' or 'false'."
)
@@ -443,7 +443,7 @@ async def _view_metrics(self, request: Request):
response = {"status": "failure"}
request_data = {}
- # matches Flask defaults: queue_type and/or queue_id may be absent
+ # queue_type and/or queue_id may be absent depending on the route
queue_type = request.path_params.get("queue_type")
queue_id = request.path_params.get("queue_id")
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 107c121..fdae4b1 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -83,7 +83,10 @@ def test_build_config_from_env_rejects_invalid_values(self):
build_config_from_env({"FQ_REDIS_PORT": "redis"})
with self.assertRaisesRegex(ValueError, "FQ_ENABLE_REQUEUE_SCRIPT"):
- build_config_from_env({"FQ_ENABLE_REQUEUE_SCRIPT": "maybe"})
+ build_config_from_env({"FQ_ENABLE_REQUEUE_SCRIPT": "yes"})
+
+ with self.assertRaisesRegex(ValueError, "FQ_REDIS_CLUSTERED"):
+ build_config_from_env({"FQ_REDIS_CLUSTERED": "1"})
class FQServerTestCase(unittest.IsolatedAsyncioTestCase):
From f7b3c0c52941e7f2cad3434fd3867c3e1226b620 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:20:21 +0100
Subject: [PATCH 03/15] Adopts pydantic-based settings for config validation
Replaces manual environment variable parsing with pydantic and pydantic-settings for startup configuration validation. Centralizes config logic, improves error handling, and enhances maintainability by using a typed settings model. Updates documentation to reflect validation changes and adds required dependencies.
---
README.md | 1 +
docs/configuration.rst | 1 +
fq_server/__init__.py | 3 +-
fq_server/server.py | 118 +++---------------------------------
fq_server/settings.py | 90 +++++++++++++++++++++++++++
pyproject.toml | 2 +
uv.lock | 134 +++++++++++++++++++++++++++++++++++++++++
7 files changed, 238 insertions(+), 111 deletions(-)
create mode 100644 fq_server/settings.py
diff --git a/README.md b/README.md
index ac85a2d..4aae308 100644
--- a/README.md
+++ b/README.md
@@ -31,6 +31,7 @@ pip install pytest pytest-cov
## Configuration
The server reads all queue and Redis settings from environment variables. No config file is required.
+Values are validated at startup with `pydantic-settings`.
| Variable | Default | Description |
| --- | --- | --- |
diff --git a/docs/configuration.rst b/docs/configuration.rst
index c921b36..4ffed06 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -3,6 +3,7 @@ Configuration
=============
Flowdacity Queue Server reads its runtime configuration from environment variables.
+Settings are validated at startup with ``pydantic-settings``.
Queue settings
--------------
diff --git a/fq_server/__init__.py b/fq_server/__init__.py
index b99669a..c1865f6 100644
--- a/fq_server/__init__.py
+++ b/fq_server/__init__.py
@@ -1,4 +1,5 @@
from .server import FQServer, build_config_from_env, setup_server
+from .settings import QueueServerSettings
__version__ = "0.1.0"
-__all__ = ["FQServer", "build_config_from_env", "setup_server"]
+__all__ = ["FQServer", "QueueServerSettings", "build_config_from_env", "setup_server"]
diff --git a/fq_server/server.py b/fq_server/server.py
index d9d832a..47cbaec 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -3,12 +3,12 @@
# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
import asyncio
-import os
import traceback
import ujson as json
from collections.abc import Mapping
from contextlib import asynccontextmanager, suppress
from fq import FQ
+from pydantic import ValidationError
from redis.exceptions import LockError
from starlette.applications import Starlette
@@ -16,119 +16,17 @@
from starlette.responses import JSONResponse
from starlette.routing import Route
-
-DEFAULT_FQ_ENV_CONFIG = {
- "fq": {
- "job_expire_interval": 1000,
- "job_requeue_interval": 1000,
- "default_job_requeue_limit": -1,
- "enable_requeue_script": True,
- },
- "redis": {
- "db": 0,
- "key_prefix": "fq_server",
- "conn_type": "tcp_sock",
- "host": "127.0.0.1",
- "port": 6379,
- "password": "",
- "clustered": False,
- "unix_socket_path": "/tmp/redis.sock",
- },
-}
-
-
-def _coerce_bool(value: str, env_var: str) -> bool:
- normalized = value.strip().lower()
- if normalized == "true":
- return True
- if normalized == "false":
- return False
- raise ValueError(
- f"Invalid boolean value for {env_var}: {value!r}. "
- "Use either 'true' or 'false'."
- )
-
-
-def _get_env_int(
- env: Mapping[str, str], env_var: str, default: int, *, allow_empty: bool = True
-) -> int:
- value = env.get(env_var)
- if value is None or (allow_empty and value == ""):
- return default
-
- try:
- return int(value)
- except ValueError as exc:
- raise ValueError(
- f"Invalid integer value for {env_var}: {value!r}."
- ) from exc
-
-
-def _get_env_bool(env: Mapping[str, str], env_var: str, default: bool) -> bool:
- value = env.get(env_var)
- if value is None or value == "":
- return default
- return _coerce_bool(value, env_var)
-
-
-def _copy_config(config: Mapping[str, Mapping[str, object]]) -> dict[str, dict[str, object]]:
- normalized = {}
- for section_name, section_values in config.items():
- if not isinstance(section_values, Mapping):
- raise TypeError(f"Config section {section_name!r} must be a mapping.")
- normalized[str(section_name)] = {
- str(option): value for option, value in section_values.items()
- }
- return normalized
+from fq_server.settings import QueueServerSettings
def build_config_from_env(
env: Mapping[str, str] | None = None,
) -> dict[str, dict[str, object]]:
"""Build the FQ/FQ server configuration from environment variables."""
- env_map = os.environ if env is None else env
-
- config = _copy_config(DEFAULT_FQ_ENV_CONFIG)
- config["fq"]["job_expire_interval"] = _get_env_int(
- env_map, "FQ_JOB_EXPIRE_INTERVAL", config["fq"]["job_expire_interval"]
- )
- config["fq"]["job_requeue_interval"] = _get_env_int(
- env_map, "FQ_JOB_REQUEUE_INTERVAL", config["fq"]["job_requeue_interval"]
- )
- config["fq"]["default_job_requeue_limit"] = _get_env_int(
- env_map,
- "FQ_DEFAULT_JOB_REQUEUE_LIMIT",
- config["fq"]["default_job_requeue_limit"],
- )
- config["fq"]["enable_requeue_script"] = _get_env_bool(
- env_map,
- "FQ_ENABLE_REQUEUE_SCRIPT",
- config["fq"]["enable_requeue_script"],
- )
-
- config["redis"]["db"] = _get_env_int(
- env_map, "FQ_REDIS_DB", config["redis"]["db"]
- )
- config["redis"]["key_prefix"] = env_map.get(
- "FQ_REDIS_KEY_PREFIX", config["redis"]["key_prefix"]
- )
- config["redis"]["conn_type"] = env_map.get(
- "FQ_REDIS_CONN_TYPE", config["redis"]["conn_type"]
- )
- config["redis"]["host"] = env_map.get("FQ_REDIS_HOST", config["redis"]["host"])
- config["redis"]["port"] = _get_env_int(
- env_map, "FQ_REDIS_PORT", config["redis"]["port"]
- )
- config["redis"]["password"] = env_map.get(
- "FQ_REDIS_PASSWORD", config["redis"]["password"]
- )
- config["redis"]["clustered"] = _get_env_bool(
- env_map, "FQ_REDIS_CLUSTERED", config["redis"]["clustered"]
- )
- config["redis"]["unix_socket_path"] = env_map.get(
- "FQ_REDIS_UNIX_SOCKET_PATH", config["redis"]["unix_socket_path"]
- )
- return config
+ try:
+ return QueueServerSettings.from_env(env).to_fq_config()
+ except ValidationError as exc:
+ raise ValueError(str(exc)) from exc
class FQServer(object):
@@ -138,7 +36,7 @@ class FQServer(object):
def __init__(self, config: Mapping[str, Mapping[str, object]]):
"""Load the FQ config mapping and define the routes."""
- self.config = _copy_config(config)
+ self.config = config
self.queue = FQ(self.config)
self._requeue_task: asyncio.Task | None = None
@@ -513,5 +411,5 @@ def setup_server(
env: Mapping[str, str] | None = None,
) -> FQServer:
"""Configure FQ server and return the server instance."""
- server_config = build_config_from_env(env) if config is None else _copy_config(config)
+ server_config = build_config_from_env(env) if config is None else config
return FQServer(server_config)
diff --git a/fq_server/settings.py b/fq_server/settings.py
new file mode 100644
index 0000000..ebfcde5
--- /dev/null
+++ b/fq_server/settings.py
@@ -0,0 +1,90 @@
+# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
+
+from collections.abc import Mapping
+from typing import Literal
+
+from pydantic import Field, field_validator
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class QueueServerSettings(BaseSettings):
+ """Environment-backed settings for Flowdacity Queue Server."""
+
+ model_config = SettingsConfigDict(extra="ignore")
+
+ job_expire_interval: int = Field(
+ default=1000, ge=1, validation_alias="FQ_JOB_EXPIRE_INTERVAL"
+ )
+ job_requeue_interval: int = Field(
+ default=1000, ge=1, validation_alias="FQ_JOB_REQUEUE_INTERVAL"
+ )
+ default_job_requeue_limit: int = Field(
+ default=-1, ge=-1, validation_alias="FQ_DEFAULT_JOB_REQUEUE_LIMIT"
+ )
+ enable_requeue_script: bool = Field(
+ default=True, validation_alias="FQ_ENABLE_REQUEUE_SCRIPT"
+ )
+
+ redis_db: int = Field(default=0, ge=0, validation_alias="FQ_REDIS_DB")
+ redis_key_prefix: str = Field(
+ default="fq_server", min_length=1, validation_alias="FQ_REDIS_KEY_PREFIX"
+ )
+ redis_conn_type: Literal["tcp_sock", "unix_sock"] = Field(
+ default="tcp_sock", validation_alias="FQ_REDIS_CONN_TYPE"
+ )
+ redis_host: str = Field(
+ default="127.0.0.1", min_length=1, validation_alias="FQ_REDIS_HOST"
+ )
+ redis_port: int = Field(
+ default=6379, ge=1, le=65535, validation_alias="FQ_REDIS_PORT"
+ )
+ redis_password: str = Field(default="", validation_alias="FQ_REDIS_PASSWORD")
+ redis_clustered: bool = Field(
+ default=False, validation_alias="FQ_REDIS_CLUSTERED"
+ )
+ redis_unix_socket_path: str = Field(
+ default="/tmp/redis.sock",
+ min_length=1,
+ validation_alias="FQ_REDIS_UNIX_SOCKET_PATH",
+ )
+
+ @field_validator("enable_requeue_script", "redis_clustered", mode="before")
+ @classmethod
+ def validate_boolean_env(cls, value: bool | str) -> bool:
+ if isinstance(value, bool):
+ return value
+
+ if isinstance(value, str):
+ normalized = value.strip().lower()
+ if normalized == "true":
+ return True
+ if normalized == "false":
+ return False
+
+ raise ValueError("Use either 'true' or 'false'.")
+
+ @classmethod
+ def from_env(cls, env: Mapping[str, str] | None = None) -> "QueueServerSettings":
+ if env is None:
+ return cls()
+ return cls.model_validate(env)
+
+ def to_fq_config(self) -> dict[str, dict[str, object]]:
+ return {
+ "fq": {
+ "job_expire_interval": self.job_expire_interval,
+ "job_requeue_interval": self.job_requeue_interval,
+ "default_job_requeue_limit": self.default_job_requeue_limit,
+ "enable_requeue_script": self.enable_requeue_script,
+ },
+ "redis": {
+ "db": self.redis_db,
+ "key_prefix": self.redis_key_prefix,
+ "conn_type": self.redis_conn_type,
+ "host": self.redis_host,
+ "port": self.redis_port,
+ "password": self.redis_password,
+ "clustered": self.redis_clustered,
+ "unix_socket_path": self.redis_unix_socket_path,
+ },
+ }
diff --git a/pyproject.toml b/pyproject.toml
index e8826ea..5e20652 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,6 +8,8 @@ dependencies = [
"flowdacity-queue @ git+https://github.com/flowdacity/flowdacity-queue.git@v1.0.0",
"httpx>=0.28.1",
"msgpack>=1.1.2",
+ "pydantic>=2.0.0",
+ "pydantic-settings>=2.0.0",
"redis[hiredis]>=7.1.0",
"starlette>=0.50.0",
"ujson>=5.11.0",
diff --git a/uv.lock b/uv.lock
index 7f98a21..cda52c8 100644
--- a/uv.lock
+++ b/uv.lock
@@ -2,6 +2,15 @@ version = 1
revision = 3
requires-python = ">=3.12"
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
[[package]]
name = "anyio"
version = "4.12.1"
@@ -136,6 +145,8 @@ dependencies = [
{ name = "flowdacity-queue" },
{ name = "httpx" },
{ name = "msgpack" },
+ { name = "pydantic" },
+ { name = "pydantic-settings" },
{ name = "redis", extra = ["hiredis"] },
{ name = "starlette" },
{ name = "ujson" },
@@ -153,6 +164,8 @@ requires-dist = [
{ name = "flowdacity-queue", git = "https://github.com/flowdacity/flowdacity-queue.git?rev=v1.0.0" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "msgpack", specifier = ">=1.1.2" },
+ { name = "pydantic", specifier = ">=2.0.0" },
+ { name = "pydantic-settings", specifier = ">=2.0.0" },
{ name = "redis", extras = ["hiredis"], specifier = ">=7.1.0" },
{ name = "starlette", specifier = ">=0.50.0" },
{ name = "ujson", specifier = ">=5.11.0" },
@@ -342,6 +355,106 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]
+[[package]]
+name = "pydantic"
+version = "2.12.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.41.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
+ { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
+ { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
+ { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
+ { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
+ { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
+ { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
+ { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
+ { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
+ { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
+ { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
+]
+
+[[package]]
+name = "pydantic-settings"
+version = "2.13.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dotenv" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" },
+]
+
[[package]]
name = "pygments"
version = "2.19.2"
@@ -381,6 +494,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
]
+[[package]]
+name = "python-dotenv"
+version = "1.2.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
+]
+
[[package]]
name = "redis"
version = "7.1.0"
@@ -417,6 +539,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
]
+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
+]
+
[[package]]
name = "ujson"
version = "5.12.0"
From 3fc976eeda96c2cc0565b8aab40a938c446d53ea Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:24:58 +0100
Subject: [PATCH 04/15] Adopts explicit FQConfig type for server config
Introduces strongly-typed configuration using TypedDicts to clarify and enforce the structure of server configuration. Improves type safety, code clarity, and maintainability by replacing generic mapping types with explicit FQConfig throughout the codebase.
---
fq_server/__init__.py | 10 ++++++++--
fq_server/server.py | 10 +++++-----
fq_server/settings.py | 27 +++++++++++++++++++++++++--
tests/test_routes.py | 4 ++--
4 files changed, 40 insertions(+), 11 deletions(-)
diff --git a/fq_server/__init__.py b/fq_server/__init__.py
index c1865f6..c15868e 100644
--- a/fq_server/__init__.py
+++ b/fq_server/__init__.py
@@ -1,5 +1,11 @@
from .server import FQServer, build_config_from_env, setup_server
-from .settings import QueueServerSettings
+from .settings import FQConfig, QueueServerSettings
__version__ = "0.1.0"
-__all__ = ["FQServer", "QueueServerSettings", "build_config_from_env", "setup_server"]
+__all__ = [
+ "FQConfig",
+ "FQServer",
+ "QueueServerSettings",
+ "build_config_from_env",
+ "setup_server",
+]
diff --git a/fq_server/server.py b/fq_server/server.py
index 47cbaec..f490742 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -16,12 +16,12 @@
from starlette.responses import JSONResponse
from starlette.routing import Route
-from fq_server.settings import QueueServerSettings
+from fq_server.settings import FQConfig, QueueServerSettings
def build_config_from_env(
env: Mapping[str, str] | None = None,
-) -> dict[str, dict[str, object]]:
+) -> FQConfig:
"""Build the FQ/FQ server configuration from environment variables."""
try:
return QueueServerSettings.from_env(env).to_fq_config()
@@ -34,7 +34,7 @@ class FQServer(object):
exposes the app to run the server (Starlette).
"""
- def __init__(self, config: Mapping[str, Mapping[str, object]]):
+ def __init__(self, config: FQConfig):
"""Load the FQ config mapping and define the routes."""
self.config = config
self.queue = FQ(self.config)
@@ -62,7 +62,7 @@ async def requeue(self):
async def requeue_with_lock(self):
"""Loop endlessly and requeue expired jobs, but with a distributed lock."""
- if not self.config["fq"].get("enable_requeue_script", True):
+ if not self.config["fq"]["enable_requeue_script"]:
print("requeue script disabled")
return
@@ -406,7 +406,7 @@ async def _view_clear_queue(self, request: Request):
# Setup helpers to create and configure the server
# ----------------------------------------------------------------------
def setup_server(
- config: Mapping[str, Mapping[str, object]] | None = None,
+ config: FQConfig | None = None,
*,
env: Mapping[str, str] | None = None,
) -> FQServer:
diff --git a/fq_server/settings.py b/fq_server/settings.py
index ebfcde5..10c242c 100644
--- a/fq_server/settings.py
+++ b/fq_server/settings.py
@@ -1,12 +1,35 @@
# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
from collections.abc import Mapping
-from typing import Literal
+from typing import Literal, TypedDict
from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
+class FQSectionConfig(TypedDict):
+ job_expire_interval: int
+ job_requeue_interval: int
+ default_job_requeue_limit: int
+ enable_requeue_script: bool
+
+
+class RedisSectionConfig(TypedDict):
+ db: int
+ key_prefix: str
+ conn_type: Literal["tcp_sock", "unix_sock"]
+ host: str
+ port: int
+ password: str
+ clustered: bool
+ unix_socket_path: str
+
+
+class FQConfig(TypedDict):
+ fq: FQSectionConfig
+ redis: RedisSectionConfig
+
+
class QueueServerSettings(BaseSettings):
"""Environment-backed settings for Flowdacity Queue Server."""
@@ -69,7 +92,7 @@ def from_env(cls, env: Mapping[str, str] | None = None) -> "QueueServerSettings"
return cls()
return cls.model_validate(env)
- def to_fq_config(self) -> dict[str, dict[str, object]]:
+ def to_fq_config(self) -> FQConfig:
return {
"fq": {
"job_expire_interval": self.job_expire_interval,
diff --git a/tests/test_routes.py b/tests/test_routes.py
index fdae4b1..7255897 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -9,10 +9,10 @@
from starlette.types import ASGIApp
from unittest.mock import AsyncMock, patch
-from fq_server import build_config_from_env, setup_server
+from fq_server import FQConfig, build_config_from_env, setup_server
-def build_test_config():
+def build_test_config() -> FQConfig:
return {
"fq": {
"job_expire_interval": 1000,
From f244cc6af96eb2cf853745acf2226626d7333d10 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:28:47 +0100
Subject: [PATCH 05/15] Adds explicit JSONDict type aliases for responses
Introduces a type alias to clarify the structure of response and request data dictionaries, improving code readability and static analysis. Enhances type safety and aligns with modern Python typing practices.
---
fq_server/server.py | 31 +++++++++++++++++--------------
1 file changed, 17 insertions(+), 14 deletions(-)
diff --git a/fq_server/server.py b/fq_server/server.py
index f490742..684a4c2 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -7,6 +7,7 @@
import ujson as json
from collections.abc import Mapping
from contextlib import asynccontextmanager, suppress
+from typing import Any, TypeAlias
from fq import FQ
from pydantic import ValidationError
from redis.exceptions import LockError
@@ -18,6 +19,8 @@
from fq_server.settings import FQConfig, QueueServerSettings
+JSONDict: TypeAlias = dict[str, Any]
+
def build_config_from_env(
env: Mapping[str, str] | None = None,
@@ -186,10 +189,10 @@ async def _view_enqueue(self, request: Request):
queue_type = request.path_params["queue_type"]
queue_id = request.path_params["queue_id"]
- response = {"status": "failure"}
+ response: JSONDict = {"status": "failure"}
try:
raw_body = await request.body()
- request_data = json.loads(raw_body or b"{}")
+ request_data: JSONDict = json.loads(raw_body or b"{}")
except Exception as e:
response["message"] = str(e)
return JSONResponse(response, status_code=400)
@@ -253,8 +256,8 @@ async def _view_dequeue(self, request: Request):
return await self._dequeue_with_type(queue_type)
async def _dequeue_with_type(self, queue_type: str):
- response = {"status": "failure"}
- request_data = {"queue_type": queue_type}
+ response: JSONDict = {"status": "failure"}
+ request_data: JSONDict = {"queue_type": queue_type}
try:
response = await self.queue.dequeue(**request_data)
@@ -287,8 +290,8 @@ async def _view_finish(self, request: Request):
queue_id = request.path_params["queue_id"]
job_id = request.path_params["job_id"]
- response = {"status": "failure"}
- request_data = {
+ response: JSONDict = {"status": "failure"}
+ request_data: JSONDict = {
"queue_type": queue_type,
"queue_id": queue_id,
"job_id": job_id,
@@ -310,16 +313,16 @@ async def _view_interval(self, request: Request):
queue_type = request.path_params["queue_type"]
queue_id = request.path_params["queue_id"]
- response = {"status": "failure"}
+ response: JSONDict = {"status": "failure"}
try:
raw_body = await request.body()
- body = json.loads(raw_body or b"{}")
+ body: JSONDict = json.loads(raw_body or b"{}")
interval = body["interval"]
except Exception as e:
response["message"] = str(e)
return JSONResponse(response, status_code=400)
- request_data = {
+ request_data: JSONDict = {
"queue_type": queue_type,
"queue_id": queue_id,
"interval": interval,
@@ -338,8 +341,8 @@ async def _view_interval(self, request: Request):
async def _view_metrics(self, request: Request):
"""Gets FQ metrics based on the params."""
- response = {"status": "failure"}
- request_data = {}
+ response: JSONDict = {"status": "failure"}
+ request_data: JSONDict = {}
# queue_type and/or queue_id may be absent depending on the route
queue_type = request.path_params.get("queue_type")
@@ -363,7 +366,7 @@ async def _view_deep_status(self, request: Request):
"""Checks underlying data store health."""
try:
await self.queue.deep_status()
- response = {"status": "success"}
+ response: JSONDict = {"status": "success"}
return JSONResponse(response)
except Exception as e:
print(e)
@@ -377,10 +380,10 @@ async def _view_clear_queue(self, request: Request):
queue_type = request.path_params["queue_type"]
queue_id = request.path_params["queue_id"]
- response = {"status": "failure"}
+ response: JSONDict = {"status": "failure"}
try:
raw_body = await request.body()
- request_data = json.loads(raw_body or b"{}")
+ request_data: JSONDict = json.loads(raw_body or b"{}")
except Exception as e:
response["message"] = str(e)
return JSONResponse(response, status_code=400)
From 9607ba7e8bef63c8178dda2be470241427f8cef4 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:35:37 +0100
Subject: [PATCH 06/15] Adds assertion to ensure Redis client is initialized in
FQServer
---
fq_server/server.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/fq_server/server.py b/fq_server/server.py
index 684a4c2..858078a 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -76,6 +76,7 @@ async def requeue_with_lock(self):
while True:
try:
redis = self.queue.redis_client()
+ assert redis is not None, "Redis client is not initialized"
# assumes async lock
async with redis.lock("fq-requeue-lock-key", timeout=15):
try:
From 3f80de126f8934a7ded30a10bada290413fb7aa4 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:41:57 +0100
Subject: [PATCH 07/15] Adds configurable log level and structured logging
Introduces an environment-configurable application log level, validated via settings.
Switches server logging from print and traceback to structured logging with context-aware messages,
improving diagnostics and production readiness. Updates documentation, Docker config, and tests
to support and describe the new log level feature.
---
README.md | 1 +
asgi.py | 22 ++++++++++++--
docker-compose.yml | 4 +--
docs/configuration.rst | 6 ++++
fq_server/server.py | 66 ++++++++++++++++++++++++++----------------
fq_server/settings.py | 17 ++++++++++-
tests/test_routes.py | 11 ++++++-
7 files changed, 96 insertions(+), 31 deletions(-)
diff --git a/README.md b/README.md
index 4aae308..5ed4ae0 100644
--- a/README.md
+++ b/README.md
@@ -39,6 +39,7 @@ Values are validated at startup with `pydantic-settings`.
| `FQ_JOB_REQUEUE_INTERVAL` | `1000` | Milliseconds between expired-job requeue passes. |
| `FQ_DEFAULT_JOB_REQUEUE_LIMIT` | `-1` | Default retry limit. `-1` retries forever. |
| `FQ_ENABLE_REQUEUE_SCRIPT` | `true` | Enables the background requeue loop. |
+| `FQ_LOG_LEVEL` | `INFO` | Application log level. |
| `FQ_REDIS_DB` | `0` | Redis database number. |
| `FQ_REDIS_KEY_PREFIX` | `fq_server` | Prefix used for Redis keys. |
| `FQ_REDIS_CONN_TYPE` | `tcp_sock` | Redis connection type: `tcp_sock` or `unix_sock`. |
diff --git a/asgi.py b/asgi.py
index 09fb5a3..17252d0 100644
--- a/asgi.py
+++ b/asgi.py
@@ -1,9 +1,27 @@
# Copyright (c) 2025 Flowdacity Team. See LICENSE.txt for details.
# ASGI application entrypoint for Flowdacity Queue (FQ) Server
-from fq_server import setup_server
+import logging
-server = setup_server()
+from fq_server import QueueServerSettings, setup_server
+
+
+def configure_logging(log_level: str) -> None:
+ level = getattr(logging, log_level)
+ root_logger = logging.getLogger()
+
+ if not root_logger.handlers:
+ logging.basicConfig(
+ level=level,
+ format="%(asctime)s %(levelname)s [%(name)s] %(message)s",
+ )
+
+ logging.getLogger("fq_server").setLevel(level)
+
+
+settings = QueueServerSettings.from_env()
+configure_logging(settings.log_level)
+server = setup_server(settings.to_fq_config())
# ASGI app exposed for Uvicorn/Hypercorn
app = server.app
diff --git a/docker-compose.yml b/docker-compose.yml
index a40f222..550f935 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,11 +2,11 @@ services:
app:
build: .
environment:
- PORT: 8080
FQ_JOB_EXPIRE_INTERVAL: 1000
FQ_JOB_REQUEUE_INTERVAL: 1000
FQ_DEFAULT_JOB_REQUEUE_LIMIT: -1
FQ_ENABLE_REQUEUE_SCRIPT: "true"
+ FQ_LOG_LEVEL: INFO
FQ_REDIS_DB: 0
FQ_REDIS_KEY_PREFIX: fq_server
FQ_REDIS_CONN_TYPE: tcp_sock
@@ -15,7 +15,7 @@ services:
FQ_REDIS_PASSWORD: ""
FQ_REDIS_CLUSTERED: "false"
ports:
- - "8080:8080"
+ - "8300:8300"
depends_on:
redis:
condition: service_healthy
diff --git a/docs/configuration.rst b/docs/configuration.rst
index 4ffed06..b06fb83 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -20,6 +20,10 @@ Queue settings
``FQ_ENABLE_REQUEUE_SCRIPT``
Enables or disables the background requeue loop.
+``FQ_LOG_LEVEL``
+ Application log level. Supported values are ``DEBUG``, ``INFO``, ``WARNING``,
+ ``ERROR``, and ``CRITICAL``.
+
Redis settings
--------------
@@ -63,6 +67,8 @@ Defaults
- ``-1``
* - ``FQ_ENABLE_REQUEUE_SCRIPT``
- ``true``
+ * - ``FQ_LOG_LEVEL``
+ - ``INFO``
* - ``FQ_REDIS_DB``
- ``0``
* - ``FQ_REDIS_KEY_PREFIX``
diff --git a/fq_server/server.py b/fq_server/server.py
index 858078a..9a2a86f 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -3,7 +3,7 @@
# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
import asyncio
-import traceback
+import logging
import ujson as json
from collections.abc import Mapping
from contextlib import asynccontextmanager, suppress
@@ -20,6 +20,7 @@
from fq_server.settings import FQConfig, QueueServerSettings
JSONDict: TypeAlias = dict[str, Any]
+logger = logging.getLogger(__name__)
def build_config_from_env(
@@ -59,19 +60,22 @@ async def requeue(self):
try:
await self.queue.requeue()
except Exception:
- traceback.print_exc()
+ logger.exception("Failed to requeue expired jobs")
# in seconds
await asyncio.sleep(job_requeue_interval / 1000.0)
async def requeue_with_lock(self):
"""Loop endlessly and requeue expired jobs, but with a distributed lock."""
if not self.config["fq"]["enable_requeue_script"]:
- print("requeue script disabled")
+ logger.info("Requeue loop disabled")
return
job_requeue_interval = float(self.config["fq"]["job_requeue_interval"])
- print("start requeue loop: job_requeue_interval = %f" % (job_requeue_interval))
+ logger.info(
+ "Starting requeue loop",
+ extra={"job_requeue_interval": job_requeue_interval},
+ )
while True:
try:
@@ -82,10 +86,10 @@ async def requeue_with_lock(self):
try:
await self.queue.requeue()
except Exception:
- traceback.print_exc()
+ logger.exception("Failed to requeue expired jobs while holding lock")
except LockError:
# the lock wasn't acquired within specified time
- pass
+ logger.debug("Requeue lock is already held by another worker")
finally:
await asyncio.sleep(job_requeue_interval / 1000.0)
@@ -217,10 +221,10 @@ async def _view_enqueue(self, request: Request):
queue_type, queue_id
)
except Exception as e:
- print(
- "Error occurred while fetching redis key length as {} for auth_id {}".format(
- e, queue_id
- )
+ logger.warning(
+ "Failed to fetch queue length during enqueue",
+ exc_info=e,
+ extra={"queue_type": queue_type, "queue_id": queue_id},
)
if current_queue_length < max_queued_length:
@@ -228,7 +232,10 @@ async def _view_enqueue(self, request: Request):
response = await self.queue.enqueue(**request_data)
response["current_queue_length"] = current_queue_length
except Exception as e:
- traceback.print_exc()
+ logger.exception(
+ "Enqueue failed",
+ extra={"queue_type": queue_type, "queue_id": queue_id},
+ )
response["message"] = str(e)
return JSONResponse(response, status_code=400)
@@ -241,7 +248,10 @@ async def _view_enqueue(self, request: Request):
try:
response = await self.queue.enqueue(**request_data)
except Exception as e:
- traceback.print_exc()
+ logger.exception(
+ "Enqueue failed",
+ extra={"queue_type": queue_type, "queue_id": queue_id},
+ )
response["message"] = str(e)
return JSONResponse(response, status_code=400)
@@ -271,15 +281,14 @@ async def _dequeue_with_type(self, queue_type: str):
queue_type, response["queue_id"]
)
except Exception as e:
- print(
- "DEQUEUE::Error occurred while fetching redis key length {} for queue_id {}".format(
- e, response["queue_id"]
- )
+ logger.warning(
+ "Failed to fetch queue length during dequeue",
+ exc_info=e,
+ extra={"queue_type": queue_type, "queue_id": response["queue_id"]},
)
response["current_queue_length"] = current_queue_length
except Exception as e:
- for line in traceback.format_exc().splitlines():
- print(line)
+ logger.exception("Dequeue failed", extra={"queue_type": queue_type})
response["message"] = str(e)
return JSONResponse(response, status_code=400)
@@ -303,7 +312,10 @@ async def _view_finish(self, request: Request):
if response["status"] == "failure":
return JSONResponse(response, status_code=404)
except Exception as e:
- traceback.print_exc()
+ logger.exception(
+ "Finish failed",
+ extra={"queue_type": queue_type, "queue_id": queue_id, "job_id": job_id},
+ )
response["message"] = str(e)
return JSONResponse(response, status_code=400)
@@ -334,7 +346,10 @@ async def _view_interval(self, request: Request):
if response["status"] == "failure":
return JSONResponse(response, status_code=404)
except Exception as e:
- traceback.print_exc()
+ logger.exception(
+ "Interval update failed",
+ extra={"queue_type": queue_type, "queue_id": queue_id},
+ )
response["message"] = str(e)
return JSONResponse(response, status_code=400)
@@ -357,7 +372,7 @@ async def _view_metrics(self, request: Request):
try:
response = await self.queue.metrics(**request_data)
except Exception as e:
- traceback.print_exc()
+ logger.exception("Metrics query failed", extra=request_data)
response["message"] = str(e)
return JSONResponse(response, status_code=400)
@@ -370,9 +385,7 @@ async def _view_deep_status(self, request: Request):
response: JSONDict = {"status": "success"}
return JSONResponse(response)
except Exception as e:
- print(e)
- for line in traceback.format_exc().splitlines():
- print(line)
+ logger.exception("Deep status check failed")
# preserve original behavior: raise generic exception -> 500
raise Exception from e
@@ -399,7 +412,10 @@ async def _view_clear_queue(self, request: Request):
try:
response = await self.queue.clear_queue(**request_data)
except Exception as e:
- traceback.print_exc()
+ logger.exception(
+ "Clear queue failed",
+ extra={"queue_type": queue_type, "queue_id": queue_id},
+ )
response["message"] = str(e)
return JSONResponse(response, status_code=400)
diff --git a/fq_server/settings.py b/fq_server/settings.py
index 10c242c..8def599 100644
--- a/fq_server/settings.py
+++ b/fq_server/settings.py
@@ -1,11 +1,13 @@
# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
from collections.abc import Mapping
-from typing import Literal, TypedDict
+from typing import Literal, TypedDict, cast
from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict
+LogLevelName = Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
+
class FQSectionConfig(TypedDict):
job_expire_interval: int
@@ -47,6 +49,7 @@ class QueueServerSettings(BaseSettings):
enable_requeue_script: bool = Field(
default=True, validation_alias="FQ_ENABLE_REQUEUE_SCRIPT"
)
+ log_level: LogLevelName = Field(default="INFO", validation_alias="FQ_LOG_LEVEL")
redis_db: int = Field(default=0, ge=0, validation_alias="FQ_REDIS_DB")
redis_key_prefix: str = Field(
@@ -86,6 +89,18 @@ def validate_boolean_env(cls, value: bool | str) -> bool:
raise ValueError("Use either 'true' or 'false'.")
+ @field_validator("log_level", mode="before")
+ @classmethod
+ def validate_log_level(cls, value: LogLevelName | str) -> LogLevelName:
+ if isinstance(value, str):
+ normalized = value.strip().upper()
+ if normalized in {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}:
+ return cast(LogLevelName, normalized)
+
+ raise ValueError(
+ "Use one of: DEBUG, INFO, WARNING, ERROR, CRITICAL."
+ )
+
@classmethod
def from_env(cls, env: Mapping[str, str] | None = None) -> "QueueServerSettings":
if env is None:
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 7255897..e0ba520 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -6,10 +6,11 @@
import ujson as json
from httpx import AsyncClient, ASGITransport
+from pydantic import ValidationError
from starlette.types import ASGIApp
from unittest.mock import AsyncMock, patch
-from fq_server import FQConfig, build_config_from_env, setup_server
+from fq_server import FQConfig, QueueServerSettings, build_config_from_env, setup_server
def build_test_config() -> FQConfig:
@@ -88,6 +89,14 @@ def test_build_config_from_env_rejects_invalid_values(self):
with self.assertRaisesRegex(ValueError, "FQ_REDIS_CLUSTERED"):
build_config_from_env({"FQ_REDIS_CLUSTERED": "1"})
+ def test_queue_server_settings_log_level_override(self):
+ settings = QueueServerSettings.from_env({"FQ_LOG_LEVEL": "debug"})
+ self.assertEqual(settings.log_level, "DEBUG")
+
+ def test_queue_server_settings_rejects_invalid_log_level(self):
+ with self.assertRaisesRegex(ValidationError, "FQ_LOG_LEVEL"):
+ QueueServerSettings.from_env({"FQ_LOG_LEVEL": "verbose"})
+
class FQServerTestCase(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self):
From 378a75a9a501b798909423ebb06348fe3553e26a Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 19:49:00 +0100
Subject: [PATCH 08/15] Removes FQ_ prefix from environment variable names
Standardizes configuration by dropping the FQ_ prefix from all environment variables and documentation references. Improves clarity and consistency for users configuring the application, reducing friction and potential confusion when setting environment variables.
---
README.md | 28 ++++++++++-----------
docker-compose.yml | 24 +++++++++---------
docs/configuration.rst | 54 ++++++++++++++++++++---------------------
docs/faqs.rst | 4 +--
docs/gettingstarted.rst | 6 ++---
docs/internals.rst | 4 +--
fq_server/settings.py | 26 ++++++++++----------
tests/test_routes.py | 42 ++++++++++++++++----------------
8 files changed, 94 insertions(+), 94 deletions(-)
diff --git a/README.md b/README.md
index 5ed4ae0..b264b97 100644
--- a/README.md
+++ b/README.md
@@ -35,19 +35,19 @@ Values are validated at startup with `pydantic-settings`.
| Variable | Default | Description |
| --- | --- | --- |
-| `FQ_JOB_EXPIRE_INTERVAL` | `1000` | Milliseconds before a dequeued job is considered expired. |
-| `FQ_JOB_REQUEUE_INTERVAL` | `1000` | Milliseconds between expired-job requeue passes. |
-| `FQ_DEFAULT_JOB_REQUEUE_LIMIT` | `-1` | Default retry limit. `-1` retries forever. |
-| `FQ_ENABLE_REQUEUE_SCRIPT` | `true` | Enables the background requeue loop. |
-| `FQ_LOG_LEVEL` | `INFO` | Application log level. |
-| `FQ_REDIS_DB` | `0` | Redis database number. |
-| `FQ_REDIS_KEY_PREFIX` | `fq_server` | Prefix used for Redis keys. |
-| `FQ_REDIS_CONN_TYPE` | `tcp_sock` | Redis connection type: `tcp_sock` or `unix_sock`. |
-| `FQ_REDIS_HOST` | `127.0.0.1` | Redis host for TCP connections. |
-| `FQ_REDIS_PORT` | `6379` | Redis port for TCP connections. |
-| `FQ_REDIS_PASSWORD` | empty | Redis password. |
-| `FQ_REDIS_CLUSTERED` | `false` | Enables Redis Cluster mode. |
-| `FQ_REDIS_UNIX_SOCKET_PATH` | `/tmp/redis.sock` | Redis socket path when `FQ_REDIS_CONN_TYPE=unix_sock`. |
+| `JOB_EXPIRE_INTERVAL` | `1000` | Milliseconds before a dequeued job is considered expired. |
+| `JOB_REQUEUE_INTERVAL` | `1000` | Milliseconds between expired-job requeue passes. |
+| `DEFAULT_JOB_REQUEUE_LIMIT` | `-1` | Default retry limit. `-1` retries forever. |
+| `ENABLE_REQUEUE_SCRIPT` | `true` | Enables the background requeue loop. |
+| `LOG_LEVEL` | `INFO` | Application log level. |
+| `REDIS_DB` | `0` | Redis database number. |
+| `REDIS_KEY_PREFIX` | `fq_server` | Prefix used for Redis keys. |
+| `REDIS_CONN_TYPE` | `tcp_sock` | Redis connection type: `tcp_sock` or `unix_sock`. |
+| `REDIS_HOST` | `127.0.0.1` | Redis host for TCP connections. |
+| `REDIS_PORT` | `6379` | Redis port for TCP connections. |
+| `REDIS_PASSWORD` | empty | Redis password. |
+| `REDIS_CLUSTERED` | `false` | Enables Redis Cluster mode. |
+| `REDIS_UNIX_SOCKET_PATH` | `/tmp/redis.sock` | Redis socket path when `REDIS_CONN_TYPE=unix_sock`. |
| `PORT` | `8300` | Uvicorn port used by the container and local examples. |
Boolean env vars accept only `true` or `false`.
@@ -64,7 +64,7 @@ Run the API:
```bash
PORT=8080 \
-FQ_REDIS_HOST=127.0.0.1 \
+REDIS_HOST=127.0.0.1 \
uv run uvicorn asgi:app --host 0.0.0.0 --port 8080
```
diff --git a/docker-compose.yml b/docker-compose.yml
index 550f935..00b51c1 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,18 +2,18 @@ services:
app:
build: .
environment:
- FQ_JOB_EXPIRE_INTERVAL: 1000
- FQ_JOB_REQUEUE_INTERVAL: 1000
- FQ_DEFAULT_JOB_REQUEUE_LIMIT: -1
- FQ_ENABLE_REQUEUE_SCRIPT: "true"
- FQ_LOG_LEVEL: INFO
- FQ_REDIS_DB: 0
- FQ_REDIS_KEY_PREFIX: fq_server
- FQ_REDIS_CONN_TYPE: tcp_sock
- FQ_REDIS_HOST: redis
- FQ_REDIS_PORT: 6379
- FQ_REDIS_PASSWORD: ""
- FQ_REDIS_CLUSTERED: "false"
+ JOB_EXPIRE_INTERVAL: 1000
+ JOB_REQUEUE_INTERVAL: 1000
+ DEFAULT_JOB_REQUEUE_LIMIT: -1
+ ENABLE_REQUEUE_SCRIPT: "true"
+ LOG_LEVEL: INFO
+ REDIS_DB: 0
+ REDIS_KEY_PREFIX: fq_server
+ REDIS_CONN_TYPE: tcp_sock
+ REDIS_HOST: redis
+ REDIS_PORT: 6379
+ REDIS_PASSWORD: ""
+ REDIS_CLUSTERED: "false"
ports:
- "8300:8300"
depends_on:
diff --git a/docs/configuration.rst b/docs/configuration.rst
index b06fb83..8036c7b 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -8,48 +8,48 @@ Settings are validated at startup with ``pydantic-settings``.
Queue settings
--------------
-``FQ_JOB_EXPIRE_INTERVAL``
+``JOB_EXPIRE_INTERVAL``
Milliseconds after which a dequeued job is considered expired.
-``FQ_JOB_REQUEUE_INTERVAL``
+``JOB_REQUEUE_INTERVAL``
Milliseconds between requeue passes for expired jobs.
-``FQ_DEFAULT_JOB_REQUEUE_LIMIT``
+``DEFAULT_JOB_REQUEUE_LIMIT``
Default retry limit for jobs. ``-1`` means retry forever.
-``FQ_ENABLE_REQUEUE_SCRIPT``
+``ENABLE_REQUEUE_SCRIPT``
Enables or disables the background requeue loop.
-``FQ_LOG_LEVEL``
+``LOG_LEVEL``
Application log level. Supported values are ``DEBUG``, ``INFO``, ``WARNING``,
``ERROR``, and ``CRITICAL``.
Redis settings
--------------
-``FQ_REDIS_DB``
+``REDIS_DB``
Redis database number.
-``FQ_REDIS_KEY_PREFIX``
+``REDIS_KEY_PREFIX``
Prefix used for Redis keys created by the queue.
-``FQ_REDIS_CONN_TYPE``
+``REDIS_CONN_TYPE``
Redis connection type. Supported values are ``tcp_sock`` and ``unix_sock``.
-``FQ_REDIS_HOST``
+``REDIS_HOST``
Redis host for TCP connections.
-``FQ_REDIS_PORT``
+``REDIS_PORT``
Redis port for TCP connections.
-``FQ_REDIS_PASSWORD``
+``REDIS_PASSWORD``
Redis password. Leave empty when authentication is not required.
-``FQ_REDIS_CLUSTERED``
+``REDIS_CLUSTERED``
Enables Redis Cluster mode when set to ``true``.
-``FQ_REDIS_UNIX_SOCKET_PATH``
- Redis unix socket path when ``FQ_REDIS_CONN_TYPE=unix_sock``.
+``REDIS_UNIX_SOCKET_PATH``
+ Redis unix socket path when ``REDIS_CONN_TYPE=unix_sock``.
Defaults
--------
@@ -59,31 +59,31 @@ Defaults
* - Variable
- Default
- * - ``FQ_JOB_EXPIRE_INTERVAL``
+ * - ``JOB_EXPIRE_INTERVAL``
- ``1000``
- * - ``FQ_JOB_REQUEUE_INTERVAL``
+ * - ``JOB_REQUEUE_INTERVAL``
- ``1000``
- * - ``FQ_DEFAULT_JOB_REQUEUE_LIMIT``
+ * - ``DEFAULT_JOB_REQUEUE_LIMIT``
- ``-1``
- * - ``FQ_ENABLE_REQUEUE_SCRIPT``
+ * - ``ENABLE_REQUEUE_SCRIPT``
- ``true``
- * - ``FQ_LOG_LEVEL``
+ * - ``LOG_LEVEL``
- ``INFO``
- * - ``FQ_REDIS_DB``
+ * - ``REDIS_DB``
- ``0``
- * - ``FQ_REDIS_KEY_PREFIX``
+ * - ``REDIS_KEY_PREFIX``
- ``fq_server``
- * - ``FQ_REDIS_CONN_TYPE``
+ * - ``REDIS_CONN_TYPE``
- ``tcp_sock``
- * - ``FQ_REDIS_HOST``
+ * - ``REDIS_HOST``
- ``127.0.0.1``
- * - ``FQ_REDIS_PORT``
+ * - ``REDIS_PORT``
- ``6379``
- * - ``FQ_REDIS_PASSWORD``
+ * - ``REDIS_PASSWORD``
- empty
- * - ``FQ_REDIS_CLUSTERED``
+ * - ``REDIS_CLUSTERED``
- ``false``
- * - ``FQ_REDIS_UNIX_SOCKET_PATH``
+ * - ``REDIS_UNIX_SOCKET_PATH``
- ``/tmp/redis.sock``
Boolean values
diff --git a/docs/faqs.rst b/docs/faqs.rst
index 4e0a72a..4741bb9 100644
--- a/docs/faqs.rst
+++ b/docs/faqs.rst
@@ -61,9 +61,9 @@ How do I configure job expiry and requeue timing?
Use environment variables:
-* ``FQ_JOB_EXPIRE_INTERVAL`` controls how long a dequeued job can remain active
+* ``JOB_EXPIRE_INTERVAL`` controls how long a dequeued job can remain active
before it is considered expired.
-* ``FQ_JOB_REQUEUE_INTERVAL`` controls how often expired jobs are scanned and
+* ``JOB_REQUEUE_INTERVAL`` controls how often expired jobs are scanned and
placed back onto their queues.
How do I inspect queue depth and throughput?
diff --git a/docs/gettingstarted.rst b/docs/gettingstarted.rst
index ef13237..1f502cc 100644
--- a/docs/gettingstarted.rst
+++ b/docs/gettingstarted.rst
@@ -17,7 +17,7 @@ Start the server
::
PORT=8080 \
- FQ_REDIS_HOST=127.0.0.1 \
+ REDIS_HOST=127.0.0.1 \
uv run uvicorn asgi:app --host 0.0.0.0 --port 8080
Check the root endpoint
@@ -33,8 +33,8 @@ Queue workflow
* Enqueue a job with ``queue_type``, ``queue_id``, ``job_id``, ``interval``, and ``payload``.
* Dequeue work by queue type.
* Finish a dequeued job after processing it successfully.
-* Expired jobs are requeued automatically based on ``FQ_JOB_EXPIRE_INTERVAL`` and
- ``FQ_JOB_REQUEUE_INTERVAL``.
+* Expired jobs are requeued automatically based on ``JOB_EXPIRE_INTERVAL`` and
+ ``JOB_REQUEUE_INTERVAL``.
Examples
--------
diff --git a/docs/internals.rst b/docs/internals.rst
index dffe758..aad21c4 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -26,8 +26,8 @@ multiple server instances do not requeue expired jobs at the same time.
The loop is controlled by:
-* ``FQ_ENABLE_REQUEUE_SCRIPT``
-* ``FQ_JOB_REQUEUE_INTERVAL``
+* ``ENABLE_REQUEUE_SCRIPT``
+* ``JOB_REQUEUE_INTERVAL``
Shutdown
--------
diff --git a/fq_server/settings.py b/fq_server/settings.py
index 8def599..5b82d9a 100644
--- a/fq_server/settings.py
+++ b/fq_server/settings.py
@@ -38,40 +38,40 @@ class QueueServerSettings(BaseSettings):
model_config = SettingsConfigDict(extra="ignore")
job_expire_interval: int = Field(
- default=1000, ge=1, validation_alias="FQ_JOB_EXPIRE_INTERVAL"
+ default=1000, ge=1, validation_alias="JOB_EXPIRE_INTERVAL"
)
job_requeue_interval: int = Field(
- default=1000, ge=1, validation_alias="FQ_JOB_REQUEUE_INTERVAL"
+ default=1000, ge=1, validation_alias="JOB_REQUEUE_INTERVAL"
)
default_job_requeue_limit: int = Field(
- default=-1, ge=-1, validation_alias="FQ_DEFAULT_JOB_REQUEUE_LIMIT"
+ default=-1, ge=-1, validation_alias="DEFAULT_JOB_REQUEUE_LIMIT"
)
enable_requeue_script: bool = Field(
- default=True, validation_alias="FQ_ENABLE_REQUEUE_SCRIPT"
+ default=True, validation_alias="ENABLE_REQUEUE_SCRIPT"
)
- log_level: LogLevelName = Field(default="INFO", validation_alias="FQ_LOG_LEVEL")
+ log_level: LogLevelName = Field(default="INFO", validation_alias="LOG_LEVEL")
- redis_db: int = Field(default=0, ge=0, validation_alias="FQ_REDIS_DB")
+ redis_db: int = Field(default=0, ge=0, validation_alias="REDIS_DB")
redis_key_prefix: str = Field(
- default="fq_server", min_length=1, validation_alias="FQ_REDIS_KEY_PREFIX"
+ default="fq_server", min_length=1, validation_alias="REDIS_KEY_PREFIX"
)
redis_conn_type: Literal["tcp_sock", "unix_sock"] = Field(
- default="tcp_sock", validation_alias="FQ_REDIS_CONN_TYPE"
+ default="tcp_sock", validation_alias="REDIS_CONN_TYPE"
)
redis_host: str = Field(
- default="127.0.0.1", min_length=1, validation_alias="FQ_REDIS_HOST"
+ default="127.0.0.1", min_length=1, validation_alias="REDIS_HOST"
)
redis_port: int = Field(
- default=6379, ge=1, le=65535, validation_alias="FQ_REDIS_PORT"
+ default=6379, ge=1, le=65535, validation_alias="REDIS_PORT"
)
- redis_password: str = Field(default="", validation_alias="FQ_REDIS_PASSWORD")
+ redis_password: str = Field(default="", validation_alias="REDIS_PASSWORD")
redis_clustered: bool = Field(
- default=False, validation_alias="FQ_REDIS_CLUSTERED"
+ default=False, validation_alias="REDIS_CLUSTERED"
)
redis_unix_socket_path: str = Field(
default="/tmp/redis.sock",
min_length=1,
- validation_alias="FQ_REDIS_UNIX_SOCKET_PATH",
+ validation_alias="REDIS_UNIX_SOCKET_PATH",
)
@field_validator("enable_requeue_script", "redis_clustered", mode="before")
diff --git a/tests/test_routes.py b/tests/test_routes.py
index e0ba520..987d4f1 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -50,18 +50,18 @@ def test_build_config_from_env_defaults(self):
def test_build_config_from_env_overrides(self):
config = build_config_from_env(
{
- "FQ_JOB_EXPIRE_INTERVAL": "5000",
- "FQ_JOB_REQUEUE_INTERVAL": "6000",
- "FQ_DEFAULT_JOB_REQUEUE_LIMIT": "5",
- "FQ_ENABLE_REQUEUE_SCRIPT": "false",
- "FQ_REDIS_DB": "2",
- "FQ_REDIS_KEY_PREFIX": "custom_prefix",
- "FQ_REDIS_CONN_TYPE": "unix_sock",
- "FQ_REDIS_HOST": "redis.internal",
- "FQ_REDIS_PORT": "6380",
- "FQ_REDIS_PASSWORD": "secret",
- "FQ_REDIS_CLUSTERED": "true",
- "FQ_REDIS_UNIX_SOCKET_PATH": "/var/run/redis.sock",
+ "JOB_EXPIRE_INTERVAL": "5000",
+ "JOB_REQUEUE_INTERVAL": "6000",
+ "DEFAULT_JOB_REQUEUE_LIMIT": "5",
+ "ENABLE_REQUEUE_SCRIPT": "false",
+ "REDIS_DB": "2",
+ "REDIS_KEY_PREFIX": "custom_prefix",
+ "REDIS_CONN_TYPE": "unix_sock",
+ "REDIS_HOST": "redis.internal",
+ "REDIS_PORT": "6380",
+ "REDIS_PASSWORD": "secret",
+ "REDIS_CLUSTERED": "true",
+ "REDIS_UNIX_SOCKET_PATH": "/var/run/redis.sock",
}
)
self.assertEqual(config["fq"]["job_expire_interval"], 5000)
@@ -80,22 +80,22 @@ def test_build_config_from_env_overrides(self):
)
def test_build_config_from_env_rejects_invalid_values(self):
- with self.assertRaisesRegex(ValueError, "FQ_REDIS_PORT"):
- build_config_from_env({"FQ_REDIS_PORT": "redis"})
+ with self.assertRaisesRegex(ValueError, "REDIS_PORT"):
+ build_config_from_env({"REDIS_PORT": "redis"})
- with self.assertRaisesRegex(ValueError, "FQ_ENABLE_REQUEUE_SCRIPT"):
- build_config_from_env({"FQ_ENABLE_REQUEUE_SCRIPT": "yes"})
+ with self.assertRaisesRegex(ValueError, "ENABLE_REQUEUE_SCRIPT"):
+ build_config_from_env({"ENABLE_REQUEUE_SCRIPT": "yes"})
- with self.assertRaisesRegex(ValueError, "FQ_REDIS_CLUSTERED"):
- build_config_from_env({"FQ_REDIS_CLUSTERED": "1"})
+ with self.assertRaisesRegex(ValueError, "REDIS_CLUSTERED"):
+ build_config_from_env({"REDIS_CLUSTERED": "1"})
def test_queue_server_settings_log_level_override(self):
- settings = QueueServerSettings.from_env({"FQ_LOG_LEVEL": "debug"})
+ settings = QueueServerSettings.from_env({"LOG_LEVEL": "debug"})
self.assertEqual(settings.log_level, "DEBUG")
def test_queue_server_settings_rejects_invalid_log_level(self):
- with self.assertRaisesRegex(ValidationError, "FQ_LOG_LEVEL"):
- QueueServerSettings.from_env({"FQ_LOG_LEVEL": "verbose"})
+ with self.assertRaisesRegex(ValidationError, "LOG_LEVEL"):
+ QueueServerSettings.from_env({"LOG_LEVEL": "verbose"})
class FQServerTestCase(unittest.IsolatedAsyncioTestCase):
From 7a07d75f83d96b0b78cf801c74edaa34ed2b5a7a Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 22:21:12 +0100
Subject: [PATCH 09/15] Updates dependencies and changes default API port
Switches default API port references from 8080 to 8300 in documentation for consistency.
Upgrades core dependencies to newer versions for improved compatibility and reliability.
Transitions from a git-based dependency for the queue library to a versioned PyPI release.
---
README.md | 16 +--
pyproject.toml | 8 +-
uv.lock | 298 ++++++++++++++++++++++++++-----------------------
3 files changed, 168 insertions(+), 154 deletions(-)
diff --git a/README.md b/README.md
index b264b97..7537a32 100644
--- a/README.md
+++ b/README.md
@@ -63,9 +63,9 @@ make redis-up
Run the API:
```bash
-PORT=8080 \
+PORT=8300 \
REDIS_HOST=127.0.0.1 \
-uv run uvicorn asgi:app --host 0.0.0.0 --port 8080
+uv run uvicorn asgi:app --host 0.0.0.0 --port 8300
```
## Docker
@@ -79,18 +79,18 @@ docker compose up --build
## API quick start
```bash
-curl http://127.0.0.1:8080/
+curl http://127.0.0.1:8300/
-curl -X POST http://127.0.0.1:8080/enqueue/sms/user42/ \
+curl -X POST http://127.0.0.1:8300/enqueue/sms/user42/ \
-H "Content-Type: application/json" \
-d '{"job_id":"job-1","payload":{"message":"hi"},"interval":1000}'
-curl http://127.0.0.1:8080/dequeue/sms/
+curl http://127.0.0.1:8300/dequeue/sms/
-curl -X POST http://127.0.0.1:8080/finish/sms/user42/job-1/
+curl -X POST http://127.0.0.1:8300/finish/sms/user42/job-1/
-curl http://127.0.0.1:8080/metrics/
-curl http://127.0.0.1:8080/metrics/sms/user42/
+curl http://127.0.0.1:8300/metrics/
+curl http://127.0.0.1:8300/metrics/sms/user42/
```
## Testing
diff --git a/pyproject.toml b/pyproject.toml
index 5e20652..6f1c12d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,11 +1,11 @@
[project]
-name = "flowdacity-queue-server"
-version = "0.1.0"
-description = "An API queuing server based on the Flowdacity Queue (FQ) library."
+name = "queue-server"
+version = "1.0.0"
+description = "An API queuing server based on the Flowdacity Queue Engine library."
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
- "flowdacity-queue @ git+https://github.com/flowdacity/flowdacity-queue.git@v1.0.0",
+ "flowdacity-queue>=1.0.0",
"httpx>=0.28.1",
"msgpack>=1.1.2",
"pydantic>=2.0.0",
diff --git a/uv.lock b/uv.lock
index cda52c8..152ba8b 100644
--- a/uv.lock
+++ b/uv.lock
@@ -13,24 +13,24 @@ wheels = [
[[package]]
name = "anyio"
-version = "4.12.1"
+version = "4.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
+ { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" },
]
[[package]]
name = "certifi"
-version = "2026.1.4"
+version = "2026.2.25"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" },
]
[[package]]
@@ -56,86 +56,100 @@ wheels = [
[[package]]
name = "coverage"
-version = "7.13.2"
+version = "7.13.5"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ad/49/349848445b0e53660e258acbcc9b0d014895b6739237920886672240f84b/coverage-7.13.2.tar.gz", hash = "sha256:044c6951ec37146b72a50cc81ef02217d27d4c3640efd2640311393cbbf143d3", size = 826523, upload-time = "2026-01-25T13:00:04.889Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/46/39/e92a35f7800222d3f7b2cbb7bbc3b65672ae8d501cb31801b2d2bd7acdf1/coverage-7.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f106b2af193f965d0d3234f3f83fc35278c7fb935dfbde56ae2da3dd2c03b84d", size = 219142, upload-time = "2026-01-25T12:58:00.448Z" },
- { url = "https://files.pythonhosted.org/packages/45/7a/8bf9e9309c4c996e65c52a7c5a112707ecdd9fbaf49e10b5a705a402bbb4/coverage-7.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f45d21dc4d5d6bd29323f0320089ef7eae16e4bef712dff79d184fa7330af3", size = 219503, upload-time = "2026-01-25T12:58:02.451Z" },
- { url = "https://files.pythonhosted.org/packages/87/93/17661e06b7b37580923f3f12406ac91d78aeed293fb6da0b69cc7957582f/coverage-7.13.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:fae91dfecd816444c74531a9c3d6ded17a504767e97aa674d44f638107265b99", size = 251006, upload-time = "2026-01-25T12:58:04.059Z" },
- { url = "https://files.pythonhosted.org/packages/12/f0/f9e59fb8c310171497f379e25db060abef9fa605e09d63157eebec102676/coverage-7.13.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:264657171406c114787b441484de620e03d8f7202f113d62fcd3d9688baa3e6f", size = 253750, upload-time = "2026-01-25T12:58:05.574Z" },
- { url = "https://files.pythonhosted.org/packages/e5/b1/1935e31add2232663cf7edd8269548b122a7d100047ff93475dbaaae673e/coverage-7.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae47d8dcd3ded0155afbb59c62bd8ab07ea0fd4902e1c40567439e6db9dcaf2f", size = 254862, upload-time = "2026-01-25T12:58:07.647Z" },
- { url = "https://files.pythonhosted.org/packages/af/59/b5e97071ec13df5f45da2b3391b6cdbec78ba20757bc92580a5b3d5fa53c/coverage-7.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8a0b33e9fd838220b007ce8f299114d406c1e8edb21336af4c97a26ecfd185aa", size = 251420, upload-time = "2026-01-25T12:58:09.309Z" },
- { url = "https://files.pythonhosted.org/packages/3f/75/9495932f87469d013dc515fb0ce1aac5fa97766f38f6b1a1deb1ee7b7f3a/coverage-7.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b3becbea7f3ce9a2d4d430f223ec15888e4deb31395840a79e916368d6004cce", size = 252786, upload-time = "2026-01-25T12:58:10.909Z" },
- { url = "https://files.pythonhosted.org/packages/6a/59/af550721f0eb62f46f7b8cb7e6f1860592189267b1c411a4e3a057caacee/coverage-7.13.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f819c727a6e6eeb8711e4ce63d78c620f69630a2e9d53bc95ca5379f57b6ba94", size = 250928, upload-time = "2026-01-25T12:58:12.449Z" },
- { url = "https://files.pythonhosted.org/packages/9b/b1/21b4445709aae500be4ab43bbcfb4e53dc0811c3396dcb11bf9f23fd0226/coverage-7.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:4f7b71757a3ab19f7ba286e04c181004c1d61be921795ee8ba6970fd0ec91da5", size = 250496, upload-time = "2026-01-25T12:58:14.047Z" },
- { url = "https://files.pythonhosted.org/packages/ba/b1/0f5d89dfe0392990e4f3980adbde3eb34885bc1effb2dc369e0bf385e389/coverage-7.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b7fc50d2afd2e6b4f6f2f403b70103d280a8e0cb35320cbbe6debcda02a1030b", size = 252373, upload-time = "2026-01-25T12:58:15.976Z" },
- { url = "https://files.pythonhosted.org/packages/01/c9/0cf1a6a57a9968cc049a6b896693faa523c638a5314b1fc374eb2b2ac904/coverage-7.13.2-cp312-cp312-win32.whl", hash = "sha256:292250282cf9bcf206b543d7608bda17ca6fc151f4cbae949fc7e115112fbd41", size = 221696, upload-time = "2026-01-25T12:58:17.517Z" },
- { url = "https://files.pythonhosted.org/packages/4d/05/d7540bf983f09d32803911afed135524570f8c47bb394bf6206c1dc3a786/coverage-7.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:eeea10169fac01549a7921d27a3e517194ae254b542102267bef7a93ed38c40e", size = 222504, upload-time = "2026-01-25T12:58:19.115Z" },
- { url = "https://files.pythonhosted.org/packages/15/8b/1a9f037a736ced0a12aacf6330cdaad5008081142a7070bc58b0f7930cbc/coverage-7.13.2-cp312-cp312-win_arm64.whl", hash = "sha256:2a5b567f0b635b592c917f96b9a9cb3dbd4c320d03f4bf94e9084e494f2e8894", size = 221120, upload-time = "2026-01-25T12:58:21.334Z" },
- { url = "https://files.pythonhosted.org/packages/a7/f0/3d3eac7568ab6096ff23791a526b0048a1ff3f49d0e236b2af6fb6558e88/coverage-7.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed75de7d1217cf3b99365d110975f83af0528c849ef5180a12fd91b5064df9d6", size = 219168, upload-time = "2026-01-25T12:58:23.376Z" },
- { url = "https://files.pythonhosted.org/packages/a3/a6/f8b5cfeddbab95fdef4dcd682d82e5dcff7a112ced57a959f89537ee9995/coverage-7.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97e596de8fa9bada4d88fde64a3f4d37f1b6131e4faa32bad7808abc79887ddc", size = 219537, upload-time = "2026-01-25T12:58:24.932Z" },
- { url = "https://files.pythonhosted.org/packages/7b/e6/8d8e6e0c516c838229d1e41cadcec91745f4b1031d4db17ce0043a0423b4/coverage-7.13.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:68c86173562ed4413345410c9480a8d64864ac5e54a5cda236748031e094229f", size = 250528, upload-time = "2026-01-25T12:58:26.567Z" },
- { url = "https://files.pythonhosted.org/packages/8e/78/befa6640f74092b86961f957f26504c8fba3d7da57cc2ab7407391870495/coverage-7.13.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7be4d613638d678b2b3773b8f687537b284d7074695a43fe2fbbfc0e31ceaed1", size = 253132, upload-time = "2026-01-25T12:58:28.251Z" },
- { url = "https://files.pythonhosted.org/packages/9d/10/1630db1edd8ce675124a2ee0f7becc603d2bb7b345c2387b4b95c6907094/coverage-7.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d7f63ce526a96acd0e16c4af8b50b64334239550402fb1607ce6a584a6d62ce9", size = 254374, upload-time = "2026-01-25T12:58:30.294Z" },
- { url = "https://files.pythonhosted.org/packages/ed/1d/0d9381647b1e8e6d310ac4140be9c428a0277330991e0c35bdd751e338a4/coverage-7.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:406821f37f864f968e29ac14c3fccae0fec9fdeba48327f0341decf4daf92d7c", size = 250762, upload-time = "2026-01-25T12:58:32.036Z" },
- { url = "https://files.pythonhosted.org/packages/43/e4/5636dfc9a7c871ee8776af83ee33b4c26bc508ad6cee1e89b6419a366582/coverage-7.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ee68e5a4e3e5443623406b905db447dceddffee0dceb39f4e0cd9ec2a35004b5", size = 252502, upload-time = "2026-01-25T12:58:33.961Z" },
- { url = "https://files.pythonhosted.org/packages/02/2a/7ff2884d79d420cbb2d12fed6fff727b6d0ef27253140d3cdbbd03187ee0/coverage-7.13.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2ee0e58cca0c17dd9c6c1cdde02bb705c7b3fbfa5f3b0b5afeda20d4ebff8ef4", size = 250463, upload-time = "2026-01-25T12:58:35.529Z" },
- { url = "https://files.pythonhosted.org/packages/91/c0/ba51087db645b6c7261570400fc62c89a16278763f36ba618dc8657a187b/coverage-7.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e5bbb5018bf76a56aabdb64246b5288d5ae1b7d0dd4d0534fe86df2c2992d1c", size = 250288, upload-time = "2026-01-25T12:58:37.226Z" },
- { url = "https://files.pythonhosted.org/packages/03/07/44e6f428551c4d9faf63ebcefe49b30e5c89d1be96f6a3abd86a52da9d15/coverage-7.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a55516c68ef3e08e134e818d5e308ffa6b1337cc8b092b69b24287bf07d38e31", size = 252063, upload-time = "2026-01-25T12:58:38.821Z" },
- { url = "https://files.pythonhosted.org/packages/c2/67/35b730ad7e1859dd57e834d1bc06080d22d2f87457d53f692fce3f24a5a9/coverage-7.13.2-cp313-cp313-win32.whl", hash = "sha256:5b20211c47a8abf4abc3319d8ce2464864fa9f30c5fcaf958a3eed92f4f1fef8", size = 221716, upload-time = "2026-01-25T12:58:40.484Z" },
- { url = "https://files.pythonhosted.org/packages/0d/82/e5fcf5a97c72f45fc14829237a6550bf49d0ab882ac90e04b12a69db76b4/coverage-7.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:14f500232e521201cf031549fb1ebdfc0a40f401cf519157f76c397e586c3beb", size = 222522, upload-time = "2026-01-25T12:58:43.247Z" },
- { url = "https://files.pythonhosted.org/packages/b1/f1/25d7b2f946d239dd2d6644ca2cc060d24f97551e2af13b6c24c722ae5f97/coverage-7.13.2-cp313-cp313-win_arm64.whl", hash = "sha256:9779310cb5a9778a60c899f075a8514c89fa6d10131445c2207fc893e0b14557", size = 221145, upload-time = "2026-01-25T12:58:45Z" },
- { url = "https://files.pythonhosted.org/packages/9e/f7/080376c029c8f76fadfe43911d0daffa0cbdc9f9418a0eead70c56fb7f4b/coverage-7.13.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e64fa5a1e41ce5df6b547cbc3d3699381c9e2c2c369c67837e716ed0f549d48e", size = 219861, upload-time = "2026-01-25T12:58:46.586Z" },
- { url = "https://files.pythonhosted.org/packages/42/11/0b5e315af5ab35f4c4a70e64d3314e4eec25eefc6dec13be3a7d5ffe8ac5/coverage-7.13.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b01899e82a04085b6561eb233fd688474f57455e8ad35cd82286463ba06332b7", size = 220207, upload-time = "2026-01-25T12:58:48.277Z" },
- { url = "https://files.pythonhosted.org/packages/b2/0c/0874d0318fb1062117acbef06a09cf8b63f3060c22265adaad24b36306b7/coverage-7.13.2-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:838943bea48be0e2768b0cf7819544cdedc1bbb2f28427eabb6eb8c9eb2285d3", size = 261504, upload-time = "2026-01-25T12:58:49.904Z" },
- { url = "https://files.pythonhosted.org/packages/83/5e/1cd72c22ecb30751e43a72f40ba50fcef1b7e93e3ea823bd9feda8e51f9a/coverage-7.13.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:93d1d25ec2b27e90bcfef7012992d1f5121b51161b8bffcda756a816cf13c2c3", size = 263582, upload-time = "2026-01-25T12:58:51.582Z" },
- { url = "https://files.pythonhosted.org/packages/9b/da/8acf356707c7a42df4d0657020308e23e5a07397e81492640c186268497c/coverage-7.13.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93b57142f9621b0d12349c43fc7741fe578e4bc914c1e5a54142856cfc0bf421", size = 266008, upload-time = "2026-01-25T12:58:53.234Z" },
- { url = "https://files.pythonhosted.org/packages/41/41/ea1730af99960309423c6ea8d6a4f1fa5564b2d97bd1d29dda4b42611f04/coverage-7.13.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f06799ae1bdfff7ccb8665d75f8291c69110ba9585253de254688aa8a1ccc6c5", size = 260762, upload-time = "2026-01-25T12:58:55.372Z" },
- { url = "https://files.pythonhosted.org/packages/22/fa/02884d2080ba71db64fdc127b311db60e01fe6ba797d9c8363725e39f4d5/coverage-7.13.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7f9405ab4f81d490811b1d91c7a20361135a2df4c170e7f0b747a794da5b7f23", size = 263571, upload-time = "2026-01-25T12:58:57.52Z" },
- { url = "https://files.pythonhosted.org/packages/d2/6b/4083aaaeba9b3112f55ac57c2ce7001dc4d8fa3fcc228a39f09cc84ede27/coverage-7.13.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f9ab1d5b86f8fbc97a5b3cd6280a3fd85fef3b028689d8a2c00918f0d82c728c", size = 261200, upload-time = "2026-01-25T12:58:59.255Z" },
- { url = "https://files.pythonhosted.org/packages/e9/d2/aea92fa36d61955e8c416ede9cf9bf142aa196f3aea214bb67f85235a050/coverage-7.13.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:f674f59712d67e841525b99e5e2b595250e39b529c3bda14764e4f625a3fa01f", size = 260095, upload-time = "2026-01-25T12:59:01.066Z" },
- { url = "https://files.pythonhosted.org/packages/0d/ae/04ffe96a80f107ea21b22b2367175c621da920063260a1c22f9452fd7866/coverage-7.13.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c6cadac7b8ace1ba9144feb1ae3cb787a6065ba6d23ffc59a934b16406c26573", size = 262284, upload-time = "2026-01-25T12:59:02.802Z" },
- { url = "https://files.pythonhosted.org/packages/1c/7a/6f354dcd7dfc41297791d6fb4e0d618acb55810bde2c1fd14b3939e05c2b/coverage-7.13.2-cp313-cp313t-win32.whl", hash = "sha256:14ae4146465f8e6e6253eba0cccd57423e598a4cb925958b240c805300918343", size = 222389, upload-time = "2026-01-25T12:59:04.563Z" },
- { url = "https://files.pythonhosted.org/packages/8d/d5/080ad292a4a3d3daf411574be0a1f56d6dee2c4fdf6b005342be9fac807f/coverage-7.13.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9074896edd705a05769e3de0eac0a8388484b503b68863dd06d5e473f874fd47", size = 223450, upload-time = "2026-01-25T12:59:06.677Z" },
- { url = "https://files.pythonhosted.org/packages/88/96/df576fbacc522e9fb8d1c4b7a7fc62eb734be56e2cba1d88d2eabe08ea3f/coverage-7.13.2-cp313-cp313t-win_arm64.whl", hash = "sha256:69e526e14f3f854eda573d3cf40cffd29a1a91c684743d904c33dbdcd0e0f3e7", size = 221707, upload-time = "2026-01-25T12:59:08.363Z" },
- { url = "https://files.pythonhosted.org/packages/55/53/1da9e51a0775634b04fcc11eb25c002fc58ee4f92ce2e8512f94ac5fc5bf/coverage-7.13.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:387a825f43d680e7310e6f325b2167dd093bc8ffd933b83e9aa0983cf6e0a2ef", size = 219213, upload-time = "2026-01-25T12:59:11.909Z" },
- { url = "https://files.pythonhosted.org/packages/46/35/b3caac3ebbd10230fea5a33012b27d19e999a17c9285c4228b4b2e35b7da/coverage-7.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f0d7fea9d8e5d778cd5a9e8fc38308ad688f02040e883cdc13311ef2748cb40f", size = 219549, upload-time = "2026-01-25T12:59:13.638Z" },
- { url = "https://files.pythonhosted.org/packages/76/9c/e1cf7def1bdc72c1907e60703983a588f9558434a2ff94615747bd73c192/coverage-7.13.2-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e080afb413be106c95c4ee96b4fffdc9e2fa56a8bbf90b5c0918e5c4449412f5", size = 250586, upload-time = "2026-01-25T12:59:15.808Z" },
- { url = "https://files.pythonhosted.org/packages/ba/49/f54ec02ed12be66c8d8897270505759e057b0c68564a65c429ccdd1f139e/coverage-7.13.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a7fc042ba3c7ce25b8a9f097eb0f32a5ce1ccdb639d9eec114e26def98e1f8a4", size = 253093, upload-time = "2026-01-25T12:59:17.491Z" },
- { url = "https://files.pythonhosted.org/packages/fb/5e/aaf86be3e181d907e23c0f61fccaeb38de8e6f6b47aed92bf57d8fc9c034/coverage-7.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0ba505e021557f7f8173ee8cd6b926373d8653e5ff7581ae2efce1b11ef4c27", size = 254446, upload-time = "2026-01-25T12:59:19.752Z" },
- { url = "https://files.pythonhosted.org/packages/28/c8/a5fa01460e2d75b0c853b392080d6829d3ca8b5ab31e158fa0501bc7c708/coverage-7.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7de326f80e3451bd5cc7239ab46c73ddb658fe0b7649476bc7413572d36cd548", size = 250615, upload-time = "2026-01-25T12:59:21.928Z" },
- { url = "https://files.pythonhosted.org/packages/86/0b/6d56315a55f7062bb66410732c24879ccb2ec527ab6630246de5fe45a1df/coverage-7.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:abaea04f1e7e34841d4a7b343904a3f59481f62f9df39e2cd399d69a187a9660", size = 252452, upload-time = "2026-01-25T12:59:23.592Z" },
- { url = "https://files.pythonhosted.org/packages/30/19/9bc550363ebc6b0ea121977ee44d05ecd1e8bf79018b8444f1028701c563/coverage-7.13.2-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9f93959ee0c604bccd8e0697be21de0887b1f73efcc3aa73a3ec0fd13feace92", size = 250418, upload-time = "2026-01-25T12:59:25.392Z" },
- { url = "https://files.pythonhosted.org/packages/1f/53/580530a31ca2f0cc6f07a8f2ab5460785b02bb11bdf815d4c4d37a4c5169/coverage-7.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:13fe81ead04e34e105bf1b3c9f9cdf32ce31736ee5d90a8d2de02b9d3e1bcb82", size = 250231, upload-time = "2026-01-25T12:59:27.888Z" },
- { url = "https://files.pythonhosted.org/packages/e2/42/dd9093f919dc3088cb472893651884bd675e3df3d38a43f9053656dca9a2/coverage-7.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d6d16b0f71120e365741bca2cb473ca6fe38930bc5431c5e850ba949f708f892", size = 251888, upload-time = "2026-01-25T12:59:29.636Z" },
- { url = "https://files.pythonhosted.org/packages/fa/a6/0af4053e6e819774626e133c3d6f70fae4d44884bfc4b126cb647baee8d3/coverage-7.13.2-cp314-cp314-win32.whl", hash = "sha256:9b2f4714bb7d99ba3790ee095b3b4ac94767e1347fe424278a0b10acb3ff04fe", size = 221968, upload-time = "2026-01-25T12:59:31.424Z" },
- { url = "https://files.pythonhosted.org/packages/c4/cc/5aff1e1f80d55862442855517bb8ad8ad3a68639441ff6287dde6a58558b/coverage-7.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:e4121a90823a063d717a96e0a0529c727fb31ea889369a0ee3ec00ed99bf6859", size = 222783, upload-time = "2026-01-25T12:59:33.118Z" },
- { url = "https://files.pythonhosted.org/packages/de/20/09abafb24f84b3292cc658728803416c15b79f9ee5e68d25238a895b07d9/coverage-7.13.2-cp314-cp314-win_arm64.whl", hash = "sha256:6873f0271b4a15a33e7590f338d823f6f66f91ed147a03938d7ce26efd04eee6", size = 221348, upload-time = "2026-01-25T12:59:34.939Z" },
- { url = "https://files.pythonhosted.org/packages/b6/60/a3820c7232db63be060e4019017cd3426751c2699dab3c62819cdbcea387/coverage-7.13.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:f61d349f5b7cd95c34017f1927ee379bfbe9884300d74e07cf630ccf7a610c1b", size = 219950, upload-time = "2026-01-25T12:59:36.624Z" },
- { url = "https://files.pythonhosted.org/packages/fd/37/e4ef5975fdeb86b1e56db9a82f41b032e3d93a840ebaf4064f39e770d5c5/coverage-7.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a43d34ce714f4ca674c0d90beb760eb05aad906f2c47580ccee9da8fe8bfb417", size = 220209, upload-time = "2026-01-25T12:59:38.339Z" },
- { url = "https://files.pythonhosted.org/packages/54/df/d40e091d00c51adca1e251d3b60a8b464112efa3004949e96a74d7c19a64/coverage-7.13.2-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bff1b04cb9d4900ce5c56c4942f047dc7efe57e2608cb7c3c8936e9970ccdbee", size = 261576, upload-time = "2026-01-25T12:59:40.446Z" },
- { url = "https://files.pythonhosted.org/packages/c5/44/5259c4bed54e3392e5c176121af9f71919d96dde853386e7730e705f3520/coverage-7.13.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6ae99e4560963ad8e163e819e5d77d413d331fd00566c1e0856aa252303552c1", size = 263704, upload-time = "2026-01-25T12:59:42.346Z" },
- { url = "https://files.pythonhosted.org/packages/16/bd/ae9f005827abcbe2c70157459ae86053971c9fa14617b63903abbdce26d9/coverage-7.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e79a8c7d461820257d9aa43716c4efc55366d7b292e46b5b37165be1d377405d", size = 266109, upload-time = "2026-01-25T12:59:44.073Z" },
- { url = "https://files.pythonhosted.org/packages/a2/c0/8e279c1c0f5b1eaa3ad9b0fb7a5637fc0379ea7d85a781c0fe0bb3cfc2ab/coverage-7.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:060ee84f6a769d40c492711911a76811b4befb6fba50abb450371abb720f5bd6", size = 260686, upload-time = "2026-01-25T12:59:45.804Z" },
- { url = "https://files.pythonhosted.org/packages/b2/47/3a8112627e9d863e7cddd72894171c929e94491a597811725befdcd76bce/coverage-7.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bca209d001fd03ea2d978f8a4985093240a355c93078aee3f799852c23f561a", size = 263568, upload-time = "2026-01-25T12:59:47.929Z" },
- { url = "https://files.pythonhosted.org/packages/92/bc/7ea367d84afa3120afc3ce6de294fd2dcd33b51e2e7fbe4bbfd200f2cb8c/coverage-7.13.2-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:6b8092aa38d72f091db61ef83cb66076f18f02da3e1a75039a4f218629600e04", size = 261174, upload-time = "2026-01-25T12:59:49.717Z" },
- { url = "https://files.pythonhosted.org/packages/33/b7/f1092dcecb6637e31cc2db099581ee5c61a17647849bae6b8261a2b78430/coverage-7.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:4a3158dc2dcce5200d91ec28cd315c999eebff355437d2765840555d765a6e5f", size = 260017, upload-time = "2026-01-25T12:59:51.463Z" },
- { url = "https://files.pythonhosted.org/packages/2b/cd/f3d07d4b95fbe1a2ef0958c15da614f7e4f557720132de34d2dc3aa7e911/coverage-7.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3973f353b2d70bd9796cc12f532a05945232ccae966456c8ed7034cb96bbfd6f", size = 262337, upload-time = "2026-01-25T12:59:53.407Z" },
- { url = "https://files.pythonhosted.org/packages/e0/db/b0d5b2873a07cb1e06a55d998697c0a5a540dcefbf353774c99eb3874513/coverage-7.13.2-cp314-cp314t-win32.whl", hash = "sha256:79f6506a678a59d4ded048dc72f1859ebede8ec2b9a2d509ebe161f01c2879d3", size = 222749, upload-time = "2026-01-25T12:59:56.316Z" },
- { url = "https://files.pythonhosted.org/packages/e5/2f/838a5394c082ac57d85f57f6aba53093b30d9089781df72412126505716f/coverage-7.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:196bfeabdccc5a020a57d5a368c681e3a6ceb0447d153aeccc1ab4d70a5032ba", size = 223857, upload-time = "2026-01-25T12:59:58.201Z" },
- { url = "https://files.pythonhosted.org/packages/44/d4/b608243e76ead3a4298824b50922b89ef793e50069ce30316a65c1b4d7ef/coverage-7.13.2-cp314-cp314t-win_arm64.whl", hash = "sha256:69269ab58783e090bfbf5b916ab3d188126e22d6070bbfc93098fdd474ef937c", size = 221881, upload-time = "2026-01-25T13:00:00.449Z" },
- { url = "https://files.pythonhosted.org/packages/d2/db/d291e30fdf7ea617a335531e72294e0c723356d7fdde8fba00610a76bda9/coverage-7.13.2-py3-none-any.whl", hash = "sha256:40ce1ea1e25125556d8e76bd0b61500839a07944cc287ac21d5626f3e620cad5", size = 210943, upload-time = "2026-01-25T13:00:02.388Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" },
+ { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" },
+ { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" },
+ { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" },
+ { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" },
+ { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" },
+ { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" },
+ { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" },
+ { url = "https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" },
+ { url = "https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, upload-time = "2026-03-17T10:31:17.89Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time = "2026-03-17T10:31:23.565Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" },
+ { url = "https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" },
+ { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" },
+ { url = "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" },
+ { url = "https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" },
+ { url = "https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" },
+ { url = "https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" },
+ { url = "https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, upload-time = "2026-03-17T10:31:48.293Z" },
+ { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" },
+ { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, upload-time = "2026-03-17T10:31:53.872Z" },
+ { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" },
+ { url = "https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" },
+ { url = "https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = "2026-03-17T10:32:02.246Z" },
+ { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" },
+ { url = "https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" },
+ { url = "https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" },
+ { url = "https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, upload-time = "2026-03-17T10:32:19.004Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time = "2026-03-17T10:32:25.516Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" },
+ { url = "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" },
+ { url = "https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" },
+ { url = "https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" },
+ { url = "https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, upload-time = "2026-03-17T10:32:52.391Z" },
+ { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" },
+ { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, upload-time = "2026-03-17T10:32:59.466Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" },
+ { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" },
+ { url = "https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" },
+ { url = "https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = "2026-03-17T10:33:08.756Z" },
+ { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" },
+ { url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" },
]
[[package]]
name = "flowdacity-queue"
version = "1.0.0"
-source = { git = "https://github.com/flowdacity/flowdacity-queue.git?rev=v1.0.0#261362734fb2126ffd95e3e33739a0dc79c8aa81" }
+source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "msgpack" },
{ name = "redis", extra = ["hiredis"] },
]
+sdist = { url = "https://files.pythonhosted.org/packages/c3/0e/2b0d6c989030e4cca98388fd11976d139867453ef231758bd6d5145577f8/flowdacity_queue-1.0.0.tar.gz", hash = "sha256:1cbe255ec23354048e03487342b90ae0be6bf923b4ce8c0dbae39d4970134e38", size = 11771, upload-time = "2026-03-24T17:44:24.427Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a8/47/1cf9ffebeb28af6115a640e9e35412fad632c1025b115ba30e8a68095731/flowdacity_queue-1.0.0-py3-none-any.whl", hash = "sha256:02f95cfb8db6f532c22de0cfeded9bb90a45da0e249526f8c70bb16a61fcee5a", size = 15407, upload-time = "2026-03-24T17:44:23.103Z" },
+]
[[package]]
name = "flowdacity-queue-server"
@@ -161,7 +175,7 @@ dev = [
[package.metadata]
requires-dist = [
- { name = "flowdacity-queue", git = "https://github.com/flowdacity/flowdacity-queue.git?rev=v1.0.0" },
+ { name = "flowdacity-queue", specifier = ">=1.0.0" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "msgpack", specifier = ">=1.1.2" },
{ name = "pydantic", specifier = ">=2.0.0" },
@@ -189,62 +203,62 @@ wheels = [
[[package]]
name = "hiredis"
-version = "3.3.0"
+version = "3.3.1"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/65/82/d2817ce0653628e0a0cb128533f6af0dd6318a49f3f3a6a7bd1f2f2154af/hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685", size = 89048, upload-time = "2025-10-14T16:33:34.263Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/97/d6/9bef6dc3052c168c93fbf7e6c0f2b12c45f0f741a2d30fd919096774343a/hiredis-3.3.1.tar.gz", hash = "sha256:da6f0302360e99d32bc2869772692797ebadd536e1b826d0103c72ba49d38698", size = 89101, upload-time = "2026-03-16T15:21:08.092Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/48/1c/ed28ae5d704f5c7e85b946fa327f30d269e6272c847fef7e91ba5fc86193/hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f", size = 82026, upload-time = "2025-10-14T16:32:12.004Z" },
- { url = "https://files.pythonhosted.org/packages/f4/9b/79f30c5c40e248291023b7412bfdef4ad9a8a92d9e9285d65d600817dac7/hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783", size = 46217, upload-time = "2025-10-14T16:32:13.133Z" },
- { url = "https://files.pythonhosted.org/packages/e7/c3/02b9ed430ad9087aadd8afcdf616717452d16271b701fa47edfe257b681e/hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271", size = 41858, upload-time = "2025-10-14T16:32:13.98Z" },
- { url = "https://files.pythonhosted.org/packages/f1/98/b2a42878b82130a535c7aa20bc937ba2d07d72e9af3ad1ad93e837c419b5/hiredis-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a95cef9989736ac313639f8f545b76b60b797e44e65834aabbb54e4fad8d6c8", size = 170195, upload-time = "2025-10-14T16:32:14.728Z" },
- { url = "https://files.pythonhosted.org/packages/66/1d/9dcde7a75115d3601b016113d9b90300726fa8e48aacdd11bf01a453c145/hiredis-3.3.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca2802934557ccc28a954414c245ba7ad904718e9712cb67c05152cf6b9dd0a3", size = 181808, upload-time = "2025-10-14T16:32:15.622Z" },
- { url = "https://files.pythonhosted.org/packages/56/a1/60f6bda9b20b4e73c85f7f5f046bc2c154a5194fc94eb6861e1fd97ced52/hiredis-3.3.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fe730716775f61e76d75810a38ee4c349d3af3896450f1525f5a4034cf8f2ed7", size = 180578, upload-time = "2025-10-14T16:32:16.514Z" },
- { url = "https://files.pythonhosted.org/packages/d9/01/859d21de65085f323a701824e23ea3330a0ac05f8e184544d7aa5c26128d/hiredis-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:749faa69b1ce1f741f5eaf743435ac261a9262e2d2d66089192477e7708a9abc", size = 172508, upload-time = "2025-10-14T16:32:17.411Z" },
- { url = "https://files.pythonhosted.org/packages/99/a8/28fd526e554c80853d0fbf57ef2a3235f00e4ed34ce0e622e05d27d0f788/hiredis-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:95c9427f2ac3f1dd016a3da4e1161fa9d82f221346c8f3fdd6f3f77d4e28946c", size = 166341, upload-time = "2025-10-14T16:32:18.561Z" },
- { url = "https://files.pythonhosted.org/packages/f2/91/ded746b7d2914f557fbbf77be55e90d21f34ba758ae10db6591927c642c8/hiredis-3.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c863ee44fe7bff25e41f3a5105c936a63938b76299b802d758f40994ab340071", size = 176765, upload-time = "2025-10-14T16:32:19.491Z" },
- { url = "https://files.pythonhosted.org/packages/d6/4c/04aa46ff386532cb5f08ee495c2bf07303e93c0acf2fa13850e031347372/hiredis-3.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2213c7eb8ad5267434891f3241c7776e3bafd92b5933fc57d53d4456247dc542", size = 170312, upload-time = "2025-10-14T16:32:20.404Z" },
- { url = "https://files.pythonhosted.org/packages/90/6e/67f9d481c63f542a9cf4c9f0ea4e5717db0312fb6f37fb1f78f3a66de93c/hiredis-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a172bae3e2837d74530cd60b06b141005075db1b814d966755977c69bd882ce8", size = 167965, upload-time = "2025-10-14T16:32:21.259Z" },
- { url = "https://files.pythonhosted.org/packages/7a/df/dde65144d59c3c0d85e43255798f1fa0c48d413e668cfd92b3d9f87924ef/hiredis-3.3.0-cp312-cp312-win32.whl", hash = "sha256:cb91363b9fd6d41c80df9795e12fffbaf5c399819e6ae8120f414dedce6de068", size = 20533, upload-time = "2025-10-14T16:32:22.192Z" },
- { url = "https://files.pythonhosted.org/packages/f5/a9/55a4ac9c16fdf32e92e9e22c49f61affe5135e177ca19b014484e28950f7/hiredis-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:04ec150e95eea3de9ff8bac754978aa17b8bf30a86d4ab2689862020945396b0", size = 22379, upload-time = "2025-10-14T16:32:22.916Z" },
- { url = "https://files.pythonhosted.org/packages/6d/39/2b789ebadd1548ccb04a2c18fbc123746ad1a7e248b7f3f3cac618ca10a6/hiredis-3.3.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:b7048b4ec0d5dddc8ddd03da603de0c4b43ef2540bf6e4c54f47d23e3480a4fa", size = 82035, upload-time = "2025-10-14T16:32:23.715Z" },
- { url = "https://files.pythonhosted.org/packages/85/74/4066d9c1093be744158ede277f2a0a4e4cd0fefeaa525c79e2876e9e5c72/hiredis-3.3.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:e5f86ce5a779319c15567b79e0be806e8e92c18bb2ea9153e136312fafa4b7d6", size = 46219, upload-time = "2025-10-14T16:32:24.554Z" },
- { url = "https://files.pythonhosted.org/packages/fa/3f/f9e0f6d632f399d95b3635703e1558ffaa2de3aea4cfcbc2d7832606ba43/hiredis-3.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fbdb97a942e66016fff034df48a7a184e2b7dc69f14c4acd20772e156f20d04b", size = 41860, upload-time = "2025-10-14T16:32:25.356Z" },
- { url = "https://files.pythonhosted.org/packages/4a/c5/b7dde5ec390dabd1cabe7b364a509c66d4e26de783b0b64cf1618f7149fc/hiredis-3.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0fb4bea72fe45ff13e93ddd1352b43ff0749f9866263b5cca759a4c960c776f", size = 170094, upload-time = "2025-10-14T16:32:26.148Z" },
- { url = "https://files.pythonhosted.org/packages/3e/d6/7f05c08ee74d41613be466935688068e07f7b6c55266784b5ace7b35b766/hiredis-3.3.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:85b9baf98050e8f43c2826ab46aaf775090d608217baf7af7882596aef74e7f9", size = 181746, upload-time = "2025-10-14T16:32:27.844Z" },
- { url = "https://files.pythonhosted.org/packages/0e/d2/aaf9f8edab06fbf5b766e0cae3996324297c0516a91eb2ca3bd1959a0308/hiredis-3.3.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69079fb0f0ebb61ba63340b9c4bce9388ad016092ca157e5772eb2818209d930", size = 180465, upload-time = "2025-10-14T16:32:29.185Z" },
- { url = "https://files.pythonhosted.org/packages/8d/1e/93ded8b9b484519b211fc71746a231af98c98928e3ebebb9086ed20bb1ad/hiredis-3.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17f77b79031ea4b0967d30255d2ae6e7df0603ee2426ad3274067f406938236", size = 172419, upload-time = "2025-10-14T16:32:30.059Z" },
- { url = "https://files.pythonhosted.org/packages/68/13/02880458e02bbfcedcaabb8f7510f9dda1c89d7c1921b1bb28c22bb38cbf/hiredis-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d14f745fc177bc05fc24bdf20e2b515e9a068d3d4cce90a0fb78d04c9c9d9a", size = 166400, upload-time = "2025-10-14T16:32:31.173Z" },
- { url = "https://files.pythonhosted.org/packages/11/60/896e03267670570f19f61dc65a2137fcb2b06e83ab0911d58eeec9f3cb88/hiredis-3.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ba063fdf1eff6377a0c409609cbe890389aefddfec109c2d20fcc19cfdafe9da", size = 176845, upload-time = "2025-10-14T16:32:32.12Z" },
- { url = "https://files.pythonhosted.org/packages/f1/90/a1d4bd0cdcf251fda72ac0bd932f547b48ad3420f89bb2ef91bf6a494534/hiredis-3.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1799cc66353ad066bfdd410135c951959da9f16bcb757c845aab2f21fc4ef099", size = 170365, upload-time = "2025-10-14T16:32:33.035Z" },
- { url = "https://files.pythonhosted.org/packages/f1/9a/7c98f7bb76bdb4a6a6003cf8209721f083e65d2eed2b514f4a5514bda665/hiredis-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2cbf71a121996ffac82436b6153290815b746afb010cac19b3290a1644381b07", size = 168022, upload-time = "2025-10-14T16:32:34.81Z" },
- { url = "https://files.pythonhosted.org/packages/0d/ca/672ee658ffe9525558615d955b554ecd36aa185acd4431ccc9701c655c9b/hiredis-3.3.0-cp313-cp313-win32.whl", hash = "sha256:a7cbbc6026bf03659f0b25e94bbf6e64f6c8c22f7b4bc52fe569d041de274194", size = 20533, upload-time = "2025-10-14T16:32:35.7Z" },
- { url = "https://files.pythonhosted.org/packages/20/93/511fd94f6a7b6d72a4cf9c2b159bf3d780585a9a1dca52715dd463825299/hiredis-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:a8def89dd19d4e2e4482b7412d453dec4a5898954d9a210d7d05f60576cedef6", size = 22387, upload-time = "2025-10-14T16:32:36.441Z" },
- { url = "https://files.pythonhosted.org/packages/aa/b3/b948ee76a6b2bc7e45249861646f91f29704f743b52565cf64cee9c4658b/hiredis-3.3.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c135bda87211f7af9e2fd4e046ab433c576cd17b69e639a0f5bb2eed5e0e71a9", size = 82105, upload-time = "2025-10-14T16:32:37.204Z" },
- { url = "https://files.pythonhosted.org/packages/a2/9b/4210f4ebfb3ab4ada964b8de08190f54cbac147198fb463cd3c111cc13e0/hiredis-3.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2f855c678230aed6fc29b962ce1cc67e5858a785ef3a3fd6b15dece0487a2e60", size = 46237, upload-time = "2025-10-14T16:32:38.07Z" },
- { url = "https://files.pythonhosted.org/packages/b3/7a/e38bfd7d04c05036b4ccc6f42b86b1032185cf6ae426e112a97551fece14/hiredis-3.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4059c78a930cbb33c391452ccce75b137d6f89e2eebf6273d75dafc5c2143c03", size = 41894, upload-time = "2025-10-14T16:32:38.929Z" },
- { url = "https://files.pythonhosted.org/packages/28/d3/eae43d9609c5d9a6effef0586ee47e13a0d84b44264b688d97a75cd17ee5/hiredis-3.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:334a3f1d14c253bb092e187736c3384203bd486b244e726319bbb3f7dffa4a20", size = 170486, upload-time = "2025-10-14T16:32:40.147Z" },
- { url = "https://files.pythonhosted.org/packages/c3/fd/34d664554880b27741ab2916d66207357563b1639e2648685f4c84cfb755/hiredis-3.3.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd137b147235447b3d067ec952c5b9b95ca54b71837e1b38dbb2ec03b89f24fc", size = 182031, upload-time = "2025-10-14T16:32:41.06Z" },
- { url = "https://files.pythonhosted.org/packages/08/a3/0c69fdde3f4155b9f7acc64ccffde46f312781469260061b3bbaa487fd34/hiredis-3.3.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8f88f4f2aceb73329ece86a1cb0794fdbc8e6d614cb5ca2d1023c9b7eb432db8", size = 180542, upload-time = "2025-10-14T16:32:42.993Z" },
- { url = "https://files.pythonhosted.org/packages/68/7a/ad5da4d7bc241e57c5b0c4fe95aa75d1f2116e6e6c51577394d773216e01/hiredis-3.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:550f4d1538822fc75ebf8cf63adc396b23d4958bdbbad424521f2c0e3dfcb169", size = 172353, upload-time = "2025-10-14T16:32:43.965Z" },
- { url = "https://files.pythonhosted.org/packages/4b/dc/c46eace64eb047a5b31acd5e4b0dc6d2f0390a4a3f6d507442d9efa570ad/hiredis-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:54b14211fbd5930fc696f6fcd1f1f364c660970d61af065a80e48a1fa5464dd6", size = 166435, upload-time = "2025-10-14T16:32:44.97Z" },
- { url = "https://files.pythonhosted.org/packages/4a/ac/ad13a714e27883a2e4113c980c94caf46b801b810de5622c40f8d3e8335f/hiredis-3.3.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9e96f63dbc489fc86f69951e9f83dadb9582271f64f6822c47dcffa6fac7e4a", size = 177218, upload-time = "2025-10-14T16:32:45.936Z" },
- { url = "https://files.pythonhosted.org/packages/c2/38/268fabd85b225271fe1ba82cb4a484fcc1bf922493ff2c74b400f1a6f339/hiredis-3.3.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:106e99885d46684d62ab3ec1d6b01573cc0e0083ac295b11aaa56870b536c7ec", size = 170477, upload-time = "2025-10-14T16:32:46.898Z" },
- { url = "https://files.pythonhosted.org/packages/20/6b/02bb8af810ea04247334ab7148acff7a61c08a8832830c6703f464be83a9/hiredis-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:087e2ef3206361281b1a658b5b4263572b6ba99465253e827796964208680459", size = 167915, upload-time = "2025-10-14T16:32:47.847Z" },
- { url = "https://files.pythonhosted.org/packages/83/94/901fa817e667b2e69957626395e6dee416e31609dca738f28e6b545ca6c2/hiredis-3.3.0-cp314-cp314-win32.whl", hash = "sha256:80638ebeab1cefda9420e9fedc7920e1ec7b4f0513a6b23d58c9d13c882f8065", size = 21165, upload-time = "2025-10-14T16:32:50.753Z" },
- { url = "https://files.pythonhosted.org/packages/b1/7e/4881b9c1d0b4cdaba11bd10e600e97863f977ea9d67c5988f7ec8cd363e5/hiredis-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a68aaf9ba024f4e28cf23df9196ff4e897bd7085872f3a30644dca07fa787816", size = 22996, upload-time = "2025-10-14T16:32:51.543Z" },
- { url = "https://files.pythonhosted.org/packages/a7/b6/d7e6c17da032665a954a89c1e6ee3bd12cb51cd78c37527842b03519981d/hiredis-3.3.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:f7f80442a32ce51ee5d89aeb5a84ee56189a0e0e875f1a57bbf8d462555ae48f", size = 83034, upload-time = "2025-10-14T16:32:52.395Z" },
- { url = "https://files.pythonhosted.org/packages/27/6c/6751b698060cdd1b2d8427702cff367c9ed7a1705bcf3792eb5b896f149b/hiredis-3.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a1a67530da714954ed50579f4fe1ab0ddbac9c43643b1721c2cb226a50dde263", size = 46701, upload-time = "2025-10-14T16:32:53.572Z" },
- { url = "https://files.pythonhosted.org/packages/ce/8e/20a5cf2c83c7a7e08c76b9abab113f99f71cd57468a9c7909737ce6e9bf8/hiredis-3.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:616868352e47ab355559adca30f4f3859f9db895b4e7bc71e2323409a2add751", size = 42381, upload-time = "2025-10-14T16:32:54.762Z" },
- { url = "https://files.pythonhosted.org/packages/be/0a/547c29c06e8c9c337d0df3eec39da0cf1aad701daf8a9658dd37f25aca66/hiredis-3.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e799b79f3150083e9702fc37e6243c0bd47a443d6eae3f3077b0b3f510d6a145", size = 180313, upload-time = "2025-10-14T16:32:55.644Z" },
- { url = "https://files.pythonhosted.org/packages/89/8a/488de5469e3d0921a1c425045bf00e983d48b2111a90e47cf5769eaa536c/hiredis-3.3.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ef1dfb0d2c92c3701655e2927e6bbe10c499aba632c7ea57b6392516df3864b", size = 190488, upload-time = "2025-10-14T16:32:56.649Z" },
- { url = "https://files.pythonhosted.org/packages/b5/59/8493edc3eb9ae0dbea2b2230c2041a52bc03e390b02ffa3ac0bca2af9aea/hiredis-3.3.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c290da6bc2a57e854c7da9956cd65013483ede935677e84560da3b848f253596", size = 189210, upload-time = "2025-10-14T16:32:57.759Z" },
- { url = "https://files.pythonhosted.org/packages/f0/de/8c9a653922057b32fb1e2546ecd43ef44c9aa1a7cf460c87cae507eb2bc7/hiredis-3.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd8c438d9e1728f0085bf9b3c9484d19ec31f41002311464e75b69550c32ffa8", size = 180972, upload-time = "2025-10-14T16:32:58.737Z" },
- { url = "https://files.pythonhosted.org/packages/e4/a3/51e6e6afaef2990986d685ca6e254ffbd191f1635a59b2d06c9e5d10c8a2/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1bbc6b8a88bbe331e3ebf6685452cebca6dfe6d38a6d4efc5651d7e363ba28bd", size = 175315, upload-time = "2025-10-14T16:32:59.774Z" },
- { url = "https://files.pythonhosted.org/packages/96/54/e436312feb97601f70f8b39263b8da5ac4a5d18305ebdfb08ad7621f6119/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:55d8c18fe9a05496c5c04e6eccc695169d89bf358dff964bcad95696958ec05f", size = 185653, upload-time = "2025-10-14T16:33:00.749Z" },
- { url = "https://files.pythonhosted.org/packages/ed/a3/88e66030d066337c6c0f883a912c6d4b2d6d7173490fbbc113a6cbe414ff/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:4ddc79afa76b805d364e202a754666cb3c4d9c85153cbfed522871ff55827838", size = 179032, upload-time = "2025-10-14T16:33:01.711Z" },
- { url = "https://files.pythonhosted.org/packages/bc/1f/fb7375467e9adaa371cd617c2984fefe44bdce73add4c70b8dd8cab1b33a/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e8a4b8540581dcd1b2b25827a54cfd538e0afeaa1a0e3ca87ad7126965981cc", size = 176127, upload-time = "2025-10-14T16:33:02.793Z" },
- { url = "https://files.pythonhosted.org/packages/66/14/0dc2b99209c400f3b8f24067273e9c3cb383d894e155830879108fb19e98/hiredis-3.3.0-cp314-cp314t-win32.whl", hash = "sha256:298593bb08487753b3afe6dc38bac2532e9bac8dcee8d992ef9977d539cc6776", size = 22024, upload-time = "2025-10-14T16:33:03.812Z" },
- { url = "https://files.pythonhosted.org/packages/b2/2f/8a0befeed8bbe142d5a6cf3b51e8cbe019c32a64a596b0ebcbc007a8f8f1/hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc", size = 23808, upload-time = "2025-10-14T16:33:04.965Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/1d/1a7d925d886211948ab9cca44221b1d9dd4d3481d015511e98794e37d369/hiredis-3.3.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:60543f3b068b16a86e99ed96b7fdae71cdc1d8abdfe9b3f82032a555e52ece7e", size = 82023, upload-time = "2026-03-16T15:19:34.157Z" },
+ { url = "https://files.pythonhosted.org/packages/13/2f/a6017fe1db47cd63a4aefc0dd21dd4dcb0c4e857bfbcfaa27329745f24a3/hiredis-3.3.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2611bfaaadc5e8d43fb7967f9bbf1110c8beaa83aee2f2d812c76f11cfb56c6a", size = 46215, upload-time = "2026-03-16T15:19:35.068Z" },
+ { url = "https://files.pythonhosted.org/packages/77/4b/35a71d088c6934e162aa81c7e289fa3110a3aca84ab695d88dbd488c74a2/hiredis-3.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e3754ce60e1b11b0afad9a053481ff184d2ee24bea47099107156d1b84a84aa", size = 41861, upload-time = "2026-03-16T15:19:36.32Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/54/904bc723a95926977764fefd6f0d46067579bac38fffc32b806f3f2c05c0/hiredis-3.3.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e89dabf436ee79b358fd970dcbed6333a36d91db73f27069ca24a02fb138a404", size = 170196, upload-time = "2026-03-16T15:19:37.274Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/01/4e840cd4cb53c28578234708b08fb9ec9e41c2880acc0e269a7264e1b3af/hiredis-3.3.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4f7e242eab698ad0be5a4b2ec616fa856569c57455cc67c625fd567726290e5f", size = 181808, upload-time = "2026-03-16T15:19:38.637Z" },
+ { url = "https://files.pythonhosted.org/packages/87/0d/fc845f06f8203ab76c401d4d2b97f9fb768e644b053a40f441f7dcc71f2d/hiredis-3.3.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53148a4e21057541b6d8e493b2ea1b500037ddf34433c391970036f3cbce00e3", size = 180577, upload-time = "2026-03-16T15:19:39.749Z" },
+ { url = "https://files.pythonhosted.org/packages/52/3a/859afe2620666bf6d58eb977870c47d98af4999d473b50528b323918f3f7/hiredis-3.3.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c25132902d3eff38781e0d54f27a0942ec849e3c07dbdce83c4d92b7e43c8dce", size = 172507, upload-time = "2026-03-16T15:19:40.87Z" },
+ { url = "https://files.pythonhosted.org/packages/60/a8/004349708ad8bf0d188d46049f846d3fe2d4a7a8d0d5a6a8ba024017d8b3/hiredis-3.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3fb6573efa15a29c12c0c0f7170b14e7c1347fe4bb39b6a15b779f46015cc929", size = 166339, upload-time = "2026-03-16T15:19:41.912Z" },
+ { url = "https://files.pythonhosted.org/packages/c3/fb/bfc6df29381830c99bfd9e97ed3b6d75d9303866a28c23d51ab8c50f63e3/hiredis-3.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:487658e1db83c1ee9fbbac6a43039ea76957767a5987ffb16b590613f9e68297", size = 176766, upload-time = "2026-03-16T15:19:42.981Z" },
+ { url = "https://files.pythonhosted.org/packages/53/e7/f54aaad4559a413ec8b1043a89567a5a1f898426e4091b9af5e0f2120371/hiredis-3.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a1d190790ee39b8b7adeeb10fc4090dc4859eb4e75ed27bd8108710eef18f358", size = 170313, upload-time = "2026-03-16T15:19:44.082Z" },
+ { url = "https://files.pythonhosted.org/packages/60/51/b80394db4c74d4cba342fa4208f690a2739c16f1125c2a62ba1701b8e2b7/hiredis-3.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a42c7becd4c9ec4ab5769c754eb61112777bdc6e1c1525e2077389e193b5f5aa", size = 167964, upload-time = "2026-03-16T15:19:45.237Z" },
+ { url = "https://files.pythonhosted.org/packages/47/ef/5e438d1e058be57cdc1bafc1b1ec8ab43cc890c61447e88f8b878a0e32c3/hiredis-3.3.1-cp312-cp312-win32.whl", hash = "sha256:17ec8b524055a88b80d76c177dbbbe475a25c17c5bf4b67bdbdbd0629bcae838", size = 20532, upload-time = "2026-03-16T15:19:46.233Z" },
+ { url = "https://files.pythonhosted.org/packages/e9/c6/39994b9c5646e7bf7d5e92170c07fd5f224ae9f34d95ff202f31845eb94b/hiredis-3.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:0fac4af8515e6cca74fc701169ae4dc9a71a90e9319c9d21006ec9454b43aa2f", size = 22381, upload-time = "2026-03-16T15:19:47.082Z" },
+ { url = "https://files.pythonhosted.org/packages/d8/4b/c7f4d6d6643622f296395269e24b02c69d4ac72822f052b8cae16fa3af03/hiredis-3.3.1-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:afe3c3863f16704fb5d7c2c6ff56aaf9e054f6d269f7b4c9074c5476178d1aba", size = 82027, upload-time = "2026-03-16T15:19:48.002Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/45/198be960a7443d6eb5045751e929480929c0defbca316ce1a47d15187330/hiredis-3.3.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:f19ee7dc1ef8a6497570d91fa4057ba910ad98297a50b8c44ff37589f7c89d17", size = 46220, upload-time = "2026-03-16T15:19:48.953Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/a4/6ab925177f289830008dbe1488a9858675e2e234f48c9c1653bd4d0eaddc/hiredis-3.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:09f5e510f637f2c72d2a79fb3ad05f7b6211e057e367ca5c4f97bb3d8c9d71f4", size = 41858, upload-time = "2026-03-16T15:19:49.939Z" },
+ { url = "https://files.pythonhosted.org/packages/fe/c8/a0ddbb9e9c27fcb0022f7b7e93abc75727cb634c6a5273ca5171033dac78/hiredis-3.3.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b46e96b50dad03495447860510daebd2c96fd44ed25ba8ccb03e9f89eaa9d34", size = 170095, upload-time = "2026-03-16T15:19:51.216Z" },
+ { url = "https://files.pythonhosted.org/packages/94/06/618d509cc454912028f71995f3dd6eb54606f0aa8163ff79c5b7ec1f2bda/hiredis-3.3.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b4fe7f38aa8956fcc1cea270e62601e0e11066aff78e384be70fd283d30293b6", size = 181745, upload-time = "2026-03-16T15:19:52.72Z" },
+ { url = "https://files.pythonhosted.org/packages/06/14/75b2deb62a61fc75a41ce1a6a781fe239133bbc88fef404d32a148ad152a/hiredis-3.3.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b96da7e365d6488d2a75266a662cbe3cc14b28c23dd9b0c9aa04b5bc5c20192", size = 180465, upload-time = "2026-03-16T15:19:53.847Z" },
+ { url = "https://files.pythonhosted.org/packages/7e/8c/8e03dcbfde8e2ca3f880fce06ad0877b3f098ed5fdfb17cf3b821a32323a/hiredis-3.3.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52d5641027d6731bc7b5e7d126a5158a99784a9f8c6de3d97ca89aca4969e9f8", size = 172419, upload-time = "2026-03-16T15:19:54.959Z" },
+ { url = "https://files.pythonhosted.org/packages/03/05/843005d68403a3805309075efc6638360a3ababa6cb4545163bf80c8e7f7/hiredis-3.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eddeb9a153795cf6e615f9f3cef66a1d573ff3b6ee16df2b10d1d1c2f2baeaa8", size = 166398, upload-time = "2026-03-16T15:19:56.36Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/23/abe2476244fd792f5108009ec0ae666eaa5b2165ca19f2e86638d8324ac9/hiredis-3.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:011a9071c3df4885cac7f58a2623feac6c8e2ad30e6ba93c55195af05ce61ff5", size = 176844, upload-time = "2026-03-16T15:19:57.462Z" },
+ { url = "https://files.pythonhosted.org/packages/c6/47/e1cdccc559b98e548bcff0868c3938d375663418c0adca465895ee1f72e7/hiredis-3.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:264ee7e9cb6c30dc78da4ecf71d74cf14ca122817c665d838eda8b4384bce1b0", size = 170366, upload-time = "2026-03-16T15:19:58.548Z" },
+ { url = "https://files.pythonhosted.org/packages/a2/e1/fda8325f51d06877e8e92500b15d4aff3855b4c3c91dbd9636a82e4591f2/hiredis-3.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d1434d0bcc1b3ef048bae53f26456405c08aeed9827e65b24094f5f3a6793f1", size = 168023, upload-time = "2026-03-16T15:19:59.727Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/21/2839d1625095989c116470e2b6841bbe1a2a5509585e82a4f3f5cd47f511/hiredis-3.3.1-cp313-cp313-win32.whl", hash = "sha256:f915a34fb742e23d0d61573349aa45d6f74037fde9d58a9f340435eff8d62736", size = 20535, upload-time = "2026-03-16T15:20:00.938Z" },
+ { url = "https://files.pythonhosted.org/packages/84/f9/534c2a89b24445a9a9623beb4697fd72b8c8f16286f6f3bda012c7af004a/hiredis-3.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:d8e56e0d1fe607bfff422633f313aec9191c3859ab99d11ff097e3e6e068000c", size = 22383, upload-time = "2026-03-16T15:20:01.865Z" },
+ { url = "https://files.pythonhosted.org/packages/03/72/0450d6b449da58120c5497346eb707738f8f67b9e60c28a8ef90133fc81f/hiredis-3.3.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:439f9a5cc8f9519ce208a24cdebfa0440fef26aa682a40ba2c92acb10a53f5e0", size = 82112, upload-time = "2026-03-16T15:20:02.865Z" },
+ { url = "https://files.pythonhosted.org/packages/22/c0/0be33a29bcd463e6cbb0282515dd4d0cdfe33c30c7afc6d4d8c460e23266/hiredis-3.3.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3724f0e58c6ff76fd683429945491de71324ab1bc0ad943a8d68cb0932d24075", size = 46238, upload-time = "2026-03-16T15:20:03.896Z" },
+ { url = "https://files.pythonhosted.org/packages/62/f2/f999854bfaf3bcbee0f797f24706c182ecfaca825f6a582f6281a6aa97e0/hiredis-3.3.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29fe35e3c6fe03204e75c86514f452591957a1e06b05d86e10d795455b71c355", size = 41891, upload-time = "2026-03-16T15:20:04.939Z" },
+ { url = "https://files.pythonhosted.org/packages/f2/c8/cd9ab90fec3a301d864d8ab6167aea387add8e2287969d89cbcd45d6b0e0/hiredis-3.3.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d42f3a13290f89191568fc113d95a3d2c8759cdd8c3672f021d8b7436f909e75", size = 170485, upload-time = "2026-03-16T15:20:06.284Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/9a/1ddf9ea236a292963146cbaf6722abeb9d503ca47d821267bb8b3b81c4f7/hiredis-3.3.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2afc675b831f7552da41116fffffca4340f387dc03f56d6ec0c7895ab0b59a10", size = 182030, upload-time = "2026-03-16T15:20:07.857Z" },
+ { url = "https://files.pythonhosted.org/packages/d4/b8/e070a1dbf8a1bbb8814baa0b00836fbe3f10c7af8e11f942cc739c64e062/hiredis-3.3.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4106201cd052d9eabe3cb7b5a24b0fe37307792bda4fcb3cf6ddd72f697828e8", size = 180543, upload-time = "2026-03-16T15:20:09.096Z" },
+ { url = "https://files.pythonhosted.org/packages/0d/bb/b5f4f98e44626e2446cd8a52ce6cb1fc1c99786b6e2db3bf09cea97b90cd/hiredis-3.3.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8887bf0f31e4b550bd988c8863b527b6587d200653e9375cd91eea2b944b7424", size = 172356, upload-time = "2026-03-16T15:20:10.245Z" },
+ { url = "https://files.pythonhosted.org/packages/ef/93/73a77b54ba94e82f76d02563c588d8a062513062675f483a033a43015f2c/hiredis-3.3.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1ac7697365dbe45109273b34227fee6826b276ead9a4a007e0877e1d3f0fcf21", size = 166433, upload-time = "2026-03-16T15:20:11.789Z" },
+ { url = "https://files.pythonhosted.org/packages/f3/c2/1b2dcbe5dc53a46a8cb05bed67d190a7e30bad2ad1f727ebe154dfeededd/hiredis-3.3.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2b6da6e07359107c653a809b3cff2d9ccaeedbafe33c6f16434aef6f53ce4a2b", size = 177220, upload-time = "2026-03-16T15:20:12.991Z" },
+ { url = "https://files.pythonhosted.org/packages/02/09/f4314cf096552568b5ea785ceb60c424771f4d35a76c410ad39d258f74bc/hiredis-3.3.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:ce334915f5d31048f76a42c607bf26687cf045eb1bc852b7340f09729c6a64fc", size = 170475, upload-time = "2026-03-16T15:20:14.519Z" },
+ { url = "https://files.pythonhosted.org/packages/b1/2e/3f56e438efc8fc27ed4a3dbad58c0280061466473ec35d8f86c90c841a84/hiredis-3.3.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ee11fd431f83d8a5b29d370b9d79a814d3218d30113bdcd44657e9bdf715fc92", size = 167913, upload-time = "2026-03-16T15:20:15.672Z" },
+ { url = "https://files.pythonhosted.org/packages/56/34/053e5ee91d6dc478faac661996d1fd4886c5acb7a1b5ac30e7d3c794bb51/hiredis-3.3.1-cp314-cp314-win32.whl", hash = "sha256:e0356561b4a97c83b9ee3de657a41b8d1a1781226853adaf47b550bb988fda6f", size = 21167, upload-time = "2026-03-16T15:20:17.013Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/33/06776c641d17881a9031e337e81b3b934c38c2adbb83c85062d6b5f83b72/hiredis-3.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:80aba5f85d6227faee628ae28d1c3b69c661806a0636548ac56c68782606454f", size = 23000, upload-time = "2026-03-16T15:20:17.966Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/5a/94f9a505b2ff5376d4a05fb279b69d89bafa7219dd33f6944026e3e56f80/hiredis-3.3.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:907f7b5501a534030738f0f27459a612d2266fd0507b007bb8f3e6de08167920", size = 83039, upload-time = "2026-03-16T15:20:19.316Z" },
+ { url = "https://files.pythonhosted.org/packages/93/ae/d3752a8f03a1fca43d402389d2a2d234d3db54c4d1f07f26c1041ca3c5de/hiredis-3.3.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:de94b409f49eb6a588ebdd5872e826caec417cd77c17af0fb94f2128427f1a2a", size = 46703, upload-time = "2026-03-16T15:20:20.401Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/76/e32c868a2fa23cd82bacaffd38649d938173244a0e717ec1c0c76874dbdd/hiredis-3.3.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:79cd03e7ff550c17758a7520bf437c156d3d4c8bb74214deeafa69cda49c85a4", size = 42379, upload-time = "2026-03-16T15:20:21.705Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/f6/d687d36a74ce6cf448826cf2e8edfc1eb37cc965308f74eb696aa97c69df/hiredis-3.3.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ffa7ba2e2da1f806f3181b9730b3e87ba9dbfec884806725d4584055ba3faa6", size = 180311, upload-time = "2026-03-16T15:20:23.037Z" },
+ { url = "https://files.pythonhosted.org/packages/db/ac/f520dc0066a62a15aa920c7dd0a2028c213f4862d5f901409ae92ee5d785/hiredis-3.3.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ee37fe8cf081b72dea72f96a0ee604f492ec02252eb77dc26ff6eec3f997b580", size = 190488, upload-time = "2026-03-16T15:20:24.357Z" },
+ { url = "https://files.pythonhosted.org/packages/4d/f5/ae10fff82d0f291e90c41bf10a5d6543a96aae00cccede01bf2b6f7e178d/hiredis-3.3.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9bfdeff778d3f7ff449ca5922ab773899e7d31e26a576028b06a5e9cf0ed8c34", size = 189210, upload-time = "2026-03-16T15:20:25.51Z" },
+ { url = "https://files.pythonhosted.org/packages/0f/8f/5be4344e542aa8d349a03d05486c59d9ca26f69c749d11e114bf34b84d50/hiredis-3.3.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:027ce4fabfeff5af5b9869d5524770877f9061d118bc36b85703ae3faf5aad8e", size = 180971, upload-time = "2026-03-16T15:20:26.631Z" },
+ { url = "https://files.pythonhosted.org/packages/41/a2/29e230226ec2a31f13f8a832fbafe366e263f3b090553ebe49bb4581a7bd/hiredis-3.3.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:dcea8c3f53674ae68e44b12e853b844a1d315250ca6677b11ec0c06aff85e86c", size = 175314, upload-time = "2026-03-16T15:20:27.848Z" },
+ { url = "https://files.pythonhosted.org/packages/89/2e/bf241707ad86b9f3ebfbc7ab89e19d5ec243ff92ca77644a383622e8740b/hiredis-3.3.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0b5ff2f643f4b452b0597b7fe6aa35d398cb31d8806801acfafb1558610ea2aa", size = 185652, upload-time = "2026-03-16T15:20:29.364Z" },
+ { url = "https://files.pythonhosted.org/packages/d0/c1/b39170d8bcccd01febd45af4ac6b43ff38e134a868e2ec167a82a036fb35/hiredis-3.3.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3586c8a5f56d34b9dddaaa9e76905f31933cac267251006adf86ec0eef7d0400", size = 179033, upload-time = "2026-03-16T15:20:30.549Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/3a/4fe39a169115434f911abff08ff485b9b6201c168500e112b3f6a8110c0a/hiredis-3.3.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a110d19881ca78a88583d3b07231e7c6864864f5f1f3491b638863ea45fa8708", size = 176126, upload-time = "2026-03-16T15:20:31.958Z" },
+ { url = "https://files.pythonhosted.org/packages/44/99/c1d0b0bc4f9e9150e24beb0dca2e186e32d5e749d0022e0d26453749ed51/hiredis-3.3.1-cp314-cp314t-win32.whl", hash = "sha256:98fd5b39410e9d69e10e90d0330e35650becaa5dd2548f509b9598f1f3c6124d", size = 22028, upload-time = "2026-03-16T15:20:33.33Z" },
+ { url = "https://files.pythonhosted.org/packages/35/d6/191e6741addc97bcf5e755661f8c82f0fd0aa35f07ece56e858da689b57e/hiredis-3.3.1-cp314-cp314t-win_amd64.whl", hash = "sha256:ab1f646ff531d70bfd25f01e60708dfa3d105eb458b7dedd9fe9a443039fd809", size = 23811, upload-time = "2026-03-16T15:20:34.292Z" },
]
[[package]]
@@ -482,16 +496,16 @@ wheels = [
[[package]]
name = "pytest-cov"
-version = "7.0.0"
+version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage" },
{ name = "pluggy" },
{ name = "pytest" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
+ { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" },
]
[[package]]
@@ -505,11 +519,11 @@ wheels = [
[[package]]
name = "redis"
-version = "7.1.0"
+version = "7.4.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/7b/7f/3759b1d0d72b7c92f0d70ffd9dc962b7b7b5ee74e135f9d7d8ab06b8a318/redis-7.4.0.tar.gz", hash = "sha256:64a6ea7bf567ad43c964d2c30d82853f8df927c5c9017766c55a1d1ed95d18ad", size = 4943913, upload-time = "2026-03-24T09:14:37.53Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" },
+ { url = "https://files.pythonhosted.org/packages/74/3a/95deec7db1eb53979973ebd156f3369a72732208d1391cd2e5d127062a32/redis-7.4.0-py3-none-any.whl", hash = "sha256:a9c74a5c893a5ef8455a5adb793a31bb70feb821c86eccb62eebef5a19c429ec", size = 409772, upload-time = "2026-03-24T09:14:35.968Z" },
]
[package.optional-dependencies]
@@ -519,15 +533,15 @@ hiredis = [
[[package]]
name = "starlette"
-version = "0.52.1"
+version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" },
]
[[package]]
@@ -610,13 +624,13 @@ wheels = [
[[package]]
name = "uvicorn"
-version = "0.40.0"
+version = "0.42.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
{ name = "h11" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/e3/ad/4a96c425be6fb67e0621e62d86c402b4a17ab2be7f7c055d9bd2f638b9e2/uvicorn-0.42.0.tar.gz", hash = "sha256:9b1f190ce15a2dd22e7758651d9b6d12df09a13d51ba5bf4fc33c383a48e1775", size = 85393, upload-time = "2026-03-16T06:19:50.077Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" },
+ { url = "https://files.pythonhosted.org/packages/0a/89/f8827ccff89c1586027a105e5630ff6139a64da2515e24dafe860bd9ae4d/uvicorn-0.42.0-py3-none-any.whl", hash = "sha256:96c30f5c7abe6f74ae8900a70e92b85ad6613b745d4879eb9b16ccad15645359", size = 68830, upload-time = "2026-03-16T06:19:48.325Z" },
]
From c3464ba54de36db1071de75a2a97ca0919c0c17a Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 22:28:59 +0100
Subject: [PATCH 10/15] Renames project and core library to queue-server and
queue-engine
Updates all documentation, configuration, and dependency references to use the new repository and package names. Improves clarity and consistency with upstream naming, reduces confusion, and prepares for future maintenance.
---
.github/workflows/test.yml | 2 +-
README.md | 8 +++-----
docs/Makefile | 8 ++++----
docs/_templates/layout.html | 2 +-
docs/conf.py | 10 +++++-----
docs/contributing.rst | 4 ++--
docs/faqs.rst | 8 ++++----
docs/gettingstarted.rst | 16 ++++++++--------
docs/index.rst | 2 +-
docs/installation.rst | 2 +-
docs/internals.rst | 6 +++---
uv.lock | 8 ++++----
12 files changed, 37 insertions(+), 39 deletions(-)
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 1a535b6..fb4e360 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -48,4 +48,4 @@ jobs:
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
- slug: flowdacity/flowdacity-queue-server
+ slug: flowdacity/queue-server
diff --git a/README.md b/README.md
index 7537a32..ce1f851 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,10 @@
-[](https://github.com/flowdacity/flowdacity-queue-server/actions/workflows/test.yml)
-[](https://codecov.io/github/flowdacity/flowdacity-queue-server)
+[](https://github.com/flowdacity/queue-server/actions/workflows/test.yml)
+[](https://codecov.io/github/flowdacity/queue-server)
Flowdacity Queue Server
=======================
-An async HTTP API for [Flowdacity Queue (FQ)](https://github.com/flowdacity/flowdacity-queue), built with Starlette and Uvicorn.
+An async HTTP API for [Flowdacity Queue (FQ)](https://github.com/flowdacity/queue-engine), built with Starlette and Uvicorn.
## Prerequisites
@@ -13,8 +13,6 @@ An async HTTP API for [Flowdacity Queue (FQ)](https://github.com/flowdacity/flow
## Installation
-This project currently pins `flowdacity-queue` to the upstream `v1.0.0` Git tag because that version is tagged upstream but not published on PyPI.
-
```bash
uv sync --group dev
```
diff --git a/docs/Makefile b/docs/Makefile
index 9d3692d..6dc5d31 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -85,17 +85,17 @@ qthelp:
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
- @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/flowdacity-queue-server.qhcp"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/queue-server.qhcp"
@echo "To view the help file:"
- @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/flowdacity-queue-server.qhc"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/queue-server.qhc"
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
- @echo "# mkdir -p $$HOME/.local/share/devhelp/flowdacity-queue-server"
- @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/flowdacity-queue-server"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/queue-server"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/queue-server"
@echo "# devhelp"
epub:
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index ed6a0ef..f860275 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -1,5 +1,5 @@
{% extends "!layout.html" %}
{%- block extrahead %}
-
+
{% endblock %}
diff --git a/docs/conf.py b/docs/conf.py
index 340bb89..d8be408 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -26,7 +26,7 @@
html_theme_options = {
"logo": "logo.png",
"github_user": "flowdacity",
- "github_repo": "flowdacity-queue-server",
+ "github_repo": "queue-server",
"description": "Async HTTP API for Flowdacity Queue",
}
@@ -44,7 +44,7 @@
latex_documents = [
(
"index",
- "flowdacity-queue-server.tex",
+ "queue-server.tex",
"Flowdacity Queue Server Documentation",
"Flowdacity Development Team",
"manual",
@@ -54,7 +54,7 @@
man_pages = [
(
"index",
- "flowdacity-queue-server",
+ "queue-server",
"Flowdacity Queue Server Documentation",
["Flowdacity Development Team"],
1,
@@ -64,10 +64,10 @@
texinfo_documents = [
(
"index",
- "flowdacity-queue-server",
+ "queue-server",
"Flowdacity Queue Server Documentation",
"Flowdacity Development Team",
- "flowdacity-queue-server",
+ "queue-server",
"Async HTTP API for Flowdacity Queue.",
"Miscellaneous",
),
diff --git a/docs/contributing.rst b/docs/contributing.rst
index f612727..27db5f0 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -10,8 +10,8 @@ Repositories
Development is split across two repositories:
-1. Flowdacity Queue Server: https://github.com/flowdacity/flowdacity-queue-server
-2. Flowdacity Queue core: https://github.com/flowdacity/flowdacity-queue
+1. Flowdacity Queue Server: https://github.com/flowdacity/queue-server
+2. Flowdacity Queue core: https://github.com/flowdacity/queue-engine
Local workflow
--------------
diff --git a/docs/faqs.rst b/docs/faqs.rst
index 4741bb9..498fe3f 100644
--- a/docs/faqs.rst
+++ b/docs/faqs.rst
@@ -22,7 +22,7 @@ Call the `Interval API `_:
.. code-block:: bash
- curl -X POST http://127.0.0.1:8080/interval/sms/user42/ \
+ curl -X POST http://127.0.0.1:8300/interval/sms/user42/ \
-H "Content-Type: application/json" \
-d '{"interval": 5000}'
@@ -38,7 +38,7 @@ Any HTTP client can be used. A minimal Python example with ``httpx`` looks like
import httpx
- with httpx.Client(base_url="http://127.0.0.1:8080") as client:
+ with httpx.Client(base_url="http://127.0.0.1:8300") as client:
while True:
response = client.get("/dequeue/sms/")
@@ -87,8 +87,8 @@ Where is the source code?
The codebase is split across two repositories:
-* Flowdacity Queue Server: https://github.com/flowdacity/flowdacity-queue-server
-* Flowdacity Queue core: https://github.com/flowdacity/flowdacity-queue
+* Flowdacity Queue Server: https://github.com/flowdacity/queue-server
+* Flowdacity Queue core: https://github.com/flowdacity/queue-engine
How do I report a bug or contribute a fix?
==========================================
diff --git a/docs/gettingstarted.rst b/docs/gettingstarted.rst
index 1f502cc..e40ad01 100644
--- a/docs/gettingstarted.rst
+++ b/docs/gettingstarted.rst
@@ -16,16 +16,16 @@ Start the server
::
- PORT=8080 \
+ PORT=8300 \
REDIS_HOST=127.0.0.1 \
- uv run uvicorn asgi:app --host 0.0.0.0 --port 8080
+ uv run uvicorn asgi:app --host 0.0.0.0 --port 8300
Check the root endpoint
-----------------------
.. code-block:: bash
- curl http://127.0.0.1:8080/
+ curl http://127.0.0.1:8300/
Queue workflow
--------------
@@ -44,7 +44,7 @@ Enqueue
.. code-block:: bash
- curl -X POST http://127.0.0.1:8080/enqueue/sms/user42/ \
+ curl -X POST http://127.0.0.1:8300/enqueue/sms/user42/ \
-H "Content-Type: application/json" \
-d '{"job_id":"job-1","payload":{"message":"hello, world"},"interval":1000}'
@@ -53,19 +53,19 @@ Dequeue
.. code-block:: bash
- curl http://127.0.0.1:8080/dequeue/sms/
+ curl http://127.0.0.1:8300/dequeue/sms/
Finish
``````
.. code-block:: bash
- curl -X POST http://127.0.0.1:8080/finish/sms/user42/job-1/
+ curl -X POST http://127.0.0.1:8300/finish/sms/user42/job-1/
Metrics
```````
.. code-block:: bash
- curl http://127.0.0.1:8080/metrics/
- curl http://127.0.0.1:8080/metrics/sms/user42/
+ curl http://127.0.0.1:8300/metrics/
+ curl http://127.0.0.1:8300/metrics/sms/user42/
diff --git a/docs/index.rst b/docs/index.rst
index 700de7c..435c86e 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,7 +1,7 @@
Flowdacity Queue Server
=======================
-Flowdacity Queue Server is an async HTTP API for `Flowdacity Queue (FQ) <https://github.com/flowdacity/flowdacity-queue>`_.
+Flowdacity Queue Server is an async HTTP API for `Flowdacity Queue (FQ) <https://github.com/flowdacity/queue-engine>`_.
It runs on Starlette and Uvicorn, stores queue state in Redis through the FQ core,
and exposes HTTP endpoints for enqueueing, dequeueing, finishing, requeueing, and
inspecting jobs.
diff --git a/docs/installation.rst b/docs/installation.rst
index ee58f71..af7df37 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -15,7 +15,7 @@ Install with uv
uv sync --group dev
-This project currently pins ``flowdacity-queue`` to the upstream ``v1.0.0`` Git tag.
+This project currently pins ``queue-engine`` to the upstream ``v1.0.0`` Git tag.
Install with pip
----------------
diff --git a/docs/internals.rst b/docs/internals.rst
index aad21c4..6e93ffd 100644
--- a/docs/internals.rst
+++ b/docs/internals.rst
@@ -4,7 +4,7 @@ The Internals
Flowdacity Queue Server has two main layers:
-* The `Flowdacity Queue core <https://github.com/flowdacity/flowdacity-queue>`_,
+* The `Flowdacity Queue core <https://github.com/flowdacity/queue-engine>`_,
which manages queue state in Redis and executes Lua scripts for queue operations.
* The HTTP server in this repository, which maps REST endpoints to FQ operations,
loads configuration from environment variables, and runs the background requeue loop.
@@ -38,5 +38,5 @@ client cleanly.
Related repositories
--------------------
-* Server: https://github.com/flowdacity/flowdacity-queue-server
-* Core queue library: https://github.com/flowdacity/flowdacity-queue
+* Server: https://github.com/flowdacity/queue-server
+* Core queue library: https://github.com/flowdacity/queue-engine
diff --git a/uv.lock b/uv.lock
index 152ba8b..ccc8feb 100644
--- a/uv.lock
+++ b/uv.lock
@@ -139,7 +139,7 @@ wheels = [
]
[[package]]
-name = "flowdacity-queue"
+name = "queue-engine"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
@@ -152,11 +152,11 @@ wheels = [
]
[[package]]
-name = "flowdacity-queue-server"
+name = "queue-server"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
- { name = "flowdacity-queue" },
+ { name = "queue-engine" },
{ name = "httpx" },
{ name = "msgpack" },
{ name = "pydantic" },
@@ -175,7 +175,7 @@ dev = [
[package.metadata]
requires-dist = [
- { name = "flowdacity-queue", specifier = ">=1.0.0" },
+ { name = "queue-engine", specifier = ">=1.0.0" },
{ name = "httpx", specifier = ">=0.28.1" },
{ name = "msgpack", specifier = ">=1.1.2" },
{ name = "pydantic", specifier = ">=2.0.0" },
From f3d52cfcf9eb1e616ba64269c7cdc3a1c2e613b2 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 22:35:31 +0100
Subject: [PATCH 11/15] Clarifies README configuration section and updates PORT
usage instructions
---
README.md | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/README.md b/README.md
index ce1f851..88fc37b 100644
--- a/README.md
+++ b/README.md
@@ -29,7 +29,7 @@ pip install pytest pytest-cov
## Configuration
The server reads all queue and Redis settings from environment variables. No config file is required.
-Values are validated at startup with `pydantic-settings`.
+These application settings are validated at startup by `QueueServerSettings` with `pydantic-settings`.
| Variable | Default | Description |
| --- | --- | --- |
@@ -46,10 +46,11 @@ Values are validated at startup with `pydantic-settings`.
| `REDIS_PASSWORD` | empty | Redis password. |
| `REDIS_CLUSTERED` | `false` | Enables Redis Cluster mode. |
| `REDIS_UNIX_SOCKET_PATH` | `/tmp/redis.sock` | Redis socket path when `REDIS_CONN_TYPE=unix_sock`. |
-| `PORT` | `8300` | Uvicorn port used by the container and local examples. |
Boolean env vars accept only `true` or `false`.
+`PORT` is not part of `QueueServerSettings`. It is runtime launcher configuration used by the container entrypoint or by the `uvicorn` CLI, so pass it as a launcher environment variable or `--port` argument.
+
## Run locally
Start Redis:
@@ -61,9 +62,9 @@ make redis-up
Run the API:
```bash
-PORT=8300 \
-REDIS_HOST=127.0.0.1 \
-uv run uvicorn asgi:app --host 0.0.0.0 --port 8300
+export PORT=8300
+export REDIS_HOST=127.0.0.1
+uv run uvicorn asgi:app --host 0.0.0.0 --port "${PORT}"
```
## Docker
From 5d826fa9d6ed2d3ac2dd19914116d009f94910ab Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 22:40:46 +0100
Subject: [PATCH 12/15] Refactors package names and updates dependencies in
uv.lock
---
uv.lock | 84 ++++++++++++++++++++++++++++-----------------------------
1 file changed, 42 insertions(+), 42 deletions(-)
diff --git a/uv.lock b/uv.lock
index ccc8feb..b13f4a7 100644
--- a/uv.lock
+++ b/uv.lock
@@ -139,7 +139,7 @@ wheels = [
]
[[package]]
-name = "queue-engine"
+name = "flowdacity-queue"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
@@ -151,47 +151,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/47/1cf9ffebeb28af6115a640e9e35412fad632c1025b115ba30e8a68095731/flowdacity_queue-1.0.0-py3-none-any.whl", hash = "sha256:02f95cfb8db6f532c22de0cfeded9bb90a45da0e249526f8c70bb16a61fcee5a", size = 15407, upload-time = "2026-03-24T17:44:23.103Z" },
]
-[[package]]
-name = "queue-server"
-version = "0.1.0"
-source = { virtual = "." }
-dependencies = [
- { name = "queue-engine" },
- { name = "httpx" },
- { name = "msgpack" },
- { name = "pydantic" },
- { name = "pydantic-settings" },
- { name = "redis", extra = ["hiredis"] },
- { name = "starlette" },
- { name = "ujson" },
- { name = "uvicorn" },
-]
-
-[package.dev-dependencies]
-dev = [
- { name = "pytest" },
- { name = "pytest-cov" },
-]
-
-[package.metadata]
-requires-dist = [
- { name = "queue-engine", specifier = ">=1.0.0" },
- { name = "httpx", specifier = ">=0.28.1" },
- { name = "msgpack", specifier = ">=1.1.2" },
- { name = "pydantic", specifier = ">=2.0.0" },
- { name = "pydantic-settings", specifier = ">=2.0.0" },
- { name = "redis", extras = ["hiredis"], specifier = ">=7.1.0" },
- { name = "starlette", specifier = ">=0.50.0" },
- { name = "ujson", specifier = ">=5.11.0" },
- { name = "uvicorn", specifier = ">=0.38.0" },
-]
-
-[package.metadata.requires-dev]
-dev = [
- { name = "pytest", specifier = ">=9.0.1" },
- { name = "pytest-cov", specifier = ">=7.0.0" },
-]
-
[[package]]
name = "h11"
version = "0.16.0"
@@ -517,6 +476,47 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
]
+[[package]]
+name = "queue-server"
+version = "1.0.0"
+source = { virtual = "." }
+dependencies = [
+ { name = "flowdacity-queue" },
+ { name = "httpx" },
+ { name = "msgpack" },
+ { name = "pydantic" },
+ { name = "pydantic-settings" },
+ { name = "redis", extra = ["hiredis"] },
+ { name = "starlette" },
+ { name = "ujson" },
+ { name = "uvicorn" },
+]
+
+[package.dev-dependencies]
+dev = [
+ { name = "pytest" },
+ { name = "pytest-cov" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "flowdacity-queue", specifier = ">=1.0.0" },
+ { name = "httpx", specifier = ">=0.28.1" },
+ { name = "msgpack", specifier = ">=1.1.2" },
+ { name = "pydantic", specifier = ">=2.0.0" },
+ { name = "pydantic-settings", specifier = ">=2.0.0" },
+ { name = "redis", extras = ["hiredis"], specifier = ">=7.1.0" },
+ { name = "starlette", specifier = ">=0.50.0" },
+ { name = "ujson", specifier = ">=5.11.0" },
+ { name = "uvicorn", specifier = ">=0.38.0" },
+]
+
+[package.metadata.requires-dev]
+dev = [
+ { name = "pytest", specifier = ">=9.0.1" },
+ { name = "pytest-cov", specifier = ">=7.0.0" },
+]
+
[[package]]
name = "redis"
version = "7.4.0"
From c2bf0ee29fc8d9eada5f069b369f24cb3c7a8080 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 22:58:43 +0100
Subject: [PATCH 13/15] Enhances documentation and error handling; updates
version retrieval and adds tests for Redis client availability
---
docs/conf.py | 10 ++++++++--
docs/installation.rst | 26 +++++++++++++++++---------
fq_server/__init__.py | 8 +++++++-
fq_server/server.py | 4 +++-
tests/test_routes.py | 12 ++++++++++++
5 files changed, 47 insertions(+), 13 deletions(-)
diff --git a/docs/conf.py b/docs/conf.py
index d8be408..f9196a7 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
+from pathlib import Path
+import tomllib
+
import alabaster
extensions = [
@@ -15,8 +18,11 @@
project = "Flowdacity Queue Server"
copyright = "2025, Flowdacity Development Team"
-version = "0.1.0"
-release = "0.1.0"
+project_root = Path(__file__).resolve().parents[1]
+release = tomllib.loads((project_root / "pyproject.toml").read_text(encoding="utf-8"))[
+ "project"
+]["version"]
+version = release
exclude_patterns = ["_build"]
pygments_style = "sphinx"
diff --git a/docs/installation.rst b/docs/installation.rst
index af7df37..c08c4be 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -7,20 +7,28 @@ Requirements
* Python 3.12+
* Redis 7+
+* `uv <https://github.com/astral-sh/uv>`_ (recommended)
-Install with uv
----------------
+Quick setup
+-----------
-::
+.. code-block:: bash
uv sync --group dev
-This project currently pins ``queue-engine`` to the upstream ``v1.0.0`` Git tag.
+This creates the local virtual environment and installs the project with the
+development dependencies from ``uv.lock``.
-Install with pip
-----------------
+You can run commands without activating the environment:
-::
+.. code-block:: bash
+
+ uv run uvicorn asgi:app --host 0.0.0.0 --port 8300
+
+Alternative with pip
+--------------------
+
+.. code-block:: bash
python -m venv .venv
source .venv/bin/activate
@@ -30,5 +38,5 @@ Install with pip
Next steps
----------
-Continue with the `getting started guide <gettingstarted.html>`_ to run Redis,
-set environment variables, and start the server.
+Continue with the `getting started guide <gettingstarted.html>`_ for Redis,
+environment variables, and API usage examples.
diff --git a/fq_server/__init__.py b/fq_server/__init__.py
index c15868e..cb9d4c0 100644
--- a/fq_server/__init__.py
+++ b/fq_server/__init__.py
@@ -1,7 +1,13 @@
+from importlib.metadata import PackageNotFoundError, version as package_version
+
from .server import FQServer, build_config_from_env, setup_server
from .settings import FQConfig, QueueServerSettings
-__version__ = "0.1.0"
+try:
+ __version__ = package_version("queue-server")
+except PackageNotFoundError:
+ __version__ = "1.0.0"
+
__all__ = [
"FQConfig",
"FQServer",
diff --git a/fq_server/server.py b/fq_server/server.py
index 9a2a86f..bc1397b 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -80,7 +80,9 @@ async def requeue_with_lock(self):
while True:
try:
redis = self.queue.redis_client()
- assert redis is not None, "Redis client is not initialized"
+ if redis is None:
+ logger.error("Redis client is not initialized; stopping requeue loop")
+ return
# assumes async lock
async with redis.lock("fq-requeue-lock-key", timeout=15):
try:
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 987d4f1..9fb89ba 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -612,6 +612,18 @@ async def mock_requeue_with_failure():
except asyncio.CancelledError:
pass # Expected - task was cancelled after executing exception code path
+ async def test_requeue_with_lock_missing_redis_client(self):
+ """Test requeue_with_lock exits cleanly when the Redis client is unavailable."""
+ server = self.server
+ server.config["fq"]["job_requeue_interval"] = 1
+
+ with patch.object(server.queue, "redis_client", return_value=None):
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+ await asyncio.sleep(0.05)
+
+ self.assertTrue(requeue_task.done())
+ self.assertIsNone(requeue_task.exception())
+
class FQServerLifespanTestCase(unittest.IsolatedAsyncioTestCase):
"""Test FQServer lifespan (startup/shutdown)."""
From 420cacdb280ac1748819c92c4f9cbe132da3d5e5 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 23:10:21 +0100
Subject: [PATCH 14/15] Improves Redis error handling in requeue loop
Expands error handling to catch and log transient Redis connection,
timeout, and general Redis errors during the requeue loop to prevent
unexpected crashes and aid in debugging. Adds targeted tests to ensure
robustness and correct logging behavior for these error scenarios.
---
fq_server/server.py | 13 ++++++++-
tests/test_routes.py | 68 ++++++++++++++++++++++++++++++++++++++++++++
2 files changed, 80 insertions(+), 1 deletion(-)
diff --git a/fq_server/server.py b/fq_server/server.py
index bc1397b..4a3d730 100644
--- a/fq_server/server.py
+++ b/fq_server/server.py
@@ -10,7 +10,12 @@
from typing import Any, TypeAlias
from fq import FQ
from pydantic import ValidationError
-from redis.exceptions import LockError
+from redis.exceptions import (
+ ConnectionError as RedisConnectionError,
+ LockError,
+ RedisError,
+ TimeoutError as RedisTimeoutError,
+)
from starlette.applications import Starlette
from starlette.requests import Request
@@ -92,6 +97,12 @@ async def requeue_with_lock(self):
except LockError:
# the lock wasn't acquired within specified time
logger.debug("Requeue lock is already held by another worker")
+ except (RedisConnectionError, RedisTimeoutError, RedisError):
+ logger.exception(
+ "Transient Redis error in requeue loop while managing lock"
+ )
+ except Exception:
+ logger.exception("Unexpected error in requeue loop while managing lock")
finally:
await asyncio.sleep(job_requeue_interval / 1000.0)
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 9fb89ba..fa449db 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -625,6 +625,74 @@ async def test_requeue_with_lock_missing_redis_client(self):
self.assertIsNone(requeue_task.exception())
+class FQServerRequeueLoopTestCase(unittest.IsolatedAsyncioTestCase):
+ """Focused tests for requeue loop error handling that do not need Redis."""
+
+ async def test_requeue_with_lock_redis_error(self):
+ """Test requeue_with_lock swallows Redis errors raised by redis.lock()."""
+ from redis.exceptions import RedisError
+
+ server = setup_server(build_test_config())
+
+ def failing_lock(*args, **kwargs):
+ raise RedisError("Redis lock creation failed")
+
+ mock_redis = AsyncMock()
+ mock_redis.lock = failing_lock
+
+ with patch.object(server.queue, "redis_client", return_value=mock_redis):
+ with self.assertLogs("fq_server.server", level="ERROR") as captured:
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+ await asyncio.sleep(0.05)
+
+ self.assertFalse(requeue_task.done())
+
+ requeue_task.cancel()
+ with self.assertRaises(asyncio.CancelledError):
+ await requeue_task
+
+ self.assertTrue(
+ any(
+ "Transient Redis error in requeue loop while managing lock" in message
+ for message in captured.output
+ )
+ )
+
+ async def test_requeue_with_lock_lock_context_timeout(self):
+ """Test requeue_with_lock swallows Redis timeout errors from lock context entry."""
+ from redis.exceptions import TimeoutError as RedisTimeoutError
+
+ server = setup_server(build_test_config())
+
+ class FailingLock:
+ async def __aenter__(self):
+ raise RedisTimeoutError("Timed out entering lock context")
+
+ async def __aexit__(self, *args):
+ pass
+
+ mock_redis = AsyncMock()
+ mock_redis.lock = lambda *args, **kwargs: FailingLock()
+
+ with patch.object(server.queue, "redis_client", return_value=mock_redis):
+ with self.assertLogs("fq_server.server", level="ERROR") as captured:
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+ await asyncio.sleep(0.05)
+
+ self.assertFalse(requeue_task.done())
+
+ requeue_task.cancel()
+ with self.assertRaises(asyncio.CancelledError):
+ await requeue_task
+
+ self.assertTrue(
+ any(
+ "Transient Redis error in requeue loop while managing lock" in message
+ for message in captured.output
+ )
+ )
+
+
class FQServerLifespanTestCase(unittest.IsolatedAsyncioTestCase):
"""Test FQServer lifespan (startup/shutdown)."""
From 38f534cc073543e182c91e7c9de92882e8cd3a40 Mon Sep 17 00:00:00 2001
From: "Ochui, Princewill Patrick" <21917688+ochui@users.noreply.github.com>
Date: Tue, 24 Mar 2026 23:51:56 +0100
Subject: [PATCH 15/15] Splits integration and config tests for improved
clarity
Refactors the test suite by separating configuration validation,
API/core route tests, and error path coverage into targeted modules.
Removes a large monolithic test file in favor of focused, maintainable
test cases organized by purpose. Enhances test maintainability and
eases future test additions by introducing shared test utilities.
---
tests/support.py | 58 ++
tests/test_api_routes_core.py | 156 ++++++
tests/test_api_routes_errors.py | 229 ++++++++
tests/test_config_settings.py | 71 +++
tests/test_routes.py | 756 --------------------------
tests/test_server_requeue_lifespan.py | 205 +++++++
6 files changed, 719 insertions(+), 756 deletions(-)
create mode 100644 tests/support.py
create mode 100644 tests/test_api_routes_core.py
create mode 100644 tests/test_api_routes_errors.py
create mode 100644 tests/test_config_settings.py
delete mode 100644 tests/test_routes.py
create mode 100644 tests/test_server_requeue_lifespan.py
diff --git a/tests/support.py b/tests/support.py
new file mode 100644
index 0000000..8d0cbfe
--- /dev/null
+++ b/tests/support.py
@@ -0,0 +1,58 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
+
+import unittest
+
+from httpx import ASGITransport, AsyncClient
+from starlette.types import ASGIApp
+
+from fq_server import FQConfig, build_config_from_env, setup_server
+
+
+def build_test_config() -> FQConfig:
+ return {
+ "fq": {
+ "job_expire_interval": 1000,
+ "job_requeue_interval": 1000,
+ "default_job_requeue_limit": -1,
+ "enable_requeue_script": True,
+ },
+ "redis": {
+ "db": 0,
+ "key_prefix": "fq_server_test",
+ "conn_type": "tcp_sock",
+ "host": "127.0.0.1",
+ "port": 6379,
+ "password": "",
+ "clustered": False,
+ "unix_socket_path": "/tmp/redis.sock",
+ },
+ }
+
+
+class FQServerAsyncTestCase(unittest.IsolatedAsyncioTestCase):
+ async def asyncSetUp(self):
+ server = setup_server(build_test_config())
+ self.server = server
+ self.app: ASGIApp = server.app
+
+ self.queue = server.queue
+ await self.queue.initialize()
+ self.r = self.queue._r
+
+ await self.r.flushdb()
+
+ transport = ASGITransport(app=self.app)
+ self.client = AsyncClient(transport=transport, base_url="http://test")
+
+ async def asyncTearDown(self):
+ await self.r.flushdb()
+ await self.client.aclose()
+ await self.queue.close()
+
+
+__all__ = [
+ "FQServerAsyncTestCase",
+ "build_config_from_env",
+ "build_test_config",
+]
\ No newline at end of file
diff --git a/tests/test_api_routes_core.py b/tests/test_api_routes_core.py
new file mode 100644
index 0000000..0ac35ab
--- /dev/null
+++ b/tests/test_api_routes_core.py
@@ -0,0 +1,156 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
+
+import ujson as json
+
+from tests.support import FQServerAsyncTestCase
+
+
+class TestApiRoutesCore(FQServerAsyncTestCase):
+ async def test_root(self):
+ response = await self.client.get("/")
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json(), {"message": "Hello, FQS!"})
+
+ async def test_enqueue(self):
+ request_params = {
+ "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
+ "payload": {"message": "Hello, world."},
+ "interval": 1000,
+ }
+ response = await self.client.post(
+ "/enqueue/sms/johdoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 201)
+ data = response.json()
+ self.assertEqual(data["status"], "queued")
+
+ request_params = {
+ "job_id": "ef022088-d2b3-44ad-bf1d-a93d6d93b82c",
+ "payload": {"message": "Hello, world."},
+ "interval": 1000,
+ "requeue_limit": 10,
+ }
+ response = await self.client.post(
+ "/enqueue/sms/johdoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 201)
+ data = response.json()
+ self.assertEqual(data["status"], "queued")
+
+ async def test_dequeue_fail(self):
+ response = await self.client.get("/dequeue/")
+ self.assertEqual(response.status_code, 404)
+ self.assertEqual(response.json()["status"], "failure")
+
+ response = await self.client.get("/dequeue/sms/")
+ self.assertEqual(response.status_code, 404)
+ self.assertEqual(response.json()["status"], "failure")
+
+ async def test_dequeue(self):
+ request_params = {
+ "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
+ "payload": {"message": "Hello, world."},
+ "interval": 1000,
+ }
+ await self.client.post(
+ "/enqueue/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+
+ response = await self.client.get("/dequeue/sms/")
+ self.assertEqual(response.status_code, 200)
+ data = response.json()
+ self.assertEqual(data["status"], "success")
+ self.assertEqual(data["job_id"], "ef022088-d2b3-44ad-bf0d-a93d6d93b82c")
+ self.assertEqual(data["payload"], {"message": "Hello, world."})
+ self.assertEqual(data["queue_id"], "johndoe")
+ self.assertEqual(data["requeues_remaining"], -1)
+
+ async def test_finish_fail(self):
+ response = await self.client.post(
+ "/finish/sms/johndoe/ef022088-d2b3-44ad-bf0d-a93d6d93b82c/"
+ )
+ self.assertEqual(response.status_code, 404)
+ self.assertEqual(response.json()["status"], "failure")
+
+ async def test_finish(self):
+ request_params = {
+ "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
+ "payload": {"message": "Hello, world."},
+ "interval": 1000,
+ }
+ await self.client.post(
+ "/enqueue/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+
+ await self.client.get("/dequeue/sms/")
+
+ response = await self.client.post(
+ "/finish/sms/johndoe/ef022088-d2b3-44ad-bf0d-a93d6d93b82c/"
+ )
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json()["status"], "success")
+
+ async def test_interval(self):
+ request_params = {
+ "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
+ "payload": {"message": "Hello, world."},
+ "interval": 1000,
+ }
+ await self.client.post(
+ "/enqueue/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+
+ request_params = {"interval": 5000}
+ response = await self.client.post(
+ "/interval/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.json()["status"], "success")
+
+ async def test_interval_fail(self):
+ request_params = {"interval": 5000}
+ response = await self.client.post(
+ "/interval/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.json()["status"], "failure")
+
+ async def test_metrics(self):
+ response = await self.client.get("/metrics/")
+ data = response.json()
+ self.assertEqual(data["status"], "success")
+ self.assertIn("queue_types", data)
+ self.assertIn("enqueue_counts", data)
+ self.assertIn("dequeue_counts", data)
+
+ async def test_metrics_with_queue_type(self):
+ response = await self.client.get("/metrics/sms/")
+ data = response.json()
+ self.assertEqual(data["status"], "success")
+ self.assertIn("queue_ids", data)
+
+ async def test_metrics_with_queue_type_and_queue_id(self):
+ response = await self.client.get("/metrics/sms/johndoe/")
+ data = response.json()
+ self.assertEqual(data["status"], "success")
+ self.assertIn("queue_length", data)
+ self.assertIn("enqueue_counts", data)
+ self.assertIn("dequeue_counts", data)
+
+ async def test_deep_status_success(self):
+ response = await self.client.get("/deepstatus/")
+ self.assertEqual(response.status_code, 200)
+        self.assertEqual(response.json()["status"], "success")
diff --git a/tests/test_api_routes_errors.py b/tests/test_api_routes_errors.py
new file mode 100644
index 0000000..9e75d3f
--- /dev/null
+++ b/tests/test_api_routes_errors.py
@@ -0,0 +1,229 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
+
+from unittest.mock import patch
+
+import ujson as json
+
+from tests.support import FQServerAsyncTestCase
+
+
+class TestApiRoutesErrors(FQServerAsyncTestCase):
+ async def test_enqueue_malformed_json(self):
+ response = await self.client.post(
+ "/enqueue/sms/johndoe/",
+ content=b"invalid json {",
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("message", response.json())
+
+ async def test_enqueue_empty_body(self):
+ response = await self.client.post(
+ "/enqueue/sms/johndoe/",
+ content=b"",
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+
+ async def test_enqueue_with_max_queued_length_not_exceeded(self):
+ request_params = {
+ "job_id": "job-1",
+ "payload": {"message": "Test 1", "max_queued_length": 10},
+ "interval": 1000,
+ }
+ response = await self.client.post(
+ "/enqueue/sms/test_queue_1/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 201)
+ self.assertEqual(response.json()["status"], "queued")
+ self.assertEqual(response.json()["current_queue_length"], 0)
+
+ async def test_enqueue_with_max_queued_length_exceeded(self):
+ for i in range(3):
+ request_params = {
+ "job_id": f"job-{i}",
+ "payload": {"message": f"Test {i}"},
+ "interval": 1000,
+ }
+ await self.client.post(
+ "/enqueue/sms/test_queue_2/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+
+ request_params = {
+ "job_id": "job-overflow",
+ "payload": {"message": "Overflow", "max_queued_length": 2},
+ "interval": 1000,
+ }
+ response = await self.client.post(
+ "/enqueue/sms/test_queue_2/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 429)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Max queue length reached", response.json()["message"])
+ self.assertGreaterEqual(response.json()["current_queue_length"], 2)
+
+ async def test_enqueue_get_queue_length_exception(self):
+ request_params = {
+ "job_id": "job-error",
+ "payload": {"message": "Error test", "max_queued_length": 5},
+ "interval": 1000,
+ }
+
+ with patch.object(
+ self.queue, "get_queue_length", side_effect=Exception("Redis error")
+ ):
+ response = await self.client.post(
+ "/enqueue/sms/test_queue_3/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 201)
+ self.assertEqual(response.json()["status"], "queued")
+ self.assertEqual(response.json()["current_queue_length"], 0)
+
+ async def test_enqueue_queue_enqueue_exception(self):
+ request_params = {
+ "job_id": "job-queue-error",
+ "payload": {"message": "Queue error"},
+ "interval": 1000,
+ }
+
+ with patch.object(self.queue, "enqueue", side_effect=Exception("Queue error")):
+ response = await self.client.post(
+ "/enqueue/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Queue error", response.json()["message"])
+
+ async def test_enqueue_max_length_with_queue_exception(self):
+ request_params = {
+ "job_id": "job-with-max",
+ "payload": {"message": "Test", "max_queued_length": 10},
+ "interval": 1000,
+ }
+
+ with patch.object(self.queue, "enqueue", side_effect=Exception("Enqueue failed")):
+ response = await self.client.post(
+ "/enqueue/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Enqueue failed", response.json()["message"])
+
+ async def test_dequeue_get_queue_length_exception(self):
+ request_params = {
+ "job_id": "job-for-dequeue",
+ "payload": {"message": "Dequeue test"},
+ "interval": 1000,
+ }
+ await self.client.post(
+ "/enqueue/sms/dequeue_error_queue/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+
+ with patch.object(
+ self.queue, "get_queue_length", side_effect=Exception("Redis error")
+ ):
+ response = await self.client.get("/dequeue/sms/")
+ self.assertEqual(response.status_code, 200)
+ self.assertEqual(response.json()["status"], "success")
+ self.assertEqual(response.json()["current_queue_length"], 0)
+
+ async def test_dequeue_exception_general(self):
+ with patch.object(self.queue, "dequeue", side_effect=Exception("Dequeue failed")):
+ response = await self.client.get("/dequeue/sms/")
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Dequeue failed", response.json()["message"])
+
+ async def test_finish_exception(self):
+ with patch.object(self.queue, "finish", side_effect=Exception("Finish error")):
+ response = await self.client.post("/finish/sms/johndoe/job-123/")
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Finish error", response.json()["message"])
+
+ async def test_interval_malformed_json(self):
+ response = await self.client.post(
+ "/interval/sms/johndoe/",
+ content=b"invalid json",
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+
+ async def test_interval_missing_interval_key(self):
+ request_params = {"some_other_key": 5000}
+ response = await self.client.post(
+ "/interval/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+
+ async def test_interval_exception(self):
+ request_params = {"interval": 5000}
+
+ with patch.object(self.queue, "interval", side_effect=Exception("Interval error")):
+ response = await self.client.post(
+ "/interval/sms/johndoe/",
+ content=json.dumps(request_params),
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Interval error", response.json()["message"])
+
+ async def test_metrics_exception(self):
+ with patch.object(self.queue, "metrics", side_effect=Exception("Metrics error")):
+ response = await self.client.get("/metrics/")
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Metrics error", response.json()["message"])
+
+ async def test_metrics_with_queue_type_exception(self):
+ with patch.object(self.queue, "metrics", side_effect=Exception("Metrics error")):
+ response = await self.client.get("/metrics/sms/")
+ self.assertEqual(response.status_code, 400)
+
+ async def test_clear_queue_malformed_json(self):
+ response = await self.client.request(
+ "DELETE",
+ "/deletequeue/sms/johndoe/",
+ content=b"invalid json",
+ headers={"Content-Type": "application/json"},
+ )
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+
+ async def test_clear_queue_exception(self):
+ with patch.object(
+ self.queue, "clear_queue", side_effect=Exception("Clear error")
+ ):
+ response = await self.client.delete("/deletequeue/sms/johndoe/")
+ self.assertEqual(response.status_code, 400)
+ self.assertEqual(response.json()["status"], "failure")
+ self.assertIn("Clear error", response.json()["message"])
+
+ async def test_deep_status_exception(self):
+ with patch.object(
+ self.queue, "deep_status", side_effect=Exception("Status check failed")
+ ):
+ with self.assertRaises(Exception):
+            await self.client.get("/deepstatus/")
diff --git a/tests/test_config_settings.py b/tests/test_config_settings.py
new file mode 100644
index 0000000..b7186c8
--- /dev/null
+++ b/tests/test_config_settings.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
+
+import unittest
+
+from pydantic import ValidationError
+
+from fq_server import QueueServerSettings
+from tests.support import build_config_from_env
+
+
+class TestConfigSettings(unittest.TestCase):
+ """Tests for configuration validation."""
+
+ def test_build_config_from_env_defaults(self):
+ config = build_config_from_env({})
+ self.assertEqual(config["fq"]["job_expire_interval"], 1000)
+ self.assertEqual(config["fq"]["job_requeue_interval"], 1000)
+ self.assertEqual(config["fq"]["default_job_requeue_limit"], -1)
+ self.assertTrue(config["fq"]["enable_requeue_script"])
+ self.assertEqual(config["redis"]["host"], "127.0.0.1")
+ self.assertEqual(config["redis"]["port"], 6379)
+ self.assertEqual(config["redis"]["key_prefix"], "fq_server")
+
+ def test_build_config_from_env_overrides(self):
+ config = build_config_from_env(
+ {
+ "JOB_EXPIRE_INTERVAL": "5000",
+ "JOB_REQUEUE_INTERVAL": "6000",
+ "DEFAULT_JOB_REQUEUE_LIMIT": "5",
+ "ENABLE_REQUEUE_SCRIPT": "false",
+ "REDIS_DB": "2",
+ "REDIS_KEY_PREFIX": "custom_prefix",
+ "REDIS_CONN_TYPE": "unix_sock",
+ "REDIS_HOST": "redis.internal",
+ "REDIS_PORT": "6380",
+ "REDIS_PASSWORD": "secret",
+ "REDIS_CLUSTERED": "true",
+ "REDIS_UNIX_SOCKET_PATH": "/var/run/redis.sock",
+ }
+ )
+ self.assertEqual(config["fq"]["job_expire_interval"], 5000)
+ self.assertEqual(config["fq"]["job_requeue_interval"], 6000)
+ self.assertEqual(config["fq"]["default_job_requeue_limit"], 5)
+ self.assertFalse(config["fq"]["enable_requeue_script"])
+ self.assertEqual(config["redis"]["db"], 2)
+ self.assertEqual(config["redis"]["key_prefix"], "custom_prefix")
+ self.assertEqual(config["redis"]["conn_type"], "unix_sock")
+ self.assertEqual(config["redis"]["host"], "redis.internal")
+ self.assertEqual(config["redis"]["port"], 6380)
+ self.assertEqual(config["redis"]["password"], "secret")
+ self.assertTrue(config["redis"]["clustered"])
+ self.assertEqual(config["redis"]["unix_socket_path"], "/var/run/redis.sock")
+
+ def test_build_config_from_env_rejects_invalid_values(self):
+ with self.assertRaisesRegex(ValueError, "REDIS_PORT"):
+ build_config_from_env({"REDIS_PORT": "redis"})
+
+ with self.assertRaisesRegex(ValueError, "ENABLE_REQUEUE_SCRIPT"):
+ build_config_from_env({"ENABLE_REQUEUE_SCRIPT": "yes"})
+
+ with self.assertRaisesRegex(ValueError, "REDIS_CLUSTERED"):
+ build_config_from_env({"REDIS_CLUSTERED": "1"})
+
+ def test_queue_server_settings_log_level_override(self):
+ settings = QueueServerSettings.from_env({"LOG_LEVEL": "debug"})
+ self.assertEqual(settings.log_level, "DEBUG")
+
+ def test_queue_server_settings_rejects_invalid_log_level(self):
+ with self.assertRaisesRegex(ValidationError, "LOG_LEVEL"):
+            QueueServerSettings.from_env({"LOG_LEVEL": "verbose"})
diff --git a/tests/test_routes.py b/tests/test_routes.py
deleted file mode 100644
index fa449db..0000000
--- a/tests/test_routes.py
+++ /dev/null
@@ -1,756 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
-
-import asyncio
-import unittest
-
-import ujson as json
-from httpx import AsyncClient, ASGITransport
-from pydantic import ValidationError
-from starlette.types import ASGIApp
-from unittest.mock import AsyncMock, patch
-
-from fq_server import FQConfig, QueueServerSettings, build_config_from_env, setup_server
-
-
-def build_test_config() -> FQConfig:
- return {
- "fq": {
- "job_expire_interval": 1000,
- "job_requeue_interval": 1000,
- "default_job_requeue_limit": -1,
- "enable_requeue_script": True,
- },
- "redis": {
- "db": 0,
- "key_prefix": "fq_server_test",
- "conn_type": "tcp_sock",
- "host": "127.0.0.1",
- "port": 6379,
- "password": "",
- "clustered": False,
- "unix_socket_path": "/tmp/redis.sock",
- },
- }
-
-
-class FQConfigTestCase(unittest.TestCase):
- """Tests for configuration validation."""
-
- def test_build_config_from_env_defaults(self):
- config = build_config_from_env({})
- self.assertEqual(config["fq"]["job_expire_interval"], 1000)
- self.assertEqual(config["fq"]["job_requeue_interval"], 1000)
- self.assertEqual(config["fq"]["default_job_requeue_limit"], -1)
- self.assertTrue(config["fq"]["enable_requeue_script"])
- self.assertEqual(config["redis"]["host"], "127.0.0.1")
- self.assertEqual(config["redis"]["port"], 6379)
- self.assertEqual(config["redis"]["key_prefix"], "fq_server")
-
- def test_build_config_from_env_overrides(self):
- config = build_config_from_env(
- {
- "JOB_EXPIRE_INTERVAL": "5000",
- "JOB_REQUEUE_INTERVAL": "6000",
- "DEFAULT_JOB_REQUEUE_LIMIT": "5",
- "ENABLE_REQUEUE_SCRIPT": "false",
- "REDIS_DB": "2",
- "REDIS_KEY_PREFIX": "custom_prefix",
- "REDIS_CONN_TYPE": "unix_sock",
- "REDIS_HOST": "redis.internal",
- "REDIS_PORT": "6380",
- "REDIS_PASSWORD": "secret",
- "REDIS_CLUSTERED": "true",
- "REDIS_UNIX_SOCKET_PATH": "/var/run/redis.sock",
- }
- )
- self.assertEqual(config["fq"]["job_expire_interval"], 5000)
- self.assertEqual(config["fq"]["job_requeue_interval"], 6000)
- self.assertEqual(config["fq"]["default_job_requeue_limit"], 5)
- self.assertFalse(config["fq"]["enable_requeue_script"])
- self.assertEqual(config["redis"]["db"], 2)
- self.assertEqual(config["redis"]["key_prefix"], "custom_prefix")
- self.assertEqual(config["redis"]["conn_type"], "unix_sock")
- self.assertEqual(config["redis"]["host"], "redis.internal")
- self.assertEqual(config["redis"]["port"], 6380)
- self.assertEqual(config["redis"]["password"], "secret")
- self.assertTrue(config["redis"]["clustered"])
- self.assertEqual(
- config["redis"]["unix_socket_path"], "/var/run/redis.sock"
- )
-
- def test_build_config_from_env_rejects_invalid_values(self):
- with self.assertRaisesRegex(ValueError, "REDIS_PORT"):
- build_config_from_env({"REDIS_PORT": "redis"})
-
- with self.assertRaisesRegex(ValueError, "ENABLE_REQUEUE_SCRIPT"):
- build_config_from_env({"ENABLE_REQUEUE_SCRIPT": "yes"})
-
- with self.assertRaisesRegex(ValueError, "REDIS_CLUSTERED"):
- build_config_from_env({"REDIS_CLUSTERED": "1"})
-
- def test_queue_server_settings_log_level_override(self):
- settings = QueueServerSettings.from_env({"LOG_LEVEL": "debug"})
- self.assertEqual(settings.log_level, "DEBUG")
-
- def test_queue_server_settings_rejects_invalid_log_level(self):
- with self.assertRaisesRegex(ValidationError, "LOG_LEVEL"):
- QueueServerSettings.from_env({"LOG_LEVEL": "verbose"})
-
-
-class FQServerTestCase(unittest.IsolatedAsyncioTestCase):
- async def asyncSetUp(self):
- # build server and Starlette app
- server = setup_server(build_test_config())
- self.server = server
- self.app: ASGIApp = server.app
-
- # queue + redis client (async)
- self.queue = server.queue
- await self.queue.initialize() # important: same loop as tests
- self.r = self.queue._r
-
- # flush redis before each test
- await self.r.flushdb()
-
- # async HTTP client bound to this ASGI app & this loop
- transport = ASGITransport(app=self.app)
- self.client = AsyncClient(transport=transport, base_url="http://test")
-
- async def asyncTearDown(self):
- # flush redis after each test
- await self.r.flushdb()
- await self.client.aclose()
- await self.queue.close()
-
- async def test_root(self):
- response = await self.client.get("/")
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.json(), {"message": "Hello, FQS!"})
-
- async def test_enqueue(self):
- request_params = {
- "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
- "payload": {"message": "Hello, world."},
- "interval": 1000,
- }
- response = await self.client.post(
- "/enqueue/sms/johdoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 201)
- data = response.json()
- self.assertEqual(data["status"], "queued")
-
- request_params = {
- "job_id": "ef022088-d2b3-44ad-bf1d-a93d6d93b82c",
- "payload": {"message": "Hello, world."},
- "interval": 1000,
- "requeue_limit": 10,
- }
- response = await self.client.post(
- "/enqueue/sms/johdoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 201)
- data = response.json()
- self.assertEqual(data["status"], "queued")
-
- async def test_dequeue_fail(self):
- response = await self.client.get("/dequeue/")
- # your Starlette handler returns 400 or 404 – pick what your code actually does
- self.assertEqual(response.status_code, 404)
- self.assertEqual(response.json()["status"], "failure")
-
- response = await self.client.get("/dequeue/sms/")
- self.assertEqual(response.status_code, 404)
- self.assertEqual(response.json()["status"], "failure")
-
- async def test_dequeue(self):
- # enqueue a job
- request_params = {
- "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
- "payload": {"message": "Hello, world."},
- "interval": 1000,
- }
- await self.client.post(
- "/enqueue/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
-
- # dequeue a job
- response = await self.client.get("/dequeue/sms/")
- self.assertEqual(response.status_code, 200)
- data = response.json()
- self.assertEqual(data["status"], "success")
- self.assertEqual(data["job_id"], "ef022088-d2b3-44ad-bf0d-a93d6d93b82c")
- self.assertEqual(data["payload"], {"message": "Hello, world."})
- self.assertEqual(data["queue_id"], "johndoe")
- self.assertEqual(data["requeues_remaining"], -1) # from config
-
- async def test_finish_fail(self):
- response = await self.client.post(
- "/finish/sms/johndoe/ef022088-d2b3-44ad-bf0d-a93d6d93b82c/"
- )
- self.assertEqual(response.status_code, 404)
- self.assertEqual(response.json()["status"], "failure")
-
- async def test_finish(self):
- # enqueue a job
- request_params = {
- "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
- "payload": {"message": "Hello, world."},
- "interval": 1000,
- }
- await self.client.post(
- "/enqueue/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
-
- # dequeue a job
- await self.client.get("/dequeue/sms/")
-
- # mark it as finished
- response = await self.client.post(
- "/finish/sms/johndoe/ef022088-d2b3-44ad-bf0d-a93d6d93b82c/"
- )
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.json()["status"], "success")
-
- async def test_interval(self):
- # enqueue a job
- request_params = {
- "job_id": "ef022088-d2b3-44ad-bf0d-a93d6d93b82c",
- "payload": {"message": "Hello, world."},
- "interval": 1000,
- }
- await self.client.post(
- "/enqueue/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
-
- # change the interval
- request_params = {"interval": 5000}
- response = await self.client.post(
- "/interval/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.json()["status"], "success")
-
- async def test_interval_fail(self):
- request_params = {"interval": 5000}
- response = await self.client.post(
- "/interval/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.json()["status"], "failure")
-
- async def test_metrics(self):
- response = await self.client.get("/metrics/")
- data = response.json()
- self.assertEqual(data["status"], "success")
- self.assertIn("queue_types", data)
- self.assertIn("enqueue_counts", data)
- self.assertIn("dequeue_counts", data)
-
- async def test_metrics_with_queue_type(self):
- response = await self.client.get("/metrics/sms/")
- data = response.json()
- self.assertEqual(data["status"], "success")
- self.assertIn("queue_ids", data)
-
- async def test_metrics_with_queue_type_and_queue_id(self):
- response = await self.client.get("/metrics/sms/johndoe/")
- data = response.json()
- self.assertEqual(data["status"], "success")
- self.assertIn("queue_length", data)
- self.assertIn("enqueue_counts", data)
- self.assertIn("dequeue_counts", data)
-
- # ===== NEW TESTS FOR UNCOVERED EXCEPTION PATHS =====
-
- async def test_enqueue_malformed_json(self):
- """Test enqueue with malformed JSON body."""
- response = await self.client.post(
- "/enqueue/sms/johndoe/",
- content=b"invalid json {",
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("message", response.json())
-
- async def test_enqueue_empty_body(self):
- """Test enqueue with empty body - fails because required fields missing."""
- response = await self.client.post(
- "/enqueue/sms/johndoe/",
- content=b"",
- headers={"Content-Type": "application/json"},
- )
- # Empty body becomes {}, but FQ requires payload, interval, job_id
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
-
- async def test_enqueue_with_max_queued_length_not_exceeded(self):
- """Test enqueue with max_queued_length when queue is below limit."""
- request_params = {
- "job_id": "job-1",
- "payload": {"message": "Test 1", "max_queued_length": 10},
- "interval": 1000,
- }
- response = await self.client.post(
- "/enqueue/sms/test_queue_1/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 201)
- self.assertEqual(response.json()["status"], "queued")
- self.assertEqual(response.json()["current_queue_length"], 0)
-
- async def test_enqueue_with_max_queued_length_exceeded(self):
- """Test enqueue when max_queued_length is exceeded (429 response)."""
- # First, enqueue some jobs to fill queue
- for i in range(3):
- request_params = {
- "job_id": f"job-{i}",
- "payload": {"message": f"Test {i}"},
- "interval": 1000,
- }
- await self.client.post(
- "/enqueue/sms/test_queue_2/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
-
- # Now try to enqueue with max_queued_length=2 (should fail with 429)
- request_params = {
- "job_id": "job-overflow",
- "payload": {"message": "Overflow", "max_queued_length": 2},
- "interval": 1000,
- }
- response = await self.client.post(
- "/enqueue/sms/test_queue_2/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 429)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Max queue length reached", response.json()["message"])
- self.assertGreaterEqual(response.json()["current_queue_length"], 2)
-
- async def test_enqueue_get_queue_length_exception(self):
- """Test enqueue when get_queue_length() raises an exception."""
- request_params = {
- "job_id": "job-error",
- "payload": {"message": "Error test", "max_queued_length": 5},
- "interval": 1000,
- }
-
- # Mock get_queue_length to fail, but let enqueue succeed normally
- with patch.object(self.queue, "get_queue_length", side_effect=Exception("Redis error")):
- response = await self.client.post(
- "/enqueue/sms/test_queue_3/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- # When get_queue_length fails, enqueue still succeeds with current_queue_length=0
- self.assertEqual(response.status_code, 201)
- self.assertEqual(response.json()["status"], "queued")
- self.assertEqual(response.json()["current_queue_length"], 0)
-
- async def test_enqueue_queue_enqueue_exception(self):
- """Test enqueue when queue.enqueue() raises an exception."""
- request_params = {
- "job_id": "job-queue-error",
- "payload": {"message": "Queue error"},
- "interval": 1000,
- }
-
- with patch.object(self.queue, "enqueue", side_effect=Exception("Queue error")):
- response = await self.client.post(
- "/enqueue/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Queue error", response.json()["message"])
-
- async def test_dequeue_get_queue_length_exception(self):
- """Test dequeue when get_queue_length() raises an exception."""
- # First enqueue a job
- request_params = {
- "job_id": "job-for-dequeue",
- "payload": {"message": "Dequeue test"},
- "interval": 1000,
- }
- await self.client.post(
- "/enqueue/sms/dequeue_error_queue/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
-
- # Now dequeue but mock get_queue_length to fail
- with patch.object(self.queue, "get_queue_length", side_effect=Exception("Redis error")):
- response = await self.client.get("/dequeue/sms/")
- # Should still return 200 but without current_queue_length
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.json()["status"], "success")
- self.assertEqual(response.json()["current_queue_length"], 0)
-
- async def test_dequeue_exception_general(self):
- """Test dequeue when queue.dequeue() raises a general exception."""
- with patch.object(self.queue, "dequeue", side_effect=Exception("Dequeue failed")):
- response = await self.client.get("/dequeue/sms/")
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Dequeue failed", response.json()["message"])
-
- async def test_finish_exception(self):
- """Test finish when queue.finish() raises an exception."""
- with patch.object(self.queue, "finish", side_effect=Exception("Finish error")):
- response = await self.client.post(
- "/finish/sms/johndoe/job-123/"
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Finish error", response.json()["message"])
-
- async def test_interval_malformed_json(self):
- """Test interval with malformed JSON body."""
- response = await self.client.post(
- "/interval/sms/johndoe/",
- content=b"invalid json",
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
-
- async def test_interval_missing_interval_key(self):
- """Test interval request without 'interval' key."""
- request_params = {"some_other_key": 5000}
- response = await self.client.post(
- "/interval/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
-
- async def test_interval_exception(self):
- """Test interval when queue.interval() raises an exception."""
- request_params = {"interval": 5000}
-
- with patch.object(self.queue, "interval", side_effect=Exception("Interval error")):
- response = await self.client.post(
- "/interval/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Interval error", response.json()["message"])
-
- async def test_metrics_exception(self):
- """Test metrics when queue.metrics() raises an exception."""
- with patch.object(self.queue, "metrics", side_effect=Exception("Metrics error")):
- response = await self.client.get("/metrics/")
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Metrics error", response.json()["message"])
-
- async def test_metrics_with_queue_type_exception(self):
- """Test metrics with queue_type when exception occurs."""
- with patch.object(self.queue, "metrics", side_effect=Exception("Metrics error")):
- response = await self.client.get("/metrics/sms/")
- self.assertEqual(response.status_code, 400)
-
- async def test_clear_queue_malformed_json(self):
- """Test clear_queue with malformed JSON body."""
- response = await self.client.request(
- "DELETE",
- "/deletequeue/sms/johndoe/",
- content=b"invalid json",
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
-
- async def test_clear_queue_exception(self):
- """Test clear_queue when queue.clear_queue() raises an exception."""
- with patch.object(self.queue, "clear_queue", side_effect=Exception("Clear error")):
- response = await self.client.delete("/deletequeue/sms/johndoe/")
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Clear error", response.json()["message"])
-
- async def test_enqueue_max_length_with_queue_exception(self):
- """Test enqueue max_queued_length when enqueue itself throws."""
- request_params = {
- "job_id": "job-with-max",
- "payload": {"message": "Test", "max_queued_length": 10},
- "interval": 1000,
- }
-
- with patch.object(self.queue, "enqueue", side_effect=Exception("Enqueue failed")):
- response = await self.client.post(
- "/enqueue/sms/johndoe/",
- content=json.dumps(request_params),
- headers={"Content-Type": "application/json"},
- )
- self.assertEqual(response.status_code, 400)
- self.assertEqual(response.json()["status"], "failure")
- self.assertIn("Enqueue failed", response.json()["message"])
-
- async def test_deep_status_exception(self):
- """Test deep_status when queue.deep_status() raises an exception."""
- with patch.object(self.queue, "deep_status", side_effect=Exception("Status check failed")):
- with self.assertRaises(Exception):
- await self.client.get("/deepstatus/")
-
- async def test_deep_status_success(self):
- """Test deep_status successful response."""
- response = await self.client.get("/deepstatus/")
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.json()["status"], "success")
-
- # ===== TESTS FOR REQUEUE AND LIFESPAN =====
-
- async def test_requeue_exception_handling(self):
- """Test requeue loop catches and continues on exception."""
- server = self.server
-
- # Mock the queue.requeue to raise an exception
- with patch.object(server.queue, "requeue", side_effect=Exception("Requeue failed")):
- # Create a requeue coroutine and run it briefly
- requeue_task = asyncio.create_task(server.requeue())
-
- # Let it run for a short moment
- await asyncio.sleep(0.1)
-
- # Cancel the task
- requeue_task.cancel()
-
- with self.assertRaises(asyncio.CancelledError):
- await requeue_task
-
- async def test_requeue_with_lock_disabled(self):
- """Test requeue_with_lock when requeue is disabled."""
- server = self.server
-
- server.config["fq"]["enable_requeue_script"] = False
- requeue_task = asyncio.create_task(server.requeue_with_lock())
-
- # Should return immediately (task completes)
- await asyncio.sleep(0.1)
-
- # Task should be done (returned, not cancelled)
- self.assertTrue(requeue_task.done())
- server.config["fq"]["enable_requeue_script"] = True
-
- async def test_requeue_with_lock_lock_error(self):
- """Test requeue_with_lock when lock acquisition fails with LockError."""
- from redis.exceptions import LockError
- server = self.server
-
- # Create an async context manager that raises LockError on enter
- class FailingLock:
- async def __aenter__(self):
- raise LockError("Failed to acquire lock")
-
- async def __aexit__(self, *args):
- pass
-
- # Mock redis_client with a lock method that returns the failing lock
- mock_redis = AsyncMock()
- # Make lock a regular (non-async) function that returns the context manager
- mock_redis.lock = lambda *args, **kwargs: FailingLock()
-
- with patch.object(server.queue, "redis_client", return_value=mock_redis):
- requeue_task = asyncio.create_task(server.requeue_with_lock())
-
- # Let it try to acquire lock and handle LockError (sleeps and continues)
- await asyncio.sleep(0.15)
-
- # Cancel it
- requeue_task.cancel()
-
- try:
- await requeue_task
- except asyncio.CancelledError:
- pass # Expected - loop continues after LockError, then cancelled
-
- async def test_requeue_with_lock_inner_exception(self):
- """Test requeue_with_lock when requeue() inside lock context fails."""
- server = self.server
-
- # First request succeeds to get past initial try, second fails
- call_count = [0]
-
- async def mock_requeue_with_failure():
- call_count[0] += 1
- if call_count[0] >= 1: # Fail on first and subsequent calls
- raise Exception("Inner requeue error")
- return None
-
- with patch.object(server.queue, "requeue", side_effect=mock_requeue_with_failure):
- requeue_task = asyncio.create_task(server.requeue_with_lock())
-
- # Let it run enough times to hit the exception in lock
- await asyncio.sleep(0.15)
- requeue_task.cancel()
-
- try:
- await requeue_task
- except asyncio.CancelledError:
- pass # Expected - task was cancelled after executing exception code path
-
- async def test_requeue_with_lock_missing_redis_client(self):
- """Test requeue_with_lock exits cleanly when the Redis client is unavailable."""
- server = self.server
- server.config["fq"]["job_requeue_interval"] = 1
-
- with patch.object(server.queue, "redis_client", return_value=None):
- requeue_task = asyncio.create_task(server.requeue_with_lock())
- await asyncio.sleep(0.05)
-
- self.assertTrue(requeue_task.done())
- self.assertIsNone(requeue_task.exception())
-
-
-class FQServerRequeueLoopTestCase(unittest.IsolatedAsyncioTestCase):
- """Focused tests for requeue loop error handling that do not need Redis."""
-
- async def test_requeue_with_lock_redis_error(self):
- """Test requeue_with_lock swallows Redis errors raised by redis.lock()."""
- from redis.exceptions import RedisError
-
- server = setup_server(build_test_config())
-
- def failing_lock(*args, **kwargs):
- raise RedisError("Redis lock creation failed")
-
- mock_redis = AsyncMock()
- mock_redis.lock = failing_lock
-
- with patch.object(server.queue, "redis_client", return_value=mock_redis):
- with self.assertLogs("fq_server.server", level="ERROR") as captured:
- requeue_task = asyncio.create_task(server.requeue_with_lock())
- await asyncio.sleep(0.05)
-
- self.assertFalse(requeue_task.done())
-
- requeue_task.cancel()
- with self.assertRaises(asyncio.CancelledError):
- await requeue_task
-
- self.assertTrue(
- any(
- "Transient Redis error in requeue loop while managing lock" in message
- for message in captured.output
- )
- )
-
- async def test_requeue_with_lock_lock_context_timeout(self):
- """Test requeue_with_lock swallows Redis timeout errors from lock context entry."""
- from redis.exceptions import TimeoutError as RedisTimeoutError
-
- server = setup_server(build_test_config())
-
- class FailingLock:
- async def __aenter__(self):
- raise RedisTimeoutError("Timed out entering lock context")
-
- async def __aexit__(self, *args):
- pass
-
- mock_redis = AsyncMock()
- mock_redis.lock = lambda *args, **kwargs: FailingLock()
-
- with patch.object(server.queue, "redis_client", return_value=mock_redis):
- with self.assertLogs("fq_server.server", level="ERROR") as captured:
- requeue_task = asyncio.create_task(server.requeue_with_lock())
- await asyncio.sleep(0.05)
-
- self.assertFalse(requeue_task.done())
-
- requeue_task.cancel()
- with self.assertRaises(asyncio.CancelledError):
- await requeue_task
-
- self.assertTrue(
- any(
- "Transient Redis error in requeue loop while managing lock" in message
- for message in captured.output
- )
- )
-
-
-class FQServerLifespanTestCase(unittest.IsolatedAsyncioTestCase):
- """Test FQServer lifespan (startup/shutdown)."""
-
- async def test_lifespan_startup_shutdown(self):
- """Test lifespan startup and graceful shutdown."""
- server = setup_server(build_test_config())
-
- # Simulate startup
- app = server.app
- lifespan_cm = server._lifespan(app)
-
- # Enter lifespan (startup)
- await lifespan_cm.__aenter__()
-
- # Check that requeue task was created
- self.assertIsNotNone(server._requeue_task)
- self.assertFalse(server._requeue_task.done())
-
- # Exit lifespan (shutdown)
- try:
- await lifespan_cm.__aexit__(None, None, None)
- except asyncio.CancelledError:
- # Expected if the requeue task is cancelled during shutdown
- pass
-
- # Task should be cancelled or done
- await asyncio.sleep(0.05)
- self.assertTrue(server._requeue_task.done() or server._requeue_task.cancelled())
-
- async def test_lifespan_initializes_queue(self):
- """Test that lifespan calls queue.initialize()."""
- server = setup_server(build_test_config())
-
- # Stub out both queue.initialize and the background requeue task to make
- # startup/shutdown deterministic and avoid hitting an uninitialized queue.
- with patch.object(
- server.queue, "initialize", new_callable=AsyncMock
- ) as mock_init, patch.object(
- server.queue, "close", new_callable=AsyncMock
- ) as mock_close, patch.object(
- server, "requeue_with_lock", new_callable=AsyncMock
- ):
- lifespan_cm = server._lifespan(server.app)
- await lifespan_cm.__aenter__()
-
- mock_init.assert_called_once()
-
- # Cleanup
- if server._requeue_task is not None and not server._requeue_task.done():
- server._requeue_task.cancel()
- try:
- await lifespan_cm.__aexit__(None, None, None)
- except asyncio.CancelledError:
- # Expected if the requeue task is cancelled during shutdown
- pass
- mock_close.assert_called_once()
-
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tests/test_server_requeue_lifespan.py b/tests/test_server_requeue_lifespan.py
new file mode 100644
index 0000000..91d7982
--- /dev/null
+++ b/tests/test_server_requeue_lifespan.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2025 Flowdacity Development Team. See LICENSE.txt for details.
+
+import asyncio
+import unittest
+from unittest.mock import AsyncMock, patch
+
+from fq_server import setup_server
+from tests.support import FQServerAsyncTestCase, build_test_config
+
+
+class TestServerRequeue(FQServerAsyncTestCase):
+ async def test_requeue_exception_handling(self):
+ server = self.server
+
+ with patch.object(server.queue, "requeue", side_effect=Exception("Requeue failed")):
+ requeue_task = asyncio.create_task(server.requeue())
+ await asyncio.sleep(0.1)
+
+ requeue_task.cancel()
+
+ with self.assertRaises(asyncio.CancelledError):
+ await requeue_task
+
+ async def test_requeue_with_lock_disabled(self):
+ server = self.server
+
+ server.config["fq"]["enable_requeue_script"] = False
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+
+ await asyncio.sleep(0.1)
+
+ self.assertTrue(requeue_task.done())
+ server.config["fq"]["enable_requeue_script"] = True
+
+ async def test_requeue_with_lock_lock_error(self):
+ from redis.exceptions import LockError
+
+ server = self.server
+
+ class FailingLock:
+ async def __aenter__(self):
+ raise LockError("Failed to acquire lock")
+
+ async def __aexit__(self, *args):
+ return None
+
+ mock_redis = AsyncMock()
+ mock_redis.lock = lambda *args, **kwargs: FailingLock()
+
+ with patch.object(server.queue, "redis_client", return_value=mock_redis):
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+ await asyncio.sleep(0.15)
+
+ requeue_task.cancel()
+
+ try:
+ await requeue_task
+ except asyncio.CancelledError:
+ pass
+
+ async def test_requeue_with_lock_inner_exception(self):
+ server = self.server
+
+ call_count = [0]
+
+ async def mock_requeue_with_failure():
+ call_count[0] += 1
+ if call_count[0] >= 1:
+ raise Exception("Inner requeue error")
+ return None
+
+ with patch.object(server.queue, "requeue", side_effect=mock_requeue_with_failure):
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+
+ await asyncio.sleep(0.15)
+ requeue_task.cancel()
+
+ try:
+ await requeue_task
+ except asyncio.CancelledError:
+ pass
+
+ async def test_requeue_with_lock_missing_redis_client(self):
+ server = self.server
+ server.config["fq"]["job_requeue_interval"] = 1
+
+ with patch.object(server.queue, "redis_client", return_value=None):
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+ await asyncio.sleep(0.05)
+
+ self.assertTrue(requeue_task.done())
+ self.assertIsNone(requeue_task.exception())
+
+
+class TestServerRequeueRedisErrors(unittest.IsolatedAsyncioTestCase):
+ """Focused tests for requeue loop error handling that do not need Redis."""
+
+ async def test_requeue_with_lock_redis_error(self):
+ from redis.exceptions import RedisError
+
+ server = setup_server(build_test_config())
+
+ def failing_lock(*args, **kwargs):
+ raise RedisError("Redis lock creation failed")
+
+ mock_redis = AsyncMock()
+ mock_redis.lock = failing_lock
+
+ with patch.object(server.queue, "redis_client", return_value=mock_redis):
+ with self.assertLogs("fq_server.server", level="ERROR") as captured:
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+ await asyncio.sleep(0.05)
+
+ self.assertFalse(requeue_task.done())
+
+ requeue_task.cancel()
+ with self.assertRaises(asyncio.CancelledError):
+ await requeue_task
+
+ self.assertTrue(
+ any(
+ "Transient Redis error in requeue loop while managing lock" in message
+ for message in captured.output
+ )
+ )
+
+ async def test_requeue_with_lock_lock_context_timeout(self):
+ from redis.exceptions import TimeoutError as RedisTimeoutError
+
+ server = setup_server(build_test_config())
+
+ class FailingLock:
+ async def __aenter__(self):
+ raise RedisTimeoutError("Timed out entering lock context")
+
+ async def __aexit__(self, *args):
+ return None
+
+ mock_redis = AsyncMock()
+ mock_redis.lock = lambda *args, **kwargs: FailingLock()
+
+ with patch.object(server.queue, "redis_client", return_value=mock_redis):
+ with self.assertLogs("fq_server.server", level="ERROR") as captured:
+ requeue_task = asyncio.create_task(server.requeue_with_lock())
+ await asyncio.sleep(0.05)
+
+ self.assertFalse(requeue_task.done())
+
+ requeue_task.cancel()
+ with self.assertRaises(asyncio.CancelledError):
+ await requeue_task
+
+ self.assertTrue(
+ any(
+ "Transient Redis error in requeue loop while managing lock" in message
+ for message in captured.output
+ )
+ )
+
+
+class TestServerLifespan(unittest.IsolatedAsyncioTestCase):
+ """Test FQServer lifespan (startup/shutdown)."""
+
+ async def test_lifespan_startup_shutdown(self):
+ server = setup_server(build_test_config())
+
+ app = server.app
+ lifespan_cm = server._lifespan(app)
+
+ await lifespan_cm.__aenter__()
+
+ self.assertIsNotNone(server._requeue_task)
+ self.assertFalse(server._requeue_task.done())
+
+ try:
+ await lifespan_cm.__aexit__(None, None, None)
+ except asyncio.CancelledError:
+ pass
+
+ await asyncio.sleep(0.05)
+ self.assertTrue(server._requeue_task.done() or server._requeue_task.cancelled())
+
+ async def test_lifespan_initializes_queue(self):
+ server = setup_server(build_test_config())
+
+ with patch.object(
+ server.queue, "initialize", new_callable=AsyncMock
+ ) as mock_init, patch.object(
+ server.queue, "close", new_callable=AsyncMock
+ ) as mock_close, patch.object(
+ server, "requeue_with_lock", new_callable=AsyncMock
+ ):
+ lifespan_cm = server._lifespan(server.app)
+ await lifespan_cm.__aenter__()
+
+ mock_init.assert_called_once()
+
+ if server._requeue_task is not None and not server._requeue_task.done():
+ server._requeue_task.cancel()
+ try:
+ await lifespan_cm.__aexit__(None, None, None)
+ except asyncio.CancelledError:
+ pass
+    mock_close.assert_called_once()