From ed55cf6e0c057c3673567c379c95f04cb56549a0 Mon Sep 17 00:00:00 2001 From: "google-labs-jules[bot]" <161369871+google-labs-jules[bot]@users.noreply.github.com> Date: Sat, 11 Apr 2026 11:15:59 +0000 Subject: [PATCH 1/8] Add GraphQL template with AppSync and DynamoDB - Implement AppSync GraphQL resolver using AWS Lambda Powertools - Create GraphQL schema, models, and settings - Implement infrastructure stack for AppSync and DynamoDB - Update repository pattern to include list_items method - Add comprehensive unit tests for the new template - Update project configuration (Makefile, infra/app.py, deploy workflow) - Add documentation for the GraphQL template - Correct Python version requirement in pyproject.toml --- .github/workflows/deploy.yml | 3 +- Makefile | 5 +- docs/README.md | 3 ++ docs/template/graphql.md | 68 ++++++++++++++++++++++++ docs/template/index.md | 3 +- infra/app.py | 4 +- infra/stacks/graphql.py | 56 ++++++++++++++++++++ poetry.lock | 6 +-- pyproject.toml | 2 +- templates/graphql/__init__.py | 0 templates/graphql/handler.py | 91 ++++++++++++++++++++++++++++++++ templates/graphql/models.py | 15 ++++++ templates/graphql/schema.graphql | 18 +++++++ templates/graphql/settings.py | 10 ++++ templates/repository.py | 8 +++ tests/graphql/__init__.py | 0 tests/graphql/test_handler.py | 74 ++++++++++++++++++++++++++ tests/graphql/test_models.py | 14 +++++ tests/graphql/test_settings.py | 17 ++++++ 19 files changed, 388 insertions(+), 9 deletions(-) create mode 100644 docs/template/graphql.md create mode 100644 infra/stacks/graphql.py create mode 100644 templates/graphql/__init__.py create mode 100644 templates/graphql/handler.py create mode 100644 templates/graphql/models.py create mode 100644 templates/graphql/schema.graphql create mode 100644 templates/graphql/settings.py create mode 100644 tests/graphql/__init__.py create mode 100644 tests/graphql/test_handler.py create mode 100644 tests/graphql/test_models.py create mode 100644 
tests/graphql/test_settings.py diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index b39b347..bb4bfa2 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -21,6 +21,7 @@ on: - agent - api - eventbridge + - graphql - s3 - stream - sqs @@ -40,7 +41,7 @@ jobs: strategy: max-parallel: 1 matrix: - stack: [api, stream, s3, eventbridge-api-caller, sqs] + stack: [agent, api, eventbridge, graphql, s3, sqs, stream] steps: - uses: actions/checkout@v6 - name: Setup Python diff --git a/Makefile b/Makefile index ad7bc79..996bec3 100644 --- a/Makefile +++ b/Makefile @@ -69,20 +69,21 @@ local: # Serve documentation on a local server STACK_MAP_api = ApiGatewayDynamodbStack STACK_MAP_stream = DynamodbStreamStack STACK_MAP_eventbridge = EventBridgeApiCallerStack +STACK_MAP_graphql = AppSyncDynamodbStack STACK_MAP_s3 = S3SqsStack STACK_MAP_sqs = SqsStack CDK_STACK = \$(STACK_MAP_\$(STACK)) .PHONY: deploy deploy: # Deploy an CDK stack - @[ -n "$(STACK)" ] || { echo "Usage: make deploy STACK="; exit 1; } + @[ -n "$(STACK)" ] || { echo "Usage: make deploy STACK="; exit 1; } @[ -n "\$(CDK_STACK)" ] || { echo "Error: unknown stack '\$(STACK)'"; exit 1; } STACK=\$(STACK) cdk deploy --app "python infra/app.py" --require-approval never \$(CDK_STACK) \ \$(if \$(AWS_PROFILE),--profile \$(AWS_PROFILE),) .PHONY: destroy destroy: # Destroy a deployed CDK stack - @[ -n "$(STACK)" ] || { echo "Usage: make destroy STACK="; exit 1; } + @[ -n "$(STACK)" ] || { echo "Usage: make destroy STACK="; exit 1; } @[ -n "\$(CDK_STACK)" ] || { echo "Error: unknown stack '\$(STACK)'"; exit 1; } STACK=\$(STACK) cdk destroy --force --app "python infra/app.py" \$(CDK_STACK) \ \$(if \$(AWS_PROFILE),--profile \$(AWS_PROFILE),) diff --git a/docs/README.md b/docs/README.md index e23529c..294ec40 100644 --- a/docs/README.md +++ b/docs/README.md @@ -199,6 +199,7 @@ make local │ ├── agent # Bedrock agent function handler │ ├── api # API request handler │ ├── eventbridge # 
EventBridge event handler +│ ├── graphql # AppSync GraphQL resolver │ ├── s3 # S3 event handler │ ├── stream # DynamoDB stream batch processor │ ├── sqs # SQS message handler @@ -210,6 +211,7 @@ make local │ ├── agent.py # Bedrock agent stack │ ├── api.py # ApiGateway stack │ ├── evetbridge.py # EventBridge stack +│ ├── graphql.py # AppSync stack │ ├── s3.py # S3 stack │ ├── stream.py # DynamoDB Stream stack │ └── sqs.py # SQS stack @@ -219,6 +221,7 @@ make local ├── agent # Bedrock agent scenario tests ├── api # API scenario tests ├── eventbridge # EventBridge scenario tests + ├── graphql # GraphQL scenario tests ├── s3 # S3 scenario tests ├── stream # DynamoDB Stream scenario tests └── sqs # SQS scenario tests diff --git a/docs/template/graphql.md b/docs/template/graphql.md new file mode 100644 index 0000000..9082887 --- /dev/null +++ b/docs/template/graphql.md @@ -0,0 +1,68 @@ +# GraphQL API + +This template demonstrates how to build a GraphQL API using AWS AppSync and Lambda. + +## Architecture + +The template sets up: + +1. **AWS AppSync GraphQL API**: The entry point for GraphQL requests. +2. **AWS Lambda function**: Processes GraphQL resolvers for `getItem`, `listItems`, and `createItem`. +3. **Amazon DynamoDB table**: Stores the items. + +## Implementation + +The Lambda function uses [AWS Lambda Powertools AppSyncResolver](https://docs.aws.amazon.com/powertools/python/latest/core/event_handler/appsync/) to route GraphQL requests to Python functions. + +### Schema + +The GraphQL schema is defined in `templates/graphql/schema.graphql`: + +```graphql +schema { + query: Query + mutation: Mutation +} + +type Query { + getItem(id: ID!): Item + listItems: [Item] +} + +type Mutation { + createItem(name: String!): Item +} + +type Item { + id: ID! + name: String! 
+} +``` + +### Handler + +The handler in `templates/graphql/handler.py` implements the resolvers: + +```python +@app.resolver(type_name="Query", field_name="getItem") +def get_item(id: str) -> dict | None: + return repository.get_item(id) + +@app.resolver(type_name="Query", field_name="listItems") +def list_items() -> list[dict]: + return repository.list_items() + +@app.resolver(type_name="Mutation", field_name="createItem") +def create_item(name: str) -> dict: + item = Item(name=name) + repository.put_item(item.model_dump()) + return item.dump() +``` + +## Deployment + +To deploy this template, run: + +```bash +make deploy STACK=graphql +``` diff --git a/docs/template/index.md b/docs/template/index.md index 4639a72..ffbebf4 100644 --- a/docs/template/index.md +++ b/docs/template/index.md @@ -1,5 +1,6 @@ -- [REST API](api.md): Handle REST API requests - [Bedrock Agent](agent.md): Handle Bedrock Agent function-based actions +- [GraphQL API](graphql.md): Handle AppSync GraphQL requests +- [REST API](api.md): Handle REST API requests - [DynamoDB Stream](stream.md): Batch process stream events - [EventBridge](eventbridge.md): Call external API on event - [S3 to SQS](s3.md): Send messages to queue on S3 object changes diff --git a/infra/app.py b/infra/app.py index 6ecb1f3..f166f46 100644 --- a/infra/app.py +++ b/infra/app.py @@ -6,6 +6,7 @@ from infra.stacks.agent import BedrockAgentStack from infra.stacks.api import ApiGatewayDynamodbStack from infra.stacks.eventbridge import EventBridgeApiCallerStack +from infra.stacks.graphql import AppSyncDynamodbStack from infra.stacks.s3 import S3SqsStack from infra.stacks.sqs import SqsLambdaDynamodbStack from infra.stacks.stream import DynamodbStreamStack @@ -14,9 +15,10 @@ "agent": BedrockAgentStack, "api": ApiGatewayDynamodbStack, "eventbridge": EventBridgeApiCallerStack, - "stream": DynamodbStreamStack, + "graphql": AppSyncDynamodbStack, "s3": S3SqsStack, "sqs": SqsLambdaDynamodbStack, + "stream": DynamodbStreamStack, } 
stack_name = os.environ.get("STACK") diff --git a/infra/stacks/graphql.py b/infra/stacks/graphql.py new file mode 100644 index 0000000..42b4606 --- /dev/null +++ b/infra/stacks/graphql.py @@ -0,0 +1,56 @@ +from aws_cdk import Expiration, Stack +from aws_cdk.aws_appsync import ( + ApiKeyConfig, + AuthorizationConfig, + AuthorizationType, + Definition, + GraphqlApi, + SchemaFile, +) +from aws_cdk.aws_dynamodb import Attribute, AttributeType, BillingMode, Table +from aws_cdk.aws_lambda import Code, Function, Runtime +from constructs import Construct + + +class AppSyncDynamodbStack(Stack): + def __init__(self, scope: Construct, construct_id: str, **kwargs: object) -> None: + super().__init__(scope, construct_id, **kwargs) + + table = Table( + self, + "ItemsTable", + partition_key=Attribute(name="id", type=AttributeType.STRING), + billing_mode=BillingMode.PAY_PER_REQUEST, + ) + + function = Function( + self, + "AppSyncDynamodbFunction", + runtime=Runtime.PYTHON_3_13, + handler="templates.graphql.handler.main", + code=Code.from_asset("."), + environment={ + "TABLE_NAME": table.table_name, + "SERVICE_NAME": "appsync-dynamodb", + "METRICS_NAMESPACE": "AppSyncDynamodb", + }, + ) + + table.grant_read_write_data(function) + + api = GraphqlApi( + self, + "AppSyncDynamodbApi", + name="AppSyncDynamodbApi", + definition=Definition.from_schema(SchemaFile.from_asset("templates/graphql/schema.graphql")), + authorization_config=AuthorizationConfig( + default_authorization=AuthorizationType.API_KEY, + api_key_config=ApiKeyConfig(expires=Expiration.after_days(365)), + ), + ) + + data_source = api.add_lambda_data_source("LambdaDataSource", function) + + data_source.create_resolver("GetItemResolver", type_name="Query", field_name="getItem") + data_source.create_resolver("ListItemsResolver", type_name="Query", field_name="listItems") + data_source.create_resolver("CreateItemResolver", type_name="Mutation", field_name="createItem") diff --git a/poetry.lock b/poetry.lock index 
6a182eb..5e9c48a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.3.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand. [[package]] name = "annotated-types" @@ -2071,5 +2071,5 @@ test = ["pytest", "pytest-cov"] [metadata] lock-version = "2.1" -python-versions = "^3.14" -content-hash = "cdd39d3838e830b897af01d491cbf3038e4758eb6be2bdf6a6c4f5e6f680afda" +python-versions = "^3.12" +content-hash = "d6ff6e8b72fdccef0bfb79c89a92bf5bbbeef809ba8a3396ee684fd75b179e92" diff --git a/pyproject.toml b/pyproject.toml index e3accba..01ed4e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ license = "MIT" readme = "docs/README.md" [tool.poetry.dependencies] -python = "^3.14" +python = "^3.12" pydantic = "^2.12.5" pydantic-settings = "^2.13.1" aws-lambda-powertools = "^3.27.0" diff --git a/templates/graphql/__init__.py b/templates/graphql/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/templates/graphql/handler.py b/templates/graphql/handler.py new file mode 100644 index 0000000..143e907 --- /dev/null +++ b/templates/graphql/handler.py @@ -0,0 +1,91 @@ +from aws_lambda_powertools import Logger, Metrics, Tracer +from aws_lambda_powertools.event_handler import AppSyncResolver +from aws_lambda_powertools.logging import correlation_paths +from aws_lambda_powertools.utilities.typing import LambdaContext +from pydantic import ValidationError + +from templates.graphql.models import Item +from templates.graphql.settings import Settings +from templates.repository import Repository + +settings = Settings() + +logger = Logger(service=settings.service_name) +tracer = Tracer(service=settings.service_name) +metrics = Metrics(namespace=settings.metrics_namespace) + +repository = Repository(settings.table_name) +app = AppSyncResolver() + + +def get_repository() -> Repository: + return repository + + 
+@app.resolver(type_name="Query", field_name="getItem") +@tracer.capture_method +def get_item(id: str) -> dict | None: + """Retrieve an item by ID. + + Args: + id: The unique identifier of the item. + + Returns: + The item if found, or None. + """ + try: + return get_repository().get_item(id) + except Exception as exc: + logger.error("DynamoDB get_item failed", exc_info=exc) + raise + + +@app.resolver(type_name="Query", field_name="listItems") +@tracer.capture_method +def list_items() -> list[dict]: + """Retrieve all items. + + Returns: + A list of items. + """ + try: + return get_repository().list_items() + except Exception as exc: + logger.error("DynamoDB list_items failed", exc_info=exc) + raise + + +@app.resolver(type_name="Mutation", field_name="createItem") +@tracer.capture_method +def create_item(name: str) -> dict: + """Create a new item. + + Args: + name: The name of the item. + + Returns: + The created item. + """ + try: + item = Item(name=name) + get_repository().put_item(item.model_dump()) + return item.dump() + except (ValidationError, Exception) as exc: + logger.error("Failed to create item", exc_info=exc) + raise + + +@logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) +@tracer.capture_lambda_handler +@metrics.log_metrics +def main(event: dict, context: LambdaContext) -> dict: + """Lambda entry point for the AppSync resolver. + + Args: + event: The AppSync resolver event. + context: The Lambda execution context. + + Returns: + The resolved data. 
+ """ + return app.resolve(event, context) diff --git a/templates/graphql/models.py b/templates/graphql/models.py new file mode 100644 index 0000000..e28060b --- /dev/null +++ b/templates/graphql/models.py @@ -0,0 +1,15 @@ +from typing import Any +from uuid import uuid4 + +from pydantic import BaseModel, Field +from pydantic.alias_generators import to_camel + + +class Item(BaseModel, populate_by_name=True, alias_generator=to_camel): + id: str = Field(description="Unique item identifier", default_factory=lambda: str(uuid4())) + name: str = Field(description="Human-readable item name") + + def dump(self, **kwargs: Any) -> dict: + kwargs.setdefault("by_alias", True) + kwargs.setdefault("exclude_none", True) + return self.model_dump(**kwargs) diff --git a/templates/graphql/schema.graphql b/templates/graphql/schema.graphql new file mode 100644 index 0000000..920f2a1 --- /dev/null +++ b/templates/graphql/schema.graphql @@ -0,0 +1,18 @@ +schema { + query: Query + mutation: Mutation +} + +type Query { + getItem(id: ID!): Item + listItems: [Item] +} + +type Mutation { + createItem(name: String!): Item +} + +type Item { + id: ID! + name: String! 
+} diff --git a/templates/graphql/settings.py b/templates/graphql/settings.py new file mode 100644 index 0000000..4ee01ed --- /dev/null +++ b/templates/graphql/settings.py @@ -0,0 +1,10 @@ +from pydantic import Field +from pydantic_settings import BaseSettings + + +class Settings(BaseSettings): + """Configuration settings for the GraphQL Lambda function.""" + + table_name: str = Field(description="The name of the DynamoDB table.") + service_name: str = Field(description="The name of the service.", default="graphql-api") + metrics_namespace: str = Field(description="The CloudWatch Metrics namespace.", default="GraphQLApi") diff --git a/templates/repository.py b/templates/repository.py index 842e4bf..62c40dd 100644 --- a/templates/repository.py +++ b/templates/repository.py @@ -23,6 +23,14 @@ def get_item(self, item_id: str) -> dict | None: """ return self._table.get_item(Key={"id": item_id}).get("Item") + def list_items(self) -> list[dict]: + """Retrieve all items from the table. + + Returns: + A list of all items in the table. + """ + return self._table.scan().get("Items", []) + def put_item(self, item: dict) -> None: """Write an item to the table, replacing any existing item with the same key. 
diff --git a/tests/graphql/__init__.py b/tests/graphql/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/graphql/test_handler.py b/tests/graphql/test_handler.py new file mode 100644 index 0000000..71babd6 --- /dev/null +++ b/tests/graphql/test_handler.py @@ -0,0 +1,74 @@ +from unittest.mock import MagicMock + +from aws_lambda_powertools.utilities.typing import LambdaContext +from pytest import fixture, main + + +@fixture(autouse=True) +def env(monkeypatch): + monkeypatch.setenv("TABLE_NAME", "test-table") + monkeypatch.setenv("POWERTOOLS_TRACE_DISABLED", "true") + monkeypatch.setenv("POWERTOOLS_METRICS_DISABLED", "true") + + +@fixture +def mock_repository(mocker): + mock = mocker.MagicMock() + mocker.patch("templates.graphql.handler.get_repository", return_value=mock) + return mock + + +@fixture +def lambda_context(): + ctx = MagicMock(spec=LambdaContext) + ctx.function_name = "test-function" + return ctx + + +def test_get_item_resolver(mock_repository, lambda_context): + from templates.graphql.handler import main + + mock_repository.get_item.return_value = {"id": "123", "name": "Test Item"} + event = { + "info": {"parentTypeName": "Query", "fieldName": "getItem"}, + "arguments": {"id": "123"}, + } + + result = main(event, lambda_context) + + assert result == {"id": "123", "name": "Test Item"} + mock_repository.get_item.assert_called_once_with("123") + + +def test_list_items_resolver(mock_repository, lambda_context): + from templates.graphql.handler import main + + mock_repository.list_items.return_value = [{"id": "123", "name": "Test Item"}] + event = { + "info": {"parentTypeName": "Query", "fieldName": "listItems"}, + "arguments": {}, + } + + result = main(event, lambda_context) + + assert result == [{"id": "123", "name": "Test Item"}] + mock_repository.list_items.assert_called_once() + + +def test_create_item_resolver(mock_repository, lambda_context): + from templates.graphql.handler import main + + event = { + "info": {"parentTypeName": 
"Mutation", "fieldName": "createItem"}, + "arguments": {"name": "New Item"}, + } + + result = main(event, lambda_context) + + assert result["name"] == "New Item" + assert "id" in result + mock_repository.put_item.assert_called_once() + + +if __name__ == "__main__": + main() diff --git a/tests/graphql/test_models.py b/tests/graphql/test_models.py new file mode 100644 index 0000000..a84f77c --- /dev/null +++ b/tests/graphql/test_models.py @@ -0,0 +1,14 @@ +from templates.graphql.models import Item + + +def test_item_model_defaults(): + item = Item(name="Test Item") + assert item.id is not None + assert item.name == "Test Item" + + +def test_item_model_dump(): + item = Item(id="123", name="Test Item") + dump = item.dump() + assert dump["id"] == "123" + assert dump["name"] == "Test Item" diff --git a/tests/graphql/test_settings.py b/tests/graphql/test_settings.py new file mode 100644 index 0000000..cc23ecd --- /dev/null +++ b/tests/graphql/test_settings.py @@ -0,0 +1,17 @@ +from pydantic import ValidationError +from pytest import raises + +from templates.graphql.settings import Settings + + +def test_settings_valid(monkeypatch): + monkeypatch.setenv("TABLE_NAME", "test-table") + settings = Settings() # type: ignore + assert settings.table_name == "test-table" + assert settings.service_name == "graphql-api" + + +def test_settings_invalid(monkeypatch): + monkeypatch.delenv("TABLE_NAME", raising=False) + with raises(ValidationError): + Settings() # type: ignore From 40ea9edeac7a5ef1dba076d64ced31a4aec0e6af Mon Sep 17 00:00:00 2001 From: "google-labs-jules[bot]" <161369871+google-labs-jules[bot]@users.noreply.github.com> Date: Sat, 11 Apr 2026 12:35:31 +0000 Subject: [PATCH 2/8] Add GraphQL template and rebase on main From 2242957dcc133ce4e4178bacca4e3dd1286a7e21 Mon Sep 17 00:00:00 2001 From: "google-labs-jules[bot]" <161369871+google-labs-jules[bot]@users.noreply.github.com> Date: Sat, 11 Apr 2026 12:44:46 +0000 Subject: [PATCH 3/8] Rebase on main and finalize GraphQL 
template implementation - Rebased branch on main - Verified all tests and linting pass - Finalized GraphQL template with AppSync, Lambda, and DynamoDB - Corrected Python version requirement in pyproject.toml From aea371022d580717495f12e7d5f3b51ad953bb1d Mon Sep 17 00:00:00 2001 From: Amr Abed Date: Sat, 11 Apr 2026 09:24:46 -0400 Subject: [PATCH 4/8] Remove .kiro specs --- .../cdk-deploy-destroy-targets/.config.kiro | 1 - .../cdk-deploy-destroy-targets/design.md | 152 ------ .../requirements.md | 62 --- .../specs/cdk-deploy-destroy-targets/tasks.md | 52 -- .kiro/specs/code-style-refactor/.config.kiro | 1 - .kiro/specs/code-style-refactor/design.md | 450 ------------------ .../specs/code-style-refactor/requirements.md | 155 ------ .kiro/specs/code-style-refactor/tasks.md | 85 ---- .kiro/specs/documentation-update/.config.kiro | 1 - .kiro/specs/documentation-update/design.md | 219 --------- .../documentation-update/requirements.md | 140 ------ .kiro/specs/documentation-update/tasks.md | 144 ------ .../specs/eventbridge-api-caller/.config.kiro | 1 - .kiro/specs/eventbridge-api-caller/design.md | 379 --------------- .../eventbridge-api-caller/requirements.md | 168 ------- .kiro/specs/eventbridge-api-caller/tasks.md | 201 -------- .../specs/github-deploy-workflow/.config.kiro | 1 - .kiro/specs/github-deploy-workflow/design.md | 225 --------- .../github-deploy-workflow/requirements.md | 92 ---- .kiro/specs/github-deploy-workflow/tasks.md | 76 --- .../lambda-scenario-templates/.config.kiro | 1 - .../specs/lambda-scenario-templates/design.md | 213 --------- .../lambda-scenario-templates/requirements.md | 132 ----- .../specs/lambda-scenario-templates/tasks.md | 94 ---- .../specs/s3-sqs-lambda-template/.config.kiro | 1 - .kiro/specs/s3-sqs-lambda-template/design.md | 244 ---------- .../s3-sqs-lambda-template/requirements.md | 118 ----- .kiro/specs/s3-sqs-lambda-template/tasks.md | 124 ----- 28 files changed, 3532 deletions(-) delete mode 100644 
.kiro/specs/cdk-deploy-destroy-targets/.config.kiro delete mode 100644 .kiro/specs/cdk-deploy-destroy-targets/design.md delete mode 100644 .kiro/specs/cdk-deploy-destroy-targets/requirements.md delete mode 100644 .kiro/specs/cdk-deploy-destroy-targets/tasks.md delete mode 100644 .kiro/specs/code-style-refactor/.config.kiro delete mode 100644 .kiro/specs/code-style-refactor/design.md delete mode 100644 .kiro/specs/code-style-refactor/requirements.md delete mode 100644 .kiro/specs/code-style-refactor/tasks.md delete mode 100644 .kiro/specs/documentation-update/.config.kiro delete mode 100644 .kiro/specs/documentation-update/design.md delete mode 100644 .kiro/specs/documentation-update/requirements.md delete mode 100644 .kiro/specs/documentation-update/tasks.md delete mode 100644 .kiro/specs/eventbridge-api-caller/.config.kiro delete mode 100644 .kiro/specs/eventbridge-api-caller/design.md delete mode 100644 .kiro/specs/eventbridge-api-caller/requirements.md delete mode 100644 .kiro/specs/eventbridge-api-caller/tasks.md delete mode 100644 .kiro/specs/github-deploy-workflow/.config.kiro delete mode 100644 .kiro/specs/github-deploy-workflow/design.md delete mode 100644 .kiro/specs/github-deploy-workflow/requirements.md delete mode 100644 .kiro/specs/github-deploy-workflow/tasks.md delete mode 100644 .kiro/specs/lambda-scenario-templates/.config.kiro delete mode 100644 .kiro/specs/lambda-scenario-templates/design.md delete mode 100644 .kiro/specs/lambda-scenario-templates/requirements.md delete mode 100644 .kiro/specs/lambda-scenario-templates/tasks.md delete mode 100644 .kiro/specs/s3-sqs-lambda-template/.config.kiro delete mode 100644 .kiro/specs/s3-sqs-lambda-template/design.md delete mode 100644 .kiro/specs/s3-sqs-lambda-template/requirements.md delete mode 100644 .kiro/specs/s3-sqs-lambda-template/tasks.md diff --git a/.kiro/specs/cdk-deploy-destroy-targets/.config.kiro b/.kiro/specs/cdk-deploy-destroy-targets/.config.kiro deleted file mode 100644 index 
3580b8a..0000000 --- a/.kiro/specs/cdk-deploy-destroy-targets/.config.kiro +++ /dev/null @@ -1 +0,0 @@ -{"specId": "c5a2e92e-2f4f-462d-8fd8-b32efccb2871", "workflowType": "requirements-first", "specType": "feature"} diff --git a/.kiro/specs/cdk-deploy-destroy-targets/design.md b/.kiro/specs/cdk-deploy-destroy-targets/design.md deleted file mode 100644 index 001a5d8..0000000 --- a/.kiro/specs/cdk-deploy-destroy-targets/design.md +++ /dev/null @@ -1,152 +0,0 @@ -# Design Document: cdk-deploy-destroy-targets - -## Overview - -This feature adds `deploy` and `destroy` Make targets to the project Makefile, and a CDK app entry point (`infra/app.py`) that selects a single stack based on a `STACK` environment variable. Developers can run `make deploy STACK=api` or `make destroy STACK=stream` without knowing CDK CLI syntax or full stack construct IDs. - -The two moving parts are: - -1. `infra/app.py` — a minimal CDK app that reads `STACK` from the environment and instantiates only the requested stack. -2. Two new Makefile targets (`deploy`, `destroy`) that validate the `STACK` argument, build the CDK CLI invocation, and forward an optional `AWS_PROFILE`. - -## Architecture - -```mermaid -sequenceDiagram - participant Dev as Developer - participant Make as Makefile - participant CDK as cdk CLI - participant App as infra/app.py - participant AWS as AWS - - Dev->>Make: make deploy STACK=api - Make->>Make: validate STACK ∈ {api, stream} - Make->>CDK: cdk deploy --app "python infra/app.py" ApiGatewayDynamodbStack [--profile ...] - CDK->>App: python infra/app.py (STACK=api in env) - App->>App: read STACK, instantiate ApiGatewayDynamodbStack - CDK->>AWS: deploy synthesised CloudFormation template - AWS-->>Dev: stack outputs -``` - -The Makefile is the sole entry point for developers. It owns argument validation and CDK CLI flag construction. `infra/app.py` is a thin adapter that maps the `STACK` env var to a stack class. - -## Components and Interfaces - -### 1. 
`infra/app.py` — CDK App Entry Point - -Reads `STACK` from `os.environ`, looks it up in a registry dict, and calls `app.synth()`. - -``` -STACK_REGISTRY: dict[str, type[Stack]] = { - "api": ApiGatewayDynamodbStack, - "stream": DynamodbStreamStack, -} -``` - -- If `STACK` is missing or not in the registry → `sys.exit(1)` with a descriptive message. -- If `STACK` is valid → instantiate the stack with a fixed construct ID derived from the class name, then synth. - -The construct ID passed to each stack is the class name (e.g. `ApiGatewayDynamodbStack`). This is the identifier that `cdk deploy` / `cdk destroy` targets. - -### 2. Makefile targets - -#### Stack name → CDK construct ID mapping - -A Makefile associative array (or conditional block) maps short names to CDK construct IDs: - -```makefile -STACK_MAP_api = ApiGatewayDynamodbStack -STACK_MAP_stream = DynamodbStreamStack -CDK_STACK = $(STACK_MAP_$(STACK)) -``` - -Adding a new stack requires one new `STACK_MAP_` line. - -#### `deploy` target - -``` -deploy: - @[ -n "$(STACK)" ] || { echo "Usage: make deploy STACK="; exit 1; } - @[ -n "$(CDK_STACK)" ] || { echo "Error: unknown stack '$(STACK)'"; exit 1; } - STACK=$(STACK) cdk deploy --app "python infra/app.py" $(CDK_STACK) \ - $(if $(AWS_PROFILE),--profile $(AWS_PROFILE),) -``` - -#### `destroy` target - -Same shape as `deploy` but calls `cdk destroy --force`. - -#### Profile forwarding - -Both targets use `$(if $(AWS_PROFILE),--profile $(AWS_PROFILE),)` so the flag is only appended when the variable is non-empty. - -## Data Models - -No persistent data models are introduced. 
The only runtime data is: - -| Name | Type | Source | Description | -|---|---|---|---| -| `STACK` | `str` | env var / Make variable | Short stack name (`api` or `stream`) | -| `AWS_PROFILE` | `str` (optional) | Make variable | Named AWS CLI profile | -| `CDK_STACK` | `str` | Makefile expansion | Full CDK construct ID derived from `STACK` | -| `STACK_REGISTRY` | `dict[str, type[Stack]]` | `infra/app.py` | Maps short name → stack class | - -## Correctness Properties - -*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* - -After prework analysis, requirements 1.1, 2.1, 4.1, and 4.2 all express the same invariant (valid name → correct class instantiated) and are consolidated into Property 1. Requirements 1.2, 1.3, 2.2, 2.3, and 4.3 all express the same invariant (invalid/missing name → non-zero exit) and are consolidated into Property 2. Requirements 3.1 and 3.2 are registry membership checks consolidated into a single example. - -### Property 1: Valid stack name instantiates exactly the correct stack class - -*For any* `(name, cls)` pair in `STACK_REGISTRY`, running `infra/app.py` with `STACK=name` must produce a CDK `App` whose stack list contains exactly one stack and that stack is an instance of `cls`. - -**Validates: Requirements 1.1, 2.1, 3.1, 3.2, 4.1, 4.2** - -### Property 2: Invalid or missing STACK causes non-zero exit - -*For any* string that is not a key in `STACK_REGISTRY` (including the empty string), running `infra/app.py` with that value as `STACK` must raise `SystemExit` with a non-zero code and emit a descriptive error message. 
- -**Validates: Requirements 1.2, 1.3, 2.2, 2.3, 4.3** - -## Error Handling - -| Scenario | Location | Behaviour | -|---|---|---| -| `STACK` not provided to `make deploy/destroy` | Makefile | Print usage message, `exit 1` | -| `STACK` value not in Makefile map | Makefile | Print "unknown stack" error, `exit 1` | -| `STACK` env var missing in `infra/app.py` | `infra/app.py` | `sys.exit(1)` with message | -| `STACK` env var unrecognised in `infra/app.py` | `infra/app.py` | `sys.exit(1)` with message listing valid names | -| `cdk deploy/destroy` fails | CDK CLI / shell | Non-zero exit propagates naturally (no `|| true`) | - -The Makefile and the CDK app both validate `STACK` independently. The Makefile check is the first line of defence (fast, no Python startup cost); the app check is the safety net when the app is invoked directly. - -## Testing Strategy - -### Unit tests (`tests/infra/test_app.py`) - -Focus on concrete examples and edge cases using `pytest` and `monkeypatch`: - -- Example: `STACK=api` → app contains exactly one stack, instance of `ApiGatewayDynamodbStack`. -- Example: `STACK=stream` → app contains exactly one stack, instance of `DynamodbStreamStack`. -- Example: `STACK` unset → `SystemExit` raised with non-zero code. -- Example: `STACK=unknown` → `SystemExit` raised with non-zero code. -- Example: registry keys are exactly `{"api", "stream"}` (validates 3.1, 3.2). - -### Property-based tests (`tests/infra/test_app_properties.py`) - -Use **Hypothesis** (already a dev dependency). Each property test runs a minimum of 100 iterations. - -Each test is tagged with a comment: -`# Feature: cdk-deploy-destroy-targets, Property : ` - -- **Property 1** — `@given(st.sampled_from(list(STACK_REGISTRY.items())))`: for any `(name, cls)` pair in the registry, the app produces exactly one stack that is an instance of `cls`. 
- `# Feature: cdk-deploy-destroy-targets, Property 1: valid stack name instantiates exactly the correct stack class` - -- **Property 2** — `@given(st.text().filter(lambda s: s not in STACK_REGISTRY))`: for any string not in the registry, the app raises `SystemExit` with a non-zero code. - `# Feature: cdk-deploy-destroy-targets, Property 2: invalid or missing STACK causes non-zero exit` - -### What is not tested here - -- Actual CDK synthesis against AWS (integration concern, out of scope). -- Makefile shell logic — `--profile` forwarding and exit-code propagation are validated manually or via shell integration tests. diff --git a/.kiro/specs/cdk-deploy-destroy-targets/requirements.md b/.kiro/specs/cdk-deploy-destroy-targets/requirements.md deleted file mode 100644 index de08f1f..0000000 --- a/.kiro/specs/cdk-deploy-destroy-targets/requirements.md +++ /dev/null @@ -1,62 +0,0 @@ -# Requirements Document - -## Introduction - -This feature adds `deploy` and `destroy` Make targets to the project's Makefile, enabling developers to deploy and tear down AWS Lambda stacks using AWS CDK from a single, consistent interface. The targets must support selecting which CDK stack to operate on (api or stream), accept an optional AWS profile, and integrate cleanly with the existing `make` workflow. - -## Glossary - -- **Makefile**: The project's top-level `Makefile` used to automate common developer workflows. -- **CDK**: AWS Cloud Development Kit — the infrastructure-as-code framework used under `infra/`. -- **Stack**: A deployable CDK unit. The project currently defines two stacks: `api` (ApiGatewayDynamodbStack) and `stream` (DynamodbStreamStack). -- **Stack_Name**: The logical identifier used to select a stack, corresponding to the stack module name under `infra/stacks/` (e.g., `api`, `stream`). -- **AWS_Profile**: An optional named AWS CLI profile used to authenticate CDK commands. -- **Deploy_Target**: The `deploy` Make target that synthesises and deploys a CDK stack to AWS. 
-- **Destroy_Target**: The `destroy` Make target that tears down a previously deployed CDK stack from AWS. - -## Requirements - -### Requirement 1: Deploy a CDK Stack - -**User Story:** As a developer, I want to run `make deploy STACK=` to deploy a Lambda stack to AWS, so that I can provision infrastructure without remembering CDK CLI syntax. - -#### Acceptance Criteria - -1. WHEN the `deploy` target is invoked with a valid `STACK` value, THE Deploy_Target SHALL execute `cdk deploy` for the corresponding CDK stack. -2. WHEN the `deploy` target is invoked without a `STACK` value, THE Deploy_Target SHALL print a usage error message and exit with a non-zero status code. -3. WHEN the `deploy` target is invoked with a `STACK` value that does not match any defined stack, THE Deploy_Target SHALL print an error message identifying the invalid stack name and exit with a non-zero status code. -4. WHERE an `AWS_PROFILE` variable is provided, THE Deploy_Target SHALL pass the profile to the CDK CLI via the `--profile` flag. -5. WHEN `cdk deploy` fails, THE Deploy_Target SHALL propagate the non-zero exit code to the calling shell. - -### Requirement 2: Destroy a CDK Stack - -**User Story:** As a developer, I want to run `make destroy STACK=` to tear down a deployed Lambda stack, so that I can clean up AWS resources without remembering CDK CLI syntax. - -#### Acceptance Criteria - -1. WHEN the `destroy` target is invoked with a valid `STACK` value, THE Destroy_Target SHALL execute `cdk destroy` with the `--force` flag for the corresponding CDK stack. -2. WHEN the `destroy` target is invoked without a `STACK` value, THE Destroy_Target SHALL print a usage error message and exit with a non-zero status code. -3. WHEN the `destroy` target is invoked with a `STACK` value that does not match any defined stack, THE Destroy_Target SHALL print an error message identifying the invalid stack name and exit with a non-zero status code. -4. 
WHERE an `AWS_PROFILE` variable is provided, THE Destroy_Target SHALL pass the profile to the CDK CLI via the `--profile` flag. -5. WHEN `cdk destroy` fails, THE Destroy_Target SHALL propagate the non-zero exit code to the calling shell. - -### Requirement 3: Stack Selection Mapping - -**User Story:** As a developer, I want the Make targets to map short stack names (e.g., `api`, `stream`) to the correct CDK stack class names, so that I don't need to know the full CDK construct ID. - -#### Acceptance Criteria - -1. THE Makefile SHALL define a mapping from the `api` Stack_Name to the `ApiGatewayDynamodbStack` CDK stack identifier. -2. THE Makefile SHALL define a mapping from the `stream` Stack_Name to the `DynamodbStreamStack` CDK stack identifier. -3. WHEN a new stack module is added under `infra/stacks/`, THE Makefile SHALL require only a single-line addition to the mapping to support the new stack in the `deploy` and `destroy` targets. - -### Requirement 4: CDK App Entry Point - -**User Story:** As a developer, I want a CDK app entry point that instantiates the selected stack, so that `cdk deploy` and `cdk destroy` can target individual stacks without deploying all stacks at once. - -#### Acceptance Criteria - -1. THE CDK_App SHALL accept a `STACK` environment variable to determine which stack to instantiate. -2. WHEN the `STACK` environment variable is set to a valid Stack_Name, THE CDK_App SHALL instantiate only the corresponding CDK stack. -3. WHEN the `STACK` environment variable is not set or is set to an unrecognised value, THE CDK_App SHALL exit with a descriptive error message and a non-zero status code. -4. THE Makefile SHALL pass the `STACK` variable to the CDK app via the `--context` flag or environment variable so that the CDK app can resolve the correct stack. 
diff --git a/.kiro/specs/cdk-deploy-destroy-targets/tasks.md b/.kiro/specs/cdk-deploy-destroy-targets/tasks.md deleted file mode 100644 index 58d4ed4..0000000 --- a/.kiro/specs/cdk-deploy-destroy-targets/tasks.md +++ /dev/null @@ -1,52 +0,0 @@ -# Implementation Plan: cdk-deploy-destroy-targets - -## Overview - -Create `infra/app.py` as the CDK app entry point and add `deploy`/`destroy` targets to the Makefile. - -## Tasks - -- [x] 1. Create `infra/app.py` CDK entry point - - Implement `STACK_REGISTRY` dict mapping `"api"` → `ApiGatewayDynamodbStack` and `"stream"` → `DynamodbStreamStack` - - Read `STACK` from `os.environ`; call `sys.exit(1)` with a descriptive message if missing or not in registry - - Instantiate the resolved stack class using the class name as the construct ID, then call `app.synth()` - - _Requirements: 4.1, 4.2, 4.3_ - - - [ ]* 1.1 Write unit tests for `infra/app.py` (`tests/infra/test_app.py`) - - Create `tests/infra/__init__.py` and `tests/infra/test_app.py` - - Test `STACK=api` → exactly one stack, instance of `ApiGatewayDynamodbStack` - - Test `STACK=stream` → exactly one stack, instance of `DynamodbStreamStack` - - Test `STACK` unset → `SystemExit` with non-zero code - - Test `STACK=unknown` → `SystemExit` with non-zero code - - Test registry keys are exactly `{"api", "stream"}` (validates 3.1, 3.2) - - End file with `if __name__ == "__main__": main()` - - _Requirements: 4.1, 4.2, 4.3, 3.1, 3.2_ - - - [ ]* 1.2 Write property test for `infra/app.py` (`tests/infra/test_app_properties.py`) - - **Property 1: Valid stack name instantiates exactly the correct stack class** - - `@given(st.sampled_from(list(STACK_REGISTRY.items())))` — for any `(name, cls)` pair, app produces exactly one stack that is an instance of `cls` - - **Validates: Requirements 1.1, 2.1, 3.1, 3.2, 4.1, 4.2** - - **Property 2: Invalid or missing STACK causes non-zero exit** - - `@given(st.text().filter(lambda s: s not in STACK_REGISTRY))` — for any string not in registry, app 
raises `SystemExit` with non-zero code - - **Validates: Requirements 1.2, 1.3, 2.2, 2.3, 4.3** - - Tag each test with the comment: `# Feature: cdk-deploy-destroy-targets, Property : ` - - End file with `if __name__ == "__main__": main()` - -- [x] 2. Checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. - -- [x] 3. Add `deploy` and `destroy` targets to `Makefile` - - Add `STACK_MAP_api`, `STACK_MAP_stream`, and `CDK_STACK` variable definitions - - Add `deploy` target: validate `STACK` is set, validate `CDK_STACK` is non-empty, invoke `cdk deploy --app "python infra/app.py" $(CDK_STACK)` with optional `--profile $(AWS_PROFILE)` - - Add `destroy` target: same validation, invoke `cdk destroy --force --app "python infra/app.py" $(CDK_STACK)` with optional `--profile $(AWS_PROFILE)` - - Add `deploy` and `destroy` to `.PHONY` - - _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 2.1, 2.2, 2.3, 2.4, 2.5, 3.1, 3.2, 3.3_ - -- [x] 4. Final checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. 
- -## Notes - -- Tasks marked with `*` are optional and can be skipped for faster MVP -- Property tests use Hypothesis (already a dev dependency) -- The Makefile and `infra/app.py` both validate `STACK` independently — Makefile is the first line of defence diff --git a/.kiro/specs/code-style-refactor/.config.kiro b/.kiro/specs/code-style-refactor/.config.kiro deleted file mode 100644 index 3580b8a..0000000 --- a/.kiro/specs/code-style-refactor/.config.kiro +++ /dev/null @@ -1 +0,0 @@ -{"specId": "c5a2e92e-2f4f-462d-8fd8-b32efccb2871", "workflowType": "requirements-first", "specType": "feature"} diff --git a/.kiro/specs/code-style-refactor/design.md b/.kiro/specs/code-style-refactor/design.md deleted file mode 100644 index 3dc3ffb..0000000 --- a/.kiro/specs/code-style-refactor/design.md +++ /dev/null @@ -1,450 +0,0 @@ -# Design Document: code-style-refactor - -## Overview - -This refactor standardises the code style of the `aws_lambda_template` project without introducing new runtime behaviour. The changes fall into three broad categories: - -1. **Structural renames** – the top-level package becomes `template`; the two scenarios become `api` and `stream`. -2. **Pydantic / pydantic-settings idiom updates** – `Field(description=...)`, constructor-keyword config, and camelCase alias support. -3. **Architectural patterns** – a `Repository` class per scenario for DynamoDB access, and a `Handler` class wrapping all Lambda business logic with a module-level `main` entry point. - -All existing tests must remain green after the refactor; they are updated in place to reflect the new names and patterns. - ---- - -## Architecture - -### Before - -``` -aws_lambda_template/ - app.py - scenarios/ - api_gateway_dynamodb/ - handler.py # module-level `handler` function is the Lambda entry point - models.py # plain BaseModel, no alias support - settings.py # model_config = SettingsConfigDict(...) 
- dynamodb_stream/ - handler.py # same pattern - models.py # model_config = ConfigDict(extra="allow") - settings.py # model_config = SettingsConfigDict(...) -``` - -### After - -``` -template/ - app.py - scenarios/ - api/ - handler.py # Handler class + module-level `main` entry point - models.py # BaseModel with populate_by_name=True, alias_generator=to_camel - repository.py # Repository class owning all boto3 DynamoDB calls - settings.py # BaseSettings with constructor-keyword config - stream/ - handler.py - models.py - repository.py - settings.py -``` - -### Dependency flow (per scenario) - -```mermaid -graph TD - main["main(event, context)"] -->|instantiates & calls| Handler - Handler -->|calls methods on| Repository - Repository -->|boto3| DynamoDB[(DynamoDB)] - Handler -->|validates with| Models - Handler -->|reads config from| Settings -``` - -The Powertools decorators (`@logger.inject_lambda_context`, `@tracer.capture_lambda_handler`, `@metrics.log_metrics`) are applied to `main`, not to `Handler.handle`. - ---- - -## Components and Interfaces - -### 1. Package rename: `aws_lambda_template` → `template` - -All Python source files move from `aws_lambda_template/` to `template/`. Internal imports are updated accordingly. `pyproject.toml` is updated: - -```toml -[tool.poetry] -name = "template" -packages = [{include = "template"}] - -[tool.poetry.scripts] -app = "template.app:main" - -[tool.coverage.run] -source = ["template"] -``` - -### 2. Scenario renames - -| Old path | New path | -|---|---| -| `template/scenarios/api_gateway_dynamodb/` | `template/scenarios/api/` | -| `template/scenarios/dynamodb_stream/` | `template/scenarios/stream/` | - -Infra handler strings update to: -- `templates.api.handler.main` -- `templates.stream.handler.main` - -### 3. 
Settings classes - -`model_config = SettingsConfigDict(...)` is replaced by constructor keyword arguments: - -```python -# Before -class Settings(BaseSettings): - model_config = SettingsConfigDict(case_sensitive=False) - table_name: str - """DynamoDB table name.""" - -# After -class Settings(BaseSettings, case_sensitive=False): - table_name: str = Field(description="DynamoDB table name.") -``` - -### 4. Pydantic models - -`model_config = ConfigDict(...)` is replaced by constructor keyword arguments, and every model gains `populate_by_name=True` and `alias_generator=to_camel`: - -```python -# Before -class Item(BaseModel): - id: str - name: str - -# After -from pydantic.alias_generators import to_camel - -class Item(BaseModel, populate_by_name=True, alias_generator=to_camel): - id: str = Field(description="Unique item identifier.") - name: str = Field(description="Human-readable item name.") -``` - -`DestinationItem` (stream scenario) previously used `ConfigDict(extra="allow")`: - -```python -# After -class DestinationItem(BaseModel, extra="allow", populate_by_name=True, alias_generator=to_camel): - id: str = Field(description="Unique item identifier.") -``` - -### 5. Repository class - -Each scenario gains a `repository.py` module with a `Repository` class that owns all `boto3` DynamoDB calls. 
- -**`template/scenarios/api/repository.py`** - -```python -class Repository: - def __init__(self, table) -> None: - self._table = table - - def get_item(self, item_id: str) -> dict | None: - response = self._table.get_item(Key={"id": item_id}) - return response.get("Item") - - def put_item(self, item: dict) -> None: - self._table.put_item(Item=item) -``` - -**`template/scenarios/stream/repository.py`** - -```python -class Repository: - def __init__(self, destination_table) -> None: - self._destination_table = destination_table - - def put_item(self, item: dict) -> None: - self._destination_table.put_item(Item=item) - - def delete_item(self, key: dict) -> None: - self._destination_table.delete_item(Key=key) -``` - -### 6. Handler class and `main` entry point - -All business logic moves into `Handler.handle`. The module-level `main` function instantiates `Handler` with a `Repository` and calls `handle`: - -```python -# template/scenarios/api/handler.py (sketch) - -class Handler: - def __init__(self, repository: Repository) -> None: - self._repo = repository - self._app = APIGatewayRestResolver() - self._register_routes() - - def _register_routes(self) -> None: - self._app.get("/items/")(self._get_item) - self._app.post("/items")(self._create_item) - - def handle(self, event: dict, context: LambdaContext) -> dict: - return self._app.resolve(event, context) - - def _get_item(self, id: str) -> dict: ... - def _create_item(self) -> tuple[dict, int]: ... - - -@logger.inject_lambda_context -@tracer.capture_lambda_handler -@metrics.log_metrics -def main(event: dict, context: LambdaContext) -> dict: - repo = Repository(table) - return Handler(repo).handle(event, context) -``` - -```python -# template/scenarios/stream/handler.py (sketch) - -class Handler: - def __init__(self, repository: Repository) -> None: - self._repo = repository - - def handle(self, event: DynamoDBStreamEvent, context: LambdaContext) -> None: - for record in event.records: - ... 
- - -@logger.inject_lambda_context -@tracer.capture_lambda_handler -@metrics.log_metrics -@event_source(data_class=DynamoDBStreamEvent) -def main(event: DynamoDBStreamEvent, context: LambdaContext) -> None: - repo = Repository(destination_table) - Handler(repo).handle(event, context) -``` - -### 7. Test updates - -| Concern | Before | After | -|---|---|---| -| Import path | `aws_lambda_templates.api_gateway_dynamodb.handler` | `templates.api.handler` | -| Module cache flush | keys containing `api_gateway_dynamodb` / `dynamodb_stream` | keys containing `templates.api` / `templates.stream` | -| DynamoDB mock | `mocker.patch.object(handler_module, "table")` | `mocker.patch.object(handler_module.Handler, ...)` or mock `Repository` instance | -| Entry point | `handler_module.handler(event, ctx)` | `handler_module.main(event, ctx)` | - -### 8. AGENTS.md updates - -A new "Coding Conventions" section is added covering: -- `Field(description=...)` for all Pydantic / pydantic-settings fields -- `Handler` class + `main` entry-point pattern -- Repository pattern for DynamoDB access -- camelCase alias convention -- Updated package and scenario names in all examples - ---- - -## Data Models - -### `api` scenario - -```python -# template/scenarios/api/models.py -from pydantic import BaseModel, Field -from pydantic.alias_generators import to_camel - -class Item(BaseModel, populate_by_name=True, alias_generator=to_camel): - id: str = Field(description="Unique item identifier.") - name: str = Field(description="Human-readable item name.") -``` - -```python -# template/scenarios/api/settings.py -from pydantic import Field -from pydantic_settings import BaseSettings - -class Settings(BaseSettings, case_sensitive=False): - table_name: str = Field(description="DynamoDB table name.") - service_name: str = Field(description="Powertools service name.") - metrics_namespace: str = Field(description="Powertools metrics namespace.") -``` - -### `stream` scenario - -```python -# 
template/scenarios/stream/models.py -from pydantic import BaseModel, Field -from pydantic.alias_generators import to_camel - -class DestinationItem(BaseModel, extra="allow", populate_by_name=True, alias_generator=to_camel): - id: str = Field(description="Unique item identifier.") -``` - -```python -# template/scenarios/stream/settings.py -from pydantic import Field -from pydantic_settings import BaseSettings - -class Settings(BaseSettings, case_sensitive=False): - source_table_name: str = Field(description="Source DynamoDB table name.") - destination_table_name: str = Field(description="Destination DynamoDB table name.") - service_name: str = Field(description="Powertools service name.") - metrics_namespace: str = Field(description="Powertools metrics namespace.") -``` - -### Alias behaviour summary - -| Call | Keys returned | -|---|---| -| `item.model_dump()` | `{"id": ..., "name": ...}` (snake_case) | -| `item.model_dump(by_alias=True)` | `{"id": ..., "name": ...}` (camelCase; single-word fields are unchanged) | -| `Item.model_validate({"itemId": ...})` | works when field is `item_id` | - ---- - -## Correctness Properties - -*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* - -### Property 1: Field descriptions are stored in Field metadata - -*For any* Pydantic model or pydantic-settings class in the `template` package, every field that carries a description SHALL have that description stored in the field's `FieldInfo.description` attribute (i.e. declared via `Field(description="...")`), not in a docstring or inline comment. 
- -**Validates: Requirements 3.1, 3.2** - -### Property 2: No class-level `model_config` attribute - -*For any* class in the `template` package that inherits from `BaseModel` or `BaseSettings`, the class SHALL NOT define a `model_config` class attribute that is an instance of `ConfigDict` or `SettingsConfigDict`. Configuration options must be passed as constructor keyword arguments to the base class. - -**Validates: Requirements 4.1, 4.2, 4.3, 4.4** - -### Property 3: All Pydantic models have camelCase alias support - -*For any* class in the `template` package that inherits from `BaseModel`, the class's `model_config` SHALL have `populate_by_name=True` and `alias_generator` set to `pydantic.alias_generators.to_camel`. - -**Validates: Requirements 5.1, 5.5** - -### Property 4: camelCase alias round-trip - -*For any* Pydantic model instance in the `template` package, serialising with `model_dump(by_alias=True)` and then deserialising the result with `model_validate` SHALL produce an equivalent model instance. Additionally, `model_dump()` (no arguments) SHALL return snake_case keys, and `model_dump(by_alias=True)` SHALL return camelCase keys for multi-word field names. 
- -**Validates: Requirements 5.2, 5.3, 5.4** - ---- - -## Error Handling - -### `api` scenario - -| Situation | Behaviour | -|---|---| -| `Repository.get_item` raises | `Handler._get_item` catches, logs, returns 500 | -| Item not found in DynamoDB | `Handler._get_item` raises `NotFoundError` → 404 | -| Request body fails Pydantic validation | `Handler._create_item` catches `ValidationError`, returns 422 with error detail | -| `Repository.put_item` raises | `Handler._create_item` catches, logs, returns 500 | - -### `stream` scenario - -| Situation | Behaviour | -|---|---| -| `DestinationItem.model_validate` raises `ValidationError` | Caught per-record; error logged with `metrics.add_metric(ProcessingError)`; processing continues | -| `Repository.put_item` / `delete_item` raises | Caught per-record; error logged; processing continues | - -Error handling logic lives inside `Handler.handle` / the route methods, not in `main`. `main` is kept thin (instantiate, delegate, return). - ---- - -## Testing Strategy - -### Dual testing approach - -Both unit tests and property-based tests are required. They are complementary: - -- **Unit tests** verify specific examples, integration points, and error conditions. -- **Property tests** verify universal invariants across many generated inputs. - -### Property-based testing library - -Use **[Hypothesis](https://hypothesis.readthedocs.io/)** (`hypothesis` package) for all property-based tests. Each property test must run a minimum of **100 iterations** (Hypothesis default is 100; do not lower it). - -Tag format for each property test: - -```python -# Feature: code-style-refactor, Property N: -@given(...) -def test_property_n_...(...) 
-``` - -### Unit tests (examples and error conditions) - -Each test module mirrors the source module path: - -| Source | Test | -|---|---| -| `template/scenarios/api/handler.py` | `tests/scenarios/api/test_handler.py` | -| `template/scenarios/stream/handler.py` | `tests/scenarios/stream/test_handler.py` | - -**`api` scenario unit tests** (updated from current): -- `test_get_item_found` — GET 200 with item data -- `test_post_item_created` — POST 201 with created item -- `test_get_item_not_found` — GET 404 when item absent -- `test_post_item_invalid_body` — POST 422 on validation failure -- `test_get_item_dynamodb_error` — GET 500 on DynamoDB exception -- `test_post_item_dynamodb_error` — POST 500 on DynamoDB exception - -All tests mock `Repository` instance methods rather than `boto3.resource`. Entry point is `handler_module.main`. - -**`stream` scenario unit tests** (updated from current): -- `test_insert_record_calls_put_item` -- `test_modify_record_calls_put_item` -- `test_remove_record_calls_delete_item` -- `test_deserialisation_failure_continues` -- `test_dynamodb_write_failure_continues` - -All tests mock `Repository` instance methods. Entry point is `handler_module.main`. 
- -### Property-based tests - -**Property 1 — Field descriptions in Field metadata** - -```python -# Feature: code-style-refactor, Property 1: field descriptions stored in FieldInfo -@given(st.sampled_from([Item, DestinationItem, Settings_api, Settings_stream])) -def test_field_descriptions_in_field_info(model_cls): - for name, field_info in model_cls.model_fields.items(): - if field_info.description is not None: - assert isinstance(field_info.description, str) - assert len(field_info.description) > 0 -``` - -**Property 2 — No class-level `model_config` attribute** - -```python -# Feature: code-style-refactor, Property 2: no class-level model_config attribute -@given(st.sampled_from([Item, DestinationItem, Settings_api, Settings_stream])) -def test_no_explicit_model_config_attribute(model_cls): - assert "model_config" not in model_cls.__dict__ -``` - -**Property 3 — camelCase alias support on all models** - -```python -# Feature: code-style-refactor, Property 3: all models have populate_by_name and to_camel alias_generator -@given(st.sampled_from([Item, DestinationItem])) -def test_models_have_camel_alias_config(model_cls): - cfg = model_cls.model_config - assert cfg.get("populate_by_name") is True - from pydantic.alias_generators import to_camel - assert cfg.get("alias_generator") is to_camel -``` - -**Property 4 — camelCase alias round-trip** - -```python -# Feature: code-style-refactor, Property 4: camelCase alias round-trip -@given(st.builds(Item)) -def test_item_alias_round_trip(item): - dumped = item.model_dump(by_alias=True) - restored = Item.model_validate(dumped) - assert restored == item -``` - -### Avoiding over-testing - -Unit tests focus on concrete scenarios and error paths. Property tests handle broad input coverage. Do not duplicate coverage between the two layers. 
diff --git a/.kiro/specs/code-style-refactor/requirements.md b/.kiro/specs/code-style-refactor/requirements.md deleted file mode 100644 index 10a7390..0000000 --- a/.kiro/specs/code-style-refactor/requirements.md +++ /dev/null @@ -1,155 +0,0 @@ -# Requirements Document - -## Introduction - -This refactor standardises the code style across the `aws_lambda_template` project. It covers package and module renaming, Pydantic/pydantic-settings idiom updates, introduction of a repository pattern for DynamoDB access, a `Handler` class wrapping all Lambda logic, and documentation of the resulting conventions in `AGENTS.md`. - -No new runtime behaviour is introduced; all existing tests must continue to pass after the changes, updated to reflect the new names and structure. - -## Glossary - -- **Package**: The top-level Python package, currently `aws_lambda_template`, renamed to `template`. -- **Scenario**: A self-contained Lambda use-case under `template/scenarios/`. Two scenarios exist: `api` (formerly `api_gateway_dynamodb`) and `stream` (formerly `dynamodb_stream`). -- **Handler_File**: The `handler.py` module inside each scenario. -- **Handler_Class**: A class named `Handler` defined in each `Handler_File`, containing all Lambda business logic in a `handle` method. -- **Main_Function**: The module-level function `main` in each `Handler_File`, which instantiates `Handler` and calls `handler.handle(event, context)`. This is the Lambda entry point. -- **Repository**: A class that encapsulates all DynamoDB read/write operations for a scenario, called from `Handler_Class`. -- **Pydantic_Model**: Any class that inherits from `pydantic.BaseModel`. -- **Settings_Class**: Any class that inherits from `pydantic_settings.BaseSettings`. -- **Field_Description**: A `pydantic.Field(description="...")` call used to document a model or settings field. 
-- **camelCase_Alias**: An alias generated by `alias_generator=to_camel` so that JSON keys use camelCase while Python attributes use snake_case. - ---- - -## Requirements - -### Requirement 1: Rename the base package - -**User Story:** As a developer, I want the base package to be named `template` instead of `aws_lambda_template`, so that import paths are shorter and less tied to a specific cloud provider. - -#### Acceptance Criteria - -1. THE Package SHALL be located at `template/` in the repository root. -2. WHEN any module imports from the old `aws_lambda_template` namespace, THE Package SHALL expose the same public API under the `template` namespace. -3. THE `pyproject.toml` SHALL declare `name = "template"` and `packages = [{include = "template"}]`. -4. THE `[tool.coverage.run]` section in `pyproject.toml` SHALL set `source = ["template"]`. -5. THE `[tool.poetry.scripts]` entry SHALL reference `template.app:main`. - ---- - -### Requirement 2: Rename scenarios - -**User Story:** As a developer, I want scenario directories to use short, one-word names, so that import paths and file-system navigation are simpler. - -#### Acceptance Criteria - -1. THE Scenario previously named `api_gateway_dynamodb` SHALL be located at `template/scenarios/api/`. -2. THE Scenario previously named `dynamodb_stream` SHALL be located at `template/scenarios/stream/`. -3. WHEN the infra stacks reference a scenario handler, THE infra stack SHALL use the new path (e.g. `templates.api.handler.main`). -4. WHEN tests import a scenario module, THE tests SHALL use the new module path (e.g. `templates.api.handler`). - ---- - -### Requirement 3: Use `Field(description=...)` for Pydantic and pydantic-settings fields - -**User Story:** As a developer, I want field documentation to live in `Field(description="...")` rather than in docstrings or inline comments, so that descriptions are machine-readable and appear in generated JSON schemas. - -#### Acceptance Criteria - -1. 
THE Pydantic_Model SHALL declare each field using `Field(description="...")` when a description is warranted. -2. THE Settings_Class SHALL declare each field using `Field(description="...")` in place of any docstring or inline comment that describes the field. -3. IF a field has no meaningful description, THEN THE Pydantic_Model or Settings_Class SHALL omit the `Field` wrapper and use a plain type annotation. - ---- - -### Requirement 4: Drop `model_config` / `SettingsConfigDict` in favour of constructor parameters - -**User Story:** As a developer, I want model and settings configuration expressed as class-level constructor keyword arguments rather than a `model_config` dict, so that the configuration is more concise and IDE-friendly. - -#### Acceptance Criteria - -1. THE Pydantic_Model SHALL NOT use a `model_config = ConfigDict(...)` class attribute. -2. THE Settings_Class SHALL NOT use a `model_config = SettingsConfigDict(...)` class attribute. -3. WHEN a Pydantic_Model requires configuration options (e.g. `extra="allow"`), THE Pydantic_Model SHALL pass those options as keyword arguments to `BaseModel` in the class definition (e.g. `class Foo(BaseModel, extra="allow")`). -4. WHEN a Settings_Class requires configuration options (e.g. `case_sensitive=False`), THE Settings_Class SHALL pass those options as keyword arguments to `BaseSettings` in the class definition. - ---- - -### Requirement 5: Add camelCase alias support to Pydantic models - -**User Story:** As a developer, I want Pydantic models to accept both camelCase JSON keys and snake_case Python attribute names, so that API payloads can use camelCase without manual mapping. - -#### Acceptance Criteria - -1. THE Pydantic_Model SHALL be defined with `populate_by_name=True` and `alias_generator=to_camel` keyword arguments. -2. WHEN a JSON payload uses camelCase keys, THE Pydantic_Model SHALL deserialise it correctly into snake_case attributes. -3. 
WHEN `model_dump()` is called without arguments, THE Pydantic_Model SHALL return snake_case keys (default behaviour). -4. WHEN `model_dump(by_alias=True)` is called, THE Pydantic_Model SHALL return camelCase keys. -5. THE `pydantic.alias_generators.to_camel` function SHALL be used as the `alias_generator`. - ---- - -### Requirement 6: Introduce a repository pattern for DynamoDB access - -**User Story:** As a developer, I want all DynamoDB read/write calls encapsulated in a `Repository` class, so that handler logic is decoupled from persistence and easier to test. - -#### Acceptance Criteria - -1. EACH Scenario SHALL define a `Repository` class (in a new `repository.py` module or within `handler.py`) that owns all `boto3` DynamoDB calls for that scenario. -2. THE `Repository` class SHALL be initialised with the DynamoDB table resource(s) it needs. -3. THE Handler_Class SHALL call `Repository` methods rather than calling `boto3` directly. -4. WHEN a test needs to verify DynamoDB interactions, THE test SHALL mock the `Repository` instance or its methods rather than patching `boto3` at the module level. - ---- - -### Requirement 7: Rename the Lambda entry-point function to `main` - -**User Story:** As a developer, I want the Lambda entry-point function to be named `main`, so that the naming convention is consistent and the infra handler path clearly identifies the file and function. - -#### Acceptance Criteria - -1. THE Main_Function in each `Handler_File` SHALL be named `main`. -2. THE infra handler path for the `api` scenario SHALL be `templates.api.handler.main`. -3. THE infra handler path for the `stream` scenario SHALL be `templates.stream.handler.main`. -4. THE `handler` function name SHALL NOT appear as the Lambda entry point in any infra stack after the rename. 
- ---- - -### Requirement 8: Wrap handler logic in a `Handler` class - -**User Story:** As a developer, I want all Lambda business logic inside a `Handler` class with a `handle` method, so that the code is easier to unit-test and extend. - -#### Acceptance Criteria - -1. EACH `Handler_File` SHALL define a class named `Handler` with a `handle(self, event, context)` method containing all business logic. -2. THE Main_Function SHALL instantiate `Handler` and call `handler_instance.handle(event, context)`, returning the result. -3. THE `Handler` class constructor SHALL accept the `Repository` instance(s) and any other dependencies needed by the scenario. -4. WHEN decorators from AWS Lambda Powertools (logger, tracer, metrics) are applied, THE decorators SHALL be applied to `main` (the module-level entry point), not to `Handler.handle`. - ---- - -### Requirement 9: Update tests - -**User Story:** As a developer, I want all tests updated to reflect the renamed package, scenarios, modules, and classes, so that the test suite remains green after the refactor. - -#### Acceptance Criteria - -1. THE test modules SHALL import from `templates.api.handler` and `templates.stream.handler` respectively. -2. WHEN a test fixture imports the handler module, THE fixture SHALL clear cached modules whose names contain `templates.api` or `templates.stream` (not the old names). -3. WHEN a test needs to verify DynamoDB interactions, THE test SHALL mock the `Repository` class or instance rather than patching `boto3.resource` at the module level. -4. THE test for the `api` scenario SHALL call `handler_module.main` as the Lambda entry point. -5. THE test for the `stream` scenario SHALL call `handler_module.main` as the Lambda entry point. - ---- - -### Requirement 10: Document style conventions in `AGENTS.md` - -**User Story:** As a developer, I want the coding conventions introduced by this refactor captured in `AGENTS.md`, so that future contributors and AI agents follow the same patterns. 
- -#### Acceptance Criteria - -1. THE `AGENTS.md` SHALL include a section describing the `Field(description="...")` convention for Pydantic and pydantic-settings fields. -2. THE `AGENTS.md` SHALL include a section describing the `Handler` class / `main` entry-point pattern for Lambda handlers. -3. THE `AGENTS.md` SHALL include a section describing the repository pattern for DynamoDB access. -4. THE `AGENTS.md` SHALL include a section describing the camelCase alias convention (`populate_by_name=True, alias_generator=to_camel`) for Pydantic models. -5. THE `AGENTS.md` SHALL reflect the updated package name (`template`) and scenario names (`api`, `stream`) in all examples and references. diff --git a/.kiro/specs/code-style-refactor/tasks.md b/.kiro/specs/code-style-refactor/tasks.md deleted file mode 100644 index 59d989b..0000000 --- a/.kiro/specs/code-style-refactor/tasks.md +++ /dev/null @@ -1,85 +0,0 @@ -# Implementation Plan: code-style-refactor - -## Overview - -Incremental refactor of the `aws_lambda_template` project: rename the package and scenarios, update Pydantic/pydantic-settings idioms, introduce the Repository and Handler patterns, update tests (including property-based tests), and document the new conventions in `AGENTS.md`. - -Each task builds on the previous one. No task leaves orphaned code that is not wired into the rest of the codebase. - -## Tasks - -- [x] 1. 
Rename base package `aws_lambda_template` → `template` - - Create `template/` directory with `__init__.py` and `app.py` copied from `aws_lambda_template/` - - Recreate the `template/scenarios/` sub-package tree (empty `__init__.py` files only; scenario content comes in later tasks) - - Update `pyproject.toml`: set `name = "template"`, `packages = [{include = "template"}]`, `app = "template.app:main"`, and `source = ["template"]` under `[tool.coverage.run]` - - Delete the old `aws_lambda_template/` directory tree after all content has been migrated - - _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5_ - -- [x] 2. Rename scenarios and migrate source files - - [x] 2.1 Create `template/scenarios/api/` with placeholder `__init__.py`; copy `handler.py`, `models.py`, `settings.py` from `aws_lambda_template/scenarios/api_gateway_dynamodb/`, updating all internal imports to use `templates.api.*` - - [x] 2.2 Create `template/scenarios/stream/` with placeholder `__init__.py`; copy `handler.py`, `models.py`, `settings.py` from `aws_lambda_template/scenarios/dynamodb_stream/`, updating all internal imports to use `templates.stream.*` - - [x] 2.3 Delete `aws_lambda_template/scenarios/api_gateway_dynamodb/` and `aws_lambda_template/scenarios/dynamodb_stream/` (and the now-empty `aws_lambda_template/` tree if not already removed in task 1) - - _Requirements: 2.1, 2.2_ - -- [x] 3. 
Update Pydantic models to new idioms - - [x] 3.1 Rewrite `template/scenarios/api/models.py`: replace bare `BaseModel` with `BaseModel, populate_by_name=True, alias_generator=to_camel`; add `Field(description="...")` to every field; remove any `model_config = ConfigDict(...)` attribute - - [x] 3.2 Rewrite `template/scenarios/stream/models.py`: same idiom changes; preserve `extra="allow"` as a constructor keyword argument (`class DestinationItem(BaseModel, extra="allow", populate_by_name=True, alias_generator=to_camel)`) - - [x] 3.3 Write property test for Property 1 (field descriptions stored in FieldInfo) and Property 2 (no class-level `model_config`) and Property 3 (camelCase alias config) and Property 4 (alias round-trip) in `tests/test_properties.py` - - **Property 1: Field descriptions are stored in Field metadata** - - **Property 2: No class-level `model_config` attribute** - - **Property 3: All Pydantic models have camelCase alias support** - - **Property 4: camelCase alias round-trip** - - **Validates: Requirements 3.1, 3.2, 4.1, 4.2, 4.3, 4.4, 5.1, 5.2, 5.3, 5.4, 5.5** - - Use `hypothesis` (`@given`, `st.sampled_from`, `st.builds`); tag each test with `# Feature: code-style-refactor, Property N: ...` - - Add `hypothesis` to dev dependencies via `poetry add --group dev hypothesis` - - _Requirements: 3.1, 3.2, 4.1, 4.3, 5.1, 5.2, 5.3, 5.4, 5.5_ - -- [x] 4. Update pydantic-settings classes - - [x] 4.1 Rewrite `template/scenarios/api/settings.py`: replace `model_config = SettingsConfigDict(...)` with constructor keyword args (`class Settings(BaseSettings, case_sensitive=False)`); add `Field(description="...")` to every field; remove `SettingsConfigDict` import - - [x] 4.2 Rewrite `template/scenarios/stream/settings.py`: same changes as 4.1 - - _Requirements: 3.2, 4.2, 4.4_ - -- [x] 5. 
Create `repository.py` for each scenario - - [x] 5.1 Create `template/scenarios/api/repository.py` with a `Repository` class that accepts a DynamoDB table resource and exposes `get_item(item_id: str) -> dict | None` and `put_item(item: dict) -> None` - - [x] 5.2 Create `template/scenarios/stream/repository.py` with a `Repository` class that accepts a destination table resource and exposes `put_item(item: dict) -> None` and `delete_item(key: dict) -> None` - - _Requirements: 6.1, 6.2_ - -- [x] 6. Refactor `api` handler to `Handler` class + `main` entry point - - [x] 6.1 Rewrite `template/scenarios/api/handler.py`: move all route logic into a `Handler` class (`__init__` accepts `Repository`; `_register_routes`, `_get_item`, `_create_item` as private methods; `handle(event, context)` delegates to `self._app.resolve`); add module-level `main` function that instantiates `Repository(table)` and `Handler(repo)` and calls `handle`; apply Powertools decorators to `main` only - - [x] 6.2 Update `infra/stacks/api_gateway_dynamodb_stack.py`: change handler string to `templates.api.handler.main` - - _Requirements: 6.3, 7.1, 7.2, 7.4, 8.1, 8.2, 8.3, 8.4_ - -- [x] 7. Refactor `stream` handler to `Handler` class + `main` entry point - - [x] 7.1 Rewrite `template/scenarios/stream/handler.py`: move all record-processing logic into a `Handler` class (`__init__` accepts `Repository`; `handle(event, context)` iterates records and calls repository methods); add module-level `main` function that instantiates `Repository(destination_table)` and `Handler(repo)` and calls `handle`; apply Powertools decorators to `main` only - - [x] 7.2 Update `infra/stacks/dynamodb_stream_stack.py`: change handler string to `templates.stream.handler.main` - - _Requirements: 6.3, 7.1, 7.3, 7.4, 8.1, 8.2, 8.3, 8.4_ - -- [x] 8. Checkpoint — ensure all source changes are consistent - - Ensure all tests pass, ask the user if questions arise. - -- [x] 9. 
Update `api` scenario tests - - [x] 9.1 Rename/move `tests/scenarios/api_gateway_dynamodb/` to `tests/scenarios/api/`; update `test_handler.py`: change import to `templates.api.handler`; update module-cache flush to match `templates.api`; replace `mocker.patch.object(handler_module, "table")` with a mock `Repository` instance injected via `mocker.patch`; call `handler_module.main` as the entry point; cover all six test cases listed in the design (`test_get_item_found`, `test_post_item_created`, `test_get_item_not_found`, `test_post_item_invalid_body`, `test_get_item_dynamodb_error`, `test_post_item_dynamodb_error`) - - [x] 9.2 Write unit tests for `Repository` methods in `tests/scenarios/api/test_repository.py` (mock the boto3 table; verify `get_item` and `put_item` call the table with correct arguments) - - _Requirements: 6.4, 9.3_ - - _Requirements: 9.1, 9.2, 9.4_ - -- [x] 10. Update `stream` scenario tests - - [x] 10.1 Rename/move `tests/scenarios/dynamodb_stream/` to `tests/scenarios/stream/`; update `test_handler.py`: change import to `templates.stream.handler`; update module-cache flush; replace `mocker.patch.object(handler_module, "destination_table")` with a mock `Repository` instance; call `handler_module.main` as the entry point; cover all five test cases (`test_insert_record_calls_put_item`, `test_modify_record_calls_put_item`, `test_remove_record_calls_delete_item`, `test_deserialisation_failure_continues`, `test_dynamodb_write_failure_continues`) - - [x] 10.2 Write unit tests for `Repository` methods in `tests/scenarios/stream/test_repository.py` (mock the boto3 table; verify `put_item` and `delete_item` call the table correctly) - - _Requirements: 6.4, 9.3_ - - _Requirements: 9.1, 9.2, 9.5_ - -- [x] 11. Final checkpoint — ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. - -- [x] 12. 
Update `AGENTS.md` with new conventions - - Add a "Coding Conventions" section covering: `Field(description="...")` for all Pydantic/pydantic-settings fields; `Handler` class + `main` entry-point pattern; Repository pattern for DynamoDB access; camelCase alias convention (`populate_by_name=True, alias_generator=to_camel`); updated package name (`template`) and scenario names (`api`, `stream`) in all examples and references - - _Requirements: 10.1, 10.2, 10.3, 10.4, 10.5_ - -## Notes - -- Tasks marked with `*` are optional and can be skipped for a faster MVP -- Each task references specific requirements for traceability -- Checkpoints ensure incremental validation -- Property tests (task 3.3) validate universal invariants using Hypothesis; run at least 100 iterations per property (Hypothesis default) -- Unit tests validate specific examples and error conditions diff --git a/.kiro/specs/documentation-update/.config.kiro b/.kiro/specs/documentation-update/.config.kiro deleted file mode 100644 index 3580b8a..0000000 --- a/.kiro/specs/documentation-update/.config.kiro +++ /dev/null @@ -1 +0,0 @@ -{"specId": "c5a2e92e-2f4f-462d-8fd8-b32efccb2871", "workflowType": "requirements-first", "specType": "feature"} diff --git a/.kiro/specs/documentation-update/design.md b/.kiro/specs/documentation-update/design.md deleted file mode 100644 index 46c9b2e..0000000 --- a/.kiro/specs/documentation-update/design.md +++ /dev/null @@ -1,219 +0,0 @@ -# Design Document: Documentation Update - -## Overview - -This feature updates the project's documentation to accurately reflect the current codebase. The existing `docs/README.md` was written for an earlier version of the template that had a single `project/` folder and no Lambda scenarios. The current codebase has two Lambda scenarios (`templates/api/` and `templates/stream/`), CDK infrastructure stacks, environment variable configuration via pydantic-settings, and a property-based testing setup using Hypothesis. 
The `mkdocs.yml` navigation also points to a single reference page with an incorrect module path. - -The changes are purely documentation — no source code is modified. The deliverables are: - -- An updated `docs/README.md` -- An updated `docs/CONTRIBUTING.md` -- An updated `mkdocs.yml` -- A corrected `docs/reference/app.md` -- Three new reference pages: `docs/reference/repository.md`, `docs/reference/api.md`, `docs/reference/stream.md` - -## Architecture - -The documentation site is built with [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) and API reference pages are generated from Python docstrings using [mkdocstrings](https://mkdocstrings.github.io/). The site is configured in `mkdocs.yml` at the repository root. - -``` -mkdocs.yml ← site config, nav, plugins -docs/ - README.md ← home page (also the PyPI/GitHub readme) - CONTRIBUTING.md ← contributor guide - reference/ - app.md ← ::: templates.app - repository.md ← ::: templates.repository - api.md ← ::: templates.api.handler - stream.md ← ::: templates.stream.handler -``` - -The `mkdocs gh-deploy` command (invoked via `make docs`) builds the site and pushes it to the `gh-pages` branch. `make local` serves it locally for preview. - -## Components and Interfaces - -### docs/README.md - -The primary documentation file. It serves as both the repository landing page and the MkDocs "Overview" home page (referenced in `mkdocs.yml` nav). It must cover: - -- Project description (two Lambda scenarios) -- Full tooling list -- Template renaming instructions (`make project`) -- Prerequisites -- Setup commands (all relevant `make` targets) -- API scenario: endpoints, data model, environment variables, response codes -- Stream scenario: event processing, data models, environment variables, partial batch failure -- CDK stacks and deployment commands -- Documentation build commands -- Property-based testing mention -- Accurate project structure tree - -### docs/CONTRIBUTING.md - -The contributor guide. 
Requires a small addition documenting where to place new property-based tests. - -### mkdocs.yml - -Site configuration. Requires: -- `site_name` updated to `AWS Lambda Template - Python` -- `nav` section updated to list all four reference pages under a "Reference" section - -### docs/reference/app.md - -Fix the incorrect mkdocstrings directive from `aws_lambda_template.app` to `templates.app`. - -### docs/reference/repository.md (new) - -New reference page with directive `::: templates.repository`. - -### docs/reference/api.md (new) - -New reference page with directive `::: templates.api.handler`. - -### docs/reference/stream.md (new) - -New reference page with directive `::: templates.stream.handler`. - -## Data Models - -This feature involves no code data models. The relevant "data" is the content structure of the documentation files. - -The key facts derived from the source code that must appear in the documentation: - -**API scenario (`templates/api/`)** -- Handler: `templates.api.handler` -- Endpoints: `GET /items/{id}`, `POST /items` -- Model fields: `id` (UUID string, auto-generated), `name` (string) -- Environment variables: `TABLE_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE` -- Response codes: 200, 201, 404, 422, 500 - -**Stream scenario (`templates/stream/`)** -- Handler: `templates.stream.handler` -- Trigger: DynamoDB Streams (INSERT, MODIFY → put to destination; REMOVE → delete from destination) -- Models: `SourceItem`, `DestinationItem` -- Environment variables: `SOURCE_TABLE_NAME`, `DESTINATION_TABLE_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE` -- Partial batch failure reporting enabled - -**CDK stacks (`infra/stacks/`)** -- `ApiGatewayDynamodbStack` → `make deploy STACK=api` -- `DynamodbStreamStack` → `make deploy STACK=stream` -- Entry point: `infra/app.py` -- Optional: `AWS_PROFILE` - -**Makefile targets to document** -- `make project NAME=... DESCRIPTION=... AUTHOR=... EMAIL=... GITHUB=... 
[SOURCE=...]` -- `make install`, `make precommit`, `make venv`, `make lint`, `make test` -- `make deploy STACK= [AWS_PROFILE=...]` -- `make destroy STACK= [AWS_PROFILE=...]` -- `make docs`, `make local` - -## Correctness Properties - -*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* - -### Property 1: README contains all required tooling names - -*For any* tool listed in the requirements (Poetry, Click, pytest, coverage, ruff, pyright, pre-commit, GitHub Actions, Dependabot, Dev Containers, MkDocs, mkdocstrings, Docker, AWS CDK, AWS Lambda Powertools), the README file content should contain that tool's name. - -**Validates: Requirements 1.2** - -### Property 2: README does not reference the removed project/ folder - -*For any* occurrence search of the string `project/` in the README, the result should be empty — the README must not contain any reference to the old `project/` directory. - -**Validates: Requirements 1.4** - -### Property 3: README contains all required Makefile target names - -*For any* Makefile target that the requirements mandate be documented (`install`, `precommit`, `venv`, `lint`, `test`, `deploy`, `destroy`, `project`, `docs`, `local`), the README should contain that target name, and each such target should also exist in the Makefile. - -**Validates: Requirements 2.2, 2.3, 2.4, 2.5, 2.6, 5.2, 5.3, 8.1, 9.2** - -### Property 4: README contains all required endpoint strings - -*For any* HTTP endpoint defined in the API scenario handler (`GET /items/{id}`, `POST /items`), the README should contain a string representation of that endpoint. 
- -**Validates: Requirements 3.2** - -### Property 5: README contains all required environment variable names - -*For any* environment variable required by either Lambda scenario (`TABLE_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE`, `SOURCE_TABLE_NAME`, `DESTINATION_TABLE_NAME`), the README should contain that variable name. - -**Validates: Requirements 3.4, 4.2** - -### Property 6: README contains all required HTTP response codes - -*For any* HTTP response code returned by the API scenario (200, 201, 404, 422, 500), the README should contain that status code. - -**Validates: Requirements 3.5** - -### Property 7: README contains all required data model field names - -*For any* data model field documented in the requirements (`id`, `name` for `Item`; `id`, `name` for `SourceItem` and `DestinationItem`), the README should contain that field name alongside its model. - -**Validates: Requirements 3.3, 4.4** - -### Property 8: All reference pages exist with correct mkdocstrings directives - -*For any* module that requires a reference page (`templates.app`, `templates.repository`, `templates.api.handler`, `templates.stream.handler`), the corresponding file under `docs/reference/` should exist and contain a mkdocstrings directive (`:::`) with the fully-qualified module path. - -**Validates: Requirements 6.1, 6.2, 6.3, 6.4, 9.3, 9.4** - -### Property 9: mkdocs.yml nav lists all four reference pages under Reference - -*For any* reference page that must appear in the nav (`app.md`, `repository.md`, `api.md`, `stream.md`), the `mkdocs.yml` nav section should contain an entry pointing to that page under the "Reference" section. - -**Validates: Requirements 6.5** - -### Property 10: All file paths referenced in the README exist in the repository - -*For any* file or directory path explicitly referenced in the README (e.g., `templates/`, `infra/`, `tests/`, `pyproject.toml`, `Makefile`), that path should exist in the repository. 
- -**Validates: Requirements 1.3, 9.1** - -## Error Handling - -This feature involves only static documentation files. There are no runtime errors to handle. The main failure modes are: - -- **Incorrect module path in a reference page**: mkdocstrings will fail to build the site with a `ModuleNotFoundError`. Mitigation: verify each `:::` directive against the actual package structure before committing. -- **Missing reference page file**: mkdocs will fail to build with a "file not found" error. Mitigation: create all four reference pages as part of this feature. -- **Broken nav entry in mkdocs.yml**: mkdocs will warn or error if a nav entry points to a non-existent file. Mitigation: ensure nav entries match the actual file paths. -- **Stale paths in README**: readers will encounter 404s or missing files. Mitigation: the correctness properties (P3, P10) explicitly check that all referenced paths and targets exist. - -## Testing Strategy - -This feature updates static documentation files. The testing approach is therefore document-content validation rather than unit or integration testing of executable code. - -### Unit Tests (example-based) - -Specific examples that verify concrete content requirements: - -- README contains the string `API_Scenario` or equivalent description -- README does not contain the string `project/` -- README contains a `make project` example invocation with placeholder values -- `docs/reference/app.md` contains `::: templates.app` (not `aws_lambda_template.app`) -- `mkdocs.yml` `site_name` equals `AWS Lambda Template - Python` -- CONTRIBUTING.md contains guidance on placing property-based tests - -### Property-Based Tests (Hypothesis) - -The project uses [Hypothesis](https://hypothesis.readthedocs.io) for property-based testing. For this documentation feature, the properties are validated by parameterised pytest tests (iterating over lists of required strings) rather than random generation, since the inputs are finite known sets. 
Each test is tagged with its design property reference. - -**Property test configuration**: minimum 1 iteration per parameterised case (inputs are deterministic lists, not random). Each test references its design property via a comment tag. - -Tag format: `# Feature: documentation-update, Property {N}: {property_text}` - -**Tests to implement**: - -- `test_readme_contains_all_tools` — iterates over the required tool list, asserts each appears in README content. *(Property 1)* -- `test_readme_no_project_folder_reference` — asserts `project/` not in README. *(Property 2)* -- `test_readme_contains_all_make_targets` — iterates over required target names, asserts each appears in README and in Makefile. *(Property 3)* -- `test_readme_contains_endpoints` — asserts endpoint strings present. *(Property 4)* -- `test_readme_contains_env_vars` — iterates over required env var names. *(Property 5)* -- `test_readme_contains_response_codes` — iterates over required status codes. *(Property 6)* -- `test_readme_contains_model_fields` — iterates over required field names. *(Property 7)* -- `test_reference_pages_exist_with_correct_directives` — iterates over module→file mapping, asserts file exists and contains correct `:::` directive. *(Property 8)* -- `test_mkdocs_nav_contains_all_reference_pages` — parses `mkdocs.yml`, asserts all four reference page paths appear under the Reference nav section. *(Property 9)* -- `test_readme_paths_exist` — iterates over paths referenced in README, asserts each exists on disk. *(Property 10)* - -Both unit and property tests live in `tests/` alongside existing tests and follow the same naming conventions. diff --git a/.kiro/specs/documentation-update/requirements.md b/.kiro/specs/documentation-update/requirements.md deleted file mode 100644 index 5ad1605..0000000 --- a/.kiro/specs/documentation-update/requirements.md +++ /dev/null @@ -1,140 +0,0 @@ -# Requirements Document - -## Introduction - -The project documentation is outdated and incomplete. 
The existing `docs/README.md` references a `project/` folder that no longer exists, omits both Lambda scenarios, the CDK infrastructure stacks, environment variable configuration, deployment workflows, and the property-based testing setup. The `mkdocs.yml` navigation only exposes a single reference page that points to a non-existent module. This feature updates all documentation to accurately reflect the current codebase and adds MkDocs reference pages for every documented module. - -## Glossary - -- **Docs_Site**: The MkDocs-based documentation site built from the `docs/` folder and `mkdocs.yml`. -- **README**: The file at `docs/README.md`, which serves as both the repository landing page and the MkDocs Overview home page. -- **CONTRIBUTING**: The file at `docs/CONTRIBUTING.md`, which describes how to contribute to the project. -- **API_Scenario**: The Lambda scenario under `templates/api/` that exposes a REST API via API Gateway backed by a single DynamoDB table. -- **Stream_Scenario**: The Lambda scenario under `templates/stream/` that consumes a DynamoDB Streams event source and replicates records to a destination DynamoDB table. -- **CDK_Stack**: An AWS CDK stack defined under `infra/stacks/` and deployed via `make deploy STACK=<stack>`. -- **Reference_Page**: A MkDocs page under `docs/reference/` that renders API documentation from Python docstrings using mkdocstrings. -- **Makefile_Target**: A named target in the project `Makefile` invoked with `make <target>`. -- **Environment_Variable**: A runtime configuration value read by a Lambda handler through a pydantic-settings `Settings` class. - ---- - -## Requirements - -### Requirement 1: Accurate Project Overview - -**User Story:** As a developer evaluating this template, I want the README to describe what the project actually contains, so that I can quickly understand its purpose and structure. - -#### Acceptance Criteria - -1.
THE README SHALL describe the project as a Python AWS Lambda template that provides two ready-to-use scenarios: API_Scenario and Stream_Scenario. -2. THE README SHALL list all tooling used by the project (Poetry, Click, pytest, coverage, ruff, pyright, pre-commit, GitHub Actions, Dependabot, Dev Containers, MkDocs, mkdocstrings, Docker, AWS CDK, AWS Lambda Powertools). -3. THE README SHALL include a project structure tree that reflects the actual layout: `templates/`, `templates/api/`, `templates/stream/`, `infra/`, `infra/stacks/`, `tests/`, `docs/`, and top-level configuration files. -4. THE README SHALL NOT reference the `project/` folder, which no longer exists in the repository. - ---- - -### Requirement 2: Setup and Prerequisites - -**User Story:** As a new contributor, I want clear setup instructions, so that I can get the project running locally without guessing. - -#### Acceptance Criteria - -1. THE README SHALL list the prerequisites for local development: Python 3.14+, Poetry, Docker (for Dev Containers), and AWS CDK CLI (for deployment). -2. THE README SHALL document the `make install` Makefile_Target as the command to install all project dependencies. -3. THE README SHALL document the `make precommit` Makefile_Target as the command to install pre-commit hooks. -4. THE README SHALL document the `make venv` Makefile_Target as the command to activate the virtual environment. -5. THE README SHALL document the `make lint` Makefile_Target as the command to format and lint the code. -6. THE README SHALL document the `make test` Makefile_Target as the command to run tests with coverage. - ---- - -### Requirement 3: Lambda Scenario Documentation — API Gateway + DynamoDB - -**User Story:** As a developer using this template, I want documentation for the API Gateway + DynamoDB scenario, so that I understand its endpoints, data model, and required configuration. - -#### Acceptance Criteria - -1. 
THE README SHALL describe the API_Scenario as a REST API Lambda function integrated with API Gateway and a single DynamoDB table. -2. THE README SHALL document the two HTTP endpoints exposed by the API_Scenario: `GET /items/{id}` and `POST /items`. -3. THE README SHALL document the `Item` data model fields: `id` (UUID string, auto-generated) and `name` (string). -4. THE README SHALL document the three Environment_Variables required by the API_Scenario: `TABLE_NAME`, `SERVICE_NAME`, and `METRICS_NAMESPACE`. -5. THE README SHALL document the HTTP response codes returned by the API_Scenario: 200 (item found), 201 (item created), 404 (item not found), 422 (validation error), and 500 (internal server error). - ---- - -### Requirement 4: Lambda Scenario Documentation — DynamoDB Streams - -**User Story:** As a developer using this template, I want documentation for the DynamoDB Streams scenario, so that I understand how it processes stream events and what configuration it requires. - -#### Acceptance Criteria - -1. THE README SHALL describe the Stream_Scenario as a Lambda function triggered by DynamoDB Streams that replicates INSERT and MODIFY events to a destination table and propagates REMOVE events as deletes. -2. THE README SHALL document the four Environment_Variables required by the Stream_Scenario: `SOURCE_TABLE_NAME`, `DESTINATION_TABLE_NAME`, `SERVICE_NAME`, and `METRICS_NAMESPACE`. -3. THE README SHALL document that the Stream_Scenario uses partial batch failure reporting so that individual record failures do not cause the entire batch to be retried. -4. THE README SHALL document the two data models used by the Stream_Scenario: `SourceItem` (read from the source table stream) and `DestinationItem` (written to the destination table). 
- ---- - -### Requirement 5: CDK Infrastructure and Deployment - -**User Story:** As a developer deploying this template, I want documentation for the CDK stacks and deployment commands, so that I can deploy and tear down infrastructure without reading the source code. - -#### Acceptance Criteria - -1. THE README SHALL document the two CDK_Stacks: `ApiGatewayDynamodbStack` (deployed with `make deploy STACK=api`) and `DynamodbStreamStack` (deployed with `make deploy STACK=stream`). -2. THE README SHALL document the `make deploy STACK=<stack>` Makefile_Target as the command to deploy a CDK_Stack to AWS. -3. THE README SHALL document the `make destroy STACK=<stack>` Makefile_Target as the command to destroy a deployed CDK_Stack. -4. THE README SHALL document the optional `AWS_PROFILE` variable that can be passed to `make deploy` and `make destroy` to select a named AWS CLI profile. -5. THE README SHALL document that the CDK entry point is `infra/app.py` and that the `STACK` environment variable selects which CDK_Stack to synthesise. - ---- - -### Requirement 6: Documentation Site Build and Navigation - -**User Story:** As a maintainer, I want the MkDocs site to have accurate navigation and working reference pages, so that generated API docs are accessible and correct. - -#### Acceptance Criteria - -1. THE Docs_Site SHALL include a Reference_Page for `templates.app` at `docs/reference/app.md` with the correct mkdocstrings directive `::: templates.app`. -2. THE Docs_Site SHALL include a Reference_Page for `templates.repository` at `docs/reference/repository.md`. -3. THE Docs_Site SHALL include a Reference_Page for `templates.api.handler` at `docs/reference/api.md` that documents the API_Scenario handler. -4. THE Docs_Site SHALL include a Reference_Page for `templates.stream.handler` at `docs/reference/stream.md` that documents the Stream_Scenario handler. -5. THE `mkdocs.yml` nav section SHALL list all four Reference_Pages under a "Reference" section. -6.
THE `mkdocs.yml` site_name SHALL be updated to reflect the actual project name (`AWS Lambda Template - Python`). - ---- - -### Requirement 7: Property-Based Testing Documentation - -**User Story:** As a contributor, I want documentation on the property-based testing setup, so that I know how to write and run Hypothesis tests. - -#### Acceptance Criteria - -1. THE README SHALL document that the project uses [Hypothesis](https://hypothesis.readthedocs.io) for property-based testing alongside pytest. -2. THE README SHALL document that `make test` runs both standard pytest tests and Hypothesis property-based tests in a single command. -3. THE CONTRIBUTING SHALL document that new property-based tests should be placed in the `tests/` directory alongside standard pytest tests and follow the same naming conventions. - ---- - -### Requirement 8: Template Renaming Instructions - -**User Story:** As a developer starting a new project from this template, I want clear instructions for renaming the template, so that I can customise it for my own project without errors. - -#### Acceptance Criteria - -1. THE README SHALL document the `make project` Makefile_Target with all accepted parameters: `NAME`, `DESCRIPTION`, `AUTHOR`, `EMAIL`, `GITHUB`, and the optional `SOURCE`. -2. THE README SHALL document that `make project` must be run once after cloning, before any other setup steps. -3. THE README SHALL provide a concrete example invocation of `make project` with placeholder values for all parameters. - ---- - -### Requirement 9: Documentation Accuracy and Consistency - -**User Story:** As a reader of the documentation, I want all commands, paths, and module names to be accurate, so that I can follow the documentation without encountering errors. - -#### Acceptance Criteria - -1. WHEN the README references a file path, THE README SHALL use a path that exists in the current repository layout. -2. 
WHEN the README references a Makefile_Target, THE README SHALL use a target name that exists in the `Makefile`. -3. WHEN a Reference_Page references a Python module with a mkdocstrings directive, THE Docs_Site SHALL use the fully-qualified module path (e.g., `templates.api.handler`) that matches the actual package structure. -4. IF the `docs/reference/app.md` file contains an incorrect module path (currently `aws_lambda_template.app`), THEN THE Docs_Site SHALL replace it with the correct path `templates.app`. diff --git a/.kiro/specs/documentation-update/tasks.md b/.kiro/specs/documentation-update/tasks.md deleted file mode 100644 index c58239c..0000000 --- a/.kiro/specs/documentation-update/tasks.md +++ /dev/null @@ -1,144 +0,0 @@ -# Implementation Plan: Documentation Update - -## Overview - -Update all documentation files to accurately reflect the current codebase (two Lambda scenarios, CDK stacks, pydantic-settings, Hypothesis testing), fix the broken mkdocstrings directive, add three new reference pages, and write parameterised pytest tests that validate the correctness properties from the design. - -## Tasks - -- [x] 1. 
Fix and update mkdocs.yml and reference pages - - [x] 1.1 Update `mkdocs.yml` site_name and nav - - Set `site_name` to `AWS Lambda Template - Python` - - Add all four reference pages under the "Reference" nav section: `reference/app.md`, `reference/repository.md`, `reference/api.md`, `reference/stream.md` - - _Requirements: 6.5, 6.6_ - - - [x] 1.2 Fix `docs/reference/app.md` directive - - Replace `aws_lambda_template.app` with `templates.app` - - _Requirements: 6.1, 9.3, 9.4_ - - - [x] 1.3 Create `docs/reference/repository.md` - - Add mkdocstrings directive `::: templates.repository` - - _Requirements: 6.2_ - - - [x] 1.4 Create `docs/reference/api.md` - - Add mkdocstrings directive `::: templates.api.handler` - - _Requirements: 6.3_ - - - [x] 1.5 Create `docs/reference/stream.md` - - Add mkdocstrings directive `::: templates.stream.handler` - - _Requirements: 6.4_ - -- [x] 2. Update `docs/README.md` - - [x] 2.1 Rewrite project overview section - - Describe the project as a Python AWS Lambda template with two scenarios: API_Scenario and Stream_Scenario - - List all required tooling: Poetry, Click, pytest, coverage, ruff, pyright, pre-commit, GitHub Actions, Dependabot, Dev Containers, MkDocs, mkdocstrings, Docker, AWS CDK, AWS Lambda Powertools - - Remove all references to the `project/` folder - - _Requirements: 1.1, 1.2, 1.4_ - - - [x] 2.2 Update prerequisites and setup section - - List prerequisites: Python 3.14+, Poetry, Docker, AWS CDK CLI - - Document `make install`, `make precommit`, `make venv`, `make lint`, `make test` targets - - _Requirements: 2.1, 2.2, 2.3, 2.4, 2.5, 2.6_ - - - [x] 2.3 Add template renaming instructions - - Document `make project` with all parameters: `NAME`, `DESCRIPTION`, `AUTHOR`, `EMAIL`, `GITHUB`, `SOURCE` - - State it must be run once after cloning, before other setup steps - - Include a concrete example invocation with placeholder values - - _Requirements: 8.1, 8.2, 8.3_ - - - [x] 2.4 Add API scenario documentation section - - 
Describe API_Scenario: REST API via API Gateway backed by DynamoDB - - Document endpoints: `GET /items/{id}` and `POST /items` - - Document `Item` model fields: `id` (UUID string, auto-generated) and `name` (string) - - Document environment variables: `TABLE_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE` - - Document response codes: 200, 201, 404, 422, 500 - - _Requirements: 3.1, 3.2, 3.3, 3.4, 3.5_ - - - [x] 2.5 Add Stream scenario documentation section - - Describe Stream_Scenario: Lambda triggered by DynamoDB Streams, replicates INSERT/MODIFY, propagates REMOVE as deletes - - Document partial batch failure reporting - - Document data models: `SourceItem` and `DestinationItem` - - Document environment variables: `SOURCE_TABLE_NAME`, `DESTINATION_TABLE_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE` - - _Requirements: 4.1, 4.2, 4.3, 4.4_ - - - [x] 2.6 Add CDK infrastructure and deployment section - - Document `ApiGatewayDynamodbStack` and `DynamodbStreamStack` - - Document `make deploy STACK=` and `make destroy STACK=` - - Document optional `AWS_PROFILE` variable - - Document CDK entry point `infra/app.py` and `STACK` env var - - _Requirements: 5.1, 5.2, 5.3, 5.4, 5.5_ - - - [x] 2.7 Add property-based testing and documentation build sections - - Document Hypothesis for property-based testing alongside pytest - - Document that `make test` runs both standard and Hypothesis tests - - Document `make docs` and `make local` targets - - _Requirements: 7.1, 7.2, 9.2_ - - - [x] 2.8 Update project structure tree - - Replace old tree with one reflecting actual layout: `templates/`, `templates/api/`, `templates/stream/`, `infra/`, `infra/stacks/`, `tests/`, `docs/`, top-level config files - - Ensure every path referenced in the README exists in the repository - - _Requirements: 1.3, 9.1_ - -- [x] 3. 
Update `docs/CONTRIBUTING.md` - - [x] 3.1 Add property-based testing guidance - - Document that new property-based tests go in `tests/` alongside standard pytest tests and follow the same naming conventions - - _Requirements: 7.3_ - -- [x] 4. Checkpoint — verify docs build locally - - Ensure all tests pass, ask the user if questions arise. - -- [x] 5. Write documentation correctness tests - - [x] 5.1 Create `tests/test_documentation.py` with shared fixtures - - Add fixtures that read `docs/README.md`, `docs/CONTRIBUTING.md`, `mkdocs.yml`, and `Makefile` content once per session - - _Requirements: 9.1, 9.2_ - - - [ ]* 5.2 Write property test for README tooling coverage - - **Property 1: README contains all required tooling names** - - **Validates: Requirements 1.2** - - - [ ]* 5.3 Write property test for removed project/ folder reference - - **Property 2: README does not reference the removed project/ folder** - - **Validates: Requirements 1.4** - - - [ ]* 5.4 Write property test for Makefile target coverage - - **Property 3: README contains all required Makefile target names** - - **Validates: Requirements 2.2, 2.3, 2.4, 2.5, 2.6, 5.2, 5.3, 8.1, 9.2** - - - [ ]* 5.5 Write property test for API endpoint strings - - **Property 4: README contains all required endpoint strings** - - **Validates: Requirements 3.2** - - - [ ]* 5.6 Write property test for environment variable names - - **Property 5: README contains all required environment variable names** - - **Validates: Requirements 3.4, 4.2** - - - [ ]* 5.7 Write property test for HTTP response codes - - **Property 6: README contains all required HTTP response codes** - - **Validates: Requirements 3.5** - - - [ ]* 5.8 Write property test for data model field names - - **Property 7: README contains all required data model field names** - - **Validates: Requirements 3.3, 4.4** - - - [ ]* 5.9 Write property test for reference page directives - - **Property 8: All reference pages exist with correct mkdocstrings 
directives** - - **Validates: Requirements 6.1, 6.2, 6.3, 6.4, 9.3, 9.4** - - - [ ]* 5.10 Write property test for mkdocs.yml nav completeness - - **Property 9: mkdocs.yml nav lists all four reference pages under Reference** - - **Validates: Requirements 6.5** - - - [ ]* 5.11 Write property test for README path existence - - **Property 10: All file paths referenced in the README exist in the repository** - - **Validates: Requirements 1.3, 9.1** - -- [x] 6. Final checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. - -## Notes - -- Tasks marked with `*` are optional and can be skipped for faster MVP -- Each task references specific requirements for traceability -- Property tests use parameterised pytest (not random generation) since inputs are finite known sets -- Each property test must include a comment tag: `# Feature: documentation-update, Property {N}: {property_text}` -- Test file must end with `if __name__ == "__main__": main()` diff --git a/.kiro/specs/eventbridge-api-caller/.config.kiro b/.kiro/specs/eventbridge-api-caller/.config.kiro deleted file mode 100644 index 8e08b93..0000000 --- a/.kiro/specs/eventbridge-api-caller/.config.kiro +++ /dev/null @@ -1 +0,0 @@ -{"specId": "12c4d135-f0f6-4dc9-be8b-d93ebf2ee5af", "workflowType": "requirements-first", "specType": "feature"} diff --git a/.kiro/specs/eventbridge-api-caller/design.md b/.kiro/specs/eventbridge-api-caller/design.md deleted file mode 100644 index eae5d94..0000000 --- a/.kiro/specs/eventbridge-api-caller/design.md +++ /dev/null @@ -1,379 +0,0 @@ -# Design Document — eventbridge-api-caller - -## Overview - -`eventbridge-api-caller` is a Lambda scenario template that demonstrates a function triggered by an Amazon EventBridge scheduled (or event-pattern) rule that calls an external HTTP API on every invocation. 
The authentication token for the API is loaded at runtime from AWS Secrets Manager via `SecretsProvider` from AWS Lambda Powertools `parameters`, so credentials are never hard-coded or stored in environment variables. The API response is persisted to a DynamoDB table using the shared `Repository` class from `templates/repository.py`. - -The template follows all existing project conventions: Poetry, Pydantic v2, pydantic-settings, AWS Lambda Powertools (Logger / Tracer / Metrics), camelCase aliases, `Field(description=...)` on every field, and the same module layout used by the `api/` and `stream/` templates. - -### Key design decisions - -- `urllib.request` is used for outbound HTTP calls to avoid adding a third-party HTTP dependency. -- `SecretClient` and `ApiClient` are thin wrapper classes (mirroring the `Repository` pattern) so that tests can mock them at the object level rather than patching stdlib or boto3 internals. -- The `Handler` class owns the orchestration logic; the module-level `main` function is the Lambda entry point decorated with Powertools utilities. -- Metrics (`ApiCallSuccess` / `ApiCallFailure`) are emitted as CloudWatch EMF metrics via `Metrics.add_metric`. -- The shared `Repository` class from `templates/repository.py` is imported directly and initialised with `settings.table_name`; no new repository file is created. 
- ---- - -## Architecture - -```mermaid -sequenceDiagram - participant EB as EventBridge Rule - participant L as Lambda (handler.main) - participant SC as SecretClient - participant SM as Secrets Manager - participant AC as ApiClient - participant API as External HTTP API - participant DB as DynamoDB (TABLE_NAME) - - EB->>L: Scheduled / event-pattern trigger - L->>SC: get_token(secret_name) - SC->>SM: SecretsProvider.get(secret_name) - SM-->>SC: token string - SC-->>L: token - L->>AC: call(api_url, token) - AC->>API: GET {api_url} Authorization: Bearer {token} - API-->>AC: HTTP response - AC-->>L: ApiResponse - L->>DB: repository.put_item(response.model_dump()) - L-->>EB: (implicit success / exception) -``` - -```mermaid -graph TD - subgraph AWS - EB[EventBridge Rule
rate(5 minutes)] - L[Lambda Function
templates.eventbridge.handler.main] - SM[Secrets Manager
SECRET_NAME] - CW[CloudWatch Metrics
METRICS_NAMESPACE] - DB[DynamoDB Table
TABLE_NAME] - end - EXT[External HTTP API
API_URL] - - EB -->|invoke| L - L -->|GetSecretValue| SM - L -->|HTTP GET + Bearer| EXT - L -->|EMF metrics| CW - L -->|PutItem| DB -``` - ---- - -## Components and Interfaces - -### `templates/eventbridge/settings.py` — `Settings` - -Reads all required configuration from environment variables at cold-start using `pydantic-settings`. - -```python -class Settings(BaseSettings, case_sensitive=False): - api_url: str = Field(description="URL of the external HTTP API to call.") - secret_name: str = Field(description="AWS Secrets Manager secret name holding the API token.") - service_name: str = Field(description="Powertools service name used for Logger and Tracer.") - metrics_namespace: str = Field(description="CloudWatch namespace for Powertools Metrics.") - table_name: str = Field(description="DynamoDB table name for persisting API responses.") -``` - -Raises `ValidationError` on cold-start if any variable is absent. - ---- - -### `templates/eventbridge/models.py` — `EventBridgeEvent`, `ApiResponse` - -Both models use `populate_by_name=True` and `alias_generator=to_camel` so camelCase JSON payloads map to snake_case Python attributes. - -```python -class EventBridgeEvent(BaseModel, populate_by_name=True, alias_generator=to_camel): - source: str = Field(description="Event source (e.g. 'aws.events' for scheduled rules).") - detail_type: str = Field(description="Human-readable event type string.") - detail: dict[str, Any] = Field(description="Free-form event detail payload.") - -class ApiResponse(BaseModel, populate_by_name=True, alias_generator=to_camel): - status: str = Field(description="Status string returned by the external API.") -``` - -`detail_type` maps to the JSON key `detailType` via `to_camel`. - ---- - -### `templates/eventbridge/secret_client.py` — `SecretClient` - -Owns all interactions with AWS Secrets Manager. Uses `SecretsProvider` from `aws_lambda_powertools.utilities.parameters`. 
- -```python -class SecretClient: - def __init__(self) -> None: - self._provider = SecretsProvider() - - def get_token(self, secret_name: str) -> str: - return self._provider.get(secret_name) -``` - -Raises the underlying Powertools / boto3 exception on failure; the `Handler` catches and re-raises. - ---- - -### `templates/eventbridge/api_client.py` — `ApiClient` - -Owns all outbound HTTP calls using `urllib.request`. No third-party HTTP library. - -```python -class ApiClient: - def call(self, url: str, token: str) -> dict[str, Any]: - req = Request(url, headers={"Authorization": f"Bearer {token}"}) - with urlopen(req) as response: - if response.status < 200 or response.status >= 300: - raise HTTPError(...) - return loads(response.read()) -``` - -Raises `HTTPError` for non-2xx responses and propagates network-level exceptions (`URLError`, `TimeoutError`) unchanged. - ---- - -### `templates/repository.py` — `Repository` - -The shared DynamoDB abstraction imported directly from `templates/repository.py`. No new file is created for this scenario. The module-level singleton is initialised with `settings.table_name` and `put_item` is called with `response.model_dump()` after a successful API call. - -```python -# In handler.py (module level) -from templates.repository import Repository - -repository = Repository(settings.table_name) -``` - -`put_item` raises on any DynamoDB error; the `Handler` catches and re-raises. - ---- - -### `templates/eventbridge/handler.py` — `Handler` + `main` - -Orchestrates the invocation flow. Module-level singletons are created at cold-start; `main` is the Lambda entry point. - -```python -class Handler: - def __init__(self, secret_client: SecretClient, api_client: ApiClient, repository: Repository) -> None: ... - - @tracer.capture_method - def handle(self, event: EventBridgeEvent) -> ApiResponse: ... 
- -@logger.inject_lambda_context -@tracer.capture_lambda_handler -@metrics.log_metrics -def main(event: dict, context: LambdaContext) -> None: - try: - eb_event = EventBridgeEvent.model_validate(event) - except ValidationError as exc: - logger.error("Invalid EventBridge event", exc_info=exc) - return - handler.handle(eb_event) -``` - -Invocation flow inside `Handler.handle`: -1. Call `secret_client.get_token(settings.secret_name)` — raises on failure. -2. Call `api_client.call(settings.api_url, token)` — raises on non-2xx or network error. -3. Parse response into `ApiResponse`. -4a. Call `repository.put_item(response.model_dump())` — raises on failure. -5. Emit `ApiCallSuccess` metric and log success. -6. On any exception from steps 1–4a: emit `ApiCallFailure` metric, log error, re-raise. - ---- - -### `infra/stacks/eventbridge.py` — `EventBridgeApiCallerStack` - -CDK stack that provisions all required AWS resources. - -```python -class EventBridgeApiCallerStack(Stack): - # Lambda function (Runtime.PYTHON_3_13, handler=templates.eventbridge.handler.main) - # EventBridge Rule (Schedule.rate(Duration.minutes(5))) - # DynamoDB Table (partition_key=Attribute(name="id", type=AttributeType.STRING), RemovalPolicy.DESTROY) - # IAM: secret.grant_read(function) - # IAM: table.grant_write_data(function) → dynamodb:PutItem - # Environment: API_URL, SECRET_NAME, SERVICE_NAME, METRICS_NAMESPACE, TABLE_NAME -``` - ---- - -### `tests/eventbridge/test_handler.py` - -Unit tests using `pytest` + `pytest-mock`. The `autouse` fixture sets all required env vars including `TABLE_NAME`. Tests mock `handler.secret_client`, `handler.api_client`, and `handler.repository` at the object level. 
- ---- - -## Data Models - -### `EventBridgeEvent` - -| Python attribute | JSON key (camelCase) | Type | Description | -|---|---|---|---| -| `source` | `source` | `str` | Event source identifier | -| `detail_type` | `detailType` | `str` | Human-readable event type | -| `detail` | `detail` | `dict[str, Any]` | Free-form event detail payload | - -### `ApiResponse` - -| Python attribute | JSON key (camelCase) | Type | Description | -|---|---|---|---| -| `status` | `status` | `str` | Status string from the external API | - -### `Settings` - -| Attribute | Env var | Description | -|---|---|---| -| `api_url` | `API_URL` | External API URL | -| `secret_name` | `SECRET_NAME` | Secrets Manager secret name | -| `service_name` | `SERVICE_NAME` | Powertools service name | -| `metrics_namespace` | `METRICS_NAMESPACE` | CloudWatch metrics namespace | -| `table_name` | `TABLE_NAME` | DynamoDB table name for persisting API responses | - ---- - -## Correctness Properties - -*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* - -### Property 1: Handler accepts any valid EventBridge event shape - -*For any* event dict that contains the fields `source`, `detailType`, and `detail` (regardless of whether `source` is `"aws.events"` or an arbitrary custom string), `EventBridgeEvent.model_validate` should succeed and the handler should proceed to call the ApiClient. - -**Validates: Requirements 1.2** - ---- - -### Property 2: Invalid event prevents ApiClient call - -*For any* event dict that is missing one or more of the required fields (`source`, `detailType`, `detail`), the handler should return without invoking `ApiClient.call`. 
- -**Validates: Requirements 1.3** - ---- - -### Property 3: SecretClient exception propagates - -*For any* exception raised by `SecretClient.get_token`, the handler should propagate an exception (not swallow it silently), signalling a Lambda invocation failure. - -**Validates: Requirements 2.3** - ---- - -### Property 4: Bearer token header for any token string - -*For any* non-empty token string, `ApiClient.call` should include an `Authorization` header whose value is exactly `"Bearer {token}"`. - -**Validates: Requirements 3.2** - ---- - -### Property 5: 2xx response parsed into ApiResponse - -*For any* HTTP response body that is a valid JSON object containing a `status` field, when the HTTP status code is in the 2xx range, the handler should parse the body into an `ApiResponse` without raising. - -**Validates: Requirements 3.3** - ---- - -### Property 6: ApiClient failure propagates exception - -*For any* failure raised by `ApiClient.call` — whether a non-2xx HTTP status code or a network-level exception — the handler should propagate an exception to signal a Lambda invocation failure. - -**Validates: Requirements 3.4, 3.5** - ---- - -### Property 7: Missing required env var raises ValidationError - -*For any* subset of the required environment variables (`API_URL`, `SECRET_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE`, `TABLE_NAME`) that is absent, constructing `Settings()` should raise a `ValidationError`. - -**Validates: Requirements 5.1, 5.2** - ---- - -### Property 8: EventBridgeEvent camelCase round-trip - -*For any* dict with camelCase keys `source`, `detailType`, and `detail`, parsing with `EventBridgeEvent.model_validate` and re-serialising with `model_dump(by_alias=True)` should produce a dict equal to the original input. 
- -**Validates: Requirements 6.1, 6.3** - ---- - -### Property 9: ApiResponse camelCase round-trip - -*For any* dict with a `status` key, parsing with `ApiResponse.model_validate` and re-serialising with `model_dump(by_alias=True)` should produce a dict equal to the original input. - -**Validates: Requirements 6.2, 6.4** - ---- - -### Property 10: Successful API response is persisted to DynamoDB - -*For any* valid `ApiResponse`, after a successful `ApiClient.call`, `repository.put_item` should be called exactly once with the response dict (`response.model_dump()`). - -**Validates: Requirements 11.1** - ---- - -### Property 11: DynamoDB write failure propagates exception - -*For any* exception raised by `repository.put_item`, the handler should propagate an exception to signal a Lambda invocation failure. - -**Validates: Requirements 11.4** - ---- - -## Error Handling - -| Failure scenario | Detection point | Handler behaviour | -|---|---|---| -| Invalid EventBridge event payload | `EventBridgeEvent.model_validate` raises `ValidationError` | Log error, return (no exception — not a retriable failure) | -| Secrets Manager unavailable | `SecretClient.get_token` raises | Log error, re-raise → Lambda marks invocation failed | -| External API non-2xx response | `ApiClient.call` raises `HTTPError` | Log status code, emit `ApiCallFailure` metric, re-raise | -| Network-level exception (timeout, DNS) | `ApiClient.call` raises `URLError` / `TimeoutError` | Log exception, emit `ApiCallFailure` metric, re-raise | -| DynamoDB write failure | `repository.put_item` raises | Log error, emit `ApiCallFailure` metric, re-raise | -| Missing env var at cold-start | `Settings()` raises `ValidationError` | Lambda init fails immediately; no invocation proceeds | - ---- - -## Testing Strategy - -### Dual testing approach - -Both unit tests and property-based tests are used. 
Unit tests cover specific examples and integration wiring; property tests verify universal correctness across generated inputs. - -### Unit tests (`tests/eventbridge/test_handler.py`) - -Specific examples and error-path checks: - -- Successful invocation: token loaded, API called, `ApiCallSuccess` metric emitted, `repository.put_item` called with response dict. -- Secret loading failure: `SecretClient.get_token` raises → handler re-raises, `ApiCallFailure` emitted. -- API non-2xx response: `ApiClient.call` raises `HTTPError` → handler re-raises. -- API network exception: `ApiClient.call` raises `URLError` → handler re-raises. -- Invalid EventBridge event: handler returns without calling `ApiClient`. -- DynamoDB write failure: `repository.put_item` raises → handler re-raises, `ApiCallFailure` emitted. - -All tests use an `autouse` fixture to set env vars (including `TABLE_NAME`) via `monkeypatch.setenv`. `SecretClient`, `ApiClient`, and `repository` are mocked via `mocker.patch.object` on the module-level instances. - -### Property-based tests (`tests/eventbridge/test_properties.py`) - -Uses **Hypothesis** (already a dev dependency) with a minimum of 100 examples per property. 
- -Each test is tagged with a comment in the format: -`# Feature: eventbridge-api-caller, Property {N}: {property_text}` - -| Test | Property | Hypothesis strategy | -|---|---|---| -| `test_valid_event_shapes` | Property 1 | `st.text()` for source/detailType, `st.dictionaries(...)` for detail | -| `test_invalid_event_prevents_api_call` | Property 2 | `st.fixed_dictionaries` with one required key removed | -| `test_secret_exception_propagates` | Property 3 | `st.from_type(Exception)` for exception type | -| `test_bearer_token_header` | Property 4 | `st.text(min_size=1)` for token | -| `test_2xx_response_parsed` | Property 5 | `st.integers(200, 299)` for status code, `st.text()` for status field | -| `test_api_failure_propagates` | Property 6 | `st.integers(400, 599)` for non-2xx codes | -| `test_missing_env_var_raises` | Property 7 | `st.sampled_from` over required var names | -| `test_eventbridge_event_round_trip` | Property 8 | `st.text()` for source/detailType, `st.dictionaries(...)` for detail | -| `test_api_response_round_trip` | Property 9 | `st.text(min_size=1)` for status | -| `test_successful_response_persisted` | Property 10 | `st.text(min_size=1)` for status; assert `put_item` called once with `response.model_dump()` | -| `test_dynamodb_write_failure_propagates` | Property 11 | `st.from_type(Exception)` for exception raised by `put_item` | - -Each property test runs `@settings(max_examples=100)`. diff --git a/.kiro/specs/eventbridge-api-caller/requirements.md b/.kiro/specs/eventbridge-api-caller/requirements.md deleted file mode 100644 index cada37c..0000000 --- a/.kiro/specs/eventbridge-api-caller/requirements.md +++ /dev/null @@ -1,168 +0,0 @@ -# Requirements Document - -## Introduction - -This feature adds a new Lambda scenario template called `eventbridge-api-caller`. 
The template demonstrates a Lambda function triggered by an Amazon EventBridge scheduled rule or event-pattern rule that calls an external HTTP API using an authentication token loaded at runtime from AWS Secrets Manager via `SecretsProvider` from AWS Lambda Powertools `parameters`. The API response is persisted to a DynamoDB table using the shared `Repository` class. The template follows all existing project conventions (Poetry, Pydantic, AWS Lambda Powertools, repository pattern, camelCase aliases, Field descriptions) and is structured identically to the existing `api/` and `stream/` templates. - -## Glossary - -- **Handler**: The AWS Lambda function entry point module at `templates/eventbridge/handler.py`. -- **SecretClient**: The abstraction layer in `secret_client.py` that owns all calls to AWS Secrets Manager via `SecretsProvider`. -- **ApiClient**: The abstraction layer in `api_client.py` that owns all outbound HTTP calls to the external API. -- **Settings**: The Pydantic-settings class in `settings.py` that reads Lambda environment variables. -- **EventBridgeEvent**: The structured Pydantic model representing the incoming EventBridge event payload. -- **ApiResponse**: The structured Pydantic model representing the response received from the external API. -- **EventBridgeApiCallerStack**: The AWS CDK stack in `infra/stacks/eventbridge.py` that provisions all required AWS resources. -- **Token**: The authentication credential (e.g., API key or Bearer token) retrieved from AWS Secrets Manager and injected into outbound HTTP requests. -- **SecretSource**: AWS Secrets Manager, accessed via `SecretsProvider` from `aws_lambda_powertools.utilities.parameters`. -- **Repository**: The shared DynamoDB abstraction at `templates/repository.py` used to persist API responses. -- **TABLE_NAME**: The DynamoDB table name read from the `TABLE_NAME` environment variable. 
- ---- - -## Requirements - -### Requirement 1: EventBridge Trigger - -**User Story:** As a platform engineer, I want the Lambda function to be invoked by Amazon EventBridge, so that I can schedule or react to events without managing polling infrastructure. - -#### Acceptance Criteria - -1. WHEN an EventBridge rule fires, THE Handler SHALL be invoked with the EventBridge event payload. -2. THE Handler SHALL accept both scheduled-rule events (with `source: "aws.events"`) and custom event-pattern events. -3. IF the incoming event cannot be parsed into an EventBridgeEvent model, THEN THE Handler SHALL log the validation error and return without calling the external API. - ---- - -### Requirement 2: Token Loading from External Secret Source - -**User Story:** As a security-conscious engineer, I want the authentication token to be loaded from AWS Secrets Manager at runtime, so that credentials are never hard-coded or stored in environment variables. - -#### Acceptance Criteria - -1. WHEN the Handler initialises, THE SecretClient SHALL load the Token from AWS Secrets Manager using the secret name identified by the environment variable `SECRET_NAME`. -2. THE SecretClient SHALL use `SecretsProvider` from `aws_lambda_powertools.utilities.parameters` for all AWS Secrets Manager interactions. -3. IF the SecretSource is unavailable or returns an error, THEN THE Handler SHALL log the error and raise an exception to signal a Lambda invocation failure. - ---- - -### Requirement 3: External HTTP API Call - -**User Story:** As a platform engineer, I want the Lambda function to call an external HTTP API on each invocation, so that I can integrate with third-party services on a schedule or in response to events. - -#### Acceptance Criteria - -1. WHEN the Handler is invoked, THE ApiClient SHALL send an HTTP request to the URL specified by the environment variable `API_URL`. -2. 
THE ApiClient SHALL include the Token in the `Authorization` header of every outbound request using the `Bearer` scheme. -3. WHEN the external API returns a 2xx response, THE Handler SHALL parse the response body into an ApiResponse model and log a success metric. -4. IF the external API returns a non-2xx HTTP status code, THEN THE Handler SHALL log the status code and raise an exception to signal a Lambda invocation failure. -5. IF the HTTP request raises a network-level exception (e.g., timeout or connection error), THEN THE Handler SHALL log the exception and re-raise it to signal a Lambda invocation failure. -6. THE ApiClient SHALL use the `urllib.request` standard-library module for all outbound HTTP calls, with no additional third-party HTTP client dependency. - ---- - -### Requirement 4: Observability - -**User Story:** As an operations engineer, I want structured logs, distributed traces, and custom metrics emitted on every invocation, so that I can monitor and debug the function in production. - -#### Acceptance Criteria - -1. THE Handler SHALL emit structured JSON logs using AWS Lambda Powertools `Logger`. -2. THE Handler SHALL emit distributed traces using AWS Lambda Powertools `Tracer`. -3. THE Handler SHALL emit a custom metric `ApiCallSuccess` (count) to the CloudWatch namespace defined by `METRICS_NAMESPACE` on each successful API call. -4. THE Handler SHALL emit a custom metric `ApiCallFailure` (count) to the CloudWatch namespace defined by `METRICS_NAMESPACE` on each failed API call. -5. WHEN the Handler is invoked, THE Logger SHALL inject the Lambda context (request ID, function name) into every log record. - ---- - -### Requirement 5: Configuration via Environment Variables - -**User Story:** As a developer, I want all runtime configuration to be read from environment variables, so that the same deployment artifact can be used across environments without code changes. - -#### Acceptance Criteria - -1. 
THE Settings SHALL read the following required environment variables: `API_URL`, `SECRET_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE`, `TABLE_NAME`. -2. IF a required environment variable is absent at Lambda cold-start, THEN THE Settings SHALL raise a validation error that prevents the Handler from initialising. -3. THE Settings class SHALL use `pydantic-settings` `BaseSettings` with each field documented via `Field(description="...")`. - ---- - -### Requirement 6: Data Models - -**User Story:** As a developer, I want all event and response payloads to be validated with Pydantic models, so that type safety and schema documentation are enforced at runtime. - -#### Acceptance Criteria - -1. THE EventBridgeEvent model SHALL validate the incoming EventBridge event and expose at minimum the fields `source`, `detail_type`, and `detail`. -2. THE ApiResponse model SHALL validate the external API response body and expose at minimum a `status` field. -3. THE EventBridgeEvent model SHALL be defined with `populate_by_name=True` and `alias_generator=to_camel`. -4. THE ApiResponse model SHALL be defined with `populate_by_name=True` and `alias_generator=to_camel`. -5. EVERY field in EventBridgeEvent and ApiResponse SHALL be documented with `Field(description="...")`. - ---- - -### Requirement 7: CDK Infrastructure Stack - -**User Story:** As a platform engineer, I want a CDK stack that provisions all required AWS resources, so that the template can be deployed with a single command. - -#### Acceptance Criteria - -1. THE EventBridgeApiCallerStack SHALL provision an AWS Lambda function using `Runtime.PYTHON_3_13` and handler `templates.eventbridge.handler.main`. -2. THE EventBridgeApiCallerStack SHALL provision an EventBridge rule that triggers the Lambda function on a configurable schedule (default: every 5 minutes). -3. THE EventBridgeApiCallerStack SHALL grant the Lambda function `secretsmanager:GetSecretValue` permission on the secret identified by `SECRET_NAME`. -4. 
THE EventBridgeApiCallerStack SHALL pass `API_URL`, `SECRET_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE`, and `TABLE_NAME` to the Lambda function as environment variables. -5. THE EventBridgeApiCallerStack SHALL be registered in `infra/app.py` under the key `"eventbridge-api-caller"`. -6. THE EventBridgeApiCallerStack SHALL provision a DynamoDB table with a partition key `id` (String). -7. THE EventBridgeApiCallerStack SHALL grant the Lambda function `dynamodb:PutItem` permission on the DynamoDB table. -8. THE EventBridgeApiCallerStack SHALL pass `TABLE_NAME` to the Lambda function as an environment variable referencing the provisioned DynamoDB table name. - ---- - -### Requirement 8: Unit Tests - -**User Story:** As a developer, I want a comprehensive unit-test suite, so that I can verify handler behaviour without deploying to AWS. - -#### Acceptance Criteria - -1. THE test suite SHALL cover: successful invocation (token loaded, API called, success metric emitted, DynamoDB write performed), secret loading failure, API non-2xx response, API network exception, invalid EventBridge event payload, and DynamoDB write failure (`repository.put_item` raises → handler re-raises, `ApiCallFailure` emitted). -2. THE test suite SHALL mock the SecretClient and ApiClient instances on the handler module rather than patching boto3 or urllib directly. -3. THE test suite SHALL use `pytest` with `pytest-mock` and `monkeypatch` for all mocking, consistent with existing test conventions. -4. EVERY test file SHALL end with the `if __name__ == "__main__": main()` block. -5. THE test suite SHALL set all required environment variables via an `autouse` fixture using `monkeypatch.setenv`. - ---- - -### Requirement 9: Documentation - -**User Story:** As a developer, I want a documentation page for the new template, so that I can understand the scenario and find the relevant files quickly. - -#### Acceptance Criteria - -1. 
THE documentation SHALL be created at `docs/template/eventbridge.md` and follow the structure of `docs/template/stream.md`. -2. THE documentation SHALL list the trigger, destination, code locations, data models, and environment variables. -3. THE `docs/template/index.md` file SHALL be updated to include a link to the new documentation page. - ---- - -### Requirement 10: Makefile and Deployment Targets - -**User Story:** As a developer, I want `make deploy` and `make destroy` to support the new stack, so that I can deploy and tear down the template with the same workflow as existing templates. - -#### Acceptance Criteria - -1. THE Makefile `STACK_MAP` SHALL include an entry mapping `eventbridge-api-caller` to `EventBridgeApiCallerStack`. -2. WHEN `make deploy STACK=eventbridge-api-caller` is executed, THE Makefile SHALL invoke CDK deploy for `EventBridgeApiCallerStack`. -3. WHEN `make destroy STACK=eventbridge-api-caller` is executed, THE Makefile SHALL invoke CDK destroy for `EventBridgeApiCallerStack`. - ---- - -### Requirement 11: DynamoDB Persistence - -**User Story:** As a platform engineer, I want the API response to be persisted to a DynamoDB table, so that I have a durable record of each invocation's result. - -#### Acceptance Criteria - -1. WHEN the Handler receives a successful ApiResponse, THE Handler SHALL call `repository.put_item` to persist the response to DynamoDB. -2. THE Handler SHALL use the shared `Repository` class from `templates/repository.py` for all DynamoDB interactions. -3. THE Repository SHALL be initialised with the table name from the environment variable `TABLE_NAME`. -4. IF `repository.put_item` raises an exception, THEN THE Handler SHALL log the error and raise an exception to signal a Lambda invocation failure. 
diff --git a/.kiro/specs/eventbridge-api-caller/tasks.md b/.kiro/specs/eventbridge-api-caller/tasks.md deleted file mode 100644 index 43256e3..0000000 --- a/.kiro/specs/eventbridge-api-caller/tasks.md +++ /dev/null @@ -1,201 +0,0 @@ -# Implementation Plan: eventbridge-api-caller - -## Overview - -Implement the `eventbridge-api-caller` Lambda scenario template: a function triggered by an EventBridge rule that loads an auth token from Secrets Manager, calls an external HTTP API, and persists the response to a DynamoDB table using the shared `Repository` class. Includes CDK stack, unit tests, property-based tests, documentation, and Makefile wiring. - -## Tasks - -- [x] 1. Create feature branch - - Run `git checkout -b feat/eventbridge-api-caller` - - _Requirements: (prerequisite for all work)_ - -- [x] 2. Scaffold `templates/eventbridge/` module - - [x] 2.1 Create `templates/eventbridge/settings.py` - - Define `Settings(BaseSettings, case_sensitive=False)` with fields: `api_url`, `secret_name`, `service_name`, `metrics_namespace`, `table_name` - - Every field must use `Field(description="...")` - - Raises `ValidationError` on cold-start if any variable is absent - - _Requirements: 5.1, 5.2, 5.3, 11.3_ - - - [x] 2.2 Create `templates/eventbridge/models.py` - - Define `EventBridgeEvent(BaseModel, populate_by_name=True, alias_generator=to_camel)` with fields: `source`, `detail_type`, `detail` - - Define `ApiResponse(BaseModel, populate_by_name=True, alias_generator=to_camel)` with field: `status` - - Every field must use `Field(description="...")` - - _Requirements: 6.1, 6.2, 6.3, 6.4, 6.5_ - - - [x] 2.3 Create `templates/eventbridge/secret_client.py` - - Define `SecretClient` with `get_token(secret_name: str) -> str` using `SecretsProvider` from `aws_lambda_powertools.utilities.parameters` - - Propagate exceptions unchanged on failure - - _Requirements: 2.1, 2.2, 2.3_ - - - [x] 2.4 Create `templates/eventbridge/api_client.py` - - Define `ApiClient` with `call(url: str, 
token: str) -> dict[str, Any]` using `urllib.request` only - - Include `Authorization: Bearer {token}` header on every request - - Raise `HTTPError` for non-2xx responses; propagate `URLError`/`TimeoutError` unchanged - - _Requirements: 3.1, 3.2, 3.4, 3.5, 3.6_ - - - [x] 2.5 Create `templates/eventbridge/handler.py` - - Instantiate `Settings`, `Logger`, `Tracer`, `Metrics`, `SecretClient`, `ApiClient` at module level (cold-start) - - Define `Handler` class with `handle(event: EventBridgeEvent) -> ApiResponse` decorated with `@tracer.capture_method` - - Invocation flow: `get_token` → `api_client.call` → parse `ApiResponse` → emit `ApiCallSuccess` metric - - On any exception from token/API steps: emit `ApiCallFailure` metric, log error, re-raise - - Define module-level `main(event: dict, context: LambdaContext) -> None` decorated with `@logger.inject_lambda_context`, `@tracer.capture_lambda_handler`, `@metrics.log_metrics` - - In `main`: validate event with `EventBridgeEvent.model_validate`; on `ValidationError` log and return without calling handler - - _Requirements: 1.1, 1.2, 1.3, 2.3, 3.3, 3.4, 3.5, 4.1, 4.2, 4.3, 4.4, 4.5_ - - - [x] 2.6 Wire DynamoDB persistence into `handler.py` - - Import `Repository` from `templates.repository` and instantiate at module level: `repository = Repository(settings.table_name)` - - Pass `repository` into `Handler.__init__` alongside `secret_client` and `api_client` - - After parsing `ApiResponse`, call `repository.put_item(response.model_dump())` - - On `put_item` failure: emit `ApiCallFailure` metric, log error, re-raise - - _Requirements: 11.1, 11.2, 11.3, 11.4_ - - - [x] 2.7 Create `templates/eventbridge/__init__.py` - - Empty init to make the directory a package - - _Requirements: (module structure)_ - -- [x] 3. 
Create CDK stack and register it - [x] 3.1 Create `infra/stacks/eventbridge.py` - Define `EventBridgeApiCallerStack(Stack)` provisioning: - `aws_lambda.Function` with `Runtime.PYTHON_3_13`, handler `templates.eventbridge.handler.main`, `Code.from_asset(".")` - `aws_dynamodb.Table` with `partition_key=Attribute(name="id", type=AttributeType.STRING)` and `RemovalPolicy.DESTROY` - `table.grant_write_data(function)` granting `dynamodb:PutItem` - Environment variables: `API_URL`, `SECRET_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE`, `TABLE_NAME` (set to `table.table_name`) - `aws_events.Rule` with `Schedule.rate(Duration.minutes(5))` targeting the Lambda function - `aws_secretsmanager.Secret` (or reference by name) with `secret.grant_read(function)` for `secretsmanager:GetSecretValue` - - _Requirements: 7.1, 7.2, 7.3, 7.4, 7.5, 7.6, 7.7, 7.8, 11.3_ - - [x] 3.2 Register stack in `infra/app.py` - Add `from infra.stacks.eventbridge import EventBridgeApiCallerStack` - Add `"eventbridge-api-caller": EventBridgeApiCallerStack` to `STACK_REGISTRY` - - _Requirements: 7.5_ - -- [x] 4. Checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. - -- [x] 5. 
Write unit tests - - [x] 5.1 Create `tests/eventbridge/__init__.py` - - Empty init file - - _Requirements: (test structure)_ - - - [x] 5.2 Create `tests/eventbridge/test_handler.py` - - Add `autouse` fixture that sets all required env vars via `monkeypatch.setenv`: `API_URL`, `SECRET_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE`, `TABLE_NAME` - - Mock `handler.secret_client`, `handler.api_client`, and `handler.repository` via `mocker.patch.object` on the module-level instances - - Cover: successful invocation (token loaded, API called, `ApiCallSuccess` metric emitted, `repository.put_item` called with `response.model_dump()`) - - Cover: secret loading failure (`SecretClient.get_token` raises → handler re-raises, `ApiCallFailure` emitted) - - Cover: API non-2xx response (`ApiClient.call` raises `HTTPError` → handler re-raises, `ApiCallFailure` emitted) - - Cover: API network exception (`ApiClient.call` raises `URLError` → handler re-raises, `ApiCallFailure` emitted) - - Cover: invalid EventBridge event (missing required field → handler returns without calling `ApiClient`) - - Cover: DynamoDB write failure (`repository.put_item` raises → handler re-raises, `ApiCallFailure` emitted) - - End file with `if __name__ == "__main__": main()` - - _Requirements: 8.1, 8.2, 8.3, 8.4, 8.5, 11.1, 11.4_ - -- [x] 6. 
Write property-based tests - - [x] 6.1 Create `tests/eventbridge/test_properties.py` - - Use `hypothesis` with `@settings(max_examples=100)` on every test - - Tag each test with a comment: `# Feature: eventbridge-api-caller, Property {N}: {property_text}` - - Add `autouse` fixture setting all required env vars via `monkeypatch.setenv` (including `TABLE_NAME`) - - - [ ]* 6.2 Write property test for valid event shapes (Property 1) - - **Property 1: Handler accepts any valid EventBridge event shape** - - Use `st.text()` for `source`/`detailType`, `st.dictionaries(st.text(), st.text())` for `detail` - - Assert `EventBridgeEvent.model_validate` succeeds and `ApiClient.call` is invoked - - **Validates: Requirements 1.2** - - - [ ]* 6.3 Write property test for invalid event prevents API call (Property 2) - - **Property 2: Invalid event prevents ApiClient call** - - Use `st.fixed_dictionaries` with one required key removed per run - - Assert `ApiClient.call` is never invoked - - **Validates: Requirements 1.3** - - - [ ]* 6.4 Write property test for secret exception propagation (Property 3) - - **Property 3: SecretClient exception propagates** - - Use `st.from_type(Exception)` for exception type raised by `get_token` - - Assert handler raises (does not swallow) - - **Validates: Requirements 2.3** - - - [ ]* 6.5 Write property test for Bearer token header (Property 4) - - **Property 4: Bearer token header for any token string** - - Use `st.text(min_size=1)` for token - - Assert `Authorization` header value equals `f"Bearer {token}"` - - **Validates: Requirements 3.2** - - - [ ]* 6.6 Write property test for 2xx response parsed into ApiResponse (Property 5) - - **Property 5: 2xx response parsed into ApiResponse** - - Use `st.integers(200, 299)` for status code, `st.text(min_size=1)` for status field - - Assert `ApiResponse.model_validate` succeeds without raising - - **Validates: Requirements 3.3** - - - [ ]* 6.7 Write property test for ApiClient failure propagation (Property 
6) - - **Property 6: ApiClient failure propagates exception** - - Use `st.integers(400, 599)` for non-2xx codes - - Assert handler raises on any `ApiClient.call` failure - - **Validates: Requirements 3.4, 3.5** - - - [ ]* 6.8 Write property test for missing env var raises ValidationError (Property 7) - - **Property 7: Missing required env var raises ValidationError** - - Use `st.sampled_from(["API_URL", "SECRET_NAME", "SERVICE_NAME", "METRICS_NAMESPACE", "TABLE_NAME"])` to pick which var to omit - - Assert `Settings()` raises `ValidationError` - - **Validates: Requirements 5.1, 5.2** - - - [ ]* 6.9 Write property test for EventBridgeEvent camelCase round-trip (Property 8) - - **Property 8: EventBridgeEvent camelCase round-trip** - - Use `st.text()` for `source`/`detailType`, `st.dictionaries(st.text(), st.text())` for `detail` - - Assert `model_dump(by_alias=True)` equals the original camelCase input dict - - **Validates: Requirements 6.1, 6.3** - - - [ ]* 6.10 Write property test for ApiResponse camelCase round-trip (Property 9) - - **Property 9: ApiResponse camelCase round-trip** - - Use `st.text(min_size=1)` for `status` - - Assert `model_dump(by_alias=True)` equals the original input dict - - **Validates: Requirements 6.2, 6.4** - - - [ ]* 6.11 Write property test for successful response persisted to DynamoDB (Property 10) - - **Property 10: Successful API response is persisted to DynamoDB** - - Use `st.text(min_size=1)` for `status`; mock `secret_client`, `api_client`, and `repository` - - Assert `repository.put_item` is called exactly once with `response.model_dump()` - - **Validates: Requirements 11.1** - - - [ ]* 6.12 Write property test for DynamoDB write failure propagates exception (Property 11) - - **Property 11: DynamoDB write failure propagates exception** - - Use `st.from_type(Exception)` for exception raised by `repository.put_item` - - Assert handler raises (does not swallow) - - **Validates: Requirements 11.4** - - - End file with `if __name__ == 
"__main__": main()` - - _Requirements: 8.1, 8.3_ - -- [x] 7. Checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. - -- [x] 8. Add documentation - - [x] 8.1 Create `docs/template/eventbridge.md` - - Follow the structure of `docs/template/stream.md` - - Document: trigger (EventBridge rule), destination (external HTTP API + DynamoDB table), code locations, data models (`EventBridgeEvent`, `ApiResponse`, `Settings`), environment variables - - _Requirements: 9.1, 9.2_ - - - [x] 8.2 Update `docs/template/index.md` - - Add a link to `eventbridge.md` alongside the existing template entries - - _Requirements: 9.3_ - -- [x] 9. Update Makefile `STACK_MAP` - - Add `STACK_MAP_eventbridge-api-caller = EventBridgeApiCallerStack` to the `STACK_MAP` variable block - - Verify `make deploy STACK=eventbridge-api-caller` and `make destroy STACK=eventbridge-api-caller` resolve correctly - - _Requirements: 10.1, 10.2, 10.3_ - -- [x] 10. Final checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. 
- -## Notes - -- Sub-tasks marked with `*` are optional and can be skipped for a faster MVP -- Each task references specific requirements for traceability -- Property tests use Hypothesis (already a dev dependency); run with `make test` -- `ApiClient` uses `urllib.request` only — no new HTTP dependency required -- The Makefile `STACK_MAP` uses underscores in variable names; the key `eventbridge-api-caller` maps via `$(STACK_MAP_$(STACK))` substitution -- `Repository` is imported from `templates.repository` (shared class) — no new repository file is created for this scenario diff --git a/.kiro/specs/github-deploy-workflow/.config.kiro b/.kiro/specs/github-deploy-workflow/.config.kiro deleted file mode 100644 index c534059..0000000 --- a/.kiro/specs/github-deploy-workflow/.config.kiro +++ /dev/null @@ -1 +0,0 @@ -{"specId": "65031184-06f6-4e1c-9932-fab1ac82d1b0", "workflowType": "requirements-first", "specType": "feature"} diff --git a/.kiro/specs/github-deploy-workflow/design.md b/.kiro/specs/github-deploy-workflow/design.md deleted file mode 100644 index 0ebb48a..0000000 --- a/.kiro/specs/github-deploy-workflow/design.md +++ /dev/null @@ -1,225 +0,0 @@ -# Design Document - -## Overview - -This design describes a GitHub Actions deployment workflow (`deploy.yml`) for the project. The workflow automates AWS CDK stack deployments using OIDC-based authentication (no long-lived credentials), integrates with the existing `check.yml` quality gate, and supports both automatic deployment on push to `main` and manual on-demand deployment via `workflow_dispatch`. - -The two deployable stacks are: -- `api` → `ApiGatewayDynamodbStack` -- `stream` → `DynamodbStreamStack` - -These map directly to the `STACK_MAP_*` variables already defined in the `Makefile`. - -## Architecture - -The workflow is a single YAML file at `.github/workflows/deploy.yml`. It composes two logical phases: - -1. 
**Quality Gate** — reuses `check.yml` via `workflow_call` to run lint and tests before any deployment step. -2. **Deployment** — one or two jobs (depending on trigger) that authenticate with AWS via OIDC, set up the environment, and run `make deploy STACK=`. - -```mermaid -flowchart TD - A[Trigger: push to main\nor workflow_dispatch] --> B[check job\nreuses check.yml] - B -->|success| C{Trigger type?} - B -->|failure| Z[Halt — no deploy] - C -->|push to main| D[deploy-api job\nmake deploy STACK=api] - D -->|success| E[deploy-stream job\nmake deploy STACK=stream] - C -->|workflow_dispatch\nstack=api| F[deploy-api job\nmake deploy STACK=api] - C -->|workflow_dispatch\nstack=stream| G[deploy-stream job\nmake deploy STACK=stream] -``` - -### Trigger Strategy - -| Trigger | Stacks deployed | -|---|---| -| `push` to `main` | `api` then `stream` (sequential) | -| `workflow_dispatch` with `stack: api` | `api` only | -| `workflow_dispatch` with `stack: stream` | `stream` only | - -Sequential ordering on push-to-main is enforced via `needs:` between the two deploy jobs. - -### Job Conditional Logic - -Each deploy job uses an `if:` expression to decide whether it should run: - -- `deploy-api` runs when: trigger is `push` OR (`workflow_dispatch` AND input is `api`) -- `deploy-stream` runs when: trigger is `push` OR (`workflow_dispatch` AND input is `stream`) -- `deploy-stream` additionally has `needs: deploy-api` so it waits when both run (push to main), but the `if:` condition means it still runs independently for `workflow_dispatch stream`. - -## Components and Interfaces - -### Workflow File: `.github/workflows/deploy.yml` - -**Triggers:** -- `push` with `branches: [main]` -- `workflow_dispatch` with a required `choice` input `stack` accepting `api` or `stream` - -**Permissions (workflow-level):** -```yaml -permissions: - id-token: write # required for OIDC - contents: read # required for checkout -``` - -No additional permissions are granted. 
- -**Jobs:** - -| Job ID | Purpose | Depends on | -|---|---|---| -| `check` | Reuses `check.yml` via `workflow_call` | — | -| `deploy-api` | Deploys `api` stack | `check` | -| `deploy-stream` | Deploys `stream` stack | `check` (and `deploy-api` on push) | - -**Reusable workflow call:** -```yaml -check: - uses: ./.github/workflows/check.yml -``` - -**Deploy job steps (same structure for both stacks):** -1. `actions/checkout` — check out repository -2. `actions/setup-python` — Python 3.14 -3. Install Poetry + deps — `make poetry install` -4. Install AWS CDK CLI — `npm install -g aws-cdk` -5. `aws-actions/configure-aws-credentials` — OIDC role assumption -6. `make deploy STACK=` — CDK deployment - -### External Actions (pinned) - -All third-party actions are pinned to a specific SHA or version tag: - -| Action | Version/SHA to pin | -|---|---| -| `actions/checkout` | `v4` (pin to SHA in implementation) | -| `actions/setup-python` | `v5` (pin to SHA in implementation) | -| `aws-actions/configure-aws-credentials` | `v4` (pin to SHA in implementation) | - -### Secrets and Variables - -| Name | Type | Purpose | -|---|---|---| -| `AWS_DEPLOY_ROLE_ARN` | Secret | IAM role ARN assumed via OIDC | -| `AWS_REGION` | Variable | Target AWS region (e.g. `us-east-1`) | - -### Environment Variables - -`AWS_DEFAULT_REGION` is set at the job level to `${{ vars.AWS_REGION }}` so all steps (including CDK) pick up the correct region without explicit flags. - -## Data Models - -This feature is a GitHub Actions workflow — there are no runtime data models. The relevant "data" is the workflow YAML structure and the inputs/secrets it consumes. - -### workflow_dispatch Input Schema - -```yaml -inputs: - stack: - description: "CDK stack to deploy" - required: true - type: choice - options: - - api - - stream -``` - -### Makefile Deploy Interface - -The workflow calls `make deploy STACK=`. 
The Makefile maps: -- `STACK=api` → `cdk deploy ApiGatewayDynamodbStack` -- `STACK=stream` → `cdk deploy DynamodbStreamStack` - -The `--require-approval never` flag must be passed to CDK. Since the Makefile's `deploy` target does not currently include this flag, it will be added to the Makefile as part of implementation. - - -## Correctness Properties - -*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* - -For a GitHub Actions workflow, "correctness" is structural: the YAML file must have the right shape. Properties are validated by parsing the workflow YAML and asserting invariants over its structure. Two categories emerge from the prework analysis: - -- **Examples** — specific structural facts that must hold (a particular key exists, a particular value is set). These are best expressed as unit tests that parse the YAML and assert the expected value. -- **Properties** — universal rules that must hold across all elements of a collection (e.g., every action reference must be pinned). These are best expressed as property-based tests that iterate over all matching elements. - -### Property 1: All third-party actions are pinned - -*For any* action reference (`uses:` field) in the workflow YAML that references a third-party action (not a local path starting with `./`), the reference must include a pinned SHA (40-character hex string) or a version tag (e.g., `@v4`, `@v4.1.0`), and must not use a floating branch reference such as `@main` or `@master`. - -**Validates: Requirements 6.4** - -### Property 2: Permissions block is exactly least-privilege - -*For any* top-level `permissions` block in the workflow YAML, the set of granted permissions must be exactly `{id-token: write, contents: read}` — no more, no fewer entries. 
- -**Validates: Requirements 6.1, 6.2, 6.3** - -## Error Handling - -| Failure scenario | Behavior | -|---|---| -| `check` job fails (lint or test) | GitHub Actions halts all dependent jobs; deploy jobs never start | -| OIDC token exchange fails | `configure-aws-credentials` step exits non-zero; remaining steps in the job are skipped; job is marked failed | -| `make deploy` exits non-zero | GitHub Actions marks the step and job as failed; downstream jobs (e.g., `deploy-stream` after `deploy-api`) do not run | -| Unknown `STACK` value | Makefile guard (`[ -n "$(CDK_STACK)" ]`) exits 1 with an error message before CDK is invoked | -| Missing secret `AWS_DEPLOY_ROLE_ARN` | `configure-aws-credentials` receives an empty string and fails the OIDC exchange | -| Missing variable `AWS_REGION` | `aws-region` input is empty; `configure-aws-credentials` fails | - -No `continue-on-error` or `if: always()` overrides are used — all failures propagate naturally. - -## Testing Strategy - -This feature is a GitHub Actions YAML file and a Makefile change. There is no application runtime code to unit test in the traditional sense. Instead, correctness is validated by **parsing the YAML and Makefile as data** and asserting structural properties. - -### Testing Approach - -**Unit tests** (pytest, `tests/test_deploy_workflow.py`): -- Parse `.github/workflows/deploy.yml` with PyYAML -- Assert specific structural facts (examples from the prework analysis) -- Each test maps to one acceptance criterion - -**Property-based tests** (pytest + Hypothesis, same file): -- Iterate over collections within the parsed YAML (e.g., all `uses:` references) -- Assert universal invariants hold for every element -- Run minimum 100 iterations (Hypothesis default covers this for finite collections; for generative tests, `@settings(max_examples=100)`) - -### Unit Test Cases - -Each test loads the YAML once via a pytest fixture and asserts: - -1. 
`workflow_dispatch` input `stack` exists, is required, is type `choice`, and options are `["api", "stream"]` — **Req 1.2** -2. `push` trigger is scoped to `branches: [main]` — **Req 1.1** -3. `deploy-api` job has `needs: check` and correct `if:` condition covering both push and `workflow_dispatch stack=api` — **Req 1.3, 1.4, 2.1** -4. `deploy-stream` job has `needs: [check, deploy-api]` for push ordering and correct `if:` condition — **Req 1.3, 1.4, 2.1** -5. A step in each deploy job uses `aws-actions/configure-aws-credentials` — **Req 3.1** -6. That step's `role-to-assume` is `${{ secrets.AWS_DEPLOY_ROLE_ARN }}` — **Req 3.2** -7. That step's `aws-region` is `${{ vars.AWS_REGION }}` — **Req 3.3** -8. `setup-python` step uses `python-version: "3.14"` — **Req 4.1** -9. A step runs `make poetry install` — **Req 4.2** -10. A step installs the CDK CLI (contains `aws-cdk`) — **Req 4.3** -11. The deploy step in `deploy-api` runs `make deploy STACK=api` — **Req 5.1** -12. The deploy step in `deploy-stream` runs `make deploy STACK=stream` — **Req 5.1** -13. `AWS_DEFAULT_REGION` env var is set at the job level to `${{ vars.AWS_REGION }}` — **Req 5.4** -14. Makefile `deploy` target contains `--require-approval never` — **Req 5.2** -15. Top-level `permissions` block contains `id-token: write` and `contents: read` — **Req 6.1, 6.2** - -### Property-Based Test Cases - -**Property 1 test** — `test_all_actions_are_pinned`: -``` -# Feature: github-deploy-workflow, Property 1: all third-party actions are pinned -``` -Collect all `uses:` values from every step in every job. For each value that does not start with `./`, assert the `@` portion is a 40-char hex SHA or a semver/version tag (matches `v\d+` pattern). Assert no reference ends with `@main`, `@master`, or `@HEAD`. 
- -**Property 2 test** — `test_permissions_are_exactly_least_privilege`: -``` -# Feature: github-deploy-workflow, Property 2: permissions block is exactly least-privilege -``` -Load the top-level `permissions` dict. Assert it equals exactly `{"id-token": "write", "contents": "read"}` — same keys, same values, no extras. - -### Test Configuration - -- Library: `pytest` + `hypothesis` (already in `pyproject.toml` dev dependencies) -- PyYAML for parsing: `pip install pyyaml` or add to dev deps -- Hypothesis `@settings(max_examples=100)` on property tests -- Tests live in `tests/test_deploy_workflow.py` -- Run with `make test` (existing target) diff --git a/.kiro/specs/github-deploy-workflow/requirements.md b/.kiro/specs/github-deploy-workflow/requirements.md deleted file mode 100644 index 79f349e..0000000 --- a/.kiro/specs/github-deploy-workflow/requirements.md +++ /dev/null @@ -1,92 +0,0 @@ -# Requirements Document - -## Introduction - -This feature adds a GitHub Actions workflow that automates deployment of AWS CDK stacks to AWS when triggered. The workflow authenticates with AWS using OIDC (no long-lived credentials), runs `make deploy` with a specified stack target, and integrates with the existing CI check workflow. It supports deploying either the `api` or `stream` CDK stack, matching the existing `STACK` environment variable convention used in the Makefile. - -## Glossary - -- **Workflow**: The GitHub Actions YAML file that defines the automated deployment pipeline. -- **CDK_Stack**: One of the two deployable AWS CDK stacks: `api` (ApiGatewayDynamodbStack) or `stream` (DynamodbStreamStack), selected via the `STACK` environment variable. -- **OIDC**: OpenID Connect — the AWS-recommended mechanism for GitHub Actions to assume an IAM role without storing long-lived AWS credentials as secrets. -- **IAM_Role**: An AWS IAM role with the permissions required to deploy CDK stacks, configured to trust GitHub Actions via OIDC. 
-- **Deployment_Job**: The GitHub Actions job responsible for running the deployment steps. -- **Check_Workflow**: The existing `check.yml` workflow that runs lint and tests (`make lint` and `make test`). - ---- - -## Requirements - -### Requirement 1: Workflow Trigger - -**User Story:** As a developer, I want the deployment workflow to trigger automatically on pushes to `main` and manually on demand, so that production deployments happen consistently without manual steps. - -#### Acceptance Criteria - -1. WHEN a commit is pushed to the `main` branch, THE Workflow SHALL start a deployment run. -2. THE Workflow SHALL support manual triggering via `workflow_dispatch` with a required input selecting the target CDK_Stack (`api` or `stream`). -3. WHEN triggered by a push to `main`, THE Workflow SHALL default to deploying all stacks sequentially (`api` then `stream`). -4. WHEN triggered manually via `workflow_dispatch`, THE Workflow SHALL deploy only the CDK_Stack selected by the user input. - ---- - -### Requirement 2: Pre-deployment Quality Gate - -**User Story:** As a developer, I want the deployment workflow to run lint and tests before deploying, so that broken code is never deployed to AWS. - -#### Acceptance Criteria - -1. WHEN the Workflow is triggered, THE Deployment_Job SHALL depend on the Check_Workflow completing successfully before any deployment step runs. -2. IF the Check_Workflow fails, THEN THE Workflow SHALL halt and THE Deployment_Job SHALL not execute. - ---- - -### Requirement 3: AWS Authentication via OIDC - -**User Story:** As a platform engineer, I want the workflow to authenticate with AWS using OIDC, so that no long-lived AWS credentials are stored as GitHub secrets. - -#### Acceptance Criteria - -1. THE Workflow SHALL use the `aws-actions/configure-aws-credentials` action to assume an IAM_Role via OIDC. -2. THE Workflow SHALL read the IAM_Role ARN from a GitHub Actions secret named `AWS_DEPLOY_ROLE_ARN`. -3. 
THE Workflow SHALL read the target AWS region from a GitHub Actions variable named `AWS_REGION`. -4. IF the OIDC token exchange fails, THEN THE Workflow SHALL fail the Deployment_Job with a non-zero exit code and SHALL NOT proceed to the deploy step. -5. THE Workflow SHALL request the `id-token: write` permission required for OIDC authentication. - ---- - -### Requirement 4: Environment Setup - -**User Story:** As a developer, I want the workflow to install all project dependencies before deploying, so that CDK and Poetry-managed packages are available during deployment. - -#### Acceptance Criteria - -1. THE Deployment_Job SHALL use Python 3.14 to match the version declared in `pyproject.toml`. -2. THE Deployment_Job SHALL install Poetry and all project dependencies by running `make poetry install`. -3. THE Deployment_Job SHALL install the AWS CDK CLI so that `cdk` is available on the PATH during deployment. - ---- - -### Requirement 5: CDK Stack Deployment - -**User Story:** As a developer, I want the workflow to deploy the correct CDK stack using `make deploy`, so that the deployment is consistent with local developer workflows. - -#### Acceptance Criteria - -1. WHEN deploying a CDK_Stack, THE Deployment_Job SHALL run `make deploy STACK=` where `` is the target stack name. -2. THE Deployment_Job SHALL pass `--require-approval never` to CDK (via the Makefile or environment) so that the deployment does not wait for interactive confirmation. -3. WHEN `make deploy` exits with a non-zero code, THE Workflow SHALL mark the Deployment_Job as failed. -4. THE Deployment_Job SHALL set the `AWS_DEFAULT_REGION` environment variable to the value of the `AWS_REGION` GitHub Actions variable for all steps. - ---- - -### Requirement 6: Workflow Permissions and Security - -**User Story:** As a platform engineer, I want the workflow to follow least-privilege principles, so that the GitHub Actions runner has only the permissions it needs. - -#### Acceptance Criteria - -1. 
THE Workflow SHALL grant `id-token: write` permission to enable OIDC token issuance. -2. THE Workflow SHALL grant `contents: read` permission to allow repository checkout. -3. THE Workflow SHALL NOT grant any additional permissions beyond those listed in criteria 1 and 2. -4. THE Workflow SHALL pin all third-party GitHub Actions to a specific SHA or version tag to prevent supply-chain attacks. diff --git a/.kiro/specs/github-deploy-workflow/tasks.md b/.kiro/specs/github-deploy-workflow/tasks.md deleted file mode 100644 index e444a33..0000000 --- a/.kiro/specs/github-deploy-workflow/tasks.md +++ /dev/null @@ -1,76 +0,0 @@ -# Implementation Plan: GitHub Deploy Workflow - -## Overview - -Create `.github/workflows/deploy.yml` with OIDC-based AWS authentication, quality gate integration, and conditional stack deployment. Update the Makefile `deploy` target to pass `--require-approval never`. Add structural tests in `tests/test_deploy_workflow.py`. - -## Tasks - -- [x] 1. Update Makefile deploy target - - Add `--require-approval never` flag to the `cdk deploy` command in the `deploy` target - - _Requirements: 5.2_ - -- [x] 2. 
Create the deploy workflow file - - [x] 2.1 Create `.github/workflows/deploy.yml` with triggers and permissions - - Add `push` trigger scoped to `branches: [main]` - - Add `workflow_dispatch` trigger with required `choice` input `stack` accepting `api` or `stream` - - Set top-level `permissions` block to exactly `id-token: write` and `contents: read` - - _Requirements: 1.1, 1.2, 6.1, 6.2, 6.3_ - - - [x] 2.2 Add `check` job that reuses `check.yml` - - Add `check` job using `uses: ./.github/workflows/check.yml` - - _Requirements: 2.1, 2.2_ - - - [x] 2.3 Add `deploy-api` job - - Set `needs: check` - - Set `if:` condition to run on `push` OR (`workflow_dispatch` AND `inputs.stack == 'api'`) - - Set `AWS_DEFAULT_REGION: ${{ vars.AWS_REGION }}` at job level - - Add steps: `actions/checkout@v4`, `actions/setup-python@v5` with `python-version: "3.14"`, `make poetry install`, `npm install -g aws-cdk`, `aws-actions/configure-aws-credentials@v4` with `role-to-assume: ${{ secrets.AWS_DEPLOY_ROLE_ARN }}` and `aws-region: ${{ vars.AWS_REGION }}`, then `make deploy STACK=api` - - Pin all third-party actions to a specific SHA - - _Requirements: 1.3, 1.4, 3.1, 3.2, 3.3, 3.5, 4.1, 4.2, 4.3, 5.1, 5.4, 6.4_ - - - [x] 2.4 Add `deploy-stream` job - - Set `needs: [check, deploy-api]` - - Set `if:` condition to run on `push` OR (`workflow_dispatch` AND `inputs.stack == 'stream'`) - - Mirror the same steps as `deploy-api` but with `STACK=stream` - - _Requirements: 1.3, 1.4, 3.1, 3.2, 3.3, 3.5, 4.1, 4.2, 4.3, 5.1, 5.4, 6.4_ - -- [x]* 3. 
Create structural tests - - [x]* 3.1 Create `tests/test_deploy_workflow.py` with a pytest fixture that parses the workflow YAML - - Load `.github/workflows/deploy.yml` once via a session-scoped fixture using PyYAML - - Add `if __name__ == "__main__": main()` at end of file - - _Requirements: 1.1, 1.2_ - - - [x]* 3.2 Write unit tests for trigger and job structure - - Test `workflow_dispatch` input `stack` is required, type `choice`, options `["api", "stream"]` — Req 1.2 - - Test `push` trigger is scoped to `branches: [main]` — Req 1.1 - - Test `deploy-api` has `needs: check` and correct `if:` condition — Req 1.3, 1.4, 2.1 - - Test `deploy-stream` has `needs: [check, deploy-api]` and correct `if:` condition — Req 1.3, 1.4, 2.1 - - _Requirements: 1.1, 1.2, 1.3, 1.4, 2.1_ - - - [ ]* 3.3 Write unit tests for OIDC and environment setup steps - - Test each deploy job has a step using `aws-actions/configure-aws-credentials` — Req 3.1 - - Test that step's `role-to-assume` is `${{ secrets.AWS_DEPLOY_ROLE_ARN }}` — Req 3.2 - - Test that step's `aws-region` is `${{ vars.AWS_REGION }}` — Req 3.3 - - Test `setup-python` step uses `python-version: "3.14"` — Req 4.1 - - Test a step runs `make poetry install` — Req 4.2 - - Test a step installs the CDK CLI (contains `aws-cdk`) — Req 4.3 - - Test `deploy-api` step runs `make deploy STACK=api` — Req 5.1 - - Test `deploy-stream` step runs `make deploy STACK=stream` — Req 5.1 - - Test `AWS_DEFAULT_REGION` env is set to `${{ vars.AWS_REGION }}` at job level — Req 5.4 - - Test Makefile `deploy` target contains `--require-approval never` — Req 5.2 - - Test top-level `permissions` contains `id-token: write` and `contents: read` — Req 6.1, 6.2 - - _Requirements: 3.1, 3.2, 3.3, 4.1, 4.2, 4.3, 5.1, 5.2, 5.4, 6.1, 6.2_ - - - [ ]* 3.4 Write property test for Property 1: all third-party actions are pinned - - **Property 1: All third-party actions are pinned** - - Collect all `uses:` values from every step in every job; for each value not starting 
with `./`, assert the `@` portion is a 40-char hex SHA or a version tag matching `v\d+`; assert no reference ends with `@main`, `@master`, or `@HEAD` - - **Validates: Requirements 6.4** - - - [ ]* 3.5 Write property test for Property 2: permissions block is exactly least-privilege - - **Property 2: Permissions block is exactly least-privilege** - - Load the top-level `permissions` dict and assert it equals exactly `{"id-token": "write", "contents": "read"}` with no extra keys - - **Validates: Requirements 6.1, 6.2, 6.3** - -- [x]* 4. Checkpoint — Ensure all tests pass - - Run `make test` and confirm all tests in `tests/test_deploy_workflow.py` pass. Ask the user if questions arise. diff --git a/.kiro/specs/lambda-scenario-templates/.config.kiro b/.kiro/specs/lambda-scenario-templates/.config.kiro deleted file mode 100644 index 3580b8a..0000000 --- a/.kiro/specs/lambda-scenario-templates/.config.kiro +++ /dev/null @@ -1 +0,0 @@ -{"specId": "c5a2e92e-2f4f-462d-8fd8-b32efccb2871", "workflowType": "requirements-first", "specType": "feature"} diff --git a/.kiro/specs/lambda-scenario-templates/design.md b/.kiro/specs/lambda-scenario-templates/design.md deleted file mode 100644 index ce4f2d7..0000000 --- a/.kiro/specs/lambda-scenario-templates/design.md +++ /dev/null @@ -1,213 +0,0 @@ -# Design Document: lambda-scenario-templates - -## Overview - -This feature adds two self-contained Lambda scenario templates to the project under `aws_lambda_template/scenarios/`. Each scenario is a fully-wired starting point for a common Lambda integration pattern, combining: - -- AWS Lambda Powertools (Logger, Tracer, Metrics, event types, resolvers) -- Pydantic data models for domain objects -- Pydantic-settings `BaseSettings` for typed environment configuration -- AWS CDK stacks under `infra/stacks/` for infrastructure provisioning -- pytest test suites under `tests/scenarios/` with no real AWS calls - -The two initial scenarios are: - -1. 
`api_gateway_dynamodb` — API Gateway REST trigger backed by DynamoDB (CRUD) -2. `dynamodb_stream` — DynamoDB Streams trigger that fans out change records to a destination table - -The directory layout is designed so that each scenario is independently copyable and new scenarios can be added without touching existing ones. - ---- - -## Architecture - -```mermaid -graph TD - subgraph aws_lambda_template/scenarios/ - A[api_gateway_dynamodb/] - B[dynamodb_stream/] - end - - subgraph infra/stacks/ - C[api_gateway_dynamodb_stack.py] - D[dynamodb_stream_stack.py] - end - - subgraph tests/scenarios/ - E[test_api_gateway_dynamodb.py] - F[test_dynamodb_stream.py] - end - - A --> C - B --> D - A --> E - B --> F -``` - -### Scenario: api_gateway_dynamodb - -```mermaid -sequenceDiagram - participant Client - participant APIGW as API Gateway - participant Lambda - participant DDB as DynamoDB - - Client->>APIGW: GET /items/{id} - APIGW->>Lambda: Invoke (event) - Lambda->>DDB: GetItem - DDB-->>Lambda: Item / not found - Lambda-->>APIGW: 200 JSON / 404 - APIGW-->>Client: Response - - Client->>APIGW: POST /items - APIGW->>Lambda: Invoke (event) - Lambda->>DDB: PutItem - DDB-->>Lambda: OK / error - Lambda-->>APIGW: 201 / 422 / 500 - APIGW-->>Client: Response -``` - -### Scenario: dynamodb_stream - -```mermaid -sequenceDiagram - participant SourceTable as DynamoDB (source) - participant Stream as DynamoDB Stream - participant Lambda - participant DestTable as DynamoDB (destination) - - SourceTable->>Stream: Change record (INSERT/MODIFY/REMOVE) - Stream->>Lambda: DynamoDBStreamEvent - loop For each record - Lambda->>DestTable: PutItem (INSERT/MODIFY) or DeleteItem (REMOVE) - end -``` - ---- - -## Components and Interfaces - -### Shared conventions (all scenarios) - -Every scenario directory follows this layout: - -``` -aws_lambda_template/scenarios/{scenario_name}/ - __init__.py # exports the handler function - handler.py # Lambda entry point + Powertools decorators - models.py # 
Pydantic BaseModel domain objects - settings.py # Pydantic-settings BaseSettings subclass -``` - -The `handler.py` module-level initialisation pattern is: - -```python -settings = Settings() # parsed once at cold-start -logger = Logger(service=settings.service_name) -tracer = Tracer(service=settings.service_name) -metrics = Metrics(namespace=settings.metrics_namespace) -``` - -The handler function is decorated with: - -```python -@logger.inject_lambda_context -@tracer.capture_lambda_handler -@metrics.log_metrics -def handler(event, context): ... -``` - -### Scenario: api_gateway_dynamodb - -**`settings.py`** - -| Field | Type | Env var | Description | -|---|---|---|---| -| `table_name` | `str` | `TABLE_NAME` | DynamoDB table name | -| `service_name` | `str` | `SERVICE_NAME` | Powertools service name | -| `metrics_namespace` | `str` | `METRICS_NAMESPACE` | Powertools metrics namespace | - -**`models.py`** - -- `Item` — Pydantic `BaseModel` with at minimum `id: str` and `name: str`; used for POST body validation and GET response serialisation. - -**`handler.py`** - -- Uses `APIGatewayRestResolver` from Powertools. -- Routes: - - `GET /items/{id}` → `get_item(id: str)` — fetches from DynamoDB, returns 200 or 404. - - `POST /items` → `create_item()` — validates body as `Item`, writes to DynamoDB, returns 201 or 422/500. -- DynamoDB client is created once at module level via `boto3.client("dynamodb")` (or `boto3.resource`). 
- -**`infra/stacks/api_gateway_dynamodb_stack.py`** - -Provisions: -- `aws_cdk.aws_dynamodb.Table` (PAY_PER_REQUEST billing, string partition key `id`) -- `aws_cdk.aws_lambda.Function` with handler pointing to the scenario -- `aws_cdk.aws_apigateway.RestApi` with a Lambda proxy integration - -### Scenario: dynamodb_stream - -**`settings.py`** - -| Field | Type | Env var | Description | -|---|---|---|---| -| `source_table_name` | `str` | `SOURCE_TABLE_NAME` | Source DynamoDB table name | -| `destination_table_name` | `str` | `DESTINATION_TABLE_NAME` | Destination DynamoDB table name | -| `service_name` | `str` | `SERVICE_NAME` | Powertools service name | -| `metrics_namespace` | `str` | `METRICS_NAMESPACE` | Powertools metrics namespace | - -**`models.py`** - -- `DestinationItem` — Pydantic `BaseModel` representing the shape written to the destination table; fields derived from the source table's `NewImage`. - -**`handler.py`** - -- Accepts `DynamoDBStreamEvent` (Powertools event type). -- Iterates over `event.records`; for each record: - - `INSERT` / `MODIFY` → deserialise `NewImage` into `DestinationItem`, call `put_item` on destination table. - - `REMOVE` → extract key from `Keys`, call `delete_item` on destination table. - - Any per-record exception → log with Logger, emit `ProcessingError` metric, continue. - -**`infra/stacks/dynamodb_stream_stack.py`** - -Provisions: -- Source `Table` with `stream=StreamViewType.NEW_AND_OLD_IMAGES` -- Destination `Table` -- `Function` with `DynamoDBEventSource` pointing to the source stream - ---- - -## Data Models - -### api_gateway_dynamodb - -```python -from pydantic import BaseModel - -class Item(BaseModel): - id: str - name: str - # additional fields can be added per use-case -``` - -DynamoDB representation uses standard string/map attributes. The handler serialises/deserialises using `boto3`'s `TypeDeserializer` or the higher-level `resource` API. 
- -### dynamodb_stream - -```python -from pydantic import BaseModel - -class DestinationItem(BaseModel): - id: str - # fields mirror the source table's NewImage shape -``` - -Stream records carry DynamoDB-typed JSON (`{"S": "value"}`). The handler uses `boto3.dynamodb.types.TypeDeserializer` to convert to plain Python dicts before constructing `DestinationItem`. - -### Settings (both scenarios) - -Settings classes use `pydantic_settings.BaseSettings` with `model_config = SettingsConfigDict(case_sensitive=False)`. Missing required fields raise `pydantic.ValidationError` at import time (module-level instantiation), preventing the handler from processing any event with invalid configuration. - ---- diff --git a/.kiro/specs/lambda-scenario-templates/requirements.md b/.kiro/specs/lambda-scenario-templates/requirements.md deleted file mode 100644 index 9c184fc..0000000 --- a/.kiro/specs/lambda-scenario-templates/requirements.md +++ /dev/null @@ -1,132 +0,0 @@ -# Requirements Document - -## Introduction - -This feature adds a set of reusable, scenario-based AWS Lambda handler templates to the project. Each scenario provides a working starting point for a common Lambda integration pattern, wired up with AWS Lambda Powertools (logger, tracer, metrics, parameters, event handlers), Pydantic data models, Pydantic-settings environment config, and AWS CDK infrastructure definitions. Developers pick a scenario, get a fully-structured handler, and extend from there. - -The initial scenarios are: - -1. **REST API Gateway** — API Gateway (HTTP/REST) trigger with DynamoDB as the read/write backend. -2. **DynamoDB Stream** — DynamoDB Streams trigger that processes change records and writes results to a second DynamoDB table. - -The template structure is designed to be extensible so that additional scenarios can be added without modifying existing ones. 
- ---- - -## Glossary - -- **Template**: A self-contained directory under `aws_lambda_template/scenarios/` that implements one Lambda integration scenario. -- **Scenario**: A named integration pattern (e.g., `api_gateway_dynamodb`, `dynamodb_stream`). -- **Handler**: The Python function that AWS Lambda invokes as the entry point for a scenario. -- **Settings**: A Pydantic-settings `BaseSettings` subclass that reads environment variables for a scenario. -- **Model**: A Pydantic `BaseModel` subclass representing a domain object (e.g., an item stored in DynamoDB). -- **Powertools**: AWS Lambda Powertools for Python — provides Logger, Tracer, Metrics, Parameters, and event-type utilities. -- **CDK_Stack**: An AWS CDK `Stack` subclass under `infra/` that provisions the AWS resources for a scenario. -- **API_Gateway**: Amazon API Gateway (REST or HTTP API) used as a Lambda trigger. -- **DynamoDB**: Amazon DynamoDB used as a data store. -- **DynamoDB_Stream**: The change-data-capture stream attached to a DynamoDB table, used as a Lambda trigger. -- **Resolver**: The AWS Lambda Powertools `APIGatewayRestResolver` or `APIGatewayHttpResolver` used to route API Gateway requests to handler functions. - ---- - -## Requirements - -### Requirement 1: Scenario Directory Structure - -**User Story:** As a developer, I want each scenario to live in its own self-contained directory, so that I can copy or reference a single scenario without touching others. - -#### Acceptance Criteria - -1. THE Template SHALL be located at `aws_lambda_template/scenarios/{scenario_name}/`. -2. THE Template SHALL contain at minimum the following files: `handler.py`, `models.py`, `settings.py`. -3. THE Template SHALL contain an `__init__.py` that exports the Lambda handler function. -4. WHERE a scenario requires AWS infrastructure, THE Template SHALL have a corresponding CDK stack at `infra/stacks/{scenario_name}_stack.py`. -5. THE Template SHALL NOT share mutable state with other scenario directories. 
- ---- - -### Requirement 2: Shared Powertools Initialisation - -**User Story:** As a developer, I want Powertools utilities (Logger, Tracer, Metrics) initialised consistently across all scenarios, so that observability is uniform without duplicating boilerplate. - -#### Acceptance Criteria - -1. THE Handler SHALL initialise a Powertools `Logger` with the service name read from the scenario's `Settings`. -2. THE Handler SHALL initialise a Powertools `Tracer` with the service name read from the scenario's `Settings`. -3. THE Handler SHALL initialise a Powertools `Metrics` namespace read from the scenario's `Settings`. -4. WHEN the Lambda function is invoked, THE Handler SHALL use the `@logger.inject_lambda_context` decorator to inject request context into every log record. -5. WHEN the Lambda function is invoked, THE Handler SHALL use the `@tracer.capture_lambda_handler` decorator to trace the full invocation. -6. WHEN the Lambda function is invoked, THE Handler SHALL use the `@metrics.log_metrics` decorator to flush metrics after each invocation. - ---- - -### Requirement 3: Environment Configuration via Pydantic-Settings - -**User Story:** As a developer, I want all environment-specific values (table names, service name, etc.) loaded through a typed `Settings` class, so that configuration is validated at startup and never scattered as raw `os.environ` calls. - -#### Acceptance Criteria - -1. THE Settings SHALL be defined as a `pydantic_settings.BaseSettings` subclass in `settings.py` within each scenario directory. -2. THE Settings SHALL declare every environment variable the scenario depends on as a typed field with a description. -3. IF a required environment variable is missing at Lambda cold-start, THEN THE Settings SHALL raise a `ValidationError` before the handler processes any event. -4. THE Handler SHALL instantiate `Settings` once at module level (outside the handler function) to avoid re-parsing on every invocation. 
- ---- - -### Requirement 4: REST API Gateway + DynamoDB Scenario - -**User Story:** As a developer, I want a ready-to-run scenario for an API Gateway REST trigger backed by DynamoDB, so that I have a working CRUD starting point without writing boilerplate. - -#### Acceptance Criteria - -1. THE Handler SHALL use the Powertools `APIGatewayRestResolver` (or `APIGatewayHttpResolver`) to route incoming API Gateway events to typed route functions. -2. WHEN a `GET /items/{id}` request is received, THE Handler SHALL retrieve the item from DynamoDB and return it as a JSON response with HTTP status 200. -3. WHEN a `POST /items` request is received, THE Handler SHALL validate the request body against the `Item` Pydantic model and write the item to DynamoDB, returning HTTP status 201. -4. IF the requested item does not exist in DynamoDB, THEN THE Handler SHALL return an HTTP 404 response with a descriptive error message. -5. IF the request body fails Pydantic validation, THEN THE Handler SHALL return an HTTP 422 response with the validation error details. -6. IF a DynamoDB operation raises an exception, THEN THE Handler SHALL log the error with Powertools Logger and return an HTTP 500 response. -7. THE Settings FOR this scenario SHALL include: `TABLE_NAME` (str), `SERVICE_NAME` (str), `METRICS_NAMESPACE` (str). -8. THE CDK_Stack FOR this scenario SHALL provision: one DynamoDB table, one Lambda function with the handler, and one API Gateway REST API connected to the Lambda function. - ---- - -### Requirement 5: DynamoDB Stream Trigger Scenario - -**User Story:** As a developer, I want a ready-to-run scenario for a DynamoDB Streams trigger that fans out change records to a destination table, so that I have a working event-driven starting point. - -#### Acceptance Criteria - -1. THE Handler SHALL accept a `DynamoDBStreamEvent` (Powertools event type) as its input. -2. WHEN a DynamoDB stream event is received, THE Handler SHALL iterate over every record in the event. -3. 
WHEN a record with event name `INSERT` or `MODIFY` is received, THE Handler SHALL deserialise the `NewImage` into the `DestinationItem` Pydantic model and write it to the destination DynamoDB table. -4. WHEN a record with event name `REMOVE` is received, THE Handler SHALL delete the corresponding item from the destination DynamoDB table using the record's key. -5. IF deserialisation of a stream record fails, THEN THE Handler SHALL log the error with Powertools Logger, emit a `ProcessingError` metric, and continue processing the remaining records. -6. IF a DynamoDB write or delete operation fails, THEN THE Handler SHALL log the error with Powertools Logger, emit a `ProcessingError` metric, and continue processing the remaining records. -7. THE Settings FOR this scenario SHALL include: `SOURCE_TABLE_NAME` (str), `DESTINATION_TABLE_NAME` (str), `SERVICE_NAME` (str), `METRICS_NAMESPACE` (str). -8. THE CDK_Stack FOR this scenario SHALL provision: a source DynamoDB table with Streams enabled, a destination DynamoDB table, and a Lambda function with the handler connected to the source table's stream as an event source. - ---- - -### Requirement 6: Extensibility — Adding New Scenarios - -**User Story:** As a developer, I want to add a new scenario by following a clear convention, so that the template structure scales without modifying existing scenarios. - -#### Acceptance Criteria - -1. THE Template structure SHALL be documented so that a developer can add a new scenario by creating a new directory under `aws_lambda_template/scenarios/` and a corresponding CDK stack under `infra/stacks/`. -2. THE existing scenarios SHALL remain unmodified when a new scenario directory is added. -3. WHERE a new scenario shares infrastructure patterns with an existing scenario, THE CDK_Stack SHALL be independently deployable without depending on another scenario's stack. 
- ---- - -### Requirement 7: Testing - -**User Story:** As a developer, I want each scenario to have pytest tests that cover the happy path and key error conditions, so that I can verify correctness and use the tests as living documentation. - -#### Acceptance Criteria - -1. THE Template SHALL include a `tests/scenarios/{scenario_name}/` directory with at least one test module per scenario. -2. WHEN testing handler functions, THE test suite SHALL use `pytest-mock` and `monkeypatch` to mock AWS SDK (boto3) calls — no real AWS resources SHALL be contacted during tests. -3. WHEN testing the API Gateway scenario, THE test suite SHALL cover: successful GET, successful POST, item-not-found (404), invalid request body (422), and DynamoDB error (500). -4. WHEN testing the DynamoDB Stream scenario, THE test suite SHALL cover: INSERT record processed, MODIFY record processed, REMOVE record processed, deserialisation failure (error logged, processing continues), and DynamoDB write failure (error logged, processing continues). -5. THE test suite SHALL use Powertools `POWERTOOLS_SERVICE_NAME` and `POWERTOOLS_METRICS_NAMESPACE` environment variables set via `monkeypatch` or `pytest` fixtures to avoid Powertools initialisation errors during tests. diff --git a/.kiro/specs/lambda-scenario-templates/tasks.md b/.kiro/specs/lambda-scenario-templates/tasks.md deleted file mode 100644 index a0b5204..0000000 --- a/.kiro/specs/lambda-scenario-templates/tasks.md +++ /dev/null @@ -1,94 +0,0 @@ -# Implementation Plan: lambda-scenario-templates - -## Overview - -Implement two self-contained Lambda scenario templates (`api_gateway_dynamodb` and `dynamodb_stream`) under `aws_lambda_template/scenarios/`, with CDK stacks under `infra/stacks/` and pytest suites under `tests/scenarios/`. - -## Tasks - -- [x] 1. 
Scaffold scenario package structure and install dependencies - - Create `aws_lambda_template/scenarios/__init__.py` - - Create empty `__init__.py` files for each scenario directory: `aws_lambda_template/scenarios/api_gateway_dynamodb/` and `aws_lambda_template/scenarios/dynamodb_stream/` - - Create `tests/scenarios/__init__.py` and subdirectory `__init__.py` files for each scenario - - Run `poetry add aws-lambda-powertools pydantic-settings boto3 aws-cdk-lib constructs` (and `poetry add --group dev pytest-mock moto` for test dependencies) - - _Requirements: 1.1, 1.2, 1.3_ - -- [x] 2. Implement `api_gateway_dynamodb` scenario - - [x] 2.1 Create `aws_lambda_template/scenarios/api_gateway_dynamodb/settings.py` - - Define `Settings(BaseSettings)` with fields: `table_name: str`, `service_name: str`, `metrics_namespace: str` - - Use `SettingsConfigDict(case_sensitive=False)` - - _Requirements: 3.1, 3.2, 3.3, 3.4, 4.7_ - - - [x] 2.2 Create `aws_lambda_template/scenarios/api_gateway_dynamodb/models.py` - - Define `Item(BaseModel)` with `id: str` and `name: str` - - _Requirements: 4.3_ - - - [x] 2.3 Create `aws_lambda_template/scenarios/api_gateway_dynamodb/handler.py` - - Instantiate `Settings`, `Logger`, `Tracer`, `Metrics` at module level - - Create `boto3` DynamoDB resource at module level - - Implement `APIGatewayRestResolver` with `GET /items/{id}` (200/404) and `POST /items` (201/422/500) - - Apply `@logger.inject_lambda_context`, `@tracer.capture_lambda_handler`, `@metrics.log_metrics` decorators - - _Requirements: 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 4.1, 4.2, 4.3, 4.4, 4.5, 4.6_ - - - [x] 2.4 Create `aws_lambda_template/scenarios/api_gateway_dynamodb/__init__.py` - - Export `handler` function - - _Requirements: 1.3_ - - - [x] 2.5 Write unit tests for `api_gateway_dynamodb` handler - - Create `tests/scenarios/api_gateway_dynamodb/test_handler.py` - - Use `monkeypatch` to set `TABLE_NAME`, `SERVICE_NAME`, `METRICS_NAMESPACE`, `POWERTOOLS_SERVICE_NAME`, 
`POWERTOOLS_METRICS_NAMESPACE` - - Use `pytest-mock` to mock `boto3` DynamoDB calls - - Cover: successful GET (200), successful POST (201), item not found (404), invalid body (422), DynamoDB error (500) - - _Requirements: 7.1, 7.2, 7.3, 7.5_ - -- [x] 3. Implement `dynamodb_stream` scenario - - [x] 3.1 Create `aws_lambda_template/scenarios/dynamodb_stream/settings.py` - - Define `Settings(BaseSettings)` with fields: `source_table_name: str`, `destination_table_name: str`, `service_name: str`, `metrics_namespace: str` - - _Requirements: 3.1, 3.2, 3.3, 3.4, 5.7_ - - - [x] 3.2 Create `aws_lambda_template/scenarios/dynamodb_stream/models.py` - - Define `DestinationItem(BaseModel)` with `id: str` and any additional fields mirroring the source `NewImage` - - _Requirements: 5.3_ - - - [x] 3.3 Create `aws_lambda_template/scenarios/dynamodb_stream/handler.py` - - Instantiate `Settings`, `Logger`, `Tracer`, `Metrics` at module level - - Create `boto3` DynamoDB resource at module level - - Accept `DynamoDBStreamEvent`; iterate records; handle `INSERT`/`MODIFY` (deserialise `NewImage` via `TypeDeserializer`, write to destination), `REMOVE` (delete from destination) - - On per-record exception: log error, emit `ProcessingError` metric, continue - - Apply Powertools decorators - - _Requirements: 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 5.1, 5.2, 5.3, 5.4, 5.5, 5.6_ - - - [x] 3.4 Create `aws_lambda_template/scenarios/dynamodb_stream/__init__.py` - - Export `handler` function - - _Requirements: 1.3_ - - - [x] 3.5 Write unit tests for `dynamodb_stream` handler - - Create `tests/scenarios/dynamodb_stream/test_handler.py` - - Use `monkeypatch` for env vars and `pytest-mock` to mock `boto3` calls - - Cover: INSERT processed, MODIFY processed, REMOVE processed, deserialisation failure (error logged, continues), DynamoDB write failure (error logged, continues) - - _Requirements: 7.1, 7.2, 7.4, 7.5_ - -- [x] 4. 
Checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. - -- [x] 5. Implement CDK stacks - - [x] 5.1 Create `infra/__init__.py` and `infra/stacks/__init__.py` (if not present) - - _Requirements: 1.4_ - - - [x] 5.2 Create `infra/stacks/api_gateway_dynamodb_stack.py` - - Define `ApiGatewayDynamodbStack(Stack)` provisioning: `aws_dynamodb.Table` (PAY_PER_REQUEST, partition key `id`), `aws_lambda.Function` pointing to the scenario handler, `aws_apigateway.RestApi` with Lambda proxy integration - - _Requirements: 4.8_ - - - [x] 5.3 Create `infra/stacks/dynamodb_stream_stack.py` - - Define `DynamodbStreamStack(Stack)` provisioning: source `Table` with `StreamViewType.NEW_AND_OLD_IMAGES`, destination `Table`, `Function` with `DynamoDBEventSource` on the source stream - - _Requirements: 5.8_ - -- [x] 6. Final checkpoint — Ensure all tests pass - - Ensure all tests pass, ask the user if questions arise. - -## Notes - -- Tasks marked with `*` are optional and can be skipped for a faster MVP -- Each task references specific requirements for traceability -- Property-based tests are not included here as the logic is primarily I/O and integration-bound; unit tests cover the key correctness properties -- CDK stacks are standalone and independently deployable per requirement 6.3 diff --git a/.kiro/specs/s3-sqs-lambda-template/.config.kiro b/.kiro/specs/s3-sqs-lambda-template/.config.kiro deleted file mode 100644 index 6767747..0000000 --- a/.kiro/specs/s3-sqs-lambda-template/.config.kiro +++ /dev/null @@ -1 +0,0 @@ -{"specId": "fd266669-6168-4b07-b0f8-3d5251be9fce", "workflowType": "requirements-first", "specType": "feature"} diff --git a/.kiro/specs/s3-sqs-lambda-template/design.md b/.kiro/specs/s3-sqs-lambda-template/design.md deleted file mode 100644 index 9917969..0000000 --- a/.kiro/specs/s3-sqs-lambda-template/design.md +++ /dev/null @@ -1,244 +0,0 @@ -# Design Document: S3 → SQS Lambda Template - -## Overview - -This feature adds 
`templates/s3/` — a reusable Lambda function that receives S3 object-creation events, transforms each record into a typed `ProcessedMessage`, and publishes it to an SQS queue. It follows the established `templates//` pattern (a `handler.py` entry point and a `settings.py` using `pydantic-settings`) and is fully instrumented with AWS Lambda Powertools (Logger, Tracer, Metrics, S3Event). - -The module is intentionally self-contained so it can be copied as a starting point for any S3-triggered, SQS-publishing Lambda. - ---- - -## Architecture - -```mermaid -sequenceDiagram - participant S3 as Amazon S3 - participant Lambda as Lambda (handler.py) - participant SQS as Amazon SQS - participant CW as CloudWatch / X-Ray - - S3->>Lambda: S3Event (object-creation records) - Lambda->>Lambda: parse via S3Event utility - loop For each S3Record - Lambda->>Lambda: build ProcessedMessage - Lambda->>Lambda: model_dump_json() - Lambda->>SQS: send_message(QueueUrl, Body=model_dump_json(by_alias=True), MessageGroupId) - Lambda->>CW: structured log + Tracer subsegment - end - Lambda->>CW: flush Metrics (records_processed / publish_failure) - Lambda-->>Lambda Runtime: {batchItemFailures: [...]} -``` - -The handler is decorated with `@logger.inject_lambda_context`, `@tracer.capture_lambda_handler`, and `@metrics.log_metrics` so all cross-cutting concerns are handled by Powertools without manual boilerplate. 
- ---- - -## Components and Interfaces - -### `templates/s3/settings.py` - -```python -from pydantic import Field -from pydantic_settings import BaseSettings - - -class Settings(BaseSettings, case_sensitive=False): - sqs_queue_url: str = Field(description="SQS queue URL to publish processed messages to") - aws_region: str = Field(default="us-east-1", description="AWS region for the SQS client") - powertools_service_name: str = Field(default="s3-sqs-processor", description="Powertools service name") - log_level: str = Field(default="INFO", description="Log level for the Lambda Logger") -``` - -Instantiated once at module load: `settings = Settings()`. A missing `SQS_QUEUE_URL` raises `pydantic_settings.ValidationError` before the handler is ever invoked. - -### `templates/s3/handler.py` - -Public interface: a single Lambda entry point `lambda_handler(event, context)`. - -Internal helpers (unit-testable in isolation): - -| Function | Responsibility | -|---|---| -| `_parse_event(event)` | Wraps `S3Event(data=event)`, raises `ValueError` on failure | -| `_build_message(record)` | Constructs a `ProcessedMessage` from an `S3Record` | -| `_publish(msg, bucket)` | Calls `sqs_client.send_message`; raises on error | -| `lambda_handler` | Orchestrates the above; collects `batchItemFailures` | - -### `ProcessedMessage` (Pydantic model, defined in `handler.py` or a sibling `models.py`) - -```python -from enum import StrEnum - -from pydantic import BaseModel, Field -from pydantic.alias_generators import to_camel - - -class EventSource(StrEnum): - s3 = "s3" - - -class ProcessedMessage(BaseModel, populate_by_name=True, alias_generator=to_camel): - bucket: str = Field(description="S3 bucket name") - key: str = Field(description="S3 object key") - event_time: str = Field(description="ISO-8601 event timestamp") - source: EventSource = Field(description="Origin event source") -``` - -### SQS client - -A module-level `boto3.client("sqs", region_name=settings.aws_region)` instance, 
reused across invocations. - ---- - -## Data Models - -### Input: S3 Event (AWS canonical shape) - -Parsed by `aws_lambda_powertools.utilities.data_classes.S3Event`. Each record exposes: -- `s3.bucket.name` → `bucket: str` -- `s3.object.key` → `key: str` -- `event_time` → ISO-8601 string - -### Output: `ProcessedMessage` (JSON string sent to SQS) - -```json -{ - "bucket": "my-bucket", - "key": "path/to/object.csv", - "event_time": "2025-01-15T10:30:00+00:00", - "source": "s3" -} -``` - -### Lambda response: partial batch failure format - -```json -{ - "batchItemFailures": [ - {"itemIdentifier": "path/to/failed-object.csv"} - ] -} -``` - -`itemIdentifier` is the object key of the failed record. - ---- - -## Correctness Properties - -*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* - -### Property 1: Record count preservation - -*For any* non-empty list of valid S3 records (1–20), the number of SQS messages published by the handler SHALL equal the number of input records. - -**Validates: Requirements 1.1, 7.2, 7.5** - -### Property 2: Invalid event raises ValueError - -*For any* payload that is not a valid S3 event shape (missing required keys, wrong types), the handler SHALL raise a `ValueError` and publish zero SQS messages. - -**Validates: Requirements 1.3** - -### Property 3: Missing SQS_QUEUE_URL raises ValidationError - -*For any* environment where `SQS_QUEUE_URL` is absent or empty, instantiating `Settings` SHALL raise a `pydantic_settings.ValidationError` before the handler runs. 
- -**Validates: Requirements 2.5, 7.6** - -### Property 4: ProcessedMessage serialization round-trip - -*For any* valid `ProcessedMessage` instance, calling `.model_dump_json()` and then parsing the result with `ProcessedMessage.model_validate_json()` SHALL produce an object equal to the original. - -**Validates: Requirements 3.1, 3.2** - -### Property 5: MessageGroupId equals bucket name - -*For any* S3 record with a given bucket name, the `MessageGroupId` attribute of the corresponding SQS `send_message` call SHALL equal that bucket name. - -**Validates: Requirements 4.2** - -### Property 6: Partial batch failure independence - -*For any* batch of S3 records where a random subset fails to publish (mocked SQS errors), the handler SHALL: -1. Still publish all non-failing records to SQS. -2. Return a `batchItemFailures` list containing exactly the object keys of the failed records. -3. Not raise an unhandled exception. - -**Validates: Requirements 6.1, 6.2, 6.3, 6.4, 7.3** - -### Property 7: Non-idempotent double invocation - -*For any* valid S3 event with N records, invoking the handler twice with the same event SHALL result in exactly 2×N SQS messages in the queue (each invocation publishes independently). - -**Validates: Requirements 7.4** - -### Property 8: records_processed metric equals success count - -*For any* batch of N records where M fail, the `records_processed` metric value emitted by the handler SHALL equal N − M. 
- -**Validates: Requirements 4.4** - ---- - -## Error Handling - -| Failure scenario | Handler behaviour | -|---|---| -| Unparseable S3 event | Log error via Logger, raise `ValueError` — propagates to Lambda runtime (triggers retry / DLQ) | -| `ProcessedMessage` construction fails | Log error, add key to `batchItemFailures`, continue | -| `sqs.send_message` raises | Log error, increment `publish_failure` metric, add key to `batchItemFailures`, continue | -| `SQS_QUEUE_URL` missing at cold start | `Settings` raises `ValidationError` — Lambda init fails, no invocations proceed | - -Per-record failures never abort the batch. Only a completely unparseable event propagates as an exception to the runtime. - ---- - -## Testing Strategy - -### Tools - -- **pytest** — test runner -- **Hypothesis** — property-based testing (`@given` + `@settings`) -- **moto** (`mock_aws`) — SQS mocking; define a local `sqs` fixture using `mock_aws` (no `moto_aws` autouse fixture exists in `tests/conftest.py`) -- **monkeypatch** — inject environment variables (never modify `os.environ` directly in test bodies) - -### Test file - -`tests/s3/test_handler.py` - -### Unit tests (example-based) - -- `Settings` defaults (aws_region, powertools_service_name, log_level) -- `Settings` with all env vars overridden -- `_build_message` produces correct fields for a concrete S3 record -- Handler returns empty `batchItemFailures` for a single valid record -- Handler logs structured fields per record (mock Logger) -- `send_message` failure → record appears in `batchItemFailures`, handler does not raise -- Zero-record event → handler returns, zero SQS messages - -### Property-based tests (Hypothesis, minimum 100 iterations each) - -Each property test is tagged with a comment referencing the design property. 
- -``` -# Feature: s3-sqs-lambda-template, Property 1: Record count preservation -# Feature: s3-sqs-lambda-template, Property 2: Invalid event raises ValueError -# Feature: s3-sqs-lambda-template, Property 3: Missing SQS_QUEUE_URL raises ValidationError -# Feature: s3-sqs-lambda-template, Property 4: ProcessedMessage serialization round-trip -# Feature: s3-sqs-lambda-template, Property 5: MessageGroupId equals bucket name -# Feature: s3-sqs-lambda-template, Property 6: Partial batch failure independence -# Feature: s3-sqs-lambda-template, Property 7: Non-idempotent double invocation -# Feature: s3-sqs-lambda-template, Property 8: records_processed metric equals success count -``` - -Hypothesis strategies needed: -- `s3_record_strategy` — generates dicts with random but valid bucket names (S3 naming rules) and object keys -- `invalid_event_strategy` — generates arbitrary dicts that lack the S3 event structure -- `processed_message_strategy` — generates `ProcessedMessage` instances with random field values - -### Integration / smoke checks - -- Verify `@logger.inject_lambda_context`, `@tracer.capture_lambda_handler`, `@metrics.log_metrics` decorators are present (code inspection or import-time check) -- Verify `settings = Settings()` is at module level (not inside `lambda_handler`) -- Verify no `os.environ` / `os.getenv` calls in `handler.py` or `settings.py` diff --git a/.kiro/specs/s3-sqs-lambda-template/requirements.md b/.kiro/specs/s3-sqs-lambda-template/requirements.md deleted file mode 100644 index 225c68e..0000000 --- a/.kiro/specs/s3-sqs-lambda-template/requirements.md +++ /dev/null @@ -1,118 +0,0 @@ -# Requirements Document - -## Introduction - -This feature adds a reusable Lambda function template that is triggered by S3 object-creation events and forwards processed records to an SQS queue. 
The handler follows the existing `templates/` pattern: a `handler.py` entry point, a `settings.py` using `pydantic-settings` `BaseSettings`, and full AWS Lambda Powertools instrumentation (structured logging via `Logger`, tracing via `Tracer`, metrics via `Metrics`, and event parsing via the `S3Event` utility). Fixed string constants use `StrEnum`. Tests use `pytest` + `Hypothesis` with `moto` for AWS mocking. - -## Glossary - -- **Handler**: The Lambda function entry point (`templates/s3/handler.py`) that receives S3 events and publishes to SQS. -- **Settings**: The Pydantic `BaseSettings` model (`templates/s3/settings.py`) that reads configuration from environment variables. -- **S3Event**: The AWS Lambda Powertools utility class used to parse and iterate over S3 event records. -- **S3Record**: A single object-creation record extracted from an `S3Event`, containing bucket name and object key. -- **ProcessedMessage**: The Pydantic model representing the transformed payload sent to SQS. -- **SQS_Client**: The boto3 SQS client used to publish `ProcessedMessage` instances to the target queue. -- **Logger**: The AWS Lambda Powertools `Logger` instance providing structured JSON log output. -- **Tracer**: The AWS Lambda Powertools `Tracer` instance providing AWS X-Ray tracing. -- **Metrics**: The AWS Lambda Powertools `Metrics` instance publishing custom CloudWatch metrics. -- **DLQ**: Dead-letter queue — an SQS queue that receives messages that the Handler fails to process after exhausting retries. - ---- - -## Requirements - -### Requirement 1: S3 Event Parsing - -**User Story:** As a platform engineer, I want the Lambda function to reliably parse incoming S3 event payloads, so that each object-creation record is extracted and available for processing. - -#### Acceptance Criteria - -1. WHEN an S3 event is received, THE Handler SHALL parse it using the Powertools `S3Event` utility and iterate over every record it contains. -2. 
WHEN an S3 event contains zero records, THE Handler SHALL return without publishing any messages to SQS. -3. IF an S3 event payload cannot be parsed into a valid `S3Event`, THEN THE Handler SHALL log the error via Logger and raise a `ValueError` with a descriptive message. -4. THE Handler SHALL extract the bucket name and object key from each `S3Record` without accessing `os.environ` directly. - ---- - -### Requirement 2: Configuration via Environment Variables - -**User Story:** As a platform engineer, I want all runtime configuration to be read from environment variables through a typed Pydantic model, so that the function is portable and testable without code changes. - -#### Acceptance Criteria - -1. THE Settings SHALL expose a `sqs_queue_url` field of type `str` with no default value, sourced from the `SQS_QUEUE_URL` environment variable. -2. THE Settings SHALL expose a `aws_region` field of type `str` with a default value of `"us-east-1"`, sourced from the `AWS_DEFAULT_REGION` environment variable. -3. THE Settings SHALL expose a `powertools_service_name` field of type `str` with a default value of `"s3-sqs-processor"`, sourced from the `POWERTOOLS_SERVICE_NAME` environment variable. -4. THE Settings SHALL expose a `log_level` field of type `str` with a default value of `"INFO"`, sourced from the `LOG_LEVEL` environment variable. -5. IF the `sqs_queue_url` environment variable is absent or empty, THEN THE Settings SHALL raise a `ValidationError` during instantiation. -6. THE Handler SHALL instantiate Settings once at module load time and reuse the same instance for all invocations. - ---- - -### Requirement 3: Message Processing - -**User Story:** As a platform engineer, I want each S3 record to be transformed into a structured `ProcessedMessage`, so that downstream consumers receive a consistent, typed payload. - -#### Acceptance Criteria - -1. 
WHEN an `S3Record` is processed, THE Handler SHALL construct a `ProcessedMessage` containing: `bucket` (str), `key` (str), `event_time` (ISO-8601 datetime string), and `source` (a `StrEnum` value identifying the origin system). -2. THE ProcessedMessage SHALL be serialised to JSON using Pydantic's `.model_dump_json()` before being sent to SQS. -3. WHEN serialisation of a `ProcessedMessage` fails, THE Handler SHALL log the error via Logger and skip that record, continuing to process remaining records. -4. THE Handler SHALL add a structured log entry via Logger for each record processed, including `bucket`, `key`, and `event_time` fields. - ---- - -### Requirement 4: SQS Publishing - -**User Story:** As a platform engineer, I want each processed message to be published to an SQS queue, so that downstream services can consume the events asynchronously. - -#### Acceptance Criteria - -1. WHEN a `ProcessedMessage` is ready, THE SQS_Client SHALL publish it to the queue identified by `Settings.sqs_queue_url` using `send_message`. -2. THE Handler SHALL use the `MessageGroupId` attribute when publishing to a FIFO queue, derived from the S3 bucket name. -3. IF `SQS_Client.send_message` raises an exception, THEN THE Handler SHALL log the error via Logger, increment a `publish_failure` Metrics counter, and continue processing remaining records. -4. WHEN all records in an event have been processed, THE Handler SHALL publish a `records_processed` Metrics counter equal to the number of successfully published messages. -5. THE Handler SHALL flush Metrics at the end of every invocation regardless of success or failure. - ---- - -### Requirement 5: Observability - -**User Story:** As a platform engineer, I want the Lambda function to emit structured logs, X-Ray traces, and CloudWatch metrics, so that I can monitor and debug it in production. - -#### Acceptance Criteria - -1. 
THE Handler SHALL decorate the entry-point function with `@logger.inject_lambda_context` to automatically include Lambda context fields in every log entry. -2. THE Handler SHALL decorate the entry-point function with `@tracer.capture_lambda_handler` to create an X-Ray segment for each invocation. -3. THE Handler SHALL decorate the entry-point function with `@metrics.log_metrics` to ensure metrics are flushed after every invocation. -4. WHILE processing a record, THE Handler SHALL create a Tracer subsegment named `"process_record"` that wraps the per-record processing logic. -5. THE Logger SHALL be initialised with the service name sourced from `Settings.powertools_service_name`. -6. THE Metrics SHALL be initialised with a namespace of `"S3SQSProcessor"` and a service dimension sourced from `Settings.powertools_service_name`. - ---- - -### Requirement 6: Error Handling and Resilience - -**User Story:** As a platform engineer, I want the function to handle partial failures gracefully, so that a single bad record does not prevent other records in the same batch from being processed. - -#### Acceptance Criteria - -1. WHEN processing a batch of S3 records, THE Handler SHALL process each record independently so that a failure on one record does not abort processing of subsequent records. -2. IF an unhandled exception escapes the per-record processing block, THEN THE Handler SHALL log the exception via Logger with `exc_info=True` and continue to the next record. -3. WHEN at least one record in a batch fails to publish, THE Handler SHALL return a response that includes a `batchItemFailures` list containing the identifiers of failed records, following the Lambda partial batch response format. -4. THE Handler SHALL not raise an unhandled exception to the Lambda runtime for per-record failures; only a complete inability to parse the event SHALL propagate as an exception. 
- ---- - -### Requirement 7: Testing - -**User Story:** As a developer, I want a comprehensive test suite for the handler, so that regressions are caught before deployment. - -#### Acceptance Criteria - -1. THE test suite SHALL mock SQS using `moto` and set required environment variables via `monkeypatch` or `pytest` fixtures — never by modifying `os.environ` directly in test bodies. -2. WHEN a valid S3 event with N records is provided, THE Handler SHALL publish exactly N messages to the mocked SQS queue (property: output count equals input count for valid records). -3. WHEN an S3 event with a mix of valid and invalid records is provided, THE Handler SHALL publish only the valid records and include the invalid record identifiers in `batchItemFailures`. -4. FOR ALL non-empty lists of valid S3 records, processing the same event twice SHALL result in exactly twice as many SQS messages (idempotence is NOT expected; each invocation publishes independently). -5. THE test suite SHALL include a Hypothesis property test verifying that for any list of 1–20 valid S3 records, the number of SQS messages published equals the number of input records. -6. THE test suite SHALL verify that `Settings` raises `ValidationError` when `SQS_QUEUE_URL` is not set. diff --git a/.kiro/specs/s3-sqs-lambda-template/tasks.md b/.kiro/specs/s3-sqs-lambda-template/tasks.md deleted file mode 100644 index c265f96..0000000 --- a/.kiro/specs/s3-sqs-lambda-template/tasks.md +++ /dev/null @@ -1,124 +0,0 @@ -# Implementation Plan: S3 → SQS Lambda Template - -## Overview - -Implement `templates/s3/` — a reusable Lambda handler that receives S3 object-creation events, transforms each record into a typed `ProcessedMessage`, and publishes it to SQS. Fully instrumented with AWS Lambda Powertools and covered by pytest + Hypothesis property tests. - -## Tasks - -- [ ] 1. 
Create module skeleton and settings - - Create `templates/s3/__init__.py` (empty) - - Create `templates/s3/settings.py` with `Settings(BaseSettings, case_sensitive=False)` exposing `sqs_queue_url`, `aws_region`, `powertools_service_name`, and `log_level` fields, each annotated with `Field(description="...")`, sourced from the corresponding environment variables - - Instantiate `settings = Settings()` at module level in `settings.py` - - _Requirements: 2.1, 2.2, 2.3, 2.4, 2.5, 2.6_ - -- [~] 2. Implement data models - - [~] 2.1 Create `EventSource` StrEnum and `ProcessedMessage` Pydantic model in `templates/s3/handler.py` (or a sibling `models.py`) - - `EventSource(StrEnum)` with value `s3 = "s3"` - - `ProcessedMessage(BaseModel, populate_by_name=True, alias_generator=to_camel)` with fields: `bucket: str`, `key: str`, `event_time: str`, `source: EventSource` — every field must have `Field(description="...")` - - _Requirements: 3.1, 3.2_ - - - [ ]* 2.2 Write property test for ProcessedMessage serialization round-trip - - `# Feature: s3-sqs-lambda-template, Property 4: ProcessedMessage serialization round-trip` - - **Property 4: ProcessedMessage serialization round-trip** - - **Validates: Requirements 3.1, 3.2** - -- [~] 3. 
Implement handler internals - - [~] 3.1 Implement `_parse_event(event)` in `handler.py` - - Wraps `S3Event(data=event)`; raises `ValueError` with a descriptive message on failure - - _Requirements: 1.1, 1.3_ - - - [~] 3.2 Implement `_build_message(record)` in `handler.py` - - Constructs a `ProcessedMessage` from an `S3Record` (`bucket`, `key`, `event_time`, `source=EventSource.s3`) - - _Requirements: 3.1, 3.4_ - - - [~] 3.3 Implement `_publish(msg, bucket)` in `handler.py` - - Calls `sqs_client.send_message(QueueUrl=..., MessageBody=msg.model_dump_json(by_alias=True), MessageGroupId=bucket)` - - Raises on error (caller handles exception) - - _Requirements: 4.1, 4.2_ - - - [ ]* 3.4 Write property test for MessageGroupId equals bucket name - - `# Feature: s3-sqs-lambda-template, Property 5: MessageGroupId equals bucket name` - - **Property 5: MessageGroupId equals bucket name** - - **Validates: Requirements 4.2** - -- [~] 4. Implement `lambda_handler` orchestration - - [~] 4.1 Wire `_parse_event`, `_build_message`, and `_publish` inside `lambda_handler(event, context)` - - Decorate with `@logger.inject_lambda_context`, `@tracer.capture_lambda_handler`, `@metrics.log_metrics` - - Wrap per-record logic in a `tracer.provider.in_subsegment("process_record")` block - - Log structured fields (`bucket`, `key`, `event_time`) per record via Logger - - Collect `batchItemFailures` (object key as `itemIdentifier`) for any record that fails to build or publish - - Increment `publish_failure` metric on `send_message` exceptions - - Publish `records_processed` metric equal to successful publish count - - Return `{"batchItemFailures": [...]}` at the end of every invocation - - _Requirements: 1.1, 1.2, 1.4, 3.3, 3.4, 4.3, 4.4, 4.5, 5.1, 5.2, 5.3, 5.4, 6.1, 6.2, 6.3, 6.4_ - - - [~] 4.2 Initialise module-level Powertools instances and SQS client - - `logger = Logger(service=settings.powertools_service_name)` - - `tracer = Tracer()` - - `metrics = Metrics(namespace="S3SQSProcessor", 
service=settings.powertools_service_name)` - - `sqs_client = boto3.client("sqs", region_name=settings.aws_region)` - - _Requirements: 2.6, 5.5, 5.6_ - -- [~] 5. Checkpoint — verify module imports cleanly - - Ensure `from templates.s3.handler import lambda_handler` succeeds with `SQS_QUEUE_URL` set - - Ensure all tests pass, ask the user if questions arise. - -- [~] 6. Write example-based unit tests - - [ ] 6.1 Create `tests/s3/__init__.py` and `tests/s3/test_handler.py` - - Use `mock_aws` from `moto` (via a fixture) for SQS mocking — `moto_aws` does not exist in `tests/conftest.py`; define a local `aws_credentials` + `sqs` fixture or use `mock_aws` as a decorator/context manager - - Use `monkeypatch` to inject `SQS_QUEUE_URL` and other env vars — never modify `os.environ` directly - - Test: `Settings` defaults are correct - - Test: `Settings` raises `ValidationError` when `SQS_QUEUE_URL` is absent - - Test: zero-record event → handler returns, zero SQS messages published - - Test: single valid record → handler returns empty `batchItemFailures`, one SQS message published - - Test: `send_message` failure → record appears in `batchItemFailures`, handler does not raise - - Test: `_build_message` produces correct fields for a concrete `S3Record` - - _Requirements: 7.1, 7.3, 7.6, 1.2, 6.3_ - - - [ ]* 6.2 Write property test for record count preservation - - `# Feature: s3-sqs-lambda-template, Property 1: Record count preservation` - - **Property 1: Record count preservation** - - Use `s3_record_strategy` generating 1–20 valid S3 records - - **Validates: Requirements 1.1, 7.2, 7.5** - - - [ ]* 6.3 Write property test for invalid event raises ValueError - - `# Feature: s3-sqs-lambda-template, Property 2: Invalid event raises ValueError` - - **Property 2: Invalid event raises ValueError** - - Use `invalid_event_strategy` generating arbitrary dicts lacking S3 event structure - - **Validates: Requirements 1.3** - - - [ ]* 6.4 Write property test for missing SQS_QUEUE_URL 
raises ValidationError - - `# Feature: s3-sqs-lambda-template, Property 3: Missing SQS_QUEUE_URL raises ValidationError` - - **Property 3: Missing SQS_QUEUE_URL raises ValidationError** - - **Validates: Requirements 2.5, 7.6** - - - [ ]* 6.5 Write property test for partial batch failure independence - - `# Feature: s3-sqs-lambda-template, Property 6: Partial batch failure independence` - - **Property 6: Partial batch failure independence** - - Mock SQS to fail for a random subset of records; assert non-failing records are published and `batchItemFailures` contains exactly the failed keys - - **Validates: Requirements 6.1, 6.2, 6.3, 6.4, 7.3** - - - [ ]* 6.6 Write property test for non-idempotent double invocation - - `# Feature: s3-sqs-lambda-template, Property 7: Non-idempotent double invocation` - - **Property 7: Non-idempotent double invocation** - - Invoke handler twice with the same event; assert SQS message count equals 2×N - - **Validates: Requirements 7.4** - - - [ ]* 6.7 Write property test for records_processed metric equals success count - - `# Feature: s3-sqs-lambda-template, Property 8: records_processed metric equals success count` - - **Property 8: records_processed metric equals success count** - - For N records where M fail, assert emitted `records_processed` value equals N − M - - **Validates: Requirements 4.4** - -- [~] 7. Final checkpoint — ensure all tests pass - - Run `make test` and confirm all tests in `tests/s3/test_handler.py` pass - - Ensure all tests pass, ask the user if questions arise. 
- -## Notes - -- Tasks marked with `*` are optional and can be skipped for a faster MVP -- Each task references specific requirements for traceability -- Property tests require minimum 100 Hypothesis iterations each (`@settings(max_examples=100)`) -- The `.hypothesis/` directory must not be deleted — it stores shrinking examples -- No `os.environ` / `os.getenv` calls are permitted in `handler.py` or `settings.py` From 429a97418db075d2c6f0e385adcc0cc54f75726f Mon Sep 17 00:00:00 2001 From: Amr Abed Date: Sat, 11 Apr 2026 09:47:03 -0400 Subject: [PATCH 5/8] Update documentation --- docs/README.md | 20 ++++++++++++++++++-- docs/reference/agent.md | 2 +- docs/reference/api.md | 2 +- docs/reference/eventbridge.md | 3 +++ docs/reference/graphql.md | 3 +++ docs/reference/repository.md | 2 +- docs/reference/stream.md | 2 +- docs/template/index.md | 7 ------- mkdocs.yml | 13 ++++++++----- pyproject.toml | 2 +- templates/sqs/handler.py | 3 ++- 11 files changed, 39 insertions(+), 20 deletions(-) create mode 100644 docs/reference/eventbridge.md create mode 100644 docs/reference/graphql.md delete mode 100644 docs/template/index.md diff --git a/docs/README.md b/docs/README.md index 294ec40..39c18a5 100644 --- a/docs/README.md +++ b/docs/README.md @@ -3,9 +3,25 @@ [![Code style: ruff](https://img.shields.io/badge/code%20style-ruff-D7FF64.svg?logo=ruff&style=flat-square)](https://docs.astral.sh/ruff) [![License](https://img.shields.io/badge/license-MIT-blue.svg?style=flat-square)](LICENSE.md) -Production-ready Python AWS Lambda templates for different scenarios. -See available templates [here](template/index.md). +Production-ready plug-and-play AWS Lambda Python templates for different real-life scenarios. 
+The templates apply best practices by using [AWS Lambda Powertools](https://docs.aws.amazon.com/powertools/python) for: + +- CloudWatch Logs and Metrics +- X-Ray Tracing +- Batch Processing +- Event Handling +- Parameter/Secret Loading + +## Templates + +- [Bedrock Agent](template/agent.md): Handle Bedrock Agent function-based actions +- [GraphQL API](template/graphql.md): Handle AppSync GraphQL requests +- [REST API](template/api.md): Handle REST API requests +- [DynamoDB Stream](template/stream.md): Batch process stream events +- [EventBridge](template/eventbridge.md): Call external API on event +- [S3 to SQS](template/s3.md): Send messages to queue on S3 object changes +- [SQS to DynamoDB](template/sqs.md): Batch Process SQS messages ## Features diff --git a/docs/reference/agent.md b/docs/reference/agent.md index 44b957a..48c2d35 100644 --- a/docs/reference/agent.md +++ b/docs/reference/agent.md @@ -1,3 +1,3 @@ -# Agent Reference +# Bedrock Agent ::: templates.agent.handler \ No newline at end of file diff --git a/docs/reference/api.md b/docs/reference/api.md index 38a3643..deade8f 100644 --- a/docs/reference/api.md +++ b/docs/reference/api.md @@ -1,2 +1,2 @@ -# API +# REST API ::: templates.api.handler diff --git a/docs/reference/eventbridge.md b/docs/reference/eventbridge.md new file mode 100644 index 0000000..8d8afdf --- /dev/null +++ b/docs/reference/eventbridge.md @@ -0,0 +1,3 @@ +# EventBridge + +::: templates.eventbridge.handler \ No newline at end of file diff --git a/docs/reference/graphql.md b/docs/reference/graphql.md new file mode 100644 index 0000000..bba47e9 --- /dev/null +++ b/docs/reference/graphql.md @@ -0,0 +1,3 @@ +# GraphQL API + +::: templates.graphql.handler \ No newline at end of file diff --git a/docs/reference/repository.md b/docs/reference/repository.md index fc8f186..f966916 100644 --- a/docs/reference/repository.md +++ b/docs/reference/repository.md @@ -1,2 +1,2 @@ -# Repository +# DynamoDB Repository ::: templates.repository diff 
--git a/docs/reference/stream.md b/docs/reference/stream.md index 490f9d9..ffcda21 100644 --- a/docs/reference/stream.md +++ b/docs/reference/stream.md @@ -1,2 +1,2 @@ -# Stream +# DynamoDB Stream ::: templates.stream.handler diff --git a/docs/template/index.md b/docs/template/index.md deleted file mode 100644 index ffbebf4..0000000 --- a/docs/template/index.md +++ /dev/null @@ -1,7 +0,0 @@ -- [Bedrock Agent](agent.md): Handle Bedrock Agent function-based actions -- [GraphQL API](graphql.md): Handle AppSync GraphQL requests -- [REST API](api.md): Handle REST API requests -- [DynamoDB Stream](stream.md): Batch process stream events -- [EventBridge](eventbridge.md): Call external API on event -- [S3 to SQS](s3.md): Send messages to queue on S3 object changes -- [SQS to DynamoDB](sqs.md): Batch Process SQS messages and store processed messages in DynamoDB table \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index 3ec35a0..6db7109 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -26,22 +26,25 @@ theme: nav: - Overview: - Overview: README.md + - Contributing: CONTRIBUTING.md - License: LICENSE.md - Templates: - - Templates: template/index.md - - Rest API: template/api.md - Bedrock Agent: template/agent.md + - GraphQL API: template/graphql.md + - REST API: template/api.md - DynamoDB Stream: template/stream.md + - EventBridge: template/eventbridge.md - S3 to SQS: template/s3.md - SQS to DynamoDB: template/sqs.md - Reference: - - API: reference/api.md - Bedrock Agent: reference/agent.md - - Stream: reference/stream.md + - GraphQL API: reference/graphql.md + - REST API: reference/api.md + - DynamoDB Stream: reference/stream.md + - EventBridge: reference/eventbridge.md - S3: reference/s3.md - SQS: reference/sqs.md - Repository: reference/repository.md - - Contributing: CONTRIBUTING.md plugins: - search diff --git a/pyproject.toml b/pyproject.toml index 01ed4e0..4844d5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ source = ["templates"] 
[tool.coverage.report] omit = ["tests/*"] -# fail_under = 90 +fail_under = 90 [tool.pyright] exclude = ["**/__pycache__", "**/.pytest_cache"] diff --git a/templates/sqs/handler.py b/templates/sqs/handler.py index 165f09c..09ab195 100644 --- a/templates/sqs/handler.py +++ b/templates/sqs/handler.py @@ -1,5 +1,6 @@ from aws_lambda_powertools import Logger, Metrics, Tracer from aws_lambda_powertools.utilities.batch import BatchProcessor, EventType, process_partial_response +from aws_lambda_powertools.utilities.batch.types import PartialItemFailureResponse from aws_lambda_powertools.utilities.data_classes.sqs_event import SQSRecord from aws_lambda_powertools.utilities.typing import LambdaContext @@ -55,7 +56,7 @@ def handle_record(self, record: SQSRecord) -> None: @logger.inject_lambda_context @tracer.capture_lambda_handler @metrics.log_metrics -def main(event: dict, context: LambdaContext): +def main(event: dict, context: LambdaContext) -> PartialItemFailureResponse: """Lambda entry point for the SQS-to-DynamoDB handler. 
Args: From 4e2c582b2ee34ef2fd2da42b0f11f865d35a99d4 Mon Sep 17 00:00:00 2001 From: Amr Abed Date: Sat, 11 Apr 2026 10:10:53 -0400 Subject: [PATCH 6/8] Clean up code --- templates/graphql/handler.py | 31 ++++++++------------ tests/conftest.py | 26 +++++++++++++++++ tests/graphql/test_handler.py | 54 ++++++++--------------------------- tests/sqs/conftest.py | 28 ------------------ tests/sqs/test_handler.py | 7 ++++- 5 files changed, 56 insertions(+), 90 deletions(-) delete mode 100644 tests/sqs/conftest.py diff --git a/templates/graphql/handler.py b/templates/graphql/handler.py index 143e907..7ba6e52 100644 --- a/templates/graphql/handler.py +++ b/templates/graphql/handler.py @@ -8,7 +8,7 @@ from templates.graphql.settings import Settings from templates.repository import Repository -settings = Settings() +settings = Settings() # type: ignore logger = Logger(service=settings.service_name) tracer = Tracer(service=settings.service_name) @@ -18,10 +18,6 @@ app = AppSyncResolver() -def get_repository() -> Repository: - return repository - - @app.resolver(type_name="Query", field_name="getItem") @tracer.capture_method def get_item(id: str) -> dict | None: @@ -34,10 +30,9 @@ def get_item(id: str) -> dict | None: The item if found, or None. """ try: - return get_repository().get_item(id) - except Exception as exc: - logger.error("DynamoDB get_item failed", exc_info=exc) - raise + return repository.get_item(id) + except Exception as error: + raise RuntimeError(f"Failed to get item with ID '{id}'. Cause: {error}") from error @app.resolver(type_name="Query", field_name="listItems") @@ -49,10 +44,9 @@ def list_items() -> list[dict]: A list of items. """ try: - return get_repository().list_items() - except Exception as exc: - logger.error("DynamoDB list_items failed", exc_info=exc) - raise + return repository.list_items() + except Exception as error: + raise RuntimeError(f"Failed to list items. 
Cause: {error}") from error @app.resolver(type_name="Mutation", field_name="createItem") @@ -67,12 +61,11 @@ def create_item(name: str) -> dict: The created item. """ try: - item = Item(name=name) - get_repository().put_item(item.model_dump()) - return item.dump() - except (ValidationError, Exception) as exc: - logger.error("Failed to create item", exc_info=exc) - raise + item = Item(name=name).dump() + repository.put_item(item) + return item + except (ValidationError, Exception) as error: + raise RuntimeError(f"Failed to create item with name '{name}'. Cause: {error}") from error @logger.inject_lambda_context(correlation_id_path=correlation_paths.APPSYNC_RESOLVER) diff --git a/tests/conftest.py b/tests/conftest.py index b5991b2..46efb5b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock from aws_lambda_powertools.utilities.typing import LambdaContext +from moto import mock_aws from pytest import fixture # aws_xray_sdk is not installed in the test environment; stub it out before @@ -28,3 +29,28 @@ def lambda_context(): ctx.invoked_function_arn = "arn:aws:lambda:us-east-1:123456789012:function:test-function" ctx.aws_request_id = "test-request-id" return ctx + + +@fixture +def table_name(): + return "test-table" + + +@fixture(autouse=True) +def mock_table(table_name): + from boto3 import resource + + with mock_aws(): + yield resource("dynamodb").create_table( + TableName=table_name, + KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], + AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}], + BillingMode="PAY_PER_REQUEST", + ) + + +@fixture(autouse=True) +def repository(mock_table): + from templates.repository import Repository + + return Repository(mock_table.table_name) diff --git a/tests/graphql/test_handler.py b/tests/graphql/test_handler.py index 71babd6..f329d2a 100644 --- a/tests/graphql/test_handler.py +++ b/tests/graphql/test_handler.py @@ -1,6 +1,3 @@ -from unittest.mock import 
MagicMock - -from aws_lambda_powertools.utilities.typing import LambdaContext from pytest import fixture, main @@ -12,62 +9,35 @@ def env(monkeypatch): @fixture -def mock_repository(mocker): - mock = mocker.MagicMock() - mocker.patch("templates.graphql.handler.get_repository", return_value=mock) - return mock +def item(): + return {"id": "123", "name": "Test Item"} @fixture -def lambda_context(): - ctx = MagicMock(spec=LambdaContext) - ctx.function_name = "test-function" - return ctx - - -def test_get_item_resolver(mock_repository, lambda_context): +def test_get_item_resolver(repository, item, lambda_context): from templates.graphql.handler import main - mock_repository.get_item.return_value = {"id": "123", "name": "Test Item"} - event = { - "info": {"parentTypeName": "Query", "fieldName": "getItem"}, - "arguments": {"id": "123"}, - } - - result = main(event, lambda_context) - - assert result == {"id": "123", "name": "Test Item"} - mock_repository.get_item.assert_called_once_with("123") + event = {"info": {"parentTypeName": "Query", "fieldName": "getItem"}, "arguments": {"id": "123"}} + repository.put_item(item) + assert main(event, lambda_context) == item -def test_list_items_resolver(mock_repository, lambda_context): +def test_list_items_resolver(repository, item, lambda_context): from templates.graphql.handler import main - mock_repository.list_items.return_value = [{"id": "123", "name": "Test Item"}] - event = { - "info": {"parentTypeName": "Query", "fieldName": "listItems"}, - "arguments": {}, - } - - result = main(event, lambda_context) + event = {"info": {"parentTypeName": "Query", "fieldName": "listItems"}, "arguments": {}} + repository.put_item(item) + assert main(event, lambda_context) == [item] - assert result == [{"id": "123", "name": "Test Item"}] - mock_repository.list_items.assert_called_once() - -def test_create_item_resolver(mock_repository, lambda_context): +def test_create_item_resolver(lambda_context): from templates.graphql.handler import main 
- event = { - "info": {"parentTypeName": "Mutation", "fieldName": "createItem"}, - "arguments": {"name": "New Item"}, - } + event = {"info": {"parentTypeName": "Mutation", "fieldName": "createItem"}, "arguments": {"name": "New Item"}} result = main(event, lambda_context) - assert result["name"] == "New Item" assert "id" in result - mock_repository.put_item.assert_called_once() if __name__ == "__main__": diff --git a/tests/sqs/conftest.py b/tests/sqs/conftest.py deleted file mode 100644 index 6c69756..0000000 --- a/tests/sqs/conftest.py +++ /dev/null @@ -1,28 +0,0 @@ -from moto import mock_aws -from pytest import fixture - -from templates.repository import Repository - - -@fixture -def table_name(): - return "test-table" - - -@fixture(autouse=True) -def env(monkeypatch, table_name): - monkeypatch.setenv("TABLE_NAME", table_name) - - -@fixture -def repository(table_name): - from boto3 import resource - - with mock_aws(): - resource("dynamodb", region_name="us-east-1").create_table( - TableName=table_name, - KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}], - AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}], - BillingMode="PAY_PER_REQUEST", - ) - yield Repository(table_name) diff --git a/tests/sqs/test_handler.py b/tests/sqs/test_handler.py index 9aa66e5..0df5d0b 100644 --- a/tests/sqs/test_handler.py +++ b/tests/sqs/test_handler.py @@ -2,7 +2,12 @@ from unittest.mock import MagicMock from aws_lambda_powertools.utilities.typing import LambdaContext -from pytest import main +from pytest import fixture, main + + +@fixture(autouse=True) +def env(monkeypatch, table_name): + monkeypatch.setenv("TABLE_NAME", table_name) def test_handler_handle_record(repository): From f840b155cd84980750fc7750443beb4bc0823a84 Mon Sep 17 00:00:00 2001 From: Amr Abed Date: Sat, 11 Apr 2026 10:53:53 -0400 Subject: [PATCH 7/8] Update README.md --- docs/README.md | 40 ++++++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 18 deletions(-) diff --git 
a/docs/README.md b/docs/README.md index 39c18a5..4a888fc 100644 --- a/docs/README.md +++ b/docs/README.md @@ -5,7 +5,7 @@ Production-ready plug-and-play AWS Lambda Python templates for different real-life scenarios. -The templates apply best practices by using [AWS Lambda Powertools](https://docs.aws.amazon.com/powertools/python) to for: +The templates apply best practices by using [AWS Lambda Powertools](https://docs.aws.amazon.com/powertools/python) for: - CloudWatch Logs and Metrics - X-ray Tracing @@ -25,23 +25,19 @@ The templates apply best practices by using [AWS Lambda Powertools](https://docs ## Features -All templates come pre-wired with: - -- Clean AWS Lambda code following best practices using [AWS Lambda Powertools](https://docs.aws.amazon.com/powertools/python) -- Infrastructure as code using [AWS CDK](https://aws.amazon.com/cdk/) -- Testing using [pytest](https://pytest.org) and [Hypothesis](https://hypothesis.readthedocs.io) for property-based testing -- Workflow automation using [GitHub Actions](https://github.com/features/actions) -- Automatic documentation from code using [MkDocs](https://www.mkdocs.org) and [mkdocstrings](https://mkdocstrings.github.io) -- Packaging and dependency management using [Poetry](https://python-poetry.org) -- Code coverage using [coverage](https://coverage.readthedocs.io) -- Formatting, import sorting, and linting using [ruff](https://docs.astral.sh/ruff) -- Type checking using [pyright](https://microsoft.github.io/pyright) -- Pre-commit validations using [pre-commit](https://pre-commit.com) -- Automated dependency updates using [Dependabot](https://docs.github.com/en/code-security/dependabot) -- Dockerized development environment using [Dev Containers](https://code.visualstudio.com/docs/devcontainers/containers) -- Documentation auto-deployment to [GitHub Pages](https://pages.github.com) -- App container using [Docker](https://docker.com) +Templates come pre-wired with: +- **Clean Architecture**: Separation of concerns 
using the Repository pattern for data access +- **AWS Lambda Best Practices**: Integrated [AWS Lambda Powertools](https://docs.aws.amazon.com/powertools/python) +- **Data Modeling**: Strong typing and validation using [Pydantic](https://docs.pydantic.dev) +- **Infrastructure as Code**: [AWS CDK](https://aws.amazon.com/cdk) stacks +- **Testing**: Comprehensive [pytest](https://pytest.org) suite with [moto](http://docs.getmoto.org) for AWS mocking and [hypothesis](https://hypothesis.readthedocs.io) for property-based testing +- **Code Quality**: [ruff](https://docs.astral.sh/ruff) for linting and formatting, [pyright](https://microsoft.github.io/pyright) for type checking, and test coverage using [coverage](https://coverage.readthedocs.io) +- **Dependency Control**: [Poetry](https://python-poetry.org) for dependency management and [Dependabot](https://docs.github.com/en/code-security/dependabot) for automated dependency updates +- **Documentation**: Automatic documentation using [MkDocs](https://www.mkdocs.org) and [mkdocstrings](https://mkdocstrings.github.io) +- **Development environment**: [Dev Containers](https://code.visualstudio.com/docs/devcontainers/containers) for dockerized development environment +- **Pre-commit Validations**: [pre-commit](https://pre-commit.com) hooks +- **Workflow Automation**: [GitHub Actions](https://github.com/features/actions) for CI/CD and documentation auto-deployment to [GitHub Pages](https://pages.github.com) ### GitHub files The repository also comes preloaded with these GitHub files: @@ -152,7 +148,7 @@ make destroy STACK=stream AWS_PROFILE=my-profile make destroy STACK=s3 AWS_PROFILE=my-profile ``` -## Generating documentation +### Generating documentation To build and publish the project documentation to GitHub Pages, run: ```bash @@ -167,6 +163,14 @@ To serve the documentation on a local server, run: make local ``` +## Coding Conventions + +- **camelCase for JSON**: All models use `alias_generator=to_camel` so that JSON 
payloads use camelCase while Python attributes use snake_case. +- **Environment Variables**: Managed via `BaseSettings` in `settings.py` files. +- **Documentation**: Every field in a Pydantic model must include a `Field(description="...")`. +- **Repository Pattern**: All database calls are encapsulated in a `Repository` class for better testability. + + ## Project Structure ``` From f7bfe7aebc856bf70d919134019623b5e2d400dc Mon Sep 17 00:00:00 2001 From: Amr Abed Date: Sat, 11 Apr 2026 10:56:22 -0400 Subject: [PATCH 8/8] Add diagram --- docs/template/diagrams/graphql.png | Bin 0 -> 27534 bytes docs/template/graphql.md | 2 ++ 2 files changed, 2 insertions(+) create mode 100644 docs/template/diagrams/graphql.png diff --git a/docs/template/diagrams/graphql.png b/docs/template/diagrams/graphql.png new file mode 100644 index 0000000000000000000000000000000000000000..098620589b8ed27d80fdb780835da21fe81bbd0c GIT binary patch literal 27534 zcmeFZXHZk!7dHwbqS91Es-h?$(gXxVq$ow{h)VBBkrs*&S^z~liqd-%1f-V`N=QTm zr1zGPM5LD_(h?w)_lW;{$7k;M_tVXcnh}D%^fYi|%2+AMR{X=7)PkhX<6#`=m!FUtp&_W)^c-*J=o}9~Or-r?w|ZzrSyE z-5kQbo*Oen@2>1E-3(iih`}4>5gkg_eQQYNgNy8tRfYKdrHze9_?l+gR(i+Y=;F>- z^b$`9i9~8^Ycn)7l$4b8_xC3d2rw87jYhwG`Eq`KzPPw}U|=9UJ)J_KsHv%qj!#xp z*O7O3JG**gVq!ifrQ?47Dk-bF_dvtlJ8)xjD>g34-q};n&?5SMLjT~fxBu&BrZyHY zTndWH@C4!`9m9~YXk_d6gp|yM#U=mXxAhHh%Km<4ZowlRgYf7$|KKovW2?gAG7^d0 z*o+(+8G9WT>kA3BvU63_dfxLTt}DaK#lv@NYing?1&6~84-Xd>7P`5)0i1vR`ZYT{ zyNZfRLPCPEvGMos-$g}5p-|}R=Y>_^F}026BNeJh+RKlDf1GiDZ0tovC3^kOA8Ox1 zMIYcxx>xGjs&p$S>6xTOPhn3;QBgT!)m0um^F`rX&IF*l(^I=_{baG;E>k9O^-hY< zdl^5OEVRcxn4F9Z2G%^XoTkp7me#9fAI>W%%3G+Yc=BwIp;4)l5>-1HvbSveQHlC{ zxlPOdmz7cjdSJi*|MP#916Ch*mkXYfX-*Y?cv@s&ppzr(Mat}$l626iEX%z2oQ$D< z_G3UGtBL1!rrt`uL>+T3)uOI>gYUv{Be!T)qwmbED;rKwR@J4YU2es5C_6rEM;e3GPe2b-ro?RF@OD#jq3)M}#-OUi># zg=bpIXUUai$j$}$CsAA875~qB-*(4@HgDz(u=XvbhZuZgJwp4xn&kuX&?|aL6 z-SeWU2Y9Npc{YyR;8t5T1Dr*r}NLT#_#0&aiPC8g55Hee_ED{sA`?UwJ}L@Ly@1yb-o1`n-CWl=9s;=;XfD>wm@ab=LD?ab?Z( 
z`l!;d0f{!|OX2?#evwV|a?Y0QjVN3py|*GQmGZxYx!(=uyAs%JIyf5@b0 z#PIJA9Ql}jXUm3TB!i+HVytu7E`|QPwn@2j_4;^eXI%y9nQ&H5q5g%FC!T5jt9|8@ zR;z`!X3w~Ceu1kAi_{-<|9M)ql^It!MBfP>$f?O6Y;X@OAm3rx&t3ml@IC#zOLYi$ zS^Rur`RK5$spDJe`p6w&ES+}IAI-B8|0@Jfn;6+|dbsG4HTAQ7XJTphjV7u8`wKVG zM>MWB2Ds->$3|?z+TXesX4+7_kNM{~RwYp;xaZcU6VXPvMz11z0Fx!w6&$7i5k#v5 z3lt&gse@tl8XLKpN+brp$Ympjsy+R4)&8f6!s4aNG;qm&_81Gn8N`kf(Frp^cQxzZ zjr^iwf2nQWU>ietg{91>)v6^laHMN`BfKHni}z4;0+Gi*F65K=#>MqaqzuyTCBKNd zpk#X!UE@}UF~dzxYc--47RLAXP>gl|&sDVJ%Qsqo<`)Jf>aUjte7y0-m*-CMg)rjy z-hyjeDR#;X>8-V4NTvyA(Kwbt{Do?Sq{nliGRUIn#Ri_ZAK#eZ%;_3}PTpYqK-!8H zKjOGKu=p|lgwD^w-5}~cw;xi=xz>nh80`djXUmO%Po-V=JgvvK88B8J0A6S~%nE=3 zXrXkhLCNtytek%RLhDgt`Bx8kyG^qev9E}>Ins{&o3djVe0f$Q$lAfS`CG0fhM8UG z<}sT!yV3$zYsVCf7tCf5k>soriZS&K@wg}Du7ycWwT+F{gett~FO1JoOLb$~N+efO z`~FJMSNDg1wLndW%P0p*%DSL>36(ZMAW=CLR_S0`(yD@N9lV?m#!U>?2n1C5cKMA1 zQ$Tk<{=G#HMcH2_m6N*MahccdVJHhm*G}lMsuqIeo!sQORxg$8R#cXQ98tc_ysOHT zT+a9Eh@h6EpEI92uQBsrkw+)-0C#GPZ{y>EHMA8M=*g<^xOw>C{piL~ZSy@hkSm*d zM<66XNDt`>@H4s_y4p1o5tmbFw!gkfhs=jmxh%!_jzs7UOgUMuI{96%O($1pvZX&d zpI|bQOu#nE>YhTZ%3wyt#VQl|>%ptr+Ab;Bt^ zX+-|*!VZVkAbYAOXCp_O?jDf(cFAVV0h>C`mjS@ezlYn~ZY6$}Sx0Xc0S1JgO?%|i zq;n2CTejSEQ2?&JDTh<(WPmzLR2r1#x?Mt5TZi>NxR4+~muoHfgbVu; zR;p1O0T@BOmM!_9ojckK1V!H~@_V~Nb<8@q)79s-kV5$o&xrcQHpreQb}XIea!7cV zGG?P6RqQS!1<2{OQz4PO&k<00MMdqc=KGzogAnSIsB<8!8!YUkPAV!;uC5U#l;VB6 zm;T+i{yg|Ah^cooH;T^t=@%Q!L;-P~X5!NA;TfS4$#nA^!vEK;J2l zz*bViu=y~dO23$EC9_XIaBckku6e)sXS3rA8ueydBOISCUsD4Uu&z&uC^P}s!uTtl z`%X(T_*M0cNwqS1e@3B4?$lj{vWqL91G=;H)qG)VgxcStSB|BXxM;}SZowUAIhk`* zdUp4~VB^bsfb~it(%&<33@cMfS+^!e=C%~V{qi0d2_~8hKIv9)a!w{2-_7wlzK(WN zW0f}Mgjq^^x9@zUCAGB*0Ja~Z(kmB)p0&1f@J@s64+9j$qyw+Nl)Q6VC``YtrtpQ^ zR2_T(fFw>;bIe-p`3~cRku1Ys%pb zc|;v{%bKt9EGezvsJWuTj~*XtFOD>9`5ohLKyv!C>p6*24$>rl(TGt-h@nRIJAqn8 zz)#?&zNmW9$&H9Mm`^=P8OK~-Ow~2|vvc>f$@ZYwGI^R&7Qjp!l`1irC$+q~6#bZw zQ8wL1fkFw=EQHW-y~lYAeG_F1s~q_4JSGu%E)703I(6d9n**sUNtqyKd?-s81BpXt ztY-wVb>6J{?59Mt0Exf`6LP{ycfrqk8D1i*lLld_i5@-sdUee26KcvTKhnS*dG641 
zgh-kU3`qgk?*_ha!GeI`> zDhG&EyZX(pqmkG@7m%ag0D{I+#K1+CA-ieHdC}21s#PEY?PW)vXW#Fu%~#GGwB(KK z4JtUE4fE+Y>H@ax7G zRX+XMj^DjrscPWHbjrPEe;Yg(7t>PeHl@LGfwNZ@|ygsQ-R-Xo| zJf(7|bI(RQ;(9S|tA*3XNV_jVms$}QF1^Zy=$#L<>oxsu+OmydCO|*z{lT0%+Dj7} zHwPR~!hZ0^3s-QI-Jrg!5dG59Y~Q3D&9&;hD-L({u6j~M%)SK{JvLC$-IFxld*UzL z{0cCY(BnnsP_drLt?9y(Mk1yoZy&|C(XJE~llo2)boBi)xK`259GZauKkS5;lqk8p zXPYJ~vELCzNv1emWG7)tfJJTisF5+^)jYLTU3LItvKfwio7`>Q}3hS zgPF>#t~ghc1siKtYgNdw9a6Y!zu;9JF=mQOn4?G=Apbx=`xOyBu%{+V*QhsC;z(0) zMq%co9m8{)-uC+_nOTGIUD`-%lME|Mj=v(c5nY&n8D!D2eXw=s0Q>IYyFUyS$`dsg zE_s|l%eC0UM2^Y|s^lI>pn2?|gU|4=g^cX*qE|qls4N8kF7fadsq`LMrib=YWwt`# z{?2?G#h^%D14%XntRK~4sS&+(jg>NC(4a%D$+t0Bl4GOI&Utsl#=uie%#KE6~Js}(>`>mL~i$R`>??BN$xbnpS< zUXSvE#D)}%PF6g-rm1$l!7hUG_NzoUz4uciN_|4gs)Z%m0*8uapzKAUPEzW}O1wmm zr55d&a~5sPxPHiK9_PH^vNx(A2A6rNNFJd3b55IBSQ+hQCVqNp(G1}o=z|9SPjEd+ zRVc{yGiXY~2cqra?231zP7rcSxyynYe+>VJy-de;o4b;+VojxAfY$>EuZYoc8jn^AopR$l%Gf za$*#xofPAQ`qRzyA1D8VD%Vd{ip$+IfSllg?dFMO`Vk$pZaU9cE} z#xBE%Y0jzO_r_I3i^k{!+sR?2JGfbq}2?|i=rYL8>Q8exM18@lxQP_nIi2lyw+UTXTO zm+P2UI!!(SwqeYl2{cc<&RP~QfACU5Mpqjju-x-`>KEV%r-z>YkR;oImN2cdj>lF0 zlb9%Kk- z4W3f2oUDJDB6$iQifS}UyV*s{RWhTHupe~kbd-qD<{c&Rvo{kZ%L2gWEQ$$Kna``l zG%18Uh@Rp&XJPWX3BKD0C$Fm2`!p26pC`u`*?&h#s(HXk?HY=1`#QXIEnoYR%MW+l z{DlhR4bJl+-vG;QW*z`GW!Q3?%V{oue@FX==cdn!2)g|GG3vFC=fg802CWD>g5dB) zOU_HbbnzMM8s?Ms8o-j!>-ImB5U0PrXjPk%tBaz&RfxZPLhqzd7|LMLB_OzhmTLps zt2iM#xoGBM-iaKq_d5?9JG@-A#Lv$sWBrR#7IzMWJ%BIG%E{8*u9-Azd=bE@P?YR{lk)BEi;G@JZ+{Rjw`7h$>6qnw9m08 zX(RZga@5#IT!zm@!a9bN)W;*p^qEAvXe71*cnfWwUezkgO=Y}b2O46+^@dVGfh2AM z%aA5N`9b-l>}WlBJo$0H=PDMz|ISO^y6!Zo&?v|I%(1&*@b#JIoe=@6o*+GYdn zNMA%cm>altN0SWTy5!vqo;b;QxT5y$Ed>B}_=tm-ENT*CZ&RsOC*BM$Y`-jGZW+Yi z;ui`D`(u5asuFJ&S|l%Tjs||+nMO=Mun>n51r)a&N zp1lgp>!~el7yar-2}F^Vd%hgtn*Q}m&w4Wy?Dzzo1T3|hLwboaM>X+1XTa2YG~=Am zE7{GpZc83uuzm%?_M&8FCt#V2Psv5-BkZ#zjo&TTTYT}wRere`YJQJ5<*DR}a{W{J z5IrGlM1mjKK0#CRTXELV8^#1$x(2TdcjGLC~@hoS6`X>Zra8$~ftvLxW@{+t7`r<`-9 zgKy>s46K1T@S*nvMKDp9)>xj%RomZ?p9V|MeE$}ic&BscDs?`fGv**->}AZMGzk0S 
zzKAAe+*7m>e;M3qf94E|?zwHDo>SGh=jFVH4o7o>AQhMbsx>bp%hy*cN}C$%_7~lwg@qSwD{}YL%2;Cqqm8skPW@z;5-# zruxZPJ{YpH^lTswSHGn&++oS}FT2>CVZRu+IS+O;>uz}~T-+INdu9dm6WyjY>{3dISkc6PF_Srzv*34#S9J~-dp0OG} zZ}d)L;vhu#cI7JK^#V(Ue-_9^>dG3+ALtb3rw@vwC_gyKnRuD@qQ=0K=YhRRub3&t ztuf-o`*pW~`Ci*&4SV-M-e;hyA4ML~C67VjQjHe(Qg~++Mp^=g5G#J0Bn*Ml1j=7d zWM!Aj$b~%Ey9lgp7f#Wjx6am6fM-|-qdH+k1xN)2%XT&(tm$$v1aFbQXUFvOi#Rsm zx#^$K+{#vuHr$By7T)uAmTnj&y7&CEwqAoX8LL5G>ANVHyOvwZ7IF}@8%758N+bOg zc*X2O>kat?_nfXbmdY9#RYsDQZQTd>woae*H;B8cfsBXybM>(@ZmmHjxECz?eXFGw zk13S3I@o4lVkNDvh0)oY&uYRjli27c%Y|tLsg8_~JMFsX8=k(>l&NFB^&ORab~4>0 zge#nEQG%p@%Q%*15JhuL%p%x>H=5i5@pzB$6#T*5#^J&Lg!1N^f$YglpUa>*^QiTxwES>yC3)`&{L))0+z(jbvkW120wovIS_1@R64DC|%|2cb zuMx{%8HEt1ShCL^_3LfBqLV6=+$&{SJqnxAGQ~oH>wc~A_oHg>0h$N@!0yO1q!1Cj z9f9ojH&t0)Fjf=_g-AGi>3voYx&%PRI4ohe7 z>I2yFyYuVOgAYvX=Ts;PXqj>?Jf*5idmFd!y?~~@=yd()>#DP}Bz5oEk@T$kd|bu!Wiyw26VX78En|CToGMNY6*B+b=91L=sVoO>f5#u z6-p)$#q_?NaP6(CxMre{xtIzi@Nla$_-OIy!=caluv-n<>ER|9Ggn>}9w&CJuIV|I zjXT+7wF$AVjXZqignz?8BNL?x9!?RF%*fz$!XQ~8yjPLZX^`FSC(l%fbE+*qilrxpa7wx?)FwDMr zQ(3l6L;AUOpbpE!86FlaLa$r+ZX@R`o2#~4uJ17Ih>w<}@o#4}qyDhB;G=b(!(WO? 
zw1WzT%0%$u8S+aM86~!4d$$5jnvEiycN(|!FW|;+?j{(<9WE;#dJUu7fpo^$%QMqn zG3xYO$&SZ27fPBE{r;OlOhUSBoP>&?tP%47^%-13EmkPHKo$2>4}w8b`Q-jiP!1O2 zzll=(9jWm3TV|8h5e*`1ohRz56vLh`=NuLt?fxN9 zFHd?=(M;7oYrMyE$WbdS*Vm` z_KA(pl6_73>%Ugbku>`Sdv{o;9D_HQULKw1H-T8bjSAJLaBoruns&p&eM)wuUQ`W4 z&GFsJR=;O2OXFUFc_miph(qo#B9{hiJ?eBB+x(ID6=*k&L+`PV_rMOFiMT!1=OYN7 z!)}H#|7qT=`rp!bm&%brDpIjx7S}h^)dwHKlgR*ZF0t1gjg!d_DTp9ulMl_=D!t|| zyX0Sx_>LG{P918?@^B(@m-TsZmUpS-C!3<~AN2?Wj$>2!tQ=yokvtju6&G*WKa1Fw z2fRSG^pp62P0`Pg2gogg?Qo1``|ok^(3!?F!iglvCiCtM>`@)nlr=AEF%_57zS5@G z5lwWs77B6TEsS=@HJ)UPwUMA1d@HGrB2NS#YOYxee;_qJA2bg+Us*7m^agi*rlv?Y zr+@@u&JX8#6HVm)J(~cQVx@hRs9Bfx3{SaLSvCOM;htVXi?v>cr+|<0SVt7f;h&Fc z_MX-~JV-gZmD0~j1i-lMVQ#d`@Vd&fd7wZ&9bt8RMxhWQ zT@U1fEF3S?4G@(c;sdVleJsH81y+kR7l3h>%PHJf)5(U<^?Di`~o6mB(2wA z&!7Uw+EbyMLF)SV6JGTK_D?&~85g{>8La7lA?6bVpWCsVv{g<)&(6x>rO2_m#8>z6 z0X*Hm0&tjT{78_6q+XcpN4cTC~tp9qiNy)eWrjBZGJL z3{J9Q`CE$lr62)Edp)Megl$`X91?=wj%c@+|J6xEC2?uylfo7Z-;nsqe-2Lg-qacv z@2RQiT`#$gO8FH?UkJG+vAC4}rNGhxdANb8ET|kqVqq4+XA=7;=9{EXifuEYH-E%D zZ7ynAS0T=F4YVOno?yFE%-l?*!3P{o$^RZ%9LPai#H3>S0=#8B$HFIGVS|#uxo?tr z(D=B+^&G|E&7KS?1F5g~DK|iY)h-gj8YPC;JVP@dtwNt08Z9d zj&zWT2_IitaK}(51_ZLlV1p|u>T|z?55<#^N1KW%c@XtolZ|hXlI2uw0Kcz7i8)96 zC4yF>mn&bFePHoRa9W7eD!Ky-D%;#8cs?5e>TNjyeK4X*kl-!GXF#MFj zZh_lChV$eSvGYi-#pwW}xNl31NALTY8_Z>un~wL$jj|IvJdBQAn{tSOY;wz-h&oN1 z?x<<7>2t#|2Ukc7`9+6p+lphhe0DI!+(+B8sp!X7YJ=!n?MhqqR*cx) z;W-+ZMx~@y@QF`5#J`_%Yy~Ogkq(eYp9l{3>8fA#z2|H>f9Snr5<)APW+xUu2d-SYvHo^^7+PcQWF3%I`w zV7YKqNH}7R-aKL*Lgav^hj_j(?4>`KUD-NWD}*~XZq5<4Y3N0K$NK$GcYE9|Uprc2 zEOqFk4CR1pXnDZqh^zR}XtmuDXLVKct>82Ar+7u8e;zYLKwnH^XSv4^RO5#kux9Fx zB6aPUHS9@ys7&0r>=iGDvgO7*OFzMTv94&=#h>ai#|7{I#+MTf@waa-Ezd+$)lkNE zq2!n!LS|Ez=a$AttCPwg70m~FV3s7kDbKC9LzRli-3`IhHJwo1pm^1TAYo2?Ms*8E z`5>6!no$97spyZSJ0nyDWYfR2KFnP+-0<%*jjt6#IvJ;QP9tpjA5iw^p|43TtdJDK z8(h3aV&KrFlX7oImT^IX4R+tE^|z-LIII@YeG1VO}TRA91NM0{hjo)_R0Fr4gLL+} zc_Qb|ZQUaTE2>!}ZVsSJP%y3$M4S!BJUQRA;L7#qm2>9WPZ}k@K~l~V*}(Az8{tUF 
z(HIcRt&(h3z6uDhb9TlTzJ@nkWt5+l^X>z?!VDsolzf?l&ybnRDxAi@Kj$Rx1!#D@ zE7(ecXB=7r8OmYNrp897f~|Jdl_oP3dnE`$)~Nf#%RCMI(=Jo}ggRde*eHxLDhq_y zf1KoZfP@R|{H?^++jX0-tvlviGT6oLe}_C@#5bqgd#djxFGOBQ;0S5Vtb;?G++IhL zRBtUi9Q&ZxaC#E%j%+(3+329_h2G^}qZy9lGm zQ}WBufMK-3NQtj5cGSu;s6kBRa%~sG*ZUGD!ebf*h>PyuH?<$OvsABI#X!L`%o*gF zDIBlV{nO^b+dWJ^_ReXJxQJN$4|VZmTi?KjN0EaH>t0%xw8L5&xCF2yX=58-E_Vw2 zi>Km!i{Ia*y^%9c^SU^Fmwp{?OPTRRlq0U%CLeMu~eDwxmSu4X=?h6&dKyOnDM4U@L4F)m3_|VjI_5`aPLXe7ryJw!gz{k z#o}S@!E$pGR}N3B9gYtC``T#J&xvHVvOu1|{V5PfqQ#Twth~B)t+{9HRmXLIP)}`gfKKi4kF=QS2#x}vA#HPn zOHBO*gg?YfniQ42$CdFxB%u=I^NzSQKL{h^0J;*7pWv~e1s@2VUh~_@81z-N+LwKU zz3rY%YM~qs2|k{(bC&_3t81X<_ujn2VUK*-goy9h@aQbojVBFGULH^)F_cyRN%ydV z?((=>1<37lFL~3G!uNjN!2UQS2vZUa^-q2oj%*kGVC{)ih`F+FiX8`$CgiU-Al+An)E&`${~N>u1PwHb&|&L)Rut+&V1WKQlr!5 z!i766m0P;ou;85xT3~BZW~y01e2^QhW>BbV%@AL0Yty#O1FL~fxnD7GjoVHc`4ABg zR|vDYxNB8XgFq!LjEJ_9ukkXgssy2Nj}}MBzZ3a4ixdwj&la8g@*psxoojz}y)=tb zyZm+zUW&L=AdX%6e^Vb_JwO;1ZF$xE8YupH{gwIYH$u?{g4xGV^pONb>}CBZic=S> zXR%;t;x?yXow>S#rJ6cTb`1exW$xCANlvEl5HvMAT_k93vyA9&8wT%8cq#t8^%1pL z9_D_yyKP<)vZ14MLF!VbU>Itqqx|JOmttc(@A(ewZ1sFai;PBB%Kz`!lywY+`}r$Fw_y z0NVz^HhAS|^T7RgTgh);Jlg0rNXmuSt+%VQUSbe#jB49g#5xB<6`Ii{C0$D&*;J1t zXIE1&L?V_D6p&@qyA}rOZ+eg6p+C1GdDV2frt1ueFM={haDF**M*HC#E6-^KGRRTOpw&Q z97!5$W#dp>_ez(W75O#;1SZ^2$7+vYh8c)9-*X#j}5t-Mv9LXzKBQMbp5Crlw^|ThA&cf5= z+>2k^mO|u#_7;I0(gXLDfO5(&?C!p)rci(pzwX3D|F+O$J2?0j>W@+ zK&A{^rWA8ZlIGvFyWCfppTZfn-zRvWpt{&Kc2fEfrBYjfJY6zOa@HCsv>C zns=KH|H=NdL?@9-Jf=o{%>oOFJ@B5MlEBJoLkX6H#z!wce|SK7f7!J>5Bco+#;=+V zrD)wy$><;E+InpT9Kjr6mNmLJ?+(0ePtc-lGIaRHF#~-U8ugzZ27p1~%{3TFkyfr* zMuRsl0kkK_!RF!iX| z({k2NFi3T*GS9dL$W+goVi|R>%V_ri%~|Zs}2TyNIl)xi1*J{*Y;M z*qr1DK_i|dH)I37BVVe#ZbA|bUuLJq7TqDHEkcm~Tor|p(?Z(>5Eji0PmmPN=3)Yp zw$`U5Yg13dY_7VM&xOH1P7JVTIZfAZjQ1=MTc-mjuwg2D72p@MFB z!bI#iIdw`CKhoiG{VC;5i23SzjAhO=*=Odl<)`&Yxt~KLhDgM3w%`|QI~0w}>nj36 zz!k3=jZ}Zd);l0UPwI-ylD;5ZzFyPXWe^i9cJV%Q?hD{7g!lIYMnrEfYA-Z%)he7< zx%l(|Xb=%iMv*nL3P&|d(l0^_lA23+$*Z+Ht;W|Issp4bD&1&wp^7X2 
z#p+tSCIQfgXjvnU3#YN##x{PYwSZEbuSXRnKX0fYmr}&!_ejon?0S?#zn#doo40S| zu>guusxvl-5Y=jClIpzP!lvX|a`{`cAMa`7z;jyU)WVNyy|? z-ywx!wi)-&36x#~Q?s6U2ic5Csz(?(NmFo36jW@9>C6hu=aA{b>-+2 zF;7iEj~+xWrCK9c+NN*We0tZw-Q9v29!DQ~iYCQ|Sf+mLxa)sL?~K7&XnFsWdLJ-F z$0iDBNCEQp&S#1OY~ii@O0M!h-mPw2kntkY;(;3JxB_So+g4BlLFCCpc6P`q%sHIp zjS$bHnI0UEqW?!2IXs%zWWfnQJ~x%M{FsdIU7&H5QcZ$rUQA2m0P!P&xk zgJza5sFtkWZ$y_S-$#`tY@iz^mMc<#-rN}HoG&sCioXM`y!Q|dG`@X2mq4jV;|4W= zOx|;M>jN%H%1D`5+tT+ziKmoY3!r9yDXQ6n_6QYA~UDNS{TDyQGwxRfgaRCHeLVSy+G z%|9{7tkJhqbRx1)n^1aO$$Xr3r7pWA@LX|@%rxiJ2bhUPNi)PP*dFNEOTH{V?c`o( zT}9N$9!pE~y4ANg%yuqj9_g)UXymiTdP}^y8)HhA2G>-2C&t?a{6x!yT82e*nxmdjXdEbLaiPk%d;@bG9o2O$=Qh+d) zqgp~HIC0{|8MhwC_VBJZKYLPyp~P1~X;hsm)L~62UZWT&bbEW7(Lub^)q`<4ba{Mn z(e;lVOHCaa`5}vpIJ*D21Y*%ptyBfn4AajyCTu?P(>D0Y*`3#A!SfQE@)`2W-N!yO zT`*U#ourds3+EVLbnyJ&3ZJ${;vAfEG#JdSIK=*RO?0>#dNGNJO*%A8b&3f&2-oRe z>|z%i)dtEla}b+YnYO(*TK)e!&-#WjYPnMaN=$0nuYJh&M<&D9Uo30&=kcAQ)D4

+>|K3w@E+J+<~t>KunS_P2^5|Gf9?4XA}eU>LJ%(9Bv($#_? znmx;bBlmUa=ku6>-Zm%kXMxoniaibiW>dHxfCg6Y}M$eklJyO#?r_rs? z<(8>8h&`fw=T;@nhV21uem?cWyg^u5u)f(k!T$yhNGbMx&eg&I3S!qvmC2|JcM%VB zI1h9dQ5Ou@oleLbFxHxqG`)7=!zai?3&Rb1nqu~VFx2@cjqgl-NJ%yjnR=D9m7G7+ zthBo-!11!#tkS)II?OKlX!2Z<;!cq2>P&Grr+L~lj&Qbz(Yb3)3XLvoYkM5_C#UVY z%F+p#;NAsD41yQKZvlPL_D^BsX~sD2gwsH{MP;&(@aj2>Ldy9yAS_(8Wh3;6PA*AO zadXku5f=gHdE~c`bUmY50I`oj+#Ui3VMge{KsDhHgy*_dO4cIsSpZ#kJXyqGroCq- z+p2KT#O4{-4}X*pN97)SaTt+P2~v%2Y=LH_^bq}3_9}2qFb=`?EFi3J<<)~t+Wi6@ zQheV_{ORtN>|f|Hp>0(VQAt5ET(5ocWO%A6hhM1_=-(@Fg$NjoFO2s|eR_Vy8NHgZ zKGMR<3$%Nk@Xj#5Wbq9#PTX9h1$>uNj%kzWc?0Iq^}F(FuynBdIHM8kNq2^eV~Dry zuq$9`VmZ#tG=}b|qO$K52x2R_UWQ&p+y;8nPmh-SaO_-%w@cdr{+9aO`@VEWYy}E6 zVN9>xJFHMzb?vLTo*Bh{8inc-JS2T?%gX#z=Y8#%hGz3yx)%Omqrmfk+P$;p>ztUo zHx+A@1BIyT_V!d~deGId*T9snr)pBnHk;)aFRM(P`1EH>S8⪻{0mH1dy(EJSicF z8+)q?wO@Tf&j2+MrdFRoZ027~U=0%*~kK_F?T z;eTpKl0V+3a8x9S+#YqRK6T^ZE8K|h>-y6-B*ATTH&@p?Plr|ed^9k-ae5RF?%L7x zU4;GVRer;e+J2#Kl8zD^VIs!CivREhg8ZRphZ#7p^dz1nNCrCaZhu6|Wvrr?O@JuwJ0>Ol&1tfo?{l>~eV&z4%x&dCrpqXS z!bB^cJ7M`KFvP*3Sy&PnWmqppuiH#BKZKM&vP=BaoBw-oO`U#RJ;zjA?5kTTlxo)9 zGMbb%@cVfP>gE?nzw~7oXhD@QsBpJr)Yb2N_I+l;n(;aWa19)JZDL8&s=ML8W!tQ9 z%W!8_Sus_DX!PdPJ2p<`2YAy>owys84=7f~H%-x35b3LHR)Mf>%6-~A$e>a|uzGC` zGihFHSW{74QX{ntBJubo*gWJ`wPCSbUWV+N3+9&0!ZF8cNb)YwY_@GFK}dOo9p(vO z!mcWH^nJfwxg{&<@&EM#SCHk%wDu z-UP>DIDIw>SNiDTUQmu4h#r6y<%){aV}00emr%EH3%^^tW)f1$p+IXHBl)6(V5i9o z2h2tt2rUBgI(!4zqj*o~zyOTA5jGzsb|ogjpOpLE^b3HiZ-0nPzG)dY5YFnAo@C=F zf{q}*%bjwhGC1o!B0v9r)z{fHKXF4CU!k!Ndk-M9;Mkr;*t|l(iz`*lVlhwN&fJn&Vl&C%WwutbH1tmw z%+2SNY^OaEFrKk{f;`vmg)bPNjZgLj)>5jo#?}&2m;IbqYb|x0xd@w3;=VC3Nw5w~ z5_sJl*sMRnJgq-+;5Nyc;0%%b+f|@fVf(UFg8PbMV0wi|rN#X{c~7}GVoHEAfSvS> zh7p`)&?~P+;^l~C7*^4MqL&FL3|E7Es4f5@(R>#4eE+;@Gq~uEX7egDHx6*;W)fYE z#n+voz;MRS??VX3PJrjpa@%5@EnR(f3M?LoFzh|~ob<2IUp*(#yi(zInz* z8l&N0l&kF7fR=)5Q&PdoxQ)+efljgW^8w=tY{)|MHF9Sh&rvpMp|(-~5V(=-j%{!^ zBYbpHJnQt$wp-J%Fa3?embQ^>oZ*EknC-qYs7H&6j8_cPc;vL85ZL;)bQAG3?2i8toZz_qAGaQ;pi=9B-db(dxN!*#o 
zib@^)9erT!8rK!GFq!_|!&WGYRi)&1{V3=o)V$xkw2r7PrLFXz+TwLEOxk|gBl~!i z;a!`_Cqi^A#MDltq2*dlYa-{LY#8sYk@B1%|Gg!dP_El1#28ZRnTMN#&`fgTH80oq z_OEDzr6SxQ4vUSVN~{;pm3VvX;WIWP_A7Ak>l)MP&G&&${nxkNtrq220AB9h1JuA3 zMSqj7KmeT5V-;Ekq{$oNc_P2;Y`T39=K(PD zQY<#VbFe-e!Avl83j`$2;##gE*`kXZz;Eo+o=DUZPiboW#OfK~-28ZuxL00|-oqp| zzyJ_S@)TZS)nE1%1^Vv3g?$dlaf7g}ATEd8rL|ij?CBKbX1&{%@*+1k*I#*#81MI= ziU|ecidfr;+jY!W^Pj)lM1d9`hkD&}qO5`$tP=E>T%mkYb2D!Llb>ysP4fy(jm!ALc zrM-t!_I?0m4#xe|?_MYrH8rtkp(5dbzusZ)*mXT;Y!^w z&61tlAQ0xu;4Xjw(Z|zve5$yUv(steOJjbJo!tvDzIl0x`37K6Ip|UYX`$sIeF%V! zRL4VVJsRIB>EfxJ9DyBsxz)g=jf_13jQ;)Qk3A@K05<-~{&ybDgcmz;vfb&Y2=$Gp z^>!Ge=H)WX<3?0gU*GmN)b;`(%v;CvvsQkMJ~}C7&QRP3H^`zB@>U7~Not9ICsUKNH;V?wIDf~4?e%8?y!y{% z=U@~2c7HT z(doCD8mD)%0=1aKmJ(aip+dq}k_QlpD`i0n=^Q1wmudCO36sO|HTmCd z@^4hsZ1;koKOKp?V^3TE-m+=;$G4Xx)h^FJuhwVJ_5AlY zk9jrs?N`9-=`%0jJrknYeYegmhtd}QOUsMlai6z6w{yVP@@LE9RB!{9>46~z$#C;z+> za)vLBr=L#XKeA!ynsjV)cVSx$O;G-qs7q!R;u?Jy7JJWb?*3Hy&oX=|VrGp_wtKYC5<@rtLRg*TQG!{Bs zspyXfrKzcuvN8p#=>D(%{{{cqtcvOt${R;bMO9Eek#+uXYWyg323{p#xun9hOximL z0#znGD$8xuGsO2>o>LOH(JoBn?Msh*Zt7`TrsJ(!*rLeJP%3mq=f=Db(-`K{qo=`> zFL3$3u;l^HFLrlQhv*Ndy$%zlIw01<19wwfZm%CLG0-Wrt=0d$bmwMK%a2l@m7q6O z1$W0P_jv{8uPhDnd@m-HXl;vK3<S&A_0$(`0#sitYN2b( z&-(O)rbW;*hUVZB6pV(bbtkhf3OsNqTSghtqh!5p%c87y4qp@e4$9mu1I8aST`s(z zNlvDi0LXz1*FF21>(~8I<%tZHAQ5i+egD)Z5?|nUbsJoLbiHh@e1>6CQm`Z;_E3UiR`IDy4wxm_~!; zMrLMwkvx~f30D|9KXv^sHXibKRNz3tWJ0pGb2d(b^D&(5rtaJ~F- z(U%T%=q+!9=gb0h7=veZr0@ifInVfucsV`rT*?fz-C*z*`8|n+Q>__|Q_RGK;V`l} zJscXICg@Q)QwtZ6|Np7(JA<0+zI{;;L?8$VNE4(gh!p7^K>{jW6hVkckshQ=2#BGD zCV^Lq^demWNkB?Mm7-EY3nq}zq=zO1B$Si)&fGim|8PFuIdk&mnLU$TvYxfqUTd%4 z`bC~9f0BPJPrRV9dEp4zCIMsy-CrKrWCNBsnWL4c0AaRX1MR?({7rMCh9gxrnKbH5 zEyp$D>i*26gg_$kUa3+^mNkm~B$j(UyyaB+q)MlC=T%MBu3Qd=?^ z6sJB9biIBUbX~Dp%Zd$X#$H!ok!i?TZz-=@ zry0*FC_ITXv5j97eLJ82MC)7b{ofUVAD&6!W!Ru`LAr)g^?x{C_ft@RLx@R1VD(sU z%JrE#q2LDJ9ph;le~K(hqiaVU`Yc3L6lc)%;$buiUZr^Ls$8-5?&m^t!AHrWmldC$ z%$WJ}V>p7@()GWOlTP|;zdeun0--wLZVcBxfa*QE}9!a6|-Ea>m+ 
zV{M}81lV!Z{G)*D{!H-9L}KbE3cNL4_HpZ!{E>$--EnBPOAt&92 zfGM0}$OY1z9UBYg%o8878Yt}6&h1j4rz`&)Al5t_g=qt0qz@muwvGV-69bN#S6K*kxC64$zZOxB^9MD(W*2v4a zf^I)iS>5|mukW~H+eSRFa810LbX@({=e9e-^hYFPUpz_uq1ePYn6>H!dzkN)j@mFs z8&8OlvbqIZ?|Rt_{BrW+)lWAzfVrM4W$L>!E^>1^T)EO;e^(O!K7>6edmPk0%vaT` zaD`X>qV6)HwV*3h*Z&C@dI3ND^&a-O5ixJ&fc3%MROwZV5C5dXkOS72SNeUPY{pj{#C-NdT7wDJxWl&lHHwZ2PFX#a!U*Teyo=hkk5IM~ zPC?C;0nO3Iq36f}{4m1v>E4CR)!;VYk77_CJRy-#o2QRjlP zLQ~?7XXBdqiP(&W@bR33vdSNdUc{yP?0&<^EzF^>Uu4Vx2*mcIiy>L^DHX|Jz~uB} zHPLSBeLI$baLTIFPP&(M7oYh7cZa!lwkdi}M1k4pI(a>zq)l;VBtds`gGh1myPHN) zC$V<|9$R0pw_DILj!3B!kIw7HY+nCN_Zl?({YP8awb)6G`H=UXV)d^IwJmWiU!kl5I<;&NqAw%UVaO!gOoaoUtRW0M@_i6NSO%)2(VctM;N)+Bw&H zJZ)#y1)bU>JuSljWqu;5Z=)sHze?Bn=%Yd8gr(wKbjPaRdX|gCK@0bUG&Stw#seu$ zehCmtERqbLF_PTk?idd(s&&S5_=?O}^}^tZa53 zjAE}F3ceD7J&2t<#+lqbtdhM8L;U0XLO}mS^~@zzE&0Yk6K5bk3Jj1bu;?I zrLfB;tI0&BTf8%M8RCI}PEg$L1Fes}Jyso|aYs@0=qRFAmomIXp27q#DKIg6KdsjD z{hs#u?6e#|aG~oxmfMOvfE6R>G6WUit6Klt2&}Lx>t|eIe|K@S70>7sMIis(zj@ea zS>r@zgSp?@r699*O@@Pt(-VrpAl6z}VEa2WJ=SzAlaz_#Ui0FZBX#%MyeP z1uZUSdb3Boxz(=z^EU1Y%h&@f#^7YbcQO7j-FO7ElCM>?zGe;}+q=0OojBG6Ts+M( zJR7NgU9L%%^3IqXd+UQum>dFMN-OQ<2B;>&trV}@Iog0jAW-Gd_q#<4_$FR7+5(>L zYbgh*D0|H+_8{BIWMuh|hrU{*V4&^@s_03c5p21Lvi=}|*p={;4FiMKu9E;hTz3}Fo2Oc-3 z*pK%tc>Lp}S+CnTAXWE}3eQZxg8yissku|0MG@yj%2C4KPNqeC=+H&|Kxx6IDpJg zb`woUb^hoRw_*L-GlDZ?kChbrS!`0D%}f}!VC$9v1<{~sZnj?lGK#Ot~caq~GCx$Wr zKB`myYLd=%Tk56>)pqopYdTJyfl1P$AiM72A#1tRGgc0r5<{ntoT^3-TAgQNNqC-cyg1qim*TG(FoBwE^yl1BOz# z^Yq2Li4kBCsSNg#`lS@dZ`yztpX4}$d1ju5vP04Ifr*oA9c*Ei@DP| zd+ozj6I^bvaGha~CKIA)inex!`M59FB)uCe%WtL&St^h<w2+-k2Zv27*N$`o zsloV{h|jNYfEA8>r>eXBF9)b-OE0@I2pd|hzB{t3&Nkw=^omSH4ylelL5|A_)jvqL zO%R^Mp6YppF?HTVuh{GhhntZUH&=9M00M`<@ChYEX-q#IywtM*GnoE~5(9)=5G}my zoz_QW|BT$HPIJfkzqw^jJ=$gX#mNL;oy3PFCU_5(f#}Hg_x&~FX7^plW23jsT?<~N zj$e_qt6f%^NO63=ZG@pzqKjSPmQ!NM9@sQw)op9)>%?o;I)QH~LLMXqfA*dp``msA z;$5jQ`N5ov7=w%+!`!3G`iV>}AUuBSYSWk4hAjLhd*T$X3A5u9THG(NnTh4dxnB+6 zF?gj(BscC?R~6jp+L<1QhWVI{>FmS&%mtP2|8ipxF)OB^Tx3=*CEvkeN+~07Wu~Bn 
z$y5kzg%=|8HSn(L5^*N#>*IV13MD)HZ5z>8?R#ijw!7pa&Pk?cI*DsajMw9H2VDhZ z*%+^?p?ikh%^Qn1jklO@p=dWf0v;h-Av!`QrQ5ZIRpur7JOX4d53--uNV!3;Z~e^u z@Fr#2b{lo>7!X%HofpkX%?!LZ4h8?NeKxRMnQf1t9Qkr5g2xmdy5C(W6j@erJ5%-1 z%z-8u{`^I-eLf#33Gr>%2Al4`rV?#t4tt$6gA2Z>lzSF%;vA)nj~_16FMTBy^AA;< z`Dg}82Ue41bG|&BEpB2uyV7(kWNXiiq-}hAb>*9!lnGXcJ25wq@Cp5tNr8XSv%nML z{BmcG7?pI(?6_=AmuIWMw?yL-VOu=Hn1yWbxOcTtG&jt-5b7JF<>!_EeRj=8LACZr zk@pAk?AxloHNk=WZd+=~bl1_z34+2hXmGMQs$+HEhGcCzd{Um-pgwn~L=FggJsr?4 zfxOrroCsTyjok_^Kbdp--gF_?RuUF^o6%i)Fc^r`=nL2ao*r;hx~%iXV0>k|_1 z&uKp!^sIsokoA>I3)T1DHihATr{PA+Jqj{uz9IyN=w%&cHQqbc=*6*;$M}FiV|M2}7dxAS zFopL;VcwNceNDl2vivg7!dBVKxtMg|?z5r#3cdUB#di)wtM%|)Xn;^JB-dl3G(#}k zpOAS&qN3ebXe${#Hgo^LenPlSWX*Zn7{0;bO4SkV|pw;2< z%-cU*49Uym&lk1pb|03GN(`>Rs?A!}P0!Rq1|r!b!~rt?YA85hFN3eKpK>F9<&r{4pbVL$q>vVCwxHq)`Bq-6&lxUe;Il`&nv2! z!h#Y%kM@3fnqWVvkv)_pN%t)tns{W52Xr4jGo+^BVKme$bns`&v6151uis>^3u4Z& z9i(d+w3DyFAZ$*-r}kc>JvQYw(#k2=T^Kp45%=6K6;45EMD>~C8Szpd5|6XNwa}82 zlIR;zDi>7MZXQ4I9pXVIy&~KO7yNJ>0a!1*E;*@;NJIb?H@TK=r73U8Qxar9Ku^sY zkliDM%N(z{wq@R%ermZZer~z+kRm4(Df$l%)6$?wp*mQypf!j(8d(nkW%WOT%Np(g zq?mv+p@S2zKcNrtd@d8rJMaB;iMZF%oVYSs(BOaMtMBls zc~Hlp<`odIDjossfJ>DJjCc+>?!7D28C=7KPM!;LF~BD(n_8Z<0K45|#tEjH&FKF5 z(KWAIuiT7mw)Wy0f*~UlDQjgBL+*E85u8fFKQZ6NrOx~1md0LB0Mco|5f5o>*_TXa zG_KKwOONP8+B?$P&M+T2I{%v>Djsh%MZ59t)AO!Xr;E32p)`-pNnJ2wprgBS-+rl! z?v2a$3Cvv2pkr^u9XdL{@C`^Fd#BDD@B0%vU@s0g$~n4@ow;%yDKQ2;z(-Hgm34Z$ zHw#PgLrfh>11@RP7G(_~-jQlTrbZ=B?*a{JBP8uf!Jg7wb%+1BXaE1$=rH-HD#*?V z!!FKNICwTQr4NX&pA2DLX61FJ#zlZ zz|6}d80#9u^y`4ZLYR3U({o86L#c>^?rR{^{~hH}`2tI)r6D5hH+SBzR%1foPg6(S zU8>RMud%}zWk7fiE$iM~2&LMa&8x27dSdmJoTQ=;*j~S?Lx~Wy)E=VHY`*yX@df6_I7|}EH6E&

YbzC()qa};~oi0d$xfR~l%}LUuWf@o27pi?iT4pNl0O4tSL{~XZ zl68H%*qOOA*-KNFY@E;kOiio(4nX$`4RmsWOkcr*WY0K{!}N~s#vn~k>|da=L4 zAr zro`h(3VqYdxMuLye#+j?TwzpTnKn6ZQ)IFWnVEzFZxGx6oS}AU^aG5_Qgg1J5o>e< zSu~F|By4;F@ByW3Lp?}oVU9WqJ}_2ZxJ6z!GBfq1(n)0+lGg(v(At~v-MMFttz-F< zO7nQ!swvwO$@7A|J|T_;r*^8(Ca@uzMa0t^8%Zq&;B&p9_>7l(_CCrnzVaO1pQ@q) z0r(dU69h?l0hi#b*2 z)`XrwKzZ_;WnQ3{E3#F>v8IYeWcplp+VncRcD?RX6M!6Lew6pNpKg+>A+mN^_OVv( zk|W>jVV0!nVh)4!O2qzFWy|{HQR7dfYKFSy5m{d7M}$4k1pd#HH2~m*XMW8MsStRV zq%?`4{|y zcTa+Sf5<}wQe3`EACC4DQUB}vP&3D%_CQl`<@L*AJTE1b2D-=;B#&8pGs(TE?w5*=~Levp)<@!;|__IEAl)1r+K$+bL~M-xP(p+Zo6ZAs~oN%xTC z)mH7RE3gR@SX;>~4KAgW?oGDtZ0d$N+Dl0A$Gj9uVJv|;HiZ-hS*%5$l=wF32Iu&{ zr8K*RPHhFD4S~epB|gHlrCeB@kn6KG7akHE-n+k$)AFvt|9oUxvyIOg#~9T9CYv6( z#xoQK=U&RL=gUbDVX)pE{{q^oh1C%(cQz3?&{wDS{G%qBIAT_M&b@nk8avGFdE0ab zWR`gt1YyoOfmwYcAv6;H1qHrE+)UUs`bK=8+q__fVK?i!`VVgkJIUhApQ-}3GqQi! zY~#~D{)zt_zo{WLwiw<#xG17^%l*#g^9)sg2_#tB&JnC6Z+D&5PcV>iXU1Cs3hn97 zHq#juv0Az~Mey!~-$EnZcv@q4EA236506sKT5)HkIOgAPlsqGjdhv8!v42-*Jp&{CC#O>LZ zwe<@FvX_@k(X^yVzu*Ub$NZwx=a=E%$uR*}di5=8WtLS#^)-Vg4I$3DY72g;YJ}_F zLT-3fwhMoZ*-O{)~U5tx^hH;Ukfrqo!@Vt!~ z+c=w2ZZ(zfUn$3Kv4lmD=t%Ia13z`Sh2Io%zUK4<-b>OebkH9}JWB{=eUumC(!t%^DB> z6h+E~OC1JrOIGihnT%jcj30tY;LxWu?GsNcg60{ZclN}}n|*FRXiGx+RJ9aos zLNY6$^=@^z^)2(Zn%hMbQ_*i#=l5#rW@eN8mna_et=6C32MFKc!_F1Bc2$bZIA2ayhR4C3lc6 ziZ;O=3iTox<|?7n1g?5XVqs6*Jg)R2al8aOkFAZ6b@)?M2DS4Vn7}M5 zVbM`GM2uOR>xI1Bl|jk$xks!w{uPryO}p2Qri`g+1PMITWw9zK-i$tM0Rc}=InI|F z!T8>-E*^`Dtt%)6IZZD1KWPU;<7l4VleX1I{ytrvpDFO3pKtwc#v>>~J@${~Uuc_X z3zK0>X`i6Pe1Vby{dSxOp9jdlbsPoO!e)DZq`Sq8Ww;$N`+P-Nwc4F@W3~pCiauHv54OF7#dPZnHb(;ROo>Ki zl;%%vN*ghK_(AdmyU|yvlNL0-TNN5Xi*NLu+^~}pVrbYra!%xxFKDKKQ6zgvWbTKy zAUZjGKT+^hRsmsCrsU7eJyqIVW+7PU*$qn743c`M4TUXi`z4$CV@in(PGEbumH zal;1&gT6|-k~bNw-)RvPHB2sC zOco4!VWdDi@!4jU_loK&iJ^wKFO%0NPF~Tay`ugD7hDR8TnL?kpH7glVVSFX zI1D_3mRWsSMQPTJGlDo-mVD;gW1+Cfk6^GOEs1Ze%bHwxX%J>{Ybr9YhVb__by_$G z5zI&UDCYkSR$_*{^5(Z5PDeYy)dlz(>((`ByzijVv$&6jUCXgI2=MEM#yQ>py;)RO 
zOVa>JyM=g4OwNn=AxQ@sr$K%x+ezi?&TqRYu=ESaH72Kj8PGWf#xWS=*HsY^wZ$$x zLf3>kUD6F$6UpkMc_v?qyhx9Xs4a-zJ6FAO#>`=`p8Pib$vR4WFN10ECmy4{_lH50 zkI524y z5j4IOlC@*&o)xM>wJUZt*us@jJ6OCOulyY%ji z*yE6s4oorVggReQk4bw%)?H{{Hpl)L)j?HP7-r3g7?s`$TqYRl4p?q?)5xR$eaVl$ z4!`s)gi7Xy{!6Q$2PUdMAAuR0oq&vUc|oJn3p;4sG5ro zTLgvHzVErlJXTaM{_y39w0EEI47sK6TJ*~psg+NS(87pkrW#+=*Vr1MN|xkpzYkWI zItG?S?3U3tH+);;Rq`7X0+QmKP8-m5CM}UiV`Z@rqf90>U;NDHUvg=iNP)_4`d7e@ zjruBB$=*Ruau8uUmwFtTuW|g1uWCEVBS(;-&Lz<8#6fBwJsrdkDVve*c!8C5)#`VU z1A~+Q4Yd58{Zj(<-N^k#i^u5b20x0E|NXC}H*PRmG)G5w>w>#5?a^WN=0BSwn>+)! z|3N4gZB73hKYc&X*w>!8q}6w5>d@9N5qQrw(8VdxRm0iemG+=hlvh-dl~&p`;=sFRvjlA8wDc`;QJjU>6U!*Z+M7Hb(Fgt;3D~H$$L@k841nlaJqj-^Y&2 SO#NS}n%pxp{CxNM+y4Tq