diff --git a/.vale/styles/spelling-exceptions.txt b/.vale/styles/spelling-exceptions.txt index ecba179f..2adc2f1c 100644 --- a/.vale/styles/spelling-exceptions.txt +++ b/.vale/styles/spelling-exceptions.txt @@ -133,6 +133,7 @@ validators Version Control Vitest VLANs +yaml Yaml yamllint YouTube diff --git a/docs/docs/infrahubctl/infrahubctl-create.mdx b/docs/docs/infrahubctl/infrahubctl-create.mdx new file mode 100644 index 00000000..bc491139 --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-create.mdx @@ -0,0 +1,32 @@ +# `infrahubctl create` + +Create a new object in Infrahub. + +Provide field values with repeatable --set flags or supply a +JSON/YAML object file via --file. The two modes are mutually exclusive. + + +Examples: + infrahubctl create InfraDevice --set name=spine01 --set status=active + infrahubctl create InfraDevice --set name=spine01 --set location=DC1 + infrahubctl create InfraDevice --file devices.yml + +**Usage**: + +```console +$ infrahubctl create [OPTIONS] KIND +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind to create [required] + +**Options**: + +* `--set TEXT`: Field value in key=value format +* `-f, --file PATH`: JSON or YAML file with object data +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-delete.mdx b/docs/docs/infrahubctl/infrahubctl-delete.mdx new file mode 100644 index 00000000..140f147b --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-delete.mdx @@ -0,0 +1,31 @@ +# `infrahubctl delete` + +Delete an Infrahub object. + +Fetches the object by KIND and IDENTIFIER, then deletes it. +Unless --yes is provided, a confirmation prompt is shown first. 
+ + +Examples: + infrahubctl delete InfraDevice spine01 + infrahubctl delete InfraDevice spine01 --yes + +**Usage**: + +```console +$ infrahubctl delete [OPTIONS] KIND IDENTIFIER +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind [required] +* `IDENTIFIER`: UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS) [required] + +**Options**: + +* `-y, --yes`: Skip confirmation prompt +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-get.mdx b/docs/docs/infrahubctl/infrahubctl-get.mdx new file mode 100644 index 00000000..ca4abe8e --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-get.mdx @@ -0,0 +1,42 @@ +# `infrahubctl get` + +Query and display Infrahub objects. + +When IDENTIFIER is omitted the command lists all objects of the given +KIND. When IDENTIFIER is provided it displays a single object in +detail view. Empty columns are hidden by default (use --all-columns). + + +Examples: + infrahubctl get InfraDevice + infrahubctl get InfraDevice spine01 + infrahubctl get InfraDevice --filter name__value=spine01 + infrahubctl get InfraDevice --output json + infrahubctl get InfraDevice --output yaml > backup.yml + +Exit codes: 0 = results found, 1 = error (including not found in detail +mode), 80 = list query succeeded but returned zero objects. + +**Usage**: + +```console +$ infrahubctl get [OPTIONS] KIND [IDENTIFIER] +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind to query [required] +* `[IDENTIFIER]`: UUID, name, or HFID (use / for multi-part, e.g. 
Cisco/NX-OS) + +**Options**: + +* `--filter TEXT`: Filter in attr__value=x format +* `-o, --output [table|json|csv|yaml]`: Output format +* `-b, --branch TEXT`: Target branch +* `--limit INTEGER`: Maximum results +* `--offset INTEGER`: Skip first N results +* `--all-columns`: Show all columns including empty ones +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-schema.mdx b/docs/docs/infrahubctl/infrahubctl-schema.mdx index 1467eae8..34865f6d 100644 --- a/docs/docs/infrahubctl/infrahubctl-schema.mdx +++ b/docs/docs/infrahubctl/infrahubctl-schema.mdx @@ -18,7 +18,9 @@ $ infrahubctl schema [OPTIONS] COMMAND [ARGS]... * `check`: Check if schema files are valid and what... * `export`: Export the schema from Infrahub as YAML... +* `list`: List all available schema kinds. * `load`: Load one or multiple schema files into... +* `show`: Show details for a specific schema kind. ## `infrahubctl schema check` @@ -60,6 +62,31 @@ $ infrahubctl schema export [OPTIONS] * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. +## `infrahubctl schema list` + +List all available schema kinds. + +Displays a table of all node schema entries. Use --filter to narrow +results by a case-insensitive match on the kind name. + + +Examples: + infrahubctl schema list + infrahubctl schema list --filter Device + +**Usage**: + +```console +$ infrahubctl schema list [OPTIONS] +``` + +**Options**: + +* `--filter TEXT`: Filter kinds by name +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--help`: Show this message and exit. 
+ ## `infrahubctl schema load` Load one or multiple schema files into Infrahub. @@ -81,3 +108,30 @@ $ infrahubctl schema load [OPTIONS] SCHEMAS... * `--wait INTEGER`: Time in seconds to wait until the schema has converged across all workers [default: 0] * `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] * `--help`: Show this message and exit. + +## `infrahubctl schema show` + +Show details for a specific schema kind. + +Displays metadata, attributes, and relationships for the requested +schema kind in a human-readable format. + + +Examples: + infrahubctl schema show InfraDevice + +**Usage**: + +```console +$ infrahubctl schema show [OPTIONS] KIND +``` + +**Arguments**: + +* `KIND`: Schema kind to display [required] + +**Options**: + +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--help`: Show this message and exit. diff --git a/docs/docs/infrahubctl/infrahubctl-update.mdx b/docs/docs/infrahubctl/infrahubctl-update.mdx new file mode 100644 index 00000000..d4ec4846 --- /dev/null +++ b/docs/docs/infrahubctl/infrahubctl-update.mdx @@ -0,0 +1,33 @@ +# `infrahubctl update` + +Update an existing object in Infrahub. + +Fetches the object by KIND and IDENTIFIER, applies the requested +changes, and saves back to the server. Use --set or --file. + + +Examples: + infrahubctl update InfraDevice spine01 --set status=active + infrahubctl update InfraDevice spine01 --set location=DC1 + infrahubctl update InfraDevice spine01 --file updates.yml + +**Usage**: + +```console +$ infrahubctl update [OPTIONS] KIND IDENTIFIER +``` + +**Arguments**: + +* `KIND`: Infrahub schema kind [required] +* `IDENTIFIER`: UUID, name, or HFID (use / for multi-part, e.g. 
Cisco/NX-OS) [required] + +**Options**: + +* `--set TEXT`: Field value in key=value format +* `-f, --file PATH`: JSON or YAML file with update data +* `-b, --branch TEXT`: Target branch +* `--config-file TEXT`: [env var: INFRAHUBCTL_CONFIG; default: infrahubctl.toml] +* `--install-completion`: Install completion for the current shell. +* `--show-completion`: Show completion for the current shell, to copy it or customize the installation. +* `--help`: Show this message and exit. diff --git a/infrahub_sdk/ctl/cli_commands.py b/infrahub_sdk/ctl/cli_commands.py index d7a636ed..292bebdb 100644 --- a/infrahub_sdk/ctl/cli_commands.py +++ b/infrahub_sdk/ctl/cli_commands.py @@ -11,14 +11,14 @@ from pathlib import Path from typing import TYPE_CHECKING, Any -import typer -import ujson -from rich.console import Console -from rich.layout import Layout -from rich.logging import RichHandler -from rich.panel import Panel -from rich.pretty import Pretty -from rich.table import Table +import typer # pyright: ignore[reportMissingImports] +import ujson # pyright: ignore[reportMissingModuleSource] +from rich.console import Console # pyright: ignore[reportMissingImports] +from rich.layout import Layout # pyright: ignore[reportMissingImports] +from rich.logging import RichHandler # pyright: ignore[reportMissingImports] +from rich.panel import Panel # pyright: ignore[reportMissingImports] +from rich.pretty import Pretty # pyright: ignore[reportMissingImports] +from rich.table import Table # pyright: ignore[reportMissingImports] from .. 
import __version__ as sdk_version from ..async_typer import AsyncTyper @@ -51,6 +51,10 @@ from ..template.exceptions import JinjaTemplateError from ..utils import write_to_file from ..yaml import SchemaFile +from .commands.create import create_command +from .commands.delete import delete_command +from .commands.get import get_command +from .commands.update import update_command from .exporter import dump from .importer import load from .parameters import CONFIG_PARAM @@ -71,6 +75,17 @@ app.command(name="dump")(dump) app.command(name="load")(load) +app.command(name="get")(get_command) +app.command(name="create")(create_command) +app.command(name="update")(update_command) +app.command(name="delete")(delete_command) + +# Expose command functions under their command names for typer doc generation +# (typer --func looks up module-level names) +get = get_command +create = create_command +update = update_command +delete = delete_command console = Console() diff --git a/infrahub_sdk/ctl/commands/__init__.py b/infrahub_sdk/ctl/commands/__init__.py new file mode 100644 index 00000000..9866927c --- /dev/null +++ b/infrahub_sdk/ctl/commands/__init__.py @@ -0,0 +1,3 @@ +"""Command modules for the ``infrahub`` end-user CLI.""" + +from __future__ import annotations diff --git a/infrahub_sdk/ctl/commands/create.py b/infrahub_sdk/ctl/commands/create.py new file mode 100644 index 00000000..a54e4f25 --- /dev/null +++ b/infrahub_sdk/ctl/commands/create.py @@ -0,0 +1,81 @@ +"""Command implementation for ``infrahub create``. + +Creates a new object in Infrahub either from inline ``--set`` key=value +arguments or from a JSON/YAML object file specified via ``--file``. 
+""" + +from __future__ import annotations + +import contextlib +from pathlib import Path + +import typer # pyright: ignore[reportMissingImports] +from rich.console import Console # pyright: ignore[reportMissingImports] + +from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_node, resolve_relationship_values +from infrahub_sdk.ctl.parameters import CONFIG_PARAM +from infrahub_sdk.ctl.parsers import parse_set_args, validate_set_fields +from infrahub_sdk.ctl.utils import catch_exception +from infrahub_sdk.spec.object import ObjectFile + +console = Console() + + +@catch_exception(console=console) +async def create_command( + kind: str = typer.Argument(..., help="Infrahub schema kind to create"), + set_args: list[str] | None = typer.Option(None, "--set", help="Field value in key=value format"), + file: Path | None = typer.Option(None, "--file", "-f", help="JSON or YAML file with object data"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """Create a new object in Infrahub. + + Provide field values with repeatable --set flags or supply a + JSON/YAML object file via --file. The two modes are mutually exclusive. 
+ + \b + Examples: + infrahubctl create InfraDevice --set name=spine01 --set status=active + infrahubctl create InfraDevice --set name=spine01 --set location=DC1 + infrahubctl create InfraDevice --file devices.yml + """ + if set_args and file: + console.print("[red]Error: --set and --file are mutually exclusive.") + raise typer.Exit(code=1) + if not set_args and not file: + console.print("[red]Error: provide --set key=value or --file .") + console.print("Example: infrahubctl create MyKind --set name=foo --set status=active") + raise typer.Exit(code=1) + + client = initialize_client(branch=branch) + + if file: + files = ObjectFile.load_from_disk(paths=[file]) + for obj_file in files: + await obj_file.validate_format(client=client, branch=branch) + await obj_file.process(client=client, branch=branch) + object_count = len(obj_file.spec.data) + console.print(f"[green]Created {object_count} objects of kind {obj_file.spec.kind}") + else: + data = parse_set_args(set_args) # type: ignore[arg-type] + schema = await client.schema.get(kind=kind, branch=branch) + validate_set_fields(data, schema.attribute_names, schema.relationship_names) + data = await resolve_relationship_values(client, data, schema, branch=branch) + + # Check if node already exists to distinguish create from upsert + existing = None + name_value = data.get("name") + if name_value is not None: + with contextlib.suppress(Exception): + existing = await resolve_node(client, kind, str(name_value), schema=schema, branch=branch) + + node = await client.create(kind=kind, data=data, branch=branch) + await node.save(allow_upsert=True) + label = node.display_label or name_value or node.id + + if existing: + console.print(f"[yellow]Updated {kind} '{label}' (id: {node.id}) — already existed") + else: + console.print(f"[green]Created {kind} '{label}' (id: {node.id})") diff --git a/infrahub_sdk/ctl/commands/delete.py b/infrahub_sdk/ctl/commands/delete.py new file mode 100644 index 00000000..90830b14 --- /dev/null +++ 
b/infrahub_sdk/ctl/commands/delete.py @@ -0,0 +1,44 @@ +"""``infrahub delete`` command -- delete an Infrahub object by ID or display name. + +Prompts for confirmation before deletion unless ``--yes`` is passed. +""" + +from __future__ import annotations + +import typer +from rich.console import Console + +from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_node +from infrahub_sdk.ctl.parameters import CONFIG_PARAM +from infrahub_sdk.ctl.utils import catch_exception + +console = Console() + + +@catch_exception(console=console) +async def delete_command( + kind: str = typer.Argument(..., help="Infrahub schema kind"), + identifier: str = typer.Argument(..., help="UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS)"), + yes: bool = typer.Option(False, "--yes", "-y", help="Skip confirmation prompt"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """Delete an Infrahub object. + + Fetches the object by KIND and IDENTIFIER, then deletes it. + Unless --yes is provided, a confirmation prompt is shown first. + + \b + Examples: + infrahubctl delete InfraDevice spine01 + infrahubctl delete InfraDevice spine01 --yes + """ + client = initialize_client(branch=branch) + node = await resolve_node(client, kind, identifier, branch=branch) + + if not yes: + typer.confirm(f"Delete {kind} '{node.display_label}'?", abort=True) + + await node.delete() + console.print(f"[green]Deleted {kind} '{node.display_label}' (id: {node.id})") diff --git a/infrahub_sdk/ctl/commands/get.py b/infrahub_sdk/ctl/commands/get.py new file mode 100644 index 00000000..ae16f4d0 --- /dev/null +++ b/infrahub_sdk/ctl/commands/get.py @@ -0,0 +1,93 @@ +"""``infrahub get`` command -- query and display Infrahub objects. + +Supports both list mode (all objects of a kind) and detail mode (a single +object by ID or display name). 
Output is auto-detected as ``table`` for +interactive terminals and ``json`` when piped, but can be overridden with +``--output``. +""" + +from __future__ import annotations + +from typing import Any + +import typer +from rich.console import Console + +from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_node +from infrahub_sdk.ctl.formatters import OutputFormat, detect_output_format, get_formatter +from infrahub_sdk.ctl.parameters import CONFIG_PARAM +from infrahub_sdk.ctl.parsers import parse_filter_args +from infrahub_sdk.ctl.utils import catch_exception + +EXIT_CODE_NO_RESULTS = 80 + +console = Console() +console_stderr = Console(stderr=True) + + +@catch_exception(console=console) +async def get_command( + kind: str = typer.Argument(..., help="Infrahub schema kind to query"), + identifier: str | None = typer.Argument(None, help="UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS)"), + filter_args: list[str] | None = typer.Option(None, "--filter", help="Filter in attr__value=x format"), + output: OutputFormat | None = typer.Option(None, "--output", "-o", help="Output format"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + limit: int | None = typer.Option(None, "--limit", help="Maximum results"), + offset: int | None = typer.Option(None, "--offset", help="Skip first N results"), + all_columns: bool = typer.Option(False, "--all-columns", help="Show all columns including empty ones"), + _: str = CONFIG_PARAM, +) -> None: + """Query and display Infrahub objects. + + When IDENTIFIER is omitted the command lists all objects of the given + KIND. When IDENTIFIER is provided it displays a single object in + detail view. Empty columns are hidden by default (use --all-columns). 
+ + \b + Examples: + infrahubctl get InfraDevice + infrahubctl get InfraDevice spine01 + infrahubctl get InfraDevice --filter name__value=spine01 + infrahubctl get InfraDevice --output json + infrahubctl get InfraDevice --output yaml > backup.yml + + Exit codes: 0 = results found, 1 = error (including not found in detail + mode), 80 = list query succeeded but returned zero objects. + """ + client = initialize_client(branch=branch) + schema = await client.schema.get(kind=kind, branch=branch) + + fmt = output or detect_output_format() + formatter = get_formatter(fmt) + + if identifier is not None: + node = await resolve_node(client, kind, identifier, schema=schema, branch=branch) + result = formatter.format_detail(node, schema) + if fmt == OutputFormat.TABLE: + console.print(result, highlight=False) + else: + typer.echo(result) + return + + filters: dict[str, Any] = parse_filter_args(filter_args or []) + nodes = await client.filters( + kind=kind, + **filters, + offset=offset, + limit=limit, + prefetch_relationships=True, + ) + + count = len(nodes) + result = formatter.format_list(nodes, schema, show_all_columns=all_columns) + + if fmt == OutputFormat.TABLE: + console.print(result, highlight=False) + console.print(f"\n{count} object(s) found.", style="dim") + else: + typer.echo(result) + + if count == 0: + console_stderr.print(f"No objects of kind {kind} found.", style="yellow") + raise typer.Exit(code=EXIT_CODE_NO_RESULTS) diff --git a/infrahub_sdk/ctl/commands/update.py b/infrahub_sdk/ctl/commands/update.py new file mode 100644 index 00000000..2beb76ce --- /dev/null +++ b/infrahub_sdk/ctl/commands/update.py @@ -0,0 +1,155 @@ +"""Update command for the ``infrahub`` end-user CLI. + +Fetches an existing object by kind and identifier, applies field changes +supplied via ``--set`` flags or a ``--file`` path, and saves the result. 
+""" + +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING + +import typer # pyright: ignore[reportMissingImports] +from rich.console import Console # pyright: ignore[reportMissingImports] + +from infrahub_sdk.ctl.client import initialize_client +from infrahub_sdk.ctl.commands.utils import resolve_node, resolve_relationship_values +from infrahub_sdk.ctl.parameters import CONFIG_PARAM +from infrahub_sdk.ctl.parsers import parse_set_args, validate_set_fields +from infrahub_sdk.ctl.utils import catch_exception +from infrahub_sdk.spec.object import ObjectFile + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + +console = Console() + + +@catch_exception(console=console) +async def update_command( + kind: str = typer.Argument(..., help="Infrahub schema kind"), + identifier: str = typer.Argument(..., help="UUID, name, or HFID (use / for multi-part, e.g. Cisco/NX-OS)"), + set_args: list[str] | None = typer.Option(None, "--set", help="Field value in key=value format"), + file: Path | None = typer.Option(None, "--file", "-f", help="JSON or YAML file with update data"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """Update an existing object in Infrahub. + + Fetches the object by KIND and IDENTIFIER, applies the requested + changes, and saves back to the server. Use --set or --file. 
+ + \b + Examples: + infrahubctl update InfraDevice spine01 --set status=active + infrahubctl update InfraDevice spine01 --set location=DC1 + infrahubctl update InfraDevice spine01 --file updates.yml + """ + if set_args and file: + console.print("[red]Error: --set and --file are mutually exclusive.") + raise typer.Exit(code=1) + + if not set_args and not file: + console.print("[red]Error: provide --set key=value or --file .") + console.print("Example: infrahubctl update MyKind my-node --set field=value") + raise typer.Exit(code=1) + + client = initialize_client(branch=branch) + + if set_args: + await _update_with_set_args( + client=client, + kind=kind, + identifier=identifier, + set_args=set_args, + branch=branch, + ) + elif file: + console.print( + "[dim]Note: KIND and IDENTIFIER are ignored in --file mode; the file defines target objects.[/dim]" + ) + await _update_with_file( + client=client, + file=file, + branch=branch, + ) + + +async def _update_with_set_args( + client: InfrahubClient, + kind: str, + identifier: str, + set_args: list[str], + branch: str | None, +) -> None: + """Apply inline --set key=value updates to an existing object. + + Parses the set arguments, validates them against the schema, fetches + the target node, applies changes, and saves. + + Args: + client: Initialised async Infrahub client. + kind: Infrahub schema kind. + identifier: Object UUID or display name. + set_args: List of "key=value" strings. + branch: Optional target branch. 
+ """ + data = parse_set_args(set_args) + schema = await client.schema.get(kind=kind, branch=branch) + validate_set_fields(data, schema.attribute_names, schema.relationship_names) + + node = await resolve_node(client, kind, identifier, schema=schema, branch=branch) + + resolved_data = await resolve_relationship_values(client, data, schema, branch=branch) + + changes: list[tuple[str, object, object]] = [] + for key, new_value in data.items(): + if key in schema.attribute_names: + attr = getattr(node, key) + old_value = attr.value + attr.value = new_value + changes.append((key, old_value, new_value)) + elif key in schema.relationship_names: + rel = getattr(node, key) + old_id = getattr(rel, "id", None) + old_display = getattr(rel, "display_label", old_id) + resolved = resolved_data[key] + new_id = resolved.get("id") if isinstance(resolved, dict) else resolved + if old_id != new_id: + setattr(node, key, resolved) + changes.append((key, old_display, new_value)) + + actual_changes = [(f, o, n) for f, o, n in changes if str(o) != str(n)] + + if not actual_changes: + console.print(f"[yellow]No changes — {kind} '{identifier}' already has the requested values.") + return + + await node.save() + + console.print(f"[green]Updated {kind} '{identifier}' successfully.") + for field_name, old_val, new_val in actual_changes: + console.print(f" {field_name}: {old_val} -> {new_val}") + + +async def _update_with_file( + client: InfrahubClient, + file: Path, + branch: str | None, +) -> None: + """Apply updates from a YAML/JSON object file. + + Loads the file, validates its format against the server schema, + then processes it to apply the changes. + + Args: + client: Initialised async Infrahub client. + file: Path to the YAML or JSON object file. + branch: Optional target branch. 
+ """ + files = ObjectFile.load_from_disk(paths=[file]) + for obj_file in files: + await obj_file.validate_format(client=client, branch=branch) + await obj_file.process(client=client, branch=branch) + + console.print(f"[green]Processed update from file '{file}' successfully.") diff --git a/infrahub_sdk/ctl/commands/utils.py b/infrahub_sdk/ctl/commands/utils.py new file mode 100644 index 00000000..cd62074d --- /dev/null +++ b/infrahub_sdk/ctl/commands/utils.py @@ -0,0 +1,195 @@ +"""Shared utilities for end-user CLI commands.""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING, Any + +from infrahub_sdk.exceptions import NodeNotFoundError, SchemaNotFoundError +from infrahub_sdk.schema import NodeSchemaAPI +from infrahub_sdk.utils import is_valid_uuid + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from infrahub_sdk import InfrahubClient + from infrahub_sdk.node import InfrahubNode + from infrahub_sdk.schema import MainSchemaTypesAPI + + +async def resolve_node( + client: InfrahubClient, + kind: str, + identifier: str, + schema: MainSchemaTypesAPI | None = None, + branch: str | None = None, +) -> InfrahubNode: + """Resolve a node by identifier, trying multiple lookup strategies. + + Lookup order: + 1. UUID — if the identifier looks like a valid UUID. + 2. Default filter — if the schema defines a ``default_filter`` + (e.g., ``name__value``), use it as a keyword filter. + 3. HFID — if the schema defines a ``human_friendly_id``, treat + the identifier as HFID components (split on ``/`` for + multi-component HFIDs, or as a single component). + + Args: + client: Initialised async Infrahub client. + kind: Infrahub schema kind. + identifier: UUID, display name, or HFID string. + schema: Pre-fetched schema (fetched if not provided). + branch: Optional target branch. + + Returns: + The resolved InfrahubNode. + + Raises: + NodeNotFoundError: If no lookup strategy finds the node. 
+ """ + if schema is None: + schema = await client.schema.get(kind=kind, branch=branch) + + # 1. UUID + if is_valid_uuid(identifier): + return await client.get(kind=kind, id=identifier, branch=branch) + + # 2. Default filter + if isinstance(schema, NodeSchemaAPI) and schema.default_filter: + filters: dict[str, Any] = {schema.default_filter: identifier} + node = await client.get( + kind=kind, + branch=branch, + raise_when_missing=False, + **filters, + ) + if node is not None: + return node + + # 3. HFID (single or multi-component separated by /) + if isinstance(schema, NodeSchemaAPI) and schema.human_friendly_id: + hfid_parts = identifier.split("/") if "/" in identifier else [identifier] + node = await client.get( + kind=kind, + hfid=hfid_parts, + branch=branch, + raise_when_missing=False, + ) + if node is not None: + return node + + # Nothing found — raise with a helpful error via the standard path + return await client.get(kind=kind, id=identifier, branch=branch) + + +async def resolve_relationship_values( + client: InfrahubClient, + data: dict[str, Any], + schema: MainSchemaTypesAPI, + branch: str | None = None, +) -> dict[str, Any]: + """Resolve relationship string values in a data dict to node IDs. + + For each key that is a relationship name in the schema, attempts to + look up the target node by the string value (using the relationship's + peer kind). The value is replaced with ``{"id": ""}`` so the + SDK can create/update the node correctly. + + Attribute values are passed through unchanged. + + Args: + client: Initialised async Infrahub client. + data: Parsed data from ``--set`` arguments. + schema: Schema for the kind being created/updated. + branch: Optional target branch. + + Returns: + A new dict with relationship values resolved to ID references. + """ + resolved: dict[str, Any] = {} + + for key, value in data.items(): + if key not in schema.relationship_names: + resolved[key] = value + continue + + # Already a dict (e.g. 
{"id": "uuid"}) — pass through + if isinstance(value, dict): + resolved[key] = value + continue + + str_value = str(value) + rel_schema = schema.get_relationship(key) + peer_kind = rel_schema.peer + + # Try to resolve the string value as a node identifier. + # Only fall back to generic peer search on lookup-miss errors; + # re-raise auth, network, and other unexpected errors. + try: + peer_node = await resolve_node(client, peer_kind, str_value, branch=branch) + resolved[key] = {"id": peer_node.id} + except (NodeNotFoundError, SchemaNotFoundError, ValueError, IndexError): + node = await _search_generic_peer(client, str_value, branch=branch) + if node is not None: + resolved[key] = {"id": node.id} + else: + resolved[key] = value + + return resolved + + +async def _search_generic_peer( + client: InfrahubClient, + identifier: str, + branch: str | None = None, +) -> InfrahubNode | None: + """Search across all node schemas for a node matching the identifier. + + Used as a fallback when the relationship peer is a generic type + and the direct lookup fails. + + Args: + client: Initialised async Infrahub client. + identifier: Display name or HFID to search for. + branch: Optional target branch. + + Returns: + The matched node, or None if not found. 
+ """ + all_schemas = await client.schema.all(branch=branch) + hfid_parts = identifier.split("/") if "/" in identifier else [identifier] + + for schema in all_schemas.values(): + if not isinstance(schema, NodeSchemaAPI): + continue + + # Try default_filter first + if schema.default_filter: + try: + filters: dict[str, Any] = {schema.default_filter: identifier} + node = await client.get( # type: ignore[arg-type] + kind=schema.kind, + branch=branch, + raise_when_missing=False, + **filters, + ) + if node is not None: + return node + except Exception: + logger.debug("Failed default_filter for %r via %s", identifier, schema.kind) + + # Try HFID + if schema.human_friendly_id: + try: + node = await client.get( + kind=schema.kind, + hfid=hfid_parts, + branch=branch, + raise_when_missing=False, + ) + if node is not None: + return node + except Exception: + logger.debug("Failed HFID for %r via %s", identifier, schema.kind) + + return None diff --git a/infrahub_sdk/ctl/formatters/__init__.py b/infrahub_sdk/ctl/formatters/__init__.py new file mode 100644 index 00000000..bb3fe010 --- /dev/null +++ b/infrahub_sdk/ctl/formatters/__init__.py @@ -0,0 +1,76 @@ +"""Output formatters for the ``infrahub`` end-user CLI. + +Provides an ``OutputFormat`` enum for selecting the desired output style and a +``get_formatter`` factory that returns the appropriate ``BaseFormatter`` +implementation. 
+""" + +from __future__ import annotations + +import sys +from enum import Enum +from typing import TYPE_CHECKING + +from infrahub_sdk.ctl.formatters.csv import CsvFormatter +from infrahub_sdk.ctl.formatters.json import JsonFormatter +from infrahub_sdk.ctl.formatters.table import TableFormatter +from infrahub_sdk.ctl.formatters.yaml import YamlFormatter + +if TYPE_CHECKING: + from infrahub_sdk.ctl.formatters.base import BaseFormatter + +__all__ = [ + "CsvFormatter", + "JsonFormatter", + "OutputFormat", + "TableFormatter", + "YamlFormatter", + "detect_output_format", + "get_formatter", +] + + +class OutputFormat(str, Enum): + """Supported CLI output formats.""" + + TABLE = "table" + JSON = "json" + CSV = "csv" + YAML = "yaml" + + +def detect_output_format() -> OutputFormat: + """Auto-detect output format based on whether stdout is a TTY. + + Returns: + ``OutputFormat.TABLE`` when stdout is connected to a terminal, + ``OutputFormat.JSON`` otherwise (e.g. when piped). + """ + return OutputFormat.TABLE if sys.stdout.isatty() else OutputFormat.JSON + + +def get_formatter(output_format: OutputFormat) -> BaseFormatter: + """Return the appropriate formatter for the given output format. + + Args: + output_format: The desired output format. + + Returns: + A ``BaseFormatter`` subclass instance matching *output_format*. + + Raises: + ValueError: If *output_format* is not a recognised format. 
+ """ + formatters: dict[OutputFormat, type[BaseFormatter]] = { + OutputFormat.TABLE: TableFormatter, + OutputFormat.JSON: JsonFormatter, + OutputFormat.CSV: CsvFormatter, + OutputFormat.YAML: YamlFormatter, + } + + formatter_class = formatters.get(output_format) + if formatter_class is None: + msg = f"Unsupported output format: {output_format}" + raise ValueError(msg) + + return formatter_class() diff --git a/infrahub_sdk/ctl/formatters/base.py b/infrahub_sdk/ctl/formatters/base.py new file mode 100644 index 00000000..8ab5edf4 --- /dev/null +++ b/infrahub_sdk/ctl/formatters/base.py @@ -0,0 +1,184 @@ +"""Base formatter protocol and shared helper functions for node data extraction.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Protocol + +if TYPE_CHECKING: + from ...node import InfrahubNode + from ...schema import MainSchemaTypesAPI + + +class BaseFormatter(Protocol): + """Protocol defining the interface all formatters must implement. + + Formatters convert InfrahubNode objects into string representations + for display in various output formats (table, JSON, CSV, YAML). + """ + + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, + ) -> str: + """Format a list of nodes for display. + + Args: + nodes: List of InfrahubNode objects to format. + schema: Schema definition for the node kind. + show_all_columns: When True, include columns where every value is empty. + + Returns: + Formatted string representation of all nodes. + """ + ... + + def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: + """Format a single node's detail view. + + Args: + node: The InfrahubNode to format. + schema: Schema definition for the node kind. + + Returns: + Formatted string with full node details. + """ + ... 
+ + +def _extract_relationship_value( + node: InfrahubNode, + rel_name: str, + cardinality: str, +) -> str: + """Extract a display value from a relationship on a node. + + Args: + node: The node containing the relationship. + rel_name: Name of the relationship attribute. + cardinality: Either "one" or "many". + + Returns: + Display string for the relationship value. + """ + rel = getattr(node, rel_name, None) + if rel is None: + return "" + + if cardinality == "one": + return rel.display_label or rel.id or "" + + # cardinality == "many": RelationshipManager with .peers + peers = getattr(rel, "peers", []) + labels = [p.display_label or p.id or "" for p in peers] + return ", ".join(labels) + + +def extract_node_data( + node: InfrahubNode, + schema: MainSchemaTypesAPI, +) -> dict[str, Any]: + """Extract a flat dict of field names to display values from a node. + + Handles both attributes and relationships. Attribute values of None + are converted to empty strings. Relationship values are rendered as + display labels. + + Args: + node: The InfrahubNode to extract data from. + schema: Schema definition describing attributes and relationships. + + Returns: + Dict mapping field names to their string display values. + """ + data: dict[str, Any] = {} + + for attr_name in schema.attribute_names: + attr = getattr(node, attr_name, None) + value = attr.value if attr is not None else None + data[attr_name] = value if value is not None else "" + + for rel_name in schema.relationship_names: + rel_schema = schema.get_relationship(rel_name) + data[rel_name] = _extract_relationship_value(node, rel_name, rel_schema.cardinality) + + return data + + +def non_empty_columns(rows: list[dict[str, Any]], columns: list[str]) -> list[str]: + """Return only columns that have at least one non-empty value across all rows. + + Args: + rows: List of row dicts (from extract_node_data). + columns: All candidate column names. + + Returns: + Filtered list of column names with data. 
+ """ + return [col for col in columns if any(str(row.get(col, "")).strip() for row in rows)] + + +def extract_node_detail( + node: InfrahubNode, + schema: MainSchemaTypesAPI, +) -> dict[str, Any]: + """Extract a rich detail dict from a node including metadata. + + Similar to extract_node_data but includes the node ID, display label, + and schema kind as additional metadata fields. + + Args: + node: The InfrahubNode to extract data from. + schema: Schema definition describing attributes and relationships. + + Returns: + Dict with metadata fields (id, display_label, kind) followed + by attribute and relationship values. + """ + detail: dict[str, Any] = { + "id": node.id or "", + "display_label": node.display_label or "", + "kind": schema.kind, + } + + for attr_name in schema.attribute_names: + attr = getattr(node, attr_name, None) + if attr is not None: + detail[attr_name] = { + "value": attr.value if attr.value is not None else "", + } + else: + detail[attr_name] = {"value": ""} + + for rel_name in schema.relationship_names: + rel_schema = schema.get_relationship(rel_name) + rel = getattr(node, rel_name, None) + + if rel_schema.cardinality == "one": + if rel is not None: + detail[rel_name] = { + "display_label": rel.display_label or "", + "id": rel.id or "", + "cardinality": "one", + } + else: + detail[rel_name] = { + "display_label": "", + "id": "", + "cardinality": "one", + } + else: + peers = getattr(rel, "peers", []) if rel is not None else [] + detail[rel_name] = { + "peers": [ + { + "display_label": p.display_label or "", + "id": p.id or "", + } + for p in peers + ], + "cardinality": "many", + } + + return detail diff --git a/infrahub_sdk/ctl/formatters/csv.py b/infrahub_sdk/ctl/formatters/csv.py new file mode 100644 index 00000000..9f437847 --- /dev/null +++ b/infrahub_sdk/ctl/formatters/csv.py @@ -0,0 +1,98 @@ +"""CSV formatter for InfrahubNode query results.""" + +from __future__ import annotations + +import csv +import io +from typing import TYPE_CHECKING 
+ +from .base import extract_node_data, extract_node_detail, non_empty_columns + +if TYPE_CHECKING: + from ...node import InfrahubNode + from ...schema import MainSchemaTypesAPI + + +class CsvFormatter: + """Formats InfrahubNode data as CSV strings. + + Uses stdlib csv module for proper escaping and quoting of values. + """ + + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, + ) -> str: + """Format a list of nodes as CSV with a header row. + + Columns correspond to schema attribute and relationship names. + Each node produces one data row. + + Args: + nodes: List of InfrahubNode objects to format. + schema: Schema definition for the node kind. + show_all_columns: When True, include columns where every value is empty. + + Returns: + CSV string with header and data rows. + """ + all_columns = schema.attribute_names + schema.relationship_names + rows = [extract_node_data(node, schema) for node in nodes] + columns = all_columns if not rows or show_all_columns else non_empty_columns(rows, all_columns) + + output = io.StringIO() + writer = csv.writer(output) + writer.writerow(columns) + + for row_data in rows: + writer.writerow([str(row_data.get(col, "")) for col in columns]) + + return output.getvalue() + + def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: + """Format a single node as a two-column CSV (field, value). + + Includes metadata fields (id, display_label, kind) followed + by all attributes and relationships. + + Args: + node: The InfrahubNode to format. + schema: Schema definition for the node kind. + + Returns: + CSV string with field/value columns. 
+ """ + detail = extract_node_detail(node, schema) + output = io.StringIO() + writer = csv.writer(output) + + writer.writerow(["field", "value"]) + + # Metadata rows + writer.writerow(["id", str(detail.get("id", ""))]) + writer.writerow(["display_label", str(detail.get("display_label", ""))]) + writer.writerow(["kind", str(detail.get("kind", ""))]) + + # Attribute rows + for attr_name in schema.attribute_names: + attr_detail = detail.get(attr_name, {}) + value = attr_detail.get("value", "") if isinstance(attr_detail, dict) else attr_detail + writer.writerow([attr_name, str(value)]) + + # Relationship rows + for rel_name in schema.relationship_names: + rel_detail = detail.get(rel_name, {}) + if not isinstance(rel_detail, dict): + writer.writerow([rel_name, str(rel_detail)]) + continue + + if rel_detail.get("cardinality") == "one": + writer.writerow([rel_name, str(rel_detail.get("display_label", ""))]) + else: + peers = rel_detail.get("peers", []) + labels = [p.get("display_label", "") for p in peers] + writer.writerow([rel_name, ", ".join(labels)]) + + return output.getvalue() diff --git a/infrahub_sdk/ctl/formatters/json.py b/infrahub_sdk/ctl/formatters/json.py new file mode 100644 index 00000000..27e029e8 --- /dev/null +++ b/infrahub_sdk/ctl/formatters/json.py @@ -0,0 +1,57 @@ +"""JSON formatter for InfrahubNode query results.""" + +from __future__ import annotations + +import json +from typing import TYPE_CHECKING + +from .base import extract_node_data, extract_node_detail + +if TYPE_CHECKING: + from ...node import InfrahubNode + from ...schema import MainSchemaTypesAPI + + +class JsonFormatter: + """Formats InfrahubNode data as JSON strings. + + Uses stdlib json module with indentation for readable output. + """ + + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, # noqa: ARG002 + ) -> str: + """Format a list of nodes as a JSON array. 
+ + Each node is represented as a dict with attribute and + relationship field names as keys. + + Args: + nodes: List of InfrahubNode objects to format. + schema: Schema definition for the node kind. + show_all_columns: Accepted for interface compatibility; not used for JSON. + + Returns: + JSON array string. + """ + items = [extract_node_data(node, schema) for node in nodes] + return json.dumps(items, indent=2, default=str) + + def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: + """Format a single node as a JSON object. + + Includes metadata (id, display_label, kind) along with + all attributes and relationships. + + Args: + node: The InfrahubNode to format. + schema: Schema definition for the node kind. + + Returns: + JSON object string. + """ + detail = extract_node_detail(node, schema) + return json.dumps(detail, indent=2, default=str) diff --git a/infrahub_sdk/ctl/formatters/table.py b/infrahub_sdk/ctl/formatters/table.py new file mode 100644 index 00000000..b90854f1 --- /dev/null +++ b/infrahub_sdk/ctl/formatters/table.py @@ -0,0 +1,121 @@ +"""Rich table formatter for InfrahubNode query results.""" + +from __future__ import annotations + +from io import StringIO +from typing import TYPE_CHECKING + +from rich.console import Console +from rich.table import Table + +from .base import extract_node_data, extract_node_detail, non_empty_columns + +if TYPE_CHECKING: + from ...node import InfrahubNode + from ...schema import MainSchemaTypesAPI + + +class TableFormatter: + """Formats InfrahubNode data as Rich tables. + + Uses Rich library to render attractive, aligned tables with + column headers derived from the schema field names. + """ + + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, + ) -> str: + """Format a list of nodes as a Rich table. + + Creates a table with one column per attribute and relationship, + and one row per node. 
+ + Args: + nodes: List of InfrahubNode objects to format. + schema: Schema definition for the node kind. + show_all_columns: When True, include columns where every value is empty. + + Returns: + Rendered table string. + """ + all_columns = schema.attribute_names + schema.relationship_names + rows = [extract_node_data(node, schema) for node in nodes] + + columns = all_columns if not rows or show_all_columns else non_empty_columns(rows, all_columns) + + table = Table(title=schema.kind, show_lines=False) + for col in columns: + table.add_column(col, overflow="fold") + + for row_data in rows: + table.add_row(*(str(row_data.get(col, "")) for col in columns)) + + return self._render(table) + + def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: + """Format a single node as a key-value detail view. + + Renders a two-column table (Field / Value) with metadata, + attributes, and relationships sections. + + Args: + node: The InfrahubNode to format. + schema: Schema definition for the node kind. + + Returns: + Rendered detail string. 
+ """ + detail = extract_node_detail(node, schema) + + table = Table( + title=f"{schema.kind} Detail", + show_lines=True, + ) + table.add_column("Field", style="bold") + table.add_column("Value") + + # Metadata section + table.add_row("id", str(detail.get("id", ""))) + table.add_row("display_label", str(detail.get("display_label", ""))) + table.add_row("kind", str(detail.get("kind", ""))) + + # Attributes section + for attr_name in schema.attribute_names: + attr_detail = detail.get(attr_name, {}) + value = attr_detail.get("value", "") if isinstance(attr_detail, dict) else attr_detail + table.add_row(attr_name, str(value)) + + # Relationships section + for rel_name in schema.relationship_names: + rel_detail = detail.get(rel_name, {}) + if not isinstance(rel_detail, dict): + table.add_row(rel_name, str(rel_detail)) + continue + + if rel_detail.get("cardinality") == "one": + label = rel_detail.get("display_label", "") + table.add_row(rel_name, str(label)) + else: + peers = rel_detail.get("peers", []) + labels = [p.get("display_label", "") for p in peers] + table.add_row(rel_name, ", ".join(labels)) + + return self._render(table) + + @staticmethod + def _render(renderable: Table) -> str: + """Capture Rich renderable output to a string. + + Args: + renderable: A Rich Table or other renderable object. + + Returns: + The rendered string output. + """ + buffer = StringIO() + console = Console(file=buffer, force_terminal=False, width=120) + console.print(renderable) + return buffer.getvalue() diff --git a/infrahub_sdk/ctl/formatters/yaml.py b/infrahub_sdk/ctl/formatters/yaml.py new file mode 100644 index 00000000..9100fd7c --- /dev/null +++ b/infrahub_sdk/ctl/formatters/yaml.py @@ -0,0 +1,130 @@ +"""YAML formatter for InfrahubNode query results in Infrahub object format. + +Produces YAML that is round-trippable with ``infrahubctl object load``. +Empty/null attribute values and unset relationships are omitted so the +output can be loaded back without validation errors. 
+""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +import yaml # type: ignore[import-untyped] + +if TYPE_CHECKING: + from ...node import InfrahubNode + from ...schema import MainSchemaTypesAPI + +_INFRAHUB_API_VERSION = "infrahub.app/v1" +_INFRAHUB_KIND = "Object" + + +class YamlFormatter: + """Formats InfrahubNode data as YAML in the Infrahub object spec format. + + Output follows the standard Infrahub file structure:: + + --- + apiVersion: infrahub.app/v1 + kind: Object + spec: + kind: + data: + - field1: value1 + field2: value2 + """ + + def format_list( + self, + nodes: list[InfrahubNode], + schema: MainSchemaTypesAPI, + show_all_columns: bool = False, # noqa: ARG002 + ) -> str: + """Format a list of nodes as an Infrahub YAML object document.""" + data_items = [self._node_to_data_entry(node, schema) for node in nodes] + return self._build_document(schema.kind, data_items) + + def format_detail(self, node: InfrahubNode, schema: MainSchemaTypesAPI) -> str: + """Format a single node as an Infrahub YAML object document.""" + data_entry = self._node_to_data_entry(node, schema) + return self._build_document(schema.kind, [data_entry]) + + def _node_to_data_entry( + self, + node: InfrahubNode, + schema: MainSchemaTypesAPI, + ) -> dict[str, Any]: + """Convert a node into a dict compatible with ObjectFile spec format. + + Omits empty/null attribute values and unset relationships so the + output can be loaded back via ``infrahubctl object load`` without + validation errors. 
+ """ + entry: dict[str, Any] = {} + + # Attributes: only include non-empty values + for attr_name in schema.attribute_names: + attr = getattr(node, attr_name, None) + if attr is None: + continue + value = attr.value + if not value and value != 0 and value is not False: + continue + entry[attr_name] = value + + # Relationships: skip unset, use HFID when available + for rel_name in schema.relationship_names: + rel_schema = schema.get_relationship(rel_name) + rel = getattr(node, rel_name, None) + if rel is None: + continue + + if rel_schema.cardinality == "one": + ref = _related_node_ref(rel) + if ref is not None: + entry[rel_name] = ref + else: + peers = getattr(rel, "peers", None) or [] + refs = [r for p in peers if (r := _related_node_ref(p)) is not None] + if refs: + entry[rel_name] = {"data": refs} + + return entry + + @staticmethod + def _build_document(kind: str, data: list[dict[str, Any]]) -> str: + """Build the full Infrahub YAML document structure.""" + document = { + "apiVersion": _INFRAHUB_API_VERSION, + "kind": _INFRAHUB_KIND, + "spec": { + "kind": kind, + "data": data, + }, + } + return "---\n" + yaml.dump( + document, + default_flow_style=False, + sort_keys=False, + allow_unicode=True, + ) + + +def _related_node_ref(rel: Any) -> str | list[str] | None: + """Build a reference value for a related node suitable for ObjectFile. + + Uses the HFID if available. For single-component HFIDs, returns a + plain string. For multi-component HFIDs, returns a list. Falls back + to display_label. + + Args: + rel: A RelatedNode object. + + Returns: + A string, list of strings, or None if the relationship is unset. 
+ """ + hfid = getattr(rel, "hfid", None) + if hfid: + return hfid[0] if len(hfid) == 1 else list(hfid) + label = getattr(rel, "display_label", None) + return label or None diff --git a/infrahub_sdk/ctl/parsers.py b/infrahub_sdk/ctl/parsers.py new file mode 100644 index 00000000..006c09e8 --- /dev/null +++ b/infrahub_sdk/ctl/parsers.py @@ -0,0 +1,129 @@ +from __future__ import annotations + +from typing import Any + +import typer + + +def _coerce_value(value: str) -> Any: + """Attempt to coerce a string value to an appropriate Python type. + + Tries, in order: int, float, bool (true/false), None (null/none). + Falls back to the original string if no conversion matches. + + Args: + value: The raw string value from the CLI. + + Returns: + The coerced Python value. + """ + # Try integer (preserve leading zeros — "00123" stays a string) + try: + int_val = int(value) + if str(int_val) == value: + return int_val + except ValueError: + pass + + # Try float (preserve leading zeros) + try: + float_val = float(value) + if str(float_val) == value: + return float_val + except ValueError: + pass + + # Try boolean + lower = value.lower() + if lower in {"true", "yes"}: + return True + if lower in {"false", "no"}: + return False + + # Try null + if lower in {"null", "none"}: + return None + + return value + + +def parse_set_args(set_args: list[str]) -> dict[str, Any]: + """Parse --set key=value arguments into a dictionary. + + Splits each argument on the first ``=`` sign, allowing values + to contain additional ``=`` characters. Values are automatically + coerced to int, float, bool, or None where possible. + + Args: + set_args: List of "key=value" strings from the CLI. + + Returns: + Dictionary mapping field names to coerced Python values. + + Raises: + typer.BadParameter: If any argument is not in key=value format. + """ + result: dict[str, Any] = {} + for arg in set_args: + if "=" not in arg: + raise typer.BadParameter(f"Invalid format '{arg}'. 
Expected key=value.") + key, value = arg.split("=", maxsplit=1) + key = key.strip() + if not key: + raise typer.BadParameter(f"Invalid format '{arg}'. Key must not be empty.") + result[key] = _coerce_value(value) + return result + + +def parse_filter_args(filter_args: list[str]) -> dict[str, Any]: + """Parse --filter arguments into kwargs for client.filters(). + + Uses the same split-on-first-``=`` logic as :func:`parse_set_args`. + Keys are expected to follow SDK filter conventions + (e.g. ``attribute__value``, ``relationship__id``) but format + validation is left to the SDK. + + Args: + filter_args: List of "attr__value=x" strings from the CLI. + + Returns: + Dictionary of filter kwargs to pass to client.filters(). + + Raises: + typer.BadParameter: If any argument is not in key=value format. + """ + result: dict[str, Any] = {} + for arg in filter_args: + if "=" not in arg: + raise typer.BadParameter(f"Invalid format '{arg}'. Expected key=value.") + key, value = arg.split("=", maxsplit=1) + key = key.strip() + if not key: + raise typer.BadParameter(f"Invalid format '{arg}'. Key must not be empty.") + result[key] = value + return result + + +def validate_set_fields( + data: dict[str, Any], + attribute_names: list[str], + relationship_names: list[str], +) -> None: + """Validate that all keys in data are valid attribute or relationship names. + + Args: + data: Parsed set data from parse_set_args. + attribute_names: Valid attribute names from schema. + relationship_names: Valid relationship names from schema. + + Raises: + typer.BadParameter: If any key is not a valid field name, + with a message listing valid fields. + """ + valid_fields = set(attribute_names) | set(relationship_names) + invalid_keys = sorted(set(data.keys()) - valid_fields) + if invalid_keys: + valid_sorted = sorted(valid_fields) + raise typer.BadParameter( + f"Unknown field(s): {', '.join(invalid_keys)}. 
Valid fields: {', '.join(valid_sorted)}" + ) diff --git a/infrahub_sdk/ctl/schema.py b/infrahub_sdk/ctl/schema.py index 9532959e..c5b3498a 100644 --- a/infrahub_sdk/ctl/schema.py +++ b/infrahub_sdk/ctl/schema.py @@ -10,12 +10,13 @@ import yaml from pydantic import ValidationError from rich.console import Console +from rich.table import Table from ..async_typer import AsyncTyper from ..ctl.client import initialize_client from ..ctl.utils import catch_exception, init_logging from ..queries import SCHEMA_HASH_SYNC_STATUS -from ..schema import SchemaWarning +from ..schema import NodeSchemaAPI, SchemaWarning from ..yaml import SchemaFile from .parameters import CONFIG_PARAM from .utils import load_yamlfile_from_disk_and_exit @@ -258,3 +259,108 @@ async def export( console.print(f"[green] Exported namespace '{ns}' to {output_file}") console.print(f"[green] Schema exported to {directory}") + + +@app.command(name="list") +@catch_exception(console=console) +async def schema_list( + filter_text: str | None = typer.Option(None, "--filter", help="Filter kinds by name"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """List all available schema kinds. + + Displays a table of all node schema entries. Use --filter to narrow + results by a case-insensitive match on the kind name. 
+ + \b + Examples: + infrahubctl schema list + infrahubctl schema list --filter Device + """ + client = initialize_client(branch=branch) + schemas = await client.schema.all(branch=branch) + + items = list(schemas.values()) + if filter_text: + items = [s for s in items if filter_text.lower() in s.kind.lower()] + + items = [s for s in items if isinstance(s, NodeSchemaAPI)] + items.sort(key=lambda s: s.kind) + + table = Table(title="Schema Kinds") + table.add_column("Namespace") + table.add_column("Name") + table.add_column("Kind") + table.add_column("Description") + + for schema_item in items: + table.add_row( + schema_item.namespace, + schema_item.name, + schema_item.kind, + schema_item.description or "", + ) + + console.print(table) + + +@app.command(name="show") +@catch_exception(console=console) +async def schema_show( + kind: str = typer.Argument(..., help="Schema kind to display"), + branch: str | None = typer.Option(None, "--branch", "-b", help="Target branch"), + _: str = CONFIG_PARAM, +) -> None: + """Show details for a specific schema kind. + + Displays metadata, attributes, and relationships for the requested + schema kind in a human-readable format. 
+ + \b + Examples: + infrahubctl schema show InfraDevice + """ + client = initialize_client(branch=branch) + node_schema = await client.schema.get(kind=kind, branch=branch) + + console.print(f"\n[bold]{node_schema.kind}[/bold]") + if node_schema.description: + console.print(f" {node_schema.description}") + console.print(f" Namespace: {node_schema.namespace}") + console.print(f" Display Labels: {node_schema.display_labels or 'N/A'}") + console.print(f" Human Friendly ID: {node_schema.human_friendly_id or 'N/A'}") + + if node_schema.attributes: + attr_table = Table(title="Attributes") + attr_table.add_column("Name") + attr_table.add_column("Type") + attr_table.add_column("Required") + attr_table.add_column("Default") + attr_table.add_column("Description") + + for attr in node_schema.attributes: + attr_table.add_row( + attr.name, + str(attr.kind), + "Yes" if not attr.optional else "No", + str(attr.default_value) if attr.default_value is not None else "", + attr.description or "", + ) + console.print(attr_table) + + if node_schema.relationships: + rel_table = Table(title="Relationships") + rel_table.add_column("Name") + rel_table.add_column("Peer") + rel_table.add_column("Cardinality") + rel_table.add_column("Optional") + + for rel in node_schema.relationships: + rel_table.add_row( + rel.name, + rel.peer, + rel.cardinality, + "Yes" if rel.optional else "No", + ) + console.print(rel_table) diff --git a/specs/001-end-user-cli/checklists/requirements.md b/specs/001-end-user-cli/checklists/requirements.md new file mode 100644 index 00000000..ecd3be28 --- /dev/null +++ b/specs/001-end-user-cli/checklists/requirements.md @@ -0,0 +1,36 @@ +# Specification Quality Checklist: End-User CLI (`infrahub` command) + +**Purpose**: Validate specification completeness and quality before proceeding to planning +**Created**: 2026-03-28 +**Feature**: [spec.md](../spec.md) + +## Content Quality + +- [x] No implementation details (languages, frameworks, APIs) +- [x] Focused on user value 
and business needs +- [x] Written for non-technical stakeholders +- [x] All mandatory sections completed + +## Requirement Completeness + +- [x] No [NEEDS CLARIFICATION] markers remain +- [x] Requirements are testable and unambiguous +- [x] Success criteria are measurable +- [x] Success criteria are technology-agnostic (no implementation details) +- [x] All acceptance scenarios are defined +- [x] Edge cases are identified +- [x] Scope is clearly bounded +- [x] Dependencies and assumptions identified + +## Feature Readiness + +- [x] All functional requirements have clear acceptance criteria +- [x] User scenarios cover primary flows +- [x] Feature meets measurable outcomes defined in Success Criteria +- [x] No implementation details leak into specification + +## Notes + +- All items pass validation. Spec is ready for `/speckit.clarify` or `/speckit.plan`. +- Assumptions section documents reasonable defaults for unspecified details. +- CLI command examples in acceptance scenarios use generic syntax (not framework-specific). 
diff --git a/specs/001-end-user-cli/contracts/cli-commands.md b/specs/001-end-user-cli/contracts/cli-commands.md
new file mode 100644
index 00000000..044ab67a
--- /dev/null
+++ b/specs/001-end-user-cli/contracts/cli-commands.md
@@ -0,0 +1,64 @@
+# CLI Command Contracts
+
+## Global Options
+
+All commands accept:
+
+- `--branch TEXT` — Target Infrahub branch (default: from config)
+- `--config-file PATH` — Configuration file path (default: infrahubctl.toml)
+- `--output [table|json|csv|yaml]` — Output format (default: table if TTY, json if piped)
+
+## `infrahub get <kind> [identifier]`
+
+**List mode** (no identifier):
+
+- Input: kind (positional), --filter (repeatable), --limit INT, --offset INT
+- Output: Table with columns for each attribute + relationship (display names)
+- Exit 0: results found | Exit 80: no results (empty list) | Exit 1: invalid kind
+
+**Detail mode** (with identifier):
+
+- Input: kind (positional), identifier (positional — UUID or display name)
+- Output: Key-value display of all attributes, relationships, metadata
+- Exit 0: found | Exit 1: not found
+
+**Filters**: `--filter name__value="spine01"` (repeatable)
+
+## `infrahub create <kind>`
+
+- Input: kind (positional), --set key=value (repeatable), --file PATH
+- --set and --file are mutually exclusive
+- Output: Confirmation with created object ID and display label
+- Exit 0: created | Exit 1: validation error | Exit 1: server error
+
+**File input**: JSON or YAML in Infrahub Object format
+(`apiVersion: infrahub.app/v1`)
+
+## `infrahub update <kind> <identifier>`
+
+- Input: kind (positional), identifier (positional), --set key=value
+  (repeatable), --file PATH
+- --set and --file are mutually exclusive
+- Output: Confirmation with old → new values for changed fields
+- Exit 0: updated | Exit 1: not found | Exit 1: validation error
+
+## `infrahub delete <kind> <identifier>`
+
+- Input: kind (positional), identifier (positional), --yes (skip confirmation)
+- Output: Confirmation prompt (unless --yes), then success message
+- Exit 0: deleted | Exit 1: not found | Exit 1: dependency conflict
+
+## `infrahub schema list`
+
+- Input: --filter TEXT (substring match on kind name)
+- Output: Table with columns: Namespace, Name, Kind, Description
+- Exit 0: always (empty table if no matches)
+
+## `infrahub schema show <kind>`
+
+- Input: kind (positional)
+- Output: Formatted display of:
+  - Kind metadata (namespace, label, description, display_labels, HFID)
+  - Attributes table (name, type, required, default, description)
+  - Relationships table (name, peer kind, cardinality, optional)
+- Exit 0: found | Exit 1: invalid kind
diff --git a/specs/001-end-user-cli/data-model.md b/specs/001-end-user-cli/data-model.md
new file mode 100644
index 00000000..7baaeb60
--- /dev/null
+++ b/specs/001-end-user-cli/data-model.md
@@ -0,0 +1,52 @@
+# Data Model: End-User CLI
+
+This feature does not introduce new persistent data entities. It operates on
+Infrahub's existing data model (Kinds, Nodes, Attributes, Relationships) via
+the SDK client.
+
+The CLI introduces transient structures for formatting and serialization:
+
+## Output Format Envelope
+
+Used when serializing query results to YAML output format.
+
+**Fields**:
+
+- `apiVersion` (str): Always `"infrahub.app/v1"`
+- `kind` (str): Always `"Object"`
+- `spec.kind` (str): The Infrahub Kind being exported (e.g., `"InfraDevice"`)
+- `spec.data` (list[dict]): Array of serialized node objects
+
+Each node in `spec.data` contains:
+
+- Attribute fields as `key: value` pairs
+- Relationship fields as `key: display_name` (single) or
+  `key: {data: [list]}` (many)
+
+This structure matches the existing `InfrahubObjectFileData` model in
+`infrahub_sdk/spec/object.py` and is round-trippable with `ObjectFile`.
+
+## Set Flag Parser
+
+Parses `--set key=value` arguments into a dict suitable for SDK calls. 
+ +**Input**: List of `"key=value"` strings from CLI +**Output**: `dict[str, str | list[str]]` + +Validation rules: + +- Key MUST exist as an attribute or relationship name in the target Kind's schema +- Value is a string; the SDK handles type coercion +- For relationships, value is the display name or UUID of the target node + +## Filter Parser + +Parses `--filter key=value` arguments into kwargs for `client.filters()`. + +**Input**: List of `"attribute__value=x"` strings from CLI +**Output**: `dict[str, Any]` passed as `**kwargs` + +Validation rules: + +- Key MUST follow the `attribute__value` or `relationship__id` pattern +- Invalid keys produce a validation error with available field names diff --git a/specs/001-end-user-cli/plan.md b/specs/001-end-user-cli/plan.md new file mode 100644 index 00000000..9f7c31b8 --- /dev/null +++ b/specs/001-end-user-cli/plan.md @@ -0,0 +1,113 @@ +# Implementation Plan: End-User CLI (`infrahub` command) + +**Branch**: `001-end-user-cli` | **Date**: 2026-03-28 | **Spec**: [spec.md](spec.md) +**Input**: Feature specification from `specs/001-end-user-cli/spec.md` + +## Summary + +Create a new `infrahub` CLI entry point for end users to perform CRUD operations +on Infrahub data and discover schema. The CLI reuses the existing SDK client, +configuration, and AsyncTyper framework from `infrahubctl`, adding commands for +`get`, `create`, `update`, `delete`, and `schema` operations with multiple output +formats including round-trippable Infrahub Object YAML. 
+ +## Technical Context + +**Language/Version**: Python 3.10-3.13 +**Primary Dependencies**: typer (via AsyncTyper), rich, pyyaml, httpx (via SDK client) +**Storage**: N/A (all data in Infrahub server via SDK) +**Testing**: pytest (unit + integration) +**Target Platform**: Linux, macOS, Windows (CLI) +**Project Type**: Single project (extension of existing SDK package) +**Performance Goals**: Query results < 5s for < 1000 objects +**Constraints**: Must coexist with `infrahubctl`; shared config +**Scale/Scope**: ~10 new modules, ~1500-2000 lines of production code + +## Constitution Check + +*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.* + +| Principle | Status | Notes | +| --------- | ------ | ----- | +| I. Async/Sync Dual Pattern | PASS | CLI commands are async (via AsyncTyper). No new public SDK API surface requiring dual pattern — CLI is async-only consumer. | +| II. Type Safety | PASS | All new functions will have type hints. mypy/ty must pass. | +| III. Test Discipline | PASS | FR-015 requires unit + integration tests. 70% coverage target. | +| IV. API Stability | PASS | New entry point, no changes to existing public API. No new dependencies needed. | +| V. Documentation Completeness | PASS | Google-style docstrings required. `docs-generate` after CLI changes. | + +No violations. No complexity tracking needed. 
+ +## Project Structure + +### Documentation (this feature) + +```text +specs/001-end-user-cli/ +├── plan.md # This file +├── spec.md # Feature specification +├── research.md # Phase 0 research findings +├── data-model.md # Data model (transient structures) +├── quickstart.md # Usage quickstart guide +├── contracts/ +│ └── cli-commands.md # CLI command contracts +└── tasks.md # Phase 2 output (/speckit.tasks) +``` + +### Source Code (repository root) + +```text +infrahub_sdk/ctl/ +├── enduser_cli.py # New: main app + entry point for `infrahub` +├── enduser_commands.py # New: top-level command registration +├── commands/ +│ ├── __init__.py # New: commands package +│ ├── get.py # New: `infrahub get` command +│ ├── create.py # New: `infrahub create` command +│ ├── update.py # New: `infrahub update` command +│ ├── delete.py # New: `infrahub delete` command +│ └── schema.py # New: `infrahub schema` command group +├── formatters/ +│ ├── __init__.py # New: formatters package +│ ├── base.py # New: base formatter protocol/ABC +│ ├── table.py # New: Rich table formatter +│ ├── json.py # New: JSON formatter +│ ├── csv.py # New: CSV formatter +│ └── yaml.py # New: Infrahub Object YAML formatter +└── parsers.py # New: --set and --filter argument parsers + +tests/unit/ctl/ +├── commands/ +│ ├── __init__.py +│ ├── test_get.py # New: unit tests for get command +│ ├── test_create.py # New: unit tests for create command +│ ├── test_update.py # New: unit tests for update command +│ ├── test_delete.py # New: unit tests for delete command +│ └── test_schema.py # New: unit tests for schema commands +├── formatters/ +│ ├── __init__.py +│ ├── test_table.py # New: table formatter tests +│ ├── test_json.py # New: JSON formatter tests +│ ├── test_csv.py # New: CSV formatter tests +│ └── test_yaml.py # New: YAML formatter tests +└── test_parsers.py # New: parser tests + +tests/integration/ +└── test_enduser_cli.py # New: integration tests against Infrahub +``` + +**Structure Decision**: 
Extend `infrahub_sdk/ctl/` with a parallel entry point. +New commands go in a `commands/` subdirectory to separate end-user commands from +existing `infrahubctl` modules. Formatters are isolated in `formatters/` for +testability and reuse across commands. + +## Post-Design Constitution Re-Check + +| Principle | Status | Notes | +| --------- | ------ | ----- | +| I. Async/Sync Dual Pattern | PASS | No new public SDK API. CLI is async consumer only. | +| II. Type Safety | PASS | All modules typed. No generated code modified. | +| III. Test Discipline | PASS | Test structure mirrors source structure. Unit tests mock SDK client. Integration tests hit Infrahub. | +| IV. API Stability | PASS | New `infrahub` entry point in pyproject.toml. No existing API changes. pyyaml, rich, typer already in `[ctl]` deps. | +| V. Documentation Completeness | PASS | Each new module gets docstrings. `docs-generate` run after completion. | + +All gates pass. Ready for `/speckit.tasks`. diff --git a/specs/001-end-user-cli/quickstart.md b/specs/001-end-user-cli/quickstart.md new file mode 100644 index 00000000..2043a27b --- /dev/null +++ b/specs/001-end-user-cli/quickstart.md @@ -0,0 +1,116 @@ +# Quickstart: `infrahub` CLI + +## Prerequisites + +- Python 3.10+ +- Infrahub SDK installed with CLI extras: `pip install infrahub-sdk[ctl]` +- A running Infrahub instance + +## Configuration + +The `infrahub` command uses the same configuration as `infrahubctl`. 
+ +Set via environment variables: + +```bash +export INFRAHUB_ADDRESS="http://localhost:8000" +export INFRAHUB_API_TOKEN="your-api-token" +``` + +Or via `infrahubctl.toml`: + +```toml +[infrahub] +server_address = "http://localhost:8000" +api_token = "your-api-token" +``` + +## Discover Your Schema + +```bash +# List all available kinds +infrahub schema list + +# Filter by name +infrahub schema list --filter "Device" + +# Show details for a specific kind +infrahub schema show InfraDevice +``` + +## Query Data + +```bash +# List all devices +infrahub get InfraDevice + +# Filter by attribute +infrahub get InfraDevice --filter name__value="spine01" + +# Get a single device's full details +infrahub get InfraDevice spine01 + +# Output as JSON (for scripting) +infrahub get InfraDevice --output json + +# Export as Infrahub Object YAML (round-trippable) +infrahub get InfraDevice --output yaml > devices.yaml + +# Query a specific branch +infrahub get InfraDevice --branch develop + +# Paginate results +infrahub get InfraDevice --limit 10 --offset 20 +``` + +## Create Objects + +```bash +# Create with inline flags +infrahub create InfraDevice \ + --set name="spine03" \ + --set description="New spine switch" \ + --set site="dc1" + +# Create from a YAML file +infrahub create InfraDevice --file new-devices.yaml +``` + +## Update Objects + +```bash +# Update an attribute +infrahub update InfraDevice spine03 \ + --set description="Updated spine switch" + +# Update from file +infrahub update InfraDevice spine03 --file updates.yaml +``` + +## Delete Objects + +```bash +# Delete with confirmation prompt +infrahub delete InfraDevice spine03 + +# Skip confirmation +infrahub delete InfraDevice spine03 --yes +``` + +## Output Formats + +| Format | Flag | Use Case | +| ------ | ---- | -------- | +| Table | `--output table` | Interactive terminal (default) | +| JSON | `--output json` | Scripting, piping (default when piped) | +| CSV | `--output csv` | Spreadsheet import, data analysis | +| 
YAML | `--output yaml` | Backup, round-trip with `--file` |
+
+## Validation
+
+To verify the CLI is working:
+
+1. `infrahub schema list` — confirms connection and authentication
+2. `infrahub get <kind>` — confirms data access
+3. `infrahub get <kind> --output yaml > test.yaml` then
+   `infrahub create <kind> --file test.yaml` — confirms round-trip
diff --git a/specs/001-end-user-cli/research.md b/specs/001-end-user-cli/research.md
new file mode 100644
index 00000000..db3195c1
--- /dev/null
+++ b/specs/001-end-user-cli/research.md
@@ -0,0 +1,108 @@
+# Research: End-User CLI (infrahubctl CRUD commands)
+
+## R1: Entry Point & Packaging Strategy
+
+**Decision**: Register new commands (`get`, `create`, `update`, `delete`) as top-level
+commands on the existing `infrahubctl` app in `infrahub_sdk/ctl/cli_commands.py`, and
+add `schema list` / `schema show` to the existing `infrahubctl schema` subgroup.
+
+**Rationale**: The existing `infrahubctl` entry point already has the CLI framework,
+configuration, authentication, and client initialization. Adding commands to it avoids
+a separate entry point and keeps the user experience unified under one tool.
+
+**Alternatives considered**:
+
+- Separate `infrahub` entry point: initially implemented, then reversed — added
+  unnecessary complexity and user confusion with two CLI tools
+- Separate Python package: rejected — duplicates config/client code, complicates releases
+
+## R2: CLI Framework & Async Pattern
+
+**Decision**: Use `AsyncTyper` (existing wrapper at `infrahub_sdk/async_typer.py`) with
+Rich console output, matching the `infrahubctl` patterns exactly.
+
+**Rationale**: The project already has a proven async CLI pattern. AsyncTyper wraps
+`asyncio.run()` around async command functions. All existing utilities (`catch_exception`,
+`initialize_client`, `CONFIG_PARAM`) work with this pattern. 
+
+**Alternatives considered**:
+
+- Click directly: rejected — less ergonomic, would diverge from existing patterns
+- Sync-only CLI: rejected — SDK client methods are async-first
+
+## R3: Query Implementation
+
+**Decision**: Use `client.all()` for list queries and `client.get()` for single-object
+detail view. Filters pass through as `**kwargs` to `client.filters()`.
+
+**Rationale**: `client.all()` wraps `client.filters()` internally and supports
+`offset`, `limit`, `prefetch_relationships`, `include`, `exclude`, and `order`
+parameters. Filter syntax is `attribute__value="x"` or `relationship__id="uuid"`.
+Pagination is handled automatically with `client.pagination_size`.
+
+**Key findings**:
+
+- `node.display_label` provides the human-readable name for table display
+- `node.<attribute_name>.value` accesses attribute values
+- `schema.attribute_names` and `schema.relationship_names` enumerate fields
+- `schema.display_labels` identifies which attributes form the display label
+
+## R4: Object YAML Round-Trip Format
+
+**Decision**: Reuse the existing `InfrahubObjectFileData` model from
+`infrahub_sdk/spec/object.py` for file input. For YAML output, build the reverse:
+serialize query results into the same `apiVersion: infrahub.app/v1` / `kind: Object`
+structure.
+
+**Rationale**: The spec object format is already defined with Pydantic models. Input
+parsing uses `ObjectFile.load_from_disk()` → `InfrahubObjectFileData.process()`. The
+reverse direction needs a serializer that walks node attributes and relationships to
+produce the same dict structure. 
+ +**Key classes**: + +- `InfrahubObjectFileData` — spec model with `kind`, `parameters`, `data` fields +- `ObjectFile` — file wrapper with `validate_content()` and `process()` methods +- Relationship formats: `ONE_REF`, `MANY_REF`, `ONE_OBJ`, `MANY_OBJ_DICT_LIST` + +## R5: Schema Discovery + +**Decision**: Use `client.schema.all(branch=branch)` for listing kinds and +`client.schema.get(kind=kind, branch=branch)` for kind details. + +**Rationale**: Schema API returns `NodeSchemaAPI` / `GenericSchemaAPI` objects with +`attribute_names`, `relationship_names`, `mandatory_input_names`, `display_labels`, +`human_friendly_id`, `namespace`, `label`, and `description` properties. + +## R6: Create/Update/Delete Implementation + +**Decision**: Use existing SDK CRUD methods: + +- Create: `client.create(kind=kind, data=data)` → `node.save(allow_upsert=True)` +- Update: `client.get(kind=kind, id=identifier)` → modify attrs → `node.save()` +- Delete: `client.get(kind=kind, id=identifier)` → `node.delete()` + +**Rationale**: These are the standard SDK patterns used by `infrahubctl` commands. +The `--set` flag maps directly to the `data` dict passed to `client.create()` or +applied to node attributes before `node.save()`. + +**Key detail**: Identifier resolution accepts both UUID and HFID (human-friendly ID) +via the `id` parameter of `client.get()`. + +## R7: Configuration Reuse + +**Decision**: Reuse `infrahub_sdk/ctl/config.py` and `CONFIG_PARAM` from +`infrahub_sdk/ctl/parameters.py` directly. + +**Rationale**: The `Settings` class reads from `infrahubctl.toml` (or +`INFRAHUBCTL_CONFIG` env var) with `server_address`, `api_token`, and +`default_branch`. No new configuration mechanism needed. + +## R8: Output Formatting + +**Decision**: Implement four output formatters: table (Rich), JSON, CSV, YAML. +Auto-detect: table when stdout is a TTY, JSON when piped. + +**Rationale**: Rich is already a dependency. JSON uses stdlib `json`. CSV uses stdlib +`csv`. 
YAML uses `pyyaml` (already in `[ctl]` deps). The auto-detect pattern +(`sys.stdout.isatty()`) is standard CLI practice. diff --git a/specs/001-end-user-cli/spec.md b/specs/001-end-user-cli/spec.md new file mode 100644 index 00000000..41363415 --- /dev/null +++ b/specs/001-end-user-cli/spec.md @@ -0,0 +1,152 @@ +# Feature Specification: End-User CLI (`infrahubctl` CRUD commands) + +**Feature Branch**: `001-end-user-cli` +**Created**: 2026-03-28 +**Status**: Draft +**Input**: User description: "Add CRUD and schema discovery commands to `infrahubctl` for end users to query, create, and modify data in the Infrahub database." + +## User Scenarios & Testing *(mandatory)* + +### User Story 1 - Query Data from Infrahub (Priority: P1) + +An end user wants to retrieve data from Infrahub to answer operational questions. They open a terminal, run a command specifying the type of object they want (e.g., devices, interfaces, IP addresses), and receive a formatted table of results. They can filter results by attribute values and choose output formats (table, JSON, CSV) depending on whether they are reading interactively or piping to another tool. + +**Why this priority**: Reading data is the most fundamental operation. Without query capability, no other CRUD operations provide value. This is also the lowest-risk operation (read-only) and serves the widest audience. + +**Independent Test**: Can be fully tested by querying any existing node type in an Infrahub instance and verifying correct output. Delivers immediate value for operational visibility. + +**Acceptance Scenarios**: + +1. **Given** a running Infrahub instance with data, **When** the user runs `infrahubctl get `, **Then** a formatted table of all objects of that kind is displayed with attribute columns and relationship columns (showing display names). +2. 
**Given** a running Infrahub instance, **When** the user runs `infrahubctl get <kind> --filter name__value="spine01"`, **Then** only objects matching the filter are returned.
+3. **Given** a running Infrahub instance, **When** the user runs `infrahubctl get <kind> --output json`, **Then** the results are printed as valid JSON to stdout.
+4. **Given** a running Infrahub instance, **When** the user runs `infrahubctl get <kind> --output yaml`, **Then** the results are printed in Infrahub Object YAML format (with `apiVersion: infrahub.app/v1`, `kind: Object`, `spec.kind`, and `spec.data` array), suitable for round-tripping back into `infrahubctl create --file`.
+5. **Given** an Infrahub instance, **When** the user runs `infrahubctl get <kind> --branch develop`, **Then** data from the specified branch is returned.
+6. **Given** an invalid kind name, **When** the user runs `infrahubctl get UnknownKind`, **Then** a clear error message is displayed listing available kinds or suggesting corrections.
+7. **Given** an existing object, **When** the user runs `infrahubctl get <kind> <identifier>`, **Then** a detailed view is displayed showing all attributes, relationships, and metadata for that single object.
+
+---
+
+### User Story 2 - Create New Objects (Priority: P2)
+
+An end user needs to add new infrastructure data to Infrahub. They run a command specifying the object kind and its attribute values, and the system creates the object and confirms success. They can also create objects from a file (JSON or YAML) for batch operations.
+
+**Why this priority**: After querying, creation is the next most common operation. Users need to populate Infrahub with data. This is a natural progression from read to write.
+
+**Independent Test**: Can be tested by creating an object of any kind and then querying it back to verify it exists with correct attributes.
+
+**Acceptance Scenarios**:
+
+1. 
**Given** a running Infrahub instance, **When** the user runs `infrahubctl create --set name="spine03" --set description="New spine switch"`, **Then** the object is created and a confirmation with the object ID is displayed. +2. **Given** a YAML file with object definitions, **When** the user runs `infrahubctl create --file objects.yaml`, **Then** all objects in the file are created and a summary of results (created count, errors) is displayed. +3. **Given** invalid attribute or relationship names, **When** the user runs `infrahubctl create --set invalid_field="value"`, **Then** a clear validation error is displayed indicating which fields are invalid and what the valid attributes and relationships are. + +--- + +### User Story 3 - Modify Existing Objects (Priority: P3) + +An end user needs to update attributes on existing infrastructure objects. They identify the object by kind and name (or ID), specify the attributes to change, and the system applies the update and confirms. + +**Why this priority**: Modification completes the core CRUD workflow. Users who can query and create also need to update existing records as infrastructure changes. + +**Independent Test**: Can be tested by modifying an attribute on an existing object and querying it back to verify the change persists. + +**Acceptance Scenarios**: + +1. **Given** an existing object, **When** the user runs `infrahubctl update --set description="Updated description"`, **Then** the object is updated and a confirmation is displayed showing old and new values. +2. **Given** an existing object, **When** the user runs `infrahubctl update --file updates.yaml`, **Then** the object is updated from the file contents. +3. **Given** a non-existent object identifier, **When** the user runs `infrahubctl update nonexistent`, **Then** a clear error message indicates the object was not found. + +--- + +### User Story 4 - Delete Objects (Priority: P4) + +An end user needs to remove obsolete infrastructure data from Infrahub. 
They specify the object to delete by kind and identifier, confirm the deletion, and the system removes it. + +**Why this priority**: Deletion is the least frequent CRUD operation and the most dangerous. It completes the full lifecycle but is lower priority than the core read/create/update loop. + +**Independent Test**: Can be tested by creating an object, deleting it, and confirming it no longer appears in query results. + +**Acceptance Scenarios**: + +1. **Given** an existing object, **When** the user runs `infrahubctl delete `, **Then** a confirmation prompt is shown, and upon confirmation the object is deleted with a success message. +2. **Given** an existing object, **When** the user runs `infrahubctl delete --yes`, **Then** the object is deleted without a confirmation prompt. +3. **Given** an object with dependencies, **When** the user attempts to delete it, **Then** a clear error message explains what depends on it and how to resolve the conflict. + +--- + +### User Story 5 - Discover Available Schema (Priority: P5) + +An end user unfamiliar with the data model wants to explore what kinds of objects exist in Infrahub and what attributes each kind has. They run a command to list available kinds and inspect their schema. + +**Why this priority**: Schema discovery supports all other operations. Without knowing what kinds and attributes exist, users cannot effectively query, create, or update. However, this is a supporting operation, not a core data operation. + +**Independent Test**: Can be tested by listing schema kinds and inspecting a known kind's attributes against the actual schema definition. + +**Acceptance Scenarios**: + +1. **Given** a running Infrahub instance, **When** the user runs `infrahubctl schema list`, **Then** a table of all available kinds is displayed with their namespace, name, and description. +2. 
**Given** a valid kind name, **When** the user runs `infrahubctl schema show <kind>`, **Then** the kind's attributes, relationships, and constraints are displayed in a readable format.
+3. **Given** a partial kind name, **When** the user runs `infrahubctl schema list --filter "Network"`, **Then** only kinds matching the filter are shown.
+
+---
+
+### Edge Cases
+
+- What happens when the Infrahub server is unreachable? Clear connection error with the configured server address shown.
+- What happens when the API token is missing or expired? Authentication error with instructions on how to configure credentials.
+- What happens when the user queries a kind with thousands of objects? Results are paginated with a default limit and the user is informed of total count.
+- What happens when a create/update operation partially fails in batch mode? A detailed report shows which objects succeeded and which failed, with per-object error messages.
+- What happens when the user provides attributes in the wrong format? Validation error specifying expected format for each attribute.
+
+## Clarifications
+
+### Session 2026-03-28
+
+- Q: How should users specify relationships in create/update commands? → A: Unified `--set` flag for both attributes and relationships (e.g., `--set name="x" --set site="my-site"`).
+- Q: Should there be a single-object detail view? → A: `infrahubctl get <kind> <identifier>` shows a detail view with all attributes, relationships, and metadata.
+- Q: How should relationships appear in list/table output? → A: Show as columns with their display name (e.g., site column shows "my-site"). Full relationship detail in detail view only.
+
+## Requirements *(mandatory)*
+
+### Functional Requirements
+
+- **FR-001**: The system MUST provide CRUD and schema discovery commands within `infrahubctl`.
+- **FR-002**: The system MUST support querying objects by kind with `infrahubctl get <kind>` (list view) and `infrahubctl get <kind> <identifier>` (detail view showing all attributes, relationships, and metadata). 
+- **FR-003**: The system MUST support filtering query results by attribute values. +- **FR-004**: The system MUST support multiple output formats: human-readable table (default), JSON, CSV, and Infrahub Object YAML (`--output yaml`). The YAML format MUST use the Infrahub spec object structure (`apiVersion: infrahub.app/v1`, `kind: Object`, with `spec.kind` and `spec.data` fields), matching the format used by `infrahubctl create --file`. +- **FR-005**: The system MUST support creating objects with `infrahubctl create ` using inline `--set` flags (for both attributes and relationships) or file input. +- **FR-006**: The system MUST support updating objects with `infrahubctl update ` using inline `--set` flags (for both attributes and relationships) or file input. +- **FR-007**: The system MUST support deleting objects with `infrahubctl delete ` with confirmation. +- **FR-008**: The system MUST support schema discovery with `infrahubctl schema list` and `infrahubctl schema show `. +- **FR-009**: The system MUST support specifying a target branch for all operations via `--branch`. +- **FR-010**: The system MUST reuse the existing SDK configuration mechanism (server address, API token) from `infrahubctl.toml` or environment variables. +- **FR-011**: The system MUST display clear, actionable error messages for all failure modes (connection, authentication, validation, not found). +- **FR-012**: The system MUST paginate large result sets with configurable page size via `--limit` and `--offset`. +- **FR-013**: The system MUST support batch operations from file input (JSON or YAML) for create and update commands. +- **FR-014**: The system MUST provide a `--yes` flag to skip confirmation prompts for destructive operations. +- **FR-015**: All new code MUST have unit tests covering public functions and integration tests covering Infrahub server interactions, consistent with the project's test discipline standards. 
+ +### Key Entities + +- **Kind**: A type definition in the Infrahub schema (e.g., InfraDevice, IpamIPAddress). Has a namespace, name, attributes, and relationships. +- **Node**: An instance of a Kind stored in Infrahub. Has an ID, attribute values, and relationship connections. +- **Attribute**: A named property on a Kind with a type, optional constraints, and a value on each Node. +- **Relationship**: A typed connection between two Nodes, defined in the schema with cardinality and direction. + +## Assumptions + +- The `infrahub` command shares the same configuration file and environment variables as `infrahubctl` (no separate config needed). +- Object identifiers in update/delete commands accept either the object's display name or its UUID. +- The default output format for interactive terminals is a human-readable table; when stdout is piped, JSON is used automatically. +- Batch file input supports both JSON and YAML formats with the same schema. + +## Success Criteria *(mandatory)* + +### Measurable Outcomes + +- **SC-001**: Users can query any object kind and receive formatted results within 5 seconds for datasets under 1000 objects. +- **SC-002**: Users can create a single object in under 3 commands (configure once, then one create command). +- **SC-003**: 90% of first-time users can successfully query data without consulting documentation beyond `--help`. +- **SC-004**: All error messages include a suggested corrective action (not just a failure description). +- **SC-005**: The CLI supports all CRUD operations and schema discovery as a single installable command alongside `infrahubctl`. 
diff --git a/specs/001-end-user-cli/tasks.md b/specs/001-end-user-cli/tasks.md new file mode 100644 index 00000000..48e1ac6e --- /dev/null +++ b/specs/001-end-user-cli/tasks.md @@ -0,0 +1,277 @@ +# Tasks: End-User CLI (`infrahub` command) + +**Input**: Design documents from `/specs/001-end-user-cli/` +**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/ + +**Tests**: Tests are REQUIRED per FR-015. Unit tests for all public functions, integration tests for Infrahub server interactions. + +**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story. + +## Format: `[ID] [P?] [Story] Description` + +- **[P]**: Can run in parallel (different files, no dependencies) +- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3) +- Include exact file paths in descriptions + +## Path Conventions + +- **Source**: `infrahub_sdk/ctl/` (extends existing CLI package) +- **Unit tests**: `tests/unit/ctl/` +- **Integration tests**: `tests/integration/` + +--- + +## Phase 1: Setup (Shared Infrastructure) + +**Purpose**: Create the `infrahub` entry point and package structure + +- [x] T001 Add `infrahub` entry point to `[project.scripts]` in pyproject.toml pointing to `infrahub_sdk.ctl.enduser_cli:app` +- [x] T002 Create CLI entry point module in infrahub_sdk/ctl/enduser_cli.py with AsyncTyper app and error-handling wrapper (matching infrahub_sdk/ctl/cli.py pattern) +- [x] T003 [P] Create commands package with infrahub_sdk/ctl/commands/\_\_init\_\_.py +- [x] T004 [P] Create formatters package with infrahub_sdk/ctl/formatters/\_\_init\_\_.py + +--- + +## Phase 2: Foundational (Blocking Prerequisites) + +**Purpose**: Core infrastructure reused by ALL user story commands + +**CRITICAL**: No user story work can begin until this phase is complete + +- [x] T005 Implement `--set` flag parser (parse `key=value` strings into dict) in infrahub_sdk/ctl/parsers.py +- 
[x] T006 Implement `--filter` flag parser (parse `attr__value=x` strings into kwargs dict) in infrahub_sdk/ctl/parsers.py +- [x] T007 Implement output format auto-detection (TTY → table, piped → json) and `OutputFormat` enum in infrahub_sdk/ctl/formatters/\_\_init\_\_.py +- [x] T008 [P] Implement base formatter protocol with `format_list()` and `format_detail()` methods in infrahub_sdk/ctl/formatters/base.py +- [x] T009 [P] Implement Rich table formatter (list view: attribute + relationship columns with display names; detail view: key-value pairs) in infrahub_sdk/ctl/formatters/table.py +- [x] T010 [P] Implement JSON formatter (list and detail mode) in infrahub_sdk/ctl/formatters/json.py +- [x] T011 [P] Implement CSV formatter (list mode; detail mode falls back to key-value) in infrahub_sdk/ctl/formatters/csv.py +- [x] T012 [P] Implement Infrahub Object YAML formatter (serialize nodes to apiVersion/kind/spec.kind/spec.data structure, round-trippable with ObjectFile) in infrahub_sdk/ctl/formatters/yaml.py +- [x] T013 Create command registration module in infrahub_sdk/ctl/enduser_commands.py (register all command groups on the app) +- [x] T014 [P] Write unit tests for set/filter parsers in tests/unit/ctl/test_parsers.py +- [x] T015 [P] Write unit tests for table formatter in tests/unit/ctl/formatters/test_table.py +- [x] T016 [P] Write unit tests for JSON formatter in tests/unit/ctl/formatters/test_json.py +- [x] T017 [P] Write unit tests for CSV formatter in tests/unit/ctl/formatters/test_csv.py +- [x] T018 [P] Write unit tests for YAML formatter (verify round-trip structure matches InfrahubObjectFileData) in tests/unit/ctl/formatters/test_yaml.py + +**Checkpoint**: Foundation ready - all formatters, parsers, and app skeleton in place. User story commands can now be implemented. 
+ +--- + +## Phase 3: User Story 1 - Query Data (Priority: P1) MVP + +**Goal**: Users can retrieve data from Infrahub with `infrahub get ` (list) and `infrahub get ` (detail), with filtering, pagination, and all output formats. + +**Independent Test**: Run `infrahub get ` against an Infrahub instance and verify formatted output. Test all four output formats. Test `--filter`, `--limit`, `--offset`, `--branch`. + +### Tests for User Story 1 + +- [x] T019 [P] [US1] Write unit tests for get command (list mode, detail mode, invalid kind error, filter passthrough, pagination args, output format selection) in tests/unit/ctl/commands/test_get.py +- [x] T020 [P] [US1] Write integration test for get command (query real data, verify table/json/yaml/csv output) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 1 + +- [x] T021 [US1] Implement `infrahub get` command with list mode (`client.filters()` with kwargs from --filter, --limit, --offset, --branch) and detail mode (`client.get()` with identifier) in infrahub_sdk/ctl/commands/get.py +- [x] T022 [US1] Wire get command into enduser_commands.py and verify `infrahub get` works end-to-end +- [x] T023 [US1] Add error handling for invalid kind (suggest similar kinds from schema), not-found identifier, and connection failures in infrahub_sdk/ctl/commands/get.py + +**Checkpoint**: `infrahub get` fully functional with all output formats, filtering, pagination, detail view. MVP complete. + +--- + +## Phase 4: User Story 2 - Create Objects (Priority: P2) + +**Goal**: Users can create new objects with `infrahub create --set key=value` or `infrahub create --file objects.yaml`. + +**Independent Test**: Create an object via `--set` flags, then verify it exists with `infrahub get`. Create objects from a YAML file and verify batch results. 
+ +### Tests for User Story 2 + +- [x] T024 [P] [US2] Write unit tests for create command (inline --set, file input, mutual exclusivity of --set/--file, validation errors, batch summary) in tests/unit/ctl/commands/test_create.py +- [x] T025 [P] [US2] Write integration test for create command (create via --set, create via --file, verify with get) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 2 + +- [x] T026 [US2] Implement `infrahub create` command with inline mode (`client.create()` + `node.save()` using parsed --set data) and file mode (load via ObjectFile, validate, process) in infrahub_sdk/ctl/commands/create.py +- [x] T027 [US2] Wire create command into enduser_commands.py +- [x] T028 [US2] Add validation error handling (invalid fields → show valid attribute/relationship names from schema) and batch result summary in infrahub_sdk/ctl/commands/create.py + +**Checkpoint**: `infrahub create` works with both inline and file input. Users can create and then query back objects. + +--- + +## Phase 5: User Story 3 - Update Objects (Priority: P3) + +**Goal**: Users can update existing objects with `infrahub update --set key=value` or `--file`. + +**Independent Test**: Update an attribute on an existing object, then query it to verify the change. Show old vs new values in confirmation. 
+
+### Tests for User Story 3
+
+- [x] T029 [P] [US3] Write unit tests for update command (inline --set, file input, not-found error, old/new value display) in tests/unit/ctl/commands/test_update.py
+- [x] T030 [P] [US3] Write integration test for update command (update attribute, verify change persists) in tests/integration/test_enduser_cli.py
+
+### Implementation for User Story 3
+
+- [x] T031 [US3] Implement `infrahub update` command (`client.get()` to fetch node, apply --set changes to attributes/relationships, `node.save()`, display old → new values) in infrahub_sdk/ctl/commands/update.py
+- [x] T032 [US3] Wire update command into enduser_commands.py
+- [x] T033 [US3] Add file-based update mode and not-found error handling in infrahub_sdk/ctl/commands/update.py
+
+**Checkpoint**: `infrahub update` works. Full create → query → update → query cycle verified.
+
+---
+
+## Phase 6: User Story 4 - Delete Objects (Priority: P4)
+
+**Goal**: Users can delete objects with `infrahub delete <kind> <identifier>` with confirmation prompt and `--yes` bypass.
+
+**Independent Test**: Create an object, delete it (with and without --yes), verify it no longer appears in query results. 
+
+### Tests for User Story 4
+
+- [x] T034 [P] [US4] Write unit tests for delete command (confirmation prompt, --yes bypass, not-found, dependency conflict error) in tests/unit/ctl/commands/test_delete.py
+- [x] T035 [P] [US4] Write integration test for delete command (create, delete, verify gone) in tests/integration/test_enduser_cli.py
+
+### Implementation for User Story 4
+
+- [x] T036 [US4] Implement `infrahub delete` command (`client.get()` to fetch, confirmation prompt via typer.confirm(), `node.delete()`, --yes flag to skip) in infrahub_sdk/ctl/commands/delete.py
+- [x] T037 [US4] Wire delete command into enduser_commands.py
+- [x] T038 [US4] Add dependency conflict error handling (catch server error, display dependent objects) in infrahub_sdk/ctl/commands/delete.py
+
+**Checkpoint**: Full CRUD cycle complete. All data operations functional.
+
+---
+
+## Phase 7: User Story 5 - Schema Discovery (Priority: P5)
+
+**Goal**: Users can explore the data model with `infrahub schema list` and `infrahub schema show <kind>`.
+
+**Independent Test**: List all schema kinds, verify output matches actual schema. Show a specific kind's attributes and relationships. 
+ +### Tests for User Story 5 + +- [x] T039 [P] [US5] Write unit tests for schema list and schema show commands (list with filter, show with valid/invalid kind, attribute/relationship table output) in tests/unit/ctl/commands/test_schema.py +- [x] T040 [P] [US5] Write integration test for schema commands (list against real instance, show known kind) in tests/integration/test_enduser_cli.py + +### Implementation for User Story 5 + +- [x] T041 [US5] Implement `infrahub schema list` command (`client.schema.all()`, filter by substring, display table with Namespace/Name/Kind/Description columns) in infrahub_sdk/ctl/commands/schema.py +- [x] T042 [US5] Implement `infrahub schema show ` command (`client.schema.get()`, display metadata + attributes table + relationships table) in infrahub_sdk/ctl/commands/schema.py +- [x] T043 [US5] Wire schema command group into enduser_commands.py + +**Checkpoint**: All 5 user stories complete. Full CLI feature set available. + +--- + +## Phase 8: Polish & Cross-Cutting Concerns + +**Purpose**: Quality gates, documentation, and validation + +- [x] T044 Run `uv run invoke format` and `uv run invoke lint-code` across all new files +- [x] T045 Run `uv run pytest tests/unit/ctl/` to verify all unit tests pass +- [x] T046 Run `uv run invoke docs-generate` and `uv run invoke docs-validate` to update CLI documentation +- [x] T047 Verify type checking passes: `uv run invoke lint` (includes mypy and ty) +- [x] T048 Run quickstart.md validation: manually execute the quickstart steps against a test instance +- [x] T049 [P] Add Google-style docstrings to all new modules, classes, and public functions if not already present + +--- + +## Dependencies & Execution Order + +### Phase Dependencies + +- **Setup (Phase 1)**: No dependencies - can start immediately +- **Foundational (Phase 2)**: Depends on Setup (T001-T004) - BLOCKS all user stories +- **User Stories (Phase 3-7)**: All depend on Foundational phase completion + - US1 (get) has no dependencies 
on other stories + - US2 (create) has no dependencies on other stories (reuses parsers from Phase 2) + - US3 (update) has no dependencies on other stories + - US4 (delete) has no dependencies on other stories + - US5 (schema) has no dependencies on other stories +- **Polish (Phase 8)**: Depends on all user stories being complete + +### User Story Dependencies + +- **US1 (P1)**: Can start after Phase 2. No cross-story dependencies. +- **US2 (P2)**: Can start after Phase 2. Independent of US1 (uses same parsers/formatters). +- **US3 (P3)**: Can start after Phase 2. Independent of US1/US2. +- **US4 (P4)**: Can start after Phase 2. Independent of US1/US2/US3. +- **US5 (P5)**: Can start after Phase 2. Independent of all other stories. + +### Within Each User Story + +- Unit tests written first (marked [P] where independent) +- Command implementation after tests exist +- Integration into enduser_commands.py after command works +- Error handling as final step in each story + +### Parallel Opportunities + +- T003, T004: Package init files can be created in parallel +- T008-T012: All formatters can be implemented in parallel (different files) +- T014-T018: All foundational unit tests can run in parallel +- T019, T020: US1 tests can be written in parallel +- T024, T025: US2 tests can be written in parallel +- After Phase 2, all user stories (Phase 3-7) can proceed in parallel + +--- + +## Parallel Example: Foundational Phase + +```text +# Launch all formatters in parallel (different files, no dependencies): +Task: T008 "Base formatter protocol in infrahub_sdk/ctl/formatters/base.py" +Task: T009 "Rich table formatter in infrahub_sdk/ctl/formatters/table.py" +Task: T010 "JSON formatter in infrahub_sdk/ctl/formatters/json.py" +Task: T011 "CSV formatter in infrahub_sdk/ctl/formatters/csv.py" +Task: T012 "YAML formatter in infrahub_sdk/ctl/formatters/yaml.py" + +# Launch all formatter tests in parallel: +Task: T015 "Table formatter tests in 
tests/unit/ctl/formatters/test_table.py" +Task: T016 "JSON formatter tests in tests/unit/ctl/formatters/test_json.py" +Task: T017 "CSV formatter tests in tests/unit/ctl/formatters/test_csv.py" +Task: T018 "YAML formatter tests in tests/unit/ctl/formatters/test_yaml.py" +``` + +--- + +## Implementation Strategy + +### MVP First (User Story 1 Only) + +1. Complete Phase 1: Setup (T001-T004) +2. Complete Phase 2: Foundational (T005-T018) +3. Complete Phase 3: User Story 1 - Query (T019-T023) +4. **STOP and VALIDATE**: `infrahub get ` works with all output formats +5. Demo/review if ready + +### Incremental Delivery + +1. Setup + Foundational → CLI skeleton with formatters ready +2. Add US1 (Query) → MVP: read-only data access +3. Add US2 (Create) → Users can populate data +4. Add US3 (Update) → Full data management +5. Add US4 (Delete) → Complete CRUD lifecycle +6. Add US5 (Schema) → Self-service discovery +7. Polish → Production-ready + +### Parallel Agent Strategy + +With multiple agents: + +1. Complete Setup + Foundational together +2. Once Foundational is done, dispatch in parallel: + - Agent A: US1 (Query) + US5 (Schema) — both read-only + - Agent B: US2 (Create) + US3 (Update) — both write operations + - Agent C: US4 (Delete) — standalone +3. All stories integrate independently via enduser_commands.py + +--- + +## Notes + +- [P] tasks = different files, no dependencies +- [Story] label maps task to specific user story for traceability +- Each user story is independently completable and testable +- Unit tests mock the SDK client; integration tests hit a real Infrahub instance +- Commit after each phase or logical task group +- Stop at any checkpoint to validate independently diff --git a/tests/integration/test_enduser_cli.py b/tests/integration/test_enduser_cli.py new file mode 100644 index 00000000..4e90ba9c --- /dev/null +++ b/tests/integration/test_enduser_cli.py @@ -0,0 +1,224 @@ +"""Integration tests for the ``infrahub`` end-user CLI. 
+ +Requires a running Infrahub instance with the TestingAnimal schema loaded. +Uses the same ``TestInfrahubDockerClient`` + ``SchemaAnimal`` fixtures as +the existing ``test_infrahubctl.py`` integration tests. +""" + +from __future__ import annotations + +import json +import os +from typing import TYPE_CHECKING + +import pytest +import yaml +from typer.testing import CliRunner + +from infrahub_sdk.ctl import config +from infrahub_sdk.ctl.cli_commands import app +from infrahub_sdk.ctl.parameters import load_configuration +from infrahub_sdk.testing.docker import TestInfrahubDockerClient +from infrahub_sdk.testing.schemas.animal import SchemaAnimal + +if TYPE_CHECKING: + from collections.abc import Generator + + from infrahub_sdk import InfrahubClient + from infrahub_sdk.node import InfrahubNode + +runner = CliRunner() + + +class _EnduserCliBase(TestInfrahubDockerClient, SchemaAnimal): + """Shared fixtures for end-user CLI integration tests.""" + + @pytest.fixture(scope="class") + async def base_dataset( + self, + client: InfrahubClient, + load_schema: None, + person_liam: InfrahubNode, + person_ethan: InfrahubNode, + person_sophia: InfrahubNode, + cat_luna: InfrahubNode, + cat_bella: InfrahubNode, + dog_daisy: InfrahubNode, + dog_rocky: InfrahubNode, + ctl_client_config: Generator[None, None, None], + ) -> None: + """Ensure schema and test data are loaded before running tests.""" + + @pytest.fixture(scope="class") + def ctl_client_config(self, client: InfrahubClient) -> Generator[None, None, None]: + """Configure the CLI to talk to the test Infrahub instance.""" + load_configuration(value="infrahubctl.toml") + assert config.SETTINGS._settings + config.SETTINGS._settings.server_address = client.config.address + original_username = os.environ.get("INFRAHUB_USERNAME") + original_password = os.environ.get("INFRAHUB_PASSWORD") + if client.config.username and client.config.password: + os.environ["INFRAHUB_USERNAME"] = client.config.username + 
os.environ["INFRAHUB_PASSWORD"] = client.config.password + yield + if original_username: + os.environ["INFRAHUB_USERNAME"] = original_username + if original_password: + os.environ["INFRAHUB_PASSWORD"] = original_password + + +class TestEnduserCliRead(_EnduserCliBase): + """Read-only CLI tests: version, schema discovery, and get queries.""" + + def test_version(self) -> None: + """Verify the version subcommand works without a server.""" + result = runner.invoke(app, ["version"]) + assert result.exit_code == 0 + assert "SDK" in result.stdout + + def test_schema_list(self, base_dataset: None) -> None: + """List schema kinds and verify TestingPerson is present.""" + result = runner.invoke(app, ["schema", "list"]) + assert result.exit_code == 0 + assert "TestingPerson" in result.stdout + + def test_schema_list_with_filter(self, base_dataset: None) -> None: + """Filter schema list by substring.""" + result = runner.invoke(app, ["schema", "list", "--filter", "Dog"]) + assert result.exit_code == 0 + assert "TestingDog" in result.stdout + assert "TestingCat" not in result.stdout + + def test_schema_show(self, base_dataset: None) -> None: + """Show details of a schema kind including attributes and relationships.""" + result = runner.invoke(app, ["schema", "show", "TestingPerson"]) + assert result.exit_code == 0 + assert "TestingPerson" in result.stdout + assert "name" in result.stdout + assert "height" in result.stdout + assert "animals" in result.stdout + + def test_get_list_table(self, base_dataset: None) -> None: + """Query all persons and verify table output contains known names.""" + result = runner.invoke(app, ["get", "TestingPerson"]) + assert result.exit_code == 0 + assert "Ethan Carter" in result.stdout + assert "Liam Walker" in result.stdout + + def test_get_list_json(self, base_dataset: None) -> None: + """Query all persons with JSON output and verify valid JSON array.""" + result = runner.invoke(app, ["get", "TestingPerson", "--output", "json"]) + assert 
result.exit_code == 0 + data = json.loads(result.stdout) + assert isinstance(data, list) + assert len(data) >= 3 + names = [item.get("name", "") for item in data] + assert "Ethan Carter" in names + + def test_get_list_csv(self, base_dataset: None) -> None: + """Query all persons with CSV output.""" + result = runner.invoke(app, ["get", "TestingPerson", "--output", "csv"]) + assert result.exit_code == 0 + assert "name" in result.stdout + assert "Ethan Carter" in result.stdout + + def test_get_list_yaml(self, base_dataset: None) -> None: + """Query all persons with YAML output in Infrahub object format.""" + result = runner.invoke(app, ["get", "TestingPerson", "--output", "yaml"]) + assert result.exit_code == 0 + doc = yaml.safe_load(result.stdout) + assert doc["apiVersion"] == "infrahub.app/v1" + assert doc["kind"] == "Object" + assert doc["spec"]["kind"] == "TestingPerson" + assert isinstance(doc["spec"]["data"], list) + names = [item.get("name", "") for item in doc["spec"]["data"]] + assert "Ethan Carter" in names + + def test_get_list_with_filter(self, base_dataset: None) -> None: + """Query persons filtered by name.""" + result = runner.invoke(app, ["get", "TestingPerson", "--filter", "name__value=Liam Walker", "--output", "json"]) + assert result.exit_code == 0 + data = json.loads(result.stdout) + assert len(data) == 1 + assert data[0]["name"] == "Liam Walker" + + def test_get_list_with_limit(self, base_dataset: None) -> None: + """Query persons with a limit on results.""" + result = runner.invoke(app, ["get", "TestingPerson", "--limit", "1", "--output", "json"]) + assert result.exit_code == 0 + data = json.loads(result.stdout) + assert len(data) == 1 + + def test_get_detail(self, base_dataset: None) -> None: + """Get detail view of a single person by display name.""" + result = runner.invoke(app, ["get", "TestingPerson", "Ethan Carter"]) + assert result.exit_code == 0 + assert "Ethan Carter" in result.stdout + assert "185" in result.stdout + + def 
test_get_detail_json(self, base_dataset: None) -> None: + """Get detail view in JSON format.""" + result = runner.invoke(app, ["get", "TestingPerson", "Ethan Carter", "--output", "json"]) + assert result.exit_code == 0 + data = json.loads(result.stdout) + assert data["kind"] == "TestingPerson" + assert data["display_label"] + + def test_get_invalid_kind(self, base_dataset: None) -> None: + """Querying an invalid kind returns an error.""" + result = runner.invoke(app, ["get", "NonExistentKind"]) + assert result.exit_code != 0 + + +class TestEnduserCliWrite(_EnduserCliBase): + """Write CLI tests: create, update, delete operations.""" + + def test_create_inline(self, base_dataset: None) -> None: + """Create a person using inline --set flags.""" + result = runner.invoke( + app, + ["create", "TestingPerson", "--set", "name=Integration Test Person", "--set", "height=190"], + ) + assert result.exit_code == 0, f"create failed: {result.output}" + assert "Created" in result.stdout + + async def test_create_inline_verify(self, base_dataset: None, client: InfrahubClient) -> None: + """Verify the object created by test_create_inline exists.""" + node = await client.get(kind="TestingPerson", id="Integration Test Person") + assert node.name.value == "Integration Test Person" # type: ignore[union-attr] + assert node.height.value == 190 # type: ignore[union-attr] + + def test_create_missing_args(self, base_dataset: None) -> None: + """Create without --set or --file fails.""" + result = runner.invoke(app, ["create", "TestingPerson"]) + assert result.exit_code != 0 + + def test_update_inline(self, base_dataset: None) -> None: + """Update a person's height using --set.""" + result = runner.invoke( + app, + ["update", "TestingPerson", "Sophia Walker", "--set", "height=175"], + ) + assert result.exit_code == 0, f"update failed: {result.output}" + assert "Updated" in result.stdout + + async def test_update_inline_verify(self, base_dataset: None, client: InfrahubClient) -> None: + 
"""Verify the update from test_update_inline persisted.""" + node = await client.get(kind="TestingPerson", id="Sophia Walker") + assert node.height.value == 175 # type: ignore[union-attr] + + async def test_delete_setup(self, base_dataset: None, client: InfrahubClient) -> None: + """Create a throwaway object for the delete test.""" + obj = await client.create(kind="TestingPerson", name="Delete Me", height=100) + await obj.save() + + def test_delete_with_yes(self, base_dataset: None) -> None: + """Delete a person using --yes to skip confirmation.""" + result = runner.invoke(app, ["delete", "TestingPerson", "Delete Me", "--yes"]) + assert result.exit_code == 0 + assert "Deleted" in result.stdout + + async def test_delete_verify(self, base_dataset: None, client: InfrahubClient) -> None: + """Verify the object from test_delete_with_yes is gone.""" + node = await client.get(kind="TestingPerson", id="Delete Me", raise_when_missing=False) + assert node is None diff --git a/tests/unit/ctl/commands/__init__.py b/tests/unit/ctl/commands/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/ctl/commands/test_create.py b/tests/unit/ctl/commands/test_create.py new file mode 100644 index 00000000..05f81a43 --- /dev/null +++ b/tests/unit/ctl/commands/test_create.py @@ -0,0 +1,197 @@ +"""Unit tests for the ``infrahub create`` end-user CLI command.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from typer.testing import CliRunner + +from infrahub_sdk.ctl.cli_commands import app + +runner = CliRunner() + + +def test_create_help() -> None: + """``create --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["create", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def test_create_mutual_exclusivity() -> None: + """Passing both --set and --file exits with a non-zero code.""" + result = 
runner.invoke(app, ["create", "InfraDevice", "--set", "name=router1", "--file", "objects.yml"]) + assert result.exit_code != 0 + + +def test_create_no_args() -> None: + """Omitting both --set and --file exits with a non-zero code.""" + result = runner.invoke(app, ["create", "InfraDevice"]) + assert result.exit_code != 0 + + +def test_create_with_set_args() -> None: + """``create`` with --set creates a node and prints a confirmation.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = ["site"] + + mock_node = MagicMock() + mock_node.id = "test-id-001" + mock_node.display_label = "router1" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.create = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["create", "InfraDevice", "--set", "name=router1"]) + + assert result.exit_code == 0, result.stdout + assert "Created" in result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + mock_client.create.assert_awaited_once_with(kind="InfraDevice", data={"name": "router1"}, branch=None) + mock_node.save.assert_awaited_once_with(allow_upsert=True) + + +def test_create_with_set_args_and_branch() -> None: + """``create`` forwards --branch to client calls.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "test-id-002" + mock_node.display_label = "router2" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.create = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", 
return_value=mock_client): + result = runner.invoke( + app, + ["create", "InfraDevice", "--set", "name=router2", "--branch", "dev"], + ) + + assert result.exit_code == 0, result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch="dev") + mock_client.create.assert_awaited_once_with(kind="InfraDevice", data={"name": "router2"}, branch="dev") + + +def test_create_with_file() -> None: + """``create`` with --file delegates to ObjectFile and prints a confirmation.""" + mock_file = MagicMock() + mock_file.spec.data = [{"name": "router-a"}, {"name": "router-b"}] + mock_file.spec.kind = "InfraDevice" + mock_file.validate_format = AsyncMock() + mock_file.process = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.create.ObjectFile.load_from_disk", + return_value=[mock_file], + ), + ): + result = runner.invoke(app, ["create", "InfraDevice", "--file", "devices.yml"]) + + assert result.exit_code == 0, result.stdout + assert "Created" in result.stdout + assert "2" in result.stdout + assert "InfraDevice" in result.stdout + mock_file.validate_format.assert_awaited_once_with(client=mock_client, branch=None) + mock_file.process.assert_awaited_once_with(client=mock_client, branch=None) + + +def test_create_with_file_multiple_files() -> None: + """``create`` with --file processes every file returned by load_from_disk.""" + + def make_obj_file(kind: str, count: int) -> MagicMock: + obj = MagicMock() + obj.spec.data = [{"name": f"item-{i}"} for i in range(count)] + obj.spec.kind = kind + obj.validate_format = AsyncMock() + obj.process = AsyncMock() + return obj + + file_a = make_obj_file("InfraDevice", 2) + file_b = make_obj_file("InfraPrefix", 3) + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client), + patch( + 
"infrahub_sdk.ctl.commands.create.ObjectFile.load_from_disk", + return_value=[file_a, file_b], + ), + ): + result = runner.invoke(app, ["create", "InfraDevice", "--file", "multi.yml"]) + + assert result.exit_code == 0, result.stdout + file_a.validate_format.assert_awaited_once() + file_a.process.assert_awaited_once() + file_b.validate_format.assert_awaited_once() + file_b.process.assert_awaited_once() + + +def test_create_invalid_field() -> None: + """Using --set with an unknown field name exits with a non-zero code.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = ["site"] + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["create", "InfraDevice", "--set", "nonexistent_field=value"]) + + assert result.exit_code != 0 + + +def test_create_multiple_set_args() -> None: + """``create`` accepts multiple --set options and passes all fields to the client.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "test-id-003" + mock_node.display_label = "router3" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.create = AsyncMock(return_value=mock_node) + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke( + app, + ["create", "InfraDevice", "--set", "name=router3", "--set", "description=core router"], + ) + + assert result.exit_code == 0, result.stdout + _, call_kwargs = mock_client.create.call_args + assert call_kwargs["data"] == {"name": "router3", "description": "core router"} + + 
+@pytest.mark.parametrize("bad_arg", ["noequals", "=emptykey"]) +def test_create_malformed_set_arg(bad_arg: str) -> None: + """Malformed --set arguments (no ``=`` or empty key) exit with a non-zero code.""" + mock_client = MagicMock() + + with patch("infrahub_sdk.ctl.commands.create.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["create", "InfraDevice", "--set", bad_arg]) + + assert result.exit_code != 0 diff --git a/tests/unit/ctl/commands/test_delete.py b/tests/unit/ctl/commands/test_delete.py new file mode 100644 index 00000000..b2b53748 --- /dev/null +++ b/tests/unit/ctl/commands/test_delete.py @@ -0,0 +1,139 @@ +"""Unit tests for the ``infrahub delete`` end-user CLI command.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +from typer.testing import CliRunner + +from infrahub_sdk.ctl.cli_commands import app + +runner = CliRunner() + + +def test_delete_help() -> None: + """``delete --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["delete", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def test_delete_with_yes() -> None: + """``delete --yes`` skips confirmation, deletes the node, and prints a confirmation.""" + mock_node = MagicMock() + mock_node.id = "node-del-001" + mock_node.display_label = "router-to-delete" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): + result = runner.invoke(app, ["delete", "InfraDevice", "node-del-001", "--yes"]) + + assert result.exit_code == 0, result.stdout + assert "Deleted" in result.stdout + mock_node.delete.assert_awaited_once() + + +def test_delete_with_yes_short_flag() -> None: + """``delete -y`` is equivalent to 
``--yes``.""" + mock_node = MagicMock() + mock_node.id = "node-del-002" + mock_node.display_label = "router-b" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): + result = runner.invoke(app, ["delete", "InfraDevice", "node-del-002", "-y"]) + + assert result.exit_code == 0, result.stdout + mock_node.delete.assert_awaited_once() + + +def test_delete_with_branch() -> None: + """``delete`` forwards --branch to initialize_client.""" + mock_node = MagicMock() + mock_node.id = "node-br-del" + mock_node.display_label = "device-in-branch" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client) as mock_init, + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): + result = runner.invoke( + app, + ["delete", "InfraDevice", "node-br-del", "--yes", "--branch", "my-branch"], + ) + + assert result.exit_code == 0, result.stdout + mock_init.assert_called_once_with(branch="my-branch") + mock_node.delete.assert_awaited_once() + + +def test_delete_confirmation_abort() -> None: + """Answering ``n`` at the confirmation prompt aborts deletion without calling delete.""" + mock_node = MagicMock() + mock_node.id = "node-abort" + mock_node.display_label = "router-keep" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): + result = runner.invoke(app, ["delete", "InfraDevice", "node-abort"], input="n\n") + + assert result.exit_code != 0 + mock_node.delete.assert_not_awaited() + + +def 
test_delete_confirmation_yes_input() -> None: + """Answering ``y`` at the confirmation prompt proceeds with deletion.""" + mock_node = MagicMock() + mock_node.id = "node-confirm" + mock_node.display_label = "router-confirm" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): + result = runner.invoke(app, ["delete", "InfraDevice", "node-confirm"], input="y\n") + + assert result.exit_code == 0, result.stdout + assert "Deleted" in result.stdout + mock_node.delete.assert_awaited_once() + + +def test_delete_output_contains_id_and_label() -> None: + """Deletion confirmation message includes the node ID and display label.""" + mock_node = MagicMock() + mock_node.id = "unique-id-xyz" + mock_node.display_label = "specific-router" + mock_node.delete = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.delete.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.delete.resolve_node", new_callable=AsyncMock, return_value=mock_node), + ): + result = runner.invoke(app, ["delete", "InfraDevice", "unique-id-xyz", "--yes"]) + + assert result.exit_code == 0, result.stdout + assert "unique-id-xyz" in result.stdout + assert "specific-router" in result.stdout diff --git a/tests/unit/ctl/commands/test_get.py b/tests/unit/ctl/commands/test_get.py new file mode 100644 index 00000000..845ae3c4 --- /dev/null +++ b/tests/unit/ctl/commands/test_get.py @@ -0,0 +1,118 @@ +"""Unit tests for the ``infrahub get`` end-user CLI command.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from typer.testing import CliRunner + +from infrahub_sdk.ctl.cli_commands import app + +runner = CliRunner() + + +def test_get_help() -> None: + """``get --help`` exits 
cleanly and includes usage text.""" + result = runner.invoke(app, ["get", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def test_get_list_mode() -> None: + """List mode calls ``client.filters`` and prints node data.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "abc-123" + mock_node.display_label = "test-device" + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.filters = AsyncMock(return_value=[mock_node]) + + mock_formatter = MagicMock() + mock_formatter.format_list.return_value = "device-list-output" + + with ( + patch("infrahub_sdk.ctl.commands.get.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.get.detect_output_format", return_value="json"), + patch("infrahub_sdk.ctl.commands.get.get_formatter", return_value=mock_formatter), + ): + result = runner.invoke(app, ["get", "InfraDevice"]) + + assert result.exit_code == 0 + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + mock_client.filters.assert_awaited_once() + mock_formatter.format_list.assert_called_once_with([mock_node], mock_schema, show_all_columns=False) + + +def test_get_detail_mode() -> None: + """Detail mode calls ``resolve_node`` when an identifier is supplied.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_node = MagicMock() + mock_node.id = "abc-123" + mock_node.display_label = "test-device" + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + mock_formatter = MagicMock() + mock_formatter.format_detail.return_value = '{"id": "abc-123"}' + + with ( + patch("infrahub_sdk.ctl.commands.get.initialize_client", 
return_value=mock_client), + patch("infrahub_sdk.ctl.commands.get.detect_output_format", return_value="json"), + patch("infrahub_sdk.ctl.commands.get.get_formatter", return_value=mock_formatter), + patch( + "infrahub_sdk.ctl.commands.get.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ) as mock_resolve, + ): + result = runner.invoke(app, ["get", "InfraDevice", "abc-123"]) + + assert result.exit_code == 0 + mock_resolve.assert_awaited_once_with(mock_client, "InfraDevice", "abc-123", schema=mock_schema, branch=None) + mock_formatter.format_detail.assert_called_once_with(mock_node, mock_schema) + + +@pytest.mark.parametrize( + "extra_args", + [ + ["--branch", "my-branch"], + ["--limit", "10"], + ["--offset", "5"], + ["--filter", "name__value=router1"], + ], +) +def test_get_list_mode_with_options(extra_args: list[str]) -> None: + """List mode accepts optional flags without error.""" + mock_schema = MagicMock() + mock_schema.attribute_names = [] + mock_schema.relationship_names = [] + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + mock_client.filters = AsyncMock(return_value=[]) + + mock_formatter = MagicMock() + mock_formatter.format_list.return_value = "[]" + + with ( + patch("infrahub_sdk.ctl.commands.get.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.get.detect_output_format", return_value="json"), + patch("infrahub_sdk.ctl.commands.get.get_formatter", return_value=mock_formatter), + ): + result = runner.invoke(app, ["get", "InfraDevice", *extra_args]) + + # Exit 80 = query succeeded but no results (empty mock) + assert result.exit_code == 80 diff --git a/tests/unit/ctl/commands/test_schema.py b/tests/unit/ctl/commands/test_schema.py new file mode 100644 index 00000000..d1d60d21 --- /dev/null +++ b/tests/unit/ctl/commands/test_schema.py @@ -0,0 +1,323 @@ +"""Unit tests for the ``infrahub schema`` end-user CLI subcommand group.""" 
+ +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +from typer.testing import CliRunner + +from infrahub_sdk.ctl.cli_commands import app +from infrahub_sdk.schema import NodeSchemaAPI + +runner = CliRunner() + + +def _make_node_schema(kind: str, namespace: str, name: str, description: str = "") -> MagicMock: + """Build a MagicMock that satisfies ``isinstance(obj, NodeSchemaAPI)`` checks. + + Args: + kind: Full kind string, e.g. ``"InfraDevice"``. + namespace: Namespace portion, e.g. ``"Infra"``. + name: Name portion, e.g. ``"Device"``. + description: Optional human-readable description. + + Returns: + A MagicMock with spec=NodeSchemaAPI and the given property values. + """ + schema = MagicMock(spec=NodeSchemaAPI) + schema.kind = kind + schema.namespace = namespace + schema.name = name + schema.description = description + return schema + + +def _make_attr( + name: str, + kind: str = "Text", + optional: bool = True, + default_value: object = None, + description: str = "", +) -> MagicMock: + """Build a mock attribute object for use in schema_show tests. + + Args: + name: Attribute name. + kind: Attribute type/kind string. + optional: Whether the attribute is optional. + default_value: Default value for the attribute. + description: Optional description. + + Returns: + A plain MagicMock with the given property values. + """ + attr = MagicMock() + attr.name = name + attr.kind = kind + attr.optional = optional + attr.default_value = default_value + attr.description = description + return attr + + +def _make_rel(name: str, peer: str, cardinality: str = "one", optional: bool = True) -> MagicMock: + """Build a mock relationship object for use in schema_show tests. + + Args: + name: Relationship name. + peer: Peer kind string. + cardinality: ``"one"`` or ``"many"``. + optional: Whether the relationship is optional. + + Returns: + A plain MagicMock with the given property values. 
+ """ + rel = MagicMock() + rel.name = name + rel.peer = peer + rel.cardinality = cardinality + rel.optional = optional + return rel + + +# --------------------------------------------------------------------------- +# Help tests +# --------------------------------------------------------------------------- + + +def test_schema_list_help() -> None: + """``schema list --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["schema", "list", "--help"]) + assert result.exit_code == 0 + assert "Usage" in result.stdout + + +def test_schema_show_help() -> None: + """``schema show --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["schema", "show", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +# --------------------------------------------------------------------------- +# schema list tests +# --------------------------------------------------------------------------- + + +def test_schema_list_returns_table() -> None: + """``schema list`` renders a table containing the returned kind names.""" + device_schema = _make_node_schema("InfraDevice", "Infra", "Device", "A network device") + interface_schema = _make_node_schema("InfraInterface", "Infra", "Interface", "A network interface") + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"InfraDevice": device_schema, "InfraInterface": interface_schema}) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "InfraInterface" in result.stdout + mock_client.schema.all.assert_awaited_once_with(branch=None) + + +def test_schema_list_with_filter() -> None: + """``schema list --filter`` restricts output to kinds matching the substring.""" + device_schema = _make_node_schema("InfraDevice", "Infra", 
"Device") + prefix_schema = _make_node_schema("IpamPrefix", "Ipam", "Prefix") + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"InfraDevice": device_schema, "IpamPrefix": prefix_schema}) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list", "--filter", "infra"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "IpamPrefix" not in result.stdout + + +def test_schema_list_empty() -> None: + """``schema list`` exits cleanly when no schemas are returned.""" + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={}) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list"]) + + assert result.exit_code == 0, result.stdout + + +def test_schema_list_with_branch() -> None: + """``schema list --branch`` passes the branch name through to the client.""" + schema = _make_node_schema("CoreAccount", "Core", "Account") + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"CoreAccount": schema}) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list", "--branch", "feature-x"]) + + assert result.exit_code == 0, result.stdout + mock_client.schema.all.assert_awaited_once_with(branch="feature-x") + + +def test_schema_list_skips_non_node_schema_entries() -> None: + """``schema list`` silently skips entries that are not NodeSchemaAPI instances.""" + node_schema = _make_node_schema("InfraDevice", "Infra", "Device") + # A plain MagicMock without spec=NodeSchemaAPI will fail isinstance(x, NodeSchemaAPI) + generic_schema = MagicMock() + generic_schema.kind = "SomeGenericKind" + + mock_client = MagicMock() + mock_client.schema.all = AsyncMock(return_value={"InfraDevice": node_schema, "SomeGenericKind": generic_schema}) + + with 
patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "list"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "SomeGenericKind" not in result.stdout + + +# --------------------------------------------------------------------------- +# schema show tests +# --------------------------------------------------------------------------- + + +def _make_full_schema( + kind: str = "InfraDevice", + namespace: str = "Infra", + description: str = "A network device", + display_labels: list[str] | None = None, + human_friendly_id: list[str] | None = None, + attributes: list[MagicMock] | None = None, + relationships: list[MagicMock] | None = None, +) -> MagicMock: + """Build a detailed schema mock suitable for schema_show. + + Args: + kind: Full kind string. + namespace: Namespace portion. + description: Human-readable description. + display_labels: List of display label expressions. + human_friendly_id: List of human-friendly ID expressions. + attributes: List of attribute mocks. + relationships: List of relationship mocks. + + Returns: + A MagicMock configured with all schema_show-required fields. 
+ """ + schema = MagicMock() + schema.kind = kind + schema.namespace = namespace + schema.description = description + schema.display_labels = display_labels + schema.human_friendly_id = human_friendly_id + schema.attributes = attributes or [] + schema.relationships = relationships or [] + return schema + + +def test_schema_show_displays_metadata() -> None: + """``schema show`` prints kind, description and namespace.""" + schema = _make_full_schema() + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "InfraDevice" in result.stdout + assert "A network device" in result.stdout + assert "Infra" in result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + + +def test_schema_show_displays_attributes() -> None: + """``schema show`` renders the Attributes table with all column values.""" + attrs = [ + _make_attr("hostname", kind="Text", optional=False, default_value=None, description="Device hostname"), + _make_attr("role", kind="Text", optional=True, default_value="router", description="Device role"), + ] + schema = _make_full_schema(attributes=attrs) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "Attributes" in result.stdout + assert "hostname" in result.stdout + assert "role" in result.stdout + # Required attribute should show "Yes", optional should show "No" + assert "Yes" in result.stdout + assert "No" in result.stdout + assert "router" in result.stdout + assert "Device hostname" in result.stdout + + +def 
test_schema_show_displays_relationships() -> None: + """``schema show`` renders the Relationships table with peer and cardinality.""" + rels = [ + _make_rel("interfaces", peer="InfraInterface", cardinality="many", optional=True), + _make_rel("site", peer="LocationSite", cardinality="one", optional=False), + ] + schema = _make_full_schema(relationships=rels) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "Relationships" in result.stdout + assert "interfaces" in result.stdout + assert "InfraInterface" in result.stdout + assert "many" in result.stdout + assert "site" in result.stdout + assert "LocationSite" in result.stdout + assert "one" in result.stdout + + +def test_schema_show_no_attributes_or_relationships() -> None: + """``schema show`` exits cleanly for a schema with no attributes or relationships.""" + schema = _make_full_schema(attributes=[], relationships=[]) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "Attributes" not in result.stdout + assert "Relationships" not in result.stdout + + +def test_schema_show_with_branch() -> None: + """``schema show --branch`` passes the branch name through to the client.""" + schema = _make_full_schema() + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice", "--branch", "feature-x"]) + + assert result.exit_code == 0, result.stdout + 
mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch="feature-x") + + +def test_schema_show_attribute_with_default_value() -> None: + """``schema show`` displays the default value when set on an attribute.""" + attrs = [_make_attr("speed", kind="Number", optional=True, default_value=1000)] + schema = _make_full_schema(attributes=attrs) + mock_client = MagicMock() + mock_client.schema.get = AsyncMock(return_value=schema) + + with patch("infrahub_sdk.ctl.schema.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["schema", "show", "InfraDevice"]) + + assert result.exit_code == 0, result.stdout + assert "1000" in result.stdout diff --git a/tests/unit/ctl/commands/test_update.py b/tests/unit/ctl/commands/test_update.py new file mode 100644 index 00000000..b0f70892 --- /dev/null +++ b/tests/unit/ctl/commands/test_update.py @@ -0,0 +1,311 @@ +"""Unit tests for the ``infrahub update`` end-user CLI command.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from typer.testing import CliRunner + +from infrahub_sdk.ctl.cli_commands import app + +runner = CliRunner() + + +def test_update_help() -> None: + """``update --help`` exits cleanly and includes usage text.""" + result = runner.invoke(app, ["update", "--help"]) + assert result.exit_code == 0 + assert "kind" in result.stdout.lower() or "Usage" in result.stdout + + +def test_update_mutual_exclusivity() -> None: + """Passing both --set and --file exits with a non-zero code.""" + result = runner.invoke( + app, + ["update", "InfraDevice", "abc-123", "--set", "name=router1", "--file", "objects.yml"], + ) + assert result.exit_code != 0 + + +def test_update_no_args() -> None: + """Omitting both --set and --file exits with a non-zero code.""" + result = runner.invoke(app, ["update", "InfraDevice", "abc-123"]) + assert result.exit_code != 0 + + +def test_update_with_set_args() -> None: + """``update`` with --set 
fetches the node, applies the change, and saves it.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name", "description"] + mock_schema.relationship_names = [] + + mock_attr = MagicMock() + mock_attr.value = "old-name" + + mock_node = MagicMock() + mock_node.id = "abc-123" + mock_node.display_label = "router1" + mock_node.name = mock_attr + mock_node.save = AsyncMock() + + def getattr_side_effect(obj: object, name: str) -> MagicMock: + if name == "name": + return mock_attr + return MagicMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.update.getattr", side_effect=getattr_side_effect, create=True), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ) as mock_resolve, + ): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", "name=router1"]) + + assert result.exit_code == 0, result.stdout + assert "Updated" in result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch=None) + mock_resolve.assert_awaited_once_with(mock_client, "InfraDevice", "abc-123", schema=mock_schema, branch=None) + mock_node.save.assert_awaited_once() + + +def test_update_with_set_args_attribute_applied() -> None: + """``update`` with an attribute --set updates the attribute value on the node.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["description"] + mock_schema.relationship_names = [] + + mock_attr = MagicMock() + mock_attr.value = "old description" + + mock_node = MagicMock() + mock_node.id = "node-001" + mock_node.display_label = "device-a" + mock_node.save = AsyncMock() + + # Make getattr(node, "description") return mock_attr + type(mock_node).description = mock_attr + + mock_client = MagicMock() + mock_client.schema = 
MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ), + ): + result = runner.invoke(app, ["update", "InfraDevice", "node-001", "--set", "description=new description"]) + + assert result.exit_code == 0, result.stdout + assert "Updated" in result.stdout + mock_node.save.assert_awaited_once() + + +def test_update_with_set_args_and_branch() -> None: + """``update`` forwards --branch to schema and resolve_node calls.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_attr = MagicMock() + mock_attr.value = "old" + + mock_node = MagicMock() + mock_node.id = "node-br" + mock_node.display_label = "device-br" + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ) as mock_resolve, + ): + result = runner.invoke( + app, + ["update", "InfraDevice", "node-br", "--set", "name=newname", "--branch", "feature-x"], + ) + + assert result.exit_code == 0, result.stdout + mock_client.schema.get.assert_awaited_once_with(kind="InfraDevice", branch="feature-x") + mock_resolve.assert_awaited_once_with(mock_client, "InfraDevice", "node-br", schema=mock_schema, branch="feature-x") + + +def test_update_invalid_field() -> None: + """Using --set with an unknown field name exits with a non-zero code.""" + mock_schema = MagicMock() + mock_schema.attribute_names = ["name"] + mock_schema.relationship_names = [] + + mock_client = MagicMock() + mock_client.schema = MagicMock() + 
mock_client.schema.get = AsyncMock(return_value=mock_schema) + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch("infrahub_sdk.ctl.commands.update.resolve_node", new_callable=AsyncMock), + ): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", "unknown_field=value"]) + + assert result.exit_code != 0 + + +def test_update_with_file() -> None: + """``update`` with --file delegates to ObjectFile and prints a confirmation.""" + mock_file = MagicMock() + mock_file.validate_format = AsyncMock() + mock_file.process = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.ObjectFile.load_from_disk", + return_value=[mock_file], + ), + ): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--file", "updates.yml"]) + + assert result.exit_code == 0, result.stdout + assert "Processed" in result.stdout or "successfully" in result.stdout.lower() + mock_file.validate_format.assert_awaited_once_with(client=mock_client, branch=None) + mock_file.process.assert_awaited_once_with(client=mock_client, branch=None) + + +def test_update_with_file_and_branch() -> None: + """``update`` with --file forwards --branch to validate_format and process.""" + mock_file = MagicMock() + mock_file.validate_format = AsyncMock() + mock_file.process = AsyncMock() + + mock_client = MagicMock() + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.ObjectFile.load_from_disk", + return_value=[mock_file], + ), + ): + result = runner.invoke( + app, + ["update", "InfraDevice", "abc-123", "--file", "updates.yml", "--branch", "staging"], + ) + + assert result.exit_code == 0, result.stdout + mock_file.validate_format.assert_awaited_once_with(client=mock_client, branch="staging") + 
mock_file.process.assert_awaited_once_with(client=mock_client, branch="staging") + + +def test_update_with_set_args_relationship() -> None: + """``update`` with a relationship --set field fetches the rel and sets the new id.""" + mock_schema = MagicMock() + mock_schema.attribute_names = [] + mock_schema.relationship_names = ["site"] + + mock_rel = MagicMock() + mock_rel.id = "old-site-id" + mock_rel.display_label = "old-site" + mock_rel.fetch = AsyncMock() + + mock_node = MagicMock() + mock_node.id = "node-rel-001" + mock_node.display_label = "device-rel" + mock_node.site = mock_rel + mock_node.save = AsyncMock() + + mock_client = MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + async def passthrough_resolve(client: object, data: object, schema: object, **kwargs: object) -> dict: + return {"site": {"id": "new-site-id"}} + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ), + patch( + "infrahub_sdk.ctl.commands.update.resolve_relationship_values", + side_effect=passthrough_resolve, + ), + ): + result = runner.invoke(app, ["update", "InfraDevice", "node-rel-001", "--set", "site=new-site-id"]) + + assert result.exit_code == 0, result.stdout + assert "Updated" in result.stdout + mock_node.save.assert_awaited_once() + + +def test_update_with_set_args_relationship_noop() -> None: + """``update`` with a relationship --set that resolves to the same target is a no-op.""" + mock_schema = MagicMock() + mock_schema.attribute_names = [] + mock_schema.relationship_names = ["site"] + + mock_rel = MagicMock() + mock_rel.id = "same-site-id" + mock_rel.display_label = "same-site" + + mock_node = MagicMock() + mock_node.id = "node-noop-001" + mock_node.display_label = "device-noop" + mock_node.site = mock_rel + mock_node.save = AsyncMock() + + mock_client = 
MagicMock() + mock_client.schema = MagicMock() + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + async def resolve_to_same(client: object, data: object, schema: object, **kwargs: object) -> dict: + return {"site": {"id": "same-site-id"}} + + with ( + patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client), + patch( + "infrahub_sdk.ctl.commands.update.resolve_node", + new_callable=AsyncMock, + return_value=mock_node, + ), + patch( + "infrahub_sdk.ctl.commands.update.resolve_relationship_values", + side_effect=resolve_to_same, + ), + ): + result = runner.invoke(app, ["update", "InfraDevice", "node-noop-001", "--set", "site=same-site"]) + + assert result.exit_code == 0, result.stdout + assert "No changes" in result.stdout + mock_node.save.assert_not_awaited() + + +@pytest.mark.parametrize("bad_arg", ["noequals", "=emptykey"]) +def test_update_malformed_set_arg(bad_arg: str) -> None: + """Malformed --set arguments (no ``=`` or empty key) exit with a non-zero code.""" + mock_client = MagicMock() + + with patch("infrahub_sdk.ctl.commands.update.initialize_client", return_value=mock_client): + result = runner.invoke(app, ["update", "InfraDevice", "abc-123", "--set", bad_arg]) + + assert result.exit_code != 0 diff --git a/tests/unit/ctl/commands/test_utils.py b/tests/unit/ctl/commands/test_utils.py new file mode 100644 index 00000000..43be4d5b --- /dev/null +++ b/tests/unit/ctl/commands/test_utils.py @@ -0,0 +1,165 @@ +"""Unit tests for ``infrahub_sdk.ctl.commands.utils``.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from infrahub_sdk.ctl.commands.utils import resolve_node +from infrahub_sdk.exceptions import NodeNotFoundError +from infrahub_sdk.schema import NodeSchemaAPI + + +@pytest.fixture +def mock_client() -> MagicMock: + """Return a mock InfrahubClient with async schema and get methods.""" + client = MagicMock() + client.schema = MagicMock() + 
client.schema.get = AsyncMock() + client.get = AsyncMock() + return client + + +async def test_resolve_by_uuid(mock_client: MagicMock) -> None: + """When the identifier is a valid UUID, ``client.get(id=...)`` is called directly.""" + mock_schema = MagicMock() + mock_schema.default_filter = None + mock_schema.human_friendly_id = None + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + uuid_identifier = "12345678-1234-5678-1234-567812345678" + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=True): + result = await resolve_node(mock_client, "InfraDevice", uuid_identifier) + + assert result is expected_node + mock_client.get.assert_awaited_once_with(kind="InfraDevice", id=uuid_identifier, branch=None) + + +async def test_resolve_by_default_filter(mock_client: MagicMock) -> None: + """When the schema has a ``default_filter``, it is used as a keyword filter.""" + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = "name__value" + mock_schema.human_friendly_id = None + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "router1") + + assert result is expected_node + mock_client.get.assert_awaited_once_with( + kind="InfraDevice", + branch=None, + raise_when_missing=False, + name__value="router1", + ) + + +async def test_resolve_by_hfid(mock_client: MagicMock) -> None: + """When the schema defines ``human_friendly_id``, ``client.get(hfid=...)`` is used.""" + + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = None + mock_schema.human_friendly_id = ["name__value"] + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + 
mock_client.get = AsyncMock(return_value=expected_node) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "router1") + + assert result is expected_node + mock_client.get.assert_awaited_once_with( + kind="InfraDevice", + hfid=["router1"], + branch=None, + raise_when_missing=False, + ) + + +async def test_resolve_by_hfid_multi_component(mock_client: MagicMock) -> None: + """Multi-component HFID strings (``a/b``) are split on ``/``.""" + + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = None + mock_schema.human_friendly_id = ["site__name__value", "name__value"] + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "london/router1") + + assert result is expected_node + mock_client.get.assert_awaited_once_with( + kind="InfraDevice", + hfid=["london", "router1"], + branch=None, + raise_when_missing=False, + ) + + +async def test_resolve_fallback_raises(mock_client: MagicMock) -> None: + """When no lookup strategy matches, the fallback ``client.get(id=...)`` call raises.""" + + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = None + mock_schema.human_friendly_id = None + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + mock_client.get = AsyncMock( + side_effect=NodeNotFoundError(identifier={"id": ["unknown-name"]}, node_type="InfraDevice") + ) + + with ( + patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False), + pytest.raises(NodeNotFoundError), + ): + await resolve_node(mock_client, "InfraDevice", "unknown-name") + + mock_client.get.assert_awaited_once_with(kind="InfraDevice", id="unknown-name", branch=None) + + +async def 
test_resolve_uses_provided_schema(mock_client: MagicMock) -> None: + """When ``schema`` is provided, ``client.schema.get`` is not called.""" + pre_fetched_schema = MagicMock(spec=NodeSchemaAPI) + pre_fetched_schema.default_filter = None + pre_fetched_schema.human_friendly_id = None + + expected_node = MagicMock() + mock_client.get = AsyncMock(return_value=expected_node) + + uuid_identifier = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee" + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=True): + result = await resolve_node(mock_client, "InfraDevice", uuid_identifier, schema=pre_fetched_schema) + + assert result is expected_node + mock_client.schema.get.assert_not_awaited() + + +async def test_resolve_default_filter_miss_falls_through_to_hfid(mock_client: MagicMock) -> None: + """When the default-filter lookup returns ``None``, the HFID strategy is tried next.""" + mock_schema = MagicMock(spec=NodeSchemaAPI) + mock_schema.default_filter = "name__value" + mock_schema.human_friendly_id = ["name__value"] + mock_client.schema.get = AsyncMock(return_value=mock_schema) + + expected_node = MagicMock() + # First call (default_filter) returns None; second call (hfid) returns the node. 
+ mock_client.get = AsyncMock(side_effect=[None, expected_node]) + + with patch("infrahub_sdk.ctl.commands.utils.is_valid_uuid", return_value=False): + result = await resolve_node(mock_client, "InfraDevice", "router1") + + assert result is expected_node + assert mock_client.get.await_count == 2 diff --git a/tests/unit/ctl/formatters/__init__.py b/tests/unit/ctl/formatters/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/ctl/formatters/test_csv.py b/tests/unit/ctl/formatters/test_csv.py new file mode 100644 index 00000000..e7969ab5 --- /dev/null +++ b/tests/unit/ctl/formatters/test_csv.py @@ -0,0 +1,248 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.csv (CsvFormatter).""" + +from __future__ import annotations + +import csv +import io +from unittest.mock import MagicMock + +from infrahub_sdk.ctl.formatters.csv import CsvFormatter + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. + """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. + + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + node_id: The node ID. + display_label: The display label for the node. 
+ + Returns: + MagicMock configured to behave like an InfrahubNode object. + """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + setattr(node, rel_name, rel) + return node + + +def _parse_csv(text: str) -> list[dict[str, str]]: + """Parse a CSV string into a list of row dicts. + + Args: + text: CSV-formatted string. + + Returns: + List of dicts keyed by header row values. + """ + return list(csv.DictReader(io.StringIO(text))) + + +class TestCsvFormatterFormatList: + """Tests for CsvFormatter.format_list.""" + + def test_format_list_returns_string(self) -> None: + """Test that format_list returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + assert isinstance(result, str) + + def test_format_list_has_header_row_with_attribute_name(self) -> None: + """Test that the first row contains attribute column headers.""" + schema = _make_mock_schema(["name", "status"], []) + node = _make_mock_node({"name": "router1", "status": "active"}, {}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows = _parse_csv(result) + assert "name" in rows[0] + assert "status" in rows[0] + + def test_format_list_has_header_row_with_relationship_name(self) -> None: + """Test that the first row contains relationship column headers.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows = _parse_csv(result) + assert "site" in rows[0] + + def test_format_list_data_row_contains_attribute_value(self) -> 
None: + """Test that data rows contain the node attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows = _parse_csv(result) + assert rows[0]["name"] == "router1" + + def test_format_list_data_row_contains_relationship_value(self) -> None: + """Test that data rows contain the relationship display label.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = CsvFormatter() + + result = formatter.format_list([node], schema) + + rows = _parse_csv(result) + assert rows[0]["site"] == "DC1" + + def test_format_list_one_data_row_per_node(self) -> None: + """Test that format_list produces one data row per node.""" + schema = _make_mock_schema(["name"], []) + node1 = _make_mock_node({"name": "router1"}, {}, node_id="id-1") + node2 = _make_mock_node({"name": "router2"}, {}, node_id="id-2") + formatter = CsvFormatter() + + result = formatter.format_list([node1, node2], schema) + + rows = _parse_csv(result) + assert len(rows) == 2 + + def test_format_list_empty_nodes_returns_header_only(self) -> None: + """Test that format_list with no nodes returns only the header row.""" + schema = _make_mock_schema(["name"], []) + formatter = CsvFormatter() + + result = formatter.format_list([], schema) + + rows = _parse_csv(result) + assert rows == [] + # With no data rows, all column headers are still shown + assert "name" in result + + +class TestCsvFormatterFormatDetail: + """Tests for CsvFormatter.format_detail.""" + + def test_format_detail_returns_string(self) -> None: + """Test that format_detail returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + assert isinstance(result, str) + + def 
test_format_detail_has_field_value_headers(self) -> None: + """Test that format_detail output has field and value column headers.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + assert "field" in rows[0] + assert "value" in rows[0] + + def test_format_detail_contains_id_row(self) -> None: + """Test that format_detail includes a row for the node id.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, node_id="abc-123") + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + id_row = next(r for r in rows if r["field"] == "id") + assert id_row["value"] == "abc-123" + + def test_format_detail_contains_display_label_row(self) -> None: + """Test that format_detail includes a row for display_label.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, display_label="Router One") + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + label_row = next(r for r in rows if r["field"] == "display_label") + assert label_row["value"] == "Router One" + + def test_format_detail_contains_kind_row(self) -> None: + """Test that format_detail includes a row for the schema kind.""" + schema = _make_mock_schema(["name"], [], kind="InfraDevice") + node = _make_mock_node({"name": "router1"}, {}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + kind_row = next(r for r in rows if r["field"] == "kind") + assert kind_row["value"] == "InfraDevice" + + def test_format_detail_contains_attribute_row(self) -> None: + """Test that format_detail includes a row for each attribute.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter 
= CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + name_row = next(r for r in rows if r["field"] == "name") + assert name_row["value"] == "router1" + + def test_format_detail_contains_relationship_row(self) -> None: + """Test that format_detail includes a row for each relationship.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = CsvFormatter() + + result = formatter.format_detail(node, schema) + + rows = _parse_csv(result) + site_row = next(r for r in rows if r["field"] == "site") + assert site_row["value"] == "DC1" diff --git a/tests/unit/ctl/formatters/test_init.py b/tests/unit/ctl/formatters/test_init.py new file mode 100644 index 00000000..d822ab8d --- /dev/null +++ b/tests/unit/ctl/formatters/test_init.py @@ -0,0 +1,55 @@ +"""Tests for infrahub_sdk.ctl.formatters package init (OutputFormat, detect/get).""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest + +from infrahub_sdk.ctl.formatters import ( + CsvFormatter, + JsonFormatter, + OutputFormat, + TableFormatter, + YamlFormatter, + detect_output_format, + get_formatter, +) + + +class TestOutputFormat: + def test_enum_values(self) -> None: + assert OutputFormat.TABLE == "table" + assert OutputFormat.JSON == "json" + assert OutputFormat.CSV == "csv" + assert OutputFormat.YAML == "yaml" + + +class TestDetectOutputFormat: + def test_returns_table_when_tty(self) -> None: + with patch("sys.stdout") as mock_stdout: + mock_stdout.isatty.return_value = True + assert detect_output_format() == OutputFormat.TABLE + + def test_returns_json_when_not_tty(self) -> None: + with patch("sys.stdout") as mock_stdout: + mock_stdout.isatty.return_value = False + assert detect_output_format() == OutputFormat.JSON + + +class TestGetFormatter: + def test_returns_table_formatter(self) -> None: + assert isinstance(get_formatter(OutputFormat.TABLE), TableFormatter) + + 
def test_returns_json_formatter(self) -> None: + assert isinstance(get_formatter(OutputFormat.JSON), JsonFormatter) + + def test_returns_csv_formatter(self) -> None: + assert isinstance(get_formatter(OutputFormat.CSV), CsvFormatter) + + def test_returns_yaml_formatter(self) -> None: + assert isinstance(get_formatter(OutputFormat.YAML), YamlFormatter) + + def test_raises_for_invalid_format(self) -> None: + with pytest.raises(ValueError, match="Unsupported output format"): + get_formatter("invalid") # type: ignore[arg-type] diff --git a/tests/unit/ctl/formatters/test_json.py b/tests/unit/ctl/formatters/test_json.py new file mode 100644 index 00000000..b7cc1caa --- /dev/null +++ b/tests/unit/ctl/formatters/test_json.py @@ -0,0 +1,201 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.json (JsonFormatter).""" + +from __future__ import annotations + +import json +from typing import TYPE_CHECKING, cast +from unittest.mock import MagicMock + +from infrahub_sdk.ctl.formatters.json import JsonFormatter + +if TYPE_CHECKING: + from infrahub_sdk.node import InfrahubNode + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. + """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. 
+ + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + node_id: The node ID. + display_label: The display label for the node. + + Returns: + MagicMock configured to behave like an InfrahubNode object. + """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + setattr(node, rel_name, rel) + return node + + +class TestJsonFormatterFormatList: + """Tests for JsonFormatter.format_list.""" + + def test_format_list_returns_valid_json(self) -> None: + """Test that format_list output is valid JSON.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_list([node], schema) + + parsed = json.loads(result) + assert isinstance(parsed, list) + + def test_format_list_contains_attribute_value(self) -> None: + """Test that format_list includes the node attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_list([node], schema) + + parsed = json.loads(result) + assert parsed[0]["name"] == "router1" + + def test_format_list_multiple_nodes(self) -> None: + """Test that format_list produces one array entry per node.""" + schema = _make_mock_schema(["name"], []) + nodes = [ + _make_mock_node({"name": "router1"}, {}, node_id="id-1"), + _make_mock_node({"name": "router2"}, {}, node_id="id-2"), + ] + formatter = JsonFormatter() + + result = formatter.format_list(cast("list[InfrahubNode]", nodes), schema) + + parsed = json.loads(result) + assert len(parsed) == 2 + + def test_format_list_empty_list_returns_json_array(self) -> None: + 
"""Test that format_list with an empty node list returns a JSON empty array.""" + schema = _make_mock_schema(["name"], []) + formatter = JsonFormatter() + + result = formatter.format_list([], schema) + + assert result.strip() == "[]" + + def test_format_list_includes_relationship_value(self) -> None: + """Test that format_list includes relationship display labels.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = JsonFormatter() + + result = formatter.format_list([node], schema) + + parsed = json.loads(result) + assert parsed[0]["site"] == "DC1" + + +class TestJsonFormatterFormatDetail: + """Tests for JsonFormatter.format_detail.""" + + def test_format_detail_returns_valid_json(self) -> None: + """Test that format_detail output is valid JSON.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert isinstance(parsed, dict) + + def test_format_detail_contains_id(self) -> None: + """Test that format_detail includes the node id field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, node_id="abc-123") + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert parsed["id"] == "abc-123" + + def test_format_detail_contains_display_label(self) -> None: + """Test that format_detail includes the display_label metadata field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, display_label="Router One") + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert parsed["display_label"] == "Router One" + + def test_format_detail_contains_kind(self) -> None: + """Test that format_detail includes the kind metadata field from 
schema.""" + schema = _make_mock_schema(["name"], [], kind="InfraDevice") + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert parsed["kind"] == "InfraDevice" + + def test_format_detail_contains_attribute_value(self) -> None: + """Test that format_detail includes attribute values nested under their name.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert parsed["name"]["value"] == "router1" + + def test_format_detail_contains_relationship(self) -> None: + """Test that format_detail includes relationship data.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = JsonFormatter() + + result = formatter.format_detail(node, schema) + + parsed = json.loads(result) + assert "site" in parsed + assert parsed["site"]["display_label"] == "DC1" diff --git a/tests/unit/ctl/formatters/test_table.py b/tests/unit/ctl/formatters/test_table.py new file mode 100644 index 00000000..967e9b23 --- /dev/null +++ b/tests/unit/ctl/formatters/test_table.py @@ -0,0 +1,210 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.table (TableFormatter).""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +from infrahub_sdk.ctl.formatters.table import TableFormatter + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. 
+ """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. + + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + node_id: The node ID. + display_label: The display label for the node. + + Returns: + MagicMock configured to behave like an InfrahubNode object. + """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + setattr(node, rel_name, rel) + return node + + +class TestTableFormatterFormatList: + """Tests for TableFormatter.format_list.""" + + def test_format_list_returns_string(self) -> None: + """Test that format_list returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert isinstance(result, str) + + def test_format_list_contains_attribute_column_header(self) -> None: + """Test that format_list output includes attribute names as column headers.""" + schema = _make_mock_schema(["name", "status"], []) + node = _make_mock_node({"name": "router1", "status": "active"}, {}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "name" in result + assert "status" in result + + def 
test_format_list_contains_relationship_column_header(self) -> None: + """Test that format_list output includes relationship names as column headers.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "site" in result + + def test_format_list_contains_attribute_value(self) -> None: + """Test that format_list output includes node attribute values.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "router1" in result + + def test_format_list_contains_relationship_value(self) -> None: + """Test that format_list output includes relationship display labels.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = TableFormatter() + + result = formatter.format_list([node], schema) + + assert "DC1" in result + + def test_format_list_multiple_nodes(self) -> None: + """Test that format_list renders all nodes.""" + schema = _make_mock_schema(["name"], []) + node1 = _make_mock_node({"name": "router1"}, {}, node_id="id-1") + node2 = _make_mock_node({"name": "router2"}, {}, node_id="id-2") + formatter = TableFormatter() + + result = formatter.format_list([node1, node2], schema) + + assert "router1" in result + assert "router2" in result + + +class TestTableFormatterFormatDetail: + """Tests for TableFormatter.format_detail.""" + + def test_format_detail_returns_string(self) -> None: + """Test that format_detail returns a string.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert isinstance(result, str) + + def test_format_detail_contains_field_column_header(self) -> None: + """Test 
that format_detail output includes the Field column header.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "Field" in result + + def test_format_detail_contains_value_column_header(self) -> None: + """Test that format_detail output includes the Value column header.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "Value" in result + + def test_format_detail_contains_id_field(self) -> None: + """Test that format_detail output includes the id metadata field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, node_id="abc-123") + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "id" in result + assert "abc-123" in result + + def test_format_detail_contains_display_label_field(self) -> None: + """Test that format_detail output includes the display_label metadata field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}, display_label="Router One") + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "display_label" in result + assert "Router One" in result + + def test_format_detail_contains_attribute_name_and_value(self) -> None: + """Test that format_detail includes attribute field names and values.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "name" in result + assert "router1" in result + + def test_format_detail_contains_relationship_name_and_value(self) -> None: + """Test that format_detail includes relationship field names and display labels.""" + schema = 
_make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = TableFormatter() + + result = formatter.format_detail(node, schema) + + assert "site" in result + assert "DC1" in result diff --git a/tests/unit/ctl/formatters/test_yaml.py b/tests/unit/ctl/formatters/test_yaml.py new file mode 100644 index 00000000..e0a66268 --- /dev/null +++ b/tests/unit/ctl/formatters/test_yaml.py @@ -0,0 +1,342 @@ +"""Unit tests for infrahub_sdk.ctl.formatters.yaml (YamlFormatter).""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +import yaml # pyright: ignore[reportMissingModuleSource] + +from infrahub_sdk.ctl.formatters.yaml import YamlFormatter + + +def _make_mock_schema( + attr_names: list[str], + rel_names: list[str], + kind: str = "TestKind", +) -> MagicMock: + """Build a minimal schema mock with the given attribute and relationship names. + + Args: + attr_names: List of attribute names. + rel_names: List of relationship names. + kind: Schema kind string. + + Returns: + MagicMock configured to behave like a MainSchemaTypesAPI object. + """ + schema = MagicMock() + schema.kind = kind + schema.attribute_names = attr_names + schema.relationship_names = rel_names + for _name in rel_names: + rel = MagicMock() + rel.cardinality = "one" + schema.get_relationship.return_value = rel + return schema + + +def _make_mock_node( + attr_values: dict[str, object], + rel_values: dict[str, str], + node_id: str = "test-id", + display_label: str = "Test", +) -> MagicMock: + """Build a minimal node mock with the given attribute and relationship values. + + Args: + attr_values: Mapping of attribute name to value. + rel_values: Mapping of relationship name to display_label string. + The display_label is also used as a single-component HFID. + node_id: The node ID. + display_label: The display label for the node. + + Returns: + MagicMock configured to behave like an InfrahubNode object. 
+ """ + node = MagicMock() + node.id = node_id + node.display_label = display_label + for attr_name, value in attr_values.items(): + attr = MagicMock() + attr.value = value + setattr(node, attr_name, attr) + for rel_name, label in rel_values.items(): + rel = MagicMock() + rel.display_label = label + rel.id = f"{rel_name}-id" + rel.hfid = [label] if label else None + setattr(node, rel_name, rel) + return node + + +class TestYamlFormatterFormatList: + """Tests for YamlFormatter.format_list.""" + + def test_format_list_produces_valid_yaml(self) -> None: + """Test that format_list output can be parsed as valid YAML.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert isinstance(parsed, dict) + + def test_format_list_contains_api_version(self) -> None: + """Test that format_list output contains the infrahub apiVersion field.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert parsed["apiVersion"] == "infrahub.app/v1" + + def test_format_list_contains_kind_object(self) -> None: + """Test that format_list output has kind set to Object.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert parsed["kind"] == "Object" + + def test_format_list_spec_kind_matches_schema(self) -> None: + """Test that spec.kind matches the schema kind.""" + schema = _make_mock_schema(["name"], [], kind="InfraDevice") + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert 
parsed["spec"]["kind"] == "InfraDevice" + + def test_format_list_spec_data_is_list(self) -> None: + """Test that spec.data is a list.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert isinstance(parsed["spec"]["data"], list) + + def test_format_list_data_contains_attribute_value(self) -> None: + """Test that spec.data entries contain the attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_list([node], schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["name"] == "router1" + + def test_format_list_data_one_entry_per_node(self) -> None: + """Test that spec.data contains one entry per node.""" + schema = _make_mock_schema(["name"], []) + node1 = _make_mock_node({"name": "router1"}, {}, node_id="id-1") + node2 = _make_mock_node({"name": "router2"}, {}, node_id="id-2") + formatter = YamlFormatter() + + result = formatter.format_list([node1, node2], schema) + + parsed = yaml.safe_load(result) + assert len(parsed["spec"]["data"]) == 2 + + def test_format_list_starts_with_document_separator(self) -> None: + """Test that the YAML output starts with the --- document separator.""" + schema = _make_mock_schema(["name"], []) + formatter = YamlFormatter() + + result = formatter.format_list([], schema) + + assert result.startswith("---") + + +class TestYamlFormatterFormatDetail: + """Tests for YamlFormatter.format_detail.""" + + def test_format_detail_produces_valid_yaml(self) -> None: + """Test that format_detail output can be parsed as valid YAML.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + 
assert isinstance(parsed, dict) + + def test_format_detail_spec_data_has_single_entry(self) -> None: + """Test that format_detail produces exactly one entry in spec.data.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert len(parsed["spec"]["data"]) == 1 + + def test_format_detail_data_entry_contains_attribute(self) -> None: + """Test that the single spec.data entry contains the attribute value.""" + schema = _make_mock_schema(["name"], []) + node = _make_mock_node({"name": "router1"}, {}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["name"] == "router1" + + def test_format_detail_relationship_uses_display_label(self) -> None: + """Test that relationship values are stored as display_label strings.""" + schema = _make_mock_schema(["name"], ["site"]) + node = _make_mock_node({"name": "router1"}, {"site": "DC1"}) + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["site"] == "DC1" + + +class TestYamlFormatterEdgeCases: + """Edge case tests for YamlFormatter._node_to_data_entry.""" + + def test_null_attribute_omitted(self) -> None: + """Attributes with None values are omitted from the output.""" + schema = _make_mock_schema(["name", "desc"], []) + node = MagicMock() + name_attr = MagicMock() + name_attr.value = "router1" + node.name = name_attr + desc_attr = MagicMock() + desc_attr.value = None + node.desc = desc_attr + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + entry = parsed["spec"]["data"][0] + assert entry["name"] == "router1" + assert "desc" not in entry + + def test_empty_string_attribute_omitted(self) -> None: + 
"""Attributes with empty string values are omitted.""" + schema = _make_mock_schema(["name", "desc"], []) + node = MagicMock() + name_attr = MagicMock() + name_attr.value = "router1" + node.name = name_attr + desc_attr = MagicMock() + desc_attr.value = "" + node.desc = desc_attr + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert "desc" not in parsed["spec"]["data"][0] + + def test_zero_attribute_preserved(self) -> None: + """Numeric zero is a valid value and must not be omitted.""" + schema = _make_mock_schema(["count"], []) + node = MagicMock() + attr = MagicMock() + attr.value = 0 + node.count = attr + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["count"] == 0 + + def test_false_attribute_preserved(self) -> None: + """Boolean False is a valid value and must not be omitted.""" + schema = _make_mock_schema(["enabled"], []) + node = MagicMock() + attr = MagicMock() + attr.value = False + node.enabled = attr + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["enabled"] is False + + def test_rel_cardinality_one_unset_omitted(self) -> None: + """Cardinality-one relationship with no display_label or hfid is omitted.""" + schema = _make_mock_schema([], ["site"]) + node = MagicMock() + rel = MagicMock() + rel.display_label = None + rel.hfid = None + node.site = rel + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert "site" not in parsed["spec"]["data"][0] + + def test_rel_cardinality_many_empty_peers_omitted(self) -> None: + """Cardinality-many with no peers is omitted from output.""" + schema = MagicMock() + schema.kind = "TestKind" + schema.attribute_names = [] + schema.relationship_names = ["tags"] + rel_schema = MagicMock() + 
rel_schema.cardinality = "many" + schema.get_relationship.return_value = rel_schema + + node = MagicMock() + rel_manager = MagicMock() + rel_manager.peers = [] + node.tags = rel_manager + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert "tags" not in parsed["spec"]["data"][0] + + def test_rel_cardinality_many_with_peers_uses_hfid(self) -> None: + """Cardinality-many peers use HFID when available.""" + schema = MagicMock() + schema.kind = "TestKind" + schema.attribute_names = [] + schema.relationship_names = ["tags"] + rel_schema = MagicMock() + rel_schema.cardinality = "many" + schema.get_relationship.return_value = rel_schema + + node = MagicMock() + peer1 = MagicMock(display_label="tag1", hfid=["tag1"]) + peer2 = MagicMock(display_label="tag2", hfid=["tag2"]) + rel_manager = MagicMock() + rel_manager.peers = [peer1, peer2] + node.tags = rel_manager + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["tags"] == {"data": ["tag1", "tag2"]} + + def test_rel_multi_component_hfid(self) -> None: + """Multi-component HFID renders as a list.""" + schema = _make_mock_schema([], ["platform"]) + node = MagicMock() + rel = MagicMock() + rel.display_label = "Cisco NX-OS" + rel.hfid = ["Cisco", "NX-OS"] + node.platform = rel + formatter = YamlFormatter() + + result = formatter.format_detail(node, schema) + parsed = yaml.safe_load(result) + assert parsed["spec"]["data"][0]["platform"] == ["Cisco", "NX-OS"] diff --git a/tests/unit/ctl/test_parsers.py b/tests/unit/ctl/test_parsers.py new file mode 100644 index 00000000..02850802 --- /dev/null +++ b/tests/unit/ctl/test_parsers.py @@ -0,0 +1,183 @@ +"""Unit tests for infrahub_sdk.ctl.parsers.""" + +from __future__ import annotations + +import pytest +import typer + +from infrahub_sdk.ctl.parsers import parse_filter_args, parse_set_args, validate_set_fields + + 
class TestCoerceValue:
    """Tests for scalar type coercion performed by parse_set_args.

    parse_set_args is expected to coerce the textual right-hand side of each
    ``key=value`` pair into int, float, bool, or None, and to leave any other
    value as a plain string.
    """

    def test_integer(self) -> None:
        """An integer-looking value is coerced to int."""
        result = parse_set_args(["count=42"])
        assert result["count"] == 42
        assert isinstance(result["count"], int)

    def test_negative_integer(self) -> None:
        """A negative integer is coerced, sign included."""
        result = parse_set_args(["offset=-10"])
        assert result["offset"] == -10

    def test_float(self) -> None:
        """A decimal value is coerced to float."""
        result = parse_set_args(["ratio=2.5"])
        assert result["ratio"] == 2.5
        assert isinstance(result["ratio"], float)

    def test_bool_true(self) -> None:
        """true/True/yes all coerce to boolean True."""
        assert parse_set_args(["flag=true"])["flag"] is True
        assert parse_set_args(["flag=True"])["flag"] is True
        assert parse_set_args(["flag=yes"])["flag"] is True

    def test_bool_false(self) -> None:
        """false/False/no all coerce to boolean False."""
        assert parse_set_args(["flag=false"])["flag"] is False
        assert parse_set_args(["flag=False"])["flag"] is False
        assert parse_set_args(["flag=no"])["flag"] is False

    def test_null(self) -> None:
        """null/none/None all coerce to None."""
        assert parse_set_args(["field=null"])["field"] is None
        assert parse_set_args(["field=none"])["field"] is None
        assert parse_set_args(["field=None"])["field"] is None

    def test_string_passthrough(self) -> None:
        """A value that matches no coercion rule stays a string."""
        assert parse_set_args(["name=router1"])["name"] == "router1"

    def test_string_with_spaces(self) -> None:
        """Spaces inside the value are preserved."""
        assert parse_set_args(["name=my device"])["name"] == "my device"

    def test_leading_zero_stays_string(self) -> None:
        """A value with a leading zero is not treated as an integer."""
        result = parse_set_args(["code=00123"])
        assert result["code"] == "00123"
        assert isinstance(result["code"], str)

    def test_empty_string(self) -> None:
        """An empty value stays an empty string, not None."""
        result = parse_set_args(["name="])
        # Assert the exact value: a bare `assert not result["name"]` would
        # also pass for None, 0, or b"", hiding a coercion regression.
        assert result["name"] == ""


class TestParseSetArgs:
    """Tests for parse_set_args key=value splitting and validation."""

    def test_single_key_value_pair(self) -> None:
        """A single valid key=value argument produces a one-entry dict."""
        result = parse_set_args(["name=router1"])
        assert result == {"name": "router1"}

    def test_multiple_key_value_pairs(self) -> None:
        """Multiple valid key=value arguments merge into one dict."""
        result = parse_set_args(["name=router1", "status=active"])
        assert result == {"name": "router1", "status": "active"}

    def test_value_containing_equals_sign(self) -> None:
        """Only the first = is used as the split point."""
        result = parse_set_args(["description=a=b=c"])
        assert result == {"description": "a=b=c"}

    def test_numeric_value_coerced(self) -> None:
        """Numeric string values are coerced to int/float."""
        result = parse_set_args(["height=190"])
        assert result == {"height": 190}
        assert isinstance(result["height"], int)

    def test_empty_list(self) -> None:
        """An empty argument list returns an empty dict."""
        result = parse_set_args([])
        assert result == {}

    def test_missing_equals_raises_bad_parameter(self) -> None:
        """BadParameter is raised when = is absent from an argument."""
        with pytest.raises(typer.BadParameter, match="Invalid format"):
            parse_set_args(["nameonly"])

    def test_empty_key_raises_bad_parameter(self) -> None:
        """BadParameter is raised when the key is empty."""
        with pytest.raises(typer.BadParameter, match="Key must not be empty"):
            parse_set_args(["=value"])

    def test_whitespace_only_key_raises_bad_parameter(self) -> None:
        """BadParameter is raised when the key is only whitespace."""
        with pytest.raises(typer.BadParameter, match="Key must not be empty"):
            parse_set_args([" =value"])

    def test_value_can_be_empty_string(self) -> None:
        """An empty string value is accepted and preserved."""
        result = parse_set_args(["name="])
        assert result == {"name": ""}


class TestParseFilterArgs:
    """Tests for parse_filter_args.

    Filter values are kept as raw strings; no scalar coercion is asserted
    here, unlike parse_set_args.
    """

    def test_single_filter_argument(self) -> None:
        """A single valid filter argument produces a one-entry dict."""
        result = parse_filter_args(["name__value=router1"])
        assert result == {"name__value": "router1"}

    def test_multiple_filter_arguments(self) -> None:
        """Multiple valid filter arguments merge into one dict."""
        result = parse_filter_args(["name__value=router1", "status__value=active"])
        assert result == {"name__value": "router1", "status__value": "active"}

    def test_empty_list(self) -> None:
        """An empty argument list returns an empty dict."""
        result = parse_filter_args([])
        assert result == {}

    def test_missing_equals_raises_bad_parameter(self) -> None:
        """BadParameter is raised when = is absent from a filter."""
        with pytest.raises(typer.BadParameter, match="Invalid format"):
            parse_filter_args(["name__value"])

    def test_value_containing_equals_sign(self) -> None:
        """Only the first = splits the filter argument."""
        result = parse_filter_args(["description__value=x=y"])
        assert result == {"description__value": "x=y"}


class TestValidateSetFields:
    """Tests for validate_set_fields schema-field validation."""

    def test_all_attribute_fields_valid(self) -> None:
        """Validation passes when all keys are valid attribute names."""
        data = {"name": "router1", "status": "active"}
        validate_set_fields(data, attribute_names=["name", "status"], relationship_names=[])

    def test_all_relationship_fields_valid(self) -> None:
        """Validation passes when all keys are valid relationship names."""
        data = {"site": "dc1"}
        validate_set_fields(data, attribute_names=[], relationship_names=["site"])

    def test_mixed_attribute_and_relationship_fields_valid(self) -> None:
        """Validation passes with a mix of attribute and relationship keys."""
        data = {"name": "router1", "site": "dc1"}
        validate_set_fields(data, attribute_names=["name"], relationship_names=["site"])

    def test_empty_data_passes(self) -> None:
        """Validation passes when data is empty."""
        validate_set_fields({}, attribute_names=["name"], relationship_names=["site"])

    def test_unknown_field_raises_bad_parameter(self) -> None:
        """BadParameter is raised for an unknown field."""
        data = {"unknown_field": "value"}
        with pytest.raises(typer.BadParameter, match="Unknown field"):
            validate_set_fields(data, attribute_names=["name"], relationship_names=["site"])

    def test_error_message_lists_invalid_field(self) -> None:
        """The error message includes the invalid field name."""
        data = {"bogus": "value"}
        with pytest.raises(typer.BadParameter, match="bogus"):
            validate_set_fields(data, attribute_names=["name"], relationship_names=[])

    def test_error_message_lists_valid_fields(self) -> None:
        """The error message includes the list of valid fields."""
        data = {"bogus": "value"}
        with pytest.raises(typer.BadParameter, match="name"):
            validate_set_fields(data, attribute_names=["name"], relationship_names=["site"])

    def test_multiple_unknown_fields_raises_bad_parameter(self) -> None:
        """BadParameter is raised when multiple fields are unknown.

        Only the first unknown key is asserted in the message; whether the
        error enumerates every unknown field is an implementation detail.
        """
        data = {"bad1": "x", "bad2": "y"}
        with pytest.raises(typer.BadParameter, match="bad1"):
            validate_set_fields(data, attribute_names=["name"], relationship_names=[])